[
  {
    "path": ".github/workflows/bump-version.js",
    "content": "const fs = require(\"fs\");\nconst os = require(\"os\");\nconst path = require(\"path\");\nconst semver = require(\"semver\");\n\nconst latestPublish = process.argv[2];\n\nconst packageJson = fs.readFileSync(path.join(\"./\", \"package.json\"), {\n  encoding: \"utf-8\",\n});\n\nlet release = JSON.parse(packageJson).version;\n\nlet newVersion = latestPublish;\n\n// If the main release gets a major bump but did not get published yet, the package.json version\n// will be higher than the one retrieved from the marketplace, so we need to increment from the main release\n// E.g. package.json gets bumped from 1.6.0 -> 2.0.0\nif (semver.major(release) - semver.major(latestPublish) === 1) {\n  newVersion = semver.inc(release, \"minor\", semver.rel);\n}\n// A prepublished version must be one minor higher than a regular published version.\n// E.g. if package.json has version 1.3.0 and there is no prepublished version yet,\n// increment minor by one -> 1.4.0.\nelse if (semver.minor(latestPublish) === semver.minor(release)) {\n  newVersion = semver.inc(newVersion, \"minor\", semver.rel);\n}\n// Increment the version patch. E.g. if we fetch version 1.4.0 as the latest pre-release,\n// increment patch by one -> 1.4.1.\nelse if (semver.minor(latestPublish) > semver.minor(release)) {\n  newVersion = semver.inc(newVersion, \"patch\", semver.rel);\n}\n// If the main release gets a minor bump but did not get published yet, the package.json version\n// will be higher than the one retrieved from the marketplace, so we need to increment from the main release\n// E.g. 
package.json gets bumped from 1.5.0 -> 1.6.0\nelse if (semver.minor(release) - semver.minor(latestPublish) === 1) {\n  newVersion = semver.inc(release, \"minor\", semver.rel);\n}\n// Otherwise throw an error, because the pre-release version should always be just one\n// minor higher than the release version.\nelse {\n  throw new Error(\n    \"Version number minors are more than off by one, check package.json and (pre-)published versions manually.\",\n  );\n}\n\nif (!semver.valid(newVersion)) {\n  throw new Error(`Invalid version string: ${newVersion}`);\n}\n\nfs.appendFileSync(\n  process.env.GITHUB_OUTPUT,\n  `new_version=${newVersion}${os.EOL}`,\n);\n"
  },
  {
    "path": ".github/workflows/ci.yml",
    "content": "name: CI\n\non:\n  push:\n    branches: [master]\n    tags: \"*.*.*\"\n  pull_request:\n    branches: [master]\n\nconcurrency:\n  group: ${{ github.ref }}\n  cancel-in-progress: true\n\njobs:\n  build:\n    strategy:\n      fail-fast: false\n      matrix:\n        os: [\n            macos-15-intel, # x64\n            macos-15, # ARM\n            ubuntu-24.04, # x64\n            ubuntu-24.04-arm, # ARM\n            windows-latest,\n          ]\n        # syntax explanation:\n        # https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#example-including-additional-values-into-combinations\n        include:\n          - os: macos-15-intel\n            ocaml-compiler: 5.2.1\n            dune-profile: release\n            artifact-folder: darwin\n          - os: macos-15\n            ocaml-compiler: 5.2.1\n            dune-profile: release\n            artifact-folder: darwinarm64\n          - os: ubuntu-24.04\n            ocaml-compiler: ocaml-variants.5.2.1+options,ocaml-option-static\n            dune-profile: static\n            artifact-folder: linux\n          - os: ubuntu-24.04-arm\n            ocaml-compiler: ocaml-variants.5.2.1+options,ocaml-option-static\n            dune-profile: static\n            artifact-folder: linuxarm64\n          - os: windows-latest\n            ocaml-compiler: 5.2.1\n            dune-profile: release\n            artifact-folder: win32\n\n    runs-on: ${{matrix.os}}\n\n    steps:\n      # needed for Windows testing\n      - name: Set git to use LF\n        run: |\n          git config --global core.autocrlf false\n          git config --global core.eol lf\n\n      - uses: actions/checkout@v4\n\n      - name: Cache OCaml's opam\n        uses: actions/cache@v4\n        with:\n          path: ~/.opam\n          key: ${{matrix.os}}-rescript-vscode-v5\n\n      - name: Use OCaml\n        uses: ocaml/setup-ocaml@v3\n        with:\n          ocaml-compiler: ${{matrix.ocaml-compiler}}\n          
opam-pin: false\n\n      - name: Use Node.js\n        uses: actions/setup-node@v4\n        with:\n          node-version: 20\n          registry-url: \"https://registry.npmjs.org\"\n\n      - run: npm ci\n      - run: npm install --include=optional\n      - run: opam install dune cppo\n      - run: npm run compile\n      - run: npm run bundle\n\n      # These 2 runs (or just the second?) are for when you have opam dependencies. We don't.\n      # Don't add deps. But if you ever do, un-comment these and add an .opam file.\n      # - run: opam pin add rescript-editor-analysis.dev . --no-action\n      # - run: opam install . --deps-only --with-doc --with-test\n\n      - name: Set dune profile according to platform\n        run: echo \"DUNE_PROFILE=${{ matrix.dune-profile }}\" >> $GITHUB_ENV\n\n      - name: Install ocamlformat\n        run: opam install ocamlformat.0.27.0\n\n      - name: Format check\n        run: opam exec -- make checkformat\n\n      - name: Build\n        run: opam exec -- make\n\n      - name: Test\n        run: opam exec -- make test\n\n      - name: Strip binaries\n        if: matrix.os != 'windows-latest'\n        run: strip rescript-editor-analysis.exe rescript-tools.exe\n\n      # Also avoids artifacts upload permission loss:\n      # https://github.com/actions/upload-artifact/tree/ee69f02b3dfdecd58bb31b4d133da38ba6fe3700#permission-loss\n      - name: Compress files\n        run: |\n          mkdir ${{matrix.artifact-folder}}\n          mv rescript-editor-analysis.exe ${{matrix.artifact-folder}}\n          mv rescript-tools.exe ${{matrix.artifact-folder}}\n          tar -cvf binary.tar ${{matrix.artifact-folder}}\n\n      - name: Upload binaries\n        uses: actions/upload-artifact@v4\n        with:\n          name: ${{matrix.artifact-folder}}\n          path: binary.tar\n\n      - name: Upload platform bindings\n        if: always()\n        uses: actions/upload-artifact@v4\n        with:\n          name: 
bindings-${{matrix.artifact-folder}}\n          path: node_modules/@oxc-parser/\n          retention-days: 1\n\n  package:\n    needs:\n      - build\n    runs-on: ubuntu-24.04\n\n    steps:\n      - uses: actions/checkout@v4\n\n      - name: Use Node.js\n        uses: actions/setup-node@v4\n        with:\n          node-version: 20\n          registry-url: \"https://registry.npmjs.org\"\n\n      - run: npm ci\n      - run: npm install --include=optional\n      - run: npm run compile\n\n      - name: Download MacOS binaries\n        uses: actions/download-artifact@v4\n        with:\n          name: darwin\n          path: binaries\n      - run: tar -xvf binary.tar\n        working-directory: binaries\n\n      - name: Download MacOS ARM binaries\n        uses: actions/download-artifact@v4\n        with:\n          name: darwinarm64\n          path: binaries\n      - run: tar -xvf binary.tar\n        working-directory: binaries\n\n      - name: Download Linux binaries\n        uses: actions/download-artifact@v4\n        with:\n          name: linux\n          path: binaries\n      - run: tar -xvf binary.tar\n        working-directory: binaries\n\n      - name: Download Linux ARM binaries\n        uses: actions/download-artifact@v4\n        with:\n          name: linuxarm64\n          path: binaries\n      - run: tar -xvf binary.tar\n        working-directory: binaries\n\n      - name: Download Windows binaries\n        uses: actions/download-artifact@v4\n        with:\n          name: win32\n          path: binaries\n      - run: tar -xvf binary.tar\n        working-directory: binaries\n\n      - name: Cleanup tar file\n        run: rm binary.tar\n        working-directory: binaries\n\n      - name: Download platform bindings from all platforms\n        uses: actions/download-artifact@v4\n        with:\n          pattern: bindings-*\n          path: bindings\n          merge-multiple: true\n\n      - name: Move binaries to folders\n        run: |\n          declare 
-a platforms=(\"darwin\" \"darwinarm64\" \"linux\" \"linuxarm64\" \"win32\")\n\n          for platform in \"${platforms[@]}\"; do\n              mkdir server/analysis_binaries/\"$platform\"\n              mv binaries/\"$platform\"/rescript-editor-analysis.exe server/analysis_binaries/\"$platform\"\n          done\n\n          for platform in \"${platforms[@]}\"; do\n              mkdir tools/binaries/\"$platform\"\n              mv binaries/\"$platform\"/rescript-tools.exe tools/binaries/\"$platform\"\n          done\n\n      - name: Merge platform bindings into node_modules\n        run: |\n          mkdir -p node_modules/@oxc-parser\n          # Copy all bindings from downloaded artifacts\n          if [ -d \"bindings\" ]; then\n            find bindings -type d -name \"binding-*\" -exec cp -r {} node_modules/@oxc-parser/ \\;\n          fi\n          # Ensure we have the Linux binding from current platform\n          npm install --include=optional || true\n\n      - name: Store short commit SHA for filename\n        id: vars\n        env:\n          COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}\n        run: echo \"sha_short=${COMMIT_SHA:0:7}\" >> $GITHUB_OUTPUT\n\n      - name: Store tag name\n        id: tag_name\n        if: startsWith(github.ref, 'refs/tags/')\n        run: echo \"tag=${GITHUB_REF#refs/*/}\" >> $GITHUB_OUTPUT\n\n      - name: Increment pre-release version\n        if: github.ref == 'refs/heads/master'\n        id: increment_pre_release\n        run: |\n          JSON=$(npx vsce show chenglou92.rescript-vscode --json)\n          NEW_VERSION=$(echo $JSON | jq -r '.versions | .[0] | .[\"version\"]')\n          node .github/workflows/bump-version.js ${NEW_VERSION}\n\n      - name: Package Extension\n        if: github.ref != 'refs/heads/master'\n        run: npx vsce package --no-yarn -o rescript-vscode-${{ steps.vars.outputs.sha_short }}.vsix\n\n      - name: Package Extension pre-release version\n        if: github.ref == 
'refs/heads/master'\n        run: npx vsce package --no-yarn -o rescript-vscode-latest-master.vsix ${{ steps.increment_pre_release.outputs.new_version }} --no-git-tag-version\n\n      - name: Package Extension release version\n        if: startsWith(github.ref, 'refs/tags/')\n        run: npx vsce package --no-yarn -o rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix ${{ steps.tag_name.outputs.tag }} --no-git-tag-version\n\n      - name: Verify Package Contents\n        run: npm run verify-package\n\n      - uses: actions/upload-artifact@v4\n        if: github.ref != 'refs/heads/master'\n        with:\n          name: rescript-vscode-${{ steps.vars.outputs.sha_short }}.vsix\n          path: rescript-vscode-${{ steps.vars.outputs.sha_short }}.vsix\n\n      - uses: actions/upload-artifact@v4\n        if: github.ref == 'refs/heads/master'\n        with:\n          name: rescript-vscode-latest-master.vsix\n          path: rescript-vscode-latest-master.vsix\n\n      - uses: actions/upload-artifact@v4\n        if: startsWith(github.ref, 'refs/tags/')\n        with:\n          name: rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix\n          path: rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix\n\n      - name: Move latest-master tag to current commit\n        if: github.ref == 'refs/heads/master'\n        run: |\n          git tag -d latest-master || true\n          git push origin --delete latest-master || true\n          git tag latest-master\n          git push origin latest-master\n\n      - name: Publish latest master to GitHub\n        if: github.ref == 'refs/heads/master'\n        uses: softprops/action-gh-release@v2\n        with:\n          token: \"${{ secrets.GITHUB_TOKEN }}\"\n          tag_name: \"latest-master\"\n          prerelease: true\n          generate_release_notes: true\n          name: \"Latest master\"\n          files: rescript-vscode-latest-master.vsix\n\n      - name: Generate release notes from changelog\n        if: 
startsWith(github.ref, 'refs/tags/')\n        run: |\n          sed -e \"/^## ${{ steps.tag_name.outputs.tag }}/,/^## / ! d\" CHANGELOG.md | head -n -2 > RELEASE.md\n\n      - name: Publish release version to GitHub\n        if: startsWith(github.ref, 'refs/tags/')\n        uses: softprops/action-gh-release@v2\n        with:\n          token: \"${{ secrets.GITHUB_TOKEN }}\"\n          prerelease: false\n          body_path: RELEASE.md\n          name: ${{ steps.tag_name.outputs.tag }}\n          files: rescript-vscode-${{ steps.tag_name.outputs.tag }}.vsix\n\n      - name: Publish extension as pre-release\n        if: github.ref == 'refs/heads/master' && !startsWith(github.event.head_commit.message, 'publish tools')\n        run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --pre-release ${{ steps.increment_pre_release.outputs.new_version }} --no-git-tag-version\n\n      - name: Publish extension as release\n        if: startsWith(github.ref, 'refs/tags/')\n        run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} ${{ steps.tag_name.outputs.tag }} --no-git-tag-version\n\n      - name: Publish LSP as pre-release to NPM\n        if: github.ref == 'refs/heads/master'\n        working-directory: server\n        run: |\n          npm version preminor --preid next-$(git rev-parse --short HEAD)\n          npm publish --access public --tag next\n        env:\n          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}\n\n      - name: Publish LSP to NPM\n        if: startsWith(github.ref, 'refs/tags/')\n        working-directory: server\n        run: npm publish --access public\n        env:\n          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}\n\n      - name: Build @rescript/tools package\n        working-directory: tools\n        run: |\n          npm ci\n          npm run build\n\n      - name: Publish @rescript/tools package\n        if: ${{ startsWith(github.event.head_commit.message, 'publish tools') && (github.ref == 'refs/heads/master') }}\n        
working-directory: tools\n        run: npm publish --access public\n        env:\n          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}\n"
  },
  {
    "path": ".gitignore",
    "content": ".DS_Store\nout\nnode_modules\nexamples/*/lib\n\nanalysis/tests/lib\nanalysis/tests/.bsb.lock\n\nanalysis/tests-generic-jsx-transform/lib\nanalysis/tests-generic-jsx-transform/.bsb.lock\n\nanalysis/tests-incremental-typechecking/lib\nanalysis/tests-incremental-typechecking/.bsb.lock\n\ntools/node_modules\ntools/lib\ntools/**/*.res.js\ntools/tests/node_modules\ntools/tests/lib\n\nrescript-editor-analysis.exe\nrescript-tools.exe\n\n_opam/\n_build/\n\n*.tsbuildinfo\n\n# VSCode test downloads\n.vscode-test/"
  },
  {
    "path": ".ocamlformat",
    "content": "profile = default\nversion = 0.27.0\n\nfield-space = tight-decl\nbreak-cases = toplevel\nmodule-item-spacing = preserve\ncases-exp-indent = 2\nspace-around-arrays = false\nspace-around-lists = false\nspace-around-records = false\nspace-around-variants = false\n"
  },
  {
    "path": ".ocamlformat-ignore",
    "content": "analysis/vendor/js_parser/**\nanalysis/vendor/ml/cmt_format.ml\nanalysis/vendor/ml/parser.ml\nanalysis/vendor/ml/pprintast.ml\nanalysis/vendor/ext/bs_hash_stubs.ml\nanalysis/vendor/ext/js_reserved_map.ml\nanalysis/vendor/ext/ext_string.ml\nanalysis/vendor/ext/ext_string.mli\nanalysis/vendor/ext/ext_sys.ml\nanalysis/vendor/ext/hash.cppo.ml\nanalysis/vendor/ext/hash_set.cppo.ml\nanalysis/vendor/ext/map.cppo.ml\nanalysis/vendor/ext/ordered_hash_map.cppo.ml\nanalysis/vendor/ext/set.cppo.ml\nanalysis/vendor/ext/vec.cppo.ml\n**/node_modules/**"
  },
  {
    "path": ".prettierignore",
    "content": "# Ignore artifacts:\nserver/out\nanalysis/examples\nanalysis/reanalyze/examples\ntools/tests\n.history/"
  },
  {
    "path": ".prettierrc",
    "content": "{}\n"
  },
  {
    "path": ".vscode/extensions.json",
    "content": "{\n  // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.\n  // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp\n\n  // List of extensions which should be recommended for users of this workspace.\n  \"recommendations\": [\"dbaeumer.vscode-eslint\"]\n}\n"
  },
  {
    "path": ".vscode/launch.json",
    "content": "// A launch configuration that compiles the extension and then opens it inside a new window\n{\n  \"version\": \"0.2.0\",\n  \"configurations\": [\n    {\n      \"type\": \"extensionHost\",\n      \"request\": \"launch\",\n      \"name\": \"Launch Client\",\n      \"runtimeExecutable\": \"${execPath}\",\n      \"args\": [\"--extensionDevelopmentPath=${workspaceRoot}\"],\n      \"outFiles\": [\"${workspaceRoot}/client/out/**/*.js\"],\n      \"preLaunchTask\": {\n        \"type\": \"npm\",\n        \"script\": \"watch\"\n      }\n    },\n    {\n      \"type\": \"node\",\n      \"request\": \"attach\",\n      \"name\": \"Attach to Server\",\n      \"port\": 6009,\n      \"restart\": true,\n      \"outFiles\": [\"${workspaceRoot}/server/out/**/*.js\"]\n    },\n    {\n      \"name\": \"Language Server E2E Test\",\n      \"type\": \"extensionHost\",\n      \"request\": \"launch\",\n      \"runtimeExecutable\": \"${execPath}\",\n      \"args\": [\n        \"--extensionDevelopmentPath=${workspaceRoot}\",\n        \"--extensionTestsPath=${workspaceRoot}/client/out/test/index\",\n        \"${workspaceRoot}/client/testFixture\"\n      ],\n      \"outFiles\": [\"${workspaceRoot}/client/out/test/**/*.js\"]\n    }\n  ],\n  \"compounds\": [\n    {\n      \"name\": \"Client + Server\",\n      \"configurations\": [\"Launch Client\", \"Attach to Server\"]\n    }\n  ]\n}\n"
  },
  {
    "path": ".vscode/settings.json",
    "content": "{\n  \"editor.insertSpaces\": false,\n  \"tslint.enable\": true,\n  \"typescript.tsc.autoDetect\": \"off\",\n  \"typescript.preferences.quoteStyle\": \"single\",\n  \"editor.codeActionsOnSave\": {\n    \"source.fixAll.eslint\": \"explicit\"\n  },\n  \"ocaml.sandbox\": {\n    \"kind\": \"opam\",\n    \"switch\": \"${workspaceFolder:rescript-vscode}\"\n  }\n}\n"
  },
  {
    "path": ".vscode/tasks.json",
    "content": "{\n  \"version\": \"2.0.0\",\n  \"tasks\": [\n    {\n      \"type\": \"npm\",\n      \"script\": \"compile\",\n      \"group\": \"build\",\n      \"presentation\": {\n        \"panel\": \"dedicated\",\n        \"reveal\": \"never\"\n      },\n      \"problemMatcher\": [\"$tsc\"]\n    },\n    {\n      \"type\": \"npm\",\n      \"script\": \"watch\",\n      \"isBackground\": true,\n      \"group\": {\n        \"kind\": \"build\",\n        \"isDefault\": true\n      },\n      \"presentation\": {\n        \"panel\": \"dedicated\",\n        \"reveal\": \"never\"\n      },\n      \"problemMatcher\": [\"$tsc-watch\"]\n    }\n  ]\n}\n"
  },
  {
    "path": ".vscodeignore",
    "content": ".vscode/**\r\n**/*.ts\r\n**/*.map\r\n.gitignore\r\n**/tsconfig.json\r\ncontributing.md\r\n.github/**\r\nserver/node_modules/.bin\r\nnode_modules/.bin\r\nanalysis/\r\ntools/\r\nscripts/\r\ndune-project\r\nanalysis.opam\r\ntools.opam\r\n.ocamlformat\r\n.ocamlformat-ignore\r\nclient/node_modules\r\nserver/node_modules\r\n_opam\r\n_build\r\nMakefile\r\n"
  },
  {
    "path": "AGENTS.md",
    "content": "# AGENTS.md\n\nThis file provides guidance to AI coding assistants when working with code in this repository.\n\n## Project Overview\n\nThis is the official ReScript VSCode extension, providing language support for ReScript (.res/.resi files) in Visual Studio Code. The project uses a Language Server Protocol (LSP) architecture with a TypeScript client/server and native OCaml binaries for analysis.\n\n## Architecture\n\n### Key Components\n\n- **client/**: VSCode extension client (`client/src/extension.ts`) - handles UI, commands, and language client initialization\n- **server/**: Language Server (`server/src/server.ts`, `server/src/cli.ts`) - implements LSP features, communicates with ReScript compiler\n- **analysis/**: Native OCaml binary for code analysis, hover, autocomplete, and other language features. This is for older ReScript versions only (ReScript 11 and below). New features are usually only implemented in the rescript compiler monorepo.\n- **tools/**: ReScript tools binary for additional functionality like interface file generation. This is also for older ReScript versions only (ReScript 11 and below). New features are usually only implemented in the rescript compiler monorepo.\n- **grammars/**: TextMate grammar files for syntax highlighting\n- **snippets.json**: Code snippets for common ReScript patterns\n\n### Build System\n\nThe project uses:\n\n- **dune**: For building OCaml components (analysis & tools)\n- **esbuild**: For bundling TypeScript client/server\n- **npm**: For JavaScript/TypeScript dependencies\n- **TypeScript**: For type checking the client/server code\n\n## Development Commands\n\n### Initial Setup\n\n```bash\nnpm install                    # Install all dependencies including client/server\nopam switch 5.2.0             # Install OCaml switch (if not already done)\nopam install . 
--deps-only    # Install OCaml dependencies\n```\n\n### Building\n\n```bash\nmake build                    # Build OCaml binaries and copy to root\nnpm run compile               # Compile TypeScript (client & server)\nnpm run bundle                # Bundle for production (esbuild)\nnpm run vscode:prepublish     # Clean and bundle (used for publishing)\n```\n\n### Development\n\n```bash\nnpm run watch                 # Watch TypeScript compilation\nmake -C analysis test         # Run analysis tests\nmake -C tools/tests test      # Run tools tests\nmake test                     # Run all tests\n```\n\n### Code Quality\n\n```bash\nmake format                   # Format OCaml (dune) and JS/TS (prettier)\nmake checkformat              # Check formatting without modifying\nmake clean                    # Clean build artifacts\n```\n\n### Running the Extension in Development\n\n1. Open the project in VSCode\n2. Press F5 to launch a new VSCode window (Extension Development Host)\n3. Open a ReScript project to test the extension\n\n## Key Files\n\n### Configuration\n\n- `package.json`: Extension manifest, commands, settings, and scripts\n- `rescript.configuration.json`: Editor configuration for ReScript files\n- `client/src/extension.ts`: Extension entry point and client initialization\n- `server/src/server.ts`: Language server implementation\n- `server/src/cli.ts`: CLI entry point for the language server\n\n### OCaml Components\n\n- `analysis/`: Code analysis binary (hover, autocomplete, etc.)\n- `tools/`: ReScript tools binary (interface generation, etc.)\n\n### Language Features\n\n- **LSP Features**: hover, goto definition, find references, rename, autocomplete\n- **Code Analysis**: dead code detection, exception analysis (via reanalyze)\n- **Build Integration**: compile diagnostics, status indicators\n- **Commands**: interface creation, file switching, compiled JS opening\n\n## Testing\n\nThe project has several test suites:\n\n- `analysis/tests/`: Tests for the 
analysis binary\n- `tools/tests/`: Tests for ReScript tools\n- `analysis/tests-incremental-typechecking/`: Incremental typechecking tests\n- `analysis/tests-generic-jsx-transform/`: JSX transformation tests\n\n## Project Structure Notes\n\n- The extension supports both `.res` (implementation) and `.resi` (interface) files\n- Uses VSCode Language Client protocol for communication between client and server\n- Native binaries are cross-platform (darwin, linux, win32) and included in the extension. The rescript-editor-analysis is invoked by the LSP Server in a one-shot mode. Dumping JSON to stdout and the LSP picks that up.\n- Supports workspace configurations and monorepo structures\n- Incremental type checking can be enabled for better performance on large projects\n- As mentioned above the native OCaml binaries here are only here for backwards-compatibility with ReScript versions 11 or below. Since ReScript 12 both `analysis` and `tools` are part of the [ReScript compiler monorepo](https://github.com/rescript-lang/rescript), thus refrain from changing them too much (bugfixes that need to be ported are ok).\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# Changelog\n\n> **Tags:**\n>\n> - :boom: [Breaking Change]\n> - :eyeglasses: [Spec Compliance]\n> - :rocket: [New Feature]\n> - :bug: [Bug Fix]\n> - :memo: [Documentation]\n> - :house: [Internal]\n> - :nail_care: [Polish]\n\n## [Unreleased]\n\n#### :bug: Bug fix\n\n- Fix Code Analyzer cwd/binary lookup in monorepos (run from workspace root).\n- Fix monorepo build detection by only watching the workspace root `.compiler.log`.\n- Fix Start Build for ReScript v12+ projects by preferring `rescript.exe`.\n- Take namespace into account for incremental cleanup. https://github.com/rescript-lang/rescript-vscode/pull/1164\n- Potential race condition in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167\n- Fix extension crash triggered by incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1169\n- Fix file watchers on Windows when using WSL. https://github.com/rescript-lang/rescript-vscode/pull/1178\n\n#### :nail_care: Polish\n\n- Stale .compiler.log can still spill through in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/1167\n\n## 1.72.0\n\n#### :bug: Bug fix\n\n- Fix rewatch lockfile detection on Windows. https://github.com/rescript-lang/rescript-vscode/pull/1160\n- Override default `initialConfiguration` with user specific config. https://github.com/rescript-lang/rescript-vscode/pull/1162\n\n#### :nail_care: Polish\n\n- Resolve symlinks when finding platform binaries. https://github.com/rescript-lang/rescript-vscode/pull/1154\n- Use `window/logMessage` in LSP Server for logging. https://github.com/rescript-lang/rescript-vscode/pull/1162\n\n## 1.70.0\n\n#### :bug: Bug fix\n\n- Remove automatic paste provider that interfered with default paste behavior. Paste as ReScript JSON/JSX commands are now explicit commands only. 
https://github.com/rescript-lang/rescript-vscode/pull/1150\n- Only paste objects/arrays are JSON.t https://github.com/rescript-lang/rescript-vscode/pull/1148\n\n## 1.68.0\n\n#### :rocket: New Feature\n\n- Paste as JSON.t or ReScript JSX in VSCode. https://github.com/rescript-lang/rescript-vscode/pull/1141\n\n#### :bug: Bug fix\n\n- Pass RESCRIPT_RUNTIME to analysis process. https://github.com/rescript-lang/rescript-vscode/pull/1145\n\n## 1.66.0\n\n#### :bug: Bug fix\n\n- Protect against trying to read non-existant `.compiler.log`. https://github.com/rescript-lang/rescript-vscode/pull/1116\n- Detected quoted paths in bsb arguments on Windows. https://github.com/rescript-lang/rescript-vscode/pull/1120\n\n#### :rocket: New Feature\n\n- Add status bar item tracking compilation state. https://github.com/rescript-lang/rescript-vscode/pull/1119\n\n#### :house: Internal\n\n- Find `@rescript/runtime` for Rewatch compiler-args call. https://github.com/rescript-lang/rescript-vscode/pull/1125\n- Use `prepareRename` command (when a new enough ReScript version is used) to speed up the `rename` command. https://github.com/rescript-lang/rescript-vscode/pull/1124\n- Use `compiler-info.json` to find the `@rescript/runtime` and `bsc.exe` if available. https://github.com/rescript-lang/rescript-vscode/pull/1129\n- Add `Dump LSP Server State` command to client. https://github.com/rescript-lang/rescript-vscode/pull/1130\n- Use `compiler-info.json` to locate other binaries as well. https://github.com/rescript-lang/rescript-vscode/pull/1135\n- Detect Rewatch from workspace root. https://github.com/rescript-lang/rescript-vscode/pull/1135\n\n## 1.64.0\n\n#### :rocket: New Feature\n\n- Find `bsc.exe` and `rescript-code-editor-analysis.exe` from platform-specific packages used by ReScript `v12.0.0-alpha.13`+.https://github.com/rescript-lang/rescript-vscode/pull/1092\n- Find `rewatch.exe` from platform-specific packages used by ReScript `v12.0.0-alpha.13`+. 
https://github.com/rescript-lang/rescript-vscode/pull/1101\n\n#### :bug: Bug fix\n\n- Fix: Dont create empty diagnostic messages. https://github.com/rescript-lang/rescript-vscode/pull/1114\n\n- Fix: `rescript-editor-analysis.exe semanticTokens` sometimes returned invalid JSON, which affected syntax highlighting. https://github.com/rescript-lang/rescript-vscode/pull/1113\n\n- Fix: hang in `rescript-editor-analysis.exe codeAction` that sometimes prevented ReScript files from being saved in VS Code. https://github.com/rescript-lang/rescript-vscode/pull/1112\n\n- Fix: show existing compiler errors and warnings on file open. https://github.com/rescript-lang/rescript-vscode/pull/1103\n\n- Fix: bug where we incorrectly showed a warning notification about something going wrong with incremental type checking, when in fact the compiler was reporting module-related type errors https://github.com/rescript-lang/rescript-vscode/pull/1090\n\n- Fix: bug where we incorrectly showed a warning notification about something going wrong with incremental type checking, when in fact the compiler was reporting multiple definitions of the same type or module name https://github.com/rescript-lang/rescript-vscode/pull/1086\n\n- Fix: incorrect highlighting of `as` inside labelled arguments like `toast` https://github.com/rescript-lang/rescript-vscode/pull/1085\n\n- Fix: bug where incremental analysis does not work when the project folder contains a dot. https://github.com/rescript-lang/rescript-vscode/pull/1080\n\n- Fix: bug where incremental compilation crashes when rewatch is being run in a specific package vs the root of the monorepo. https://github.com/rescript-lang/rescript-vscode/pull/1082\n\n- Fix: Absence of Node.js does not hinder LSP server. 
https://github.com/rescript-lang/rescript-vscode/pull/1083\n\n- Fix: JSON from `rescript-code-editor-analysis` was not always escaped properly, which prevented code actions from being available in certain situations https://github.com/rescript-lang/rescript-vscode/pull/1089\n\n#### :house: Internal\n\n- Find binary paths asynchronously. On `>=12.0.0-alpha.13` we do this by dynamically importing the `@rescript/{target}` package in the project root. https://github.com/rescript-lang/rescript-vscode/pull/1093\n- Remove chokidar from LSP server. We expect LSP clients to support [workspace_didChangeWatchedFiles](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_didChangeWatchedFiles). https://github.com/rescript-lang/rescript-vscode/pull/1096\n\n## 1.62.0\n\n#### :nail_care: Polish\n\n- Remove the built-in formatter since it has been causing more harm than done good. https://github.com/rescript-lang/rescript-vscode/pull/1073\n\n#### :rocket: New Feature\n\n- Port [7292](https://github.com/rescript-lang/rescript/pull/7292): Fix dot completion issue with React primitives. https://github.com/rescript-lang/rescript-vscode/pull/1074\n\n- Add support for \"dot completion everywhere\". In addition to record fields, dots will now complete for object fields, and pipe completions applicable to the type the dot is on. You can also configure where the editor draws extra pipe completions from via the `@editor.completeFrom` attribute. https://github.com/rescript-lang/rescript-vscode/pull/1054\n\n#### :bug: Bug fix\n\n- Fix bug where type args stopped working in some completions when passed through inline records. https://github.com/rescript-lang/rescript-vscode/pull/1064\n- Fix bug where objects weren't actually iterated on, making completions inside of them not work. https://github.com/rescript-lang/rescript-vscode/pull/1065\n- Fix bug where pipe completions would not trigger with generic type arguments. 
https://github.com/rescript-lang/rescript-vscode/pull/1067\n\n## 1.60.0\n\n#### :rocket: New Feature\n\n- Enable incremental typechecking and project config cache by default. https://github.com/rescript-lang/rescript-vscode/pull/1047\n\n#### :house: Internal\n\n- Auto-format vendored OCaml sources like in compiler repo. https://github.com/rescript-lang/rescript-vscode/pull/1053\n- All OCaml sources in this repo is now considered \"legacy\", as the OCaml parts of the editor integration are now shipped with the compiler instead.\n\n## 1.58.0\n\n#### :bug: Bug fix\n\n- Fix an issue where the extension would still crash in a monorepo with rewatch\n\n#### :rocket: New Feature\n\n- Add hightlighting for the new dict literal syntax `dict{}`. https://github.com/rescript-lang/rescript-vscode/pull/934\n\n## 1.56.0\n\n#### :bug: Bug Fix\n\n- Fix a regression with incremental typechecking in monorepos with rewatch, where the workspace directory was not properly set.\n- When log files are deleted (due to a clean), the editor tooling doesn't crash anymore.\n\n#### :rocket: New Feature\n\n- Support for the `namespace-entry` feature of rewatch, to allow entrypoint modules for namespaced packages.\n\n## 1.54.0\n\n#### :nail_care: Polish\n\n- Reduce latency of language server by caching a few project config related things. https://github.com/rescript-lang/rescript-vscode/pull/1003\n\n#### :bug: Bug Fix\n\n- Fix edge case in switch expr completion. https://github.com/rescript-lang/rescript-vscode/pull/1002\n\n## 1.52.0\n\n#### :rocket: New Feature\n\n- Experimental support for caching the project config to reduce latency. https://github.com/rescript-lang/rescript-vscode/pull/1000\n\n#### :bug: Bug Fix\n\n- Fix highlighting of other languages being affected by rescript-vscode. https://github.com/rescript-lang/rescript-vscode/pull/973\n- Use canonicalized URIs/paths for jump to definition. 
https://github.com/rescript-lang/rescript-vscode/pull/982\n- Fix JSX prop special case in end of JSX element. https://github.com/rescript-lang/rescript-vscode/pull/984\n- preserve URI format in canonicalizeUri. https://github.com/rescript-lang/rescript-vscode/pull/990\n- Remove workaround for canonicalize function in tests https://github.com/rescript-lang/rescript-vscode/pull/992\n- Get completions for writing new field names in a record body expressions in more cases. https://github.com/rescript-lang/rescript-vscode/pull/997\n\n#### :nail_care: Polish\n\n- Make sure doc strings are always on top in hovers. https://github.com/rescript-lang/rescript-vscode/pull/956\n- Make JSX completion work for `make` functions of type `React.component<props>`, like what you get when using `React.lazy_`. https://github.com/rescript-lang/rescript-vscode/pull/966\n- Hover: print signature above docstrings. https://github.com/rescript-lang/rescript-vscode/pull/969\n- Adjust function template snippet return. https://github.com/rescript-lang/rescript-vscode/pull/985\n- Don't expand `type t` maker functions in patterns. https://github.com/rescript-lang/rescript-vscode/pull/986\n- Use `loc` for identifiers to get more and better completions in certain scenarios with type parameters. https://github.com/rescript-lang/rescript-vscode/pull/993\n- Improve the DX of running the code analyzer some. https://github.com/rescript-lang/rescript-vscode/pull/995\n\n#### :rocket: New Feature\n\n- Add support for the rewatch build system for incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/965\n- Add support for Linux ARM64\n- Statically linked Linux binaries\n- Emit `%todo` instead of `failwith(\"TODO\")` when we can (ReScript >= v11.1). https://github.com/rescript-lang/rescript-vscode/pull/981\n- Complete `%todo`. https://github.com/rescript-lang/rescript-vscode/pull/981\n- Add code action for extracting a locally defined module into its own file. 
https://github.com/rescript-lang/rescript-vscode/pull/983\n- Add code action for expanding catch-all patterns. https://github.com/rescript-lang/rescript-vscode/pull/987\n- Add code actions for removing unused code (per item and for an entire file), driven by `reanalyze`. https://github.com/rescript-lang/rescript-vscode/pull/989\n\n#### :house: Internal\n\n- Update parser and compiler support files to the latest version. https://github.com/rescript-lang/rescript-vscode/pull/998\n\n## 1.50.0\n\n#### :rocket: New Feature\n\n- Extend signature help to work on constructor payloads in both expressions and patterns as well. Can be turned off if wanted through settings. https://github.com/rescript-lang/rescript-vscode/pull/947 https://github.com/rescript-lang/rescript-vscode/pull/954\n- Show module docs for file modules. https://github.com/rescript-lang/rescript-vscode/pull/952\n\n#### :nail_care: Polish\n\n- Enhance variant constructor payload completion. https://github.com/rescript-lang/rescript-vscode/pull/946\n- Clean occasional dots from \"insert missing fields\" code action. https://github.com/rescript-lang/rescript-vscode/pull/948\n- Pick up code actions in incremental compilation. https://github.com/rescript-lang/rescript-vscode/pull/948\n- Various improvements to the signature help functionality. https://github.com/rescript-lang/rescript-vscode/pull/950\n- Clean up completion item \"details\" and \"documentation\". https://github.com/rescript-lang/rescript-vscode/pull/952\n\n## 1.48.0\n\n#### :bug: Bug Fix\n\n- Stability fixes for the experimental incremental compilation mode. https://github.com/rescript-lang/rescript-vscode/pull/945\n\n## 1.46.0\n\n#### :bug: Bug Fix\n\n- Fix null checks for editor config, so things don't blow up. https://github.com/rescript-lang/rescript-vscode/pull/944\n\n## 1.44.0\n\n#### :rocket: New Feature\n\n- Experimental support for type checking without saving the file :tada:. 
https://github.com/rescript-lang/rescript-vscode/pull/939\n\n## 1.42.0\n\n#### :bug: Bug Fix\n\n- Fix issue with unlabelled arg code swallowing completions. https://github.com/rescript-lang/rescript-vscode/pull/937\n- Fix issue where completion inside of switch expression would not work in some cases. https://github.com/rescript-lang/rescript-vscode/pull/936\n- Fix bug that made empty prop expressions in JSX not complete if in the middle of a JSX element. https://github.com/rescript-lang/rescript-vscode/pull/935\n\n## 1.40.0\n\n#### :nail_care: Polish\n\n- Enhance decorator completion. https://github.com/rescript-lang/rescript-vscode/pull/908\n- Completion for import attributes in `@module`. https://github.com/rescript-lang/rescript-vscode/pull/913\n- Relax filter for what local files that come up in from and regular string completion in `@module`. https://github.com/rescript-lang/rescript-vscode/pull/918\n- Make from completion trigger for expr hole so we get a nice experience when completing {from: <com>} in `@module`. https://github.com/rescript-lang/rescript-vscode/pull/918\n- Latest parser for newest syntax features. https://github.com/rescript-lang/rescript-vscode/pull/917\n- Handle completion for DOM/element attributes and attribute values properly when using a generic JSX transform. https://github.com/rescript-lang/rescript-vscode/pull/919\n- Highlight tagged template literal functions as functions. https://github.com/rescript-lang/rescript-vscode/pull/920\n- Complete for `type t` values when encountering a `type t` in relevant scenarios. https://github.com/rescript-lang/rescript-vscode/pull/924\n- Highlight escaped sequences as a whole and not only the first character. https://github.com/rescript-lang/rescript-vscode/pull/929\n- Start highlighting escaped sequences in template literals. 
https://github.com/rescript-lang/rescript-vscode/pull/929\n\n## 1.38.0\n\n#### :nail_care: Polish\n\n- Prefer Core's `RegExp` when Core is open and completing for regexp functions. https://github.com/rescript-lang/rescript-vscode/pull/903\n- Add `%re(\"\")` to the completions list when completing in a position where a regexp value is expected. https://github.com/rescript-lang/rescript-vscode/pull/903\n\n#### :bug: Bug Fix\n\n- Fix issue with completion in nested patterns that would make it not possible to complete for new record fields via trailing commas in certain situations. https://github.com/rescript-lang/rescript-vscode/pull/906\n\n## 1.36.0\n\n#### :bug: Bug Fix\n\n- Fix issue with ambigious wraps in JSX prop values (`<SomeComp someProp={<com>}`) - need to figure out if we're completing for a record body or if `{}` are just wraps for the type of `someProp`. In the case of ambiguity, completions for both scenarios are provided. https://github.com/rescript-lang/rescript-vscode/pull/894\n- Many bugfixes around nested pattern and expression completion. https://github.com/rescript-lang/rescript-vscode/pull/892\n- Fix (very annoying) issue where empty pipe completion wouldn't work inside of a parenthesised function call: `Console.log(someArray->)` completing at the pipe. https://github.com/rescript-lang/rescript-vscode/pull/895\n\n#### :nail_care: Polish\n\n- More cases of not emitting `_` when completing in expressions. https://github.com/rescript-lang/rescript-vscode/pull/890\n\n#### :house: Internal\n\n- Move `rescript-tools` to OCaml code and make `analysis` an library. https://github.com/rescript-lang/rescript-vscode/pull/855\n\n## 1.34.0\n\n#### :rocket: New Feature\n\n- Complete domProps for lowercase JSX components from `ReactDOM.domProps` if possible. https://github.com/rescript-lang/rescript-vscode/pull/883\n- Complete for maker-style functions (functions returning type `t` of a module) when encountering a `type t` in relevant scenarios. 
https://github.com/rescript-lang/rescript-vscode/pull/884\n- Expand type aliases in hovers. https://github.com/rescript-lang/rescript-vscode/pull/881\n\n#### :nail_care: Polish\n\n- Better error recovery when analysis fails. https://github.com/rescript-lang/rescript-vscode/pull/880\n- Do not emit `_` when completing in expressions. https://github.com/rescript-lang/rescript-vscode/pull/885\n- Include fields when completing a braced expr that's an ID, where it the path likely starts with a module. https://github.com/rescript-lang/rescript-vscode/pull/882\n\n## 1.32.0\n\n#### :bug: Bug Fix\n\n- Fix so that you don't need a leading `#` to complete for polyvariant constructors. https://github.com/rescript-lang/rescript-vscode/pull/874\n- Print keyword polyvariant constructors with quotes when doing completions. https://github.com/rescript-lang/rescript-vscode/pull/877\n\n## 1.30.0\n\n#### :rocket: New Feature\n\n- If interface file exists, ask if it should be overwritten. https://github.com/rescript-lang/rescript-vscode/pull/865\n\n#### :bug: Bug Fix\n\n- Proper default for `\"uncurried\"` in V11 projects. https://github.com/rescript-lang/rescript-vscode/pull/867\n- Treat `result` type as a proper built in type. https://github.com/rescript-lang/rescript-vscode/pull/860\n- Fix infinite loop when resolving inferred completions when several values in scope has the same name. https://github.com/rescript-lang/rescript-vscode/pull/869\n- Fix crash when trying to print recursive polymorphic variants without a concrete definition. https://github.com/rescript-lang/rescript-vscode/pull/851\n- Fix `rescript-language-server --version` command. https://github.com/rescript-lang/rescript-vscode/pull/873\n- Print exotic polyvariant constructor names with quotes when doing completion. https://github.com/rescript-lang/rescript-vscode/pull/870\n\n#### :nail_care: Polish\n\n- Change end position of cursor when completing `Some(<fieldName>)` in patterns. 
https://github.com/rescript-lang/rescript-vscode/pull/857\n\n#### :bug: Bug Fix\n\n- Add support for detecting dead fields inside inline records. https://github.com/rescript-lang/rescript-vscode/pull/858\n\n## 1.28.0\n\n#### :bug: Bug Fix\n\n- Fix issue introduced in recent PR for module completion. https://github.com/rescript-lang/rescript-vscode/pull/856\n\n## 1.26.0\n\n#### :bug: Bug Fix\n\n- More robust handling of namespaces in pipe completions. https://github.com/rescript-lang/rescript-vscode/pull/850\n\n## 1.24.0\n\n#### :bug: Bug Fix\n\n- Clean up name of namespaced module when hovering. https://github.com/rescript-lang/rescript-vscode/pull/845\n- Don't complete illegal file module names. https://github.com/rescript-lang/rescript-vscode/pull/844\n- Fix issue `open` on submodules exposed via `-open` in bsconfig.json/rescript.json, that would cause the content of those `open` modules to not actually appear in autocomplete. https://github.com/rescript-lang/rescript-vscode/pull/842\n- Account for namespace when filtering pipe completion items. https://github.com/rescript-lang/rescript-vscode/pull/843\n\n## 1.22.0\n\n#### :nail_care: Polish\n\n- Resolve module aliases in hover. https://github.com/rescript-lang/rescript-vscode/pull/820\n\n## 1.20.0\n\n#### :rocket: New Feature\n\n- Add support for syntax highlighting in `%raw` and `%ffi` extension points. https://github.com/rescript-lang/rescript-vscode/pull/774\n- Add completion to top level decorators. https://github.com/rescript-lang/rescript-vscode/pull/799\n- Add code action for wrapping patterns where option is expected with `Some`. https://github.com/rescript-lang/rescript-vscode/pull/806\n- Better completion from identifiers with inferred types. https://github.com/rescript-lang/rescript-vscode/pull/808\n- Make suggested template functions async when the target function returns a promise. 
https://github.com/rescript-lang/rescript-vscode/pull/816\n- Fix code action for inserting undefined record fields in ReScript v11. https://github.com/rescript-lang/rescript-vscode/pull/817\n\n#### :nail_care: Polish\n\n- Revamp \"Insert missing cases\" code action to make it apply in more cases and be much more robust. https://github.com/rescript-lang/rescript-vscode/pull/804\n- Make the completion engine understand async/await. https://github.com/rescript-lang/rescript-vscode/pull/813\n- Comments are now automatically closed and indented. https://github.com/rescript-lang/rescript-vscode/pull/815\n\n#### :bug: Bug Fix\n\n- Fix invalid range for `definition`. https://github.com/rescript-lang/rescript-vscode/pull/781\n- Don't emit object keys in uppercase as namespace. https://github.com/rescript-lang/rescript-vscode/pull/798\n- Fix accidental output of extra `|` when producing exhaustive switch code for polyvariants. https://github.com/rescript-lang/rescript-vscode/pull/805\n- Fix JS syntax highlighting in single-line FFI extension points. https://github.com/rescript-lang/rescript-vscode/pull/807\n- Fix signature help in uncurried mode. https://github.com/rescript-lang/rescript-vscode/pull/809\n- Fix various issues in uncurried mode. https://github.com/rescript-lang/rescript-vscode/pull/810\n- Fixes a bug in pattern completion where for example `result` wouldn't complete, due to type variables getting lost/not being instantiated. https://github.com/rescript-lang/rescript-vscode/pull/814\n- Fix bug where pipes would not be considered in certain cases when completing for single unlabelled function arguments. https://github.com/rescript-lang/rescript-vscode/pull/818\n\n## 1.18.0\n\n#### :rocket: New Feature\n\n- Docstring template Code Action. https://github.com/rescript-lang/rescript-vscode/pull/764\n- Improve unlabelled argument names in completion function templates. 
https://github.com/rescript-lang/rescript-vscode/pull/754\n- Add `Some(fieldName)` case when completing in a pattern with an option on a record field. https://github.com/rescript-lang/rescript-vscode/pull/766\n\n#### :bug: Bug Fix\n\n- Fix URL scheme for Code Action. https://github.com/rescript-lang/rescript-vscode/pull/748\n- Support uncurried functions in various places where we look up and use function types. https://github.com/rescript-lang/rescript-vscode/pull/771\n\n## 1.16.0\n\n#### :rocket: New Feature\n\n- Greatly extend completion abilities for unsaved code. WARNING: Might be a bit unstable initially. Report any issues you see. https://github.com/rescript-lang/rescript-vscode/pull/712\n- Provide hovers for more unsaved code via the new completion features. https://github.com/rescript-lang/rescript-vscode/pull/749\n\n## 1.14.0\n\n#### :rocket: New Feature\n\n- Enable completion for `Js.Exn.Error(error)` when pattern matching on `exn`. This is to make the `Js.Exn.Error` API more discoverable. https://github.com/rescript-lang/rescript-vscode/pull/728\n\n#### :nail_care: Polish\n\n- Signature Help is now considered stable, and enabled for all users. Can still be turned off in settings.\n- Show whether record fields and variant constructors are deprecated when completing. https://github.com/rescript-lang/rescript-vscode/pull/731\n- Prettify how optional record fields are printed in the completion item detail. https://github.com/rescript-lang/rescript-vscode/pull/737\n\n#### :bug: Bug Fix\n\n- Fix crashes in document symbol requests when broken syntax exists. https://github.com/rescript-lang/rescript-vscode/pull/736\n\n## 1.12.0\n\n#### :rocket: New Feature\n\n- Add autocomplete for function argument values (booleans, variants and options. More values coming), both labelled and unlabelled. https://github.com/rescript-lang/rescript-vscode/pull/665\n- Add autocomplete for JSX prop values. 
https://github.com/rescript-lang/rescript-vscode/pull/667\n- Add snippet support in completion items. https://github.com/rescript-lang/rescript-vscode/pull/668\n- Add support from completing polyvariants as values. https://github.com/rescript-lang/rescript-vscode/pull/669\n- Add support for completion in patterns. https://github.com/rescript-lang/rescript-vscode/pull/670\n- Add support for pattern completion of unsaved tuples. https://github.com/rescript-lang/rescript-vscode/pull/679\n- Add support for completion in typed expressions. https://github.com/rescript-lang/rescript-vscode/pull/682\n- Complete for `React.element` creator functions (`React.string` etc) when in JSX context. https://github.com/rescript-lang/rescript-vscode/pull/681\n- Handle optional record fields in expression/pattern completion. https://github.com/rescript-lang/rescript-vscode/pull/691\n- Expand options in completion to make working with options a bit more ergonomic. https://github.com/rescript-lang/rescript-vscode/pull/690\n- Let `_` trigger completion in patterns. https://github.com/rescript-lang/rescript-vscode/pull/692\n- Support inline records in completion. https://github.com/rescript-lang/rescript-vscode/pull/695\n- Add way to autocomplete an exhaustive switch statement for identifiers. Example: an identifier that's a variant can have a switch autoinserted matching all variant cases. https://github.com/rescript-lang/rescript-vscode/pull/699\n- Support typed expression completion for lowercase (builtin) JSX tags. https://github.com/rescript-lang/rescript-vscode/pull/702\n- Support typed expression completion driven by type annotations. https://github.com/rescript-lang/rescript-vscode/pull/711\n- Completion for lowercase JSX elements, treating them like HTML elements. https://github.com/rescript-lang/rescript-vscode/pull/719\n\n#### :nail_care: Polish\n\n- Prefer opened `Belt` modules in autocomplete when `-open Belt` is detected in `bsconfig`. 
https://github.com/rescript-lang/rescript-vscode/pull/673\n- Improve precision in signature help. You now do not need to type anything into the argument for it to highlight. https://github.com/rescript-lang/rescript-vscode/pull/675\n- Remove redundant function name in signature help, to clean up what's shown to the user some. https://github.com/rescript-lang/rescript-vscode/pull/678\n- Show docstrings in hover for record fields and variant constructors. https://github.com/rescript-lang/rescript-vscode/pull/694\n- The necessary leading `?` is now automatically inserted for optional fields when destructuring records. https://github.com/rescript-lang/rescript-vscode/pull/715\n\n#### :bug: Bug Fix\n\n- Highlight `catch` like a keyword https://github.com/rescript-lang/rescript-vscode/pull/677\n- Make signature help work in calls nested inside of other calls. https://github.com/rescript-lang/rescript-vscode/pull/687\n- Fix pipe completion to work on aliased types. https://github.com/rescript-lang/rescript-vscode/pull/700\n- Fix issue with not finding type for function arguments. https://github.com/rescript-lang/rescript-vscode/pull/706\n- Fix incorrect syntax in hover help for module. https://github.com/rescript-lang/rescript-vscode/pull/709\n\n## v1.10.0\n\n#### :nail_care: Polish\n\n- Remove spacing between type definition in clients that do not support markdown links. https://github.com/rescript-lang/rescript-vscode/pull/619\n- Rename custom LSP methods names. https://github.com/rescript-lang/rescript-vscode/pull/611\n- Better performance for Inlay Hints and Codelens. https://github.com/rescript-lang/rescript-vscode/pull/634\n- Accept both `@ns.doc` and the new `@res.doc` for the internal representation of doc comments. And both `@ns.optional` and `@res.optional` for the optional fields. https://github.com/rescript-lang/rescript-vscode/pull/642\n- Make pipe completion work more reliably after function calls. 
https://github.com/rescript-lang/rescript-vscode/pull/656\n- Make pipe completion work in pipe chains, not just on the first pipe. https://github.com/rescript-lang/rescript-vscode/pull/656\n- Make pipe completion work reliably when the path resolution needs to traverse submodules https://github.com/rescript-lang/rescript-vscode/pull/663\n- Make pipe completion work (after saving/compiling) when the return type of a function call is unknown until compilation https://github.com/rescript-lang/rescript-vscode/pull/662\n- Add pipe completion for `int` and `float` constants https://github.com/rescript-lang/rescript-vscode/pull/664\n\n#### :bug: Bug Fix\n\n- Fix issue where `-open Some.Path` in `\"bsc-flags\"` would sometimes be treated differently from `open Some.Path` locally in a file https://github.com/rescript-lang/rescript-vscode/pull/616\n\n- Fix issue where doc comment is not shown on hover in case of shadowed identifier (in particular for JSX V4 components which shadow `make`) https://github.com/rescript-lang/rescript-vscode/issues/621\n\n- Adapt command to create interface files to latest JSX V4 (no key prop, possibly empty record) https://github.com/rescript-lang/rescript-vscode/issues/617\n\n- Fix issue where pipes were not taken into account in the signature help, resulting in the highlighted argument in signature help always being off by one for unlabelled arguments in piped expressions https://github.com/rescript-lang/rescript-vscode/issues/626\n\n- Fix incorrect type hint for module type. https://github.com/rescript-lang/rescript-vscode/pull/626\n\n- Fix file location in Document Symbols response. https://github.com/rescript-lang/rescript-vscode/issues/629\n\n- Fix issue where create interface file would not work with certain JSX V4 components https://github.com/rescript-lang/rescript-vscode/issues/617\n\n- Fix issue with completing `foo. 
x` where `x` should not be part of the completion https://github.com/rescript-lang/rescript-vscode/pull/644\n\n- Fix issue where code analysis would not track types in inner modules across implementations and interfaces https://github.com/rescript-association/reanalyze/issues/186\n\n- Fix issue with references to elements defined in an interface file https://github.com/rescript-lang/rescript-vscode/pull/646\n\n- Fix issue with references from implementation files which also happen to have interface files https://github.com/rescript-lang/rescript-vscode/issues/645\n\n- Fix issue where jump to definition would go to the wrong place when there are aliased identifiers in submodules https://github.com/rescript-lang/rescript-vscode/pull/653\n\n- Fix issue where document symbols were not shown nested https://github.com/rescript-lang/rescript-vscode/pull/655\n\n## v1.8.2\n\n#### :rocket: New Feature\n\n- Add configuration parameter `\"transitive\"` under `\"reanalyze\"` is `bsconfig.json` and make reportst non-transitive by default. If set to `false`, the analysis does not report transitively dead items. So removing the reported item individually can be done in isolation. This is a more fine-grained process for guiding the user to remove dead code one item at a time. 
https://github.com/rescript-lang/rescript-vscode/pull/601 https://github.com/rescript-lang/rescript-vscode/pull/610\n  This feature comes from a conversation with @jfmengels on how https://github.com/jfmengels/elm-review is designed.\n\n#### :bug: Bug Fix\n\n- Fix issue where module paths in `-open` in `bsc-flags` such as \"-open ReScriptJs.Js\" were not recognized https://github.com/rescript-lang/rescript-vscode/issues/607\n\n## v1.8.1\n\n#### :rocket: New Feature\n\n- Add support for prop completion for JSX V4 https://github.com/rescript-lang/rescript-vscode/pull/579\n- Add support for create interface file for JSX V4 https://github.com/rescript-lang/rescript-vscode/pull/580\n- Expand one level of type definition on hover. Dig into record/variant body. https://github.com/rescript-lang/rescript-vscode/pull/584\n- Add clickable links to type definitions in hovers. https://github.com/rescript-lang/rescript-vscode/pull/585\n- Add experimental signature help for function calls. https://github.com/rescript-lang/rescript-vscode/pull/547\n\n#### :bug: Bug Fix\n\n- Fix printing of record types with optional fields https://github.com/rescript-lang/rescript-vscode/pull/584\n\n## v1.6.0\n\n#### :rocket: New Feature\n\n- Inlay Hints (experimental). `rescript.settings.inlayHints.enable: true`. Turned off by default. https://github.com/rescript-lang/rescript-vscode/pull/453\n- Code Lenses for functions (experimental). `rescript.settings.codeLens: true`. Turned off by default. https://github.com/rescript-lang/rescript-vscode/pull/513\n- Markdown code blocks tagged as `rescript` now get basic syntax highlighting. 
https://github.com/rescript-lang/rescript-vscode/pull/97\n- Hover support for doc comments on v10 compiler `/** this is a doc comment */`\n\n#### :bug: Bug Fix\n\n- Fix issue where debug output would end up in the JSON file produced by Reanalyze https://github.com/rescript-lang/rescript-vscode/pull/575\n- Fix issue where autocomplete would not perform type instantiation https://github.com/rescript-lang/rescript-vscode/pull/561\n- Fix issue where hovering over a field in record construction would show the type without instantiating its type arguments https://github.com/rescript-lang/rescript-vscode/pull/560\n- Fix Incorrect semantic highlighting of `external` declarations https://github.com/rescript-lang/rescript-vscode/pull/517\n- Fix issue where doc comment with nested comments inside is not shown properly on hover https://github.com/rescript-lang/rescript-vscode/pull/526\n- Fix server crashes when open file is removed from disk with inlayHints enabled https://github.com/rescript-lang/rescript-vscode/issues/538\n- Fix inlay hint for destructured record/array https://github.com/rescript-lang/rescript-vscode/issues/536\n\n## v1.4.2\n\n#### :bug: Bug Fix\n\n- Fix finding the ReScript bin for even more kinds of setups.\n- Document the process of finding the ReScript bin in README.\n\n## v1.4.1\n\n#### :bug: Bug Fix\n\n- Fix formatting not preferring the locally installed formatter (if available), which made formatting potentially different between formatting via `rescript format` and the extension.\n- Fix finding the ReScript bin in monorepos\n\n## v1.4.0\n\n#### :rocket: New Feature\n\n- Add command: ReScript: Restart Language Server\n- Extend integration with reanalyze for code analysis. Support both `dce` and `exception` analysis which can be configured to e.g. 
both run by adding `{\"reanalyze\": {\"analysis\": [\"dce\", \"exception\"]} }` in `bsconfig.json`.\n- Add configuration option for suppressing the \"Do you want to start a build?\" prompt.\n- Add configuration option for autostarting the Code Analyzer.\n- Report syntax errors as you type.\n\n#### :bug: Bug Fix\n\n- Fix issue with autocompletion for constructors in switch statements.\n- Fix issue with autocompletion inside template expressions.\n- Fix handling of local opens.\n- Fix extension crash when renaming a file.\n- Fix issue where the server would crash on genType's errors.\n- Fix issue where the server would crash if the project contains an OCaml file with a syntax error.\n- Fix issue where `@inline` was not suported by the command to generate an interface file.\n\n#### :nail_care: Polish\n\n- Add hover information with links to documentation for decorators.\n- Sync with latest parser/printer.\n\n#### :house: Internal\n\n- Support paths to rescript executables in arm64 architectures.\n\n#### :boom: Breaking Change\n\n- Drop support for `bs-patform`. Only `rescript` supported.\n\n## 1.3.0\n\n- Fix issue where using paths of the form `./something` would show multiple copies of the same file in vscode.\n- When hovering on a field access, show the instantiated type of the field.\n- Support autocomplete for objects from another module `M.x[...`.\n- Fix command for creating interface files when the project uses namespaces.\n- Added command `ReScript: Open the compiled JS file for this implementation file.`.\n- Use semantic syntax highlighting (https://github.com/rescript-lang/rescript-vscode/pull/367).\n- Report \"Fatal error\" when it happens in the compiler log (e.g. 
a make function with type annotation) and don't crash the extension.\n- Fix issue in functions the form \"~foo as name\" where the location would only cover \"ame\".\n- Extend the command to create an interface file, to support components and ReScript decorators used in bindings.\n- Enable formatting files without needing the file to be in an actual ReScript project.\n- New feature: Show Outline which was previously disabled.\n- Add command to quickly switch between implementation and interface file.\n- Support hover on JSX props and labelled arguments.\n\n## 1.2.1\n\n- Fix issue with highlighting of interpolation strings (those with backticks) introduced in release 1.2.0.\n- Fix crash when the project contains OCaml files that have warnings.\n- Fix crash on hover when a dependency contains a type with functor application. This is not expressible in ReScript syntax, but can appear in a dependent OCaml package and be pulled in for processing by the extension.\n- Remove obsolete `@bs` snippets\n\n## 1.2.0\n\nFeatures:\n\n- Add autocompletion for object access of the form `foo[\"x\"]` and `foo[\"x\"][\"y\"][\"z\"]`.\n- Support autocomplete of records for variables defined in other files.\n- Improve autocomplete when several values have the same name, with a heuristic to approximate the correct scope.\n- Add a \"Dead Code Analysis\" mode that will highlight globally dead values, redundant optional arguments, dead modules, dead types (records and variants) ([#334](https://github.com/rescript-lang/rescript-vscode/pull/334))\n\nFixes:\n\n- Fix issue in JSX autocomplete when the component is declared external.\n- Fix jump-to-definition for uncurried calls.\n- Fix issue where values for autocomplete were pulled from implementations instead of interfaces.\n- Fix issue with autocomplete then punned props are used in JSX. E.g. 
`<M foo ...>`.\n- Fix issue with JSX autocompletion not working after `foo=#variant`.\n- Fix issue in JSX autocompletion where the `key` label would always appear.\n- Fix issue in record field autocomplete not working with type aliases.\n- Fix issue where autocomplete for local values would not work in the presence of `@react.component` annotations.\n- Fix issue where the server would crash on large output produced by the binary command.\n- Fix issue where the server would crash when a file has a self cycle.\n\n## 1.1.3\n\nFeatures:\n\n- Find references to files as modules.\n- Autocomplete: skip inline comments to decide if a labeled argument was already supplied.\n- Rename: support file rename when renaming a module name.\n- Rename: use renameProvider to give a warning when it's not a symbol that can be renamed.\n- Jump to definition: support jumping to type definition.\n- Jump to definition: jump to the `res` file when both `res` and `resi` are present.\n- Restore creation of interface files (fully supported from compiler 9.1.3 onwards).\n\n## 1.1.2\n\nFeatures:\n\n- Rename has landed! Works across-files!\n- More autocomplete improvements.\n- Wider Linux support.\n\n## 1.1.1\n\nThis update contains _lots_ of autocomplete, hover and jump-to-definition improvements. We'll list only a few below.\n\nFixes:\n\n- Jump-to-definition on some Windows paths.\n- `->` autocomplete overruled `.`.\n- Hover on components in interface files.\n\nFeatures:\n\n- Show References! Works cross-files too.\n- Hover now supports markdown docs.\n- Hover on labels in component functions with compiler version 9.1, and labels with type annotation.\n- Don't show file path on hover and autocomplete (cleaner).\n- Autocomplete for props in JSX components.\n- `->` autocomplete for built-in list, array, string, option types. 
And for string and array literals.\n- Slimmer download.\n\nBreakages:\n\n- Very old linux versions are no longer supported.\n- Hover: no more odoc format support (but it'll still display as text).\n\n## 1.0.8\n\nFixes:\n\n- Diagnostics display for long lines.\n\nFeatures:\n\n- Full support for the newest `rescript` npm package!\n- Highlight type parameters.\n\n## 1.0.7\n\nFixes:\n\n- Highlighting for some decorators and keywords.\n- Various hover & autocomplete opportunities.\n\nFeatures:\n\n- Autocomplete for `->` pipe!\n- Autocomplete for decorators such as `@module` and `@val` and `@deprecated`.\n- Autocomplete for labels `func(~...)`.\n- Support for the upcoming `rescript` npm package.\n\n## 1.0.6\n\nFixes:\n\n- Diagnostics crashing when a file's range isn't found (advice: use fewer ppxes that cause these bugs!). See [#77](https://github.com/rescript-lang/rescript-vscode/issues/77).\n- Weird behaviors when project path contains white space.\n- Proper audit of the windows bugs. Windows is now officially first-class!\n\nSyntax colors:\n\n- Highlight operators for default VSCode dark+ theme. This means slightly less diverse highlight for the other themes that previously already highlighted operators.\n- Worked with [One Dark Pro](https://marketplace.visualstudio.com/items?itemName=zhuangtongfa.Material-theme) and [Mariana Pro](https://marketplace.visualstudio.com/items?itemName=rickynormandeau.mariana-pro). We now officially recommend these 2 themes, in addition to the existing recommendations in README.\n- Highlight deprecated elements using the deprecation scopes.\n- JSX bracket highlight fix (still no color; before, some parts were erroneously highlighted).\n\n## 1.0.5\n\nFeatures:\n\n- Custom folding. 
See README.\n- Support for doc strings when hovering on modules.\n- Jump to type definition for types defined in inner modules.\n\nFixes:\n\n- Properly highlight nested comments.\n- Windows diagnostics!\n- Removed a potential infinite loop issue in autocomplete.\n- Don't autocomplete `open MyModule` inside line comments.\n- Don't print parentheses as in `A()` for 0-ary variants.\n\n## 1.0.4\n\n- Some diagnostics watcher staleness fix.\n- Various type hover fixes.\n- Monorepo/yarn workspace support.\n\n## 1.0.2\n\n- All the usual features (type hint, autocomplete) now work on `bsconfig.json` too!\n- Snippets, to ease a few syntaxes.\n- Improved highlighting for polymorphic variants. Don't abuse them please.\n\n## 1.0.1\n\n- Fix temp file creation logic.\n\n## 1.0.0\n\nOfficial first release!\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing\n\nThanks for your interest. Below is an informal spec of how the plugin's server communicates with the actual compiler. If you're a ReScript editor plugin implementor, you should probably read this to understand the various important nuances and copy it.\n\n## Repo Structure\n\n```\n.\n├── client // Language Client. VSCode UI\n│   └── src\n│       └── extension.ts // Language Client entry point\n├── analysis // Native binary powering hover, autocomplete, etc.\n│   ├── src // Analysis library\n│   ├── bin // Analysis binary\n├── package.json // The extension manifest\n├── server // Language Server. Usable standalone\n│   ├── src\n│   │   ├── server.ts // Language Server Module\n│   │   ├── cli.ts // LSP CLI\n│   └── analysis_binaries // Prod-time platform-specific analysis binaries\n│       ├── darwin\n│       ├── linux\n│       └── win32\n└── tools // ReScript Tools\n    ├── bin // OCaml Binary\n    ├── src // ReScript Tools library\n    └── binaries // Prod-time platform-specific binaries\n        ├── darwin\n        ├── linux\n        └── win32\n```\n\n## Install Dependencies\n\n- Run `npm install` at the root. This will also install the npm modules for both the `client` and `server` folders.\n\n## OCaml Code\n\nThis is needed for the `analysis` and `tools` folder, which is native code.\n\nAt the root:\n\n```sh\n# If you haven't created the switch, do it. OPAM(https://opam.ocaml.org)\nopam switch 5.2.0 # can also create local switch with opam switch create . 5.2.0\n\n# Install dev dependencies from OPAM\nopam install . --deps-only\n\n# For IDE support, install the OCaml language server and OCaml Formatter\nopam install ocaml-lsp-server ocamlformat\n```\n\nYou need `dune` to build the OCaml source code, if it is not available in your shell try running `eval $(opam env)`.\nIf `dune` is present, run `make build` to build the OCaml projects and copy the binaries to the root.\n\n## Build & Run\n\n- `npm run compile`. 
You don't need this if you're developing this repo in VSCode. The compilation happens automatically in the background.\n- `make`.\n\n## Test\n\n- Open VS Code to the project root.\n- Switch to the Debug viewlet (command palette -> View: Show Run and Debug).\n- Select `Client + Server` from the drop down, launch it (green arrow):\n\n  <img width=\"235\" alt=\"image\" src=\"https://user-images.githubusercontent.com/1909539/97448097-7d186a80-18ed-11eb-82d6-d55b70f54811.png\">\n\n  If you're getting some Promise-related error alert: this is a VSCode and/or template bug.\n  - If that newly launched VSCode test instance has no project in its explorer view, drag in a random project.\n  - Kill all your node processes.\n  - Redo the launch.\n\n- In the [Extension Development Host] instance of VSCode that just opened, open a `.res` file.\n- Try various features.\n- When you make a change, Go to the same Debug viewlet's Call Stack panel and restart the client and the server:\n\n  <img width=\"359\" alt=\"image\" src=\"https://user-images.githubusercontent.com/1909539/97448639-19db0800-18ee-11eb-875a-d17cd1b141d1.png\">\n\n- For the native analysis and tools binary tests: `make test`.\n\n## Change the Grammar\n\nThe _real_ source of truth for our grammar is at https://github.com/rescript-lang/rescript-sublime. We port that `sublime-syntax` grammar over to this weaker TextMate language grammar for VSCode and the rest. There are some subtle differences between the 2 grammars; currently we manually sync between them.\n\n- Modify `grammars/rescript.tmLanguage.json`.\n\nFor more grammar inspirations, check:\n\n- [TypeScript's grammar](https://github.com/microsoft/TypeScript-TmLanguage/blob/a771bc4e79deeae81a01d988a273e300290d0072/TypeScript.YAML-tmLanguage)\n- [Writing a TextMate Grammar: Some Lessons Learned](https://www.apeth.com/nonblog/stories/textmatebundle.html)\n\n## Snippets\n\nSnippets are also synced from https://github.com/rescript-lang/rescript-sublime. 
VSCode snippets docs [here](https://code.visualstudio.com/api/references/contribution-points#contributes.snippets).\n\n## Binary Invocation\n\nWe call a few binaries and it's tricky to call them properly cross-platform. Here are some tips:\n\n- We try to call the binaries synchronously to avoid races.\n- Make sure you cater to calling a binary and passing e.g. a path with whitespace in it.\n- `execFile` and its sync version do the above for free.\n- `execFile` does not work on windows for batch scripts, which is what Node scripts are wrapped in. Use `exec`. See more [here](https://github.com/rescript-lang/rescript-vscode/blob/8fcc1ab428b8225c97d2c9a5b8e3a782c70d9439/server/src/utils.ts#L110).\n- Thankfully, many of our binaries are native, so we can keep using `execFile` most of the time.\n\n## General Coding Guidance\n\n- `server/` is a standalone LSP server. Keep it light, don't add deps unless absolutely necessary, and don't accidentally use a runtime dep from the top level `package.json`.\n- This codebase stayed alive by not trying to babysit long-living processes. 
Be fast, call a binary and shut down.\n\n## Rough Description Of How The Plugin Works\n\n### Text Changes\n\nThe flow below shows how the LSP server reacts to incremental text changes and produces diagnostics:\n\n```mermaid\nflowchart TD\n  A[Your ReScript file in your editor]\n  B[LSP Client]\n  C[LSP Server]\n  D[bsc]\n\n  A -->|Type a character| B\n  B -->|textDocument/didChange| C\n\n  subgraph LSP_Server_Internal_Flow[\"LSP Server\"]\n    C1[triggerIncrementalCompilationOfFile]\n    C2[compileContents]\n    C3[figureOutBscArgs]\n    C4[parse .compiler.log]\n  end\n\n  C --> C1\n  C1 --> C2 --> C3\n  C3 -->|invoke| D\n  D -->|writes| C4\n  C4 -->|textDocument/publishDiagnostics| B\n```\n\n### Completion\n\nThe flow below shows how the LSP server handles completion requests by delegating to the native analysis binary:\n\n```mermaid\nflowchart TD\n  A[Your ReScript file in your editor]\n  B[LSP Client]\n  C[LSP Server]\n  D[rescript-editor-analysis.exe]\n\n  A -->|Trigger completion| B\n  B -->|textDocument/completion| C\n\n  subgraph LSP_Server_Internal_Flow[\"LSP Server\"]\n    C1[shell out to rescript-editor-analysis.exe]\n    C2[build completion response]\n  end\n\n  C --> C1\n  C1 -->|exec| D\n  D --> C2\n  C2 -->|textDocument/completion response| B\n```\n\n### Editor Diagnostics\n\nThey should be synced in from `lib/bs/.compiler.log` build. Don't take them from other places.\n\n### `.compiler.log`\n\nThe build output is streamed into `lib/bs/.compiler.log`. Here are its various states, numbered here:\n\n1. Doesn't exist: artifacts not built yet, or cleaned away.\n2. Present, without a final line `#Done`: still building.\n3. Present, with the final line `#Done`: finished building.\n\nBarring FS errors, there should be no other state to `.compiler.log`. 
Among others, this means the file is never present but empty.\n\nThe compiler log contains exactly the same things you'd see in a regular terminal `bsb` build, except:\n\n- The errors are indented 2 spaces\n- The extra `#Start` and `#Done` (which aren't indented).\n\nA parser for the diagnostics is [here](https://github.com/rescript-lang/rescript-vscode/blob/0dbf2eb9cdb0bd6d95be1aee88b73830feecb5cc/server/src/utils.ts#L129-L329).\n\n### State 1\n\nArtifacts cleaning through `bsb -clean` removes `.compiler.log` and turns into state 1. If that's the case, remove the diagnostics in the editor too. One could argue that they should be kept, but that's misleading UX-wise, and harder to implement correctly.\n\n### Streaming Update of Diagnostics\n\nAfter saving a file and running the build, the results stream into the log file. Unfortunately, UX-wise, in the editor, this might look like the diagnostics are suddenly gone then coming back in file by file. This looks bad. To remediate:\n\n- If it's in state 2, update those particular files' diagnostics but don't wipe the files' diagnostics yet.\n- If in state 3, finish by cleaning up the rest of the old diagnostics. This means there's a bit of bookkeeping needed here. Make sure you get it right. It's possible for a build to be interrupted (and therefore state 3 never reached) and restarted.\n\nEven this fix isn't great. Ideally, the editor's diagnostics can be greyed out while we're updating them...\n\nKeep in mind that you might be tracking multiple `.compiler.log`s. You should do the above for each.\n\n### Stale Diagnostics Detection\n\nTo check whether the artifacts are stale, do **not** check `.bsb.lock` at the project root. This is unreliable, since it's possible that `bsb` wasn't running in watcher mode. 
We also don't want to encourage overuse of the watcher mode, though it seems increasingly common.\n\nWe currently do that; we wish we didn't.\n\n<!-- Instead, do `bsb.exe -- -n` -->\n\n### Files from Other Projects\n\nIt's possible to open files from different projects into the same editor instance. In that case, also read _that_ file's project's `.compiler.log`.\n\nThe bad alternatives are:\n\n- Not show that file's project's errors. That's wrong for several reasons (looks like the file has no error, assumes an editor window has a default project, etc.).\n- Show only that file's error. That's just weird, the errors are already read from that project's `.compiler.log`. Might as well show all of them (?).\n\n## Running `bsb` in the Editor\n\n**Don't** do that unless you've prompted the user. This plugin currently prompts the user upon opening the first file of a project. It's not great, but otherwise lots of folks forget to start a `bsb` in the terminal to see the freshest diagnostics.\n\nDrawbacks:\n\n- Running an implicit `bsb -w` means you've acquired the build watch mode lockfile. The user won't be able to run his/her own `bsb -w` in the terminal.\n- Running a one-shot `bsb` doesn't conflict, but is a waste. It's also incorrect, as there might be external file system changes you're not detecting, e.g. version control changes.\n- The build might be a step in a bigger build. The editor running `bsb -w` by itself might clash with that.\n- If you have multiple files with different project roots open, running all of the `bsb -w`s is too intense.\n\n## Format\n\nTo find the location of `bsc.exe` to run the formatter:\n\n- Search in the file's directory's `node_modules/bs-platform/{platform}/bsc.exe`. If not found, recursively search upward (because [monorepos](https://github.com/rescript-lang/rescript-vscode/blob/0dbf2eb9cdb0bd6d95be1aee88b73830feecb5cc/server/src/utils.ts#L39-L45)).\n  - Do **not** directly use `node_modules/.bin/bsc` if you can help it. 
That's a Nodejs wrapper. Slow startup. We don't want our formatting to be momentarily stalled because some Nodejs cache went cold.\n  - `platform` can be `darwin`, `linux`, `win32` or `freebsd`.\n\n### Formatting Newline\n\nThe formatted result should be taken as-is, without any extra string trimming and newline addition/removal by the editor plugin.\n\n### Formatting Errors\n\nThe errors returned from `bsc.exe -format` should be discarded; in theory, they should have been duplicates of the errors from `.compiler.log`.\n\nIn the future, we should consider showing the format errors when `.compiler.log` isn't found.\n\n## Analysis bin\n\nAnalysis bin is what we currently call the OCaml code that does deeper language related analysis, and that powers most of the language specific functionality like hovers, completion, and so on. Here's a list of PRs and other resources you can have a look at if you're interested in contributing to the analysis bin:\n\n- Implementing \"code lens\" for function definitions. PR + commits have a bunch of comments intended to be educational as to what's done where, and why. https://github.com/rescript-lang/rescript-vscode/pull/513\n- Cristiano fixes a bug where autocomplete wasn't working in switch branches, because the analysis did not cover that context. Contains a bunch of good comments on how the test setup works, etc. https://github.com/rescript-lang/rescript-vscode/pull/415\n\nWe're happy to gather more resources over time here, including more in-depth getting started guides.\n\n## Releasing the VSCode extension and standalone LSP package\n\n_This below will automatically release the LSP package as well._\n\n1. Bump the version to an _even minor_ version number in `package.json` and `server/package.json` and their lockfiles. It's very important that it's an even minor like `1.8.0`, and not `1.7.0`. This is because even minors are reserved for actual releases, and uneven minors for pre-releases. Commit and push the version bump.\n2. 
Let CI build your version bump commit.\n3. Tag the commit with the version number (e.g. `git tag 1.6.0`) and push the tag (e.g. `git push origin 1.6.0`). Another build will trigger, which should automatically:\n   - create a `rescript-vscode-<version-number>.vsix` file\n   - publish that extension version to the VSCode marketplace\n   - create an automatic release on GitHub\n\nIf that somehow does not work, you can do the above steps manually:\n\n1. Download the autogenerated `.vsix` from the previous successful CI run, unzip it, and rename it to `rescript-vscode-<version-number>.vsix` (`rescript-vscode-1.3.0.vsix` for example).\n2. Go to the appropriate [VSCode Marketplace Publisher](https://marketplace.visualstudio.com/manage/publishers/chenglou92), select the three dots next to the extension name, and choose `Update`. Upload your `.vsix` there.\n3. Not done! Make a new manual release [here](https://github.com/rescript-lang/rescript-vscode/releases), and make sure you attach the generated `.vsix` onto that new release as well. This is for folks who don't use the VSCode marketplace.\n\nFor beta releases, ask folks to use the pre-release version installable from the VSCode Marketplace.\n\n## Releasing the `@rescript/tools` package\n\nThe tools package is released by bumping the version in `tools/package.json` and run `node scripts/updateVersion.js`, running `npm i` in the `tools/` folder, and then pushing those changes with the commit message `publish tools`.\n"
  },
  {
    "path": "LICENSE.txt",
    "content": "Permission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\nTHE SOFTWARE."
  },
  {
    "path": "Makefile",
    "content": "SHELL = /bin/bash\n\nbuild:\n\tdune build\n\tcp -f _build/install/default/bin/rescript-editor-analysis analysis/rescript-editor-analysis.exe\n\tcp -f _build/install/default/bin/rescript-editor-analysis rescript-editor-analysis.exe\n\tcp -f _build/install/default/bin/rescript-tools rescript-tools.exe\n\ntest:\n\tmake -C analysis test\n\tmake -C tools/tests test\n\nclean:\n\tdune clean\n\tmake -C analysis clean\n\tmake -C tools/tests clean\n\nformat:\n\tdune build @fmt --auto-promote\n\tnpx prettier --write --experimental-cli .\n\ncheckformat:\n\tdune build @fmt\n\tnpx prettier --check --experimental-cli .\n\n.DEFAULT_GOAL := build\n\n.PHONY: build clean test\n"
  },
  {
    "path": "README.md",
    "content": "<h1 align=\"center\">\n  <a href=\"https://marketplace.visualstudio.com/items?itemName=chenglou92.rescript-vscode\">ReScript VSCode</a>\n</h1>\n\n<p align=\"center\">The Official VSCode plugin for ReScript</p>\n\n<p align=\"center\">\n  <img src=\"https://user-images.githubusercontent.com/1909539/101266821-790b1400-3707-11eb-8e9f-fb7e36e660e6.gif\"/>\n</p>\n\n## Contents\n\n- [Contents](#contents)\n- [📝 Prerequisite](#-prerequisite)\n- [🌈 Supported Themes](#-supported-themes)\n- [💡 Features](#-features)\n- [📥 Installation](#-installation)\n  - [Pre-release channel](#pre-release-channel)\n- [📦 Commands](#-commands)\n- [🔨 Settings](#-settings)\n- [🚀 Code Analyzer](#-code-analyzer)\n  - [Configuring the Code Analyzer](#configuring-the-code-analyzer)\n  - [Usage](#usage)\n  - [Caveats](#caveats)\n- [🪄 Tips \\& Tricks](#-tips--tricks)\n  - [Hide generated files](#hide-generated-files)\n- [📰 Changelog](#-changelog)\n- [👏 How to Contribute](#-how-to-contribute)\n- [📄 License](#-license)\n\n## 📝 Prerequisite\n\nYou **must** have [ReScript](https://www.npmjs.com/package/rescript) >= 9.1 installed locally in your project, through the usual [npm or yarn installation](https://rescript-lang.org/docs/manual/latest/installation#integrate-into-existing-js-project). Older versions are not guaranteed to work.\n\n## 🌈 Supported Themes\n\nOur highlighting works well with most popular VSCode themes, such as:\n\n- Dark+ (default dark), Light+ (default light)\n- Solarized Dark, Solarized Light\n- Monokai Dimmed\n- Tomorrow Night Blue\n- [One Dark Pro](https://marketplace.visualstudio.com/items?itemName=zhuangtongfa.Material-theme)\n\nThe only 2 themes we don't (and can't) support, due to their lack of coloring, are:\n\n- Dark (Visual Studio), Light (Visual Studio)\n\n> **Note**\n> If your custom theme doesn't seem to highlight much (e.g. 
no colors for upper-case JSX tag, no distinction between module and variant), try one of the recommended themes to see if that's the problem. For more info, see [this post](https://github.com/rescript-lang/rescript-vscode/pull/8#issuecomment-764469070).\n\n## 💡 Features\n\n- Supports `.res`, `.resi`, `rescript.json` and the legacy config file `bsconfig.json`.\n- Syntax highlighting.\n- Formatting.\n- Build diagnostics.\n- Built-in bsb watcher (optional, and exposed explicitly as a pop-up; no worries of dangling build).\n- Type hint hover.\n- Jump to definition.\n- Autocomplete.\n- Find references.\n- Rename.\n- Inlay Hints.\n- Signature help.\n- Code lenses.\n- Snippets to ease a few syntaxes:\n  - `external` features such as `@bs.module` and `@bs.val`\n  - `try`, `for`, etc.\n- Folding, and [custom folding](https://code.visualstudio.com/docs/editor/codebasics#_folding) through `//#region` and `//#endregion`.\n\n## 📥 Installation\n\nLaunch VS Code Quick Open (`Ctrl+P`), paste the following command, and press enter.\n\n```\next install chenglou92.rescript-vscode\n```\n\nThe plugin activates on `.res` and `.resi` files. If you've already got Reason-Language-Server installed, it's possible that the latter took precedence over this one. Make sure you're using this plugin (\"ReScript syntax\") rather than Reason-Language-Server (\"BuckleScript syntax\").\n\n### Pre-release channel\n\nThere is a pre-release channel available. It is intended for testing new and therefore possibly unstable features. You can activate it by clicking on the \"Switch to Pre-Release Version\" button on the `rescript-vscode` extension page in VSCode. From this point on, pre-release versions will always have an odd version minor (1.5.x, 1.7.x, 2.1.x, etc.) while stable releases have even version minor numbers (1.4.x, 1.6.x, 2.0.0, etc.).\n\nEven if the pre-release channel seems too experimental to you, we still suggest you to give it a try and submit any issues that you run into. 
In the long run it will give us a better editor experience overall.\n\n## 📦 Commands\n\n| Command                                                          | Description                                                                                                                                                                                                                                                                                           |\n| ---------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n| ReScript: Create an interface file for this implementation file  | Creates an interface file (`.resi`) for the current `.res` file, automatically filling in all types and values in the current file.                                                                                                                                                                   |\n| ReScript: Open the compiled JS file for this implementation file | Opens the compiled JS file for the current ReScript file.                                                                                                                                                                                                                                             |\n| ReScript: Switch implementation/interface                        | Switches between the implementation and interface file. If you're in a `.res` file, the command will open the corresponding `.resi` file (if it exists), and if you're in a `.resi` file the command will open the corresponding `.res` file. This can also be triggered with the keybinding `Alt+O`. 
|\n| ReScript: Start Code Analyzer                                    | This will start code analysis in the ReScript project of the file you run the command from.                                                                                                                                                                                                           |\n| ReScript: Paste as ReScript JSON.t                               | Converts JSON from the clipboard and pastes it as ReScript `JSON.t` format. Automatically handles indentation based on cursor position.<br><br>![Kapture 2025-11-11 at 09 31 40](https://github.com/user-attachments/assets/ae543a04-1a97-4202-aaf0-15bf6e55aa71)                                     |\n| ReScript: Paste as ReScript JSX                                  | Converts vanilla JSX from the clipboard and pastes it as ReScript JSX format. Automatically handles indentation based on cursor position.                                                                                                                                                             |\n\n## 🔨 Settings\n\nYou'll find all ReScript specific settings under the scope `rescript.settings`.\n\n| Setting                    | Description                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                      |\n| -------------------------- | 
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n| Prompt to Start Build      | If there's no ReScript build running already in the opened project, the extension will prompt you and ask if you want to start a build automatically. You can turn off this automatic prompt via the setting `rescript.settings.askToStartBuild`.                                                                                                                                                                                                                                                                                |\n| ReScript Binary Path       | The extension will look for the existence of a `node_modules/.bin/rescript` file and use its directory as the `binaryPath`. If it does not find it at the project root (which is where the nearest `rescript.json` resides), it goes up folders in the filesystem recursively until it either finds it (often the case in monorepos) or hits the top level. To override this lookup process, the path can be configured explicitly using the setting `rescript.settings.binaryPath`                                              |\n| ReScript Platform Path     | The extension will look for the existence of a `node_modules/rescript` directory and use the subdirectory corresponding to the current platform as the `platformPath`. 
If it does not find it at the project root (which is where the nearest `rescript.json` resides), it goes up folders in the filesystem recursively until it either finds it (often the case in monorepos) or hits the top level. To override this lookup process, the path can be configured explicitly using the setting `rescript.settings.platformPath` |\n| ReScript Runtime Path      | The extension will look for the existence of a `node_modules/@rescript/runtime` directory (ReScript v12 beta 11+). To override this lookup process, the path can be configured explicitly using the setting `rescript.settings.runtimePath`.                                                                                                                                                                                                                                                                                     |\n| Inlay Hints (experimental) | This allows an editor to place annotations inline with text to display type hints. Enable using `rescript.settings.inlayHints.enable: true`                                                                                                                                                                                                                                                                                                                                                                                      |\n| Code Lens (experimental)   | This tells the editor to add code lenses to function definitions, showing its full type above the definition. 
Enable using `rescript.settings.codeLens: true`                                                                                                                                                                                                                                                                                                                                                                    |\n| Signature Help             | This tells the editor to show signature help when you're writing function calls. Enable using `rescript.settings.signatureHelp.enabled: true`                                                                                                                                                                                                                                                                                                                                                                                    |\n| Compile Status Indicator   | Shows compile status in the status bar (Compiling, Errors, Warnings, Success). Toggle via `rescript.settings.compileStatus.enable`. Clicking in Error/Warning modes focuses the Problems view.                                                                                                                                                                                                                                                                                                                                   |\n\n**Default settings:**\n\n```jsonc\n// Whether you want the extension to prompt for autostarting a ReScript build if a project is opened with no build running\n\"rescript.settings.askToStartBuild\": true,\n\n// Path to the directory where cross-platform ReScript binaries are. 
You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.\n\"rescript.settings.binaryPath\": null\n\n// Path to the directory where platform-specific ReScript binaries are. You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.\n\"rescript.settings.platformPath\": null\n\n// Enable (experimental) inlay hints.\n\"rescript.settings.inlayHints.enable\": true\n\n// Maximum length of character for inlay hints. Set to null to have an unlimited length. Inlay hints that exceed the maximum length will not be shown\n\"rescript.settings.inlayHints.maxLength\": 25\n\n// Enable (experimental) code lens for function definitions.\n\"rescript.settings.codeLens\": true\n\n// Show compile status in the status bar (compiling/errors/warnings/success)\n\"rescript.settings.compileStatus.enable\": true\n```\n\n## 🚀 Code Analyzer\n\nThe Code Analyzer is a mode in the extension that runs additional code analysis in your project. The analysis helps you find dead code at a granular level, find unhandled exceptions, and more.\n\n> The Code Analyzer uses [reanalyze](https://github.com/rescript-association/reanalyze), which is embedded in the extension, so you don't need to install anything extra to run it.\n\n### Configuring the Code Analyzer\n\nYou'll need to configure what code analysis you want to run, and what (if any) directories you want to ignore. Configuration is done via adding `reanalyze` in `rescript.json`. You'll get autocomplete for what configuration options are valid. You can also read [all about configuring `reanalyze` here](https://github.com/rescript-association/reanalyze#configuration-via-bsconfigjson).\n\n### Usage\n\nOpen the command palette and run `ReScript: Start Code Analyzer`. 
This will start code analysis in the ReScript project of the file you run the command from.\n\nDead code is highlighted in the editor, and code actions for suppressing dead code warnings are available in most cases.\n\nWhen done, stop the code analysis mode by clicking the `Stop Code Analyzer` button in the editor status bar. This will clear all reported analysis warnings.\n\n### Caveats\n\nCurrently does not work for full monorepo dead code analysis (although it should work for each monorepo package individually).\n\n## 🪄 Tips & Tricks\n\n### Hide generated files\n\nYou can configure VSCode to collapse the JavaScript files ReScript generates under its source ReScript file. This will \"hide\" the generated files in the VSCode file explorer, but still leaving them accessible by expanding the source ReScript file they belong to.\n\nOpen your VSCode settings and type:\n\n```jsonc\n\"explorer.fileNesting.enabled\": true,\n\"explorer.fileNesting.patterns\": {\n  \"*.res\": \"${capture}.mjs, ${capture}.js, ${capture}.cmi, ${capture}.cmt, ${capture}.cmj\",\n  \"*.resi\": \"${capture}.res\"\n},\n```\n\nThis nests implementations under interfaces if they're present and nests all generated files under the main ReScript file. Adapt and tweak to your liking.\n\nA screenshot of the result:\n\n![Shows the end result in VSCode, with ReScript related files nested under eachother appropriately.](https://user-images.githubusercontent.com/1457626/168123647-400e2f09-31e3-45a2-b74b-190c7c207446.png)\n\n## 📰 Changelog\n\nSee [CHANGELOG](CHANGELOG.md)\n\n## 👏 How to Contribute\n\nRead our [Contributing Guide](CONTRIBUTING.md)\n\n## 📄 License\n\nSee the [LICENSE](./LICENSE.txt) file for details.\n"
  },
  {
    "path": "analysis/Makefile",
    "content": "SHELL = /bin/bash\n\nbuild-tests:\n\tmake -C tests build\n\nbuild-tests-generic-jsx-transform:\n\tmake -C tests-generic-jsx-transform build\n\nbuild-tests-incremental-typechecking:\n\tmake -C tests-incremental-typechecking build\n\nbuild-reanalyze:\n\tmake -C reanalyze build\n\nbuild: build-reanalyze build-tests build-tests-generic-jsx-transform build-tests-incremental-typechecking\n\ndce: build-analysis-binary\n\topam exec reanalyze.exe -- -dce-cmt _build -suppress vendor\n\ntest-analysis-binary:\n\tmake -C tests test\n\tmake -C tests-generic-jsx-transform test\n\tmake -C tests-incremental-typechecking test\n\ntest-reanalyze:\n\tmake -C reanalyze test\n\ntest: test-analysis-binary test-reanalyze\n\nclean:\n\tmake -C tests clean\n\tmake -C tests-generic-jsx-transform clean\n\tmake -C tests-incremental-typechecking clean\n\tmake -C reanalyze clean\n\n.PHONY: build-tests build-tests-generic-jsx-transform build-tests-incremental-typechecking build-reanalyze build dce test-analysis-binary test-reanalyze test clean\n"
  },
  {
    "path": "analysis/README.md",
    "content": "# Analysis Library and Binary\n\nThis subfolder builds a private command line binary used by the plugin to power a few functionalities such as jump to definition, hover and autocomplete.\n\nThe binary reads the `.cmt` and `.cmti` files and analyses them.\n\nFor installation & build instructions, see the main CONTRIBUTING.md.\n\n## Overview\n\nSee main CONTRIBUTING.md's repo structure. Additionally, `examples/` is a convenience debugging repo. Check out `test.sh` (invoked through `make test`) to see the snapshots testing workflow stored in `tests/`.\n\n## Usage\n\nAt root:\n\n```sh\n./rescript-editor-analysis.exe --help\n\n# or\n\ndune exec -- rescript-editor-analysis --help\n```\n\n## History\n\nThis project is based on a fork of [Reason Language Server](https://github.com/jaredly/reason-language-server).\n"
  },
  {
    "path": "analysis/bin/dune",
    "content": "(env\n (static\n  (flags\n   (:standard -ccopt -static))))\n\n(executable\n (public_name rescript-editor-analysis)\n (package analysis)\n (modes byte exe)\n (name main)\n (libraries analysis))\n"
  },
  {
    "path": "analysis/bin/main.ml",
    "content": "open Analysis\n\nlet help =\n  {|\n**Private CLI For rescript-vscode usage only**\n\nAPI examples:\n  ./rescript-editor-analysis.exe completion src/MyFile.res 0 4 currentContent.res true\n  ./rescript-editor-analysis.exe definition src/MyFile.res 9 3\n  ./rescript-editor-analysis.exe typeDefinition src/MyFile.res 9 3\n  ./rescript-editor-analysis.exe documentSymbol src/Foo.res\n  ./rescript-editor-analysis.exe hover src/MyFile.res 10 2 true\n  ./rescript-editor-analysis.exe references src/MyFile.res 10 2\n  ./rescript-editor-analysis.exe rename src/MyFile.res 10 2 foo\n  ./rescript-editor-analysis.exe diagnosticSyntax src/MyFile.res\n  ./rescript-editor-analysis.exe inlayHint src/MyFile.res 0 3 25\n  ./rescript-editor-analysis.exe codeLens src/MyFile.res\n\nDev-time examples:\n  ./rescript-editor-analysis.exe dump src/MyFile.res src/MyFile2.res\n  ./rescript-editor-analysis.exe test src/MyFile.res\n\nNote: positions are zero-indexed (start at 0 0), following LSP.\nhttps://microsoft.github.io/language-server-protocol/specification#position\n\nOptions:\n  completion: compute autocomplete for MyFile.res at line 0 and column 4,\n    where MyFile.res is being edited and the editor content is in file current.res.\n\n    ./rescript-editor-analysis.exe completion src/MyFile.res 0 4 current.res\n\n  definition: get definition for item in MyFile.res at line 10 column 2:\n\n    ./rescript-editor-analysis.exe definition src/MyFile.res 10 2\n\n  typeDefinition: get type definition for item in MyFile.res at line 10 column 2:\n\n    ./rescript-editor-analysis.exe typeDefinition src/MyFile.res 10 2\n\n  documentSymbol: get all symbols declared in MyFile.res\n\n    ./rescript-editor-analysis.exe documentSymbol src/MyFile.res\n\n  hover: get inferred type for MyFile.res at line 10 column 2 (supporting markdown links):\n\n    ./rescript-editor-analysis.exe hover src/MyFile.res 10 2 true\n\n  references: get all references to item in MyFile.res at line 10 column 2:\n\n 
   ./rescript-editor-analysis.exe references src/MyFile.res 10 2\n\n  rename: rename all appearances of item in MyFile.res at line 10 column 2 with foo:\n\n    ./rescript-editor-analysis.exe rename src/MyFile.res 10 2 foo\n\n  semanticTokens: return token semantic highlighting info for MyFile.res\n\n    ./rescript-editor-analysis.exe semanticTokens src/MyFile.res\n\n  createInterface: print to stdout the interface file for src/MyFile.res\n\n    ./rescript-editor-analysis.exe createInterface src/MyFile.res lib/bs/src/MyFile.cmi\n\n  format: print to stdout the formatted version of the provided file\n\n    ./rescript-editor-analysis.exe format src/MyFile.res\n\n  diagnosticSyntax: print to stdout diagnostic for syntax\n\n    ./rescript-editor-analysis.exe diagnosticSyntax src/MyFile.res\n\n  inlayHint: get all inlay Hint between line 0 and 3 declared in MyFile.res. Last argument is maximum of character length for inlay hints\n\n    ./rescript-editor-analysis.exe inlayHint src/MyFile.res 0 3 25\n\n  codeLens: get all code lens entries for file src/MyFile.res\n\n    ./rescript-editor-analysis.exe codeLens src/MyFile.res\n\n  signatureHelp: get signature help if available for position at line 10 column 2 in src/MyFile.res\n\n    ./rescript-editor-analysis.exe signatureHelp src/MyFile.res 10 2\n\n  test: run tests specified by special comments in file src/MyFile.res\n\n    ./rescript-editor-analysis.exe test src/src/MyFile.res\n|}\n\nlet main () =\n  let args = Array.to_list Sys.argv in\n  let debugLevel, args =\n    match args with\n    | _ :: \"debug-dump\" :: logLevel :: rest ->\n      ( (match logLevel with\n        | \"verbose\" -> Debug.Verbose\n        | \"regular\" -> Regular\n        | _ -> Off),\n        \"dummy\" :: rest )\n    | args -> (Off, args)\n  in\n  Debug.debugLevel := debugLevel;\n  let debug = debugLevel <> Debug.Off in\n  let printHeaderInfo path line col =\n    if debug then\n      Printf.printf \"Debug level: %s\\n%s:%s-%s\\n\\n\"\n        (match 
debugLevel with\n        | Debug.Verbose -> \"verbose\"\n        | Regular -> \"regular\"\n        | Off -> \"off\")\n        path line col\n  in\n  match args with\n  | [_; \"cache-project\"; rootPath] -> (\n    Cfg.readProjectConfigCache := false;\n    let uri = Uri.fromPath rootPath in\n    match Packages.getPackage ~uri with\n    | Some package -> Cache.cacheProject package\n    | None -> print_endline \"\\\"ERR\\\"\")\n  | [_; \"cache-delete\"; rootPath] -> (\n    Cfg.readProjectConfigCache := false;\n    let uri = Uri.fromPath rootPath in\n    match Packages.findRoot ~uri (Hashtbl.create 0) with\n    | Some (`Bs rootPath) -> (\n      match BuildSystem.getLibBs rootPath with\n      | None -> print_endline \"\\\"ERR\\\"\"\n      | Some libBs ->\n        Cache.deleteCache (Cache.targetFileFromLibBs libBs);\n        print_endline \"\\\"OK\\\"\")\n    | _ -> print_endline \"\\\"ERR: Did not find root \\\"\")\n  | [_; \"completion\"; path; line; col; currentFile] ->\n    printHeaderInfo path line col;\n    Commands.completion ~debug ~path\n      ~pos:(int_of_string line, int_of_string col)\n      ~currentFile\n  | [_; \"completionResolve\"; path; modulePath] ->\n    Commands.completionResolve ~path ~modulePath\n  | [_; \"definition\"; path; line; col] ->\n    Commands.definition ~path\n      ~pos:(int_of_string line, int_of_string col)\n      ~debug\n  | [_; \"typeDefinition\"; path; line; col] ->\n    Commands.typeDefinition ~path\n      ~pos:(int_of_string line, int_of_string col)\n      ~debug\n  | [_; \"documentSymbol\"; path] -> DocumentSymbol.command ~path\n  | [_; \"hover\"; path; line; col; currentFile; supportsMarkdownLinks] ->\n    Commands.hover ~path\n      ~pos:(int_of_string line, int_of_string col)\n      ~currentFile ~debug\n      ~supportsMarkdownLinks:\n        (match supportsMarkdownLinks with\n        | \"true\" -> true\n        | _ -> false)\n  | [\n   _; \"signatureHelp\"; path; line; col; currentFile; allowForConstructorPayloads;\n  ] ->\n    
Commands.signatureHelp ~path\n      ~pos:(int_of_string line, int_of_string col)\n      ~currentFile ~debug\n      ~allowForConstructorPayloads:\n        (match allowForConstructorPayloads with\n        | \"true\" -> true\n        | _ -> false)\n  | [_; \"inlayHint\"; path; line_start; line_end; maxLength] ->\n    Commands.inlayhint ~path\n      ~pos:(int_of_string line_start, int_of_string line_end)\n      ~maxLength ~debug\n  | [_; \"codeLens\"; path] -> Commands.codeLens ~path ~debug\n  | [_; \"codeAction\"; path; startLine; startCol; endLine; endCol; currentFile]\n    ->\n    Commands.codeAction ~path\n      ~startPos:(int_of_string startLine, int_of_string startCol)\n      ~endPos:(int_of_string endLine, int_of_string endCol)\n      ~currentFile ~debug\n  | [_; \"codemod\"; path; line; col; typ; hint] ->\n    let typ =\n      match typ with\n      | \"add-missing-cases\" -> Codemod.AddMissingCases\n      | _ -> raise (Failure \"unsupported type\")\n    in\n    let res =\n      Codemod.transform ~path\n        ~pos:(int_of_string line, int_of_string col)\n        ~debug ~typ ~hint\n      |> Json.escape\n    in\n    Printf.printf \"\\\"%s\\\"\" res\n  | [_; \"diagnosticSyntax\"; path] -> Commands.diagnosticSyntax ~path\n  | _ :: \"reanalyze\" :: _ ->\n    let len = Array.length Sys.argv in\n    for i = 1 to len - 2 do\n      Sys.argv.(i) <- Sys.argv.(i + 1)\n    done;\n    Sys.argv.(len - 1) <- \"\";\n    Reanalyze.cli ()\n  | [_; \"references\"; path; line; col] ->\n    Commands.references ~path\n      ~pos:(int_of_string line, int_of_string col)\n      ~debug\n  | [_; \"rename\"; path; line; col; newName] ->\n    Commands.rename ~path\n      ~pos:(int_of_string line, int_of_string col)\n      ~newName ~debug\n  | [_; \"semanticTokens\"; currentFile] ->\n    SemanticTokens.semanticTokens ~currentFile\n  | [_; \"createInterface\"; path; cmiFile] ->\n    Printf.printf \"\\\"%s\\\"\"\n      (Json.escape (CreateInterface.command ~path ~cmiFile))\n  | [_; 
\"format\"; path] ->\n    Printf.printf \"\\\"%s\\\"\" (Json.escape (Commands.format ~path))\n  | [_; \"test\"; path] -> Commands.test ~path\n  | args when List.mem \"-h\" args || List.mem \"--help\" args -> prerr_endline help\n  | _ ->\n    prerr_endline help;\n    exit 1\n;;\n\nmain ()\n"
  },
  {
    "path": "analysis/dune",
    "content": "(dirs bin src reanalyze vendor)\n\n(env\n (dev\n  (env-vars\n   (CPPO_FLAGS -U=RELEASE)))\n (release\n  (env-vars\n   (CPPO_FLAGS -D=RELEASE))\n  (ocamlopt_flags\n   (:standard -O3 -unbox-closures)))\n (static\n  (env-vars\n   (CPPO_FLAGS -D=RELEASE))\n  (ocamlopt_flags\n   (:standard -O3 -unbox-closures))))\n"
  },
  {
    "path": "analysis/examples/example-project/.gitignore",
    "content": "lib\n.merlin\n"
  },
  {
    "path": "analysis/examples/example-project/.vscode/settings.json",
    "content": "{\n  \"rescript.settings.logLevel\": \"log\"\n}\n"
  },
  {
    "path": "analysis/examples/example-project/package.json",
    "content": "{\n  \"name\": \"tryit\",\n  \"dependencies\": {\n    \"@rescript/react\": \"^0.14.0\",\n    \"rescript\": \"12.1.0\"\n  },\n  \"scripts\": {\n    \"build\": \"rescript\",\n    \"start\": \"rescript build -w\",\n    \"clean\": \"rescript clean\",\n    \"format\": \"rescript format\"\n  }\n}\n"
  },
  {
    "path": "analysis/examples/example-project/rescript.json",
    "content": "{\n  \"name\": \"tryit\",\n  \"sources\": \"src\",\n  \"compiler-flags\": [],\n  \"warnings\": {\n    \"number\": \"-32-26-27-33\"\n  },\n  \"dependencies\": [\n    \"@rescript/react\"\n  ],\n  \"jsx\": {\n    \"version\": 4\n  },\n  \"namespace\": \"my-namespace\",\n  \"package-specs\": {\n    \"module\": \"esmodule\",\n    \"in-source\": true,\n    \"suffix\": \".res.js\"\n  }\n}\n"
  },
  {
    "path": "analysis/examples/example-project/src/Embeded.md",
    "content": "# Markdown Embedded Fenced Code Regression Test\n\n```re\nmodule Something = {\n  open Other;\n\n  let m = {name: \"Me\", age: 0};\n  let animal = Things(10);\n  let other = Things(2);\n  let me: animals = People(\"Hie\");\n  let x = something + 10;\n  let r = m.name;\n\n  let awesome = 20;\n  if (true) {\n    ()\n  }\n};\n```\n\n```reason\nmodule Something = {\n  open Other;\n\n  let m = {name: \"Me\", age: 0};\n  let animal = Things(10);\n  let other = Things(2);\n  let me: animals = People(\"Hie\");\n  let x = something + 10;\n  let r = m.name;\n\n  let awesome = 20;\n  if (true) {\n    ()\n  }\n};\n```\n\n```reasonml\nmodule Something = {\n  open Other;\n\n  let m = {name: \"Me\", age: 0};\n  let animal = Things(10);\n  let other = Things(2);\n  let me: animals = People(\"Hie\");\n  let x = something + 10;\n  let r = m.name;\n\n  let awesome = 20;\n  if (true) {\n    ()\n  }\n};\n```"
  },
  {
    "path": "analysis/examples/example-project/src/Hello.res",
    "content": "let someLongName = 10\n\nlet otherLongName = \"string\"\n\nlet x = {\"a\": 3}\n\nlet r = Other.something\n\nlet l = More.inner + More.n + Other.inner\n\nlet n = More.n\n\nlet _ = More.party\nlet _ = string_of_bool\n\n/* let m = {More.a: 2, b: 32.}; */\n\nmodule Something = {\n  open Other\n\n  let m = {name: \"Me\", age: 0}\n  let animal = Things(10)\n  let other = Things(2)\n  let me: animals = People(\"Hie\")\n  let x = something + 10\n  let r = m.name\n\n  let awesome = 20\n  if true {\n    ()\n  }\n}\n\nopen! Something\n\nlet y = x + 10\n\nswitch me {\n| Things(n) => ()\n| _ => ()\n}\n\nlet z = x * x\n\nlet aThing = 10 + Other.something\n\n@ocaml.doc(\" Some docs about this **awesome** thing. \")\nlet awesome =\n  100 + m.age\n\nlet thing = \"thing\"\n\nlet transform = (x, y) => x ++ Float.toString(y)\n\nlet z = transform(\"hello \", 5.)\n\nlet zzz = 1\n\nlet more = 20\n\n@ocaml.doc(\" Something here \")\nlet added =\n  10 + awesome\n\nopen Other\n\n@ocaml.doc(\" Some more documentation about this \")\nlet awesome = x => x + 2\n\nlet a = list{\"hello\", \"my fine\" ++ \"folks\", \"in boonville\"}\n\nlet div = (~x, ~y, ~children, ()) => 10\n\nlet m = <div x=\"10\" y=\"20\" />\n\nlet something = animal =>\n  switch animal {\n  | blank => ()\n  }\n\nsomething(animal)\n\nlet someFunction = (memorableName, {contents}) => {\n  let innerMemorable = 20\n  memorableName + innerMemorable\n}\n\n/* let awesome = 10000; */\n\n/* let awesome = 111; */\n\nlet z = 10\n\nlet z = later\n\nlet m = Other.later\n\nfor _index in 0 to 10 {\n  Console.log(\"hellO\")\n}\n\nmodule OneOneOneOne = {\n  module TwoTwoTwoTwo = {\n    let xxxxxxxxxx = 10\n  }\n}\nlet r = OneOneOneOne.TwoTwoTwoTwo.xxxxxxxxxx\n\ntype awesome = {\n  one: string,\n  two: float,\n}\n\nopen OneOneOneOne.TwoTwoTwoTwo\n\ninclude OneOneOneOne.TwoTwoTwoTwo\n\ninclude More\n\nlet _ = Other.oo.person.name\n\ntype lots =\n  | Parties\n  | Plutocrats(int, float)\n  | Possums\n  | Oppossums\n\nlet y = 
Some(10 + awesome(3))\n\nlet z = {contents: 30}\nlet party = {one: \"one\", two: 2.}\n\nlet {one, two} = party\n\nlet thing = () => 34 + 43\n\ntype more = awesome\n\nlet {contents} = z\n\nswitch y {\n| Some(u) => ()\n| None => ()\n}\n\n/* let x = [%raw \" hello\"]; */\n\nlet awesome = \"hello\"\n\ntype shortReference = (string, list<string>, string)\n\ntype reference = {\n  uri: string,\n  moduleName: string,\n  modulePath: list<string>,\n  name: string,\n}\n\ntype typeSource =\n  | Builtin(string)\n  | Public(reference)\n  | NotFound\n\ntype lockfile = {\n  version: int,\n  pastVersions: Belt.HashMap.Int.t<list<(shortReference, int)>>,\n  current: list<(shortReference, int)>,\n}\n"
  },
  {
    "path": "analysis/examples/example-project/src/Hello.res.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as More$MyNamespace from \"./More.res.js\";\nimport * as Other$MyNamespace from \"./Other.res.js\";\nimport * as JsxRuntime from \"react/jsx-runtime\";\n\nlet x = {\n  a: 3\n};\n\nlet l = (More$MyNamespace.inner + More$MyNamespace.n | 0) + Other$MyNamespace.inner | 0;\n\nlet me = {\n  TAG: \"People\",\n  _0: \"Hie\"\n};\n\nlet x$1 = Other$MyNamespace.something + 10 | 0;\n\nlet Something_m = {\n  name: \"Me\",\n  age: 0\n};\n\nlet Something_animal = {\n  TAG: \"Things\",\n  _0: 10\n};\n\nlet Something_other = {\n  TAG: \"Things\",\n  _0: 2\n};\n\nlet Something = {\n  m: Something_m,\n  animal: Something_animal,\n  other: Something_other,\n  me: me,\n  x: x$1,\n  r: \"Me\",\n  awesome: 20\n};\n\nlet aThing = 10 + Other$MyNamespace.something | 0;\n\nfunction transform(x, y) {\n  return x + y.toString();\n}\n\ntransform(\"hello \", 5);\n\nlet added = 110;\n\nlet a_1 = {\n  hd: \"my finefolks\",\n  tl: {\n    hd: \"in boonville\",\n    tl: /* [] */0\n  }\n};\n\nlet a = {\n  hd: \"hello\",\n  tl: a_1\n};\n\nfunction div(x, y, children, param) {\n  return 10;\n}\n\nJsxRuntime.jsx(\"div\", {\n  x: \"10\",\n  y: \"20\"\n});\n\nfunction something(animal) {\n  \n}\n\nfunction someFunction(memorableName, param) {\n  return memorableName + 20 | 0;\n}\n\nfor (let _index = 0; _index <= 10; ++_index) {\n  console.log(\"hellO\");\n}\n\nlet TwoTwoTwoTwo = {\n  xxxxxxxxxx: 10\n};\n\nlet OneOneOneOne = {\n  TwoTwoTwoTwo: TwoTwoTwoTwo\n};\n\nlet y = 15;\n\nlet z = {\n  contents: 30\n};\n\nfunction thing() {\n  return 77;\n}\n\nlet contents = z.contents;\n\nlet someLongName = 10;\n\nlet otherLongName = \"string\";\n\nlet zzz = 1;\n\nlet more = 20;\n\nlet m = Other$MyNamespace.later;\n\nlet r = 10;\n\nlet xxxxxxxxxx = 10;\n\nlet contnets = More$MyNamespace.contnets;\n\nlet inner = More$MyNamespace.inner;\n\nlet n = More$MyNamespace.n;\n\nlet party = {\n  one: \"one\",\n  two: 2\n};\n\nlet one = \"one\";\n\nlet 
two = 2;\n\nlet awesome = \"hello\";\n\nexport {\n  someLongName,\n  otherLongName,\n  x,\n  l,\n  Something,\n  aThing,\n  transform,\n  zzz,\n  more,\n  added,\n  a,\n  div,\n  something,\n  someFunction,\n  m,\n  OneOneOneOne,\n  r,\n  xxxxxxxxxx,\n  contnets,\n  inner,\n  n,\n  y,\n  z,\n  party,\n  one,\n  two,\n  thing,\n  contents,\n  awesome,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/example-project/src/Json.res",
    "content": "@@ocaml.doc(\" # Json parser\n *\n * Works with bucklescript and bsb-native\n *\n * ## Basics\n *\n * ```\n * open Json.Infix; /* for the nice infix operators */\n * let raw = {|{\\\"hello\\\": \\\"folks\\\"}|};\n * let who = Json.parse(raw) |> Json.get(\\\"hello\\\") |?> Json.string;\n * Js.log(who);\n * ```\n *\n * ## Parse & stringify\n *\n * @doc parse, stringify\n *\n * ## Accessing descendents\n *\n * @doc get, nth, getPath\n *\n * ## Coercing to types\n *\n * @doc string, number, array, obj, bool, null\n *\n * ## The JSON type\n *\n * @doc t\n *\n * ## Infix operators for easier working\n *\n * @doc Infix\n \")\n\nexternal parseFloat: string => float = \"parseFloat\"\n\ntype rec t =\n  | String(string)\n  | Number(float)\n  | Array(list<t>)\n  | Object(list<(string, t)>)\n  | True\n  | False\n  | Null\n\nlet string_of_number = f => {\n  let s = Float.toString(f)\n  if String.get(s, String.length(s) - 1) == Some(\".\") {\n    String.slice(s, ~start=0, ~end=String.length(s) - 1)\n  } else {\n    s\n  }\n}\n\n@ocaml.doc(\"\n * This module is provided for easier working with optional values.\n \")\nmodule Infix = {\n  @ocaml.doc(\" The \\\"force unwrap\\\" operator\n   *\n   * If you're sure there's a value, you can force it.\n   * ```\n   * open Json.Infix;\n   * let x: int = Some(10) |! \\\"Expected this to be present\\\";\n   * Js.log(x);\n   * ```\n   *\n   * But you gotta be sure, otherwise it will throw.\n   * ```reason;raises\n   * open Json.Infix;\n   * let x: int = None |! \\\"This will throw\\\";\n   * ```\n   \")\n  let \\\"|!\" = (o, d) =>\n    switch o {\n    | None => failwith(d)\n    | Some(v) => v\n    }\n  @ocaml.doc(\" The \\\"upwrap with default\\\" operator\n   * ```\n   * open Json.Infix;\n   * let x: int = Some(10) |? 4;\n   * let y: int = None |? 
5;\n   * Js.log2(x, y);\n   * ```\n   \")\n  let \\\"|?\" = (o, d) =>\n    switch o {\n    | None => d\n    | Some(v) => v\n    }\n  @ocaml.doc(\" The \\\"transform contents into new optional\\\" operator\n   * ```\n   * open Json.Infix;\n   * let maybeInc = x => x > 5 ? Some(x + 1) : None;\n   * let x: option(int) = Some(14) |?> maybeInc;\n   * let y: option(int) = None |?> maybeInc;\n   * ```\n   \")\n  let \\\"|?>\" = (o, fn) =>\n    switch o {\n    | None => None\n    | Some(v) => fn(v)\n    }\n  @ocaml.doc(\" The \\\"transform contents into new value & then re-wrap\\\" operator\n   * ```\n   * open Json.Infix;\n   * let inc = x => x + 1;\n   * let x: option(int) = Some(7) |?>> inc;\n   * let y: option(int) = None |?>> inc;\n   * Js.log2(x, y);\n   * ```\n   \")\n  let \\\"|?>>\" = (o, fn) =>\n    switch o {\n    | None => None\n    | Some(v) => Some(fn(v))\n    }\n  @ocaml.doc(\" \\\"handle the value if present, otherwise here's the default\\\"\n   *\n   * It's called fold because that's what people call it :?. 
It's the same as \\\"transform contents to new value\\\" + \\\"unwrap with default\\\".\n   *\n   * ```\n   * open Json.Infix;\n   * let inc = x => x + 1;\n   * let x: int = fold(Some(4), 10, inc);\n   * let y: int = fold(None, 2, inc);\n   * Js.log2(x, y);\n   * ```\n   \")\n  let fold = (o, d, f) =>\n    switch o {\n    | None => d\n    | Some(v) => f(v)\n    }\n}\n\nlet escape = text => {\n  let ln = String.length(text)\n  let rec loop = (i, acc) =>\n    if i < ln {\n      let next = switch String.get(text, i) {\n      | Some(\"\\x0c\") => acc ++ \"\\\\f\"\n      | Some(\"\\\\\") => acc ++ \"\\\\\\\\\"\n      | Some(\"\\\"\") => acc ++ \"\\\\\\\"\"\n      | Some(\"\\n\") => acc ++ \"\\\\n\"\n      | Some(\"\\b\") => acc ++ \"\\\\b\"\n      | Some(\"\\r\") => acc ++ \"\\\\r\"\n      | Some(\"\\t\") => acc ++ \"\\\\t\"\n      | Some(c) => acc ++ c\n      | None => acc\n      }\n      loop(i + 1, next)\n    } else {\n      acc\n    }\n  loop(0, \"\")\n}\n\n@ocaml.doc(\" ```\n * let text = {|{\\\"hello\\\": \\\"folks\\\", \\\"aa\\\": [2, 3, \\\"four\\\"]}|};\n * let result = Json.stringify(Json.parse(text));\n * Js.log(result);\n * assert(text == result);\n * ```\n \")\nlet rec stringify = t =>\n  switch t {\n  | String(value) => \"\\\"\" ++ (escape(value) ++ \"\\\"\")\n  | Number(num) => string_of_number(num)\n  | Array(items) => {\n      let rec join = (items, sep) =>\n        switch items {\n        | list{} => \"\"\n        | list{x} => x\n        | list{x, ...rest} => x ++ sep ++ join(rest, sep)\n        }\n      let parts = List.map(items, stringify)\n      \"[\" ++ join(parts, \", \") ++ \"]\"\n    }\n  | Object(items) => {\n      let rec join = (items, sep) =>\n        switch items {\n        | list{} => \"\"\n        | list{x} => x\n        | list{x, ...rest} => x ++ sep ++ join(rest, sep)\n        }\n      let parts = List.map(items, ((k, v)) => \"\\\"\" ++ (escape(k) ++ (\"\\\": \" ++ stringify(v))))\n      \"{\" ++ join(parts, \", \") ++ \"}\"\n    }\n  
| True => \"true\"\n  | False => \"false\"\n  | Null => \"null\"\n  }\n\nlet white = n => {\n  let rec loop = (i, acc) =>\n    if i < n {\n      loop(i + 1, acc ++ \" \")\n    } else {\n      acc\n    }\n  loop(0, \"\")\n}\n\nlet rec stringifyPretty = (~indent=0, t) => {\n  let rec join = (items, sep) =>\n    switch items {\n    | list{} => \"\"\n    | list{x} => x\n    | list{x, ...rest} => x ++ sep ++ join(rest, sep)\n    }\n  switch t {\n  | String(value) => \"\\\"\" ++ (escape(value) ++ \"\\\"\")\n  | Number(num) => string_of_number(num)\n  | Array(list{}) => \"[]\"\n  | Array(items) => {\n      let parts = List.map(items, item => stringifyPretty(~indent=indent + 2, item))\n      \"[\\n\" ++\n      white(indent + 2) ++\n      join(parts, \",\\n\" ++ white(indent + 2)) ++\n      \"\\n\" ++\n      white(indent) ++ \"]\"\n    }\n  | Object(list{}) => \"{}\"\n  | Object(items) => {\n      let parts = List.map(items, ((k, v)) =>\n        \"\\\"\" ++ (escape(k) ++ (\"\\\": \" ++ stringifyPretty(~indent=indent + 2, v)))\n      )\n      \"{\\n\" ++\n      white(indent + 2) ++\n      join(parts, \",\\n\" ++ white(indent + 2)) ++\n      \"\\n\" ++\n      white(indent) ++ \"}\"\n    }\n  | True => \"true\"\n  | False => \"false\"\n  | Null => \"null\"\n  }\n}\n\nlet unwrap = (message, t) =>\n  switch t {\n  | Some(v) => v\n  | None => failwith(message)\n  }\n\n@nodoc\nmodule Parser = {\n  let split_by = (~keep_empty=false, is_delim, str) => {\n    let len = String.length(str)\n    let rec loop = (acc, last_pos, pos) =>\n      if pos == -1 {\n        if last_pos == 0 && !keep_empty {\n          acc\n        } else {\n          list{String.slice(str, ~start=0, ~end=last_pos), ...acc}\n        }\n      } else if is_delim(String.get(str, pos)) {\n        let new_len = last_pos - pos - 1\n        if new_len != 0 || keep_empty {\n          let v = String.slice(str, ~start=pos + 1, ~end=pos + 1 + new_len)\n          loop(list{v, ...acc}, pos, pos - 1)\n        } else {\n         
 loop(acc, pos, pos - 1)\n        }\n      } else {\n        loop(acc, last_pos, pos - 1)\n      }\n    loop(list{}, len, len - 1)\n  }\n  let fail = (text, pos, message) => {\n    let pre = String.slice(text, ~start=0, ~end=pos)\n    let lines = split_by(c => c == Some(\"\\n\"), pre)\n    let count = List.length(lines)\n    let last = count > 0 ? List.getOrThrow(lines, count - 1) : \"\"\n    let col = String.length(last) + 1\n    let line = List.length(lines)\n    let string =\n      \"Error \\\"\" ++\n      message ++\n      \"\\\" at \" ++\n      Int.toString(line) ++\n      \":\" ++\n      Int.toString(col) ++\n      \" -> \" ++\n      last ++ \"\\n\"\n    failwith(string)\n  }\n  let rec skipToNewline = (text, pos) =>\n    if pos >= String.length(text) {\n      pos\n    } else if String.get(text, pos) == Some(\"\\n\") {\n      pos + 1\n    } else {\n      skipToNewline(text, pos + 1)\n    }\n  let stringTail = text => {\n    let len = String.length(text)\n    if len > 1 {\n      String.slice(text, ~start=1, ~end=len)\n    } else {\n      \"\"\n    }\n  }\n  let rec skipToCloseMultilineComment = (text, pos) =>\n    if pos + 1 >= String.length(text) {\n      failwith(\"Unterminated comment\")\n    } else if String.get(text, pos) == Some(\"*\") && String.get(text, pos + 1) == Some(\"/\") {\n      pos + 2\n    } else {\n      skipToCloseMultilineComment(text, pos + 1)\n    }\n  let rec skipWhite = (text, pos) =>\n    if (\n      pos < String.length(text) &&\n        (String.get(text, pos) == Some(\" \") ||\n          (String.get(text, pos) == Some(\"\\t\") ||\n          (String.get(text, pos) == Some(\"\\n\") || String.get(text, pos) == Some(\"\\r\"))))\n    ) {\n      skipWhite(text, pos + 1)\n    } else {\n      pos\n    }\n  let parseString = (text, pos) => {\n    let ln = String.length(text)\n    let rec loop = (i, acc) =>\n      i >= ln\n        ? 
fail(text, i, \"Unterminated string\")\n        : switch String.get(text, i) {\n          | Some(\"\\\"\") => (i + 1, acc)\n          | Some(\"\\\\\") =>\n            i + 1 >= ln\n              ? fail(text, i, \"Unterminated string\")\n              : switch String.get(text, i + 1) {\n                | Some(\"/\") => loop(i + 2, acc ++ \"/\")\n                | Some(\"f\") => loop(i + 2, acc ++ \"\\x0c\")\n                | _ =>\n                  let escaped = String.slice(text, ~start=i, ~end=i + 2)\n                  loop(i + 2, acc ++ escaped)\n                }\n          | Some(c) => loop(i + 1, acc ++ c)\n          | None => (i, acc)\n          }\n    let (final, result) = loop(pos, \"\")\n    (result, final)\n  }\n  let parseDigits = (text, pos) => {\n    let len = String.length(text)\n    let rec loop = i =>\n      if i >= len {\n        i\n      } else {\n        switch String.get(text, i) {\n        | Some(\"0\")\n        | Some(\"1\")\n        | Some(\"2\")\n        | Some(\"3\")\n        | Some(\"4\")\n        | Some(\"5\")\n        | Some(\"6\")\n        | Some(\"7\")\n        | Some(\"8\")\n        | Some(\"9\") =>\n          loop(i + 1)\n        | _ => i\n        }\n      }\n    loop(pos + 1)\n  }\n  let parseWithDecimal = (text, pos) => {\n    let pos = parseDigits(text, pos)\n    if pos < String.length(text) && String.get(text, pos) == Some(\".\") {\n      let pos = parseDigits(text, pos + 1)\n      pos\n    } else {\n      pos\n    }\n  }\n  let parseNumber = (text, pos) => {\n    let pos = parseWithDecimal(text, pos)\n    let ln = String.length(text)\n    if pos < ln - 1 && (String.get(text, pos) == Some(\"E\") || String.get(text, pos) == Some(\"e\")) {\n      let pos = switch String.get(text, pos + 1) {\n      | Some(\"-\")\n      | Some(\"+\") =>\n        pos + 2\n      | _ => pos + 1\n      }\n      parseDigits(text, pos)\n    } else {\n      pos\n    }\n  }\n  let parseNegativeNumber = (text, pos) => {\n    let final = if String.get(text, 
pos) == Some(\"-\") {\n      parseNumber(text, pos + 1)\n    } else {\n      parseNumber(text, pos)\n    }\n    let numStr = String.slice(text, ~start=pos, ~end=final)\n    (Number(parseFloat(numStr)), final)\n  }\n  let expect = (char, text, pos, message) =>\n    if String.get(text, pos) != Some(char) {\n      fail(text, pos, \"Expected: \" ++ message)\n    } else {\n      pos + 1\n    }\n  let parseComment: 'a. (string, int, (string, int) => 'a) => 'a = (text, pos, next) =>\n    if String.get(text, pos) != Some(\"/\") {\n      if String.get(text, pos) == Some(\"*\") {\n        next(text, skipToCloseMultilineComment(text, pos + 1))\n      } else {\n        failwith(\"Invalid syntax\")\n      }\n    } else {\n      next(text, skipToNewline(text, pos + 1))\n    }\n  let maybeSkipComment = (text, pos) =>\n    if pos < String.length(text) && String.get(text, pos) == Some(\"/\") {\n      if pos + 1 < String.length(text) && String.get(text, pos + 1) == Some(\"/\") {\n        skipToNewline(text, pos + 1)\n      } else if pos + 1 < String.length(text) && String.get(text, pos + 1) == Some(\"*\") {\n        skipToCloseMultilineComment(text, pos + 1)\n      } else {\n        fail(text, pos, \"Invalid synatx\")\n      }\n    } else {\n      pos\n    }\n  let rec skip = (text, pos) =>\n    if pos == String.length(text) {\n      pos\n    } else {\n      let n = maybeSkipComment(text, skipWhite(text, pos))\n      if n > pos {\n        skip(text, n)\n      } else {\n        n\n      }\n    }\n  let rec parse = (text, pos) =>\n    if pos >= String.length(text) {\n      fail(text, pos, \"Reached end of file without being done parsing\")\n    } else {\n      switch String.get(text, pos) {\n      | Some(\"/\") => parseComment(text, pos + 1, parse)\n      | Some(\"[\") => parseArray(text, pos + 1)\n      | Some(\"{\") => parseObject(text, pos + 1)\n      | Some(\"n\") =>\n        if String.slice(text, ~start=pos, ~end=pos + 4) == \"null\" {\n          (Null, pos + 4)\n        } else 
{\n          fail(text, pos, \"unexpected character\")\n        }\n      | Some(\"t\") =>\n        if String.slice(text, ~start=pos, ~end=pos + 4) == \"true\" {\n          (True, pos + 4)\n        } else {\n          fail(text, pos, \"unexpected character\")\n        }\n      | Some(\"f\") =>\n        if String.slice(text, ~start=pos, ~end=pos + 5) == \"false\" {\n          (False, pos + 5)\n        } else {\n          fail(text, pos, \"unexpected character\")\n        }\n      | Some(\"\\n\")\n      | Some(\"\\t\")\n      | Some(\" \")\n      | Some(\"\\r\") =>\n        parse(text, skipWhite(text, pos))\n      | Some(\"\\\"\") =>\n        let (s, pos) = parseString(text, pos + 1)\n        (String(s), pos)\n      | Some(\"-\")\n      | Some(\"0\")\n      | Some(\"1\")\n      | Some(\"2\")\n      | Some(\"3\")\n      | Some(\"4\")\n      | Some(\"5\")\n      | Some(\"6\")\n      | Some(\"7\")\n      | Some(\"8\")\n      | Some(\"9\") =>\n        parseNegativeNumber(text, pos)\n      | _ => fail(text, pos, \"unexpected character\")\n      }\n    }\n  and parseArrayValue = (text, pos) => {\n    let pos = skip(text, pos)\n    let (value, pos) = parse(text, pos)\n    let pos = skip(text, pos)\n    switch String.get(text, pos) {\n    | Some(\",\") =>\n      let pos = skip(text, pos + 1)\n      if String.get(text, pos) == Some(\"]\") {\n        (list{value}, pos + 1)\n      } else {\n        let (rest, pos) = parseArrayValue(text, pos)\n        (list{value, ...rest}, pos)\n      }\n    | Some(\"]\") => (list{value}, pos + 1)\n    | _ => fail(text, pos, \"unexpected character\")\n    }\n  }\n  and parseArray = (text, pos) => {\n    let pos = skip(text, pos)\n    switch String.get(text, pos) {\n    | Some(\"]\") => (Array(list{}), pos + 1)\n    | _ =>\n      let (items, pos) = parseArrayValue(text, pos)\n      (Array(items), pos)\n    }\n  }\n  and parseObjectValue = (text, pos) => {\n    let pos = skip(text, pos)\n    if String.get(text, pos) != Some(\"\\\"\") {\n      
fail(text, pos, \"Expected string\")\n    } else {\n      let (key, pos) = parseString(text, pos + 1)\n      let pos = skip(text, pos)\n      let pos = expect(\":\", text, pos, \"Colon\")\n      let (value, pos) = parse(text, pos)\n      let pos = skip(text, pos)\n      switch String.get(text, pos) {\n      | Some(\",\") =>\n        let pos = skip(text, pos + 1)\n        if String.get(text, pos) == Some(\"}\") {\n          (list{(key, value)}, pos + 1)\n        } else {\n          let (rest, pos) = parseObjectValue(text, pos)\n          (list{(key, value), ...rest}, pos)\n        }\n      | Some(\"}\") => (list{(key, value)}, pos + 1)\n      | _ =>\n        let (rest, pos) = parseObjectValue(text, pos)\n        (list{(key, value), ...rest}, pos)\n      }\n    }\n  }\n  and parseObject = (text, pos) => {\n    let pos = skip(text, pos)\n    if String.get(text, pos) == Some(\"}\") {\n      (Object(list{}), pos + 1)\n    } else {\n      let (pairs, pos) = parseObjectValue(text, pos)\n      (Object(pairs), pos)\n    }\n  }\n}\n\n@ocaml.doc(\" Turns some text into a json object. throws on failure \")\nlet parse = text => {\n  let (item, pos) = Parser.parse(text, 0)\n  let pos = Parser.skip(text, pos)\n  if pos < String.length(text) {\n    failwith(\n      \"Extra data after parse finished: \" ++\n      String.slice(text, ~start=pos, ~end=String.length(text)),\n    )\n  } else {\n    item\n  }\n}\n\n/* Accessor helpers */\nlet bind = (v, fn) =>\n  switch v {\n  | None => None\n  | Some(v) => fn(v)\n  }\n\n@ocaml.doc(\" If `t` is an object, get the value associated with the given string key \")\nlet get = (key, t) =>\n  switch t {\n  | Object(items) => {\n      let rec find = items =>\n        switch items {\n        | list{} => None\n        | list{(k, v), ...rest} => k == key ? 
Some(v) : find(rest)\n        }\n      find(items)\n    }\n  | _ => None\n  }\n\n@ocaml.doc(\" If `t` is an array, get the value associated with the given index \")\nlet nth = (n, t) =>\n  switch t {\n  | Array(items) =>\n    if n < List.length(items) {\n      Some(List.getOrThrow(items, n))\n    } else {\n      None\n    }\n  | _ => None\n  }\n\nlet string = t =>\n  switch t {\n  | String(s) => Some(s)\n  | _ => None\n  }\n\nlet number = t =>\n  switch t {\n  | Number(s) => Some(s)\n  | _ => None\n  }\n\nlet array = t =>\n  switch t {\n  | Array(s) => Some(s)\n  | _ => None\n  }\n\nlet obj = t =>\n  switch t {\n  | Object(s) => Some(s)\n  | _ => None\n  }\n\nlet bool = t =>\n  switch t {\n  | True => Some(true)\n  | False => Some(false)\n  | _ => None\n  }\n\nlet null = t =>\n  switch t {\n  | Null => Some()\n  | _ => None\n  }\n\nlet rec parsePath = (keyList, t) =>\n  switch keyList {\n  | list{} => Some(t)\n  | list{head, ...rest} =>\n    switch get(head, t) {\n    | None => None\n    | Some(value) => parsePath(rest, value)\n    }\n  }\n\n@ocaml.doc(\" Get a deeply nested value from an object `t`.\n * ```\n * open Json.Infix;\n * let json = Json.parse({|{\\\"a\\\": {\\\"b\\\": {\\\"c\\\": 2}}}|});\n * let num = Json.getPath(\\\"a.b.c\\\", json) |?> Json.number;\n * assert(num == Some(2.))\n * ```\n \")\nlet getPath = (path, t) => {\n  let keys = Parser.split_by(c => c == Some(\".\"), path)\n  parsePath(keys, t)\n}\n"
  },
  {
    "path": "analysis/examples/example-project/src/Json.res.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Pervasives from \"@rescript/runtime/lib/es6/Pervasives.js\";\nimport * as Stdlib_List from \"@rescript/runtime/lib/es6/Stdlib_List.js\";\nimport * as Primitive_object from \"@rescript/runtime/lib/es6/Primitive_object.js\";\nimport * as Primitive_option from \"@rescript/runtime/lib/es6/Primitive_option.js\";\n\nfunction string_of_number(f) {\n  let s = f.toString();\n  if (Primitive_object.equal(s[s.length - 1 | 0], \".\")) {\n    return s.slice(0, s.length - 1 | 0);\n  } else {\n    return s;\n  }\n}\n\nfunction $pipe$bang(o, d) {\n  if (o !== undefined) {\n    return Primitive_option.valFromOption(o);\n  } else {\n    return Pervasives.failwith(d);\n  }\n}\n\nfunction $pipe$question(o, d) {\n  if (o !== undefined) {\n    return Primitive_option.valFromOption(o);\n  } else {\n    return d;\n  }\n}\n\nfunction $pipe$question$great(o, fn) {\n  if (o !== undefined) {\n    return fn(Primitive_option.valFromOption(o));\n  }\n}\n\nfunction $pipe$question$great$great(o, fn) {\n  if (o !== undefined) {\n    return Primitive_option.some(fn(Primitive_option.valFromOption(o)));\n  }\n}\n\nfunction fold(o, d, f) {\n  if (o !== undefined) {\n    return f(Primitive_option.valFromOption(o));\n  } else {\n    return d;\n  }\n}\n\nlet Infix = {\n  $pipe$bang: $pipe$bang,\n  $pipe$question: $pipe$question,\n  $pipe$question$great: $pipe$question$great,\n  $pipe$question$great$great: $pipe$question$great$great,\n  fold: fold\n};\n\nfunction escape(text) {\n  let ln = text.length;\n  let _i = 0;\n  let _acc = \"\";\n  while (true) {\n    let acc = _acc;\n    let i = _i;\n    if (i >= ln) {\n      return acc;\n    }\n    let c = text[i];\n    let next;\n    if (c !== undefined) {\n      switch (c) {\n        case \"\\\"\" :\n          next = acc + \"\\\\\\\"\";\n          break;\n        case \"\\\\\" :\n          next = acc + \"\\\\\\\\\";\n          break;\n        case \"\\b\" :\n          next = acc + 
\"\\\\b\";\n          break;\n        case \"\\n\" :\n          next = acc + \"\\\\n\";\n          break;\n        case \"\\r\" :\n          next = acc + \"\\\\r\";\n          break;\n        case \"\\t\" :\n          next = acc + \"\\\\t\";\n          break;\n        case \"\\x0c\" :\n          next = acc + \"\\\\f\";\n          break;\n        default:\n          next = acc + c;\n      }\n    } else {\n      next = acc;\n    }\n    _acc = next;\n    _i = i + 1 | 0;\n    continue;\n  };\n}\n\nfunction stringify(t) {\n  if (typeof t !== \"object\") {\n    switch (t) {\n      case \"True\" :\n        return \"true\";\n      case \"False\" :\n        return \"false\";\n      case \"Null\" :\n        return \"null\";\n    }\n  } else {\n    switch (t.TAG) {\n      case \"String\" :\n        return \"\\\"\" + (escape(t._0) + \"\\\"\");\n      case \"Number\" :\n        return string_of_number(t._0);\n      case \"Array\" :\n        let join = (items, sep) => {\n          if (items === 0) {\n            return \"\";\n          }\n          let rest = items.tl;\n          let x = items.hd;\n          if (rest !== 0) {\n            return x + sep + join(rest, sep);\n          } else {\n            return x;\n          }\n        };\n        let parts = Stdlib_List.map(t._0, stringify);\n        return \"[\" + join(parts, \", \") + \"]\";\n      case \"Object\" :\n        let join$1 = (items, sep) => {\n          if (items === 0) {\n            return \"\";\n          }\n          let rest = items.tl;\n          let x = items.hd;\n          if (rest !== 0) {\n            return x + sep + join$1(rest, sep);\n          } else {\n            return x;\n          }\n        };\n        let parts$1 = Stdlib_List.map(t._0, param => \"\\\"\" + (escape(param[0]) + (\"\\\": \" + stringify(param[1]))));\n        return \"{\" + join$1(parts$1, \", \") + \"}\";\n    }\n  }\n}\n\nfunction white(n) {\n  let _i = 0;\n  let _acc = \"\";\n  while (true) {\n    let acc = _acc;\n    let i = 
_i;\n    if (i >= n) {\n      return acc;\n    }\n    _acc = acc + \" \";\n    _i = i + 1 | 0;\n    continue;\n  };\n}\n\nfunction stringifyPretty(indentOpt, t) {\n  let indent = indentOpt !== undefined ? indentOpt : 0;\n  let join = (items, sep) => {\n    if (items === 0) {\n      return \"\";\n    }\n    let rest = items.tl;\n    let x = items.hd;\n    if (rest !== 0) {\n      return x + sep + join(rest, sep);\n    } else {\n      return x;\n    }\n  };\n  if (typeof t !== \"object\") {\n    switch (t) {\n      case \"True\" :\n        return \"true\";\n      case \"False\" :\n        return \"false\";\n      case \"Null\" :\n        return \"null\";\n    }\n  } else {\n    switch (t.TAG) {\n      case \"String\" :\n        return \"\\\"\" + (escape(t._0) + \"\\\"\");\n      case \"Number\" :\n        return string_of_number(t._0);\n      case \"Array\" :\n        let items = t._0;\n        if (items === 0) {\n          return \"[]\";\n        }\n        let parts = Stdlib_List.map(items, item => stringifyPretty(indent + 2 | 0, item));\n        return \"[\\n\" + white(indent + 2 | 0) + join(parts, \",\\n\" + white(indent + 2 | 0)) + \"\\n\" + white(indent) + \"]\";\n      case \"Object\" :\n        let items$1 = t._0;\n        if (items$1 === 0) {\n          return \"{}\";\n        }\n        let parts$1 = Stdlib_List.map(items$1, param => \"\\\"\" + (escape(param[0]) + (\"\\\": \" + stringifyPretty(indent + 2 | 0, param[1]))));\n        return \"{\\n\" + white(indent + 2 | 0) + join(parts$1, \",\\n\" + white(indent + 2 | 0)) + \"\\n\" + white(indent) + \"}\";\n    }\n  }\n}\n\nfunction unwrap(message, t) {\n  if (t !== undefined) {\n    return Primitive_option.valFromOption(t);\n  } else {\n    return Pervasives.failwith(message);\n  }\n}\n\nfunction split_by(keep_emptyOpt, is_delim, str) {\n  let keep_empty = keep_emptyOpt !== undefined ? 
keep_emptyOpt : false;\n  let len = str.length;\n  let _acc = /* [] */0;\n  let _last_pos = len;\n  let _pos = len - 1 | 0;\n  while (true) {\n    let pos = _pos;\n    let last_pos = _last_pos;\n    let acc = _acc;\n    if (pos === -1) {\n      if (last_pos === 0 && !keep_empty) {\n        return acc;\n      } else {\n        return {\n          hd: str.slice(0, last_pos),\n          tl: acc\n        };\n      }\n    }\n    if (is_delim(str[pos])) {\n      let new_len = (last_pos - pos | 0) - 1 | 0;\n      if (new_len !== 0 || keep_empty) {\n        let v = str.slice(pos + 1 | 0, (pos + 1 | 0) + new_len | 0);\n        _pos = pos - 1 | 0;\n        _last_pos = pos;\n        _acc = {\n          hd: v,\n          tl: acc\n        };\n        continue;\n      }\n      _pos = pos - 1 | 0;\n      _last_pos = pos;\n      continue;\n    }\n    _pos = pos - 1 | 0;\n    continue;\n  };\n}\n\nfunction fail(text, pos, message) {\n  let pre = text.slice(0, pos);\n  let lines = split_by(undefined, c => Primitive_object.equal(c, \"\\n\"), pre);\n  let count = Stdlib_List.length(lines);\n  let last = count > 0 ? 
Stdlib_List.getOrThrow(lines, count - 1 | 0) : \"\";\n  let col = last.length + 1 | 0;\n  let line = Stdlib_List.length(lines);\n  return Pervasives.failwith(\"Error \\\"\" + message + \"\\\" at \" + line.toString() + \":\" + col.toString() + \" -> \" + last + \"\\n\");\n}\n\nfunction skipToNewline(text, _pos) {\n  while (true) {\n    let pos = _pos;\n    if (pos >= text.length) {\n      return pos;\n    }\n    if (Primitive_object.equal(text[pos], \"\\n\")) {\n      return pos + 1 | 0;\n    }\n    _pos = pos + 1 | 0;\n    continue;\n  };\n}\n\nfunction stringTail(text) {\n  let len = text.length;\n  if (len > 1) {\n    return text.slice(1, len);\n  } else {\n    return \"\";\n  }\n}\n\nfunction skipToCloseMultilineComment(text, _pos) {\n  while (true) {\n    let pos = _pos;\n    if ((pos + 1 | 0) >= text.length) {\n      return Pervasives.failwith(\"Unterminated comment\");\n    }\n    if (Primitive_object.equal(text[pos], \"*\") && Primitive_object.equal(text[pos + 1 | 0], \"/\")) {\n      return pos + 2 | 0;\n    }\n    _pos = pos + 1 | 0;\n    continue;\n  };\n}\n\nfunction skipWhite(text, _pos) {\n  while (true) {\n    let pos = _pos;\n    if (!(pos < text.length && (Primitive_object.equal(text[pos], \" \") || Primitive_object.equal(text[pos], \"\\t\") || Primitive_object.equal(text[pos], \"\\n\") || Primitive_object.equal(text[pos], \"\\r\")))) {\n      return pos;\n    }\n    _pos = pos + 1 | 0;\n    continue;\n  };\n}\n\nfunction parseString(text, pos) {\n  let ln = text.length;\n  let loop = (_i, _acc) => {\n    while (true) {\n      let acc = _acc;\n      let i = _i;\n      if (i >= ln) {\n        return fail(text, i, \"Unterminated string\");\n      }\n      let c = text[i];\n      if (c === undefined) {\n        return [\n          i,\n          acc\n        ];\n      }\n      switch (c) {\n        case \"\\\"\" :\n          return [\n            i + 1 | 0,\n            acc\n          ];\n        case \"\\\\\" :\n          if ((i + 1 | 0) >= ln) {\n     
       return fail(text, i, \"Unterminated string\");\n          }\n          let match = text[i + 1 | 0];\n          if (match !== undefined) {\n            switch (match) {\n              case \"/\" :\n                _acc = acc + \"/\";\n                _i = i + 2 | 0;\n                continue;\n              case \"f\" :\n                _acc = acc + \"\\x0c\";\n                _i = i + 2 | 0;\n                continue;\n            }\n          }\n          let escaped = text.slice(i, i + 2 | 0);\n          _acc = acc + escaped;\n          _i = i + 2 | 0;\n          continue;\n          break;\n        default:\n          _acc = acc + c;\n          _i = i + 1 | 0;\n          continue;\n      }\n    };\n  };\n  let match = loop(pos, \"\");\n  return [\n    match[1],\n    match[0]\n  ];\n}\n\nfunction parseDigits(text, pos) {\n  let len = text.length;\n  let _i = pos + 1 | 0;\n  while (true) {\n    let i = _i;\n    if (i >= len) {\n      return i;\n    }\n    let match = text[i];\n    if (match === undefined) {\n      return i;\n    }\n    switch (match) {\n      case \"0\" :\n      case \"1\" :\n      case \"2\" :\n      case \"3\" :\n      case \"4\" :\n      case \"5\" :\n      case \"6\" :\n      case \"7\" :\n      case \"8\" :\n      case \"9\" :\n        _i = i + 1 | 0;\n        continue;\n      default:\n        return i;\n    }\n  };\n}\n\nfunction parseWithDecimal(text, pos) {\n  let pos$1 = parseDigits(text, pos);\n  if (pos$1 < text.length && Primitive_object.equal(text[pos$1], \".\")) {\n    return parseDigits(text, pos$1 + 1 | 0);\n  } else {\n    return pos$1;\n  }\n}\n\nfunction parseNumber(text, pos) {\n  let pos$1 = parseWithDecimal(text, pos);\n  let ln = text.length;\n  if (!(pos$1 < (ln - 1 | 0) && (Primitive_object.equal(text[pos$1], \"E\") || Primitive_object.equal(text[pos$1], \"e\")))) {\n    return pos$1;\n  }\n  let match = text[pos$1 + 1 | 0];\n  let pos$2;\n  if (match !== undefined) {\n    switch (match) {\n      case \"+\" :\n     
 case \"-\" :\n        pos$2 = pos$1 + 2 | 0;\n        break;\n      default:\n        pos$2 = pos$1 + 1 | 0;\n    }\n  } else {\n    pos$2 = pos$1 + 1 | 0;\n  }\n  return parseDigits(text, pos$2);\n}\n\nfunction parseNegativeNumber(text, pos) {\n  let final = Primitive_object.equal(text[pos], \"-\") ? parseNumber(text, pos + 1 | 0) : parseNumber(text, pos);\n  let numStr = text.slice(pos, final);\n  return [\n    {\n      TAG: \"Number\",\n      _0: parseFloat(numStr)\n    },\n    final\n  ];\n}\n\nfunction expect(char, text, pos, message) {\n  if (Primitive_object.notequal(text[pos], char)) {\n    return fail(text, pos, \"Expected: \" + message);\n  } else {\n    return pos + 1 | 0;\n  }\n}\n\nfunction parseComment(text, pos, next) {\n  if (Primitive_object.notequal(text[pos], \"/\")) {\n    if (Primitive_object.equal(text[pos], \"*\")) {\n      return next(text, skipToCloseMultilineComment(text, pos + 1 | 0));\n    } else {\n      return Pervasives.failwith(\"Invalid syntax\");\n    }\n  } else {\n    return next(text, skipToNewline(text, pos + 1 | 0));\n  }\n}\n\nfunction maybeSkipComment(text, pos) {\n  if (pos < text.length && Primitive_object.equal(text[pos], \"/\")) {\n    if ((pos + 1 | 0) < text.length && Primitive_object.equal(text[pos + 1 | 0], \"/\")) {\n      return skipToNewline(text, pos + 1 | 0);\n    } else if ((pos + 1 | 0) < text.length && Primitive_object.equal(text[pos + 1 | 0], \"*\")) {\n      return skipToCloseMultilineComment(text, pos + 1 | 0);\n    } else {\n      return fail(text, pos, \"Invalid synatx\");\n    }\n  } else {\n    return pos;\n  }\n}\n\nfunction skip(text, _pos) {\n  while (true) {\n    let pos = _pos;\n    if (pos === text.length) {\n      return pos;\n    }\n    let n = maybeSkipComment(text, skipWhite(text, pos));\n    if (n <= pos) {\n      return n;\n    }\n    _pos = n;\n    continue;\n  };\n}\n\nfunction parse(text, _pos) {\n  while (true) {\n    let pos = _pos;\n    if (pos >= text.length) {\n      return 
fail(text, pos, \"Reached end of file without being done parsing\");\n    }\n    let match = text[pos];\n    if (match === undefined) {\n      return fail(text, pos, \"unexpected character\");\n    }\n    switch (match) {\n      case \"/\" :\n        return parseComment(text, pos + 1 | 0, parse);\n      case \"-\" :\n      case \"0\" :\n      case \"1\" :\n      case \"2\" :\n      case \"3\" :\n      case \"4\" :\n      case \"5\" :\n      case \"6\" :\n      case \"7\" :\n      case \"8\" :\n      case \"9\" :\n        return parseNegativeNumber(text, pos);\n      case \"[\" :\n        return parseArray(text, pos + 1 | 0);\n      case \"\\\"\" :\n        let match$1 = parseString(text, pos + 1 | 0);\n        return [\n          {\n            TAG: \"String\",\n            _0: match$1[0]\n          },\n          match$1[1]\n        ];\n      case \" \" :\n      case \"\\n\" :\n      case \"\\r\" :\n      case \"\\t\" :\n        break;\n      case \"f\" :\n        if (text.slice(pos, pos + 5 | 0) === \"false\") {\n          return [\n            \"False\",\n            pos + 5 | 0\n          ];\n        } else {\n          return fail(text, pos, \"unexpected character\");\n        }\n      case \"n\" :\n        if (text.slice(pos, pos + 4 | 0) === \"null\") {\n          return [\n            \"Null\",\n            pos + 4 | 0\n          ];\n        } else {\n          return fail(text, pos, \"unexpected character\");\n        }\n      case \"t\" :\n        if (text.slice(pos, pos + 4 | 0) === \"true\") {\n          return [\n            \"True\",\n            pos + 4 | 0\n          ];\n        } else {\n          return fail(text, pos, \"unexpected character\");\n        }\n      case \"{\" :\n        return parseObject(text, pos + 1 | 0);\n      default:\n        return fail(text, pos, \"unexpected character\");\n    }\n    _pos = skipWhite(text, pos);\n    continue;\n  };\n}\n\nfunction parseArrayValue(text, pos) {\n  let pos$1 = skip(text, pos);\n  let match = 
parse(text, pos$1);\n  let value = match[0];\n  let pos$2 = skip(text, match[1]);\n  let match$1 = text[pos$2];\n  if (match$1 === undefined) {\n    return fail(text, pos$2, \"unexpected character\");\n  }\n  switch (match$1) {\n    case \",\" :\n      let pos$3 = skip(text, pos$2 + 1 | 0);\n      if (Primitive_object.equal(text[pos$3], \"]\")) {\n        return [\n          {\n            hd: value,\n            tl: /* [] */0\n          },\n          pos$3 + 1 | 0\n        ];\n      }\n      let match$2 = parseArrayValue(text, pos$3);\n      return [\n        {\n          hd: value,\n          tl: match$2[0]\n        },\n        match$2[1]\n      ];\n    case \"]\" :\n      return [\n        {\n          hd: value,\n          tl: /* [] */0\n        },\n        pos$2 + 1 | 0\n      ];\n    default:\n      return fail(text, pos$2, \"unexpected character\");\n  }\n}\n\nfunction parseArray(text, pos) {\n  let pos$1 = skip(text, pos);\n  let match = text[pos$1];\n  if (match === \"]\") {\n    return [\n      {\n        TAG: \"Array\",\n        _0: /* [] */0\n      },\n      pos$1 + 1 | 0\n    ];\n  }\n  let match$1 = parseArrayValue(text, pos$1);\n  return [\n    {\n      TAG: \"Array\",\n      _0: match$1[0]\n    },\n    match$1[1]\n  ];\n}\n\nfunction parseObjectValue(text, pos) {\n  let pos$1 = skip(text, pos);\n  if (Primitive_object.notequal(text[pos$1], \"\\\"\")) {\n    return fail(text, pos$1, \"Expected string\");\n  }\n  let match = parseString(text, pos$1 + 1 | 0);\n  let key = match[0];\n  let pos$2 = skip(text, match[1]);\n  let pos$3 = expect(\":\", text, pos$2, \"Colon\");\n  let match$1 = parse(text, pos$3);\n  let value = match$1[0];\n  let pos$4 = skip(text, match$1[1]);\n  let match$2 = text[pos$4];\n  if (match$2 !== undefined) {\n    switch (match$2) {\n      case \",\" :\n        let pos$5 = skip(text, pos$4 + 1 | 0);\n        if (Primitive_object.equal(text[pos$5], \"}\")) {\n          return [\n            {\n              hd: [\n                
key,\n                value\n              ],\n              tl: /* [] */0\n            },\n            pos$5 + 1 | 0\n          ];\n        }\n        let match$3 = parseObjectValue(text, pos$5);\n        return [\n          {\n            hd: [\n              key,\n              value\n            ],\n            tl: match$3[0]\n          },\n          match$3[1]\n        ];\n      case \"}\" :\n        return [\n          {\n            hd: [\n              key,\n              value\n            ],\n            tl: /* [] */0\n          },\n          pos$4 + 1 | 0\n        ];\n    }\n  }\n  let match$4 = parseObjectValue(text, pos$4);\n  return [\n    {\n      hd: [\n        key,\n        value\n      ],\n      tl: match$4[0]\n    },\n    match$4[1]\n  ];\n}\n\nfunction parseObject(text, pos) {\n  let pos$1 = skip(text, pos);\n  if (Primitive_object.equal(text[pos$1], \"}\")) {\n    return [\n      {\n        TAG: \"Object\",\n        _0: /* [] */0\n      },\n      pos$1 + 1 | 0\n    ];\n  }\n  let match = parseObjectValue(text, pos$1);\n  return [\n    {\n      TAG: \"Object\",\n      _0: match[0]\n    },\n    match[1]\n  ];\n}\n\nlet Parser = {\n  split_by: split_by,\n  fail: fail,\n  skipToNewline: skipToNewline,\n  stringTail: stringTail,\n  skipToCloseMultilineComment: skipToCloseMultilineComment,\n  skipWhite: skipWhite,\n  parseString: parseString,\n  parseDigits: parseDigits,\n  parseWithDecimal: parseWithDecimal,\n  parseNumber: parseNumber,\n  parseNegativeNumber: parseNegativeNumber,\n  expect: expect,\n  parseComment: parseComment,\n  maybeSkipComment: maybeSkipComment,\n  skip: skip,\n  parse: parse,\n  parseArrayValue: parseArrayValue,\n  parseArray: parseArray,\n  parseObjectValue: parseObjectValue,\n  parseObject: parseObject\n};\n\nfunction parse$1(text) {\n  let match = parse(text, 0);\n  let pos = skip(text, match[1]);\n  if (pos < text.length) {\n    return Pervasives.failwith(\"Extra data after parse finished: \" + text.slice(pos, 
text.length));\n  } else {\n    return match[0];\n  }\n}\n\nfunction bind(v, fn) {\n  if (v !== undefined) {\n    return fn(Primitive_option.valFromOption(v));\n  }\n}\n\nfunction get(key, t) {\n  if (typeof t !== \"object\") {\n    return;\n  }\n  if (t.TAG !== \"Object\") {\n    return;\n  }\n  let _items = t._0;\n  while (true) {\n    let items = _items;\n    if (items === 0) {\n      return;\n    }\n    let match = items.hd;\n    if (match[0] === key) {\n      return Primitive_option.some(match[1]);\n    }\n    _items = items.tl;\n    continue;\n  };\n}\n\nfunction nth(n, t) {\n  if (typeof t !== \"object\") {\n    return;\n  }\n  if (t.TAG !== \"Array\") {\n    return;\n  }\n  let items = t._0;\n  if (n < Stdlib_List.length(items)) {\n    return Stdlib_List.getOrThrow(items, n);\n  }\n}\n\nfunction string(t) {\n  if (typeof t !== \"object\" || t.TAG !== \"String\") {\n    return;\n  } else {\n    return t._0;\n  }\n}\n\nfunction number(t) {\n  if (typeof t !== \"object\" || t.TAG !== \"Number\") {\n    return;\n  } else {\n    return t._0;\n  }\n}\n\nfunction array(t) {\n  if (typeof t !== \"object\" || t.TAG !== \"Array\") {\n    return;\n  } else {\n    return t._0;\n  }\n}\n\nfunction obj(t) {\n  if (typeof t !== \"object\" || t.TAG !== \"Object\") {\n    return;\n  } else {\n    return t._0;\n  }\n}\n\nfunction bool(t) {\n  if (typeof t === \"object\") {\n    return;\n  }\n  switch (t) {\n    case \"True\" :\n      return true;\n    case \"False\" :\n      return false;\n    default:\n      return;\n  }\n}\n\nfunction $$null(t) {\n  if (typeof t !== \"object\" && t === \"Null\") {\n    return Primitive_option.some(undefined);\n  }\n}\n\nfunction parsePath(_keyList, _t) {\n  while (true) {\n    let t = _t;\n    let keyList = _keyList;\n    if (keyList === 0) {\n      return t;\n    }\n    let value = get(keyList.hd, t);\n    if (value === undefined) {\n      return;\n    }\n    _t = value;\n    _keyList = keyList.tl;\n    continue;\n  };\n}\n\nfunction 
getPath(path, t) {\n  let keys = split_by(undefined, c => Primitive_object.equal(c, \".\"), path);\n  return parsePath(keys, t);\n}\n\nexport {\n  string_of_number,\n  Infix,\n  escape,\n  stringify,\n  white,\n  stringifyPretty,\n  unwrap,\n  Parser,\n  parse$1 as parse,\n  bind,\n  get,\n  nth,\n  string,\n  number,\n  array,\n  obj,\n  bool,\n  $$null,\n  parsePath,\n  getPath,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/example-project/src/ModuleWithDocComment.res",
    "content": "@@ocaml.doc(\"This comment is for the **toplevel** module.\")\n\n@ocaml.doc(\"This comment is for the first **nested** module.\")\nmodule Nested = {\n  let x = \"123\"\n\n  @ocaml.doc(\"This comment is for the inner **nested-again** module.\")\n  module NestedAgain = {\n    let y = 123\n  }\n}\n\nmodule M = Nested.NestedAgain\n"
  },
  {
    "path": "analysis/examples/example-project/src/ModuleWithDocComment.res.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nlet NestedAgain = {\n  y: 123\n};\n\nlet Nested = {\n  x: \"123\",\n  NestedAgain: NestedAgain\n};\n\nlet M;\n\nexport {\n  Nested,\n  M,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/example-project/src/More.res",
    "content": "@@ocaml.doc(\" Toplevel docs \")\n\n@ocaml.doc(\" Some contents \")\nlet contnets = \"here\"\n\nlet inner = 20\n\nlet n = 10\n\nlet party = 30\n\nlet awesome = 200\n"
  },
  {
    "path": "analysis/examples/example-project/src/More.res.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nlet contnets = \"here\";\n\nlet inner = 20;\n\nlet n = 10;\n\nlet party = 30;\n\nexport {\n  contnets,\n  inner,\n  n,\n  party,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/example-project/src/More.resi",
    "content": "let contnets: string\nlet inner: int\nlet n: int\nlet party: int\n"
  },
  {
    "path": "analysis/examples/example-project/src/Other.res",
    "content": "/* let later = 10; */\n\n/* Ok testing things */\n\nlet something = 10\n\ntype person = {name: string, age: int}\n\ntype animals = Things(int) | People(string) | Mouse\n\nlet inner = 10\n/* More.outer; */\n\nlet m = Things(1)\n\n/* working on things. */\n\nlet z = {name: \"hi\", age: 20}\n\nlet later = 20\n\nlet concat = (~first, ~second) => first + second\n\ntype other = {person: person, height: float}\nlet oo = {person: z, height: 34.2}\n\nlet show = o => {\n  let m = o.height\n}\n"
  },
  {
    "path": "analysis/examples/example-project/src/Other.res.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nlet z = {\n  name: \"hi\",\n  age: 20\n};\n\nfunction concat(first, second) {\n  return first + second | 0;\n}\n\nlet oo = {\n  person: z,\n  height: 34.2\n};\n\nfunction show(o) {\n  \n}\n\nlet something = 10;\n\nlet inner = 10;\n\nlet m = {\n  TAG: \"Things\",\n  _0: 1\n};\n\nlet later = 20;\n\nexport {\n  something,\n  inner,\n  m,\n  z,\n  later,\n  concat,\n  oo,\n  show,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/example-project/src/TransformHelpers.res",
    "content": "let deserialize_Belt__HashMapInt__t = (transformer, t) => assert(false)\n\nlet deserialize_Belt_HashMapInt____t = (a, b) => assert(false)\n\nlet deserialize_Belt__HashMap__Int__t = (a, b) => assert(false)\n\nlet serialize_Belt_HashMapInt____t = (a, b) => assert(false)\n\nlet serialize_Belt__HashMap__Int__t = (a, b) => assert(false)\n\nlet serialize_Belt_HashMapInt____t = (transformer, t) => assert(false)\n"
  },
  {
    "path": "analysis/examples/example-project/src/TransformHelpers.res.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction deserialize_Belt__HashMapInt__t(transformer, t) {\n  throw {\n    RE_EXN_ID: \"Assert_failure\",\n    _1: [\n      \"TransformHelpers.res\",\n      1,\n      58\n    ],\n    Error: new Error()\n  };\n}\n\nfunction deserialize_Belt_HashMapInt____t(a, b) {\n  throw {\n    RE_EXN_ID: \"Assert_failure\",\n    _1: [\n      \"TransformHelpers.res\",\n      3,\n      49\n    ],\n    Error: new Error()\n  };\n}\n\nfunction deserialize_Belt__HashMap__Int__t(a, b) {\n  throw {\n    RE_EXN_ID: \"Assert_failure\",\n    _1: [\n      \"TransformHelpers.res\",\n      5,\n      50\n    ],\n    Error: new Error()\n  };\n}\n\nfunction serialize_Belt__HashMap__Int__t(a, b) {\n  throw {\n    RE_EXN_ID: \"Assert_failure\",\n    _1: [\n      \"TransformHelpers.res\",\n      9,\n      48\n    ],\n    Error: new Error()\n  };\n}\n\nfunction serialize_Belt_HashMapInt____t(transformer, t) {\n  throw {\n    RE_EXN_ID: \"Assert_failure\",\n    _1: [\n      \"TransformHelpers.res\",\n      11,\n      57\n    ],\n    Error: new Error()\n  };\n}\n\nexport {\n  deserialize_Belt__HashMapInt__t,\n  deserialize_Belt_HashMapInt____t,\n  deserialize_Belt__HashMap__Int__t,\n  serialize_Belt__HashMap__Int__t,\n  serialize_Belt_HashMapInt____t,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/example-project/src/ZZ.res",
    "content": "let a = 12\n\nlet b = [1, 2, 3, a]\n\nlet c = <div />\n\nlet s = React.string\n\nmodule M = {\n  @react.component\n  let make = (~x) => React.string(x)\n}\n\nlet d = <M x=\"abc\" />\n\nmodule J = {\n  @react.component\n  let make = (~children: React.element) => React.null\n}\n\nlet z =\n  <J>\n    {React.string(\"\")}\n    {React.string(\"\")}\n  </J>\n\ntype inline =\n  | A({x: int, y: string})\n  | B({x: int, y: string})\n  | C({\n      x: int,\n      y: string,\n      z: string,\n      w: string,\n      x0: string,\n      q1: string,\n      q2: string,\n      q3: string,\n      q4: string,\n    })\n  | D({x: int, y: string})\n  | E({x: int, y: string})\n  | F\n\nmodule MSig: {\n  type rec t = A(list<s>)\n  and s = list<t>\n\n  let x: int\n} = {\n  type rec t = A(list<s>)\n  and s = list<t>\n\n  let x = 14\n}\n\nmodule Impl = {\n  type rec t = A(list<s>)\n  and s = list<t>\n\n  type w = int\n\n  let x = 14\n}\n\nmodule Impl2 = {\n  include Impl\n}\n\nmodule D = MSig\nmodule E = Impl\nmodule F = Impl2\n\n@ocaml.doc(\"str docstring\")\ntype str = string\n\n@ocaml.doc(\"gr docstring\")\ntype gr = {x: int, s: str}\n\nlet testRecordFields = (gr: gr) => {\n  let str = gr.s\n  str\n}\n\n@ocaml.doc(\"vr docstring\")\ntype vr = V1 | V2\n\nlet v1 = V1\n\nmodule DoubleNested = ModuleWithDocComment.Nested.NestedAgain\n\nlet uncurried = x => x + 1\n\nmodule Inner = {\n  type tInner = int\n  let vInner = 34\n}\n\ntype typeInner = Inner.tInner\n\nlet valueInner = Inner.vInner\n\n@ocaml.doc(\"Doc comment for functionWithTypeAnnotation\")\nlet functionWithTypeAnnotation: unit => int = () => 1\n\nmodule HoverInsideModuleWithComponent = {\n  let x = 2 // check that hover on x works\n\n  @react.component\n  let make = () => React.null\n}\n\nmodule Lib = {\n  let foo = (~age, ~name) => name ++ Int.toString(age)\n  let next = (~number=0, ~year) => number + year\n}\n\n@ocaml.doc(\"This module is commented\") @deprecated(\"This module is deprecated\")\nmodule Dep: {\n  
@ocaml.doc(\"Some doc comment\") @deprecated(\"Use customDouble instead\")\n  let customDouble: int => int\n\n  let customDouble2: int => int\n} = {\n  let customDouble = foo => foo * 2\n  let customDouble2 = foo => foo * 2\n}\n\nlet customDouble = foo => foo * 2\nlet cc = customDouble(11)\n\nmodule O = {\n  module Comp = {\n    @react.component\n    let make = (~first=\"\", ~kas=11, ~foo=3, ~second, ~v) =>\n      React.string(first ++ second ++ Int.toString(foo))\n  }\n}\n\nlet comp = <O.Comp key=\"12\" second=\"abcc\" v=12 />\n\nlet lll = List.make(~length=3, 4)\n\nlet abc = \"abc\"\n\nlet arr = [1, 2, 3]\n\nlet some7 = Some(7)\n"
  },
  {
    "path": "analysis/examples/example-project/src/ZZ.res.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Stdlib_List from \"@rescript/runtime/lib/es6/Stdlib_List.js\";\nimport * as JsxRuntime from \"react/jsx-runtime\";\n\nlet b = [\n  1,\n  2,\n  3,\n  12\n];\n\nlet c = JsxRuntime.jsx(\"div\", {});\n\nfunction s(prim) {\n  return prim;\n}\n\nfunction ZZ$M(props) {\n  return props.x;\n}\n\nlet M = {\n  make: ZZ$M\n};\n\nlet d = JsxRuntime.jsx(ZZ$M, {\n  x: \"abc\"\n});\n\nfunction ZZ$J(props) {\n  return null;\n}\n\nlet J = {\n  make: ZZ$J\n};\n\nlet z = JsxRuntime.jsxs(ZZ$J, {\n  children: [\n    \"\",\n    \"\"\n  ]\n});\n\nlet MSig = {\n  x: 14\n};\n\nlet Impl = {\n  x: 14\n};\n\nlet Impl2 = {\n  x: 14\n};\n\nfunction testRecordFields(gr) {\n  return gr.s;\n}\n\nfunction uncurried(x) {\n  return x + 1 | 0;\n}\n\nlet Inner = {\n  vInner: 34\n};\n\nfunction functionWithTypeAnnotation() {\n  return 1;\n}\n\nfunction ZZ$HoverInsideModuleWithComponent(props) {\n  return null;\n}\n\nlet HoverInsideModuleWithComponent = {\n  x: 2,\n  make: ZZ$HoverInsideModuleWithComponent\n};\n\nfunction foo(age, name) {\n  return name + age.toString();\n}\n\nfunction next(numberOpt, year) {\n  let number = numberOpt !== undefined ? numberOpt : 0;\n  return number + year | 0;\n}\n\nlet Lib = {\n  foo: foo,\n  next: next\n};\n\nfunction customDouble(foo) {\n  return (foo << 1);\n}\n\nfunction customDouble2(foo) {\n  return (foo << 1);\n}\n\nlet Dep = {\n  customDouble: customDouble,\n  customDouble2: customDouble2\n};\n\nfunction customDouble$1(foo) {\n  return (foo << 1);\n}\n\nlet cc = 22;\n\nfunction ZZ$O$Comp(props) {\n  let __foo = props.foo;\n  let __first = props.first;\n  let first = __first !== undefined ? __first : \"\";\n  let foo = __foo !== undefined ? 
__foo : 3;\n  return first + props.second + foo.toString();\n}\n\nlet Comp = {\n  make: ZZ$O$Comp\n};\n\nlet O = {\n  Comp: Comp\n};\n\nlet comp = JsxRuntime.jsx(ZZ$O$Comp, {\n  second: \"abcc\",\n  v: 12\n}, \"12\");\n\nlet lll = Stdlib_List.make(3, 4);\n\nlet arr = [\n  1,\n  2,\n  3\n];\n\nlet a = 12;\n\nlet D;\n\nlet E;\n\nlet F;\n\nlet v1 = \"V1\";\n\nlet DoubleNested;\n\nlet valueInner = 34;\n\nlet abc = \"abc\";\n\nlet some7 = 7;\n\nexport {\n  a,\n  b,\n  c,\n  s,\n  M,\n  d,\n  J,\n  z,\n  MSig,\n  Impl,\n  Impl2,\n  D,\n  E,\n  F,\n  testRecordFields,\n  v1,\n  DoubleNested,\n  uncurried,\n  Inner,\n  valueInner,\n  functionWithTypeAnnotation,\n  HoverInsideModuleWithComponent,\n  Lib,\n  Dep,\n  customDouble$1 as customDouble,\n  cc,\n  O,\n  comp,\n  lll,\n  abc,\n  arr,\n  some7,\n}\n/* c Not a pure module */\n"
  },
  {
    "path": "analysis/examples/example-project/src/syntax/sample-highlighting.res",
    "content": "// Bindings\nlet numberBinding = 123\n\nlet someFunction = (param: int): int => {\n  let innerBinding = param + 2\n  innerBinding\n}\n\n// Types\ntype someRecord<'typeParameter> = {\n  someField: int,\n  someOtherField: string,\n  theParam: typeParameter,\n  another: bool,\n  to: string,\n}\n\ntype someEnum =\n  | SomeMember\n  | AnotherMember\n  | SomeMemberWithPayload(someRecord<int>)\n\ntype somePolyEnum = [\n  | #someMember\n  | #AnotherMember\n  | #SomeMemberWithPayload(someRecord<int>)\n  | #\"fourth Member\"\n]\n\n// Destructuring\nlet destructuring = () => {\n  let someVar = (1, 2, 3)\n  let (one, two, three) = someVar\n  let someObj: someRecord<int> = {\n    someField: 1,\n    someOtherField: \"hello\",\n    theParam: 2,\n    another: true,\n    to: \"123\",\n  }\n  let {someField, someOtherField, theParam} = someObj\n\n  someField\n}\n\nmodule SomeModule = {\n  type t = Some | Value | Here\n}\n\n// Strings\nlet interpolated = `${numberBinding} ${\"123\"}`\n\n// JSX\nmodule SomeComponent = {\n  @react.component\n  let make = (\n    ~someProp: int,\n    ~otherProp: string,\n    ~thirdProp: SomeModule.t,\n    ~fourth: somePolyEnum=#\"fourth member\",\n  ) => {\n    React.null\n  }\n\n  module Nested = {\n    @react.component\n    let make = (~children) => {\n      <> {children} </>\n    }\n  }\n}\n\nlet jsx =\n  <div>\n    <SomeComponent someProp=123 otherProp=\"hello\" thirdProp=Value fourth=#AnotherMember />\n    <SomeComponent.Nested> {React.string(\"Nested\")} </SomeComponent.Nested>\n  </div>\n"
  },
  {
    "path": "analysis/examples/example-project/src/syntax/sample-highlighting.rs",
    "content": "// Bindings\nfn some_function(param: usize) -> usize {\n    let innerBinding = param + 2;\n    innerBinding\n}\n\n// Types\nstruct someRecord<typeParameter> {\n    someField: usize,\n    someOtherField: String,\n    theParam: typeParameter,\n}\n\nenum someEnum {\n    SomeMember,\n    AnotherMember,\n    SomeMemberWithPayload(someRecord<usize>),\n}\n\n// Destructuring\nfn destructuring() -> usize {\n    let someVar = (1, 2, 3);\n    let (one, two, three) = someVar;\n    let someObj = someRecord::<usize> {\n        someField: 1,\n        someOtherField: String::new(\"HEllo\"),\n        theParam: 2,\n    };\n\n    someObj.someField\n}\n"
  },
  {
    "path": "analysis/examples/example-project/src/syntax/sample-highlighting.tsx",
    "content": "// Bindings\nlet numberBinding = 123;\n\nconst SomeComp = {\n  Nested: () => null,\n};\n\nlet someFunction = (param: number): number => {\n  let innerBinding = param + 2;\n  return innerBinding;\n};\n\n// Types\ntype someRecord<typeParameter> = {\n  someField: number;\n  someOtherField: string;\n  theParam: typeParameter;\n  another: boolean;\n  to: string;\n};\n\nenum someEnum {\n  SomeMember,\n  AnotherMember,\n}\n\n// Destructuring\nlet destructuring = () => {\n  let someVar = [1, 2, 3];\n  let [one, two, three] = someVar;\n  let someObj: someRecord<number> = {\n    someField: 1,\n    someOtherField: \"hello\",\n    theParam: 2,\n    another: true,\n    to: \"123\",\n  };\n  let { someField, someOtherField, theParam } = someObj;\n\n  return someField;\n};\n\nnamespace SomeModule {\n  export enum t {\n    Some,\n    Value,\n    Here,\n  }\n}\n\n// Decorators and classes\nfunction someDecorator() {\n  return function (\n    target: any,\n    propertyKey: string,\n    descriptor: PropertyDescriptor\n  ) {\n    console.log(\"first(): called\");\n  };\n}\n\nclass SomeClass {\n  @someDecorator() doStuff() {\n    return 123;\n  }\n}\n\n// Strings\nlet interpolated = `${numberBinding} ${\"123\"}`;\n\n// JSX\ninterface Props {\n  someProp: number;\n  otherProp: string;\n  thirdProp: SomeModule.t;\n}\nconst SomeComponent = ({ someProp, otherProp, thirdProp }: Props) => {\n  return null;\n};\n\nlet jsx = (\n  <div>\n    <SomeComponent\n      someProp={123}\n      otherProp=\"hello\"\n      thirdProp={SomeModule.t.Value}\n    />\n    <SomeComp.Nested />\n    {\"Hello\"}\n  </div>\n);\nfunction Property() {\n  throw new Error(\"Function not implemented.\");\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/.gitignore",
    "content": "/node_modules\n/lib"
  },
  {
    "path": "analysis/examples/larger-project/.merlin",
    "content": "####{BSB GENERATED: NO EDIT\nFLG -ppx '/home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/rescript/linux/bsc.exe -as-ppx -bs-jsx 3'\nS /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/rescript/lib/ocaml\nB /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/rescript/lib/ocaml\nFLG -w +a-4-9-20-40-41-42-50-61-102\nS /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/@rescript/react/lib/ocaml\nB /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/@rescript/react/lib/ocaml\nS /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/@glennsl/bs-json/lib/ocaml\nB /home/pedro/Desktop/Projects/rescript-vscode/analysis/examples/larger-project/node_modules/@glennsl/bs-json/lib/ocaml\nS src\nB lib/bs/src\nS src/exception\nB lib/bs/src/exception\n####BSB GENERATED: NO EDIT}\n"
  },
  {
    "path": "analysis/examples/larger-project/.watchmanconfig",
    "content": ""
  },
  {
    "path": "analysis/examples/larger-project/bsconfig.json",
    "content": "{\n  \"reanalyze\": {\n    \"analysis\": [\"dce\"],\n    \"suppress\": [],\n    \"unsuppress\": []\n  },\n  \"name\": \"sample-typescript-app\",\n  \"bsc-flags\": [\"-bs-super-errors -w a\"],\n  \"reason\": { \"react-jsx\": 3 },\n  \"bs-dependencies\": [\"@rescript/react\", \"@glennsl/bs-json\"],\n  \"sources\": [\n    {\n      \"dir\": \"src\",\n      \"subdirs\": true\n    }\n  ],\n  \"package-specs\": {\n    \"module\": \"es6\",\n    \"in-source\": true\n  }\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/package.json",
    "content": "{\n  \"name\": \"large-project\",\n  \"version\": \"0.1.0\",\n  \"private\": true,\n  \"scripts\": {\n    \"start\": \"rescript build -w\",\n    \"build\": \"rescript build\",\n    \"clean\": \"rescript clean -with-deps\"\n  },\n  \"devDependencies\": {\n    \"react\": \"^16.13.1\",\n    \"react-dom\": \"^16.8.6\",\n    \"rescript\": \"^9.1.4\"\n  },\n  \"dependencies\": {\n    \"@glennsl/bs-json\": \"^5.0.4\",\n    \"@rescript/react\": \"^0.10.3\"\n  }\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/AutoAnnotate.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/AutoAnnotate.res",
    "content": "type variant = R(int)\n\n@genType\ntype record = {variant: variant}\n\ntype r2 = {r2: int}\n\ntype r3 = {r3: int}\n\ntype r4 = {r4: int}\n\n@genType\ntype annotatedVariant =\n  | R2(r2, r3)\n  | R4(r4)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/BootloaderResource.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/BootloaderResource.res",
    "content": "/* NOTE: This is a spooky interface that provides no type safety. It should be\n * improved. Use with caution. */\n@module(\"BootloaderResource\")\nexternal read: JSResource.t<'a> => 'a = \"read\"\n"
  },
  {
    "path": "analysis/examples/larger-project/src/BucklescriptAnnotations.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction bar(x) {\n  var f = x.twoArgs;\n  return f(3, \"a\");\n}\n\nexport {\n  bar ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/BucklescriptAnnotations.res",
    "content": "@genType\ntype someMutableFields = {\n  @set\n  \"mutable0\": string,\n  \"immutable\": int,\n  @set\n  \"mutable1\": string,\n  @set\n  \"mutable2\": string,\n}\n\n@genType\ntype someMethods = {\n  @meth\n  \"send\": string => unit,\n  @meth\n  \"on\": (string, (. int) => unit) => unit,\n  @meth\n  \"threeargs\": (int, string, int) => string,\n  \"twoArgs\": (. int, string) => int,\n}\n\n// let foo = (x: someMethods) => x[\"threeargs\"](3, \"a\", 4)\n\nlet bar = (x: someMethods) => {\n  let f = x[\"twoArgs\"]\n  f(. 3, \"a\")\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ComponentAsProp.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as React from \"react\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\n\nfunction ComponentAsProp(Props) {\n  var title = Props.title;\n  var description = Props.description;\n  var button = Props.button;\n  return React.createElement(\"div\", undefined, React.createElement(\"div\", undefined, title, description, button !== undefined ? Caml_option.valFromOption(button) : null));\n}\n\nvar make = ComponentAsProp;\n\nexport {\n  make ,\n  \n}\n/* react Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ComponentAsProp.res",
    "content": "@ocaml.doc(\n  \" This is like declaring a normal ReasonReact component's `make` function, except the body is a the interop hook wrapJsForReason \"\n)\n@genType\n@react.component\nlet make = (~title, ~description, ~button=?) => {\n  <div>\n    <div>\n      title\n      description\n      {switch button {\n      | Some(button) => button\n      | None => React.null\n      }}\n    </div>\n  </div>\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/CreateErrorHandler1.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as ErrorHandler from \"./ErrorHandler.js\";\n\nfunction notification(s) {\n  return [\n          s,\n          s\n        ];\n}\n\nvar Error1 = {\n  notification: notification\n};\n\nvar MyErrorHandler = ErrorHandler.Make(Error1);\n\nCurry._1(MyErrorHandler.notify, \"abc\");\n\nexport {\n  Error1 ,\n  MyErrorHandler ,\n  \n}\n/* MyErrorHandler Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/CreateErrorHandler1.res",
    "content": "module Error1 = {\n  type t = string\n  let notification = s => (s, s)\n}\n\nmodule MyErrorHandler = ErrorHandler.Make(Error1)\n\nMyErrorHandler.notify(\"abc\")\n"
  },
  {
    "path": "analysis/examples/larger-project/src/CreateErrorHandler2.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as ErrorHandler from \"./ErrorHandler.js\";\n\nfunction notification(n) {\n  return [\n          String(n),\n          \"\"\n        ];\n}\n\nvar Error2 = {\n  notification: notification\n};\n\nvar MyErrorHandler = ErrorHandler.Make(Error2);\n\nexport {\n  Error2 ,\n  MyErrorHandler ,\n  \n}\n/* MyErrorHandler Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/CreateErrorHandler2.res",
    "content": "module Error2 = {\n  type t = int\n  let notification = n => (string_of_int(n), \"\")\n}\n\nmodule MyErrorHandler = ErrorHandler.Make(Error2) /* MyErrorHandler.notify(42) */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadCodeImplementation.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar M = {\n  x: 42\n};\n\nexport {\n  M ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadCodeImplementation.res",
    "content": "module M: DeadCodeInterface.T = {\n  let x = 42\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadCodeInterface.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadCodeInterface.res",
    "content": "module type T = {\n  let x: int\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadExn.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nvar Etoplevel = /* @__PURE__ */Caml_exceptions.create(\"DeadExn.Etoplevel\");\n\nvar Einside = /* @__PURE__ */Caml_exceptions.create(\"DeadExn.Inside.Einside\");\n\nvar eInside = {\n  RE_EXN_ID: Einside\n};\n\nconsole.log(eInside);\n\nexport {\n  Etoplevel ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadExn.res",
    "content": "exception Etoplevel\n\nmodule Inside = {\n  exception Einside\n}\n\nexception DeadE\nlet eToplevel = Etoplevel\n\nlet eInside = Inside.Einside\n\nJs.log(eInside)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadExn.resi",
    "content": "// empty\nexception Etoplevel\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadRT.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nconsole.log(/* Kaboom */0);\n\nexport {\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadRT.res",
    "content": "type moduleAccessPath =\n  | Root(string)\n  | Kaboom\n\nlet rec emitModuleAccessPath = moduleAccessPath =>\n  switch moduleAccessPath {\n  | Root(s) => s\n  | Kaboom => \"\"\n  }\n\nlet () = Js.log(Kaboom)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadRT.resi",
    "content": "type moduleAccessPath =\n  | Root(string)\n  | Kaboom\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTest.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTest.res",
    "content": ""
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTestBlacklist.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar x = 34;\n\nexport {\n  x ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTestBlacklist.res",
    "content": "let x = 34\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTestWithInterface.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTestWithInterface.res",
    "content": "module Ext_buffer: {\n  let x: int\n} = {\n  let x = 42\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTestWithInterface.resi",
    "content": "\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTypeTest.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar a = /* A */0;\n\nexport {\n  a ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTypeTest.res",
    "content": "type t =\n  | A\n  | B\nlet a = A\n\ntype deadType =\n  | OnlyInImplementation\n  | OnlyInInterface\n  | InBoth\n  | InNeither\n\nlet _ = OnlyInImplementation\nlet _ = InBoth\n\n@live\ntype record = {x: int, y: string, z: float}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadTypeTest.resi",
    "content": "type t =\n  | A\n  | B\nlet a: t\n\ntype deadType =\n  | OnlyInImplementation\n  | OnlyInInterface\n  | InBoth\n  | InNeither\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadValueTest.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar valueAlive = 1;\n\nvar valueDead = 2;\n\nexport {\n  valueAlive ,\n  valueDead ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadValueTest.res",
    "content": "let valueAlive = 1\nlet valueDead = 2\n\nlet valueOnlyInImplementation = 3\n\n@raises(Failure)\nlet rec subList = (b, e, l) =>\n  switch l {\n  | list{} => failwith(\"subList\")\n  | list{h, ...t} =>\n    let tail = if e == 0 {\n      list{}\n    } else {\n      subList(b - 1, e - 1, t)\n    }\n    if b > 0 {\n      tail\n    } else {\n      list{h, ...tail}\n    }\n  }\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DeadValueTest.resi",
    "content": "let valueAlive: int\nlet valueDead: int\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Docstrings.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction signMessage(message, key) {\n  return message + String(key);\n}\n\nfunction one(a) {\n  return a + 0 | 0;\n}\n\nfunction two(a, b) {\n  return (a + b | 0) + 0 | 0;\n}\n\nfunction tree(a, b, c) {\n  return ((a + b | 0) + c | 0) + 0 | 0;\n}\n\nfunction oneU(a) {\n  return a + 0 | 0;\n}\n\nfunction twoU(a, b) {\n  return (a + b | 0) + 0 | 0;\n}\n\nfunction treeU(a, b, c) {\n  return ((a + b | 0) + c | 0) + 0 | 0;\n}\n\nfunction useParam(param) {\n  return param + 34 | 0;\n}\n\nfunction useParamU(param) {\n  return param + 34 | 0;\n}\n\nfunction unnamed1(param) {\n  return 34;\n}\n\nfunction unnamed1U(param) {\n  return 34;\n}\n\nfunction unnamed2(param, param$1) {\n  return 34;\n}\n\nfunction unnamed2U(param, param$1) {\n  return 34;\n}\n\nfunction grouped(x, y, a, b, c, z) {\n  return ((((x + y | 0) + a | 0) + b | 0) + c | 0) + z | 0;\n}\n\nfunction unitArgWithoutConversion(param) {\n  return \"abc\";\n}\n\nfunction unitArgWithoutConversionU() {\n  return \"abc\";\n}\n\nfunction unitArgWithConversion(param) {\n  return /* A */0;\n}\n\nfunction unitArgWithConversionU() {\n  return /* A */0;\n}\n\nvar flat = 34;\n\nexport {\n  flat ,\n  signMessage ,\n  one ,\n  two ,\n  tree ,\n  oneU ,\n  twoU ,\n  treeU ,\n  useParam ,\n  useParamU ,\n  unnamed1 ,\n  unnamed1U ,\n  unnamed2 ,\n  unnamed2U ,\n  grouped ,\n  unitArgWithoutConversion ,\n  unitArgWithoutConversionU ,\n  unitArgWithConversion ,\n  unitArgWithConversionU ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Docstrings.res",
    "content": "@ocaml.doc(\" hello \") @genType\nlet flat = 34\n\n@ocaml.doc(\"\n  * Sign a message with a key.\n  *\n  * @param message - A message to be signed\n  * @param key - The key with which to sign the message\n  * @returns A signed message\n \")\n@genType\nlet signMessage = (. message, key) => message ++ string_of_int(key)\n\n@genType\nlet one = a => a + 0\n\n@genType\nlet two = (a, b) => a + b + 0\n\n@genType\nlet tree = (a, b, c) => a + b + c + 0\n\n@genType\nlet oneU = (. a) => a + 0\n\n@genType\nlet twoU = (. a, b) => a + b + 0\n\n@genType\nlet treeU = (. a, b, c) => a + b + c + 0\n\n@genType\nlet useParam = param => param + 34\n\n@genType\nlet useParamU = (. param) => param + 34\n\n@genType\nlet unnamed1 = (_: int) => 34\n\n@genType\nlet unnamed1U = (. _: int) => 34\n\n@genType\nlet unnamed2 = (_: int, _: int) => 34\n\n@genType\nlet unnamed2U = (. _: int, _: int) => 34\n\n@genType\nlet grouped = (~x, ~y, a, b, c, ~z) => x + y + a + b + c + z\n\n@genType\nlet unitArgWithoutConversion = () => \"abc\"\n\n@genType\nlet unitArgWithoutConversionU = (. ()) => \"abc\"\n\ntype t =\n  | A\n  | B\n\n@genType\nlet unitArgWithConversion = () => A\n\n@genType\nlet unitArgWithConversionU = (. ()) => A\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DynamicallyLoadedComponent.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction DynamicallyLoadedComponent(Props) {\n  return Props.s;\n}\n\nvar make = DynamicallyLoadedComponent;\n\nexport {\n  make ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/DynamicallyLoadedComponent.res",
    "content": "@react.component\nlet make = (~s) => React.string(s)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/EmptyArray.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as React from \"react\";\n\nfunction EmptyArray$Z(Props) {\n  return React.createElement(\"br\", undefined);\n}\n\nvar Z = {\n  make: EmptyArray$Z\n};\n\nReact.createElement(EmptyArray$Z, {});\n\nexport {\n  Z ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/EmptyArray.res",
    "content": "// @@config({flags : [\"-dsource\"]});\n\nmodule Z = {\n  @react.component\n  let make = () => {\n    <br />\n  }\n}\n\nlet _ = <Z />\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ErrorHandler.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\n\nfunction Make($$Error) {\n  var notify = function (x) {\n    return Curry._1($$Error.notification, x);\n  };\n  return {\n          notify: notify\n        };\n}\n\nvar x = 42;\n\nexport {\n  Make ,\n  x ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ErrorHandler.res",
    "content": "module type Error = {\n  type t\n  let notification: t => (string, string)\n}\n\nmodule Make = (Error: Error) => {\n  let notify = x => Error.notification(x)\n}\n\n// This is ignored as there's an interface file\n@genType\nlet x = 42\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ErrorHandler.resi",
    "content": "module type Error = {\n  type t\n  let notification: t => (string, string)\n}\nmodule Make: (Error: Error) =>\n{\n  let notify: Error.t => (string, string)\n}\n\nlet x: int\n"
  },
  {
    "path": "analysis/examples/larger-project/src/EverythingLiveHere.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar x = 1;\n\nvar y = 3;\n\nvar z = 4;\n\nexport {\n  x ,\n  y ,\n  z ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/EverythingLiveHere.res",
    "content": "let x = 1\n\nlet y = 3\n\nlet z = 4\n"
  },
  {
    "path": "analysis/examples/larger-project/src/FC.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction foo(impl) {\n  return impl.make;\n}\n\nconsole.log(foo);\n\nexport {\n  foo ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/FC.res",
    "content": "module type ReplacebleComponent = {\n  @react.component\n  let make: unit => React.element\n}\n\nlet foo = (~impl: module(ReplacebleComponent)) => {\n  let module(X) = impl\n  X.make\n}\n\nJs.log(foo)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/FirstClassModules.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar y = \"abc\";\n\nvar EmptyInnerModule = {};\n\nvar InnerModule2 = {\n  k: 4242\n};\n\nfunction k3(x) {\n  return x + 1 | 0;\n}\n\nvar InnerModule3 = {\n  k3: k3\n};\n\nvar Z = {\n  u: [\n    0,\n    0\n  ]\n};\n\nvar M = {\n  y: y,\n  EmptyInnerModule: EmptyInnerModule,\n  InnerModule2: InnerModule2,\n  InnerModule3: InnerModule3,\n  Z: Z,\n  x: 42\n};\n\nvar firstClassModule = {\n  x: 42,\n  EmptyInnerModule: EmptyInnerModule,\n  InnerModule2: InnerModule2,\n  InnerModule3: InnerModule3,\n  Z: Z,\n  y: y\n};\n\nfunction testConvert(m) {\n  return m;\n}\n\nfunction SomeFunctor(X) {\n  return {\n          ww: X.y\n        };\n}\n\nfunction someFunctorAsFunction(x) {\n  return {\n          ww: x.y\n        };\n}\n\nexport {\n  M ,\n  firstClassModule ,\n  testConvert ,\n  SomeFunctor ,\n  someFunctorAsFunction ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/FirstClassModules.res",
    "content": "module type MT = {\n  let x: int\n  type t = int\n  @module(\"foo\") external f: int => int = \"f\"\n  module type MT2 = {\n    type tt = string\n  }\n  module EmptyInnerModule: {}\n  module InnerModule2: {\n    let k: t\n  }\n  module InnerModule3: {\n    type inner = int\n    let k3: inner => inner\n  }\n  module type TT = {\n    let u: (int, int)\n  }\n  module Z: TT\n  let y: string\n}\nmodule M = {\n  let y = \"abc\"\n  module type MT2 = {\n    type tt = string\n  }\n  module EmptyInnerModule = {}\n  module InnerModule2 = {\n    let k = 4242\n  }\n  module InnerModule3 = {\n    type inner = int\n    let k3 = x => x + 1\n  }\n\n  module type TT = {\n    let u: (int, int)\n  }\n  module Z = {\n    let u = (0, 0)\n  }\n  type t = int\n  @module(\"foo\") external f: int => int = \"f\"\n  let x = 42\n}\n\n@genType\ntype firstClassModule = module(MT)\n\n@genType\nlet firstClassModule: firstClassModule = module(M)\n\n@genType\nlet testConvert = (m: module(MT)) => m\n\nmodule type ResT = {\n  let ww: string\n}\n\nmodule SomeFunctor = (X: MT): ResT => {\n  let ww = X.y\n}\n\n@genType\nlet someFunctorAsFunction = (x: module(MT)): module(ResT) => module(SomeFunctor(unpack(x)))\n"
  },
  {
    "path": "analysis/examples/larger-project/src/FirstClassModulesInterface.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar r = {\n  x: 3,\n  y: \"hello\"\n};\n\nexport {\n  r ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/FirstClassModulesInterface.res",
    "content": "type record = {\n  x: int,\n  y: string,\n}\n\nlet r = {x: 3, y: \"hello\"}\n\nmodule type MT = {\n  let x: int\n}\n\ntype firstClassModule = module(MT)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/FirstClassModulesInterface.resi",
    "content": "@genType\ntype record = {\n  x: int,\n  y: string,\n}\n\nlet r: record\n\n@genType\nmodule type MT = {\n  let x: int\n}\n\n@genType\ntype firstClassModule = module(MT)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Hooks.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as React from \"react\";\nimport * as ImportHooks from \"./ImportHooks.js\";\nimport * as ImportHookDefault from \"./ImportHookDefault.js\";\n\nfunction Hooks(Props) {\n  var vehicle = Props.vehicle;\n  var match = React.useState(function () {\n        return 0;\n      });\n  var setCount = match[1];\n  var count = match[0];\n  return React.createElement(\"div\", undefined, React.createElement(\"p\", undefined, \"Hooks example \" + (vehicle.name + (\" clicked \" + (String(count) + \" times\")))), React.createElement(\"button\", {\n                  onClick: (function (param) {\n                      return Curry._1(setCount, (function (param) {\n                                    return count + 1 | 0;\n                                  }));\n                    })\n                }, \"Click me\"), React.createElement(ImportHooks.make, {\n                  person: {\n                    name: \"Mary\",\n                    age: 71\n                  },\n                  children: null,\n                  renderMe: (function (x) {\n                      return x.randomString;\n                    })\n                }, \"child1\", \"child2\"), React.createElement(ImportHookDefault.make, {\n                  person: {\n                    name: \"DefaultImport\",\n                    age: 42\n                  },\n                  children: null,\n                  renderMe: (function (x) {\n                      return x.randomString;\n                    })\n                }, \"child1\", \"child2\"));\n}\n\nfunction Hooks$anotherComponent(Props) {\n  var vehicle = Props.vehicle;\n  var callback = Props.callback;\n  Curry._1(callback, undefined);\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name);\n}\n\nfunction Hooks$Inner(Props) {\n  var vehicle = Props.vehicle;\n  return 
React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name);\n}\n\nfunction Hooks$Inner$anotherComponent(Props) {\n  var vehicle = Props.vehicle;\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name);\n}\n\nfunction Hooks$Inner$Inner2(Props) {\n  var vehicle = Props.vehicle;\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name);\n}\n\nfunction Hooks$Inner$Inner2$anotherComponent(Props) {\n  var vehicle = Props.vehicle;\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name);\n}\n\nvar Inner2 = {\n  make: Hooks$Inner$Inner2,\n  anotherComponent: Hooks$Inner$Inner2$anotherComponent\n};\n\nvar Inner = {\n  make: Hooks$Inner,\n  anotherComponent: Hooks$Inner$anotherComponent,\n  Inner2: Inner2\n};\n\nfunction Hooks$NoProps(Props) {\n  return React.createElement(\"div\", undefined, null);\n}\n\nvar NoProps = {\n  make: Hooks$NoProps\n};\n\nfunction functionWithRenamedArgs(_to, _Type, cb) {\n  Curry._1(cb, _to);\n  return _to.name + _Type.name;\n}\n\nfunction Hooks$componentWithRenamedArgs(Props) {\n  var _to = Props.to;\n  var _Type = Props.Type;\n  var cb = Props.cb;\n  Curry._1(cb, _to);\n  return _to.name + _Type.name;\n}\n\nfunction Hooks$makeWithRef(Props) {\n  var vehicle = Props.vehicle;\n  return function (ref) {\n    if (ref == null) {\n      return null;\n    } else {\n      return React.createElement(\"button\", {\n                  ref: ref\n                }, vehicle.name);\n    }\n  };\n}\n\nvar testForwardRef = React.forwardRef(function (param, param$1) {\n      return Hooks$makeWithRef(param)(param$1);\n    });\n\nvar input = React.forwardRef(function (Props, param) {\n      var partial_arg = Props.r;\n      return React.createElement(\"div\", {\n                  ref: param\n                }, partial_arg.x);\n    });\n\nfunction Hooks$polymorphicComponent(Props) {\n  var param = Props.p;\n  return param[0].name;\n}\n\nfunction 
Hooks$functionReturningReactElement(Props) {\n  return Props.name;\n}\n\nfunction Hooks$RenderPropRequiresConversion(Props) {\n  var renderVehicle = Props.renderVehicle;\n  return Curry._1(renderVehicle, {\n              vehicle: {\n                name: \"Car\"\n              },\n              number: 42\n            });\n}\n\nvar RenderPropRequiresConversion = {\n  make: Hooks$RenderPropRequiresConversion\n};\n\nfunction Hooks$aComponentWithChildren(Props) {\n  var vehicle = Props.vehicle;\n  var children = Props.children;\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name, React.createElement(\"div\", undefined, children));\n}\n\nvar make = Hooks;\n\nvar $$default = Hooks;\n\nvar anotherComponent = Hooks$anotherComponent;\n\nvar componentWithRenamedArgs = Hooks$componentWithRenamedArgs;\n\nvar makeWithRef = Hooks$makeWithRef;\n\nvar polymorphicComponent = Hooks$polymorphicComponent;\n\nvar functionReturningReactElement = Hooks$functionReturningReactElement;\n\nvar aComponentWithChildren = Hooks$aComponentWithChildren;\n\nexport {\n  make ,\n  $$default ,\n  $$default as default,\n  anotherComponent ,\n  Inner ,\n  NoProps ,\n  functionWithRenamedArgs ,\n  componentWithRenamedArgs ,\n  makeWithRef ,\n  testForwardRef ,\n  input ,\n  polymorphicComponent ,\n  functionReturningReactElement ,\n  RenderPropRequiresConversion ,\n  aComponentWithChildren ,\n  \n}\n/* testForwardRef Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Hooks.res",
    "content": "type vehicle = {name: string}\n\n@react.component\nlet make = (~vehicle) => {\n  let (count, setCount) = React.useState(() => 0)\n\n  <div>\n    <p>\n      {React.string(\n        \"Hooks example \" ++ (vehicle.name ++ (\" clicked \" ++ (string_of_int(count) ++ \" times\"))),\n      )}\n    </p>\n    <button onClick={_ => setCount(_ => count + 1)}> {React.string(\"Click me\")} </button>\n    <ImportHooks person={name: \"Mary\", age: 71} renderMe={x => React.string(x[\"randomString\"])}>\n      {React.string(\"child1\")} {React.string(\"child2\")}\n    </ImportHooks>\n    <ImportHookDefault\n      person={name: \"DefaultImport\", age: 42} renderMe={x => React.string(x[\"randomString\"])}>\n      {React.string(\"child1\")} {React.string(\"child2\")}\n    </ImportHookDefault>\n  </div>\n}\n\n@genType\nlet default = make\n\n@genType @react.component\nlet anotherComponent = (~vehicle, ~callback: unit => unit) => {\n  callback()\n  <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n}\n\nmodule Inner = {\n  @genType @react.component\n  let make = (~vehicle) => <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n\n  @genType @react.component\n  let anotherComponent = (~vehicle) => <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n\n  module Inner2 = {\n    @genType @react.component\n    let make = (~vehicle) => <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n\n    @genType @react.component\n    let anotherComponent = (~vehicle) =>\n      <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n  }\n}\n\nmodule NoProps = {\n  @genType @react.component\n  let make = () => <div> React.null </div>\n}\n\ntype cb = (~_to: vehicle) => unit\n\n@genType\nlet functionWithRenamedArgs = (~_to, ~_Type, ~cb: cb) => {\n  cb(~_to)\n  _to.name ++ _Type.name\n}\n\n@genType @react.component\nlet componentWithRenamedArgs = (~_to, ~_Type, ~cb: cb) => {\n  cb(~_to)\n  React.string(_to.name ++ 
_Type.name)\n}\n\n@genType @react.component\nlet makeWithRef = (~vehicle) => {\n  let _ = 34\n  ref =>\n    switch ref->Js.Nullable.toOption {\n    | Some(ref) => <button ref={ReactDOM.Ref.domRef(ref)}> {React.string(vehicle.name)} </button>\n    | None => React.null\n    }\n}\n\n@genType\nlet testForwardRef = React.forwardRef(makeWithRef)\n\ntype r = {x: string}\n\n@genType @react.component\nlet input = React.forwardRef((~r, (), ref) => <div ref={Obj.magic(ref)}> {React.string(r.x)} </div>)\n\n@genType\ntype callback<'input, 'output> = React.callback<'input, 'output>\n\n@genType\ntype testReactContext = React.Context.t<int>\n\n@genType\ntype testReactRef = React.Ref.t<int>\n\n@genType\ntype testDomRef = ReactDOM.domRef\n\n@genType @react.component\nlet polymorphicComponent = (~p as (x, _)) => React.string(x.name)\n\n@genType @react.component\nlet functionReturningReactElement = (~name) => React.string(name)\n\nmodule RenderPropRequiresConversion = {\n  @genType @react.component\n  let make = (~renderVehicle: {\"vehicle\": vehicle, \"number\": int} => React.element) => {\n    let car = {name: \"Car\"}\n    renderVehicle({\"vehicle\": car, \"number\": 42})\n  }\n}\n\n@genType @react.component\nlet aComponentWithChildren = (~vehicle, ~children) =>\n  <div> {React.string(\"Another Hook \" ++ vehicle.name)} <div> children </div> </div>\n"
  },
  {
    "path": "analysis/examples/larger-project/src/IgnoreInterface.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/IgnoreInterface.res",
    "content": "@gentype\ntype t = int\n"
  },
  {
    "path": "analysis/examples/larger-project/src/IgnoreInterface.resi",
    "content": "// Use the annotations, and definitions, from the .re file\n@@genType.ignoreInterface\n\n@genType\ntype t\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImmutableArray.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImmutableArray.res",
    "content": ""
  },
  {
    "path": "analysis/examples/larger-project/src/ImmutableArray.resi",
    "content": ""
  },
  {
    "path": "analysis/examples/larger-project/src/ImportHookDefault.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport ImportHookDefaultGen from \"./ImportHookDefault.gen\";\nimport * as ImportHookDefaultGen$1 from \"./ImportHookDefault.gen\";\n\nvar make = ImportHookDefaultGen$1.make;\n\nvar make2 = ImportHookDefaultGen;\n\nexport {\n  make ,\n  make2 ,\n  \n}\n/* make Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImportHookDefault.res",
    "content": "type person = {\n  name: string,\n  age: int,\n}\n\n@genType.import((\"./hookExample\", \"default\")) @react.component\nexternal make: (\n  ~person: person,\n  ~children: React.element,\n  ~renderMe: ImportHooks.renderMe<string>,\n) => React.element = \"make\"\n\n@genType.import(\"./hookExample\") @react.component\nexternal make2: (\n  ~person: person,\n  ~children: React.element,\n  ~renderMe: ImportHooks.renderMe<string>,\n) => React.element = \"default\"\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImportHooks.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as ImportHooksGen from \"./ImportHooks.gen\";\n\nvar make = ImportHooksGen.makeRenamed;\n\nfunction foo(prim) {\n  return ImportHooksGen.foo(prim);\n}\n\nexport {\n  make ,\n  foo ,\n  \n}\n/* make Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImportHooks.res",
    "content": "@genType\ntype person = {\n  name: string,\n  age: int,\n}\n\n@genType\ntype renderMe<'a> = React.component<{\n  \"randomString\": string,\n  \"poly\": 'a,\n}>\n\n@genType.import(\"./hookExample\") @react.component\nexternal make: (\n  ~person: person,\n  ~children: React.element,\n  ~renderMe: renderMe<'a>,\n) => React.element = \"makeRenamed\"\n\n@genType.import(\"./hookExample\")\nexternal foo: (~person: person) => string = \"foo\"\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImportIndex.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport ImportIndexGen from \"./ImportIndex.gen\";\n\nvar make = ImportIndexGen;\n\nexport {\n  make ,\n  \n}\n/* make Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImportIndex.res",
    "content": "// TODO: rename metodd back once remmt bug is fixed\n@genType.import(\"./\") @react.component\nexternal make: (~method: @string [#push | #replace]=?) => React.element = \"default\"\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImportJsValue.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport ImportJsValueGen from \"./ImportJsValue.gen\";\nimport * as ImportJsValueGen$1 from \"./ImportJsValue.gen\";\n\nfunction round(prim) {\n  return ImportJsValueGen$1.round(prim);\n}\n\nfunction area(prim) {\n  return ImportJsValueGen$1.area(prim);\n}\n\nfunction returnMixedArray(prim) {\n  return ImportJsValueGen$1.returnMixedArray();\n}\n\nvar roundedNumber = ImportJsValueGen$1.round(1.8);\n\nvar areaValue = ImportJsValueGen$1.area({\n      x: 3,\n      y: undefined\n    });\n\nfunction getAbs(x) {\n  return x.getAbs();\n}\n\nvar AbsoluteValue = {\n  getAbs: getAbs\n};\n\nfunction useGetProp(x) {\n  return x.getProp() + 1 | 0;\n}\n\nfunction useGetAbs(x) {\n  return x.getAbs() + 1 | 0;\n}\n\nfunction useColor(prim) {\n  return ImportJsValueGen$1.useColor(prim);\n}\n\nfunction higherOrder(prim) {\n  return ImportJsValueGen$1.higherOrder(prim);\n}\n\nvar returnedFromHigherOrder = ImportJsValueGen$1.higherOrder(function (prim0, prim1) {\n      return prim0 + prim1 | 0;\n    });\n\nfunction convertVariant(prim) {\n  return ImportJsValueGen$1.convertVariant(prim);\n}\n\nfunction polymorphic(prim) {\n  return ImportJsValueGen$1.polymorphic(prim);\n}\n\nvar $$default = ImportJsValueGen;\n\nexport {\n  round ,\n  area ,\n  returnMixedArray ,\n  roundedNumber ,\n  areaValue ,\n  AbsoluteValue ,\n  useGetProp ,\n  useGetAbs ,\n  useColor ,\n  higherOrder ,\n  returnedFromHigherOrder ,\n  convertVariant ,\n  polymorphic ,\n  $$default ,\n  $$default as default,\n  \n}\n/* roundedNumber Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImportJsValue.res",
    "content": "@ocaml.doc(\"\n  * Wrap JS values to be used from Reason\n  \")\n@genType.import(\"./MyMath\")\nexternal /* This is the module to import from. */\n/* Name and type of the JS value to bind to. */\nround: float => float = \"round\"\n\n@genType\ntype point = {\n  x: int,\n  y: option<int>,\n}\n\n@genType.import(\"./MyMath\")\nexternal /* This is the module to import from. */\n/* Name and type of the JS value to bind to. */\narea: point => int = \"area\"\n\n@genType.import(\"./MyMath\")\ntype numberOrString\n\n@genType.import(\"./MyMath\")\nexternal returnMixedArray: unit => array<numberOrString> = \"returnMixedArray\"\n\n@genType\nlet roundedNumber = round(1.8)\n\n@genType\nlet areaValue = area({x: 3, y: None})\n\nmodule AbsoluteValue = {\n  @genType.import((\"./MyMath\", \"AbsoluteValue\"))\n  type t = {\"getAbs\": (. unit) => int}\n\n  /* This is untyped */\n  @send external getProp: t => int = \"getProp\"\n\n  /* This is also untyped, as we \"trust\" the type declaration in absoluteVaue */\n  let getAbs = (x: t) => {\n    let getAbs = x[\"getAbs\"]\n    getAbs(.)\n  }\n}\n\n@genType\nlet useGetProp = (x: AbsoluteValue.t) => x->AbsoluteValue.getProp + 1\n\n@genType\nlet useGetAbs = (x: AbsoluteValue.t) => x->AbsoluteValue.getAbs + 1\n\n@genType.import(\"./MyMath\")\ntype stringFunction\n\n@genType\ntype color = [#tomato | #gray]\n\n@genType.import(\"./MyMath\") external useColor: color => int = \"useColor\"\n\n@genType.import(\"./MyMath\")\nexternal higherOrder: ((int, int) => int) => int = \"higherOrder\"\n\n@genType\nlet returnedFromHigherOrder = higherOrder(\\\"+\")\n\ntype variant =\n  | I(int)\n  | S(string)\n\n@genType.import(\"./MyMath\")\nexternal convertVariant: variant => variant = \"convertVariant\"\n\n@genType.import(\"./MyMath\") external polymorphic: 'a => 'a = \"polymorphic\"\n\n@genType.import(\"./MyMath\") external default: int = \"default\"\n\n@genType.import((\"./MyMath\", \"num\"))\ntype num\n\n@genType.import((\"./MyMath\", 
\"num\"))\ntype myNum\n\n@genType.import(\"./MyMath\")\ntype polyType<'a>\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImportMyBanner.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as ImportMyBannerGen from \"./ImportMyBanner.gen\";\n\nfunction make(prim0, prim1, prim2) {\n  return ImportMyBannerGen.make(prim0, prim1 !== undefined ? Caml_option.valFromOption(prim1) : undefined, prim2);\n}\n\nexport {\n  make ,\n  \n}\n/* ./ImportMyBanner.gen Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ImportMyBanner.res",
    "content": "@ocaml.doc(\"\n  * Wrap component MyBanner to be used from Reason.\n  \")\n@genType\ntype message = {text: string}\n\n@genType.import(\"./MyBanner\")\nexternal /* Module with the JS component to be wrapped. */\n/* The make function will be automatically generated from the types below. */\nmake: (~show: bool, ~message: option<message>=?, 'a) => React.element = \"make\"\n\nlet make = make\n"
  },
  {
    "path": "analysis/examples/larger-project/src/JSResource.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/JSResource.res",
    "content": "type t<'a>\n\n@module external jSResource: string => t<'a> = \"JSResource\"\n"
  },
  {
    "path": "analysis/examples/larger-project/src/LetPrivate.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar y = 34;\n\nexport {\n  y ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/LetPrivate.res",
    "content": "%%private(\n  @genType\n  let x = 34\n)\n\n@genType\nlet y = x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ModuleAliases.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar Inner = {};\n\nvar Outer = {\n  Inner: Inner\n};\n\nvar InnerNested = {};\n\nvar Inner2 = {\n  InnerNested: InnerNested,\n  OuterInnerAlias2: undefined\n};\n\nvar Outer2 = {\n  OuterInnerAlias: undefined,\n  Inner2: Inner2\n};\n\nfunction testNested(x) {\n  return x;\n}\n\nfunction testInner(x) {\n  return x;\n}\n\nfunction testInner2(x) {\n  return x;\n}\n\nvar Outer2Alias;\n\nvar InnerNestedAlias;\n\nexport {\n  Outer ,\n  Outer2 ,\n  Outer2Alias ,\n  InnerNestedAlias ,\n  testNested ,\n  testInner ,\n  testInner2 ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ModuleAliases.res",
    "content": "module Outer = {\n  module Inner = {\n    type innerT = {inner: string}\n  }\n}\n\nmodule Outer2 = {\n  module OuterInnerAlias = Outer.Inner\n  module Inner2 = {\n    module InnerNested = {\n      type t = {nested: int}\n    }\n    module OuterInnerAlias2 = OuterInnerAlias\n  }\n}\n\nmodule Outer2Alias = Outer2\n\nmodule InnerNestedAlias = Outer2.Inner2.InnerNested\n\n@genType\nlet testNested = (x: InnerNestedAlias.t) => x\n\n@genType\nlet testInner = (x: Outer2Alias.OuterInnerAlias.innerT) => x\n\n@genType\nlet testInner2 = (x: Outer2Alias.Inner2.OuterInnerAlias2.innerT) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ModuleAliases2.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar Inner = {};\n\nvar Outer = {\n  Inner: Inner\n};\n\nvar OuterAlias;\n\nvar InnerAlias;\n\nvar q = 42;\n\nexport {\n  Outer ,\n  OuterAlias ,\n  InnerAlias ,\n  q ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ModuleAliases2.res",
    "content": "@genType\ntype record = {\n  x: int,\n  y: string,\n}\n\nmodule Outer = {\n  @genType\n  type outer = {outer: string}\n\n  module Inner = {\n    @genType\n    type inner = {inner: string}\n  }\n}\n\nmodule OuterAlias = Outer\n\nmodule InnerAlias = OuterAlias.Inner\n\nlet q = 42\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ModuleExceptionBug.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nfunction customDouble(foo) {\n  return (foo << 1);\n}\n\nvar Dep = {\n  customDouble: customDouble\n};\n\nvar MyOtherException = /* @__PURE__ */Caml_exceptions.create(\"ModuleExceptionBug.MyOtherException\");\n\nconsole.log(34);\n\nvar ddjdj = 34;\n\nexport {\n  Dep ,\n  MyOtherException ,\n  ddjdj ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ModuleExceptionBug.res",
    "content": "module Dep = {\n  let customDouble = foo => foo * 2\n}\n\nexception MyOtherException\n\nlet ddjdj = 34\nJs.log(ddjdj)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/NestedModules.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction nested3Function(x) {\n  return x;\n}\n\nvar Nested3 = {\n  x: 0,\n  y: 1,\n  z: 2,\n  w: 3,\n  nested3Value: \"nested3Value\",\n  nested3Function: nested3Function\n};\n\nfunction nested2Function(x) {\n  return x;\n}\n\nvar Nested2 = {\n  x: 0,\n  nested2Value: 1,\n  y: 2,\n  Nested3: Nested3,\n  nested2Function: nested2Function\n};\n\nvar Universe = {\n  theAnswer: 42,\n  notExported: 33,\n  Nested2: Nested2,\n  someString: \"some exported string\"\n};\n\nvar notNested = 1;\n\nexport {\n  notNested ,\n  Universe ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/NestedModules.res",
    "content": "@genType\nlet notNested = 1\n\nmodule Universe = {\n  @genType\n  let theAnswer = 42\n\n  let notExported = 33\n\n  @genType\n  type nestedType = array<string>\n\n  module Nested2 = {\n    let x = 0\n\n    @genType\n    let nested2Value = 1\n\n    let y = 2\n\n    @genType\n    type nested2Type = array<array<string>>\n\n    module Nested3 = {\n      let x = 0\n      let y = 1\n      let z = 2\n      let w = 3\n\n      @genType\n      type nested3Type = array<array<array<string>>>\n\n      @genType\n      let nested3Value = \"nested3Value\"\n\n      @genType\n      let nested3Function = (x: nested2Type) => x\n    }\n\n    @genType\n    let nested2Function = (x: Nested3.nested3Type) => x\n  }\n\n  @genType\n  type variant =\n    | A\n    | B(string)\n\n  @genType\n  let someString = \"some exported string\"\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/NestedModulesInSignature.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar Universe = {\n  theAnswer: 42\n};\n\nexport {\n  Universe ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/NestedModulesInSignature.res",
    "content": "module Universe = {\n  let theAnswer = 42\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/NestedModulesInSignature.resi",
    "content": "module Universe: {\n  @genType\n  let theAnswer: int\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Newsyntax.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar x = 34;\n\nvar y = 11;\n\nexport {\n  x ,\n  y ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Newsyntax.res",
    "content": "let x = 34\n\nlet y = 11\n\ntype record = {\n  xxx: int,\n  yyy: int,\n}\n\ntype variant = A | B(int) | C\n\ntype record2 = {xx: int, yy: int}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Newton.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\n\nfunction $neg(prim0, prim1) {\n  return prim0 - prim1;\n}\n\nfunction $plus(prim0, prim1) {\n  return prim0 + prim1;\n}\n\nfunction $star(prim0, prim1) {\n  return prim0 * prim1;\n}\n\nfunction $slash(prim0, prim1) {\n  return prim0 / prim1;\n}\n\nfunction newton(f, fPrimed, initial, threshold) {\n  var current = {\n    contents: initial\n  };\n  var iterateMore = function (previous, next) {\n    var delta = next >= previous ? next - previous : previous - next;\n    current.contents = next;\n    return delta >= threshold;\n  };\n  var _param;\n  while(true) {\n    var previous = current.contents;\n    var next = previous - Curry._1(f, previous) / Curry._1(fPrimed, previous);\n    if (!iterateMore(previous, next)) {\n      return current.contents;\n    }\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction f(x) {\n  return x * x * x - 2.0 * x * x - 11.0 * x + 12.0;\n}\n\nfunction fPrimed(x) {\n  return 3.0 * x * x - 4.0 * x - 11.0;\n}\n\nvar result = newton(f, fPrimed, 5.0, 0.0003);\n\nconsole.log(result, f(result));\n\nexport {\n  $neg ,\n  $plus ,\n  $star ,\n  $slash ,\n  newton ,\n  f ,\n  fPrimed ,\n  result ,\n  \n}\n/* result Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Newton.res",
    "content": "let \\\"-\" = \\\"-.\"\nlet \\\"+\" = \\\"+.\"\nlet \\\"*\" = \\\"*.\"\nlet \\\"/\" = \\\"/.\"\n\nlet newton = (~f, ~fPrimed, ~initial, ~threshold) => {\n  let current = ref(initial)\n  let iterateMore = (previous, next) => {\n    let delta = next >= previous ? next - previous : previous - next\n    current := next\n    !(delta < threshold)\n  }\n\n  @progress(iterateMore)\n  let rec loop = () => {\n    let previous = current.contents\n    let next = previous - f(previous) / fPrimed(previous)\n    if iterateMore(previous, next) {\n      loop()\n    } else {\n      current.contents\n    }\n  }\n  loop()\n}\nlet f = x => x * x * x - 2.0 * x * x - 11.0 * x + 12.0\n\nlet fPrimed = x => 3.0 * x * x - 4.0 * x - 11.0\n\nlet result = newton(~f, ~fPrimed, ~initial=5.0, ~threshold=0.0003)\n\nJs.log2(result, f(result))\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Opaque.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction noConversion(x) {\n  return x;\n}\n\nfunction testConvertNestedRecordFromOtherFile(x) {\n  return x;\n}\n\nexport {\n  noConversion ,\n  testConvertNestedRecordFromOtherFile ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Opaque.res",
    "content": "@genType.opaque\ntype opaqueFromRecords = A(Records.coord)\n\n@genType\nlet noConversion = (x: opaqueFromRecords) => x\n\n@genType\ntype pair = (opaqueFromRecords, opaqueFromRecords)\n\n@genType\nlet testConvertNestedRecordFromOtherFile = (x: Records.business) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/OptArg.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction foo(xOpt, yOpt, zOpt, w) {\n  var x = xOpt !== undefined ? xOpt : 1;\n  var y = yOpt !== undefined ? yOpt : 2;\n  var z = zOpt !== undefined ? zOpt : 3;\n  return ((x + y | 0) + z | 0) + w | 0;\n}\n\nfunction bar(x, y, z, w) {\n  return y + w | 0;\n}\n\nconsole.log(foo(3, undefined, undefined, 4));\n\nconsole.log(7);\n\nfunction threeArgs(aOpt, bOpt, cOpt, d) {\n  var a = aOpt !== undefined ? aOpt : 1;\n  var b = bOpt !== undefined ? bOpt : 2;\n  var c = cOpt !== undefined ? cOpt : 3;\n  return ((a + b | 0) + c | 0) + d | 0;\n}\n\nconsole.log(threeArgs(4, undefined, 7, 1));\n\nconsole.log(threeArgs(4, undefined, undefined, 1));\n\nfunction twoArgs(aOpt, bOpt, c) {\n  var a = aOpt !== undefined ? aOpt : 1;\n  var b = bOpt !== undefined ? bOpt : 2;\n  return (a + b | 0) + c | 0;\n}\n\nconsole.log(twoArgs(undefined, undefined, 1));\n\nvar a = 3;\n\nconsole.log(a + 44 | 0);\n\nfunction wrapfourArgs(a, b, c, n) {\n  var dOpt;\n  var a$1 = a !== undefined ? a : 1;\n  var b$1 = b !== undefined ? b : 2;\n  var c$1 = c !== undefined ? c : 3;\n  var d = dOpt !== undefined ? dOpt : 4;\n  return (((a$1 + b$1 | 0) + c$1 | 0) + d | 0) + n | 0;\n}\n\nconsole.log(wrapfourArgs(3, undefined, 44, 44));\n\nconsole.log(wrapfourArgs(undefined, 4, 44, 44));\n\nexport {\n  foo ,\n  bar ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/OptArg.res",
    "content": "let foo = (~x=1, ~y=2, ~z=3, w) => x + y + z + w\n\nlet bar = (~x=?, ~y, ~z=?, w) => y + w\n\nJs.log(foo(~x=3, 4))\n\nJs.log(bar(~y=3, 4))\n\nlet threeArgs = (~a=1, ~b=2, ~c=3, d) => a + b + c + d\n\nJs.log(threeArgs(~a=4, ~c=7, 1))\nJs.log(threeArgs(~a=4, 1))\n\nlet twoArgs = (~a=1, ~b=2, c) => a + b + c\n\nJs.log(1 |> twoArgs)\n\nlet oneArg = (~a=1, ~z, b) => a + b\n\nlet wrapOneArg = (~a=?, n) => oneArg(~a?, ~z=33, n)\n\nJs.log(wrapOneArg(~a=3, 44))\n\nlet fourArgs = (~a=1, ~b=2, ~c=3, ~d=4, n) => a + b + c + d + n\n\nlet wrapfourArgs = (~a=?, ~b=?, ~c=?, n) => fourArgs(~a?, ~b?, ~c?, n)\n\nJs.log(wrapfourArgs(~a=3, ~c=44, 44))\nJs.log(wrapfourArgs(~b=4, ~c=44, 44))\n"
  },
  {
    "path": "analysis/examples/larger-project/src/OptArg.resi",
    "content": "let foo: (~x: int=?, ~y: int=?, ~z: int=?, int) => int\nlet bar: (~x: 'a=?, ~y: int, ~z: 'b=?, int) => int\n"
  },
  {
    "path": "analysis/examples/larger-project/src/P.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nvar Sys_error = /* @__PURE__ */Caml_exceptions.create(\"P.Sys_error\");\n\nfunction input(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"P.res\",\n          2,\n          17\n        ],\n        Error: new Error()\n      };\n}\n\nfunction output(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"P.res\",\n          3,\n          18\n        ],\n        Error: new Error()\n      };\n}\n\nfunction open_temp_file(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"P.res\",\n          4,\n          26\n        ],\n        Error: new Error()\n      };\n}\n\nfunction close_out(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"P.res\",\n          5,\n          21\n        ],\n        Error: new Error()\n      };\n}\n\nfunction output_char(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"P.res\",\n          6,\n          23\n        ],\n        Error: new Error()\n      };\n}\n\nfunction really_input(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"P.res\",\n          7,\n          24\n        ],\n        Error: new Error()\n      };\n}\n\nfunction pp_get_formatter_tag_functions(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"P.res\",\n          8,\n          42\n        ],\n        Error: new Error()\n      };\n}\n\nthrow {\n      RE_EXN_ID: \"Assert_failure\",\n      _1: [\n        \"P.res\",\n        11,\n        13\n      ],\n      Error: new Error()\n    };\n\nexport {\n  Sys_error ,\n  input ,\n  output ,\n  open_temp_file ,\n  close_out ,\n  output_char ,\n  really_input ,\n  pp_get_formatter_tag_functions ,\n  stderr ,\n  print_char ,\n  \n}\n/* stderr Not a 
pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/P.res",
    "content": "exception Sys_error(string)\nlet input = _ => assert false\nlet output = _ => assert false\nlet open_temp_file = _ => assert false\nlet close_out = _ => assert false\nlet output_char = _ => assert false\nlet really_input = _ => assert false\nlet pp_get_formatter_tag_functions = _ => assert false\ntype ttt = Open_text\ntype out_channel\nlet stderr = assert false\nlet print_char = _ => assert false\n\ntype nativeint"
  },
  {
    "path": "analysis/examples/larger-project/src/Records.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_List from \"rescript/lib/es6/belt_List.js\";\nimport * as Belt_Array from \"rescript/lib/es6/belt_Array.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Belt_Option from \"rescript/lib/es6/belt_Option.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\n\nfunction computeArea(param) {\n  return Math.imul(Math.imul(param.x, param.y), Belt_Option.mapWithDefault(param.z, 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction coord2d(x, y) {\n  return {\n          x: x,\n          y: y,\n          z: undefined\n        };\n}\n\nvar getOpt = Belt_Option.mapWithDefault;\n\nfunction findAddress(business) {\n  return Belt_Option.mapWithDefault(business.address, /* [] */0, (function (a) {\n                return {\n                        hd: a,\n                        tl: /* [] */0\n                      };\n              }));\n}\n\nfunction findAllAddresses(businesses) {\n  return Belt_List.toArray(Belt_List.flatten(Belt_List.fromArray(Belt_Array.map(businesses, (function (business) {\n                            return Pervasives.$at(Belt_Option.mapWithDefault(business.address, /* [] */0, (function (a) {\n                                              return {\n                                                      hd: a,\n                                                      tl: /* [] */0\n                                                    };\n                                            })), Belt_Option.mapWithDefault(business.owner, /* [] */0, (function (p) {\n                                              return Belt_Option.mapWithDefault(p.address, /* [] */0, (function (a) {\n                                                            return {\n                                                                    hd: a,\n                                                                    tl: /* [] 
*/0\n                                                                  };\n                                                          }));\n                                            })));\n                          })))));\n}\n\nfunction getPayload(param) {\n  return param.payload;\n}\n\nfunction getPayloadRecord(param) {\n  return param.payload;\n}\n\nvar recordValue = {\n  v: 1,\n  w: 1\n};\n\nvar payloadValue = {\n  num: 1,\n  payload: recordValue\n};\n\nfunction getPayloadRecordPlusOne(param) {\n  var payload = param.payload;\n  return {\n          v: payload.v + 1 | 0,\n          w: payload.w\n        };\n}\n\nfunction findAddress2(business) {\n  return Belt_Option.mapWithDefault(Caml_option.nullable_to_opt(business.address2), /* [] */0, (function (a) {\n                return {\n                        hd: a,\n                        tl: /* [] */0\n                      };\n              }));\n}\n\nvar someBusiness2_owner = null;\n\nvar someBusiness2_address2 = null;\n\nvar someBusiness2 = {\n  name: \"SomeBusiness\",\n  owner: someBusiness2_owner,\n  address2: someBusiness2_address2\n};\n\nfunction computeArea3(o) {\n  return Math.imul(Math.imul(o.x, o.y), Belt_Option.mapWithDefault(Caml_option.nullable_to_opt(o.z), 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction computeArea4(o) {\n  return Math.imul(Math.imul(o.x, o.y), Belt_Option.mapWithDefault(o.z, 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction testMyRec(x) {\n  return x.type_;\n}\n\nfunction testMyRec2(x) {\n  return x;\n}\n\nfunction testMyObj(x) {\n  return x.type_;\n}\n\nfunction testMyObj2(x) {\n  return x;\n}\n\nfunction testMyRecBsAs(x) {\n  return x.type;\n}\n\nfunction testMyRecBsAs2(x) {\n  return x;\n}\n\nvar origin = {\n  x: 0,\n  y: 0,\n  z: 0\n};\n\nvar someBusiness = {\n  name: \"SomeBusiness\",\n  owner: undefined,\n  address: undefined\n};\n\nexport {\n  origin ,\n  computeArea ,\n  coord2d ,\n  getOpt 
,\n  findAddress ,\n  someBusiness ,\n  findAllAddresses ,\n  getPayload ,\n  getPayloadRecord ,\n  recordValue ,\n  payloadValue ,\n  getPayloadRecordPlusOne ,\n  findAddress2 ,\n  someBusiness2 ,\n  computeArea3 ,\n  computeArea4 ,\n  testMyRec ,\n  testMyRec2 ,\n  testMyObj ,\n  testMyObj2 ,\n  testMyRecBsAs ,\n  testMyRecBsAs2 ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Records.res",
    "content": "open Belt\n\n@genType\ntype coord = {\n  x: int,\n  y: int,\n  z: option<int>,\n}\n\n@genType\nlet origin = {x: 0, y: 0, z: Some(0)}\n\n@genType\nlet computeArea = ({x, y, z}) => {\n  open Option\n  x * y * z->mapWithDefault(1, n => n)\n}\n\n@genType\nlet coord2d = (x, y) => {x: x, y: y, z: None}\n\n@genType\ntype person = {\n  name: string,\n  age: int,\n  address: option<string>,\n}\n\n@genType\ntype business = {\n  name: string,\n  owner: option<person>,\n  address: option<string>,\n}\n\nlet getOpt = (opt, default, foo) => opt->Option.mapWithDefault(default, foo)\n\n@genType\nlet findAddress = (business: business): list<string> =>\n  business.address->getOpt(list{}, a => list{a})\n\n@genType\nlet someBusiness = {name: \"SomeBusiness\", owner: None, address: None}\n\n@genType\nlet findAllAddresses = (businesses: array<business>): array<string> =>\n  businesses\n  ->Array.map(business =>\n    \\\"@\"(\n      business.address->getOpt(list{}, a => list{a}),\n      business.owner->getOpt(list{}, p => p.address->getOpt(list{}, a => list{a})),\n    )\n  )\n  ->List.fromArray\n  ->List.flatten\n  ->List.toArray\n\n@genType\ntype payload<'a> = {\n  num: int,\n  payload: 'a,\n}\n\n@genType\nlet getPayload = ({payload}) => payload\n\n@genType\ntype record = {\n  v: int,\n  w: int,\n}\n\n@genType\nlet getPayloadRecord = ({payload}): record => payload\n\n@genType\nlet recordValue = {v: 1, w: 1}\n\n@genType\nlet payloadValue = {num: 1, payload: recordValue}\n\n@genType\nlet getPayloadRecordPlusOne = ({payload}): record => {\n  ...payload,\n  v: payload.v + 1,\n}\n\n@genType\ntype business2 = {\n  name: string,\n  owner: Js.Nullable.t<person>,\n  address2: Js.Nullable.t<string>,\n}\n\n@genType\nlet findAddress2 = (business: business2): list<string> =>\n  business.address2->Js.Nullable.toOption->getOpt(list{}, a => list{a})\n\n@genType\nlet someBusiness2 = {\n  name: \"SomeBusiness\",\n  owner: Js.Nullable.null,\n  address2: Js.Nullable.null,\n}\n\n@genType\nlet computeArea3 = (o: {\"x\": int, \"y\": int, \"z\": Js.Nullable.t<int>}) =>\n  o[\"x\"] * o[\"y\"] * o[\"z\"]->Js.Nullable.toOption->Option.mapWithDefault(1, n => n)\n\n@genType\nlet computeArea4 = (o: {\"x\": int, \"y\": int, \"z\": option<int>}) =>\n  o[\"x\"] * o[\"y\"] * o[\"z\"]->Option.mapWithDefault(1, n => n)\n\n@genType\ntype mix = {\"a\": int, \"b\": int, \"c\": option<{\"name\": string, \"surname\": string}>}\n\n@genType\ntype myRec = {\n  @genType.as(\"type\")\n  type_: string,\n}\n\n@genType\ntype myObj = {\"type_\": string}\n\n@genType\nlet testMyRec = (x: myRec) => x.type_\n\n@genType\nlet testMyRec2 = (x: myRec) => x\n\n@genType\nlet testMyObj = (x: myObj) => x[\"type_\"]\n\n@genType\nlet testMyObj2 = (x: myObj) => x\n\n@genType\ntype myRecBsAs = {\n  @as(\"type\")\n  type_: string,\n}\n\n@genType\nlet testMyRecBsAs = (x: myRecBsAs) => x.type_\n\n@genType\nlet testMyRecBsAs2 = (x: myRecBsAs) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/References.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction create(x) {\n  return {\n          contents: x\n        };\n}\n\nfunction access(r) {\n  return r.contents + 1 | 0;\n}\n\nfunction update(r) {\n  r.contents = r.contents + 1 | 0;\n  \n}\n\nfunction get(r) {\n  return r.contents;\n}\n\nfunction make(prim) {\n  return {\n          contents: prim\n        };\n}\n\nfunction set(r, v) {\n  r.contents = v;\n  \n}\n\nvar R = {\n  get: get,\n  make: make,\n  set: set\n};\n\nfunction destroysRefIdentity(x) {\n  return x;\n}\n\nfunction preserveRefIdentity(x) {\n  return x;\n}\n\nexport {\n  create ,\n  access ,\n  update ,\n  R ,\n  get ,\n  make ,\n  set ,\n  destroysRefIdentity ,\n  preserveRefIdentity ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/References.res",
    "content": "// Test pervasive references\n\n@genType\nlet create = (x: int) => ref(x)\n\n@genType\nlet access = r => r.contents + 1\n\n@genType\nlet update = r => r.contents = r.contents + 1\n\n// Abstract version of references: works when conversion is required.\n\nmodule R: {\n  @genType\n  type t<'a>\n  let get: t<'a> => 'a\n  let make: 'a => t<'a>\n  let set: (t<'a>, 'a) => unit\n} = {\n  type t<'a> = ref<'a>\n  let get = r => r.contents\n  let make = ref\n  let set = (r, v) => r.contents = v\n}\n\n@genType\ntype t<'a> = R.t<'a>\n\n@genType\nlet get = R.get\n\n@gentype\nlet make = R.make\n\n@genType\nlet set = R.set\n\ntype requiresConversion = {x: int}\n\n// Careful: conversion makes a copy and destroys the reference identity.\n@genType\nlet destroysRefIdentity = (x: ref<requiresConversion>) => x\n\n// Using abstract references preserves the identity.\n@genType\nlet preserveRefIdentity = (x: R.t<requiresConversion>) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/RepeatedLabel.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction userData(param) {\n  return {\n          a: param.a,\n          b: param.b\n        };\n}\n\nconsole.log(userData);\n\nexport {\n  userData ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/RepeatedLabel.res",
    "content": "type userData = {\n  a: bool,\n  b: int,\n}\n\ntype tabState = {\n  a: bool,\n  b: int,\n  f: string,\n}\n\nlet userData = ({a, b}): userData => {a: a, b: b}\n\nJs.log(userData)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/RequireCond.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/RequireCond.res",
    "content": "@module\n@deprecated(\n  \"Please use this syntax to guarantee safe usage: [%requireCond(`gk, \\\"gk_name\\\", ConditionalModule)]\"\n)\nexternal make: (\n  @string [@as(\"qe.bool\") #qeBool | @as(\"gk\") #gk],\n  string,\n  string,\n) => Js.Nullable.t<'a> = \"requireCond\"\n\n@module\n@deprecated(\n  \"Please use this syntax to guarantee safe usage: [%requireCond(`gk, \\\"gk_name\\\", {\\\"true\\\": ModuleA, \\\"false\\\": ModuleB})]\"\n)\nexternal either: (\n  @string [@as(\"qe.bool\") #qeBool | @as(\"gk\") #gk],\n  string,\n  {\"true\": string, \"false\": string},\n) => 'b = \"requireCond\"\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Shadow.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction test(param) {\n  return \"a\";\n}\n\nfunction test$1(param) {\n  return \"a\";\n}\n\nvar M = {\n  test: test$1\n};\n\nexport {\n  test ,\n  M ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Shadow.res",
    "content": "@genType\nlet test = () => 3\n\n@genType\nlet test = () => \"a\"\n\nmodule M = {\n  @genType\n  let test = () => 3\n\n  let test = () => \"a\"\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestDeadExn.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as DeadExn from \"./DeadExn.js\";\n\nconsole.log({\n      RE_EXN_ID: DeadExn.Etoplevel\n    });\n\nexport {\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestDeadExn.res",
    "content": "Js.log(DeadExn.Etoplevel)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestEmitInnerModules.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar Inner = {\n  x: 34,\n  y: \"hello\"\n};\n\nvar Inner$1 = {\n  y: 44\n};\n\nvar Medium = {\n  Inner: Inner$1\n};\n\nvar Outer = {\n  Medium: Medium\n};\n\nexport {\n  Inner ,\n  Outer ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestEmitInnerModules.res",
    "content": "module Inner = {\n  @genType\n  let x = 34\n  @genType\n  let y = \"hello\"\n}\n\nmodule Outer = {\n  module Medium = {\n    module Inner = {\n      @genType\n      let y = 44\n    }\n  }\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestFirstClassModules.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction convert(x) {\n  return x;\n}\n\nfunction convertInterface(x) {\n  return x;\n}\n\nfunction convertRecord(x) {\n  return x;\n}\n\nfunction convertFirstClassModuleWithTypeEquations(x) {\n  return x;\n}\n\nexport {\n  convert ,\n  convertInterface ,\n  convertRecord ,\n  convertFirstClassModuleWithTypeEquations ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestFirstClassModules.res",
    "content": "@genType\nlet convert = (x: FirstClassModules.firstClassModule) => x\n\n@genType\nlet convertInterface = (x: FirstClassModulesInterface.firstClassModule) => x\n\n@genType\nlet convertRecord = (x: FirstClassModulesInterface.record) => x\n\nmodule type MT = {\n  type outer\n  let out: outer => outer\n\n  module Inner: {\n    type inner\n    let inn: inner => inner\n  }\n}\n\n@genType\ntype firstClassModuleWithTypeEquations<'i, 'o> = module(MT with\n  type Inner.inner = 'i\n  and type outer = 'o\n)\n\n@genType\nlet convertFirstClassModuleWithTypeEquations = (\n  type o i,\n  x: module(MT with type Inner.inner = i and type outer = o),\n) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestImmutableArray.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_Array from \"rescript/lib/es6/belt_Array.js\";\nimport * as Caml_array from \"rescript/lib/es6/caml_array.js\";\n\nfunction testImmutableArrayGet(arr) {\n  return Caml_array.get(arr, 3);\n}\n\nfunction testBeltArrayGet(arr) {\n  return Belt_Array.get(arr, 3);\n}\n\nfunction testBeltArraySet(arr) {\n  return Belt_Array.set(arr, 3, 4);\n}\n\nexport {\n  testImmutableArrayGet ,\n  testBeltArrayGet ,\n  testBeltArraySet ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestImmutableArray.res",
    "content": "@genType\nlet testImmutableArrayGet = arr => {\n  open ImmutableArray\n  arr[3]\n}\n\n/*\n   type error\n   let testImmutableArraySet = arr => ImmutableArray.(arr[3] = 4);\n */\n\nlet testBeltArrayGet = arr => {\n  open Belt\n  arr[3]\n}\n\nlet testBeltArraySet = arr => {\n  open Belt\n  arr[3] = 4\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestImport.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as TestImportGen from \"./TestImport.gen\";\n\nvar innerStuffContents = TestImportGen.innerStuffContents;\n\nvar innerStuffContentsAsEmptyObject = TestImportGen.innerStuffContentsAsEmptyObject;\n\nvar valueStartingWithUpperCaseLetter = TestImportGen.valueStartingWithUpperCaseLetter;\n\nvar defaultValue = TestImportGen.defaultValue;\n\nfunction make(prim0, prim1, prim2) {\n  return TestImportGen.make(prim0, prim1 !== undefined ? Caml_option.valFromOption(prim1) : undefined, prim2);\n}\n\nvar defaultValue2 = TestImportGen.defaultValue2;\n\nexport {\n  innerStuffContentsAsEmptyObject ,\n  innerStuffContents ,\n  valueStartingWithUpperCaseLetter ,\n  defaultValue ,\n  make ,\n  defaultValue2 ,\n  \n}\n/* innerStuffContents Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestImport.res",
    "content": "@genType.import((\n  \"./exportNestedValues\",\n  \"TopLevelClass.MiddleLevelElements.stuff.InnerStuff.innerStuffContents\",\n))\nexternal innerStuffContents: {\"x\": int} = \"innerStuffContents\"\n\n@genType.import((\n  \"./exportNestedValues\",\n  \"TopLevelClass.MiddleLevelElements.stuff.InnerStuff.innerStuffContents\",\n))\nexternal innerStuffContentsAsEmptyObject: {.} = \"innerStuffContentsAsEmptyObject\"\n\nlet innerStuffContents = innerStuffContents\n\n@genType.import((\"./exportNestedValues\", \"ValueStartingWithUpperCaseLetter\"))\nexternal valueStartingWithUpperCaseLetter: string = \"valueStartingWithUpperCaseLetter\"\n\n@genType.import((\"./exportNestedValues\", \"default\"))\nexternal defaultValue: int = \"defaultValue\"\n\n@genType\ntype message = {text: string}\n\n@genType.import((\"./MyBanner\", \"TopLevelClass.MiddleLevelElements.MyBannerInternal\"))\nexternal make: (~show: bool, ~message: option<message>=?, 'a) => React.element = \"make\"\n\nlet make = make\n\n@genType.import((\"./exportNestedValues\", \"default\"))\nexternal defaultValue2: int = \"defaultValue2\"\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestModuleAliases.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction testInner1(x) {\n  return x;\n}\n\nfunction testInner1Expanded(x) {\n  return x;\n}\n\nfunction testInner2(x) {\n  return x;\n}\n\nfunction testInner2Expanded(x) {\n  return x;\n}\n\nvar OtherFile;\n\nvar OtherFileAlias;\n\nvar OuterAlias;\n\nvar OtherFile1;\n\nvar Outer2;\n\nvar Inner2;\n\nexport {\n  OtherFile ,\n  OtherFileAlias ,\n  OuterAlias ,\n  OtherFile1 ,\n  Outer2 ,\n  Inner2 ,\n  testInner1 ,\n  testInner1Expanded ,\n  testInner2 ,\n  testInner2Expanded ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestModuleAliases.res",
    "content": "module OtherFile = ModuleAliases2\nmodule OtherFileAlias = OtherFile\n\n@genType\ntype record = OtherFile.record\n\n@genType\ntype record2 = OtherFileAlias.record\n\nmodule OuterAlias = OtherFile.Outer\n\n@genType\ntype outer = OtherFileAlias.Outer.outer\n\n@genType\ntype outer2 = OuterAlias.outer\n\nmodule OtherFile1 = OtherFile\nmodule Outer2 = OtherFile1.Outer\nmodule Inner2 = Outer2.Inner\n\n@genType\ntype my2 = Inner2.inner\n\n@genType\ntype inner1 = OtherFile.InnerAlias.inner\n\n@genType\ntype inner2 = OtherFile.Outer.Inner.inner\n\n@genType\nlet testInner1 = (x: inner1) => x\n\n@genType\nlet testInner1Expanded = (x: OtherFile.InnerAlias.inner) => x\n\n@genType\nlet testInner2 = (x: inner2) => x\n\n@genType\nlet testInner2Expanded = (x: OtherFile.Outer.Inner.inner) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestOptArg.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as OptArg from \"./OptArg.js\";\n\nconsole.log(OptArg.bar(undefined, 3, 3, 4));\n\nfunction foo(xOpt, y) {\n  var x = xOpt !== undefined ? xOpt : 3;\n  return x + y | 0;\n}\n\nfunction bar(param) {\n  var x = 12;\n  return x + 3 | 0;\n}\n\nconsole.log(bar);\n\nfunction notSuppressesOptArgs(xOpt, yOpt, zOpt, w) {\n  var x = xOpt !== undefined ? xOpt : 1;\n  var y = yOpt !== undefined ? yOpt : 2;\n  var z = zOpt !== undefined ? zOpt : 3;\n  return ((x + y | 0) + z | 0) + w | 0;\n}\n\nnotSuppressesOptArgs(undefined, undefined, undefined, 3);\n\nfunction liveSuppressesOptArgs(xOpt, yOpt, zOpt, w) {\n  var x = xOpt !== undefined ? xOpt : 1;\n  var y = yOpt !== undefined ? yOpt : 2;\n  var z = zOpt !== undefined ? zOpt : 3;\n  return ((x + y | 0) + z | 0) + w | 0;\n}\n\nliveSuppressesOptArgs(3, undefined, undefined, 3);\n\nexport {\n  foo ,\n  bar ,\n  notSuppressesOptArgs ,\n  liveSuppressesOptArgs ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestOptArg.res",
    "content": "Js.log(OptArg.bar(~z=3, ~y=3, 4))\n\nlet foo = (~x=3, y) => x + y\n\nlet bar = () => foo(~x=12, 3)\n\nJs.log(bar)\n\nlet notSuppressesOptArgs = (~x=1, ~y=2, ~z=3, w) => x + y + z + w\n\nlet _ = notSuppressesOptArgs(3)\n\n@live\nlet liveSuppressesOptArgs = (~x=1, ~y=2, ~z=3, w) => x + y + z + w\n\nlet _ = liveSuppressesOptArgs(~x=3, 3)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestPromise.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction convert(param) {\n  return param.then(function (param) {\n              return Promise.resolve({\n                          result: param.s\n                        });\n            });\n}\n\nexport {\n  convert ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TestPromise.res",
    "content": "@genType\ntype promise<'a> = Js.Promise.t<'a>\n\n@genType\ntype fromPayload = {\n  x: int,\n  s: string,\n}\n\n@genType\ntype toPayload = {result: string}\n\n@genType\nlet convert = Js.Promise.then_(({s}) => Js.Promise.resolve({result: s}))\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ToSuppress.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar toSuppress = 0;\n\nexport {\n  toSuppress ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ToSuppress.res",
    "content": "let toSuppress = 0\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TransitiveType1.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction convert(x) {\n  return x;\n}\n\nfunction convertAlias(x) {\n  return x;\n}\n\nexport {\n  convert ,\n  convertAlias ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TransitiveType1.res",
    "content": "@genType\nlet convert = (x: TransitiveType2.t2) => x\n\n@genType\nlet convertAlias = (x: TransitiveType2.t2Alias) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TransitiveType2.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction convertT2(x) {\n  return x;\n}\n\nexport {\n  convertT2 ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TransitiveType2.res",
    "content": "@genType\ntype t2 = option<TransitiveType3.t3>\n\n@genType\ntype t2Alias = t2\n\nlet convertT2 = (x: t2) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TransitiveType3.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction convertT3(x) {\n  return x;\n}\n\nexport {\n  convertT3 ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TransitiveType3.res",
    "content": "@genType\ntype t3 = {\n  i: int,\n  s: string,\n}\n\n@genType\nlet convertT3 = (x: t3) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Tuples.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_Option from \"rescript/lib/es6/belt_Option.js\";\n\nfunction testTuple(param) {\n  return param[0] + param[1] | 0;\n}\n\nfunction computeArea(param) {\n  return Math.imul(Math.imul(param[0], param[1]), Belt_Option.mapWithDefault(param[2], 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction computeAreaWithIdent(param) {\n  return Math.imul(Math.imul(param[0], param[1]), Belt_Option.mapWithDefault(param[2], 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction computeAreaNoConverters(param) {\n  return Math.imul(param[0], param[1]);\n}\n\nfunction coord2d(x, y) {\n  return [\n          x,\n          y,\n          undefined\n        ];\n}\n\nfunction getFirstName(param) {\n  return param[0].name;\n}\n\nfunction marry(first, second) {\n  return [\n          first,\n          second\n        ];\n}\n\nfunction changeSecondAge(param) {\n  var second = param[1];\n  return [\n          param[0],\n          {\n            name: second.name,\n            age: second.age + 1 | 0\n          }\n        ];\n}\n\nvar origin = [\n  0,\n  0,\n  0\n];\n\nexport {\n  testTuple ,\n  origin ,\n  computeArea ,\n  computeAreaWithIdent ,\n  computeAreaNoConverters ,\n  coord2d ,\n  getFirstName ,\n  marry ,\n  changeSecondAge ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Tuples.res",
    "content": "open Belt\n\n@genType\nlet testTuple = ((a, b)) => a + b\n\n@genType\ntype coord = (int, int, option<int>)\n\n@genType\nlet origin = (0, 0, Some(0))\n\n@genType\nlet computeArea = ((x, y, z)) => {\n  open Option\n  x * y * z->mapWithDefault(1, n => n)\n}\n\n@genType\nlet computeAreaWithIdent = ((x, y, z): coord) => {\n  open Option\n  x * y * z->mapWithDefault(1, n => n)\n}\n\n@genType\nlet computeAreaNoConverters = ((x: int, y: int)) => x * y\n\n@genType\nlet coord2d = (x, y) => (x, y, None)\n\n@genType\ntype coord2 = (int, int, Js.Nullable.t<int>)\n\n@genType\ntype person = {\n  name: string,\n  age: int,\n}\n\n@genType\ntype couple = (person, person)\n\n@genType\nlet getFirstName = ((first, _second): couple) => first.name\n\n@genType\nlet marry = (first, second): couple => (first, second)\n\n@genType\nlet changeSecondAge = ((first, second): couple): couple => (first, {...second, age: second.age + 1})\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TypeParams1.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar exportSomething = 10;\n\nexport {\n  exportSomething ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TypeParams1.res",
    "content": "@gentype\ntype ocaml_array<'a> = array<'a>\n\nlet exportSomething = 10\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TypeParams2.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar exportSomething = 10;\n\nexport {\n  exportSomething ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TypeParams2.res",
    "content": "@genType\ntype item = {id: int}\n\n@genType\ntype items = TypeParams1.ocaml_array<item>\n\n@genType\ntype items2 = array<item>\n\nlet exportSomething = 10\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TypeParams3.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction test(x) {\n  return x;\n}\n\nfunction test2(x) {\n  return x;\n}\n\nexport {\n  test ,\n  test2 ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/TypeParams3.res",
    "content": "@genType\nlet test = (x: TypeParams2.items) => x\n\n@genType\nlet test2 = (x: TypeParams2.items2) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Types.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Belt_Option from \"rescript/lib/es6/belt_Option.js\";\n\nfunction swap(tree) {\n  return {\n          label: tree.label,\n          left: Belt_Option.map(tree.right, swap),\n          right: Belt_Option.map(tree.left, swap)\n        };\n}\n\nfunction selfRecursiveConverter(param) {\n  return param.self;\n}\n\nfunction mutuallyRecursiveConverter(param) {\n  return param.b;\n}\n\nfunction testFunctionOnOptionsAsArgument(a, foo) {\n  return Curry._1(foo, a);\n}\n\nfunction jsonStringify(prim) {\n  return JSON.stringify(prim);\n}\n\nfunction testConvertNull(x) {\n  return x;\n}\n\nvar testMarshalFields = {\n  rec: \"rec\",\n  _switch: \"_switch\",\n  switch: \"switch\",\n  __: \"__\",\n  _: \"_\",\n  foo: \"foo\",\n  _foo: \"_foo\",\n  Uppercase: \"Uppercase\",\n  _Uppercase: \"_Uppercase\"\n};\n\nfunction setMatch(x) {\n  x.match = 34;\n  \n}\n\nfunction testInstantiateTypeParameter(x) {\n  return x;\n}\n\nvar currentTime = new Date();\n\nvar optFunction = (function (param) {\n    return 3;\n  });\n\nvar ObjectId = {};\n\nvar someIntList = {\n  hd: 1,\n  tl: {\n    hd: 2,\n    tl: {\n      hd: 3,\n      tl: /* [] */0\n    }\n  }\n};\n\nvar map = List.map;\n\nvar stringT = \"a\";\n\nvar jsStringT = \"a\";\n\nvar jsString2T = \"a\";\n\nvar i64Const = [\n  0,\n  34\n];\n\nexport {\n  someIntList ,\n  map ,\n  swap ,\n  selfRecursiveConverter ,\n  mutuallyRecursiveConverter ,\n  testFunctionOnOptionsAsArgument ,\n  stringT ,\n  jsStringT ,\n  jsString2T ,\n  jsonStringify ,\n  testConvertNull ,\n  testMarshalFields ,\n  setMatch ,\n  testInstantiateTypeParameter ,\n  currentTime ,\n  i64Const ,\n  optFunction ,\n  ObjectId ,\n  \n}\n/* currentTime Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Types.res",
    "content": "@genType\ntype t = int\n\n@genType\nlet someIntList = list{1, 2, 3}\n\n@genType\nlet map = List.map\n\n@genType\ntype typeWithVars<'x, 'y, 'z> =\n  | A('x, 'y)\n  | B('z)\n\n@genType\ntype rec tree = {\"label\": string, \"left\": option<tree>, \"right\": option<tree>}\n\n/*\n * A tree is a recursive type which does not require any conversion (JS object).\n * All is well.\n */\n@genType\nlet rec swap = (tree: tree): tree =>\n  {\n    \"label\": tree[\"label\"],\n    \"left\": tree[\"right\"]->Belt.Option.map(swap),\n    \"right\": tree[\"left\"]->Belt.Option.map(swap),\n  }\n\n@genType\ntype rec selfRecursive = {self: selfRecursive}\n\n@genType\ntype rec mutuallyRecursiveA = {b: mutuallyRecursiveB}\nand mutuallyRecursiveB = {a: mutuallyRecursiveA}\n\n/*\n * This is a recursive type which requires conversion (a record).\n * Only a shallow conversion of the top-level element is performed.\n */\n@genType\nlet selfRecursiveConverter = ({self}) => self\n\n/*\n * This is a mutually recursive type which requires conversion (a record).\n * Only a shallow conversion of the two top-level elements is performed.\n */\n@genType\nlet mutuallyRecursiveConverter = ({b}) => b\n\n@genType\nlet testFunctionOnOptionsAsArgument = (a: option<'a>, foo) => foo(a)\n\n@genType.opaque\ntype opaqueVariant =\n  | A\n  | B\n\n@genType\nlet stringT: String.t = \"a\"\n\n@genType\nlet jsStringT: Js.String.t = \"a\"\n\n@genType\nlet jsString2T: Js.String2.t = \"a\"\n\n@genType\ntype twice<'a> = ('a, 'a)\n\n@gentype\ntype genTypeMispelled = int\n\n@genType\ntype dictString = Js.Dict.t<string>\n\n@genType\nlet jsonStringify = Js.Json.stringify\n\n@genType\ntype nullOrString = Js.Null.t<string>\n\n@genType\ntype nullOrString2 = Js.null<string>\n\ntype record = {\n  i: int,\n  s: string,\n}\n\n@genType\nlet testConvertNull = (x: Js.Null.t<record>) => x\n\n@genType\ntype decorator<'a, 'b> = 'a => 'b constraint 'a = int constraint 'b = _ => _\n\n/* Bucklescript's marshaling rules. */\n@genType\ntype marshalFields = {\n  \"_rec\": string,\n  \"_switch\": string,\n  \"switch\": string,\n  \"__\": string,\n  \"___\": string,\n  \"foo__\": string,\n  \"_foo__\": string,\n  \"_Uppercase\": string,\n  \"_Uppercase__\": string,\n}\n\n@genType\nlet testMarshalFields: marshalFields = {\n  \"_rec\": \"rec\",\n  \"_switch\" /* reason keywords are not recognized */: \"_switch\",\n  \"switch\": \"switch\",\n  \"__\": \"__\",\n  \"___\": \"_\",\n  \"foo__\": \"foo\",\n  \"_foo__\": \"_foo\",\n  \"_Uppercase\": \"Uppercase\",\n  \"_Uppercase__\": \"_Uppercase\",\n}\n\n@genType\ntype marshalMutableField = {@set \"_match\": int}\n\n@genType\nlet setMatch = (x: marshalMutableField) => x[\"_match\"] = 34\n\ntype ocaml_array<'a> = array<'a>\n\n// This should be considered annotated automatically.\ntype someRecord = {id: int}\n\ntype instantiateTypeParameter = ocaml_array<someRecord>\n\n@genType\nlet testInstantiateTypeParameter = (x: instantiateTypeParameter) => x\n\n@genType @genType.as(\"Vector\")\ntype vector<'a> = ('a, 'a)\n\n@genType\ntype date = Js.Date.t\n\n@genType\nlet currentTime = Js.Date.make()\n\n@genType\ntype i64A = Int64.t\n\n@genType\ntype i64B = int64\n\n@genType\nlet i64Const: i64B = 34L\n\n@genType\nlet optFunction = Some(() => 3)\n\nmodule ObjectId: {\n  @genType\n  type t = int\n} = {\n  type t = int\n  let x = 1\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Unboxed.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction testV1(x) {\n  return x;\n}\n\nfunction r2Test(x) {\n  return x;\n}\n\nexport {\n  testV1 ,\n  r2Test ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Unboxed.res",
    "content": "@genType @ocaml.unboxed\ntype v1 = A(int)\n\n@genType @unboxed\ntype v2 = A(int)\n\n@genType\nlet testV1 = (x: v1) => x\n\n@genType @unboxed\ntype r1 = {x: int}\n\n@genType @ocaml.unboxed\ntype r2 = B({g: string})\n\n@genType\nlet r2Test = (x: r2) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Uncurried.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\n\nfunction uncurried0() {\n  return \"\";\n}\n\nfunction uncurried1(x) {\n  return String(x);\n}\n\nfunction uncurried2(x, y) {\n  return String(x) + y;\n}\n\nfunction uncurried3(x, y, z) {\n  return String(x) + (y + String(z));\n}\n\nfunction curried3(x, y, z) {\n  return String(x) + (y + String(z));\n}\n\nfunction callback(cb) {\n  return String(Curry._1(cb, undefined));\n}\n\nfunction callback2(auth) {\n  return Curry._1(auth.login, undefined);\n}\n\nfunction callback2U(auth) {\n  return auth.loginU();\n}\n\nfunction sumU(n, m) {\n  console.log(\"sumU 2nd arg\", m, \"result\", n + m | 0);\n  \n}\n\nfunction sumU2(n) {\n  return function (m) {\n    console.log(\"sumU2 2nd arg\", m, \"result\", n + m | 0);\n    \n  };\n}\n\nfunction sumCurried(n) {\n  console.log(\"sumCurried 1st arg\", n);\n  return function (m) {\n    console.log(\"sumCurried 2nd arg\", m, \"result\", n + m | 0);\n    \n  };\n}\n\nfunction sumLblCurried(s, n) {\n  console.log(s, \"sumLblCurried 1st arg\", n);\n  return function (m) {\n    console.log(\"sumLblCurried 2nd arg\", m, \"result\", n + m | 0);\n    \n  };\n}\n\nexport {\n  uncurried0 ,\n  uncurried1 ,\n  uncurried2 ,\n  uncurried3 ,\n  curried3 ,\n  callback ,\n  callback2 ,\n  callback2U ,\n  sumU ,\n  sumU2 ,\n  sumCurried ,\n  sumLblCurried ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Uncurried.res",
    "content": "@genType\ntype u0 = (. unit) => string\n\n@genType\ntype u1 = (. int) => string\n\n@genType\ntype u2 = (. int, string) => string\n\n@genType\ntype u3 = (. int, string, int) => string\n\n@genType\nlet uncurried0 = (. ()) => \"\"\n\n@genType\nlet uncurried1 = (. x) => x |> string_of_int\n\n@genType\nlet uncurried2 = (. x, y) => (x |> string_of_int) ++ y\n\n@genType\nlet uncurried3 = (. x, y, z) => (x |> string_of_int) ++ (y ++ (z |> string_of_int))\n\n@genType\nlet curried3 = (x, y, z) => (x |> string_of_int) ++ (y ++ (z |> string_of_int))\n\n@genType\nlet callback = cb => cb() |> string_of_int\n\ntype auth = {login: unit => string}\ntype authU = {loginU: (. unit) => string}\n\n@genType\nlet callback2 = auth => auth.login()\n\n@genType\nlet callback2U = auth => auth.loginU(.)\n\n@genType\nlet sumU = (. n, m) => Js.log4(\"sumU 2nd arg\", m, \"result\", n + m)\n\n@genType\nlet sumU2 = (. n, . m) => Js.log4(\"sumU2 2nd arg\", m, \"result\", n + m)\n\n@genType\nlet sumCurried = n => {\n  Js.log2(\"sumCurried 1st arg\", n)\n  m => Js.log4(\"sumCurried 2nd arg\", m, \"result\", n + m)\n}\n\n@genType\nlet sumLblCurried = (s: string, ~n) => {\n  Js.log3(s, \"sumLblCurried 1st arg\", n)\n  (~m) => Js.log4(\"sumLblCurried 2nd arg\", m, \"result\", n + m)\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Unison.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction group(breakOpt, doc) {\n  var $$break = breakOpt !== undefined ? breakOpt : /* IfNeed */0;\n  return {\n          break: $$break,\n          doc: doc\n        };\n}\n\nfunction fits(_w, _stack) {\n  while(true) {\n    var stack = _stack;\n    var w = _w;\n    if (w < 0) {\n      return false;\n    }\n    if (!stack) {\n      return true;\n    }\n    _stack = stack._1;\n    _w = w - stack._0.doc.length | 0;\n    continue ;\n  };\n}\n\nfunction toString(width, stack) {\n  if (!stack) {\n    return \"\";\n  }\n  var stack$1 = stack._1;\n  var match = stack._0;\n  var doc = match.doc;\n  switch (match.break) {\n    case /* IfNeed */0 :\n        return (\n                fits(width, stack$1) ? \"fits \" : \"no \"\n              ) + toString(width - 1 | 0, stack$1);\n    case /* Never */1 :\n        return \"never \" + (doc + toString(width - 1 | 0, stack$1));\n    case /* Always */2 :\n        return \"always \" + (doc + toString(width - 1 | 0, stack$1));\n    \n  }\n}\n\ntoString(80, /* Empty */0);\n\nvar $$break = /* Never */1;\n\ntoString(80, /* Cons */{\n      _0: {\n        break: $$break,\n        doc: \"abc\"\n      },\n      _1: /* Empty */0\n    });\n\nvar $$break$1 = /* Always */2;\n\ntoString(80, /* Cons */{\n      _0: {\n        break: $$break$1,\n        doc: \"d\"\n      },\n      _1: /* Empty */0\n    });\n\nexport {\n  group ,\n  fits ,\n  toString ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Unison.res",
    "content": "// Exmple of several DCE checks operating in unison\n\ntype break =\n  | IfNeed\n  | Never\n  | Always\n\ntype t = {\n  break: break,\n  doc: string,\n}\n\ntype rec stack =\n  | Empty\n  | Cons(t, stack)\n\nlet group = (~break=IfNeed, doc) => {break: break, doc: doc}\n\nlet rec fits = (w, stack) =>\n  switch stack {\n  | _ if w < 0 => false\n  | Empty => true\n  | Cons({doc}, stack) => fits(w - String.length(doc), stack)\n  }\n\nlet rec toString = (~width, stack) =>\n  switch stack {\n  | Cons({break, doc}, stack) =>\n    switch break {\n    | IfNeed => (fits(width, stack) ? \"fits \" : \"no \") ++ (stack |> toString(~width=width - 1))\n    | Never => \"never \" ++ (doc ++ (stack |> toString(~width=width - 1)))\n    | Always => \"always \" ++ (doc ++ (stack |> toString(~width=width - 1)))\n    }\n  | Empty => \"\"\n  }\n\ntoString(~width=80, Empty)\ntoString(~width=80, Cons(group(~break=Never, \"abc\"), Empty))\ntoString(~width=80, Cons(group(~break=Always, \"d\"), Empty))\n"
  },
  {
    "path": "analysis/examples/larger-project/src/UseImportJsValue.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction useGetProp(x) {\n  return x.getProp() + 1 | 0;\n}\n\nfunction useTypeImportedInOtherModule(x) {\n  return x;\n}\n\nexport {\n  useGetProp ,\n  useTypeImportedInOtherModule ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/UseImportJsValue.res",
    "content": "@genType\nlet useGetProp = (x: ImportJsValue.AbsoluteValue.t) => x->ImportJsValue.AbsoluteValue.getProp + 1\n\n@genType\nlet useTypeImportedInOtherModule = (x: ImportJsValue.stringFunction) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Variants.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction isWeekend(x) {\n  if (x === \"sunday\") {\n    return true;\n  } else {\n    return x === \"saturday\";\n  }\n}\n\nfunction onlySunday(param) {\n  \n}\n\nfunction swap(x) {\n  if (x === \"sunday\") {\n    return \"saturday\";\n  } else {\n    return \"sunday\";\n  }\n}\n\nfunction testConvert(x) {\n  return x;\n}\n\nfunction testConvert2(x) {\n  return x;\n}\n\nfunction testConvert3(x) {\n  return x;\n}\n\nfunction testConvert2to3(x) {\n  return x;\n}\n\nfunction id1(x) {\n  return x;\n}\n\nfunction id2(x) {\n  return x;\n}\n\nfunction polyWithOpt(foo) {\n  if (foo === \"bar\") {\n    return ;\n  } else if (foo !== \"baz\") {\n    return {\n            NAME: \"One\",\n            VAL: foo\n          };\n  } else {\n    return {\n            NAME: \"Two\",\n            VAL: 1\n          };\n  }\n}\n\nfunction restResult1(x) {\n  return x;\n}\n\nfunction restResult2(x) {\n  return x;\n}\n\nfunction restResult3(x) {\n  return x;\n}\n\nvar monday = \"monday\";\n\nvar saturday = \"saturday\";\n\nvar sunday = \"sunday\";\n\nvar fortytwoOK = \"fortytwo\";\n\nvar fortytwoBAD = \"fortytwo\";\n\nexport {\n  isWeekend ,\n  monday ,\n  saturday ,\n  sunday ,\n  onlySunday ,\n  swap ,\n  testConvert ,\n  fortytwoOK ,\n  fortytwoBAD ,\n  testConvert2 ,\n  testConvert3 ,\n  testConvert2to3 ,\n  id1 ,\n  id2 ,\n  polyWithOpt ,\n  restResult1 ,\n  restResult2 ,\n  restResult3 ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/Variants.res",
    "content": "@genType\ntype weekday = [\n  | #monday\n  | #tuesday\n  | #wednesday\n  | #thursday\n  | #friday\n  | #saturday\n  | #sunday\n]\n\n@genType\nlet isWeekend = (x: weekday) =>\n  switch x {\n  | #saturday\n  | #sunday => true\n  | _ => false\n  }\n\n@genType\nlet monday = #monday\n@genType\nlet saturday = #saturday\n@genType\nlet sunday = #sunday\n\n@genType\nlet onlySunday = (_: [#sunday]) => ()\n\n@genType\nlet swap = x =>\n  switch x {\n  | #sunday => #saturday\n  | #saturday => #sunday\n  }\n\n@genType\ntype testGenTypeAs = [\n  | @genType.as(\"type\") #type_\n  | @genType.as(\"module\") #module_\n  | @genType.as(\"42\") #fortytwo\n]\n\n@genType\nlet testConvert = (x: testGenTypeAs) => x\n\n@genType\nlet fortytwoOK: testGenTypeAs = #fortytwo\n\n/* Exporting this is BAD: type inference means it's not mapped to \"42\" */\n@genType\nlet fortytwoBAD = #fortytwo\n\n@genType\ntype testGenTypeAs2 = [\n  | @genType.as(\"type\") #type_\n  | @genType.as(\"module\") #module_\n  | @genType.as(\"42\") #fortytwo\n]\n\n/* Since testGenTypeAs2 is the same type as testGenTypeAs1,\n share the conversion map. */\n@genType\nlet testConvert2 = (x: testGenTypeAs2) => x\n\n@genType\ntype testGenTypeAs3 = [\n  | @genType.as(\"type\") #type_\n  | @genType.as(\"module\") #module_\n  | @genType.as(\"THIS IS DIFFERENT\") #fortytwo\n]\n\n/* Since testGenTypeAs3 has a different representation:\n use a new conversion map. */\n@genType\nlet testConvert3 = (x: testGenTypeAs3) => x\n\n/* This converts between testGenTypeAs2 and testGenTypeAs3 */\n@genType\nlet testConvert2to3 = (x: testGenTypeAs2): testGenTypeAs3 => x\n\n@genType\ntype x1 = [#x | @genType.as(\"same\") #x1]\n\n@genType\ntype x2 = [#x | @genType.as(\"same\") #x2]\n\n@genType\nlet id1 = (x: x1) => x\n\n@genType\nlet id2 = (x: x2) => x\n\n@genType @genType.as(\"type\")\ntype type_ = | @genType.as(\"type\") Type\n\n@genType\nlet polyWithOpt = foo => foo === \"bar\" ? None : foo !== \"baz\" ? 
Some(#One(foo)) : Some(#Two(1))\n\n@genType\ntype result1<'a, 'b> =\n  | Ok('a)\n  | Error('b)\n\n@genType\ntype result2<'a, 'b> = result<'a, 'b>\n\n@genType\ntype result3<'a, 'b> = Belt.Result.t<'a, 'b>\n\n@genType\nlet restResult1 = (x: result1<int, string>) => x\n\n@genType\nlet restResult2 = (x: result2<int, string>) => x\n\n@genType\nlet restResult3 = (x: result3<int, string>) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/VariantsWithPayload.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction testWithPayload(x) {\n  return x;\n}\n\nfunction printVariantWithPayload(x) {\n  if (typeof x !== \"object\") {\n    if (x === \"a\") {\n      console.log(\"printVariantWithPayload: a\");\n    } else if (x === \"b\") {\n      console.log(\"printVariantWithPayload: b\");\n    } else if (x === \"Half\") {\n      console.log(\"printVariantWithPayload: Half\");\n    } else if (x === \"True\") {\n      console.log(\"printVariantWithPayload: True\");\n    } else {\n      console.log(\"printVariantWithPayload: Twenty\");\n    }\n    return ;\n  }\n  var payload = x.VAL;\n  console.log(\"printVariantWithPayload x:\", payload.x, \"y:\", payload.y);\n  \n}\n\nfunction testManyPayloads(x) {\n  return x;\n}\n\nfunction printManyPayloads(x) {\n  var variant = x.NAME;\n  if (variant === \"two\") {\n    var match = x.VAL;\n    console.log(\"printManyPayloads two:\", match[0], match[1]);\n    return ;\n  }\n  if (variant === \"three\") {\n    var payload = x.VAL;\n    console.log(\"printManyPayloads x:\", payload.x, \"y:\", payload.y);\n    return ;\n  }\n  console.log(\"printManyPayloads one:\", x.VAL);\n  \n}\n\nfunction testSimpleVariant(x) {\n  return x;\n}\n\nfunction testVariantWithPayloads(x) {\n  return x;\n}\n\nfunction printVariantWithPayloads(x) {\n  if (typeof x === \"number\") {\n    console.log(\"printVariantWithPayloads\", \"A\");\n    return ;\n  }\n  switch (x.TAG | 0) {\n    case /* B */0 :\n        console.log(\"printVariantWithPayloads\", \"B(\" + (String(x._0) + \")\"));\n        return ;\n    case /* C */1 :\n        console.log(\"printVariantWithPayloads\", \"C(\" + (String(x._0) + (\", \" + (String(x._1) + \")\"))));\n        return ;\n    case /* D */2 :\n        var match = x._0;\n        console.log(\"printVariantWithPayloads\", \"D((\" + (String(match[0]) + (\", \" + (String(match[1]) + \"))\"))));\n        return ;\n    case /* E */3 :\n        
console.log(\"printVariantWithPayloads\", \"E(\" + (String(x._0) + (\", \" + (x._1 + (\", \" + (String(x._2) + \")\"))))));\n        return ;\n    \n  }\n}\n\nfunction testVariant1Int(x) {\n  return x;\n}\n\nfunction testVariant1Object(x) {\n  return x;\n}\n\nexport {\n  testWithPayload ,\n  printVariantWithPayload ,\n  testManyPayloads ,\n  printManyPayloads ,\n  testSimpleVariant ,\n  testVariantWithPayloads ,\n  printVariantWithPayloads ,\n  testVariant1Int ,\n  testVariant1Object ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/VariantsWithPayload.res",
    "content": "type payload = {\n  x: int,\n  y: option<string>,\n}\n\ntype withPayload = [\n  | #a\n  | @genType.as(\"bRenamed\") #b\n  | @genType.as(true) #True\n  | @genType.as(20) #Twenty\n  | @genType.as(0.5) #Half\n  | #c(payload)\n]\n\n@genType\nlet testWithPayload = (x: withPayload) => x\n\n@genType\nlet printVariantWithPayload = (x: withPayload) =>\n  switch x {\n  | #a => Js.log(\"printVariantWithPayload: a\")\n  | #b => Js.log(\"printVariantWithPayload: b\")\n  | #True => Js.log(\"printVariantWithPayload: True\")\n  | #Twenty => Js.log(\"printVariantWithPayload: Twenty\")\n  | #Half => Js.log(\"printVariantWithPayload: Half\")\n  | #c(payload) => Js.log4(\"printVariantWithPayload x:\", payload.x, \"y:\", payload.y)\n  }\n\n@genType\ntype manyPayloads = [\n  | @genType.as(\"oneRenamed\") #one(int)\n  | @genType.as(2) #two(string, string)\n  | #three(payload)\n]\n\n@genType\nlet testManyPayloads = (x: manyPayloads) => x\n\n@genType\nlet printManyPayloads = (x: manyPayloads) =>\n  switch x {\n  | #one(n) => Js.log2(\"printManyPayloads one:\", n)\n  | #two(s1, s2) => Js.log3(\"printManyPayloads two:\", s1, s2)\n  | #three(payload) => Js.log4(\"printManyPayloads x:\", payload.x, \"y:\", payload.y)\n  }\n\n@genType\ntype simpleVariant =\n  | A\n  | B\n  | C\n\n@genType\nlet testSimpleVariant = (x: simpleVariant) => x\n\n@genType\ntype variantWithPayloads =\n  | @genType.as(\"ARenamed\") A\n  | B(int)\n  | C(int, int)\n  | D((int, int))\n  | E(int, string, int)\n\n@genType\nlet testVariantWithPayloads = (x: variantWithPayloads) => x\n\n@genType\nlet printVariantWithPayloads = x =>\n  switch x {\n  | A => Js.log2(\"printVariantWithPayloads\", \"A\")\n  | B(x) => Js.log2(\"printVariantWithPayloads\", \"B(\" ++ (string_of_int(x) ++ \")\"))\n  | C(x, y) =>\n    Js.log2(\n      \"printVariantWithPayloads\",\n      \"C(\" ++ (string_of_int(x) ++ (\", \" ++ (string_of_int(y) ++ \")\"))),\n    )\n  | D((x, y)) =>\n    Js.log2(\n      \"printVariantWithPayloads\",\n    
  \"D((\" ++ (string_of_int(x) ++ (\", \" ++ (string_of_int(y) ++ \"))\"))),\n    )\n  | E(x, s, y) =>\n    Js.log2(\n      \"printVariantWithPayloads\",\n      \"E(\" ++ (string_of_int(x) ++ (\", \" ++ (s ++ (\", \" ++ (string_of_int(y) ++ \")\"))))),\n    )\n  }\n\n@genType\ntype variant1Int = R(int)\n\n@genType\nlet testVariant1Int = (x: variant1Int) => x\n\n@genType\ntype variant1Object = R(payload)\n\n@genType\nlet testVariant1Object = (x: variant1Object) => x\n"
  },
  {
    "path": "analysis/examples/larger-project/src/arg_helper.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Printf from \"./printf.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Printexc from \"rescript/lib/es6/printexc.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nfunction fatal(err) {\n  console.error(err);\n  return Pervasives.exit(2);\n}\n\nfunction Make(S) {\n  var $$default = function (v) {\n    return {\n            base_default: v,\n            base_override: S.Key.$$Map.empty,\n            user_default: undefined,\n            user_override: S.Key.$$Map.empty\n          };\n  };\n  var set_base_default = function (value, t) {\n    return {\n            base_default: value,\n            base_override: t.base_override,\n            user_default: t.user_default,\n            user_override: t.user_override\n          };\n  };\n  var add_base_override = function (key, value, t) {\n    return {\n            base_default: t.base_default,\n            base_override: Curry._3(S.Key.$$Map.add, key, value, t.base_override),\n            user_default: t.user_default,\n            user_override: t.user_override\n          };\n  };\n  var reset_base_overrides = function (t) {\n    return {\n            base_default: t.base_default,\n            base_override: S.Key.$$Map.empty,\n            user_default: t.user_default,\n            user_override: t.user_override\n          };\n  };\n  var set_user_default = function (value, t) {\n    return {\n            base_default: t.base_default,\n            base_override: t.base_override,\n            user_default: Caml_option.some(value),\n            user_override: 
t.user_override\n          };\n  };\n  var add_user_override = function (key, value, t) {\n    return {\n            base_default: t.base_default,\n            base_override: t.base_override,\n            user_default: t.user_default,\n            user_override: Curry._3(S.Key.$$Map.add, key, value, t.user_override)\n          };\n  };\n  var Parse_failure = /* @__PURE__ */Caml_exceptions.create(\"Arg_helper.Make(S).Parse_failure\");\n  var parse_exn = function (str, update) {\n    var values = List.filter(function (param) {\n            return \"\" !== param;\n          })($$String.split_on_char(/* ',' */44, str));\n    var parsed = List.fold_left((function (acc, value) {\n            var equals;\n            try {\n              equals = $$String.index(value, /* '=' */61);\n            }\n            catch (raw_exn){\n              var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n              if (exn.RE_EXN_ID === \"Not_found\") {\n                var exit = 0;\n                var value$1;\n                try {\n                  value$1 = Curry._1(S.Value.of_string, value);\n                  exit = 2;\n                }\n                catch (raw_exn$1){\n                  var exn$1 = Caml_js_exceptions.internalToOCamlException(raw_exn$1);\n                  throw {\n                        RE_EXN_ID: Parse_failure,\n                        _1: exn$1,\n                        Error: new Error()\n                      };\n                }\n                if (exit === 2) {\n                  return set_user_default(value$1, acc);\n                }\n                \n              } else {\n                throw exn;\n              }\n            }\n            var length = value.length;\n            if (!(equals >= 0 && equals < length)) {\n              throw {\n                    RE_EXN_ID: \"Assert_failure\",\n                    _1: [\n                      \"arg_helper.res\",\n                      84,\n                      8\n       
             ],\n                    Error: new Error()\n                  };\n            }\n            if (equals === 0) {\n              throw {\n                    RE_EXN_ID: Parse_failure,\n                    _1: {\n                      RE_EXN_ID: \"Failure\",\n                      _1: \"Missing key in argument specification\"\n                    },\n                    Error: new Error()\n                  };\n            }\n            var key = $$String.sub(value, 0, equals);\n            var key$1;\n            try {\n              key$1 = Curry._1(S.Key.of_string, key);\n            }\n            catch (raw_exn$2){\n              var exn$2 = Caml_js_exceptions.internalToOCamlException(raw_exn$2);\n              throw {\n                    RE_EXN_ID: Parse_failure,\n                    _1: exn$2,\n                    Error: new Error()\n                  };\n            }\n            var value$2 = $$String.sub(value, equals + 1 | 0, (length - equals | 0) - 1 | 0);\n            var value$3;\n            try {\n              value$3 = Curry._1(S.Value.of_string, value$2);\n            }\n            catch (raw_exn$3){\n              var exn$3 = Caml_js_exceptions.internalToOCamlException(raw_exn$3);\n              throw {\n                    RE_EXN_ID: Parse_failure,\n                    _1: exn$3,\n                    Error: new Error()\n                  };\n            }\n            return add_user_override(key$1, value$3, acc);\n          }), update.contents, values);\n    update.contents = parsed;\n    \n  };\n  var parse = function (str, help_text, update) {\n    try {\n      parse_exn(str, update);\n      return ;\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === Parse_failure) {\n        return fatal(Curry._2(Printf.sprintf(\"%s: %s\"), Printexc.to_string(exn._1), help_text));\n      }\n      throw exn;\n    }\n  };\n  var parse_no_error = function (str, update) 
{\n    try {\n      parse_exn(str, update);\n      return /* Ok */0;\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === Parse_failure) {\n        return /* Parse_failed */{\n                _0: exn._1\n              };\n      }\n      throw exn;\n    }\n  };\n  var get = function (key, parsed) {\n    try {\n      return Curry._2(S.Key.$$Map.find, key, parsed.user_override);\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === \"Not_found\") {\n        var value = parsed.user_default;\n        if (value !== undefined) {\n          return Caml_option.valFromOption(value);\n        }\n        try {\n          return Curry._2(S.Key.$$Map.find, key, parsed.base_override);\n        }\n        catch (raw_exn$1){\n          var exn$1 = Caml_js_exceptions.internalToOCamlException(raw_exn$1);\n          if (exn$1.RE_EXN_ID === \"Not_found\") {\n            return parsed.base_default;\n          }\n          throw exn$1;\n        }\n      } else {\n        throw exn;\n      }\n    }\n  };\n  return {\n          $$default: $$default,\n          set_base_default: set_base_default,\n          add_base_override: add_base_override,\n          reset_base_overrides: reset_base_overrides,\n          set_user_default: set_user_default,\n          add_user_override: add_user_override,\n          Parse_failure: Parse_failure,\n          parse_exn: parse_exn,\n          parse: parse,\n          parse_no_error: parse_no_error,\n          get: get\n        };\n}\n\nexport {\n  fatal ,\n  Make ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/arg_helper.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Pierre Chambart, OCamlPro */\n/* Mark Shinwell and Leo White, Jane Street Europe */\n/*  */\n/* Copyright 2015--2016 OCamlPro SAS */\n/* Copyright 2015--2016 Jane Street Group LLC */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\n@raises(exit)\nlet fatal = err => {\n  prerr_endline(err)\n  exit(2)\n}\n\nmodule Make = (\n  S: {\n    module Key: {\n      type t\n      let of_string: string => t\n      module Map: Map.S with type key = t\n    }\n\n    module Value: {\n      type t\n      let of_string: string => t\n    }\n  },\n) => {\n  type parsed = {\n    base_default: S.Value.t,\n    base_override: S.Key.Map.t<S.Value.t>,\n    user_default: option<S.Value.t>,\n    user_override: S.Key.Map.t<S.Value.t>,\n  }\n\n  let default = v => {\n    base_default: v,\n    base_override: S.Key.Map.empty,\n    user_default: None,\n    user_override: S.Key.Map.empty,\n  }\n\n  let set_base_default = (value, t) => {...t, base_default: value}\n\n  let add_base_override = (key, value, t) => {\n    ...t,\n    base_override: S.Key.Map.add(key, value, t.base_override),\n  }\n\n  let reset_base_overrides = t => {...t, base_override: S.Key.Map.empty}\n\n  let set_user_default = (value, t) => {...t, user_default: Some(value)}\n\n  let add_user_override = (key, value, t) => {\n    ...t,\n    user_override: S.Key.Map.add(key, value, t.user_override),\n  }\n\n  exception Parse_failure(exn)\n\n  @raises([Invalid_argument, Parse_failure])\n  let parse_exn = (str, ~update) => {\n    /* Is the removal of empty chunks really relevant here? */\n    /* (It has been added to mimic the old Misc.String.split.) 
*/\n    let values = String.split_on_char(',', str) |> List.filter(\\\"<>\"(\"\"))\n    let parsed = List.fold_left((acc, value) =>\n      switch String.index(value, '=') {\n      | exception Not_found =>\n        switch S.Value.of_string(value) {\n        | value => set_user_default(value, acc)\n        | exception exn => raise(Parse_failure(exn))\n        }\n      | equals =>\n        let key_value_pair = value\n        let length = String.length(key_value_pair)\n        assert (equals >= 0 && equals < length)\n        if equals == 0 {\n          raise(Parse_failure(Failure(\"Missing key in argument specification\")))\n        }\n        let key = {\n          let key = String.sub(key_value_pair, 0, equals)\n          try S.Key.of_string(key) catch {\n          | exn => raise(Parse_failure(exn))\n          }\n        }\n\n        let value = {\n          let value = String.sub(key_value_pair, equals + 1, length - equals - 1)\n\n          try S.Value.of_string(value) catch {\n          | exn => raise(Parse_failure(exn))\n          }\n        }\n\n        add_user_override(key, value, acc)\n      }\n    , update.contents, values)\n\n    update := parsed\n  }\n\n  @raises([Invalid_argument, exit])\n  let parse = (str, help_text, update) =>\n    switch parse_exn(str, ~update) {\n    | () => ()\n    | exception Parse_failure(exn) =>\n      fatal(Printf.sprintf(\"%s: %s\", Printexc.to_string(exn), help_text))\n    }\n\n  type parse_result =\n    | Ok\n    | Parse_failed(exn)\n\n  @raises(Invalid_argument)\n  let parse_no_error = (str, update) =>\n    switch parse_exn(str, ~update) {\n    | () => Ok\n    | exception Parse_failure(exn) => Parse_failed(exn)\n    }\n\n  let get = (~key, parsed) =>\n    switch S.Key.Map.find(key, parsed.user_override) {\n    | value => value\n    | exception Not_found =>\n      switch parsed.user_default {\n      | Some(value) => value\n      | None =>\n        switch S.Key.Map.find(key, parsed.base_override) {\n        | value => value\n   
     | exception Not_found => parsed.base_default\n        }\n      }\n    }\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ast_helper.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as $$Location from \"./location.js\";\nimport * as Syntaxerr from \"./syntaxerr.js\";\n\nthrow {\n      RE_EXN_ID: \"Assert_failure\",\n      _1: [\n        \"ast_helper.res\",\n        23,\n        21\n      ],\n      Error: new Error()\n    };\n\nexport {\n  docstring_body ,\n  docstring_loc ,\n  text_attr ,\n  empty_docs ,\n  add_docs_attrs ,\n  add_text_attrs ,\n  empty_info ,\n  add_info_attrs ,\n  default_loc ,\n  with_default_loc ,\n  Const ,\n  Typ ,\n  Pat ,\n  Exp ,\n  Mty ,\n  Mod ,\n  Sig ,\n  Str ,\n  Cl ,\n  Cty ,\n  Ctf ,\n  Cf ,\n  Val ,\n  Md ,\n  Mtd ,\n  Mb ,\n  Opn ,\n  Incl ,\n  Vb ,\n  Ci ,\n  Type ,\n  Te ,\n  Csig ,\n  Cstr ,\n  \n}\n/* docstring_body Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/ast_helper.res",
    "content": "@@ocaml.text(\n  /* ************************************************************************ */\n  /*  */\n  /* OCaml */\n  /*  */\n  /* Alain Frisch, LexiFi */\n  /*  */\n  /* Copyright 2012 Institut National de Recherche en Informatique et */\n  /* en Automatique. */\n  /*  */\n  /* All rights reserved.  This file is distributed under the terms of */\n  /* the GNU Lesser General Public License version 2.1, with the */\n  /* special exception on linking described in the file LICENSE. */\n  /*  */\n  /* ************************************************************************ */\n\n  \" Helpers to produce Parsetree fragments \"\n)\n\nopen Asttypes\nopen Parsetree\nopen Docstrings\nlet docstring_body = assert false\nlet docstring_loc = assert false\nlet text_attr = assert false\nlet empty_docs = assert false\nlet add_docs_attrs = assert false\nlet add_text_attrs = assert false\nlet empty_info = assert false\nlet add_info_attrs = assert false\n\ntype lid = loc<Longident.t>\ntype str = loc<string>\ntype loc = Location.t\ntype attrs = list<attribute>\n\nlet default_loc = ref(Location.none)\n\n@raises(genericException)\nlet with_default_loc = (l, f) => {\n  let old = default_loc.contents\n  default_loc := l\n  try {\n    let r = f()\n    default_loc := old\n    r\n  } catch {\n  | exn =>\n    default_loc := old\n    raise(exn)\n  }\n}\n\nmodule Const = {\n  let integer = (~suffix=?, i) => Pconst_integer(i, suffix)\n  let int = (~suffix=?, i) => integer(~suffix?, string_of_int(i))\n  let int32 = (~suffix='l', i) => integer(~suffix, Int32.to_string(i))\n  let int64 = (~suffix='L', i) => integer(~suffix, Int64.to_string(i))\n  let nativeint = (~suffix='n', i) => integer(~suffix, Nativeint.to_string(i))\n  let float = (~suffix=?, f) => Pconst_float(f, suffix)\n  let char = c => Pconst_char(c)\n  let string = (~quotation_delimiter=?, s) => Pconst_string(s, quotation_delimiter)\n}\n\nmodule Typ = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, d) => 
{\n    ptyp_desc: d,\n    ptyp_loc: loc,\n    ptyp_attributes: attrs,\n  }\n  let attr = (d, a) => {...d, ptyp_attributes: \\\"@\"(d.ptyp_attributes, list{a})}\n\n  let any = (~loc=?, ~attrs=?, ()) => mk(~loc?, ~attrs?, Ptyp_any)\n  let var = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ptyp_var(a))\n  let arrow = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Ptyp_arrow(a, b, c))\n  let tuple = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ptyp_tuple(a))\n  let constr = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ptyp_constr(a, b))\n  let object_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ptyp_object(a, b))\n  let class_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ptyp_class(a, b))\n  let alias = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ptyp_alias(a, b))\n  let variant = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Ptyp_variant(a, b, c))\n  let poly = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ptyp_poly(a, b))\n  let package = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ptyp_package(a, b))\n  let extension = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ptyp_extension(a))\n\n  let force_poly = t =>\n    switch t.ptyp_desc {\n    | Ptyp_poly(_) => t\n    | _ => poly(~loc=t.ptyp_loc, list{}, t)\n    } /* -> ghost? 
*/\n\n  @raises(Error)\n  let varify_constructors = (var_names, t) => {\n    @raises(Error)\n    let check_variable = (vl, loc, v) =>\n      if List.mem(v, vl) {\n        raise({\n          open Syntaxerr\n          Error(Variable_in_scope(loc, v))\n        })\n      }\n    let var_names = List.map(v => v.txt, var_names)\n\n    @raises(Error)\n    let rec loop = t => {\n      let desc = switch t.ptyp_desc {\n      | Ptyp_any => Ptyp_any\n      | Ptyp_var(x) =>\n        check_variable(var_names, t.ptyp_loc, x)\n        Ptyp_var(x)\n      | Ptyp_arrow(label, core_type, core_type') =>\n        Ptyp_arrow(label, loop(core_type), loop(core_type'))\n      | Ptyp_tuple(lst) => Ptyp_tuple(List.map(loop, lst))\n      | Ptyp_constr({txt: Longident.Lident(s)}, list{}) if List.mem(s, var_names) => Ptyp_var(s)\n      | Ptyp_constr(longident, lst) => Ptyp_constr(longident, List.map(loop, lst))\n      | Ptyp_object(lst, o) => Ptyp_object(List.map(loop_object_field, lst), o)\n      | Ptyp_class(longident, lst) => Ptyp_class(longident, List.map(loop, lst))\n      | Ptyp_alias(core_type, string) =>\n        check_variable(var_names, t.ptyp_loc, string)\n        Ptyp_alias(loop(core_type), string)\n      | Ptyp_variant(row_field_list, flag, lbl_lst_option) =>\n        Ptyp_variant(List.map(loop_row_field, row_field_list), flag, lbl_lst_option)\n      | Ptyp_poly(string_lst, core_type) =>\n        List.iter(v => check_variable(var_names, t.ptyp_loc, v.txt), string_lst)\n        Ptyp_poly(string_lst, loop(core_type))\n      | Ptyp_package(longident, lst) =>\n        Ptyp_package(longident, List.map(((n, typ)) => (n, loop(typ)), lst))\n      | Ptyp_extension(s, arg) => Ptyp_extension(s, arg)\n      }\n\n      {...t, ptyp_desc: desc}\n    }\n    @raises(Error)\n    and loop_row_field = x =>\n      switch x {\n      | Rtag(label, attrs, flag, lst) => Rtag(label, attrs, flag, List.map(loop, lst))\n      | Rinherit(t) => Rinherit(loop(t))\n      }\n    @raises(Error)\n    and 
loop_object_field = x =>\n      switch x {\n      | Otag(label, attrs, t) => Otag(label, attrs, loop(t))\n      | Oinherit(t) => Oinherit(loop(t))\n      }\n\n    loop(t)\n  }\n}\n\nmodule Pat = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, d) => {\n    ppat_desc: d,\n    ppat_loc: loc,\n    ppat_attributes: attrs,\n  }\n  let attr = (d, a) => {...d, ppat_attributes: \\\"@\"(d.ppat_attributes, list{a})}\n\n  let any = (~loc=?, ~attrs=?, ()) => mk(~loc?, ~attrs?, Ppat_any)\n  let var = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ppat_var(a))\n  let alias = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ppat_alias(a, b))\n  let constant = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ppat_constant(a))\n  let interval = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ppat_interval(a, b))\n  let tuple = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ppat_tuple(a))\n  let construct = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ppat_construct(a, b))\n  let variant = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ppat_variant(a, b))\n  let record = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ppat_record(a, b))\n  let array = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ppat_array(a))\n  let or_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ppat_or(a, b))\n  let constraint_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ppat_constraint(a, b))\n  let type_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ppat_type(a))\n  let lazy_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ppat_lazy(a))\n  let unpack = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ppat_unpack(a))\n  let open_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Ppat_open(a, b))\n  let exception_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ppat_exception(a))\n  let extension = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Ppat_extension(a))\n}\n\nmodule Exp = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, d) => {\n    pexp_desc: d,\n    pexp_loc: loc,\n    pexp_attributes: attrs,\n  }\n  
let attr = (d, a) => {...d, pexp_attributes: \\\"@\"(d.pexp_attributes, list{a})}\n\n  let ident = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_ident(a))\n  let constant = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_constant(a))\n  let let_ = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pexp_let(a, b, c))\n  let fun_ = (~loc=?, ~attrs=?, a, b, c, d) => mk(~loc?, ~attrs?, Pexp_fun(a, b, c, d))\n  let function_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_function(a))\n  let apply = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_apply(a, b))\n  let match_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_match(a, b))\n  let try_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_try(a, b))\n  let tuple = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_tuple(a))\n  let construct = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_construct(a, b))\n  let variant = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_variant(a, b))\n  let record = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_record(a, b))\n  let field = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_field(a, b))\n  let setfield = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pexp_setfield(a, b, c))\n  let array = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_array(a))\n  let ifthenelse = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pexp_ifthenelse(a, b, c))\n  let sequence = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_sequence(a, b))\n  let while_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_while(a, b))\n  let for_ = (~loc=?, ~attrs=?, a, b, c, d, e) => mk(~loc?, ~attrs?, Pexp_for(a, b, c, d, e))\n  let constraint_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_constraint(a, b))\n  let coerce = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pexp_coerce(a, b, c))\n  let send = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_send(a, b))\n  let new_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_new(a))\n  let 
setinstvar = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_setinstvar(a, b))\n  let override = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_override(a))\n  let letmodule = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pexp_letmodule(a, b, c))\n  let letexception = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_letexception(a, b))\n  let assert_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_assert(a))\n  let lazy_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_lazy(a))\n  let poly = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_poly(a, b))\n  let object_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_object(a))\n  let newtype = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pexp_newtype(a, b))\n  let pack = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_pack(a))\n  let open_ = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pexp_open(a, b, c))\n  let extension = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pexp_extension(a))\n  let unreachable = (~loc=?, ~attrs=?, ()) => mk(~loc?, ~attrs?, Pexp_unreachable)\n\n  let case = (lhs, ~guard=?, rhs) => {\n    pc_lhs: lhs,\n    pc_guard: guard,\n    pc_rhs: rhs,\n  }\n}\n\nmodule Mty = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, d) => {\n    pmty_desc: d,\n    pmty_loc: loc,\n    pmty_attributes: attrs,\n  }\n  let attr = (d, a) => {...d, pmty_attributes: \\\"@\"(d.pmty_attributes, list{a})}\n\n  let ident = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pmty_ident(a))\n  let alias = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pmty_alias(a))\n  let signature = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pmty_signature(a))\n  let functor_ = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pmty_functor(a, b, c))\n  let with_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pmty_with(a, b))\n  let typeof_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pmty_typeof(a))\n  let extension = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pmty_extension(a))\n}\n\nmodule Mod = {\n  let 
mk = (~loc=default_loc.contents, ~attrs=list{}, d) => {\n    pmod_desc: d,\n    pmod_loc: loc,\n    pmod_attributes: attrs,\n  }\n  let attr = (d, a) => {...d, pmod_attributes: \\\"@\"(d.pmod_attributes, list{a})}\n\n  let ident = (~loc=?, ~attrs=?, x) => mk(~loc?, ~attrs?, Pmod_ident(x))\n  let structure = (~loc=?, ~attrs=?, x) => mk(~loc?, ~attrs?, Pmod_structure(x))\n  let functor_ = (~loc=?, ~attrs=?, arg, arg_ty, body) =>\n    mk(~loc?, ~attrs?, Pmod_functor(arg, arg_ty, body))\n  let apply = (~loc=?, ~attrs=?, m1, m2) => mk(~loc?, ~attrs?, Pmod_apply(m1, m2))\n  let constraint_ = (~loc=?, ~attrs=?, m, mty) => mk(~loc?, ~attrs?, Pmod_constraint(m, mty))\n  let unpack = (~loc=?, ~attrs=?, e) => mk(~loc?, ~attrs?, Pmod_unpack(e))\n  let extension = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pmod_extension(a))\n}\n\nmodule Sig = {\n  let mk = (~loc=default_loc.contents, d) => {psig_desc: d, psig_loc: loc}\n\n  let value = (~loc=?, a) => mk(~loc?, Psig_value(a))\n  let type_ = (~loc=?, rec_flag, a) => mk(~loc?, Psig_type(rec_flag, a))\n  let type_extension = (~loc=?, a) => mk(~loc?, Psig_typext(a))\n  let exception_ = (~loc=?, a) => mk(~loc?, Psig_exception(a))\n  let module_ = (~loc=?, a) => mk(~loc?, Psig_module(a))\n  let rec_module = (~loc=?, a) => mk(~loc?, Psig_recmodule(a))\n  let modtype = (~loc=?, a) => mk(~loc?, Psig_modtype(a))\n  let open_ = (~loc=?, a) => mk(~loc?, Psig_open(a))\n  let include_ = (~loc=?, a) => mk(~loc?, Psig_include(a))\n  let class_ = (~loc=?, a) => mk(~loc?, Psig_class(a))\n  let class_type = (~loc=?, a) => mk(~loc?, Psig_class_type(a))\n  let extension = (~loc=?, ~attrs=list{}, a) => mk(~loc?, Psig_extension(a, attrs))\n  let attribute = (~loc=?, a) => mk(~loc?, Psig_attribute(a))\n  let text = txt => {\n    let f_txt = List.filter(ds => docstring_body(ds) != \"\", txt)\n    List.map(ds => attribute(~loc=docstring_loc(ds), text_attr(ds)), f_txt)\n  }\n}\n\nmodule Str = {\n  let mk = (~loc=default_loc.contents, d) => {pstr_desc: 
d, pstr_loc: loc}\n\n  let eval = (~loc=?, ~attrs=list{}, a) => mk(~loc?, Pstr_eval(a, attrs))\n  let value = (~loc=?, a, b) => mk(~loc?, Pstr_value(a, b))\n  let primitive = (~loc=?, a) => mk(~loc?, Pstr_primitive(a))\n  let type_ = (~loc=?, rec_flag, a) => mk(~loc?, Pstr_type(rec_flag, a))\n  let type_extension = (~loc=?, a) => mk(~loc?, Pstr_typext(a))\n  let exception_ = (~loc=?, a) => mk(~loc?, Pstr_exception(a))\n  let module_ = (~loc=?, a) => mk(~loc?, Pstr_module(a))\n  let rec_module = (~loc=?, a) => mk(~loc?, Pstr_recmodule(a))\n  let modtype = (~loc=?, a) => mk(~loc?, Pstr_modtype(a))\n  let open_ = (~loc=?, a) => mk(~loc?, Pstr_open(a))\n  let class_ = (~loc=?, a) => mk(~loc?, Pstr_class(a))\n  let class_type = (~loc=?, a) => mk(~loc?, Pstr_class_type(a))\n  let include_ = (~loc=?, a) => mk(~loc?, Pstr_include(a))\n  let extension = (~loc=?, ~attrs=list{}, a) => mk(~loc?, Pstr_extension(a, attrs))\n  let attribute = (~loc=?, a) => mk(~loc?, Pstr_attribute(a))\n  let text = txt => {\n    let f_txt = List.filter(ds => docstring_body(ds) != \"\", txt)\n    List.map(ds => attribute(~loc=docstring_loc(ds), text_attr(ds)), f_txt)\n  }\n}\n\nmodule Cl = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, d) => {\n    pcl_desc: d,\n    pcl_loc: loc,\n    pcl_attributes: attrs,\n  }\n  let attr = (d, a) => {...d, pcl_attributes: \\\"@\"(d.pcl_attributes, list{a})}\n\n  let constr = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pcl_constr(a, b))\n  let structure = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pcl_structure(a))\n  let fun_ = (~loc=?, ~attrs=?, a, b, c, d) => mk(~loc?, ~attrs?, Pcl_fun(a, b, c, d))\n  let apply = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pcl_apply(a, b))\n  let let_ = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pcl_let(a, b, c))\n  let constraint_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pcl_constraint(a, b))\n  let extension = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pcl_extension(a))\n  let open_ = 
(~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pcl_open(a, b, c))\n}\n\nmodule Cty = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, d) => {\n    pcty_desc: d,\n    pcty_loc: loc,\n    pcty_attributes: attrs,\n  }\n  let attr = (d, a) => {...d, pcty_attributes: \\\"@\"(d.pcty_attributes, list{a})}\n\n  let constr = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pcty_constr(a, b))\n  let signature = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pcty_signature(a))\n  let arrow = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pcty_arrow(a, b, c))\n  let extension = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pcty_extension(a))\n  let open_ = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pcty_open(a, b, c))\n}\n\nmodule Ctf = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, ~docs=empty_docs, d) => {\n    pctf_desc: d,\n    pctf_loc: loc,\n    pctf_attributes: add_docs_attrs(docs, attrs),\n  }\n\n  let inherit_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pctf_inherit(a))\n  let val_ = (~loc=?, ~attrs=?, a, b, c, d) => mk(~loc?, ~attrs?, Pctf_val(a, b, c, d))\n  let method_ = (~loc=?, ~attrs=?, a, b, c, d) => mk(~loc?, ~attrs?, Pctf_method(a, b, c, d))\n  let constraint_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pctf_constraint(a, b))\n  let extension = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pctf_extension(a))\n  let attribute = (~loc=?, a) => mk(~loc?, Pctf_attribute(a))\n  let text = txt => {\n    let f_txt = List.filter(ds => docstring_body(ds) != \"\", txt)\n    List.map(ds => attribute(~loc=docstring_loc(ds), text_attr(ds)), f_txt)\n  }\n\n  let attr = (d, a) => {...d, pctf_attributes: \\\"@\"(d.pctf_attributes, list{a})}\n}\n\nmodule Cf = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, ~docs=empty_docs, d) => {\n    pcf_desc: d,\n    pcf_loc: loc,\n    pcf_attributes: add_docs_attrs(docs, attrs),\n  }\n\n  let inherit_ = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pcf_inherit(a, b, c))\n  let val_ = (~loc=?, 
~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pcf_val(a, b, c))\n  let method_ = (~loc=?, ~attrs=?, a, b, c) => mk(~loc?, ~attrs?, Pcf_method(a, b, c))\n  let constraint_ = (~loc=?, ~attrs=?, a, b) => mk(~loc?, ~attrs?, Pcf_constraint(a, b))\n  let initializer_ = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pcf_initializer(a))\n  let extension = (~loc=?, ~attrs=?, a) => mk(~loc?, ~attrs?, Pcf_extension(a))\n  let attribute = (~loc=?, a) => mk(~loc?, Pcf_attribute(a))\n  let text = txt => {\n    let f_txt = List.filter(ds => docstring_body(ds) != \"\", txt)\n    List.map(ds => attribute(~loc=docstring_loc(ds), text_attr(ds)), f_txt)\n  }\n\n  let virtual_ = ct => Cfk_virtual(ct)\n  let concrete = (o, e) => Cfk_concrete(o, e)\n\n  let attr = (d, a) => {...d, pcf_attributes: \\\"@\"(d.pcf_attributes, list{a})}\n}\n\nmodule Val = {\n  let mk = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~prim=list{},\n    name,\n    typ,\n  ) => {\n    pval_name: name,\n    pval_type: typ,\n    pval_attributes: add_docs_attrs(docs, attrs),\n    pval_loc: loc,\n    pval_prim: prim,\n  }\n}\n\nmodule Md = {\n  let mk = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~text=list{},\n    name,\n    typ,\n  ) => {\n    pmd_name: name,\n    pmd_type: typ,\n    pmd_attributes: add_text_attrs(text, add_docs_attrs(docs, attrs)),\n    pmd_loc: loc,\n  }\n}\n\nmodule Mtd = {\n  let mk = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~text=list{},\n    ~typ=?,\n    name,\n  ) => {\n    pmtd_name: name,\n    pmtd_type: typ,\n    pmtd_attributes: add_text_attrs(text, add_docs_attrs(docs, attrs)),\n    pmtd_loc: loc,\n  }\n}\n\nmodule Mb = {\n  let mk = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~text=list{},\n    name,\n    expr,\n  ) => {\n    pmb_name: name,\n    pmb_expr: expr,\n    pmb_attributes: add_text_attrs(text, add_docs_attrs(docs, attrs)),\n    
pmb_loc: loc,\n  }\n}\n\nmodule Opn = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, ~docs=empty_docs, ~override=Fresh, lid) => {\n    popen_lid: lid,\n    popen_override: override,\n    popen_loc: loc,\n    popen_attributes: add_docs_attrs(docs, attrs),\n  }\n}\n\nmodule Incl = {\n  let mk = (~loc=default_loc.contents, ~attrs=list{}, ~docs=empty_docs, mexpr) => {\n    pincl_mod: mexpr,\n    pincl_loc: loc,\n    pincl_attributes: add_docs_attrs(docs, attrs),\n  }\n}\n\nmodule Vb = {\n  let mk = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~text=list{},\n    pat,\n    expr,\n  ) => {\n    pvb_pat: pat,\n    pvb_expr: expr,\n    pvb_attributes: add_text_attrs(text, add_docs_attrs(docs, attrs)),\n    pvb_loc: loc,\n  }\n}\n\nmodule Ci = {\n  let mk = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~text=list{},\n    ~virt=Concrete,\n    ~params=list{},\n    name,\n    expr,\n  ) => {\n    pci_virt: virt,\n    pci_params: params,\n    pci_name: name,\n    pci_expr: expr,\n    pci_attributes: add_text_attrs(text, add_docs_attrs(docs, attrs)),\n    pci_loc: loc,\n  }\n}\n\nmodule Type = {\n  let mk = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~text=list{},\n    ~params=list{},\n    ~cstrs=list{},\n    ~kind=Ptype_abstract,\n    ~priv=Public,\n    ~manifest=?,\n    name,\n  ) => {\n    ptype_name: name,\n    ptype_params: params,\n    ptype_cstrs: cstrs,\n    ptype_kind: kind,\n    ptype_private: priv,\n    ptype_manifest: manifest,\n    ptype_attributes: add_text_attrs(text, add_docs_attrs(docs, attrs)),\n    ptype_loc: loc,\n  }\n\n  let constructor = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~info=empty_info,\n    ~args=Pcstr_tuple(list{}),\n    ~res=?,\n    name,\n  ) => {\n    pcd_name: name,\n    pcd_args: args,\n    pcd_res: res,\n    pcd_loc: loc,\n    pcd_attributes: add_info_attrs(info, attrs),\n  }\n\n  let field = (\n    
~loc=default_loc.contents,\n    ~attrs=list{},\n    ~info=empty_info,\n    ~mut=Immutable,\n    name,\n    typ,\n  ) => {\n    pld_name: name,\n    pld_mutable: mut,\n    pld_type: typ,\n    pld_loc: loc,\n    pld_attributes: add_info_attrs(info, attrs),\n  }\n}\n\n@ocaml.doc(\" Type extensions \")\nmodule Te = {\n  let mk = (~attrs=list{}, ~docs=empty_docs, ~params=list{}, ~priv=Public, path, constructors) => {\n    ptyext_path: path,\n    ptyext_params: params,\n    ptyext_constructors: constructors,\n    ptyext_private: priv,\n    ptyext_attributes: add_docs_attrs(docs, attrs),\n  }\n\n  let constructor = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~info=empty_info,\n    name,\n    kind,\n  ) => {\n    pext_name: name,\n    pext_kind: kind,\n    pext_loc: loc,\n    pext_attributes: add_docs_attrs(docs, add_info_attrs(info, attrs)),\n  }\n\n  let decl = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~info=empty_info,\n    ~args=Pcstr_tuple(list{}),\n    ~res=?,\n    name,\n  ) => {\n    pext_name: name,\n    pext_kind: Pext_decl(args, res),\n    pext_loc: loc,\n    pext_attributes: add_docs_attrs(docs, add_info_attrs(info, attrs)),\n  }\n\n  let rebind = (\n    ~loc=default_loc.contents,\n    ~attrs=list{},\n    ~docs=empty_docs,\n    ~info=empty_info,\n    name,\n    lid,\n  ) => {\n    pext_name: name,\n    pext_kind: Pext_rebind(lid),\n    pext_loc: loc,\n    pext_attributes: add_docs_attrs(docs, add_info_attrs(info, attrs)),\n  }\n}\n\nmodule Csig = {\n  let mk = (self, fields) => {\n    pcsig_self: self,\n    pcsig_fields: fields,\n  }\n}\n\nmodule Cstr = {\n  let mk = (self, fields) => {\n    pcstr_self: self,\n    pcstr_fields: fields,\n  }\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/asttypes.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/asttypes.res",
    "content": "open P\n\n@@ocaml.text(\n  /* ************************************************************************ */\n  /*  */\n  /* OCaml */\n  /*  */\n  /* Xavier Leroy, projet Cristal, INRIA Rocquencourt */\n  /*  */\n  /* Copyright 1996 Institut National de Recherche en Informatique et */\n  /* en Automatique. */\n  /*  */\n  /* All rights reserved.  This file is distributed under the terms of */\n  /* the GNU Lesser General Public License version 2.1, with the */\n  /* special exception on linking described in the file LICENSE. */\n  /*  */\n  /* ************************************************************************ */\n\n  \" Auxiliary AST types used by parsetree and typedtree. \"\n)\n\ntype constant =\n  | Const_int(int)\n  | Const_char(char)\n  | Const_string(string, option<string>)\n  | Const_float(string)\n  | Const_int32(int32)\n  | Const_int64(int64)\n  | Const_nativeint(nativeint)\n\ntype rec_flag = Nonrecursive | Recursive\n\ntype direction_flag = Upto | Downto\n\n/* Order matters, used in polymorphic comparison */\ntype private_flag = Private | Public\n\ntype mutable_flag = Immutable | Mutable\n\ntype virtual_flag = Virtual | Concrete\n\ntype override_flag = Override | Fresh\n\ntype closed_flag = Closed | Open\n\ntype label = string\n\ntype arg_label =\n  | Nolabel\n  | Labelled(string) /* label:T -> ... */\n  | Optional(string) /* ?label:T -> ... */\n\ntype loc<'a> = Location.loc<'a> = {\n  txt: 'a,\n  loc: Location.t,\n}\n\ntype variance =\n  | Covariant\n  | Contravariant\n  | Invariant\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/clflags.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Arg from \"rescript/lib/es6/arg.js\";\nimport * as Sys from \"rescript/lib/es6/sys.js\";\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Misc from \"./misc.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Config from \"./config.js\";\nimport * as Printf from \"./printf.js\";\nimport * as Filename from \"rescript/lib/es6/filename.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nvar Int_arg_helper = {};\n\nvar Float_arg_helper = {};\n\nvar objfiles = {\n  contents: /* [] */0\n};\n\nvar ccobjs = {\n  contents: /* [] */0\n};\n\nvar dllibs = {\n  contents: /* [] */0\n};\n\nvar compile_only = {\n  contents: false\n};\n\nvar output_name = {\n  contents: undefined\n};\n\nvar include_dirs = {\n  contents: /* [] */0\n};\n\nvar no_std_include = {\n  contents: false\n};\n\nvar print_types = {\n  contents: false\n};\n\nvar make_archive = {\n  contents: false\n};\n\nvar debug = {\n  contents: false\n};\n\nvar fast = {\n  contents: false\n};\n\nvar use_linscan = {\n  contents: false\n};\n\nvar link_everything = {\n  contents: false\n};\n\nvar custom_runtime = {\n  contents: false\n};\n\nvar no_check_prims = {\n  contents: false\n};\n\nvar bytecode_compatible_32 = {\n  contents: false\n};\n\nvar output_c_object = {\n  contents: false\n};\n\nvar output_complete_object = {\n  contents: false\n};\n\nvar all_ccopts = {\n  contents: /* [] */0\n};\n\nvar classic = {\n  contents: false\n};\n\nvar nopervasives = {\n  contents: false\n};\n\nvar preprocessor = {\n  contents: undefined\n};\n\nvar all_ppx = {\n  contents: /* [] */0\n};\n\nvar annotations = {\n  contents: false\n};\n\nvar binary_annotations = {\n  contents: false\n};\n\nvar use_threads = {\n  contents: false\n};\n\nvar use_vmthreads = {\n  contents: false\n};\n\nvar noassert = {\n  contents: 
false\n};\n\nvar verbose = {\n  contents: false\n};\n\nvar noversion = {\n  contents: false\n};\n\nvar noprompt = {\n  contents: false\n};\n\nvar nopromptcont = {\n  contents: false\n};\n\nvar init_file = {\n  contents: undefined\n};\n\nvar noinit = {\n  contents: false\n};\n\nvar open_modules = {\n  contents: /* [] */0\n};\n\nvar use_prims = {\n  contents: \"\"\n};\n\nvar use_runtime = {\n  contents: \"\"\n};\n\nvar principal = {\n  contents: false\n};\n\nvar real_paths = {\n  contents: true\n};\n\nvar recursive_types = {\n  contents: false\n};\n\nvar strict_sequence = {\n  contents: false\n};\n\nvar strict_formats = {\n  contents: false\n};\n\nvar applicative_functors = {\n  contents: true\n};\n\nvar make_runtime = {\n  contents: false\n};\n\nvar gprofile = {\n  contents: false\n};\n\nvar c_compiler = {\n  contents: undefined\n};\n\nvar no_auto_link = {\n  contents: false\n};\n\nvar dllpaths = {\n  contents: /* [] */0\n};\n\nvar make_package = {\n  contents: false\n};\n\nvar for_package = {\n  contents: undefined\n};\n\nvar error_size = {\n  contents: 500\n};\n\nvar float_const_prop = {\n  contents: true\n};\n\nvar transparent_modules = {\n  contents: false\n};\n\nvar dump_source = {\n  contents: false\n};\n\nvar dump_parsetree = {\n  contents: false\n};\n\nvar dump_typedtree = {\n  contents: false\n};\n\nvar dump_rawlambda = {\n  contents: false\n};\n\nvar dump_lambda = {\n  contents: false\n};\n\nvar dump_rawclambda = {\n  contents: false\n};\n\nvar dump_clambda = {\n  contents: false\n};\n\nvar dump_rawflambda = {\n  contents: false\n};\n\nvar dump_flambda = {\n  contents: false\n};\n\nvar dump_flambda_let = {\n  contents: undefined\n};\n\nvar dump_flambda_verbose = {\n  contents: false\n};\n\nvar dump_instr = {\n  contents: false\n};\n\nvar keep_asm_file = {\n  contents: false\n};\n\nvar optimize_for_speed = {\n  contents: true\n};\n\nvar opaque = {\n  contents: false\n};\n\nvar dump_cmm = {\n  contents: false\n};\n\nvar dump_selection = {\n  contents: 
false\n};\n\nvar dump_cse = {\n  contents: false\n};\n\nvar dump_live = {\n  contents: false\n};\n\nvar dump_avail = {\n  contents: false\n};\n\nvar dump_spill = {\n  contents: false\n};\n\nvar dump_split = {\n  contents: false\n};\n\nvar dump_interf = {\n  contents: false\n};\n\nvar dump_prefer = {\n  contents: false\n};\n\nvar dump_regalloc = {\n  contents: false\n};\n\nvar dump_reload = {\n  contents: false\n};\n\nvar dump_scheduling = {\n  contents: false\n};\n\nvar dump_linear = {\n  contents: false\n};\n\nvar dump_interval = {\n  contents: false\n};\n\nvar keep_startup_file = {\n  contents: false\n};\n\nvar dump_combine = {\n  contents: false\n};\n\nvar debug_runavail = {\n  contents: false\n};\n\nvar native_code = {\n  contents: false\n};\n\nvar force_slash = {\n  contents: false\n};\n\nvar clambda_checks = {\n  contents: false\n};\n\nvar flambda_invariant_checks = {\n  contents: true\n};\n\nvar dont_write_files = {\n  contents: false\n};\n\nfunction std_include_flag(prefix) {\n  if (no_std_include.contents) {\n    return \"\";\n  } else {\n    return prefix + Curry._1(Filename.quote, Config.standard_library);\n  }\n}\n\nfunction std_include_dir(param) {\n  if (no_std_include.contents) {\n    return /* [] */0;\n  } else {\n    return {\n            hd: Config.standard_library,\n            tl: /* [] */0\n          };\n  }\n}\n\nvar shared = {\n  contents: false\n};\n\nvar dlcode = {\n  contents: true\n};\n\nvar tmp = Config.architecture === \"amd64\" ? 
true : false;\n\nvar pic_code = {\n  contents: tmp\n};\n\nvar runtime_variant = {\n  contents: \"\"\n};\n\nvar keep_docs = {\n  contents: false\n};\n\nvar keep_locs = {\n  contents: true\n};\n\nvar unsafe_string = {\n  contents: false\n};\n\nvar classic_inlining = {\n  contents: false\n};\n\nvar inlining_report = {\n  contents: false\n};\n\nvar afl_instrument = {\n  contents: false\n};\n\nvar afl_inst_ratio = {\n  contents: 100\n};\n\nvar simplify_rounds = {\n  contents: undefined\n};\n\nvar default_simplify_rounds = {\n  contents: 1\n};\n\nfunction rounds(param) {\n  var r = simplify_rounds.contents;\n  if (r !== undefined) {\n    return r;\n  } else {\n    return default_simplify_rounds.contents;\n  }\n}\n\nvar default_inline_threshold = 10 / 8;\n\nvar default_inline_toplevel_threshold = 16 * default_inline_threshold | 0;\n\nvar unbox_specialised_args = {\n  contents: true\n};\n\nvar unbox_free_vars_of_closures = {\n  contents: true\n};\n\nvar unbox_closures = {\n  contents: false\n};\n\nvar unbox_closures_factor = {\n  contents: 10\n};\n\nvar remove_unused_arguments = {\n  contents: false\n};\n\nvar classic_arguments_inline_threshold = 10 / 8;\n\nvar classic_arguments_inline_toplevel_threshold = 1;\n\nvar classic_arguments = {\n  inline_call_cost: undefined,\n  inline_alloc_cost: undefined,\n  inline_prim_cost: undefined,\n  inline_branch_cost: undefined,\n  inline_indirect_cost: undefined,\n  inline_lifting_benefit: undefined,\n  inline_branch_factor: undefined,\n  inline_max_depth: undefined,\n  inline_max_unroll: undefined,\n  inline_threshold: classic_arguments_inline_threshold,\n  inline_toplevel_threshold: classic_arguments_inline_toplevel_threshold\n};\n\nvar o2_arguments_inline_call_cost = 10;\n\nvar o2_arguments_inline_alloc_cost = 14;\n\nvar o2_arguments_inline_prim_cost = 6;\n\nvar o2_arguments_inline_branch_cost = 10;\n\nvar o2_arguments_inline_indirect_cost = 8;\n\nvar o2_arguments_inline_max_depth = 2;\n\nvar o2_arguments_inline_threshold = 
25;\n\nvar o2_arguments_inline_toplevel_threshold = 400;\n\nvar o2_arguments = {\n  inline_call_cost: o2_arguments_inline_call_cost,\n  inline_alloc_cost: o2_arguments_inline_alloc_cost,\n  inline_prim_cost: o2_arguments_inline_prim_cost,\n  inline_branch_cost: o2_arguments_inline_branch_cost,\n  inline_indirect_cost: o2_arguments_inline_indirect_cost,\n  inline_lifting_benefit: undefined,\n  inline_branch_factor: undefined,\n  inline_max_depth: o2_arguments_inline_max_depth,\n  inline_max_unroll: undefined,\n  inline_threshold: o2_arguments_inline_threshold,\n  inline_toplevel_threshold: o2_arguments_inline_toplevel_threshold\n};\n\nvar o3_arguments_inline_call_cost = 15;\n\nvar o3_arguments_inline_alloc_cost = 21;\n\nvar o3_arguments_inline_prim_cost = 9;\n\nvar o3_arguments_inline_branch_cost = 15;\n\nvar o3_arguments_inline_indirect_cost = 12;\n\nvar o3_arguments_inline_branch_factor = 0;\n\nvar o3_arguments_inline_max_depth = 3;\n\nvar o3_arguments_inline_max_unroll = 1;\n\nvar o3_arguments_inline_threshold = 50;\n\nvar o3_arguments_inline_toplevel_threshold = 800;\n\nvar o3_arguments = {\n  inline_call_cost: o3_arguments_inline_call_cost,\n  inline_alloc_cost: o3_arguments_inline_alloc_cost,\n  inline_prim_cost: o3_arguments_inline_prim_cost,\n  inline_branch_cost: o3_arguments_inline_branch_cost,\n  inline_indirect_cost: o3_arguments_inline_indirect_cost,\n  inline_lifting_benefit: undefined,\n  inline_branch_factor: o3_arguments_inline_branch_factor,\n  inline_max_depth: o3_arguments_inline_max_depth,\n  inline_max_unroll: o3_arguments_inline_max_unroll,\n  inline_threshold: o3_arguments_inline_threshold,\n  inline_toplevel_threshold: o3_arguments_inline_toplevel_threshold\n};\n\nvar all_passes = {\n  contents: /* [] */0\n};\n\nvar dumped_passes_list = {\n  contents: /* [] */0\n};\n\nfunction dumped_pass(s) {\n  if (!List.mem(s, all_passes.contents)) {\n    throw {\n          RE_EXN_ID: \"Assert_failure\",\n          _1: [\n            \"clflags.res\",\n    
        276,\n            2\n          ],\n          Error: new Error()\n        };\n  }\n  return List.mem(s, dumped_passes_list.contents);\n}\n\nfunction set_dumped_pass(s, enabled) {\n  if (!List.mem(s, all_passes.contents)) {\n    return ;\n  }\n  var passes_without_s = List.filter(function (param) {\n          return s !== param;\n        })(dumped_passes_list.contents);\n  var dumped_passes = enabled ? ({\n        hd: s,\n        tl: passes_without_s\n      }) : passes_without_s;\n  dumped_passes_list.contents = dumped_passes;\n  \n}\n\nfunction parse_color_setting(x) {\n  switch (x) {\n    case \"always\" :\n        return /* Always */1;\n    case \"auto\" :\n        return /* Auto */0;\n    case \"never\" :\n        return /* Never */2;\n    default:\n      return ;\n  }\n}\n\nvar color = {\n  contents: undefined\n};\n\nvar unboxed_types = {\n  contents: false\n};\n\nvar arg_spec = {\n  contents: /* [] */0\n};\n\nvar arg_names = {\n  contents: Misc.StringMap.empty\n};\n\nfunction reset_arguments(param) {\n  arg_spec.contents = /* [] */0;\n  arg_names.contents = Misc.StringMap.empty;\n  \n}\n\nfunction add_arguments(loc, args) {\n  return List.iter((function (x) {\n                var arg_name = x[0];\n                try {\n                  var loc2 = Curry._2(Misc.StringMap.find, arg_name, arg_names.contents);\n                  Curry._1(Printf.eprintf(\"Warning: plugin argument %s is already defined:\\n\"), arg_name);\n                  Curry._1(Printf.eprintf(\"   First definition: %s\\n\"), loc2);\n                  return Curry._1(Printf.eprintf(\"   New definition: %s\\n\"), loc);\n                }\n                catch (raw_exn){\n                  var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n                  if (exn.RE_EXN_ID === \"Not_found\") {\n                    arg_spec.contents = Pervasives.$at(arg_spec.contents, {\n                          hd: x,\n                          tl: /* [] */0\n                        });\n 
                   arg_names.contents = Curry._3(Misc.StringMap.add, arg_name, loc, arg_names.contents);\n                    return ;\n                  }\n                  throw exn;\n                }\n              }), args);\n}\n\nfunction print_arguments(usage) {\n  return Arg.usage(arg_spec.contents, usage);\n}\n\nfunction parse_arguments(f, msg) {\n  try {\n    var argv = {\n      contents: Sys.argv\n    };\n    var current = {\n      contents: Arg.current.contents\n    };\n    return Arg.parse_and_expand_argv_dynamic(current, argv, arg_spec, f, msg);\n  }\n  catch (raw_msg){\n    var msg$1 = Caml_js_exceptions.internalToOCamlException(raw_msg);\n    if (msg$1.RE_EXN_ID === Arg.Bad) {\n      Curry._1(Printf.eprintf(\"%s\"), msg$1._1);\n      return Pervasives.exit(2);\n    }\n    if (msg$1.RE_EXN_ID === Arg.Help) {\n      Curry._1(Printf.printf(\"%s\"), msg$1._1);\n      return Pervasives.exit(0);\n    }\n    throw msg$1;\n  }\n}\n\nvar inline_toplevel_multiplier = 16;\n\nvar default_inline_call_cost = 5;\n\nvar default_inline_alloc_cost = 7;\n\nvar default_inline_prim_cost = 3;\n\nvar default_inline_branch_cost = 5;\n\nvar default_inline_indirect_cost = 4;\n\nvar default_inline_branch_factor = 0.1;\n\nvar default_inline_lifting_benefit = 1300;\n\nvar default_inline_max_unroll = 0;\n\nvar default_inline_max_depth = 1;\n\nvar default_unbox_closures_factor = 10;\n\nvar o1_arguments = {\n  inline_call_cost: undefined,\n  inline_alloc_cost: undefined,\n  inline_prim_cost: undefined,\n  inline_branch_cost: undefined,\n  inline_indirect_cost: undefined,\n  inline_lifting_benefit: undefined,\n  inline_branch_factor: undefined,\n  inline_max_depth: undefined,\n  inline_max_unroll: undefined,\n  inline_threshold: undefined,\n  inline_toplevel_threshold: undefined\n};\n\nexport {\n  Int_arg_helper ,\n  Float_arg_helper ,\n  objfiles ,\n  ccobjs ,\n  dllibs ,\n  compile_only ,\n  output_name ,\n  include_dirs ,\n  no_std_include ,\n  print_types ,\n  make_archive ,\n 
 debug ,\n  fast ,\n  use_linscan ,\n  link_everything ,\n  custom_runtime ,\n  no_check_prims ,\n  bytecode_compatible_32 ,\n  output_c_object ,\n  output_complete_object ,\n  all_ccopts ,\n  classic ,\n  nopervasives ,\n  preprocessor ,\n  all_ppx ,\n  annotations ,\n  binary_annotations ,\n  use_threads ,\n  use_vmthreads ,\n  noassert ,\n  verbose ,\n  noversion ,\n  noprompt ,\n  nopromptcont ,\n  init_file ,\n  noinit ,\n  open_modules ,\n  use_prims ,\n  use_runtime ,\n  principal ,\n  real_paths ,\n  recursive_types ,\n  strict_sequence ,\n  strict_formats ,\n  applicative_functors ,\n  make_runtime ,\n  gprofile ,\n  c_compiler ,\n  no_auto_link ,\n  dllpaths ,\n  make_package ,\n  for_package ,\n  error_size ,\n  float_const_prop ,\n  transparent_modules ,\n  dump_source ,\n  dump_parsetree ,\n  dump_typedtree ,\n  dump_rawlambda ,\n  dump_lambda ,\n  dump_rawclambda ,\n  dump_clambda ,\n  dump_rawflambda ,\n  dump_flambda ,\n  dump_flambda_let ,\n  dump_flambda_verbose ,\n  dump_instr ,\n  keep_asm_file ,\n  optimize_for_speed ,\n  opaque ,\n  dump_cmm ,\n  dump_selection ,\n  dump_cse ,\n  dump_live ,\n  dump_avail ,\n  dump_spill ,\n  dump_split ,\n  dump_interf ,\n  dump_prefer ,\n  dump_regalloc ,\n  dump_reload ,\n  dump_scheduling ,\n  dump_linear ,\n  dump_interval ,\n  keep_startup_file ,\n  dump_combine ,\n  debug_runavail ,\n  native_code ,\n  force_slash ,\n  clambda_checks ,\n  flambda_invariant_checks ,\n  dont_write_files ,\n  std_include_flag ,\n  std_include_dir ,\n  shared ,\n  dlcode ,\n  pic_code ,\n  runtime_variant ,\n  keep_docs ,\n  keep_locs ,\n  unsafe_string ,\n  classic_inlining ,\n  inlining_report ,\n  afl_instrument ,\n  afl_inst_ratio ,\n  simplify_rounds ,\n  default_simplify_rounds ,\n  rounds ,\n  default_inline_threshold ,\n  inline_toplevel_multiplier ,\n  default_inline_toplevel_threshold ,\n  default_inline_call_cost ,\n  default_inline_alloc_cost ,\n  default_inline_prim_cost ,\n  default_inline_branch_cost ,\n  
default_inline_indirect_cost ,\n  default_inline_branch_factor ,\n  default_inline_lifting_benefit ,\n  default_inline_max_unroll ,\n  default_inline_max_depth ,\n  unbox_specialised_args ,\n  unbox_free_vars_of_closures ,\n  unbox_closures ,\n  default_unbox_closures_factor ,\n  unbox_closures_factor ,\n  remove_unused_arguments ,\n  o1_arguments ,\n  classic_arguments ,\n  o2_arguments ,\n  o3_arguments ,\n  all_passes ,\n  dumped_passes_list ,\n  dumped_pass ,\n  set_dumped_pass ,\n  parse_color_setting ,\n  color ,\n  unboxed_types ,\n  arg_spec ,\n  arg_names ,\n  reset_arguments ,\n  add_arguments ,\n  print_arguments ,\n  parse_arguments ,\n  \n}\n/* pic_code Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/clflags.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Xavier Leroy, projet Cristal, INRIA Rocquencourt */\n/*  */\n/* Copyright 1996 Institut National de Recherche en Informatique et */\n/* en Automatique. */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\n/* Command-line parameters */\n\nmodule Int_arg_helper = {}\nmodule Float_arg_helper = {\n\n}\n\nlet objfiles = ref((list{}: list<string>)) /* .cmo and .cma files */\nand ccobjs = ref((list{}: list<string>)) /* .o, .a, .so and -cclib -lxxx */\nand dllibs = ref((list{}: list<string>)) /* .so and -dllib -lxxx */\n\nlet compile_only = ref(false) /* -c */\nand output_name = ref((None: option<string>)) /* -o */\nand include_dirs = ref((list{}: list<string>)) /* -I */\nand no_std_include = ref(false) /* -nostdlib */\nand print_types = ref(false) /* -i */\nand make_archive = ref(false) /* -a */\nand debug = ref(false) /* -g */\nand fast = ref(false) /* -unsafe */\nand use_linscan = ref(false) /* -linscan */\nand link_everything = ref(false) /* -linkall */\nand custom_runtime = ref(false) /* -custom */\nand no_check_prims = ref(false) /* -no-check-prims */\nand bytecode_compatible_32 = ref(false) /* -compat-32 */\nand output_c_object = ref(false) /* -output-obj */\nand output_complete_object = ref(false) /* -output-complete-obj */\nand all_ccopts = ref((list{}: list<string>)) /* -ccopt */\nand classic = ref(false) /* -nolabels */\nand nopervasives = ref(false) /* -nopervasives */\nand preprocessor = ref((None: option<string>)) /* -pp */\nand all_ppx = ref((list{}: list<string>)) /* -ppx */\nlet annotations = ref(false) /* -annot */\nlet binary_annotations = ref(false) /* -annot */\nand 
use_threads = ref(false) /* -thread */\nand use_vmthreads = ref(false) /* -vmthread */\nand noassert = ref(false) /* -noassert */\nand verbose = ref(false) /* -verbose */\nand noversion = ref(false) /* -no-version */\nand noprompt = ref(false) /* -noprompt */\nand nopromptcont = ref(false) /* -nopromptcont */\nand init_file = ref((None: option<string>)) /* -init */\nand noinit = ref(false) /* -noinit */\nand open_modules: ref<list<string>> = ref(list{}) /* -open */\nand use_prims = ref(\"\") /* -use-prims ... */\nand use_runtime = ref(\"\") /* -use-runtime ... */\nand principal = ref(false) /* -principal */\nand real_paths = ref(true) /* -short-paths */\nand recursive_types = ref(false) /* -rectypes */\nand strict_sequence = ref(false) /* -strict-sequence */\nand strict_formats = ref(false) /* -strict-formats */\nand applicative_functors = ref(true) /* -no-app-funct */\nand make_runtime = ref(false) /* -make-runtime */\nand gprofile = ref(false) /* -p */\nand c_compiler = ref((None: option<string>)) /* -cc */\nand no_auto_link = ref(false) /* -noautolink */\nand dllpaths = ref((list{}: list<string>)) /* -dllpath */\nand make_package = ref(false) /* -pack */\nand for_package = ref((None: option<string>)) /* -for-pack */\nand error_size = ref(500) /* -error-size */\nand float_const_prop = ref(true) /* -no-float-const-prop */\nand transparent_modules = ref(false) /* -trans-mod */\nlet dump_source = ref(false) /* -dsource */\nlet dump_parsetree = ref(false) /* -dparsetree */\nand dump_typedtree = ref(false) /* -dtypedtree */\nand dump_rawlambda = ref(false) /* -drawlambda */\nand dump_lambda = ref(false) /* -dlambda */\nand dump_rawclambda = ref(false) /* -drawclambda */\nand dump_clambda = ref(false) /* -dclambda */\nand dump_rawflambda = ref(false) /* -drawflambda */\nand dump_flambda = ref(false) /* -dflambda */\nand dump_flambda_let = ref((None: option<int>)) /* -dflambda-let=... 
*/\nand dump_flambda_verbose = ref(false) /* -dflambda-verbose */\nand dump_instr = ref(false) /* -dinstr */\n\nlet keep_asm_file = ref(false) /* -S */\nlet optimize_for_speed = ref(true) /* -compact */\nand opaque = ref(false) /* -opaque */\n\nand dump_cmm = ref(false) /* -dcmm */\nlet dump_selection = ref(false) /* -dsel */\nlet dump_cse = ref(false) /* -dcse */\nlet dump_live = ref(false) /* -dlive */\nlet dump_avail = ref(false) /* -davail */\nlet dump_spill = ref(false) /* -dspill */\nlet dump_split = ref(false) /* -dsplit */\nlet dump_interf = ref(false) /* -dinterf */\nlet dump_prefer = ref(false) /* -dprefer */\nlet dump_regalloc = ref(false) /* -dalloc */\nlet dump_reload = ref(false) /* -dreload */\nlet dump_scheduling = ref(false) /* -dscheduling */\nlet dump_linear = ref(false) /* -dlinear */\nlet dump_interval = ref(false) /* -dinterval */\nlet keep_startup_file = ref(false) /* -dstartup */\nlet dump_combine = ref(false) /* -dcombine */\n\nlet debug_runavail = ref(false) /* -drunavail */\n\nlet native_code = ref(false) /* set to true under ocamlopt */\n\nlet force_slash = ref(false) /* for ocamldep */\nlet clambda_checks = ref(false) /* -clambda-checks */\n\nlet flambda_invariant_checks = ref(true) /* -flambda-invariants */\n\nlet dont_write_files = ref(false) /* set to true under ocamldoc */\n\nlet std_include_flag = prefix =>\n  if no_std_include.contents {\n    \"\"\n  } else {\n    prefix ++ Filename.quote(Config.standard_library)\n  }\n\nlet std_include_dir = () =>\n  if no_std_include.contents {\n    list{}\n  } else {\n    list{Config.standard_library}\n  }\n\nlet shared = ref(false) /* -shared */\nlet dlcode = ref(true) /* not -nodynlink */\n\nlet pic_code = ref(\n  switch Config.architecture {\n  /* -fPIC */\n  | \"amd64\" => true\n  | _ => false\n  },\n)\n\nlet runtime_variant = ref(\"\") /* -runtime-variant */\n\nlet keep_docs = ref(false) /* -keep-docs */\nlet keep_locs = ref(true) /* -keep-locs */\nlet unsafe_string = if Config.safe_string 
{\n  ref(false)\n} else {\n  ref(!Config.default_safe_string)\n}\n/* -safe-string / -unsafe-string */\n\nlet classic_inlining = ref(false) /* -Oclassic */\nlet inlining_report = ref(false) /* -inlining-report */\n\nlet afl_instrument = ref(Config.afl_instrument) /* -afl-instrument */\nlet afl_inst_ratio = ref(100) /* -afl-inst-ratio */\n\nlet simplify_rounds = ref(None) /* -rounds */\nlet default_simplify_rounds = ref(1) /* -rounds */\nlet rounds = () =>\n  switch simplify_rounds.contents {\n  | None => default_simplify_rounds.contents\n  | Some(r) => r\n  }\n\nlet default_inline_threshold = if Config.flambda {\n  10.\n} else {\n  10. /. 8.\n}\nlet inline_toplevel_multiplier = 16\nlet default_inline_toplevel_threshold = int_of_float(\n  float(inline_toplevel_multiplier) *. default_inline_threshold,\n)\nlet default_inline_call_cost = 5\nlet default_inline_alloc_cost = 7\nlet default_inline_prim_cost = 3\nlet default_inline_branch_cost = 5\nlet default_inline_indirect_cost = 4\nlet default_inline_branch_factor = 0.1\nlet default_inline_lifting_benefit = 1300\nlet default_inline_max_unroll = 0\nlet default_inline_max_depth = 1\n\nlet unbox_specialised_args = ref(true) /* -no-unbox-specialised-args */\nlet unbox_free_vars_of_closures = ref(true)\nlet unbox_closures = ref(false) /* -unbox-closures */\nlet default_unbox_closures_factor = 10\nlet unbox_closures_factor = ref(default_unbox_closures_factor) /* -unbox-closures-factor */\nlet remove_unused_arguments = ref(false) /* -remove-unused-arguments */\n\ntype inlining_arguments = {\n  inline_call_cost: option<int>,\n  inline_alloc_cost: option<int>,\n  inline_prim_cost: option<int>,\n  inline_branch_cost: option<int>,\n  inline_indirect_cost: option<int>,\n  inline_lifting_benefit: option<int>,\n  inline_branch_factor: option<float>,\n  inline_max_depth: option<int>,\n  inline_max_unroll: option<int>,\n  inline_threshold: option<float>,\n  inline_toplevel_threshold: option<int>,\n}\n\n/* o1 is the default */\nlet 
o1_arguments = {\n  inline_call_cost: None,\n  inline_alloc_cost: None,\n  inline_prim_cost: None,\n  inline_branch_cost: None,\n  inline_indirect_cost: None,\n  inline_lifting_benefit: None,\n  inline_branch_factor: None,\n  inline_max_depth: None,\n  inline_max_unroll: None,\n  inline_threshold: None,\n  inline_toplevel_threshold: None,\n}\n\nlet classic_arguments = {\n  inline_call_cost: None,\n  inline_alloc_cost: None,\n  inline_prim_cost: None,\n  inline_branch_cost: None,\n  inline_indirect_cost: None,\n  inline_lifting_benefit: None,\n  inline_branch_factor: None,\n  inline_max_depth: None,\n  inline_max_unroll: None,\n  /* [inline_threshold] matches the current compiler's default.\n     Note that this particular fraction can be expressed exactly in\n     floating point. */\n  inline_threshold: Some(10. /. 8.),\n  /* [inline_toplevel_threshold] is not used in classic mode. */\n  inline_toplevel_threshold: Some(1),\n}\n\nlet o2_arguments = {\n  inline_call_cost: Some(2 * default_inline_call_cost),\n  inline_alloc_cost: Some(2 * default_inline_alloc_cost),\n  inline_prim_cost: Some(2 * default_inline_prim_cost),\n  inline_branch_cost: Some(2 * default_inline_branch_cost),\n  inline_indirect_cost: Some(2 * default_inline_indirect_cost),\n  inline_lifting_benefit: None,\n  inline_branch_factor: None,\n  inline_max_depth: Some(2),\n  inline_max_unroll: None,\n  inline_threshold: Some(25.),\n  inline_toplevel_threshold: Some(25 * inline_toplevel_multiplier),\n}\n\nlet o3_arguments = {\n  inline_call_cost: Some(3 * default_inline_call_cost),\n  inline_alloc_cost: Some(3 * default_inline_alloc_cost),\n  inline_prim_cost: Some(3 * default_inline_prim_cost),\n  inline_branch_cost: Some(3 * default_inline_branch_cost),\n  inline_indirect_cost: Some(3 * default_inline_indirect_cost),\n  inline_lifting_benefit: None,\n  inline_branch_factor: Some(0.),\n  inline_max_depth: Some(3),\n  inline_max_unroll: Some(1),\n  inline_threshold: Some(50.),\n  
inline_toplevel_threshold: Some(50 * inline_toplevel_multiplier),\n}\n\nlet all_passes: ref<list<string>> = ref(list{})\nlet dumped_passes_list = ref(list{})\nlet dumped_pass = s => {\n  assert List.mem(s, all_passes.contents)\n  List.mem(s, dumped_passes_list.contents)\n}\n\nlet set_dumped_pass = (s, enabled) =>\n  if List.mem(s, all_passes.contents) {\n    let passes_without_s = List.filter(\\\"<>\"(s), dumped_passes_list.contents)\n    let dumped_passes = if enabled {\n      list{s, ...passes_without_s}\n    } else {\n      passes_without_s\n    }\n\n    dumped_passes_list := dumped_passes\n  }\n\nlet parse_color_setting = x =>\n  switch x {\n  | \"auto\" => Some(Misc.Color.Auto)\n  | \"always\" => Some(Misc.Color.Always)\n  | \"never\" => Some(Misc.Color.Never)\n  | _ => None\n  }\nlet color: ref<option<string>> = ref(None) /* -color */\n\nlet unboxed_types = ref(false)\n\nlet arg_spec = ref(list{})\nlet arg_names : ref<Misc.StringMap.t<int>> = ref(Misc.StringMap.empty)\n\nlet reset_arguments = () => {\n  arg_spec := list{}\n  arg_names := Misc.StringMap.empty\n}\n\nlet add_arguments = (loc, args) => List.iter(x =>\n    switch x {\n    | (arg_name, _, _) as arg =>\n      try {\n        let loc2 = Misc.StringMap.find(arg_name, arg_names.contents)\n        Printf.eprintf(\"Warning: plugin argument %s is already defined:\\n\", arg_name)\n        Printf.eprintf(\"   First definition: %s\\n\", loc2)\n        Printf.eprintf(\"   New definition: %s\\n\", loc)\n      } catch {\n      | Not_found =>\n        arg_spec := \\\"@\"(arg_spec.contents, list{arg})\n        arg_names := Misc.StringMap.add(arg_name, loc, arg_names.contents)\n      }\n    }\n  , args)\n\nlet print_arguments = usage => Arg.usage(arg_spec.contents, usage)\n\n/* This function is almost the same as [Arg.parse_expand], except\n   that [Arg.parse_expand] could not be used because it does not take a\n   reference for [arg_spec].*/\n@raises(exit)\nlet parse_arguments = (f, msg) =>\n  try {\n    let argv 
= ref(Sys.argv)\n    let current = ref(Arg.current.contents)\n    Arg.parse_and_expand_argv_dynamic(current, argv, arg_spec, f, msg)\n  } catch {\n  | Arg.Bad(msg) =>\n    Printf.eprintf(\"%s\", msg)\n    exit(2)\n  | Arg.Help(msg) =>\n    Printf.printf(\"%s\", msg)\n    exit(0)\n  }\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/config.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Sys from \"rescript/lib/es6/sys.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Printf from \"./printf.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Caml_sys from \"rescript/lib/es6/caml_sys.js\";\nimport * as Caml_string from \"rescript/lib/es6/caml_string.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nvar standard_library_default = \"/usr/local/lib/ocaml\";\n\nvar standard_library;\n\ntry {\n  standard_library = Caml_sys.caml_sys_getenv(\"OCAMLLIB\");\n}\ncatch (raw_exn){\n  var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n  if (exn.RE_EXN_ID === \"Not_found\") {\n    try {\n      standard_library = Caml_sys.caml_sys_getenv(\"CAMLLIB\");\n    }\n    catch (raw_exn$1){\n      var exn$1 = Caml_js_exceptions.internalToOCamlException(raw_exn$1);\n      if (exn$1.RE_EXN_ID === \"Not_found\") {\n        standard_library = standard_library_default;\n      } else {\n        throw exn$1;\n      }\n    }\n  } else {\n    throw exn;\n  }\n}\n\nvar standard_runtime = \"/usr/local/bin/ocamlrun\";\n\nvar ccomp_type = \"cc\";\n\nvar c_compiler = \"gcc\";\n\nvar ocamlc_cflags = \"-O2 -fno-strict-aliasing -fwrapv \";\n\nvar ocamlc_cppflags = \"-D_FILE_OFFSET_BITS=64 -D_REENTRANT\";\n\nvar ocamlopt_cflags = \"-O2 -fno-strict-aliasing -fwrapv\";\n\nvar ocamlopt_cppflags = \"-D_FILE_OFFSET_BITS=64 -D_REENTRANT\";\n\nvar bytecomp_c_libraries = \"-lpthread                  \";\n\nvar bytecomp_c_compiler = \"gcc -O2 -fno-strict-aliasing -fwrapv  -D_FILE_OFFSET_BITS=64 -D_REENTRANT\";\n\nvar native_c_compiler = \"gcc -O2 -fno-strict-aliasing -fwrapv -D_FILE_OFFSET_BITS=64 -D_REENTRANT\";\n\nvar native_c_libraries = \"\";\n\nvar native_pack_linker = \"ld -r -arch x86_64 -o \";\n\nvar ranlib = \"ranlib\";\n\nvar cc_profile = \"-pg\";\n\nvar match;\n\nif (Sys.os_type === \"Win32\") {\n  try {\n    
var flexlink = Caml_sys.caml_sys_getenv(\"OCAML_FLEXLINK\");\n    var f = function (i) {\n      var c = Caml_string.get(flexlink, i);\n      if (c === /* '/' */47) {\n        return /* '\\\\' */92;\n      } else {\n        return c;\n      }\n    };\n    var flexlink$1 = $$String.init(flexlink.length, f) + \" \";\n    match = [\n      flexlink$1,\n      flexlink$1 + \" -exe\",\n      flexlink$1 + \" -maindll\"\n    ];\n  }\n  catch (raw_exn$2){\n    var exn$2 = Caml_js_exceptions.internalToOCamlException(raw_exn$2);\n    if (exn$2.RE_EXN_ID === \"Not_found\") {\n      match = [\n        \"gcc -shared -flat_namespace -undefined suppress                    -Wl,-no_compact_unwind\",\n        \"gcc -O2 -fno-strict-aliasing -fwrapv -Wall -Werror -D_FILE_OFFSET_BITS=64 -D_REENTRANT -DCAML_NAME_SPACE   -Wl,-no_compact_unwind\",\n        \"gcc -shared -flat_namespace -undefined suppress                    -Wl,-no_compact_unwind\"\n      ];\n    } else {\n      throw exn$2;\n    }\n  }\n} else {\n  match = [\n    \"gcc -shared -flat_namespace -undefined suppress                    -Wl,-no_compact_unwind\",\n    \"gcc -O2 -fno-strict-aliasing -fwrapv -Wall -Werror -D_FILE_OFFSET_BITS=64 -D_REENTRANT -DCAML_NAME_SPACE   -Wl,-no_compact_unwind\",\n    \"gcc -shared -flat_namespace -undefined suppress                    -Wl,-no_compact_unwind\"\n  ];\n}\n\nvar exec_magic_number = \"Caml1999X011\";\n\nvar cmi_magic_number = \"Caml1999I022\";\n\nvar cmo_magic_number = \"Caml1999O022\";\n\nvar cma_magic_number = \"Caml1999A022\";\n\nvar cmx_magic_number = \"Caml1999Y022\";\n\nvar cmxa_magic_number = \"Caml1999Z022\";\n\nvar ast_impl_magic_number = \"Caml1999M022\";\n\nvar ast_intf_magic_number = \"Caml1999N022\";\n\nvar cmxs_magic_number = \"Caml1999D022\";\n\nvar cmt_magic_number = \"Caml1999T022\";\n\nvar load_path = {\n  contents: /* [] */0\n};\n\nvar interface_suffix = {\n  contents: \".mli\"\n};\n\nvar architecture = \"amd64\";\n\nvar model = \"default\";\n\nvar system = 
\"macosx\";\n\nvar asm = \"clang -arch x86_64 -Wno-trigraphs -c\";\n\nvar ext_exe = \"\";\n\nvar ext_obj = \".o\";\n\nvar ext_asm = \".s\";\n\nvar ext_lib = \".a\";\n\nvar ext_dll = \".so\";\n\nvar host = \"x86_64-apple-darwin21.4.0\";\n\nvar target = \"x86_64-apple-darwin21.4.0\";\n\nvar default_executable_name;\n\nswitch (Sys.os_type) {\n  case \"Unix\" :\n      default_executable_name = \"a.out\";\n      break;\n  case \"Cygwin\" :\n  case \"Win32\" :\n      default_executable_name = \"camlprog.exe\";\n      break;\n  default:\n    default_executable_name = \"camlprog\";\n}\n\nfunction print_config(oc) {\n  var p = function (name, valu) {\n    return Curry._3(Printf.fprintf(oc), \"%s: %s\\n\", name, valu);\n  };\n  var p_int = function (name, valu) {\n    return Curry._3(Printf.fprintf(oc), \"%s: %d\\n\", name, valu);\n  };\n  var p_bool = function (name, valu) {\n    return Curry._3(Printf.fprintf(oc), \"%s: %B\\n\", name, valu);\n  };\n  p(\"version\", Sys.ocaml_version);\n  p(\"standard_library_default\", standard_library_default);\n  p(\"standard_library\", standard_library);\n  p(\"standard_runtime\", standard_runtime);\n  p(\"ccomp_type\", ccomp_type);\n  p(\"c_compiler\", c_compiler);\n  p(\"ocamlc_cflags\", ocamlc_cflags);\n  p(\"ocamlc_cppflags\", ocamlc_cppflags);\n  p(\"ocamlopt_cflags\", ocamlopt_cflags);\n  p(\"ocamlopt_cppflags\", ocamlopt_cppflags);\n  p(\"bytecomp_c_compiler\", bytecomp_c_compiler);\n  p(\"native_c_compiler\", native_c_compiler);\n  p(\"bytecomp_c_libraries\", bytecomp_c_libraries);\n  p(\"native_c_libraries\", native_c_libraries);\n  p(\"native_pack_linker\", native_pack_linker);\n  p(\"ranlib\", ranlib);\n  p(\"cc_profile\", cc_profile);\n  p(\"architecture\", architecture);\n  p(\"model\", model);\n  p_int(\"int_size\", Sys.int_size);\n  p_int(\"word_size\", Sys.word_size);\n  p(\"system\", system);\n  p(\"asm\", asm);\n  p_bool(\"asm_cfi_supported\", true);\n  p_bool(\"with_frame_pointers\", false);\n  p(\"ext_exe\", 
ext_exe);\n  p(\"ext_obj\", ext_obj);\n  p(\"ext_asm\", ext_asm);\n  p(\"ext_lib\", ext_lib);\n  p(\"ext_dll\", ext_dll);\n  p(\"os_type\", Sys.os_type);\n  p(\"default_executable_name\", default_executable_name);\n  p_bool(\"systhread_supported\", true);\n  p(\"host\", host);\n  p(\"target\", target);\n  p_bool(\"profiling\", true);\n  p_bool(\"flambda\", false);\n  p_bool(\"spacetime\", false);\n  p_bool(\"safe_string\", false);\n  p_bool(\"default_safe_string\", true);\n  p_bool(\"flat_float_array\", true);\n  p_bool(\"afl_instrument\", false);\n  p_bool(\"windows_unicode\", false);\n  p(\"exec_magic_number\", exec_magic_number);\n  p(\"cmi_magic_number\", cmi_magic_number);\n  p(\"cmo_magic_number\", cmo_magic_number);\n  p(\"cma_magic_number\", cma_magic_number);\n  p(\"cmx_magic_number\", cmx_magic_number);\n  p(\"cmxa_magic_number\", cmxa_magic_number);\n  p(\"ast_impl_magic_number\", ast_impl_magic_number);\n  p(\"ast_intf_magic_number\", ast_intf_magic_number);\n  p(\"cmxs_magic_number\", cmxs_magic_number);\n  return p(\"cmt_magic_number\", cmt_magic_number);\n}\n\nvar version = Sys.ocaml_version;\n\nvar c_output_obj = \"-o \";\n\nvar ar = \"ar\";\n\nvar mkdll = match[0];\n\nvar mkexe = match[1];\n\nvar mkmaindll = match[2];\n\nvar profiling = true;\n\nvar flambda = false;\n\nvar safe_string = false;\n\nvar default_safe_string = true;\n\nvar windows_unicode = false;\n\nvar flat_float_array = true;\n\nvar afl_instrument = false;\n\nvar max_tag = 245;\n\nvar lazy_tag = 246;\n\nvar max_young_wosize = 256;\n\nvar stack_threshold = 256;\n\nvar stack_safety_margin = 60;\n\nvar asm_cfi_supported = true;\n\nvar with_frame_pointers = false;\n\nvar spacetime = false;\n\nvar enable_call_counts = true;\n\nvar libunwind_available = false;\n\nvar libunwind_link_flags = \"\";\n\nvar profinfo = false;\n\nvar profinfo_width = 0;\n\nvar systhread_supported = true;\n\nvar flexdll_dirs = /* [] */0;\n\nexport {\n  version ,\n  standard_library_default ,\n  standard_library 
,\n  standard_runtime ,\n  ccomp_type ,\n  c_compiler ,\n  c_output_obj ,\n  ocamlc_cflags ,\n  ocamlc_cppflags ,\n  ocamlopt_cflags ,\n  ocamlopt_cppflags ,\n  bytecomp_c_libraries ,\n  bytecomp_c_compiler ,\n  native_c_compiler ,\n  native_c_libraries ,\n  native_pack_linker ,\n  ranlib ,\n  ar ,\n  cc_profile ,\n  mkdll ,\n  mkexe ,\n  mkmaindll ,\n  profiling ,\n  flambda ,\n  safe_string ,\n  default_safe_string ,\n  windows_unicode ,\n  flat_float_array ,\n  afl_instrument ,\n  exec_magic_number ,\n  cmi_magic_number ,\n  cmo_magic_number ,\n  cma_magic_number ,\n  cmx_magic_number ,\n  cmxa_magic_number ,\n  ast_impl_magic_number ,\n  ast_intf_magic_number ,\n  cmxs_magic_number ,\n  cmt_magic_number ,\n  load_path ,\n  interface_suffix ,\n  max_tag ,\n  lazy_tag ,\n  max_young_wosize ,\n  stack_threshold ,\n  stack_safety_margin ,\n  architecture ,\n  model ,\n  system ,\n  asm ,\n  asm_cfi_supported ,\n  with_frame_pointers ,\n  spacetime ,\n  enable_call_counts ,\n  libunwind_available ,\n  libunwind_link_flags ,\n  profinfo ,\n  profinfo_width ,\n  ext_exe ,\n  ext_obj ,\n  ext_asm ,\n  ext_lib ,\n  ext_dll ,\n  host ,\n  target ,\n  default_executable_name ,\n  systhread_supported ,\n  flexdll_dirs ,\n  print_config ,\n  \n}\n/* standard_library Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/config.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Xavier Leroy, projet Cristal, INRIA Rocquencourt */\n/*  */\n/* Copyright 1996 Institut National de Recherche en Informatique et */\n/* en Automatique. */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\n/* The main OCaml version string has moved to ../VERSION */\nlet version = Sys.ocaml_version\n\nlet standard_library_default = \"/usr/local/lib/ocaml\"\n\nlet standard_library = try Sys.getenv(\"OCAMLLIB\") catch {\n| Not_found =>\n  try Sys.getenv(\"CAMLLIB\") catch {\n  | Not_found => standard_library_default\n  }\n}\n\nlet standard_runtime = \"/usr/local/bin/ocamlrun\"\nlet ccomp_type = \"cc\"\nlet c_compiler = \"gcc\"\nlet c_output_obj = \"-o \"\nlet ocamlc_cflags = \"-O2 -fno-strict-aliasing -fwrapv \"\nlet ocamlc_cppflags = \"-D_FILE_OFFSET_BITS=64 -D_REENTRANT\"\nlet ocamlopt_cflags = \"-O2 -fno-strict-aliasing -fwrapv\"\nlet ocamlopt_cppflags = \"-D_FILE_OFFSET_BITS=64 -D_REENTRANT\"\nlet bytecomp_c_libraries = \"-lpthread                  \"\n/* bytecomp_c_compiler and native_c_compiler have been supported for a\n   long time and are retained for backwards compatibility.\n   For programs that don't need compatibility with older OCaml releases\n   the recommended approach is to use the constituent variables\n   c_compiler, ocamlc_cflags, ocamlc_cppflags etc., directly.\n*/\nlet bytecomp_c_compiler = c_compiler ++ (\" \" ++ (ocamlc_cflags ++ (\" \" ++ ocamlc_cppflags)))\nlet native_c_compiler = c_compiler ++ (\" \" ++ (ocamlopt_cflags ++ (\" \" ++ ocamlopt_cppflags)))\nlet native_c_libraries = \"\"\nlet native_pack_linker = \"ld -r -arch x86_64 -o\\ \"\nlet ranlib = 
\"ranlib\"\nlet ar = \"ar\"\nlet cc_profile = \"-pg\"\nlet (mkdll, mkexe, mkmaindll) = /* @@DRA Cygwin - but only if shared libraries are enabled, which we\n should be able to detect? */\nif Sys.os_type == \"Win32\" {\n  try {\n    @raises(Invalid_argument)\n    let flexlink = {\n      let flexlink = Sys.getenv(\"OCAML_FLEXLINK\")\n\n      @raises(Invalid_argument)\n      let f = i => {\n        let c = String.get(flexlink, i)\n        if c == '/' {\n          '\\\\'\n        } else {\n          c\n        }\n      }\n      String.init(String.length(flexlink), f) ++ \" \"\n    }\n    (flexlink, flexlink ++ \" -exe\", flexlink ++ \" -maindll\")\n  } catch {\n  | Not_found => (\n      \"gcc -shared -flat_namespace -undefined suppress                    -Wl,-no_compact_unwind\",\n      \"gcc -O2 -fno-strict-aliasing -fwrapv -Wall -Werror -D_FILE_OFFSET_BITS=64 -D_REENTRANT -DCAML_NAME_SPACE   -Wl,-no_compact_unwind\",\n      \"gcc -shared -flat_namespace -undefined suppress                    -Wl,-no_compact_unwind\",\n    )\n  }\n} else {\n  (\n    \"gcc -shared -flat_namespace -undefined suppress                    -Wl,-no_compact_unwind\",\n    \"gcc -O2 -fno-strict-aliasing -fwrapv -Wall -Werror -D_FILE_OFFSET_BITS=64 -D_REENTRANT -DCAML_NAME_SPACE   -Wl,-no_compact_unwind\",\n    \"gcc -shared -flat_namespace -undefined suppress                    -Wl,-no_compact_unwind\",\n  )\n}\n\nlet profiling = true\nlet flambda = false\nlet safe_string = false\nlet default_safe_string = true\nlet windows_unicode = 0 !== 0\n\nlet flat_float_array = true\n\nlet afl_instrument = false\n\nlet exec_magic_number = \"Caml1999X011\"\nand cmi_magic_number = \"Caml1999I022\"\nand cmo_magic_number = \"Caml1999O022\"\nand cma_magic_number = \"Caml1999A022\"\nand cmx_magic_number = if flambda {\n  \"Caml1999y022\"\n} else {\n  \"Caml1999Y022\"\n}\nand cmxa_magic_number = if flambda {\n  \"Caml1999z022\"\n} else {\n  \"Caml1999Z022\"\n}\nand ast_impl_magic_number = \"Caml1999M022\"\nand 
ast_intf_magic_number = \"Caml1999N022\"\nand cmxs_magic_number = \"Caml1999D022\"\n/* cmxs_magic_number is duplicated in otherlibs/dynlink/natdynlink.ml */\nand cmt_magic_number = \"Caml1999T022\"\n\nlet load_path = ref((list{}: list<string>))\n\nlet interface_suffix = ref(\".mli\")\n\nlet max_tag = 245\n/* This is normally the same as in obj.ml, but we have to define it\n   separately because it can differ when we're in the middle of a\n   bootstrapping phase. */\nlet lazy_tag = 246\n\nlet max_young_wosize = 256\nlet stack_threshold = 256 /* see byterun/config.h */\nlet stack_safety_margin = 60\n\nlet architecture = \"amd64\"\nlet model = \"default\"\nlet system = \"macosx\"\n\nlet asm = \"clang -arch x86_64 -Wno-trigraphs -c\"\nlet asm_cfi_supported = true\nlet with_frame_pointers = false\nlet spacetime = false\nlet enable_call_counts = true\nlet libunwind_available = false\nlet libunwind_link_flags = \"\"\nlet profinfo = false\nlet profinfo_width = 0\n\nlet ext_exe = \"\"\nlet ext_obj = \".o\"\nlet ext_asm = \".s\"\nlet ext_lib = \".a\"\nlet ext_dll = \".so\"\n\nlet host = \"x86_64-apple-darwin21.4.0\"\nlet target = \"x86_64-apple-darwin21.4.0\"\n\nlet default_executable_name = switch Sys.os_type {\n| \"Unix\" => \"a.out\"\n| \"Win32\" | \"Cygwin\" => \"camlprog.exe\"\n| _ => \"camlprog\"\n}\n\nlet systhread_supported = true\n\nlet flexdll_dirs = list{}\n\nlet print_config = oc => {\n  let p = (name, valu) => Printf.fprintf(oc, \"%s: %s\\n\", name, valu)\n  let p_int = (name, valu) => Printf.fprintf(oc, \"%s: %d\\n\", name, valu)\n  let p_bool = (name, valu) => Printf.fprintf(oc, \"%s: %B\\n\", name, valu)\n  p(\"version\", version)\n  p(\"standard_library_default\", standard_library_default)\n  p(\"standard_library\", standard_library)\n  p(\"standard_runtime\", standard_runtime)\n  p(\"ccomp_type\", ccomp_type)\n  p(\"c_compiler\", c_compiler)\n  p(\"ocamlc_cflags\", ocamlc_cflags)\n  p(\"ocamlc_cppflags\", ocamlc_cppflags)\n  p(\"ocamlopt_cflags\", 
ocamlopt_cflags)\n  p(\"ocamlopt_cppflags\", ocamlopt_cppflags)\n  p(\"bytecomp_c_compiler\", bytecomp_c_compiler)\n  p(\"native_c_compiler\", native_c_compiler)\n  p(\"bytecomp_c_libraries\", bytecomp_c_libraries)\n  p(\"native_c_libraries\", native_c_libraries)\n  p(\"native_pack_linker\", native_pack_linker)\n  p(\"ranlib\", ranlib)\n  p(\"cc_profile\", cc_profile)\n  p(\"architecture\", architecture)\n  p(\"model\", model)\n  p_int(\"int_size\", Sys.int_size)\n  p_int(\"word_size\", Sys.word_size)\n  p(\"system\", system)\n  p(\"asm\", asm)\n  p_bool(\"asm_cfi_supported\", asm_cfi_supported)\n  p_bool(\"with_frame_pointers\", with_frame_pointers)\n  p(\"ext_exe\", ext_exe)\n  p(\"ext_obj\", ext_obj)\n  p(\"ext_asm\", ext_asm)\n  p(\"ext_lib\", ext_lib)\n  p(\"ext_dll\", ext_dll)\n  p(\"os_type\", Sys.os_type)\n  p(\"default_executable_name\", default_executable_name)\n  p_bool(\"systhread_supported\", systhread_supported)\n  p(\"host\", host)\n  p(\"target\", target)\n  p_bool(\"profiling\", profiling)\n  p_bool(\"flambda\", flambda)\n  p_bool(\"spacetime\", spacetime)\n  p_bool(\"safe_string\", safe_string)\n  p_bool(\"default_safe_string\", default_safe_string)\n  p_bool(\"flat_float_array\", flat_float_array)\n  p_bool(\"afl_instrument\", afl_instrument)\n  p_bool(\"windows_unicode\", windows_unicode)\n\n  /* print the magic number */\n  p(\"exec_magic_number\", exec_magic_number)\n  p(\"cmi_magic_number\", cmi_magic_number)\n  p(\"cmo_magic_number\", cmo_magic_number)\n  p(\"cma_magic_number\", cma_magic_number)\n  p(\"cmx_magic_number\", cmx_magic_number)\n  p(\"cmxa_magic_number\", cmxa_magic_number)\n  p(\"ast_impl_magic_number\", ast_impl_magic_number)\n  p(\"ast_intf_magic_number\", ast_intf_magic_number)\n  p(\"cmxs_magic_number\", cmxs_magic_number)\n  p(\"cmt_magic_number\", cmt_magic_number)\n\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/Arr.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_Array from \"rescript/lib/es6/belt_Array.js\";\n\nfunction ff(a) {\n  Belt_Array.get(a, 3);\n  return 11;\n}\n\nvar MM = {\n  ff: ff\n};\n\nvar B;\n\nvar $$Array;\n\nexport {\n  B ,\n  $$Array ,\n  MM ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/Arr.res",
    "content": "module B = Belt\nmodule Array = B.Array\n\nmodule MM = {\n  let ff = a =>\n    switch a[3] {\n    | _ => 11\n    }\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/BeltTest.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_Map from \"rescript/lib/es6/belt_Map.js\";\nimport * as Belt_List from \"rescript/lib/es6/belt_List.js\";\nimport * as Belt_MapInt from \"rescript/lib/es6/belt_MapInt.js\";\nimport * as Belt_MapString from \"rescript/lib/es6/belt_MapString.js\";\n\nvar lstHead1 = Belt_List.headExn;\n\nvar lstHead2 = Belt_List.headExn;\n\nvar mapGetExn1 = Belt_MapInt.getExn;\n\nvar mapGetExn2 = Belt_MapInt.getExn;\n\nvar mapGetExn3 = Belt_MapInt.getExn;\n\nvar mapGetExn4 = Belt_MapString.getExn;\n\nvar mapGetExn5 = Belt_MapString.getExn;\n\nvar mapGetExn6 = Belt_MapString.getExn;\n\nvar mapGetExn7 = Belt_Map.getExn;\n\nvar mapGetExn8 = Belt_Map.getExn;\n\nexport {\n  lstHead1 ,\n  lstHead2 ,\n  mapGetExn1 ,\n  mapGetExn2 ,\n  mapGetExn3 ,\n  mapGetExn4 ,\n  mapGetExn5 ,\n  mapGetExn6 ,\n  mapGetExn7 ,\n  mapGetExn8 ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/BeltTest.res",
    "content": "open Belt.List\n\n@raises(Not_found)\nlet lstHead1 = l => l->Belt.List.headExn\n\n@raises(Not_found)\nlet lstHead2 = l => l->Belt_List.headExn\n\n@raises(Not_found)\nlet mapGetExn1 = (s, k) => s->Belt.Map.Int.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn2 = (s, k) => s->Belt_Map.Int.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn3 = (s, k) => s->Belt_MapInt.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn4 = (s, k) => s->Belt.Map.String.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn5 = (s, k) => s->Belt_Map.String.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn6 = (s, k) => s->Belt_MapString.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn7 = (s, k) => s->Belt.Map.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn8 = (s, k) => s->Belt_Map.getExn(k)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/BsJson.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Json_decode from \"@glennsl/bs-json/src/Json_decode.js\";\n\nvar testBsJson = Json_decode.string;\n\nvar testBsJson2 = Json_decode.string;\n\nexport {\n  testBsJson ,\n  testBsJson2 ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/BsJson.res",
    "content": "@raise(DecodeError)\nlet testBsJson = x => Json_decode.string(x)\n\n@raise(DecodeError)\nlet testBsJson2 = x => Json.Decode.string(x)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/Exn.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/Exn.res",
    "content": ""
  },
  {
    "path": "analysis/examples/larger-project/src/exception/ExnA.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/ExnA.res",
    "content": ""
  },
  {
    "path": "analysis/examples/larger-project/src/exception/ExnB.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction foo(param) {\n  throw {\n        RE_EXN_ID: \"Not_found\",\n        Error: new Error()\n      };\n}\n\nexport {\n  foo ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/ExnB.res",
    "content": "@raises(Not_found)\nlet foo = () => raise(Not_found)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/ExportWithRename.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction ExportWithRename(Props) {\n  return Props.s;\n}\n\nvar make = ExportWithRename;\n\nexport {\n  make ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/ExportWithRename.res",
    "content": "@genType(\"ExportWithRename\") @react.component\nlet make = (~s) => React.string(s)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/InnerModules.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\n\nvar wrapExitTop = Pervasives.exit;\n\nvar wrapExitM1 = Pervasives.exit;\n\nvar callLocally = Pervasives.exit;\n\nvar callTop = Pervasives.exit;\n\nvar wrapExitM2 = Pervasives.exit;\n\nvar callM1 = Pervasives.exit;\n\nvar callTop$1 = Pervasives.exit;\n\nvar M2 = {\n  wrapExitM2: wrapExitM2,\n  callM1: callM1,\n  callTop: callTop$1\n};\n\nvar M1 = {\n  wrapExitM1: wrapExitM1,\n  callLocally: callLocally,\n  callTop: callTop,\n  M2: M2\n};\n\nvar callM1$1 = Pervasives.exit;\n\nexport {\n  wrapExitTop ,\n  M1 ,\n  callM1$1 as callM1,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/InnerModules.res",
    "content": "@raises(exit)\nlet wrapExitTop = x => exit(x)\n\nmodule M1 = {\n  @raises(exit)\n  let wrapExitM1 = x => exit(x)\n\n  @raises(exit)\n  let callLocally = x => wrapExitM1(x)\n\n  @raises(exit)\n  let callTop = x => wrapExitTop(x)\n\n  module M2 = {\n    @raises(exit)\n    let wrapExitM2 = x => exit(x)\n\n    @raises(exit)\n    let callM1 = x => wrapExitM1(x)\n\n    @raises(exit)\n    let callTop = x => wrapExitTop(x)\n  }\n}\n\n@raises(exit)\nlet callM1 = x => M1.wrapExitM1(x)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/TestInnerModules.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as InnerModules from \"./InnerModules.js\";\n\nvar testTop = InnerModules.wrapExitTop;\n\nfunction testM1(x) {\n  return InnerModules.M1.wrapExitM1(x);\n}\n\nfunction testM2(x) {\n  return Curry._1(InnerModules.M1.M2.wrapExitM2, x);\n}\n\nexport {\n  testTop ,\n  testM1 ,\n  testM2 ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/TestInnerModules.res",
    "content": "@raises(exit)\nlet testTop = x => InnerModules.wrapExitTop(x)\n\n@raises(exit)\nlet testM1 = x => InnerModules.M1.wrapExitM1(x)\n\n@raises(exit)\nlet testM2 = x => InnerModules.M1.M2.wrapExitM2(x)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/TestYojson.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Yojson from \"./Yojson.js\";\nimport * as Caml_obj from \"rescript/lib/es6/caml_obj.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nfunction foo(x) {\n  return Yojson.Basic.from_string(x);\n}\n\nfunction bar(str, json) {\n  try {\n    return Curry._2(Yojson.Basic.Util.member, str, json);\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === Yojson.Basic.Util.Type_error) {\n      if (exn._1 === \"a\") {\n        if (Caml_obj.caml_equal(exn._2, json)) {\n          return json;\n        }\n        throw exn;\n      }\n      throw exn;\n    }\n    throw exn;\n  }\n}\n\nfunction toString(x) {\n  return Curry._1(Yojson.Basic.Util.to_string, x);\n}\n\nfunction toInt(x) {\n  return Curry._1(Yojson.Basic.Util.to_int, x);\n}\n\nexport {\n  foo ,\n  bar ,\n  toString ,\n  toInt ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/TestYojson.res",
    "content": "@raises(Yojson.Json_error)\nlet foo = x => Yojson.Basic.from_string(x)\n\nlet bar = (str, json) =>\n  switch {\n    open Yojson.Basic.Util\n    json |> member(str)\n  } {\n  | j => j\n  | exception Yojson.Basic.Util.Type_error(\"a\", d) if d == json => json\n  }\n\n@raises(Yojson.Basic.Util.Type_error)\nlet toString = x => Yojson.Basic.Util.to_string(x)\n\n@raises(Yojson.Basic.Util.Type_error)\nlet toInt = x => Yojson.Basic.Util.to_int(x)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/Yojson.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nvar Json_error = /* @__PURE__ */Caml_exceptions.create(\"Yojson.Json_error\");\n\nfunction from_string(param) {\n  throw {\n        RE_EXN_ID: Json_error,\n        _1: \"Basic.from_string\",\n        Error: new Error()\n      };\n}\n\nvar Type_error = /* @__PURE__ */Caml_exceptions.create(\"Yojson.Basic.Util.Type_error\");\n\nfunction member(_s, j) {\n  throw {\n        RE_EXN_ID: Type_error,\n        _1: \"Basic.Util.member\",\n        _2: j,\n        Error: new Error()\n      };\n}\n\nfunction to_int(param) {\n  return 34;\n}\n\nfunction to_string(param) {\n  return \"\";\n}\n\nvar Util = {\n  Type_error: Type_error,\n  member: member,\n  to_int: to_int,\n  to_string: to_string\n};\n\nvar Basic = {\n  from_string: from_string,\n  Util: Util\n};\n\nexport {\n  Json_error ,\n  Basic ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/exception/Yojson.res",
    "content": "exception Json_error(string)\n\nmodule Basic = {\n  type t\n\n  @raises(Json_error)\n  let from_string: string => t = _ => raise(Json_error(\"Basic.from_string\"))\n\n  module Util = {\n    exception Type_error(string, t)\n\n    @raises(Type_error)\n    let member: (string, t) => t = (_s, j) => raise(Type_error(\"Basic.Util.member\", j))\n\n    let to_int: t => int = _ => 34\n\n    let to_string: t => string = _ => \"\"\n  }\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/format.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nthrow {\n      RE_EXN_ID: \"Assert_failure\",\n      _1: [\n        \"format.res\",\n        3,\n        20\n      ],\n      Error: new Error()\n    };\n\nexport {\n  std_formatter ,\n  err_formatter ,\n  str_formatter ,\n  fprintf ,\n  sprintf ,\n  kasprintf ,\n  asprintf ,\n  kfprintf ,\n  set_mark_tags ,\n  formatter_of_buffer ,\n  pp_print_flush ,\n  pp_print_as ,\n  \n}\n/* std_formatter Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/format.res",
    "content": "type formatter\n\nlet std_formatter = assert false\nlet err_formatter = assert false\nlet str_formatter = assert false\n\nlet fprintf = _ => assert false\n\nlet sprintf = _ => assert false\n\nlet kasprintf = _ => assert false\n\nlet asprintf = _ => assert false\n\nlet kfprintf = _ => assert false\n\nlet set_mark_tags = _ => assert false\n\nlet formatter_of_buffer = _ => assert false\n\nlet pp_print_flush = _ => assert false\n\nlet pp_print_as = _ => assert false"
  },
  {
    "path": "analysis/examples/larger-project/src/identifiable.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as $$Map from \"rescript/lib/es6/map.js\";\nimport * as $$Set from \"rescript/lib/es6/set.js\";\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Misc from \"./misc.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Format from \"./format.js\";\nimport * as Printf from \"./printf.js\";\nimport * as Hashtbl from \"rescript/lib/es6/hashtbl.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nfunction Pair(A, B) {\n  var compare = function (param, param$1) {\n    var c = Curry._2(A.compare, param[0], param$1[0]);\n    if (c !== 0) {\n      return c;\n    } else {\n      return Curry._2(B.compare, param[1], param$1[1]);\n    }\n  };\n  var output = function (oc, param) {\n    return Curry._5(Printf.fprintf(oc), \" (%a, %a)\", A.output, param[0], B.output, param[1]);\n  };\n  var hash = function (param) {\n    return Hashtbl.hash([\n                Curry._1(A.hash, param[0]),\n                Curry._1(B.hash, param[1])\n              ]);\n  };\n  var equal = function (param, param$1) {\n    if (Curry._2(A.equal, param[0], param$1[0])) {\n      return Curry._2(B.equal, param[1], param$1[1]);\n    } else {\n      return false;\n    }\n  };\n  var print = function (ppf, param) {\n    return Curry._5(Format.fprintf(ppf), \" (%a, @ %a)\", A.print, param[0], B.print, param[1]);\n  };\n  return {\n          equal: equal,\n          hash: hash,\n          compare: compare,\n          output: output,\n          print: print\n        };\n}\n\nfunction Make_map(T) {\n  var include = $$Map.Make({\n        compare: T.compare\n      });\n  var empty = include.empty;\n  var add = include.add;\n  var merge = include.merge;\n  var union = include.union;\n  var iter = include.iter;\n  var fold = include.fold;\n  var bindings = include.bindings;\n  var find = 
include.find;\n  var filter_map = function (t, f) {\n    return Curry._3(fold, (function (id, v, map) {\n                  var r = Curry._2(f, id, v);\n                  if (r !== undefined) {\n                    return Curry._3(add, id, Caml_option.valFromOption(r), map);\n                  } else {\n                    return map;\n                  }\n                }), t, empty);\n  };\n  var of_list = function (l) {\n    return List.fold_left((function (map, param) {\n                  return Curry._3(add, param[0], param[1], map);\n                }), empty, l);\n  };\n  var disjoint_union = function (eq, print, m1, m2) {\n    return Curry._3(union, (function (id, v1, v2) {\n                  var ok = eq !== undefined ? Curry._2(eq, v1, v2) : false;\n                  if (ok) {\n                    return Caml_option.some(v1);\n                  }\n                  var tmp;\n                  if (print !== undefined) {\n                    var print$1 = Caml_option.valFromOption(print);\n                    tmp = Curry._6(Format.asprintf(\"Map.disjoint_union %a => %a <> %a\"), T.print, id, print$1, v1, print$1, v2);\n                  } else {\n                    tmp = Curry._2(Format.asprintf(\"Map.disjoint_union %a\"), T.print, id);\n                  }\n                  return Misc.fatal_error(tmp);\n                }), m1, m2);\n  };\n  var union_right = function (m1, m2) {\n    return Curry._3(merge, (function (_id, x, y) {\n                  if (x !== undefined) {\n                    if (y !== undefined) {\n                      return Caml_option.some(Caml_option.valFromOption(y));\n                    } else {\n                      return Caml_option.some(Caml_option.valFromOption(x));\n                    }\n                  } else if (y !== undefined) {\n                    return Caml_option.some(Caml_option.valFromOption(y));\n                  } else {\n                    return ;\n                  }\n                }), m1, m2);\n  
};\n  var union_left = function (m1, m2) {\n    return union_right(m2, m1);\n  };\n  var union_merge = function (f, m1, m2) {\n    var aux = function (param, m1, m2) {\n      if (m1 !== undefined) {\n        if (m2 !== undefined) {\n          return Caml_option.some(Curry._2(f, Caml_option.valFromOption(m1), Caml_option.valFromOption(m2)));\n        } else {\n          return m1;\n        }\n      } else {\n        return m2;\n      }\n    };\n    return Curry._3(merge, aux, m1, m2);\n  };\n  var rename = function (m, v) {\n    try {\n      return Curry._2(find, v, m);\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === \"Not_found\") {\n        return v;\n      }\n      throw exn;\n    }\n  };\n  var map_keys = function (f, m) {\n    return of_list(List.map((function (param) {\n                      return [\n                              Curry._1(f, param[0]),\n                              param[1]\n                            ];\n                    }), Curry._1(bindings, m)));\n  };\n  var print = function (f, ppf, s) {\n    var elts = function (ppf, s) {\n      return Curry._2(iter, (function (id, v) {\n                    return Curry._5(Format.fprintf(ppf), \"@ (@[%a@ %a@])\", T.print, id, f, v);\n                  }), s);\n    };\n    return Curry._3(Format.fprintf(ppf), \"@[<1>{@[%a@ @]}@]\", elts, s);\n  };\n  var T_set = $$Set.Make({\n        compare: T.compare\n      });\n  var keys = function (map) {\n    return Curry._3(fold, (function (k, param, set) {\n                  return Curry._2(T_set.add, k, set);\n                }), map, T_set.empty);\n  };\n  var data = function (t) {\n    return List.map((function (prim) {\n                  return prim[1];\n                }), Curry._1(bindings, t));\n  };\n  var of_set = function (f, set) {\n    return Curry._3(T_set.fold, (function (e, map) {\n                  return Curry._3(add, e, Curry._1(f, e), map);\n                
}), set, empty);\n  };\n  var transpose_keys_and_data = function (map) {\n    return Curry._3(fold, (function (k, v, m) {\n                  return Curry._3(add, v, k, m);\n                }), map, empty);\n  };\n  var transpose_keys_and_data_set = function (map) {\n    return Curry._3(fold, (function (k, v, m) {\n                  var set;\n                  var exit = 0;\n                  var set$1;\n                  try {\n                    set$1 = Curry._2(find, v, m);\n                    exit = 1;\n                  }\n                  catch (raw_exn){\n                    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n                    if (exn.RE_EXN_ID === \"Not_found\") {\n                      set = Curry._1(T_set.singleton, k);\n                    } else {\n                      throw exn;\n                    }\n                  }\n                  if (exit === 1) {\n                    set = Curry._2(T_set.add, k, set$1);\n                  }\n                  return Curry._3(add, v, set, m);\n                }), map, empty);\n  };\n  return {\n          empty: empty,\n          is_empty: include.is_empty,\n          mem: include.mem,\n          add: add,\n          update: include.update,\n          singleton: include.singleton,\n          remove: include.remove,\n          merge: merge,\n          union: union,\n          compare: include.compare,\n          equal: include.equal,\n          iter: iter,\n          fold: fold,\n          for_all: include.for_all,\n          exists: include.exists,\n          filter: include.filter,\n          partition: include.partition,\n          cardinal: include.cardinal,\n          bindings: bindings,\n          min_binding: include.min_binding,\n          min_binding_opt: include.min_binding_opt,\n          max_binding: include.max_binding,\n          max_binding_opt: include.max_binding_opt,\n          choose: include.choose,\n          choose_opt: include.choose_opt,\n          
split: include.split,\n          find: find,\n          find_opt: include.find_opt,\n          find_first: include.find_first,\n          find_first_opt: include.find_first_opt,\n          find_last: include.find_last,\n          find_last_opt: include.find_last_opt,\n          map: include.map,\n          mapi: include.mapi,\n          filter_map: filter_map,\n          of_list: of_list,\n          disjoint_union: disjoint_union,\n          union_right: union_right,\n          union_left: union_left,\n          union_merge: union_merge,\n          rename: rename,\n          map_keys: map_keys,\n          print: print,\n          T_set: T_set,\n          keys: keys,\n          data: data,\n          of_set: of_set,\n          transpose_keys_and_data: transpose_keys_and_data,\n          transpose_keys_and_data_set: transpose_keys_and_data_set\n        };\n}\n\nfunction Make_set(T) {\n  var include = $$Set.Make({\n        compare: T.compare\n      });\n  var empty = include.empty;\n  var add = include.add;\n  var singleton = include.singleton;\n  var iter = include.iter;\n  var elements = include.elements;\n  var output = function (oc, s) {\n    Curry._1(Printf.fprintf(oc), \" ( \");\n    Curry._2(iter, (function (v) {\n            return Curry._3(Printf.fprintf(oc), \"%a \", T.output, v);\n          }), s);\n    return Curry._1(Printf.fprintf(oc), \")\");\n  };\n  var print = function (ppf, s) {\n    var elts = function (ppf, s) {\n      return Curry._2(iter, (function (e) {\n                    return Curry._3(Format.fprintf(ppf), \"@ %a\", T.print, e);\n                  }), s);\n    };\n    return Curry._3(Format.fprintf(ppf), \"@[<1>{@[%a@ @]}@]\", elts, s);\n  };\n  var to_string = function (s) {\n    return Curry._2(Format.asprintf(\"%a\"), print, s);\n  };\n  var of_list = function (l) {\n    if (!l) {\n      return empty;\n    }\n    var q = l.tl;\n    var t = l.hd;\n    if (q) {\n      return List.fold_left((function (acc, e) {\n                    return 
Curry._2(add, e, acc);\n                  }), Curry._1(singleton, t), q);\n    } else {\n      return Curry._1(singleton, t);\n    }\n  };\n  var map = function (f, s) {\n    return of_list(List.map(f, Curry._1(elements, s)));\n  };\n  return {\n          empty: empty,\n          is_empty: include.is_empty,\n          mem: include.mem,\n          add: add,\n          singleton: singleton,\n          remove: include.remove,\n          union: include.union,\n          inter: include.inter,\n          diff: include.diff,\n          compare: include.compare,\n          equal: include.equal,\n          subset: include.subset,\n          iter: iter,\n          fold: include.fold,\n          for_all: include.for_all,\n          exists: include.exists,\n          filter: include.filter,\n          partition: include.partition,\n          cardinal: include.cardinal,\n          elements: elements,\n          min_elt: include.min_elt,\n          min_elt_opt: include.min_elt_opt,\n          max_elt: include.max_elt,\n          max_elt_opt: include.max_elt_opt,\n          choose: include.choose,\n          choose_opt: include.choose_opt,\n          split: include.split,\n          find: include.find,\n          find_opt: include.find_opt,\n          find_first: include.find_first,\n          find_first_opt: include.find_first_opt,\n          find_last: include.find_last,\n          find_last_opt: include.find_last_opt,\n          output: output,\n          print: print,\n          to_string: to_string,\n          of_list: of_list,\n          map: map\n        };\n}\n\nfunction Make_tbl(T) {\n  var include = Hashtbl.Make(T);\n  var create = include.create;\n  var add = include.add;\n  var find = include.find;\n  var fold = include.fold;\n  var include$1 = $$Map.Make({\n        compare: T.compare\n      });\n  var empty = include$1.empty;\n  var add$1 = include$1.add;\n  var merge = include$1.merge;\n  var union = include$1.union;\n  var iter = include$1.iter;\n  var fold$1 = 
include$1.fold;\n  var cardinal = include$1.cardinal;\n  var bindings = include$1.bindings;\n  var find$1 = include$1.find;\n  var map = include$1.map;\n  var filter_map = function (t, f) {\n    return Curry._3(fold$1, (function (id, v, map) {\n                  var r = Curry._2(f, id, v);\n                  if (r !== undefined) {\n                    return Curry._3(add$1, id, Caml_option.valFromOption(r), map);\n                  } else {\n                    return map;\n                  }\n                }), t, empty);\n  };\n  var of_list = function (l) {\n    return List.fold_left((function (map, param) {\n                  return Curry._3(add$1, param[0], param[1], map);\n                }), empty, l);\n  };\n  var disjoint_union = function (eq, print, m1, m2) {\n    return Curry._3(union, (function (id, v1, v2) {\n                  var ok = eq !== undefined ? Curry._2(eq, v1, v2) : false;\n                  if (ok) {\n                    return Caml_option.some(v1);\n                  }\n                  var tmp;\n                  if (print !== undefined) {\n                    var print$1 = Caml_option.valFromOption(print);\n                    tmp = Curry._6(Format.asprintf(\"Map.disjoint_union %a => %a <> %a\"), T.print, id, print$1, v1, print$1, v2);\n                  } else {\n                    tmp = Curry._2(Format.asprintf(\"Map.disjoint_union %a\"), T.print, id);\n                  }\n                  return Misc.fatal_error(tmp);\n                }), m1, m2);\n  };\n  var union_right = function (m1, m2) {\n    return Curry._3(merge, (function (_id, x, y) {\n                  if (x !== undefined) {\n                    if (y !== undefined) {\n                      return Caml_option.some(Caml_option.valFromOption(y));\n                    } else {\n                      return Caml_option.some(Caml_option.valFromOption(x));\n                    }\n                  } else if (y !== undefined) {\n                    return 
Caml_option.some(Caml_option.valFromOption(y));\n                  } else {\n                    return ;\n                  }\n                }), m1, m2);\n  };\n  var union_left = function (m1, m2) {\n    return union_right(m2, m1);\n  };\n  var union_merge = function (f, m1, m2) {\n    var aux = function (param, m1, m2) {\n      if (m1 !== undefined) {\n        if (m2 !== undefined) {\n          return Caml_option.some(Curry._2(f, Caml_option.valFromOption(m1), Caml_option.valFromOption(m2)));\n        } else {\n          return m1;\n        }\n      } else {\n        return m2;\n      }\n    };\n    return Curry._3(merge, aux, m1, m2);\n  };\n  var rename = function (m, v) {\n    try {\n      return Curry._2(find$1, v, m);\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === \"Not_found\") {\n        return v;\n      }\n      throw exn;\n    }\n  };\n  var map_keys = function (f, m) {\n    return of_list(List.map((function (param) {\n                      return [\n                              Curry._1(f, param[0]),\n                              param[1]\n                            ];\n                    }), Curry._1(bindings, m)));\n  };\n  var print = function (f, ppf, s) {\n    var elts = function (ppf, s) {\n      return Curry._2(iter, (function (id, v) {\n                    return Curry._5(Format.fprintf(ppf), \"@ (@[%a@ %a@])\", T.print, id, f, v);\n                  }), s);\n    };\n    return Curry._3(Format.fprintf(ppf), \"@[<1>{@[%a@ @]}@]\", elts, s);\n  };\n  var T_set = $$Set.Make({\n        compare: T.compare\n      });\n  var keys = function (map) {\n    return Curry._3(fold$1, (function (k, param, set) {\n                  return Curry._2(T_set.add, k, set);\n                }), map, T_set.empty);\n  };\n  var data = function (t) {\n    return List.map((function (prim) {\n                  return prim[1];\n                }), Curry._1(bindings, t));\n  };\n  var 
of_set = function (f, set) {\n    return Curry._3(T_set.fold, (function (e, map) {\n                  return Curry._3(add$1, e, Curry._1(f, e), map);\n                }), set, empty);\n  };\n  var transpose_keys_and_data = function (map) {\n    return Curry._3(fold$1, (function (k, v, m) {\n                  return Curry._3(add$1, v, k, m);\n                }), map, empty);\n  };\n  var transpose_keys_and_data_set = function (map) {\n    return Curry._3(fold$1, (function (k, v, m) {\n                  var set;\n                  var exit = 0;\n                  var set$1;\n                  try {\n                    set$1 = Curry._2(find$1, v, m);\n                    exit = 1;\n                  }\n                  catch (raw_exn){\n                    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n                    if (exn.RE_EXN_ID === \"Not_found\") {\n                      set = Curry._1(T_set.singleton, k);\n                    } else {\n                      throw exn;\n                    }\n                  }\n                  if (exit === 1) {\n                    set = Curry._2(T_set.add, k, set$1);\n                  }\n                  return Curry._3(add$1, v, set, m);\n                }), map, empty);\n  };\n  var T_map_is_empty = include$1.is_empty;\n  var T_map_mem = include$1.mem;\n  var T_map_update = include$1.update;\n  var T_map_singleton = include$1.singleton;\n  var T_map_remove = include$1.remove;\n  var T_map_compare = include$1.compare;\n  var T_map_equal = include$1.equal;\n  var T_map_for_all = include$1.for_all;\n  var T_map_exists = include$1.exists;\n  var T_map_filter = include$1.filter;\n  var T_map_partition = include$1.partition;\n  var T_map_min_binding = include$1.min_binding;\n  var T_map_min_binding_opt = include$1.min_binding_opt;\n  var T_map_max_binding = include$1.max_binding;\n  var T_map_max_binding_opt = include$1.max_binding_opt;\n  var T_map_choose = include$1.choose;\n  var T_map_choose_opt 
= include$1.choose_opt;\n  var T_map_split = include$1.split;\n  var T_map_find_opt = include$1.find_opt;\n  var T_map_find_first = include$1.find_first;\n  var T_map_find_first_opt = include$1.find_first_opt;\n  var T_map_find_last = include$1.find_last;\n  var T_map_find_last_opt = include$1.find_last_opt;\n  var T_map_mapi = include$1.mapi;\n  var T_map = {\n    empty: empty,\n    is_empty: T_map_is_empty,\n    mem: T_map_mem,\n    add: add$1,\n    update: T_map_update,\n    singleton: T_map_singleton,\n    remove: T_map_remove,\n    merge: merge,\n    union: union,\n    compare: T_map_compare,\n    equal: T_map_equal,\n    iter: iter,\n    fold: fold$1,\n    for_all: T_map_for_all,\n    exists: T_map_exists,\n    filter: T_map_filter,\n    partition: T_map_partition,\n    cardinal: cardinal,\n    bindings: bindings,\n    min_binding: T_map_min_binding,\n    min_binding_opt: T_map_min_binding_opt,\n    max_binding: T_map_max_binding,\n    max_binding_opt: T_map_max_binding_opt,\n    choose: T_map_choose,\n    choose_opt: T_map_choose_opt,\n    split: T_map_split,\n    find: find$1,\n    find_opt: T_map_find_opt,\n    find_first: T_map_find_first,\n    find_first_opt: T_map_find_first_opt,\n    find_last: T_map_find_last,\n    find_last_opt: T_map_find_last_opt,\n    map: map,\n    mapi: T_map_mapi,\n    filter_map: filter_map,\n    of_list: of_list,\n    disjoint_union: disjoint_union,\n    union_right: union_right,\n    union_left: union_left,\n    union_merge: union_merge,\n    rename: rename,\n    map_keys: map_keys,\n    print: print,\n    T_set: T_set,\n    keys: keys,\n    data: data,\n    of_set: of_set,\n    transpose_keys_and_data: transpose_keys_and_data,\n    transpose_keys_and_data_set: transpose_keys_and_data_set\n  };\n  var to_list = function (t) {\n    return Curry._3(fold, (function (key, datum, elts) {\n                  return {\n                          hd: [\n                            key,\n                            datum\n              
            ],\n                          tl: elts\n                        };\n                }), t, /* [] */0);\n  };\n  var of_list$1 = function (elts) {\n    var t = Curry._1(create, 42);\n    List.iter((function (param) {\n            return Curry._3(add, t, param[0], param[1]);\n          }), elts);\n    return t;\n  };\n  var to_map = function (v) {\n    return Curry._3(fold, add$1, v, empty);\n  };\n  var of_map = function (m) {\n    var t = Curry._1(create, Curry._1(cardinal, m));\n    Curry._2(iter, (function (k, v) {\n            return Curry._3(add, t, k, v);\n          }), m);\n    return t;\n  };\n  var memoize = function (t, f, key) {\n    try {\n      return Curry._2(find, t, key);\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === \"Not_found\") {\n        var r = Curry._1(f, key);\n        Curry._3(add, t, key, r);\n        return r;\n      }\n      throw exn;\n    }\n  };\n  var map$1 = function (t, f) {\n    return of_map(Curry._2(map, f, Curry._3(fold, add$1, t, empty)));\n  };\n  return {\n          create: create,\n          clear: include.clear,\n          reset: include.reset,\n          copy: include.copy,\n          add: add,\n          remove: include.remove,\n          find: find,\n          find_opt: include.find_opt,\n          find_all: include.find_all,\n          replace: include.replace,\n          mem: include.mem,\n          iter: include.iter,\n          filter_map_inplace: include.filter_map_inplace,\n          fold: fold,\n          length: include.length,\n          stats: include.stats,\n          T_map: T_map,\n          to_list: to_list,\n          of_list: of_list$1,\n          to_map: to_map,\n          of_map: of_map,\n          memoize: memoize,\n          map: map$1\n        };\n}\n\nfunction Make(T) {\n  var include = $$Set.Make({\n        compare: T.compare\n      });\n  var empty = include.empty;\n  var add = include.add;\n  var 
singleton = include.singleton;\n  var iter = include.iter;\n  var elements = include.elements;\n  var output = function (oc, s) {\n    Curry._1(Printf.fprintf(oc), \" ( \");\n    Curry._2(iter, (function (v) {\n            return Curry._3(Printf.fprintf(oc), \"%a \", T.output, v);\n          }), s);\n    return Curry._1(Printf.fprintf(oc), \")\");\n  };\n  var print = function (ppf, s) {\n    var elts = function (ppf, s) {\n      return Curry._2(iter, (function (e) {\n                    return Curry._3(Format.fprintf(ppf), \"@ %a\", T.print, e);\n                  }), s);\n    };\n    return Curry._3(Format.fprintf(ppf), \"@[<1>{@[%a@ @]}@]\", elts, s);\n  };\n  var to_string = function (s) {\n    return Curry._2(Format.asprintf(\"%a\"), print, s);\n  };\n  var of_list = function (l) {\n    if (!l) {\n      return empty;\n    }\n    var q = l.tl;\n    var t = l.hd;\n    if (q) {\n      return List.fold_left((function (acc, e) {\n                    return Curry._2(add, e, acc);\n                  }), Curry._1(singleton, t), q);\n    } else {\n      return Curry._1(singleton, t);\n    }\n  };\n  var map = function (f, s) {\n    return of_list(List.map(f, Curry._1(elements, s)));\n  };\n  var Set_is_empty = include.is_empty;\n  var Set_mem = include.mem;\n  var Set_remove = include.remove;\n  var Set_union = include.union;\n  var Set_inter = include.inter;\n  var Set_diff = include.diff;\n  var Set_compare = include.compare;\n  var Set_equal = include.equal;\n  var Set_subset = include.subset;\n  var Set_fold = include.fold;\n  var Set_for_all = include.for_all;\n  var Set_exists = include.exists;\n  var Set_filter = include.filter;\n  var Set_partition = include.partition;\n  var Set_cardinal = include.cardinal;\n  var Set_min_elt = include.min_elt;\n  var Set_min_elt_opt = include.min_elt_opt;\n  var Set_max_elt = include.max_elt;\n  var Set_max_elt_opt = include.max_elt_opt;\n  var Set_choose = include.choose;\n  var Set_choose_opt = include.choose_opt;\n  var 
Set_split = include.split;\n  var Set_find = include.find;\n  var Set_find_opt = include.find_opt;\n  var Set_find_first = include.find_first;\n  var Set_find_first_opt = include.find_first_opt;\n  var Set_find_last = include.find_last;\n  var Set_find_last_opt = include.find_last_opt;\n  var $$Set$1 = {\n    empty: empty,\n    is_empty: Set_is_empty,\n    mem: Set_mem,\n    add: add,\n    singleton: singleton,\n    remove: Set_remove,\n    union: Set_union,\n    inter: Set_inter,\n    diff: Set_diff,\n    compare: Set_compare,\n    equal: Set_equal,\n    subset: Set_subset,\n    iter: iter,\n    fold: Set_fold,\n    for_all: Set_for_all,\n    exists: Set_exists,\n    filter: Set_filter,\n    partition: Set_partition,\n    cardinal: Set_cardinal,\n    elements: elements,\n    min_elt: Set_min_elt,\n    min_elt_opt: Set_min_elt_opt,\n    max_elt: Set_max_elt,\n    max_elt_opt: Set_max_elt_opt,\n    choose: Set_choose,\n    choose_opt: Set_choose_opt,\n    split: Set_split,\n    find: Set_find,\n    find_opt: Set_find_opt,\n    find_first: Set_find_first,\n    find_first_opt: Set_find_first_opt,\n    find_last: Set_find_last,\n    find_last_opt: Set_find_last_opt,\n    output: output,\n    print: print,\n    to_string: to_string,\n    of_list: of_list,\n    map: map\n  };\n  var include$1 = $$Map.Make({\n        compare: T.compare\n      });\n  var empty$1 = include$1.empty;\n  var add$1 = include$1.add;\n  var merge = include$1.merge;\n  var union = include$1.union;\n  var iter$1 = include$1.iter;\n  var fold = include$1.fold;\n  var bindings = include$1.bindings;\n  var find = include$1.find;\n  var filter_map = function (t, f) {\n    return Curry._3(fold, (function (id, v, map) {\n                  var r = Curry._2(f, id, v);\n                  if (r !== undefined) {\n                    return Curry._3(add$1, id, Caml_option.valFromOption(r), map);\n                  } else {\n                    return map;\n                  }\n                }), t, 
empty$1);\n  };\n  var of_list$1 = function (l) {\n    return List.fold_left((function (map, param) {\n                  return Curry._3(add$1, param[0], param[1], map);\n                }), empty$1, l);\n  };\n  var disjoint_union = function (eq, print, m1, m2) {\n    return Curry._3(union, (function (id, v1, v2) {\n                  var ok = eq !== undefined ? Curry._2(eq, v1, v2) : false;\n                  if (ok) {\n                    return Caml_option.some(v1);\n                  }\n                  var tmp;\n                  if (print !== undefined) {\n                    var print$1 = Caml_option.valFromOption(print);\n                    tmp = Curry._6(Format.asprintf(\"Map.disjoint_union %a => %a <> %a\"), T.print, id, print$1, v1, print$1, v2);\n                  } else {\n                    tmp = Curry._2(Format.asprintf(\"Map.disjoint_union %a\"), T.print, id);\n                  }\n                  return Misc.fatal_error(tmp);\n                }), m1, m2);\n  };\n  var union_right = function (m1, m2) {\n    return Curry._3(merge, (function (_id, x, y) {\n                  if (x !== undefined) {\n                    if (y !== undefined) {\n                      return Caml_option.some(Caml_option.valFromOption(y));\n                    } else {\n                      return Caml_option.some(Caml_option.valFromOption(x));\n                    }\n                  } else if (y !== undefined) {\n                    return Caml_option.some(Caml_option.valFromOption(y));\n                  } else {\n                    return ;\n                  }\n                }), m1, m2);\n  };\n  var union_left = function (m1, m2) {\n    return union_right(m2, m1);\n  };\n  var union_merge = function (f, m1, m2) {\n    var aux = function (param, m1, m2) {\n      if (m1 !== undefined) {\n        if (m2 !== undefined) {\n          return Caml_option.some(Curry._2(f, Caml_option.valFromOption(m1), Caml_option.valFromOption(m2)));\n        } else {\n          
return m1;\n        }\n      } else {\n        return m2;\n      }\n    };\n    return Curry._3(merge, aux, m1, m2);\n  };\n  var rename = function (m, v) {\n    try {\n      return Curry._2(find, v, m);\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === \"Not_found\") {\n        return v;\n      }\n      throw exn;\n    }\n  };\n  var map_keys = function (f, m) {\n    return of_list$1(List.map((function (param) {\n                      return [\n                              Curry._1(f, param[0]),\n                              param[1]\n                            ];\n                    }), Curry._1(bindings, m)));\n  };\n  var print$1 = function (f, ppf, s) {\n    var elts = function (ppf, s) {\n      return Curry._2(iter$1, (function (id, v) {\n                    return Curry._5(Format.fprintf(ppf), \"@ (@[%a@ %a@])\", T.print, id, f, v);\n                  }), s);\n    };\n    return Curry._3(Format.fprintf(ppf), \"@[<1>{@[%a@ @]}@]\", elts, s);\n  };\n  var T_set = $$Set.Make({\n        compare: T.compare\n      });\n  var keys = function (map) {\n    return Curry._3(fold, (function (k, param, set) {\n                  return Curry._2(T_set.add, k, set);\n                }), map, T_set.empty);\n  };\n  var data = function (t) {\n    return List.map((function (prim) {\n                  return prim[1];\n                }), Curry._1(bindings, t));\n  };\n  var of_set = function (f, set) {\n    return Curry._3(T_set.fold, (function (e, map) {\n                  return Curry._3(add$1, e, Curry._1(f, e), map);\n                }), set, empty$1);\n  };\n  var transpose_keys_and_data = function (map) {\n    return Curry._3(fold, (function (k, v, m) {\n                  return Curry._3(add$1, v, k, m);\n                }), map, empty$1);\n  };\n  var transpose_keys_and_data_set = function (map) {\n    return Curry._3(fold, (function (k, v, m) {\n                  var set;\n                
  var exit = 0;\n                  var set$1;\n                  try {\n                    set$1 = Curry._2(find, v, m);\n                    exit = 1;\n                  }\n                  catch (raw_exn){\n                    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n                    if (exn.RE_EXN_ID === \"Not_found\") {\n                      set = Curry._1(T_set.singleton, k);\n                    } else {\n                      throw exn;\n                    }\n                  }\n                  if (exit === 1) {\n                    set = Curry._2(T_set.add, k, set$1);\n                  }\n                  return Curry._3(add$1, v, set, m);\n                }), map, empty$1);\n  };\n  var Map_is_empty = include$1.is_empty;\n  var Map_mem = include$1.mem;\n  var Map_update = include$1.update;\n  var Map_singleton = include$1.singleton;\n  var Map_remove = include$1.remove;\n  var Map_compare = include$1.compare;\n  var Map_equal = include$1.equal;\n  var Map_for_all = include$1.for_all;\n  var Map_exists = include$1.exists;\n  var Map_filter = include$1.filter;\n  var Map_partition = include$1.partition;\n  var Map_cardinal = include$1.cardinal;\n  var Map_min_binding = include$1.min_binding;\n  var Map_min_binding_opt = include$1.min_binding_opt;\n  var Map_max_binding = include$1.max_binding;\n  var Map_max_binding_opt = include$1.max_binding_opt;\n  var Map_choose = include$1.choose;\n  var Map_choose_opt = include$1.choose_opt;\n  var Map_split = include$1.split;\n  var Map_find_opt = include$1.find_opt;\n  var Map_find_first = include$1.find_first;\n  var Map_find_first_opt = include$1.find_first_opt;\n  var Map_find_last = include$1.find_last;\n  var Map_find_last_opt = include$1.find_last_opt;\n  var Map_map = include$1.map;\n  var Map_mapi = include$1.mapi;\n  var $$Map$1 = {\n    empty: empty$1,\n    is_empty: Map_is_empty,\n    mem: Map_mem,\n    add: add$1,\n    update: Map_update,\n    singleton: 
Map_singleton,\n    remove: Map_remove,\n    merge: merge,\n    union: union,\n    compare: Map_compare,\n    equal: Map_equal,\n    iter: iter$1,\n    fold: fold,\n    for_all: Map_for_all,\n    exists: Map_exists,\n    filter: Map_filter,\n    partition: Map_partition,\n    cardinal: Map_cardinal,\n    bindings: bindings,\n    min_binding: Map_min_binding,\n    min_binding_opt: Map_min_binding_opt,\n    max_binding: Map_max_binding,\n    max_binding_opt: Map_max_binding_opt,\n    choose: Map_choose,\n    choose_opt: Map_choose_opt,\n    split: Map_split,\n    find: find,\n    find_opt: Map_find_opt,\n    find_first: Map_find_first,\n    find_first_opt: Map_find_first_opt,\n    find_last: Map_find_last,\n    find_last_opt: Map_find_last_opt,\n    map: Map_map,\n    mapi: Map_mapi,\n    filter_map: filter_map,\n    of_list: of_list$1,\n    disjoint_union: disjoint_union,\n    union_right: union_right,\n    union_left: union_left,\n    union_merge: union_merge,\n    rename: rename,\n    map_keys: map_keys,\n    print: print$1,\n    T_set: T_set,\n    keys: keys,\n    data: data,\n    of_set: of_set,\n    transpose_keys_and_data: transpose_keys_and_data,\n    transpose_keys_and_data_set: transpose_keys_and_data_set\n  };\n  var include$2 = Hashtbl.Make(T);\n  var create = include$2.create;\n  var add$2 = include$2.add;\n  var find$1 = include$2.find;\n  var fold$1 = include$2.fold;\n  var include$3 = $$Map.Make({\n        compare: T.compare\n      });\n  var empty$2 = include$3.empty;\n  var add$3 = include$3.add;\n  var merge$1 = include$3.merge;\n  var union$1 = include$3.union;\n  var iter$2 = include$3.iter;\n  var fold$2 = include$3.fold;\n  var cardinal = include$3.cardinal;\n  var bindings$1 = include$3.bindings;\n  var find$2 = include$3.find;\n  var map$1 = include$3.map;\n  var filter_map$1 = function (t, f) {\n    return Curry._3(fold$2, (function (id, v, map) {\n                  var r = Curry._2(f, id, v);\n                  if (r !== undefined) {\n      
              return Curry._3(add$3, id, Caml_option.valFromOption(r), map);\n                  } else {\n                    return map;\n                  }\n                }), t, empty$2);\n  };\n  var of_list$2 = function (l) {\n    return List.fold_left((function (map, param) {\n                  return Curry._3(add$3, param[0], param[1], map);\n                }), empty$2, l);\n  };\n  var disjoint_union$1 = function (eq, print, m1, m2) {\n    return Curry._3(union$1, (function (id, v1, v2) {\n                  var ok = eq !== undefined ? Curry._2(eq, v1, v2) : false;\n                  if (ok) {\n                    return Caml_option.some(v1);\n                  }\n                  var tmp;\n                  if (print !== undefined) {\n                    var print$1 = Caml_option.valFromOption(print);\n                    tmp = Curry._6(Format.asprintf(\"Map.disjoint_union %a => %a <> %a\"), T.print, id, print$1, v1, print$1, v2);\n                  } else {\n                    tmp = Curry._2(Format.asprintf(\"Map.disjoint_union %a\"), T.print, id);\n                  }\n                  return Misc.fatal_error(tmp);\n                }), m1, m2);\n  };\n  var union_right$1 = function (m1, m2) {\n    return Curry._3(merge$1, (function (_id, x, y) {\n                  if (x !== undefined) {\n                    if (y !== undefined) {\n                      return Caml_option.some(Caml_option.valFromOption(y));\n                    } else {\n                      return Caml_option.some(Caml_option.valFromOption(x));\n                    }\n                  } else if (y !== undefined) {\n                    return Caml_option.some(Caml_option.valFromOption(y));\n                  } else {\n                    return ;\n                  }\n                }), m1, m2);\n  };\n  var union_left$1 = function (m1, m2) {\n    return union_right$1(m2, m1);\n  };\n  var union_merge$1 = function (f, m1, m2) {\n    var aux = function (param, m1, m2) {\n      if 
(m1 !== undefined) {\n        if (m2 !== undefined) {\n          return Caml_option.some(Curry._2(f, Caml_option.valFromOption(m1), Caml_option.valFromOption(m2)));\n        } else {\n          return m1;\n        }\n      } else {\n        return m2;\n      }\n    };\n    return Curry._3(merge$1, aux, m1, m2);\n  };\n  var rename$1 = function (m, v) {\n    try {\n      return Curry._2(find$2, v, m);\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === \"Not_found\") {\n        return v;\n      }\n      throw exn;\n    }\n  };\n  var map_keys$1 = function (f, m) {\n    return of_list$2(List.map((function (param) {\n                      return [\n                              Curry._1(f, param[0]),\n                              param[1]\n                            ];\n                    }), Curry._1(bindings$1, m)));\n  };\n  var print$2 = function (f, ppf, s) {\n    var elts = function (ppf, s) {\n      return Curry._2(iter$2, (function (id, v) {\n                    return Curry._5(Format.fprintf(ppf), \"@ (@[%a@ %a@])\", T.print, id, f, v);\n                  }), s);\n    };\n    return Curry._3(Format.fprintf(ppf), \"@[<1>{@[%a@ @]}@]\", elts, s);\n  };\n  var T_set$1 = $$Set.Make({\n        compare: T.compare\n      });\n  var keys$1 = function (map) {\n    return Curry._3(fold$2, (function (k, param, set) {\n                  return Curry._2(T_set$1.add, k, set);\n                }), map, T_set$1.empty);\n  };\n  var data$1 = function (t) {\n    return List.map((function (prim) {\n                  return prim[1];\n                }), Curry._1(bindings$1, t));\n  };\n  var of_set$1 = function (f, set) {\n    return Curry._3(T_set$1.fold, (function (e, map) {\n                  return Curry._3(add$3, e, Curry._1(f, e), map);\n                }), set, empty$2);\n  };\n  var transpose_keys_and_data$1 = function (map) {\n    return Curry._3(fold$2, (function (k, v, m) {\n               
   return Curry._3(add$3, v, k, m);\n                }), map, empty$2);\n  };\n  var transpose_keys_and_data_set$1 = function (map) {\n    return Curry._3(fold$2, (function (k, v, m) {\n                  var set;\n                  var exit = 0;\n                  var set$1;\n                  try {\n                    set$1 = Curry._2(find$2, v, m);\n                    exit = 1;\n                  }\n                  catch (raw_exn){\n                    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n                    if (exn.RE_EXN_ID === \"Not_found\") {\n                      set = Curry._1(T_set$1.singleton, k);\n                    } else {\n                      throw exn;\n                    }\n                  }\n                  if (exit === 1) {\n                    set = Curry._2(T_set$1.add, k, set$1);\n                  }\n                  return Curry._3(add$3, v, set, m);\n                }), map, empty$2);\n  };\n  var T_map_is_empty = include$3.is_empty;\n  var T_map_mem = include$3.mem;\n  var T_map_update = include$3.update;\n  var T_map_singleton = include$3.singleton;\n  var T_map_remove = include$3.remove;\n  var T_map_compare = include$3.compare;\n  var T_map_equal = include$3.equal;\n  var T_map_for_all = include$3.for_all;\n  var T_map_exists = include$3.exists;\n  var T_map_filter = include$3.filter;\n  var T_map_partition = include$3.partition;\n  var T_map_min_binding = include$3.min_binding;\n  var T_map_min_binding_opt = include$3.min_binding_opt;\n  var T_map_max_binding = include$3.max_binding;\n  var T_map_max_binding_opt = include$3.max_binding_opt;\n  var T_map_choose = include$3.choose;\n  var T_map_choose_opt = include$3.choose_opt;\n  var T_map_split = include$3.split;\n  var T_map_find_opt = include$3.find_opt;\n  var T_map_find_first = include$3.find_first;\n  var T_map_find_first_opt = include$3.find_first_opt;\n  var T_map_find_last = include$3.find_last;\n  var T_map_find_last_opt = 
include$3.find_last_opt;\n  var T_map_mapi = include$3.mapi;\n  var T_map = {\n    empty: empty$2,\n    is_empty: T_map_is_empty,\n    mem: T_map_mem,\n    add: add$3,\n    update: T_map_update,\n    singleton: T_map_singleton,\n    remove: T_map_remove,\n    merge: merge$1,\n    union: union$1,\n    compare: T_map_compare,\n    equal: T_map_equal,\n    iter: iter$2,\n    fold: fold$2,\n    for_all: T_map_for_all,\n    exists: T_map_exists,\n    filter: T_map_filter,\n    partition: T_map_partition,\n    cardinal: cardinal,\n    bindings: bindings$1,\n    min_binding: T_map_min_binding,\n    min_binding_opt: T_map_min_binding_opt,\n    max_binding: T_map_max_binding,\n    max_binding_opt: T_map_max_binding_opt,\n    choose: T_map_choose,\n    choose_opt: T_map_choose_opt,\n    split: T_map_split,\n    find: find$2,\n    find_opt: T_map_find_opt,\n    find_first: T_map_find_first,\n    find_first_opt: T_map_find_first_opt,\n    find_last: T_map_find_last,\n    find_last_opt: T_map_find_last_opt,\n    map: map$1,\n    mapi: T_map_mapi,\n    filter_map: filter_map$1,\n    of_list: of_list$2,\n    disjoint_union: disjoint_union$1,\n    union_right: union_right$1,\n    union_left: union_left$1,\n    union_merge: union_merge$1,\n    rename: rename$1,\n    map_keys: map_keys$1,\n    print: print$2,\n    T_set: T_set$1,\n    keys: keys$1,\n    data: data$1,\n    of_set: of_set$1,\n    transpose_keys_and_data: transpose_keys_and_data$1,\n    transpose_keys_and_data_set: transpose_keys_and_data_set$1\n  };\n  var to_list = function (t) {\n    return Curry._3(fold$1, (function (key, datum, elts) {\n                  return {\n                          hd: [\n                            key,\n                            datum\n                          ],\n                          tl: elts\n                        };\n                }), t, /* [] */0);\n  };\n  var of_list$3 = function (elts) {\n    var t = Curry._1(create, 42);\n    List.iter((function (param) {\n            
return Curry._3(add$2, t, param[0], param[1]);\n          }), elts);\n    return t;\n  };\n  var to_map = function (v) {\n    return Curry._3(fold$1, add$3, v, empty$2);\n  };\n  var of_map = function (m) {\n    var t = Curry._1(create, Curry._1(cardinal, m));\n    Curry._2(iter$2, (function (k, v) {\n            return Curry._3(add$2, t, k, v);\n          }), m);\n    return t;\n  };\n  var memoize = function (t, f, key) {\n    try {\n      return Curry._2(find$1, t, key);\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === \"Not_found\") {\n        var r = Curry._1(f, key);\n        Curry._3(add$2, t, key, r);\n        return r;\n      }\n      throw exn;\n    }\n  };\n  var map$2 = function (t, f) {\n    return of_map(Curry._2(map$1, f, Curry._3(fold$1, add$3, t, empty$2)));\n  };\n  var Tbl_clear = include$2.clear;\n  var Tbl_reset = include$2.reset;\n  var Tbl_copy = include$2.copy;\n  var Tbl_remove = include$2.remove;\n  var Tbl_find_opt = include$2.find_opt;\n  var Tbl_find_all = include$2.find_all;\n  var Tbl_replace = include$2.replace;\n  var Tbl_mem = include$2.mem;\n  var Tbl_iter = include$2.iter;\n  var Tbl_filter_map_inplace = include$2.filter_map_inplace;\n  var Tbl_length = include$2.length;\n  var Tbl_stats = include$2.stats;\n  var Tbl = {\n    create: create,\n    clear: Tbl_clear,\n    reset: Tbl_reset,\n    copy: Tbl_copy,\n    add: add$2,\n    remove: Tbl_remove,\n    find: find$1,\n    find_opt: Tbl_find_opt,\n    find_all: Tbl_find_all,\n    replace: Tbl_replace,\n    mem: Tbl_mem,\n    iter: Tbl_iter,\n    filter_map_inplace: Tbl_filter_map_inplace,\n    fold: fold$1,\n    length: Tbl_length,\n    stats: Tbl_stats,\n    T_map: T_map,\n    to_list: to_list,\n    of_list: of_list$3,\n    to_map: to_map,\n    of_map: of_map,\n    memoize: memoize,\n    map: map$2\n  };\n  return {\n          T: T,\n          equal: T.equal,\n          hash: T.hash,\n          
compare: T.compare,\n          output: T.output,\n          print: T.print,\n          $$Set: $$Set$1,\n          $$Map: $$Map$1,\n          Tbl: Tbl\n        };\n}\n\nexport {\n  Pair ,\n  Make_map ,\n  Make_set ,\n  Make_tbl ,\n  Make ,\n  \n}\n/* Misc Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/location.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as P from \"./P.js\";\nimport * as Misc from \"./misc.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as $$Buffer from \"rescript/lib/es6/buffer.js\";\nimport * as Format from \"./format.js\";\nimport * as Parsing from \"rescript/lib/es6/parsing.js\";\nimport * as Caml_sys from \"rescript/lib/es6/caml_sys.js\";\nimport * as Filename from \"rescript/lib/es6/filename.js\";\nimport * as Printexc from \"rescript/lib/es6/printexc.js\";\nimport * as Warnings from \"./warnings.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nvar absname = {\n  contents: false\n};\n\nfunction in_file(name) {\n  var loc = {\n    pos_fname: name,\n    pos_lnum: 1,\n    pos_bol: 0,\n    pos_cnum: -1\n  };\n  return {\n          loc_start: loc,\n          loc_end: loc,\n          loc_ghost: true\n        };\n}\n\nvar none = in_file(\"_none_\");\n\nfunction curr(lexbuf) {\n  return {\n          loc_start: lexbuf.lex_start_p,\n          loc_end: lexbuf.lex_curr_p,\n          loc_ghost: false\n        };\n}\n\nfunction init(lexbuf, fname) {\n  lexbuf.lex_curr_p = {\n    pos_fname: fname,\n    pos_lnum: 1,\n    pos_bol: 0,\n    pos_cnum: 0\n  };\n  \n}\n\nfunction symbol_rloc(param) {\n  return {\n          loc_start: Parsing.symbol_start_pos(undefined),\n          loc_end: Parsing.symbol_end_pos(undefined),\n          loc_ghost: false\n        };\n}\n\nfunction symbol_gloc(param) {\n  return {\n          loc_start: Parsing.symbol_start_pos(undefined),\n          loc_end: Parsing.symbol_end_pos(undefined),\n          loc_ghost: true\n        };\n}\n\nfunction rhs_loc(n) {\n  return {\n          loc_start: Parsing.rhs_start_pos(n),\n          loc_end: Parsing.rhs_end_pos(n),\n          loc_ghost: false\n        };\n}\n\nvar input_name = 
{\n  contents: \"_none_\"\n};\n\nvar input_lexbuf = {\n  contents: undefined\n};\n\nfunction set_input_name(name) {\n  if (name !== \"\") {\n    input_name.contents = name;\n    return ;\n  }\n  \n}\n\nvar num_loc_lines = {\n  contents: 0\n};\n\nfunction absolute_path(s) {\n  var s$1 = Curry._1(Filename.is_relative, s) ? Filename.concat(Caml_sys.caml_sys_getcwd(undefined), s) : s;\n  var aux = function (_s) {\n    while(true) {\n      var s = _s;\n      var base = Curry._1(Filename.basename, s);\n      var dir = Curry._1(Filename.dirname, s);\n      if (dir === s) {\n        return dir;\n      }\n      if (base !== Filename.current_dir_name) {\n        if (base === Filename.parent_dir_name) {\n          return Curry._1(Filename.dirname, aux(dir));\n        } else {\n          return Filename.concat(aux(dir), base);\n        }\n      }\n      _s = dir;\n      continue ;\n    };\n  };\n  return aux(s$1);\n}\n\nfunction show_filename(file) {\n  var file$1 = file === \"_none_\" ? input_name.contents : file;\n  if (absname.contents) {\n    return absolute_path(file$1);\n  } else {\n    return file$1;\n  }\n}\n\nfunction print_filename(ppf, file) {\n  return Curry._2(Format.fprintf(ppf), \"%s\", show_filename(file));\n}\n\nfunction reset(param) {\n  num_loc_lines.contents = 0;\n  \n}\n\nfunction get_pos_info(pos) {\n  return [\n          pos.pos_fname,\n          pos.pos_lnum,\n          pos.pos_cnum - pos.pos_bol | 0\n        ];\n}\n\nvar error_prefix = \"Error\";\n\nfunction print_compact(ppf, loc) {\n  var match = get_pos_info(loc.loc_start);\n  var startchar = match[2];\n  var endchar = (loc.loc_end.pos_cnum - loc.loc_start.pos_cnum | 0) + startchar | 0;\n  Curry._4(Format.fprintf(ppf), \"%a:%i\", print_filename, match[0], match[1]);\n  if (startchar >= 0) {\n    return Curry._3(Format.fprintf(ppf), \",%i--%i\", startchar, endchar);\n  }\n  \n}\n\nfunction echo_eof(param) {\n  Pervasives.print_newline(undefined);\n  num_loc_lines.contents = num_loc_lines.contents + 1 
| 0;\n  \n}\n\nfunction mkloc(txt, loc) {\n  return {\n          txt: txt,\n          loc: loc\n        };\n}\n\nfunction mknoloc(txt) {\n  return {\n          txt: txt,\n          loc: none\n        };\n}\n\nfunction pp_ksprintf(before, k, fmt) {\n  var buf = $$Buffer.create(64);\n  var ppf = Format.formatter_of_buffer(buf);\n  Misc.Color.set_color_tag_handling(ppf);\n  if (before !== undefined) {\n    Curry._1(before, ppf);\n  }\n  return Curry._2(Format.kfprintf(function (param) {\n                  Curry._1(Format.pp_print_flush(ppf), undefined);\n                  return Curry._1(k, $$Buffer.contents(buf));\n                }), ppf, fmt);\n}\n\nfunction print_phanton_error_prefix(ppf) {\n  return Curry._2(Format.pp_print_as(ppf), error_prefix.length + 2 | 0, \"\");\n}\n\nfunction errorf(locOpt, subOpt, if_highlightOpt, fmt) {\n  var loc = locOpt !== undefined ? locOpt : none;\n  var sub = subOpt !== undefined ? subOpt : /* [] */0;\n  var if_highlight = if_highlightOpt !== undefined ? if_highlightOpt : \"\";\n  return pp_ksprintf(print_phanton_error_prefix, (function (msg) {\n                return {\n                        loc: loc,\n                        msg: msg,\n                        sub: sub,\n                        if_highlight: if_highlight\n                      };\n              }), fmt);\n}\n\nfunction error(locOpt, subOpt, if_highlightOpt, msg) {\n  var loc = locOpt !== undefined ? locOpt : none;\n  var sub = subOpt !== undefined ? subOpt : /* [] */0;\n  var if_highlight = if_highlightOpt !== undefined ? 
if_highlightOpt : \"\";\n  return {\n          loc: loc,\n          msg: msg,\n          sub: sub,\n          if_highlight: if_highlight\n        };\n}\n\nvar error_of_exn = {\n  contents: /* [] */0\n};\n\nfunction register_error_of_exn(f) {\n  error_of_exn.contents = {\n    hd: f,\n    tl: error_of_exn.contents\n  };\n  \n}\n\nfunction error_of_exn$1(exn) {\n  if (exn.RE_EXN_ID === Warnings.Errors) {\n    return \"Already_displayed\";\n  }\n  var _x = error_of_exn.contents;\n  while(true) {\n    var x = _x;\n    if (!x) {\n      return ;\n    }\n    var error = Curry._1(x.hd, exn);\n    if (error !== undefined) {\n      return {\n              NAME: \"Ok\",\n              VAL: Caml_option.valFromOption(error)\n            };\n    }\n    _x = x.tl;\n    continue ;\n  };\n}\n\nfunction error_of_printer(loc, print, x) {\n  return Curry._2(errorf(loc, undefined, undefined, \"%a@?\"), print, x);\n}\n\nfunction error_of_printer_file(print, x) {\n  return error_of_printer(in_file(input_name.contents), print, x);\n}\n\nregister_error_of_exn(function (x) {\n      if (x.RE_EXN_ID === P.Sys_error) {\n        return Curry._1(errorf(in_file(input_name.contents), undefined, undefined, \"I/O error: %s\"), x._1);\n      }\n      if (x.RE_EXN_ID !== Misc.HookExnWrapper) {\n        return ;\n      }\n      var e = x.error;\n      var match = error_of_exn$1(e);\n      var sub = match !== undefined && typeof match === \"object\" ? 
match.VAL : error(undefined, undefined, undefined, Printexc.to_string(e));\n      return Curry._1(errorf(in_file(x.hook_info.sourcefile), {\n                      hd: sub,\n                      tl: /* [] */0\n                    }, undefined, \"In hook %S:\"), x.hook_name);\n    });\n\nvar $$Error = /* @__PURE__ */Caml_exceptions.create(\"Location.Error\");\n\nregister_error_of_exn(function (x) {\n      if (x.RE_EXN_ID === $$Error) {\n        return x._1;\n      }\n      \n    });\n\nfunction raise_errorf(locOpt, subOpt, if_highlightOpt) {\n  var loc = locOpt !== undefined ? locOpt : none;\n  var sub = subOpt !== undefined ? subOpt : /* [] */0;\n  var if_highlight = if_highlightOpt !== undefined ? if_highlightOpt : \"\";\n  var partial_arg = print_phanton_error_prefix;\n  return function (param) {\n    return pp_ksprintf(partial_arg, (function (msg) {\n                  throw {\n                        RE_EXN_ID: $$Error,\n                        _1: {\n                          loc: loc,\n                          msg: msg,\n                          sub: sub,\n                          if_highlight: if_highlight\n                        },\n                        Error: new Error()\n                      };\n                }), param);\n  };\n}\n\nvar msg_file = \"File \\\"\";\n\nvar msg_line = \"\\\", line \";\n\nvar msg_chars = \", characters \";\n\nvar msg_to = \"-\";\n\nvar msg_colon = \":\";\n\nvar warning_prefix = \"Warning\";\n\nvar Already_displayed_error = Warnings.Errors;\n\nexport {\n  absname ,\n  in_file ,\n  none ,\n  curr ,\n  init ,\n  symbol_rloc ,\n  symbol_gloc ,\n  rhs_loc ,\n  input_name ,\n  input_lexbuf ,\n  set_input_name ,\n  num_loc_lines ,\n  absolute_path ,\n  show_filename ,\n  print_filename ,\n  reset ,\n  msg_file ,\n  msg_line ,\n  msg_chars ,\n  msg_to ,\n  msg_colon ,\n  get_pos_info ,\n  error_prefix ,\n  warning_prefix ,\n  print_compact ,\n  echo_eof ,\n  mkloc ,\n  mknoloc ,\n  pp_ksprintf ,\n  print_phanton_error_prefix 
,\n  errorf ,\n  error ,\n  register_error_of_exn ,\n  Already_displayed_error ,\n  error_of_exn$1 as error_of_exn,\n  error_of_printer ,\n  error_of_printer_file ,\n  $$Error ,\n  raise_errorf ,\n  \n}\n/* none Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/location.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Xavier Leroy, projet Cristal, INRIA Rocquencourt */\n/*  */\n/* Copyright 1996 Institut National de Recherche en Informatique et */\n/* en Automatique. */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\nopen P\n\nopen Lexing\n\nlet absname = ref(false)\n/* This reference should be in Clflags, but it would create an additional\n dependency and make bootstrapping Camlp4 more difficult. */\n\ntype t = Warnings.loc = {loc_start: position, loc_end: position, loc_ghost: bool}\n\nlet in_file = name => {\n  let loc = {\n    pos_fname: name,\n    pos_lnum: 1,\n    pos_bol: 0,\n    pos_cnum: -1,\n  }\n  {loc_start: loc, loc_end: loc, loc_ghost: true}\n}\n\nlet none = in_file(\"_none_\")\n\nlet curr = lexbuf => {\n  loc_start: lexbuf.lex_start_p,\n  loc_end: lexbuf.lex_curr_p,\n  loc_ghost: false,\n}\n\nlet init = (lexbuf, fname) =>\n  lexbuf.lex_curr_p = {\n    pos_fname: fname,\n    pos_lnum: 1,\n    pos_bol: 0,\n    pos_cnum: 0,\n  }\n\nlet symbol_rloc = () => {\n  loc_start: Parsing.symbol_start_pos(),\n  loc_end: Parsing.symbol_end_pos(),\n  loc_ghost: false,\n}\n\nlet symbol_gloc = () => {\n  loc_start: Parsing.symbol_start_pos(),\n  loc_end: Parsing.symbol_end_pos(),\n  loc_ghost: true,\n}\n\nlet rhs_loc = n => {\n  loc_start: Parsing.rhs_start_pos(n),\n  loc_end: Parsing.rhs_end_pos(n),\n  loc_ghost: false,\n}\n\nlet input_name = ref(\"_none_\")\nlet input_lexbuf = ref((None: option<lexbuf>))\nlet set_input_name = name =>\n  if name != \"\" {\n    input_name := name\n  }\n/* Terminal info */\n\nlet num_loc_lines = ref(0) /* number of lines already printed after input */\n\n/* Print the 
location in some way or another */\n\nopen Format\n\nlet absolute_path = s => {\n  /* This function could go into Filename */\n  open Filename\n  let s = if is_relative(s) {\n    concat(Sys.getcwd(), s)\n  } else {\n    s\n  }\n  /* Now simplify . and .. components */\n  let rec aux = s => {\n    let base = basename(s)\n    let dir = dirname(s)\n    if dir == s {\n      dir\n    } else if base == current_dir_name {\n      aux(dir)\n    } else if base == parent_dir_name {\n      dirname(aux(dir))\n    } else {\n      concat(aux(dir), base)\n    }\n  }\n\n  aux(s)\n}\n\nlet show_filename = file => {\n  let file = if file == \"_none_\" {\n    input_name.contents\n  } else {\n    file\n  }\n  if absname.contents {\n    absolute_path(file)\n  } else {\n    file\n  }\n}\n\nlet print_filename = (ppf, file) => Format.fprintf(ppf, \"%s\", show_filename(file))\n\nlet reset = () => num_loc_lines := 0\n\nlet (msg_file, msg_line, msg_chars, msg_to, msg_colon) = (\n  \"File \\\"\",\n  \"\\\", line \",\n  \", characters \",\n  \"-\",\n  \":\",\n)\n\n/* return file, line, char from the given position */\nlet get_pos_info = pos => (pos.pos_fname, pos.pos_lnum, pos.pos_cnum - pos.pos_bol)\n\nlet error_prefix = \"Error\"\nlet warning_prefix = \"Warning\"\n\nlet print_compact = (ppf, loc) => {\n  let (file, line, startchar) = get_pos_info(loc.loc_start)\n  let endchar = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum + startchar\n  fprintf(ppf, \"%a:%i\", print_filename, file, line)\n  if startchar >= 0 {\n    fprintf(ppf, \",%i--%i\", startchar, endchar)\n  }\n}\n\nlet echo_eof = () => {\n  print_newline()\n  incr(num_loc_lines)\n}\n\ntype loc<'a> = {\n  txt: 'a,\n  loc: t,\n}\n\nlet mkloc = (txt, loc) => {txt: txt, loc: loc}\nlet mknoloc = txt => mkloc(txt, none)\n\ntype rec error = {\n  loc: t,\n  msg: string,\n  sub: list<error>,\n  if_highlight: string /* alternative message if locations are highlighted */,\n}\n\nlet pp_ksprintf = (~before=?, k, fmt) => {\n  let buf = 
Buffer.create(64)\n  let ppf = Format.formatter_of_buffer(buf)\n  Misc.Color.set_color_tag_handling(ppf)\n  switch before {\n  | None => ()\n  | Some(f) => f(ppf)\n  }\n  kfprintf(_ => {\n    pp_print_flush(ppf, ())\n    let msg = Buffer.contents(buf)\n    k(msg)\n  }, ppf, fmt)\n}\n\n/* Shift the formatter's offset by the length of the error prefix, which\n is always added by the compiler after the message has been formatted */\nlet print_phanton_error_prefix = ppf =>\n  Format.pp_print_as(ppf, String.length(error_prefix) + 2 /* \": \" */, \"\")\n\nlet errorf = (~loc=none, ~sub=list{}, ~if_highlight=\"\", fmt) =>\n  pp_ksprintf(\n    ~before=print_phanton_error_prefix,\n    msg => {loc: loc, msg: msg, sub: sub, if_highlight: if_highlight},\n    fmt,\n  )\n\nlet error = (~loc=none, ~sub=list{}, ~if_highlight=\"\", msg) => {\n  loc: loc,\n  msg: msg,\n  sub: sub,\n  if_highlight: if_highlight,\n}\n\nlet error_of_exn: ref<list<exn => option<error>>> = ref(list{})\n\nlet register_error_of_exn = f => error_of_exn := list{f, ...error_of_exn.contents}\n\nexception Already_displayed_error = Warnings.Errors\n\nlet error_of_exn = exn =>\n  switch exn {\n  | Already_displayed_error => Some(#Already_displayed)\n  | _ =>\n    let rec loop = x =>\n      switch x {\n      | list{} => None\n      | list{f, ...rest} =>\n        switch f(exn) {\n        | Some(error) => Some(#Ok(error))\n        | None => loop(rest)\n        }\n      }\n\n    loop(error_of_exn.contents)\n  }\n\nlet error_of_printer = (loc, print, x) => errorf(~loc, \"%a@?\", print, x)\n\nlet error_of_printer_file = (print, x) => error_of_printer(in_file(input_name.contents), print, x)\n\nlet () = register_error_of_exn(x =>\n  switch x {\n  | Sys_error(msg) => Some(errorf(~loc=in_file(input_name.contents), \"I/O error: %s\", msg))\n\n  | Misc.HookExnWrapper({error: e, hook_name, hook_info: {Misc.sourcefile: sourcefile}}) =>\n    let sub = switch error_of_exn(e) {\n    | None | Some(#Already_displayed) => 
error(Printexc.to_string(e))\n    | Some(#Ok(err)) => err\n    }\n\n    Some(errorf(~loc=in_file(sourcefile), \"In hook %S:\", hook_name, ~sub=list{sub}))\n  | _ => None\n  }\n)\n\nexternal reraise: exn => 'a = \"%reraise\"\n\nexception Error(error)\n\nlet () = register_error_of_exn(x =>\n  switch x {\n  | Error(e) => Some(e)\n  | _ => None\n  }\n)\n\n@raises(Error)\nlet raise_errorf = (~loc=none, ~sub=list{}, ~if_highlight=\"\") =>\n  pp_ksprintf(~before=print_phanton_error_prefix, msg =>\n    raise(Error({loc: loc, msg: msg, sub: sub, if_highlight: if_highlight}))\n  )\n"
  },
  {
    "path": "analysis/examples/larger-project/src/longident.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Misc from \"./misc.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nfunction flat(_accu, _x) {\n  while(true) {\n    var x = _x;\n    var accu = _accu;\n    switch (x.TAG | 0) {\n      case /* Lident */0 :\n          return {\n                  hd: x._0,\n                  tl: accu\n                };\n      case /* Ldot */1 :\n          _x = x._0;\n          _accu = {\n            hd: x._1,\n            tl: accu\n          };\n          continue ;\n      case /* Lapply */2 :\n          return Misc.fatal_error(\"Longident.flat\");\n      \n    }\n  };\n}\n\nfunction flatten(lid) {\n  return flat(/* [] */0, lid);\n}\n\nfunction last(x) {\n  switch (x.TAG | 0) {\n    case /* Lident */0 :\n        return x._0;\n    case /* Ldot */1 :\n        return x._1;\n    case /* Lapply */2 :\n        return Misc.fatal_error(\"Longident.last\");\n    \n  }\n}\n\nfunction split_at_dots(s, pos) {\n  try {\n    var dot = $$String.index_from(s, pos, /* '.' 
*/46);\n    return {\n            hd: $$String.sub(s, pos, dot - pos | 0),\n            tl: split_at_dots(s, dot + 1 | 0)\n          };\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return {\n              hd: $$String.sub(s, pos, s.length - pos | 0),\n              tl: /* [] */0\n            };\n    }\n    throw exn;\n  }\n}\n\nfunction unflatten(l) {\n  if (l) {\n    return List.fold_left((function (p, s) {\n                  return {\n                          TAG: /* Ldot */1,\n                          _0: p,\n                          _1: s\n                        };\n                }), {\n                TAG: /* Lident */0,\n                _0: l.hd\n              }, l.tl);\n  }\n  \n}\n\nfunction parse(s) {\n  var v = unflatten(split_at_dots(s, 0));\n  if (v !== undefined) {\n    return v;\n  } else {\n    return {\n            TAG: /* Lident */0,\n            _0: \"\"\n          };\n  }\n}\n\nexport {\n  flat ,\n  flatten ,\n  last ,\n  split_at_dots ,\n  unflatten ,\n  parse ,\n  \n}\n/* Misc Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/longident.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Xavier Leroy, projet Cristal, INRIA Rocquencourt */\n/*  */\n/* Copyright 1996 Institut National de Recherche en Informatique et */\n/* en Automatique. */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\ntype rec t =\n  | Lident(string)\n  | Ldot(t, string)\n  | Lapply(t, t)\n\nlet rec flat = (accu, x) =>\n  switch x {\n  | Lident(s) => list{s, ...accu}\n  | Ldot(lid, s) => flat(list{s, ...accu}, lid)\n  | Lapply(_, _) => Misc.fatal_error(\"Longident.flat\")\n  }\n\nlet flatten = lid => flat(list{}, lid)\n\nlet last = x =>\n  switch x {\n  | Lident(s) => s\n  | Ldot(_, s) => s\n  | Lapply(_, _) => Misc.fatal_error(\"Longident.last\")\n  }\n\n@raises(Invalid_argument)\nlet rec split_at_dots = (s, pos) =>\n  try {\n    let dot = String.index_from(s, pos, '.')\n    list{String.sub(s, pos, dot - pos), ...split_at_dots(s, dot + 1)}\n  } catch {\n  | Not_found => list{String.sub(s, pos, String.length(s) - pos)}\n  }\n\nlet unflatten = l =>\n  switch l {\n  | list{} => None\n  | list{hd, ...tl} => Some(List.fold_left((p, s) => Ldot(p, s), Lident(hd), tl))\n  }\n\n@raises(Invalid_argument)\nlet parse = s =>\n  switch unflatten(split_at_dots(s, 0)) {\n  | None => Lident(\"\") /* should not happen, but don't put assert false\n   so as not to crash the toplevel (see Genprintval) */\n\n  | Some(v) => v\n  }\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/loop.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/loop.res",
    "content": "// let foo = x =>\n//   switch x {\n//   | `${\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/misc.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as P from \"./P.js\";\nimport * as $$Map from \"rescript/lib/es6/map.js\";\nimport * as $$Set from \"rescript/lib/es6/set.js\";\nimport * as Sys from \"rescript/lib/es6/sys.js\";\nimport * as Caml from \"rescript/lib/es6/caml.js\";\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as $$Array from \"rescript/lib/es6/array.js\";\nimport * as Bytes from \"rescript/lib/es6/bytes.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as $$Buffer from \"rescript/lib/es6/buffer.js\";\nimport * as Format from \"./format.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Hashtbl from \"rescript/lib/es6/hashtbl.js\";\nimport * as Caml_obj from \"rescript/lib/es6/caml_obj.js\";\nimport * as Caml_sys from \"rescript/lib/es6/caml_sys.js\";\nimport * as Filename from \"rescript/lib/es6/filename.js\";\nimport * as Caml_array from \"rescript/lib/es6/caml_array.js\";\nimport * as Caml_bytes from \"rescript/lib/es6/caml_bytes.js\";\nimport * as Caml_int32 from \"rescript/lib/es6/caml_int32.js\";\nimport * as Caml_int64 from \"rescript/lib/es6/caml_int64.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Caml_format from \"rescript/lib/es6/caml_format.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as Caml_string from \"rescript/lib/es6/caml_string.js\";\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\nimport * as Caml_external_polyfill from \"rescript/lib/es6/caml_external_polyfill.js\";\n\nvar Fatal_error = /* @__PURE__ */Caml_exceptions.create(\"Misc.Fatal_error\");\n\nfunction fatal_error(msg) {\n  Pervasives.print_string(\">> Fatal error: \");\n  console.error(msg);\n  throw {\n        RE_EXN_ID: Fatal_error,\n        Error: new Error()\n      };\n}\n\nfunction 
fatal_errorf(fmt) {\n  return Curry._1(Format.kasprintf(fatal_error), fmt);\n}\n\nfunction try_finally(work, cleanup) {\n  var result;\n  try {\n    result = Curry._1(work, undefined);\n  }\n  catch (e){\n    Curry._1(cleanup, undefined);\n    throw e;\n  }\n  Curry._1(cleanup, undefined);\n  return result;\n}\n\nfunction set_refs(l) {\n  return List.iter((function (param) {\n                param._0.contents = param._1;\n                \n              }), l);\n}\n\nfunction protect_refs(refs, f) {\n  var backup = List.map((function (param) {\n          var r = param._0;\n          return /* R */{\n                  _0: r,\n                  _1: r.contents\n                };\n        }), refs);\n  set_refs(refs);\n  var x;\n  try {\n    x = Curry._1(f, undefined);\n  }\n  catch (e){\n    set_refs(backup);\n    throw e;\n  }\n  set_refs(backup);\n  return x;\n}\n\nfunction map_end(f, l1, l2) {\n  if (l1) {\n    return {\n            hd: Curry._1(f, l1.hd),\n            tl: map_end(f, l1.tl, l2)\n          };\n  } else {\n    return l2;\n  }\n}\n\nfunction map_left_right(f, x) {\n  if (!x) {\n    return /* [] */0;\n  }\n  var res = Curry._1(f, x.hd);\n  return {\n          hd: res,\n          tl: map_left_right(f, x.tl)\n        };\n}\n\nfunction for_all2(pred, _l1, _l2) {\n  while(true) {\n    var l2 = _l2;\n    var l1 = _l1;\n    if (!l1) {\n      if (l2) {\n        return false;\n      } else {\n        return true;\n      }\n    }\n    if (!l2) {\n      return false;\n    }\n    if (!Curry._2(pred, l1.hd, l2.hd)) {\n      return false;\n    }\n    _l2 = l2.tl;\n    _l1 = l1.tl;\n    continue ;\n  };\n}\n\nfunction replicate_list(elem, n) {\n  if (n <= 0) {\n    return /* [] */0;\n  } else {\n    return {\n            hd: elem,\n            tl: replicate_list(elem, n - 1 | 0)\n          };\n  }\n}\n\nfunction list_remove(x, y) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"misc.res\",\n          94,\n          32\n        ],\n   
     Error: new Error()\n      };\n}\n\nfunction split_last(x) {\n  if (x) {\n    var tl = x.tl;\n    var x$1 = x.hd;\n    if (!tl) {\n      return [\n              /* [] */0,\n              x$1\n            ];\n    }\n    var match = split_last(tl);\n    return [\n            {\n              hd: x$1,\n              tl: match[0]\n            },\n            match[1]\n          ];\n  }\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"misc.res\",\n          98,\n          14\n        ],\n        Error: new Error()\n      };\n}\n\nfunction compare(cmp, _l1, _l2) {\n  while(true) {\n    var l2 = _l2;\n    var l1 = _l1;\n    if (!l1) {\n      if (l2) {\n        return -1;\n      } else {\n        return 0;\n      }\n    }\n    if (!l2) {\n      return 1;\n    }\n    var c = Curry._2(cmp, l1.hd, l2.hd);\n    if (c !== 0) {\n      return c;\n    }\n    _l2 = l2.tl;\n    _l1 = l1.tl;\n    continue ;\n  };\n}\n\nfunction equal(eq, _l1, _l2) {\n  while(true) {\n    var l2 = _l2;\n    var l1 = _l1;\n    if (!l1) {\n      if (l2) {\n        return false;\n      } else {\n        return true;\n      }\n    }\n    if (!l2) {\n      return false;\n    }\n    if (!Curry._2(eq, l1.hd, l2.hd)) {\n      return false;\n    }\n    _l2 = l2.tl;\n    _l1 = l1.tl;\n    continue ;\n  };\n}\n\nfunction filter_map(f, l) {\n  var _acc = /* [] */0;\n  var _l = l;\n  while(true) {\n    var l$1 = _l;\n    var acc = _acc;\n    if (!l$1) {\n      return List.rev(acc);\n    }\n    var t = l$1.tl;\n    var v = Curry._1(f, l$1.hd);\n    if (v !== undefined) {\n      _l = t;\n      _acc = {\n        hd: Caml_option.valFromOption(v),\n        tl: acc\n      };\n      continue ;\n    }\n    _l = t;\n    continue ;\n  };\n}\n\nfunction map2_prefix(f, l1, l2) {\n  var _acc = /* [] */0;\n  var _l1 = l1;\n  var _l2 = l2;\n  while(true) {\n    var l2$1 = _l2;\n    var l1$1 = _l1;\n    var acc = _acc;\n    if (!l1$1) {\n      return [\n              List.rev(acc),\n              
l2$1\n            ];\n    }\n    if (l2$1) {\n      var h = Curry._2(f, l1$1.hd, l2$1.hd);\n      _l2 = l2$1.tl;\n      _l1 = l1$1.tl;\n      _acc = {\n        hd: h,\n        tl: acc\n      };\n      continue ;\n    }\n    throw {\n          RE_EXN_ID: \"Invalid_argument\",\n          _1: \"map2_prefix\",\n          Error: new Error()\n        };\n  };\n}\n\nfunction some_if_all_elements_are_some(l) {\n  var _acc = /* [] */0;\n  var _l = l;\n  while(true) {\n    var l$1 = _l;\n    var acc = _acc;\n    if (!l$1) {\n      return List.rev(acc);\n    }\n    var h = l$1.hd;\n    if (h === undefined) {\n      return ;\n    }\n    _l = l$1.tl;\n    _acc = {\n      hd: Caml_option.valFromOption(h),\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction split_at(n, l) {\n  var _n = n;\n  var _acc = /* [] */0;\n  var _l = l;\n  while(true) {\n    var l$1 = _l;\n    var acc = _acc;\n    var n$1 = _n;\n    if (n$1 === 0) {\n      return [\n              List.rev(acc),\n              l$1\n            ];\n    }\n    if (l$1) {\n      _l = l$1.tl;\n      _acc = {\n        hd: l$1.hd,\n        tl: acc\n      };\n      _n = n$1 - 1 | 0;\n      continue ;\n    }\n    throw {\n          RE_EXN_ID: \"Invalid_argument\",\n          _1: \"split_at\",\n          Error: new Error()\n        };\n  };\n}\n\nvar List$1 = {\n  compare: compare,\n  equal: equal,\n  filter_map: filter_map,\n  map2_prefix: map2_prefix,\n  some_if_all_elements_are_some: some_if_all_elements_are_some,\n  split_at: split_at\n};\n\nfunction equal$1(eq, o1, o2) {\n  if (o1 !== undefined) {\n    if (o2 !== undefined) {\n      return Curry._2(eq, Caml_option.valFromOption(o1), Caml_option.valFromOption(o2));\n    } else {\n      return false;\n    }\n  } else {\n    return o2 === undefined;\n  }\n}\n\nfunction iter(f, x) {\n  if (x !== undefined) {\n    return Curry._1(f, Caml_option.valFromOption(x));\n  }\n  \n}\n\nfunction map(f, x) {\n  if (x !== undefined) {\n    return Caml_option.some(Curry._1(f, 
Caml_option.valFromOption(x)));\n  }\n  \n}\n\nfunction fold(f, a, b) {\n  if (a !== undefined) {\n    return Curry._2(f, Caml_option.valFromOption(a), b);\n  } else {\n    return b;\n  }\n}\n\nfunction value_default(f, $$default, a) {\n  if (a !== undefined) {\n    return Curry._1(f, Caml_option.valFromOption(a));\n  } else {\n    return $$default;\n  }\n}\n\nvar $$Option = {\n  equal: equal$1,\n  iter: iter,\n  map: map,\n  fold: fold,\n  value_default: value_default\n};\n\nfunction exists2(p, a1, a2) {\n  var n = a1.length;\n  if (a2.length !== n) {\n    Pervasives.invalid_arg(\"Misc.Stdlib.Array.exists2\");\n  }\n  var _i = 0;\n  while(true) {\n    var i = _i;\n    if (i === n) {\n      return false;\n    }\n    if (Curry._2(p, a1[i], a2[i])) {\n      return true;\n    }\n    _i = i + 1 | 0;\n    continue ;\n  };\n}\n\nvar $$Array$1 = {\n  exists2: exists2\n};\n\nvar Stdlib = {\n  List: List$1,\n  $$Option: $$Option,\n  $$Array: $$Array$1\n};\n\nfunction find_in_path(path, name) {\n  if (Curry._1(Filename.is_implicit, name)) {\n    var _x = path;\n    while(true) {\n      var x = _x;\n      if (x) {\n        var fullname = Filename.concat(x.hd, name);\n        if (Caml_external_polyfill.resolve(\"caml_sys_file_exists\")(fullname)) {\n          return fullname;\n        }\n        _x = x.tl;\n        continue ;\n      }\n      throw {\n            RE_EXN_ID: \"Not_found\",\n            Error: new Error()\n          };\n    };\n  }\n  if (Caml_external_polyfill.resolve(\"caml_sys_file_exists\")(name)) {\n    return name;\n  }\n  throw {\n        RE_EXN_ID: \"Not_found\",\n        Error: new Error()\n      };\n}\n\nfunction find_in_path_rel(path, name) {\n  var simplify = function (_s) {\n    while(true) {\n      var s = _s;\n      var base = Curry._1(Filename.basename, s);\n      var dir = Curry._1(Filename.dirname, s);\n      if (dir === s) {\n        return dir;\n      }\n      if (base !== Filename.current_dir_name) {\n        return 
Filename.concat(simplify(dir), base);\n      }\n      _s = dir;\n      continue ;\n    };\n  };\n  var _x = path;\n  while(true) {\n    var x = _x;\n    if (x) {\n      var fullname = simplify(Filename.concat(x.hd, name));\n      if (Caml_external_polyfill.resolve(\"caml_sys_file_exists\")(fullname)) {\n        return fullname;\n      }\n      _x = x.tl;\n      continue ;\n    }\n    throw {\n          RE_EXN_ID: \"Not_found\",\n          Error: new Error()\n        };\n  };\n}\n\nfunction find_in_path_uncap(path, name) {\n  var uname = $$String.uncapitalize_ascii(name);\n  var _x = path;\n  while(true) {\n    var x = _x;\n    if (x) {\n      var dir = x.hd;\n      var fullname = Filename.concat(dir, name);\n      var ufullname = Filename.concat(dir, uname);\n      if (Caml_external_polyfill.resolve(\"caml_sys_file_exists\")(ufullname)) {\n        return ufullname;\n      }\n      if (Caml_external_polyfill.resolve(\"caml_sys_file_exists\")(fullname)) {\n        return fullname;\n      }\n      _x = x.tl;\n      continue ;\n    }\n    throw {\n          RE_EXN_ID: \"Not_found\",\n          Error: new Error()\n        };\n  };\n}\n\nfunction remove_file(filename) {\n  try {\n    if (Caml_external_polyfill.resolve(\"caml_sys_file_exists\")(filename)) {\n      return Caml_external_polyfill.resolve(\"caml_sys_remove\")(filename);\n    } else {\n      return ;\n    }\n  }\n  catch (raw__msg){\n    var _msg = Caml_js_exceptions.internalToOCamlException(raw__msg);\n    if (_msg.RE_EXN_ID === P.Sys_error) {\n      return ;\n    }\n    throw _msg;\n  }\n}\n\nfunction expand_directory(alt, s) {\n  if (s.length !== 0 && Caml_string.get(s, 0) === /* '+' */43) {\n    return Filename.concat(alt, $$String.sub(s, 1, s.length - 1 | 0));\n  } else {\n    return s;\n  }\n}\n\nfunction create_hashtable(size, init) {\n  var tbl = Hashtbl.create(undefined, size);\n  List.iter((function (param) {\n          return Hashtbl.add(tbl, param[0], param[1]);\n        }), init);\n  return 
tbl;\n}\n\nfunction copy_file(ic, oc) {\n  var buff = Caml_bytes.caml_create_bytes(4096);\n  var _param;\n  while(true) {\n    var n = Curry._3(P.input(ic), buff, 0, 4096);\n    if (n === 0) {\n      return ;\n    }\n    Curry._3(P.output(oc), buff, 0, n);\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction copy_file_chunk(ic, oc, len) {\n  var buff = Caml_bytes.caml_create_bytes(4096);\n  var _n = len;\n  while(true) {\n    var n = _n;\n    if (n <= 0) {\n      return ;\n    }\n    var r = Curry._3(P.input(ic), buff, 0, n < 4096 ? n : 4096);\n    if (r === 0) {\n      throw {\n            RE_EXN_ID: \"End_of_file\",\n            Error: new Error()\n          };\n    }\n    Curry._3(P.output(oc), buff, 0, r);\n    _n = n - r | 0;\n    continue ;\n  };\n}\n\nfunction string_of_file(ic) {\n  var b = $$Buffer.create(65536);\n  var buff = Caml_bytes.caml_create_bytes(4096);\n  var _param;\n  while(true) {\n    var n = Curry._3(P.input(ic), buff, 0, 4096);\n    if (n === 0) {\n      return $$Buffer.contents(b);\n    }\n    $$Buffer.add_subbytes(b, buff, 0, n);\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction output_to_file_via_temporary(modeOpt, filename, fn) {\n  var mode = modeOpt !== undefined ? 
modeOpt : ({\n        hd: /* Open_text */0,\n        tl: /* [] */0\n      });\n  var match = Curry._4(P.open_temp_file(Curry._1(Filename.basename, filename)), mode, 438, Curry._1(Filename.dirname, filename), \".tmp\");\n  var oc = match[1];\n  var temp_filename = match[0];\n  var res;\n  try {\n    res = Curry._2(fn, temp_filename, oc);\n  }\n  catch (exn){\n    P.close_out(oc);\n    remove_file(temp_filename);\n    throw exn;\n  }\n  P.close_out(oc);\n  try {\n    Caml_external_polyfill.resolve(\"caml_sys_rename\")(temp_filename, filename);\n    return res;\n  }\n  catch (exn$1){\n    remove_file(temp_filename);\n    throw exn$1;\n  }\n}\n\nfunction log2(n) {\n  if (n <= 1) {\n    return 0;\n  } else {\n    return 1 + log2((n >> 1)) | 0;\n  }\n}\n\nfunction align(n, a) {\n  if (n >= 0) {\n    return (n + a | 0) - 1 & (-a | 0);\n  } else {\n    return n & (-a | 0);\n  }\n}\n\nfunction no_overflow_add(a, b) {\n  return (a ^ b | a ^ Pervasives.lnot(a + b | 0)) < 0;\n}\n\nfunction no_overflow_sub(a, b) {\n  return (a ^ Pervasives.lnot(b) | b ^ (a - b | 0)) < 0;\n}\n\nfunction no_overflow_mul(a, b) {\n  if (b !== 0) {\n    return Caml_int32.div(Math.imul(a, b), b) === a;\n  } else {\n    return false;\n  }\n}\n\nfunction no_overflow_lsl(a, k) {\n  if (0 <= k && k < Sys.word_size && (Pervasives.min_int >> k) <= a) {\n    return a <= (Pervasives.max_int >> k);\n  } else {\n    return false;\n  }\n}\n\nfunction cvt_int_aux(str, neg, of_string) {\n  if (str.length === 0 || Caml_string.get(str, 0) === /* '-' */45) {\n    return Curry._1(of_string, str);\n  } else {\n    return Curry._1(neg, Curry._1(of_string, \"-\" + str));\n  }\n}\n\nfunction $$int(s) {\n  return cvt_int_aux(s, (function (prim) {\n                return -prim | 0;\n              }), Caml_format.caml_int_of_string);\n}\n\nfunction int32(s) {\n  return cvt_int_aux(s, (function (prim) {\n                return -prim | 0;\n              }), Caml_format.caml_int32_of_string);\n}\n\nfunction int64(s) {\n  
return cvt_int_aux(s, Caml_int64.neg, Caml_format.caml_int64_of_string);\n}\n\nfunction nativeint(s) {\n  return cvt_int_aux(s, (function (prim) {\n                return -prim | 0;\n              }), Caml_format.caml_nativeint_of_string);\n}\n\nvar Int_literal_converter = {\n  cvt_int_aux: cvt_int_aux,\n  $$int: $$int,\n  int32: int32,\n  int64: int64,\n  nativeint: nativeint\n};\n\nfunction chop_extensions(file) {\n  var dirname = Curry._1(Filename.dirname, file);\n  var basename = Curry._1(Filename.basename, file);\n  try {\n    var pos = $$String.index(basename, /* '.' */46);\n    var basename$1 = $$String.sub(basename, 0, pos);\n    if (Curry._1(Filename.is_implicit, file) && dirname === Filename.current_dir_name) {\n      return basename$1;\n    } else {\n      return Filename.concat(dirname, basename$1);\n    }\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return file;\n    }\n    throw exn;\n  }\n}\n\nfunction search_substring(pat, str, start) {\n  var _i = start;\n  var _j = 0;\n  while(true) {\n    var j = _j;\n    var i = _i;\n    if (j >= pat.length) {\n      return i;\n    }\n    if ((i + j | 0) >= str.length) {\n      throw {\n            RE_EXN_ID: \"Not_found\",\n            Error: new Error()\n          };\n    }\n    if (Caml_string.get(str, i + j | 0) === Caml_string.get(pat, j)) {\n      _j = j + 1 | 0;\n      continue ;\n    }\n    _j = 0;\n    _i = i + 1 | 0;\n    continue ;\n  };\n}\n\nfunction replace_substring(before, after, str) {\n  var search = function (_acc, _curr) {\n    while(true) {\n      var curr = _curr;\n      var acc = _acc;\n      var next;\n      try {\n        next = search_substring(before, str, curr);\n      }\n      catch (raw_exn){\n        var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n        if (exn.RE_EXN_ID === \"Not_found\") {\n          var suffix = $$String.sub(str, curr, str.length - curr | 0);\n  
        return List.rev({\n                      hd: suffix,\n                      tl: acc\n                    });\n        }\n        throw exn;\n      }\n      var prefix = $$String.sub(str, curr, next - curr | 0);\n      _curr = next + before.length | 0;\n      _acc = {\n        hd: prefix,\n        tl: acc\n      };\n      continue ;\n    };\n  };\n  return $$String.concat(after, search(/* [] */0, 0));\n}\n\nfunction rev_split_words(s) {\n  var split1 = function (res, _i) {\n    while(true) {\n      var i = _i;\n      if (i >= s.length) {\n        return res;\n      }\n      var match = Caml_string.get(s, i);\n      if (match > 13 || match < 9) {\n        if (match !== 32) {\n          return split2(res, i, i + 1 | 0);\n        }\n        _i = i + 1 | 0;\n        continue ;\n      }\n      if (match === 12 || match === 11) {\n        return split2(res, i, i + 1 | 0);\n      }\n      _i = i + 1 | 0;\n      continue ;\n    };\n  };\n  var split2 = function (res, i, _j) {\n    while(true) {\n      var j = _j;\n      if (j >= s.length) {\n        return {\n                hd: $$String.sub(s, i, j - i | 0),\n                tl: res\n              };\n      }\n      var match = Caml_string.get(s, j);\n      if (match > 13 || match < 9) {\n        if (match !== 32) {\n          _j = j + 1 | 0;\n          continue ;\n        }\n        \n      } else if (match === 12 || match === 11) {\n        _j = j + 1 | 0;\n        continue ;\n      }\n      return split1({\n                  hd: $$String.sub(s, i, j - i | 0),\n                  tl: res\n                }, j + 1 | 0);\n    };\n  };\n  return split1(/* [] */0, 0);\n}\n\nfunction get_ref(r) {\n  var v = r.contents;\n  r.contents = /* [] */0;\n  return v;\n}\n\nfunction fst3(param) {\n  return param[0];\n}\n\nfunction snd3(param) {\n  return param[1];\n}\n\nfunction thd3(param) {\n  return param[2];\n}\n\nfunction fst4(param) {\n  return param[0];\n}\n\nfunction snd4(param) {\n  return param[1];\n}\n\nfunction 
thd4(param) {\n  return param[2];\n}\n\nfunction for4(param) {\n  return param[3];\n}\n\nfunction create(str_size) {\n  var tbl_size = Caml_int32.div(str_size, Sys.max_string_length) + 1 | 0;\n  var tbl = Caml_array.make(tbl_size, Bytes.empty);\n  for(var i = 0 ,i_finish = tbl_size - 2 | 0; i <= i_finish; ++i){\n    Caml_array.set(tbl, i, Caml_bytes.caml_create_bytes(Sys.max_string_length));\n  }\n  Caml_array.set(tbl, tbl_size - 1 | 0, Caml_bytes.caml_create_bytes(Caml_int32.mod_(str_size, Sys.max_string_length)));\n  return tbl;\n}\n\nfunction length(tbl) {\n  var tbl_size = tbl.length;\n  return Math.imul(Sys.max_string_length, tbl_size - 1 | 0) + Caml_array.get(tbl, tbl_size - 1 | 0).length | 0;\n}\n\nfunction get(tbl, ind) {\n  return Caml_bytes.get(Caml_array.get(tbl, Caml_int32.div(ind, Sys.max_string_length)), Caml_int32.mod_(ind, Sys.max_string_length));\n}\n\nfunction set(tbl, ind, c) {\n  return Caml_bytes.set(Caml_array.get(tbl, Caml_int32.div(ind, Sys.max_string_length)), Caml_int32.mod_(ind, Sys.max_string_length), c);\n}\n\nfunction blit(src, srcoff, dst, dstoff, len) {\n  for(var i = 0; i < len; ++i){\n    set(dst, dstoff + i | 0, get(src, srcoff + i | 0));\n  }\n  \n}\n\nfunction output(oc, tbl, pos, len) {\n  for(var i = pos ,i_finish = pos + len | 0; i < i_finish; ++i){\n    Curry._1(P.output_char(oc), get(tbl, i));\n  }\n  \n}\n\nfunction unsafe_blit_to_bytes(src, srcoff, dst, dstoff, len) {\n  for(var i = 0; i < len; ++i){\n    dst[dstoff + i | 0] = get(src, srcoff + i | 0);\n  }\n  \n}\n\nfunction input_bytes(ic, len) {\n  var tbl = create(len);\n  $$Array.iter((function (str) {\n          return Curry._3(P.really_input(ic), str, 0, str.length);\n        }), tbl);\n  return tbl;\n}\n\nvar LongString = {\n  create: create,\n  length: length,\n  get: get,\n  set: set,\n  blit: blit,\n  output: output,\n  unsafe_blit_to_bytes: unsafe_blit_to_bytes,\n  input_bytes: input_bytes\n};\n\nfunction edit_distance(a, b, cutoff) {\n  var la = a.length;\n  
var lb = b.length;\n  var cutoff$1 = Caml.caml_int_min(la > lb ? la : lb, cutoff);\n  if (Pervasives.abs(la - lb | 0) > cutoff$1) {\n    return ;\n  }\n  var m = $$Array.make_matrix(la + 1 | 0, lb + 1 | 0, cutoff$1 + 1 | 0);\n  Caml_array.set(Caml_array.get(m, 0), 0, 0);\n  for(var i = 1; i <= la; ++i){\n    Caml_array.set(Caml_array.get(m, i), 0, i);\n  }\n  for(var j = 1; j <= lb; ++j){\n    Caml_array.set(Caml_array.get(m, 0), j, j);\n  }\n  for(var i$1 = 1; i$1 <= la; ++i$1){\n    for(var j$1 = Caml.caml_int_max(1, (i$1 - cutoff$1 | 0) - 1 | 0) ,j_finish = Caml.caml_int_min(lb, (i$1 + cutoff$1 | 0) + 1 | 0); j$1 <= j_finish; ++j$1){\n      var cost = Caml_string.get(a, i$1 - 1 | 0) === Caml_string.get(b, j$1 - 1 | 0) ? 0 : 1;\n      var best = Caml.caml_int_min(1 + Caml.caml_int_min(Caml_array.get(Caml_array.get(m, i$1 - 1 | 0), j$1), Caml_array.get(Caml_array.get(m, i$1), j$1 - 1 | 0)) | 0, Caml_array.get(Caml_array.get(m, i$1 - 1 | 0), j$1 - 1 | 0) + cost | 0);\n      var best$1 = i$1 > 1 && j$1 > 1 && Caml_string.get(a, i$1 - 1 | 0) === Caml_string.get(b, j$1 - 2 | 0) && Caml_string.get(a, i$1 - 2 | 0) === Caml_string.get(b, j$1 - 1 | 0) ? Caml.caml_int_min(best, Caml_array.get(Caml_array.get(m, i$1 - 2 | 0), j$1 - 2 | 0) + cost | 0) : best;\n      Caml_array.set(Caml_array.get(m, i$1), j$1, best$1);\n    }\n  }\n  var result = Caml_array.get(Caml_array.get(m, la), lb);\n  if (result > cutoff$1) {\n    return ;\n  } else {\n    return result;\n  }\n}\n\nfunction spellcheck(env, name) {\n  var match = name.length;\n  var cutoff = match > 4 || match < 1 ? (\n      match === 6 || match === 5 ? 2 : 3\n    ) : (\n      match >= 3 ? 
1 : 0\n    );\n  return List.fold_left((function (param, param$1) {\n                  var dist = edit_distance(name, param$1, cutoff);\n                  if (dist === undefined) {\n                    return param;\n                  }\n                  var best_dist = param[1];\n                  if (dist < best_dist) {\n                    return [\n                            {\n                              hd: param$1,\n                              tl: /* [] */0\n                            },\n                            dist\n                          ];\n                  } else if (dist === best_dist) {\n                    return [\n                            {\n                              hd: param$1,\n                              tl: param[0]\n                            },\n                            dist\n                          ];\n                  } else {\n                    return param;\n                  }\n                }), [\n                /* [] */0,\n                Pervasives.max_int\n              ], env)[0];\n}\n\nfunction did_you_mean(ppf, get_choices) {\n  Curry._1(Format.fprintf(ppf), \"@?\");\n  var choices = Curry._1(get_choices, undefined);\n  if (!choices) {\n    return ;\n  }\n  var match = split_last(choices);\n  var rest = match[0];\n  return Curry._4(Format.fprintf(ppf), \"@\\nHint: Did you mean %s%s%s?@?\", $$String.concat(\", \", rest), rest === /* [] */0 ? 
\"\" : \" or \", match[1]);\n}\n\nfunction cut_at(s, c) {\n  var pos = $$String.index(s, c);\n  return [\n          $$String.sub(s, 0, pos),\n          $$String.sub(s, pos + 1 | 0, (s.length - pos | 0) - 1 | 0)\n        ];\n}\n\nvar compare$1 = Caml_obj.caml_compare;\n\nvar StringSet = $$Set.Make({\n      compare: compare$1\n    });\n\nvar compare$2 = Caml_obj.caml_compare;\n\nvar StringMap = $$Map.Make({\n      compare: compare$2\n    });\n\nfunction ansi_of_color(x) {\n  switch (x) {\n    case /* Black */0 :\n        return \"0\";\n    case /* Red */1 :\n        return \"1\";\n    case /* Green */2 :\n        return \"2\";\n    case /* Yellow */3 :\n        return \"3\";\n    case /* Blue */4 :\n        return \"4\";\n    case /* Magenta */5 :\n        return \"5\";\n    case /* Cyan */6 :\n        return \"6\";\n    case /* White */7 :\n        return \"7\";\n    \n  }\n}\n\nfunction code_of_style(x) {\n  if (typeof x === \"number\") {\n    if (x === /* Bold */0) {\n      return \"1\";\n    } else {\n      return \"0\";\n    }\n  } else if (x.TAG === /* FG */0) {\n    return \"3\" + ansi_of_color(x._0);\n  } else {\n    return \"4\" + ansi_of_color(x._0);\n  }\n}\n\nfunction ansi_of_style_l(l) {\n  var s = l ? (\n      l.tl ? 
$$String.concat(\";\", List.map(code_of_style, l)) : code_of_style(l.hd)\n    ) : \"0\";\n  return \"\\x1b[\" + (s + \"m\");\n}\n\nvar default_styles = {\n  error: {\n    hd: /* Bold */0,\n    tl: {\n      hd: {\n        TAG: /* FG */0,\n        _0: /* Red */1\n      },\n      tl: /* [] */0\n    }\n  },\n  warning: {\n    hd: /* Bold */0,\n    tl: {\n      hd: {\n        TAG: /* FG */0,\n        _0: /* Magenta */5\n      },\n      tl: /* [] */0\n    }\n  },\n  loc: {\n    hd: /* Bold */0,\n    tl: /* [] */0\n  }\n};\n\nvar cur_styles = {\n  contents: default_styles\n};\n\nfunction get_styles(param) {\n  return cur_styles.contents;\n}\n\nfunction set_styles(s) {\n  cur_styles.contents = s;\n  \n}\n\nfunction style_of_tag(s) {\n  switch (s) {\n    case \"error\" :\n        return cur_styles.contents.error;\n    case \"loc\" :\n        return cur_styles.contents.loc;\n    case \"warning\" :\n        return cur_styles.contents.warning;\n    default:\n      throw {\n            RE_EXN_ID: \"Not_found\",\n            Error: new Error()\n          };\n  }\n}\n\nvar color_enabled = {\n  contents: true\n};\n\nfunction mark_open_tag(or_else, s) {\n  try {\n    var style = style_of_tag(s);\n    if (color_enabled.contents) {\n      return ansi_of_style_l(style);\n    } else {\n      return \"\";\n    }\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return Curry._1(or_else, s);\n    }\n    throw exn;\n  }\n}\n\nfunction mark_close_tag(or_else, s) {\n  try {\n    style_of_tag(s);\n    if (color_enabled.contents) {\n      return ansi_of_style_l({\n                  hd: /* Reset */1,\n                  tl: /* [] */0\n                });\n    } else {\n      return \"\";\n    }\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return Curry._1(or_else, s);\n    }\n    throw exn;\n  
}\n}\n\nfunction set_color_tag_handling(ppf) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"misc.res\",\n          866,\n          4\n        ],\n        Error: new Error()\n      };\n}\n\nfunction should_enable_color(param) {\n  var term;\n  try {\n    term = Caml_sys.caml_sys_getenv(\"TERM\");\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      term = \"\";\n    } else {\n      throw exn;\n    }\n  }\n  if (term !== \"dumb\" && term !== \"\") {\n    return Caml_external_polyfill.resolve(\"caml_sys_isatty\")(P.stderr);\n  } else {\n    return false;\n  }\n}\n\nvar first = {\n  contents: true\n};\n\nvar formatter_l_1 = {\n  hd: Format.err_formatter,\n  tl: {\n    hd: Format.str_formatter,\n    tl: /* [] */0\n  }\n};\n\nvar formatter_l = {\n  hd: Format.std_formatter,\n  tl: formatter_l_1\n};\n\nfunction setup(o) {\n  if (first.contents) {\n    first.contents = false;\n    Format.set_mark_tags(true);\n    List.iter(set_color_tag_handling, formatter_l);\n    var tmp;\n    if (o !== undefined) {\n      switch (o) {\n        case /* Auto */0 :\n            tmp = should_enable_color(undefined);\n            break;\n        case /* Always */1 :\n            tmp = true;\n            break;\n        case /* Never */2 :\n            tmp = false;\n            break;\n        \n      }\n    } else {\n      tmp = should_enable_color(undefined);\n    }\n    color_enabled.contents = tmp;\n  }\n  \n}\n\nvar Color = {\n  ansi_of_color: ansi_of_color,\n  code_of_style: code_of_style,\n  ansi_of_style_l: ansi_of_style_l,\n  default_styles: default_styles,\n  cur_styles: cur_styles,\n  get_styles: get_styles,\n  set_styles: set_styles,\n  style_of_tag: style_of_tag,\n  color_enabled: color_enabled,\n  mark_open_tag: mark_open_tag,\n  mark_close_tag: mark_close_tag,\n  set_color_tag_handling: set_color_tag_handling,\n  should_enable_color: 
should_enable_color,\n  setup: setup\n};\n\nfunction normalise_eol(s) {\n  var b = $$Buffer.create(80);\n  for(var i = 0 ,i_finish = s.length; i < i_finish; ++i){\n    if (Caml_string.get(s, i) !== /* '\\r' */13) {\n      $$Buffer.add_char(b, Caml_string.get(s, i));\n    }\n    \n  }\n  return $$Buffer.contents(b);\n}\n\nfunction delete_eol_spaces(src) {\n  var len_src = src.length;\n  var dst = Caml_bytes.caml_create_bytes(len_src);\n  var loop = function (_i_src, _i_dst) {\n    while(true) {\n      var i_dst = _i_dst;\n      var i_src = _i_src;\n      if (i_src === len_src) {\n        return i_dst;\n      }\n      var c = Caml_string.get(src, i_src);\n      if (c === 9) {\n        return loop_spaces(1, i_src + 1 | 0, i_dst);\n      }\n      if (c === 32) {\n        return loop_spaces(1, i_src + 1 | 0, i_dst);\n      }\n      Caml_bytes.set(dst, i_dst, c);\n      _i_dst = i_dst + 1 | 0;\n      _i_src = i_src + 1 | 0;\n      continue ;\n    };\n  };\n  var loop_spaces = function (_spaces, _i_src, i_dst) {\n    while(true) {\n      var i_src = _i_src;\n      var spaces = _spaces;\n      if (i_src === len_src) {\n        return i_dst;\n      }\n      var match = Caml_string.get(src, i_src);\n      if (match === 10 || match === 9) {\n        if (match >= 10) {\n          Caml_bytes.set(dst, i_dst, /* '\\n' */10);\n          return loop(i_src + 1 | 0, i_dst + 1 | 0);\n        }\n        \n      } else if (match !== 32) {\n        for(var n = 0; n <= spaces; ++n){\n          Caml_bytes.set(dst, i_dst + n | 0, Caml_string.get(src, (i_src - spaces | 0) + n | 0));\n        }\n        return loop(i_src + 1 | 0, (i_dst + spaces | 0) + 1 | 0);\n      }\n      _i_src = i_src + 1 | 0;\n      _spaces = spaces + 1 | 0;\n      continue ;\n    };\n  };\n  var stop = loop(0, 0);\n  return Bytes.sub_string(dst, 0, stop);\n}\n\nvar HookExnWrapper = /* @__PURE__ */Caml_exceptions.create(\"Misc.HookExnWrapper\");\n\nvar HookExn = /* @__PURE__ 
*/Caml_exceptions.create(\"Misc.HookExn\");\n\nfunction raise_direct_hook_exn(e) {\n  throw {\n        RE_EXN_ID: HookExn,\n        _1: e,\n        Error: new Error()\n      };\n}\n\nfunction fold_hooks(list, hook_info, ast) {\n  return List.fold_left((function (ast, param) {\n                try {\n                  return Curry._2(param[1], hook_info, ast);\n                }\n                catch (raw_e){\n                  var e = Caml_js_exceptions.internalToOCamlException(raw_e);\n                  if (e.RE_EXN_ID === HookExn) {\n                    throw e._1;\n                  }\n                  throw {\n                        RE_EXN_ID: HookExnWrapper,\n                        error: e,\n                        hook_name: param[0],\n                        hook_info: hook_info,\n                        Error: new Error()\n                      };\n                }\n              }), ast, List.sort(Caml_obj.caml_compare, list));\n}\n\nfunction MakeHooks(M) {\n  var hooks = {\n    contents: /* [] */0\n  };\n  var add_hook = function (name, f) {\n    hooks.contents = {\n      hd: [\n        name,\n        f\n      ],\n      tl: hooks.contents\n    };\n    \n  };\n  var apply_hooks = function (sourcefile, intf) {\n    return fold_hooks(hooks.contents, sourcefile, intf);\n  };\n  return {\n          add_hook: add_hook,\n          apply_hooks: apply_hooks\n        };\n}\n\nvar may = iter;\n\nvar may_map = map;\n\nexport {\n  Fatal_error ,\n  fatal_error ,\n  fatal_errorf ,\n  try_finally ,\n  protect_refs ,\n  map_end ,\n  map_left_right ,\n  for_all2 ,\n  replicate_list ,\n  list_remove ,\n  split_last ,\n  Stdlib ,\n  may ,\n  may_map ,\n  find_in_path ,\n  find_in_path_rel ,\n  find_in_path_uncap ,\n  remove_file ,\n  expand_directory ,\n  create_hashtable ,\n  copy_file ,\n  copy_file_chunk ,\n  string_of_file ,\n  output_to_file_via_temporary ,\n  log2 ,\n  align ,\n  no_overflow_add ,\n  no_overflow_sub ,\n  no_overflow_mul ,\n  no_overflow_lsl ,\n  
Int_literal_converter ,\n  chop_extensions ,\n  search_substring ,\n  replace_substring ,\n  rev_split_words ,\n  get_ref ,\n  fst3 ,\n  snd3 ,\n  thd3 ,\n  fst4 ,\n  snd4 ,\n  thd4 ,\n  for4 ,\n  LongString ,\n  edit_distance ,\n  spellcheck ,\n  did_you_mean ,\n  cut_at ,\n  StringSet ,\n  StringMap ,\n  Color ,\n  normalise_eol ,\n  delete_eol_spaces ,\n  HookExnWrapper ,\n  HookExn ,\n  raise_direct_hook_exn ,\n  fold_hooks ,\n  MakeHooks ,\n  \n}\n/* StringSet Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/misc.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Xavier Leroy, projet Cristal, INRIA Rocquencourt */\n/*  */\n/* Copyright 1996 Institut National de Recherche en Informatique et */\n/* en Automatique. */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\n/* Errors */\n\nopen P\n\nexception Fatal_error\n\n@raises(Fatal_error)\nlet fatal_error = msg => {\n  print_string(\">> Fatal error: \")\n  prerr_endline(msg)\n  raise(Fatal_error)\n}\n\n@raises(Fatal_error)\nlet fatal_errorf = fmt => Format.kasprintf(fatal_error, fmt)\n\n/* Exceptions */\n\n@raises(genericException)\nlet try_finally = (work, cleanup) => {\n  let result = try work() catch {\n  | e =>\n    cleanup()\n    raise(e)\n  }\n  cleanup()\n  result\n}\n\ntype rec ref_and_value = R(ref<'a>, 'a): ref_and_value\n\n@raises(genericException)\nlet protect_refs = {\n  let set_refs = l => List.iter((R(r, v)) => r := v, l)\n  (refs, f) => {\n    let backup = List.map((R(r, _)) => R(r, r.contents), refs)\n    set_refs(refs)\n    switch f() {\n    | x =>\n      set_refs(backup)\n      x\n    | exception e =>\n      set_refs(backup)\n      raise(e)\n    }\n  }\n}\n\n/* List functions */\n\nlet rec map_end = (f, l1, l2) =>\n  switch l1 {\n  | list{} => l2\n  | list{hd, ...tl} => list{f(hd), ...map_end(f, tl, l2)}\n  }\n\nlet rec map_left_right = (f, x) =>\n  switch x {\n  | list{} => list{}\n  | list{hd, ...tl} =>\n    let res = f(hd)\n    list{res, ...map_left_right(f, tl)}\n  }\n\nlet rec for_all2 = (pred, l1, l2) =>\n  switch (l1, l2) {\n  | (list{}, list{}) => true\n  | (list{hd1, ...tl1}, list{hd2, ...tl2}) => pred(hd1, hd2) && for_all2(pred, tl1, tl2)\n  | (_, _) => false\n  }\n\nlet 
rec replicate_list = (elem, n) =>\n  if n <= 0 {\n    list{}\n  } else {\n    list{elem, ...replicate_list(elem, n - 1)}\n  }\n\nlet rec list_remove = (x, y) => assert false\n\nlet rec split_last = x =>\n  switch x {\n  | list{} => assert false\n  | list{x} => (list{}, x)\n  | list{hd, ...tl} =>\n    let (lst, last) = split_last(tl)\n    (list{hd, ...lst}, last)\n  }\n\nmodule Stdlib = {\n  module List = {\n    type t<'a> = list<'a>\n\n    let rec compare = (cmp, l1, l2) =>\n      switch (l1, l2) {\n      | (list{}, list{}) => 0\n      | (list{}, list{_, ..._}) => -1\n      | (list{_, ..._}, list{}) => 1\n      | (list{h1, ...t1}, list{h2, ...t2}) =>\n        let c = cmp(h1, h2)\n        if c != 0 {\n          c\n        } else {\n          compare(cmp, t1, t2)\n        }\n      }\n\n    let rec equal = (eq, l1, l2) =>\n      switch (l1, l2) {\n      | (list{}, list{}) => true\n      | (list{hd1, ...tl1}, list{hd2, ...tl2}) => eq(hd1, hd2) && equal(eq, tl1, tl2)\n      | (_, _) => false\n      }\n\n    @raises(Invalid_argument)\n    let filter_map = (f, l) => {\n      @raises(Invalid_argument)\n      let rec aux = (acc, l) =>\n        switch l {\n        | list{} => List.rev(acc)\n        | list{h, ...t} =>\n          switch f(h) {\n          | None => aux(acc, t)\n          | Some(v) => aux(list{v, ...acc}, t)\n          }\n        }\n\n      aux(list{}, l)\n    }\n\n    @raises(Invalid_argument)\n    let map2_prefix = (f, l1, l2) => {\n      @raises(Invalid_argument)\n      let rec aux = (acc, l1, l2) =>\n        switch (l1, l2) {\n        | (list{}, _) => (List.rev(acc), l2)\n        | (list{_, ..._}, list{}) => raise(Invalid_argument(\"map2_prefix\"))\n        | (list{h1, ...t1}, list{h2, ...t2}) =>\n          let h = f(h1, h2)\n          aux(list{h, ...acc}, t1, t2)\n        }\n\n      aux(list{}, l1, l2)\n    }\n\n    @raises(Invalid_argument)\n    let some_if_all_elements_are_some = l => {\n      @raises(Invalid_argument)\n      let rec aux = (acc, l) =>\n   
     switch l {\n        | list{} => Some(List.rev(acc))\n        | list{None, ..._} => None\n        | list{Some(h), ...t} => aux(list{h, ...acc}, t)\n        }\n\n      aux(list{}, l)\n    }\n\n    @raises(Invalid_argument)\n    let split_at = (n, l) => {\n      @raises(Invalid_argument)\n      let rec aux = (n, acc, l) =>\n        if n == 0 {\n          (List.rev(acc), l)\n        } else {\n          switch l {\n          | list{} => raise(Invalid_argument(\"split_at\"))\n          | list{t, ...q} => aux(n - 1, list{t, ...acc}, q)\n          }\n        }\n\n      aux(n, list{}, l)\n    }\n  }\n\n  module Option = {\n    type t<'a> = option<'a>\n\n    let equal = (eq, o1, o2) =>\n      switch (o1, o2) {\n      | (None, None) => true\n      | (Some(e1), Some(e2)) => eq(e1, e2)\n      | (_, _) => false\n      }\n\n    let iter = (f, x) =>\n      switch x {\n      | Some(x) => f(x)\n      | None => ()\n      }\n\n    let map = (f, x) =>\n      switch x {\n      | Some(x) => Some(f(x))\n      | None => None\n      }\n\n    let fold = (f, a, b) =>\n      switch a {\n      | None => b\n      | Some(a) => f(a, b)\n      }\n\n    let value_default = (f, ~default, a) =>\n      switch a {\n      | None => default\n      | Some(a) => f(a)\n      }\n  }\n\n  module Array = {\n    @raises(Invalid_argument)\n    let exists2 = (p, a1, a2) => {\n      let n = Array.length(a1)\n      if Array.length(a2) != n {\n        invalid_arg(\"Misc.Stdlib.Array.exists2\")\n      }\n      let rec loop = i =>\n        if i == n {\n          false\n        } else if p(Array.unsafe_get(a1, i), Array.unsafe_get(a2, i)) {\n          true\n        } else {\n          loop(succ(i))\n        }\n      loop(0)\n    }\n  }\n}\n\nlet may = Stdlib.Option.iter\nlet may_map = Stdlib.Option.map\n\n/* File functions */\n\n@raises(Not_found)\nlet find_in_path = (path, name) =>\n  if !Filename.is_implicit(name) {\n    if Sys.file_exists(name) {\n      name\n    } else {\n      raise(Not_found)\n    }\n  } else 
{\n    @raises(Not_found)\n    let rec try_dir = x =>\n      switch x {\n      | list{} => raise(Not_found)\n      | list{dir, ...rem} =>\n        let fullname = Filename.concat(dir, name)\n        if Sys.file_exists(fullname) {\n          fullname\n        } else {\n          try_dir(rem)\n        }\n      }\n    try_dir(path)\n  }\n\n@raises(Not_found)\nlet find_in_path_rel = (path, name) => {\n  let rec simplify = s => {\n    open Filename\n    let base = basename(s)\n    let dir = dirname(s)\n    if dir == s {\n      dir\n    } else if base == current_dir_name {\n      simplify(dir)\n    } else {\n      concat(simplify(dir), base)\n    }\n  }\n\n  @raises(Not_found)\n  let rec try_dir = x =>\n    switch x {\n    | list{} => raise(Not_found)\n    | list{dir, ...rem} =>\n      let fullname = simplify(Filename.concat(dir, name))\n      if Sys.file_exists(fullname) {\n        fullname\n      } else {\n        try_dir(rem)\n      }\n    }\n  try_dir(path)\n}\n\n@raises(Not_found)\nlet find_in_path_uncap = (path, name) => {\n  let uname = String.uncapitalize_ascii(name)\n\n  @raises(Not_found)\n  let rec try_dir = x =>\n    switch x {\n    | list{} => raise(Not_found)\n    | list{dir, ...rem} =>\n      let fullname = Filename.concat(dir, name)\n      and ufullname = Filename.concat(dir, uname)\n      if Sys.file_exists(ufullname) {\n        ufullname\n      } else if Sys.file_exists(fullname) {\n        fullname\n      } else {\n        try_dir(rem)\n      }\n    }\n  try_dir(path)\n}\n\n\nlet remove_file = filename =>\n  try if Sys.file_exists(filename) {\n    Sys.remove(filename)\n  } catch {\n  | Sys_error(_msg) => ()\n  }\n\n/* Expand a -I option: if it starts with +, make it relative to the standard\n library directory */\n\n@raises(Invalid_argument)\nlet expand_directory = (alt, s) =>\n  if String.length(s) > 0 && String.get(s, 0) == '+' {\n    Filename.concat(alt, String.sub(s, 1, String.length(s) - 1))\n  } else {\n    s\n  }\n\n/* Hashtable functions 
*/\n\nlet create_hashtable = (size, init) => {\n  let tbl = Hashtbl.create(size)\n  List.iter(((key, data)) => Hashtbl.add(tbl, key, data), init)\n  tbl\n}\n\n/* File copy */\n\n@raises(Invalid_argument)\nlet copy_file = (ic, oc) => {\n  let buff = Bytes.create(0x1000)\n\n  @raises(Invalid_argument)\n  let rec copy = () => {\n    let n = input(ic, buff, 0, 0x1000)\n    if n == 0 {\n      ()\n    } else {\n      output(oc, buff, 0, n)\n      copy()\n    }\n  }\n  copy()\n}\n\n@raises(Invalid_argument)\nlet copy_file_chunk = (ic, oc, len) => {\n  let buff = Bytes.create(0x1000)\n\n  @raises([End_of_file, Invalid_argument])\n  let rec copy = n =>\n    if n <= 0 {\n      ()\n    } else {\n      let r = input(ic, buff, 0, min(n, 0x1000))\n      if r == 0 {\n        raise(End_of_file)\n      } else {\n        output(oc, buff, 0, r)\n        copy(n - r)\n      }\n    }\n  copy(len)\n}\n\n@raises(Invalid_argument)\nlet string_of_file = ic => {\n  let b = Buffer.create(0x10000)\n  let buff = Bytes.create(0x1000)\n\n  @raises(Invalid_argument)\n  let rec copy = () => {\n    let n = input(ic, buff, 0, 0x1000)\n    if n == 0 {\n      Buffer.contents(b)\n    } else {\n      Buffer.add_subbytes(b, buff, 0, n)\n      copy()\n    }\n  }\n  copy()\n}\n\n@raises([Sys_error, genericException])\nlet output_to_file_via_temporary = (~mode=list{Open_text}, filename, fn) => {\n  let (temp_filename, oc) = open_temp_file(\n    ~mode,\n    ~perms=0o666,\n    ~temp_dir=Filename.dirname(filename),\n    Filename.basename(filename),\n    \".tmp\",\n  )\n  /* The 0o666 permissions will be modified by the umask.  It's just\n       like what [open_out] and [open_out_bin] do.\n       With temp_dir = dirname filename, we ensure that the returned\n       temp file is in the same directory as filename itself, making\n       it safe to rename temp_filename to filename later.\n       With prefix = basename filename, we are almost certain that\n       the first generated name will be unique.  
A fixed prefix\n       would work too but might generate more collisions if many\n       files are being produced simultaneously in the same directory. */\n  switch fn(temp_filename, oc) {\n  | res =>\n    close_out(oc)\n    try {\n      Sys.rename(temp_filename, filename)\n      res\n    } catch {\n    | exn =>\n      remove_file(temp_filename)\n      raise(exn)\n    }\n  | exception exn =>\n    close_out(oc)\n    remove_file(temp_filename)\n    raise(exn)\n  }\n}\n\n/* Integer operations */\n\nlet rec log2 = n =>\n  if n <= 1 {\n    0\n  } else {\n    1 + log2(asr(n, 1))\n  }\n\nlet align = (n, a) =>\n  if n >= 0 {\n    land(n + a - 1, -a)\n  } else {\n    land(n, -a)\n  }\n\nlet no_overflow_add = (a, b) => lor(lxor(a, b), lxor(a, lnot(a + b))) < 0\n\nlet no_overflow_sub = (a, b) => lor(lxor(a, lnot(b)), lxor(b, a - b)) < 0\n\n@raises(Division_by_zero)\nlet no_overflow_mul = (a, b) => b != 0 && a * b / b == a\n\nlet no_overflow_lsl = (a, k) =>\n  0 <= k && (k < Sys.word_size && (asr(min_int, k) <= a && a <= asr(max_int, k)))\n\nmodule Int_literal_converter = {\n  /* To convert integer literals, allowing max_int + 1 (PR#4210) */\n  @raises(Invalid_argument)\n  let cvt_int_aux = (str, neg, of_string) =>\n    if String.length(str) == 0 || String.get(str, 0) == '-' {\n      of_string(str)\n    } else {\n      neg(of_string(\"-\" ++ str))\n    }\n  @raises([Failure, Invalid_argument])\n  let int = s => cvt_int_aux(s, \\\"~-\", int_of_string)\n  @raises(Invalid_argument)\n  let int32 = s => cvt_int_aux(s, Int32.neg, Int32.of_string)\n  @raises(Invalid_argument)\n  let int64 = s => cvt_int_aux(s, Int64.neg, Int64.of_string)\n  @raises(Invalid_argument)\n  let nativeint = s => cvt_int_aux(s, Nativeint.neg, Nativeint.of_string)\n}\n\n/* String operations */\n\n@raises(Invalid_argument)\nlet chop_extensions = file => {\n  let dirname = Filename.dirname(file) and basename = Filename.basename(file)\n  try {\n    let pos = String.index(basename, '.')\n    let basename = 
String.sub(basename, 0, pos)\n    if Filename.is_implicit(file) && dirname == Filename.current_dir_name {\n      basename\n    } else {\n      Filename.concat(dirname, basename)\n    }\n  } catch {\n  | Not_found => file\n  }\n}\n\n@raises(Invalid_argument)\nlet search_substring = (pat, str, start) => {\n  @raises([Invalid_argument, Not_found])\n  let rec search = (i, j) =>\n    if j >= String.length(pat) {\n      i\n    } else if i + j >= String.length(str) {\n      raise(Not_found)\n    } else if String.get(str, i + j) == String.get(pat, j) {\n      search(i, j + 1)\n    } else {\n      search(i + 1, 0)\n    }\n  search(start, 0)\n}\n\n@raises(Invalid_argument)\nlet replace_substring = (~before, ~after, str) => {\n  @raises(Invalid_argument)\n  let rec search = (acc, curr) =>\n    switch search_substring(before, str, curr) {\n    | next =>\n      let prefix = String.sub(str, curr, next - curr)\n      search(list{prefix, ...acc}, next + String.length(before))\n    | exception Not_found =>\n      let suffix = String.sub(str, curr, String.length(str) - curr)\n      List.rev(list{suffix, ...acc})\n    }\n  String.concat(after, search(list{}, 0))\n}\n\n@raises(Invalid_argument)\nlet rev_split_words = s => {\n  @raises(Invalid_argument)\n  let rec split1 = (res, i) =>\n    if i >= String.length(s) {\n      res\n    } else {\n      switch String.get(s, i) {\n      | ' ' | '\\t' | '\\r' | '\\n' => split1(res, i + 1)\n      | _ => split2(res, i, i + 1)\n      }\n    }\n  @raises(Invalid_argument)\n  and split2 = (res, i, j) =>\n    if j >= String.length(s) {\n      list{String.sub(s, i, j - i), ...res}\n    } else {\n      switch String.get(s, j) {\n      | ' ' | '\\t' | '\\r' | '\\n' => split1(list{String.sub(s, i, j - i), ...res}, j + 1)\n      | _ => split2(res, i, j + 1)\n      }\n    }\n  split1(list{}, 0)\n}\n\nlet get_ref = r => {\n  let v = r.contents\n  r := list{}\n  v\n}\n\nlet fst3 = ((x, _, _)) => x\nlet snd3 = ((_, x, _)) => x\nlet thd3 = ((_, _, x)) => 
x\n\nlet fst4 = ((x, _, _, _)) => x\nlet snd4 = ((_, x, _, _)) => x\nlet thd4 = ((_, _, x, _)) => x\nlet for4 = ((_, _, _, x)) => x\n\nmodule LongString = {\n  type t = array<bytes>\n\n  @raises([Division_by_zero, Invalid_argument])\n  let create = str_size => {\n    let tbl_size = str_size / Sys.max_string_length + 1\n    let tbl = Array.make(tbl_size, Bytes.empty)\n    for i in 0 to tbl_size - 2 {\n      tbl[i] = Bytes.create(Sys.max_string_length)\n    }\n    tbl[tbl_size - 1] = Bytes.create(mod(str_size, Sys.max_string_length))\n    tbl\n  }\n\n  @raises(Invalid_argument)\n  let length = tbl => {\n    let tbl_size = Array.length(tbl)\n    Sys.max_string_length * (tbl_size - 1) + Bytes.length(tbl[tbl_size - 1])\n  }\n\n  @raises([Division_by_zero, Invalid_argument])\n  let get = (tbl, ind) =>\n    Bytes.get(tbl[ind / Sys.max_string_length], mod(ind, Sys.max_string_length))\n\n  @raises([Division_by_zero, Invalid_argument])\n  let set = (tbl, ind, c) =>\n    Bytes.set(tbl[ind / Sys.max_string_length], mod(ind, Sys.max_string_length), c)\n\n  @raises([Division_by_zero, Invalid_argument])\n  let blit = (src, srcoff, dst, dstoff, len) =>\n    for i in 0 to len - 1 {\n      set(dst, dstoff + i, get(src, srcoff + i))\n    }\n\n  @raises([Division_by_zero, Invalid_argument])\n  let output = (oc, tbl, pos, len) =>\n    for i in pos to pos + len - 1 {\n      output_char(oc, get(tbl, i))\n    }\n\n  @raises([Division_by_zero, Invalid_argument])\n  let unsafe_blit_to_bytes = (src, srcoff, dst, dstoff, len) =>\n    for i in 0 to len - 1 {\n      Bytes.unsafe_set(dst, dstoff + i, get(src, srcoff + i))\n    }\n\n  @raises([Division_by_zero, End_of_file, Invalid_argument])\n  let input_bytes = (ic, len) => {\n    let tbl = create(len)\n    Array.iter(str => really_input(ic, str, 0, Bytes.length(str)), tbl)\n    tbl\n  }\n}\n\n@raises(Invalid_argument)\nlet edit_distance = (a, b, cutoff) => {\n  let (la, lb) = (String.length(a), String.length(b))\n  let cutoff = /* using 
max_int for cutoff would cause overflows in (i + cutoff + 1);\n   we bring it back to the (max la lb) worstcase */\n  min(max(la, lb), cutoff)\n  if abs(la - lb) > cutoff {\n    None\n  } else {\n    /* initialize with 'cutoff + 1' so that not-yet-written-to cases have\n       the worst possible cost; this is useful when computing the cost of\n       a case just at the boundary of the cutoff diagonal. */\n    let m = Array.make_matrix(la + 1, lb + 1, cutoff + 1)\n    m[0][0] = 0\n    for i in 1 to la {\n      m[i][0] = i\n    }\n    for j in 1 to lb {\n      m[0][j] = j\n    }\n    for i in 1 to la {\n      for j in max(1, i - cutoff - 1) to min(lb, i + cutoff + 1) {\n        let cost = if String.get(a, i - 1) == String.get(b, j - 1) {\n          0\n        } else {\n          1\n        }\n        let best = /* insert, delete or substitute */\n        min(1 + min(m[i - 1][j], m[i][j - 1]), m[i - 1][j - 1] + cost)\n\n        let best = /* swap two adjacent letters; we use \"cost\" again in case of\n             a swap between two identical letters; this is slightly\n             redundant as this is a double-substitution case, but it\n             was done this way in most online implementations and\n             imitation has its virtues */\n        if (\n          !(\n            i > 1 &&\n              (j > 1 &&\n              (String.get(a, i - 1) == String.get(b, j - 2) &&\n                String.get(a, i - 2) == String.get(b, j - 1)))\n          )\n        ) {\n          best\n        } else {\n          min(best, m[i - 2][j - 2] + cost)\n        }\n\n        m[i][j] = best\n      }\n    }\n    let result = m[la][lb]\n    if result > cutoff {\n      None\n    } else {\n      Some(result)\n    }\n  }\n}\n\n@raises(Invalid_argument)\nlet spellcheck = (env, name) => {\n  let cutoff = switch String.length(name) {\n  | 1 | 2 => 0\n  | 3 | 4 => 1\n  | 5 | 6 => 2\n  | _ => 3\n  }\n\n  @raises(Invalid_argument)\n  let compare = (target, acc, head) =>\n    switch 
edit_distance(target, head, cutoff) {\n    | None => acc\n    | Some(dist) =>\n      let (best_choice, best_dist) = acc\n      if dist < best_dist {\n        (list{head}, dist)\n      } else if dist == best_dist {\n        (list{head, ...best_choice}, dist)\n      } else {\n        acc\n      }\n    }\n\n  fst(List.fold_left(compare(name), (list{}, max_int), env))\n}\n\nlet did_you_mean = (ppf, get_choices) => {\n  /* flush now to get the error report early, in the (unheard of) case\n     where the search in the get_choices function would take a bit of\n     time; in the worst case, the user has seen the error, she can\n     interrupt the process before the spell-checking terminates. */\n  Format.fprintf(ppf, \"@?\")\n  switch get_choices() {\n  | list{} => ()\n  | choices =>\n    let (rest, last) = split_last(choices)\n    Format.fprintf(\n      ppf,\n      \"@\\nHint: Did you mean %s%s%s?@?\",\n      String.concat(\", \", rest),\n      if rest == list{} {\n        \"\"\n      } else {\n        \" or \"\n      },\n      last,\n    )\n  }\n}\n\n@raises([Invalid_argument, Not_found])\nlet cut_at = (s, c) => {\n  let pos = String.index(s, c)\n  (String.sub(s, 0, pos), String.sub(s, pos + 1, String.length(s) - pos - 1))\n}\n\nmodule StringSet = Set.Make({\n  type t = string\n  let compare = compare\n})\nmodule StringMap = Map.Make({\n  type t = string\n  let compare = compare\n})\n\n/* Color handling */\nmodule Color = {\n  /* use ANSI color codes, see https://en.wikipedia.org/wiki/ANSI_escape_code */\n  type color =\n    | Black\n    | Red\n    | Green\n    | Yellow\n    | Blue\n    | Magenta\n    | Cyan\n    | White\n\n  type style =\n    | FG(color) /* foreground */\n    | BG(color) /* background */\n    | Bold\n    | Reset\n\n  let ansi_of_color = x =>\n    switch x {\n    | Black => \"0\"\n    | Red => \"1\"\n    | Green => \"2\"\n    | Yellow => \"3\"\n    | Blue => \"4\"\n    | Magenta => \"5\"\n    | Cyan => \"6\"\n    | White => \"7\"\n    }\n\n  let 
code_of_style = x =>\n    switch x {\n    | FG(c) => \"3\" ++ ansi_of_color(c)\n    | BG(c) => \"4\" ++ ansi_of_color(c)\n    | Bold => \"1\"\n    | Reset => \"0\"\n    }\n\n  let ansi_of_style_l = l => {\n    let s = switch l {\n    | list{} => code_of_style(Reset)\n    | list{s} => code_of_style(s)\n    | _ => String.concat(\";\", List.map(code_of_style, l))\n    }\n\n    \"\\x1b[\" ++ (s ++ \"m\")\n  }\n\n  type styles = {\n    error: list<style>,\n    warning: list<style>,\n    loc: list<style>,\n  }\n\n  let default_styles = {\n    warning: list{Bold, FG(Magenta)},\n    error: list{Bold, FG(Red)},\n    loc: list{Bold},\n  }\n\n  let cur_styles = ref(default_styles)\n  let get_styles = () => cur_styles.contents\n  let set_styles = s => cur_styles := s\n\n  /* map a tag to a style, if the tag is known.\n   @raise Not_found otherwise */\n  @raises(Not_found)\n  let style_of_tag = s =>\n    switch s {\n    | \"error\" => cur_styles.contents.error\n    | \"warning\" => cur_styles.contents.warning\n    | \"loc\" => cur_styles.contents.loc\n    | _ => raise(Not_found)\n    }\n\n  let color_enabled = ref(true)\n\n  /* either prints the tag of [s] or delegates to [or_else] */\n  let mark_open_tag = (~or_else, s) =>\n    try {\n      let style = style_of_tag(s)\n      if color_enabled.contents {\n        ansi_of_style_l(style)\n      } else {\n        \"\"\n      }\n    } catch {\n    | Not_found => or_else(s)\n    }\n\n  let mark_close_tag = (~or_else, s) =>\n    try {\n      let _ = style_of_tag(s)\n      if color_enabled.contents {\n        ansi_of_style_l(list{Reset})\n      } else {\n        \"\"\n      }\n    } catch {\n    | Not_found => or_else(s)\n    }\n\n  /* add color handling to formatter [ppf] */\n  let set_color_tag_handling = ppf => {\n    assert false\n  }\n\n  external isatty: out_channel => bool = \"caml_sys_isatty\"\n\n  /* reasonable heuristic on whether colors should be enabled */\n  let should_enable_color = () => {\n    let term = try 
Sys.getenv(\"TERM\") catch {\n    | Not_found => \"\"\n    }\n    term != \"dumb\" && (term != \"\" && isatty(stderr))\n  }\n\n  type setting = Auto | Always | Never\n\n  let setup = {\n    let first = ref(true) /* initialize only once */\n    let formatter_l = list{Format.std_formatter, Format.err_formatter, Format.str_formatter}\n\n    o => {\n      if first.contents {\n        first := false\n        Format.set_mark_tags(true)\n        List.iter(set_color_tag_handling, formatter_l)\n        color_enabled :=\n          switch o {\n          | Some(Always) => true\n          | Some(Auto) => should_enable_color()\n          | Some(Never) => false\n          | None => should_enable_color()\n          }\n      }\n      ()\n    }\n  }\n}\n\n@raises(Invalid_argument)\nlet normalise_eol = s => {\n  let b = Buffer.create(80)\n  for i in 0 to String.length(s) - 1 {\n    if String.get(s, i) != '\\r' {\n      Buffer.add_char(b, String.get(s, i))\n    }\n  }\n  Buffer.contents(b)\n}\n\n@raises(Invalid_argument)\nlet delete_eol_spaces = src => {\n  let len_src = String.length(src)\n  let dst = Bytes.create(len_src)\n\n  @raises(Invalid_argument)\n  let rec loop = (i_src, i_dst) =>\n    if i_src == len_src {\n      i_dst\n    } else {\n      switch String.get(src, i_src) {\n      | ' ' | '\\t' => loop_spaces(1, i_src + 1, i_dst)\n      | c =>\n        Bytes.set(dst, i_dst, c)\n        loop(i_src + 1, i_dst + 1)\n      }\n    }\n  @raises(Invalid_argument)\n  and loop_spaces = (spaces, i_src, i_dst) =>\n    if i_src == len_src {\n      i_dst\n    } else {\n      switch String.get(src, i_src) {\n      | ' ' | '\\t' => loop_spaces(spaces + 1, i_src + 1, i_dst)\n      | '\\n' =>\n        Bytes.set(dst, i_dst, '\\n')\n        loop(i_src + 1, i_dst + 1)\n      | _ =>\n        for n in 0 to spaces {\n          Bytes.set(dst, i_dst + n, String.get(src, i_src - spaces + n))\n        }\n        loop(i_src + 1, i_dst + spaces + 1)\n      }\n    }\n\n  let stop = loop(0, 0)\n  
Bytes.sub_string(dst, 0, stop)\n}\n\ntype hook_info = {sourcefile: string}\n\nexception HookExnWrapper({error: exn, hook_name: string, hook_info: hook_info})\n\nexception HookExn(exn)\n\n@raises(HookExn)\nlet raise_direct_hook_exn = e => raise(HookExn(e))\n\n@raises([HookExnWrapper, genericException])\nlet fold_hooks = (list, hook_info, ast) =>\n  List.fold_left(\n    (ast, (hook_name, f)) =>\n      try f(hook_info, ast) catch {\n      | HookExn(e) => raise(e)\n      | error => raise(HookExnWrapper({error: error, hook_name: hook_name, hook_info: hook_info}))\n      },\n    /* when explicit reraise with backtrace will be available,\n     it should be used here */\n    ast,\n    List.sort(compare, list),\n  )\n\nmodule type HookSig = {\n  type t\n\n  let add_hook: (string, (hook_info, t) => t) => unit\n  let apply_hooks: (hook_info, t) => t\n}\n\nmodule MakeHooks = (\n  M: {\n    type t\n  },\n): (HookSig with type t = M.t) => {\n  type t = M.t\n\n  let hooks = ref(list{})\n  let add_hook = (name, f) => hooks := list{(name, f), ...hooks.contents}\n  @raises([HookExnWrapper, genericException])\n  let apply_hooks = (sourcefile, intf) => fold_hooks(hooks.contents, sourcefile, intf)\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/nativeint.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_obj from \"rescript/lib/es6/caml_obj.js\";\nimport * as Caml_format from \"rescript/lib/es6/caml_format.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nfunction to_string(n) {\n  return Caml_format.caml_nativeint_format(\"%d\", n);\n}\n\nfunction of_string_opt(s) {\n  try {\n    return Caml_option.some(Caml_format.caml_nativeint_of_string(s));\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Failure\") {\n      return ;\n    }\n    throw exn;\n  }\n}\n\nvar compare = Caml_obj.caml_compare;\n\nvar equal = Caml_obj.caml_equal;\n\nexport {\n  to_string ,\n  of_string_opt ,\n  compare ,\n  equal ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/nativeint.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Xavier Leroy, projet Cristal, INRIA Rocquencourt */\n/*  */\n/* Copyright 1996 Institut National de Recherche en Informatique et */\n/* en Automatique. */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\n/* Module [Nativeint]: processor-native integers */\n\ntype nativeint\n\nexternal neg: nativeint => nativeint = \"%nativeint_neg\"\nexternal add: (nativeint, nativeint) => nativeint = \"%nativeint_add\"\nexternal sub: (nativeint, nativeint) => nativeint = \"%nativeint_sub\"\nexternal mul: (nativeint, nativeint) => nativeint = \"%nativeint_mul\"\nexternal div: (nativeint, nativeint) => nativeint = \"%nativeint_div\"\nexternal rem: (nativeint, nativeint) => nativeint = \"%nativeint_mod\"\nexternal logand: (nativeint, nativeint) => nativeint = \"%nativeint_and\"\nexternal logor: (nativeint, nativeint) => nativeint = \"%nativeint_or\"\nexternal logxor: (nativeint, nativeint) => nativeint = \"%nativeint_xor\"\nexternal shift_left: (nativeint, int) => nativeint = \"%nativeint_lsl\"\nexternal shift_right: (nativeint, int) => nativeint = \"%nativeint_asr\"\nexternal shift_right_logical: (nativeint, int) => nativeint = \"%nativeint_lsr\"\nexternal of_int: int => nativeint = \"%nativeint_of_int\"\nexternal to_int: nativeint => int = \"%nativeint_to_int\"\nexternal of_float: float => nativeint = \"caml_nativeint_of_float\"\nexternal to_float: nativeint => float = \"caml_nativeint_to_float\"\nexternal of_int32: int32 => nativeint = \"%nativeint_of_int32\"\nexternal to_int32: nativeint => int32 = \"%nativeint_to_int32\"\n\nexternal format: (string, nativeint) => string = 
\"caml_nativeint_format\"\nlet to_string = n => format(\"%d\", n)\n\nexternal of_string: string => nativeint = \"caml_nativeint_of_string\"\n\nlet of_string_opt = s =>\n  /* TODO: expose a non-raising primitive directly. */\n  try Some(of_string(s)) catch {\n  | Failure(_) => None\n  }\n\ntype t = nativeint\n\nlet compare = (x: t, y: t) => compare(x, y)\nlet equal = (x: t, y: t) => compare(x, y) == 0\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/numbers.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Int64 from \"rescript/lib/es6/int64.js\";\nimport * as Caml_int64 from \"rescript/lib/es6/caml_int64.js\";\n\nvar Int_base = {};\n\nvar Int = {};\n\nfunction of_int_exn(i) {\n  return i;\n}\n\nfunction to_int(i) {\n  return i;\n}\n\nvar Int8 = {\n  zero: 0,\n  one: 1,\n  of_int_exn: of_int_exn,\n  to_int: to_int\n};\n\nfunction of_int_exn$1(i) {\n  return i;\n}\n\nvar lower_int64 = Caml_int64.neg(Caml_int64.lsl_(Int64.one, 15));\n\nvar upper_int64 = Caml_int64.sub(Caml_int64.lsl_(Int64.one, 15), Int64.one);\n\nvar of_int64_exn = Caml_int64.to_int32;\n\nfunction to_int$1(t) {\n  return t;\n}\n\nvar Int16 = {\n  of_int_exn: of_int_exn$1,\n  lower_int64: lower_int64,\n  upper_int64: upper_int64,\n  of_int64_exn: of_int64_exn,\n  to_int: to_int$1\n};\n\nvar Float = {};\n\nexport {\n  Int_base ,\n  Int ,\n  Int8 ,\n  Int16 ,\n  Float ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/numbers.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Pierre Chambart, OCamlPro */\n/* Mark Shinwell and Leo White, Jane Street Europe */\n/*  */\n/* Copyright 2013--2016 OCamlPro SAS */\n/* Copyright 2014--2016 Jane Street Group LLC */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\nmodule Int_base = {}\n\nmodule Int = {}\n\nmodule Int8 = {\n  type t = int\n\n  let zero = 0\n  let one = 1\n\n  let of_int_exn = i => i\n\n  let to_int = i => i\n}\n\nmodule Int16 = {\n  type t = int\n\n  let of_int_exn = i => i\n\n  let lower_int64 = Int64.neg(Int64.shift_left(Int64.one, 15))\n  let upper_int64 = Int64.sub(Int64.shift_left(Int64.one, 15), Int64.one)\n\n  let of_int64_exn = i => Int64.to_int(i)\n\n  let to_int = t => t\n}\n\nmodule Float = {\n  type t = float\n}\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/parsetree.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/parsetree.res",
    "content": "@@ocaml.text(\n  /* ************************************************************************ */\n  /*  */\n  /* OCaml */\n  /*  */\n  /* Xavier Leroy, projet Cristal, INRIA Rocquencourt */\n  /*  */\n  /* Copyright 1996 Institut National de Recherche en Informatique et */\n  /* en Automatique. */\n  /*  */\n  /* All rights reserved.  This file is distributed under the terms of */\n  /* the GNU Lesser General Public License version 2.1, with the */\n  /* special exception on linking described in the file LICENSE. */\n  /*  */\n  /* ************************************************************************ */\n\n  \" Abstract syntax tree produced by parsing \"\n)\n\nopen Asttypes\n\ntype constant =\n  | Pconst_integer(string, option<char>)\n  /* 3 3l 3L 3n\n\n     Suffixes [g-z][G-Z] are accepted by the parser.\n     Suffixes except 'l', 'L' and 'n' are rejected by the typechecker\n */\n  | Pconst_char(char)\n  /* 'c' */\n  | Pconst_string(string, option<string>)\n  /* \"constant\"\n     {delim|other constant|delim}\n */\n  | Pconst_float(string, option<char>)\n@@ocaml.text(\n  /* 3.4 2e5 1.4e-4\n\n     Suffixes [g-z][G-Z] are accepted by the parser.\n     Suffixes are rejected by the typechecker.\n */\n\n  \" {1 Extension points} \"\n)\n\ntype rec attribute = (loc<string>, payload)\n/* [@id ARG]\n          [@@id ARG]\n\n          Metadata containers passed around within the AST.\n          The compiler ignores unknown attributes.\n */\n\nand extension = (loc<string>, payload)\n/* [%id ARG]\n         [%%id ARG]\n\n         Sub-language placeholder -- rejected by the typechecker.\n */\n\nand attributes = list<attribute>\n\nand payload =\n  | PStr(structure)\n  | PSig(signature) /* : SIG */\n  | PTyp(core_type) /* : T */\n  | PPat(pattern, option<expression>) /* ? P  or  ? 
P when E */\n\n/* Type expressions */\n\n@ocaml.text(\" {1 Core language} \")\nand core_type = {\n  ptyp_desc: core_type_desc,\n  ptyp_loc: Location.t,\n  ptyp_attributes: attributes /* ... [@id1] [@id2] */,\n}\n\nand core_type_desc =\n  | Ptyp_any\n  /* _ */\n  | Ptyp_var(string)\n  /* 'a */\n  | Ptyp_arrow(arg_label, core_type, core_type)\n  /* T1 -> T2       Simple\n           ~l:T1 -> T2    Labelled\n           ?l:T1 -> T2    Optional\n */\n  | Ptyp_tuple(list<core_type>)\n  /* T1 * ... * Tn\n\n           Invariant: n >= 2\n */\n  | Ptyp_constr(loc<Longident.t>, list<core_type>)\n  /* tconstr\n           T tconstr\n           (T1, ..., Tn) tconstr\n */\n  | Ptyp_object(list<object_field>, closed_flag)\n  /* < l1:T1; ...; ln:Tn >     (flag = Closed)\n           < l1:T1; ...; ln:Tn; .. > (flag = Open)\n */\n  | Ptyp_class(loc<Longident.t>, list<core_type>)\n  /* #tconstr\n           T #tconstr\n           (T1, ..., Tn) #tconstr\n */\n  | Ptyp_alias(core_type, string)\n  /* T as 'a */\n  | Ptyp_variant(list<row_field>, closed_flag, option<list<label>>)\n  /* [ `A|`B ]         (flag = Closed; labels = None)\n           [> `A|`B ]        (flag = Open;   labels = None)\n           [< `A|`B ]        (flag = Closed; labels = Some [])\n           [< `A|`B > `X `Y ](flag = Closed; labels = Some [\"X\";\"Y\"])\n */\n  | Ptyp_poly(list<loc<string>>, core_type)\n  /* 'a1 ... 'an. T\n\n           Can only appear in the following context:\n\n           - As the core_type of a Ppat_constraint node corresponding\n             to a constraint on a let-binding: let x : 'a1 ... 'an. 
T\n             = e ...\n\n           - Under Cfk_virtual for methods (not values).\n\n           - As the core_type of a Pctf_method node.\n\n           - As the core_type of a Pexp_poly node.\n\n           - As the pld_type field of a label_declaration.\n\n           - As a core_type of a Ptyp_object node.\n */\n\n  | Ptyp_package(package_type)\n  /* (module S) */\n  | Ptyp_extension(extension)\n/* [%id] */\n\nand package_type = (loc<Longident.t>, list<(loc<Longident.t>, core_type)>)\n/*\n        (module S)\n        (module S with type t1 = T1 and ... and tn = Tn)\n */\n\nand row_field =\n  | Rtag(loc<label>, attributes, bool, list<core_type>)\n  /* [`A]                   ( true,  [] )\n           [`A of T]              ( false, [T] )\n           [`A of T1 & .. & Tn]   ( false, [T1;...Tn] )\n           [`A of & T1 & .. & Tn] ( true,  [T1;...Tn] )\n\n          - The 2nd field is true if the tag contains a\n            constant (empty) constructor.\n          - '&' occurs when several types are used for the same constructor\n            (see 4.2 in the manual)\n\n          - TODO: switch to a record representation, and keep location\n */\n  | Rinherit(core_type)\n/* [ T ] */\n\nand object_field =\n  | Otag(loc<label>, attributes, core_type)\n  | Oinherit(core_type)\n\n/* Patterns */\n\nand pattern = {\n  ppat_desc: pattern_desc,\n  ppat_loc: Location.t,\n  ppat_attributes: attributes /* ... [@id1] [@id2] */,\n}\n\nand pattern_desc =\n  | Ppat_any\n  /* _ */\n  | Ppat_var(loc<string>)\n  /* x */\n  | Ppat_alias(pattern, loc<string>)\n  /* P as 'a */\n  | Ppat_constant(constant)\n  /* 1, 'a', \"true\", 1.0, 1l, 1L, 1n */\n  | Ppat_interval(constant, constant)\n  /* 'a'..'z'\n\n           Other forms of interval are recognized by the parser\n           but rejected by the type-checker. 
*/\n  | Ppat_tuple(list<pattern>)\n  /* (P1, ..., Pn)\n\n           Invariant: n >= 2\n */\n  | Ppat_construct(loc<Longident.t>, option<pattern>)\n  /* C                None\n           C P              Some P\n           C (P1, ..., Pn)  Some (Ppat_tuple [P1; ...; Pn])\n */\n  | Ppat_variant(label, option<pattern>)\n  /* `A             (None)\n           `A P           (Some P)\n */\n  | Ppat_record(list<(loc<Longident.t>, pattern)>, closed_flag)\n  /* { l1=P1; ...; ln=Pn }     (flag = Closed)\n           { l1=P1; ...; ln=Pn; _}   (flag = Open)\n\n           Invariant: n > 0\n */\n  | Ppat_array(list<pattern>)\n  /* [| P1; ...; Pn |] */\n  | Ppat_or(pattern, pattern)\n  /* P1 | P2 */\n  | Ppat_constraint(pattern, core_type)\n  /* (P : T) */\n  | Ppat_type(loc<Longident.t>)\n  /* #tconst */\n  | Ppat_lazy(pattern)\n  /* lazy P */\n  | Ppat_unpack(loc<string>)\n  /* (module P)\n           Note: (module P : S) is represented as\n           Ppat_constraint(Ppat_unpack, Ptyp_package)\n */\n  | Ppat_exception(pattern)\n  /* exception P */\n  | Ppat_extension(extension)\n  /* [%id] */\n  | Ppat_open(loc<Longident.t>, pattern)\n/* M.(P) */\n\n/* Value expressions */\n\nand expression = {\n  pexp_desc: expression_desc,\n  pexp_loc: Location.t,\n  pexp_attributes: attributes /* ... [@id1] [@id2] */,\n}\n\nand expression_desc =\n  | Pexp_ident(loc<Longident.t>)\n  /* x\n           M.x\n */\n  | Pexp_constant(constant)\n  /* 1, 'a', \"true\", 1.0, 1l, 1L, 1n */\n  | Pexp_let(rec_flag, list<value_binding>, expression)\n  /* let P1 = E1 and ... and Pn = EN in E       (flag = Nonrecursive)\n           let rec P1 = E1 and ... and Pn = EN in E   (flag = Recursive)\n */\n  | Pexp_function(list<case>)\n  /* function P1 -> E1 | ... 
| Pn -> En */\n  | Pexp_fun(arg_label, option<expression>, pattern, expression)\n  /* fun P -> E1                          (Simple, None)\n           fun ~l:P -> E1                       (Labelled l, None)\n           fun ?l:P -> E1                       (Optional l, None)\n           fun ?l:(P = E0) -> E1                (Optional l, Some E0)\n\n           Notes:\n           - If E0 is provided, only Optional is allowed.\n           - \"fun P1 P2 .. Pn -> E1\" is represented as nested Pexp_fun.\n           - \"let f P = E\" is represented using Pexp_fun.\n */\n  | Pexp_apply(expression, list<(arg_label, expression)>)\n  /* E0 ~l1:E1 ... ~ln:En\n           li can be empty (non labeled argument) or start with '?'\n           (optional argument).\n\n           Invariant: n > 0\n */\n  | Pexp_match(expression, list<case>)\n  /* match E0 with P1 -> E1 | ... | Pn -> En */\n  | Pexp_try(expression, list<case>)\n  /* try E0 with P1 -> E1 | ... | Pn -> En */\n  | Pexp_tuple(list<expression>)\n  /* (E1, ..., En)\n\n           Invariant: n >= 2\n */\n  | Pexp_construct(loc<Longident.t>, option<expression>)\n  /* C                None\n           C E              Some E\n           C (E1, ..., En)  Some (Pexp_tuple[E1;...;En])\n */\n  | Pexp_variant(label, option<expression>)\n  /* `A             (None)\n           `A E           (Some E)\n */\n  | Pexp_record(list<(loc<Longident.t>, expression)>, option<expression>)\n  /* { l1=P1; ...; ln=Pn }     (None)\n           { E0 with l1=P1; ...; ln=Pn }   (Some E0)\n\n           Invariant: n > 0\n */\n  | Pexp_field(expression, loc<Longident.t>)\n  /* E.l */\n  | Pexp_setfield(expression, loc<Longident.t>, expression)\n  /* E1.l <- E2 */\n  | Pexp_array(list<expression>)\n  /* [| E1; ...; En |] */\n  | Pexp_ifthenelse(expression, expression, option<expression>)\n  /* if E1 then E2 else E3 */\n  | Pexp_sequence(expression, expression)\n  /* E1; E2 */\n  | Pexp_while(expression, expression)\n  /* while E1 do E2 done */\n  | 
Pexp_for(pattern, expression, expression, direction_flag, expression)\n  /* for i = E1 to E2 do E3 done      (flag = Upto)\n           for i = E1 downto E2 do E3 done  (flag = Downto)\n */\n  | Pexp_constraint(expression, core_type)\n  /* (E : T) */\n  | Pexp_coerce(expression, option<core_type>, core_type)\n  /* (E :> T)        (None, T)\n           (E : T0 :> T)   (Some T0, T)\n */\n  | Pexp_send(expression, loc<label>)\n  /* E # m */\n  | Pexp_new(loc<Longident.t>)\n  /* new M.c */\n  | Pexp_setinstvar(loc<label>, expression)\n  /* x <- 2 */\n  | Pexp_override(list<(loc<label>, expression)>)\n  /* {< x1 = E1; ...; Xn = En >} */\n  | Pexp_letmodule(loc<string>, module_expr, expression)\n  /* let module M = ME in E */\n  | Pexp_letexception(extension_constructor, expression)\n  /* let exception C in E */\n  | Pexp_assert(expression)\n  /* assert E\n           Note: \"assert false\" is treated in a special way by the\n           type-checker. */\n  | Pexp_lazy(expression)\n  /* lazy E */\n  | Pexp_poly(expression, option<core_type>)\n  /* Used for method bodies.\n\n           Can only be used as the expression under Cfk_concrete\n           for methods (not values). */\n  | Pexp_object(class_structure)\n  /* object ... end */\n  | Pexp_newtype(loc<string>, expression)\n  /* fun (type t) -> E */\n  | Pexp_pack(module_expr)\n  /* (module ME)\n\n           (module ME : S) is represented as\n           Pexp_constraint(Pexp_pack, Ptyp_package S) */\n  | Pexp_open(override_flag, loc<Longident.t>, expression)\n  /* M.(E)\n           let open M in E\n           let! open M in E */\n  | Pexp_extension(extension)\n  /* [%id] */\n  | Pexp_unreachable\n/* . */\n\nand case = {\n  /* (P -> E) or (P when E0 -> E) */\n\n  pc_lhs: pattern,\n  pc_guard: option<expression>,\n  pc_rhs: expression,\n}\n\n/* Value descriptions */\n\nand value_description = {\n  pval_name: loc<string>,\n  pval_type: core_type,\n  pval_prim: list<string>,\n  pval_attributes: attributes /* ... 
[@@id1] [@@id2] */,\n  pval_loc: Location.t,\n}\n\n/*\n  val x: T                            (prim = [])\n  external x: T = \"s1\" ... \"sn\"       (prim = [\"s1\";...\"sn\"])\n*/\n\n/* Type declarations */\n\nand type_declaration = {\n  ptype_name: loc<string>,\n  ptype_params: list<(core_type, variance)>,\n  /* ('a1,...'an) t; None represents  _ */\n  ptype_cstrs: list<(core_type, core_type, Location.t)>,\n  /* ... constraint T1=T1'  ... constraint Tn=Tn' */\n  ptype_kind: type_kind,\n  ptype_private: private_flag /* = private ... */,\n  ptype_manifest: option<core_type> /* = T */,\n  ptype_attributes: attributes /* ... [@@id1] [@@id2] */,\n  ptype_loc: Location.t,\n}\n\n/*\n  type t                     (abstract, no manifest)\n  type t = T0                (abstract, manifest=T0)\n  type t = C of T | ...      (variant,  no manifest)\n  type t = T0 = C of T | ... (variant,  manifest=T0)\n  type t = {l: T; ...}       (record,   no manifest)\n  type t = T0 = {l : T; ...} (record,   manifest=T0)\n  type t = ..                (open,     no manifest)\n*/\n\nand type_kind =\n  | Ptype_abstract\n  | Ptype_variant(list<constructor_declaration>)\n  /* Invariant: non-empty list */\n  | Ptype_record(list<label_declaration>)\n  /* Invariant: non-empty list */\n  | Ptype_open\n\nand label_declaration = {\n  pld_name: loc<string>,\n  pld_mutable: mutable_flag,\n  pld_type: core_type,\n  pld_loc: Location.t,\n  pld_attributes: attributes /* l : T [@id1] [@id2] */,\n}\n\n/* { ...; l: T; ... }            (mutable=Immutable)\n    { ...; mutable l: T; ... }    (mutable=Mutable)\n\n    Note: T can be a Ptyp_poly.\n*/\n\nand constructor_declaration = {\n  pcd_name: loc<string>,\n  pcd_args: constructor_arguments,\n  pcd_res: option<core_type>,\n  pcd_loc: Location.t,\n  pcd_attributes: attributes /* C of ... [@id1] [@id2] */,\n}\n\nand constructor_arguments =\n  | Pcstr_tuple(list<core_type>)\n  | Pcstr_record(list<label_declaration>)\n\n/*\n  | C of T1 * ... 
* Tn     (res = None,    args = Pcstr_tuple [])\n  | C: T0                  (res = Some T0, args = [])\n  | C: T1 * ... * Tn -> T0 (res = Some T0, args = Pcstr_tuple)\n  | C of {...}             (res = None,    args = Pcstr_record)\n  | C: {...} -> T0         (res = Some T0, args = Pcstr_record)\n  | C of {...} as t        (res = None,    args = Pcstr_record)\n*/\n\nand type_extension = {\n  ptyext_path: loc<Longident.t>,\n  ptyext_params: list<(core_type, variance)>,\n  ptyext_constructors: list<extension_constructor>,\n  ptyext_private: private_flag,\n  ptyext_attributes: attributes /* ... [@@id1] [@@id2] */,\n}\n/*\n  type t += ...\n*/\n\nand extension_constructor = {\n  pext_name: loc<string>,\n  pext_kind: extension_constructor_kind,\n  pext_loc: Location.t,\n  pext_attributes: attributes /* C of ... [@id1] [@id2] */,\n}\n\nand extension_constructor_kind =\n  | Pext_decl(constructor_arguments, option<core_type>)\n  /*\n         | C of T1 * ... * Tn     ([T1; ...; Tn], None)\n         | C: T0                  ([], Some T0)\n         | C: T1 * ... * Tn -> T0 ([T1; ...; Tn], Some T0)\n */\n  | Pext_rebind(loc<Longident.t>)\n/*\n         | C = D\n */\n\n/* Type expressions for the class language */\n\n@ocaml.text(\" {1 Class language} \")\nand class_type = {\n  pcty_desc: class_type_desc,\n  pcty_loc: Location.t,\n  pcty_attributes: attributes /* ... [@id1] [@id2] */,\n}\n\nand class_type_desc =\n  | Pcty_constr(loc<Longident.t>, list<core_type>)\n  /* c\n   ['a1, ..., 'an] c */\n  | Pcty_signature(class_signature)\n  /* object ... end */\n  | Pcty_arrow(arg_label, core_type, class_type)\n  /* T -> CT       Simple\n           ~l:T -> CT    Labelled l\n           ?l:T -> CT    Optional l\n */\n  | Pcty_extension(extension)\n  /* [%id] */\n  | Pcty_open(override_flag, loc<Longident.t>, class_type)\n/* let open M in CT */\n\nand class_signature = {\n  pcsig_self: core_type,\n  pcsig_fields: list<class_type_field>,\n}\n/* object('selfpat) ... end\n   object ... 
end             (self = Ptyp_any)\n */\n\nand class_type_field = {\n  pctf_desc: class_type_field_desc,\n  pctf_loc: Location.t,\n  pctf_attributes: attributes /* ... [@@id1] [@@id2] */,\n}\n\nand class_type_field_desc =\n  | Pctf_inherit(class_type)\n  /* inherit CT */\n  | Pctf_val((loc<label>, mutable_flag, virtual_flag, core_type))\n  /* val x: T */\n  | Pctf_method((loc<label>, private_flag, virtual_flag, core_type))\n  /* method x: T\n\n           Note: T can be a Ptyp_poly.\n */\n  | Pctf_constraint((core_type, core_type))\n  /* constraint T1 = T2 */\n  | Pctf_attribute(attribute)\n  /* [@@@id] */\n  | Pctf_extension(extension)\n/* [%%id] */\n\nand class_infos<'a> = {\n  pci_virt: virtual_flag,\n  pci_params: list<(core_type, variance)>,\n  pci_name: loc<string>,\n  pci_expr: 'a,\n  pci_loc: Location.t,\n  pci_attributes: attributes /* ... [@@id1] [@@id2] */,\n}\n/* class c = ...\n   class ['a1,...,'an] c = ...\n   class virtual c = ...\n\n   Also used for \"class type\" declaration.\n*/\n\nand class_description = class_infos<class_type>\n\nand class_type_declaration = class_infos<class_type>\n\n/* Value expressions for the class language */\n\nand class_expr = {\n  pcl_desc: class_expr_desc,\n  pcl_loc: Location.t,\n  pcl_attributes: attributes /* ... [@id1] [@id2] */,\n}\n\nand class_expr_desc =\n  | Pcl_constr(loc<Longident.t>, list<core_type>)\n  /* c\n   ['a1, ..., 'an] c */\n  | Pcl_structure(class_structure)\n  /* object ... end */\n  | Pcl_fun(arg_label, option<expression>, pattern, class_expr)\n  /* fun P -> CE                          (Simple, None)\n           fun ~l:P -> CE                       (Labelled l, None)\n           fun ?l:P -> CE                       (Optional l, None)\n           fun ?l:(P = E0) -> CE                (Optional l, Some E0)\n */\n  | Pcl_apply(class_expr, list<(arg_label, expression)>)\n  /* CE ~l1:E1 ... 
~ln:En\n           li can be empty (non labeled argument) or start with '?'\n           (optional argument).\n\n           Invariant: n > 0\n */\n  | Pcl_let(rec_flag, list<value_binding>, class_expr)\n  /* let P1 = E1 and ... and Pn = EN in CE      (flag = Nonrecursive)\n           let rec P1 = E1 and ... and Pn = EN in CE  (flag = Recursive)\n */\n  | Pcl_constraint(class_expr, class_type)\n  /* (CE : CT) */\n  | Pcl_extension(extension)\n  /* [%id] */\n  | Pcl_open(override_flag, loc<Longident.t>, class_expr)\n/* let open M in CE */\n\nand class_structure = {\n  pcstr_self: pattern,\n  pcstr_fields: list<class_field>,\n}\n/* object(selfpat) ... end\n   object ... end           (self = Ppat_any)\n */\n\nand class_field = {\n  pcf_desc: class_field_desc,\n  pcf_loc: Location.t,\n  pcf_attributes: attributes /* ... [@@id1] [@@id2] */,\n}\n\nand class_field_desc =\n  | Pcf_inherit(override_flag, class_expr, option<loc<string>>)\n  /* inherit CE\n           inherit CE as x\n           inherit! CE\n           inherit! CE as x\n */\n  | Pcf_val((loc<label>, mutable_flag, class_field_kind))\n  /* val x = E\n           val virtual x: T\n */\n  | Pcf_method((loc<label>, private_flag, class_field_kind))\n  /* method x = E            (E can be a Pexp_poly)\n           method virtual x: T     (T can be a Ptyp_poly)\n */\n  | Pcf_constraint((core_type, core_type))\n  /* constraint T1 = T2 */\n  | Pcf_initializer(expression)\n  /* initializer E */\n  | Pcf_attribute(attribute)\n  /* [@@@id] */\n  | Pcf_extension(extension)\n/* [%%id] */\n\nand class_field_kind =\n  | Cfk_virtual(core_type)\n  | Cfk_concrete(override_flag, expression)\n\nand class_declaration = class_infos<class_expr>\n\n/* Type expressions for the module language */\n\n@ocaml.text(\" {1 Module language} \")\nand module_type = {\n  pmty_desc: module_type_desc,\n  pmty_loc: Location.t,\n  pmty_attributes: attributes /* ... 
[@id1] [@id2] */,\n}\n\nand module_type_desc =\n  | Pmty_ident(loc<Longident.t>)\n  /* S */\n  | Pmty_signature(signature)\n  /* sig ... end */\n  | Pmty_functor(loc<string>, option<module_type>, module_type)\n  /* functor(X : MT1) -> MT2 */\n  | Pmty_with(module_type, list<with_constraint>)\n  /* MT with ... */\n  | Pmty_typeof(module_expr)\n  /* module type of ME */\n  | Pmty_extension(extension)\n  /* [%id] */\n  | Pmty_alias(loc<Longident.t>)\n/* (module M) */\n\nand signature = list<signature_item>\n\nand signature_item = {\n  psig_desc: signature_item_desc,\n  psig_loc: Location.t,\n}\n\nand signature_item_desc =\n  | Psig_value(value_description)\n  /*\n          val x: T\n          external x: T = \"s1\" ... \"sn\"\n */\n  | Psig_type(rec_flag, list<type_declaration>)\n  /* type t1 = ... and ... and tn = ... */\n  | Psig_typext(type_extension)\n  /* type t1 += ... */\n  | Psig_exception(extension_constructor)\n  /* exception C of T */\n  | Psig_module(module_declaration)\n  /* module X : MT */\n  | Psig_recmodule(list<module_declaration>)\n  /* module rec X1 : MT1 and ... and Xn : MTn */\n  | Psig_modtype(module_type_declaration)\n  /* module type S = MT\n   module type S */\n  | Psig_open(open_description)\n  /* open X */\n  | Psig_include(include_description)\n  /* include MT */\n  | Psig_class(list<class_description>)\n  /* class c1 : ... and ... and cn : ... */\n  | Psig_class_type(list<class_type_declaration>)\n  /* class type ct1 = ... and ... and ctn = ... */\n  | Psig_attribute(attribute)\n  /* [@@@id] */\n  | Psig_extension(extension, attributes)\n/* [%%id] */\n\nand module_declaration = {\n  pmd_name: loc<string>,\n  pmd_type: module_type,\n  pmd_attributes: attributes /* ... [@@id1] [@@id2] */,\n  pmd_loc: Location.t,\n}\n/* S : MT */\n\nand module_type_declaration = {\n  pmtd_name: loc<string>,\n  pmtd_type: option<module_type>,\n  pmtd_attributes: attributes /* ... 
[@@id1] [@@id2] */,\n  pmtd_loc: Location.t,\n}\n/* S = MT\n   S       (abstract module type declaration, pmtd_type = None)\n*/\n\nand open_description = {\n  popen_lid: loc<Longident.t>,\n  popen_override: override_flag,\n  popen_loc: Location.t,\n  popen_attributes: attributes,\n}\n/* open! X - popen_override = Override (silences the 'used identifier\n                              shadowing' warning)\n   open  X - popen_override = Fresh\n */\n\nand include_infos<'a> = {\n  pincl_mod: 'a,\n  pincl_loc: Location.t,\n  pincl_attributes: attributes,\n}\n\nand include_description = include_infos<module_type>\n/* include MT */\n\nand include_declaration = include_infos<module_expr>\n/* include ME */\n\nand with_constraint =\n  | Pwith_type(loc<Longident.t>, type_declaration)\n  /* with type X.t = ...\n\n           Note: the last component of the longident must match\n           the name of the type_declaration. */\n  | Pwith_module(loc<Longident.t>, loc<Longident.t>)\n  /* with module X.Y = Z */\n  | Pwith_typesubst(loc<Longident.t>, type_declaration)\n  /* with type X.t := ..., same format as [Pwith_type] */\n  | Pwith_modsubst(loc<Longident.t>, loc<Longident.t>)\n/* with module X.Y := Z */\n\n/* Value expressions for the module language */\n\nand module_expr = {\n  pmod_desc: module_expr_desc,\n  pmod_loc: Location.t,\n  pmod_attributes: attributes /* ... [@id1] [@id2] */,\n}\n\nand module_expr_desc =\n  | Pmod_ident(loc<Longident.t>)\n  /* X */\n  | Pmod_structure(structure)\n  /* struct ... 
end */\n  | Pmod_functor(loc<string>, option<module_type>, module_expr)\n  /* functor(X : MT1) -> ME */\n  | Pmod_apply(module_expr, module_expr)\n  /* ME1(ME2) */\n  | Pmod_constraint(module_expr, module_type)\n  /* (ME : MT) */\n  | Pmod_unpack(expression)\n  /* (val E) */\n  | Pmod_extension(extension)\n/* [%id] */\n\nand structure = list<structure_item>\n\nand structure_item = {\n  pstr_desc: structure_item_desc,\n  pstr_loc: Location.t,\n}\n\nand structure_item_desc =\n  | Pstr_eval(expression, attributes)\n  /* E */\n  | Pstr_value(rec_flag, list<value_binding>)\n  /* let P1 = E1 and ... and Pn = EN       (flag = Nonrecursive)\n           let rec P1 = E1 and ... and Pn = EN   (flag = Recursive)\n */\n  | Pstr_primitive(value_description)\n  /* val x: T\n   external x: T = \"s1\" ... \"sn\" */\n  | Pstr_type(rec_flag, list<type_declaration>)\n  /* type t1 = ... and ... and tn = ... */\n  | Pstr_typext(type_extension)\n  /* type t1 += ... */\n  | Pstr_exception(extension_constructor)\n  /* exception C of T\n   exception C = M.X */\n  | Pstr_module(module_binding)\n  /* module X = ME */\n  | Pstr_recmodule(list<module_binding>)\n  /* module rec X1 = ME1 and ... and Xn = MEn */\n  | Pstr_modtype(module_type_declaration)\n  /* module type S = MT */\n  | Pstr_open(open_description)\n  /* open X */\n  | Pstr_class(list<class_declaration>)\n  /* class c1 = ... and ... and cn = ... */\n  | Pstr_class_type(list<class_type_declaration>)\n  /* class type ct1 = ... and ... and ctn = ... 
*/\n  | Pstr_include(include_declaration)\n  /* include ME */\n  | Pstr_attribute(attribute)\n  /* [@@@id] */\n  | Pstr_extension(extension, attributes)\n/* [%%id] */\n\nand value_binding = {\n  pvb_pat: pattern,\n  pvb_expr: expression,\n  pvb_attributes: attributes,\n  pvb_loc: Location.t,\n}\n\nand module_binding = {\n  pmb_name: loc<string>,\n  pmb_expr: module_expr,\n  pmb_attributes: attributes,\n  pmb_loc: Location.t,\n}\n@@ocaml.text(\n  /* X = ME */\n\n  \" {1 Toplevel} \"\n)\n\n/* Toplevel phrases */\n\ntype rec toplevel_phrase =\n  | Ptop_def(structure)\n  | Ptop_dir(string, directive_argument)\n/* #use, #load ... */\n\nand directive_argument =\n  | Pdir_none\n  | Pdir_string(string)\n  | Pdir_int(string, option<char>)\n  | Pdir_ident(Longident.t)\n  | Pdir_bool(bool)\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/printf.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction printf(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"printf.res\",\n          1,\n          18\n        ],\n        Error: new Error()\n      };\n}\n\nfunction fprintf(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"printf.res\",\n          3,\n          19\n        ],\n        Error: new Error()\n      };\n}\n\nfunction sprintf(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"printf.res\",\n          5,\n          19\n        ],\n        Error: new Error()\n      };\n}\n\nfunction eprintf(param) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"printf.res\",\n          7,\n          19\n        ],\n        Error: new Error()\n      };\n}\n\nexport {\n  printf ,\n  fprintf ,\n  sprintf ,\n  eprintf ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/printf.res",
    "content": "let printf = _ => assert false\n\nlet fprintf = _ => assert false\n\nlet sprintf = _ => assert false\n\nlet eprintf = _ => assert false\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_comment.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Format from \"./format.js\";\nimport * as Lexing from \"rescript/lib/es6/lexing.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\n\nfunction styleToString(s) {\n  if (s) {\n    return \"MultiLine\";\n  } else {\n    return \"SingleLine\";\n  }\n}\n\nfunction loc(t) {\n  return t.loc;\n}\n\nfunction txt(t) {\n  return t.txt;\n}\n\nfunction prevTokEndPos(t) {\n  return t.prevTokEndPos;\n}\n\nfunction setPrevTokEndPos(t, pos) {\n  t.prevTokEndPos = pos;\n  \n}\n\nfunction isSingleLineComment(t) {\n  var match = t.style;\n  if (match) {\n    return false;\n  } else {\n    return true;\n  }\n}\n\nfunction toString(t) {\n  return Curry._4(Format.sprintf(\"(txt: %s\\nstyle: %s\\nlines: %d-%d)\"), t.txt, t.style ? \"MultiLine\" : \"SingleLine\", t.loc.loc_start.pos_lnum, t.loc.loc_end.pos_lnum);\n}\n\nfunction makeSingleLineComment(loc, txt) {\n  return {\n          txt: txt,\n          style: /* SingleLine */0,\n          loc: loc,\n          prevTokEndPos: Lexing.dummy_pos\n        };\n}\n\nfunction makeMultiLineComment(loc, txt) {\n  return {\n          txt: txt,\n          style: /* MultiLine */1,\n          loc: loc,\n          prevTokEndPos: Lexing.dummy_pos\n        };\n}\n\nfunction fromOcamlComment(loc, txt, prevTokEndPos) {\n  return {\n          txt: txt,\n          style: /* MultiLine */1,\n          loc: loc,\n          prevTokEndPos: prevTokEndPos\n        };\n}\n\nfunction trimSpaces(s) {\n  var len = s.length;\n  if (len === 0) {\n    return s;\n  }\n  if (!(s[0] === \" \" || s[len - 1 | 0] === \" \")) {\n    return s;\n  }\n  var i = 0;\n  while(i < len && s[i] === \" \") {\n    i = i + 1 | 0;\n  };\n  var j = len - 1 | 0;\n  while(j >= i && s[j] === \" \") {\n    j = j - 1 | 0;\n  };\n  if (j >= i) {\n    return $$String.sub(s, i, (j - i | 0) + 1 | 0);\n  } else {\n    return \"\";\n  }\n}\n\nexport {\n  
styleToString ,\n  loc ,\n  txt ,\n  prevTokEndPos ,\n  setPrevTokEndPos ,\n  isSingleLineComment ,\n  toString ,\n  makeSingleLineComment ,\n  makeMultiLineComment ,\n  fromOcamlComment ,\n  trimSpaces ,\n  \n}\n/* Format Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_comment.res",
    "content": "type style =\n  | SingleLine\n  | MultiLine\n\nlet styleToString = s =>\n  switch s {\n  | SingleLine => \"SingleLine\"\n  | MultiLine => \"MultiLine\"\n  }\n\ntype t = {\n  txt: string,\n  style: style,\n  loc: Location.t,\n  mutable prevTokEndPos: Lexing.position,\n}\n\nlet loc = t => t.loc\nlet txt = t => t.txt\nlet prevTokEndPos = t => t.prevTokEndPos\n\nlet setPrevTokEndPos = (t, pos) => t.prevTokEndPos = pos\n\nlet isSingleLineComment = t =>\n  switch t.style {\n  | SingleLine => true\n  | MultiLine => false\n  }\n\nlet toString = t =>\n  Format.sprintf(\n    \"(txt: %s\\nstyle: %s\\nlines: %d-%d)\",\n    t.txt,\n    styleToString(t.style),\n    t.loc.loc_start.pos_lnum,\n    t.loc.loc_end.pos_lnum,\n  )\n\nlet makeSingleLineComment = (~loc, txt) => {\n  txt: txt,\n  loc: loc,\n  style: SingleLine,\n  prevTokEndPos: Lexing.dummy_pos,\n}\n\nlet makeMultiLineComment = (~loc, txt) => {\n  txt: txt,\n  loc: loc,\n  style: MultiLine,\n  prevTokEndPos: Lexing.dummy_pos,\n}\n\nlet fromOcamlComment = (~loc, ~txt, ~prevTokEndPos) => {\n  txt: txt,\n  loc: loc,\n  style: MultiLine,\n  prevTokEndPos: prevTokEndPos,\n}\n\nlet trimSpaces = s => {\n  let len = String.length(s)\n  if len == 0 {\n    s\n  } else if String.unsafe_get(s, 0) == ' ' || String.unsafe_get(s, len - 1) == ' ' {\n    let i = ref(0)\n    while i.contents < len && String.unsafe_get(s, i.contents) == ' ' {\n      incr(i)\n    }\n    let j = ref(len - 1)\n    while j.contents >= i.contents && String.unsafe_get(s, j.contents) == ' ' {\n      decr(j)\n    }\n    if j.contents >= i.contents {\n      (@doesNotRaise String.sub)(s, i.contents, j.contents - i.contents + 1)\n    } else {\n      \"\"\n    }\n  } else {\n    s\n  }\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_comments_table.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Hashtbl from \"rescript/lib/es6/hashtbl.js\";\nimport * as Res_doc from \"./res_doc.js\";\nimport * as Caml_obj from \"rescript/lib/es6/caml_obj.js\";\nimport * as $$Location from \"./location.js\";\nimport * as Ast_helper from \"./ast_helper.js\";\nimport * as Res_comment from \"./res_comment.js\";\nimport * as Res_parsetree_viewer from \"./res_parsetree_viewer.js\";\n\nfunction make(param) {\n  return {\n          leading: Hashtbl.create(undefined, 100),\n          inside: Hashtbl.create(undefined, 100),\n          trailing: Hashtbl.create(undefined, 100)\n        };\n}\n\nfunction copy(tbl) {\n  return {\n          leading: Hashtbl.copy(tbl.leading),\n          inside: Hashtbl.copy(tbl.inside),\n          trailing: Hashtbl.copy(tbl.trailing)\n        };\n}\n\nvar empty = make(undefined);\n\nfunction log(t) {\n  var leadingStuff = Hashtbl.fold((function (k, v, acc) {\n          var loc = Res_doc.concat({\n                hd: Res_doc.lbracket,\n                tl: {\n                  hd: Res_doc.text(String(k.loc_start.pos_lnum)),\n                  tl: {\n                    hd: Res_doc.text(\":\"),\n                    tl: {\n                      hd: Res_doc.text(String(k.loc_start.pos_cnum - k.loc_start.pos_bol | 0)),\n                      tl: {\n                        hd: Res_doc.text(\"-\"),\n                        tl: {\n                          hd: Res_doc.text(String(k.loc_end.pos_lnum)),\n                          tl: {\n                            hd: Res_doc.text(\":\"),\n                            tl: {\n                              hd: Res_doc.text(String(k.loc_end.pos_cnum - k.loc_end.pos_bol | 0)),\n                              tl: {\n                                hd: Res_doc.rbracket,\n                                tl: /* [] */0\n                       
       }\n                            }\n                          }\n                        }\n                      }\n                    }\n                  }\n                }\n              });\n          var doc = Res_doc.breakableGroup(true, Res_doc.concat({\n                    hd: loc,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.line,\n                                tl: {\n                                  hd: Res_doc.join(Res_doc.comma, List.map((function (c) {\n                                              return Res_doc.text(Res_comment.txt(c));\n                                            }), v)),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.line,\n                        tl: /* [] */0\n                      }\n                    }\n                  }));\n          return {\n                  hd: doc,\n                  tl: acc\n                };\n        }), t.leading, /* [] */0);\n  var trailingStuff = Hashtbl.fold((function (k, v, acc) {\n          var loc = Res_doc.concat({\n                hd: Res_doc.lbracket,\n                tl: {\n                  hd: Res_doc.text(String(k.loc_start.pos_lnum)),\n                  tl: {\n                    hd: Res_doc.text(\":\"),\n                    tl: {\n                      hd: Res_doc.text(String(k.loc_start.pos_cnum - k.loc_start.pos_bol | 0)),\n                      tl: {\n                        hd: Res_doc.text(\"-\"),\n                        tl: {\n                          hd: Res_doc.text(String(k.loc_end.pos_lnum)),\n                          tl: {\n                            hd: Res_doc.text(\":\"),\n                            tl: {\n                              hd: Res_doc.text(String(k.loc_end.pos_cnum - k.loc_end.pos_bol | 0)),\n                           
   tl: {\n                                hd: Res_doc.rbracket,\n                                tl: /* [] */0\n                              }\n                            }\n                          }\n                        }\n                      }\n                    }\n                  }\n                }\n              });\n          var doc = Res_doc.breakableGroup(true, Res_doc.concat({\n                    hd: loc,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.line,\n                                tl: {\n                                  hd: Res_doc.join(Res_doc.concat({\n                                            hd: Res_doc.comma,\n                                            tl: {\n                                              hd: Res_doc.line,\n                                              tl: /* [] */0\n                                            }\n                                          }), List.map((function (c) {\n                                              return Res_doc.text(Res_comment.txt(c));\n                                            }), v)),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.line,\n                        tl: /* [] */0\n                      }\n                    }\n                  }));\n          return {\n                  hd: doc,\n                  tl: acc\n                };\n        }), t.trailing, /* [] */0);\n  console.log(Res_doc.toString(80, Res_doc.breakableGroup(true, Res_doc.concat({\n                    hd: Res_doc.text(\"leading comments:\"),\n                    tl: {\n                      hd: Res_doc.line,\n                      tl: {\n                        hd: Res_doc.indent(Res_doc.concat(leadingStuff)),\n                        tl: {\n                          hd: 
Res_doc.line,\n                          tl: {\n                            hd: Res_doc.line,\n                            tl: {\n                              hd: Res_doc.text(\"trailing comments:\"),\n                              tl: {\n                                hd: Res_doc.indent(Res_doc.concat(trailingStuff)),\n                                tl: {\n                                  hd: Res_doc.line,\n                                  tl: {\n                                    hd: Res_doc.line,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }\n                            }\n                          }\n                        }\n                      }\n                    }\n                  }))));\n  \n}\n\nfunction attach(tbl, loc, comments) {\n  if (comments) {\n    return Hashtbl.replace(tbl, loc, comments);\n  }\n  \n}\n\nfunction partitionByLoc(comments, loc) {\n  var _param = [\n    /* [] */0,\n    /* [] */0,\n    /* [] */0\n  ];\n  var _comments = comments;\n  while(true) {\n    var param = _param;\n    var comments$1 = _comments;\n    var trailing = param[2];\n    var inside = param[1];\n    var leading = param[0];\n    if (!comments$1) {\n      return [\n              List.rev(leading),\n              List.rev(inside),\n              List.rev(trailing)\n            ];\n    }\n    var rest = comments$1.tl;\n    var comment = comments$1.hd;\n    var cmtLoc = Res_comment.loc(comment);\n    if (cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum) {\n      _comments = rest;\n      _param = [\n        {\n          hd: comment,\n          tl: leading\n        },\n        inside,\n        trailing\n      ];\n      continue ;\n    }\n    if (cmtLoc.loc_start.pos_cnum >= loc.loc_end.pos_cnum) {\n      _comments = rest;\n      _param = [\n        leading,\n        inside,\n        {\n          hd: comment,\n          tl: trailing\n        }\n      
];\n      continue ;\n    }\n    _comments = rest;\n    _param = [\n      leading,\n      {\n        hd: comment,\n        tl: inside\n      },\n      trailing\n    ];\n    continue ;\n  };\n}\n\nfunction partitionLeadingTrailing(comments, loc) {\n  var _param = [\n    /* [] */0,\n    /* [] */0\n  ];\n  var _comments = comments;\n  while(true) {\n    var param = _param;\n    var comments$1 = _comments;\n    var trailing = param[1];\n    var leading = param[0];\n    if (!comments$1) {\n      return [\n              List.rev(leading),\n              List.rev(trailing)\n            ];\n    }\n    var rest = comments$1.tl;\n    var comment = comments$1.hd;\n    var cmtLoc = Res_comment.loc(comment);\n    if (cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum) {\n      _comments = rest;\n      _param = [\n        {\n          hd: comment,\n          tl: leading\n        },\n        trailing\n      ];\n      continue ;\n    }\n    _comments = rest;\n    _param = [\n      leading,\n      {\n        hd: comment,\n        tl: trailing\n      }\n    ];\n    continue ;\n  };\n}\n\nfunction partitionByOnSameLine(loc, comments) {\n  var _param = [\n    /* [] */0,\n    /* [] */0\n  ];\n  var _comments = comments;\n  while(true) {\n    var param = _param;\n    var comments$1 = _comments;\n    var onOtherLine = param[1];\n    var onSameLine = param[0];\n    if (!comments$1) {\n      return [\n              List.rev(onSameLine),\n              List.rev(onOtherLine)\n            ];\n    }\n    var rest = comments$1.tl;\n    var comment = comments$1.hd;\n    var cmtLoc = Res_comment.loc(comment);\n    if (cmtLoc.loc_start.pos_lnum === loc.loc_end.pos_lnum) {\n      _comments = rest;\n      _param = [\n        {\n          hd: comment,\n          tl: onSameLine\n        },\n        onOtherLine\n      ];\n      continue ;\n    }\n    _comments = rest;\n    _param = [\n      onSameLine,\n      {\n        hd: comment,\n        tl: onOtherLine\n      }\n    ];\n    continue ;\n  
};\n}\n\nfunction partitionAdjacentTrailing(loc1, comments) {\n  var _prevEndPos = loc1.loc_end;\n  var _afterLoc1 = /* [] */0;\n  var _comments = comments;\n  while(true) {\n    var comments$1 = _comments;\n    var afterLoc1 = _afterLoc1;\n    var prevEndPos = _prevEndPos;\n    if (!comments$1) {\n      return [\n              List.rev(afterLoc1),\n              /* [] */0\n            ];\n    }\n    var comment = comments$1.hd;\n    var cmtPrevEndPos = Res_comment.prevTokEndPos(comment);\n    if (prevEndPos.pos_cnum !== cmtPrevEndPos.pos_cnum) {\n      return [\n              List.rev(afterLoc1),\n              comments$1\n            ];\n    }\n    var commentEnd = Res_comment.loc(comment).loc_end;\n    _comments = comments$1.tl;\n    _afterLoc1 = {\n      hd: comment,\n      tl: afterLoc1\n    };\n    _prevEndPos = commentEnd;\n    continue ;\n  };\n}\n\nfunction collectListPatterns(_acc, _pattern) {\n  while(true) {\n    var pattern = _pattern;\n    var acc = _acc;\n    var match = pattern.ppat_desc;\n    if (typeof match === \"number\") {\n      return List.rev({\n                  hd: pattern,\n                  tl: acc\n                });\n    }\n    if (match.TAG !== /* Ppat_construct */5) {\n      return List.rev({\n                  hd: pattern,\n                  tl: acc\n                });\n    }\n    var match$1 = match._0.txt;\n    switch (match$1.TAG | 0) {\n      case /* Lident */0 :\n          switch (match$1._0) {\n            case \"::\" :\n                var match$2 = match._1;\n                if (match$2 === undefined) {\n                  return List.rev({\n                              hd: pattern,\n                              tl: acc\n                            });\n                }\n                var match$3 = match$2.ppat_desc;\n                if (typeof match$3 === \"number\") {\n                  return List.rev({\n                              hd: pattern,\n                              tl: acc\n                            
});\n                }\n                if (match$3.TAG !== /* Ppat_tuple */4) {\n                  return List.rev({\n                              hd: pattern,\n                              tl: acc\n                            });\n                }\n                var match$4 = match$3._0;\n                if (!match$4) {\n                  return List.rev({\n                              hd: pattern,\n                              tl: acc\n                            });\n                }\n                var match$5 = match$4.tl;\n                if (!match$5) {\n                  return List.rev({\n                              hd: pattern,\n                              tl: acc\n                            });\n                }\n                if (match$5.tl) {\n                  return List.rev({\n                              hd: pattern,\n                              tl: acc\n                            });\n                }\n                _pattern = match$5.hd;\n                _acc = {\n                  hd: match$4.hd,\n                  tl: acc\n                };\n                continue ;\n            case \"[]\" :\n                if (match._1 !== undefined) {\n                  return List.rev({\n                              hd: pattern,\n                              tl: acc\n                            });\n                } else {\n                  return List.rev(acc);\n                }\n            default:\n              return List.rev({\n                          hd: pattern,\n                          tl: acc\n                        });\n          }\n      case /* Ldot */1 :\n      case /* Lapply */2 :\n          return List.rev({\n                      hd: pattern,\n                      tl: acc\n                    });\n      \n    }\n  };\n}\n\nfunction collectListExprs(_acc, _expr) {\n  while(true) {\n    var expr = _expr;\n    var acc = _acc;\n    var match = expr.pexp_desc;\n    if (typeof match === \"number\") {\n     
 return List.rev({\n                  hd: expr,\n                  tl: acc\n                });\n    }\n    if (match.TAG !== /* Pexp_construct */9) {\n      return List.rev({\n                  hd: expr,\n                  tl: acc\n                });\n    }\n    var match$1 = match._0.txt;\n    switch (match$1.TAG | 0) {\n      case /* Lident */0 :\n          switch (match$1._0) {\n            case \"::\" :\n                var match$2 = match._1;\n                if (match$2 === undefined) {\n                  return List.rev({\n                              hd: expr,\n                              tl: acc\n                            });\n                }\n                var match$3 = match$2.pexp_desc;\n                if (typeof match$3 === \"number\") {\n                  return List.rev({\n                              hd: expr,\n                              tl: acc\n                            });\n                }\n                if (match$3.TAG !== /* Pexp_tuple */8) {\n                  return List.rev({\n                              hd: expr,\n                              tl: acc\n                            });\n                }\n                var match$4 = match$3._0;\n                if (!match$4) {\n                  return List.rev({\n                              hd: expr,\n                              tl: acc\n                            });\n                }\n                var match$5 = match$4.tl;\n                if (!match$5) {\n                  return List.rev({\n                              hd: expr,\n                              tl: acc\n                            });\n                }\n                if (match$5.tl) {\n                  return List.rev({\n                              hd: expr,\n                              tl: acc\n                            });\n                }\n                _expr = match$5.hd;\n                _acc = {\n                  hd: match$4.hd,\n                  tl: acc\n           
     };\n                continue ;\n            case \"[]\" :\n                return List.rev(acc);\n            default:\n              return List.rev({\n                          hd: expr,\n                          tl: acc\n                        });\n          }\n      case /* Ldot */1 :\n      case /* Lapply */2 :\n          return List.rev({\n                      hd: expr,\n                      tl: acc\n                    });\n      \n    }\n  };\n}\n\nfunction arrowType(ct) {\n  var $$process = function (attrsBefore, _acc, _typ) {\n    while(true) {\n      var typ = _typ;\n      var acc = _acc;\n      var match = typ.ptyp_desc;\n      if (typeof match === \"number\") {\n        return [\n                attrsBefore,\n                List.rev(acc),\n                typ\n              ];\n      }\n      if (match.TAG !== /* Ptyp_arrow */1) {\n        return [\n                attrsBefore,\n                List.rev(acc),\n                typ\n              ];\n      }\n      var lbl = match._0;\n      if (typeof lbl === \"number\") {\n        var attrs = typ.ptyp_attributes;\n        var typ2 = match._2;\n        var typ1 = match._1;\n        if (attrs) {\n          if (attrs.hd[0].txt === \"bs\" && !attrs.tl) {\n            var arg = [\n              attrs,\n              lbl,\n              typ1\n            ];\n            _typ = typ2;\n            _acc = {\n              hd: arg,\n              tl: acc\n            };\n            continue ;\n          }\n          \n        } else {\n          var arg$1 = [\n            /* [] */0,\n            lbl,\n            typ1\n          ];\n          _typ = typ2;\n          _acc = {\n            hd: arg$1,\n            tl: acc\n          };\n          continue ;\n        }\n        var args = List.rev(acc);\n        return [\n                attrsBefore,\n                args,\n                typ\n              ];\n      }\n      var arg_0 = typ.ptyp_attributes;\n      var arg_2 = match._1;\n      var arg$2 
= [\n        arg_0,\n        lbl,\n        arg_2\n      ];\n      _typ = match._2;\n      _acc = {\n        hd: arg$2,\n        tl: acc\n      };\n      continue ;\n    };\n  };\n  var match = ct.ptyp_desc;\n  if (typeof match === \"number\" || !(match.TAG === /* Ptyp_arrow */1 && typeof match._0 === \"number\")) {\n    return $$process(/* [] */0, /* [] */0, ct);\n  } else {\n    return $$process(ct.ptyp_attributes, /* [] */0, {\n                ptyp_desc: ct.ptyp_desc,\n                ptyp_loc: ct.ptyp_loc,\n                ptyp_attributes: /* [] */0\n              });\n  }\n}\n\nfunction modExprApply(modExpr) {\n  var _acc = /* [] */0;\n  var _modExpr = modExpr;\n  while(true) {\n    var modExpr$1 = _modExpr;\n    var acc = _acc;\n    var match = modExpr$1.pmod_desc;\n    if (match.TAG !== /* Pmod_apply */3) {\n      return {\n              hd: modExpr$1,\n              tl: acc\n            };\n    }\n    _modExpr = match._0;\n    _acc = {\n      hd: match._1,\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction modExprFunctor(modExpr) {\n  var _acc = /* [] */0;\n  var _modExpr = modExpr;\n  while(true) {\n    var modExpr$1 = _modExpr;\n    var acc = _acc;\n    var match = modExpr$1.pmod_desc;\n    if (match.TAG !== /* Pmod_functor */2) {\n      return [\n              List.rev(acc),\n              modExpr$1\n            ];\n    }\n    var param_0 = modExpr$1.pmod_attributes;\n    var param_1 = match._0;\n    var param_2 = match._1;\n    var param = [\n      param_0,\n      param_1,\n      param_2\n    ];\n    _modExpr = match._2;\n    _acc = {\n      hd: param,\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction functorType(modtype) {\n  var _acc = /* [] */0;\n  var _modtype = modtype;\n  while(true) {\n    var modtype$1 = _modtype;\n    var acc = _acc;\n    var match = modtype$1.pmty_desc;\n    if (match.TAG !== /* Pmty_functor */2) {\n      return [\n              List.rev(acc),\n              modtype$1\n            ];\n    }\n    var arg_0 = 
modtype$1.pmty_attributes;\n    var arg_1 = match._0;\n    var arg_2 = match._1;\n    var arg = [\n      arg_0,\n      arg_1,\n      arg_2\n    ];\n    _modtype = match._2;\n    _acc = {\n      hd: arg,\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction funExpr(expr) {\n  var collectNewTypes = function (_acc, _returnExpr) {\n    while(true) {\n      var returnExpr = _returnExpr;\n      var acc = _acc;\n      var match = returnExpr.pexp_desc;\n      if (typeof match !== \"number\" && match.TAG === /* Pexp_newtype */31 && !returnExpr.pexp_attributes) {\n        _returnExpr = match._1;\n        _acc = {\n          hd: match._0,\n          tl: acc\n        };\n        continue ;\n      }\n      var match$1 = List.rev(acc);\n      var loc;\n      if (acc && match$1) {\n        var endLoc = match$1.hd;\n        var init = endLoc.loc;\n        loc = {\n          loc_start: init.loc_start,\n          loc_end: endLoc.loc.loc_end,\n          loc_ghost: init.loc_ghost\n        };\n      } else {\n        loc = $$Location.none;\n      }\n      var txt = List.fold_right((function (curr, acc) {\n              return acc + (\" \" + curr.txt);\n            }), acc, \"type\");\n      return [\n              $$Location.mkloc(txt, loc),\n              returnExpr\n            ];\n    };\n  };\n  var collect = function (attrsBefore, _acc, _expr) {\n    while(true) {\n      var expr = _expr;\n      var acc = _acc;\n      var match = expr.pexp_desc;\n      if (typeof match !== \"number\") {\n        switch (match.TAG | 0) {\n          case /* Pexp_fun */4 :\n              var lbl = match._0;\n              var exit = 0;\n              var attrs = expr.pexp_attributes;\n              var returnExpr = match._3;\n              var pattern = match._2;\n              var defaultExpr = match._1;\n              if (attrs) {\n                if (attrs.hd[0].txt === \"bs\" && !attrs.tl) {\n                  var parameter = [\n                    attrs,\n                    lbl,\n         
           defaultExpr,\n                    pattern\n                  ];\n                  _expr = returnExpr;\n                  _acc = {\n                    hd: parameter,\n                    tl: acc\n                  };\n                  continue ;\n                }\n                exit = 2;\n              } else {\n                var parameter$1 = [\n                  /* [] */0,\n                  lbl,\n                  defaultExpr,\n                  pattern\n                ];\n                _expr = returnExpr;\n                _acc = {\n                  hd: parameter$1,\n                  tl: acc\n                };\n                continue ;\n              }\n              if (exit === 2 && typeof lbl !== \"number\") {\n                var parameter_0 = expr.pexp_attributes;\n                var parameter_2 = match._1;\n                var parameter_3 = match._2;\n                var parameter$2 = [\n                  parameter_0,\n                  lbl,\n                  parameter_2,\n                  parameter_3\n                ];\n                _expr = match._3;\n                _acc = {\n                  hd: parameter$2,\n                  tl: acc\n                };\n                continue ;\n              }\n              break;\n          case /* Pexp_newtype */31 :\n              var stringLoc = match._0;\n              var match$1 = collectNewTypes({\n                    hd: stringLoc,\n                    tl: /* [] */0\n                  }, match._1);\n              var parameter_0$1 = expr.pexp_attributes;\n              var parameter_3$1 = Ast_helper.Pat.$$var(stringLoc.loc, undefined, match$1[0]);\n              var parameter$3 = [\n                parameter_0$1,\n                /* Nolabel */0,\n                undefined,\n                parameter_3$1\n              ];\n              _expr = match$1[1];\n              _acc = {\n                hd: parameter$3,\n                tl: acc\n              };\n              
continue ;\n          default:\n            \n        }\n      }\n      return [\n              attrsBefore,\n              List.rev(acc),\n              expr\n            ];\n    };\n  };\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\" || !(match.TAG === /* Pexp_fun */4 && typeof match._0 === \"number\")) {\n    return collect(/* [] */0, /* [] */0, expr);\n  } else {\n    return collect(expr.pexp_attributes, /* [] */0, {\n                pexp_desc: expr.pexp_desc,\n                pexp_loc: expr.pexp_loc,\n                pexp_attributes: /* [] */0\n              });\n  }\n}\n\nfunction isBlockExpr(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  switch (match.TAG | 0) {\n    case /* Pexp_apply */5 :\n    case /* Pexp_field */12 :\n    case /* Pexp_setfield */13 :\n    case /* Pexp_constraint */19 :\n        break;\n    case /* Pexp_let */2 :\n    case /* Pexp_sequence */16 :\n    case /* Pexp_letmodule */25 :\n    case /* Pexp_letexception */26 :\n    case /* Pexp_open */33 :\n        return true;\n    default:\n      return false;\n  }\n  if (isBlockExpr(match._0)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isIfThenElseExpr(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\" || match.TAG !== /* Pexp_ifthenelse */15) {\n    return false;\n  } else {\n    return true;\n  }\n}\n\nfunction walkStructure(s, t, comments) {\n  if (comments === /* [] */0) {\n    return ;\n  } else if (s) {\n    return walkList(undefined, (function (n) {\n                  return n.pstr_loc;\n                }), walkStructureItem, s, t, comments);\n  } else {\n    return attach(t.inside, $$Location.none, comments);\n  }\n}\n\nfunction walkStructureItem(si, t, comments) {\n  var valueDescription = si.pstr_desc;\n  if (comments === /* [] */0) {\n    return ;\n  }\n  switch (valueDescription.TAG | 0) {\n    case /* Pstr_eval */0 :\n        return 
walkExpr(valueDescription._0, t, comments);\n    case /* Pstr_value */1 :\n        return walkValueBindings(valueDescription._1, t, comments);\n    case /* Pstr_primitive */2 :\n        return walkValueDescription(valueDescription._0, t, comments);\n    case /* Pstr_type */3 :\n        return walkTypeDeclarations(valueDescription._1, t, comments);\n    case /* Pstr_typext */4 :\n        return walkTypeExtension(valueDescription._0, t, comments);\n    case /* Pstr_exception */5 :\n        return walkExtConstr(valueDescription._0, t, comments);\n    case /* Pstr_module */6 :\n        return walkModuleBinding(valueDescription._0, t, comments);\n    case /* Pstr_recmodule */7 :\n        return walkList(undefined, (function (mb) {\n                      return mb.pmb_loc;\n                    }), walkModuleBinding, valueDescription._0, t, comments);\n    case /* Pstr_modtype */8 :\n        return walkModuleTypeDeclaration(valueDescription._0, t, comments);\n    case /* Pstr_open */9 :\n        return walkOpenDescription(valueDescription._0, t, comments);\n    case /* Pstr_class */10 :\n    case /* Pstr_class_type */11 :\n        return ;\n    case /* Pstr_include */12 :\n        return walkIncludeDeclaration(valueDescription._0, t, comments);\n    case /* Pstr_attribute */13 :\n        return walkAttribute(valueDescription._0, t, comments);\n    case /* Pstr_extension */14 :\n        return walkExtension(valueDescription._0, t, comments);\n    \n  }\n}\n\nfunction walkValueDescription(vd, t, comments) {\n  var match = partitionLeadingTrailing(comments, vd.pval_name.loc);\n  attach(t.leading, vd.pval_name.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(vd.pval_name.loc, match[1]);\n  attach(t.trailing, vd.pval_name.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], vd.pval_type.ptyp_loc);\n  attach(t.leading, vd.pval_type.ptyp_loc, match$2[0]);\n  walkTypExpr(vd.pval_type, t, match$2[1]);\n  return attach(t.trailing, vd.pval_type.ptyp_loc, 
match$2[2]);\n}\n\nfunction walkTypeExtension(te, t, comments) {\n  var match = partitionLeadingTrailing(comments, te.ptyext_path.loc);\n  attach(t.leading, te.ptyext_path.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(te.ptyext_path.loc, match[1]);\n  var rest = match$1[1];\n  attach(t.trailing, te.ptyext_path.loc, match$1[0]);\n  var typeParams = te.ptyext_params;\n  var rest$1 = typeParams ? visitListButContinueWithRemainingComments(undefined, false, (function (param) {\n            return param[0].ptyp_loc;\n          }), walkTypeParam, typeParams, t, rest) : rest;\n  return walkList(undefined, (function (n) {\n                return n.pext_loc;\n              }), walkExtConstr, te.ptyext_constructors, t, rest$1);\n}\n\nfunction walkIncludeDeclaration(inclDecl, t, comments) {\n  var match = partitionByLoc(comments, inclDecl.pincl_mod.pmod_loc);\n  attach(t.leading, inclDecl.pincl_mod.pmod_loc, match[0]);\n  walkModExpr(inclDecl.pincl_mod, t, match[1]);\n  return attach(t.trailing, inclDecl.pincl_mod.pmod_loc, match[2]);\n}\n\nfunction walkModuleTypeDeclaration(mtd, t, comments) {\n  var match = partitionLeadingTrailing(comments, mtd.pmtd_name.loc);\n  var trailing = match[1];\n  attach(t.leading, mtd.pmtd_name.loc, match[0]);\n  var modType = mtd.pmtd_type;\n  if (modType === undefined) {\n    return attach(t.trailing, mtd.pmtd_name.loc, trailing);\n  }\n  var match$1 = partitionAdjacentTrailing(mtd.pmtd_name.loc, trailing);\n  attach(t.trailing, mtd.pmtd_name.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], modType.pmty_loc);\n  attach(t.leading, modType.pmty_loc, match$2[0]);\n  walkModType(modType, t, match$2[1]);\n  return attach(t.trailing, modType.pmty_loc, match$2[2]);\n}\n\nfunction walkModuleBinding(mb, t, comments) {\n  var match = partitionLeadingTrailing(comments, mb.pmb_name.loc);\n  attach(t.leading, mb.pmb_name.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(mb.pmb_name.loc, match[1]);\n  attach(t.trailing, 
mb.pmb_name.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], mb.pmb_expr.pmod_loc);\n  var inside = match$2[1];\n  var leading = match$2[0];\n  var match$3 = mb.pmb_expr.pmod_desc;\n  if (match$3.TAG === /* Pmod_constraint */4) {\n    walkModExpr(mb.pmb_expr, t, List.concat({\n              hd: leading,\n              tl: {\n                hd: inside,\n                tl: /* [] */0\n              }\n            }));\n  } else {\n    attach(t.leading, mb.pmb_expr.pmod_loc, leading);\n    walkModExpr(mb.pmb_expr, t, inside);\n  }\n  return attach(t.trailing, mb.pmb_expr.pmod_loc, match$2[2]);\n}\n\nfunction walkSignature(signature, t, comments) {\n  if (comments === /* [] */0) {\n    return ;\n  } else if (signature) {\n    return walkList(undefined, (function (n) {\n                  return n.psig_loc;\n                }), walkSignatureItem, signature, t, comments);\n  } else {\n    return attach(t.inside, $$Location.none, comments);\n  }\n}\n\nfunction walkSignatureItem(si, t, comments) {\n  var valueDescription = si.psig_desc;\n  if (comments === /* [] */0) {\n    return ;\n  }\n  switch (valueDescription.TAG | 0) {\n    case /* Psig_value */0 :\n        return walkValueDescription(valueDescription._0, t, comments);\n    case /* Psig_type */1 :\n        return walkTypeDeclarations(valueDescription._1, t, comments);\n    case /* Psig_typext */2 :\n        return walkTypeExtension(valueDescription._0, t, comments);\n    case /* Psig_exception */3 :\n        return walkExtConstr(valueDescription._0, t, comments);\n    case /* Psig_module */4 :\n        return walkModuleDeclaration(valueDescription._0, t, comments);\n    case /* Psig_recmodule */5 :\n        return walkList(undefined, (function (n) {\n                      return n.pmd_loc;\n                    }), walkModuleDeclaration, valueDescription._0, t, comments);\n    case /* Psig_modtype */6 :\n        return walkModuleTypeDeclaration(valueDescription._0, t, comments);\n    case /* Psig_open 
*/7 :\n        return walkOpenDescription(valueDescription._0, t, comments);\n    case /* Psig_include */8 :\n        return walkIncludeDescription(valueDescription._0, t, comments);\n    case /* Psig_class */9 :\n    case /* Psig_class_type */10 :\n        return ;\n    case /* Psig_attribute */11 :\n        return walkAttribute(valueDescription._0, t, comments);\n    case /* Psig_extension */12 :\n        return walkExtension(valueDescription._0, t, comments);\n    \n  }\n}\n\nfunction walkIncludeDescription(id, t, comments) {\n  var match = partitionByLoc(comments, id.pincl_mod.pmty_loc);\n  attach(t.leading, id.pincl_mod.pmty_loc, match[0]);\n  walkModType(id.pincl_mod, t, match[1]);\n  return attach(t.trailing, id.pincl_mod.pmty_loc, match[2]);\n}\n\nfunction walkModuleDeclaration(md, t, comments) {\n  var match = partitionLeadingTrailing(comments, md.pmd_name.loc);\n  attach(t.leading, md.pmd_name.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(md.pmd_name.loc, match[1]);\n  attach(t.trailing, md.pmd_name.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], md.pmd_type.pmty_loc);\n  attach(t.leading, md.pmd_type.pmty_loc, match$2[0]);\n  walkModType(md.pmd_type, t, match$2[1]);\n  return attach(t.trailing, md.pmd_type.pmty_loc, match$2[2]);\n}\n\nfunction walkList(_prevLoc, getLoc, walkNode, _l, t, _comments) {\n  while(true) {\n    var comments = _comments;\n    var l = _l;\n    var prevLoc = _prevLoc;\n    if (comments === /* [] */0) {\n      return ;\n    }\n    if (!l) {\n      if (prevLoc !== undefined) {\n        return attach(t.trailing, prevLoc, comments);\n      } else {\n        return ;\n      }\n    }\n    var node = l.hd;\n    var currLoc = Curry._1(getLoc, node);\n    var match = partitionByLoc(comments, currLoc);\n    var leading = match[0];\n    if (prevLoc !== undefined) {\n      if (prevLoc.loc_end.pos_lnum === currLoc.loc_start.pos_lnum) {\n        var match$1 = partitionAdjacentTrailing(prevLoc, leading);\n        
attach(t.trailing, prevLoc, match$1[0]);\n        attach(t.leading, currLoc, match$1[1]);\n      } else {\n        var match$2 = partitionByOnSameLine(prevLoc, leading);\n        attach(t.trailing, prevLoc, match$2[0]);\n        var match$3 = partitionByLoc(match$2[1], currLoc);\n        attach(t.leading, currLoc, match$3[0]);\n      }\n    } else {\n      attach(t.leading, currLoc, leading);\n    }\n    Curry._3(walkNode, node, t, match[1]);\n    _comments = match[2];\n    _l = l.tl;\n    _prevLoc = currLoc;\n    continue ;\n  };\n}\n\nfunction visitListButContinueWithRemainingComments(_prevLoc, newlineDelimited, getLoc, walkNode, _l, t, _comments) {\n  while(true) {\n    var comments = _comments;\n    var l = _l;\n    var prevLoc = _prevLoc;\n    if (comments === /* [] */0) {\n      return /* [] */0;\n    }\n    if (l) {\n      var node = l.hd;\n      var currLoc = Curry._1(getLoc, node);\n      var match = partitionByLoc(comments, currLoc);\n      var leading = match[0];\n      if (prevLoc !== undefined) {\n        if (prevLoc.loc_end.pos_lnum === currLoc.loc_start.pos_lnum) {\n          var match$1 = partitionAdjacentTrailing(prevLoc, leading);\n          attach(t.trailing, prevLoc, match$1[0]);\n          attach(t.leading, currLoc, match$1[1]);\n        } else {\n          var match$2 = partitionByOnSameLine(prevLoc, leading);\n          attach(t.trailing, prevLoc, match$2[0]);\n          var match$3 = partitionByLoc(match$2[1], currLoc);\n          attach(t.leading, currLoc, match$3[0]);\n        }\n      } else {\n        attach(t.leading, currLoc, leading);\n      }\n      Curry._3(walkNode, node, t, match[1]);\n      _comments = match[2];\n      _l = l.tl;\n      _prevLoc = currLoc;\n      continue ;\n    }\n    if (prevLoc === undefined) {\n      return comments;\n    }\n    var match$4 = newlineDelimited ? 
partitionByOnSameLine(prevLoc, comments) : partitionAdjacentTrailing(prevLoc, comments);\n    attach(t.trailing, prevLoc, match$4[0]);\n    return match$4[1];\n  };\n}\n\nfunction walkValueBindings(vbs, t, comments) {\n  return walkList(undefined, (function (n) {\n                return n.pvb_loc;\n              }), walkValueBinding, vbs, t, comments);\n}\n\nfunction walkOpenDescription(openDescription, t, comments) {\n  var loc = openDescription.popen_lid.loc;\n  var match = partitionLeadingTrailing(comments, loc);\n  attach(t.leading, loc, match[0]);\n  return attach(t.trailing, loc, match[1]);\n}\n\nfunction walkTypeDeclarations(typeDeclarations, t, comments) {\n  return walkList(undefined, (function (n) {\n                return n.ptype_loc;\n              }), walkTypeDeclaration, typeDeclarations, t, comments);\n}\n\nfunction walkTypeParam(param, t, comments) {\n  return walkTypExpr(param[0], t, comments);\n}\n\nfunction walkTypeDeclaration(td, t, comments) {\n  var match = partitionLeadingTrailing(comments, td.ptype_name.loc);\n  attach(t.leading, td.ptype_name.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(td.ptype_name.loc, match[1]);\n  var rest = match$1[1];\n  attach(t.trailing, td.ptype_name.loc, match$1[0]);\n  var typeParams = td.ptype_params;\n  var rest$1 = typeParams ? 
visitListButContinueWithRemainingComments(undefined, false, (function (param) {\n            return param[0].ptyp_loc;\n          }), walkTypeParam, typeParams, t, rest) : rest;\n  var typexpr = td.ptype_manifest;\n  var rest$2;\n  if (typexpr !== undefined) {\n    var match$2 = partitionByLoc(rest$1, typexpr.ptyp_loc);\n    attach(t.leading, typexpr.ptyp_loc, match$2[0]);\n    walkTypExpr(typexpr, t, match$2[1]);\n    var match$3 = partitionAdjacentTrailing(typexpr.ptyp_loc, match$2[2]);\n    attach(t.trailing, typexpr.ptyp_loc, match$3[0]);\n    rest$2 = match$3[1];\n  } else {\n    rest$2 = rest$1;\n  }\n  var labelDeclarations = td.ptype_kind;\n  var rest$3;\n  if (typeof labelDeclarations === \"number\") {\n    rest$3 = rest$2;\n  } else if (labelDeclarations.TAG === /* Ptype_variant */0) {\n    rest$3 = walkConstructorDeclarations(labelDeclarations._0, t, rest$2);\n  } else {\n    walkList(undefined, (function (ld) {\n            return ld.pld_loc;\n          }), walkLabelDeclaration, labelDeclarations._0, t, rest$2);\n    rest$3 = /* [] */0;\n  }\n  return attach(t.trailing, td.ptype_loc, rest$3);\n}\n\nfunction walkLabelDeclarations(lds, t, comments) {\n  return visitListButContinueWithRemainingComments(undefined, false, (function (ld) {\n                return ld.pld_loc;\n              }), walkLabelDeclaration, lds, t, comments);\n}\n\nfunction walkLabelDeclaration(ld, t, comments) {\n  var match = partitionLeadingTrailing(comments, ld.pld_name.loc);\n  attach(t.leading, ld.pld_name.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(ld.pld_name.loc, match[1]);\n  attach(t.trailing, ld.pld_name.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], ld.pld_type.ptyp_loc);\n  attach(t.leading, ld.pld_type.ptyp_loc, match$2[0]);\n  walkTypExpr(ld.pld_type, t, match$2[1]);\n  return attach(t.trailing, ld.pld_type.ptyp_loc, match$2[2]);\n}\n\nfunction walkConstructorDeclarations(cds, t, comments) {\n  return 
visitListButContinueWithRemainingComments(undefined, false, (function (cd) {\n                return cd.pcd_loc;\n              }), walkConstructorDeclaration, cds, t, comments);\n}\n\nfunction walkConstructorDeclaration(cd, t, comments) {\n  var match = partitionLeadingTrailing(comments, cd.pcd_name.loc);\n  attach(t.leading, cd.pcd_name.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(cd.pcd_name.loc, match[1]);\n  attach(t.trailing, cd.pcd_name.loc, match$1[0]);\n  var rest = walkConstructorArguments(cd.pcd_args, t, match$1[1]);\n  var typexpr = cd.pcd_res;\n  var rest$1;\n  if (typexpr !== undefined) {\n    var match$2 = partitionByLoc(rest, typexpr.ptyp_loc);\n    attach(t.leading, typexpr.ptyp_loc, match$2[0]);\n    walkTypExpr(typexpr, t, match$2[1]);\n    var match$3 = partitionAdjacentTrailing(typexpr.ptyp_loc, match$2[2]);\n    attach(t.trailing, typexpr.ptyp_loc, match$3[0]);\n    rest$1 = match$3[1];\n  } else {\n    rest$1 = rest;\n  }\n  return attach(t.trailing, cd.pcd_loc, rest$1);\n}\n\nfunction walkConstructorArguments(args, t, comments) {\n  if (args.TAG === /* Pcstr_tuple */0) {\n    return visitListButContinueWithRemainingComments(undefined, false, (function (n) {\n                  return n.ptyp_loc;\n                }), walkTypExpr, args._0, t, comments);\n  } else {\n    return walkLabelDeclarations(args._0, t, comments);\n  }\n}\n\nfunction walkValueBinding(vb, t, comments) {\n  var match = vb.pvb_pat;\n  var match$1 = vb.pvb_expr;\n  var match$2 = match.ppat_desc;\n  var vb$1;\n  if (typeof match$2 === \"number\" || match$2.TAG !== /* Ppat_constraint */10) {\n    vb$1 = vb;\n  } else {\n    var typ = match$2._1;\n    var match$3 = typ.ptyp_desc;\n    if (typeof match$3 === \"number\" || match$3.TAG !== /* Ptyp_poly */8) {\n      vb$1 = vb;\n    } else {\n      var pat = match$2._0;\n      if (match$3._0) {\n        var match$4 = match$1.pexp_desc;\n        var t$1 = match$3._1;\n        if (typeof match$4 === \"number\") {\n      
    vb$1 = vb;\n        } else {\n          switch (match$4.TAG | 0) {\n            case /* Pexp_fun */4 :\n                var init = vb.pvb_pat;\n                var init$1 = pat.ppat_loc;\n                vb$1 = {\n                  pvb_pat: {\n                    ppat_desc: init.ppat_desc,\n                    ppat_loc: {\n                      loc_start: init$1.loc_start,\n                      loc_end: t$1.ptyp_loc.loc_end,\n                      loc_ghost: init$1.loc_ghost\n                    },\n                    ppat_attributes: init.ppat_attributes\n                  },\n                  pvb_expr: vb.pvb_expr,\n                  pvb_attributes: vb.pvb_attributes,\n                  pvb_loc: vb.pvb_loc\n                };\n                break;\n            case /* Pexp_newtype */31 :\n                var match$5 = match$4._1.pexp_desc;\n                if (typeof match$5 === \"number\" || match$5.TAG !== /* Pexp_constraint */19) {\n                  vb$1 = vb;\n                } else {\n                  var init$2 = match.ppat_loc;\n                  vb$1 = {\n                    pvb_pat: {\n                      ppat_desc: {\n                        TAG: /* Ppat_constraint */10,\n                        _0: pat,\n                        _1: typ\n                      },\n                      ppat_loc: {\n                        loc_start: init$2.loc_start,\n                        loc_end: t$1.ptyp_loc.loc_end,\n                        loc_ghost: init$2.loc_ghost\n                      },\n                      ppat_attributes: match.ppat_attributes\n                    },\n                    pvb_expr: match$5._0,\n                    pvb_attributes: vb.pvb_attributes,\n                    pvb_loc: vb.pvb_loc\n                  };\n                }\n                break;\n            default:\n              vb$1 = vb;\n          }\n        }\n      } else {\n        var match$6 = match$1.pexp_desc;\n        if (typeof match$6 === \"number\" || 
match$6.TAG !== /* Pexp_constraint */19) {\n          vb$1 = vb;\n        } else {\n          var t$2 = match$3._1;\n          var init$3 = pat.ppat_loc;\n          vb$1 = {\n            pvb_pat: Ast_helper.Pat.constraint_({\n                  loc_start: init$3.loc_start,\n                  loc_end: t$2.ptyp_loc.loc_end,\n                  loc_ghost: init$3.loc_ghost\n                }, undefined, pat, t$2),\n            pvb_expr: match$6._0,\n            pvb_attributes: vb.pvb_attributes,\n            pvb_loc: vb.pvb_loc\n          };\n        }\n      }\n    }\n  }\n  var patternLoc = vb$1.pvb_pat.ppat_loc;\n  var exprLoc = vb$1.pvb_expr.pexp_loc;\n  var expr = vb$1.pvb_expr;\n  var match$7 = partitionByLoc(comments, patternLoc);\n  attach(t.leading, patternLoc, match$7[0]);\n  walkPattern(vb$1.pvb_pat, t, match$7[1]);\n  var match$8 = partitionAdjacentTrailing(patternLoc, match$7[2]);\n  attach(t.trailing, patternLoc, match$8[0]);\n  var match$9 = partitionByLoc(match$8[1], exprLoc);\n  var afterExpr = match$9[2];\n  var insideExpr = match$9[1];\n  var beforeExpr = match$9[0];\n  if (isBlockExpr(expr)) {\n    return walkExpr(expr, t, List.concat({\n                    hd: beforeExpr,\n                    tl: {\n                      hd: insideExpr,\n                      tl: {\n                        hd: afterExpr,\n                        tl: /* [] */0\n                      }\n                    }\n                  }));\n  } else {\n    attach(t.leading, exprLoc, beforeExpr);\n    walkExpr(expr, t, insideExpr);\n    return attach(t.trailing, exprLoc, afterExpr);\n  }\n}\n\nfunction walkExpr(_expr, t, _comments) {\n  while(true) {\n    var comments = _comments;\n    var expr = _expr;\n    var longident = expr.pexp_desc;\n    var exit = 0;\n    var exprs;\n    var expr$1;\n    var cases;\n    if (comments === /* [] */0) {\n      return ;\n    }\n    var exit$1 = 0;\n    if (typeof longident === \"number\") {\n      return ;\n    }\n    switch (longident.TAG | 
0) {\n      case /* Pexp_ident */0 :\n          var longident$1 = longident._0;\n          var match = partitionLeadingTrailing(comments, longident$1.loc);\n          attach(t.leading, longident$1.loc, match[0]);\n          return attach(t.trailing, longident$1.loc, match[1]);\n      case /* Pexp_constant */1 :\n          var match$1 = partitionLeadingTrailing(comments, expr.pexp_loc);\n          attach(t.leading, expr.pexp_loc, match$1[0]);\n          return attach(t.trailing, expr.pexp_loc, match$1[1]);\n      case /* Pexp_let */2 :\n          var expr2 = longident._2;\n          var match$2 = expr2.pexp_desc;\n          var valueBindings = longident._1;\n          var exit$2 = 0;\n          if (typeof match$2 === \"number\" || match$2.TAG !== /* Pexp_construct */9) {\n            exit$2 = 6;\n          } else {\n            var match$3 = match$2._0.txt;\n            switch (match$3.TAG | 0) {\n              case /* Lident */0 :\n                  if (match$3._0 === \"()\") {\n                    if (match$2._1 === undefined) {\n                      return walkValueBindings(valueBindings, t, comments);\n                    }\n                    exit$2 = 6;\n                  } else {\n                    exit$2 = 6;\n                  }\n                  break;\n              case /* Ldot */1 :\n              case /* Lapply */2 :\n                  exit$2 = 6;\n                  break;\n              \n            }\n          }\n          if (exit$2 === 6) {\n            var comments$1 = visitListButContinueWithRemainingComments(undefined, true, (function (n) {\n                    if (n.pvb_pat.ppat_loc.loc_ghost) {\n                      return n.pvb_expr.pexp_loc;\n                    } else {\n                      return n.pvb_loc;\n                    }\n                  }), walkValueBinding, valueBindings, t, comments);\n            if (isBlockExpr(expr2)) {\n              _comments = comments$1;\n              _expr = expr2;\n              continue 
;\n            }\n            var match$4 = partitionByLoc(comments$1, expr2.pexp_loc);\n            attach(t.leading, expr2.pexp_loc, match$4[0]);\n            walkExpr(expr2, t, match$4[1]);\n            return attach(t.trailing, expr2.pexp_loc, match$4[2]);\n          }\n          break;\n      case /* Pexp_apply */5 :\n          var callExpr = longident._0;\n          var match$5 = callExpr.pexp_desc;\n          var exit$3 = 0;\n          if (typeof match$5 === \"number\" || match$5.TAG !== /* Pexp_ident */0) {\n            exit$3 = 6;\n          } else {\n            var match$6 = match$5._0.txt;\n            switch (match$6.TAG | 0) {\n              case /* Lident */0 :\n                  var exit$4 = 0;\n                  switch (match$6._0) {\n                    case \"!=\" :\n                    case \"!==\" :\n                    case \"&&\" :\n                    case \"*\" :\n                    case \"**\" :\n                    case \"*.\" :\n                    case \"+\" :\n                    case \"++\" :\n                    case \"+.\" :\n                    case \"-\" :\n                    case \"-.\" :\n                    case \"/\" :\n                    case \"/.\" :\n                    case \":=\" :\n                    case \"<\" :\n                    case \"<=\" :\n                    case \"<>\" :\n                    case \"=\" :\n                    case \"==\" :\n                    case \">\" :\n                    case \">=\" :\n                    case \"^\" :\n                    case \"|.\" :\n                    case \"|>\" :\n                    case \"||\" :\n                        exit$4 = 8;\n                        break;\n                    case \"!\" :\n                    case \"not\" :\n                    case \"~+\" :\n                    case \"~+.\" :\n                    case \"~-\" :\n                    case \"~-.\" :\n                        exit$4 = 7;\n                        break;\n                    
default:\n                      exit$3 = 6;\n                  }\n                  switch (exit$4) {\n                    case 7 :\n                        var match$7 = longident._1;\n                        if (match$7) {\n                          var match$8 = match$7.hd;\n                          if (typeof match$8[0] === \"number\" && !match$7.tl) {\n                            var argExpr = match$8[1];\n                            var match$9 = partitionByLoc(comments, argExpr.pexp_loc);\n                            attach(t.leading, argExpr.pexp_loc, match$9[0]);\n                            walkExpr(argExpr, t, match$9[1]);\n                            return attach(t.trailing, argExpr.pexp_loc, match$9[2]);\n                          }\n                          exit$3 = 6;\n                        } else {\n                          exit$3 = 6;\n                        }\n                        break;\n                    case 8 :\n                        var match$10 = longident._1;\n                        if (match$10) {\n                          var match$11 = match$10.hd;\n                          if (typeof match$11[0] === \"number\") {\n                            var match$12 = match$10.tl;\n                            if (match$12) {\n                              var match$13 = match$12.hd;\n                              if (typeof match$13[0] === \"number\" && !match$12.tl) {\n                                var operand2 = match$13[1];\n                                var operand1 = match$11[1];\n                                var match$14 = partitionByLoc(comments, operand1.pexp_loc);\n                                attach(t.leading, operand1.pexp_loc, match$14[0]);\n                                walkExpr(operand1, t, match$14[1]);\n                                var match$15 = partitionAdjacentTrailing(operand1.pexp_loc, match$14[2]);\n                                attach(t.trailing, operand1.pexp_loc, match$15[0]);\n             
                   var match$16 = partitionByLoc(match$15[1], operand2.pexp_loc);\n                                attach(t.leading, operand2.pexp_loc, match$16[0]);\n                                walkExpr(operand2, t, match$16[1]);\n                                return attach(t.trailing, operand2.pexp_loc, match$16[2]);\n                              }\n                              exit$3 = 6;\n                            } else {\n                              exit$3 = 6;\n                            }\n                          } else {\n                            exit$3 = 6;\n                          }\n                        } else {\n                          exit$3 = 6;\n                        }\n                        break;\n                    \n                  }\n                  break;\n              case /* Ldot */1 :\n              case /* Lapply */2 :\n                  exit$3 = 6;\n                  break;\n              \n            }\n          }\n          if (exit$3 === 6) {\n            var match$17 = partitionByLoc(comments, callExpr.pexp_loc);\n            var after = match$17[2];\n            var inside = match$17[1];\n            var before = match$17[0];\n            var after$1;\n            if (isBlockExpr(callExpr)) {\n              var match$18 = partitionAdjacentTrailing(callExpr.pexp_loc, after);\n              walkExpr(callExpr, t, List.concat({\n                        hd: before,\n                        tl: {\n                          hd: inside,\n                          tl: {\n                            hd: match$18[0],\n                            tl: /* [] */0\n                          }\n                        }\n                      }));\n              after$1 = match$18[1];\n            } else {\n              attach(t.leading, callExpr.pexp_loc, before);\n              walkExpr(callExpr, t, inside);\n              after$1 = after;\n            }\n            var match$19 = 
partitionAdjacentTrailing(callExpr.pexp_loc, after$1);\n            attach(t.trailing, callExpr.pexp_loc, match$19[0]);\n            return walkList(undefined, (function (param) {\n                          var expr = param[1];\n                          var match = expr.pexp_attributes;\n                          if (!match) {\n                            return expr.pexp_loc;\n                          }\n                          var match$1 = match.hd[0];\n                          if (match$1.txt !== \"ns.namedArgLoc\") {\n                            return expr.pexp_loc;\n                          }\n                          var loc = match$1.loc;\n                          return {\n                                  loc_start: loc.loc_start,\n                                  loc_end: expr.pexp_loc.loc_end,\n                                  loc_ghost: loc.loc_ghost\n                                };\n                        }), walkExprArgument, longident._1, t, match$19[1]);\n          }\n          break;\n      case /* Pexp_match */6 :\n          var cases$1 = longident._1;\n          var expr1 = longident._0;\n          if (cases$1) {\n            var match$20 = cases$1.tl;\n            if (match$20 && !match$20.tl) {\n              var elseBranch = match$20.hd;\n              var $$case = cases$1.hd;\n              if (Res_parsetree_viewer.hasIfLetAttribute(expr.pexp_attributes)) {\n                var match$21 = partitionByLoc(comments, $$case.pc_lhs.ppat_loc);\n                attach(t.leading, $$case.pc_lhs.ppat_loc, match$21[0]);\n                walkPattern($$case.pc_lhs, t, match$21[1]);\n                var match$22 = partitionAdjacentTrailing($$case.pc_lhs.ppat_loc, match$21[2]);\n                attach(t.trailing, $$case.pc_lhs.ppat_loc, match$22[0]);\n                var match$23 = partitionByLoc(match$22[1], expr1.pexp_loc);\n                attach(t.leading, expr1.pexp_loc, match$23[0]);\n                walkExpr(expr1, t, match$23[1]);\n  
              var match$24 = partitionAdjacentTrailing(expr1.pexp_loc, match$23[2]);\n                attach(t.trailing, expr1.pexp_loc, match$24[0]);\n                var match$25 = partitionByLoc(match$24[1], $$case.pc_rhs.pexp_loc);\n                var after$2 = match$25[2];\n                var inside$1 = match$25[1];\n                var before$1 = match$25[0];\n                var after$3;\n                if (isBlockExpr($$case.pc_rhs)) {\n                  var match$26 = partitionAdjacentTrailing($$case.pc_rhs.pexp_loc, after$2);\n                  walkExpr($$case.pc_rhs, t, List.concat({\n                            hd: before$1,\n                            tl: {\n                              hd: inside$1,\n                              tl: {\n                                hd: match$26[0],\n                                tl: /* [] */0\n                              }\n                            }\n                          }));\n                  after$3 = match$26[1];\n                } else {\n                  attach(t.leading, $$case.pc_rhs.pexp_loc, before$1);\n                  walkExpr($$case.pc_rhs, t, inside$1);\n                  after$3 = after$2;\n                }\n                var match$27 = partitionAdjacentTrailing($$case.pc_rhs.pexp_loc, after$3);\n                attach(t.trailing, $$case.pc_rhs.pexp_loc, match$27[0]);\n                var match$28 = partitionByLoc(match$27[1], elseBranch.pc_rhs.pexp_loc);\n                var after$4 = match$28[2];\n                var inside$2 = match$28[1];\n                var before$2 = match$28[0];\n                var after$5;\n                if (isBlockExpr(elseBranch.pc_rhs)) {\n                  var match$29 = partitionAdjacentTrailing(elseBranch.pc_rhs.pexp_loc, after$4);\n                  walkExpr(elseBranch.pc_rhs, t, List.concat({\n                            hd: before$2,\n                            tl: {\n                              hd: inside$2,\n                            
  tl: {\n                                hd: match$29[0],\n                                tl: /* [] */0\n                              }\n                            }\n                          }));\n                  after$5 = match$29[1];\n                } else {\n                  attach(t.leading, elseBranch.pc_rhs.pexp_loc, before$2);\n                  walkExpr(elseBranch.pc_rhs, t, inside$2);\n                  after$5 = after$4;\n                }\n                return attach(t.trailing, elseBranch.pc_rhs.pexp_loc, after$5);\n              }\n              expr$1 = expr1;\n              cases = cases$1;\n              exit = 3;\n            } else {\n              expr$1 = expr1;\n              cases = cases$1;\n              exit = 3;\n            }\n          } else {\n            expr$1 = expr1;\n            cases = cases$1;\n            exit = 3;\n          }\n          break;\n      case /* Pexp_try */7 :\n          expr$1 = longident._0;\n          cases = longident._1;\n          exit = 3;\n          break;\n      case /* Pexp_tuple */8 :\n          var exprs$1 = longident._0;\n          if (exprs$1) {\n            exprs = exprs$1;\n            exit = 2;\n          } else {\n            exit = 1;\n          }\n          break;\n      case /* Pexp_construct */9 :\n          var longident$2 = longident._0;\n          var match$30 = longident$2.txt;\n          var exit$5 = 0;\n          switch (match$30.TAG | 0) {\n            case /* Lident */0 :\n                switch (match$30._0) {\n                  case \"::\" :\n                      return walkList(undefined, (function (n) {\n                                    return n.pexp_loc;\n                                  }), walkExpr, collectListExprs(/* [] */0, expr), t, comments);\n                  case \"[]\" :\n                      exit = 1;\n                      break;\n                  default:\n                    exit$5 = 6;\n                }\n                break;\n            case 
/* Ldot */1 :\n            case /* Lapply */2 :\n                exit$5 = 6;\n                break;\n            \n          }\n          if (exit$5 === 6) {\n            var args = longident._1;\n            var match$31 = partitionLeadingTrailing(comments, longident$2.loc);\n            var trailing = match$31[1];\n            attach(t.leading, longident$2.loc, match$31[0]);\n            if (args === undefined) {\n              return attach(t.trailing, longident$2.loc, trailing);\n            }\n            var match$32 = partitionAdjacentTrailing(longident$2.loc, trailing);\n            attach(t.trailing, longident$2.loc, match$32[0]);\n            _comments = match$32[1];\n            _expr = args;\n            continue ;\n          }\n          break;\n      case /* Pexp_variant */10 :\n          var expr$2 = longident._1;\n          if (expr$2 === undefined) {\n            return ;\n          }\n          _expr = expr$2;\n          continue ;\n      case /* Pexp_record */11 :\n          var spreadExpr = longident._1;\n          var comments$2;\n          if (spreadExpr !== undefined) {\n            var match$33 = partitionByLoc(comments, spreadExpr.pexp_loc);\n            attach(t.leading, spreadExpr.pexp_loc, match$33[0]);\n            walkExpr(spreadExpr, t, match$33[1]);\n            var match$34 = partitionAdjacentTrailing(spreadExpr.pexp_loc, match$33[2]);\n            attach(t.trailing, spreadExpr.pexp_loc, match$34[0]);\n            comments$2 = match$34[1];\n          } else {\n            comments$2 = comments;\n          }\n          return walkList(undefined, (function (param) {\n                        var init = param[0].loc;\n                        return {\n                                loc_start: init.loc_start,\n                                loc_end: param[1].pexp_loc.loc_end,\n                                loc_ghost: init.loc_ghost\n                              };\n                      }), walkExprRecordRow, longident._0, t, 
comments$2);\n      case /* Pexp_field */12 :\n          var longident$3 = longident._1;\n          var expr$3 = longident._0;\n          var match$35 = partitionByLoc(comments, expr$3.pexp_loc);\n          var trailing$1 = match$35[2];\n          var inside$3 = match$35[1];\n          var leading = match$35[0];\n          var trailing$2;\n          if (isBlockExpr(expr$3)) {\n            var match$36 = partitionAdjacentTrailing(expr$3.pexp_loc, trailing$1);\n            walkExpr(expr$3, t, List.concat({\n                      hd: leading,\n                      tl: {\n                        hd: inside$3,\n                        tl: {\n                          hd: match$36[0],\n                          tl: /* [] */0\n                        }\n                      }\n                    }));\n            trailing$2 = match$36[1];\n          } else {\n            attach(t.leading, expr$3.pexp_loc, leading);\n            walkExpr(expr$3, t, inside$3);\n            trailing$2 = trailing$1;\n          }\n          var match$37 = partitionAdjacentTrailing(expr$3.pexp_loc, trailing$2);\n          attach(t.trailing, expr$3.pexp_loc, match$37[0]);\n          var match$38 = partitionLeadingTrailing(match$37[1], longident$3.loc);\n          attach(t.leading, longident$3.loc, match$38[0]);\n          return attach(t.trailing, longident$3.loc, match$38[1]);\n      case /* Pexp_setfield */13 :\n          var expr2$1 = longident._2;\n          var longident$4 = longident._1;\n          var expr1$1 = longident._0;\n          var match$39 = partitionByLoc(comments, expr1$1.pexp_loc);\n          var trailing$3 = match$39[2];\n          var inside$4 = match$39[1];\n          var leading$1 = match$39[0];\n          var rest;\n          if (isBlockExpr(expr1$1)) {\n            var match$40 = partitionAdjacentTrailing(expr1$1.pexp_loc, trailing$3);\n            walkExpr(expr1$1, t, List.concat({\n                      hd: leading$1,\n                      tl: {\n                   
     hd: inside$4,\n                        tl: {\n                          hd: match$40[0],\n                          tl: /* [] */0\n                        }\n                      }\n                    }));\n            rest = match$40[1];\n          } else {\n            var match$41 = partitionAdjacentTrailing(expr1$1.pexp_loc, trailing$3);\n            attach(t.leading, expr1$1.pexp_loc, leading$1);\n            walkExpr(expr1$1, t, inside$4);\n            attach(t.trailing, expr1$1.pexp_loc, match$41[0]);\n            rest = match$41[1];\n          }\n          var match$42 = partitionLeadingTrailing(rest, longident$4.loc);\n          attach(t.leading, longident$4.loc, match$42[0]);\n          var match$43 = partitionAdjacentTrailing(longident$4.loc, match$42[1]);\n          var rest$1 = match$43[1];\n          attach(t.trailing, longident$4.loc, match$43[0]);\n          if (isBlockExpr(expr2$1)) {\n            _comments = rest$1;\n            _expr = expr2$1;\n            continue ;\n          }\n          var match$44 = partitionByLoc(rest$1, expr2$1.pexp_loc);\n          attach(t.leading, expr2$1.pexp_loc, match$44[0]);\n          walkExpr(expr2$1, t, match$44[1]);\n          return attach(t.trailing, expr2$1.pexp_loc, match$44[2]);\n      case /* Pexp_array */14 :\n          var exprs$2 = longident._0;\n          if (exprs$2) {\n            exprs = exprs$2;\n            exit = 2;\n          } else {\n            exit = 1;\n          }\n          break;\n      case /* Pexp_ifthenelse */15 :\n          var elseExpr = longident._2;\n          var thenExpr = longident._1;\n          var ifExpr = longident._0;\n          var match$45 = partitionByLoc(comments, ifExpr.pexp_loc);\n          var trailing$4 = match$45[2];\n          var inside$5 = match$45[1];\n          var leading$2 = match$45[0];\n          var comments$3;\n          if (isBlockExpr(ifExpr)) {\n            var match$46 = partitionAdjacentTrailing(ifExpr.pexp_loc, trailing$4);\n            
walkExpr(ifExpr, t, List.concat({\n                      hd: leading$2,\n                      tl: {\n                        hd: inside$5,\n                        tl: {\n                          hd: match$46[0],\n                          tl: /* [] */0\n                        }\n                      }\n                    }));\n            comments$3 = match$46[1];\n          } else {\n            attach(t.leading, ifExpr.pexp_loc, leading$2);\n            walkExpr(ifExpr, t, inside$5);\n            var match$47 = partitionAdjacentTrailing(ifExpr.pexp_loc, trailing$4);\n            attach(t.trailing, ifExpr.pexp_loc, match$47[0]);\n            comments$3 = match$47[1];\n          }\n          var match$48 = partitionByLoc(comments$3, thenExpr.pexp_loc);\n          var trailing$5 = match$48[2];\n          var inside$6 = match$48[1];\n          var leading$3 = match$48[0];\n          var comments$4;\n          if (isBlockExpr(thenExpr)) {\n            var match$49 = partitionAdjacentTrailing(thenExpr.pexp_loc, trailing$5);\n            walkExpr(thenExpr, t, List.concat({\n                      hd: leading$3,\n                      tl: {\n                        hd: inside$6,\n                        tl: {\n                          hd: match$49[0],\n                          tl: /* [] */0\n                        }\n                      }\n                    }));\n            comments$4 = match$49[1];\n          } else {\n            attach(t.leading, thenExpr.pexp_loc, leading$3);\n            walkExpr(thenExpr, t, inside$6);\n            var match$50 = partitionAdjacentTrailing(thenExpr.pexp_loc, trailing$5);\n            attach(t.trailing, thenExpr.pexp_loc, match$50[0]);\n            comments$4 = match$50[1];\n          }\n          if (elseExpr === undefined) {\n            return ;\n          }\n          if (isBlockExpr(elseExpr) || isIfThenElseExpr(elseExpr)) {\n            _comments = comments$4;\n            _expr = elseExpr;\n            continue 
;\n          }\n          var match$51 = partitionByLoc(comments$4, elseExpr.pexp_loc);\n          attach(t.leading, elseExpr.pexp_loc, match$51[0]);\n          walkExpr(elseExpr, t, match$51[1]);\n          return attach(t.trailing, elseExpr.pexp_loc, match$51[2]);\n      case /* Pexp_sequence */16 :\n          var expr2$2 = longident._1;\n          var expr1$2 = longident._0;\n          var match$52 = partitionByLoc(comments, expr1$2.pexp_loc);\n          var trailing$6 = match$52[2];\n          var inside$7 = match$52[1];\n          var leading$4 = match$52[0];\n          var comments$5;\n          if (isBlockExpr(expr1$2)) {\n            var match$53 = partitionByOnSameLine(expr1$2.pexp_loc, trailing$6);\n            walkExpr(expr1$2, t, List.concat({\n                      hd: leading$4,\n                      tl: {\n                        hd: inside$7,\n                        tl: {\n                          hd: match$53[0],\n                          tl: /* [] */0\n                        }\n                      }\n                    }));\n            comments$5 = match$53[1];\n          } else {\n            attach(t.leading, expr1$2.pexp_loc, leading$4);\n            walkExpr(expr1$2, t, inside$7);\n            var match$54 = partitionByOnSameLine(expr1$2.pexp_loc, trailing$6);\n            attach(t.trailing, expr1$2.pexp_loc, match$54[0]);\n            comments$5 = match$54[1];\n          }\n          if (isBlockExpr(expr2$2)) {\n            _comments = comments$5;\n            _expr = expr2$2;\n            continue ;\n          }\n          var match$55 = partitionByLoc(comments$5, expr2$2.pexp_loc);\n          attach(t.leading, expr2$2.pexp_loc, match$55[0]);\n          walkExpr(expr2$2, t, match$55[1]);\n          return attach(t.trailing, expr2$2.pexp_loc, match$55[2]);\n      case /* Pexp_while */17 :\n          var expr2$3 = longident._1;\n          var expr1$3 = longident._0;\n          var match$56 = partitionByLoc(comments, 
expr1$3.pexp_loc);\n          var trailing$7 = match$56[2];\n          var inside$8 = match$56[1];\n          var leading$5 = match$56[0];\n          var rest$2;\n          if (isBlockExpr(expr1$3)) {\n            var match$57 = partitionAdjacentTrailing(expr1$3.pexp_loc, trailing$7);\n            walkExpr(expr1$3, t, List.concat({\n                      hd: leading$5,\n                      tl: {\n                        hd: inside$8,\n                        tl: {\n                          hd: match$57[0],\n                          tl: /* [] */0\n                        }\n                      }\n                    }));\n            rest$2 = match$57[1];\n          } else {\n            attach(t.leading, expr1$3.pexp_loc, leading$5);\n            walkExpr(expr1$3, t, inside$8);\n            var match$58 = partitionAdjacentTrailing(expr1$3.pexp_loc, trailing$7);\n            attach(t.trailing, expr1$3.pexp_loc, match$58[0]);\n            rest$2 = match$58[1];\n          }\n          if (isBlockExpr(expr2$3)) {\n            _comments = rest$2;\n            _expr = expr2$3;\n            continue ;\n          }\n          var match$59 = partitionByLoc(rest$2, expr2$3.pexp_loc);\n          attach(t.leading, expr2$3.pexp_loc, match$59[0]);\n          walkExpr(expr2$3, t, match$59[1]);\n          return attach(t.trailing, expr2$3.pexp_loc, match$59[2]);\n      case /* Pexp_for */18 :\n          var expr3 = longident._4;\n          var expr2$4 = longident._2;\n          var expr1$4 = longident._1;\n          var pat = longident._0;\n          var match$60 = partitionByLoc(comments, pat.ppat_loc);\n          attach(t.leading, pat.ppat_loc, match$60[0]);\n          walkPattern(pat, t, match$60[1]);\n          var match$61 = partitionAdjacentTrailing(pat.ppat_loc, match$60[2]);\n          attach(t.trailing, pat.ppat_loc, match$61[0]);\n          var match$62 = partitionByLoc(match$61[1], expr1$4.pexp_loc);\n          attach(t.leading, expr1$4.pexp_loc, match$62[0]);\n   
       walkExpr(expr1$4, t, match$62[1]);\n          var match$63 = partitionAdjacentTrailing(expr1$4.pexp_loc, match$62[2]);\n          attach(t.trailing, expr1$4.pexp_loc, match$63[0]);\n          var match$64 = partitionByLoc(match$63[1], expr2$4.pexp_loc);\n          attach(t.leading, expr2$4.pexp_loc, match$64[0]);\n          walkExpr(expr2$4, t, match$64[1]);\n          var match$65 = partitionAdjacentTrailing(expr2$4.pexp_loc, match$64[2]);\n          var rest$3 = match$65[1];\n          attach(t.trailing, expr2$4.pexp_loc, match$65[0]);\n          if (isBlockExpr(expr3)) {\n            _comments = rest$3;\n            _expr = expr3;\n            continue ;\n          }\n          var match$66 = partitionByLoc(rest$3, expr3.pexp_loc);\n          attach(t.leading, expr3.pexp_loc, match$66[0]);\n          walkExpr(expr3, t, match$66[1]);\n          return attach(t.trailing, expr3.pexp_loc, match$66[2]);\n      case /* Pexp_constraint */19 :\n          var typexpr = longident._1;\n          var expr$4 = longident._0;\n          var match$67 = partitionByLoc(comments, expr$4.pexp_loc);\n          attach(t.leading, expr$4.pexp_loc, match$67[0]);\n          walkExpr(expr$4, t, match$67[1]);\n          var match$68 = partitionAdjacentTrailing(expr$4.pexp_loc, match$67[2]);\n          attach(t.trailing, expr$4.pexp_loc, match$68[0]);\n          var match$69 = partitionByLoc(match$68[1], typexpr.ptyp_loc);\n          attach(t.leading, typexpr.ptyp_loc, match$69[0]);\n          walkTypExpr(typexpr, t, match$69[1]);\n          return attach(t.trailing, typexpr.ptyp_loc, match$69[2]);\n      case /* Pexp_coerce */20 :\n          var typexpr$1 = longident._2;\n          var optTypexpr = longident._1;\n          var expr$5 = longident._0;\n          var match$70 = partitionByLoc(comments, expr$5.pexp_loc);\n          attach(t.leading, expr$5.pexp_loc, match$70[0]);\n          walkExpr(expr$5, t, match$70[1]);\n          var match$71 = 
partitionAdjacentTrailing(expr$5.pexp_loc, match$70[2]);\n          attach(t.trailing, expr$5.pexp_loc, match$71[0]);\n          var rest$4;\n          if (optTypexpr !== undefined) {\n            var match$72 = partitionByLoc(comments, optTypexpr.ptyp_loc);\n            attach(t.leading, optTypexpr.ptyp_loc, match$72[0]);\n            walkTypExpr(optTypexpr, t, match$72[1]);\n            var match$73 = partitionAdjacentTrailing(optTypexpr.ptyp_loc, match$72[2]);\n            attach(t.trailing, optTypexpr.ptyp_loc, match$73[0]);\n            rest$4 = match$73[1];\n          } else {\n            rest$4 = match$71[1];\n          }\n          var match$74 = partitionByLoc(rest$4, typexpr$1.ptyp_loc);\n          attach(t.leading, typexpr$1.ptyp_loc, match$74[0]);\n          walkTypExpr(typexpr$1, t, match$74[1]);\n          return attach(t.trailing, typexpr$1.ptyp_loc, match$74[2]);\n      case /* Pexp_letmodule */25 :\n          var expr2$5 = longident._2;\n          var modExpr = longident._1;\n          var stringLoc = longident._0;\n          var match$75 = partitionLeadingTrailing(comments, expr.pexp_loc);\n          var init = expr.pexp_loc;\n          attach(t.leading, {\n                loc_start: init.loc_start,\n                loc_end: modExpr.pmod_loc.loc_end,\n                loc_ghost: init.loc_ghost\n              }, match$75[0]);\n          var match$76 = partitionLeadingTrailing(match$75[1], stringLoc.loc);\n          attach(t.leading, stringLoc.loc, match$76[0]);\n          var match$77 = partitionAdjacentTrailing(stringLoc.loc, match$76[1]);\n          attach(t.trailing, stringLoc.loc, match$77[0]);\n          var match$78 = partitionByLoc(match$77[1], modExpr.pmod_loc);\n          attach(t.leading, modExpr.pmod_loc, match$78[0]);\n          walkModExpr(modExpr, t, match$78[1]);\n          var match$79 = partitionByOnSameLine(modExpr.pmod_loc, match$78[2]);\n          var rest$5 = match$79[1];\n          attach(t.trailing, modExpr.pmod_loc, 
match$79[0]);\n          if (isBlockExpr(expr2$5)) {\n            _comments = rest$5;\n            _expr = expr2$5;\n            continue ;\n          }\n          var match$80 = partitionByLoc(rest$5, expr2$5.pexp_loc);\n          attach(t.leading, expr2$5.pexp_loc, match$80[0]);\n          walkExpr(expr2$5, t, match$80[1]);\n          return attach(t.trailing, expr2$5.pexp_loc, match$80[2]);\n      case /* Pexp_letexception */26 :\n          var expr2$6 = longident._1;\n          var extensionConstructor = longident._0;\n          var match$81 = partitionLeadingTrailing(comments, expr.pexp_loc);\n          var init$1 = expr.pexp_loc;\n          attach(t.leading, {\n                loc_start: init$1.loc_start,\n                loc_end: extensionConstructor.pext_loc.loc_end,\n                loc_ghost: init$1.loc_ghost\n              }, match$81[0]);\n          var match$82 = partitionByLoc(match$81[1], extensionConstructor.pext_loc);\n          attach(t.leading, extensionConstructor.pext_loc, match$82[0]);\n          walkExtConstr(extensionConstructor, t, match$82[1]);\n          var match$83 = partitionByOnSameLine(extensionConstructor.pext_loc, match$82[2]);\n          var rest$6 = match$83[1];\n          attach(t.trailing, extensionConstructor.pext_loc, match$83[0]);\n          if (isBlockExpr(expr2$6)) {\n            _comments = rest$6;\n            _expr = expr2$6;\n            continue ;\n          }\n          var match$84 = partitionByLoc(rest$6, expr2$6.pexp_loc);\n          attach(t.leading, expr2$6.pexp_loc, match$84[0]);\n          walkExpr(expr2$6, t, match$84[1]);\n          return attach(t.trailing, expr2$6.pexp_loc, match$84[2]);\n      case /* Pexp_assert */27 :\n      case /* Pexp_lazy */28 :\n          exit$1 = 5;\n          break;\n      case /* Pexp_fun */4 :\n      case /* Pexp_newtype */31 :\n          exit = 4;\n          break;\n      case /* Pexp_pack */32 :\n          var modExpr$1 = longident._0;\n          var match$85 = 
partitionByLoc(comments, modExpr$1.pmod_loc);\n          attach(t.leading, modExpr$1.pmod_loc, match$85[0]);\n          walkModExpr(modExpr$1, t, match$85[1]);\n          return attach(t.trailing, modExpr$1.pmod_loc, match$85[2]);\n      case /* Pexp_open */33 :\n          var expr2$7 = longident._2;\n          var longident$5 = longident._1;\n          var match$86 = partitionLeadingTrailing(comments, expr.pexp_loc);\n          var init$2 = expr.pexp_loc;\n          attach(t.leading, {\n                loc_start: init$2.loc_start,\n                loc_end: longident$5.loc.loc_end,\n                loc_ghost: init$2.loc_ghost\n              }, match$86[0]);\n          var match$87 = partitionLeadingTrailing(match$86[1], longident$5.loc);\n          attach(t.leading, longident$5.loc, match$87[0]);\n          var match$88 = partitionByOnSameLine(longident$5.loc, match$87[1]);\n          var rest$7 = match$88[1];\n          attach(t.trailing, longident$5.loc, match$88[0]);\n          if (isBlockExpr(expr2$7)) {\n            _comments = rest$7;\n            _expr = expr2$7;\n            continue ;\n          }\n          var match$89 = partitionByLoc(rest$7, expr2$7.pexp_loc);\n          attach(t.leading, expr2$7.pexp_loc, match$89[0]);\n          walkExpr(expr2$7, t, match$89[1]);\n          return attach(t.trailing, expr2$7.pexp_loc, match$89[2]);\n      case /* Pexp_extension */34 :\n          var extension = longident._0;\n          var exit$6 = 0;\n          switch (extension[0].txt) {\n            case \"bs.obj\" :\n            case \"obj\" :\n                exit$6 = 6;\n                break;\n            default:\n              return walkExtension(extension, t, comments);\n          }\n          if (exit$6 === 6) {\n            var match$90 = extension[1];\n            if (match$90.TAG !== /* PStr */0) {\n              return walkExtension(extension, t, comments);\n            }\n            var match$91 = match$90._0;\n            if (!match$91) {\n          
    return walkExtension(extension, t, comments);\n            }\n            var match$92 = match$91.hd.pstr_desc;\n            if (match$92.TAG !== /* Pstr_eval */0) {\n              return walkExtension(extension, t, comments);\n            }\n            var match$93 = match$92._0.pexp_desc;\n            if (typeof match$93 === \"number\" || !(match$93.TAG === /* Pexp_record */11 && !(match$92._1 || match$91.tl))) {\n              return walkExtension(extension, t, comments);\n            } else {\n              return walkList(undefined, (function (param) {\n                            var init = param[0].loc;\n                            return {\n                                    loc_start: init.loc_start,\n                                    loc_end: param[1].pexp_loc.loc_end,\n                                    loc_ghost: init.loc_ghost\n                                  };\n                          }), walkExprRecordRow, match$93._0, t, comments);\n            }\n          }\n          break;\n      default:\n        return ;\n    }\n    if (exit$1 === 5) {\n      var expr$6 = longident._0;\n      if (isBlockExpr(expr$6)) {\n        _expr = expr$6;\n        continue ;\n      }\n      var match$94 = partitionByLoc(comments, expr$6.pexp_loc);\n      attach(t.leading, expr$6.pexp_loc, match$94[0]);\n      walkExpr(expr$6, t, match$94[1]);\n      return attach(t.trailing, expr$6.pexp_loc, match$94[2]);\n    }\n    switch (exit) {\n      case 1 :\n          return attach(t.inside, expr.pexp_loc, comments);\n      case 2 :\n          return walkList(undefined, (function (n) {\n                        return n.pexp_loc;\n                      }), walkExpr, exprs, t, comments);\n      case 3 :\n          var match$95 = partitionByLoc(comments, expr$1.pexp_loc);\n          var after$6 = match$95[2];\n          var inside$9 = match$95[1];\n          var before$3 = match$95[0];\n          var after$7;\n          if (isBlockExpr(expr$1)) {\n            var 
match$96 = partitionAdjacentTrailing(expr$1.pexp_loc, after$6);\n            walkExpr(expr$1, t, List.concat({\n                      hd: before$3,\n                      tl: {\n                        hd: inside$9,\n                        tl: {\n                          hd: match$96[0],\n                          tl: /* [] */0\n                        }\n                      }\n                    }));\n            after$7 = match$96[1];\n          } else {\n            attach(t.leading, expr$1.pexp_loc, before$3);\n            walkExpr(expr$1, t, inside$9);\n            after$7 = after$6;\n          }\n          var match$97 = partitionAdjacentTrailing(expr$1.pexp_loc, after$7);\n          attach(t.trailing, expr$1.pexp_loc, match$97[0]);\n          return walkList(undefined, (function (n) {\n                        var init = n.pc_lhs.ppat_loc;\n                        return {\n                                loc_start: init.loc_start,\n                                loc_end: n.pc_rhs.pexp_loc.loc_end,\n                                loc_ghost: init.loc_ghost\n                              };\n                      }), walkCase, cases, t, match$97[1]);\n      case 4 :\n          var match$98 = funExpr(expr);\n          var returnExpr = match$98[2];\n          var comments$6 = visitListButContinueWithRemainingComments(undefined, false, (function (param) {\n                  var pattern = param[3];\n                  var exprOpt = param[2];\n                  var match = pattern.ppat_attributes;\n                  var startPos;\n                  if (match) {\n                    var match$1 = match.hd[0];\n                    startPos = match$1.txt === \"ns.namedArgLoc\" ? 
match$1.loc.loc_start : pattern.ppat_loc.loc_start;\n                  } else {\n                    startPos = pattern.ppat_loc.loc_start;\n                  }\n                  if (exprOpt !== undefined) {\n                    var init = pattern.ppat_loc;\n                    return {\n                            loc_start: startPos,\n                            loc_end: exprOpt.pexp_loc.loc_end,\n                            loc_ghost: init.loc_ghost\n                          };\n                  }\n                  var init$1 = pattern.ppat_loc;\n                  return {\n                          loc_start: startPos,\n                          loc_end: init$1.loc_end,\n                          loc_ghost: init$1.loc_ghost\n                        };\n                }), walkExprPararameter, match$98[1], t, comments);\n          var match$99 = returnExpr.pexp_desc;\n          var exit$7 = 0;\n          if (typeof match$99 === \"number\" || match$99.TAG !== /* Pexp_constraint */19) {\n            exit$7 = 5;\n          } else {\n            var typ = match$99._1;\n            var expr$7 = match$99._0;\n            if (expr$7.pexp_loc.loc_start.pos_cnum >= typ.ptyp_loc.loc_end.pos_cnum) {\n              var match$100 = partitionByLoc(comments$6, typ.ptyp_loc);\n              attach(t.leading, typ.ptyp_loc, match$100[0]);\n              walkTypExpr(typ, t, match$100[1]);\n              var match$101 = partitionAdjacentTrailing(typ.ptyp_loc, match$100[2]);\n              var comments$7 = match$101[1];\n              attach(t.trailing, typ.ptyp_loc, match$101[0]);\n              if (isBlockExpr(expr$7)) {\n                _comments = comments$7;\n                _expr = expr$7;\n                continue ;\n              }\n              var match$102 = partitionByLoc(comments$7, expr$7.pexp_loc);\n              attach(t.leading, expr$7.pexp_loc, match$102[0]);\n              walkExpr(expr$7, t, match$102[1]);\n              return attach(t.trailing, 
expr$7.pexp_loc, match$102[2]);\n            }\n            exit$7 = 5;\n          }\n          if (exit$7 === 5) {\n            if (isBlockExpr(returnExpr)) {\n              _comments = comments$6;\n              _expr = returnExpr;\n              continue ;\n            }\n            var match$103 = partitionByLoc(comments$6, returnExpr.pexp_loc);\n            attach(t.leading, returnExpr.pexp_loc, match$103[0]);\n            walkExpr(returnExpr, t, match$103[1]);\n            return attach(t.trailing, returnExpr.pexp_loc, match$103[2]);\n          }\n          break;\n      \n    }\n  };\n}\n\nfunction walkExprPararameter(param, t, comments) {\n  var pattern = param[3];\n  var exprOpt = param[2];\n  var match = partitionByLoc(comments, pattern.ppat_loc);\n  var trailing = match[2];\n  attach(t.leading, pattern.ppat_loc, match[0]);\n  walkPattern(pattern, t, match[1]);\n  if (exprOpt === undefined) {\n    return attach(t.trailing, pattern.ppat_loc, trailing);\n  }\n  var match$1 = partitionAdjacentTrailing(pattern.ppat_loc, trailing);\n  var rest = match$1[1];\n  attach(t.trailing, pattern.ppat_loc, trailing);\n  if (isBlockExpr(exprOpt)) {\n    return walkExpr(exprOpt, t, rest);\n  }\n  var match$2 = partitionByLoc(rest, exprOpt.pexp_loc);\n  attach(t.leading, exprOpt.pexp_loc, match$2[0]);\n  walkExpr(exprOpt, t, match$2[1]);\n  return attach(t.trailing, exprOpt.pexp_loc, match$2[2]);\n}\n\nfunction walkExprArgument(param, t, comments) {\n  var expr = param[1];\n  var match = expr.pexp_attributes;\n  if (match) {\n    var match$1 = match.hd[0];\n    if (match$1.txt === \"ns.namedArgLoc\") {\n      var loc = match$1.loc;\n      var match$2 = partitionLeadingTrailing(comments, loc);\n      attach(t.leading, loc, match$2[0]);\n      var match$3 = partitionAdjacentTrailing(loc, match$2[1]);\n      attach(t.trailing, loc, match$3[0]);\n      var match$4 = partitionByLoc(match$3[1], expr.pexp_loc);\n      attach(t.leading, expr.pexp_loc, match$4[0]);\n      
walkExpr(expr, t, match$4[1]);\n      return attach(t.trailing, expr.pexp_loc, match$4[2]);\n    }\n    \n  }\n  var match$5 = partitionByLoc(comments, expr.pexp_loc);\n  attach(t.leading, expr.pexp_loc, match$5[0]);\n  walkExpr(expr, t, match$5[1]);\n  return attach(t.trailing, expr.pexp_loc, match$5[2]);\n}\n\nfunction walkCase($$case, t, comments) {\n  var match = partitionByLoc(comments, $$case.pc_lhs.ppat_loc);\n  walkPattern($$case.pc_lhs, t, List.concat({\n            hd: match[0],\n            tl: {\n              hd: match[1],\n              tl: /* [] */0\n            }\n          }));\n  var match$1 = partitionAdjacentTrailing($$case.pc_lhs.ppat_loc, match[2]);\n  var rest = match$1[1];\n  attach(t.trailing, $$case.pc_lhs.ppat_loc, match$1[0]);\n  var expr = $$case.pc_guard;\n  var comments$1;\n  if (expr !== undefined) {\n    var match$2 = partitionByLoc(rest, expr.pexp_loc);\n    var inside = match$2[1];\n    var before = match$2[0];\n    var match$3 = partitionAdjacentTrailing(expr.pexp_loc, match$2[2]);\n    var afterExpr = match$3[0];\n    if (isBlockExpr(expr)) {\n      walkExpr(expr, t, List.concat({\n                hd: before,\n                tl: {\n                  hd: inside,\n                  tl: {\n                    hd: afterExpr,\n                    tl: /* [] */0\n                  }\n                }\n              }));\n    } else {\n      attach(t.leading, expr.pexp_loc, before);\n      walkExpr(expr, t, inside);\n      attach(t.trailing, expr.pexp_loc, afterExpr);\n    }\n    comments$1 = match$3[1];\n  } else {\n    comments$1 = rest;\n  }\n  if (isBlockExpr($$case.pc_rhs)) {\n    return walkExpr($$case.pc_rhs, t, comments$1);\n  }\n  var match$4 = partitionByLoc(comments$1, $$case.pc_rhs.pexp_loc);\n  attach(t.leading, $$case.pc_rhs.pexp_loc, match$4[0]);\n  walkExpr($$case.pc_rhs, t, match$4[1]);\n  return attach(t.trailing, $$case.pc_rhs.pexp_loc, match$4[2]);\n}\n\nfunction walkExprRecordRow(param, t, comments) {\n  var expr 
= param[1];\n  var longident = param[0];\n  var match = partitionLeadingTrailing(comments, longident.loc);\n  attach(t.leading, longident.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(longident.loc, match[1]);\n  attach(t.trailing, longident.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], expr.pexp_loc);\n  attach(t.leading, expr.pexp_loc, match$2[0]);\n  walkExpr(expr, t, match$2[1]);\n  return attach(t.trailing, expr.pexp_loc, match$2[2]);\n}\n\nfunction walkExtConstr(extConstr, t, comments) {\n  var match = partitionLeadingTrailing(comments, extConstr.pext_name.loc);\n  attach(t.leading, extConstr.pext_name.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(extConstr.pext_name.loc, match[1]);\n  attach(t.trailing, extConstr.pext_name.loc, match$1[0]);\n  return walkExtensionConstructorKind(extConstr.pext_kind, t, match$1[1]);\n}\n\nfunction walkExtensionConstructorKind(kind, t, comments) {\n  if (kind.TAG === /* Pext_decl */0) {\n    var maybeTypExpr = kind._1;\n    var rest = walkConstructorArguments(kind._0, t, comments);\n    if (maybeTypExpr === undefined) {\n      return ;\n    }\n    var match = partitionByLoc(rest, maybeTypExpr.ptyp_loc);\n    attach(t.leading, maybeTypExpr.ptyp_loc, match[0]);\n    walkTypExpr(maybeTypExpr, t, match[1]);\n    return attach(t.trailing, maybeTypExpr.ptyp_loc, match[2]);\n  }\n  var longident = kind._0;\n  var match$1 = partitionLeadingTrailing(comments, longident.loc);\n  attach(t.leading, longident.loc, match$1[0]);\n  return attach(t.trailing, longident.loc, match$1[1]);\n}\n\nfunction walkModExpr(modExpr, t, comments) {\n  var longident = modExpr.pmod_desc;\n  switch (longident.TAG | 0) {\n    case /* Pmod_ident */0 :\n        var longident$1 = longident._0;\n        var match = partitionLeadingTrailing(comments, longident$1.loc);\n        attach(t.leading, longident$1.loc, match[0]);\n        return attach(t.trailing, longident$1.loc, match[1]);\n    case /* Pmod_structure */1 :\n     
   var structure = longident._0;\n        if (structure) {\n          return walkStructure(structure, t, comments);\n        } else {\n          return attach(t.inside, modExpr.pmod_loc, comments);\n        }\n    case /* Pmod_functor */2 :\n        var match$1 = modExprFunctor(modExpr);\n        var returnModExpr = match$1[1];\n        var comments$1 = visitListButContinueWithRemainingComments(undefined, false, (function (param) {\n                var modTypeOption = param[2];\n                var lbl = param[1];\n                if (modTypeOption === undefined) {\n                  return lbl.loc;\n                }\n                var init = lbl.loc;\n                return {\n                        loc_start: init.loc_start,\n                        loc_end: modTypeOption.pmty_loc.loc_end,\n                        loc_ghost: init.loc_ghost\n                      };\n              }), walkModExprParameter, match$1[0], t, comments);\n        var match$2 = returnModExpr.pmod_desc;\n        if (match$2.TAG === /* Pmod_constraint */4) {\n          var modType = match$2._1;\n          var modExpr$1 = match$2._0;\n          if (modType.pmty_loc.loc_end.pos_cnum <= modExpr$1.pmod_loc.loc_start.pos_cnum) {\n            var match$3 = partitionByLoc(comments$1, modType.pmty_loc);\n            attach(t.leading, modType.pmty_loc, match$3[0]);\n            walkModType(modType, t, match$3[1]);\n            var match$4 = partitionAdjacentTrailing(modType.pmty_loc, match$3[2]);\n            attach(t.trailing, modType.pmty_loc, match$4[0]);\n            var match$5 = partitionByLoc(match$4[1], modExpr$1.pmod_loc);\n            attach(t.leading, modExpr$1.pmod_loc, match$5[0]);\n            walkModExpr(modExpr$1, t, match$5[1]);\n            return attach(t.trailing, modExpr$1.pmod_loc, match$5[2]);\n          }\n          \n        }\n        var match$6 = partitionByLoc(comments$1, returnModExpr.pmod_loc);\n        attach(t.leading, returnModExpr.pmod_loc, match$6[0]);\n      
  walkModExpr(returnModExpr, t, match$6[1]);\n        return attach(t.trailing, returnModExpr.pmod_loc, match$6[2]);\n    case /* Pmod_apply */3 :\n        var modExprs = modExprApply(modExpr);\n        return walkList(undefined, (function (n) {\n                      return n.pmod_loc;\n                    }), walkModExpr, modExprs, t, comments);\n    case /* Pmod_constraint */4 :\n        var modtype = longident._1;\n        var modexpr = longident._0;\n        if (Caml_obj.caml_greaterequal(modtype.pmty_loc.loc_start, modexpr.pmod_loc.loc_end)) {\n          var match$7 = partitionByLoc(comments, modexpr.pmod_loc);\n          attach(t.leading, modexpr.pmod_loc, match$7[0]);\n          walkModExpr(modexpr, t, match$7[1]);\n          var match$8 = partitionAdjacentTrailing(modexpr.pmod_loc, match$7[2]);\n          attach(t.trailing, modexpr.pmod_loc, match$8[0]);\n          var match$9 = partitionByLoc(match$8[1], modtype.pmty_loc);\n          attach(t.leading, modtype.pmty_loc, match$9[0]);\n          walkModType(modtype, t, match$9[1]);\n          return attach(t.trailing, modtype.pmty_loc, match$9[2]);\n        }\n        var match$10 = partitionByLoc(comments, modtype.pmty_loc);\n        attach(t.leading, modtype.pmty_loc, match$10[0]);\n        walkModType(modtype, t, match$10[1]);\n        var match$11 = partitionAdjacentTrailing(modtype.pmty_loc, match$10[2]);\n        attach(t.trailing, modtype.pmty_loc, match$11[0]);\n        var match$12 = partitionByLoc(match$11[1], modexpr.pmod_loc);\n        attach(t.leading, modexpr.pmod_loc, match$12[0]);\n        walkModExpr(modexpr, t, match$12[1]);\n        return attach(t.trailing, modexpr.pmod_loc, match$12[2]);\n    case /* Pmod_unpack */5 :\n        var expr = longident._0;\n        var match$13 = partitionByLoc(comments, expr.pexp_loc);\n        attach(t.leading, expr.pexp_loc, match$13[0]);\n        walkExpr(expr, t, match$13[1]);\n        return attach(t.trailing, expr.pexp_loc, match$13[2]);\n    case /* 
Pmod_extension */6 :\n        return walkExtension(longident._0, t, comments);\n    \n  }\n}\n\nfunction walkModExprParameter(parameter, t, comments) {\n  var modTypeOption = parameter[2];\n  var lbl = parameter[1];\n  var match = partitionLeadingTrailing(comments, lbl.loc);\n  var trailing = match[1];\n  attach(t.leading, lbl.loc, match[0]);\n  if (modTypeOption === undefined) {\n    return attach(t.trailing, lbl.loc, trailing);\n  }\n  var match$1 = partitionAdjacentTrailing(lbl.loc, trailing);\n  attach(t.trailing, lbl.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], modTypeOption.pmty_loc);\n  attach(t.leading, modTypeOption.pmty_loc, match$2[0]);\n  walkModType(modTypeOption, t, match$2[1]);\n  return attach(t.trailing, modTypeOption.pmty_loc, match$2[2]);\n}\n\nfunction walkModType(modType, t, comments) {\n  var signature = modType.pmty_desc;\n  switch (signature.TAG | 0) {\n    case /* Pmty_signature */1 :\n        var signature$1 = signature._0;\n        if (signature$1) {\n          return walkSignature(signature$1, t, comments);\n        } else {\n          return attach(t.inside, modType.pmty_loc, comments);\n        }\n    case /* Pmty_functor */2 :\n        var match = functorType(modType);\n        var returnModType = match[1];\n        var comments$1 = visitListButContinueWithRemainingComments(undefined, false, (function (param) {\n                var modTypeOption = param[2];\n                var lbl = param[1];\n                if (modTypeOption === undefined) {\n                  return lbl.loc;\n                }\n                if (lbl.txt === \"_\") {\n                  return modTypeOption.pmty_loc;\n                }\n                var init = lbl.loc;\n                return {\n                        loc_start: init.loc_start,\n                        loc_end: modTypeOption.pmty_loc.loc_end,\n                        loc_ghost: init.loc_ghost\n                      };\n              }), walkModTypeParameter, match[0], t, 
comments);\n        var match$1 = partitionByLoc(comments$1, returnModType.pmty_loc);\n        attach(t.leading, returnModType.pmty_loc, match$1[0]);\n        walkModType(returnModType, t, match$1[1]);\n        return attach(t.trailing, returnModType.pmty_loc, match$1[2]);\n    case /* Pmty_with */3 :\n        var modType$1 = signature._0;\n        var match$2 = partitionByLoc(comments, modType$1.pmty_loc);\n        attach(t.leading, modType$1.pmty_loc, match$2[0]);\n        walkModType(modType$1, t, match$2[1]);\n        return attach(t.trailing, modType$1.pmty_loc, match$2[2]);\n    case /* Pmty_typeof */4 :\n        var modExpr = signature._0;\n        var match$3 = partitionByLoc(comments, modExpr.pmod_loc);\n        attach(t.leading, modExpr.pmod_loc, match$3[0]);\n        walkModExpr(modExpr, t, match$3[1]);\n        return attach(t.trailing, modExpr.pmod_loc, match$3[2]);\n    case /* Pmty_extension */5 :\n        return walkExtension(signature._0, t, comments);\n    case /* Pmty_ident */0 :\n    case /* Pmty_alias */6 :\n        break;\n    \n  }\n  var longident = signature._0;\n  var match$4 = partitionLeadingTrailing(comments, longident.loc);\n  attach(t.leading, longident.loc, match$4[0]);\n  return attach(t.trailing, longident.loc, match$4[1]);\n}\n\nfunction walkModTypeParameter(param, t, comments) {\n  var modTypeOption = param[2];\n  var lbl = param[1];\n  var match = partitionLeadingTrailing(comments, lbl.loc);\n  var trailing = match[1];\n  attach(t.leading, lbl.loc, match[0]);\n  if (modTypeOption === undefined) {\n    return attach(t.trailing, lbl.loc, trailing);\n  }\n  var match$1 = partitionAdjacentTrailing(lbl.loc, trailing);\n  attach(t.trailing, lbl.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], modTypeOption.pmty_loc);\n  attach(t.leading, modTypeOption.pmty_loc, match$2[0]);\n  walkModType(modTypeOption, t, match$2[1]);\n  return attach(t.trailing, modTypeOption.pmty_loc, match$2[2]);\n}\n\nfunction walkPattern(_pat, t, 
comments) {\n  while(true) {\n    var pat = _pat;\n    var patterns = pat.ppat_desc;\n    if (comments === /* [] */0) {\n      return ;\n    }\n    var exit = 0;\n    if (typeof patterns === \"number\") {\n      return ;\n    }\n    switch (patterns.TAG | 0) {\n      case /* Ppat_alias */1 :\n          var alias = patterns._1;\n          var pat$1 = patterns._0;\n          var match = partitionByLoc(comments, pat$1.ppat_loc);\n          var leading = match[0];\n          attach(t.leading, pat$1.ppat_loc, leading);\n          walkPattern(pat$1, t, match[1]);\n          var match$1 = partitionAdjacentTrailing(pat$1.ppat_loc, match[2]);\n          attach(t.leading, pat$1.ppat_loc, leading);\n          attach(t.trailing, pat$1.ppat_loc, match$1[0]);\n          var match$2 = partitionLeadingTrailing(match$1[1], alias.loc);\n          attach(t.leading, alias.loc, match$2[0]);\n          return attach(t.trailing, alias.loc, match$2[1]);\n      case /* Ppat_tuple */4 :\n          var patterns$1 = patterns._0;\n          if (patterns$1) {\n            return walkList(undefined, (function (n) {\n                          return n.ppat_loc;\n                        }), walkPattern, patterns$1, t, comments);\n          }\n          break;\n      case /* Ppat_construct */5 :\n          var constr = patterns._0;\n          var match$3 = constr.txt;\n          var exit$1 = 0;\n          switch (match$3.TAG | 0) {\n            case /* Lident */0 :\n                switch (match$3._0) {\n                  case \"::\" :\n                      return walkList(undefined, (function (n) {\n                                    return n.ppat_loc;\n                                  }), walkPattern, collectListPatterns(/* [] */0, pat), t, comments);\n                  case \"()\" :\n                  case \"[]\" :\n                      break;\n                  default:\n                    exit$1 = 3;\n                }\n                break;\n            case /* Ldot */1 :\n            
case /* Lapply */2 :\n                exit$1 = 3;\n                break;\n            \n          }\n          if (exit$1 === 3) {\n            var pat$2 = patterns._1;\n            if (pat$2 !== undefined) {\n              var match$4 = partitionLeadingTrailing(comments, constr.loc);\n              attach(t.leading, constr.loc, match$4[0]);\n              var match$5 = partitionAdjacentTrailing(constr.loc, match$4[1]);\n              attach(t.trailing, constr.loc, match$5[0]);\n              var match$6 = partitionByLoc(match$5[1], pat$2.ppat_loc);\n              attach(t.leading, pat$2.ppat_loc, match$6[0]);\n              walkPattern(pat$2, t, match$6[1]);\n              return attach(t.trailing, pat$2.ppat_loc, match$6[2]);\n            }\n            var match$7 = partitionLeadingTrailing(comments, constr.loc);\n            attach(t.leading, constr.loc, match$7[0]);\n            return attach(t.trailing, constr.loc, match$7[1]);\n          }\n          break;\n      case /* Ppat_variant */6 :\n          var pat$3 = patterns._1;\n          if (pat$3 === undefined) {\n            return ;\n          }\n          _pat = pat$3;\n          continue ;\n      case /* Ppat_record */7 :\n          return walkList(undefined, (function (param) {\n                        var init = param[0].loc;\n                        return {\n                                loc_start: init.loc_start,\n                                loc_end: param[1].ppat_loc.loc_end,\n                                loc_ghost: init.loc_ghost\n                              };\n                      }), walkPatternRecordRow, patterns._0, t, comments);\n      case /* Ppat_array */8 :\n          var patterns$2 = patterns._0;\n          if (patterns$2) {\n            return walkList(undefined, (function (n) {\n                          return n.ppat_loc;\n                        }), walkPattern, patterns$2, t, comments);\n          }\n          break;\n      case /* Ppat_or */9 :\n          return 
walkList(undefined, (function (pattern) {\n                        return pattern.ppat_loc;\n                      }), (function (pattern) {\n                        return function (param, param$1) {\n                          return walkPattern(pattern, param, param$1);\n                        };\n                      }), Res_parsetree_viewer.collectOrPatternChain(pat), t, comments);\n      case /* Ppat_constraint */10 :\n          var typ = patterns._1;\n          var pattern = patterns._0;\n          var match$8 = partitionByLoc(comments, pattern.ppat_loc);\n          attach(t.leading, pattern.ppat_loc, match$8[0]);\n          walkPattern(pattern, t, match$8[1]);\n          var match$9 = partitionAdjacentTrailing(pattern.ppat_loc, match$8[2]);\n          attach(t.trailing, pattern.ppat_loc, match$9[0]);\n          var match$10 = partitionByLoc(match$9[1], typ.ptyp_loc);\n          attach(t.leading, typ.ptyp_loc, match$10[0]);\n          walkTypExpr(typ, t, match$10[1]);\n          return attach(t.trailing, typ.ptyp_loc, match$10[2]);\n      case /* Ppat_type */11 :\n          return ;\n      case /* Ppat_unpack */13 :\n          var stringLoc = patterns._0;\n          var match$11 = partitionLeadingTrailing(comments, stringLoc.loc);\n          attach(t.leading, stringLoc.loc, match$11[0]);\n          return attach(t.trailing, stringLoc.loc, match$11[1]);\n      case /* Ppat_lazy */12 :\n      case /* Ppat_exception */14 :\n          exit = 2;\n          break;\n      case /* Ppat_extension */15 :\n          return walkExtension(patterns._0, t, comments);\n      default:\n        return ;\n    }\n    if (exit === 2) {\n      var pattern$1 = patterns._0;\n      var match$12 = partitionByLoc(comments, pattern$1.ppat_loc);\n      attach(t.leading, pattern$1.ppat_loc, match$12[0]);\n      walkPattern(pattern$1, t, match$12[1]);\n      return attach(t.trailing, pattern$1.ppat_loc, match$12[2]);\n    }\n    return attach(t.inside, pat.ppat_loc, comments);\n  
};\n}\n\nfunction walkPatternRecordRow(row, t, comments) {\n  var longident = row[0];\n  var ident = longident.txt;\n  switch (ident.TAG | 0) {\n    case /* Lident */0 :\n        var match = row[1].ppat_desc;\n        if (typeof match !== \"number\" && match.TAG === /* Ppat_var */0 && ident._0 === match._0.txt) {\n          var longidentLoc = longident.loc;\n          var match$1 = partitionLeadingTrailing(comments, longidentLoc);\n          attach(t.leading, longidentLoc, match$1[0]);\n          return attach(t.trailing, longidentLoc, match$1[1]);\n        }\n        break;\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        break;\n    \n  }\n  var pattern = row[1];\n  var match$2 = partitionLeadingTrailing(comments, longident.loc);\n  attach(t.leading, longident.loc, match$2[0]);\n  var match$3 = partitionAdjacentTrailing(longident.loc, match$2[1]);\n  attach(t.trailing, longident.loc, match$3[0]);\n  var match$4 = partitionByLoc(match$3[1], pattern.ppat_loc);\n  attach(t.leading, pattern.ppat_loc, match$4[0]);\n  walkPattern(pattern, t, match$4[1]);\n  return attach(t.trailing, pattern.ppat_loc, match$4[2]);\n}\n\nfunction walkTypExpr(typ, t, comments) {\n  var typexprs = typ.ptyp_desc;\n  if (comments === /* [] */0) {\n    return ;\n  }\n  if (typeof typexprs === \"number\") {\n    return ;\n  }\n  switch (typexprs.TAG | 0) {\n    case /* Ptyp_arrow */1 :\n        var match = arrowType(typ);\n        var typexpr = match[2];\n        var comments$1 = walkTypeParameters(match[1], t, comments);\n        var match$1 = partitionByLoc(comments$1, typexpr.ptyp_loc);\n        attach(t.leading, typexpr.ptyp_loc, match$1[0]);\n        walkTypExpr(typexpr, t, match$1[1]);\n        return attach(t.trailing, typexpr.ptyp_loc, match$1[2]);\n    case /* Ptyp_tuple */2 :\n        return walkList(undefined, (function (n) {\n                      return n.ptyp_loc;\n                    }), walkTypExpr, typexprs._0, t, comments);\n    case /* Ptyp_constr */3 :\n        
var longident = typexprs._0;\n        var match$2 = partitionLeadingTrailing(comments, longident.loc);\n        var match$3 = partitionAdjacentTrailing(longident.loc, comments);\n        attach(t.leading, longident.loc, match$2[0]);\n        attach(t.trailing, longident.loc, match$3[0]);\n        return walkList(undefined, (function (n) {\n                      return n.ptyp_loc;\n                    }), walkTypExpr, typexprs._1, t, match$3[1]);\n    case /* Ptyp_object */4 :\n        return walkTypObjectFields(typexprs._0, t, comments);\n    case /* Ptyp_alias */6 :\n        var typexpr$1 = typexprs._0;\n        var match$4 = partitionByLoc(comments, typexpr$1.ptyp_loc);\n        attach(t.leading, typexpr$1.ptyp_loc, match$4[0]);\n        walkTypExpr(typexpr$1, t, match$4[1]);\n        return attach(t.trailing, typexpr$1.ptyp_loc, match$4[2]);\n    case /* Ptyp_poly */8 :\n        var typexpr$2 = typexprs._1;\n        var comments$2 = visitListButContinueWithRemainingComments(undefined, false, (function (n) {\n                return n.loc;\n              }), (function (longident, t, comments) {\n                var match = partitionLeadingTrailing(comments, longident.loc);\n                attach(t.leading, longident.loc, match[0]);\n                return attach(t.trailing, longident.loc, match[1]);\n              }), typexprs._0, t, comments);\n        var match$5 = partitionByLoc(comments$2, typexpr$2.ptyp_loc);\n        attach(t.leading, typexpr$2.ptyp_loc, match$5[0]);\n        walkTypExpr(typexpr$2, t, match$5[1]);\n        return attach(t.trailing, typexpr$2.ptyp_loc, match$5[2]);\n    case /* Ptyp_package */9 :\n        return walkPackageType(typexprs._0, t, comments);\n    case /* Ptyp_extension */10 :\n        return walkExtension(typexprs._0, t, comments);\n    default:\n      return ;\n  }\n}\n\nfunction walkTypObjectFields(fields, t, comments) {\n  return walkList(undefined, (function (field) {\n                if (field.TAG !== /* Otag */0) {\n       
           return $$Location.none;\n                }\n                var init = field._0.loc;\n                return {\n                        loc_start: init.loc_start,\n                        loc_end: field._2.ptyp_loc.loc_end,\n                        loc_ghost: init.loc_ghost\n                      };\n              }), walkTypObjectField, fields, t, comments);\n}\n\nfunction walkTypObjectField(field, t, comments) {\n  if (field.TAG !== /* Otag */0) {\n    return ;\n  }\n  var typexpr = field._2;\n  var lbl = field._0;\n  var match = partitionLeadingTrailing(comments, lbl.loc);\n  attach(t.leading, lbl.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(lbl.loc, match[1]);\n  attach(t.trailing, lbl.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], typexpr.ptyp_loc);\n  attach(t.leading, typexpr.ptyp_loc, match$2[0]);\n  walkTypExpr(typexpr, t, match$2[1]);\n  return attach(t.trailing, typexpr.ptyp_loc, match$2[2]);\n}\n\nfunction walkTypeParameters(typeParameters, t, comments) {\n  return visitListButContinueWithRemainingComments(undefined, false, (function (param) {\n                var typexpr = param[2];\n                var match = typexpr.ptyp_attributes;\n                if (!match) {\n                  return typexpr.ptyp_loc;\n                }\n                var match$1 = match.hd[0];\n                if (match$1.txt !== \"ns.namedArgLoc\") {\n                  return typexpr.ptyp_loc;\n                }\n                var loc = match$1.loc;\n                return {\n                        loc_start: loc.loc_start,\n                        loc_end: typexpr.ptyp_loc.loc_end,\n                        loc_ghost: loc.loc_ghost\n                      };\n              }), walkTypeParameter, typeParameters, t, comments);\n}\n\nfunction walkTypeParameter(param, t, comments) {\n  var typexpr = param[2];\n  var match = partitionByLoc(comments, typexpr.ptyp_loc);\n  attach(t.leading, typexpr.ptyp_loc, match[0]);\n  
walkTypExpr(typexpr, t, match[1]);\n  return attach(t.trailing, typexpr.ptyp_loc, match[2]);\n}\n\nfunction walkPackageType(packageType, t, comments) {\n  var longident = packageType[0];\n  var match = partitionLeadingTrailing(comments, longident.loc);\n  attach(t.leading, longident.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(longident.loc, match[1]);\n  attach(t.trailing, longident.loc, match$1[0]);\n  return walkPackageConstraints(packageType[1], t, match$1[1]);\n}\n\nfunction walkPackageConstraints(packageConstraints, t, comments) {\n  return walkList(undefined, (function (param) {\n                var init = param[0].loc;\n                return {\n                        loc_start: init.loc_start,\n                        loc_end: param[1].ptyp_loc.loc_end,\n                        loc_ghost: init.loc_ghost\n                      };\n              }), walkPackageConstraint, packageConstraints, t, comments);\n}\n\nfunction walkPackageConstraint(packageConstraint, t, comments) {\n  var typexpr = packageConstraint[1];\n  var longident = packageConstraint[0];\n  var match = partitionLeadingTrailing(comments, longident.loc);\n  attach(t.leading, longident.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(longident.loc, match[1]);\n  attach(t.trailing, longident.loc, match$1[0]);\n  var match$2 = partitionByLoc(match$1[1], typexpr.ptyp_loc);\n  attach(t.leading, typexpr.ptyp_loc, match$2[0]);\n  walkTypExpr(typexpr, t, match$2[1]);\n  return attach(t.trailing, typexpr.ptyp_loc, match$2[2]);\n}\n\nfunction walkExtension(extension, t, comments) {\n  var id = extension[0];\n  var match = partitionLeadingTrailing(comments, id.loc);\n  attach(t.leading, id.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(id.loc, match[1]);\n  attach(t.trailing, id.loc, match$1[0]);\n  return walkPayload(extension[1], t, match$1[1]);\n}\n\nfunction walkAttribute(param, t, comments) {\n  var id = param[0];\n  var match = partitionLeadingTrailing(comments, 
id.loc);\n  attach(t.leading, id.loc, match[0]);\n  var match$1 = partitionAdjacentTrailing(id.loc, match[1]);\n  attach(t.trailing, id.loc, match$1[0]);\n  return walkPayload(param[1], t, match$1[1]);\n}\n\nfunction walkPayload(payload, t, comments) {\n  if (payload.TAG === /* PStr */0) {\n    return walkStructure(payload._0, t, comments);\n  }\n  \n}\n\nvar $$Comment;\n\nvar Doc;\n\nexport {\n  $$Comment ,\n  Doc ,\n  make ,\n  copy ,\n  empty ,\n  log ,\n  attach ,\n  partitionByLoc ,\n  partitionLeadingTrailing ,\n  partitionByOnSameLine ,\n  partitionAdjacentTrailing ,\n  collectListPatterns ,\n  collectListExprs ,\n  arrowType ,\n  modExprApply ,\n  modExprFunctor ,\n  functorType ,\n  funExpr ,\n  isBlockExpr ,\n  isIfThenElseExpr ,\n  walkStructure ,\n  walkStructureItem ,\n  walkValueDescription ,\n  walkTypeExtension ,\n  walkIncludeDeclaration ,\n  walkModuleTypeDeclaration ,\n  walkModuleBinding ,\n  walkSignature ,\n  walkSignatureItem ,\n  walkIncludeDescription ,\n  walkModuleDeclaration ,\n  walkList ,\n  visitListButContinueWithRemainingComments ,\n  walkValueBindings ,\n  walkOpenDescription ,\n  walkTypeDeclarations ,\n  walkTypeParam ,\n  walkTypeDeclaration ,\n  walkLabelDeclarations ,\n  walkLabelDeclaration ,\n  walkConstructorDeclarations ,\n  walkConstructorDeclaration ,\n  walkConstructorArguments ,\n  walkValueBinding ,\n  walkExpr ,\n  walkExprPararameter ,\n  walkExprArgument ,\n  walkCase ,\n  walkExprRecordRow ,\n  walkExtConstr ,\n  walkExtensionConstructorKind ,\n  walkModExpr ,\n  walkModExprParameter ,\n  walkModType ,\n  walkModTypeParameter ,\n  walkPattern ,\n  walkPatternRecordRow ,\n  walkTypExpr ,\n  walkTypObjectFields ,\n  walkTypObjectField ,\n  walkTypeParameters ,\n  walkTypeParameter ,\n  walkPackageType ,\n  walkPackageConstraints ,\n  walkPackageConstraint ,\n  walkExtension ,\n  walkAttribute ,\n  walkPayload ,\n  \n}\n/* empty Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_comments_table.res",
    "content": "module Comment = Res_comment\nmodule Doc = Res_doc\n\ntype t = {\n  leading: Hashtbl.t<Location.t, list<Comment.t>>,\n  inside: Hashtbl.t<Location.t, list<Comment.t>>,\n  trailing: Hashtbl.t<Location.t, list<Comment.t>>,\n}\n\nlet make = () => {\n  leading: Hashtbl.create(100),\n  inside: Hashtbl.create(100),\n  trailing: Hashtbl.create(100),\n}\n\nlet copy = tbl => {\n  leading: Hashtbl.copy(tbl.leading),\n  inside: Hashtbl.copy(tbl.inside),\n  trailing: Hashtbl.copy(tbl.trailing),\n}\n\nlet empty = make()\n\n@live\nlet log = t => {\n  open Location\n  let leadingStuff = Hashtbl.fold((k: Location.t, v: list<Comment.t>, acc) => {\n    let loc = Doc.concat(list{\n      Doc.lbracket,\n      Doc.text(string_of_int(k.loc_start.pos_lnum)),\n      Doc.text(\":\"),\n      Doc.text(string_of_int(k.loc_start.pos_cnum - k.loc_start.pos_bol)),\n      Doc.text(\"-\"),\n      Doc.text(string_of_int(k.loc_end.pos_lnum)),\n      Doc.text(\":\"),\n      Doc.text(string_of_int(k.loc_end.pos_cnum - k.loc_end.pos_bol)),\n      Doc.rbracket,\n    })\n    let doc = Doc.breakableGroup(\n      ~forceBreak=true,\n      Doc.concat(list{\n        loc,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.line,\n            Doc.join(~sep=Doc.comma, List.map(c => Doc.text(Comment.txt(c)), v)),\n          }),\n        ),\n        Doc.line,\n      }),\n    )\n    list{doc, ...acc}\n  }, t.leading, list{})\n\n  let trailingStuff = Hashtbl.fold((k: Location.t, v: list<Comment.t>, acc) => {\n    let loc = Doc.concat(list{\n      Doc.lbracket,\n      Doc.text(string_of_int(k.loc_start.pos_lnum)),\n      Doc.text(\":\"),\n      Doc.text(string_of_int(k.loc_start.pos_cnum - k.loc_start.pos_bol)),\n      Doc.text(\"-\"),\n      Doc.text(string_of_int(k.loc_end.pos_lnum)),\n      Doc.text(\":\"),\n      Doc.text(string_of_int(k.loc_end.pos_cnum - k.loc_end.pos_bol)),\n      Doc.rbracket,\n    })\n    let doc = Doc.breakableGroup(\n      ~forceBreak=true,\n      
Doc.concat(list{\n        loc,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.line,\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n              List.map(c => Doc.text(Comment.txt(c)), v),\n            ),\n          }),\n        ),\n        Doc.line,\n      }),\n    )\n    list{doc, ...acc}\n  }, t.trailing, list{})\n\n  Doc.breakableGroup(\n    ~forceBreak=true,\n    Doc.concat(list{\n      Doc.text(\"leading comments:\"),\n      Doc.line,\n      Doc.indent(Doc.concat(leadingStuff)),\n      Doc.line,\n      Doc.line,\n      Doc.text(\"trailing comments:\"),\n      Doc.indent(Doc.concat(trailingStuff)),\n      Doc.line,\n      Doc.line,\n    }),\n  )\n  |> Doc.toString(~width=80)\n  |> print_endline\n}\n\nlet attach = (tbl, loc, comments) =>\n  switch comments {\n  | list{} => ()\n  | comments => Hashtbl.replace(tbl, loc, comments)\n  }\n\nlet partitionByLoc = (comments, loc) => {\n  let rec loop = ((leading, inside, trailing), comments) => {\n    open Location\n    switch comments {\n    | list{comment, ...rest} =>\n      let cmtLoc = Comment.loc(comment)\n      if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum {\n        loop((list{comment, ...leading}, inside, trailing), rest)\n      } else if cmtLoc.loc_start.pos_cnum >= loc.loc_end.pos_cnum {\n        loop((leading, inside, list{comment, ...trailing}), rest)\n      } else {\n        loop((leading, list{comment, ...inside}, trailing), rest)\n      }\n    | list{} => (List.rev(leading), List.rev(inside), List.rev(trailing))\n    }\n  }\n\n  loop((list{}, list{}, list{}), comments)\n}\n\nlet partitionLeadingTrailing = (comments, loc) => {\n  let rec loop = ((leading, trailing), comments) => {\n    open Location\n    switch comments {\n    | list{comment, ...rest} =>\n      let cmtLoc = Comment.loc(comment)\n      if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum {\n        loop((list{comment, ...leading}, trailing), rest)\n      } else {\n        
loop((leading, list{comment, ...trailing}), rest)\n      }\n    | list{} => (List.rev(leading), List.rev(trailing))\n    }\n  }\n\n  loop((list{}, list{}), comments)\n}\n\nlet partitionByOnSameLine = (loc, comments) => {\n  let rec loop = ((onSameLine, onOtherLine), comments) => {\n    open Location\n    switch comments {\n    | list{} => (List.rev(onSameLine), List.rev(onOtherLine))\n    | list{comment, ...rest} =>\n      let cmtLoc = Comment.loc(comment)\n      if cmtLoc.loc_start.pos_lnum === loc.loc_end.pos_lnum {\n        loop((list{comment, ...onSameLine}, onOtherLine), rest)\n      } else {\n        loop((onSameLine, list{comment, ...onOtherLine}), rest)\n      }\n    }\n  }\n\n  loop((list{}, list{}), comments)\n}\n\nlet partitionAdjacentTrailing = (loc1, comments) => {\n  open Location\n  open Lexing\n  let rec loop = (~prevEndPos, afterLoc1, comments) =>\n    switch comments {\n    | list{} => (List.rev(afterLoc1), list{})\n    | list{comment, ...rest} as comments =>\n      let cmtPrevEndPos = Comment.prevTokEndPos(comment)\n      if prevEndPos.Lexing.pos_cnum === cmtPrevEndPos.pos_cnum {\n        let commentEnd = Comment.loc(comment).loc_end\n        loop(~prevEndPos=commentEnd, list{comment, ...afterLoc1}, rest)\n      } else {\n        (List.rev(afterLoc1), comments)\n      }\n    }\n\n  loop(~prevEndPos=loc1.loc_end, list{}, comments)\n}\n\nlet rec collectListPatterns = (acc, pattern) => {\n  open Parsetree\n  switch pattern.ppat_desc {\n  | Ppat_construct({txt: Longident.Lident(\"::\")}, Some({ppat_desc: Ppat_tuple(list{pat, rest})})) =>\n    collectListPatterns(list{pat, ...acc}, rest)\n  | Ppat_construct({txt: Longident.Lident(\"[]\")}, None) => List.rev(acc)\n  | _ => List.rev(list{pattern, ...acc})\n  }\n}\n\nlet rec collectListExprs = (acc, expr) => {\n  open Parsetree\n  switch expr.pexp_desc {\n  | Pexp_construct(\n      {txt: Longident.Lident(\"::\")},\n      Some({pexp_desc: Pexp_tuple(list{expr, rest})}),\n    ) =>\n    
collectListExprs(list{expr, ...acc}, rest)\n  | Pexp_construct({txt: Longident.Lident(\"[]\")}, _) => List.rev(acc)\n  | _ => List.rev(list{expr, ...acc})\n  }\n}\n\n/* TODO: use ParsetreeViewer */\nlet arrowType = ct => {\n  open Parsetree\n  let rec process = (attrsBefore, acc, typ) =>\n    switch typ {\n    | {ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2), ptyp_attributes: list{}} =>\n      let arg = (list{}, lbl, typ1)\n      process(attrsBefore, list{arg, ...acc}, typ2)\n    | {\n        ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2),\n        ptyp_attributes: list{({txt: \"bs\"}, _)} as attrs,\n      } =>\n      let arg = (attrs, lbl, typ1)\n      process(attrsBefore, list{arg, ...acc}, typ2)\n    | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: _attrs} as returnType =>\n      let args = List.rev(acc)\n      (attrsBefore, args, returnType)\n    | {\n        ptyp_desc: Ptyp_arrow((Labelled(_) | Optional(_)) as lbl, typ1, typ2),\n        ptyp_attributes: attrs,\n      } =>\n      let arg = (attrs, lbl, typ1)\n      process(attrsBefore, list{arg, ...acc}, typ2)\n    | typ => (attrsBefore, List.rev(acc), typ)\n    }\n\n  switch ct {\n  | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: attrs} as typ =>\n    process(attrs, list{}, {...typ, ptyp_attributes: list{}})\n  | typ => process(list{}, list{}, typ)\n  }\n}\n\n/* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? */\nlet modExprApply = modExpr => {\n  let rec loop = (acc, modExpr) =>\n    switch modExpr {\n    | {Parsetree.pmod_desc: Pmod_apply(next, arg)} => loop(list{arg, ...acc}, next)\n    | _ => list{modExpr, ...acc}\n    }\n\n  loop(list{}, modExpr)\n}\n\n/* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? 
*/\nlet modExprFunctor = modExpr => {\n  let rec loop = (acc, modExpr) =>\n    switch modExpr {\n    | {Parsetree.pmod_desc: Pmod_functor(lbl, modType, returnModExpr), pmod_attributes: attrs} =>\n      let param = (attrs, lbl, modType)\n      loop(list{param, ...acc}, returnModExpr)\n    | returnModExpr => (List.rev(acc), returnModExpr)\n    }\n\n  loop(list{}, modExpr)\n}\n\nlet functorType = modtype => {\n  let rec process = (acc, modtype) =>\n    switch modtype {\n    | {Parsetree.pmty_desc: Pmty_functor(lbl, argType, returnType), pmty_attributes: attrs} =>\n      let arg = (attrs, lbl, argType)\n      process(list{arg, ...acc}, returnType)\n    | modType => (List.rev(acc), modType)\n    }\n\n  process(list{}, modtype)\n}\n\nlet funExpr = expr => {\n  open Parsetree\n  /* Turns (type t, type u, type z) into \"type t u z\" */\n  let rec collectNewTypes = (acc, returnExpr) =>\n    switch returnExpr {\n    | {pexp_desc: Pexp_newtype(stringLoc, returnExpr), pexp_attributes: list{}} =>\n      collectNewTypes(list{stringLoc, ...acc}, returnExpr)\n    | returnExpr =>\n      let loc = switch (acc, List.rev(acc)) {\n      | (list{_startLoc, ..._}, list{endLoc, ..._}) => {...endLoc.loc, loc_end: endLoc.loc.loc_end}\n      | _ => Location.none\n      }\n\n      let txt = List.fold_right((curr, acc) => acc ++ (\" \" ++ curr.Location.txt), acc, \"type\")\n      (Location.mkloc(txt, loc), returnExpr)\n    }\n\n  /* For simplicity reason Pexp_newtype gets converted to a Nolabel parameter,\n   * otherwise this function would need to return a variant:\n   * | NormalParamater(...)\n   * | NewType(...)\n   * This complicates printing with an extra variant/boxing/allocation for a code-path\n   * that is not often used. 
Lets just keep it simple for now */\n  let rec collect = (attrsBefore, acc, expr) =>\n    switch expr {\n    | {pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr), pexp_attributes: list{}} =>\n      let parameter = (list{}, lbl, defaultExpr, pattern)\n      collect(attrsBefore, list{parameter, ...acc}, returnExpr)\n    | {pexp_desc: Pexp_newtype(stringLoc, rest), pexp_attributes: attrs} =>\n      let (var, returnExpr) = collectNewTypes(list{stringLoc}, rest)\n      let parameter = (attrs, Asttypes.Nolabel, None, Ast_helper.Pat.var(~loc=stringLoc.loc, var))\n      collect(attrsBefore, list{parameter, ...acc}, returnExpr)\n    | {\n        pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr),\n        pexp_attributes: list{({txt: \"bs\"}, _)} as attrs,\n      } =>\n      let parameter = (attrs, lbl, defaultExpr, pattern)\n      collect(attrsBefore, list{parameter, ...acc}, returnExpr)\n    | {\n        pexp_desc: Pexp_fun((Labelled(_) | Optional(_)) as lbl, defaultExpr, pattern, returnExpr),\n        pexp_attributes: attrs,\n      } =>\n      let parameter = (attrs, lbl, defaultExpr, pattern)\n      collect(attrsBefore, list{parameter, ...acc}, returnExpr)\n    | expr => (attrsBefore, List.rev(acc), expr)\n    }\n\n  switch expr {\n  | {\n      pexp_desc: Pexp_fun(Nolabel, _defaultExpr, _pattern, _returnExpr),\n      pexp_attributes: attrs,\n    } as expr =>\n    collect(attrs, list{}, {...expr, pexp_attributes: list{}})\n  | expr => collect(list{}, list{}, expr)\n  }\n}\n\nlet rec isBlockExpr = expr => {\n  open Parsetree\n  switch expr.pexp_desc {\n  | Pexp_letmodule(_)\n  | Pexp_letexception(_)\n  | Pexp_let(_)\n  | Pexp_open(_)\n  | Pexp_sequence(_) => true\n  | Pexp_apply(callExpr, _) if isBlockExpr(callExpr) => true\n  | Pexp_constraint(expr, _) if isBlockExpr(expr) => true\n  | Pexp_field(expr, _) if isBlockExpr(expr) => true\n  | Pexp_setfield(expr, _, _) if isBlockExpr(expr) => true\n  | _ => false\n  }\n}\n\nlet isIfThenElseExpr = expr => {\n  
open Parsetree\n  switch expr.pexp_desc {\n  | Pexp_ifthenelse(_) => true\n  | _ => false\n  }\n}\n\nlet rec walkStructure = (s, t, comments) =>\n  switch s {\n  | _ if comments == list{} => ()\n  | list{} => attach(t.inside, Location.none, comments)\n  | s => walkList(~getLoc=n => n.Parsetree.pstr_loc, ~walkNode=walkStructureItem, s, t, comments)\n  }\n\nand walkStructureItem = (si, t, comments) =>\n  switch si.Parsetree.pstr_desc {\n  | _ if comments == list{} => ()\n  | Pstr_primitive(valueDescription) => walkValueDescription(valueDescription, t, comments)\n  | Pstr_open(openDescription) => walkOpenDescription(openDescription, t, comments)\n  | Pstr_value(_, valueBindings) => walkValueBindings(valueBindings, t, comments)\n  | Pstr_type(_, typeDeclarations) => walkTypeDeclarations(typeDeclarations, t, comments)\n  | Pstr_eval(expr, _) => walkExpr(expr, t, comments)\n  | Pstr_module(moduleBinding) => walkModuleBinding(moduleBinding, t, comments)\n  | Pstr_recmodule(moduleBindings) =>\n    walkList(\n      ~getLoc=mb => mb.Parsetree.pmb_loc,\n      ~walkNode=walkModuleBinding,\n      moduleBindings,\n      t,\n      comments,\n    )\n  | Pstr_modtype(modTypDecl) => walkModuleTypeDeclaration(modTypDecl, t, comments)\n  | Pstr_attribute(attribute) => walkAttribute(attribute, t, comments)\n  | Pstr_extension(extension, _) => walkExtension(extension, t, comments)\n  | Pstr_include(includeDeclaration) => walkIncludeDeclaration(includeDeclaration, t, comments)\n  | Pstr_exception(extensionConstructor) => walkExtConstr(extensionConstructor, t, comments)\n  | Pstr_typext(typeExtension) => walkTypeExtension(typeExtension, t, comments)\n  | Pstr_class_type(_) | Pstr_class(_) => ()\n  }\n\nand walkValueDescription = (vd, t, comments) => {\n  let (leading, trailing) = partitionLeadingTrailing(comments, vd.pval_name.loc)\n  attach(t.leading, vd.pval_name.loc, leading)\n  let (afterName, rest) = partitionAdjacentTrailing(vd.pval_name.loc, trailing)\n  attach(t.trailing, 
vd.pval_name.loc, afterName)\n  let (before, inside, after) = partitionByLoc(rest, vd.pval_type.ptyp_loc)\n\n  attach(t.leading, vd.pval_type.ptyp_loc, before)\n  walkTypExpr(vd.pval_type, t, inside)\n  attach(t.trailing, vd.pval_type.ptyp_loc, after)\n}\n\nand walkTypeExtension = (te, t, comments) => {\n  let (leading, trailing) = partitionLeadingTrailing(comments, te.ptyext_path.loc)\n  attach(t.leading, te.ptyext_path.loc, leading)\n  let (afterPath, rest) = partitionAdjacentTrailing(te.ptyext_path.loc, trailing)\n  attach(t.trailing, te.ptyext_path.loc, afterPath)\n\n  /* type params */\n  let rest = switch te.ptyext_params {\n  | list{} => rest\n  | typeParams =>\n    visitListButContinueWithRemainingComments(\n      ~getLoc=((typexpr, _variance)) => typexpr.Parsetree.ptyp_loc,\n      ~walkNode=walkTypeParam,\n      ~newlineDelimited=false,\n      typeParams,\n      t,\n      rest,\n    )\n  }\n\n  walkList(\n    ~getLoc=n => n.Parsetree.pext_loc,\n    ~walkNode=walkExtConstr,\n    te.ptyext_constructors,\n    t,\n    rest,\n  )\n}\n\nand walkIncludeDeclaration = (inclDecl, t, comments) => {\n  let (before, inside, after) = partitionByLoc(comments, inclDecl.pincl_mod.pmod_loc)\n  attach(t.leading, inclDecl.pincl_mod.pmod_loc, before)\n  walkModExpr(inclDecl.pincl_mod, t, inside)\n  attach(t.trailing, inclDecl.pincl_mod.pmod_loc, after)\n}\n\nand walkModuleTypeDeclaration = (mtd, t, comments) => {\n  let (leading, trailing) = partitionLeadingTrailing(comments, mtd.pmtd_name.loc)\n  attach(t.leading, mtd.pmtd_name.loc, leading)\n  switch mtd.pmtd_type {\n  | None => attach(t.trailing, mtd.pmtd_name.loc, trailing)\n  | Some(modType) =>\n    let (afterName, rest) = partitionAdjacentTrailing(mtd.pmtd_name.loc, trailing)\n    attach(t.trailing, mtd.pmtd_name.loc, afterName)\n    let (before, inside, after) = partitionByLoc(rest, modType.pmty_loc)\n    attach(t.leading, modType.pmty_loc, before)\n    walkModType(modType, t, inside)\n    attach(t.trailing, 
modType.pmty_loc, after)\n  }\n}\n\nand walkModuleBinding = (mb, t, comments) => {\n  let (leading, trailing) = partitionLeadingTrailing(comments, mb.pmb_name.loc)\n  attach(t.leading, mb.pmb_name.loc, leading)\n  let (afterName, rest) = partitionAdjacentTrailing(mb.pmb_name.loc, trailing)\n  attach(t.trailing, mb.pmb_name.loc, afterName)\n  let (leading, inside, trailing) = partitionByLoc(rest, mb.pmb_expr.pmod_loc)\n  switch mb.pmb_expr.pmod_desc {\n  | Pmod_constraint(_) => walkModExpr(mb.pmb_expr, t, List.concat(list{leading, inside}))\n  | _ =>\n    attach(t.leading, mb.pmb_expr.pmod_loc, leading)\n    walkModExpr(mb.pmb_expr, t, inside)\n  }\n  attach(t.trailing, mb.pmb_expr.pmod_loc, trailing)\n}\n\nand walkSignature = (signature, t, comments) =>\n  switch signature {\n  | _ if comments == list{} => ()\n  | list{} => attach(t.inside, Location.none, comments)\n  | _s =>\n    walkList(~getLoc=n => n.Parsetree.psig_loc, ~walkNode=walkSignatureItem, signature, t, comments)\n  }\n\nand walkSignatureItem = (si, t, comments) =>\n  switch si.psig_desc {\n  | _ if comments == list{} => ()\n  | Psig_value(valueDescription) => walkValueDescription(valueDescription, t, comments)\n  | Psig_type(_, typeDeclarations) => walkTypeDeclarations(typeDeclarations, t, comments)\n  | Psig_typext(typeExtension) => walkTypeExtension(typeExtension, t, comments)\n  | Psig_exception(extensionConstructor) => walkExtConstr(extensionConstructor, t, comments)\n  | Psig_module(moduleDeclaration) => walkModuleDeclaration(moduleDeclaration, t, comments)\n  | Psig_recmodule(moduleDeclarations) =>\n    walkList(\n      ~getLoc=n => n.Parsetree.pmd_loc,\n      ~walkNode=walkModuleDeclaration,\n      moduleDeclarations,\n      t,\n      comments,\n    )\n  | Psig_modtype(moduleTypeDeclaration) =>\n    walkModuleTypeDeclaration(moduleTypeDeclaration, t, comments)\n  | Psig_open(openDescription) => walkOpenDescription(openDescription, t, comments)\n  | Psig_include(includeDescription) => 
walkIncludeDescription(includeDescription, t, comments)\n  | Psig_attribute(attribute) => walkAttribute(attribute, t, comments)\n  | Psig_extension(extension, _) => walkExtension(extension, t, comments)\n  | Psig_class(_) | Psig_class_type(_) => ()\n  }\n\nand walkIncludeDescription = (id, t, comments) => {\n  let (before, inside, after) = partitionByLoc(comments, id.pincl_mod.pmty_loc)\n  attach(t.leading, id.pincl_mod.pmty_loc, before)\n  walkModType(id.pincl_mod, t, inside)\n  attach(t.trailing, id.pincl_mod.pmty_loc, after)\n}\n\nand walkModuleDeclaration = (md, t, comments) => {\n  let (leading, trailing) = partitionLeadingTrailing(comments, md.pmd_name.loc)\n  attach(t.leading, md.pmd_name.loc, leading)\n  let (afterName, rest) = partitionAdjacentTrailing(md.pmd_name.loc, trailing)\n  attach(t.trailing, md.pmd_name.loc, afterName)\n  let (leading, inside, trailing) = partitionByLoc(rest, md.pmd_type.pmty_loc)\n  attach(t.leading, md.pmd_type.pmty_loc, leading)\n  walkModType(md.pmd_type, t, inside)\n  attach(t.trailing, md.pmd_type.pmty_loc, trailing)\n}\n\nand walkList: 'node. 
(\n  ~prevLoc: Location.t=?,\n  ~getLoc: 'node => Location.t,\n  ~walkNode: ('node, t, list<Comment.t>) => unit,\n  list<'node>,\n  t,\n  list<Comment.t>,\n) => unit = (~prevLoc=?, ~getLoc, ~walkNode, l, t, comments) => {\n  open Location\n  switch l {\n  | _ if comments == list{} => ()\n  | list{} =>\n    switch prevLoc {\n    | Some(loc) => attach(t.trailing, loc, comments)\n    | None => ()\n    }\n  | list{node, ...rest} =>\n    let currLoc = getLoc(node)\n    let (leading, inside, trailing) = partitionByLoc(comments, currLoc)\n    switch prevLoc {\n    | None =>\n      /* first node, all leading comments attach here */\n      attach(t.leading, currLoc, leading)\n    | Some(prevLoc) =>\n      /* Same line */\n      if prevLoc.loc_end.pos_lnum === currLoc.loc_start.pos_lnum {\n        let (afterPrev, beforeCurr) = partitionAdjacentTrailing(prevLoc, leading)\n        let () = attach(t.trailing, prevLoc, afterPrev)\n        attach(t.leading, currLoc, beforeCurr)\n      } else {\n        let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine(prevLoc, leading)\n        let () = attach(t.trailing, prevLoc, onSameLineAsPrev)\n        let (leading, _inside, _trailing) = partitionByLoc(afterPrev, currLoc)\n        attach(t.leading, currLoc, leading)\n      }\n    }\n    walkNode(node, t, inside)\n    walkList(~prevLoc=currLoc, ~getLoc, ~walkNode, rest, t, trailing)\n  }\n}\n\n/* The parsetree doesn't always contain location info about the opening or\n * closing token of a \"list-of-things\". This routine visits the whole list,\n * but returns any remaining comments that likely fall after the whole list. */\nand visitListButContinueWithRemainingComments: 'node. 
(\n  ~prevLoc: Location.t=?,\n  ~newlineDelimited: bool,\n  ~getLoc: 'node => Location.t,\n  ~walkNode: ('node, t, list<Comment.t>) => unit,\n  list<'node>,\n  t,\n  list<Comment.t>,\n) => list<Comment.t> = (~prevLoc=?, ~newlineDelimited, ~getLoc, ~walkNode, l, t, comments) => {\n  open Location\n  switch l {\n  | _ if comments == list{} => list{}\n  | list{} =>\n    switch prevLoc {\n    | Some(loc) =>\n      let (afterPrev, rest) = if newlineDelimited {\n        partitionByOnSameLine(loc, comments)\n      } else {\n        partitionAdjacentTrailing(loc, comments)\n      }\n\n      attach(t.trailing, loc, afterPrev)\n      rest\n    | None => comments\n    }\n  | list{node, ...rest} =>\n    let currLoc = getLoc(node)\n    let (leading, inside, trailing) = partitionByLoc(comments, currLoc)\n    let () = switch prevLoc {\n    | None =>\n      /* first node, all leading comments attach here */\n      attach(t.leading, currLoc, leading)\n      ()\n    | Some(prevLoc) =>\n      /* Same line */\n      if prevLoc.loc_end.pos_lnum === currLoc.loc_start.pos_lnum {\n        let (afterPrev, beforeCurr) = partitionAdjacentTrailing(prevLoc, leading)\n        let () = attach(t.trailing, prevLoc, afterPrev)\n        let () = attach(t.leading, currLoc, beforeCurr)\n      } else {\n        let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine(prevLoc, leading)\n        let () = attach(t.trailing, prevLoc, onSameLineAsPrev)\n        let (leading, _inside, _trailing) = partitionByLoc(afterPrev, currLoc)\n        let () = attach(t.leading, currLoc, leading)\n      }\n    }\n\n    walkNode(node, t, inside)\n    visitListButContinueWithRemainingComments(\n      ~prevLoc=currLoc,\n      ~getLoc,\n      ~walkNode,\n      ~newlineDelimited,\n      rest,\n      t,\n      trailing,\n    )\n  }\n}\n\nand walkValueBindings = (vbs, t, comments) =>\n  walkList(~getLoc=n => n.Parsetree.pvb_loc, ~walkNode=walkValueBinding, vbs, t, comments)\n\nand walkOpenDescription = (openDescription, t, 
comments) => {\n  let loc = openDescription.popen_lid.loc\n  let (leading, trailing) = partitionLeadingTrailing(comments, loc)\n  attach(t.leading, loc, leading)\n  attach(t.trailing, loc, trailing)\n}\n\nand walkTypeDeclarations = (typeDeclarations, t, comments) =>\n  walkList(\n    ~getLoc=n => n.Parsetree.ptype_loc,\n    ~walkNode=walkTypeDeclaration,\n    typeDeclarations,\n    t,\n    comments,\n  )\n\nand walkTypeParam = ((typexpr, _variance), t, comments) => walkTypExpr(typexpr, t, comments)\n\nand walkTypeDeclaration = (td, t, comments) => {\n  let (beforeName, rest) = partitionLeadingTrailing(comments, td.ptype_name.loc)\n  attach(t.leading, td.ptype_name.loc, beforeName)\n\n  let (afterName, rest) = partitionAdjacentTrailing(td.ptype_name.loc, rest)\n  attach(t.trailing, td.ptype_name.loc, afterName)\n\n  /* type params */\n  let rest = switch td.ptype_params {\n  | list{} => rest\n  | typeParams =>\n    visitListButContinueWithRemainingComments(\n      ~getLoc=((typexpr, _variance)) => typexpr.Parsetree.ptyp_loc,\n      ~walkNode=walkTypeParam,\n      ~newlineDelimited=false,\n      typeParams,\n      t,\n      rest,\n    )\n  }\n\n  /* manifest:  = typexpr */\n  let rest = switch td.ptype_manifest {\n  | Some(typexpr) =>\n    let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc)\n    attach(t.leading, typexpr.ptyp_loc, beforeTyp)\n    walkTypExpr(typexpr, t, insideTyp)\n    let (afterTyp, rest) = partitionAdjacentTrailing(typexpr.Parsetree.ptyp_loc, afterTyp)\n    attach(t.trailing, typexpr.ptyp_loc, afterTyp)\n    rest\n  | None => rest\n  }\n\n  let rest = switch td.ptype_kind {\n  | Ptype_abstract | Ptype_open => rest\n  | Ptype_record(labelDeclarations) =>\n    let () = walkList(\n      ~getLoc=ld => ld.Parsetree.pld_loc,\n      ~walkNode=walkLabelDeclaration,\n      labelDeclarations,\n      t,\n      rest,\n    )\n\n    list{}\n  | Ptype_variant(constructorDeclarations) =>\n    
walkConstructorDeclarations(constructorDeclarations, t, rest)\n  }\n\n  attach(t.trailing, td.ptype_loc, rest)\n}\n\nand walkLabelDeclarations = (lds, t, comments) =>\n  visitListButContinueWithRemainingComments(\n    ~getLoc=ld => ld.Parsetree.pld_loc,\n    ~walkNode=walkLabelDeclaration,\n    ~newlineDelimited=false,\n    lds,\n    t,\n    comments,\n  )\n\nand walkLabelDeclaration = (ld, t, comments) => {\n  let (beforeName, rest) = partitionLeadingTrailing(comments, ld.pld_name.loc)\n  attach(t.leading, ld.pld_name.loc, beforeName)\n  let (afterName, rest) = partitionAdjacentTrailing(ld.pld_name.loc, rest)\n  attach(t.trailing, ld.pld_name.loc, afterName)\n  let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, ld.pld_type.ptyp_loc)\n  attach(t.leading, ld.pld_type.ptyp_loc, beforeTyp)\n  walkTypExpr(ld.pld_type, t, insideTyp)\n  attach(t.trailing, ld.pld_type.ptyp_loc, afterTyp)\n}\n\nand walkConstructorDeclarations = (cds, t, comments) =>\n  visitListButContinueWithRemainingComments(\n    ~getLoc=cd => cd.Parsetree.pcd_loc,\n    ~walkNode=walkConstructorDeclaration,\n    ~newlineDelimited=false,\n    cds,\n    t,\n    comments,\n  )\n\nand walkConstructorDeclaration = (cd, t, comments) => {\n  let (beforeName, rest) = partitionLeadingTrailing(comments, cd.pcd_name.loc)\n  attach(t.leading, cd.pcd_name.loc, beforeName)\n  let (afterName, rest) = partitionAdjacentTrailing(cd.pcd_name.loc, rest)\n  attach(t.trailing, cd.pcd_name.loc, afterName)\n  let rest = walkConstructorArguments(cd.pcd_args, t, rest)\n\n  let rest = switch cd.pcd_res {\n  | Some(typexpr) =>\n    let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc)\n    attach(t.leading, typexpr.ptyp_loc, beforeTyp)\n    walkTypExpr(typexpr, t, insideTyp)\n    let (afterTyp, rest) = partitionAdjacentTrailing(typexpr.Parsetree.ptyp_loc, afterTyp)\n    attach(t.trailing, typexpr.ptyp_loc, afterTyp)\n    rest\n  | None => rest\n  }\n\n  attach(t.trailing, cd.pcd_loc, 
rest)\n}\n\nand walkConstructorArguments = (args, t, comments) =>\n  switch args {\n  | Pcstr_tuple(typexprs) =>\n    visitListButContinueWithRemainingComments(\n      ~getLoc=n => n.Parsetree.ptyp_loc,\n      ~walkNode=walkTypExpr,\n      ~newlineDelimited=false,\n      typexprs,\n      t,\n      comments,\n    )\n  | Pcstr_record(labelDeclarations) => walkLabelDeclarations(labelDeclarations, t, comments)\n  }\n\nand walkValueBinding = (vb, t, comments) => {\n  open Location\n\n  let vb = {\n    open Parsetree\n    switch (vb.pvb_pat, vb.pvb_expr) {\n    | (\n        {ppat_desc: Ppat_constraint(pat, {ptyp_desc: Ptyp_poly(list{}, t)})},\n        {pexp_desc: Pexp_constraint(expr, _typ)},\n      ) => {\n        ...vb,\n        pvb_pat: Ast_helper.Pat.constraint_(\n          ~loc={...pat.ppat_loc, loc_end: t.Parsetree.ptyp_loc.loc_end},\n          pat,\n          t,\n        ),\n        pvb_expr: expr,\n      }\n    | (\n        {ppat_desc: Ppat_constraint(pat, {ptyp_desc: Ptyp_poly(list{_, ..._}, t)})},\n        {pexp_desc: Pexp_fun(_)},\n      ) => {\n        ...vb,\n        pvb_pat: {\n          ...vb.pvb_pat,\n          ppat_loc: {...pat.ppat_loc, loc_end: t.ptyp_loc.loc_end},\n        },\n      }\n\n    | (\n        {\n          ppat_desc: Ppat_constraint(pat, {ptyp_desc: Ptyp_poly(list{_, ..._}, t)} as typ),\n        } as constrainedPattern,\n        {pexp_desc: Pexp_newtype(_, {pexp_desc: Pexp_constraint(expr, _)})},\n      ) => /*\n       * The location of the Ptyp_poly on the pattern is the whole thing.\n       * let x:\n       *   type t. 
(int, int) => int =\n       *   (a, b) => {\n       *     // comment\n       *     a + b\n       *   }\n       */\n      {\n        ...vb,\n        pvb_pat: {\n          ...constrainedPattern,\n          ppat_desc: Ppat_constraint(pat, typ),\n          ppat_loc: {...constrainedPattern.ppat_loc, loc_end: t.ptyp_loc.loc_end},\n        },\n        pvb_expr: expr,\n      }\n    | _ => vb\n    }\n  }\n\n  let patternLoc = vb.Parsetree.pvb_pat.ppat_loc\n  let exprLoc = vb.Parsetree.pvb_expr.pexp_loc\n  let expr = vb.pvb_expr\n\n  let (leading, inside, trailing) = partitionByLoc(comments, patternLoc)\n\n  /* everything before start of pattern can only be leading on the pattern:\n   *   let |* before *| a = 1 */\n  attach(t.leading, patternLoc, leading)\n  walkPattern(vb.Parsetree.pvb_pat, t, inside)\n  let (afterPat, surroundingExpr) = partitionAdjacentTrailing(patternLoc, trailing)\n\n  attach(t.trailing, patternLoc, afterPat)\n  let (beforeExpr, insideExpr, afterExpr) = partitionByLoc(surroundingExpr, exprLoc)\n  if isBlockExpr(expr) {\n    walkExpr(expr, t, List.concat(list{beforeExpr, insideExpr, afterExpr}))\n  } else {\n    attach(t.leading, exprLoc, beforeExpr)\n    walkExpr(expr, t, insideExpr)\n    attach(t.trailing, exprLoc, afterExpr)\n  }\n}\n\nand walkExpr = (expr, t, comments) => {\n  open Location\n  switch expr.Parsetree.pexp_desc {\n  | _ if comments == list{} => ()\n  | Pexp_constant(_) =>\n    let (leading, trailing) = partitionLeadingTrailing(comments, expr.pexp_loc)\n    attach(t.leading, expr.pexp_loc, leading)\n    attach(t.trailing, expr.pexp_loc, trailing)\n  | Pexp_ident(longident) =>\n    let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)\n    attach(t.leading, longident.loc, leading)\n    attach(t.trailing, longident.loc, trailing)\n  | Pexp_let(\n      _recFlag,\n      valueBindings,\n      {pexp_desc: Pexp_construct({txt: Longident.Lident(\"()\")}, None)},\n    ) =>\n    walkValueBindings(valueBindings, t, comments)\n 
 | Pexp_let(_recFlag, valueBindings, expr2) =>\n    let comments = visitListButContinueWithRemainingComments(~getLoc=n =>\n      if n.Parsetree.pvb_pat.ppat_loc.loc_ghost {\n        n.pvb_expr.pexp_loc\n      } else {\n        n.Parsetree.pvb_loc\n      }\n    , ~walkNode=walkValueBinding, ~newlineDelimited=true, valueBindings, t, comments)\n\n    if isBlockExpr(expr2) {\n      walkExpr(expr2, t, comments)\n    } else {\n      let (leading, inside, trailing) = partitionByLoc(comments, expr2.pexp_loc)\n      attach(t.leading, expr2.pexp_loc, leading)\n      walkExpr(expr2, t, inside)\n      attach(t.trailing, expr2.pexp_loc, trailing)\n    }\n  | Pexp_sequence(expr1, expr2) =>\n    let (leading, inside, trailing) = partitionByLoc(comments, expr1.pexp_loc)\n    let comments = if isBlockExpr(expr1) {\n      let (afterExpr, comments) = partitionByOnSameLine(expr1.pexp_loc, trailing)\n      walkExpr(expr1, t, List.concat(list{leading, inside, afterExpr}))\n      comments\n    } else {\n      attach(t.leading, expr1.pexp_loc, leading)\n      walkExpr(expr1, t, inside)\n      let (afterExpr, comments) = partitionByOnSameLine(expr1.pexp_loc, trailing)\n      attach(t.trailing, expr1.pexp_loc, afterExpr)\n      comments\n    }\n    if isBlockExpr(expr2) {\n      walkExpr(expr2, t, comments)\n    } else {\n      let (leading, inside, trailing) = partitionByLoc(comments, expr2.pexp_loc)\n      attach(t.leading, expr2.pexp_loc, leading)\n      walkExpr(expr2, t, inside)\n      attach(t.trailing, expr2.pexp_loc, trailing)\n    }\n  | Pexp_open(_override, longident, expr2) =>\n    let (leading, comments) = partitionLeadingTrailing(comments, expr.pexp_loc)\n    attach(t.leading, {...expr.pexp_loc, loc_end: longident.loc.loc_end}, leading)\n    let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)\n    attach(t.leading, longident.loc, leading)\n    let (afterLongident, rest) = partitionByOnSameLine(longident.loc, trailing)\n    attach(t.trailing, 
longident.loc, afterLongident)\n    if isBlockExpr(expr2) {\n      walkExpr(expr2, t, rest)\n    } else {\n      let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)\n      attach(t.leading, expr2.pexp_loc, leading)\n      walkExpr(expr2, t, inside)\n      attach(t.trailing, expr2.pexp_loc, trailing)\n    }\n  | Pexp_extension(\n      {txt: \"bs.obj\" | \"obj\"},\n      PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_record(rows, _)}, list{})}}),\n    ) =>\n    walkList(~getLoc=((longident, expr): (Asttypes.loc<Longident.t>, Parsetree.expression)) => {\n      ...longident.loc,\n      loc_end: expr.pexp_loc.loc_end,\n    }, ~walkNode=walkExprRecordRow, rows, t, comments)\n  | Pexp_extension(extension) => walkExtension(extension, t, comments)\n  | Pexp_letexception(extensionConstructor, expr2) =>\n    let (leading, comments) = partitionLeadingTrailing(comments, expr.pexp_loc)\n    attach(t.leading, {...expr.pexp_loc, loc_end: extensionConstructor.pext_loc.loc_end}, leading)\n    let (leading, inside, trailing) = partitionByLoc(comments, extensionConstructor.pext_loc)\n    attach(t.leading, extensionConstructor.pext_loc, leading)\n    walkExtConstr(extensionConstructor, t, inside)\n    let (afterExtConstr, rest) = partitionByOnSameLine(extensionConstructor.pext_loc, trailing)\n    attach(t.trailing, extensionConstructor.pext_loc, afterExtConstr)\n    if isBlockExpr(expr2) {\n      walkExpr(expr2, t, rest)\n    } else {\n      let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)\n      attach(t.leading, expr2.pexp_loc, leading)\n      walkExpr(expr2, t, inside)\n      attach(t.trailing, expr2.pexp_loc, trailing)\n    }\n  | Pexp_letmodule(stringLoc, modExpr, expr2) =>\n    let (leading, comments) = partitionLeadingTrailing(comments, expr.pexp_loc)\n    attach(t.leading, {...expr.pexp_loc, loc_end: modExpr.pmod_loc.loc_end}, leading)\n    let (leading, trailing) = partitionLeadingTrailing(comments, stringLoc.loc)\n    
attach(t.leading, stringLoc.loc, leading)\n    let (afterString, rest) = partitionAdjacentTrailing(stringLoc.loc, trailing)\n    attach(t.trailing, stringLoc.loc, afterString)\n    let (beforeModExpr, insideModExpr, afterModExpr) = partitionByLoc(rest, modExpr.pmod_loc)\n    attach(t.leading, modExpr.pmod_loc, beforeModExpr)\n    walkModExpr(modExpr, t, insideModExpr)\n    let (afterModExpr, rest) = partitionByOnSameLine(modExpr.pmod_loc, afterModExpr)\n    attach(t.trailing, modExpr.pmod_loc, afterModExpr)\n    if isBlockExpr(expr2) {\n      walkExpr(expr2, t, rest)\n    } else {\n      let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)\n      attach(t.leading, expr2.pexp_loc, leading)\n      walkExpr(expr2, t, inside)\n      attach(t.trailing, expr2.pexp_loc, trailing)\n    }\n  | Pexp_assert(expr)\n  | Pexp_lazy(expr) =>\n    if isBlockExpr(expr) {\n      walkExpr(expr, t, comments)\n    } else {\n      let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)\n      attach(t.leading, expr.pexp_loc, leading)\n      walkExpr(expr, t, inside)\n      attach(t.trailing, expr.pexp_loc, trailing)\n    }\n  | Pexp_coerce(expr, optTypexpr, typexpr) =>\n    let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)\n    attach(t.leading, expr.pexp_loc, leading)\n    walkExpr(expr, t, inside)\n    let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)\n    attach(t.trailing, expr.pexp_loc, afterExpr)\n    let rest = switch optTypexpr {\n    | Some(typexpr) =>\n      let (leading, inside, trailing) = partitionByLoc(comments, typexpr.ptyp_loc)\n      attach(t.leading, typexpr.ptyp_loc, leading)\n      walkTypExpr(typexpr, t, inside)\n      let (afterTyp, rest) = partitionAdjacentTrailing(typexpr.ptyp_loc, trailing)\n      attach(t.trailing, typexpr.ptyp_loc, afterTyp)\n      rest\n    | None => rest\n    }\n\n    let (leading, inside, trailing) = partitionByLoc(rest, typexpr.ptyp_loc)\n    
attach(t.leading, typexpr.ptyp_loc, leading)\n    walkTypExpr(typexpr, t, inside)\n    attach(t.trailing, typexpr.ptyp_loc, trailing)\n  | Pexp_constraint(expr, typexpr) =>\n    let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)\n    attach(t.leading, expr.pexp_loc, leading)\n    walkExpr(expr, t, inside)\n    let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)\n    attach(t.trailing, expr.pexp_loc, afterExpr)\n    let (leading, inside, trailing) = partitionByLoc(rest, typexpr.ptyp_loc)\n    attach(t.leading, typexpr.ptyp_loc, leading)\n    walkTypExpr(typexpr, t, inside)\n    attach(t.trailing, typexpr.ptyp_loc, trailing)\n  | Pexp_tuple(list{})\n  | Pexp_array(list{})\n  | Pexp_construct({txt: Longident.Lident(\"[]\")}, _) =>\n    attach(t.inside, expr.pexp_loc, comments)\n  | Pexp_construct({txt: Longident.Lident(\"::\")}, _) =>\n    walkList(\n      ~getLoc=n => n.Parsetree.pexp_loc,\n      ~walkNode=walkExpr,\n      collectListExprs(list{}, expr),\n      t,\n      comments,\n    )\n  | Pexp_construct(longident, args) =>\n    let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)\n    attach(t.leading, longident.loc, leading)\n    switch args {\n    | Some(expr) =>\n      let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, trailing)\n      attach(t.trailing, longident.loc, afterLongident)\n      walkExpr(expr, t, rest)\n    | None => attach(t.trailing, longident.loc, trailing)\n    }\n  | Pexp_variant(_label, None) => ()\n  | Pexp_variant(_label, Some(expr)) => walkExpr(expr, t, comments)\n  | Pexp_array(exprs) | Pexp_tuple(exprs) =>\n    walkList(~getLoc=n => n.Parsetree.pexp_loc, ~walkNode=walkExpr, exprs, t, comments)\n  | Pexp_record(rows, spreadExpr) =>\n    let comments = switch spreadExpr {\n    | None => comments\n    | Some(expr) =>\n      let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)\n      attach(t.leading, expr.pexp_loc, leading)\n      
walkExpr(expr, t, inside)\n      let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)\n      attach(t.trailing, expr.pexp_loc, afterExpr)\n      rest\n    }\n\n    walkList(~getLoc=((longident, expr): (Asttypes.loc<Longident.t>, Parsetree.expression)) => {\n      ...longident.loc,\n      loc_end: expr.pexp_loc.loc_end,\n    }, ~walkNode=walkExprRecordRow, rows, t, comments)\n  | Pexp_field(expr, longident) =>\n    let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)\n    let trailing = if isBlockExpr(expr) {\n      let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)\n      walkExpr(expr, t, List.concat(list{leading, inside, afterExpr}))\n      rest\n    } else {\n      attach(t.leading, expr.pexp_loc, leading)\n      walkExpr(expr, t, inside)\n      trailing\n    }\n    let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)\n    attach(t.trailing, expr.pexp_loc, afterExpr)\n    let (leading, trailing) = partitionLeadingTrailing(rest, longident.loc)\n    attach(t.leading, longident.loc, leading)\n    attach(t.trailing, longident.loc, trailing)\n  | Pexp_setfield(expr1, longident, expr2) =>\n    let (leading, inside, trailing) = partitionByLoc(comments, expr1.pexp_loc)\n    let rest = if isBlockExpr(expr1) {\n      let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)\n      walkExpr(expr1, t, List.concat(list{leading, inside, afterExpr}))\n      rest\n    } else {\n      let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)\n      attach(t.leading, expr1.pexp_loc, leading)\n      walkExpr(expr1, t, inside)\n      attach(t.trailing, expr1.pexp_loc, afterExpr)\n      rest\n    }\n    let (beforeLongident, afterLongident) = partitionLeadingTrailing(rest, longident.loc)\n    attach(t.leading, longident.loc, beforeLongident)\n    let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident)\n    attach(t.trailing, 
longident.loc, afterLongident)\n    if isBlockExpr(expr2) {\n      walkExpr(expr2, t, rest)\n    } else {\n      let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)\n      attach(t.leading, expr2.pexp_loc, leading)\n      walkExpr(expr2, t, inside)\n      attach(t.trailing, expr2.pexp_loc, trailing)\n    }\n  | Pexp_ifthenelse(ifExpr, thenExpr, elseExpr) =>\n    let (leading, inside, trailing) = partitionByLoc(comments, ifExpr.pexp_loc)\n    let comments = if isBlockExpr(ifExpr) {\n      let (afterExpr, comments) = partitionAdjacentTrailing(ifExpr.pexp_loc, trailing)\n      walkExpr(ifExpr, t, List.concat(list{leading, inside, afterExpr}))\n      comments\n    } else {\n      attach(t.leading, ifExpr.pexp_loc, leading)\n      walkExpr(ifExpr, t, inside)\n      let (afterExpr, comments) = partitionAdjacentTrailing(ifExpr.pexp_loc, trailing)\n      attach(t.trailing, ifExpr.pexp_loc, afterExpr)\n      comments\n    }\n    let (leading, inside, trailing) = partitionByLoc(comments, thenExpr.pexp_loc)\n    let comments = if isBlockExpr(thenExpr) {\n      let (afterExpr, trailing) = partitionAdjacentTrailing(thenExpr.pexp_loc, trailing)\n      walkExpr(thenExpr, t, List.concat(list{leading, inside, afterExpr}))\n      trailing\n    } else {\n      attach(t.leading, thenExpr.pexp_loc, leading)\n      walkExpr(thenExpr, t, inside)\n      let (afterExpr, comments) = partitionAdjacentTrailing(thenExpr.pexp_loc, trailing)\n      attach(t.trailing, thenExpr.pexp_loc, afterExpr)\n      comments\n    }\n    switch elseExpr {\n    | None => ()\n    | Some(expr) =>\n      if isBlockExpr(expr) || isIfThenElseExpr(expr) {\n        walkExpr(expr, t, comments)\n      } else {\n        let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)\n        attach(t.leading, expr.pexp_loc, leading)\n        walkExpr(expr, t, inside)\n        attach(t.trailing, expr.pexp_loc, trailing)\n      }\n    }\n  | Pexp_while(expr1, expr2) =>\n    let (leading, 
inside, trailing) = partitionByLoc(comments, expr1.pexp_loc)\n    let rest = if isBlockExpr(expr1) {\n      let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)\n      walkExpr(expr1, t, List.concat(list{leading, inside, afterExpr}))\n      rest\n    } else {\n      attach(t.leading, expr1.pexp_loc, leading)\n      walkExpr(expr1, t, inside)\n      let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)\n      attach(t.trailing, expr1.pexp_loc, afterExpr)\n      rest\n    }\n    if isBlockExpr(expr2) {\n      walkExpr(expr2, t, rest)\n    } else {\n      let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)\n      attach(t.leading, expr2.pexp_loc, leading)\n      walkExpr(expr2, t, inside)\n      attach(t.trailing, expr2.pexp_loc, trailing)\n    }\n  | Pexp_for(pat, expr1, expr2, _, expr3) =>\n    let (leading, inside, trailing) = partitionByLoc(comments, pat.ppat_loc)\n    attach(t.leading, pat.ppat_loc, leading)\n    walkPattern(pat, t, inside)\n    let (afterPat, rest) = partitionAdjacentTrailing(pat.ppat_loc, trailing)\n    attach(t.trailing, pat.ppat_loc, afterPat)\n    let (leading, inside, trailing) = partitionByLoc(rest, expr1.pexp_loc)\n    attach(t.leading, expr1.pexp_loc, leading)\n    walkExpr(expr1, t, inside)\n    let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)\n    attach(t.trailing, expr1.pexp_loc, afterExpr)\n    let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)\n    attach(t.leading, expr2.pexp_loc, leading)\n    walkExpr(expr2, t, inside)\n    let (afterExpr, rest) = partitionAdjacentTrailing(expr2.pexp_loc, trailing)\n    attach(t.trailing, expr2.pexp_loc, afterExpr)\n    if isBlockExpr(expr3) {\n      walkExpr(expr3, t, rest)\n    } else {\n      let (leading, inside, trailing) = partitionByLoc(rest, expr3.pexp_loc)\n      attach(t.leading, expr3.pexp_loc, leading)\n      walkExpr(expr3, t, inside)\n      attach(t.trailing, 
expr3.pexp_loc, trailing)\n    }\n  | Pexp_pack(modExpr) =>\n    let (before, inside, after) = partitionByLoc(comments, modExpr.pmod_loc)\n    attach(t.leading, modExpr.pmod_loc, before)\n    walkModExpr(modExpr, t, inside)\n    attach(t.trailing, modExpr.pmod_loc, after)\n  | Pexp_match(expr1, list{case, elseBranch})\n    if Res_parsetree_viewer.hasIfLetAttribute(expr.pexp_attributes) =>\n    let (before, inside, after) = partitionByLoc(comments, case.pc_lhs.ppat_loc)\n    attach(t.leading, case.pc_lhs.ppat_loc, before)\n    walkPattern(case.pc_lhs, t, inside)\n    let (afterPat, rest) = partitionAdjacentTrailing(case.pc_lhs.ppat_loc, after)\n    attach(t.trailing, case.pc_lhs.ppat_loc, afterPat)\n    let (before, inside, after) = partitionByLoc(rest, expr1.pexp_loc)\n    attach(t.leading, expr1.pexp_loc, before)\n    walkExpr(expr1, t, inside)\n    let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, after)\n    attach(t.trailing, expr1.pexp_loc, afterExpr)\n    let (before, inside, after) = partitionByLoc(rest, case.pc_rhs.pexp_loc)\n    let after = if isBlockExpr(case.pc_rhs) {\n      let (afterExpr, rest) = partitionAdjacentTrailing(case.pc_rhs.pexp_loc, after)\n      walkExpr(case.pc_rhs, t, List.concat(list{before, inside, afterExpr}))\n      rest\n    } else {\n      attach(t.leading, case.pc_rhs.pexp_loc, before)\n      walkExpr(case.pc_rhs, t, inside)\n      after\n    }\n    let (afterExpr, rest) = partitionAdjacentTrailing(case.pc_rhs.pexp_loc, after)\n    attach(t.trailing, case.pc_rhs.pexp_loc, afterExpr)\n    let (before, inside, after) = partitionByLoc(rest, elseBranch.pc_rhs.pexp_loc)\n    let after = if isBlockExpr(elseBranch.pc_rhs) {\n      let (afterExpr, rest) = partitionAdjacentTrailing(elseBranch.pc_rhs.pexp_loc, after)\n      walkExpr(elseBranch.pc_rhs, t, List.concat(list{before, inside, afterExpr}))\n      rest\n    } else {\n      attach(t.leading, elseBranch.pc_rhs.pexp_loc, before)\n      walkExpr(elseBranch.pc_rhs, t, 
inside)\n      after\n    }\n    attach(t.trailing, elseBranch.pc_rhs.pexp_loc, after)\n\n  | Pexp_match(expr, cases) | Pexp_try(expr, cases) =>\n    let (before, inside, after) = partitionByLoc(comments, expr.pexp_loc)\n    let after = if isBlockExpr(expr) {\n      let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, after)\n      walkExpr(expr, t, List.concat(list{before, inside, afterExpr}))\n      rest\n    } else {\n      attach(t.leading, expr.pexp_loc, before)\n      walkExpr(expr, t, inside)\n      after\n    }\n    let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, after)\n    attach(t.trailing, expr.pexp_loc, afterExpr)\n    walkList(~getLoc=n => {\n      ...n.Parsetree.pc_lhs.ppat_loc,\n      loc_end: n.pc_rhs.pexp_loc.loc_end,\n    }, ~walkNode=walkCase, cases, t, rest)\n  /* unary expression: todo use parsetreeviewer */\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident(\"~+\" | \"~+.\" | \"~-\" | \"~-.\" | \"not\" | \"!\")})},\n      list{(Nolabel, argExpr)},\n    ) =>\n    let (before, inside, after) = partitionByLoc(comments, argExpr.pexp_loc)\n    attach(t.leading, argExpr.pexp_loc, before)\n    walkExpr(argExpr, t, inside)\n    attach(t.trailing, argExpr.pexp_loc, after)\n  /* binary expression */\n  | Pexp_apply(\n      {\n        pexp_desc: Pexp_ident({\n          txt: Longident.Lident(\n            \":=\"\n            | \"||\"\n            | \"&&\"\n            | \"=\"\n            | \"==\"\n            | \"<\"\n            | \">\"\n            | \"!=\"\n            | \"!==\"\n            | \"<=\"\n            | \">=\"\n            | \"|>\"\n            | \"+\"\n            | \"+.\"\n            | \"-\"\n            | \"-.\"\n            | \"++\"\n            | \"^\"\n            | \"*\"\n            | \"*.\"\n            | \"/\"\n            | \"/.\"\n            | \"**\"\n            | \"|.\"\n            | \"<>\",\n          ),\n        }),\n      },\n      list{(Nolabel, operand1), (Nolabel, 
operand2)},\n    ) =>\n    let (before, inside, after) = partitionByLoc(comments, operand1.pexp_loc)\n    attach(t.leading, operand1.pexp_loc, before)\n    walkExpr(operand1, t, inside)\n    let (afterOperand1, rest) = partitionAdjacentTrailing(operand1.pexp_loc, after)\n    attach(t.trailing, operand1.pexp_loc, afterOperand1)\n    let (before, inside, after) = partitionByLoc(rest, operand2.pexp_loc)\n    attach(t.leading, operand2.pexp_loc, before)\n    walkExpr(operand2, t, inside) /* (List.concat [inside; after]); */\n    attach(t.trailing, operand2.pexp_loc, after)\n  | Pexp_apply(callExpr, arguments) =>\n    let (before, inside, after) = partitionByLoc(comments, callExpr.pexp_loc)\n    let after = if isBlockExpr(callExpr) {\n      let (afterExpr, rest) = partitionAdjacentTrailing(callExpr.pexp_loc, after)\n      walkExpr(callExpr, t, List.concat(list{before, inside, afterExpr}))\n      rest\n    } else {\n      attach(t.leading, callExpr.pexp_loc, before)\n      walkExpr(callExpr, t, inside)\n      after\n    }\n    let (afterExpr, rest) = partitionAdjacentTrailing(callExpr.pexp_loc, after)\n    attach(t.trailing, callExpr.pexp_loc, afterExpr)\n    walkList(~getLoc=((_argLabel, expr)) =>\n      switch expr.Parsetree.pexp_attributes {\n      | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ..._attrs} => {\n          ...loc,\n          loc_end: expr.pexp_loc.loc_end,\n        }\n      | _ => expr.pexp_loc\n      }\n    , ~walkNode=walkExprArgument, arguments, t, rest)\n  | Pexp_fun(_, _, _, _) | Pexp_newtype(_) =>\n    let (_, parameters, returnExpr) = funExpr(expr)\n    let comments = visitListButContinueWithRemainingComments(\n      ~newlineDelimited=false,\n      ~walkNode=walkExprPararameter,\n      ~getLoc=((_attrs, _argLbl, exprOpt, pattern)) => {\n        open Parsetree\n        let startPos = switch pattern.ppat_attributes {\n        | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ..._attrs} => loc.loc_start\n        | _ => 
pattern.ppat_loc.loc_start\n        }\n\n        switch exprOpt {\n        | None => {...pattern.ppat_loc, loc_start: startPos}\n        | Some(expr) => {\n            ...pattern.ppat_loc,\n            loc_start: startPos,\n            loc_end: expr.pexp_loc.loc_end,\n          }\n        }\n      },\n      parameters,\n      t,\n      comments,\n    )\n\n    switch returnExpr.pexp_desc {\n    | Pexp_constraint(expr, typ)\n      if expr.pexp_loc.loc_start.pos_cnum >= typ.ptyp_loc.loc_end.pos_cnum =>\n      let (leading, inside, trailing) = partitionByLoc(comments, typ.ptyp_loc)\n      attach(t.leading, typ.ptyp_loc, leading)\n      walkTypExpr(typ, t, inside)\n      let (afterTyp, comments) = partitionAdjacentTrailing(typ.ptyp_loc, trailing)\n      attach(t.trailing, typ.ptyp_loc, afterTyp)\n      if isBlockExpr(expr) {\n        walkExpr(expr, t, comments)\n      } else {\n        let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)\n        attach(t.leading, expr.pexp_loc, leading)\n        walkExpr(expr, t, inside)\n        attach(t.trailing, expr.pexp_loc, trailing)\n      }\n    | _ =>\n      if isBlockExpr(returnExpr) {\n        walkExpr(returnExpr, t, comments)\n      } else {\n        let (leading, inside, trailing) = partitionByLoc(comments, returnExpr.pexp_loc)\n        attach(t.leading, returnExpr.pexp_loc, leading)\n        walkExpr(returnExpr, t, inside)\n        attach(t.trailing, returnExpr.pexp_loc, trailing)\n      }\n    }\n  | _ => ()\n  }\n}\n\nand walkExprPararameter = ((_attrs, _argLbl, exprOpt, pattern), t, comments) => {\n  let (leading, inside, trailing) = partitionByLoc(comments, pattern.ppat_loc)\n  attach(t.leading, pattern.ppat_loc, leading)\n  walkPattern(pattern, t, inside)\n  switch exprOpt {\n  | Some(expr) =>\n    let (_afterPat, rest) = partitionAdjacentTrailing(pattern.ppat_loc, trailing)\n    attach(t.trailing, pattern.ppat_loc, trailing)\n    if isBlockExpr(expr) {\n      walkExpr(expr, t, rest)\n    } else 
{\n      let (leading, inside, trailing) = partitionByLoc(rest, expr.pexp_loc)\n      attach(t.leading, expr.pexp_loc, leading)\n      walkExpr(expr, t, inside)\n      attach(t.trailing, expr.pexp_loc, trailing)\n    }\n  | None => attach(t.trailing, pattern.ppat_loc, trailing)\n  }\n}\n\nand walkExprArgument = ((_argLabel, expr), t, comments) =>\n  switch expr.Parsetree.pexp_attributes {\n  | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ..._attrs} =>\n    let (leading, trailing) = partitionLeadingTrailing(comments, loc)\n    attach(t.leading, loc, leading)\n    let (afterLabel, rest) = partitionAdjacentTrailing(loc, trailing)\n    attach(t.trailing, loc, afterLabel)\n    let (before, inside, after) = partitionByLoc(rest, expr.pexp_loc)\n    attach(t.leading, expr.pexp_loc, before)\n    walkExpr(expr, t, inside)\n    attach(t.trailing, expr.pexp_loc, after)\n  | _ =>\n    let (before, inside, after) = partitionByLoc(comments, expr.pexp_loc)\n    attach(t.leading, expr.pexp_loc, before)\n    walkExpr(expr, t, inside)\n    attach(t.trailing, expr.pexp_loc, after)\n  }\n\nand walkCase = (case, t, comments) => {\n  let (before, inside, after) = partitionByLoc(comments, case.pc_lhs.ppat_loc)\n  /* cases don't have a location on their own, leading comments should go\n   * after the bar on the pattern */\n  walkPattern(case.pc_lhs, t, List.concat(list{before, inside}))\n  let (afterPat, rest) = partitionAdjacentTrailing(case.pc_lhs.ppat_loc, after)\n  attach(t.trailing, case.pc_lhs.ppat_loc, afterPat)\n  let comments = switch case.pc_guard {\n  | Some(expr) =>\n    let (before, inside, after) = partitionByLoc(rest, expr.pexp_loc)\n    let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, after)\n    if isBlockExpr(expr) {\n      walkExpr(expr, t, List.concat(list{before, inside, afterExpr}))\n    } else {\n      attach(t.leading, expr.pexp_loc, before)\n      walkExpr(expr, t, inside)\n      attach(t.trailing, expr.pexp_loc, afterExpr)\n    }\n    rest\n  
| None => rest\n  }\n\n  if isBlockExpr(case.pc_rhs) {\n    walkExpr(case.pc_rhs, t, comments)\n  } else {\n    let (before, inside, after) = partitionByLoc(comments, case.pc_rhs.pexp_loc)\n    attach(t.leading, case.pc_rhs.pexp_loc, before)\n    walkExpr(case.pc_rhs, t, inside)\n    attach(t.trailing, case.pc_rhs.pexp_loc, after)\n  }\n}\n\nand walkExprRecordRow = ((longident, expr), t, comments) => {\n  let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc)\n\n  attach(t.leading, longident.loc, beforeLongident)\n  let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident)\n  attach(t.trailing, longident.loc, afterLongident)\n  let (leading, inside, trailing) = partitionByLoc(rest, expr.pexp_loc)\n  attach(t.leading, expr.pexp_loc, leading)\n  walkExpr(expr, t, inside)\n  attach(t.trailing, expr.pexp_loc, trailing)\n}\n\nand walkExtConstr = (extConstr, t, comments) => {\n  let (leading, trailing) = partitionLeadingTrailing(comments, extConstr.pext_name.loc)\n  attach(t.leading, extConstr.pext_name.loc, leading)\n  let (afterName, rest) = partitionAdjacentTrailing(extConstr.pext_name.loc, trailing)\n  attach(t.trailing, extConstr.pext_name.loc, afterName)\n  walkExtensionConstructorKind(extConstr.pext_kind, t, rest)\n}\n\nand walkExtensionConstructorKind = (kind, t, comments) =>\n  switch kind {\n  | Pext_rebind(longident) =>\n    let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)\n    attach(t.leading, longident.loc, leading)\n    attach(t.trailing, longident.loc, trailing)\n  | Pext_decl(constructorArguments, maybeTypExpr) =>\n    let rest = walkConstructorArguments(constructorArguments, t, comments)\n    switch maybeTypExpr {\n    | None => ()\n    | Some(typexpr) =>\n      let (before, inside, after) = partitionByLoc(rest, typexpr.ptyp_loc)\n      attach(t.leading, typexpr.ptyp_loc, before)\n      walkTypExpr(typexpr, t, inside)\n      attach(t.trailing, typexpr.ptyp_loc, 
after)\n    }\n  }\n\nand walkModExpr = (modExpr, t, comments) =>\n  switch modExpr.pmod_desc {\n  | Pmod_ident(longident) =>\n    let (before, after) = partitionLeadingTrailing(comments, longident.loc)\n    attach(t.leading, longident.loc, before)\n    attach(t.trailing, longident.loc, after)\n  | Pmod_structure(list{}) => attach(t.inside, modExpr.pmod_loc, comments)\n  | Pmod_structure(structure) => walkStructure(structure, t, comments)\n  | Pmod_extension(extension) => walkExtension(extension, t, comments)\n  | Pmod_unpack(expr) =>\n    let (before, inside, after) = partitionByLoc(comments, expr.pexp_loc)\n    attach(t.leading, expr.pexp_loc, before)\n    walkExpr(expr, t, inside)\n    attach(t.trailing, expr.pexp_loc, after)\n  | Pmod_constraint(modexpr, modtype) =>\n    if modtype.pmty_loc.loc_start >= modexpr.pmod_loc.loc_end {\n      let (before, inside, after) = partitionByLoc(comments, modexpr.pmod_loc)\n      attach(t.leading, modexpr.pmod_loc, before)\n      walkModExpr(modexpr, t, inside)\n      let (after, rest) = partitionAdjacentTrailing(modexpr.pmod_loc, after)\n      attach(t.trailing, modexpr.pmod_loc, after)\n      let (before, inside, after) = partitionByLoc(rest, modtype.pmty_loc)\n      attach(t.leading, modtype.pmty_loc, before)\n      walkModType(modtype, t, inside)\n      attach(t.trailing, modtype.pmty_loc, after)\n    } else {\n      let (before, inside, after) = partitionByLoc(comments, modtype.pmty_loc)\n      attach(t.leading, modtype.pmty_loc, before)\n      walkModType(modtype, t, inside)\n      let (after, rest) = partitionAdjacentTrailing(modtype.pmty_loc, after)\n      attach(t.trailing, modtype.pmty_loc, after)\n      let (before, inside, after) = partitionByLoc(rest, modexpr.pmod_loc)\n      attach(t.leading, modexpr.pmod_loc, before)\n      walkModExpr(modexpr, t, inside)\n      attach(t.trailing, modexpr.pmod_loc, after)\n    }\n  | Pmod_apply(_callModExpr, _argModExpr) =>\n    let modExprs = modExprApply(modExpr)\n    
walkList(~getLoc=n => n.Parsetree.pmod_loc, ~walkNode=walkModExpr, modExprs, t, comments)\n  | Pmod_functor(_) =>\n    let (parameters, returnModExpr) = modExprFunctor(modExpr)\n    let comments = visitListButContinueWithRemainingComments(~getLoc=((_, lbl, modTypeOption)) =>\n      switch modTypeOption {\n      | None => lbl.Asttypes.loc\n      | Some(modType) => {...lbl.loc, loc_end: modType.Parsetree.pmty_loc.loc_end}\n      }\n    , ~walkNode=walkModExprParameter, ~newlineDelimited=false, parameters, t, comments)\n\n    switch returnModExpr.pmod_desc {\n    | Pmod_constraint(modExpr, modType)\n      if modType.pmty_loc.loc_end.pos_cnum <= modExpr.pmod_loc.loc_start.pos_cnum =>\n      let (before, inside, after) = partitionByLoc(comments, modType.pmty_loc)\n      attach(t.leading, modType.pmty_loc, before)\n      walkModType(modType, t, inside)\n      let (after, rest) = partitionAdjacentTrailing(modType.pmty_loc, after)\n      attach(t.trailing, modType.pmty_loc, after)\n      let (before, inside, after) = partitionByLoc(rest, modExpr.pmod_loc)\n      attach(t.leading, modExpr.pmod_loc, before)\n      walkModExpr(modExpr, t, inside)\n      attach(t.trailing, modExpr.pmod_loc, after)\n    | _ =>\n      let (before, inside, after) = partitionByLoc(comments, returnModExpr.pmod_loc)\n      attach(t.leading, returnModExpr.pmod_loc, before)\n      walkModExpr(returnModExpr, t, inside)\n      attach(t.trailing, returnModExpr.pmod_loc, after)\n    }\n  }\n\nand walkModExprParameter = (parameter, t, comments) => {\n  let (_attrs, lbl, modTypeOption) = parameter\n  let (leading, trailing) = partitionLeadingTrailing(comments, lbl.loc)\n  attach(t.leading, lbl.loc, leading)\n  switch modTypeOption {\n  | None => attach(t.trailing, lbl.loc, trailing)\n  | Some(modType) =>\n    let (afterLbl, rest) = partitionAdjacentTrailing(lbl.loc, trailing)\n    attach(t.trailing, lbl.loc, afterLbl)\n    let (before, inside, after) = partitionByLoc(rest, modType.pmty_loc)\n    
attach(t.leading, modType.pmty_loc, before)\n    walkModType(modType, t, inside)\n    attach(t.trailing, modType.pmty_loc, after)\n  }\n}\n\nand walkModType = (modType, t, comments) =>\n  switch modType.pmty_desc {\n  | Pmty_ident(longident) | Pmty_alias(longident) =>\n    let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)\n    attach(t.leading, longident.loc, leading)\n    attach(t.trailing, longident.loc, trailing)\n  | Pmty_signature(list{}) => attach(t.inside, modType.pmty_loc, comments)\n  | Pmty_signature(signature) => walkSignature(signature, t, comments)\n  | Pmty_extension(extension) => walkExtension(extension, t, comments)\n  | Pmty_typeof(modExpr) =>\n    let (before, inside, after) = partitionByLoc(comments, modExpr.pmod_loc)\n    attach(t.leading, modExpr.pmod_loc, before)\n    walkModExpr(modExpr, t, inside)\n    attach(t.trailing, modExpr.pmod_loc, after)\n  | Pmty_with(modType, _withConstraints) =>\n    let (before, inside, after) = partitionByLoc(comments, modType.pmty_loc)\n    attach(t.leading, modType.pmty_loc, before)\n    walkModType(modType, t, inside)\n    attach(t.trailing, modType.pmty_loc, after)\n  /* TODO: withConstraints */\n  | Pmty_functor(_) =>\n    let (parameters, returnModType) = functorType(modType)\n    let comments = visitListButContinueWithRemainingComments(~getLoc=((_, lbl, modTypeOption)) =>\n      switch modTypeOption {\n      | None => lbl.Asttypes.loc\n      | Some(modType) =>\n        if lbl.txt == \"_\" {\n          modType.Parsetree.pmty_loc\n        } else {\n          {...lbl.loc, loc_end: modType.Parsetree.pmty_loc.loc_end}\n        }\n      }\n    , ~walkNode=walkModTypeParameter, ~newlineDelimited=false, parameters, t, comments)\n\n    let (before, inside, after) = partitionByLoc(comments, returnModType.pmty_loc)\n    attach(t.leading, returnModType.pmty_loc, before)\n    walkModType(returnModType, t, inside)\n    attach(t.trailing, returnModType.pmty_loc, after)\n  }\n\nand 
walkModTypeParameter = ((_, lbl, modTypeOption), t, comments) => {\n  let (leading, trailing) = partitionLeadingTrailing(comments, lbl.loc)\n  attach(t.leading, lbl.loc, leading)\n  switch modTypeOption {\n  | None => attach(t.trailing, lbl.loc, trailing)\n  | Some(modType) =>\n    let (afterLbl, rest) = partitionAdjacentTrailing(lbl.loc, trailing)\n    attach(t.trailing, lbl.loc, afterLbl)\n    let (before, inside, after) = partitionByLoc(rest, modType.pmty_loc)\n    attach(t.leading, modType.pmty_loc, before)\n    walkModType(modType, t, inside)\n    attach(t.trailing, modType.pmty_loc, after)\n  }\n}\n\nand walkPattern = (pat, t, comments) => {\n  open Location\n  switch pat.Parsetree.ppat_desc {\n  | _ if comments == list{} => ()\n  | Ppat_alias(pat, alias) =>\n    let (leading, inside, trailing) = partitionByLoc(comments, pat.ppat_loc)\n    attach(t.leading, pat.ppat_loc, leading)\n    walkPattern(pat, t, inside)\n    let (afterPat, rest) = partitionAdjacentTrailing(pat.ppat_loc, trailing)\n    attach(t.leading, pat.ppat_loc, leading)\n    attach(t.trailing, pat.ppat_loc, afterPat)\n    let (beforeAlias, afterAlias) = partitionLeadingTrailing(rest, alias.loc)\n    attach(t.leading, alias.loc, beforeAlias)\n    attach(t.trailing, alias.loc, afterAlias)\n  | Ppat_tuple(list{})\n  | Ppat_array(list{})\n  | Ppat_construct({txt: Longident.Lident(\"()\")}, _)\n  | Ppat_construct({txt: Longident.Lident(\"[]\")}, _) =>\n    attach(t.inside, pat.ppat_loc, comments)\n  | Ppat_array(patterns) =>\n    walkList(~getLoc=n => n.Parsetree.ppat_loc, ~walkNode=walkPattern, patterns, t, comments)\n  | Ppat_tuple(patterns) =>\n    walkList(~getLoc=n => n.Parsetree.ppat_loc, ~walkNode=walkPattern, patterns, t, comments)\n  | Ppat_construct({txt: Longident.Lident(\"::\")}, _) =>\n    walkList(\n      ~getLoc=n => n.Parsetree.ppat_loc,\n      ~walkNode=walkPattern,\n      collectListPatterns(list{}, pat),\n      t,\n      comments,\n    )\n  | Ppat_construct(constr, None) =>\n    
let (beforeConstr, afterConstr) = partitionLeadingTrailing(comments, constr.loc)\n\n    attach(t.leading, constr.loc, beforeConstr)\n    attach(t.trailing, constr.loc, afterConstr)\n  | Ppat_construct(constr, Some(pat)) =>\n    let (leading, trailing) = partitionLeadingTrailing(comments, constr.loc)\n    attach(t.leading, constr.loc, leading)\n    let (afterConstructor, rest) = partitionAdjacentTrailing(constr.loc, trailing)\n\n    attach(t.trailing, constr.loc, afterConstructor)\n    let (leading, inside, trailing) = partitionByLoc(rest, pat.ppat_loc)\n    attach(t.leading, pat.ppat_loc, leading)\n    walkPattern(pat, t, inside)\n    attach(t.trailing, pat.ppat_loc, trailing)\n  | Ppat_variant(_label, None) => ()\n  | Ppat_variant(_label, Some(pat)) => walkPattern(pat, t, comments)\n  | Ppat_type(_) => ()\n  | Ppat_record(recordRows, _) =>\n    walkList(~getLoc=((longidentLoc, pattern): (Asttypes.loc<Longident.t>, Parsetree.pattern)) => {\n      ...longidentLoc.loc,\n      loc_end: pattern.Parsetree.ppat_loc.loc_end,\n    }, ~walkNode=walkPatternRecordRow, recordRows, t, comments)\n  | Ppat_or(_) =>\n    walkList(\n      ~getLoc=pattern => pattern.Parsetree.ppat_loc,\n      ~walkNode=pattern => walkPattern(pattern),\n      Res_parsetree_viewer.collectOrPatternChain(pat),\n      t,\n      comments,\n    )\n  | Ppat_constraint(pattern, typ) =>\n    let (beforePattern, insidePattern, afterPattern) = partitionByLoc(comments, pattern.ppat_loc)\n\n    attach(t.leading, pattern.ppat_loc, beforePattern)\n    walkPattern(pattern, t, insidePattern)\n    let (afterPattern, rest) = partitionAdjacentTrailing(pattern.ppat_loc, afterPattern)\n\n    attach(t.trailing, pattern.ppat_loc, afterPattern)\n    let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typ.ptyp_loc)\n\n    attach(t.leading, typ.ptyp_loc, beforeTyp)\n    walkTypExpr(typ, t, insideTyp)\n    attach(t.trailing, typ.ptyp_loc, afterTyp)\n  | Ppat_lazy(pattern) | Ppat_exception(pattern) =>\n    let (leading, 
inside, trailing) = partitionByLoc(comments, pattern.ppat_loc)\n    attach(t.leading, pattern.ppat_loc, leading)\n    walkPattern(pattern, t, inside)\n    attach(t.trailing, pattern.ppat_loc, trailing)\n  | Ppat_unpack(stringLoc) =>\n    let (leading, trailing) = partitionLeadingTrailing(comments, stringLoc.loc)\n    attach(t.leading, stringLoc.loc, leading)\n    attach(t.trailing, stringLoc.loc, trailing)\n  | Ppat_extension(extension) => walkExtension(extension, t, comments)\n  | _ => ()\n  }\n}\n\n/* name: firstName */\nand walkPatternRecordRow = (row, t, comments) =>\n  switch row {\n  /* punned {x} */\n  | (\n      {Location.txt: Longident.Lident(ident), loc: longidentLoc},\n      {Parsetree.ppat_desc: Ppat_var({txt, _})},\n    ) if ident == txt =>\n    let (beforeLbl, afterLbl) = partitionLeadingTrailing(comments, longidentLoc)\n\n    attach(t.leading, longidentLoc, beforeLbl)\n    attach(t.trailing, longidentLoc, afterLbl)\n  | (longident, pattern) =>\n    let (beforeLbl, afterLbl) = partitionLeadingTrailing(comments, longident.loc)\n\n    attach(t.leading, longident.loc, beforeLbl)\n    let (afterLbl, rest) = partitionAdjacentTrailing(longident.loc, afterLbl)\n    attach(t.trailing, longident.loc, afterLbl)\n    let (leading, inside, trailing) = partitionByLoc(rest, pattern.ppat_loc)\n    attach(t.leading, pattern.ppat_loc, leading)\n    walkPattern(pattern, t, inside)\n    attach(t.trailing, pattern.ppat_loc, trailing)\n  }\n\nand walkTypExpr = (typ, t, comments) =>\n  switch typ.Parsetree.ptyp_desc {\n  | _ if comments == list{} => ()\n  | Ptyp_tuple(typexprs) =>\n    walkList(~getLoc=n => n.Parsetree.ptyp_loc, ~walkNode=walkTypExpr, typexprs, t, comments)\n  | Ptyp_extension(extension) => walkExtension(extension, t, comments)\n  | Ptyp_package(packageType) => walkPackageType(packageType, t, comments)\n  | Ptyp_alias(typexpr, _alias) =>\n    let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc)\n    attach(t.leading, 
typexpr.ptyp_loc, beforeTyp)\n    walkTypExpr(typexpr, t, insideTyp)\n    attach(t.trailing, typexpr.ptyp_loc, afterTyp)\n  | Ptyp_poly(strings, typexpr) =>\n    let comments = visitListButContinueWithRemainingComments(\n      ~getLoc=n => n.Asttypes.loc,\n      ~walkNode=(longident, t, comments) => {\n        let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc)\n        attach(t.leading, longident.loc, beforeLongident)\n        attach(t.trailing, longident.loc, afterLongident)\n      },\n      ~newlineDelimited=false,\n      strings,\n      t,\n      comments,\n    )\n\n    let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc)\n    attach(t.leading, typexpr.ptyp_loc, beforeTyp)\n    walkTypExpr(typexpr, t, insideTyp)\n    attach(t.trailing, typexpr.ptyp_loc, afterTyp)\n  | Ptyp_constr(longident, typexprs) =>\n    let (beforeLongident, _afterLongident) = partitionLeadingTrailing(comments, longident.loc)\n    let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, comments)\n    attach(t.leading, longident.loc, beforeLongident)\n    attach(t.trailing, longident.loc, afterLongident)\n    walkList(~getLoc=n => n.Parsetree.ptyp_loc, ~walkNode=walkTypExpr, typexprs, t, rest)\n  | Ptyp_arrow(_) =>\n    let (_, parameters, typexpr) = arrowType(typ)\n    let comments = walkTypeParameters(parameters, t, comments)\n    let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc)\n    attach(t.leading, typexpr.ptyp_loc, beforeTyp)\n    walkTypExpr(typexpr, t, insideTyp)\n    attach(t.trailing, typexpr.ptyp_loc, afterTyp)\n  | Ptyp_object(fields, _) => walkTypObjectFields(fields, t, comments)\n  | _ => ()\n  }\n\nand walkTypObjectFields = (fields, t, comments) => walkList(~getLoc=field =>\n    switch field {\n    | Parsetree.Otag(lbl, _, typ) => {...lbl.loc, loc_end: typ.ptyp_loc.loc_end}\n    | _ => Location.none\n    }\n  , ~walkNode=walkTypObjectField, fields, t, 
comments)\n\nand walkTypObjectField = (field, t, comments) =>\n  switch field {\n  | Otag(lbl, _, typexpr) =>\n    let (beforeLbl, afterLbl) = partitionLeadingTrailing(comments, lbl.loc)\n    attach(t.leading, lbl.loc, beforeLbl)\n    let (afterLbl, rest) = partitionAdjacentTrailing(lbl.loc, afterLbl)\n    attach(t.trailing, lbl.loc, afterLbl)\n    let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc)\n    attach(t.leading, typexpr.ptyp_loc, beforeTyp)\n    walkTypExpr(typexpr, t, insideTyp)\n    attach(t.trailing, typexpr.ptyp_loc, afterTyp)\n  | _ => ()\n  }\n\nand walkTypeParameters = (typeParameters, t, comments) =>\n  visitListButContinueWithRemainingComments(~getLoc=((_, _, typexpr)) =>\n    switch typexpr.Parsetree.ptyp_attributes {\n    | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ..._attrs} => {\n        ...loc,\n        loc_end: typexpr.ptyp_loc.loc_end,\n      }\n    | _ => typexpr.ptyp_loc\n    }\n  , ~walkNode=walkTypeParameter, ~newlineDelimited=false, typeParameters, t, comments)\n\nand walkTypeParameter = ((_attrs, _lbl, typexpr), t, comments) => {\n  let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc)\n  attach(t.leading, typexpr.ptyp_loc, beforeTyp)\n  walkTypExpr(typexpr, t, insideTyp)\n  attach(t.trailing, typexpr.ptyp_loc, afterTyp)\n}\n\nand walkPackageType = (packageType, t, comments) => {\n  let (longident, packageConstraints) = packageType\n  let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc)\n  attach(t.leading, longident.loc, beforeLongident)\n  let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident)\n  attach(t.trailing, longident.loc, afterLongident)\n  walkPackageConstraints(packageConstraints, t, rest)\n}\n\nand walkPackageConstraints = (packageConstraints, t, comments) =>\n  walkList(~getLoc=((longident, typexpr)) => {\n    ...longident.Asttypes.loc,\n    loc_end: typexpr.Parsetree.ptyp_loc.loc_end,\n  }, 
~walkNode=walkPackageConstraint, packageConstraints, t, comments)\n\nand walkPackageConstraint = (packageConstraint, t, comments) => {\n  let (longident, typexpr) = packageConstraint\n  let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc)\n  attach(t.leading, longident.loc, beforeLongident)\n  let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident)\n  attach(t.trailing, longident.loc, afterLongident)\n  let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc)\n  attach(t.leading, typexpr.ptyp_loc, beforeTyp)\n  walkTypExpr(typexpr, t, insideTyp)\n  attach(t.trailing, typexpr.ptyp_loc, afterTyp)\n}\n\nand walkExtension = (extension, t, comments) => {\n  let (id, payload) = extension\n  let (beforeId, afterId) = partitionLeadingTrailing(comments, id.loc)\n  attach(t.leading, id.loc, beforeId)\n  let (afterId, rest) = partitionAdjacentTrailing(id.loc, afterId)\n  attach(t.trailing, id.loc, afterId)\n  walkPayload(payload, t, rest)\n}\n\nand walkAttribute = ((id, payload), t, comments) => {\n  let (beforeId, afterId) = partitionLeadingTrailing(comments, id.loc)\n  attach(t.leading, id.loc, beforeId)\n  let (afterId, rest) = partitionAdjacentTrailing(id.loc, afterId)\n  attach(t.trailing, id.loc, afterId)\n  walkPayload(payload, t, rest)\n}\n\nand walkPayload = (payload, t, comments) =>\n  switch payload {\n  | PStr(s) => walkStructure(s, t, comments)\n  | _ => ()\n  }\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_core.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as $$Buffer from \"rescript/lib/es6/buffer.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Res_doc from \"./res_doc.js\";\nimport * as Caml_obj from \"rescript/lib/es6/caml_obj.js\";\nimport * as $$Location from \"./location.js\";\nimport * as Res_utf8 from \"./res_utf8.js\";\nimport * as Longident from \"./longident.js\";\nimport * as Res_token from \"./res_token.js\";\nimport * as Ast_helper from \"./ast_helper.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Res_js_ffi from \"./res_js_ffi.js\";\nimport * as Res_parser from \"./res_parser.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as Caml_string from \"rescript/lib/es6/caml_string.js\";\nimport * as Res_grammar from \"./res_grammar.js\";\nimport * as Res_printer from \"./res_printer.js\";\nimport * as Res_scanner from \"./res_scanner.js\";\nimport * as Res_diagnostics from \"./res_diagnostics.js\";\nimport * as Res_comments_table from \"./res_comments_table.js\";\n\nfunction mkLoc(startLoc, endLoc) {\n  return {\n          loc_start: startLoc,\n          loc_end: endLoc,\n          loc_ghost: false\n        };\n}\n\nfunction defaultExpr(param) {\n  var id = $$Location.mknoloc(\"rescript.exprhole\");\n  return Ast_helper.Exp.mk(undefined, undefined, {\n              TAG: /* Pexp_extension */34,\n              _0: [\n                id,\n                {\n                  TAG: /* PStr */0,\n                  _0: /* [] */0\n                }\n              ]\n            });\n}\n\nfunction defaultType(param) {\n  var id = $$Location.mknoloc(\"rescript.typehole\");\n  return Ast_helper.Typ.extension(undefined, undefined, [\n              id,\n              {\n                TAG: /* PStr */0,\n                _0: /* [] */0\n        
      }\n            ]);\n}\n\nfunction defaultPattern(param) {\n  var id = $$Location.mknoloc(\"rescript.patternhole\");\n  return Ast_helper.Pat.extension(undefined, undefined, [\n              id,\n              {\n                TAG: /* PStr */0,\n                _0: /* [] */0\n              }\n            ]);\n}\n\nfunction defaultModuleExpr(param) {\n  return Ast_helper.Mod.structure(undefined, undefined, /* [] */0);\n}\n\nfunction defaultModuleType(param) {\n  return Ast_helper.Mty.signature(undefined, undefined, /* [] */0);\n}\n\nvar id = $$Location.mknoloc(\"rescript.sigitemhole\");\n\nvar defaultSignatureItem = Ast_helper.Sig.extension(undefined, undefined, [\n      id,\n      {\n        TAG: /* PStr */0,\n        _0: /* [] */0\n      }\n    ]);\n\nfunction recoverEqualGreater(p) {\n  Res_parser.expect(undefined, /* EqualGreater */57, p);\n  var match = p.token;\n  if (match === 58) {\n    return Res_parser.next(undefined, p);\n  }\n  \n}\n\nfunction shouldAbortListParse(p) {\n  var _breadcrumbs = p.breadcrumbs;\n  while(true) {\n    var breadcrumbs = _breadcrumbs;\n    if (!breadcrumbs) {\n      return false;\n    }\n    if (Res_grammar.isPartOfList(breadcrumbs.hd[0], p.token)) {\n      return true;\n    }\n    _breadcrumbs = breadcrumbs.tl;\n    continue ;\n  };\n}\n\nvar Recover = {\n  defaultExpr: defaultExpr,\n  defaultType: defaultType,\n  defaultPattern: defaultPattern,\n  defaultModuleExpr: defaultModuleExpr,\n  defaultModuleType: defaultModuleType,\n  defaultSignatureItem: defaultSignatureItem,\n  recoverEqualGreater: recoverEqualGreater,\n  shouldAbortListParse: shouldAbortListParse\n};\n\nvar listPatternSpread = \"List pattern matches only supports one `...` spread, at the end.\\nExplanation: a list spread at the tail is efficient, but a spread in the middle would create new list[s]; out of performance concern, our pattern matching currently guarantees to never create new intermediate data.\";\n\nvar recordPatternSpread = \"Record's `...` 
spread is not supported in pattern matches.\\nExplanation: you can't collect a subset of a record's field into its own record, since a record needs an explicit declaration and that subset wouldn't have one.\\nSolution: you need to pull out each field you want explicitly.\";\n\nvar arrayPatternSpread = \"Array's `...` spread is not supported in pattern matches.\\nExplanation: such spread would create a subarray; out of performance concern, our pattern matching currently guarantees to never create new intermediate data.\\nSolution: if it's to validate the first few elements, use a `when` clause + Array size check + `get` checks on the current pattern. If it's to obtain a subarray, use `Array.sub` or `Belt.Array.slice`.\";\n\nvar arrayExprSpread = \"Arrays can't use the `...` spread currently. Please use `concat` or other Array helpers.\";\n\nvar recordExprSpread = \"Records can only have one `...` spread, at the beginning.\\nExplanation: since records have a known, fixed shape, a spread like `{a, ...b}` wouldn't make sense, as `b` would override every field of `a` anyway.\";\n\nvar listExprSpread = \"Lists can only have one `...` spread, and at the end.\\nExplanation: lists are singly-linked list, where a node contains a value and points to the next node. `list[a, ...bc]` efficiently creates a new item and links `bc` as its next nodes. `[...bc, a]` would be expensive, as it'd need to traverse `bc` and prepend each item to `a` one by one. We therefore disallow such syntax sugar.\\nSolution: directly use `concat`.\";\n\nvar variantIdent = \"A polymorphic variant (e.g. #id) must start with an alphabetical letter or be a number (e.g. 
#742)\";\n\nfunction experimentalIfLet(expr) {\n  var switchExpr_pexp_desc = expr.pexp_desc;\n  var switchExpr_pexp_loc = expr.pexp_loc;\n  var switchExpr = {\n    pexp_desc: switchExpr_pexp_desc,\n    pexp_loc: switchExpr_pexp_loc,\n    pexp_attributes: /* [] */0\n  };\n  return Res_doc.toString(80, Res_doc.concat({\n                  hd: Res_doc.text(\"If-let is currently highly experimental.\"),\n                  tl: {\n                    hd: Res_doc.line,\n                    tl: {\n                      hd: Res_doc.text(\"Use a regular `switch` with pattern matching instead:\"),\n                      tl: {\n                        hd: Res_doc.concat({\n                              hd: Res_doc.hardLine,\n                              tl: {\n                                hd: Res_doc.hardLine,\n                                tl: {\n                                  hd: Res_printer.printExpression(switchExpr, Res_comments_table.empty),\n                                  tl: /* [] */0\n                                }\n                              }\n                            }),\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                }));\n}\n\nvar typeParam = \"A type param consists of a singlequote followed by a name like `'a` or `'A`\";\n\nvar typeVar = \"A type variable consists of a singlequote followed by a name like `'a` or `'A`\";\n\nfunction attributeWithoutNode(attr) {\n  var attrName = attr[0].txt;\n  return \"Did you forget to attach `\" + (attrName + (\"` to an item?\\n  Standalone attributes start with `@@` like: `@@\" + (attrName + \"`\")));\n}\n\nfunction typeDeclarationNameLongident(longident) {\n  return \"A type declaration's name cannot contain a module access. 
Did you mean `\" + (Longident.last(longident) + \"`?\");\n}\n\nvar tupleSingleElement = \"A tuple needs at least two elements\";\n\nfunction missingTildeLabeledParameter(name) {\n  if (name === \"\") {\n    return \"A labeled parameter starts with a `~`.\";\n  } else {\n    return \"A labeled parameter starts with a `~`. Did you mean: `~\" + (name + \"`?\");\n  }\n}\n\nvar stringInterpolationInPattern = \"String interpolation is not supported in pattern matching.\";\n\nvar spreadInRecordDeclaration = \"A record type declaration doesn't support the ... spread. Only an object (with quoted field names) does.\";\n\nfunction objectQuotedFieldName(name) {\n  return \"An object type declaration needs quoted field names. Did you mean \\\"\" + (name + \"\\\"?\");\n}\n\nvar forbiddenInlineRecordDeclaration = \"An inline record type declaration is only allowed in a variant constructor's declaration\";\n\nvar sameTypeSpread = \"You're using a ... spread without extra fields. This is the same type.\";\n\nfunction polyVarIntWithSuffix(number) {\n  return \"A numeric polymorphic variant cannot be followed by a letter. 
Did you mean `#\" + (number + \"`?\");\n}\n\nvar ErrorMessages = {\n  listPatternSpread: listPatternSpread,\n  recordPatternSpread: recordPatternSpread,\n  arrayPatternSpread: arrayPatternSpread,\n  arrayExprSpread: arrayExprSpread,\n  recordExprSpread: recordExprSpread,\n  listExprSpread: listExprSpread,\n  variantIdent: variantIdent,\n  experimentalIfLet: experimentalIfLet,\n  typeParam: typeParam,\n  typeVar: typeVar,\n  attributeWithoutNode: attributeWithoutNode,\n  typeDeclarationNameLongident: typeDeclarationNameLongident,\n  tupleSingleElement: tupleSingleElement,\n  missingTildeLabeledParameter: missingTildeLabeledParameter,\n  stringInterpolationInPattern: stringInterpolationInPattern,\n  spreadInRecordDeclaration: spreadInRecordDeclaration,\n  objectQuotedFieldName: objectQuotedFieldName,\n  forbiddenInlineRecordDeclaration: forbiddenInlineRecordDeclaration,\n  sameTypeSpread: sameTypeSpread,\n  polyVarIntWithSuffix: polyVarIntWithSuffix\n};\n\nvar jsxAttr_0 = $$Location.mknoloc(\"JSX\");\n\nvar jsxAttr_1 = {\n  TAG: /* PStr */0,\n  _0: /* [] */0\n};\n\nvar jsxAttr = [\n  jsxAttr_0,\n  jsxAttr_1\n];\n\nvar uncurryAttr_0 = $$Location.mknoloc(\"bs\");\n\nvar uncurryAttr_1 = {\n  TAG: /* PStr */0,\n  _0: /* [] */0\n};\n\nvar uncurryAttr = [\n  uncurryAttr_0,\n  uncurryAttr_1\n];\n\nvar ternaryAttr_0 = $$Location.mknoloc(\"ns.ternary\");\n\nvar ternaryAttr_1 = {\n  TAG: /* PStr */0,\n  _0: /* [] */0\n};\n\nvar ternaryAttr = [\n  ternaryAttr_0,\n  ternaryAttr_1\n];\n\nvar ifLetAttr_0 = $$Location.mknoloc(\"ns.iflet\");\n\nvar ifLetAttr_1 = {\n  TAG: /* PStr */0,\n  _0: /* [] */0\n};\n\nvar ifLetAttr = [\n  ifLetAttr_0,\n  ifLetAttr_1\n];\n\nvar suppressFragileMatchWarningAttr_0 = $$Location.mknoloc(\"warning\");\n\nvar suppressFragileMatchWarningAttr_1 = {\n  TAG: /* PStr */0,\n  _0: {\n    hd: Ast_helper.Str.$$eval(undefined, undefined, Ast_helper.Exp.constant(undefined, undefined, {\n              TAG: /* Pconst_string */2,\n              _0: \"-4\",\n       
       _1: undefined\n            })),\n    tl: /* [] */0\n  }\n};\n\nvar suppressFragileMatchWarningAttr = [\n  suppressFragileMatchWarningAttr_0,\n  suppressFragileMatchWarningAttr_1\n];\n\nfunction makeBracesAttr(loc) {\n  return [\n          $$Location.mkloc(\"ns.braces\", loc),\n          {\n            TAG: /* PStr */0,\n            _0: /* [] */0\n          }\n        ];\n}\n\nvar templateLiteralAttr_0 = $$Location.mknoloc(\"res.template\");\n\nvar templateLiteralAttr_1 = {\n  TAG: /* PStr */0,\n  _0: /* [] */0\n};\n\nvar templateLiteralAttr = [\n  templateLiteralAttr_0,\n  templateLiteralAttr_1\n];\n\nfunction getClosingToken(x) {\n  if (typeof x === \"number\") {\n    if (x === 42) {\n      return /* GreaterThan */41;\n    }\n    if (x >= 23) {\n      if (x === 79) {\n        return /* Rbrace */23;\n      }\n      \n    } else if (x >= 18) {\n      switch (x) {\n        case /* Lparen */18 :\n            return /* Rparen */19;\n        case /* Lbracket */20 :\n            return /* Rbracket */21;\n        case /* Rparen */19 :\n        case /* Rbracket */21 :\n            break;\n        case /* Lbrace */22 :\n            return /* Rbrace */23;\n        \n      }\n    }\n    \n  }\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"res_core.res\",\n          207,\n          9\n        ],\n        Error: new Error()\n      };\n}\n\nfunction goToClosing(closingToken, state) {\n  while(true) {\n    var match = state.token;\n    var exit = 0;\n    if (typeof match === \"number\") {\n      if (match >= 43) {\n        exit = match !== 79 ? 
1 : 2;\n      } else if (match >= 18) {\n        switch (match) {\n          case /* Rparen */19 :\n              if (closingToken === 19) {\n                Res_parser.next(undefined, state);\n                return ;\n              } else {\n                return ;\n              }\n          case /* Rbracket */21 :\n              if (closingToken === 21) {\n                Res_parser.next(undefined, state);\n                return ;\n              } else {\n                return ;\n              }\n          case /* Rbrace */23 :\n              if (closingToken === 23) {\n                Res_parser.next(undefined, state);\n                return ;\n              } else {\n                return ;\n              }\n          case /* Eof */26 :\n              return ;\n          case /* Colon */24 :\n          case /* Comma */25 :\n          case /* Exception */27 :\n          case /* Backslash */28 :\n          case /* Forwardslash */29 :\n          case /* ForwardslashDot */30 :\n          case /* Asterisk */31 :\n          case /* AsteriskDot */32 :\n          case /* Exponentiation */33 :\n          case /* Minus */34 :\n          case /* MinusDot */35 :\n          case /* Plus */36 :\n          case /* PlusDot */37 :\n          case /* PlusPlus */38 :\n          case /* PlusEqual */39 :\n          case /* ColonGreaterThan */40 :\n              exit = 1;\n              break;\n          case /* GreaterThan */41 :\n              if (closingToken === 41) {\n                Res_parser.next(undefined, state);\n                return ;\n              }\n              exit = 1;\n              break;\n          case /* Lparen */18 :\n          case /* Lbracket */20 :\n          case /* Lbrace */22 :\n          case /* LessThan */42 :\n              exit = 2;\n              break;\n          \n        }\n      } else {\n        exit = 1;\n      }\n    } else {\n      exit = 1;\n    }\n    switch (exit) {\n      case 1 :\n          Res_parser.next(undefined, 
state);\n          continue ;\n      case 2 :\n          Res_parser.next(undefined, state);\n          goToClosing(getClosingToken(match), state);\n          continue ;\n      \n    }\n  };\n}\n\nfunction isEs6ArrowExpression(inTernary, p) {\n  return Res_parser.lookahead(p, (function (state) {\n                var match = state.token;\n                if (typeof match === \"number\") {\n                  switch (match) {\n                    case /* Underscore */12 :\n                        break;\n                    case /* Lparen */18 :\n                        var prevEndPos = state.prevEndPos;\n                        Res_parser.next(undefined, state);\n                        var match$1 = state.token;\n                        var exit = 0;\n                        if (typeof match$1 === \"number\") {\n                          if (match$1 >= 20) {\n                            if (match$1 === 48) {\n                              return true;\n                            }\n                            if (match$1 === 80) {\n                              return false;\n                            }\n                            exit = 2;\n                          } else {\n                            if (match$1 === 4) {\n                              return true;\n                            }\n                            if (match$1 >= 19) {\n                              Res_parser.next(undefined, state);\n                              var match$2 = state.token;\n                              if (typeof match$2 !== \"number\") {\n                                return false;\n                              }\n                              if (match$2 !== 24) {\n                                return match$2 === 57;\n                              }\n                              if (inTernary) {\n                                return false;\n                              }\n                              Res_parser.next(undefined, state);\n                   
           var match$3 = state.token;\n                              if (typeof match$3 === \"number\") {\n                                return true;\n                              }\n                              if (match$3.TAG !== /* Lident */4) {\n                                return true;\n                              }\n                              Res_parser.next(undefined, state);\n                              var match$4 = state.token;\n                              if (match$4 === 42) {\n                                Res_parser.next(undefined, state);\n                                goToClosing(/* GreaterThan */41, state);\n                              }\n                              var match$5 = state.token;\n                              return match$5 === 57;\n                            } else {\n                              exit = 2;\n                            }\n                          }\n                        } else {\n                          exit = 2;\n                        }\n                        if (exit === 2) {\n                          goToClosing(/* Rparen */19, state);\n                          var match$6 = state.token;\n                          var exit$1 = 0;\n                          if (typeof match$6 === \"number\") {\n                            if (match$6 === 19) {\n                              return false;\n                            }\n                            if (match$6 !== 24) {\n                              if (match$6 === 57) {\n                                return true;\n                              }\n                              exit$1 = 3;\n                            } else {\n                              if (!inTernary) {\n                                return true;\n                              }\n                              exit$1 = 3;\n                            }\n                          } else {\n                            exit$1 = 3;\n                          
}\n                          if (exit$1 === 3) {\n                            Res_parser.nextUnsafe(state);\n                            var match$7 = state.token;\n                            if (match$7 === 57) {\n                              return state.startPos.pos_lnum === prevEndPos.pos_lnum;\n                            } else {\n                              return false;\n                            }\n                          }\n                          \n                        }\n                        break;\n                    default:\n                      return false;\n                  }\n                } else if (match.TAG !== /* Lident */4) {\n                  return false;\n                }\n                Res_parser.next(undefined, state);\n                var match$8 = state.token;\n                return match$8 === 57;\n              }));\n}\n\nfunction isEs6ArrowFunctor(p) {\n  return Res_parser.lookahead(p, (function (state) {\n                var match = state.token;\n                if (match !== 18) {\n                  return false;\n                }\n                Res_parser.next(undefined, state);\n                var match$1 = state.token;\n                if (match$1 === 19) {\n                  Res_parser.next(undefined, state);\n                  var match$2 = state.token;\n                  if (typeof match$2 === \"number\") {\n                    if (match$2 !== 24) {\n                      return match$2 === 57;\n                    } else {\n                      return true;\n                    }\n                  } else {\n                    return false;\n                  }\n                }\n                goToClosing(/* Rparen */19, state);\n                var match$3 = state.token;\n                if (typeof match$3 !== \"number\") {\n                  return false;\n                }\n                if (match$3 >= 25) {\n                  return match$3 === 57;\n                }\n               
 if (match$3 < 22) {\n                  return false;\n                }\n                switch (match$3) {\n                  case /* Rbrace */23 :\n                      return false;\n                  case /* Lbrace */22 :\n                  case /* Colon */24 :\n                      return true;\n                  \n                }\n              }));\n}\n\nfunction isEs6ArrowType(p) {\n  return Res_parser.lookahead(p, (function (state) {\n                var match = state.token;\n                if (typeof match !== \"number\") {\n                  return false;\n                }\n                if (match !== 18) {\n                  return match === 48;\n                }\n                Res_parser.next(undefined, state);\n                var match$1 = state.token;\n                if (typeof match$1 === \"number\") {\n                  if (match$1 > 47 || match$1 < 5) {\n                    if (!(match$1 > 48 || match$1 < 4)) {\n                      return true;\n                    }\n                    \n                  } else if (match$1 === 19) {\n                    Res_parser.next(undefined, state);\n                    var match$2 = state.token;\n                    return match$2 === 57;\n                  }\n                  \n                }\n                goToClosing(/* Rparen */19, state);\n                var match$3 = state.token;\n                return match$3 === 57;\n              }));\n}\n\nfunction buildLongident(words) {\n  var match = List.rev(words);\n  if (match) {\n    return List.fold_left((function (p, s) {\n                  return {\n                          TAG: /* Ldot */1,\n                          _0: p,\n                          _1: s\n                        };\n                }), {\n                TAG: /* Lident */0,\n                _0: match.hd\n              }, match.tl);\n  }\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"res_core.res\",\n          363,\n          
14\n        ],\n        Error: new Error()\n      };\n}\n\nfunction makeInfixOperator(p, token, startPos, endPos) {\n  var stringifiedToken = token === /* MinusGreater */58 ? \"|.\" : (\n      token === /* PlusPlus */38 ? \"^\" : (\n          token === /* BangEqual */70 ? \"<>\" : (\n              token === /* BangEqualEqual */71 ? \"!=\" : (\n                  token === /* Equal */14 ? (Res_parser.err(startPos, endPos, p, Res_diagnostics.message(\"Did you mean `==` here?\")), \"=\") : (\n                      token === /* EqualEqual */15 ? \"=\" : (\n                          token === /* EqualEqualEqual */16 ? \"==\" : Res_token.toString(token)\n                        )\n                    )\n                )\n            )\n        )\n    );\n  var loc = {\n    loc_start: startPos,\n    loc_end: endPos,\n    loc_ghost: false\n  };\n  var operator = $$Location.mkloc({\n        TAG: /* Lident */0,\n        _0: stringifiedToken\n      }, loc);\n  return Ast_helper.Exp.ident(loc, undefined, operator);\n}\n\nfunction negateString(s) {\n  if (s.length !== 0 && Caml_string.get(s, 0) === /* '-' */45) {\n    return $$String.sub(s, 1, s.length - 1 | 0);\n  } else {\n    return \"-\" + s;\n  }\n}\n\nfunction makeUnaryExpr(startPos, tokenEnd, token, operand) {\n  var match = operand.pexp_desc;\n  var exit = 0;\n  if (typeof token === \"number\") {\n    if (token >= 34) {\n      if (token >= 36) {\n        if (token < 38 && typeof match !== \"number\" && match.TAG === /* Pexp_constant */1) {\n          switch (match._0.TAG | 0) {\n            case /* Pconst_char */1 :\n            case /* Pconst_string */2 :\n                break;\n            case /* Pconst_integer */0 :\n            case /* Pconst_float */3 :\n                return operand;\n            \n          }\n        }\n        \n      } else if (token >= 35 || typeof match === \"number\" || match.TAG !== /* Pexp_constant */1) {\n        exit = 2;\n      } else {\n        var match$1 = match._0;\n        if 
(match$1.TAG === /* Pconst_integer */0) {\n          return {\n                  pexp_desc: {\n                    TAG: /* Pexp_constant */1,\n                    _0: {\n                      TAG: /* Pconst_integer */0,\n                      _0: negateString(match$1._0),\n                      _1: match$1._1\n                    }\n                  },\n                  pexp_loc: operand.pexp_loc,\n                  pexp_attributes: operand.pexp_attributes\n                };\n        }\n        exit = 2;\n      }\n    } else if (token === 7) {\n      var tokenLoc = {\n        loc_start: startPos,\n        loc_end: tokenEnd,\n        loc_ghost: false\n      };\n      return Ast_helper.Exp.apply({\n                  loc_start: startPos,\n                  loc_end: operand.pexp_loc.loc_end,\n                  loc_ghost: false\n                }, undefined, Ast_helper.Exp.ident(tokenLoc, undefined, $$Location.mkloc({\n                          TAG: /* Lident */0,\n                          _0: \"not\"\n                        }, tokenLoc)), {\n                  hd: [\n                    /* Nolabel */0,\n                    operand\n                  ],\n                  tl: /* [] */0\n                });\n    }\n    \n  }\n  if (exit === 2 && typeof match !== \"number\" && match.TAG === /* Pexp_constant */1) {\n    var match$2 = match._0;\n    if (match$2.TAG === /* Pconst_float */3) {\n      return {\n              pexp_desc: {\n                TAG: /* Pexp_constant */1,\n                _0: {\n                  TAG: /* Pconst_float */3,\n                  _0: negateString(match$2._0),\n                  _1: match$2._1\n                }\n              },\n              pexp_loc: operand.pexp_loc,\n              pexp_attributes: operand.pexp_attributes\n            };\n    }\n    \n  }\n  if (typeof token !== \"number\") {\n    return operand;\n  }\n  if (token > 37 || token < 34) {\n    return operand;\n  }\n  var tokenLoc$1 = {\n    loc_start: startPos,\n    
loc_end: tokenEnd,\n    loc_ghost: false\n  };\n  var operator = \"~\" + Res_token.toString(token);\n  return Ast_helper.Exp.apply({\n              loc_start: startPos,\n              loc_end: operand.pexp_loc.loc_end,\n              loc_ghost: false\n            }, undefined, Ast_helper.Exp.ident(tokenLoc$1, undefined, $$Location.mkloc({\n                      TAG: /* Lident */0,\n                      _0: operator\n                    }, tokenLoc$1)), {\n              hd: [\n                /* Nolabel */0,\n                operand\n              ],\n              tl: /* [] */0\n            });\n}\n\nfunction makeListExpression(loc, seq, extOpt) {\n  var handleSeq = function (x) {\n    if (x) {\n      var e1 = x.hd;\n      var exp_el = handleSeq(x.tl);\n      var loc_loc_start = e1.pexp_loc.loc_start;\n      var loc_loc_end = exp_el.pexp_loc.loc_end;\n      var loc$1 = {\n        loc_start: loc_loc_start,\n        loc_end: loc_loc_end,\n        loc_ghost: false\n      };\n      var arg = Ast_helper.Exp.tuple(loc$1, undefined, {\n            hd: e1,\n            tl: {\n              hd: exp_el,\n              tl: /* [] */0\n            }\n          });\n      return Ast_helper.Exp.construct(loc$1, undefined, $$Location.mkloc({\n                      TAG: /* Lident */0,\n                      _0: \"::\"\n                    }, loc$1), arg);\n    }\n    if (extOpt !== undefined) {\n      return extOpt;\n    }\n    var loc_loc_start$1 = loc.loc_start;\n    var loc_loc_end$1 = loc.loc_end;\n    var loc$2 = {\n      loc_start: loc_loc_start$1,\n      loc_end: loc_loc_end$1,\n      loc_ghost: true\n    };\n    var nil = $$Location.mkloc({\n          TAG: /* Lident */0,\n          _0: \"[]\"\n        }, loc$2);\n    return Ast_helper.Exp.construct(loc$2, undefined, nil, undefined);\n  };\n  var expr = handleSeq(seq);\n  return {\n          pexp_desc: expr.pexp_desc,\n          pexp_loc: loc,\n          pexp_attributes: expr.pexp_attributes\n        };\n}\n\nfunction 
makeListPattern(loc, seq, ext_opt) {\n  var handle_seq = function (x) {\n    if (x) {\n      var p1 = x.hd;\n      var pat_pl = handle_seq(x.tl);\n      var loc_loc_start = p1.ppat_loc.loc_start;\n      var loc_loc_end = pat_pl.ppat_loc.loc_end;\n      var loc$1 = {\n        loc_start: loc_loc_start,\n        loc_end: loc_loc_end,\n        loc_ghost: false\n      };\n      var arg = Ast_helper.Pat.mk(loc$1, undefined, {\n            TAG: /* Ppat_tuple */4,\n            _0: {\n              hd: p1,\n              tl: {\n                hd: pat_pl,\n                tl: /* [] */0\n              }\n            }\n          });\n      return Ast_helper.Pat.mk(loc$1, undefined, {\n                  TAG: /* Ppat_construct */5,\n                  _0: $$Location.mkloc({\n                        TAG: /* Lident */0,\n                        _0: \"::\"\n                      }, loc$1),\n                  _1: arg\n                });\n    }\n    if (ext_opt !== undefined) {\n      return ext_opt;\n    }\n    var loc_loc_start$1 = loc.loc_start;\n    var loc_loc_end$1 = loc.loc_end;\n    var loc$2 = {\n      loc_start: loc_loc_start$1,\n      loc_end: loc_loc_end$1,\n      loc_ghost: true\n    };\n    var nil_txt = {\n      TAG: /* Lident */0,\n      _0: \"[]\"\n    };\n    var nil = {\n      txt: nil_txt,\n      loc: loc$2\n    };\n    return Ast_helper.Pat.construct(loc$2, undefined, nil, undefined);\n  };\n  return handle_seq(seq);\n}\n\nfunction lidentOfPath(longident) {\n  var match = List.rev(Longident.flatten(longident));\n  if (match) {\n    return match.hd;\n  } else {\n    return \"\";\n  }\n}\n\nfunction makeNewtypes(attrs, loc, newtypes, exp) {\n  var expr = List.fold_right((function (newtype, exp) {\n          return Ast_helper.Exp.mk(loc, undefined, {\n                      TAG: /* Pexp_newtype */31,\n                      _0: newtype,\n                      _1: exp\n                    });\n        }), newtypes, exp);\n  return {\n          pexp_desc: 
expr.pexp_desc,\n          pexp_loc: expr.pexp_loc,\n          pexp_attributes: attrs\n        };\n}\n\nfunction wrapTypeAnnotation(loc, newtypes, core_type, body) {\n  var exp = makeNewtypes(/* [] */0, loc, newtypes, Ast_helper.Exp.constraint_(loc, undefined, body, core_type));\n  var typ = Ast_helper.Typ.poly(loc, undefined, newtypes, Ast_helper.Typ.varify_constructors(newtypes, core_type));\n  return [\n          exp,\n          typ\n        ];\n}\n\nfunction processUnderscoreApplication(args) {\n  var exp_question = {\n    contents: undefined\n  };\n  var hidden_var = \"__x\";\n  var check_arg = function (arg) {\n    var exp = arg[1];\n    var id = exp.pexp_desc;\n    if (typeof id === \"number\") {\n      return arg;\n    }\n    if (id.TAG !== /* Pexp_ident */0) {\n      return arg;\n    }\n    var id$1 = id._0;\n    var match = id$1.txt;\n    switch (match.TAG | 0) {\n      case /* Lident */0 :\n          if (match._0 !== \"_\") {\n            return arg;\n          }\n          var new_id = $$Location.mkloc({\n                TAG: /* Lident */0,\n                _0: hidden_var\n              }, id$1.loc);\n          var new_exp = Ast_helper.Exp.mk(exp.pexp_loc, undefined, {\n                TAG: /* Pexp_ident */0,\n                _0: new_id\n              });\n          exp_question.contents = new_exp;\n          return [\n                  arg[0],\n                  new_exp\n                ];\n      case /* Ldot */1 :\n      case /* Lapply */2 :\n          return arg;\n      \n    }\n  };\n  var args$1 = List.map(check_arg, args);\n  var wrap = function (exp_apply) {\n    var match = exp_question.contents;\n    if (match === undefined) {\n      return exp_apply;\n    }\n    var loc = match.pexp_loc;\n    var pattern = Ast_helper.Pat.mk(loc, undefined, {\n          TAG: /* Ppat_var */0,\n          _0: $$Location.mkloc(hidden_var, loc)\n        });\n    return Ast_helper.Exp.mk(loc, undefined, {\n                TAG: /* Pexp_fun */4,\n                _0: /* 
Nolabel */0,\n                _1: undefined,\n                _2: pattern,\n                _3: exp_apply\n              });\n  };\n  return [\n          args$1,\n          wrap\n        ];\n}\n\nfunction hexValue(ch) {\n  if (ch >= 65) {\n    if (ch >= 97) {\n      if (ch >= 103) {\n        return 16;\n      } else {\n        return (ch - /* 'a' */97 | 0) + 10 | 0;\n      }\n    } else if (ch >= 71) {\n      return 16;\n    } else {\n      return ((ch + 32 | 0) - /* 'a' */97 | 0) + 10 | 0;\n    }\n  } else if (ch > 57 || ch < 48) {\n    return 16;\n  } else {\n    return ch - 48 | 0;\n  }\n}\n\nfunction removeModuleNameFromPunnedFieldValue(exp) {\n  var pathIdent = exp.pexp_desc;\n  if (typeof pathIdent === \"number\") {\n    return exp;\n  }\n  if (pathIdent.TAG !== /* Pexp_ident */0) {\n    return exp;\n  }\n  var pathIdent$1 = pathIdent._0;\n  return {\n          pexp_desc: {\n            TAG: /* Pexp_ident */0,\n            _0: {\n              txt: {\n                TAG: /* Lident */0,\n                _0: Longident.last(pathIdent$1.txt)\n              },\n              loc: pathIdent$1.loc\n            }\n          },\n          pexp_loc: exp.pexp_loc,\n          pexp_attributes: exp.pexp_attributes\n        };\n}\n\nfunction parseStringLiteral(s) {\n  var len = s.length;\n  var b = $$Buffer.create(s.length);\n  var parse = function (_state, _i, _d) {\n    while(true) {\n      var d = _d;\n      var i = _i;\n      var state = _state;\n      if (i === len) {\n        return state > 6 || state < 2;\n      }\n      var c = s.charCodeAt(i);\n      switch (state) {\n        case /* Start */0 :\n            if (c !== 92) {\n              $$Buffer.add_char(b, c);\n              _i = i + 1 | 0;\n              _state = /* Start */0;\n              continue ;\n            }\n            _i = i + 1 | 0;\n            _state = /* Backslash */1;\n            continue ;\n        case /* Backslash */1 :\n            var exit = 0;\n            if (c >= 32) {\n              
if (c < 58) {\n                if (c >= 40) {\n                  if (c >= 48) {\n                    _d = 0;\n                    _state = /* DecimalEscape */3;\n                    continue ;\n                  }\n                  exit = 1;\n                } else {\n                  switch (c) {\n                    case 33 :\n                    case 35 :\n                    case 36 :\n                    case 37 :\n                    case 38 :\n                        exit = 1;\n                        break;\n                    case 32 :\n                    case 34 :\n                    case 39 :\n                        exit = 2;\n                        break;\n                    \n                  }\n                }\n              } else {\n                switch (c) {\n                  case 92 :\n                      exit = 2;\n                      break;\n                  case 98 :\n                      $$Buffer.add_char(b, /* '\\b' */8);\n                      _i = i + 1 | 0;\n                      _state = /* Start */0;\n                      continue ;\n                  case 110 :\n                      $$Buffer.add_char(b, /* '\\n' */10);\n                      _i = i + 1 | 0;\n                      _state = /* Start */0;\n                      continue ;\n                  case 111 :\n                      _d = 0;\n                      _i = i + 1 | 0;\n                      _state = /* OctalEscape */4;\n                      continue ;\n                  case 114 :\n                      $$Buffer.add_char(b, /* '\\r' */13);\n                      _i = i + 1 | 0;\n                      _state = /* Start */0;\n                      continue ;\n                  case 116 :\n                      $$Buffer.add_char(b, /* '\\t' */9);\n                      _i = i + 1 | 0;\n                      _state = /* Start */0;\n                      continue ;\n                  case 117 :\n                      _d = 0;\n                      _i = 
i + 1 | 0;\n                      _state = /* UnicodeEscapeStart */7;\n                      continue ;\n                  case 93 :\n                  case 94 :\n                  case 95 :\n                  case 96 :\n                  case 97 :\n                  case 99 :\n                  case 100 :\n                  case 101 :\n                  case 102 :\n                  case 103 :\n                  case 104 :\n                  case 105 :\n                  case 106 :\n                  case 107 :\n                  case 108 :\n                  case 109 :\n                  case 112 :\n                  case 113 :\n                  case 115 :\n                  case 118 :\n                  case 119 :\n                      exit = 1;\n                      break;\n                  case 120 :\n                      _d = 0;\n                      _i = i + 1 | 0;\n                      _state = /* HexEscape */2;\n                      continue ;\n                  default:\n                    exit = 1;\n                }\n              }\n            } else if (c !== 10) {\n              if (c !== 13) {\n                exit = 1;\n              } else {\n                _i = i + 1 | 0;\n                _state = /* EscapedLineBreak */8;\n                continue ;\n              }\n            } else {\n              _i = i + 1 | 0;\n              _state = /* EscapedLineBreak */8;\n              continue ;\n            }\n            switch (exit) {\n              case 1 :\n                  $$Buffer.add_char(b, /* '\\\\' */92);\n                  $$Buffer.add_char(b, c);\n                  _i = i + 1 | 0;\n                  _state = /* Start */0;\n                  continue ;\n              case 2 :\n                  $$Buffer.add_char(b, c);\n                  _i = i + 1 | 0;\n                  _state = /* Start */0;\n                  continue ;\n              \n            }\n            break;\n        case /* HexEscape */2 :\n            if (d 
=== 1) {\n              var c0 = s.charCodeAt(i - 1 | 0);\n              var c1 = s.charCodeAt(i);\n              var c$1 = (hexValue(c0) << 4) + hexValue(c1) | 0;\n              if (c$1 < 0 || c$1 > 255) {\n                return false;\n              }\n              $$Buffer.add_char(b, c$1);\n              _d = 0;\n              _i = i + 1 | 0;\n              _state = /* Start */0;\n              continue ;\n            }\n            _d = d + 1 | 0;\n            _i = i + 1 | 0;\n            _state = /* HexEscape */2;\n            continue ;\n        case /* DecimalEscape */3 :\n            if (d === 2) {\n              var c0$1 = s.charCodeAt(i - 2 | 0);\n              var c1$1 = s.charCodeAt(i - 1 | 0);\n              var c2 = s.charCodeAt(i);\n              var c$2 = (Math.imul(100, c0$1 - 48 | 0) + Math.imul(10, c1$1 - 48 | 0) | 0) + (c2 - 48 | 0) | 0;\n              if (c$2 < 0 || c$2 > 255) {\n                return false;\n              }\n              $$Buffer.add_char(b, c$2);\n              _d = 0;\n              _i = i + 1 | 0;\n              _state = /* Start */0;\n              continue ;\n            }\n            _d = d + 1 | 0;\n            _i = i + 1 | 0;\n            _state = /* DecimalEscape */3;\n            continue ;\n        case /* OctalEscape */4 :\n            if (d === 2) {\n              var c0$2 = s.charCodeAt(i - 2 | 0);\n              var c1$2 = s.charCodeAt(i - 1 | 0);\n              var c2$1 = s.charCodeAt(i);\n              var c$3 = (((c0$2 - 48 | 0) << 6) + ((c1$2 - 48 | 0) << 3) | 0) + (c2$1 - 48 | 0) | 0;\n              if (c$3 < 0 || c$3 > 255) {\n                return false;\n              }\n              $$Buffer.add_char(b, c$3);\n              _d = 0;\n              _i = i + 1 | 0;\n              _state = /* Start */0;\n              continue ;\n            }\n            _d = d + 1 | 0;\n            _i = i + 1 | 0;\n            _state = /* OctalEscape */4;\n            continue ;\n        case /* UnicodeEscape */5 
:\n            if (d === 3) {\n              var c0$3 = s.charCodeAt(i - 3 | 0);\n              var c1$3 = s.charCodeAt(i - 2 | 0);\n              var c2$2 = s.charCodeAt(i - 1 | 0);\n              var c3 = s.charCodeAt(i);\n              var c$4 = (((hexValue(c0$3) << 12) + (hexValue(c1$3) << 8) | 0) + (hexValue(c2$2) << 4) | 0) + hexValue(c3) | 0;\n              if (!Res_utf8.isValidCodePoint(c$4)) {\n                return false;\n              }\n              var codePoint = Res_utf8.encodeCodePoint(c$4);\n              $$Buffer.add_string(b, codePoint);\n              _d = 0;\n              _i = i + 1 | 0;\n              _state = /* Start */0;\n              continue ;\n            }\n            _d = d + 1 | 0;\n            _i = i + 1 | 0;\n            _state = /* UnicodeEscape */5;\n            continue ;\n        case /* UnicodeCodePointEscape */6 :\n            if (c >= 71) {\n              if (c >= 103) {\n                if (c !== 125) {\n                  return false;\n                }\n                var x = 0;\n                for(var remaining = d; remaining >= 1; --remaining){\n                  var ix = i - remaining | 0;\n                  x = (x << 4) + hexValue(s.charCodeAt(ix)) | 0;\n                }\n                var c$5 = x;\n                if (!Res_utf8.isValidCodePoint(c$5)) {\n                  return false;\n                }\n                var codePoint$1 = Res_utf8.encodeCodePoint(x);\n                $$Buffer.add_string(b, codePoint$1);\n                _d = 0;\n                _i = i + 1 | 0;\n                _state = /* Start */0;\n                continue ;\n              }\n              if (c < 97) {\n                return false;\n              }\n              \n            } else if (c >= 58) {\n              if (c < 65) {\n                return false;\n              }\n              \n            } else if (c < 48) {\n              return false;\n            }\n            _d = d + 1 | 0;\n            _i = i + 1 | 
0;\n            _state = /* UnicodeCodePointEscape */6;\n            continue ;\n        case /* UnicodeEscapeStart */7 :\n            if (c !== 123) {\n              _d = 1;\n              _i = i + 1 | 0;\n              _state = /* UnicodeEscape */5;\n              continue ;\n            }\n            _d = 0;\n            _i = i + 1 | 0;\n            _state = /* UnicodeCodePointEscape */6;\n            continue ;\n        case /* EscapedLineBreak */8 :\n            if (c !== 9) {\n              if (c !== 32) {\n                $$Buffer.add_char(b, c);\n                _i = i + 1 | 0;\n                _state = /* Start */0;\n                continue ;\n              }\n              _i = i + 1 | 0;\n              _state = /* EscapedLineBreak */8;\n              continue ;\n            }\n            _i = i + 1 | 0;\n            _state = /* EscapedLineBreak */8;\n            continue ;\n        \n      }\n    };\n  };\n  if (parse(/* Start */0, 0, 0)) {\n    return $$Buffer.contents(b);\n  } else {\n    return s;\n  }\n}\n\nfunction parseLident(p) {\n  while(true) {\n    var recoverLident = function (p) {\n      if (Res_token.isKeyword(p.token) && p.prevEndPos.pos_lnum === p.startPos.pos_lnum) {\n        Res_parser.err(undefined, undefined, p, Res_diagnostics.lident(p.token));\n        Res_parser.next(undefined, p);\n        return ;\n      }\n      var loop = function (p) {\n        while(true) {\n          if (shouldAbortListParse(p)) {\n            return ;\n          }\n          Res_parser.next(undefined, p);\n          continue ;\n        };\n      };\n      Res_parser.err(undefined, undefined, p, Res_diagnostics.lident(p.token));\n      Res_parser.next(undefined, p);\n      loop(p);\n      var match = p.token;\n      if (typeof match === \"number\" || match.TAG !== /* Lident */4) {\n        return ;\n      } else {\n        return Caml_option.some(undefined);\n      }\n    };\n    var startPos = p.startPos;\n    var ident = p.token;\n    if (typeof ident 
!== \"number\" && ident.TAG === /* Lident */4) {\n      Res_parser.next(undefined, p);\n      var loc_loc_end = p.prevEndPos;\n      var loc = {\n        loc_start: startPos,\n        loc_end: loc_loc_end,\n        loc_ghost: false\n      };\n      return [\n              ident._0,\n              loc\n            ];\n    }\n    var match = recoverLident(p);\n    if (match === undefined) {\n      return [\n              \"_\",\n              {\n                loc_start: startPos,\n                loc_end: p.prevEndPos,\n                loc_ghost: false\n              }\n            ];\n    }\n    continue ;\n  };\n}\n\nfunction parseIdent(msg, startPos, p) {\n  var token = p.token;\n  var exit = 0;\n  if (typeof token === \"number\") {\n    exit = 2;\n  } else {\n    switch (token.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          exit = 1;\n          break;\n      default:\n        exit = 2;\n    }\n  }\n  switch (exit) {\n    case 1 :\n        Res_parser.next(undefined, p);\n        var loc_loc_end = p.prevEndPos;\n        var loc = {\n          loc_start: startPos,\n          loc_end: loc_loc_end,\n          loc_ghost: false\n        };\n        return [\n                token._0,\n                loc\n              ];\n    case 2 :\n        if (Res_token.isKeyword(token) && p.prevEndPos.pos_lnum === p.startPos.pos_lnum) {\n          var tokenTxt = Res_token.toString(token);\n          var msg$1 = \"`\" + (tokenTxt + (\"` is a reserved keyword. 
Keywords need to be escaped: \\\\\\\"\" + (tokenTxt + \"\\\"\")));\n          Res_parser.err(startPos, undefined, p, Res_diagnostics.message(msg$1));\n          Res_parser.next(undefined, p);\n          return [\n                  tokenTxt,\n                  {\n                    loc_start: startPos,\n                    loc_end: p.prevEndPos,\n                    loc_ghost: false\n                  }\n                ];\n        }\n        Res_parser.err(startPos, undefined, p, Res_diagnostics.message(msg));\n        Res_parser.next(undefined, p);\n        return [\n                \"\",\n                {\n                  loc_start: startPos,\n                  loc_end: p.prevEndPos,\n                  loc_ghost: false\n                }\n              ];\n    \n  }\n}\n\nfunction parseHashIdent(startPos, p) {\n  Res_parser.expect(undefined, /* Hash */44, p);\n  var text = p.token;\n  if (typeof text === \"number\") {\n    return parseIdent(variantIdent, startPos, p);\n  }\n  switch (text.TAG | 0) {\n    case /* Int */1 :\n        var i = text.i;\n        if (text.suffix !== undefined) {\n          Res_parser.err(undefined, undefined, p, Res_diagnostics.message(polyVarIntWithSuffix(i)));\n        }\n        Res_parser.next(undefined, p);\n        return [\n                i,\n                {\n                  loc_start: startPos,\n                  loc_end: p.prevEndPos,\n                  loc_ghost: false\n                }\n              ];\n    case /* String */3 :\n        var text$1 = text._0;\n        var text$2 = p.mode === /* ParseForTypeChecker */0 ? 
parseStringLiteral(text$1) : text$1;\n        Res_parser.next(undefined, p);\n        return [\n                text$2,\n                {\n                  loc_start: startPos,\n                  loc_end: p.prevEndPos,\n                  loc_ghost: false\n                }\n              ];\n    default:\n      return parseIdent(variantIdent, startPos, p);\n  }\n}\n\nfunction parseValuePath(p) {\n  var startPos = p.startPos;\n  var aux = function (p, _path) {\n    while(true) {\n      var path = _path;\n      var ident = p.token;\n      if (typeof ident !== \"number\") {\n        switch (ident.TAG | 0) {\n          case /* Lident */4 :\n              return {\n                      TAG: /* Ldot */1,\n                      _0: path,\n                      _1: ident._0\n                    };\n          case /* Uident */5 :\n              Res_parser.next(undefined, p);\n              if (p.token === /* Dot */4) {\n                Res_parser.expect(undefined, /* Dot */4, p);\n                _path = {\n                  TAG: /* Ldot */1,\n                  _0: path,\n                  _1: ident._0\n                };\n                continue ;\n              }\n              Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(p.token, p.breadcrumbs));\n              return path;\n          default:\n            \n        }\n      }\n      Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(ident, p.breadcrumbs));\n      return {\n              TAG: /* Ldot */1,\n              _0: path,\n              _1: \"_\"\n            };\n    };\n  };\n  var ident = p.token;\n  var ident$1;\n  var exit = 0;\n  if (typeof ident === \"number\") {\n    exit = 1;\n  } else {\n    switch (ident.TAG | 0) {\n      case /* Lident */4 :\n          ident$1 = {\n            TAG: /* Lident */0,\n            _0: ident._0\n          };\n          break;\n      case /* Uident */5 :\n          var ident$2 = ident._0;\n          Res_parser.next(undefined, p);\n    
      if (p.token === /* Dot */4) {\n            Res_parser.expect(undefined, /* Dot */4, p);\n            ident$1 = aux(p, {\n                  TAG: /* Lident */0,\n                  _0: ident$2\n                });\n          } else {\n            Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(p.token, p.breadcrumbs));\n            ident$1 = {\n              TAG: /* Lident */0,\n              _0: ident$2\n            };\n          }\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(ident, p.breadcrumbs));\n    ident$1 = {\n      TAG: /* Lident */0,\n      _0: \"_\"\n    };\n  }\n  Res_parser.next(undefined, p);\n  return $$Location.mkloc(ident$1, {\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            });\n}\n\nfunction parseValuePathAfterDot(p) {\n  var startPos = p.startPos;\n  var token = p.token;\n  if (typeof token !== \"number\") {\n    switch (token.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return parseValuePath(p);\n      default:\n        \n    }\n  }\n  Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n  return $$Location.mkloc({\n              TAG: /* Lident */0,\n              _0: \"_\"\n            }, {\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            });\n}\n\nfunction parseValuePathTail(p, startPos, ident) {\n  var _path = ident;\n  while(true) {\n    var path = _path;\n    var ident$1 = p.token;\n    if (typeof ident$1 !== \"number\") {\n      switch (ident$1.TAG | 0) {\n        case /* Lident */4 :\n            Res_parser.next(undefined, p);\n            return $$Location.mkloc({\n                        TAG: /* Ldot */1,\n                        _0: path,\n                        _1: ident$1._0\n                
      }, {\n                        loc_start: startPos,\n                        loc_end: p.prevEndPos,\n                        loc_ghost: false\n                      });\n        case /* Uident */5 :\n            Res_parser.next(undefined, p);\n            Res_parser.expect(undefined, /* Dot */4, p);\n            _path = {\n              TAG: /* Ldot */1,\n              _0: path,\n              _1: ident$1._0\n            };\n            continue ;\n        default:\n          \n      }\n    }\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(ident$1, p.breadcrumbs));\n    return $$Location.mkloc({\n                TAG: /* Ldot */1,\n                _0: path,\n                _1: \"_\"\n              }, {\n                loc_start: startPos,\n                loc_end: p.prevEndPos,\n                loc_ghost: false\n              });\n  };\n}\n\nfunction parseModuleLongIdentTail(lowercase, p, startPos, ident) {\n  var _acc = ident;\n  while(true) {\n    var acc = _acc;\n    var ident$1 = p.token;\n    if (typeof ident$1 !== \"number\") {\n      switch (ident$1.TAG | 0) {\n        case /* Lident */4 :\n            if (lowercase) {\n              Res_parser.next(undefined, p);\n              var lident_1 = ident$1._0;\n              var lident = {\n                TAG: /* Ldot */1,\n                _0: acc,\n                _1: lident_1\n              };\n              return $$Location.mkloc(lident, {\n                          loc_start: startPos,\n                          loc_end: p.prevEndPos,\n                          loc_ghost: false\n                        });\n            }\n            break;\n        case /* Uident */5 :\n            Res_parser.next(undefined, p);\n            var endPos = p.prevEndPos;\n            var lident_1$1 = ident$1._0;\n            var lident$1 = {\n              TAG: /* Ldot */1,\n              _0: acc,\n              _1: lident_1$1\n            };\n            var match = p.token;\n            if 
(match !== 4) {\n              return $$Location.mkloc(lident$1, {\n                          loc_start: startPos,\n                          loc_end: endPos,\n                          loc_ghost: false\n                        });\n            }\n            Res_parser.next(undefined, p);\n            _acc = lident$1;\n            continue ;\n        default:\n          \n      }\n    }\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(ident$1));\n    return $$Location.mkloc({\n                TAG: /* Ldot */1,\n                _0: acc,\n                _1: \"_\"\n              }, {\n                loc_start: startPos,\n                loc_end: p.prevEndPos,\n                loc_ghost: false\n              });\n  };\n}\n\nfunction parseModuleLongIdent(lowercase, p) {\n  var startPos = p.startPos;\n  var ident = p.token;\n  if (typeof ident !== \"number\") {\n    switch (ident.TAG | 0) {\n      case /* Lident */4 :\n          if (lowercase) {\n            var loc_loc_end = p.endPos;\n            var loc = {\n              loc_start: startPos,\n              loc_end: loc_loc_end,\n              loc_ghost: false\n            };\n            var lident = {\n              TAG: /* Lident */0,\n              _0: ident._0\n            };\n            Res_parser.next(undefined, p);\n            return $$Location.mkloc(lident, loc);\n          }\n          break;\n      case /* Uident */5 :\n          var lident$1 = {\n            TAG: /* Lident */0,\n            _0: ident._0\n          };\n          var endPos = p.endPos;\n          Res_parser.next(undefined, p);\n          var match = p.token;\n          if (match === 4) {\n            Res_parser.next(undefined, p);\n            return parseModuleLongIdentTail(lowercase, p, startPos, lident$1);\n          } else {\n            return $$Location.mkloc(lident$1, {\n                        loc_start: startPos,\n                        loc_end: endPos,\n                        loc_ghost: false\n             
         });\n          }\n      default:\n        \n    }\n  }\n  Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(ident));\n  return $$Location.mkloc({\n              TAG: /* Lident */0,\n              _0: \"_\"\n            }, {\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            });\n}\n\nfunction parseIdentPath(p) {\n  var match = p.token;\n  if (typeof match === \"number\") {\n    return {\n            TAG: /* Lident */0,\n            _0: \"_\"\n          };\n  }\n  switch (match.TAG | 0) {\n    case /* Lident */4 :\n    case /* Uident */5 :\n        break;\n    default:\n      return {\n              TAG: /* Lident */0,\n              _0: \"_\"\n            };\n  }\n  var ident = match._0;\n  Res_parser.next(undefined, p);\n  var match$1 = p.token;\n  if (match$1 === 4) {\n    Res_parser.next(undefined, p);\n    var _acc = {\n      TAG: /* Lident */0,\n      _0: ident\n    };\n    while(true) {\n      var acc = _acc;\n      var _t = p.token;\n      if (typeof _t === \"number\") {\n        return acc;\n      }\n      switch (_t.TAG | 0) {\n        case /* Lident */4 :\n        case /* Uident */5 :\n            break;\n        default:\n          return acc;\n      }\n      Res_parser.next(undefined, p);\n      var lident_1 = _t._0;\n      var lident = {\n        TAG: /* Ldot */1,\n        _0: acc,\n        _1: lident_1\n      };\n      var match$2 = p.token;\n      if (match$2 !== 4) {\n        return lident;\n      }\n      Res_parser.next(undefined, p);\n      _acc = lident;\n      continue ;\n    };\n  } else {\n    return {\n            TAG: /* Lident */0,\n            _0: ident\n          };\n  }\n}\n\nfunction verifyJsxOpeningClosingName(p, nameExpr) {\n  var lident = p.token;\n  var closing;\n  if (typeof lident === \"number\") {\n    closing = {\n      TAG: /* Lident */0,\n      _0: \"\"\n    };\n  } else {\n    switch (lident.TAG | 0) {\n      case /* Lident */4 :\n      
    Res_parser.next(undefined, p);\n          closing = {\n            TAG: /* Lident */0,\n            _0: lident._0\n          };\n          break;\n      case /* Uident */5 :\n          closing = parseModuleLongIdent(true, p).txt;\n          break;\n      default:\n        closing = {\n          TAG: /* Lident */0,\n          _0: \"\"\n        };\n    }\n  }\n  var openingIdent = nameExpr.pexp_desc;\n  if (typeof openingIdent === \"number\") {\n    throw {\n          RE_EXN_ID: \"Assert_failure\",\n          _1: [\n            \"res_core.res\",\n            975,\n            9\n          ],\n          Error: new Error()\n        };\n  }\n  if (openingIdent.TAG === /* Pexp_ident */0) {\n    var withoutCreateElement = List.filter(function (s) {\n            return s !== \"createElement\";\n          })(Longident.flatten(openingIdent._0.txt));\n    var li = Longident.unflatten(withoutCreateElement);\n    var opening = li !== undefined ? li : ({\n          TAG: /* Lident */0,\n          _0: \"\"\n        });\n    return Caml_obj.caml_equal(opening, closing);\n  }\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"res_core.res\",\n          975,\n          9\n        ],\n        Error: new Error()\n      };\n}\n\nfunction string_of_pexp_ident(nameExpr) {\n  var openingIdent = nameExpr.pexp_desc;\n  if (typeof openingIdent === \"number\" || openingIdent.TAG !== /* Pexp_ident */0) {\n    return \"\";\n  } else {\n    return $$String.concat(\".\", List.filter(function (s) {\n                      return s !== \"createElement\";\n                    })(Longident.flatten(openingIdent._0.txt)));\n  }\n}\n\nfunction parseOpenDescription(attrs, p) {\n  Res_parser.leaveBreadcrumb(p, /* OpenDescription */0);\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Open */0, p);\n  var override = Res_parser.optional(p, /* Bang */7) ? 
/* Override */0 : /* Fresh */1;\n  var modident = parseModuleLongIdent(false, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  Res_parser.eatBreadcrumb(p);\n  return Ast_helper.Opn.mk(loc, attrs, undefined, override, modident);\n}\n\nfunction parseTemplateStringLiteral(s) {\n  var len = s.length;\n  var b = $$Buffer.create(len);\n  var loop = function (_i) {\n    while(true) {\n      var i = _i;\n      if (i >= len) {\n        return ;\n      }\n      var c = s.charCodeAt(i);\n      if (c !== 92) {\n        $$Buffer.add_char(b, c);\n        _i = i + 1 | 0;\n        continue ;\n      }\n      if ((i + 1 | 0) >= len) {\n        return $$Buffer.add_char(b, c);\n      }\n      var nextChar = s.charCodeAt(i + 1 | 0);\n      var exit = 0;\n      if (nextChar >= 36) {\n        exit = nextChar > 95 || nextChar < 37 ? (\n            nextChar >= 97 ? 2 : 1\n          ) : (\n            nextChar !== 92 ? 2 : 1\n          );\n      } else if (nextChar !== 10) {\n        if (nextChar !== 13) {\n          exit = 2;\n        } else {\n          _i = i + 2 | 0;\n          continue ;\n        }\n      } else {\n        _i = i + 2 | 0;\n        continue ;\n      }\n      switch (exit) {\n        case 1 :\n            $$Buffer.add_char(b, nextChar);\n            _i = i + 2 | 0;\n            continue ;\n        case 2 :\n            $$Buffer.add_char(b, /* '\\\\' */92);\n            $$Buffer.add_char(b, nextChar);\n            _i = i + 2 | 0;\n            continue ;\n        \n      }\n    };\n  };\n  loop(0);\n  return $$Buffer.contents(b);\n}\n\nfunction parseConstant(p) {\n  var match = p.token;\n  var isNegative;\n  if (typeof match === \"number\") {\n    switch (match) {\n      case /* Minus */34 :\n          Res_parser.next(undefined, p);\n          isNegative = true;\n          break;\n      case /* MinusDot */35 :\n          isNegative = false;\n          break;\n      case /* Plus */36 
:\n          Res_parser.next(undefined, p);\n          isNegative = false;\n          break;\n      default:\n        isNegative = false;\n    }\n  } else {\n    isNegative = false;\n  }\n  var s = p.token;\n  var constant;\n  var exit = 0;\n  if (typeof s === \"number\") {\n    exit = 1;\n  } else {\n    switch (s.TAG | 0) {\n      case /* Codepoint */0 :\n          constant = p.mode === /* ParseForTypeChecker */0 ? ({\n                TAG: /* Pconst_char */1,\n                _0: s.c\n              }) : ({\n                TAG: /* Pconst_string */2,\n                _0: s.original,\n                _1: \"INTERNAL_RES_CHAR_CONTENTS\"\n              });\n          break;\n      case /* Int */1 :\n          var i = s.i;\n          var intTxt = isNegative ? \"-\" + i : i;\n          constant = {\n            TAG: /* Pconst_integer */0,\n            _0: intTxt,\n            _1: s.suffix\n          };\n          break;\n      case /* Float */2 :\n          var f = s.f;\n          var floatTxt = isNegative ? \"-\" + f : f;\n          constant = {\n            TAG: /* Pconst_float */3,\n            _0: floatTxt,\n            _1: s.suffix\n          };\n          break;\n      case /* String */3 :\n          var s$1 = s._0;\n          constant = p.mode === /* ParseForTypeChecker */0 ? 
({\n                TAG: /* Pconst_string */2,\n                _0: s$1,\n                _1: \"js\"\n              }) : ({\n                TAG: /* Pconst_string */2,\n                _0: s$1,\n                _1: undefined\n              });\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(s, p.breadcrumbs));\n    constant = {\n      TAG: /* Pconst_string */2,\n      _0: \"\",\n      _1: undefined\n    };\n  }\n  Res_parser.next(undefined, p);\n  return constant;\n}\n\nfunction parseTemplateConstant(prefix, p) {\n  var startPos = p.startPos;\n  Res_parser.nextTemplateLiteralToken(p);\n  var txt = p.token;\n  if (typeof txt !== \"number\" && txt.TAG === /* TemplateTail */7) {\n    var txt$1 = txt._0;\n    Res_parser.next(undefined, p);\n    var txt$2 = p.mode === /* ParseForTypeChecker */0 ? parseTemplateStringLiteral(txt$1) : txt$1;\n    return {\n            TAG: /* Pconst_string */2,\n            _0: txt$2,\n            _1: prefix\n          };\n  }\n  var skipTokens = function (_param) {\n    while(true) {\n      Res_parser.next(undefined, p);\n      var match = p.token;\n      if (match === 80) {\n        Res_parser.next(undefined, p);\n        return ;\n      }\n      _param = undefined;\n      continue ;\n    };\n  };\n  skipTokens(undefined);\n  Res_parser.err(startPos, p.prevEndPos, p, Res_diagnostics.message(stringInterpolationInPattern));\n  return {\n          TAG: /* Pconst_string */2,\n          _0: \"\",\n          _1: undefined\n        };\n}\n\nfunction parseCommaDelimitedRegion(p, grammar, closing, f) {\n  Res_parser.leaveBreadcrumb(p, grammar);\n  var loop = function (_nodes) {\n    while(true) {\n      var nodes = _nodes;\n      var node = Curry._1(f, p);\n      if (node !== undefined) {\n        var node$1 = Caml_option.valFromOption(node);\n        var token = p.token;\n        if (token === 25) {\n          Res_parser.next(undefined, 
p);\n          _nodes = {\n            hd: node$1,\n            tl: nodes\n          };\n          continue ;\n        }\n        if (Caml_obj.caml_equal(token, closing) || token === /* Eof */26) {\n          return List.rev({\n                      hd: node$1,\n                      tl: nodes\n                    });\n        }\n        if (Res_grammar.isListElement(grammar, p.token)) {\n          Res_parser.expect(undefined, /* Comma */25, p);\n          _nodes = {\n            hd: node$1,\n            tl: nodes\n          };\n          continue ;\n        }\n        if (!(p.token === /* Eof */26 || Caml_obj.caml_equal(p.token, closing) || shouldAbortListParse(p))) {\n          Res_parser.expect(undefined, /* Comma */25, p);\n        }\n        if (p.token === /* Semicolon */8) {\n          Res_parser.next(undefined, p);\n        }\n        _nodes = {\n          hd: node$1,\n          tl: nodes\n        };\n        continue ;\n      }\n      if (p.token === /* Eof */26 || Caml_obj.caml_equal(p.token, closing) || shouldAbortListParse(p)) {\n        return List.rev(nodes);\n      }\n      Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(p.token, p.breadcrumbs));\n      Res_parser.next(undefined, p);\n      continue ;\n    };\n  };\n  var nodes = loop(/* [] */0);\n  Res_parser.eatBreadcrumb(p);\n  return nodes;\n}\n\nfunction parseCommaDelimitedReversedList(p, grammar, closing, f) {\n  Res_parser.leaveBreadcrumb(p, grammar);\n  var loop = function (_nodes) {\n    while(true) {\n      var nodes = _nodes;\n      var node = Curry._1(f, p);\n      if (node !== undefined) {\n        var node$1 = Caml_option.valFromOption(node);\n        var token = p.token;\n        if (token === 25) {\n          Res_parser.next(undefined, p);\n          _nodes = {\n            hd: node$1,\n            tl: nodes\n          };\n          continue ;\n        }\n        if (Caml_obj.caml_equal(token, closing) || token === /* Eof */26) {\n          return {\n                
  hd: node$1,\n                  tl: nodes\n                };\n        }\n        if (Res_grammar.isListElement(grammar, p.token)) {\n          Res_parser.expect(undefined, /* Comma */25, p);\n          _nodes = {\n            hd: node$1,\n            tl: nodes\n          };\n          continue ;\n        }\n        if (!(p.token === /* Eof */26 || Caml_obj.caml_equal(p.token, closing) || shouldAbortListParse(p))) {\n          Res_parser.expect(undefined, /* Comma */25, p);\n        }\n        if (p.token === /* Semicolon */8) {\n          Res_parser.next(undefined, p);\n        }\n        _nodes = {\n          hd: node$1,\n          tl: nodes\n        };\n        continue ;\n      }\n      if (p.token === /* Eof */26 || Caml_obj.caml_equal(p.token, closing) || shouldAbortListParse(p)) {\n        return nodes;\n      }\n      Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(p.token, p.breadcrumbs));\n      Res_parser.next(undefined, p);\n      continue ;\n    };\n  };\n  var nodes = loop(/* [] */0);\n  Res_parser.eatBreadcrumb(p);\n  return nodes;\n}\n\nfunction parseDelimitedRegion(p, grammar, closing, f) {\n  Res_parser.leaveBreadcrumb(p, grammar);\n  var loop = function (_nodes) {\n    while(true) {\n      var nodes = _nodes;\n      var node = Curry._1(f, p);\n      if (node !== undefined) {\n        _nodes = {\n          hd: Caml_option.valFromOption(node),\n          tl: nodes\n        };\n        continue ;\n      }\n      if (p.token === /* Eof */26 || Caml_obj.caml_equal(p.token, closing) || shouldAbortListParse(p)) {\n        return List.rev(nodes);\n      }\n      Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(p.token, p.breadcrumbs));\n      Res_parser.next(undefined, p);\n      continue ;\n    };\n  };\n  var nodes = loop(/* [] */0);\n  Res_parser.eatBreadcrumb(p);\n  return nodes;\n}\n\nfunction parseRegion(p, grammar, f) {\n  Res_parser.leaveBreadcrumb(p, grammar);\n  var loop = function (_nodes) {\n    
while(true) {\n      var nodes = _nodes;\n      var node = Curry._1(f, p);\n      if (node !== undefined) {\n        _nodes = {\n          hd: Caml_option.valFromOption(node),\n          tl: nodes\n        };\n        continue ;\n      }\n      if (p.token === /* Eof */26 || shouldAbortListParse(p)) {\n        return List.rev(nodes);\n      }\n      Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(p.token, p.breadcrumbs));\n      Res_parser.next(undefined, p);\n      continue ;\n    };\n  };\n  var nodes = loop(/* [] */0);\n  Res_parser.eatBreadcrumb(p);\n  return nodes;\n}\n\nfunction parsePattern(aliasOpt, or_Opt, p) {\n  var alias = aliasOpt !== undefined ? aliasOpt : true;\n  var or_ = or_Opt !== undefined ? or_Opt : true;\n  var startPos = p.startPos;\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var token = p.token;\n  var pat;\n  var exit = 0;\n  if (typeof token === \"number\") {\n    switch (token) {\n      case /* True */1 :\n      case /* False */2 :\n          exit = 2;\n          break;\n      case /* Underscore */12 :\n          var endPos = p.endPos;\n          var loc = {\n            loc_start: startPos,\n            loc_end: endPos,\n            loc_ghost: false\n          };\n          Res_parser.next(undefined, p);\n          pat = Ast_helper.Pat.any(loc, attrs, undefined);\n          break;\n      case /* Lparen */18 :\n          Res_parser.next(undefined, p);\n          var match = p.token;\n          if (match === 19) {\n            Res_parser.next(undefined, p);\n            var loc_loc_end = p.prevEndPos;\n            var loc$1 = {\n              loc_start: startPos,\n              loc_end: loc_loc_end,\n              loc_ghost: false\n            };\n            var lid = $$Location.mkloc({\n                  TAG: /* Lident */0,\n                  _0: \"()\"\n                }, loc$1);\n            pat = Ast_helper.Pat.construct(loc$1, undefined, lid, undefined);\n          } else {\n            var 
pat$1 = parseConstrainedPattern(p);\n            var match$1 = p.token;\n            if (match$1 === 25) {\n              Res_parser.next(undefined, p);\n              pat = parseTuplePattern(attrs, pat$1, startPos, p);\n            } else {\n              Res_parser.expect(undefined, /* Rparen */19, p);\n              var loc_loc_end$1 = p.prevEndPos;\n              var loc$2 = {\n                loc_start: startPos,\n                loc_end: loc_loc_end$1,\n                loc_ghost: false\n              };\n              pat = {\n                ppat_desc: pat$1.ppat_desc,\n                ppat_loc: loc$2,\n                ppat_attributes: pat$1.ppat_attributes\n              };\n            }\n          }\n          break;\n      case /* Lbracket */20 :\n          pat = parseArrayPattern(attrs, p);\n          break;\n      case /* Lbrace */22 :\n          pat = parseRecordPattern(attrs, p);\n          break;\n      case /* Exception */27 :\n          Res_parser.next(undefined, p);\n          var pat$2 = parsePattern(false, false, p);\n          var loc_loc_end$2 = p.prevEndPos;\n          var loc$3 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$2,\n            loc_ghost: false\n          };\n          pat = Ast_helper.Pat.exception_(loc$3, attrs, pat$2);\n          break;\n      case /* Minus */34 :\n      case /* Plus */36 :\n          exit = 3;\n          break;\n      case /* Hash */44 :\n          Res_parser.next(undefined, p);\n          if (p.token === /* DotDotDot */6) {\n            Res_parser.next(undefined, p);\n            var ident = parseValuePath(p);\n            var loc_loc_end$3 = ident.loc.loc_end;\n            var loc$4 = {\n              loc_start: startPos,\n              loc_end: loc_loc_end$3,\n              loc_ghost: false\n            };\n            pat = Ast_helper.Pat.type_(loc$4, attrs, ident);\n          } else {\n            var text = p.token;\n            var match$2;\n            if (typeof text === 
\"number\") {\n              match$2 = parseIdent(variantIdent, startPos, p);\n            } else {\n              switch (text.TAG | 0) {\n                case /* Int */1 :\n                    var i = text.i;\n                    if (text.suffix !== undefined) {\n                      Res_parser.err(undefined, undefined, p, Res_diagnostics.message(polyVarIntWithSuffix(i)));\n                    }\n                    Res_parser.next(undefined, p);\n                    match$2 = [\n                      i,\n                      {\n                        loc_start: startPos,\n                        loc_end: p.prevEndPos,\n                        loc_ghost: false\n                      }\n                    ];\n                    break;\n                case /* String */3 :\n                    var text$1 = text._0;\n                    var text$2 = p.mode === /* ParseForTypeChecker */0 ? parseStringLiteral(text$1) : text$1;\n                    Res_parser.next(undefined, p);\n                    match$2 = [\n                      text$2,\n                      {\n                        loc_start: startPos,\n                        loc_end: p.prevEndPos,\n                        loc_ghost: false\n                      }\n                    ];\n                    break;\n                default:\n                  match$2 = parseIdent(variantIdent, startPos, p);\n              }\n            }\n            var ident$1 = match$2[0];\n            var match$3 = p.token;\n            pat = match$3 === 18 ? 
parseVariantPatternArgs(p, ident$1, startPos, attrs) : Ast_helper.Pat.variant(match$2[1], attrs, ident$1, undefined);\n          }\n          break;\n      case /* Lazy */47 :\n          Res_parser.next(undefined, p);\n          var pat$3 = parsePattern(false, false, p);\n          var loc_loc_end$4 = p.prevEndPos;\n          var loc$5 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$4,\n            loc_ghost: false\n          };\n          pat = Ast_helper.Pat.lazy_(loc$5, attrs, pat$3);\n          break;\n      case /* Module */65 :\n          pat = parseModulePattern(attrs, p);\n          break;\n      case /* Percent */77 :\n          var extension = parseExtension(undefined, p);\n          var loc_loc_end$5 = p.prevEndPos;\n          var loc$6 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$5,\n            loc_ghost: false\n          };\n          pat = Ast_helper.Pat.extension(loc$6, attrs, extension);\n          break;\n      case /* List */79 :\n          Res_parser.next(undefined, p);\n          pat = parseListPattern(startPos, attrs, p);\n          break;\n      case /* Backtick */80 :\n          var constant = parseTemplateConstant(\"js\", p);\n          pat = Ast_helper.Pat.constant({\n                loc_start: startPos,\n                loc_end: p.prevEndPos,\n                loc_ghost: false\n              }, {\n                hd: templateLiteralAttr,\n                tl: /* [] */0\n              }, constant);\n          break;\n      default:\n        exit = 1;\n    }\n  } else {\n    switch (token.TAG | 0) {\n      case /* Codepoint */0 :\n      case /* Int */1 :\n      case /* Float */2 :\n      case /* String */3 :\n          exit = 3;\n          break;\n      case /* Lident */4 :\n          var ident$2 = token._0;\n          var endPos$1 = p.endPos;\n          var loc$7 = {\n            loc_start: startPos,\n            loc_end: endPos$1,\n            loc_ghost: false\n          };\n          
Res_parser.next(undefined, p);\n          var match$4 = p.token;\n          if (match$4 === 80) {\n            var constant$1 = parseTemplateConstant(ident$2, p);\n            pat = Ast_helper.Pat.constant({\n                  loc_start: startPos,\n                  loc_end: p.prevEndPos,\n                  loc_ghost: false\n                }, undefined, constant$1);\n          } else {\n            pat = Ast_helper.Pat.$$var(loc$7, attrs, $$Location.mkloc(ident$2, loc$7));\n          }\n          break;\n      case /* Uident */5 :\n          var constr = parseModuleLongIdent(false, p);\n          var match$5 = p.token;\n          pat = match$5 === 18 ? parseConstructorPatternArgs(p, constr, startPos, attrs) : Ast_helper.Pat.construct(constr.loc, attrs, constr, undefined);\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  switch (exit) {\n    case 1 :\n        Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n        var match$6 = skipTokensAndMaybeRetry(p, Res_grammar.isAtomicPatternStart);\n        pat = match$6 !== undefined ? 
parsePattern(undefined, undefined, p) : defaultPattern(undefined);\n        break;\n    case 2 :\n        var endPos$2 = p.endPos;\n        Res_parser.next(undefined, p);\n        var loc$8 = {\n          loc_start: startPos,\n          loc_end: endPos$2,\n          loc_ghost: false\n        };\n        pat = Ast_helper.Pat.construct(loc$8, undefined, $$Location.mkloc({\n                  TAG: /* Lident */0,\n                  _0: Res_token.toString(token)\n                }, loc$8), undefined);\n        break;\n    case 3 :\n        var c = parseConstant(p);\n        var match$7 = p.token;\n        if (match$7 === 5) {\n          Res_parser.next(undefined, p);\n          var c2 = parseConstant(p);\n          pat = Ast_helper.Pat.interval({\n                loc_start: startPos,\n                loc_end: p.prevEndPos,\n                loc_ghost: false\n              }, undefined, c, c2);\n        } else {\n          pat = Ast_helper.Pat.constant({\n                loc_start: startPos,\n                loc_end: p.prevEndPos,\n                loc_ghost: false\n              }, undefined, c);\n        }\n        break;\n    \n  }\n  var pat$4 = alias ? parseAliasPattern(attrs, pat, p) : pat;\n  if (or_) {\n    return parseOrPattern(pat$4, p);\n  } else {\n    return pat$4;\n  }\n}\n\nfunction parseAttribute(p) {\n  var match = p.token;\n  if (match !== 75) {\n    return ;\n  }\n  var startPos = p.startPos;\n  Res_parser.next(undefined, p);\n  var attrId = parseAttributeId(startPos, p);\n  var payload = parsePayload(p);\n  return [\n          attrId,\n          payload\n        ];\n}\n\nfunction parseModuleType(es6ArrowOpt, with_Opt, p) {\n  var es6Arrow = es6ArrowOpt !== undefined ? es6ArrowOpt : true;\n  var with_ = with_Opt !== undefined ? 
with_Opt : true;\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var modty;\n  if (es6Arrow && isEs6ArrowFunctor(p)) {\n    modty = parseFunctorModuleType(p);\n  } else {\n    var modty$1 = parseAtomicModuleType(p);\n    var match = p.token;\n    if (match === 57 && es6Arrow === true) {\n      Res_parser.next(undefined, p);\n      var rhs = parseModuleType(undefined, false, p);\n      var str = $$Location.mknoloc(\"_\");\n      var loc_loc_start = modty$1.pmty_loc.loc_start;\n      var loc_loc_end = p.prevEndPos;\n      var loc = {\n        loc_start: loc_loc_start,\n        loc_end: loc_loc_end,\n        loc_ghost: false\n      };\n      modty = Ast_helper.Mty.functor_(loc, undefined, str, modty$1, rhs);\n    } else {\n      modty = modty$1;\n    }\n  }\n  var moduleType_pmty_desc = modty.pmty_desc;\n  var moduleType_pmty_loc = modty.pmty_loc;\n  var moduleType_pmty_attributes = List.concat({\n        hd: modty.pmty_attributes,\n        tl: {\n          hd: attrs,\n          tl: /* [] */0\n        }\n      });\n  var moduleType = {\n    pmty_desc: moduleType_pmty_desc,\n    pmty_loc: moduleType_pmty_loc,\n    pmty_attributes: moduleType_pmty_attributes\n  };\n  if (with_) {\n    return parseWithConstraints(moduleType, p);\n  } else {\n    return moduleType;\n  }\n}\n\nfunction parseModuleBindings(attrs, startPos, p) {\n  var first = parseModuleBinding(attrs, startPos, p);\n  var _acc = {\n    hd: first,\n    tl: /* [] */0\n  };\n  while(true) {\n    var acc = _acc;\n    var startPos$1 = p.startPos;\n    var attrs$1 = parseAttributesAndBinding(p);\n    var match = p.token;\n    if (match !== 10) {\n      return List.rev(acc);\n    }\n    Res_parser.next(undefined, p);\n    Res_parser.optional(p, /* Module */65);\n    var modBinding = parseModuleBinding(attrs$1, startPos$1, p);\n    _acc = {\n      hd: modBinding,\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction parseModuleBinding(attrs, startPos, p) {\n  var ident = p.token;\n  var 
name;\n  var exit = 0;\n  if (typeof ident === \"number\" || ident.TAG !== /* Uident */5) {\n    exit = 1;\n  } else {\n    var startPos$1 = p.startPos;\n    Res_parser.next(undefined, p);\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos$1,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    name = $$Location.mkloc(ident._0, loc);\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(ident));\n    name = $$Location.mknoloc(\"_\");\n  }\n  var body = parseModuleBindingBody(p);\n  var loc_loc_end$1 = p.prevEndPos;\n  var loc$1 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$1,\n    loc_ghost: false\n  };\n  return Ast_helper.Mb.mk(loc$1, attrs, undefined, undefined, name, body);\n}\n\nfunction parseConstrainedPattern(p) {\n  var pat = parsePattern(undefined, undefined, p);\n  var match = p.token;\n  if (match !== 24) {\n    return pat;\n  }\n  Res_parser.next(undefined, p);\n  var typ = parseTypExpr(undefined, undefined, undefined, p);\n  var loc_loc_start = pat.ppat_loc.loc_start;\n  var loc_loc_end = typ.ptyp_loc.loc_end;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Pat.constraint_(loc, undefined, pat, typ);\n}\n\nfunction parseLidentList(p) {\n  var _ls = /* [] */0;\n  while(true) {\n    var ls = _ls;\n    var lident = p.token;\n    if (typeof lident === \"number\") {\n      return List.rev(ls);\n    }\n    if (lident.TAG !== /* Lident */4) {\n      return List.rev(ls);\n    }\n    var loc_loc_start = p.startPos;\n    var loc_loc_end = p.endPos;\n    var loc = {\n      loc_start: loc_loc_start,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    Res_parser.next(undefined, p);\n    _ls = {\n      hd: $$Location.mkloc(lident._0, loc),\n      tl: ls\n    };\n    continue ;\n  };\n}\n\nfunction parseTypExpr(attrs, es6ArrowOpt, aliasOpt, p) {\n  var es6Arrow = es6ArrowOpt !== undefined ? 
es6ArrowOpt : true;\n  var alias = aliasOpt !== undefined ? aliasOpt : true;\n  var startPos = p.startPos;\n  var attrs$1 = attrs !== undefined ? attrs : parseRegion(p, /* Attribute */50, parseAttribute);\n  var typ;\n  if (es6Arrow && isEs6ArrowType(p)) {\n    typ = parseEs6ArrowType(attrs$1, p);\n  } else {\n    var typ$1 = parseAtomicTypExpr(attrs$1, p);\n    typ = parseArrowTypeRest(es6Arrow, startPos, typ$1, p);\n  }\n  if (alias) {\n    return parseTypeAlias(p, typ);\n  } else {\n    return typ;\n  }\n}\n\nfunction parseConstrainedOrCoercedExpr(p) {\n  var expr = parseExpr(undefined, p);\n  var match = p.token;\n  if (typeof match !== \"number\") {\n    return expr;\n  }\n  if (match !== 24) {\n    if (match !== 40) {\n      return expr;\n    } else {\n      return parseCoercedExpr(expr, p);\n    }\n  }\n  Res_parser.next(undefined, p);\n  var typ = parseTypExpr(undefined, undefined, undefined, p);\n  var loc_loc_start = expr.pexp_loc.loc_start;\n  var loc_loc_end = typ.ptyp_loc.loc_end;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  var expr$1 = Ast_helper.Exp.constraint_(loc, undefined, expr, typ);\n  var match$1 = p.token;\n  if (match$1 === 40) {\n    return parseCoercedExpr(expr$1, p);\n  } else {\n    return expr$1;\n  }\n}\n\nfunction parseNonSpreadPattern(msg, p) {\n  var match = p.token;\n  if (match === 6) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.message(msg));\n    Res_parser.next(undefined, p);\n  }\n  var token = p.token;\n  if (!Res_grammar.isPatternStart(token)) {\n    return ;\n  }\n  var pat = parsePattern(undefined, undefined, p);\n  var match$1 = p.token;\n  if (match$1 !== 24) {\n    return pat;\n  }\n  Res_parser.next(undefined, p);\n  var typ = parseTypExpr(undefined, undefined, undefined, p);\n  var loc_loc_start = pat.ppat_loc.loc_start;\n  var loc_loc_end = typ.ptyp_loc.loc_end;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    
loc_ghost: false\n  };\n  return Ast_helper.Pat.constraint_(loc, undefined, pat, typ);\n}\n\nfunction parseTypeAlias(p, typ) {\n  var match = p.token;\n  if (match !== 3) {\n    return typ;\n  }\n  Res_parser.next(undefined, p);\n  Res_parser.expect(undefined, /* SingleQuote */13, p);\n  var match$1 = parseLident(p);\n  return Ast_helper.Typ.alias({\n              loc_start: typ.ptyp_loc.loc_start,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            }, undefined, typ, match$1[0]);\n}\n\nfunction parseFieldDeclarationRegion(p) {\n  var startPos = p.startPos;\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var mut = Res_parser.optional(p, /* Mutable */62) ? /* Mutable */1 : /* Immutable */0;\n  var match = p.token;\n  if (typeof match === \"number\") {\n    return ;\n  }\n  if (match.TAG !== /* Lident */4) {\n    return ;\n  }\n  var match$1 = parseLident(p);\n  var name = $$Location.mkloc(match$1[0], match$1[1]);\n  var match$2 = p.token;\n  var typ = match$2 === 24 ? (Res_parser.next(undefined, p), parsePolyTypeExpr(p)) : Ast_helper.Typ.constr(name.loc, undefined, {\n          txt: {\n            TAG: /* Lident */0,\n            _0: name.txt\n          },\n          loc: name.loc\n        }, /* [] */0);\n  var loc_loc_end = typ.ptyp_loc.loc_end;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Type.field(loc, attrs, undefined, mut, name, typ);\n}\n\nfunction parseFieldDeclaration(p) {\n  var startPos = p.startPos;\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var mut = Res_parser.optional(p, /* Mutable */62) ? /* Mutable */1 : /* Immutable */0;\n  var match = parseLident(p);\n  var name = $$Location.mkloc(match[0], match[1]);\n  var match$1 = p.token;\n  var typ = match$1 === 24 ? 
(Res_parser.next(undefined, p), parsePolyTypeExpr(p)) : Ast_helper.Typ.constr(name.loc, undefined, {\n          txt: {\n            TAG: /* Lident */0,\n            _0: name.txt\n          },\n          loc: name.loc\n        }, /* [] */0);\n  var loc_loc_end = typ.ptyp_loc.loc_end;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Type.field(loc, attrs, undefined, mut, name, typ);\n}\n\nfunction parseArrowTypeRest(es6Arrow, startPos, typ, p) {\n  var token = p.token;\n  if (typeof token !== \"number\") {\n    return typ;\n  }\n  if (!(token === 58 || token === 57)) {\n    return typ;\n  }\n  if (es6Arrow !== true) {\n    return typ;\n  }\n  if (token === /* MinusGreater */58) {\n    Res_parser.expect(undefined, /* EqualGreater */57, p);\n  }\n  Res_parser.next(undefined, p);\n  var returnType = parseTypExpr(undefined, undefined, false, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Typ.arrow(loc, undefined, /* Nolabel */0, typ, returnType);\n}\n\nfunction parseStringFieldDeclaration(p) {\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var name = p.token;\n  if (typeof name === \"number\") {\n    if (name !== /* DotDotDot */6) {\n      return ;\n    }\n    Res_parser.next(undefined, p);\n    var typ = parseTypExpr(undefined, undefined, undefined, p);\n    return {\n            TAG: /* Oinherit */1,\n            _0: typ\n          };\n  } else {\n    switch (name.TAG | 0) {\n      case /* String */3 :\n          var nameStartPos = p.startPos;\n          var nameEndPos = p.endPos;\n          Res_parser.next(undefined, p);\n          var fieldName = $$Location.mkloc(name._0, {\n                loc_start: nameStartPos,\n                loc_end: nameEndPos,\n                loc_ghost: false\n              });\n          Res_parser.expect(/* TypeExpression */20, /* Colon */24, 
p);\n          var typ$1 = parsePolyTypeExpr(p);\n          return {\n                  TAG: /* Otag */0,\n                  _0: fieldName,\n                  _1: attrs,\n                  _2: typ$1\n                };\n      case /* Lident */4 :\n          var name$1 = name._0;\n          var nameLoc_loc_start = p.startPos;\n          var nameLoc_loc_end = p.endPos;\n          var nameLoc = {\n            loc_start: nameLoc_loc_start,\n            loc_end: nameLoc_loc_end,\n            loc_ghost: false\n          };\n          Res_parser.err(undefined, undefined, p, Res_diagnostics.message(objectQuotedFieldName(name$1)));\n          Res_parser.next(undefined, p);\n          var fieldName$1 = $$Location.mkloc(name$1, nameLoc);\n          Res_parser.expect(/* TypeExpression */20, /* Colon */24, p);\n          var typ$2 = parsePolyTypeExpr(p);\n          return {\n                  TAG: /* Otag */0,\n                  _0: fieldName$1,\n                  _1: attrs,\n                  _2: typ$2\n                };\n      default:\n        return ;\n    }\n  }\n}\n\nfunction parseTagSpec(p) {\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var match = p.token;\n  if (match === 44) {\n    return parsePolymorphicVariantTypeSpecHash(attrs, false, p);\n  }\n  var typ = parseTypExpr(attrs, undefined, undefined, p);\n  return {\n          TAG: /* Rinherit */1,\n          _0: typ\n        };\n}\n\nfunction parseTagSpecFulls(p) {\n  var match = p.token;\n  if (typeof match !== \"number\") {\n    return /* [] */0;\n  }\n  if (!(match > 41 || match < 21)) {\n    return /* [] */0;\n  }\n  if (match !== 17) {\n    return /* [] */0;\n  }\n  Res_parser.next(undefined, p);\n  var rowField = parseTagSpecFull(p);\n  return {\n          hd: rowField,\n          tl: parseTagSpecFulls(p)\n        };\n}\n\nfunction parseTagSpecFull(p) {\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var match = p.token;\n  if (match === 44) {\n    return 
parsePolymorphicVariantTypeSpecHash(attrs, true, p);\n  }\n  var typ = parseTypExpr(attrs, undefined, undefined, p);\n  return {\n          TAG: /* Rinherit */1,\n          _0: typ\n        };\n}\n\nfunction parseTagSpecFirst(p) {\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var match = p.token;\n  if (typeof match === \"number\") {\n    if (match !== 17) {\n      if (match === 44) {\n        return {\n                hd: parsePolymorphicVariantTypeSpecHash(attrs, false, p),\n                tl: /* [] */0\n              };\n      }\n      \n    } else {\n      Res_parser.next(undefined, p);\n      return {\n              hd: parseTagSpec(p),\n              tl: /* [] */0\n            };\n    }\n  }\n  var typ = parseTypExpr(attrs, undefined, undefined, p);\n  var match$1 = p.token;\n  if (match$1 === 21) {\n    return {\n            hd: {\n              TAG: /* Rinherit */1,\n              _0: typ\n            },\n            tl: /* [] */0\n          };\n  } else {\n    Res_parser.expect(undefined, /* Bar */17, p);\n    return {\n            hd: {\n              TAG: /* Rinherit */1,\n              _0: typ\n            },\n            tl: {\n              hd: parseTagSpec(p),\n              tl: /* [] */0\n            }\n          };\n  }\n}\n\nfunction parseTagSpecs(p) {\n  var match = p.token;\n  if (match !== 17) {\n    return /* [] */0;\n  }\n  Res_parser.next(undefined, p);\n  var rowField = parseTagSpec(p);\n  return {\n          hd: rowField,\n          tl: parseTagSpecs(p)\n        };\n}\n\nfunction parseTagNames(p) {\n  if (p.token === /* GreaterThan */41) {\n    Res_parser.next(undefined, p);\n    return parseRegion(p, /* TagNames */57, parseTagName);\n  } else {\n    return /* [] */0;\n  }\n}\n\nfunction parseRecordRow(p) {\n  var match = p.token;\n  if (match === 6) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.message(recordExprSpread));\n    Res_parser.next(undefined, p);\n  }\n  var match$1 = p.token;\n  if 
(typeof match$1 === \"number\") {\n    return ;\n  }\n  switch (match$1.TAG | 0) {\n    case /* Lident */4 :\n    case /* Uident */5 :\n        break;\n    default:\n      return ;\n  }\n  var startToken = p.token;\n  var field = parseValuePath(p);\n  var match$2 = p.token;\n  if (match$2 === 24) {\n    Res_parser.next(undefined, p);\n    var fieldExpr = parseExpr(undefined, p);\n    return [\n            field,\n            fieldExpr\n          ];\n  }\n  var value = Ast_helper.Exp.ident(field.loc, undefined, field);\n  var value$1;\n  value$1 = typeof startToken === \"number\" || startToken.TAG !== /* Uident */5 ? value : removeModuleNameFromPunnedFieldValue(value);\n  return [\n          field,\n          value$1\n        ];\n}\n\nfunction parsePolyTypeExpr(p) {\n  var startPos = p.startPos;\n  var match = p.token;\n  if (match !== 13) {\n    return parseTypExpr(undefined, undefined, undefined, p);\n  }\n  var vars = parseTypeVarList(p);\n  if (vars) {\n    var _v1 = vars.hd;\n    if (vars.tl) {\n      Res_parser.expect(undefined, /* Dot */4, p);\n      var typ = parseTypExpr(undefined, undefined, undefined, p);\n      var loc_loc_end = p.prevEndPos;\n      var loc = {\n        loc_start: startPos,\n        loc_end: loc_loc_end,\n        loc_ghost: false\n      };\n      return Ast_helper.Typ.poly(loc, undefined, vars, typ);\n    }\n    var match$1 = p.token;\n    if (typeof match$1 === \"number\") {\n      if (match$1 !== 4) {\n        if (match$1 === 57) {\n          Res_parser.next(undefined, p);\n          var typ$1 = Ast_helper.Typ.$$var(_v1.loc, undefined, _v1.txt);\n          var returnType = parseTypExpr(undefined, undefined, false, p);\n          var loc_loc_start = typ$1.ptyp_loc.loc_start;\n          var loc_loc_end$1 = p.prevEndPos;\n          var loc$1 = {\n            loc_start: loc_loc_start,\n            loc_end: loc_loc_end$1,\n            loc_ghost: false\n          };\n          return Ast_helper.Typ.arrow(loc$1, undefined, /* Nolabel */0, 
typ$1, returnType);\n        }\n        \n      } else {\n        Res_parser.next(undefined, p);\n        var typ$2 = parseTypExpr(undefined, undefined, undefined, p);\n        var loc_loc_end$2 = p.prevEndPos;\n        var loc$2 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$2,\n          loc_ghost: false\n        };\n        return Ast_helper.Typ.poly(loc$2, undefined, vars, typ$2);\n      }\n    }\n    return Ast_helper.Typ.$$var(_v1.loc, undefined, _v1.txt);\n  }\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"res_core.res\",\n          4299,\n          11\n        ],\n        Error: new Error()\n      };\n}\n\nfunction overParseConstrainedOrCoercedOrArrowExpression(p, expr) {\n  var match = p.token;\n  if (typeof match !== \"number\") {\n    return expr;\n  }\n  if (match !== 24) {\n    if (match !== 40) {\n      return expr;\n    } else {\n      return parseCoercedExpr(expr, p);\n    }\n  }\n  Res_parser.next(undefined, p);\n  var typ = parseTypExpr(undefined, false, undefined, p);\n  var match$1 = p.token;\n  if (match$1 === 57) {\n    Res_parser.next(undefined, p);\n    var body = parseExpr(undefined, p);\n    var longident = expr.pexp_desc;\n    var pat;\n    var exit = 0;\n    if (typeof longident === \"number\" || longident.TAG !== /* Pexp_ident */0) {\n      exit = 1;\n    } else {\n      var longident$1 = longident._0;\n      pat = Ast_helper.Pat.$$var(expr.pexp_loc, undefined, $$Location.mkloc($$String.concat(\".\", Longident.flatten(longident$1.txt)), longident$1.loc));\n    }\n    if (exit === 1) {\n      pat = Ast_helper.Pat.$$var(expr.pexp_loc, undefined, $$Location.mkloc(\"pattern\", expr.pexp_loc));\n    }\n    var arrow1 = Ast_helper.Exp.fun_({\n          loc_start: expr.pexp_loc.loc_start,\n          loc_end: body.pexp_loc.loc_end,\n          loc_ghost: false\n        }, undefined, /* Nolabel */0, undefined, pat, Ast_helper.Exp.constraint_(undefined, undefined, body, typ));\n    var arrow2 = 
Ast_helper.Exp.fun_({\n          loc_start: expr.pexp_loc.loc_start,\n          loc_end: body.pexp_loc.loc_end,\n          loc_ghost: false\n        }, undefined, /* Nolabel */0, undefined, Ast_helper.Pat.constraint_(undefined, undefined, pat, typ), body);\n    var msg = Res_doc.toString(80, Res_doc.breakableGroup(true, Res_doc.concat({\n                  hd: Res_doc.text(\"Did you mean to annotate the parameter type or the return type?\"),\n                  tl: {\n                    hd: Res_doc.indent(Res_doc.concat({\n                              hd: Res_doc.line,\n                              tl: {\n                                hd: Res_doc.text(\"1) \"),\n                                tl: {\n                                  hd: Res_printer.printExpression(arrow1, Res_comments_table.empty),\n                                  tl: {\n                                    hd: Res_doc.line,\n                                    tl: {\n                                      hd: Res_doc.text(\"2) \"),\n                                      tl: {\n                                        hd: Res_printer.printExpression(arrow2, Res_comments_table.empty),\n                                        tl: /* [] */0\n                                      }\n                                    }\n                                  }\n                                }\n                              }\n                            })),\n                    tl: /* [] */0\n                  }\n                })));\n    Res_parser.err(expr.pexp_loc.loc_start, body.pexp_loc.loc_end, p, Res_diagnostics.message(msg));\n    return arrow1;\n  }\n  var loc_loc_start = expr.pexp_loc.loc_start;\n  var loc_loc_end = typ.ptyp_loc.loc_end;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  var expr$1 = Ast_helper.Exp.constraint_(loc, undefined, expr, typ);\n  Res_parser.err(expr$1.pexp_loc.loc_start, typ.ptyp_loc.loc_end, p, 
Res_diagnostics.message(Res_doc.toString(80, Res_doc.breakableGroup(true, Res_doc.concat({\n                        hd: Res_doc.text(\"Expressions with type constraints need to be wrapped in parens:\"),\n                        tl: {\n                          hd: Res_doc.indent(Res_doc.concat({\n                                    hd: Res_doc.line,\n                                    tl: {\n                                      hd: Res_printer.addParens(Res_printer.printExpression(expr$1, Res_comments_table.empty)),\n                                      tl: /* [] */0\n                                    }\n                                  })),\n                          tl: /* [] */0\n                        }\n                      })))));\n  return expr$1;\n}\n\nfunction parseExpr(contextOpt, p) {\n  var context = contextOpt !== undefined ? contextOpt : /* OrdinaryExpr */0;\n  var expr = parseOperandExpr(context, p);\n  var expr$1 = parseBinaryExpr(context, expr, p, 1);\n  return parseTernaryExpr(expr$1, p);\n}\n\nfunction parsePayload(p) {\n  var match = p.token;\n  if (match !== 18) {\n    return {\n            TAG: /* PStr */0,\n            _0: /* [] */0\n          };\n  }\n  if (p.startPos.pos_cnum !== p.prevEndPos.pos_cnum) {\n    return {\n            TAG: /* PStr */0,\n            _0: /* [] */0\n          };\n  }\n  Res_parser.leaveBreadcrumb(p, /* AttributePayload */56);\n  Res_parser.next(undefined, p);\n  var match$1 = p.token;\n  if (typeof match$1 === \"number\") {\n    if (match$1 !== 24) {\n      if (match$1 === 49) {\n        Res_parser.next(undefined, p);\n        var pattern = parsePattern(undefined, undefined, p);\n        var match$2 = p.token;\n        var expr;\n        var exit = 0;\n        if (typeof match$2 === \"number\" && !(match$2 !== 50 && match$2 !== 56)) {\n          exit = 2;\n        } else {\n          expr = undefined;\n        }\n        if (exit === 2) {\n          Res_parser.next(undefined, p);\n          expr = 
parseExpr(undefined, p);\n        }\n        Res_parser.expect(undefined, /* Rparen */19, p);\n        Res_parser.eatBreadcrumb(p);\n        return {\n                TAG: /* PPat */3,\n                _0: pattern,\n                _1: expr\n              };\n      }\n      \n    } else {\n      Res_parser.next(undefined, p);\n      var payload = Res_grammar.isSignatureItemStart(p.token) ? ({\n            TAG: /* PSig */1,\n            _0: parseDelimitedRegion(p, /* Signature */46, /* Rparen */19, parseSignatureItemRegion)\n          }) : ({\n            TAG: /* PTyp */2,\n            _0: parseTypExpr(undefined, undefined, undefined, p)\n          });\n      Res_parser.expect(undefined, /* Rparen */19, p);\n      Res_parser.eatBreadcrumb(p);\n      return payload;\n    }\n  }\n  var items = parseDelimitedRegion(p, /* Structure */48, /* Rparen */19, parseStructureItemRegion);\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  Res_parser.eatBreadcrumb(p);\n  return {\n          TAG: /* PStr */0,\n          _0: items\n        };\n}\n\nfunction parseAttributeId(startPos, p) {\n  var loop = function (p, _acc) {\n    while(true) {\n      var acc = _acc;\n      var token = p.token;\n      var exit = 0;\n      if (typeof token === \"number\") {\n        exit = 2;\n      } else {\n        switch (token.TAG | 0) {\n          case /* Lident */4 :\n          case /* Uident */5 :\n              exit = 1;\n              break;\n          default:\n            exit = 2;\n        }\n      }\n      switch (exit) {\n        case 1 :\n            Res_parser.next(undefined, p);\n            var id = acc + token._0;\n            var match = p.token;\n            if (match !== 4) {\n              return id;\n            }\n            Res_parser.next(undefined, p);\n            _acc = id + \".\";\n            continue ;\n        case 2 :\n            if (Res_token.isKeyword(token)) {\n              Res_parser.next(undefined, p);\n              var id$1 = acc + 
Res_token.toString(token);\n              var match$1 = p.token;\n              if (match$1 !== 4) {\n                return id$1;\n              }\n              Res_parser.next(undefined, p);\n              _acc = id$1 + \".\";\n              continue ;\n            }\n            Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n            return acc;\n        \n      }\n    };\n  };\n  var id = loop(p, \"\");\n  var endPos = p.prevEndPos;\n  return $$Location.mkloc(id, {\n              loc_start: startPos,\n              loc_end: endPos,\n              loc_ghost: false\n            });\n}\n\nfunction parseConstrDeclArgs(p) {\n  var match = p.token;\n  var constrArgs;\n  if (match === 18) {\n    Res_parser.next(undefined, p);\n    var match$1 = p.token;\n    if (match$1 === 22) {\n      var lbrace = p.startPos;\n      Res_parser.next(undefined, p);\n      var startPos = p.startPos;\n      var match$2 = p.token;\n      var exit = 0;\n      if (typeof match$2 === \"number\") {\n        if (match$2 >= 6) {\n          if (match$2 >= 7) {\n            exit = 1;\n          } else {\n            var dotdotdotStart = p.startPos;\n            var dotdotdotEnd = p.endPos;\n            Res_parser.next(undefined, p);\n            var typ = parseTypExpr(undefined, undefined, undefined, p);\n            var match$3 = p.token;\n            if (match$3 === 23) {\n              Res_parser.err(dotdotdotStart, dotdotdotEnd, p, Res_diagnostics.message(sameTypeSpread));\n              Res_parser.next(undefined, p);\n            } else {\n              Res_parser.expect(undefined, /* Comma */25, p);\n            }\n            var match$4 = p.token;\n            if (typeof match$4 !== \"number\" && match$4.TAG === /* Lident */4) {\n              Res_parser.err(dotdotdotStart, dotdotdotEnd, p, Res_diagnostics.message(spreadInRecordDeclaration));\n            }\n            var fields_0 = {\n              TAG: /* Oinherit */1,\n              
_0: typ\n            };\n            var fields_1 = parseCommaDelimitedRegion(p, /* StringFieldDeclarations */37, /* Rbrace */23, parseStringFieldDeclaration);\n            var fields = {\n              hd: fields_0,\n              tl: fields_1\n            };\n            Res_parser.expect(undefined, /* Rbrace */23, p);\n            var loc_loc_end = p.prevEndPos;\n            var loc = {\n              loc_start: startPos,\n              loc_end: loc_loc_end,\n              loc_ghost: false\n            };\n            var typ$1 = parseTypeAlias(p, Ast_helper.Typ.object_(loc, undefined, fields, /* Closed */0));\n            var typ$2 = parseArrowTypeRest(true, startPos, typ$1, p);\n            Res_parser.optional(p, /* Comma */25);\n            var moreArgs = parseCommaDelimitedRegion(p, /* TypExprList */39, /* Rparen */19, parseTypExprRegion);\n            Res_parser.expect(undefined, /* Rparen */19, p);\n            constrArgs = {\n              TAG: /* Pcstr_tuple */0,\n              _0: {\n                hd: typ$2,\n                tl: moreArgs\n              }\n            };\n          }\n        } else if (match$2 >= 4) {\n          var match$5 = p.token;\n          var closedFlag = typeof match$5 === \"number\" ? (\n              match$5 !== 4 ? (\n                  match$5 !== 5 ? 
/* Closed */0 : (Res_parser.next(undefined, p), /* Open */1)\n                ) : (Res_parser.next(undefined, p), /* Closed */0)\n            ) : /* Closed */0;\n          var fields$1 = parseCommaDelimitedRegion(p, /* StringFieldDeclarations */37, /* Rbrace */23, parseStringFieldDeclaration);\n          Res_parser.expect(undefined, /* Rbrace */23, p);\n          var loc_loc_end$1 = p.prevEndPos;\n          var loc$1 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$1,\n            loc_ghost: false\n          };\n          var typ$3 = Ast_helper.Typ.object_(loc$1, /* [] */0, fields$1, closedFlag);\n          Res_parser.optional(p, /* Comma */25);\n          var moreArgs$1 = parseCommaDelimitedRegion(p, /* TypExprList */39, /* Rparen */19, parseTypExprRegion);\n          Res_parser.expect(undefined, /* Rparen */19, p);\n          constrArgs = {\n            TAG: /* Pcstr_tuple */0,\n            _0: {\n              hd: typ$3,\n              tl: moreArgs$1\n            }\n          };\n        } else {\n          exit = 1;\n        }\n      } else {\n        exit = 1;\n      }\n      if (exit === 1) {\n        var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n        var match$6 = p.token;\n        var exit$1 = 0;\n        if (typeof match$6 === \"number\" || match$6.TAG !== /* String */3) {\n          exit$1 = 2;\n        } else {\n          var fields$2;\n          if (attrs) {\n            Res_parser.leaveBreadcrumb(p, /* StringFieldDeclarations */37);\n            var field = parseStringFieldDeclaration(p);\n            var field$1;\n            if (field !== undefined) {\n              field$1 = field;\n            } else {\n              throw {\n                    RE_EXN_ID: \"Assert_failure\",\n                    _1: [\n                      \"res_core.res\",\n                      5016,\n                      24\n                    ],\n                    Error: new Error()\n                  };\n            }\n         
   var match$7 = p.token;\n            if (typeof match$7 === \"number\") {\n              if (match$7 >= 24) {\n                if (match$7 >= 27) {\n                  Res_parser.expect(undefined, /* Comma */25, p);\n                } else {\n                  switch (match$7) {\n                    case /* Colon */24 :\n                        Res_parser.expect(undefined, /* Comma */25, p);\n                        break;\n                    case /* Comma */25 :\n                        Res_parser.next(undefined, p);\n                        break;\n                    case /* Eof */26 :\n                        break;\n                    \n                  }\n                }\n              } else if (match$7 >= 23) {\n                \n              } else {\n                Res_parser.expect(undefined, /* Comma */25, p);\n              }\n            } else {\n              Res_parser.expect(undefined, /* Comma */25, p);\n            }\n            Res_parser.eatBreadcrumb(p);\n            var first;\n            first = field$1.TAG === /* Otag */0 ? 
({\n                  TAG: /* Otag */0,\n                  _0: field$1._0,\n                  _1: attrs,\n                  _2: field$1._2\n                }) : ({\n                  TAG: /* Oinherit */1,\n                  _0: field$1._0\n                });\n            fields$2 = {\n              hd: first,\n              tl: parseCommaDelimitedRegion(p, /* StringFieldDeclarations */37, /* Rbrace */23, parseStringFieldDeclaration)\n            };\n          } else {\n            fields$2 = parseCommaDelimitedRegion(p, /* StringFieldDeclarations */37, /* Rbrace */23, parseStringFieldDeclaration);\n          }\n          Res_parser.expect(undefined, /* Rbrace */23, p);\n          var loc_loc_end$2 = p.prevEndPos;\n          var loc$2 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$2,\n            loc_ghost: false\n          };\n          var typ$4 = parseTypeAlias(p, Ast_helper.Typ.object_(loc$2, /* [] */0, fields$2, /* Closed */0));\n          var typ$5 = parseArrowTypeRest(true, startPos, typ$4, p);\n          Res_parser.optional(p, /* Comma */25);\n          var moreArgs$2 = parseCommaDelimitedRegion(p, /* TypExprList */39, /* Rparen */19, parseTypExprRegion);\n          Res_parser.expect(undefined, /* Rparen */19, p);\n          constrArgs = {\n            TAG: /* Pcstr_tuple */0,\n            _0: {\n              hd: typ$5,\n              tl: moreArgs$2\n            }\n          };\n        }\n        if (exit$1 === 2) {\n          var fields$3;\n          if (attrs) {\n            var field$2 = parseFieldDeclaration(p);\n            Res_parser.expect(undefined, /* Comma */25, p);\n            var first_pld_name = field$2.pld_name;\n            var first_pld_mutable = field$2.pld_mutable;\n            var first_pld_type = field$2.pld_type;\n            var first_pld_loc = field$2.pld_loc;\n            var first$1 = {\n              pld_name: first_pld_name,\n              pld_mutable: first_pld_mutable,\n              pld_type: 
first_pld_type,\n              pld_loc: first_pld_loc,\n              pld_attributes: attrs\n            };\n            fields$3 = {\n              hd: first$1,\n              tl: parseCommaDelimitedRegion(p, /* FieldDeclarations */38, /* Rbrace */23, parseFieldDeclarationRegion)\n            };\n          } else {\n            fields$3 = parseCommaDelimitedRegion(p, /* FieldDeclarations */38, /* Rbrace */23, parseFieldDeclarationRegion);\n          }\n          if (fields$3) {\n            \n          } else {\n            Res_parser.err(lbrace, undefined, p, Res_diagnostics.message(\"An inline record declaration needs at least one field\"));\n          }\n          Res_parser.expect(undefined, /* Rbrace */23, p);\n          Res_parser.optional(p, /* Comma */25);\n          Res_parser.expect(undefined, /* Rparen */19, p);\n          constrArgs = {\n            TAG: /* Pcstr_record */1,\n            _0: fields$3\n          };\n        }\n        \n      }\n      \n    } else {\n      var args = parseCommaDelimitedRegion(p, /* TypExprList */39, /* Rparen */19, parseTypExprRegion);\n      Res_parser.expect(undefined, /* Rparen */19, p);\n      constrArgs = {\n        TAG: /* Pcstr_tuple */0,\n        _0: args\n      };\n    }\n  } else {\n    constrArgs = {\n      TAG: /* Pcstr_tuple */0,\n      _0: /* [] */0\n    };\n  }\n  var match$8 = p.token;\n  var res = match$8 === 24 ? 
(Res_parser.next(undefined, p), parseTypExpr(undefined, undefined, undefined, p)) : undefined;\n  return [\n          constrArgs,\n          res\n        ];\n}\n\nfunction parsePackageConstraint(p) {\n  var match = p.token;\n  if (match !== 10) {\n    return ;\n  }\n  Res_parser.next(undefined, p);\n  Res_parser.expect(undefined, /* Typ */60, p);\n  var typeConstr = parseValuePath(p);\n  Res_parser.expect(undefined, /* Equal */14, p);\n  var typ = parseTypExpr(undefined, undefined, undefined, p);\n  return [\n          typeConstr,\n          typ\n        ];\n}\n\nfunction parseConstrDef(parseAttrs, p) {\n  var attrs = parseAttrs ? parseRegion(p, /* Attribute */50, parseAttribute) : /* [] */0;\n  var name = p.token;\n  var name$1;\n  var exit = 0;\n  if (typeof name === \"number\" || name.TAG !== /* Uident */5) {\n    exit = 1;\n  } else {\n    var loc_loc_start = p.startPos;\n    var loc_loc_end = p.endPos;\n    var loc = {\n      loc_start: loc_loc_start,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    Res_parser.next(undefined, p);\n    name$1 = $$Location.mkloc(name._0, loc);\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(name));\n    name$1 = $$Location.mknoloc(\"_\");\n  }\n  var match = p.token;\n  var kind;\n  if (typeof match === \"number\") {\n    switch (match) {\n      case /* Equal */14 :\n          Res_parser.next(undefined, p);\n          var longident = parseModuleLongIdent(false, p);\n          kind = {\n            TAG: /* Pext_rebind */1,\n            _0: longident\n          };\n          break;\n      case /* Lparen */18 :\n          var match$1 = parseConstrDeclArgs(p);\n          kind = {\n            TAG: /* Pext_decl */0,\n            _0: match$1[0],\n            _1: match$1[1]\n          };\n          break;\n      case /* EqualEqual */15 :\n      case /* EqualEqualEqual */16 :\n      case /* Bar */17 :\n      case /* Rparen */19 :\n      case /* Lbracket */20 :\n      case /* 
Rbracket */21 :\n      case /* Lbrace */22 :\n      case /* Rbrace */23 :\n          kind = {\n            TAG: /* Pext_decl */0,\n            _0: {\n              TAG: /* Pcstr_tuple */0,\n              _0: /* [] */0\n            },\n            _1: undefined\n          };\n          break;\n      case /* Colon */24 :\n          Res_parser.next(undefined, p);\n          var typ = parseTypExpr(undefined, undefined, undefined, p);\n          kind = {\n            TAG: /* Pext_decl */0,\n            _0: {\n              TAG: /* Pcstr_tuple */0,\n              _0: /* [] */0\n            },\n            _1: typ\n          };\n          break;\n      default:\n        kind = {\n          TAG: /* Pext_decl */0,\n          _0: {\n            TAG: /* Pcstr_tuple */0,\n            _0: /* [] */0\n          },\n          _1: undefined\n        };\n    }\n  } else {\n    kind = {\n      TAG: /* Pext_decl */0,\n      _0: {\n        TAG: /* Pcstr_tuple */0,\n        _0: /* [] */0\n      },\n      _1: undefined\n    };\n  }\n  return [\n          attrs,\n          name$1,\n          kind\n        ];\n}\n\nfunction parseTypeVarList(p) {\n  var _vars = /* [] */0;\n  while(true) {\n    var vars = _vars;\n    var match = p.token;\n    if (match !== 13) {\n      return List.rev(vars);\n    }\n    Res_parser.next(undefined, p);\n    var match$1 = parseLident(p);\n    var $$var = $$Location.mkloc(match$1[0], match$1[1]);\n    _vars = {\n      hd: $$var,\n      tl: vars\n    };\n    continue ;\n  };\n}\n\nfunction parseModuleExpr(p) {\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var modExpr = isEs6ArrowFunctor(p) ? 
parseFunctorModuleExpr(p) : parsePrimaryModExpr(p);\n  return {\n          pmod_desc: modExpr.pmod_desc,\n          pmod_loc: modExpr.pmod_loc,\n          pmod_attributes: List.concat({\n                hd: modExpr.pmod_attributes,\n                tl: {\n                  hd: attrs,\n                  tl: /* [] */0\n                }\n              })\n        };\n}\n\nfunction parseTypeConstructorDeclaration(startPos, p) {\n  Res_parser.leaveBreadcrumb(p, /* ConstructorDeclaration */35);\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var uident = p.token;\n  if (typeof uident !== \"number\" && uident.TAG === /* Uident */5) {\n    var uidentLoc_loc_start = p.startPos;\n    var uidentLoc_loc_end = p.endPos;\n    var uidentLoc = {\n      loc_start: uidentLoc_loc_start,\n      loc_end: uidentLoc_loc_end,\n      loc_ghost: false\n    };\n    Res_parser.next(undefined, p);\n    var match = parseConstrDeclArgs(p);\n    Res_parser.eatBreadcrumb(p);\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    return Ast_helper.Type.constructor(loc, attrs, undefined, match[0], match[1], $$Location.mkloc(uident._0, uidentLoc));\n  }\n  Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(uident));\n  return Ast_helper.Type.constructor(undefined, undefined, undefined, undefined, undefined, $$Location.mknoloc(\"_\"));\n}\n\nfunction parseConstrainedPatternRegion(p) {\n  var token = p.token;\n  if (Res_grammar.isPatternStart(token)) {\n    return parseConstrainedPattern(p);\n  }\n  \n}\n\nfunction parseExprBlock(first, p) {\n  Res_parser.leaveBreadcrumb(p, /* ExprBlock */10);\n  var item = first !== undefined ? 
first : parseExprBlockItem(p);\n  parseNewlineOrSemicolonExprBlock(p);\n  var blockExpr;\n  if (Res_grammar.isBlockExprStart(p.token)) {\n    var next = parseExprBlockItem(p);\n    var init = item.pexp_loc;\n    var loc_loc_start = init.loc_start;\n    var loc_loc_end = next.pexp_loc.loc_end;\n    var loc_loc_ghost = init.loc_ghost;\n    var loc = {\n      loc_start: loc_loc_start,\n      loc_end: loc_loc_end,\n      loc_ghost: loc_loc_ghost\n    };\n    blockExpr = Ast_helper.Exp.sequence(loc, undefined, item, next);\n  } else {\n    blockExpr = item;\n  }\n  Res_parser.eatBreadcrumb(p);\n  return overParseConstrainedOrCoercedOrArrowExpression(p, blockExpr);\n}\n\nfunction parsePatternRegion(p) {\n  var token = p.token;\n  if (token === 6) {\n    Res_parser.next(undefined, p);\n    return [\n            true,\n            parseConstrainedPattern(p)\n          ];\n  } else if (Res_grammar.isPatternStart(token)) {\n    return [\n            false,\n            parseConstrainedPattern(p)\n          ];\n  } else {\n    return ;\n  }\n}\n\nfunction parseSpreadExprRegion(p) {\n  var token = p.token;\n  if (token !== 6) {\n    if (Res_grammar.isExprStart(token)) {\n      return [\n              false,\n              parseConstrainedOrCoercedExpr(p)\n            ];\n    } else {\n      return ;\n    }\n  }\n  Res_parser.next(undefined, p);\n  var expr = parseConstrainedOrCoercedExpr(p);\n  return [\n          true,\n          expr\n        ];\n}\n\nfunction parsePackageType(startPos, attrs, p) {\n  var modTypePath = parseModuleLongIdent(true, p);\n  var match = p.token;\n  if (typeof match !== \"number\" && match.TAG === /* Lident */4 && match._0 === \"with\") {\n    Res_parser.next(undefined, p);\n    var constraints = parsePackageConstraints(p);\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    return Ast_helper.Typ.$$package(loc, attrs, modTypePath, constraints);\n  }\n  
var loc_loc_end$1 = p.prevEndPos;\n  var loc$1 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$1,\n    loc_ghost: false\n  };\n  return Ast_helper.Typ.$$package(loc$1, attrs, modTypePath, /* [] */0);\n}\n\nfunction parseConstrainedModExpr(p) {\n  var modExpr = parseModuleExpr(p);\n  var match = p.token;\n  if (match !== 24) {\n    return modExpr;\n  }\n  Res_parser.next(undefined, p);\n  var modType = parseModuleType(undefined, undefined, p);\n  var loc_loc_start = modExpr.pmod_loc.loc_start;\n  var loc_loc_end = modType.pmty_loc.loc_end;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Mod.constraint_(loc, undefined, modExpr, modType);\n}\n\nfunction parsePolymorphicVariantTypeArgs(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var args = parseCommaDelimitedRegion(p, /* TypExprList */39, /* Rparen */19, parseTypExprRegion);\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  if (args) {\n    var typ = args.hd;\n    var tmp = typ.ptyp_desc;\n    if (typeof tmp === \"number\") {\n      if (!args.tl) {\n        return typ;\n      }\n      \n    } else if (tmp.TAG === /* Ptyp_tuple */2) {\n      if (!args.tl) {\n        if (p.mode === /* ParseForTypeChecker */0) {\n          return typ;\n        } else {\n          return Ast_helper.Typ.tuple(loc, /* [] */0, args);\n        }\n      }\n      \n    } else if (!args.tl) {\n      return typ;\n    }\n    \n  }\n  return Ast_helper.Typ.tuple(loc, /* [] */0, args);\n}\n\nfunction parsePolymorphicVariantTypeSpecHash(attrs, full, p) {\n  var startPos = p.startPos;\n  var match = parseHashIdent(startPos, p);\n  var loop = function (p) {\n    var match = p.token;\n    if (match !== 69) {\n      return /* [] */0;\n    }\n    if (!full) {\n      return /* [] */0;\n    }\n    
Res_parser.next(undefined, p);\n    var rowField = parsePolymorphicVariantTypeArgs(p);\n    return {\n            hd: rowField,\n            tl: loop(p)\n          };\n  };\n  var match$1 = p.token;\n  var match$2 = typeof match$1 === \"number\" ? (\n      match$1 !== 18 ? (\n          match$1 !== 69 || !full ? [\n              /* [] */0,\n              true\n            ] : (Res_parser.next(undefined, p), [\n                {\n                  hd: parsePolymorphicVariantTypeArgs(p),\n                  tl: /* [] */0\n                },\n                true\n              ])\n        ) : [\n          {\n            hd: parsePolymorphicVariantTypeArgs(p),\n            tl: /* [] */0\n          },\n          false\n        ]\n    ) : [\n      /* [] */0,\n      true\n    ];\n  var tuples = Pervasives.$at(match$2[0], loop(p));\n  return {\n          TAG: /* Rtag */0,\n          _0: $$Location.mkloc(match[0], match[1]),\n          _1: attrs,\n          _2: match$2[1],\n          _3: tuples\n        };\n}\n\nfunction parseTypExprRegion(p) {\n  if (Res_grammar.isTypExprStart(p.token)) {\n    return parseTypExpr(undefined, undefined, undefined, p);\n  }\n  \n}\n\nfunction parseModuleBindingBody(p) {\n  var match = p.token;\n  var returnModType = match === 24 ? (Res_parser.next(undefined, p), parseModuleType(undefined, undefined, p)) : undefined;\n  Res_parser.expect(undefined, /* Equal */14, p);\n  var modExpr = parseModuleExpr(p);\n  if (returnModType !== undefined) {\n    return Ast_helper.Mod.constraint_({\n                loc_start: returnModType.pmty_loc.loc_start,\n                loc_end: modExpr.pmod_loc.loc_end,\n                loc_ghost: false\n              }, undefined, modExpr, returnModType);\n  } else {\n    return modExpr;\n  }\n}\n\nfunction parsePrimaryExpr(operand, noCallOpt, p) {\n  var noCall = noCallOpt !== undefined ? 
noCallOpt : false;\n  var startPos = operand.pexp_loc.loc_start;\n  var _expr = operand;\n  while(true) {\n    var expr = _expr;\n    var match = p.token;\n    if (typeof match !== \"number\") {\n      return expr;\n    }\n    if (match !== 4) {\n      if (match >= 21) {\n        if (match !== 80) {\n          return expr;\n        }\n        if (!(noCall === false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum)) {\n          return expr;\n        }\n        var match$1 = expr.pexp_desc;\n        if (typeof match$1 !== \"number\" && match$1.TAG === /* Pexp_ident */0) {\n          var ident = match$1._0.txt;\n          switch (ident.TAG | 0) {\n            case /* Lident */0 :\n                return parseTemplateExpr(ident._0, p);\n            case /* Ldot */1 :\n            case /* Lapply */2 :\n                break;\n            \n          }\n        }\n        Res_parser.err(expr.pexp_loc.loc_start, expr.pexp_loc.loc_end, p, Res_diagnostics.message(\"Tagged template literals are currently restricted to names like: json`null`.\"));\n        return parseTemplateExpr(undefined, p);\n      }\n      if (match < 18) {\n        return expr;\n      }\n      switch (match) {\n        case /* Lparen */18 :\n            if (!(noCall === false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum)) {\n              return expr;\n            }\n            _expr = parseCallExpr(p, expr);\n            continue ;\n        case /* Rparen */19 :\n            return expr;\n        case /* Lbracket */20 :\n            if (noCall === false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum) {\n              return parseBracketAccess(p, expr, startPos);\n            } else {\n              return expr;\n            }\n        \n      }\n    } else {\n      Res_parser.next(undefined, p);\n      var lident = parseValuePathAfterDot(p);\n      var match$2 = p.token;\n      if (match$2 === 14 && noCall === false) {\n        Res_parser.leaveBreadcrumb(p, /* ExprSetField */9);\n        
Res_parser.next(undefined, p);\n        var targetExpr = parseExpr(undefined, p);\n        var loc_loc_end = p.prevEndPos;\n        var loc = {\n          loc_start: startPos,\n          loc_end: loc_loc_end,\n          loc_ghost: false\n        };\n        var setfield = Ast_helper.Exp.setfield(loc, undefined, expr, lident, targetExpr);\n        Res_parser.eatBreadcrumb(p);\n        return setfield;\n      }\n      var endPos = p.prevEndPos;\n      var loc$1 = {\n        loc_start: startPos,\n        loc_end: endPos,\n        loc_ghost: false\n      };\n      _expr = Ast_helper.Exp.field(loc$1, undefined, expr, lident);\n      continue ;\n    }\n  };\n}\n\nfunction parseExceptionDef(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Exception */27, p);\n  var match = parseConstrDef(false, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Te.constructor(loc, attrs, undefined, undefined, match[1], match[2]);\n}\n\nfunction parseNewlineOrSemicolonExprBlock(p) {\n  var token = p.token;\n  if (token === 8) {\n    return Res_parser.next(undefined, p);\n  } else if (Res_grammar.isBlockExprStart(token) && p.prevEndPos.pos_lnum >= p.startPos.pos_lnum) {\n    return Res_parser.err(p.prevEndPos, p.endPos, p, Res_diagnostics.message(\"consecutive expressions on a line must be separated by ';' or a newline\"));\n  } else {\n    return ;\n  }\n}\n\nfunction parseLetBindings(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.optional(p, /* Let */9);\n  var recFlag = Res_parser.optional(p, /* Rec */11) ? 
/* Recursive */1 : /* Nonrecursive */0;\n  var first = parseLetBindingBody(startPos, attrs, p);\n  var loop = function (p, _bindings) {\n    while(true) {\n      var bindings = _bindings;\n      var startPos = p.startPos;\n      var attrs = parseAttributesAndBinding(p);\n      var match = p.token;\n      if (match !== 10) {\n        return List.rev(bindings);\n      }\n      Res_parser.next(undefined, p);\n      var match$1 = p.token;\n      var attrs$1;\n      if (typeof match$1 === \"number\" && match$1 >= 84) {\n        var exportLoc_loc_start = p.startPos;\n        var exportLoc_loc_end = p.endPos;\n        var exportLoc = {\n          loc_start: exportLoc_loc_start,\n          loc_end: exportLoc_loc_end,\n          loc_ghost: false\n        };\n        Res_parser.next(undefined, p);\n        var genTypeAttr_0 = $$Location.mkloc(\"genType\", exportLoc);\n        var genTypeAttr_1 = {\n          TAG: /* PStr */0,\n          _0: /* [] */0\n        };\n        var genTypeAttr = [\n          genTypeAttr_0,\n          genTypeAttr_1\n        ];\n        attrs$1 = {\n          hd: genTypeAttr,\n          tl: attrs\n        };\n      } else {\n        attrs$1 = attrs;\n      }\n      Res_parser.optional(p, /* Let */9);\n      var letBinding = parseLetBindingBody(startPos, attrs$1, p);\n      _bindings = {\n        hd: letBinding,\n        tl: bindings\n      };\n      continue ;\n    };\n  };\n  return [\n          recFlag,\n          loop(p, {\n                hd: first,\n                tl: /* [] */0\n              })\n        ];\n}\n\nfunction parseTernaryExpr(leftOperand, p) {\n  var match = p.token;\n  if (match !== 49) {\n    return leftOperand;\n  }\n  Res_parser.leaveBreadcrumb(p, /* Ternary */2);\n  Res_parser.next(undefined, p);\n  var trueBranch = parseExpr(/* TernaryTrueBranchExpr */1, p);\n  Res_parser.expect(undefined, /* Colon */24, p);\n  var falseBranch = parseExpr(undefined, p);\n  Res_parser.eatBreadcrumb(p);\n  var init = leftOperand.pexp_loc;\n  
var loc_loc_start = leftOperand.pexp_loc.loc_start;\n  var loc_loc_end = falseBranch.pexp_loc.loc_end;\n  var loc_loc_ghost = init.loc_ghost;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: loc_loc_ghost\n  };\n  return Ast_helper.Exp.ifthenelse(loc, {\n              hd: ternaryAttr,\n              tl: /* [] */0\n            }, leftOperand, trueBranch, falseBranch);\n}\n\nfunction parseFirstClassModuleExpr(startPos, p) {\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var modExpr = parseModuleExpr(p);\n  var modEndLoc = p.prevEndPos;\n  var match = p.token;\n  if (match === 24) {\n    var colonStart = p.startPos;\n    Res_parser.next(undefined, p);\n    var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n    var packageType = parsePackageType(colonStart, attrs, p);\n    Res_parser.expect(undefined, /* Rparen */19, p);\n    var loc = {\n      loc_start: startPos,\n      loc_end: modEndLoc,\n      loc_ghost: false\n    };\n    var firstClassModule = Ast_helper.Exp.pack(loc, undefined, modExpr);\n    var loc_loc_end = p.prevEndPos;\n    var loc$1 = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    return Ast_helper.Exp.constraint_(loc$1, undefined, firstClassModule, packageType);\n  }\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  var loc_loc_end$1 = p.prevEndPos;\n  var loc$2 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$1,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.pack(loc$2, undefined, modExpr);\n}\n\nfunction parseBinaryExpr(contextOpt, a, p, prec) {\n  var context = contextOpt !== undefined ? contextOpt : /* OrdinaryExpr */0;\n  var a$1 = a !== undefined ? 
a : parseOperandExpr(context, p);\n  var _a = a$1;\n  while(true) {\n    var a$2 = _a;\n    var token = p.token;\n    var tokenPrec;\n    var exit = 0;\n    if (typeof token === \"number\") {\n      if (token >= 36) {\n        if (token !== 42) {\n          tokenPrec = Res_token.precedence(token);\n        } else {\n          exit = 1;\n        }\n      } else if (token >= 34) {\n        exit = 1;\n      } else {\n        tokenPrec = Res_token.precedence(token);\n      }\n    } else {\n      tokenPrec = Res_token.precedence(token);\n    }\n    if (exit === 1) {\n      tokenPrec = !Res_scanner.isBinaryOp(p.scanner.src, p.startPos.pos_cnum, p.endPos.pos_cnum) && p.startPos.pos_lnum > p.prevEndPos.pos_lnum ? -1 : Res_token.precedence(token);\n    }\n    if (tokenPrec < prec) {\n      return a$2;\n    }\n    Res_parser.leaveBreadcrumb(p, /* ExprBinaryAfterOp */{\n          _0: token\n        });\n    var startPos = p.startPos;\n    Res_parser.next(undefined, p);\n    var endPos = p.prevEndPos;\n    var b = parseBinaryExpr(context, undefined, p, tokenPrec + 1 | 0);\n    var loc_loc_start = a$2.pexp_loc.loc_start;\n    var loc_loc_end = b.pexp_loc.loc_end;\n    var loc = {\n      loc_start: loc_loc_start,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    var expr = Ast_helper.Exp.apply(loc, undefined, makeInfixOperator(p, token, startPos, endPos), {\n          hd: [\n            /* Nolabel */0,\n            a$2\n          ],\n          tl: {\n            hd: [\n              /* Nolabel */0,\n              b\n            ],\n            tl: /* [] */0\n          }\n        });\n    Res_parser.eatBreadcrumb(p);\n    _a = expr;\n    continue ;\n  };\n}\n\nfunction parseOperandExpr(context, p) {\n  var startPos = p.startPos;\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var match = p.token;\n  var expr;\n  var exit = 0;\n  if (typeof match === \"number\") {\n    if (match >= 56) {\n      if (match !== 82) {\n        exit = 1;\n      } 
else {\n        expr = parseTryExpression(p);\n      }\n    } else if (match >= 46) {\n      switch (match) {\n        case /* Assert */46 :\n            Res_parser.next(undefined, p);\n            var expr$1 = parseUnaryExpr(p);\n            var loc_loc_end = p.prevEndPos;\n            var loc = {\n              loc_start: startPos,\n              loc_end: loc_loc_end,\n              loc_ghost: false\n            };\n            expr = Ast_helper.Exp.assert_(loc, undefined, expr$1);\n            break;\n        case /* Lazy */47 :\n            Res_parser.next(undefined, p);\n            var expr$2 = parseUnaryExpr(p);\n            var loc_loc_end$1 = p.prevEndPos;\n            var loc$1 = {\n              loc_start: startPos,\n              loc_end: loc_loc_end$1,\n              loc_ghost: false\n            };\n            expr = Ast_helper.Exp.lazy_(loc$1, undefined, expr$2);\n            break;\n        case /* If */50 :\n            expr = parseIfOrIfLetExpression(p);\n            break;\n        case /* For */52 :\n            expr = parseForExpression(p);\n            break;\n        case /* Tilde */48 :\n        case /* Question */49 :\n        case /* Else */51 :\n        case /* In */53 :\n            exit = 1;\n            break;\n        case /* While */54 :\n            expr = parseWhileExpression(p);\n            break;\n        case /* Switch */55 :\n            expr = parseSwitchExpression(p);\n            break;\n        \n      }\n    } else {\n      exit = 1;\n    }\n  } else {\n    exit = 1;\n  }\n  if (exit === 1) {\n    expr = context !== /* WhenExpr */2 && isEs6ArrowExpression(context === /* TernaryTrueBranchExpr */1, p) ? 
parseEs6ArrowExpression(context, undefined, p) : parseUnaryExpr(p);\n  }\n  return {\n          pexp_desc: expr.pexp_desc,\n          pexp_loc: expr.pexp_loc,\n          pexp_attributes: List.concat({\n                hd: expr.pexp_attributes,\n                tl: {\n                  hd: attrs,\n                  tl: /* [] */0\n                }\n              })\n        };\n}\n\nfunction parseStandaloneAttribute(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* AtAt */76, p);\n  var attrId = parseAttributeId(startPos, p);\n  var payload = parsePayload(p);\n  return [\n          attrId,\n          payload\n        ];\n}\n\nfunction parseJsExport(attrs, p) {\n  var exportStart = p.startPos;\n  Res_parser.expect(undefined, /* Export */84, p);\n  var exportLoc_loc_end = p.prevEndPos;\n  var exportLoc = {\n    loc_start: exportStart,\n    loc_end: exportLoc_loc_end,\n    loc_ghost: false\n  };\n  var genTypeAttr_0 = $$Location.mkloc(\"genType\", exportLoc);\n  var genTypeAttr_1 = {\n    TAG: /* PStr */0,\n    _0: /* [] */0\n  };\n  var genTypeAttr = [\n    genTypeAttr_0,\n    genTypeAttr_1\n  ];\n  var attrs$1 = {\n    hd: genTypeAttr,\n    tl: attrs\n  };\n  var match = p.token;\n  if (match === 60) {\n    var ext = parseTypeDefinitionOrExtension(attrs$1, p);\n    if (ext.TAG === /* TypeDef */0) {\n      return Ast_helper.Str.type_(undefined, ext.recFlag, ext.types);\n    } else {\n      return Ast_helper.Str.type_extension(undefined, ext._0);\n    }\n  }\n  var match$1 = parseLetBindings(attrs$1, p);\n  return Ast_helper.Str.value(undefined, match$1[0], match$1[1]);\n}\n\nfunction parseModuleOrModuleTypeImplOrPackExpr(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Module */65, p);\n  var match = p.token;\n  if (typeof match !== \"number\") {\n    return parseMaybeRecModuleBinding(attrs, startPos, p);\n  }\n  if (match !== 18) {\n    if (match !== 60) {\n      return parseMaybeRecModuleBinding(attrs, startPos, p);\n  
  } else {\n      return parseModuleTypeImpl(attrs, startPos, p);\n    }\n  }\n  var expr = parseFirstClassModuleExpr(startPos, p);\n  var a = parsePrimaryExpr(expr, undefined, p);\n  var expr$1 = parseBinaryExpr(undefined, a, p, 1);\n  var expr$2 = parseTernaryExpr(expr$1, p);\n  return Ast_helper.Str.$$eval(undefined, attrs, expr$2);\n}\n\nfunction parseExternalDef(attrs, startPos, p) {\n  Res_parser.leaveBreadcrumb(p, /* External */21);\n  Res_parser.expect(undefined, /* External */59, p);\n  var match = parseLident(p);\n  var name = $$Location.mkloc(match[0], match[1]);\n  Res_parser.expect(/* TypeExpression */20, /* Colon */24, p);\n  var typExpr = parseTypExpr(undefined, undefined, undefined, p);\n  var equalStart = p.startPos;\n  var equalEnd = p.endPos;\n  Res_parser.expect(undefined, /* Equal */14, p);\n  var s = p.token;\n  var prim;\n  var exit = 0;\n  if (typeof s === \"number\" || s.TAG !== /* String */3) {\n    exit = 1;\n  } else {\n    Res_parser.next(undefined, p);\n    prim = {\n      hd: s._0,\n      tl: /* [] */0\n    };\n  }\n  if (exit === 1) {\n    Res_parser.err(equalStart, equalEnd, p, Res_diagnostics.message(\"An external requires the name of the JS value you're referring to, like \\\"\" + (name.txt + \"\\\".\")));\n    prim = /* [] */0;\n  }\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  var vb = Ast_helper.Val.mk(loc, attrs, undefined, prim, name, typExpr);\n  Res_parser.eatBreadcrumb(p);\n  return vb;\n}\n\nfunction parseNewlineOrSemicolonStructure(p) {\n  var token = p.token;\n  if (token === 8) {\n    return Res_parser.next(undefined, p);\n  } else if (Res_grammar.isStructureItemStart(token) && p.prevEndPos.pos_lnum >= p.startPos.pos_lnum) {\n    return Res_parser.err(p.prevEndPos, p.endPos, p, Res_diagnostics.message(\"consecutive statements on a line must be separated by ';' or a newline\"));\n  } else {\n    return ;\n  }\n}\n\nfunction 
parseTypeDefinitionOrExtension(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Typ */60, p);\n  var match = p.token;\n  var recFlag;\n  if (typeof match === \"number\") {\n    if (match === /* Rec */11) {\n      Res_parser.next(undefined, p);\n      recFlag = /* Recursive */1;\n    } else {\n      recFlag = /* Nonrecursive */0;\n    }\n  } else if (match.TAG === /* Lident */4 && match._0 === \"nonrec\") {\n    Res_parser.next(undefined, p);\n    recFlag = /* Nonrecursive */0;\n  } else {\n    recFlag = /* Nonrecursive */0;\n  }\n  var name = parseValuePath(p);\n  var params = parseTypeParams(name, p);\n  var match$1 = p.token;\n  if (match$1 === 39) {\n    return {\n            TAG: /* TypeExt */1,\n            _0: parseTypeExtension(params, attrs, name, p)\n          };\n  }\n  var longident = name.txt;\n  var exit = 0;\n  switch (longident.TAG | 0) {\n    case /* Lident */0 :\n        break;\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        exit = 1;\n        break;\n    \n  }\n  if (exit === 1) {\n    Res_parser.err(name.loc.loc_start, name.loc.loc_end, p, Res_diagnostics.message(typeDeclarationNameLongident(longident)));\n  }\n  var typeDefs = parseTypeDefinitions(attrs, name, params, startPos, p);\n  return {\n          TAG: /* TypeDef */0,\n          recFlag: recFlag,\n          types: typeDefs\n        };\n}\n\nfunction parseExtension(moduleLanguageOpt, p) {\n  var moduleLanguage = moduleLanguageOpt !== undefined ? 
moduleLanguageOpt : false;\n  var startPos = p.startPos;\n  if (moduleLanguage) {\n    Res_parser.expect(undefined, /* PercentPercent */78, p);\n  } else {\n    Res_parser.expect(undefined, /* Percent */77, p);\n  }\n  var attrId = parseAttributeId(startPos, p);\n  var payload = parsePayload(p);\n  return [\n          attrId,\n          payload\n        ];\n}\n\nfunction parseIncludeStatement(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Include */64, p);\n  var modExpr = parseModuleExpr(p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Incl.mk(loc, attrs, undefined, modExpr);\n}\n\nfunction parseJsImport(startPos, attrs, p) {\n  Res_parser.expect(undefined, /* Import */83, p);\n  var match = p.token;\n  var importSpec;\n  var exit = 0;\n  if (typeof match === \"number\") {\n    if (match === /* At */75) {\n      exit = 1;\n    } else {\n      importSpec = {\n        TAG: /* Spec */1,\n        _0: parseJsFfiDeclarations(p)\n      };\n    }\n  } else if (match.TAG === /* Lident */4) {\n    exit = 1;\n  } else {\n    importSpec = {\n      TAG: /* Spec */1,\n      _0: parseJsFfiDeclarations(p)\n    };\n  }\n  if (exit === 1) {\n    var decl = parseJsFfiDeclaration(p);\n    var decl$1;\n    if (decl !== undefined) {\n      decl$1 = decl;\n    } else {\n      throw {\n            RE_EXN_ID: \"Assert_failure\",\n            _1: [\n              \"res_core.res\",\n              6159,\n              14\n            ],\n            Error: new Error()\n          };\n    }\n    importSpec = {\n      TAG: /* Default */0,\n      _0: decl$1\n    };\n  }\n  var scope = parseJsFfiScope(p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Res_js_ffi.importDescr(attrs, scope, importSpec, loc);\n}\n\nfunction parseRecordPatternField(p) {\n  var label = 
parseValuePath(p);\n  var match = p.token;\n  var pattern = match === 24 ? (Res_parser.next(undefined, p), parsePattern(undefined, undefined, p)) : Ast_helper.Pat.$$var(label.loc, undefined, $$Location.mkloc(Longident.last(label.txt), label.loc));\n  return [\n          label,\n          pattern\n        ];\n}\n\nfunction parseTypeParam(p) {\n  var match = p.token;\n  var variance;\n  if (typeof match === \"number\") {\n    switch (match) {\n      case /* Minus */34 :\n          Res_parser.next(undefined, p);\n          variance = /* Contravariant */1;\n          break;\n      case /* MinusDot */35 :\n          variance = /* Invariant */2;\n          break;\n      case /* Plus */36 :\n          Res_parser.next(undefined, p);\n          variance = /* Covariant */0;\n          break;\n      default:\n        variance = /* Invariant */2;\n    }\n  } else {\n    variance = /* Invariant */2;\n  }\n  var token = p.token;\n  if (typeof token === \"number\") {\n    switch (token) {\n      case /* Underscore */12 :\n          var loc_loc_start = p.startPos;\n          var loc_loc_end = p.endPos;\n          var loc = {\n            loc_start: loc_loc_start,\n            loc_end: loc_loc_end,\n            loc_ghost: false\n          };\n          Res_parser.next(undefined, p);\n          return [\n                  Ast_helper.Typ.any(loc, undefined, undefined),\n                  variance\n                ];\n      case /* SingleQuote */13 :\n          Res_parser.next(undefined, p);\n          var match$1 = parseIdent(typeParam, p.startPos, p);\n          return [\n                  Ast_helper.Typ.$$var(match$1[1], undefined, match$1[0]),\n                  variance\n                ];\n      default:\n        return ;\n    }\n  } else {\n    switch (token.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          break;\n      default:\n        return ;\n    }\n  }\n  Res_parser.err(undefined, undefined, p, Res_diagnostics.message(\"Type params start with 
a singlequote: '\" + Res_token.toString(token)));\n  var match$2 = parseIdent(typeParam, p.startPos, p);\n  return [\n          Ast_helper.Typ.$$var(match$2[1], undefined, match$2[0]),\n          variance\n        ];\n}\n\nfunction parseTypeConstructorArgs(constrName, p) {\n  var opening = p.token;\n  var openingStartPos = p.startPos;\n  if (typeof opening !== \"number\") {\n    return /* [] */0;\n  }\n  if (opening !== 18 && opening !== 42) {\n    return /* [] */0;\n  }\n  Res_scanner.setDiamondMode(p.scanner);\n  Res_parser.next(undefined, p);\n  var typeArgs = parseCommaDelimitedRegion(p, /* TypExprList */39, /* GreaterThan */41, parseTypeConstructorArgRegion);\n  var match = p.token;\n  if (match === 19 && opening === /* Lparen */18) {\n    var typ = Ast_helper.Typ.constr(undefined, undefined, constrName, typeArgs);\n    var msg = Res_doc.toString(80, Res_doc.breakableGroup(true, Res_doc.concat({\n                  hd: Res_doc.text(\"Type parameters require angle brackets:\"),\n                  tl: {\n                    hd: Res_doc.indent(Res_doc.concat({\n                              hd: Res_doc.line,\n                              tl: {\n                                hd: Res_printer.printTypExpr(typ, Res_comments_table.empty),\n                                tl: /* [] */0\n                              }\n                            })),\n                    tl: /* [] */0\n                  }\n                })));\n    Res_parser.err(openingStartPos, undefined, p, Res_diagnostics.message(msg));\n    Res_parser.next(undefined, p);\n  } else {\n    Res_parser.expect(undefined, /* GreaterThan */41, p);\n  }\n  Res_scanner.popMode(p.scanner, /* Diamond */1);\n  return typeArgs;\n}\n\nfunction parseTypeRepresentation(p) {\n  Res_parser.leaveBreadcrumb(p, /* TypeRepresentation */33);\n  var privateFlag = Res_parser.optional(p, /* Private */61) ? 
/* Private */0 : /* Public */1;\n  var token = p.token;\n  var kind;\n  var exit = 0;\n  if (typeof token === \"number\") {\n    switch (token) {\n      case /* DotDot */5 :\n          Res_parser.next(undefined, p);\n          kind = /* Ptype_open */1;\n          break;\n      case /* Bar */17 :\n          kind = {\n            TAG: /* Ptype_variant */0,\n            _0: parseTypeConstructorDeclarations(undefined, p)\n          };\n          break;\n      case /* Lbrace */22 :\n          kind = {\n            TAG: /* Ptype_record */1,\n            _0: parseRecordDeclaration(p)\n          };\n          break;\n      default:\n        exit = 1;\n    }\n  } else if (token.TAG === /* Uident */5) {\n    kind = {\n      TAG: /* Ptype_variant */0,\n      _0: parseTypeConstructorDeclarations(undefined, p)\n    };\n  } else {\n    exit = 1;\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n    kind = {\n      TAG: /* Ptype_variant */0,\n      _0: /* [] */0\n    };\n  }\n  Res_parser.eatBreadcrumb(p);\n  return [\n          privateFlag,\n          kind\n        ];\n}\n\nfunction parseTypeConstructorDeclarations(first, p) {\n  var firstConstrDecl;\n  if (first !== undefined) {\n    firstConstrDecl = first;\n  } else {\n    var startPos = p.startPos;\n    Res_parser.optional(p, /* Bar */17);\n    firstConstrDecl = parseTypeConstructorDeclaration(startPos, p);\n  }\n  return {\n          hd: firstConstrDecl,\n          tl: parseRegion(p, /* ConstructorDeclaration */35, parseTypeConstructorDeclarationWithBar)\n        };\n}\n\nfunction parseParameterList(p) {\n  var parameters = parseCommaDelimitedRegion(p, /* ParameterList */36, /* Rparen */19, parseParameter);\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  return parameters;\n}\n\nfunction parseTypeConstraint(p) {\n  var startPos = p.startPos;\n  var match = p.token;\n  if (match !== 63) {\n    return ;\n  }\n  Res_parser.next(undefined, p);\n  
Res_parser.expect(undefined, /* SingleQuote */13, p);\n  var ident = p.token;\n  if (typeof ident !== \"number\" && ident.TAG === /* Lident */4) {\n    var identLoc_loc_end = p.endPos;\n    var identLoc = {\n      loc_start: startPos,\n      loc_end: identLoc_loc_end,\n      loc_ghost: false\n    };\n    Res_parser.next(undefined, p);\n    Res_parser.expect(undefined, /* Equal */14, p);\n    var typ = parseTypExpr(undefined, undefined, undefined, p);\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    return [\n            Ast_helper.Typ.$$var(identLoc, undefined, ident._0),\n            typ,\n            loc\n          ];\n  }\n  Res_parser.err(undefined, undefined, p, Res_diagnostics.lident(ident));\n  var loc_loc_end$1 = p.prevEndPos;\n  var loc$1 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$1,\n    loc_ghost: false\n  };\n  return [\n          Ast_helper.Typ.any(undefined, undefined, undefined),\n          parseTypExpr(undefined, undefined, undefined, p),\n          loc$1\n        ];\n}\n\nfunction parseAttributesAndBinding(p) {\n  var err = p.scanner.err;\n  var ch = p.scanner.ch;\n  var offset = p.scanner.offset;\n  var lineOffset = p.scanner.lineOffset;\n  var lnum = p.scanner.lnum;\n  var mode = p.scanner.mode;\n  var token = p.token;\n  var startPos = p.startPos;\n  var endPos = p.endPos;\n  var prevEndPos = p.prevEndPos;\n  var breadcrumbs = p.breadcrumbs;\n  var errors = p.errors;\n  var diagnostics = p.diagnostics;\n  var comments = p.comments;\n  var match = p.token;\n  if (match !== 75) {\n    return /* [] */0;\n  }\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var match$1 = p.token;\n  if (match$1 === 10) {\n    return attrs;\n  } else {\n    p.scanner.err = err;\n    p.scanner.ch = ch;\n    p.scanner.offset = offset;\n    p.scanner.lineOffset = lineOffset;\n    p.scanner.lnum = lnum;\n    p.scanner.mode = mode;\n    
p.token = token;\n    p.startPos = startPos;\n    p.endPos = endPos;\n    p.prevEndPos = prevEndPos;\n    p.breadcrumbs = breadcrumbs;\n    p.errors = errors;\n    p.diagnostics = diagnostics;\n    p.comments = comments;\n    return /* [] */0;\n  }\n}\n\nfunction parseTypeDef(attrs, startPos, p) {\n  Res_parser.leaveBreadcrumb(p, /* TypeDef */28);\n  Res_parser.leaveBreadcrumb(p, /* TypeConstrName */29);\n  var match = parseLident(p);\n  var loc = match[1];\n  var name = match[0];\n  var typeConstrName = $$Location.mkloc(name, loc);\n  Res_parser.eatBreadcrumb(p);\n  var constrName = $$Location.mkloc({\n        TAG: /* Lident */0,\n        _0: name\n      }, loc);\n  var params = parseTypeParams(constrName, p);\n  var match$1 = parseTypeEquationAndRepresentation(p);\n  var cstrs = parseRegion(p, /* TypeConstraint */51, parseTypeConstraint);\n  var loc_loc_end = p.prevEndPos;\n  var loc$1 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  var typeDef = Ast_helper.Type.mk(loc$1, attrs, undefined, undefined, params, cstrs, match$1[2], match$1[1], match$1[0], typeConstrName);\n  Res_parser.eatBreadcrumb(p);\n  return typeDef;\n}\n\nfunction parseTypeEquationAndRepresentation(p) {\n  var token = p.token;\n  var exit = 0;\n  if (typeof token !== \"number\") {\n    return [\n            undefined,\n            /* Public */1,\n            /* Ptype_abstract */0\n          ];\n  }\n  if (token !== 14) {\n    if (token !== 17) {\n      return [\n              undefined,\n              /* Public */1,\n              /* Ptype_abstract */0\n            ];\n    }\n    exit = 1;\n  } else {\n    exit = 1;\n  }\n  if (exit === 1) {\n    if (token === /* Bar */17) {\n      Res_parser.expect(undefined, /* Equal */14, p);\n    }\n    Res_parser.next(undefined, p);\n    var match = p.token;\n    var exit$1 = 0;\n    if (typeof match === \"number\") {\n      switch (match) {\n        case /* DotDot */5 :\n        case /* Bar */17 :\n            exit$1 
= 3;\n            break;\n        case /* Lbrace */22 :\n            return parseRecordOrObjectDecl(p);\n        case /* Private */61 :\n            return parsePrivateEqOrRepr(p);\n        default:\n          exit$1 = 2;\n      }\n    } else {\n      if (match.TAG === /* Uident */5) {\n        return parseTypeEquationOrConstrDecl(p);\n      }\n      exit$1 = 2;\n    }\n    switch (exit$1) {\n      case 2 :\n          var manifest = parseTypExpr(undefined, undefined, undefined, p);\n          var match$1 = p.token;\n          if (match$1 !== 14) {\n            return [\n                    manifest,\n                    /* Public */1,\n                    /* Ptype_abstract */0\n                  ];\n          }\n          Res_parser.next(undefined, p);\n          var match$2 = parseTypeRepresentation(p);\n          return [\n                  manifest,\n                  match$2[0],\n                  match$2[1]\n                ];\n      case 3 :\n          var match$3 = parseTypeRepresentation(p);\n          return [\n                  undefined,\n                  match$3[0],\n                  match$3[1]\n                ];\n      \n    }\n  }\n  \n}\n\nfunction parseIfLetExpr(startPos, p) {\n  var pattern = parsePattern(undefined, undefined, p);\n  Res_parser.expect(undefined, /* Equal */14, p);\n  var conditionExpr = parseIfCondition(p);\n  var thenExpr = parseThenBranch(p);\n  var match = p.token;\n  var elseExpr;\n  Res_parser.endRegion(p);\n  if (match === 51) {\n    Res_parser.leaveBreadcrumb(p, /* ElseBranch */19);\n    Res_parser.next(undefined, p);\n    Res_parser.beginRegion(p);\n    var match$1 = p.token;\n    var elseExpr$1 = match$1 === 50 ? 
parseIfOrIfLetExpression(p) : parseElseBranch(p);\n    Res_parser.eatBreadcrumb(p);\n    Res_parser.endRegion(p);\n    elseExpr = elseExpr$1;\n  } else {\n    var startPos$1 = p.startPos;\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos$1,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    elseExpr = Ast_helper.Exp.construct(loc, undefined, $$Location.mkloc({\n              TAG: /* Lident */0,\n              _0: \"()\"\n            }, loc), undefined);\n  }\n  var loc_loc_end$1 = p.prevEndPos;\n  var loc$1 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$1,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.match_(loc$1, {\n              hd: ifLetAttr,\n              tl: {\n                hd: suppressFragileMatchWarningAttr,\n                tl: /* [] */0\n              }\n            }, conditionExpr, {\n              hd: Ast_helper.Exp.$$case(pattern, undefined, thenExpr),\n              tl: {\n                hd: Ast_helper.Exp.$$case(Ast_helper.Pat.any(undefined, undefined, undefined), undefined, elseExpr),\n                tl: /* [] */0\n              }\n            });\n}\n\nfunction parseIfExpr(startPos, p) {\n  var conditionExpr = parseIfCondition(p);\n  var thenExpr = parseThenBranch(p);\n  var match = p.token;\n  var elseExpr;\n  Res_parser.endRegion(p);\n  if (match === 51) {\n    Res_parser.leaveBreadcrumb(p, /* ElseBranch */19);\n    Res_parser.next(undefined, p);\n    Res_parser.beginRegion(p);\n    var match$1 = p.token;\n    var elseExpr$1 = match$1 === 50 ? 
parseIfOrIfLetExpression(p) : parseElseBranch(p);\n    Res_parser.eatBreadcrumb(p);\n    Res_parser.endRegion(p);\n    elseExpr = elseExpr$1;\n  } else {\n    elseExpr = undefined;\n  }\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.ifthenelse(loc, undefined, conditionExpr, thenExpr, elseExpr);\n}\n\nfunction parseRecordOrObjectDecl(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var match = p.token;\n  var exit = 0;\n  if (typeof match === \"number\") {\n    if (match >= 6) {\n      if (match >= 7) {\n        exit = 1;\n      } else {\n        var dotdotdotStart = p.startPos;\n        var dotdotdotEnd = p.endPos;\n        Res_parser.next(undefined, p);\n        var typ = parseTypExpr(undefined, undefined, undefined, p);\n        var match$1 = p.token;\n        if (match$1 === 23) {\n          Res_parser.err(dotdotdotStart, dotdotdotEnd, p, Res_diagnostics.message(sameTypeSpread));\n          Res_parser.next(undefined, p);\n        } else {\n          Res_parser.expect(undefined, /* Comma */25, p);\n        }\n        var match$2 = p.token;\n        if (typeof match$2 !== \"number\" && match$2.TAG === /* Lident */4) {\n          Res_parser.err(dotdotdotStart, dotdotdotEnd, p, Res_diagnostics.message(spreadInRecordDeclaration));\n        }\n        var fields_0 = {\n          TAG: /* Oinherit */1,\n          _0: typ\n        };\n        var fields_1 = parseCommaDelimitedRegion(p, /* StringFieldDeclarations */37, /* Rbrace */23, parseStringFieldDeclaration);\n        var fields = {\n          hd: fields_0,\n          tl: fields_1\n        };\n        Res_parser.expect(undefined, /* Rbrace */23, p);\n        var loc_loc_end = p.prevEndPos;\n        var loc = {\n          loc_start: startPos,\n          loc_end: loc_loc_end,\n          loc_ghost: false\n        };\n        var typ$1 = parseTypeAlias(p, 
Ast_helper.Typ.object_(loc, undefined, fields, /* Closed */0));\n        var typ$2 = parseArrowTypeRest(true, startPos, typ$1, p);\n        return [\n                typ$2,\n                /* Public */1,\n                /* Ptype_abstract */0\n              ];\n      }\n    } else {\n      if (match >= 4) {\n        var match$3 = p.token;\n        var closedFlag = typeof match$3 === \"number\" ? (\n            match$3 !== 4 ? (\n                match$3 !== 5 ? /* Closed */0 : (Res_parser.next(undefined, p), /* Open */1)\n              ) : (Res_parser.next(undefined, p), /* Closed */0)\n          ) : /* Closed */0;\n        var fields$1 = parseCommaDelimitedRegion(p, /* StringFieldDeclarations */37, /* Rbrace */23, parseStringFieldDeclaration);\n        Res_parser.expect(undefined, /* Rbrace */23, p);\n        var loc_loc_end$1 = p.prevEndPos;\n        var loc$1 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$1,\n          loc_ghost: false\n        };\n        var typ$3 = parseTypeAlias(p, Ast_helper.Typ.object_(loc$1, /* [] */0, fields$1, closedFlag));\n        var typ$4 = parseArrowTypeRest(true, startPos, typ$3, p);\n        return [\n                typ$4,\n                /* Public */1,\n                /* Ptype_abstract */0\n              ];\n      }\n      exit = 1;\n    }\n  } else {\n    exit = 1;\n  }\n  if (exit === 1) {\n    var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n    var match$4 = p.token;\n    var exit$1 = 0;\n    if (typeof match$4 === \"number\") {\n      exit$1 = 2;\n    } else {\n      if (match$4.TAG === /* String */3) {\n        var fields$2;\n        if (attrs) {\n          Res_parser.leaveBreadcrumb(p, /* StringFieldDeclarations */37);\n          var field = parseStringFieldDeclaration(p);\n          var field$1;\n          if (field !== undefined) {\n            field$1 = field;\n          } else {\n            throw {\n                  RE_EXN_ID: \"Assert_failure\",\n                  _1: [\n      
              \"res_core.res\",\n                    5486,\n                    20\n                  ],\n                  Error: new Error()\n                };\n          }\n          var match$5 = p.token;\n          if (typeof match$5 === \"number\") {\n            if (match$5 >= 24) {\n              if (match$5 >= 27) {\n                Res_parser.expect(undefined, /* Comma */25, p);\n              } else {\n                switch (match$5) {\n                  case /* Colon */24 :\n                      Res_parser.expect(undefined, /* Comma */25, p);\n                      break;\n                  case /* Comma */25 :\n                      Res_parser.next(undefined, p);\n                      break;\n                  case /* Eof */26 :\n                      break;\n                  \n                }\n              }\n            } else if (match$5 >= 23) {\n              \n            } else {\n              Res_parser.expect(undefined, /* Comma */25, p);\n            }\n          } else {\n            Res_parser.expect(undefined, /* Comma */25, p);\n          }\n          Res_parser.eatBreadcrumb(p);\n          var first;\n          first = field$1.TAG === /* Otag */0 ? 
({\n                TAG: /* Otag */0,\n                _0: field$1._0,\n                _1: attrs,\n                _2: field$1._2\n              }) : ({\n                TAG: /* Oinherit */1,\n                _0: field$1._0\n              });\n          fields$2 = {\n            hd: first,\n            tl: parseCommaDelimitedRegion(p, /* StringFieldDeclarations */37, /* Rbrace */23, parseStringFieldDeclaration)\n          };\n        } else {\n          fields$2 = parseCommaDelimitedRegion(p, /* StringFieldDeclarations */37, /* Rbrace */23, parseStringFieldDeclaration);\n        }\n        Res_parser.expect(undefined, /* Rbrace */23, p);\n        var loc_loc_end$2 = p.prevEndPos;\n        var loc$2 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$2,\n          loc_ghost: false\n        };\n        var typ$5 = parseTypeAlias(p, Ast_helper.Typ.object_(loc$2, /* [] */0, fields$2, /* Closed */0));\n        var typ$6 = parseArrowTypeRest(true, startPos, typ$5, p);\n        return [\n                typ$6,\n                /* Public */1,\n                /* Ptype_abstract */0\n              ];\n      }\n      exit$1 = 2;\n    }\n    if (exit$1 === 2) {\n      Res_parser.leaveBreadcrumb(p, /* RecordDecl */34);\n      var fields$3;\n      if (attrs) {\n        var field$2 = parseFieldDeclaration(p);\n        Res_parser.optional(p, /* Comma */25);\n        var init = field$2.pld_loc;\n        var first_pld_name = field$2.pld_name;\n        var first_pld_mutable = field$2.pld_mutable;\n        var first_pld_type = field$2.pld_type;\n        var first_pld_loc = {\n          loc_start: attrs.hd[0].loc.loc_start,\n          loc_end: init.loc_end,\n          loc_ghost: init.loc_ghost\n        };\n        var first$1 = {\n          pld_name: first_pld_name,\n          pld_mutable: first_pld_mutable,\n          pld_type: first_pld_type,\n          pld_loc: first_pld_loc,\n          pld_attributes: attrs\n        };\n        fields$3 = {\n          hd: 
first$1,\n          tl: parseCommaDelimitedRegion(p, /* FieldDeclarations */38, /* Rbrace */23, parseFieldDeclarationRegion)\n        };\n      } else {\n        fields$3 = parseCommaDelimitedRegion(p, /* FieldDeclarations */38, /* Rbrace */23, parseFieldDeclarationRegion);\n      }\n      if (fields$3) {\n        \n      } else {\n        Res_parser.err(startPos, undefined, p, Res_diagnostics.message(\"A record needs at least one field\"));\n      }\n      Res_parser.expect(undefined, /* Rbrace */23, p);\n      Res_parser.eatBreadcrumb(p);\n      return [\n              undefined,\n              /* Public */1,\n              {\n                TAG: /* Ptype_record */1,\n                _0: fields$3\n              }\n            ];\n    }\n    \n  }\n  \n}\n\nfunction parseTypeEquationOrConstrDecl(p) {\n  var uidentStartPos = p.startPos;\n  var uident = p.token;\n  if (typeof uident !== \"number\" && uident.TAG === /* Uident */5) {\n    var uident$1 = uident._0;\n    Res_parser.next(undefined, p);\n    var match = p.token;\n    if (match === 4) {\n      Res_parser.next(undefined, p);\n      var typeConstr = parseValuePathTail(p, uidentStartPos, {\n            TAG: /* Lident */0,\n            _0: uident$1\n          });\n      var loc_loc_end = p.prevEndPos;\n      var loc = {\n        loc_start: uidentStartPos,\n        loc_end: loc_loc_end,\n        loc_ghost: false\n      };\n      var typ = parseTypeAlias(p, Ast_helper.Typ.constr(loc, undefined, typeConstr, parseTypeConstructorArgs(typeConstr, p)));\n      var match$1 = p.token;\n      if (typeof match$1 !== \"number\") {\n        return [\n                typ,\n                /* Public */1,\n                /* Ptype_abstract */0\n              ];\n      }\n      if (match$1 !== 14) {\n        if (match$1 !== 57) {\n          return [\n                  typ,\n                  /* Public */1,\n                  /* Ptype_abstract */0\n                ];\n        }\n        Res_parser.next(undefined, p);\n        
var returnType = parseTypExpr(undefined, undefined, false, p);\n        var loc_loc_end$1 = p.prevEndPos;\n        var loc$1 = {\n          loc_start: uidentStartPos,\n          loc_end: loc_loc_end$1,\n          loc_ghost: false\n        };\n        var arrowType = Ast_helper.Typ.arrow(loc$1, undefined, /* Nolabel */0, typ, returnType);\n        var typ$1 = parseTypeAlias(p, arrowType);\n        return [\n                typ$1,\n                /* Public */1,\n                /* Ptype_abstract */0\n              ];\n      }\n      Res_parser.next(undefined, p);\n      var match$2 = parseTypeRepresentation(p);\n      return [\n              typ,\n              match$2[0],\n              match$2[1]\n            ];\n    }\n    var uidentEndPos = p.prevEndPos;\n    var match$3 = parseConstrDeclArgs(p);\n    var uidentLoc = {\n      loc_start: uidentStartPos,\n      loc_end: uidentEndPos,\n      loc_ghost: false\n    };\n    var first = Ast_helper.Type.constructor({\n          loc_start: uidentStartPos,\n          loc_end: p.prevEndPos,\n          loc_ghost: false\n        }, undefined, undefined, match$3[0], match$3[1], $$Location.mkloc(uident$1, uidentLoc));\n    return [\n            undefined,\n            /* Public */1,\n            {\n              TAG: /* Ptype_variant */0,\n              _0: parseTypeConstructorDeclarations(first, p)\n            }\n          ];\n  }\n  Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(uident));\n  return [\n          undefined,\n          /* Public */1,\n          /* Ptype_abstract */0\n        ];\n}\n\nfunction parseJsFfiDeclaration(p) {\n  var startPos = p.startPos;\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var match = p.token;\n  if (typeof match === \"number\") {\n    return ;\n  }\n  if (match.TAG !== /* Lident */4) {\n    return ;\n  }\n  var match$1 = parseLident(p);\n  var ident = match$1[0];\n  var match$2 = p.token;\n  var alias = match$2 === 3 ? 
(Res_parser.next(undefined, p), parseLident(p)[0]) : ident;\n  Res_parser.expect(undefined, /* Colon */24, p);\n  var typ = parseTypExpr(undefined, undefined, undefined, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Res_js_ffi.decl(attrs, loc, ident, alias, typ);\n}\n\nfunction parseWithConstraint(p) {\n  var token = p.token;\n  if (typeof token === \"number\") {\n    if (token !== 60) {\n      if (token === 65) {\n        Res_parser.next(undefined, p);\n        var modulePath = parseModuleLongIdent(false, p);\n        var token$1 = p.token;\n        var exit = 0;\n        if (typeof token$1 === \"number\") {\n          if (token$1 !== 14) {\n            if (token$1 !== 74) {\n              exit = 2;\n            } else {\n              Res_parser.next(undefined, p);\n              var lident = parseModuleLongIdent(false, p);\n              return {\n                      TAG: /* Pwith_modsubst */3,\n                      _0: modulePath,\n                      _1: lident\n                    };\n            }\n          } else {\n            Res_parser.next(undefined, p);\n            var lident$1 = parseModuleLongIdent(false, p);\n            return {\n                    TAG: /* Pwith_module */1,\n                    _0: modulePath,\n                    _1: lident$1\n                  };\n          }\n        } else {\n          exit = 2;\n        }\n        if (exit === 2) {\n          Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token$1, p.breadcrumbs));\n          var lident$2 = parseModuleLongIdent(false, p);\n          return {\n                  TAG: /* Pwith_modsubst */3,\n                  _0: modulePath,\n                  _1: lident$2\n                };\n        }\n        \n      }\n      \n    } else {\n      Res_parser.next(undefined, p);\n      var typeConstr = parseValuePath(p);\n      var params = parseTypeParams(typeConstr, 
p);\n      var token$2 = p.token;\n      var exit$1 = 0;\n      if (typeof token$2 === \"number\") {\n        if (token$2 !== 14) {\n          if (token$2 !== 74) {\n            exit$1 = 2;\n          } else {\n            Res_parser.next(undefined, p);\n            var typExpr = parseTypExpr(undefined, undefined, undefined, p);\n            return {\n                    TAG: /* Pwith_typesubst */2,\n                    _0: typeConstr,\n                    _1: Ast_helper.Type.mk(typeConstr.loc, undefined, undefined, undefined, params, undefined, undefined, undefined, typExpr, $$Location.mkloc(Longident.last(typeConstr.txt), typeConstr.loc))\n                  };\n          }\n        } else {\n          Res_parser.next(undefined, p);\n          var typExpr$1 = parseTypExpr(undefined, undefined, undefined, p);\n          var typeConstraints = parseRegion(p, /* TypeConstraint */51, parseTypeConstraint);\n          return {\n                  TAG: /* Pwith_type */0,\n                  _0: typeConstr,\n                  _1: Ast_helper.Type.mk(typeConstr.loc, undefined, undefined, undefined, params, typeConstraints, undefined, undefined, typExpr$1, $$Location.mkloc(Longident.last(typeConstr.txt), typeConstr.loc))\n                };\n        }\n      } else {\n        exit$1 = 2;\n      }\n      if (exit$1 === 2) {\n        Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token$2, p.breadcrumbs));\n        var typExpr$2 = parseTypExpr(undefined, undefined, undefined, p);\n        var typeConstraints$1 = parseRegion(p, /* TypeConstraint */51, parseTypeConstraint);\n        return {\n                TAG: /* Pwith_type */0,\n                _0: typeConstr,\n                _1: Ast_helper.Type.mk(typeConstr.loc, undefined, undefined, undefined, params, typeConstraints$1, undefined, undefined, typExpr$2, $$Location.mkloc(Longident.last(typeConstr.txt), typeConstr.loc))\n              };\n      }\n      \n    }\n  }\n  Res_parser.err(undefined, undefined, p, 
Res_diagnostics.unexpected(token, p.breadcrumbs));\n  return {\n          TAG: /* Pwith_type */0,\n          _0: $$Location.mknoloc({\n                TAG: /* Lident */0,\n                _0: \"\"\n              }),\n          _1: Ast_helper.Type.mk(undefined, undefined, undefined, undefined, /* [] */0, /* [] */0, undefined, undefined, defaultType(undefined), $$Location.mknoloc(\"\"))\n        };\n}\n\nfunction parseTypeParameter(p) {\n  if (!(p.token === /* Tilde */48 || p.token === /* Dot */4 || Res_grammar.isTypExprStart(p.token))) {\n    return ;\n  }\n  var startPos = p.startPos;\n  var uncurried = Res_parser.optional(p, /* Dot */4);\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var match = p.token;\n  if (typeof match === \"number\") {\n    if (match === /* Tilde */48) {\n      Res_parser.next(undefined, p);\n      var match$1 = parseLident(p);\n      var name = match$1[0];\n      var lblLocAttr_0 = $$Location.mkloc(\"ns.namedArgLoc\", match$1[1]);\n      var lblLocAttr_1 = {\n        TAG: /* PStr */0,\n        _0: /* [] */0\n      };\n      var lblLocAttr = [\n        lblLocAttr_0,\n        lblLocAttr_1\n      ];\n      Res_parser.expect(/* TypeExpression */20, /* Colon */24, p);\n      var typ = parseTypExpr(undefined, undefined, undefined, p);\n      var typ_ptyp_desc = typ.ptyp_desc;\n      var typ_ptyp_loc = typ.ptyp_loc;\n      var typ_ptyp_attributes = {\n        hd: lblLocAttr,\n        tl: typ.ptyp_attributes\n      };\n      var typ$1 = {\n        ptyp_desc: typ_ptyp_desc,\n        ptyp_loc: typ_ptyp_loc,\n        ptyp_attributes: typ_ptyp_attributes\n      };\n      var match$2 = p.token;\n      if (match$2 === 14) {\n        Res_parser.next(undefined, p);\n        Res_parser.expect(undefined, /* Question */49, p);\n        return [\n                uncurried,\n                attrs,\n                {\n                  TAG: /* Optional */1,\n                  _0: name\n                },\n                typ$1,\n            
    startPos\n              ];\n      } else {\n        return [\n                uncurried,\n                attrs,\n                {\n                  TAG: /* Labelled */0,\n                  _0: name\n                },\n                typ$1,\n                startPos\n              ];\n      }\n    }\n    \n  } else if (match.TAG === /* Lident */4) {\n    var match$3 = parseLident(p);\n    var loc = match$3[1];\n    var name$1 = match$3[0];\n    var match$4 = p.token;\n    if (match$4 === 24) {\n      var error = Res_diagnostics.message(missingTildeLabeledParameter(name$1));\n      Res_parser.err(loc.loc_start, loc.loc_end, p, error);\n      Res_parser.next(undefined, p);\n      var typ$2 = parseTypExpr(undefined, undefined, undefined, p);\n      var match$5 = p.token;\n      if (match$5 === 14) {\n        Res_parser.next(undefined, p);\n        Res_parser.expect(undefined, /* Question */49, p);\n        return [\n                uncurried,\n                attrs,\n                {\n                  TAG: /* Optional */1,\n                  _0: name$1\n                },\n                typ$2,\n                startPos\n              ];\n      } else {\n        return [\n                uncurried,\n                attrs,\n                {\n                  TAG: /* Labelled */0,\n                  _0: name$1\n                },\n                typ$2,\n                startPos\n              ];\n      }\n    }\n    var constr = $$Location.mkloc({\n          TAG: /* Lident */0,\n          _0: name$1\n        }, loc);\n    var args = parseTypeConstructorArgs(constr, p);\n    var typ$3 = Ast_helper.Typ.constr({\n          loc_start: startPos,\n          loc_end: p.prevEndPos,\n          loc_ghost: false\n        }, attrs, constr, args);\n    var typ$4 = parseArrowTypeRest(true, startPos, typ$3, p);\n    var typ$5 = parseTypeAlias(p, typ$4);\n    return [\n            uncurried,\n            /* [] */0,\n            /* Nolabel */0,\n            typ$5,\n        
    startPos\n          ];\n  }\n  var typ$6 = parseTypExpr(undefined, undefined, undefined, p);\n  var typWithAttributes_ptyp_desc = typ$6.ptyp_desc;\n  var typWithAttributes_ptyp_loc = typ$6.ptyp_loc;\n  var typWithAttributes_ptyp_attributes = List.concat({\n        hd: attrs,\n        tl: {\n          hd: typ$6.ptyp_attributes,\n          tl: /* [] */0\n        }\n      });\n  var typWithAttributes = {\n    ptyp_desc: typWithAttributes_ptyp_desc,\n    ptyp_loc: typWithAttributes_ptyp_loc,\n    ptyp_attributes: typWithAttributes_ptyp_attributes\n  };\n  return [\n          uncurried,\n          /* [] */0,\n          /* Nolabel */0,\n          typWithAttributes,\n          startPos\n        ];\n}\n\nfunction parseRecordDeclaration(p) {\n  Res_parser.leaveBreadcrumb(p, /* RecordDecl */34);\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var rows = parseCommaDelimitedRegion(p, /* RecordDecl */34, /* Rbrace */23, parseFieldDeclarationRegion);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  Res_parser.eatBreadcrumb(p);\n  return rows;\n}\n\nfunction parseStructureItemRegion(p) {\n  var startPos = p.startPos;\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var token = p.token;\n  if (typeof token === \"number\") {\n    if (token >= 10) {\n      if (token !== 27) {\n        if (token >= 59) {\n          switch (token) {\n            case /* External */59 :\n                var externalDef = parseExternalDef(attrs, startPos, p);\n                parseNewlineOrSemicolonStructure(p);\n                var loc_loc_end = p.prevEndPos;\n                var loc = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end,\n                  loc_ghost: false\n                };\n                return Ast_helper.Str.primitive(loc, externalDef);\n            case /* Typ */60 :\n                Res_parser.beginRegion(p);\n                var ext = parseTypeDefinitionOrExtension(attrs, p);\n                if (ext.TAG === /* 
TypeDef */0) {\n                  parseNewlineOrSemicolonStructure(p);\n                  var loc_loc_end$1 = p.prevEndPos;\n                  var loc$1 = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end$1,\n                    loc_ghost: false\n                  };\n                  Res_parser.endRegion(p);\n                  return Ast_helper.Str.type_(loc$1, ext.recFlag, ext.types);\n                }\n                parseNewlineOrSemicolonStructure(p);\n                var loc_loc_end$2 = p.prevEndPos;\n                var loc$2 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$2,\n                  loc_ghost: false\n                };\n                Res_parser.endRegion(p);\n                return Ast_helper.Str.type_extension(loc$2, ext._0);\n            case /* Include */64 :\n                var includeStatement = parseIncludeStatement(attrs, p);\n                parseNewlineOrSemicolonStructure(p);\n                var loc_loc_end$3 = p.prevEndPos;\n                var loc$3 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$3,\n                  loc_ghost: false\n                };\n                return Ast_helper.Str.include_(loc$3, includeStatement);\n            case /* Module */65 :\n                Res_parser.beginRegion(p);\n                var structureItem = parseModuleOrModuleTypeImplOrPackExpr(attrs, p);\n                parseNewlineOrSemicolonStructure(p);\n                var loc_loc_end$4 = p.prevEndPos;\n                var loc$4 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$4,\n                  loc_ghost: false\n                };\n                Res_parser.endRegion(p);\n                return {\n                        pstr_desc: structureItem.pstr_desc,\n                        pstr_loc: loc$4\n                      };\n            case /* AtAt */76 :\n                var attr = 
parseStandaloneAttribute(p);\n                parseNewlineOrSemicolonStructure(p);\n                var loc_loc_end$5 = p.prevEndPos;\n                var loc$5 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$5,\n                  loc_ghost: false\n                };\n                return Ast_helper.Str.attribute(loc$5, attr);\n            case /* PercentPercent */78 :\n                var extension = parseExtension(true, p);\n                parseNewlineOrSemicolonStructure(p);\n                var loc_loc_end$6 = p.prevEndPos;\n                var loc$6 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$6,\n                  loc_ghost: false\n                };\n                return Ast_helper.Str.extension(loc$6, attrs, extension);\n            case /* Private */61 :\n            case /* Mutable */62 :\n            case /* Constraint */63 :\n            case /* Of */66 :\n            case /* Land */67 :\n            case /* Lor */68 :\n            case /* Band */69 :\n            case /* BangEqual */70 :\n            case /* BangEqualEqual */71 :\n            case /* LessEqual */72 :\n            case /* GreaterEqual */73 :\n            case /* ColonEqual */74 :\n            case /* At */75 :\n            case /* Percent */77 :\n            case /* List */79 :\n            case /* Backtick */80 :\n            case /* BarGreater */81 :\n            case /* Try */82 :\n                break;\n            case /* Import */83 :\n                var importDescr = parseJsImport(startPos, attrs, p);\n                parseNewlineOrSemicolonStructure(p);\n                var loc_loc_end$7 = p.prevEndPos;\n                var loc$7 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$7,\n                  loc_ghost: false\n                };\n                var structureItem$1 = Res_js_ffi.toParsetree(importDescr);\n                return {\n                        
pstr_desc: structureItem$1.pstr_desc,\n                        pstr_loc: loc$7\n                      };\n            case /* Export */84 :\n                var structureItem$2 = parseJsExport(attrs, p);\n                parseNewlineOrSemicolonStructure(p);\n                var loc_loc_end$8 = p.prevEndPos;\n                var loc$8 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$8,\n                  loc_ghost: false\n                };\n                return {\n                        pstr_desc: structureItem$2.pstr_desc,\n                        pstr_loc: loc$8\n                      };\n            \n          }\n        }\n        \n      } else {\n        var exceptionDef = parseExceptionDef(attrs, p);\n        parseNewlineOrSemicolonStructure(p);\n        var loc_loc_end$9 = p.prevEndPos;\n        var loc$9 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$9,\n          loc_ghost: false\n        };\n        return Ast_helper.Str.exception_(loc$9, exceptionDef);\n      }\n    } else if (token !== 0) {\n      if (token >= 9) {\n        var match = parseLetBindings(attrs, p);\n        parseNewlineOrSemicolonStructure(p);\n        var loc_loc_end$10 = p.prevEndPos;\n        var loc$10 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$10,\n          loc_ghost: false\n        };\n        return Ast_helper.Str.value(loc$10, match[0], match[1]);\n      }\n      \n    } else {\n      var openDescription = parseOpenDescription(attrs, p);\n      parseNewlineOrSemicolonStructure(p);\n      var loc_loc_end$11 = p.prevEndPos;\n      var loc$11 = {\n        loc_start: startPos,\n        loc_end: loc_loc_end$11,\n        loc_ghost: false\n      };\n      return Ast_helper.Str.open_(loc$11, openDescription);\n    }\n  }\n  if (Res_grammar.isExprStart(token)) {\n    var prevEndPos = p.endPos;\n    var exp = parseExpr(undefined, p);\n    parseNewlineOrSemicolonStructure(p);\n    var loc_loc_end$12 = 
p.prevEndPos;\n    var loc$12 = {\n      loc_start: startPos,\n      loc_end: loc_loc_end$12,\n      loc_ghost: false\n    };\n    return Res_parser.checkProgress(prevEndPos, Ast_helper.Str.$$eval(loc$12, attrs, exp), p);\n  }\n  if (!attrs) {\n    return ;\n  }\n  var attr$1 = attrs.hd;\n  var attrLoc = attr$1[0].loc;\n  Res_parser.err(attrLoc.loc_start, attrLoc.loc_end, p, Res_diagnostics.message(attributeWithoutNode(attr$1)));\n  var expr = parseExpr(undefined, p);\n  return Ast_helper.Str.$$eval({\n              loc_start: p.startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            }, attrs, expr);\n}\n\nfunction parseSignatureItemRegion(p) {\n  while(true) {\n    var startPos = p.startPos;\n    var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n    var match = p.token;\n    if (typeof match === \"number\") {\n      if (match >= 10) {\n        if (match !== 27) {\n          if (match >= 59) {\n            switch (match) {\n              case /* External */59 :\n                  var externalDef = parseExternalDef(attrs, startPos, p);\n                  parseNewlineOrSemicolonSignature(p);\n                  var loc_loc_end = p.prevEndPos;\n                  var loc = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end,\n                    loc_ghost: false\n                  };\n                  return Ast_helper.Sig.value(loc, externalDef);\n              case /* Typ */60 :\n                  Res_parser.beginRegion(p);\n                  var ext = parseTypeDefinitionOrExtension(attrs, p);\n                  if (ext.TAG === /* TypeDef */0) {\n                    parseNewlineOrSemicolonSignature(p);\n                    var loc_loc_end$1 = p.prevEndPos;\n                    var loc$1 = {\n                      loc_start: startPos,\n                      loc_end: loc_loc_end$1,\n                      loc_ghost: false\n                    };\n                    
Res_parser.endRegion(p);\n                    return Ast_helper.Sig.type_(loc$1, ext.recFlag, ext.types);\n                  }\n                  parseNewlineOrSemicolonSignature(p);\n                  var loc_loc_end$2 = p.prevEndPos;\n                  var loc$2 = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end$2,\n                    loc_ghost: false\n                  };\n                  Res_parser.endRegion(p);\n                  return Ast_helper.Sig.type_extension(loc$2, ext._0);\n              case /* Include */64 :\n                  Res_parser.next(undefined, p);\n                  var moduleType = parseModuleType(undefined, undefined, p);\n                  var includeDescription = Ast_helper.Incl.mk({\n                        loc_start: startPos,\n                        loc_end: p.prevEndPos,\n                        loc_ghost: false\n                      }, attrs, undefined, moduleType);\n                  parseNewlineOrSemicolonSignature(p);\n                  var loc_loc_end$3 = p.prevEndPos;\n                  var loc$3 = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end$3,\n                    loc_ghost: false\n                  };\n                  return Ast_helper.Sig.include_(loc$3, includeDescription);\n              case /* Module */65 :\n                  Res_parser.beginRegion(p);\n                  Res_parser.next(undefined, p);\n                  var _t = p.token;\n                  var exit = 0;\n                  if (typeof _t === \"number\") {\n                    switch (_t) {\n                      case /* Rec */11 :\n                          var recModule = parseRecModuleSpec(attrs, startPos, p);\n                          parseNewlineOrSemicolonSignature(p);\n                          var loc_loc_end$4 = p.prevEndPos;\n                          var loc$4 = {\n                            loc_start: startPos,\n                            loc_end: 
loc_loc_end$4,\n                            loc_ghost: false\n                          };\n                          Res_parser.endRegion(p);\n                          return Ast_helper.Sig.rec_module(loc$4, recModule);\n                      case /* Typ */60 :\n                          var modTypeDecl = parseModuleTypeDeclaration(attrs, startPos, p);\n                          Res_parser.endRegion(p);\n                          return modTypeDecl;\n                      default:\n                        exit = 2;\n                    }\n                  } else {\n                    if (_t.TAG === /* Uident */5) {\n                      var modDecl = parseModuleDeclarationOrAlias(attrs, p);\n                      parseNewlineOrSemicolonSignature(p);\n                      var loc_loc_end$5 = p.prevEndPos;\n                      var loc$5 = {\n                        loc_start: startPos,\n                        loc_end: loc_loc_end$5,\n                        loc_ghost: false\n                      };\n                      Res_parser.endRegion(p);\n                      return Ast_helper.Sig.module_(loc$5, modDecl);\n                    }\n                    exit = 2;\n                  }\n                  if (exit === 2) {\n                    var modDecl$1 = parseModuleDeclarationOrAlias(attrs, p);\n                    parseNewlineOrSemicolonSignature(p);\n                    var loc_loc_end$6 = p.prevEndPos;\n                    var loc$6 = {\n                      loc_start: startPos,\n                      loc_end: loc_loc_end$6,\n                      loc_ghost: false\n                    };\n                    Res_parser.endRegion(p);\n                    return Ast_helper.Sig.module_(loc$6, modDecl$1);\n                  }\n                  break;\n              case /* AtAt */76 :\n                  var attr = parseStandaloneAttribute(p);\n                  parseNewlineOrSemicolonSignature(p);\n                  var loc_loc_end$7 = 
p.prevEndPos;\n                  var loc$7 = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end$7,\n                    loc_ghost: false\n                  };\n                  return Ast_helper.Sig.attribute(loc$7, attr);\n              case /* PercentPercent */78 :\n                  var extension = parseExtension(true, p);\n                  parseNewlineOrSemicolonSignature(p);\n                  var loc_loc_end$8 = p.prevEndPos;\n                  var loc$8 = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end$8,\n                    loc_ghost: false\n                  };\n                  return Ast_helper.Sig.extension(loc$8, attrs, extension);\n              case /* Private */61 :\n              case /* Mutable */62 :\n              case /* Constraint */63 :\n              case /* Of */66 :\n              case /* Land */67 :\n              case /* Lor */68 :\n              case /* Band */69 :\n              case /* BangEqual */70 :\n              case /* BangEqualEqual */71 :\n              case /* LessEqual */72 :\n              case /* GreaterEqual */73 :\n              case /* ColonEqual */74 :\n              case /* At */75 :\n              case /* Percent */77 :\n              case /* List */79 :\n              case /* Backtick */80 :\n              case /* BarGreater */81 :\n              case /* Try */82 :\n                  break;\n              case /* Import */83 :\n                  Res_parser.next(undefined, p);\n                  continue ;\n              case /* Export */84 :\n                  var signatureItem = parseSignJsExport(attrs, p);\n                  parseNewlineOrSemicolonSignature(p);\n                  var loc_loc_end$9 = p.prevEndPos;\n                  var loc$9 = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end$9,\n                    loc_ghost: false\n                  };\n                  return {\n                     
     psig_desc: signatureItem.psig_desc,\n                          psig_loc: loc$9\n                        };\n              \n            }\n          }\n          \n        } else {\n          var exceptionDef = parseExceptionDef(attrs, p);\n          parseNewlineOrSemicolonSignature(p);\n          var loc_loc_end$10 = p.prevEndPos;\n          var loc$10 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$10,\n            loc_ghost: false\n          };\n          return Ast_helper.Sig.exception_(loc$10, exceptionDef);\n        }\n      } else if (match !== 0) {\n        if (match >= 9) {\n          Res_parser.beginRegion(p);\n          var valueDesc = parseSignLetDesc(attrs, p);\n          parseNewlineOrSemicolonSignature(p);\n          var loc_loc_end$11 = p.prevEndPos;\n          var loc$11 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$11,\n            loc_ghost: false\n          };\n          Res_parser.endRegion(p);\n          return Ast_helper.Sig.value(loc$11, valueDesc);\n        }\n        \n      } else {\n        var openDescription = parseOpenDescription(attrs, p);\n        parseNewlineOrSemicolonSignature(p);\n        var loc_loc_end$12 = p.prevEndPos;\n        var loc$12 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$12,\n          loc_ghost: false\n        };\n        return Ast_helper.Sig.open_(loc$12, openDescription);\n      }\n    }\n    if (!attrs) {\n      return ;\n    }\n    var attr$1 = attrs.hd;\n    var attrLoc = attr$1[0].loc;\n    Res_parser.err(attrLoc.loc_start, attrLoc.loc_end, p, Res_diagnostics.message(attributeWithoutNode(attr$1)));\n    return defaultSignatureItem;\n  };\n}\n\nfunction parseNonSpreadExp(msg, p) {\n  var match = p.token;\n  if (match === 6) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.message(msg));\n    Res_parser.next(undefined, p);\n  }\n  var token = p.token;\n  if (!Res_grammar.isExprStart(token)) {\n    return ;\n  }\n  
var expr = parseExpr(undefined, p);\n  var match$1 = p.token;\n  if (match$1 !== 24) {\n    return expr;\n  }\n  Res_parser.next(undefined, p);\n  var typ = parseTypExpr(undefined, undefined, undefined, p);\n  var loc_loc_start = expr.pexp_loc.loc_start;\n  var loc_loc_end = typ.ptyp_loc.loc_end;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.constraint_(loc, undefined, expr, typ);\n}\n\nfunction parseTypeParameters(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var match = p.token;\n  if (match === 19) {\n    Res_parser.next(undefined, p);\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    var unitConstr = $$Location.mkloc({\n          TAG: /* Lident */0,\n          _0: \"unit\"\n        }, loc);\n    var typ = Ast_helper.Typ.constr(undefined, undefined, unitConstr, /* [] */0);\n    return {\n            hd: [\n              false,\n              /* [] */0,\n              /* Nolabel */0,\n              typ,\n              startPos\n            ],\n            tl: /* [] */0\n          };\n  }\n  var params = parseCommaDelimitedRegion(p, /* TypeParameters */42, /* Rparen */19, parseTypeParameter);\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  return params;\n}\n\nfunction parsePatternGuard(p) {\n  var match = p.token;\n  if (typeof match !== \"number\") {\n    return ;\n  }\n  if (match !== 50 && match !== 56) {\n    return ;\n  }\n  Res_parser.next(undefined, p);\n  return parseExpr(/* WhenExpr */2, p);\n}\n\nfunction parseRecModuleDeclaration(attrs, startPos, p) {\n  var modName = p.token;\n  var name;\n  var exit = 0;\n  if (typeof modName === \"number\" || modName.TAG !== /* Uident */5) {\n    exit = 1;\n  } else {\n    var loc_loc_start = p.startPos;\n    var loc_loc_end = p.endPos;\n    var loc = {\n      loc_start: loc_loc_start,\n     
 loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    Res_parser.next(undefined, p);\n    name = $$Location.mkloc(modName._0, loc);\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(modName));\n    name = $$Location.mknoloc(\"_\");\n  }\n  Res_parser.expect(undefined, /* Colon */24, p);\n  var modType = parseModuleType(undefined, undefined, p);\n  return Ast_helper.Md.mk({\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            }, attrs, undefined, undefined, name, modType);\n}\n\nfunction parseEs6ArrowExpression(context, parameters, p) {\n  var startPos = p.startPos;\n  Res_parser.leaveBreadcrumb(p, /* Es6ArrowExpr */3);\n  var parameters$1 = parameters !== undefined ? parameters : parseParameters(p);\n  var match = p.token;\n  var returnType = match === 24 ? (Res_parser.next(undefined, p), parseTypExpr(undefined, false, undefined, p)) : undefined;\n  Res_parser.expect(undefined, /* EqualGreater */57, p);\n  var expr = parseExpr(context, p);\n  var body = returnType !== undefined ? Ast_helper.Exp.constraint_({\n          loc_start: expr.pexp_loc.loc_start,\n          loc_end: returnType.ptyp_loc.loc_end,\n          loc_ghost: false\n        }, undefined, expr, returnType) : expr;\n  Res_parser.eatBreadcrumb(p);\n  var endPos = p.prevEndPos;\n  var arrowExpr = List.fold_right((function (parameter, expr) {\n          if (parameter.TAG === /* TermParameter */0) {\n            var attrs = parameter.attrs;\n            var attrs$1 = parameter.uncurried ? 
({\n                  hd: uncurryAttr,\n                  tl: attrs\n                }) : attrs;\n            return Ast_helper.Exp.fun_({\n                        loc_start: parameter.pos,\n                        loc_end: endPos,\n                        loc_ghost: false\n                      }, attrs$1, parameter.label, parameter.expr, parameter.pat, expr);\n          }\n          var attrs$2 = parameter.attrs;\n          var attrs$3 = parameter.uncurried ? ({\n                hd: uncurryAttr,\n                tl: attrs$2\n              }) : attrs$2;\n          return makeNewtypes(attrs$3, {\n                      loc_start: parameter.pos,\n                      loc_end: endPos,\n                      loc_ghost: false\n                    }, parameter.locs, expr);\n        }), parameters$1, body);\n  var init = arrowExpr.pexp_loc;\n  return {\n          pexp_desc: arrowExpr.pexp_desc,\n          pexp_loc: {\n            loc_start: startPos,\n            loc_end: init.loc_end,\n            loc_ghost: init.loc_ghost\n          },\n          pexp_attributes: arrowExpr.pexp_attributes\n        };\n}\n\nfunction parseRecordExprWithStringKeys(startPos, firstRow, p) {\n  var rows_1 = parseCommaDelimitedRegion(p, /* RecordRowsStringKey */44, /* Rbrace */23, parseRecordRowWithStringKey);\n  var rows = {\n    hd: firstRow,\n    tl: rows_1\n  };\n  var loc_loc_end = p.endPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  var recordStrExpr = Ast_helper.Str.$$eval(loc, undefined, Ast_helper.Exp.record(loc, undefined, rows, undefined));\n  return Ast_helper.Exp.extension(loc, undefined, [\n              $$Location.mkloc(\"obj\", loc),\n              {\n                TAG: /* PStr */0,\n                _0: {\n                  hd: recordStrExpr,\n                  tl: /* [] */0\n                }\n              }\n            ]);\n}\n\nfunction parseRecordExpr(startPos, spreadOpt, rows, p) {\n  var spread = spreadOpt !== 
undefined ? Caml_option.valFromOption(spreadOpt) : undefined;\n  var exprs = parseCommaDelimitedRegion(p, /* RecordRows */43, /* Rbrace */23, parseRecordRow);\n  var rows$1 = List.concat({\n        hd: rows,\n        tl: {\n          hd: exprs,\n          tl: /* [] */0\n        }\n      });\n  if (rows$1) {\n    \n  } else {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.message(\"Record spread needs at least one field that's updated\"));\n  }\n  var loc_loc_end = p.endPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.record(loc, undefined, rows$1, spread);\n}\n\nfunction parseValueOrConstructor(p) {\n  var startPos = p.startPos;\n  var _acc = /* [] */0;\n  while(true) {\n    var acc = _acc;\n    var ident = p.token;\n    if (typeof ident !== \"number\") {\n      switch (ident.TAG | 0) {\n        case /* Lident */4 :\n            Res_parser.next(undefined, p);\n            var loc_loc_end = p.prevEndPos;\n            var loc = {\n              loc_start: startPos,\n              loc_end: loc_loc_end,\n              loc_ghost: false\n            };\n            var lident = buildLongident({\n                  hd: ident._0,\n                  tl: acc\n                });\n            return Ast_helper.Exp.ident(loc, undefined, $$Location.mkloc(lident, loc));\n        case /* Uident */5 :\n            var ident$1 = ident._0;\n            var endPosLident = p.endPos;\n            Res_parser.next(undefined, p);\n            var match = p.token;\n            var exit = 0;\n            if (typeof match === \"number\") {\n              if (match !== 4) {\n                if (match !== 18) {\n                  exit = 2;\n                } else {\n                  if (p.prevEndPos.pos_lnum === p.startPos.pos_lnum) {\n                    var lparen = p.startPos;\n                    var args = parseConstructorArgs(p);\n                    var rparen = p.prevEndPos;\n                    
var lident$1 = buildLongident({\n                          hd: ident$1,\n                          tl: acc\n                        });\n                    var tail;\n                    var exit$1 = 0;\n                    if (args) {\n                      var arg = args.hd;\n                      var tmp = arg.pexp_desc;\n                      if (typeof tmp === \"number\") {\n                        if (args.tl) {\n                          exit$1 = 3;\n                        } else {\n                          tail = arg;\n                        }\n                      } else if (tmp.TAG === /* Pexp_tuple */8) {\n                        if (args.tl) {\n                          exit$1 = 3;\n                        } else {\n                          var loc$1 = {\n                            loc_start: lparen,\n                            loc_end: rparen,\n                            loc_ghost: false\n                          };\n                          tail = p.mode === /* ParseForTypeChecker */0 ? 
arg : Ast_helper.Exp.tuple(loc$1, undefined, args);\n                        }\n                      } else if (args.tl) {\n                        exit$1 = 3;\n                      } else {\n                        tail = arg;\n                      }\n                    } else {\n                      tail = undefined;\n                    }\n                    if (exit$1 === 3) {\n                      var loc$2 = {\n                        loc_start: lparen,\n                        loc_end: rparen,\n                        loc_ghost: false\n                      };\n                      tail = Ast_helper.Exp.tuple(loc$2, undefined, args);\n                    }\n                    var loc_loc_end$1 = p.prevEndPos;\n                    var loc$3 = {\n                      loc_start: startPos,\n                      loc_end: loc_loc_end$1,\n                      loc_ghost: false\n                    };\n                    var identLoc = {\n                      loc_start: startPos,\n                      loc_end: endPosLident,\n                      loc_ghost: false\n                    };\n                    return Ast_helper.Exp.construct(loc$3, undefined, $$Location.mkloc(lident$1, identLoc), tail);\n                  }\n                  exit = 2;\n                }\n              } else {\n                Res_parser.next(undefined, p);\n                _acc = {\n                  hd: ident$1,\n                  tl: acc\n                };\n                continue ;\n              }\n            } else {\n              exit = 2;\n            }\n            if (exit === 2) {\n              var loc_loc_end$2 = p.prevEndPos;\n              var loc$4 = {\n                loc_start: startPos,\n                loc_end: loc_loc_end$2,\n                loc_ghost: false\n              };\n              var lident$2 = buildLongident({\n                    hd: ident$1,\n                    tl: acc\n                  });\n              return 
Ast_helper.Exp.construct(loc$4, undefined, $$Location.mkloc(lident$2, loc$4), undefined);\n            }\n            break;\n        default:\n          \n      }\n    }\n    if (acc === /* [] */0) {\n      Res_parser.next(undefined, p);\n      Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(ident, p.breadcrumbs));\n      return defaultExpr(undefined);\n    }\n    var loc_loc_end$3 = p.prevEndPos;\n    var loc$5 = {\n      loc_start: startPos,\n      loc_end: loc_loc_end$3,\n      loc_ghost: false\n    };\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(ident, p.breadcrumbs));\n    var lident$3 = buildLongident({\n          hd: \"_\",\n          tl: acc\n        });\n    return Ast_helper.Exp.ident(loc$5, undefined, $$Location.mkloc(lident$3, loc$5));\n  };\n}\n\nfunction parseAtomicExpr(p) {\n  Res_parser.leaveBreadcrumb(p, /* ExprOperand */7);\n  var startPos = p.startPos;\n  var token = p.token;\n  var expr;\n  var exit = 0;\n  if (typeof token === \"number\") {\n    switch (token) {\n      case /* True */1 :\n      case /* False */2 :\n          exit = 2;\n          break;\n      case /* Underscore */12 :\n          Res_parser.err(undefined, undefined, p, Res_diagnostics.lident(token));\n          Res_parser.next(undefined, p);\n          expr = defaultExpr(undefined);\n          break;\n      case /* Lparen */18 :\n          Res_parser.next(undefined, p);\n          var _t = p.token;\n          if (_t === 19) {\n            Res_parser.next(undefined, p);\n            var loc_loc_end = p.prevEndPos;\n            var loc = {\n              loc_start: startPos,\n              loc_end: loc_loc_end,\n              loc_ghost: false\n            };\n            expr = Ast_helper.Exp.construct(loc, undefined, $$Location.mkloc({\n                      TAG: /* Lident */0,\n                      _0: \"()\"\n                    }, loc), undefined);\n          } else {\n            var expr$1 = parseConstrainedOrCoercedExpr(p);\n  
          var match = p.token;\n            if (match === 25) {\n              Res_parser.next(undefined, p);\n              expr = parseTupleExpr(expr$1, startPos, p);\n            } else {\n              Res_parser.expect(undefined, /* Rparen */19, p);\n              expr = expr$1;\n            }\n          }\n          break;\n      case /* Lbracket */20 :\n          expr = parseArrayExp(p);\n          break;\n      case /* Lbrace */22 :\n          expr = parseBracedOrRecordExpr(p);\n          break;\n      case /* LessThan */42 :\n          expr = parseJsx(p);\n          break;\n      case /* Hash */44 :\n          expr = parsePolyVariantExpr(p);\n          break;\n      case /* Module */65 :\n          Res_parser.next(undefined, p);\n          expr = parseFirstClassModuleExpr(startPos, p);\n          break;\n      case /* Percent */77 :\n          var extension = parseExtension(undefined, p);\n          var loc_loc_end$1 = p.prevEndPos;\n          var loc$1 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$1,\n            loc_ghost: false\n          };\n          expr = Ast_helper.Exp.extension(loc$1, undefined, extension);\n          break;\n      case /* List */79 :\n          Res_parser.next(undefined, p);\n          expr = parseListExpr(startPos, p);\n          break;\n      case /* Backtick */80 :\n          var expr$2 = parseTemplateExpr(undefined, p);\n          expr = {\n            pexp_desc: expr$2.pexp_desc,\n            pexp_loc: {\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            },\n            pexp_attributes: expr$2.pexp_attributes\n          };\n          break;\n      default:\n        exit = 1;\n    }\n  } else {\n    switch (token.TAG | 0) {\n      case /* Codepoint */0 :\n      case /* Int */1 :\n      case /* Float */2 :\n      case /* String */3 :\n          exit = 3;\n          break;\n      case /* Lident */4 :\n      case /* Uident */5 :\n       
   expr = parseValueOrConstructor(p);\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  switch (exit) {\n    case 1 :\n        var errPos = p.prevEndPos;\n        Res_parser.err(errPos, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n        var match$1 = skipTokensAndMaybeRetry(p, Res_grammar.isAtomicExprStart);\n        expr = match$1 !== undefined ? parseAtomicExpr(p) : defaultExpr(undefined);\n        break;\n    case 2 :\n        Res_parser.next(undefined, p);\n        var loc_loc_end$2 = p.prevEndPos;\n        var loc$2 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$2,\n          loc_ghost: false\n        };\n        expr = Ast_helper.Exp.construct(loc$2, undefined, $$Location.mkloc({\n                  TAG: /* Lident */0,\n                  _0: Res_token.toString(token)\n                }, loc$2), undefined);\n        break;\n    case 3 :\n        var c = parseConstant(p);\n        var loc_loc_end$3 = p.prevEndPos;\n        var loc$3 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$3,\n          loc_ghost: false\n        };\n        expr = Ast_helper.Exp.constant(loc$3, undefined, c);\n        break;\n    \n  }\n  Res_parser.eatBreadcrumb(p);\n  return expr;\n}\n\nfunction parseRecordPatternItem(p) {\n  var match = p.token;\n  if (typeof match === \"number\") {\n    switch (match) {\n      case /* DotDotDot */6 :\n          Res_parser.next(undefined, p);\n          return [\n                  true,\n                  /* PatField */{\n                    _0: parseRecordPatternField(p)\n                  }\n                ];\n      case /* Underscore */12 :\n          Res_parser.next(undefined, p);\n          return [\n                  false,\n                  /* PatUnderscore */0\n                ];\n      default:\n        return ;\n    }\n  } else {\n    switch (match.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return [\n                  
false,\n                  /* PatField */{\n                    _0: parseRecordPatternField(p)\n                  }\n                ];\n      default:\n        return ;\n    }\n  }\n}\n\nfunction parsePackageConstraints(p) {\n  Res_parser.expect(undefined, /* Typ */60, p);\n  var typeConstr = parseValuePath(p);\n  Res_parser.expect(undefined, /* Equal */14, p);\n  var typ = parseTypExpr(undefined, undefined, undefined, p);\n  var first = [\n    typeConstr,\n    typ\n  ];\n  var rest = parseRegion(p, /* PackageConstraint */32, parsePackageConstraint);\n  return {\n          hd: first,\n          tl: rest\n        };\n}\n\nfunction parseConstrainedExprRegion(p) {\n  var token = p.token;\n  if (!Res_grammar.isExprStart(token)) {\n    return ;\n  }\n  var expr = parseExpr(undefined, p);\n  var match = p.token;\n  if (match !== 24) {\n    return expr;\n  }\n  Res_parser.next(undefined, p);\n  var typ = parseTypExpr(undefined, undefined, undefined, p);\n  var loc_loc_start = expr.pexp_loc.loc_start;\n  var loc_loc_end = typ.ptyp_loc.loc_end;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.constraint_(loc, undefined, expr, typ);\n}\n\nfunction parseTagName(p) {\n  var match = p.token;\n  if (match !== 44) {\n    return ;\n  }\n  var match$1 = parseHashIdent(p.startPos, p);\n  return match$1[0];\n}\n\nfunction parseFunctorModuleType(p) {\n  var startPos = p.startPos;\n  var args = parseFunctorArgs(p);\n  Res_parser.expect(undefined, /* EqualGreater */57, p);\n  var rhs = parseModuleType(undefined, undefined, p);\n  var endPos = p.prevEndPos;\n  var modType = List.fold_right((function (param, acc) {\n          return Ast_helper.Mty.functor_({\n                      loc_start: param[3],\n                      loc_end: endPos,\n                      loc_ghost: false\n                    }, param[0], param[1], param[2], acc);\n        }), args, rhs);\n  return {\n          pmty_desc: 
modType.pmty_desc,\n          pmty_loc: {\n            loc_start: startPos,\n            loc_end: endPos,\n            loc_ghost: false\n          },\n          pmty_attributes: modType.pmty_attributes\n        };\n}\n\nfunction parseWithConstraints(moduleType, p) {\n  var match = p.token;\n  if (typeof match === \"number\") {\n    return moduleType;\n  }\n  if (match.TAG !== /* Lident */4) {\n    return moduleType;\n  }\n  if (match._0 !== \"with\") {\n    return moduleType;\n  }\n  Res_parser.next(undefined, p);\n  var first = parseWithConstraint(p);\n  var loop = function (p, _acc) {\n    while(true) {\n      var acc = _acc;\n      var match = p.token;\n      if (match !== 10) {\n        return List.rev(acc);\n      }\n      Res_parser.next(undefined, p);\n      _acc = {\n        hd: parseWithConstraint(p),\n        tl: acc\n      };\n      continue ;\n    };\n  };\n  var constraints = loop(p, {\n        hd: first,\n        tl: /* [] */0\n      });\n  var loc_loc_start = moduleType.pmty_loc.loc_start;\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Mty.with_(loc, undefined, moduleType, constraints);\n}\n\nfunction parseAtomicModuleType(p) {\n  var startPos = p.startPos;\n  var token = p.token;\n  var moduleType;\n  var exit = 0;\n  if (typeof token === \"number\") {\n    switch (token) {\n      case /* Lparen */18 :\n          Res_parser.next(undefined, p);\n          var mty = parseModuleType(undefined, undefined, p);\n          Res_parser.expect(undefined, /* Rparen */19, p);\n          moduleType = {\n            pmty_desc: mty.pmty_desc,\n            pmty_loc: {\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            },\n            pmty_attributes: mty.pmty_attributes\n          };\n          break;\n      case /* Lbrace */22 :\n          Res_parser.next(undefined, p);\n          var spec 
= parseDelimitedRegion(p, /* Signature */46, /* Rbrace */23, parseSignatureItemRegion);\n          Res_parser.expect(undefined, /* Rbrace */23, p);\n          var loc_loc_end = p.prevEndPos;\n          var loc = {\n            loc_start: startPos,\n            loc_end: loc_loc_end,\n            loc_ghost: false\n          };\n          moduleType = Ast_helper.Mty.signature(loc, undefined, spec);\n          break;\n      case /* Module */65 :\n          moduleType = parseModuleTypeOf(p);\n          break;\n      case /* Percent */77 :\n          var extension = parseExtension(undefined, p);\n          var loc_loc_end$1 = p.prevEndPos;\n          var loc$1 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$1,\n            loc_ghost: false\n          };\n          moduleType = Ast_helper.Mty.extension(loc$1, undefined, extension);\n          break;\n      default:\n        exit = 1;\n    }\n  } else {\n    switch (token.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          exit = 2;\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  switch (exit) {\n    case 1 :\n        Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n        moduleType = defaultModuleType(undefined);\n        break;\n    case 2 :\n        var moduleLongIdent = parseModuleLongIdent(true, p);\n        moduleType = Ast_helper.Mty.ident(moduleLongIdent.loc, undefined, moduleLongIdent);\n        break;\n    \n  }\n  var moduleTypeLoc_loc_end = p.prevEndPos;\n  var moduleTypeLoc = {\n    loc_start: startPos,\n    loc_end: moduleTypeLoc_loc_end,\n    loc_ghost: false\n  };\n  return {\n          pmty_desc: moduleType.pmty_desc,\n          pmty_loc: moduleTypeLoc,\n          pmty_attributes: moduleType.pmty_attributes\n        };\n}\n\nfunction parseFunctorArgs(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var args = parseCommaDelimitedRegion(p, /* FunctorArgs 
*/40, /* Rparen */19, parseFunctorArg);\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  if (args) {\n    return args;\n  } else {\n    return {\n            hd: [\n              /* [] */0,\n              $$Location.mkloc(\"*\", {\n                    loc_start: startPos,\n                    loc_end: p.prevEndPos,\n                    loc_ghost: false\n                  }),\n              undefined,\n              startPos\n            ],\n            tl: /* [] */0\n          };\n  }\n}\n\nfunction parseFunctorArg(p) {\n  var startPos = p.startPos;\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var ident = p.token;\n  if (typeof ident === \"number\") {\n    switch (ident) {\n      case /* Underscore */12 :\n          Res_parser.next(undefined, p);\n          var argName = $$Location.mkloc(\"_\", {\n                loc_start: startPos,\n                loc_end: p.prevEndPos,\n                loc_ghost: false\n              });\n          Res_parser.expect(undefined, /* Colon */24, p);\n          var moduleType = parseModuleType(undefined, undefined, p);\n          return [\n                  attrs,\n                  argName,\n                  moduleType,\n                  startPos\n                ];\n      case /* Lparen */18 :\n          Res_parser.next(undefined, p);\n          Res_parser.expect(undefined, /* Rparen */19, p);\n          var argName$1 = $$Location.mkloc(\"*\", {\n                loc_start: startPos,\n                loc_end: p.prevEndPos,\n                loc_ghost: false\n              });\n          return [\n                  attrs,\n                  argName$1,\n                  undefined,\n                  startPos\n                ];\n      default:\n        return ;\n    }\n  } else {\n    if (ident.TAG !== /* Uident */5) {\n      return ;\n    }\n    var ident$1 = ident._0;\n    Res_parser.next(undefined, p);\n    var uidentEndPos = p.prevEndPos;\n    var match = p.token;\n    if (typeof match === 
\"number\") {\n      if (match !== 4) {\n        if (match === 24) {\n          Res_parser.next(undefined, p);\n          var moduleType$1 = parseModuleType(undefined, undefined, p);\n          var loc = {\n            loc_start: startPos,\n            loc_end: uidentEndPos,\n            loc_ghost: false\n          };\n          var argName$2 = $$Location.mkloc(ident$1, loc);\n          return [\n                  attrs,\n                  argName$2,\n                  moduleType$1,\n                  startPos\n                ];\n        }\n        \n      } else {\n        Res_parser.next(undefined, p);\n        var moduleLongIdent = parseModuleLongIdentTail(false, p, startPos, {\n              TAG: /* Lident */0,\n              _0: ident$1\n            });\n        var moduleType$2 = Ast_helper.Mty.ident(moduleLongIdent.loc, undefined, moduleLongIdent);\n        var argName$3 = $$Location.mknoloc(\"_\");\n        return [\n                attrs,\n                argName$3,\n                moduleType$2,\n                startPos\n              ];\n      }\n    }\n    var loc$1 = {\n      loc_start: startPos,\n      loc_end: uidentEndPos,\n      loc_ghost: false\n    };\n    var modIdent = $$Location.mkloc({\n          TAG: /* Lident */0,\n          _0: ident$1\n        }, loc$1);\n    var moduleType$3 = Ast_helper.Mty.ident(loc$1, undefined, modIdent);\n    var argName$4 = $$Location.mknoloc(\"_\");\n    return [\n            attrs,\n            argName$4,\n            moduleType$3,\n            startPos\n          ];\n  }\n}\n\nfunction parsePrivateEqOrRepr(p) {\n  Res_parser.expect(undefined, /* Private */61, p);\n  var t = p.token;\n  var exit = 0;\n  if (typeof t === \"number\") {\n    switch (t) {\n      case /* DotDot */5 :\n      case /* Bar */17 :\n          exit = 2;\n          break;\n      case /* Lbrace */22 :\n          var match = parseRecordOrObjectDecl(p);\n          return [\n                  match[0],\n                  /* Private */0,\n       
           match[2]\n                ];\n      default:\n        exit = 1;\n    }\n  } else {\n    if (t.TAG === /* Uident */5) {\n      var match$1 = parseTypeEquationOrConstrDecl(p);\n      return [\n              match$1[0],\n              /* Private */0,\n              match$1[2]\n            ];\n    }\n    exit = 1;\n  }\n  switch (exit) {\n    case 1 :\n        if (Res_grammar.isTypExprStart(t)) {\n          return [\n                  parseTypExpr(undefined, undefined, undefined, p),\n                  /* Private */0,\n                  /* Ptype_abstract */0\n                ];\n        }\n        var match$2 = parseTypeRepresentation(p);\n        return [\n                undefined,\n                /* Private */0,\n                match$2[1]\n              ];\n    case 2 :\n        var match$3 = parseTypeRepresentation(p);\n        return [\n                undefined,\n                /* Private */0,\n                match$3[1]\n              ];\n    \n  }\n}\n\nfunction parseAtomicModuleExpr(p) {\n  var startPos = p.startPos;\n  var _ident = p.token;\n  if (typeof _ident === \"number\") {\n    switch (_ident) {\n      case /* Lparen */18 :\n          Res_parser.next(undefined, p);\n          var match = p.token;\n          var modExpr = match === 19 ? 
Ast_helper.Mod.structure({\n                  loc_start: startPos,\n                  loc_end: p.prevEndPos,\n                  loc_ghost: false\n                }, undefined, /* [] */0) : parseConstrainedModExpr(p);\n          Res_parser.expect(undefined, /* Rparen */19, p);\n          return modExpr;\n      case /* Lbrace */22 :\n          Res_parser.next(undefined, p);\n          var structure = Ast_helper.Mod.structure(undefined, undefined, parseDelimitedRegion(p, /* Structure */48, /* Rbrace */23, parseStructureItemRegion));\n          Res_parser.expect(undefined, /* Rbrace */23, p);\n          var endPos = p.prevEndPos;\n          return {\n                  pmod_desc: structure.pmod_desc,\n                  pmod_loc: {\n                    loc_start: startPos,\n                    loc_end: endPos,\n                    loc_ghost: false\n                  },\n                  pmod_attributes: structure.pmod_attributes\n                };\n      case /* Percent */77 :\n          var extension = parseExtension(undefined, p);\n          var loc_loc_end = p.prevEndPos;\n          var loc = {\n            loc_start: startPos,\n            loc_end: loc_loc_end,\n            loc_ghost: false\n          };\n          return Ast_helper.Mod.extension(loc, undefined, extension);\n      default:\n        \n    }\n  } else {\n    switch (_ident.TAG | 0) {\n      case /* Lident */4 :\n          if (_ident._0 === \"unpack\") {\n            Res_parser.next(undefined, p);\n            Res_parser.expect(undefined, /* Lparen */18, p);\n            var expr = parseExpr(undefined, p);\n            var match$1 = p.token;\n            if (match$1 === 24) {\n              var colonStart = p.startPos;\n              Res_parser.next(undefined, p);\n              var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n              var packageType = parsePackageType(colonStart, attrs, p);\n              Res_parser.expect(undefined, /* Rparen */19, p);\n              var 
loc_loc_end$1 = p.prevEndPos;\n              var loc$1 = {\n                loc_start: startPos,\n                loc_end: loc_loc_end$1,\n                loc_ghost: false\n              };\n              var constraintExpr = Ast_helper.Exp.constraint_(loc$1, undefined, expr, packageType);\n              return Ast_helper.Mod.unpack(loc$1, undefined, constraintExpr);\n            }\n            Res_parser.expect(undefined, /* Rparen */19, p);\n            var loc_loc_end$2 = p.prevEndPos;\n            var loc$2 = {\n              loc_start: startPos,\n              loc_end: loc_loc_end$2,\n              loc_ghost: false\n            };\n            return Ast_helper.Mod.unpack(loc$2, undefined, expr);\n          }\n          break;\n      case /* Uident */5 :\n          var longident = parseModuleLongIdent(false, p);\n          return Ast_helper.Mod.ident(longident.loc, undefined, longident);\n      default:\n        \n    }\n  }\n  Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(_ident, p.breadcrumbs));\n  return defaultModuleExpr(undefined);\n}\n\nfunction parseModuleApplication(p, modExpr) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var args = parseCommaDelimitedRegion(p, /* ModExprList */41, /* Rparen */19, parseConstrainedModExprRegion);\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  var args$1;\n  if (args) {\n    args$1 = args;\n  } else {\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    args$1 = {\n      hd: Ast_helper.Mod.structure(loc, undefined, /* [] */0),\n      tl: /* [] */0\n    };\n  }\n  return List.fold_left((function (modExpr, arg) {\n                return Ast_helper.Mod.apply({\n                            loc_start: modExpr.pmod_loc.loc_start,\n                            loc_end: arg.pmod_loc.loc_end,\n                            loc_ghost: false\n                          }, 
undefined, modExpr, arg);\n              }), modExpr, args$1);\n}\n\nfunction parseTemplateExpr(prefixOpt, p) {\n  var prefix = prefixOpt !== undefined ? prefixOpt : \"js\";\n  var op = $$Location.mknoloc({\n        TAG: /* Lident */0,\n        _0: \"^\"\n      });\n  var hiddenOperator = Ast_helper.Exp.ident(undefined, undefined, op);\n  var startPos = p.startPos;\n  Res_parser.nextTemplateLiteralToken(p);\n  var txt = p.token;\n  if (typeof txt !== \"number\") {\n    switch (txt.TAG | 0) {\n      case /* TemplateTail */7 :\n          var txt$1 = txt._0;\n          Res_parser.next(undefined, p);\n          var txt$2 = p.mode === /* ParseForTypeChecker */0 ? parseTemplateStringLiteral(txt$1) : txt$1;\n          return Ast_helper.Exp.constant({\n                      loc_start: startPos,\n                      loc_end: p.prevEndPos,\n                      loc_ghost: false\n                    }, {\n                      hd: templateLiteralAttr,\n                      tl: /* [] */0\n                    }, {\n                      TAG: /* Pconst_string */2,\n                      _0: txt$2,\n                      _1: prefix\n                    });\n      case /* TemplatePart */8 :\n          var txt$3 = txt._0;\n          Res_parser.next(undefined, p);\n          var constantLoc_loc_end = p.prevEndPos;\n          var constantLoc = {\n            loc_start: startPos,\n            loc_end: constantLoc_loc_end,\n            loc_ghost: false\n          };\n          var expr = parseExprBlock(undefined, p);\n          var fullLoc_loc_end = p.prevEndPos;\n          var fullLoc = {\n            loc_start: startPos,\n            loc_end: fullLoc_loc_end,\n            loc_ghost: false\n          };\n          var txt$4 = p.mode === /* ParseForTypeChecker */0 ? 
parseTemplateStringLiteral(txt$3) : txt$3;\n          var str = Ast_helper.Exp.constant(constantLoc, {\n                hd: templateLiteralAttr,\n                tl: /* [] */0\n              }, {\n                TAG: /* Pconst_string */2,\n                _0: txt$4,\n                _1: prefix\n              });\n          var _acc = Ast_helper.Exp.apply(fullLoc, {\n                hd: templateLiteralAttr,\n                tl: /* [] */0\n              }, hiddenOperator, {\n                hd: [\n                  /* Nolabel */0,\n                  str\n                ],\n                tl: {\n                  hd: [\n                    /* Nolabel */0,\n                    expr\n                  ],\n                  tl: /* [] */0\n                }\n              });\n          while(true) {\n            var acc = _acc;\n            var startPos$1 = p.startPos;\n            Res_parser.nextTemplateLiteralToken(p);\n            var txt$5 = p.token;\n            if (typeof txt$5 !== \"number\") {\n              switch (txt$5.TAG | 0) {\n                case /* TemplateTail */7 :\n                    var txt$6 = txt$5._0;\n                    Res_parser.next(undefined, p);\n                    var loc_loc_end = p.prevEndPos;\n                    var loc = {\n                      loc_start: startPos$1,\n                      loc_end: loc_loc_end,\n                      loc_ghost: false\n                    };\n                    var txt$7 = p.mode === /* ParseForTypeChecker */0 ? 
parseTemplateStringLiteral(txt$6) : txt$6;\n                    var str$1 = Ast_helper.Exp.constant(loc, {\n                          hd: templateLiteralAttr,\n                          tl: /* [] */0\n                        }, {\n                          TAG: /* Pconst_string */2,\n                          _0: txt$7,\n                          _1: prefix\n                        });\n                    return Ast_helper.Exp.apply(loc, {\n                                hd: templateLiteralAttr,\n                                tl: /* [] */0\n                              }, hiddenOperator, {\n                                hd: [\n                                  /* Nolabel */0,\n                                  acc\n                                ],\n                                tl: {\n                                  hd: [\n                                    /* Nolabel */0,\n                                    str$1\n                                  ],\n                                  tl: /* [] */0\n                                }\n                              });\n                case /* TemplatePart */8 :\n                    var txt$8 = txt$5._0;\n                    Res_parser.next(undefined, p);\n                    var loc_loc_end$1 = p.prevEndPos;\n                    var loc$1 = {\n                      loc_start: startPos$1,\n                      loc_end: loc_loc_end$1,\n                      loc_ghost: false\n                    };\n                    var expr$1 = parseExprBlock(undefined, p);\n                    var fullLoc_loc_end$1 = p.prevEndPos;\n                    var fullLoc$1 = {\n                      loc_start: startPos$1,\n                      loc_end: fullLoc_loc_end$1,\n                      loc_ghost: false\n                    };\n                    var txt$9 = p.mode === /* ParseForTypeChecker */0 ? 
parseTemplateStringLiteral(txt$8) : txt$8;\n                    var str$2 = Ast_helper.Exp.constant(loc$1, {\n                          hd: templateLiteralAttr,\n                          tl: /* [] */0\n                        }, {\n                          TAG: /* Pconst_string */2,\n                          _0: txt$9,\n                          _1: prefix\n                        });\n                    var a = Ast_helper.Exp.apply(fullLoc$1, {\n                          hd: templateLiteralAttr,\n                          tl: /* [] */0\n                        }, hiddenOperator, {\n                          hd: [\n                            /* Nolabel */0,\n                            acc\n                          ],\n                          tl: {\n                            hd: [\n                              /* Nolabel */0,\n                              str$2\n                            ],\n                            tl: /* [] */0\n                          }\n                        });\n                    _acc = Ast_helper.Exp.apply(fullLoc$1, undefined, hiddenOperator, {\n                          hd: [\n                            /* Nolabel */0,\n                            a\n                          ],\n                          tl: {\n                            hd: [\n                              /* Nolabel */0,\n                              expr$1\n                            ],\n                            tl: /* [] */0\n                          }\n                        });\n                    continue ;\n                default:\n                  \n              }\n            }\n            Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(txt$5, p.breadcrumbs));\n            return Ast_helper.Exp.constant(undefined, undefined, {\n                        TAG: /* Pconst_string */2,\n                        _0: \"\",\n                        _1: undefined\n                      });\n          };\n      default:\n 
       \n    }\n  }\n  Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(txt, p.breadcrumbs));\n  return Ast_helper.Exp.constant(undefined, undefined, {\n              TAG: /* Pconst_string */2,\n              _0: \"\",\n              _1: undefined\n            });\n}\n\nfunction parseCallExpr(p, funExpr) {\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var startPos = p.startPos;\n  Res_parser.leaveBreadcrumb(p, /* ExprCall */11);\n  var args = parseCommaDelimitedRegion(p, /* ArgumentList */45, /* Rparen */19, parseArgument);\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  var args$1;\n  if (args) {\n    var match = args.hd;\n    if (match[0] && typeof match[1] === \"number\") {\n      var expr = match[2];\n      var match$1 = expr.pexp_desc;\n      if (typeof match$1 === \"number\" || match$1.TAG !== /* Pexp_construct */9) {\n        args$1 = args;\n      } else {\n        var match$2 = match$1._0.txt;\n        switch (match$2.TAG | 0) {\n          case /* Lident */0 :\n              args$1 = match$2._0 === \"()\" && !(match$1._1 !== undefined || expr.pexp_attributes || args.tl || !(!expr.pexp_loc.loc_ghost && p.mode === /* ParseForTypeChecker */0)) ? 
({\n                    hd: [\n                      true,\n                      /* Nolabel */0,\n                      Ast_helper.Exp.let_(undefined, undefined, /* Nonrecursive */0, {\n                            hd: Ast_helper.Vb.mk(undefined, undefined, undefined, undefined, Ast_helper.Pat.$$var(undefined, undefined, $$Location.mknoloc(\"__res_unit\")), expr),\n                            tl: /* [] */0\n                          }, Ast_helper.Exp.ident(undefined, undefined, $$Location.mknoloc({\n                                    TAG: /* Lident */0,\n                                    _0: \"__res_unit\"\n                                  })))\n                    ],\n                    tl: /* [] */0\n                  }) : args;\n              break;\n          case /* Ldot */1 :\n          case /* Lapply */2 :\n              args$1 = args;\n              break;\n          \n        }\n      }\n    } else {\n      args$1 = args;\n    }\n  } else {\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    args$1 = {\n      hd: [\n        false,\n        /* Nolabel */0,\n        Ast_helper.Exp.construct(loc, undefined, $$Location.mkloc({\n                  TAG: /* Lident */0,\n                  _0: \"()\"\n                }, loc), undefined)\n      ],\n      tl: /* [] */0\n    };\n  }\n  var init = funExpr.pexp_loc;\n  var loc_loc_start = init.loc_start;\n  var loc_loc_end$1 = p.prevEndPos;\n  var loc_loc_ghost = init.loc_ghost;\n  var loc$1 = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end$1,\n    loc_ghost: loc_loc_ghost\n  };\n  var args$2;\n  if (args$1) {\n    var match$3 = args$1.hd;\n    var group = function (param, param$1) {\n      var grp = param[0];\n      var grp$1 = grp[1];\n      var _u = grp[0];\n      var expr = param$1[2];\n      var lbl = param$1[1];\n      var acc = param[1];\n      if (param$1[0] === true) {\n        return [\n               
 [\n                  true,\n                  {\n                    hd: [\n                      lbl,\n                      expr\n                    ],\n                    tl: /* [] */0\n                  }\n                ],\n                {\n                  hd: [\n                    _u,\n                    List.rev(grp$1)\n                  ],\n                  tl: acc\n                }\n              ];\n      } else {\n        return [\n                [\n                  _u,\n                  {\n                    hd: [\n                      lbl,\n                      expr\n                    ],\n                    tl: grp$1\n                  }\n                ],\n                acc\n              ];\n      }\n    };\n    var match$4 = List.fold_left(group, [\n          [\n            match$3[0],\n            {\n              hd: [\n                match$3[1],\n                match$3[2]\n              ],\n              tl: /* [] */0\n            }\n          ],\n          /* [] */0\n        ], args$1.tl);\n    var match$5 = match$4[0];\n    args$2 = List.rev({\n          hd: [\n            match$5[0],\n            List.rev(match$5[1])\n          ],\n          tl: match$4[1]\n        });\n  } else {\n    args$2 = /* [] */0;\n  }\n  var apply = List.fold_left((function (callBody, group) {\n          var match = processUnderscoreApplication(group[1]);\n          var args = match[0];\n          var tmp;\n          if (group[0]) {\n            var attrs = {\n              hd: uncurryAttr,\n              tl: /* [] */0\n            };\n            tmp = Ast_helper.Exp.apply(loc$1, attrs, callBody, args);\n          } else {\n            tmp = Ast_helper.Exp.apply(loc$1, undefined, callBody, args);\n          }\n          return Curry._1(match[1], tmp);\n        }), funExpr, args$2);\n  Res_parser.eatBreadcrumb(p);\n  return apply;\n}\n\nfunction parseBracketAccess(p, expr, startPos) {\n  Res_parser.leaveBreadcrumb(p, /* ExprArrayAccess 
*/13);\n  var lbracket = p.startPos;\n  Res_parser.next(undefined, p);\n  var stringStart = p.startPos;\n  var s = p.token;\n  if (typeof s !== \"number\" && s.TAG === /* String */3) {\n    var s$1 = s._0;\n    var s$2 = p.mode === /* ParseForTypeChecker */0 ? parseStringLiteral(s$1) : s$1;\n    Res_parser.next(undefined, p);\n    var stringEnd = p.prevEndPos;\n    Res_parser.expect(undefined, /* Rbracket */21, p);\n    Res_parser.eatBreadcrumb(p);\n    var rbracket = p.prevEndPos;\n    var identLoc = {\n      loc_start: stringStart,\n      loc_end: stringEnd,\n      loc_ghost: false\n    };\n    var loc = {\n      loc_start: startPos,\n      loc_end: rbracket,\n      loc_ghost: false\n    };\n    var e = Ast_helper.Exp.send(loc, undefined, expr, $$Location.mkloc(s$2, identLoc));\n    var e$1 = parsePrimaryExpr(e, undefined, p);\n    var equalStart = p.startPos;\n    var match = p.token;\n    if (match !== 14) {\n      return e$1;\n    }\n    Res_parser.next(undefined, p);\n    var equalEnd = p.prevEndPos;\n    var rhsExpr = parseExpr(undefined, p);\n    var loc_loc_end = rhsExpr.pexp_loc.loc_end;\n    var loc$1 = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    var operatorLoc = {\n      loc_start: equalStart,\n      loc_end: equalEnd,\n      loc_ghost: false\n    };\n    return Ast_helper.Exp.apply(loc$1, undefined, Ast_helper.Exp.ident(operatorLoc, undefined, $$Location.mkloc({\n                        TAG: /* Lident */0,\n                        _0: \"#=\"\n                      }, operatorLoc)), {\n                hd: [\n                  /* Nolabel */0,\n                  e$1\n                ],\n                tl: {\n                  hd: [\n                    /* Nolabel */0,\n                    rhsExpr\n                  ],\n                  tl: /* [] */0\n                }\n              });\n  }\n  var accessExpr = parseConstrainedOrCoercedExpr(p);\n  Res_parser.expect(undefined, /* Rbracket */21, p);\n  
Res_parser.eatBreadcrumb(p);\n  var rbracket$1 = p.prevEndPos;\n  var arrayLoc = {\n    loc_start: lbracket,\n    loc_end: rbracket$1,\n    loc_ghost: false\n  };\n  var match$1 = p.token;\n  if (match$1 === 14) {\n    Res_parser.leaveBreadcrumb(p, /* ExprArrayMutation */14);\n    Res_parser.next(undefined, p);\n    var rhsExpr$1 = parseExpr(undefined, p);\n    var arraySet = $$Location.mkloc({\n          TAG: /* Ldot */1,\n          _0: {\n            TAG: /* Lident */0,\n            _0: \"Array\"\n          },\n          _1: \"set\"\n        }, arrayLoc);\n    var endPos = p.prevEndPos;\n    var arraySet$1 = Ast_helper.Exp.apply({\n          loc_start: startPos,\n          loc_end: endPos,\n          loc_ghost: false\n        }, undefined, Ast_helper.Exp.ident(arrayLoc, undefined, arraySet), {\n          hd: [\n            /* Nolabel */0,\n            expr\n          ],\n          tl: {\n            hd: [\n              /* Nolabel */0,\n              accessExpr\n            ],\n            tl: {\n              hd: [\n                /* Nolabel */0,\n                rhsExpr$1\n              ],\n              tl: /* [] */0\n            }\n          }\n        });\n    Res_parser.eatBreadcrumb(p);\n    return arraySet$1;\n  }\n  var endPos$1 = p.prevEndPos;\n  var e$2 = Ast_helper.Exp.apply({\n        loc_start: startPos,\n        loc_end: endPos$1,\n        loc_ghost: false\n      }, undefined, Ast_helper.Exp.ident(arrayLoc, undefined, $$Location.mkloc({\n                TAG: /* Ldot */1,\n                _0: {\n                  TAG: /* Lident */0,\n                  _0: \"Array\"\n                },\n                _1: \"get\"\n              }, arrayLoc)), {\n        hd: [\n          /* Nolabel */0,\n          expr\n        ],\n        tl: {\n          hd: [\n            /* Nolabel */0,\n            accessExpr\n          ],\n          tl: /* [] */0\n        }\n      });\n  return parsePrimaryExpr(e$2, undefined, p);\n}\n\nfunction parseRecordOrObjectType(attrs, 
p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var match = p.token;\n  var closedFlag = typeof match === \"number\" ? (\n      match !== 4 ? (\n          match !== 5 ? /* Closed */0 : (Res_parser.next(undefined, p), /* Open */1)\n        ) : (Res_parser.next(undefined, p), /* Closed */0)\n    ) : /* Closed */0;\n  var match$1 = p.token;\n  if (typeof match$1 !== \"number\" && match$1.TAG === /* Lident */4) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.message(forbiddenInlineRecordDeclaration));\n  }\n  var startFirstField = p.startPos;\n  var fields = parseCommaDelimitedRegion(p, /* StringFieldDeclarations */37, /* Rbrace */23, parseStringFieldDeclaration);\n  if (fields) {\n    var match$2 = fields.hd;\n    if (match$2.TAG !== /* Otag */0) {\n      if (fields.tl) {\n        \n      } else {\n        Res_parser.err(startFirstField, match$2._0.ptyp_loc.loc_end, p, Res_diagnostics.message(sameTypeSpread));\n      }\n    }\n    \n  }\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Typ.object_(loc, attrs, fields, closedFlag);\n}\n\nfunction parseAtomicTypExpr(attrs, p) {\n  Res_parser.leaveBreadcrumb(p, /* AtomicTypExpr */52);\n  var startPos = p.startPos;\n  var token = p.token;\n  var typ;\n  var exit = 0;\n  if (typeof token === \"number\") {\n    switch (token) {\n      case /* Underscore */12 :\n          var endPos = p.endPos;\n          Res_parser.next(undefined, p);\n          typ = Ast_helper.Typ.any({\n                loc_start: startPos,\n                loc_end: endPos,\n                loc_ghost: false\n              }, attrs, undefined);\n          break;\n      case /* SingleQuote */13 :\n          Res_parser.next(undefined, p);\n          var match = parseIdent(typeVar, p.startPos, p);\n          typ = Ast_helper.Typ.$$var(match[1], attrs, 
match[0]);\n          break;\n      case /* Lparen */18 :\n          Res_parser.next(undefined, p);\n          var match$1 = p.token;\n          if (match$1 === 19) {\n            Res_parser.next(undefined, p);\n            var loc_loc_end = p.prevEndPos;\n            var loc = {\n              loc_start: startPos,\n              loc_end: loc_loc_end,\n              loc_ghost: false\n            };\n            var unitConstr = $$Location.mkloc({\n                  TAG: /* Lident */0,\n                  _0: \"unit\"\n                }, loc);\n            typ = Ast_helper.Typ.constr(undefined, attrs, unitConstr, /* [] */0);\n          } else {\n            var t = parseTypExpr(undefined, undefined, undefined, p);\n            var match$2 = p.token;\n            if (match$2 === 25) {\n              Res_parser.next(undefined, p);\n              typ = parseTupleType(attrs, t, startPos, p);\n            } else {\n              Res_parser.expect(undefined, /* Rparen */19, p);\n              typ = {\n                ptyp_desc: t.ptyp_desc,\n                ptyp_loc: {\n                  loc_start: startPos,\n                  loc_end: p.prevEndPos,\n                  loc_ghost: false\n                },\n                ptyp_attributes: List.concat({\n                      hd: attrs,\n                      tl: {\n                        hd: t.ptyp_attributes,\n                        tl: /* [] */0\n                      }\n                    })\n              };\n            }\n          }\n          break;\n      case /* Lbracket */20 :\n          typ = parsePolymorphicVariantType(attrs, p);\n          break;\n      case /* Lbrace */22 :\n          typ = parseRecordOrObjectType(attrs, p);\n          break;\n      case /* Module */65 :\n          Res_parser.next(undefined, p);\n          Res_parser.expect(undefined, /* Lparen */18, p);\n          var packageType = parsePackageType(startPos, attrs, p);\n          Res_parser.expect(undefined, /* Rparen */19, p);\n          
typ = {\n            ptyp_desc: packageType.ptyp_desc,\n            ptyp_loc: {\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            },\n            ptyp_attributes: packageType.ptyp_attributes\n          };\n          break;\n      case /* Percent */77 :\n          var extension = parseExtension(undefined, p);\n          var loc_loc_end$1 = p.prevEndPos;\n          var loc$1 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$1,\n            loc_ghost: false\n          };\n          typ = Ast_helper.Typ.extension(loc$1, attrs, extension);\n          break;\n      default:\n        exit = 1;\n    }\n  } else {\n    switch (token.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          exit = 2;\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  switch (exit) {\n    case 1 :\n        Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n        var match$3 = skipTokensAndMaybeRetry(p, Res_grammar.isAtomicTypExprStart);\n        if (match$3 !== undefined) {\n          typ = parseAtomicTypExpr(attrs, p);\n        } else {\n          Res_parser.err(p.prevEndPos, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n          typ = defaultType(undefined);\n        }\n        break;\n    case 2 :\n        var constr = parseValuePath(p);\n        var args = parseTypeConstructorArgs(constr, p);\n        typ = Ast_helper.Typ.constr({\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            }, attrs, constr, args);\n        break;\n    \n  }\n  Res_parser.eatBreadcrumb(p);\n  return typ;\n}\n\nfunction skipTokensAndMaybeRetry(p, isStartOfGrammar) {\n  if (Res_token.isKeyword(p.token) && p.prevEndPos.pos_lnum === p.startPos.pos_lnum) {\n    Res_parser.next(undefined, p);\n    return ;\n  }\n  if (shouldAbortListParse(p)) {\n    if 
(Curry._1(isStartOfGrammar, p.token)) {\n      Res_parser.next(undefined, p);\n      return Caml_option.some(undefined);\n    } else {\n      return ;\n    }\n  }\n  Res_parser.next(undefined, p);\n  var loop = function (p) {\n    while(true) {\n      if (shouldAbortListParse(p)) {\n        return ;\n      }\n      Res_parser.next(undefined, p);\n      continue ;\n    };\n  };\n  loop(p);\n  if (Curry._1(isStartOfGrammar, p.token)) {\n    return Caml_option.some(undefined);\n  }\n  \n}\n\nfunction parsePolymorphicVariantType(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lbracket */20, p);\n  var match = p.token;\n  if (typeof match === \"number\") {\n    if (match !== 41) {\n      if (match === 42) {\n        Res_parser.next(undefined, p);\n        Res_parser.optional(p, /* Bar */17);\n        var rowField = parseTagSpecFull(p);\n        var rowFields = parseTagSpecFulls(p);\n        var tagNames = parseTagNames(p);\n        var loc_loc_end = p.prevEndPos;\n        var loc = {\n          loc_start: startPos,\n          loc_end: loc_loc_end,\n          loc_ghost: false\n        };\n        var variant = Ast_helper.Typ.variant(loc, attrs, {\n              hd: rowField,\n              tl: rowFields\n            }, /* Closed */0, tagNames);\n        Res_parser.expect(undefined, /* Rbracket */21, p);\n        return variant;\n      }\n      \n    } else {\n      Res_parser.next(undefined, p);\n      var match$1 = p.token;\n      var rowFields$1;\n      var exit = 0;\n      if (typeof match$1 === \"number\") {\n        if (match$1 !== 17) {\n          if (match$1 !== 21) {\n            exit = 2;\n          } else {\n            rowFields$1 = /* [] */0;\n          }\n        } else {\n          rowFields$1 = parseTagSpecs(p);\n        }\n      } else {\n        exit = 2;\n      }\n      if (exit === 2) {\n        var rowField$1 = parseTagSpec(p);\n        rowFields$1 = {\n          hd: rowField$1,\n          tl: parseTagSpecs(p)\n        
};\n      }\n      var loc_loc_end$1 = p.prevEndPos;\n      var loc$1 = {\n        loc_start: startPos,\n        loc_end: loc_loc_end$1,\n        loc_ghost: false\n      };\n      var variant$1 = Ast_helper.Typ.variant(loc$1, attrs, rowFields$1, /* Open */1, undefined);\n      Res_parser.expect(undefined, /* Rbracket */21, p);\n      return variant$1;\n    }\n  }\n  var rowFields1 = parseTagSpecFirst(p);\n  var rowFields2 = parseTagSpecs(p);\n  var loc_loc_end$2 = p.prevEndPos;\n  var loc$2 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$2,\n    loc_ghost: false\n  };\n  var variant$2 = Ast_helper.Typ.variant(loc$2, attrs, Pervasives.$at(rowFields1, rowFields2), /* Closed */0, undefined);\n  Res_parser.expect(undefined, /* Rbracket */21, p);\n  return variant$2;\n}\n\nfunction parseTupleType(attrs, first, startPos, p) {\n  var typexprs_1 = parseCommaDelimitedRegion(p, /* TypExprList */39, /* Rparen */19, parseTypExprRegion);\n  var typexprs = {\n    hd: first,\n    tl: typexprs_1\n  };\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  if (typexprs_1) {\n    \n  } else {\n    Res_parser.err(startPos, p.prevEndPos, p, Res_diagnostics.message(tupleSingleElement));\n  }\n  var tupleLoc_loc_end = p.prevEndPos;\n  var tupleLoc = {\n    loc_start: startPos,\n    loc_end: tupleLoc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Typ.tuple(tupleLoc, attrs, typexprs);\n}\n\nfunction parseTypeParams(parent, p) {\n  var opening = p.token;\n  if (typeof opening !== \"number\") {\n    return /* [] */0;\n  }\n  if (opening !== 18 && opening !== 42) {\n    return /* [] */0;\n  }\n  if (p.startPos.pos_lnum !== p.prevEndPos.pos_lnum) {\n    return /* [] */0;\n  }\n  Res_scanner.setDiamondMode(p.scanner);\n  var openingStartPos = p.startPos;\n  Res_parser.leaveBreadcrumb(p, /* TypeParams */30);\n  Res_parser.next(undefined, p);\n  var params = parseCommaDelimitedRegion(p, /* TypeParams */30, /* GreaterThan */41, parseTypeParam);\n  var match = p.token;\n  if 
(match === 19 && opening === /* Lparen */18) {\n    var msg = Res_doc.toString(80, Res_doc.breakableGroup(true, Res_doc.concat({\n                  hd: Res_doc.text(\"Type parameters require angle brackets:\"),\n                  tl: {\n                    hd: Res_doc.indent(Res_doc.concat({\n                              hd: Res_doc.line,\n                              tl: {\n                                hd: Res_doc.concat({\n                                      hd: Res_printer.printLongident(parent.txt),\n                                      tl: {\n                                        hd: Res_printer.printTypeParams(params, Res_comments_table.empty),\n                                        tl: /* [] */0\n                                      }\n                                    }),\n                                tl: /* [] */0\n                              }\n                            })),\n                    tl: /* [] */0\n                  }\n                })));\n    Res_parser.err(openingStartPos, undefined, p, Res_diagnostics.message(msg));\n    Res_parser.next(undefined, p);\n  } else {\n    Res_parser.expect(undefined, /* GreaterThan */41, p);\n  }\n  Res_scanner.popMode(p.scanner, /* Diamond */1);\n  Res_parser.eatBreadcrumb(p);\n  return params;\n}\n\nfunction parseConstructorArgs(p) {\n  var lparen = p.startPos;\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var args = parseCommaDelimitedRegion(p, /* ExprList */12, /* Rparen */19, parseConstrainedExprRegion);\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  if (args) {\n    return args;\n  }\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: lparen,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return {\n          hd: Ast_helper.Exp.construct(loc, undefined, $$Location.mkloc({\n                    TAG: /* Lident */0,\n                    _0: \"()\"\n                  }, loc), undefined),\n          tl: /* [] */0\n        };\n}\n\nfunction 
parseParameters(p) {\n  var startPos = p.startPos;\n  var ident = p.token;\n  if (typeof ident === \"number\") {\n    switch (ident) {\n      case /* Underscore */12 :\n          Res_parser.next(undefined, p);\n          var loc_loc_end = p.prevEndPos;\n          var loc = {\n            loc_start: startPos,\n            loc_end: loc_loc_end,\n            loc_ghost: false\n          };\n          return {\n                  hd: {\n                    TAG: /* TermParameter */0,\n                    uncurried: false,\n                    attrs: /* [] */0,\n                    label: /* Nolabel */0,\n                    expr: undefined,\n                    pat: Ast_helper.Pat.any(loc, undefined, undefined),\n                    pos: startPos\n                  },\n                  tl: /* [] */0\n                };\n      case /* Lparen */18 :\n          Res_parser.next(undefined, p);\n          var match = p.token;\n          if (typeof match !== \"number\") {\n            return parseParameterList(p);\n          }\n          if (match !== 4) {\n            if (match !== 19) {\n              return parseParameterList(p);\n            }\n            Res_parser.next(undefined, p);\n            var loc_loc_end$1 = p.prevEndPos;\n            var loc$1 = {\n              loc_start: startPos,\n              loc_end: loc_loc_end$1,\n              loc_ghost: false\n            };\n            var unitPattern = Ast_helper.Pat.construct(loc$1, undefined, $$Location.mkloc({\n                      TAG: /* Lident */0,\n                      _0: \"()\"\n                    }, loc$1), undefined);\n            return {\n                    hd: {\n                      TAG: /* TermParameter */0,\n                      uncurried: false,\n                      attrs: /* [] */0,\n                      label: /* Nolabel */0,\n                      expr: undefined,\n                      pat: unitPattern,\n                      pos: startPos\n                    },\n                    
tl: /* [] */0\n                  };\n          }\n          Res_parser.next(undefined, p);\n          var match$1 = p.token;\n          if (match$1 === 19) {\n            Res_parser.next(undefined, p);\n            var loc_loc_end$2 = p.prevEndPos;\n            var loc$2 = {\n              loc_start: startPos,\n              loc_end: loc_loc_end$2,\n              loc_ghost: false\n            };\n            var unitPattern$1 = Ast_helper.Pat.construct(loc$2, undefined, $$Location.mkloc({\n                      TAG: /* Lident */0,\n                      _0: \"()\"\n                    }, loc$2), undefined);\n            return {\n                    hd: {\n                      TAG: /* TermParameter */0,\n                      uncurried: true,\n                      attrs: /* [] */0,\n                      label: /* Nolabel */0,\n                      expr: undefined,\n                      pat: unitPattern$1,\n                      pos: startPos\n                    },\n                    tl: /* [] */0\n                  };\n          }\n          var parameters = parseParameterList(p);\n          if (!parameters) {\n            return parameters;\n          }\n          var match$2 = parameters.hd;\n          if (match$2.TAG === /* TermParameter */0) {\n            return {\n                    hd: {\n                      TAG: /* TermParameter */0,\n                      uncurried: true,\n                      attrs: match$2.attrs,\n                      label: match$2.label,\n                      expr: match$2.expr,\n                      pat: match$2.pat,\n                      pos: match$2.pos\n                    },\n                    tl: parameters.tl\n                  };\n          } else {\n            return parameters;\n          }\n      default:\n        \n    }\n  } else if (ident.TAG === /* Lident */4) {\n    Res_parser.next(undefined, p);\n    var loc_loc_end$3 = p.prevEndPos;\n    var loc$3 = {\n      loc_start: startPos,\n      loc_end: 
loc_loc_end$3,\n      loc_ghost: false\n    };\n    return {\n            hd: {\n              TAG: /* TermParameter */0,\n              uncurried: false,\n              attrs: /* [] */0,\n              label: /* Nolabel */0,\n              expr: undefined,\n              pat: Ast_helper.Pat.$$var(loc$3, undefined, $$Location.mkloc(ident._0, loc$3)),\n              pos: startPos\n            },\n            tl: /* [] */0\n          };\n  }\n  Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(ident, p.breadcrumbs));\n  return /* [] */0;\n}\n\nfunction parseSignLetDesc(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.optional(p, /* Let */9);\n  var match = parseLident(p);\n  var name = $$Location.mkloc(match[0], match[1]);\n  Res_parser.expect(undefined, /* Colon */24, p);\n  var typExpr = parsePolyTypeExpr(p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Val.mk(loc, attrs, undefined, undefined, name, typExpr);\n}\n\nfunction parseCoercedExpr(expr, p) {\n  Res_parser.expect(undefined, /* ColonGreaterThan */40, p);\n  var typ = parseTypExpr(undefined, undefined, undefined, p);\n  var loc_loc_start = expr.pexp_loc.loc_start;\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.coerce(loc, undefined, expr, undefined, typ);\n}\n\nfunction parseMaybeRecModuleBinding(attrs, startPos, p) {\n  var match = p.token;\n  if (match === 11) {\n    Res_parser.next(undefined, p);\n    return Ast_helper.Str.rec_module(undefined, parseModuleBindings(attrs, startPos, p));\n  } else {\n    return Ast_helper.Str.module_(undefined, parseModuleBinding(attrs, p.startPos, p));\n  }\n}\n\nfunction parseModuleTypeImpl(attrs, startPos, p) {\n  Res_parser.expect(undefined, /* Typ */60, p);\n  var nameStart = p.startPos;\n  var ident = p.token;\n  var name;\n  
var exit = 0;\n  if (typeof ident === \"number\") {\n    exit = 2;\n  } else {\n    switch (ident.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          exit = 1;\n          break;\n      default:\n        exit = 2;\n    }\n  }\n  switch (exit) {\n    case 1 :\n        Res_parser.next(undefined, p);\n        var loc_loc_end = p.prevEndPos;\n        var loc = {\n          loc_start: nameStart,\n          loc_end: loc_loc_end,\n          loc_ghost: false\n        };\n        name = $$Location.mkloc(ident._0, loc);\n        break;\n    case 2 :\n        Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(ident));\n        name = $$Location.mknoloc(\"_\");\n        break;\n    \n  }\n  Res_parser.expect(undefined, /* Equal */14, p);\n  var moduleType = parseModuleType(undefined, undefined, p);\n  var moduleTypeDeclaration = Ast_helper.Mtd.mk({\n        loc_start: nameStart,\n        loc_end: p.prevEndPos,\n        loc_ghost: false\n      }, attrs, undefined, undefined, moduleType, name);\n  var loc_loc_end$1 = p.prevEndPos;\n  var loc$1 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$1,\n    loc_ghost: false\n  };\n  return Ast_helper.Str.modtype(loc$1, moduleTypeDeclaration);\n}\n\nfunction parseIfCondition(p) {\n  Res_parser.leaveBreadcrumb(p, /* IfCondition */17);\n  var conditionExpr = parseExpr(/* WhenExpr */2, p);\n  Res_parser.eatBreadcrumb(p);\n  return conditionExpr;\n}\n\nfunction parseIfOrIfLetExpression(p) {\n  Res_parser.beginRegion(p);\n  Res_parser.leaveBreadcrumb(p, /* ExprIf */15);\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* If */50, p);\n  var match = p.token;\n  var expr;\n  if (match === 9) {\n    Res_parser.next(undefined, p);\n    var ifLetExpr = parseIfLetExpr(startPos, p);\n    Res_parser.err(ifLetExpr.pexp_loc.loc_start, ifLetExpr.pexp_loc.loc_end, p, Res_diagnostics.message(experimentalIfLet(ifLetExpr)));\n    expr = ifLetExpr;\n  } else {\n    expr = parseIfExpr(startPos, p);\n  
}\n  Res_parser.eatBreadcrumb(p);\n  return expr;\n}\n\nfunction parseElseBranch(p) {\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var blockExpr = parseExprBlock(undefined, p);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  return blockExpr;\n}\n\nfunction parseThenBranch(p) {\n  Res_parser.leaveBreadcrumb(p, /* IfBranch */18);\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var thenExpr = parseExprBlock(undefined, p);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  Res_parser.eatBreadcrumb(p);\n  return thenExpr;\n}\n\nfunction parseRecordRowWithStringKey(p) {\n  var s = p.token;\n  if (typeof s === \"number\") {\n    return ;\n  }\n  if (s.TAG !== /* String */3) {\n    return ;\n  }\n  var loc_loc_start = p.startPos;\n  var loc_loc_end = p.endPos;\n  var loc = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  Res_parser.next(undefined, p);\n  var field = $$Location.mkloc({\n        TAG: /* Lident */0,\n        _0: s._0\n      }, loc);\n  var match = p.token;\n  if (match !== 24) {\n    return [\n            field,\n            Ast_helper.Exp.ident(field.loc, undefined, field)\n          ];\n  }\n  Res_parser.next(undefined, p);\n  var fieldExpr = parseExpr(undefined, p);\n  return [\n          field,\n          fieldExpr\n        ];\n}\n\nfunction parseModuleTypeOf(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Module */65, p);\n  Res_parser.expect(undefined, /* Typ */60, p);\n  Res_parser.expect(undefined, /* Of */66, p);\n  var moduleExpr = parseModuleExpr(p);\n  return Ast_helper.Mty.typeof_({\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            }, undefined, moduleExpr);\n}\n\nfunction parseParameter(p) {\n  if (!(p.token === /* Typ */60 || p.token === /* Tilde */48 || p.token === /* Dot */4 || Res_grammar.isPatternStart(p.token))) {\n    return ;\n  }\n  var startPos = p.startPos;\n  var uncurried = 
Res_parser.optional(p, /* Dot */4);\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  if (p.token === /* Typ */60) {\n    Res_parser.next(undefined, p);\n    var lidents = parseLidentList(p);\n    return {\n            TAG: /* TypeParameter */1,\n            uncurried: uncurried,\n            attrs: attrs,\n            locs: lidents,\n            pos: startPos\n          };\n  }\n  var match = p.token;\n  var match$1;\n  if (match === 48) {\n    Res_parser.next(undefined, p);\n    var match$2 = parseLident(p);\n    var lblName = match$2[0];\n    var propLocAttr_0 = $$Location.mkloc(\"ns.namedArgLoc\", match$2[1]);\n    var propLocAttr_1 = {\n      TAG: /* PStr */0,\n      _0: /* [] */0\n    };\n    var propLocAttr = [\n      propLocAttr_0,\n      propLocAttr_1\n    ];\n    var t = p.token;\n    var exit = 0;\n    if (typeof t === \"number\" && t < 26) {\n      switch (t) {\n        case /* As */3 :\n            Res_parser.next(undefined, p);\n            var pat = parseConstrainedPattern(p);\n            var pat_ppat_desc = pat.ppat_desc;\n            var pat_ppat_loc = pat.ppat_loc;\n            var pat_ppat_attributes = {\n              hd: propLocAttr,\n              tl: pat.ppat_attributes\n            };\n            var pat$1 = {\n              ppat_desc: pat_ppat_desc,\n              ppat_loc: pat_ppat_loc,\n              ppat_attributes: pat_ppat_attributes\n            };\n            match$1 = [\n              attrs,\n              {\n                TAG: /* Labelled */0,\n                _0: lblName\n              },\n              pat$1\n            ];\n            break;\n        case /* Open */0 :\n        case /* True */1 :\n        case /* False */2 :\n        case /* Dot */4 :\n        case /* DotDot */5 :\n        case /* DotDotDot */6 :\n        case /* Bang */7 :\n        case /* Semicolon */8 :\n        case /* Let */9 :\n        case /* And */10 :\n        case /* Rec */11 :\n        case /* Underscore */12 :\n        case 
/* SingleQuote */13 :\n        case /* EqualEqual */15 :\n        case /* EqualEqualEqual */16 :\n        case /* Bar */17 :\n        case /* Lparen */18 :\n        case /* Lbracket */20 :\n        case /* Rbracket */21 :\n        case /* Lbrace */22 :\n        case /* Rbrace */23 :\n            exit = 1;\n            break;\n        case /* Colon */24 :\n            var lblEnd = p.prevEndPos;\n            Res_parser.next(undefined, p);\n            var typ = parseTypExpr(undefined, undefined, undefined, p);\n            var loc = {\n              loc_start: startPos,\n              loc_end: lblEnd,\n              loc_ghost: false\n            };\n            var pat$2 = Ast_helper.Pat.$$var(loc, undefined, $$Location.mkloc(lblName, loc));\n            var loc_loc_end = p.prevEndPos;\n            var loc$1 = {\n              loc_start: startPos,\n              loc_end: loc_loc_end,\n              loc_ghost: false\n            };\n            var pat$3 = Ast_helper.Pat.constraint_(loc$1, {\n                  hd: propLocAttr,\n                  tl: /* [] */0\n                }, pat$2, typ);\n            match$1 = [\n              attrs,\n              {\n                TAG: /* Labelled */0,\n                _0: lblName\n              },\n              pat$3\n            ];\n            break;\n        case /* Equal */14 :\n        case /* Rparen */19 :\n        case /* Comma */25 :\n            exit = 2;\n            break;\n        \n      }\n    } else {\n      exit = 1;\n    }\n    switch (exit) {\n      case 1 :\n          Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(t, p.breadcrumbs));\n          var loc_loc_end$1 = p.prevEndPos;\n          var loc$2 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$1,\n            loc_ghost: false\n          };\n          match$1 = [\n            attrs,\n            {\n              TAG: /* Labelled */0,\n              _0: lblName\n            },\n            
Ast_helper.Pat.$$var(loc$2, undefined, $$Location.mkloc(lblName, loc$2))\n          ];\n          break;\n      case 2 :\n          var loc_loc_end$2 = p.prevEndPos;\n          var loc$3 = {\n            loc_start: startPos,\n            loc_end: loc_loc_end$2,\n            loc_ghost: false\n          };\n          match$1 = [\n            attrs,\n            {\n              TAG: /* Labelled */0,\n              _0: lblName\n            },\n            Ast_helper.Pat.$$var(loc$3, {\n                  hd: propLocAttr,\n                  tl: /* [] */0\n                }, $$Location.mkloc(lblName, loc$3))\n          ];\n          break;\n      \n    }\n  } else {\n    var pattern = parseConstrainedPattern(p);\n    var attrs$1 = List.concat({\n          hd: attrs,\n          tl: {\n            hd: pattern.ppat_attributes,\n            tl: /* [] */0\n          }\n        });\n    match$1 = [\n      /* [] */0,\n      /* Nolabel */0,\n      {\n        ppat_desc: pattern.ppat_desc,\n        ppat_loc: pattern.ppat_loc,\n        ppat_attributes: attrs$1\n      }\n    ];\n  }\n  var pat$4 = match$1[2];\n  var lbl = match$1[1];\n  var attrs$2 = match$1[0];\n  var match$3 = p.token;\n  if (match$3 !== 14) {\n    return {\n            TAG: /* TermParameter */0,\n            uncurried: uncurried,\n            attrs: attrs$2,\n            label: lbl,\n            expr: undefined,\n            pat: pat$4,\n            pos: startPos\n          };\n  }\n  Res_parser.next(undefined, p);\n  var lbl$1;\n  if (typeof lbl === \"number\") {\n    var $$var = pat$4.ppat_desc;\n    var lblName$1;\n    lblName$1 = typeof $$var === \"number\" || $$var.TAG !== /* Ppat_var */0 ? \"\" : $$var._0.txt;\n    Res_parser.err(startPos, p.prevEndPos, p, Res_diagnostics.message(missingTildeLabeledParameter(lblName$1)));\n    lbl$1 = {\n      TAG: /* Optional */1,\n      _0: lblName$1\n    };\n  } else {\n    lbl$1 = lbl.TAG === /* Labelled */0 ? 
({\n          TAG: /* Optional */1,\n          _0: lbl._0\n        }) : lbl;\n  }\n  var match$4 = p.token;\n  if (match$4 === 49) {\n    Res_parser.next(undefined, p);\n    return {\n            TAG: /* TermParameter */0,\n            uncurried: uncurried,\n            attrs: attrs$2,\n            label: lbl$1,\n            expr: undefined,\n            pat: pat$4,\n            pos: startPos\n          };\n  }\n  var expr = parseConstrainedOrCoercedExpr(p);\n  return {\n          TAG: /* TermParameter */0,\n          uncurried: uncurried,\n          attrs: attrs$2,\n          label: lbl$1,\n          expr: expr,\n          pat: pat$4,\n          pos: startPos\n        };\n}\n\nfunction parseExprBlockItem(p) {\n  var startPos = p.startPos;\n  var attrs = parseRegion(p, /* Attribute */50, parseAttribute);\n  var match = p.token;\n  if (typeof match === \"number\") {\n    if (match >= 27) {\n      if (match !== 65) {\n        if (match < 28) {\n          var extensionConstructor = parseExceptionDef(attrs, p);\n          parseNewlineOrSemicolonExprBlock(p);\n          var blockExpr = parseExprBlock(undefined, p);\n          var loc_loc_end = p.prevEndPos;\n          var loc = {\n            loc_start: startPos,\n            loc_end: loc_loc_end,\n            loc_ghost: false\n          };\n          return Ast_helper.Exp.letexception(loc, undefined, extensionConstructor, blockExpr);\n        }\n        \n      } else {\n        Res_parser.next(undefined, p);\n        var match$1 = p.token;\n        if (match$1 === 18) {\n          var expr = parseFirstClassModuleExpr(startPos, p);\n          var a = parsePrimaryExpr(expr, undefined, p);\n          var expr$1 = parseBinaryExpr(undefined, a, p, 1);\n          return parseTernaryExpr(expr$1, p);\n        }\n        var ident = p.token;\n        var name;\n        var exit = 0;\n        if (typeof ident === \"number\" || ident.TAG !== /* Uident */5) {\n          exit = 2;\n        } else {\n          var loc_loc_start = 
p.startPos;\n          var loc_loc_end$1 = p.endPos;\n          var loc$1 = {\n            loc_start: loc_loc_start,\n            loc_end: loc_loc_end$1,\n            loc_ghost: false\n          };\n          Res_parser.next(undefined, p);\n          name = $$Location.mkloc(ident._0, loc$1);\n        }\n        if (exit === 2) {\n          Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(ident));\n          name = $$Location.mknoloc(\"_\");\n        }\n        var body = parseModuleBindingBody(p);\n        parseNewlineOrSemicolonExprBlock(p);\n        var expr$2 = parseExprBlock(undefined, p);\n        var loc_loc_end$2 = p.prevEndPos;\n        var loc$2 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$2,\n          loc_ghost: false\n        };\n        return Ast_helper.Exp.letmodule(loc$2, undefined, name, body, expr$2);\n      }\n    } else if (match !== 9) {\n      if (match === 0) {\n        var od = parseOpenDescription(attrs, p);\n        parseNewlineOrSemicolonExprBlock(p);\n        var blockExpr$1 = parseExprBlock(undefined, p);\n        var loc_loc_end$3 = p.prevEndPos;\n        var loc$3 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$3,\n          loc_ghost: false\n        };\n        return Ast_helper.Exp.open_(loc$3, undefined, od.popen_override, od.popen_lid, blockExpr$1);\n      }\n      \n    } else {\n      var match$2 = parseLetBindings(attrs, p);\n      parseNewlineOrSemicolonExprBlock(p);\n      var next;\n      if (Res_grammar.isBlockExprStart(p.token)) {\n        next = parseExprBlock(undefined, p);\n      } else {\n        var loc_loc_start$1 = p.startPos;\n        var loc_loc_end$4 = p.endPos;\n        var loc$4 = {\n          loc_start: loc_loc_start$1,\n          loc_end: loc_loc_end$4,\n          loc_ghost: false\n        };\n        next = Ast_helper.Exp.construct(loc$4, undefined, $$Location.mkloc({\n                  TAG: /* Lident */0,\n                  _0: \"()\"\n                
}, loc$4), undefined);\n      }\n      var loc_loc_end$5 = p.prevEndPos;\n      var loc$5 = {\n        loc_start: startPos,\n        loc_end: loc_loc_end$5,\n        loc_ghost: false\n      };\n      return Ast_helper.Exp.let_(loc$5, undefined, match$2[0], match$2[1], next);\n    }\n  }\n  var expr$3 = parseExpr(undefined, p);\n  var e1_pexp_desc = expr$3.pexp_desc;\n  var e1_pexp_loc = expr$3.pexp_loc;\n  var e1_pexp_attributes = List.concat({\n        hd: attrs,\n        tl: {\n          hd: expr$3.pexp_attributes,\n          tl: /* [] */0\n        }\n      });\n  var e1 = {\n    pexp_desc: e1_pexp_desc,\n    pexp_loc: e1_pexp_loc,\n    pexp_attributes: e1_pexp_attributes\n  };\n  parseNewlineOrSemicolonExprBlock(p);\n  if (!Res_grammar.isBlockExprStart(p.token)) {\n    return e1;\n  }\n  var e2 = parseExprBlock(undefined, p);\n  var init = e1_pexp_loc;\n  var loc_loc_start$2 = init.loc_start;\n  var loc_loc_end$6 = e2.pexp_loc.loc_end;\n  var loc_loc_ghost = init.loc_ghost;\n  var loc$6 = {\n    loc_start: loc_loc_start$2,\n    loc_end: loc_loc_end$6,\n    loc_ghost: loc_loc_ghost\n  };\n  return Ast_helper.Exp.sequence(loc$6, undefined, e1, e2);\n}\n\nfunction parseJsxName(p) {\n  var ident = p.token;\n  var longident;\n  var exit = 0;\n  if (typeof ident === \"number\") {\n    exit = 1;\n  } else {\n    switch (ident.TAG | 0) {\n      case /* Lident */4 :\n          var identStart = p.startPos;\n          var identEnd = p.endPos;\n          Res_parser.next(undefined, p);\n          var loc = {\n            loc_start: identStart,\n            loc_end: identEnd,\n            loc_ghost: false\n          };\n          longident = $$Location.mkloc({\n                TAG: /* Lident */0,\n                _0: ident._0\n              }, loc);\n          break;\n      case /* Uident */5 :\n          var longident$1 = parseModuleLongIdent(true, p);\n          longident = $$Location.mkloc({\n                TAG: /* Ldot */1,\n                _0: longident$1.txt,\n         
       _1: \"createElement\"\n              }, longident$1.loc);\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.message(\"A jsx name must be a lowercase or uppercase name, like: div in <div /> or Navbar in <Navbar />\"));\n    longident = $$Location.mknoloc({\n          TAG: /* Lident */0,\n          _0: \"_\"\n        });\n  }\n  return Ast_helper.Exp.ident(longident.loc, undefined, longident);\n}\n\nfunction parseJsxOpeningOrSelfClosingElement(startPos, p) {\n  var jsxStartPos = p.startPos;\n  var name = parseJsxName(p);\n  var jsxProps = parseRegion(p, /* JsxAttribute */5, parseJsxProp);\n  var token = p.token;\n  var children;\n  var exit = 0;\n  if (typeof token === \"number\") {\n    if (token !== 29) {\n      if (token !== 41) {\n        exit = 1;\n      } else {\n        var childrenStartPos = p.startPos;\n        Res_scanner.setJsxMode(p.scanner);\n        Res_parser.next(undefined, p);\n        var match = parseJsxChildren(p);\n        var children$1 = match[1];\n        var spread = match[0];\n        var childrenEndPos = p.startPos;\n        var token$1 = p.token;\n        var exit$1 = 0;\n        if (typeof token$1 === \"number\") {\n          if (token$1 !== 42) {\n            if (token$1 !== 43) {\n              exit$1 = 2;\n            } else {\n              Res_parser.next(undefined, p);\n            }\n          } else {\n            Res_parser.next(undefined, p);\n            Res_parser.expect(undefined, /* Forwardslash */29, p);\n          }\n        } else {\n          exit$1 = 2;\n        }\n        if (exit$1 === 2) {\n          if (Res_grammar.isStructureItemStart(token$1)) {\n            \n          } else {\n            Res_parser.expect(undefined, /* LessThanSlash */43, p);\n          }\n        }\n        var token$2 = p.token;\n        var exit$2 = 0;\n        var exit$3 = 0;\n        if (typeof token$2 === \"number\") {\n          
exit$2 = 2;\n        } else {\n          switch (token$2.TAG | 0) {\n            case /* Lident */4 :\n            case /* Uident */5 :\n                exit$3 = 3;\n                break;\n            default:\n              exit$2 = 2;\n          }\n        }\n        if (exit$3 === 3) {\n          if (verifyJsxOpeningClosingName(p, name)) {\n            Res_parser.expect(undefined, /* GreaterThan */41, p);\n            var loc = {\n              loc_start: childrenStartPos,\n              loc_end: childrenEndPos,\n              loc_ghost: false\n            };\n            children = spread && children$1 ? children$1.hd : makeListExpression(loc, children$1, undefined);\n          } else {\n            exit$2 = 2;\n          }\n        }\n        if (exit$2 === 2) {\n          if (Res_grammar.isStructureItemStart(token$2)) {\n            var closing = \"</\" + (string_of_pexp_ident(name) + \">\");\n            var msg = Res_diagnostics.message(\"Missing \" + closing);\n            Res_parser.err(startPos, p.prevEndPos, p, msg);\n          } else {\n            var opening = \"</\" + (string_of_pexp_ident(name) + \">\");\n            var msg$1 = \"Closing jsx name should be the same as the opening name. Did you mean \" + (opening + \" ?\");\n            Res_parser.err(startPos, p.prevEndPos, p, Res_diagnostics.message(msg$1));\n            Res_parser.expect(undefined, /* GreaterThan */41, p);\n          }\n          var loc$1 = {\n            loc_start: childrenStartPos,\n            loc_end: childrenEndPos,\n            loc_ghost: false\n          };\n          children = spread && children$1 ? 
children$1.hd : makeListExpression(loc$1, children$1, undefined);\n        }\n        \n      }\n    } else {\n      var childrenStartPos$1 = p.startPos;\n      Res_parser.next(undefined, p);\n      var childrenEndPos$1 = p.startPos;\n      Res_parser.expect(undefined, /* GreaterThan */41, p);\n      var loc$2 = {\n        loc_start: childrenStartPos$1,\n        loc_end: childrenEndPos$1,\n        loc_ghost: false\n      };\n      children = makeListExpression(loc$2, /* [] */0, undefined);\n    }\n  } else {\n    exit = 1;\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n    children = makeListExpression($$Location.none, /* [] */0, undefined);\n  }\n  var jsxEndPos = p.prevEndPos;\n  var loc$3 = {\n    loc_start: jsxStartPos,\n    loc_end: jsxEndPos,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.apply(loc$3, undefined, name, List.concat({\n                  hd: jsxProps,\n                  tl: {\n                    hd: {\n                      hd: [\n                        {\n                          TAG: /* Labelled */0,\n                          _0: \"children\"\n                        },\n                        children\n                      ],\n                      tl: {\n                        hd: [\n                          /* Nolabel */0,\n                          Ast_helper.Exp.construct(undefined, undefined, $$Location.mknoloc({\n                                    TAG: /* Lident */0,\n                                    _0: \"()\"\n                                  }), undefined)\n                        ],\n                        tl: /* [] */0\n                      }\n                    },\n                    tl: /* [] */0\n                  }\n                }));\n}\n\nfunction parseJsxFragment(p) {\n  var childrenStartPos = p.startPos;\n  Res_scanner.setJsxMode(p.scanner);\n  Res_parser.expect(undefined, /* GreaterThan */41, p);\n  var match = 
parseJsxChildren(p);\n  var childrenEndPos = p.startPos;\n  Res_parser.expect(undefined, /* LessThanSlash */43, p);\n  Res_parser.expect(undefined, /* GreaterThan */41, p);\n  var loc = {\n    loc_start: childrenStartPos,\n    loc_end: childrenEndPos,\n    loc_ghost: false\n  };\n  return makeListExpression(loc, match[1], undefined);\n}\n\nfunction parseTypeConstructorDeclarationWithBar(p) {\n  var match = p.token;\n  if (match !== 17) {\n    return ;\n  }\n  var startPos = p.startPos;\n  Res_parser.next(undefined, p);\n  return parseTypeConstructorDeclaration(startPos, p);\n}\n\nfunction parsePatternMatching(p) {\n  var cases = parseDelimitedRegion(p, /* PatternMatching */22, /* Rbrace */23, parsePatternMatchCase);\n  if (cases) {\n    \n  } else {\n    Res_parser.err(p.prevEndPos, undefined, p, Res_diagnostics.message(\"Pattern matching needs at least one case\"));\n  }\n  return cases;\n}\n\nfunction parseJsxProp(p) {\n  var match = p.token;\n  if (typeof match === \"number\") {\n    if (match !== /* Question */49) {\n      return ;\n    }\n    \n  } else if (match.TAG !== /* Lident */4) {\n    return ;\n  }\n  var optional = Res_parser.optional(p, /* Question */49);\n  var match$1 = parseLident(p);\n  var loc = match$1[1];\n  var name = match$1[0];\n  var propLocAttr_0 = $$Location.mkloc(\"ns.namedArgLoc\", loc);\n  var propLocAttr_1 = {\n    TAG: /* PStr */0,\n    _0: /* [] */0\n  };\n  var propLocAttr = [\n    propLocAttr_0,\n    propLocAttr_1\n  ];\n  if (optional) {\n    return [\n            {\n              TAG: /* Optional */1,\n              _0: name\n            },\n            Ast_helper.Exp.ident(loc, {\n                  hd: propLocAttr,\n                  tl: /* [] */0\n                }, $$Location.mkloc({\n                      TAG: /* Lident */0,\n                      _0: name\n                    }, loc))\n          ];\n  }\n  var match$2 = p.token;\n  if (match$2 === 14) {\n    Res_parser.next(undefined, p);\n    var optional$1 = 
Res_parser.optional(p, /* Question */49);\n    var e = parsePrimaryExpr(parseAtomicExpr(p), undefined, p);\n    var attrExpr_pexp_desc = e.pexp_desc;\n    var attrExpr_pexp_loc = e.pexp_loc;\n    var attrExpr_pexp_attributes = {\n      hd: propLocAttr,\n      tl: e.pexp_attributes\n    };\n    var attrExpr = {\n      pexp_desc: attrExpr_pexp_desc,\n      pexp_loc: attrExpr_pexp_loc,\n      pexp_attributes: attrExpr_pexp_attributes\n    };\n    var label = optional$1 ? ({\n          TAG: /* Optional */1,\n          _0: name\n        }) : ({\n          TAG: /* Labelled */0,\n          _0: name\n        });\n    return [\n            label,\n            attrExpr\n          ];\n  }\n  var attrExpr$1 = Ast_helper.Exp.ident(loc, {\n        hd: propLocAttr,\n        tl: /* [] */0\n      }, $$Location.mkloc({\n            TAG: /* Lident */0,\n            _0: name\n          }, loc));\n  var label$1 = optional ? ({\n        TAG: /* Optional */1,\n        _0: name\n      }) : ({\n        TAG: /* Labelled */0,\n        _0: name\n      });\n  return [\n          label$1,\n          attrExpr$1\n        ];\n}\n\nfunction parseTypeConstructorArgRegion(p) {\n  while(true) {\n    if (Res_grammar.isTypExprStart(p.token)) {\n      return parseTypExpr(undefined, undefined, undefined, p);\n    }\n    if (p.token !== /* LessThan */42) {\n      return ;\n    }\n    Res_parser.next(undefined, p);\n    continue ;\n  };\n}\n\nfunction parseModuleTypeDeclaration(attrs, startPos, p) {\n  Res_parser.expect(undefined, /* Typ */60, p);\n  var ident = p.token;\n  var moduleName;\n  var exit = 0;\n  if (typeof ident === \"number\") {\n    exit = 2;\n  } else {\n    switch (ident.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          exit = 1;\n          break;\n      default:\n        exit = 2;\n    }\n  }\n  switch (exit) {\n    case 1 :\n        var loc_loc_start = p.startPos;\n        var loc_loc_end = p.endPos;\n        var loc = {\n          loc_start: loc_loc_start,\n  
        loc_end: loc_loc_end,\n          loc_ghost: false\n        };\n        Res_parser.next(undefined, p);\n        moduleName = $$Location.mkloc(ident._0, loc);\n        break;\n    case 2 :\n        Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(ident));\n        moduleName = $$Location.mknoloc(\"_\");\n        break;\n    \n  }\n  var match = p.token;\n  var typ = match === 14 ? (Res_parser.next(undefined, p), parseModuleType(undefined, undefined, p)) : undefined;\n  var moduleDecl = Ast_helper.Mtd.mk(undefined, attrs, undefined, undefined, typ, moduleName);\n  return Ast_helper.Sig.modtype({\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            }, moduleDecl);\n}\n\nfunction parseNewlineOrSemicolonSignature(p) {\n  var token = p.token;\n  if (token === 8) {\n    return Res_parser.next(undefined, p);\n  } else if (Res_grammar.isSignatureItemStart(token) && p.prevEndPos.pos_lnum >= p.startPos.pos_lnum) {\n    return Res_parser.err(p.prevEndPos, p.endPos, p, Res_diagnostics.message(\"consecutive specifications on a line must be separated by ';' or a newline\"));\n  } else {\n    return ;\n  }\n}\n\nfunction parseModuleDeclarationOrAlias(attrs, p) {\n  var startPos = p.startPos;\n  var ident = p.token;\n  var moduleName;\n  var exit = 0;\n  if (typeof ident === \"number\" || ident.TAG !== /* Uident */5) {\n    exit = 1;\n  } else {\n    var loc_loc_start = p.startPos;\n    var loc_loc_end = p.endPos;\n    var loc = {\n      loc_start: loc_loc_start,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    Res_parser.next(undefined, p);\n    moduleName = $$Location.mkloc(ident._0, loc);\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.uident(ident));\n    moduleName = $$Location.mknoloc(\"_\");\n  }\n  var token = p.token;\n  var body;\n  var exit$1 = 0;\n  if (typeof token === \"number\") {\n    if (token !== 14) {\n      if (token !== 
24) {\n        exit$1 = 1;\n      } else {\n        Res_parser.next(undefined, p);\n        body = parseModuleType(undefined, undefined, p);\n      }\n    } else {\n      Res_parser.next(undefined, p);\n      var lident = parseModuleLongIdent(false, p);\n      body = Ast_helper.Mty.alias(undefined, undefined, lident);\n    }\n  } else {\n    exit$1 = 1;\n  }\n  if (exit$1 === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n    body = defaultModuleType(undefined);\n  }\n  var loc_loc_end$1 = p.prevEndPos;\n  var loc$1 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$1,\n    loc_ghost: false\n  };\n  return Ast_helper.Md.mk(loc$1, attrs, undefined, undefined, moduleName, body);\n}\n\nfunction parseSignJsExport(attrs, p) {\n  var exportStart = p.startPos;\n  Res_parser.expect(undefined, /* Export */84, p);\n  var exportLoc_loc_end = p.prevEndPos;\n  var exportLoc = {\n    loc_start: exportStart,\n    loc_end: exportLoc_loc_end,\n    loc_ghost: false\n  };\n  var genTypeAttr_0 = $$Location.mkloc(\"genType\", exportLoc);\n  var genTypeAttr_1 = {\n    TAG: /* PStr */0,\n    _0: /* [] */0\n  };\n  var genTypeAttr = [\n    genTypeAttr_0,\n    genTypeAttr_1\n  ];\n  var attrs$1 = {\n    hd: genTypeAttr,\n    tl: attrs\n  };\n  var match = p.token;\n  if (match === 60) {\n    var ext = parseTypeDefinitionOrExtension(attrs$1, p);\n    if (ext.TAG === /* TypeDef */0) {\n      var loc_loc_end = p.prevEndPos;\n      var loc = {\n        loc_start: exportStart,\n        loc_end: loc_loc_end,\n        loc_ghost: false\n      };\n      return Ast_helper.Sig.type_(loc, ext.recFlag, ext.types);\n    }\n    var loc_loc_end$1 = p.prevEndPos;\n    var loc$1 = {\n      loc_start: exportStart,\n      loc_end: loc_loc_end$1,\n      loc_ghost: false\n    };\n    return Ast_helper.Sig.type_extension(loc$1, ext._0);\n  }\n  var valueDesc = parseSignLetDesc(attrs$1, p);\n  var loc_loc_end$2 = p.prevEndPos;\n  var loc$2 = {\n    
loc_start: exportStart,\n    loc_end: loc_loc_end$2,\n    loc_ghost: false\n  };\n  return Ast_helper.Sig.value(loc$2, valueDesc);\n}\n\nfunction parseRecModuleSpec(attrs, startPos, p) {\n  Res_parser.expect(undefined, /* Rec */11, p);\n  var first = parseRecModuleDeclaration(attrs, startPos, p);\n  var _spec = {\n    hd: first,\n    tl: /* [] */0\n  };\n  while(true) {\n    var spec = _spec;\n    var startPos$1 = p.startPos;\n    var attrs$1 = parseAttributesAndBinding(p);\n    var match = p.token;\n    if (match !== 10) {\n      return List.rev(spec);\n    }\n    Res_parser.expect(undefined, /* And */10, p);\n    var decl = parseRecModuleDeclaration(attrs$1, startPos$1, p);\n    _spec = {\n      hd: decl,\n      tl: spec\n    };\n    continue ;\n  };\n}\n\nfunction parseEs6ArrowType(attrs, p) {\n  var startPos = p.startPos;\n  var match = p.token;\n  if (match === 48) {\n    Res_parser.next(undefined, p);\n    var match$1 = parseLident(p);\n    var name = match$1[0];\n    var lblLocAttr_0 = $$Location.mkloc(\"ns.namedArgLoc\", match$1[1]);\n    var lblLocAttr_1 = {\n      TAG: /* PStr */0,\n      _0: /* [] */0\n    };\n    var lblLocAttr = [\n      lblLocAttr_0,\n      lblLocAttr_1\n    ];\n    Res_parser.expect(/* TypeExpression */20, /* Colon */24, p);\n    var typ = parseTypExpr(undefined, false, false, p);\n    var typ_ptyp_desc = typ.ptyp_desc;\n    var typ_ptyp_loc = typ.ptyp_loc;\n    var typ_ptyp_attributes = {\n      hd: lblLocAttr,\n      tl: typ.ptyp_attributes\n    };\n    var typ$1 = {\n      ptyp_desc: typ_ptyp_desc,\n      ptyp_loc: typ_ptyp_loc,\n      ptyp_attributes: typ_ptyp_attributes\n    };\n    var match$2 = p.token;\n    var arg = match$2 === 14 ? 
(Res_parser.next(undefined, p), Res_parser.expect(undefined, /* Question */49, p), {\n          TAG: /* Optional */1,\n          _0: name\n        }) : ({\n          TAG: /* Labelled */0,\n          _0: name\n        });\n    Res_parser.expect(undefined, /* EqualGreater */57, p);\n    var returnType = parseTypExpr(undefined, undefined, false, p);\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    return Ast_helper.Typ.arrow(loc, attrs, arg, typ$1, returnType);\n  }\n  var parameters = parseTypeParameters(p);\n  Res_parser.expect(undefined, /* EqualGreater */57, p);\n  var returnType$1 = parseTypExpr(undefined, undefined, false, p);\n  var endPos = p.prevEndPos;\n  var typ$2 = List.fold_right((function (param, t) {\n          var attrs = param[1];\n          var attrs$1 = param[0] ? ({\n                hd: uncurryAttr,\n                tl: attrs\n              }) : attrs;\n          return Ast_helper.Typ.arrow({\n                      loc_start: param[4],\n                      loc_end: endPos,\n                      loc_ghost: false\n                    }, attrs$1, param[2], param[3], t);\n        }), parameters, returnType$1);\n  return {\n          ptyp_desc: typ$2.ptyp_desc,\n          ptyp_loc: {\n            loc_start: startPos,\n            loc_end: p.prevEndPos,\n            loc_ghost: false\n          },\n          ptyp_attributes: List.concat({\n                hd: typ$2.ptyp_attributes,\n                tl: {\n                  hd: attrs,\n                  tl: /* [] */0\n                }\n              })\n        };\n}\n\nfunction parseJsxChildren(p) {\n  var loop = function (p, _children) {\n    while(true) {\n      var children = _children;\n      var token = p.token;\n      if (typeof token === \"number\") {\n        if (token === 43 || token === 42) {\n          if (token >= 43) {\n            Res_scanner.popMode(p.scanner, /* Jsx */0);\n            
return List.rev(children);\n          }\n          var token$1 = Res_scanner.reconsiderLessThan(p.scanner);\n          if (token$1 === /* LessThan */42) {\n            var child = parsePrimaryExpr(parseAtomicExpr(p), true, p);\n            _children = {\n              hd: child,\n              tl: children\n            };\n            continue ;\n          }\n          p.token = token$1;\n          Res_scanner.popMode(p.scanner, /* Jsx */0);\n          return List.rev(children);\n        }\n        if (token === 26) {\n          Res_scanner.popMode(p.scanner, /* Jsx */0);\n          return List.rev(children);\n        }\n        \n      }\n      if (Res_grammar.isJsxChildStart(token)) {\n        Res_scanner.popMode(p.scanner, /* Jsx */0);\n        var child$1 = parsePrimaryExpr(parseAtomicExpr(p), true, p);\n        _children = {\n          hd: child$1,\n          tl: children\n        };\n        continue ;\n      }\n      Res_scanner.popMode(p.scanner, /* Jsx */0);\n      return List.rev(children);\n    };\n  };\n  var match = p.token;\n  if (match === 6) {\n    Res_parser.next(undefined, p);\n    return [\n            true,\n            {\n              hd: parsePrimaryExpr(parseAtomicExpr(p), true, p),\n              tl: /* [] */0\n            }\n          ];\n  } else {\n    return [\n            false,\n            loop(p, /* [] */0)\n          ];\n  }\n}\n\nfunction parseConstrainedModExprRegion(p) {\n  if (Res_grammar.isModExprStart(p.token)) {\n    return parseConstrainedModExpr(p);\n  }\n  \n}\n\nfunction parsePolyVariantExpr(p) {\n  var startPos = p.startPos;\n  var match = parseHashIdent(startPos, p);\n  var ident = match[0];\n  var match$1 = p.token;\n  if (match$1 === 18 && p.prevEndPos.pos_lnum === p.startPos.pos_lnum) {\n    var lparen = p.startPos;\n    var args = parseConstructorArgs(p);\n    var rparen = p.prevEndPos;\n    var loc_paren = {\n      loc_start: lparen,\n      loc_end: rparen,\n      loc_ghost: false\n    };\n    var tail;\n    var 
exit = 0;\n    if (args) {\n      var expr = args.hd;\n      var tmp = expr.pexp_desc;\n      if (typeof tmp === \"number\") {\n        if (args.tl) {\n          exit = 2;\n        } else {\n          tail = expr;\n        }\n      } else if (tmp.TAG === /* Pexp_tuple */8) {\n        if (args.tl) {\n          exit = 2;\n        } else {\n          tail = p.mode === /* ParseForTypeChecker */0 ? expr : Ast_helper.Exp.tuple(loc_paren, undefined, args);\n        }\n      } else if (args.tl) {\n        exit = 2;\n      } else {\n        tail = expr;\n      }\n    } else {\n      tail = undefined;\n    }\n    if (exit === 2) {\n      tail = Ast_helper.Exp.tuple(loc_paren, undefined, args);\n    }\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: startPos,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    return Ast_helper.Exp.variant(loc, undefined, ident, tail);\n  }\n  var loc_loc_end$1 = p.prevEndPos;\n  var loc$1 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$1,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.variant(loc$1, undefined, ident, undefined);\n}\n\nfunction parseJsx(p) {\n  Res_parser.leaveBreadcrumb(p, /* Jsx */4);\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* LessThan */42, p);\n  var match = p.token;\n  var jsxExpr;\n  if (typeof match === \"number\") {\n    jsxExpr = match === /* GreaterThan */41 ? 
parseJsxFragment(p) : parseJsxName(p);\n  } else {\n    switch (match.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          jsxExpr = parseJsxOpeningOrSelfClosingElement(startPos, p);\n          break;\n      default:\n        jsxExpr = parseJsxName(p);\n    }\n  }\n  Res_parser.eatBreadcrumb(p);\n  return {\n          pexp_desc: jsxExpr.pexp_desc,\n          pexp_loc: jsxExpr.pexp_loc,\n          pexp_attributes: {\n            hd: jsxAttr,\n            tl: /* [] */0\n          }\n        };\n}\n\nfunction parseListExpr(startPos, p) {\n  var listExprs = parseCommaDelimitedReversedList(p, /* ListExpr */53, /* Rbrace */23, parseSpreadExprRegion);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  if (listExprs) {\n    var match = listExprs.hd;\n    if (match[0]) {\n      var exprs = List.rev(List.map((function (prim) {\n                  return prim[1];\n                }), listExprs.tl));\n      return makeListExpression(loc, exprs, match[1]);\n    }\n    \n  }\n  var exprs$1 = List.rev(List.map((function (param) {\n              if (param[0]) {\n                Res_parser.err(undefined, undefined, p, Res_diagnostics.message(listExprSpread));\n              }\n              return param[1];\n            }), listExprs));\n  return makeListExpression(loc, exprs$1, undefined);\n}\n\nfunction parseArrayExp(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lbracket */20, p);\n  var exprs = parseCommaDelimitedRegion(p, /* ExprList */12, /* Rbracket */21, (function (param) {\n          return parseNonSpreadExp(arrayExprSpread, param);\n        }));\n  Res_parser.expect(undefined, /* Rbracket */21, p);\n  return Ast_helper.Exp.array({\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            }, undefined, exprs);\n}\n\nfunction 
parseTupleExpr(first, startPos, p) {\n  var exprs_1 = parseCommaDelimitedRegion(p, /* ExprList */12, /* Rparen */19, parseConstrainedExprRegion);\n  var exprs = {\n    hd: first,\n    tl: exprs_1\n  };\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  if (exprs_1) {\n    \n  } else {\n    Res_parser.err(startPos, p.prevEndPos, p, Res_diagnostics.message(tupleSingleElement));\n  }\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.tuple(loc, undefined, exprs);\n}\n\nfunction parseBracedOrRecordExpr(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var s = p.token;\n  var exit = 0;\n  if (typeof s === \"number\") {\n    switch (s) {\n      case /* DotDotDot */6 :\n          Res_parser.next(undefined, p);\n          var spreadExpr = parseConstrainedOrCoercedExpr(p);\n          Res_parser.expect(undefined, /* Comma */25, p);\n          var expr = parseRecordExpr(startPos, Caml_option.some(spreadExpr), /* [] */0, p);\n          Res_parser.expect(undefined, /* Rbrace */23, p);\n          return expr;\n      case /* Rbrace */23 :\n          Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(/* Rbrace */23, p.breadcrumbs));\n          Res_parser.next(undefined, p);\n          var loc_loc_end = p.prevEndPos;\n          var loc = {\n            loc_start: startPos,\n            loc_end: loc_loc_end,\n            loc_ghost: false\n          };\n          var braces = makeBracesAttr(loc);\n          return Ast_helper.Exp.construct(loc, {\n                      hd: braces,\n                      tl: /* [] */0\n                    }, $$Location.mkloc({\n                          TAG: /* Lident */0,\n                          _0: \"()\"\n                        }, loc), undefined);\n      default:\n        exit = 1;\n    }\n  } else {\n    switch (s.TAG | 0) {\n      case /* String */3 :\n          var s$1 = s._0;\n  
        var s$2 = p.mode === /* ParseForTypeChecker */0 ? parseStringLiteral(s$1) : s$1;\n          var loc_loc_start = p.startPos;\n          var loc_loc_end$1 = p.endPos;\n          var loc$1 = {\n            loc_start: loc_loc_start,\n            loc_end: loc_loc_end$1,\n            loc_ghost: false\n          };\n          Res_parser.next(undefined, p);\n          var field = $$Location.mkloc({\n                TAG: /* Lident */0,\n                _0: s$2\n              }, loc$1);\n          var match = p.token;\n          if (match === 24) {\n            Res_parser.next(undefined, p);\n            var fieldExpr = parseExpr(undefined, p);\n            Res_parser.optional(p, /* Comma */25);\n            var expr$1 = parseRecordExprWithStringKeys(startPos, [\n                  field,\n                  fieldExpr\n                ], p);\n            Res_parser.expect(undefined, /* Rbrace */23, p);\n            return expr$1;\n          }\n          var tag = p.mode === /* ParseForTypeChecker */0 ? 
\"js\" : undefined;\n          var constant = Ast_helper.Exp.constant(field.loc, undefined, {\n                TAG: /* Pconst_string */2,\n                _0: s$2,\n                _1: tag\n              });\n          var a = parsePrimaryExpr(constant, undefined, p);\n          var e = parseBinaryExpr(undefined, a, p, 1);\n          var e$1 = parseTernaryExpr(e, p);\n          var match$1 = p.token;\n          var exit$1 = 0;\n          if (typeof match$1 === \"number\") {\n            if (match$1 !== 8) {\n              if (match$1 !== 23) {\n                exit$1 = 3;\n              } else {\n                Res_parser.next(undefined, p);\n                var loc_loc_end$2 = p.prevEndPos;\n                var loc$2 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$2,\n                  loc_ghost: false\n                };\n                var braces$1 = makeBracesAttr(loc$2);\n                return {\n                        pexp_desc: e$1.pexp_desc,\n                        pexp_loc: e$1.pexp_loc,\n                        pexp_attributes: {\n                          hd: braces$1,\n                          tl: e$1.pexp_attributes\n                        }\n                      };\n              }\n            } else {\n              var expr$2 = parseExprBlock(e$1, p);\n              Res_parser.expect(undefined, /* Rbrace */23, p);\n              var loc_loc_end$3 = p.prevEndPos;\n              var loc$3 = {\n                loc_start: startPos,\n                loc_end: loc_loc_end$3,\n                loc_ghost: false\n              };\n              var braces$2 = makeBracesAttr(loc$3);\n              return {\n                      pexp_desc: expr$2.pexp_desc,\n                      pexp_loc: expr$2.pexp_loc,\n                      pexp_attributes: {\n                        hd: braces$2,\n                        tl: expr$2.pexp_attributes\n                      }\n                    };\n            }\n          } else 
{\n            exit$1 = 3;\n          }\n          if (exit$1 === 3) {\n            var expr$3 = parseExprBlock(e$1, p);\n            Res_parser.expect(undefined, /* Rbrace */23, p);\n            var loc_loc_end$4 = p.prevEndPos;\n            var loc$4 = {\n              loc_start: startPos,\n              loc_end: loc_loc_end$4,\n              loc_ghost: false\n            };\n            var braces$3 = makeBracesAttr(loc$4);\n            return {\n                    pexp_desc: expr$3.pexp_desc,\n                    pexp_loc: expr$3.pexp_loc,\n                    pexp_attributes: {\n                      hd: braces$3,\n                      tl: expr$3.pexp_attributes\n                    }\n                  };\n          }\n          break;\n      case /* Lident */4 :\n      case /* Uident */5 :\n          exit = 2;\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  switch (exit) {\n    case 1 :\n        var expr$4 = parseExprBlock(undefined, p);\n        Res_parser.expect(undefined, /* Rbrace */23, p);\n        var loc_loc_end$5 = p.prevEndPos;\n        var loc$5 = {\n          loc_start: startPos,\n          loc_end: loc_loc_end$5,\n          loc_ghost: false\n        };\n        var braces$4 = makeBracesAttr(loc$5);\n        return {\n                pexp_desc: expr$4.pexp_desc,\n                pexp_loc: expr$4.pexp_loc,\n                pexp_attributes: {\n                  hd: braces$4,\n                  tl: expr$4.pexp_attributes\n                }\n              };\n    case 2 :\n        var startToken = p.token;\n        var valueOrConstructor = parseValueOrConstructor(p);\n        var pathIdent = valueOrConstructor.pexp_desc;\n        var exit$2 = 0;\n        if (typeof pathIdent === \"number\" || pathIdent.TAG !== /* Pexp_ident */0) {\n          exit$2 = 3;\n        } else {\n          var pathIdent$1 = pathIdent._0;\n          var identEndPos = p.prevEndPos;\n          var match$2 = p.token;\n          var exit$3 = 0;\n          if 
(typeof match$2 === \"number\") {\n            switch (match$2) {\n              case /* Semicolon */8 :\n                  var expr$5 = parseExprBlock(Ast_helper.Exp.ident(undefined, undefined, pathIdent$1), p);\n                  Res_parser.expect(undefined, /* Rbrace */23, p);\n                  var loc_loc_end$6 = p.prevEndPos;\n                  var loc$6 = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end$6,\n                    loc_ghost: false\n                  };\n                  var braces$5 = makeBracesAttr(loc$6);\n                  return {\n                          pexp_desc: expr$5.pexp_desc,\n                          pexp_loc: expr$5.pexp_loc,\n                          pexp_attributes: {\n                            hd: braces$5,\n                            tl: expr$5.pexp_attributes\n                          }\n                        };\n              case /* Rbrace */23 :\n                  Res_parser.next(undefined, p);\n                  var expr$6 = Ast_helper.Exp.ident(pathIdent$1.loc, undefined, pathIdent$1);\n                  var loc_loc_end$7 = p.prevEndPos;\n                  var loc$7 = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end$7,\n                    loc_ghost: false\n                  };\n                  var braces$6 = makeBracesAttr(loc$7);\n                  return {\n                          pexp_desc: expr$6.pexp_desc,\n                          pexp_loc: expr$6.pexp_loc,\n                          pexp_attributes: {\n                            hd: braces$6,\n                            tl: expr$6.pexp_attributes\n                          }\n                        };\n              case /* Colon */24 :\n                  Res_parser.next(undefined, p);\n                  var fieldExpr$1 = parseExpr(undefined, p);\n                  var match$3 = p.token;\n                  if (match$3 === 23) {\n                    
Res_parser.next(undefined, p);\n                    var loc_loc_end$8 = p.prevEndPos;\n                    var loc$8 = {\n                      loc_start: startPos,\n                      loc_end: loc_loc_end$8,\n                      loc_ghost: false\n                    };\n                    return Ast_helper.Exp.record(loc$8, undefined, {\n                                hd: [\n                                  pathIdent$1,\n                                  fieldExpr$1\n                                ],\n                                tl: /* [] */0\n                              }, undefined);\n                  }\n                  Res_parser.expect(undefined, /* Comma */25, p);\n                  var expr$7 = parseRecordExpr(startPos, undefined, {\n                        hd: [\n                          pathIdent$1,\n                          fieldExpr$1\n                        ],\n                        tl: /* [] */0\n                      }, p);\n                  Res_parser.expect(undefined, /* Rbrace */23, p);\n                  return expr$7;\n              case /* Comma */25 :\n                  Res_parser.next(undefined, p);\n                  var valueOrConstructor$1;\n                  valueOrConstructor$1 = typeof startToken === \"number\" || startToken.TAG !== /* Uident */5 ? 
valueOrConstructor : removeModuleNameFromPunnedFieldValue(valueOrConstructor);\n                  var expr$8 = parseRecordExpr(startPos, undefined, {\n                        hd: [\n                          pathIdent$1,\n                          valueOrConstructor$1\n                        ],\n                        tl: /* [] */0\n                      }, p);\n                  Res_parser.expect(undefined, /* Rbrace */23, p);\n                  return expr$8;\n              case /* EqualGreater */57 :\n                  var loc$9 = {\n                    loc_start: startPos,\n                    loc_end: identEndPos,\n                    loc_ghost: false\n                  };\n                  var ident = $$Location.mkloc(Longident.last(pathIdent$1.txt), loc$9);\n                  var a$1 = parseEs6ArrowExpression(undefined, {\n                        hd: {\n                          TAG: /* TermParameter */0,\n                          uncurried: false,\n                          attrs: /* [] */0,\n                          label: /* Nolabel */0,\n                          expr: undefined,\n                          pat: Ast_helper.Pat.$$var(undefined, undefined, ident),\n                          pos: startPos\n                        },\n                        tl: /* [] */0\n                      }, p);\n                  var e$2 = parseBinaryExpr(undefined, a$1, p, 1);\n                  var e$3 = parseTernaryExpr(e$2, p);\n                  var match$4 = p.token;\n                  var exit$4 = 0;\n                  if (typeof match$4 === \"number\") {\n                    if (match$4 !== 8) {\n                      if (match$4 !== 23) {\n                        exit$4 = 5;\n                      } else {\n                        Res_parser.next(undefined, p);\n                        var loc_loc_end$9 = p.prevEndPos;\n                        var loc$10 = {\n                          loc_start: startPos,\n                          loc_end: 
loc_loc_end$9,\n                          loc_ghost: false\n                        };\n                        var braces$7 = makeBracesAttr(loc$10);\n                        return {\n                                pexp_desc: e$3.pexp_desc,\n                                pexp_loc: e$3.pexp_loc,\n                                pexp_attributes: {\n                                  hd: braces$7,\n                                  tl: e$3.pexp_attributes\n                                }\n                              };\n                      }\n                    } else {\n                      var expr$9 = parseExprBlock(e$3, p);\n                      Res_parser.expect(undefined, /* Rbrace */23, p);\n                      var loc_loc_end$10 = p.prevEndPos;\n                      var loc$11 = {\n                        loc_start: startPos,\n                        loc_end: loc_loc_end$10,\n                        loc_ghost: false\n                      };\n                      var braces$8 = makeBracesAttr(loc$11);\n                      return {\n                              pexp_desc: expr$9.pexp_desc,\n                              pexp_loc: expr$9.pexp_loc,\n                              pexp_attributes: {\n                                hd: braces$8,\n                                tl: expr$9.pexp_attributes\n                              }\n                            };\n                    }\n                  } else {\n                    exit$4 = 5;\n                  }\n                  if (exit$4 === 5) {\n                    var expr$10 = parseExprBlock(e$3, p);\n                    Res_parser.expect(undefined, /* Rbrace */23, p);\n                    var loc_loc_end$11 = p.prevEndPos;\n                    var loc$12 = {\n                      loc_start: startPos,\n                      loc_end: loc_loc_end$11,\n                      loc_ghost: false\n                    };\n                    var braces$9 = makeBracesAttr(loc$12);\n       
             return {\n                            pexp_desc: expr$10.pexp_desc,\n                            pexp_loc: expr$10.pexp_loc,\n                            pexp_attributes: {\n                              hd: braces$9,\n                              tl: expr$10.pexp_attributes\n                            }\n                          };\n                  }\n                  break;\n              default:\n                exit$3 = 4;\n            }\n          } else {\n            if (match$2.TAG === /* Lident */4) {\n              if (p.prevEndPos.pos_lnum < p.startPos.pos_lnum) {\n                Res_parser.expect(undefined, /* Comma */25, p);\n                var expr$11 = parseRecordExpr(startPos, undefined, {\n                      hd: [\n                        pathIdent$1,\n                        valueOrConstructor\n                      ],\n                      tl: /* [] */0\n                    }, p);\n                Res_parser.expect(undefined, /* Rbrace */23, p);\n                return expr$11;\n              }\n              Res_parser.expect(undefined, /* Colon */24, p);\n              var expr$12 = parseRecordExpr(startPos, undefined, {\n                    hd: [\n                      pathIdent$1,\n                      valueOrConstructor\n                    ],\n                    tl: /* [] */0\n                  }, p);\n              Res_parser.expect(undefined, /* Rbrace */23, p);\n              return expr$12;\n            }\n            exit$3 = 4;\n          }\n          if (exit$3 === 4) {\n            Res_parser.leaveBreadcrumb(p, /* ExprBlock */10);\n            var a$2 = parsePrimaryExpr(Ast_helper.Exp.ident(pathIdent$1.loc, undefined, pathIdent$1), undefined, p);\n            var e$4 = parseBinaryExpr(undefined, a$2, p, 1);\n            var e$5 = parseTernaryExpr(e$4, p);\n            Res_parser.eatBreadcrumb(p);\n            var match$5 = p.token;\n            var exit$5 = 0;\n            if (typeof match$5 === 
\"number\") {\n              if (match$5 !== 8) {\n                if (match$5 !== 23) {\n                  exit$5 = 5;\n                } else {\n                  Res_parser.next(undefined, p);\n                  var loc_loc_end$12 = p.prevEndPos;\n                  var loc$13 = {\n                    loc_start: startPos,\n                    loc_end: loc_loc_end$12,\n                    loc_ghost: false\n                  };\n                  var braces$10 = makeBracesAttr(loc$13);\n                  return {\n                          pexp_desc: e$5.pexp_desc,\n                          pexp_loc: e$5.pexp_loc,\n                          pexp_attributes: {\n                            hd: braces$10,\n                            tl: e$5.pexp_attributes\n                          }\n                        };\n                }\n              } else {\n                var expr$13 = parseExprBlock(e$5, p);\n                Res_parser.expect(undefined, /* Rbrace */23, p);\n                var loc_loc_end$13 = p.prevEndPos;\n                var loc$14 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$13,\n                  loc_ghost: false\n                };\n                var braces$11 = makeBracesAttr(loc$14);\n                return {\n                        pexp_desc: expr$13.pexp_desc,\n                        pexp_loc: expr$13.pexp_loc,\n                        pexp_attributes: {\n                          hd: braces$11,\n                          tl: expr$13.pexp_attributes\n                        }\n                      };\n              }\n            } else {\n              exit$5 = 5;\n            }\n            if (exit$5 === 5) {\n              var expr$14 = parseExprBlock(e$5, p);\n              Res_parser.expect(undefined, /* Rbrace */23, p);\n              var loc_loc_end$14 = p.prevEndPos;\n              var loc$15 = {\n                loc_start: startPos,\n                loc_end: loc_loc_end$14,\n            
    loc_ghost: false\n              };\n              var braces$12 = makeBracesAttr(loc$15);\n              return {\n                      pexp_desc: expr$14.pexp_desc,\n                      pexp_loc: expr$14.pexp_loc,\n                      pexp_attributes: {\n                        hd: braces$12,\n                        tl: expr$14.pexp_attributes\n                      }\n                    };\n            }\n            \n          }\n          \n        }\n        if (exit$2 === 3) {\n          Res_parser.leaveBreadcrumb(p, /* ExprBlock */10);\n          var a$3 = parsePrimaryExpr(valueOrConstructor, undefined, p);\n          var e$6 = parseBinaryExpr(undefined, a$3, p, 1);\n          var e$7 = parseTernaryExpr(e$6, p);\n          Res_parser.eatBreadcrumb(p);\n          var match$6 = p.token;\n          var exit$6 = 0;\n          if (typeof match$6 === \"number\") {\n            if (match$6 !== 8) {\n              if (match$6 !== 23) {\n                exit$6 = 4;\n              } else {\n                Res_parser.next(undefined, p);\n                var loc_loc_end$15 = p.prevEndPos;\n                var loc$16 = {\n                  loc_start: startPos,\n                  loc_end: loc_loc_end$15,\n                  loc_ghost: false\n                };\n                var braces$13 = makeBracesAttr(loc$16);\n                return {\n                        pexp_desc: e$7.pexp_desc,\n                        pexp_loc: e$7.pexp_loc,\n                        pexp_attributes: {\n                          hd: braces$13,\n                          tl: e$7.pexp_attributes\n                        }\n                      };\n              }\n            } else {\n              var expr$15 = parseExprBlock(e$7, p);\n              Res_parser.expect(undefined, /* Rbrace */23, p);\n              var loc_loc_end$16 = p.prevEndPos;\n              var loc$17 = {\n                loc_start: startPos,\n                loc_end: loc_loc_end$16,\n                
loc_ghost: false\n              };\n              var braces$14 = makeBracesAttr(loc$17);\n              return {\n                      pexp_desc: expr$15.pexp_desc,\n                      pexp_loc: expr$15.pexp_loc,\n                      pexp_attributes: {\n                        hd: braces$14,\n                        tl: expr$15.pexp_attributes\n                      }\n                    };\n            }\n          } else {\n            exit$6 = 4;\n          }\n          if (exit$6 === 4) {\n            var expr$16 = parseExprBlock(e$7, p);\n            Res_parser.expect(undefined, /* Rbrace */23, p);\n            var loc_loc_end$17 = p.prevEndPos;\n            var loc$18 = {\n              loc_start: startPos,\n              loc_end: loc_loc_end$17,\n              loc_ghost: false\n            };\n            var braces$15 = makeBracesAttr(loc$18);\n            return {\n                    pexp_desc: expr$16.pexp_desc,\n                    pexp_loc: expr$16.pexp_loc,\n                    pexp_attributes: {\n                      hd: braces$15,\n                      tl: expr$16.pexp_attributes\n                    }\n                  };\n          }\n          \n        }\n        break;\n    \n  }\n}\n\nfunction parsePatternMatchCase(p) {\n  Res_parser.beginRegion(p);\n  Res_parser.leaveBreadcrumb(p, /* PatternMatchCase */23);\n  var match = p.token;\n  if (match === 17) {\n    Res_parser.next(undefined, p);\n    Res_parser.leaveBreadcrumb(p, /* Pattern */55);\n    var lhs = parsePattern(undefined, undefined, p);\n    Res_parser.eatBreadcrumb(p);\n    var guard = parsePatternGuard(p);\n    var match$1 = p.token;\n    if (match$1 === 57) {\n      Res_parser.next(undefined, p);\n    } else {\n      recoverEqualGreater(p);\n    }\n    var rhs = parseExprBlock(undefined, p);\n    Res_parser.endRegion(p);\n    Res_parser.eatBreadcrumb(p);\n    return Ast_helper.Exp.$$case(lhs, guard, rhs);\n  }\n  Res_parser.endRegion(p);\n  Res_parser.eatBreadcrumb(p);\n  
\n}\n\nfunction parseAliasPattern(attrs, pattern, p) {\n  var match = p.token;\n  if (match !== 3) {\n    return pattern;\n  }\n  Res_parser.next(undefined, p);\n  var match$1 = parseLident(p);\n  var name = $$Location.mkloc(match$1[0], match$1[1]);\n  var init = pattern.ppat_loc;\n  return Ast_helper.Pat.alias({\n              loc_start: init.loc_start,\n              loc_end: p.prevEndPos,\n              loc_ghost: init.loc_ghost\n            }, attrs, pattern, name);\n}\n\nfunction parseTuplePattern(attrs, first, startPos, p) {\n  var patterns_1 = parseCommaDelimitedRegion(p, /* PatternList */25, /* Rparen */19, parseConstrainedPatternRegion);\n  var patterns = {\n    hd: first,\n    tl: patterns_1\n  };\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  if (patterns_1) {\n    \n  } else {\n    Res_parser.err(startPos, p.prevEndPos, p, Res_diagnostics.message(tupleSingleElement));\n  }\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Pat.tuple(loc, attrs, patterns);\n}\n\nfunction parseForRest(hasOpeningParen, pattern, startPos, p) {\n  Res_parser.expect(undefined, /* In */53, p);\n  var e1 = parseExpr(undefined, p);\n  var token = p.token;\n  var direction;\n  var exit = 0;\n  if (typeof token === \"number\" || token.TAG !== /* Lident */4) {\n    exit = 1;\n  } else {\n    switch (token._0) {\n      case \"downto\" :\n          direction = /* Downto */1;\n          break;\n      case \"to\" :\n          direction = /* Upto */0;\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  if (exit === 1) {\n    Res_parser.err(undefined, undefined, p, Res_diagnostics.unexpected(token, p.breadcrumbs));\n    direction = /* Upto */0;\n  }\n  Res_parser.next(undefined, p);\n  var e2 = parseExpr(/* WhenExpr */2, p);\n  if (hasOpeningParen) {\n    Res_parser.expect(undefined, /* Rparen */19, p);\n  }\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  
var bodyExpr = parseExprBlock(undefined, p);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.for_(loc, undefined, pattern, e1, e2, direction, bodyExpr);\n}\n\nfunction parseTypeDefinitions(attrs, name, params, startPos, p) {\n  var match = parseTypeEquationAndRepresentation(p);\n  var cstrs = parseRegion(p, /* TypeConstraint */51, parseTypeConstraint);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  var typeDef = Ast_helper.Type.mk(loc, attrs, undefined, undefined, params, cstrs, match[2], match[1], match[0], {\n        txt: lidentOfPath(name.txt),\n        loc: name.loc\n      });\n  var _defs = {\n    hd: typeDef,\n    tl: /* [] */0\n  };\n  while(true) {\n    var defs = _defs;\n    var startPos$1 = p.startPos;\n    var attrs$1 = parseAttributesAndBinding(p);\n    var match$1 = p.token;\n    if (match$1 !== 10) {\n      return List.rev(defs);\n    }\n    Res_parser.next(undefined, p);\n    var match$2 = p.token;\n    var attrs$2;\n    if (typeof match$2 === \"number\" && match$2 >= 84) {\n      var exportLoc_loc_start = p.startPos;\n      var exportLoc_loc_end = p.endPos;\n      var exportLoc = {\n        loc_start: exportLoc_loc_start,\n        loc_end: exportLoc_loc_end,\n        loc_ghost: false\n      };\n      Res_parser.next(undefined, p);\n      var genTypeAttr_0 = $$Location.mkloc(\"genType\", exportLoc);\n      var genTypeAttr_1 = {\n        TAG: /* PStr */0,\n        _0: /* [] */0\n      };\n      var genTypeAttr = [\n        genTypeAttr_0,\n        genTypeAttr_1\n      ];\n      attrs$2 = {\n        hd: genTypeAttr,\n        tl: attrs$1\n      };\n    } else {\n      attrs$2 = attrs$1;\n    }\n    var typeDef$1 = parseTypeDef(attrs$2, startPos$1, p);\n    _defs = {\n      hd: typeDef$1,\n      tl: defs\n    
};\n    continue ;\n  };\n}\n\nfunction parseTypeExtension(params, attrs, name, p) {\n  Res_parser.expect(undefined, /* PlusEqual */39, p);\n  var priv = Res_parser.optional(p, /* Private */61) ? /* Private */0 : /* Public */1;\n  var constrStart = p.startPos;\n  Res_parser.optional(p, /* Bar */17);\n  var match = p.token;\n  var match$1 = match === 17 ? (Res_parser.next(undefined, p), parseConstrDef(true, p)) : parseConstrDef(true, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: constrStart,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  var first = Ast_helper.Te.constructor(loc, match$1[0], undefined, undefined, match$1[1], match$1[2]);\n  var loop = function (p, _cs) {\n    while(true) {\n      var cs = _cs;\n      var match = p.token;\n      if (match !== 17) {\n        return List.rev(cs);\n      }\n      var startPos = p.startPos;\n      Res_parser.next(undefined, p);\n      var match$1 = parseConstrDef(true, p);\n      var extConstr = Ast_helper.Te.constructor({\n            loc_start: startPos,\n            loc_end: p.prevEndPos,\n            loc_ghost: false\n          }, match$1[0], undefined, undefined, match$1[1], match$1[2]);\n      _cs = {\n        hd: extConstr,\n        tl: cs\n      };\n      continue ;\n    };\n  };\n  var constructors = loop(p, {\n        hd: first,\n        tl: /* [] */0\n      });\n  return Ast_helper.Te.mk(attrs, undefined, params, priv, name, constructors);\n}\n\nfunction parseArgument2(p, uncurried) {\n  var match = p.token;\n  if (typeof match === \"number\") {\n    if (match !== 12) {\n      if (match === 48) {\n        Res_parser.next(undefined, p);\n        var ident = p.token;\n        var exit = 0;\n        if (typeof ident === \"number\") {\n          exit = 2;\n        } else {\n          if (ident.TAG === /* Lident */4) {\n            var ident$1 = ident._0;\n            var startPos = p.startPos;\n            Res_parser.next(undefined, p);\n            var endPos = p.prevEndPos;\n   
         var loc = {\n              loc_start: startPos,\n              loc_end: endPos,\n              loc_ghost: false\n            };\n            var propLocAttr_0 = $$Location.mkloc(\"ns.namedArgLoc\", loc);\n            var propLocAttr_1 = {\n              TAG: /* PStr */0,\n              _0: /* [] */0\n            };\n            var propLocAttr = [\n              propLocAttr_0,\n              propLocAttr_1\n            ];\n            var identExpr = Ast_helper.Exp.ident(loc, {\n                  hd: propLocAttr,\n                  tl: /* [] */0\n                }, $$Location.mkloc({\n                      TAG: /* Lident */0,\n                      _0: ident$1\n                    }, loc));\n            var match$1 = p.token;\n            if (typeof match$1 !== \"number\") {\n              return [\n                      uncurried,\n                      {\n                        TAG: /* Labelled */0,\n                        _0: ident$1\n                      },\n                      identExpr\n                    ];\n            }\n            if (match$1 !== 14) {\n              if (match$1 !== 24) {\n                if (match$1 !== 49) {\n                  return [\n                          uncurried,\n                          {\n                            TAG: /* Labelled */0,\n                            _0: ident$1\n                          },\n                          identExpr\n                        ];\n                } else {\n                  Res_parser.next(undefined, p);\n                  return [\n                          uncurried,\n                          {\n                            TAG: /* Optional */1,\n                            _0: ident$1\n                          },\n                          identExpr\n                        ];\n                }\n              }\n              Res_parser.next(undefined, p);\n              var typ = parseTypExpr(undefined, undefined, undefined, p);\n              var loc_loc_end = 
p.prevEndPos;\n              var loc$1 = {\n                loc_start: startPos,\n                loc_end: loc_loc_end,\n                loc_ghost: false\n              };\n              var expr = Ast_helper.Exp.constraint_(loc$1, {\n                    hd: propLocAttr,\n                    tl: /* [] */0\n                  }, identExpr, typ);\n              return [\n                      uncurried,\n                      {\n                        TAG: /* Labelled */0,\n                        _0: ident$1\n                      },\n                      expr\n                    ];\n            }\n            Res_parser.next(undefined, p);\n            var match$2 = p.token;\n            var label = match$2 === 49 ? (Res_parser.next(undefined, p), {\n                  TAG: /* Optional */1,\n                  _0: ident$1\n                }) : ({\n                  TAG: /* Labelled */0,\n                  _0: ident$1\n                });\n            var match$3 = p.token;\n            var expr$1;\n            var exit$1 = 0;\n            if (match$3 === 12 && !isEs6ArrowExpression(false, p)) {\n              var loc_loc_start = p.startPos;\n              var loc_loc_end$1 = p.endPos;\n              var loc$2 = {\n                loc_start: loc_loc_start,\n                loc_end: loc_loc_end$1,\n                loc_ghost: false\n              };\n              Res_parser.next(undefined, p);\n              expr$1 = Ast_helper.Exp.ident(loc$2, undefined, $$Location.mkloc({\n                        TAG: /* Lident */0,\n                        _0: \"_\"\n                      }, loc$2));\n            } else {\n              exit$1 = 3;\n            }\n            if (exit$1 === 3) {\n              var expr$2 = parseConstrainedOrCoercedExpr(p);\n              expr$1 = {\n                pexp_desc: expr$2.pexp_desc,\n                pexp_loc: expr$2.pexp_loc,\n                pexp_attributes: {\n                  hd: propLocAttr,\n                  tl: 
expr$2.pexp_attributes\n                }\n              };\n            }\n            return [\n                    uncurried,\n                    label,\n                    expr$1\n                  ];\n          }\n          exit = 2;\n        }\n        if (exit === 2) {\n          Res_parser.err(undefined, undefined, p, Res_diagnostics.lident(ident));\n          return [\n                  uncurried,\n                  /* Nolabel */0,\n                  defaultExpr(undefined)\n                ];\n        }\n        \n      }\n      \n    } else if (!isEs6ArrowExpression(false, p)) {\n      var loc_loc_start$1 = p.startPos;\n      var loc_loc_end$2 = p.endPos;\n      var loc$3 = {\n        loc_start: loc_loc_start$1,\n        loc_end: loc_loc_end$2,\n        loc_ghost: false\n      };\n      Res_parser.next(undefined, p);\n      var exp = Ast_helper.Exp.ident(loc$3, undefined, $$Location.mkloc({\n                TAG: /* Lident */0,\n                _0: \"_\"\n              }, loc$3));\n      return [\n              uncurried,\n              /* Nolabel */0,\n              exp\n            ];\n    }\n    \n  }\n  return [\n          uncurried,\n          /* Nolabel */0,\n          parseConstrainedOrCoercedExpr(p)\n        ];\n}\n\nfunction parseLetBindingBody(startPos, attrs, p) {\n  Res_parser.beginRegion(p);\n  Res_parser.leaveBreadcrumb(p, /* LetBinding */24);\n  Res_parser.leaveBreadcrumb(p, /* Pattern */55);\n  var pat = parsePattern(undefined, undefined, p);\n  Res_parser.eatBreadcrumb(p);\n  var match = p.token;\n  var match$1;\n  if (match === 24) {\n    Res_parser.next(undefined, p);\n    var match$2 = p.token;\n    if (match$2 === 60) {\n      Res_parser.next(undefined, p);\n      var newtypes = parseLidentList(p);\n      Res_parser.expect(undefined, /* Dot */4, p);\n      var typ = parseTypExpr(undefined, undefined, undefined, p);\n      Res_parser.expect(undefined, /* Equal */14, p);\n      var expr = parseExpr(undefined, p);\n      var loc_loc_end 
= p.prevEndPos;\n      var loc = {\n        loc_start: startPos,\n        loc_end: loc_loc_end,\n        loc_ghost: false\n      };\n      var match$3 = wrapTypeAnnotation(loc, newtypes, typ, expr);\n      var pat$1 = Ast_helper.Pat.constraint_(loc, undefined, pat, match$3[1]);\n      match$1 = [\n        pat$1,\n        match$3[0]\n      ];\n    } else {\n      var polyType = parsePolyTypeExpr(p);\n      var init = pat.ppat_loc;\n      var loc_loc_start = init.loc_start;\n      var loc_loc_end$1 = polyType.ptyp_loc.loc_end;\n      var loc_loc_ghost = init.loc_ghost;\n      var loc$1 = {\n        loc_start: loc_loc_start,\n        loc_end: loc_loc_end$1,\n        loc_ghost: loc_loc_ghost\n      };\n      var pat$2 = Ast_helper.Pat.constraint_(loc$1, undefined, pat, polyType);\n      Res_parser.expect(undefined, /* Equal */14, p);\n      var exp = parseExpr(undefined, p);\n      var exp$1 = overParseConstrainedOrCoercedOrArrowExpression(p, exp);\n      match$1 = [\n        pat$2,\n        exp$1\n      ];\n    }\n  } else {\n    Res_parser.expect(undefined, /* Equal */14, p);\n    var exp$2 = overParseConstrainedOrCoercedOrArrowExpression(p, parseExpr(undefined, p));\n    match$1 = [\n      pat,\n      exp$2\n    ];\n  }\n  var loc_loc_end$2 = p.prevEndPos;\n  var loc$2 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$2,\n    loc_ghost: false\n  };\n  var vb = Ast_helper.Vb.mk(loc$2, attrs, undefined, undefined, match$1[0], match$1[1]);\n  Res_parser.eatBreadcrumb(p);\n  Res_parser.endRegion(p);\n  return vb;\n}\n\nfunction parseArgument(p) {\n  if (!(p.token === /* Tilde */48 || p.token === /* Dot */4 || p.token === /* Underscore */12 || Res_grammar.isExprStart(p.token))) {\n    return ;\n  }\n  var match = p.token;\n  if (match !== 4) {\n    return parseArgument2(p, false);\n  }\n  Res_parser.next(undefined, p);\n  var match$1 = p.token;\n  if (match$1 !== 19) {\n    return parseArgument2(p, true);\n  }\n  var unitExpr = Ast_helper.Exp.construct(undefined, 
undefined, $$Location.mknoloc({\n            TAG: /* Lident */0,\n            _0: \"()\"\n          }), undefined);\n  return [\n          true,\n          /* Nolabel */0,\n          unitExpr\n        ];\n}\n\nfunction parsePrimaryModExpr(p) {\n  var startPos = p.startPos;\n  var modExpr = parseAtomicModuleExpr(p);\n  var loop = function (p, _modExpr) {\n    while(true) {\n      var modExpr = _modExpr;\n      var match = p.token;\n      if (match !== 18) {\n        return modExpr;\n      }\n      if (p.prevEndPos.pos_lnum !== p.startPos.pos_lnum) {\n        return modExpr;\n      }\n      _modExpr = parseModuleApplication(p, modExpr);\n      continue ;\n    };\n  };\n  var modExpr$1 = loop(p, modExpr);\n  return {\n          pmod_desc: modExpr$1.pmod_desc,\n          pmod_loc: {\n            loc_start: startPos,\n            loc_end: p.prevEndPos,\n            loc_ghost: false\n          },\n          pmod_attributes: modExpr$1.pmod_attributes\n        };\n}\n\nfunction parseFunctorModuleExpr(p) {\n  var startPos = p.startPos;\n  var args = parseFunctorArgs(p);\n  var match = p.token;\n  var returnType = match === 24 ? (Res_parser.next(undefined, p), parseModuleType(false, undefined, p)) : undefined;\n  Res_parser.expect(undefined, /* EqualGreater */57, p);\n  var modExpr = parseModuleExpr(p);\n  var rhsModuleExpr = returnType !== undefined ? 
Ast_helper.Mod.constraint_({\n          loc_start: modExpr.pmod_loc.loc_start,\n          loc_end: returnType.pmty_loc.loc_end,\n          loc_ghost: false\n        }, undefined, modExpr, returnType) : modExpr;\n  var endPos = p.prevEndPos;\n  var modExpr$1 = List.fold_right((function (param, acc) {\n          return Ast_helper.Mod.functor_({\n                      loc_start: param[3],\n                      loc_end: endPos,\n                      loc_ghost: false\n                    }, param[0], param[1], param[2], acc);\n        }), args, rhsModuleExpr);\n  return {\n          pmod_desc: modExpr$1.pmod_desc,\n          pmod_loc: {\n            loc_start: startPos,\n            loc_end: endPos,\n            loc_ghost: false\n          },\n          pmod_attributes: modExpr$1.pmod_attributes\n        };\n}\n\nfunction parseUnaryExpr(p) {\n  var startPos = p.startPos;\n  var token = p.token;\n  var exit = 0;\n  exit = typeof token === \"number\" ? (\n      token >= 34 ? (\n          token >= 38 ? 1 : 2\n        ) : (\n          token !== 7 ? 
1 : 2\n        )\n    ) : 1;\n  switch (exit) {\n    case 1 :\n        return parsePrimaryExpr(parseAtomicExpr(p), undefined, p);\n    case 2 :\n        Res_parser.leaveBreadcrumb(p, /* ExprUnary */8);\n        var tokenEnd = p.endPos;\n        Res_parser.next(undefined, p);\n        var operand = parseUnaryExpr(p);\n        var unaryExpr = makeUnaryExpr(startPos, tokenEnd, token, operand);\n        Res_parser.eatBreadcrumb(p);\n        return unaryExpr;\n    \n  }\n}\n\nfunction parseSwitchExpression(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Switch */55, p);\n  var switchExpr = parseExpr(/* WhenExpr */2, p);\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var cases = parsePatternMatching(p);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.match_(loc, undefined, switchExpr, cases);\n}\n\nfunction parseTryExpression(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Try */82, p);\n  var expr = parseExpr(/* WhenExpr */2, p);\n  Res_parser.expect(undefined, Res_token.$$catch, p);\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var cases = parsePatternMatching(p);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Exp.try_(loc, undefined, expr, cases);\n}\n\nfunction parseWhileExpression(p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* While */54, p);\n  var expr1 = parseExpr(/* WhenExpr */2, p);\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var expr2 = parseExprBlock(undefined, p);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return 
Ast_helper.Exp.while_(loc, undefined, expr1, expr2);\n}\n\nfunction parseForExpression(p) {\n  var startPos = p.startPos;\n  Res_parser.leaveBreadcrumb(p, /* ExprFor */16);\n  Res_parser.expect(undefined, /* For */52, p);\n  Res_parser.beginRegion(p);\n  var match = p.token;\n  var forExpr;\n  if (match === 18) {\n    var lparen = p.startPos;\n    Res_parser.next(undefined, p);\n    var match$1 = p.token;\n    if (match$1 === 19) {\n      Res_parser.next(undefined, p);\n      var loc_loc_end = p.prevEndPos;\n      var loc = {\n        loc_start: lparen,\n        loc_end: loc_loc_end,\n        loc_ghost: false\n      };\n      var lid = $$Location.mkloc({\n            TAG: /* Lident */0,\n            _0: \"()\"\n          }, loc);\n      var unitPattern = Ast_helper.Pat.construct(undefined, undefined, lid, undefined);\n      forExpr = parseForRest(false, parseAliasPattern(/* [] */0, unitPattern, p), startPos, p);\n    } else {\n      Res_parser.leaveBreadcrumb(p, /* Pattern */55);\n      var pat = parsePattern(undefined, undefined, p);\n      Res_parser.eatBreadcrumb(p);\n      var match$2 = p.token;\n      if (match$2 === 25) {\n        Res_parser.next(undefined, p);\n        var tuplePattern = parseTuplePattern(/* [] */0, pat, lparen, p);\n        var pattern = parseAliasPattern(/* [] */0, tuplePattern, p);\n        forExpr = parseForRest(false, pattern, startPos, p);\n      } else {\n        forExpr = parseForRest(true, pat, startPos, p);\n      }\n    }\n  } else {\n    Res_parser.leaveBreadcrumb(p, /* Pattern */55);\n    var pat$1 = parsePattern(undefined, undefined, p);\n    Res_parser.eatBreadcrumb(p);\n    forExpr = parseForRest(false, pat$1, startPos, p);\n  }\n  Res_parser.eatBreadcrumb(p);\n  Res_parser.endRegion(p);\n  return forExpr;\n}\n\nfunction parseConstructorPatternArgs(p, constr, startPos, attrs) {\n  var lparen = p.startPos;\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var args = parseCommaDelimitedRegion(p, /* PatternList */25, /* 
Rparen */19, parseConstrainedPatternRegion);\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  var args$1;\n  var exit = 0;\n  if (args) {\n    var pat = args.hd;\n    var tmp = pat.ppat_desc;\n    if (typeof tmp === \"number\") {\n      if (args.tl) {\n        exit = 1;\n      } else {\n        args$1 = pat;\n      }\n    } else if (tmp.TAG === /* Ppat_tuple */4) {\n      if (args.tl) {\n        exit = 1;\n      } else {\n        args$1 = p.mode === /* ParseForTypeChecker */0 ? pat : Ast_helper.Pat.tuple({\n                loc_start: lparen,\n                loc_end: p.endPos,\n                loc_ghost: false\n              }, undefined, args);\n      }\n    } else if (args.tl) {\n      exit = 1;\n    } else {\n      args$1 = pat;\n    }\n  } else {\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: lparen,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    args$1 = Ast_helper.Pat.construct(loc, undefined, $$Location.mkloc({\n              TAG: /* Lident */0,\n              _0: \"()\"\n            }, loc), undefined);\n  }\n  if (exit === 1) {\n    args$1 = Ast_helper.Pat.tuple({\n          loc_start: lparen,\n          loc_end: p.endPos,\n          loc_ghost: false\n        }, undefined, args);\n  }\n  return Ast_helper.Pat.construct({\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            }, attrs, constr, args$1);\n}\n\nfunction parseRecordPattern(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var rawFields = parseCommaDelimitedReversedList(p, /* PatternRecord */27, /* Rbrace */23, parseRecordPatternItem);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  var match = rawFields && !rawFields.hd[1] ? 
[\n      rawFields.tl,\n      /* Open */1\n    ] : [\n      rawFields,\n      /* Closed */0\n    ];\n  var match$1 = List.fold_left((function (param, curr) {\n          var field = curr[1];\n          var flag = param[1];\n          var fields = param[0];\n          if (!field) {\n            return [\n                    fields,\n                    flag\n                  ];\n          }\n          var field$1 = field._0;\n          if (curr[0]) {\n            Res_parser.err(field$1[1].ppat_loc.loc_start, undefined, p, Res_diagnostics.message(recordPatternSpread));\n          }\n          return [\n                  {\n                    hd: field$1,\n                    tl: fields\n                  },\n                  flag\n                ];\n        }), [\n        /* [] */0,\n        match[1]\n      ], match[0]);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Pat.record(loc, attrs, match$1[0], match$1[1]);\n}\n\nfunction parseModulePattern(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Module */65, p);\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var uident = p.token;\n  var uident$1;\n  if (typeof uident === \"number\" || uident.TAG !== /* Uident */5) {\n    uident$1 = $$Location.mknoloc(\"_\");\n  } else {\n    var loc_loc_start = p.startPos;\n    var loc_loc_end = p.endPos;\n    var loc = {\n      loc_start: loc_loc_start,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    Res_parser.next(undefined, p);\n    uident$1 = $$Location.mkloc(uident._0, loc);\n  }\n  var match = p.token;\n  if (match === 24) {\n    var colonStart = p.startPos;\n    Res_parser.next(undefined, p);\n    var packageTypAttrs = parseRegion(p, /* Attribute */50, parseAttribute);\n    var packageType = parsePackageType(colonStart, packageTypAttrs, p);\n    Res_parser.expect(undefined, /* Rparen */19, p);\n    var loc_loc_end$1 
= p.prevEndPos;\n    var loc$1 = {\n      loc_start: startPos,\n      loc_end: loc_loc_end$1,\n      loc_ghost: false\n    };\n    var unpack = Ast_helper.Pat.unpack(uident$1.loc, undefined, uident$1);\n    return Ast_helper.Pat.constraint_(loc$1, attrs, unpack, packageType);\n  }\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  var loc_loc_end$2 = p.prevEndPos;\n  var loc$2 = {\n    loc_start: startPos,\n    loc_end: loc_loc_end$2,\n    loc_ghost: false\n  };\n  return Ast_helper.Pat.unpack(loc$2, attrs, uident$1);\n}\n\nfunction parseListPattern(startPos, attrs, p) {\n  var listPatterns = parseCommaDelimitedReversedList(p, /* PatternOcamlList */26, /* Rbrace */23, parsePatternRegion);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  var filterSpread = function (param) {\n    var pattern = param[1];\n    if (param[0]) {\n      Res_parser.err(pattern.ppat_loc.loc_start, undefined, p, Res_diagnostics.message(listPatternSpread));\n      return pattern;\n    } else {\n      return pattern;\n    }\n  };\n  if (listPatterns) {\n    var match = listPatterns.hd;\n    if (match[0]) {\n      var patterns = List.rev(List.map(filterSpread, listPatterns.tl));\n      var pat = makeListPattern(loc, patterns, match[1]);\n      return {\n              ppat_desc: pat.ppat_desc,\n              ppat_loc: loc,\n              ppat_attributes: attrs\n            };\n    }\n    \n  }\n  var patterns$1 = List.rev(List.map(filterSpread, listPatterns));\n  var pat$1 = makeListPattern(loc, patterns$1, undefined);\n  return {\n          ppat_desc: pat$1.ppat_desc,\n          ppat_loc: loc,\n          ppat_attributes: attrs\n        };\n}\n\nfunction parseVariantPatternArgs(p, ident, startPos, attrs) {\n  var lparen = p.startPos;\n  Res_parser.expect(undefined, /* Lparen */18, p);\n  var patterns = parseCommaDelimitedRegion(p, /* PatternList */25, /* 
Rparen */19, parseConstrainedPatternRegion);\n  var args;\n  var exit = 0;\n  if (patterns) {\n    var pat = patterns.hd;\n    var tmp = pat.ppat_desc;\n    if (typeof tmp === \"number\") {\n      if (patterns.tl) {\n        exit = 1;\n      } else {\n        args = pat;\n      }\n    } else if (tmp.TAG === /* Ppat_tuple */4) {\n      if (patterns.tl) {\n        exit = 1;\n      } else {\n        args = p.mode === /* ParseForTypeChecker */0 ? pat : Ast_helper.Pat.tuple({\n                loc_start: lparen,\n                loc_end: p.endPos,\n                loc_ghost: false\n              }, undefined, patterns);\n      }\n    } else if (patterns.tl) {\n      exit = 1;\n    } else {\n      args = pat;\n    }\n  } else {\n    var loc_loc_end = p.prevEndPos;\n    var loc = {\n      loc_start: lparen,\n      loc_end: loc_loc_end,\n      loc_ghost: false\n    };\n    args = Ast_helper.Pat.construct(loc, undefined, $$Location.mkloc({\n              TAG: /* Lident */0,\n              _0: \"()\"\n            }, loc), undefined);\n  }\n  if (exit === 1) {\n    args = Ast_helper.Pat.tuple({\n          loc_start: lparen,\n          loc_end: p.endPos,\n          loc_ghost: false\n        }, undefined, patterns);\n  }\n  Res_parser.expect(undefined, /* Rparen */19, p);\n  return Ast_helper.Pat.variant({\n              loc_start: startPos,\n              loc_end: p.prevEndPos,\n              loc_ghost: false\n            }, attrs, ident, args);\n}\n\nfunction parseOrPattern(pattern1, p) {\n  var _pattern1 = pattern1;\n  while(true) {\n    var pattern1$1 = _pattern1;\n    var match = p.token;\n    if (match !== 17) {\n      return pattern1$1;\n    }\n    Res_parser.next(undefined, p);\n    var pattern2 = parsePattern(undefined, false, p);\n    var init = pattern1$1.ppat_loc;\n    var loc_loc_start = init.loc_start;\n    var loc_loc_end = pattern2.ppat_loc.loc_end;\n    var loc_loc_ghost = init.loc_ghost;\n    var loc = {\n      loc_start: loc_loc_start,\n      loc_end: 
loc_loc_end,\n      loc_ghost: loc_loc_ghost\n    };\n    _pattern1 = Ast_helper.Pat.or_(loc, undefined, pattern1$1, pattern2);\n    continue ;\n  };\n}\n\nfunction parseArrayPattern(attrs, p) {\n  var startPos = p.startPos;\n  Res_parser.expect(undefined, /* Lbracket */20, p);\n  var patterns = parseCommaDelimitedRegion(p, /* PatternList */25, /* Rbracket */21, (function (param) {\n          return parseNonSpreadPattern(arrayPatternSpread, param);\n        }));\n  Res_parser.expect(undefined, /* Rbracket */21, p);\n  var loc_loc_end = p.prevEndPos;\n  var loc = {\n    loc_start: startPos,\n    loc_end: loc_loc_end,\n    loc_ghost: false\n  };\n  return Ast_helper.Pat.array(loc, attrs, patterns);\n}\n\nfunction parseJsFfiDeclarations(p) {\n  Res_parser.expect(undefined, /* Lbrace */22, p);\n  var decls = parseCommaDelimitedRegion(p, /* JsFfiImport */54, /* Rbrace */23, parseJsFfiDeclaration);\n  Res_parser.expect(undefined, /* Rbrace */23, p);\n  return decls;\n}\n\nfunction parseJsFfiScope(p) {\n  var match = p.token;\n  if (typeof match === \"number\") {\n    return /* Global */0;\n  }\n  if (match.TAG !== /* Lident */4) {\n    return /* Global */0;\n  }\n  if (match._0 !== \"from\") {\n    return /* Global */0;\n  }\n  Res_parser.next(undefined, p);\n  var s = p.token;\n  if (typeof s === \"number\") {\n    return /* Global */0;\n  }\n  switch (s.TAG | 0) {\n    case /* String */3 :\n        Res_parser.next(undefined, p);\n        return {\n                TAG: /* Module */0,\n                _0: s._0\n              };\n    case /* Lident */4 :\n    case /* Uident */5 :\n        break;\n    default:\n      return /* Global */0;\n  }\n  var value = parseIdentPath(p);\n  return {\n          TAG: /* Scope */1,\n          _0: value\n        };\n}\n\nfunction parseJsxProps(p) {\n  return parseRegion(p, /* JsxAttribute */5, parseJsxProp);\n}\n\nfunction parseTypeConstraints(p) {\n  return parseRegion(p, /* TypeConstraint */51, parseTypeConstraint);\n}\n\nfunction 
parseAttributes(p) {\n  return parseRegion(p, /* Attribute */50, parseAttribute);\n}\n\nfunction parseSpecification(p) {\n  return parseRegion(p, /* Specification */47, parseSignatureItemRegion);\n}\n\nfunction parseImplementation(p) {\n  return parseRegion(p, /* Implementation */49, parseStructureItemRegion);\n}\n\nvar Doc;\n\nvar Grammar;\n\nvar Token;\n\nvar Diagnostics;\n\nvar CommentTable;\n\nvar ResPrinter;\n\nvar Scanner;\n\nvar JsFfi;\n\nvar Parser;\n\nexport {\n  Doc ,\n  Grammar ,\n  Token ,\n  Diagnostics ,\n  CommentTable ,\n  ResPrinter ,\n  Scanner ,\n  JsFfi ,\n  Parser ,\n  mkLoc ,\n  Recover ,\n  ErrorMessages ,\n  jsxAttr ,\n  uncurryAttr ,\n  ternaryAttr ,\n  ifLetAttr ,\n  suppressFragileMatchWarningAttr ,\n  makeBracesAttr ,\n  templateLiteralAttr ,\n  getClosingToken ,\n  goToClosing ,\n  isEs6ArrowExpression ,\n  isEs6ArrowFunctor ,\n  isEs6ArrowType ,\n  buildLongident ,\n  makeInfixOperator ,\n  negateString ,\n  makeUnaryExpr ,\n  makeListExpression ,\n  makeListPattern ,\n  lidentOfPath ,\n  makeNewtypes ,\n  wrapTypeAnnotation ,\n  processUnderscoreApplication ,\n  hexValue ,\n  removeModuleNameFromPunnedFieldValue ,\n  parseStringLiteral ,\n  parseLident ,\n  parseIdent ,\n  parseHashIdent ,\n  parseValuePath ,\n  parseValuePathAfterDot ,\n  parseValuePathTail ,\n  parseModuleLongIdentTail ,\n  parseModuleLongIdent ,\n  parseIdentPath ,\n  verifyJsxOpeningClosingName ,\n  string_of_pexp_ident ,\n  parseOpenDescription ,\n  parseTemplateStringLiteral ,\n  parseConstant ,\n  parseTemplateConstant ,\n  parseCommaDelimitedRegion ,\n  parseCommaDelimitedReversedList ,\n  parseDelimitedRegion ,\n  parseRegion ,\n  parsePattern ,\n  skipTokensAndMaybeRetry ,\n  parseAliasPattern ,\n  parseOrPattern ,\n  parseNonSpreadPattern ,\n  parseConstrainedPattern ,\n  parseConstrainedPatternRegion ,\n  parseRecordPatternField ,\n  parseRecordPatternItem ,\n  parseRecordPattern ,\n  parseTuplePattern ,\n  parsePatternRegion ,\n  parseModulePattern ,\n  
parseListPattern ,\n  parseArrayPattern ,\n  parseConstructorPatternArgs ,\n  parseVariantPatternArgs ,\n  parseExpr ,\n  parseTernaryExpr ,\n  parseEs6ArrowExpression ,\n  parseParameter ,\n  parseParameterList ,\n  parseParameters ,\n  parseCoercedExpr ,\n  parseConstrainedOrCoercedExpr ,\n  parseConstrainedExprRegion ,\n  parseAtomicExpr ,\n  parseFirstClassModuleExpr ,\n  parseBracketAccess ,\n  parsePrimaryExpr ,\n  parseUnaryExpr ,\n  parseOperandExpr ,\n  parseBinaryExpr ,\n  parseTemplateExpr ,\n  overParseConstrainedOrCoercedOrArrowExpression ,\n  parseLetBindingBody ,\n  parseAttributesAndBinding ,\n  parseLetBindings ,\n  parseJsxName ,\n  parseJsxOpeningOrSelfClosingElement ,\n  parseJsx ,\n  parseJsxFragment ,\n  parseJsxProp ,\n  parseJsxProps ,\n  parseJsxChildren ,\n  parseBracedOrRecordExpr ,\n  parseRecordRowWithStringKey ,\n  parseRecordRow ,\n  parseRecordExprWithStringKeys ,\n  parseRecordExpr ,\n  parseNewlineOrSemicolonExprBlock ,\n  parseExprBlockItem ,\n  parseExprBlock ,\n  parseTryExpression ,\n  parseIfCondition ,\n  parseThenBranch ,\n  parseElseBranch ,\n  parseIfExpr ,\n  parseIfLetExpr ,\n  parseIfOrIfLetExpression ,\n  parseForRest ,\n  parseForExpression ,\n  parseWhileExpression ,\n  parsePatternGuard ,\n  parsePatternMatchCase ,\n  parsePatternMatching ,\n  parseSwitchExpression ,\n  parseArgument ,\n  parseArgument2 ,\n  parseCallExpr ,\n  parseValueOrConstructor ,\n  parsePolyVariantExpr ,\n  parseConstructorArgs ,\n  parseTupleExpr ,\n  parseSpreadExprRegion ,\n  parseListExpr ,\n  parseNonSpreadExp ,\n  parseArrayExp ,\n  parsePolyTypeExpr ,\n  parseTypeVarList ,\n  parseLidentList ,\n  parseAtomicTypExpr ,\n  parsePackageType ,\n  parsePackageConstraints ,\n  parsePackageConstraint ,\n  parseRecordOrObjectType ,\n  parseTypeAlias ,\n  parseTypeParameter ,\n  parseTypeParameters ,\n  parseEs6ArrowType ,\n  parseTypExpr ,\n  parseArrowTypeRest ,\n  parseTypExprRegion ,\n  parseTupleType ,\n  parseTypeConstructorArgRegion ,\n  
parseTypeConstructorArgs ,\n  parseStringFieldDeclaration ,\n  parseFieldDeclaration ,\n  parseFieldDeclarationRegion ,\n  parseRecordDeclaration ,\n  parseConstrDeclArgs ,\n  parseTypeConstructorDeclarationWithBar ,\n  parseTypeConstructorDeclaration ,\n  parseTypeConstructorDeclarations ,\n  parseTypeRepresentation ,\n  parseTypeParam ,\n  parseTypeParams ,\n  parseTypeConstraint ,\n  parseTypeConstraints ,\n  parseTypeEquationOrConstrDecl ,\n  parseRecordOrObjectDecl ,\n  parsePrivateEqOrRepr ,\n  parsePolymorphicVariantType ,\n  parseTagName ,\n  parseTagNames ,\n  parseTagSpecFulls ,\n  parseTagSpecFull ,\n  parseTagSpecs ,\n  parseTagSpec ,\n  parseTagSpecFirst ,\n  parsePolymorphicVariantTypeSpecHash ,\n  parsePolymorphicVariantTypeArgs ,\n  parseTypeEquationAndRepresentation ,\n  parseTypeDef ,\n  parseTypeExtension ,\n  parseTypeDefinitions ,\n  parseTypeDefinitionOrExtension ,\n  parseExternalDef ,\n  parseConstrDef ,\n  parseExceptionDef ,\n  parseNewlineOrSemicolonStructure ,\n  parseStructureItemRegion ,\n  parseJsImport ,\n  parseJsExport ,\n  parseSignJsExport ,\n  parseJsFfiScope ,\n  parseJsFfiDeclarations ,\n  parseJsFfiDeclaration ,\n  parseIncludeStatement ,\n  parseAtomicModuleExpr ,\n  parsePrimaryModExpr ,\n  parseFunctorArg ,\n  parseFunctorArgs ,\n  parseFunctorModuleExpr ,\n  parseModuleExpr ,\n  parseConstrainedModExpr ,\n  parseConstrainedModExprRegion ,\n  parseModuleApplication ,\n  parseModuleOrModuleTypeImplOrPackExpr ,\n  parseModuleTypeImpl ,\n  parseMaybeRecModuleBinding ,\n  parseModuleBinding ,\n  parseModuleBindingBody ,\n  parseModuleBindings ,\n  parseAtomicModuleType ,\n  parseFunctorModuleType ,\n  parseModuleType ,\n  parseWithConstraints ,\n  parseWithConstraint ,\n  parseModuleTypeOf ,\n  parseNewlineOrSemicolonSignature ,\n  parseSignatureItemRegion ,\n  parseRecModuleSpec ,\n  parseRecModuleDeclaration ,\n  parseModuleDeclarationOrAlias ,\n  parseModuleTypeDeclaration ,\n  parseSignLetDesc ,\n  parseAttributeId ,\n  
parsePayload ,\n  parseAttribute ,\n  parseAttributes ,\n  parseStandaloneAttribute ,\n  parseExtension ,\n  parseSpecification ,\n  parseImplementation ,\n  \n}\n/* id Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_core.res",
    "content": "module Doc = Res_doc\nmodule Grammar = Res_grammar\nmodule Token = Res_token\nmodule Diagnostics = Res_diagnostics\nmodule CommentTable = Res_comments_table\nmodule ResPrinter = Res_printer\nmodule Scanner = Res_scanner\nmodule JsFfi = Res_js_ffi\nmodule Parser = Res_parser\n\nlet mkLoc = (startLoc, endLoc) => {\n  open Location\n  {\n    loc_start: startLoc,\n    loc_end: endLoc,\n    loc_ghost: false,\n  }\n}\n\nmodule Recover = {\n  let defaultExpr = () => {\n    let id = Location.mknoloc(\"rescript.exprhole\")\n    Ast_helper.Exp.mk(Pexp_extension(id, PStr(list{})))\n  }\n\n  let defaultType = () => {\n    let id = Location.mknoloc(\"rescript.typehole\")\n    Ast_helper.Typ.extension((id, PStr(list{})))\n  }\n\n  let defaultPattern = () => {\n    let id = Location.mknoloc(\"rescript.patternhole\")\n    Ast_helper.Pat.extension((id, PStr(list{})))\n  }\n\n  let defaultModuleExpr = () => Ast_helper.Mod.structure(list{})\n  let defaultModuleType = () => Ast_helper.Mty.signature(list{})\n\n  let defaultSignatureItem = {\n    let id = Location.mknoloc(\"rescript.sigitemhole\")\n    Ast_helper.Sig.extension((id, PStr(list{})))\n  }\n\n  let recoverEqualGreater = p => {\n    Parser.expect(EqualGreater, p)\n    switch p.Parser.token {\n    | MinusGreater => Parser.next(p)\n    | _ => ()\n    }\n  }\n\n  let shouldAbortListParse = p => {\n    let rec check = breadcrumbs =>\n      switch breadcrumbs {\n      | list{} => false\n      | list{(grammar, _), ...rest} =>\n        if Grammar.isPartOfList(grammar, p.Parser.token) {\n          true\n        } else {\n          check(rest)\n        }\n      }\n\n    check(p.breadcrumbs)\n  }\n}\n\nmodule ErrorMessages = {\n  let listPatternSpread = \"List pattern matches only supports one `...` spread, at the end.\\n\\\nExplanation: a list spread at the tail is efficient, but a spread in the middle would create new list[s]; out of performance concern, our pattern matching currently guarantees to never create new 
intermediate data.\"\n\n  @live\n  let recordPatternSpread = \"Record's `...` spread is not supported in pattern matches.\\n\\\nExplanation: you can't collect a subset of a record's field into its own record, since a record needs an explicit declaration and that subset wouldn't have one.\\n\\\nSolution: you need to pull out each field you want explicitly.\"\n\n  /* let recordPatternUnderscore = \"Record patterns only support one `_`, at the end.\" */\n\n  let arrayPatternSpread = \"Array's `...` spread is not supported in pattern matches.\\n\\\nExplanation: such spread would create a subarray; out of performance concern, our pattern matching currently guarantees to never create new intermediate data.\\n\\\nSolution: if it's to validate the first few elements, use a `when` clause + Array size check + `get` checks on the current pattern. If it's to obtain a subarray, use `Array.sub` or `Belt.Array.slice`.\"\n\n  let arrayExprSpread = \"Arrays can't use the `...` spread currently. Please use `concat` or other Array helpers.\"\n\n  let recordExprSpread = \"Records can only have one `...` spread, at the beginning.\\n\\\nExplanation: since records have a known, fixed shape, a spread like `{a, ...b}` wouldn't make sense, as `b` would override every field of `a` anyway.\"\n\n  let listExprSpread = \"Lists can only have one `...` spread, and at the end.\\n\\\nExplanation: lists are singly-linked list, where a node contains a value and points to the next node. `list[a, ...bc]` efficiently creates a new item and links `bc` as its next nodes. `[...bc, a]` would be expensive, as it'd need to traverse `bc` and prepend each item to `a` one by one. We therefore disallow such syntax sugar.\\n\\\nSolution: directly use `concat`.\"\n\n  let variantIdent = \"A polymorphic variant (e.g. #id) must start with an alphabetical letter or be a number (e.g. 
#742)\"\n\n  let experimentalIfLet = expr => {\n    let switchExpr = {...expr, Parsetree.pexp_attributes: list{}}\n    Doc.concat(list{\n      Doc.text(\"If-let is currently highly experimental.\"),\n      Doc.line,\n      Doc.text(\"Use a regular `switch` with pattern matching instead:\"),\n      Doc.concat(list{\n        Doc.hardLine,\n        Doc.hardLine,\n        ResPrinter.printExpression(switchExpr, CommentTable.empty),\n      }),\n    }) |> Doc.toString(~width=80)\n  }\n\n  let typeParam = \"A type param consists of a singlequote followed by a name like `'a` or `'A`\"\n  let typeVar = \"A type variable consists of a singlequote followed by a name like `'a` or `'A`\"\n\n  let attributeWithoutNode = (attr: Parsetree.attribute) => {\n    let ({Asttypes.txt: attrName}, _) = attr\n    \"Did you forget to attach `\" ++\n    (attrName ++\n    (\"` to an item?\\n  Standalone attributes start with `@@` like: `@@\" ++ (attrName ++ \"`\")))\n  }\n\n  let typeDeclarationNameLongident = longident =>\n    \"A type declaration's name cannot contain a module access. Did you mean `\" ++\n    (Longident.last(longident) ++\n    \"`?\")\n\n  let tupleSingleElement = \"A tuple needs at least two elements\"\n\n  let missingTildeLabeledParameter = name =>\n    if name == \"\" {\n      \"A labeled parameter starts with a `~`.\"\n    } else {\n      \"A labeled parameter starts with a `~`. Did you mean: `~\" ++ (name ++ \"`?\")\n    }\n\n  let stringInterpolationInPattern = \"String interpolation is not supported in pattern matching.\"\n\n  let spreadInRecordDeclaration = \"A record type declaration doesn't support the ... spread. Only an object (with quoted field names) does.\"\n\n  let objectQuotedFieldName = name =>\n    \"An object type declaration needs quoted field names. 
Did you mean \\\"\" ++ (name ++ \"\\\"?\")\n\n  let forbiddenInlineRecordDeclaration = \"An inline record type declaration is only allowed in a variant constructor's declaration\"\n\n  let sameTypeSpread = \"You're using a ... spread without extra fields. This is the same type.\"\n\n  let polyVarIntWithSuffix = number =>\n    \"A numeric polymorphic variant cannot be followed by a letter. Did you mean `#\" ++\n    (number ++\n    \"`?\")\n}\n\nlet jsxAttr = (Location.mknoloc(\"JSX\"), Parsetree.PStr(list{}))\nlet uncurryAttr = (Location.mknoloc(\"bs\"), Parsetree.PStr(list{}))\nlet ternaryAttr = (Location.mknoloc(\"ns.ternary\"), Parsetree.PStr(list{}))\nlet ifLetAttr = (Location.mknoloc(\"ns.iflet\"), Parsetree.PStr(list{}))\nlet suppressFragileMatchWarningAttr = (\n  Location.mknoloc(\"warning\"),\n  Parsetree.PStr(list{Ast_helper.Str.eval(Ast_helper.Exp.constant(Pconst_string(\"-4\", None)))}),\n)\nlet makeBracesAttr = loc => (Location.mkloc(\"ns.braces\", loc), Parsetree.PStr(list{}))\nlet templateLiteralAttr = (Location.mknoloc(\"res.template\"), Parsetree.PStr(list{}))\n\ntype stringLiteralState =\n  | Start\n  | Backslash\n  | HexEscape\n  | DecimalEscape\n  | OctalEscape\n  | UnicodeEscape\n  | UnicodeCodePointEscape\n  | UnicodeEscapeStart\n  | EscapedLineBreak\n\ntype typDefOrExt =\n  | TypeDef({recFlag: Asttypes.rec_flag, types: list<Parsetree.type_declaration>})\n  | TypeExt(Parsetree.type_extension)\n\ntype labelledParameter =\n  | TermParameter({\n      uncurried: bool,\n      attrs: Parsetree.attributes,\n      label: Asttypes.arg_label,\n      expr: option<Parsetree.expression>,\n      pat: Parsetree.pattern,\n      pos: Lexing.position,\n    })\n  | TypeParameter({\n      uncurried: bool,\n      attrs: Parsetree.attributes,\n      locs: list<Location.loc<string>>,\n      pos: Lexing.position,\n    })\n\ntype recordPatternItem =\n  | PatUnderscore\n  | PatField((Ast_helper.lid, Parsetree.pattern))\n\ntype context =\n  | OrdinaryExpr\n  | 
TernaryTrueBranchExpr\n  | WhenExpr\n\nlet getClosingToken = x =>\n  switch x {\n  | Token.Lparen => Token.Rparen\n  | Lbrace => Rbrace\n  | Lbracket => Rbracket\n  | List => Rbrace\n  | LessThan => GreaterThan\n  | _ => assert false\n  }\n\nlet rec goToClosing = (closingToken, state) =>\n  switch (state.Parser.token, closingToken) {\n  | (Rparen, Token.Rparen) | (Rbrace, Rbrace) | (Rbracket, Rbracket) | (GreaterThan, GreaterThan) =>\n    Parser.next(state)\n    ()\n  | ((Token.Lbracket | Lparen | Lbrace | List | LessThan) as t, _) =>\n    Parser.next(state)\n    goToClosing(getClosingToken(t), state)\n    goToClosing(closingToken, state)\n  | (Rparen | Token.Rbrace | Rbracket | Eof, _) => () /* TODO: how do report errors here? */\n  | _ =>\n    Parser.next(state)\n    goToClosing(closingToken, state)\n  }\n\n/* Madness */\nlet isEs6ArrowExpression = (~inTernary, p) =>\n  Parser.lookahead(p, state =>\n    switch state.Parser.token {\n    | Lident(_) | Underscore =>\n      Parser.next(state)\n      switch state.Parser.token {\n      /* Don't think that this valid\n       * Imagine: let x = (a: int)\n       * This is a parenthesized expression with a type constraint, wait for\n       * the arrow */\n      /* | Colon when not inTernary -> true */\n      | EqualGreater => true\n      | _ => false\n      }\n    | Lparen =>\n      let prevEndPos = state.prevEndPos\n      Parser.next(state)\n      switch state.token {\n      /* arrived at `()` here */\n      | Rparen =>\n        Parser.next(state)\n        switch state.Parser.token {\n        /* arrived at `() :` here */\n        | Colon if !inTernary =>\n          Parser.next(state)\n          switch state.Parser.token {\n          /* arrived at `() :typ` here */\n          | Lident(_) =>\n            Parser.next(state)\n            switch state.Parser.token {\n            /* arrived at `() :typ<` here */\n            | LessThan =>\n              Parser.next(state)\n              goToClosing(GreaterThan, state)\n         
   | _ => ()\n            }\n            switch state.Parser.token {\n            /* arrived at `() :typ =>` or `() :typ<'a,'b> =>` here */\n            | EqualGreater => true\n            | _ => false\n            }\n          | _ => true\n          }\n        | EqualGreater => true\n        | _ => false\n        }\n      | Dot /* uncurried */ => true\n      | Tilde => true\n      | Backtick => false /* (` always indicates the start of an expr, can't be es6 parameter */\n      | _ =>\n        goToClosing(Rparen, state)\n        switch state.Parser.token {\n        | EqualGreater => true\n        /* | Lbrace TODO: detect missing =>, is this possible? */\n        | Colon if !inTernary => true\n        | Rparen => /* imagine having something as :\n           * switch colour {\n           * | Red\n           *    when l == l'\n           *    || (&Clflags.classic && (l == Nolabel && !is_optional(l'))) => (t1, t2)\n           * We'll arrive at the outer rparen just before the =>.\n           * This is not an es6 arrow.\n           * */\n          false\n        | _ =>\n          Parser.nextUnsafe(state)\n          /* error recovery, peek at the next token,\n           * (elements, providerId] => {\n           *  in the example above, we have an unbalanced ] here\n           */\n          switch state.Parser.token {\n          | EqualGreater if state.startPos.pos_lnum === prevEndPos.pos_lnum => true\n          | _ => false\n          }\n        }\n      }\n    | _ => false\n    }\n  )\n\nlet isEs6ArrowFunctor = p =>\n  Parser.lookahead(p, state =>\n    switch state.Parser.token {\n    /* | Uident _ | Underscore -> */\n    /* Parser.next state; */\n    /* begin match state.Parser.token with */\n    /* | EqualGreater -> true */\n    /* | _ -> false */\n    /* end */\n    | Lparen =>\n      Parser.next(state)\n      switch state.token {\n      | Rparen =>\n        Parser.next(state)\n        switch state.token {\n        | Colon | EqualGreater => true\n        | _ => 
false\n        }\n      | _ =>\n        goToClosing(Rparen, state)\n        switch state.Parser.token {\n        | EqualGreater | Lbrace => true\n        | Colon => true\n        | _ => false\n        }\n      }\n    | _ => false\n    }\n  )\n\nlet isEs6ArrowType = p =>\n  Parser.lookahead(p, state =>\n    switch state.Parser.token {\n    | Lparen =>\n      Parser.next(state)\n      switch state.Parser.token {\n      | Rparen =>\n        Parser.next(state)\n        switch state.Parser.token {\n        | EqualGreater => true\n        | _ => false\n        }\n      | Tilde | Dot => true\n      | _ =>\n        goToClosing(Rparen, state)\n        switch state.Parser.token {\n        | EqualGreater => true\n        | _ => false\n        }\n      }\n    | Tilde => true\n    | _ => false\n    }\n  )\n\nlet buildLongident = words =>\n  switch List.rev(words) {\n  | list{} => assert false\n  | list{hd, ...tl} => List.fold_left((p, s) => Longident.Ldot(p, s), Lident(hd), tl)\n  }\n\nlet makeInfixOperator = (p, token, startPos, endPos) => {\n  let stringifiedToken = if token == Token.MinusGreater {\n    \"|.\"\n  } else if token == Token.PlusPlus {\n    \"^\"\n  } else if token == Token.BangEqual {\n    \"<>\"\n  } else if token == Token.BangEqualEqual {\n    \"!=\"\n  } else if token == Token.Equal {\n    /* TODO: could have a totally different meaning like x->fooSet(y) */\n    Parser.err(~startPos, ~endPos, p, Diagnostics.message(\"Did you mean `==` here?\"))\n    \"=\"\n  } else if token == Token.EqualEqual {\n    \"=\"\n  } else if token == Token.EqualEqualEqual {\n    \"==\"\n  } else {\n    Token.toString(token)\n  }\n\n  let loc = mkLoc(startPos, endPos)\n  let operator = Location.mkloc(Longident.Lident(stringifiedToken), loc)\n\n  Ast_helper.Exp.ident(~loc, operator)\n}\n\nlet negateString = s =>\n  if String.length(s) > 0 && @doesNotRaise String.get(s, 0) == '-' {\n    (@doesNotRaise String.sub)(s, 1, String.length(s) - 1)\n  } else {\n    \"-\" ++ s\n  }\n\nlet 
makeUnaryExpr = (startPos, tokenEnd, token, operand) =>\n  switch (token, operand.Parsetree.pexp_desc) {\n  | (Token.Plus | PlusDot, Pexp_constant(Pconst_integer(_) | Pconst_float(_))) => operand\n  | (Minus, Pexp_constant(Pconst_integer(n, m))) => {\n      ...operand,\n      pexp_desc: Pexp_constant(Pconst_integer(negateString(n), m)),\n    }\n  | (Minus | MinusDot, Pexp_constant(Pconst_float(n, m))) => {\n      ...operand,\n      pexp_desc: Pexp_constant(Pconst_float(negateString(n), m)),\n    }\n  | (Token.Plus | PlusDot | Minus | MinusDot, _) =>\n    let tokenLoc = mkLoc(startPos, tokenEnd)\n    let operator = \"~\" ++ Token.toString(token)\n    Ast_helper.Exp.apply(\n      ~loc=mkLoc(startPos, operand.Parsetree.pexp_loc.loc_end),\n      Ast_helper.Exp.ident(~loc=tokenLoc, Location.mkloc(Longident.Lident(operator), tokenLoc)),\n      list{(Nolabel, operand)},\n    )\n  | (Token.Bang, _) =>\n    let tokenLoc = mkLoc(startPos, tokenEnd)\n    Ast_helper.Exp.apply(\n      ~loc=mkLoc(startPos, operand.Parsetree.pexp_loc.loc_end),\n      Ast_helper.Exp.ident(~loc=tokenLoc, Location.mkloc(Longident.Lident(\"not\"), tokenLoc)),\n      list{(Nolabel, operand)},\n    )\n  | _ => operand\n  }\n\nlet makeListExpression = (loc, seq, extOpt) => {\n  let rec handleSeq = x =>\n    switch x {\n    | list{} =>\n      switch extOpt {\n      | Some(ext) => ext\n      | None =>\n        let loc = {...loc, Location.loc_ghost: true}\n        let nil = Location.mkloc(Longident.Lident(\"[]\"), loc)\n        Ast_helper.Exp.construct(~loc, nil, None)\n      }\n    | list{e1, ...el} =>\n      let exp_el = handleSeq(el)\n      let loc = mkLoc(e1.Parsetree.pexp_loc.Location.loc_start, exp_el.pexp_loc.loc_end)\n\n      let arg = Ast_helper.Exp.tuple(~loc, list{e1, exp_el})\n      Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident(\"::\"), loc), Some(arg))\n    }\n\n  let expr = handleSeq(seq)\n  {...expr, pexp_loc: loc}\n}\n\nlet makeListPattern = (loc, seq, ext_opt) => {\n  let 
rec handle_seq = x =>\n    switch x {\n    | list{} =>\n      let base_case = switch ext_opt {\n      | Some(ext) => ext\n      | None =>\n        let loc = {...loc, Location.loc_ghost: true}\n        let nil = {Location.txt: Longident.Lident(\"[]\"), loc: loc}\n        Ast_helper.Pat.construct(~loc, nil, None)\n      }\n\n      base_case\n    | list{p1, ...pl} =>\n      let pat_pl = handle_seq(pl)\n      let loc = mkLoc(p1.Parsetree.ppat_loc.loc_start, pat_pl.ppat_loc.loc_end)\n      let arg = Ast_helper.Pat.mk(~loc, Ppat_tuple(list{p1, pat_pl}))\n      Ast_helper.Pat.mk(\n        ~loc,\n        Ppat_construct(Location.mkloc(Longident.Lident(\"::\"), loc), Some(arg)),\n      )\n    }\n\n  handle_seq(seq)\n}\n\n/* TODO: diagnostic reporting */\nlet lidentOfPath = longident =>\n  switch Longident.flatten(longident) |> List.rev {\n  | list{} => \"\"\n  | list{ident, ..._} => ident\n  }\n\nlet makeNewtypes = (~attrs, ~loc, newtypes, exp) => {\n  let expr = List.fold_right(\n    (newtype, exp) => Ast_helper.Exp.mk(~loc, Pexp_newtype(newtype, exp)),\n    newtypes,\n    exp,\n  )\n  {...expr, pexp_attributes: attrs}\n}\n\n/* locally abstract types syntax sugar\n * Transforms\n *  let f: type t u v. = (foo : list</t, u, v/>) => ...\n * into\n *  let f = (type t u v. foo : list</t, u, v/>) => ...\n */\nlet wrapTypeAnnotation = (~loc, newtypes, core_type, body) => {\n  let exp = makeNewtypes(\n    ~attrs=list{},\n    ~loc,\n    newtypes,\n    Ast_helper.Exp.constraint_(~loc, body, core_type),\n  )\n\n  let typ = Ast_helper.Typ.poly(\n    ~loc,\n    newtypes,\n    Ast_helper.Typ.varify_constructors(newtypes, core_type),\n  )\n\n  (exp, typ)\n}\n\n@ocaml.doc(\"\n  * process the occurrence of _ in the arguments of a function application\n  * replace _ with a new variable, currently __x, in the arguments\n  * return a wrapping function that wraps ((__x) => ...) around an expression\n  * e.g. 
foo(_, 3) becomes (__x) => foo(__x, 3)\n  \")\nlet processUnderscoreApplication = args => {\n  let exp_question = ref(None)\n  let hidden_var = \"__x\"\n  let check_arg = ((lab, exp) as arg) =>\n    switch exp.Parsetree.pexp_desc {\n    | Pexp_ident({txt: Lident(\"_\")} as id) =>\n      let new_id = Location.mkloc(Longident.Lident(hidden_var), id.loc)\n      let new_exp = Ast_helper.Exp.mk(Pexp_ident(new_id), ~loc=exp.pexp_loc)\n      exp_question := Some(new_exp)\n      (lab, new_exp)\n    | _ => arg\n    }\n\n  let args = List.map(check_arg, args)\n  let wrap = exp_apply =>\n    switch exp_question.contents {\n    | Some({pexp_loc: loc}) =>\n      let pattern = Ast_helper.Pat.mk(Ppat_var(Location.mkloc(hidden_var, loc)), ~loc)\n      Ast_helper.Exp.mk(Pexp_fun(Nolabel, None, pattern, exp_apply), ~loc)\n    | None => exp_apply\n    }\n\n  (args, wrap)\n}\n\nlet hexValue = ch =>\n  switch ch {\n  | '0' .. '9' => Char.code(ch) - 48\n  | 'a' .. 'f' => Char.code(ch) - Char.code('a') + 10\n  | 'A' .. 'F' => Char.code(ch) + 32 - Char.code('a') + 10\n  | _ => 16\n  } /* larger than any legal value */\n\n/* Transform A.a into a. For use with punned record fields as in {A.a, b}. 
*/\nlet removeModuleNameFromPunnedFieldValue = exp =>\n  switch exp.Parsetree.pexp_desc {\n  | Pexp_ident(pathIdent) => {\n      ...exp,\n      pexp_desc: Pexp_ident({...pathIdent, txt: Lident(Longident.last(pathIdent.txt))}),\n    }\n  | _ => exp\n  }\n\nlet parseStringLiteral = s => {\n  let len = String.length(s)\n  let b = Buffer.create(String.length(s))\n\n  let rec parse = (state, i, d) =>\n    if i == len {\n      switch state {\n      | HexEscape | DecimalEscape | OctalEscape | UnicodeEscape | UnicodeCodePointEscape => false\n      | _ => true\n      }\n    } else {\n      let c = String.unsafe_get(s, i)\n      switch state {\n      | Start =>\n        switch c {\n        | '\\\\' => parse(Backslash, i + 1, d)\n        | c =>\n          Buffer.add_char(b, c)\n          parse(Start, i + 1, d)\n        }\n      | Backslash =>\n        switch c {\n        | 'n' =>\n          Buffer.add_char(b, '\\n')\n          parse(Start, i + 1, d)\n        | 'r' =>\n          Buffer.add_char(b, '\\r')\n          parse(Start, i + 1, d)\n        | 'b' =>\n          Buffer.add_char(b, '\\b')\n          parse(Start, i + 1, d)\n        | 't' =>\n          Buffer.add_char(b, '\\t')\n          parse(Start, i + 1, d)\n        | ('\\\\' | ' ' | '\\'' | '\"') as c =>\n          Buffer.add_char(b, c)\n          parse(Start, i + 1, d)\n        | 'x' => parse(HexEscape, i + 1, 0)\n        | 'o' => parse(OctalEscape, i + 1, 0)\n        | 'u' => parse(UnicodeEscapeStart, i + 1, 0)\n        | '0' .. 
'9' => parse(DecimalEscape, i, 0)\n        | '\\n' | '\\r' => parse(EscapedLineBreak, i + 1, d)\n        | c =>\n          Buffer.add_char(b, '\\\\')\n          Buffer.add_char(b, c)\n          parse(Start, i + 1, d)\n        }\n      | HexEscape =>\n        if d === 1 {\n          let c0 = String.unsafe_get(s, i - 1)\n          let c1 = String.unsafe_get(s, i)\n          let c = 16 * hexValue(c0) + hexValue(c1)\n          if c < 0 || c > 255 {\n            false\n          } else {\n            Buffer.add_char(b, Char.unsafe_chr(c))\n            parse(Start, i + 1, 0)\n          }\n        } else {\n          parse(HexEscape, i + 1, d + 1)\n        }\n      | DecimalEscape =>\n        if d === 2 {\n          let c0 = String.unsafe_get(s, i - 2)\n          let c1 = String.unsafe_get(s, i - 1)\n          let c2 = String.unsafe_get(s, i)\n          let c = 100 * (Char.code(c0) - 48) + 10 * (Char.code(c1) - 48) + (Char.code(c2) - 48)\n          if c < 0 || c > 255 {\n            false\n          } else {\n            Buffer.add_char(b, Char.unsafe_chr(c))\n            parse(Start, i + 1, 0)\n          }\n        } else {\n          parse(DecimalEscape, i + 1, d + 1)\n        }\n      | OctalEscape =>\n        if d === 2 {\n          let c0 = String.unsafe_get(s, i - 2)\n          let c1 = String.unsafe_get(s, i - 1)\n          let c2 = String.unsafe_get(s, i)\n          let c = 64 * (Char.code(c0) - 48) + 8 * (Char.code(c1) - 48) + (Char.code(c2) - 48)\n          if c < 0 || c > 255 {\n            false\n          } else {\n            Buffer.add_char(b, Char.unsafe_chr(c))\n            parse(Start, i + 1, 0)\n          }\n        } else {\n          parse(OctalEscape, i + 1, d + 1)\n        }\n      | UnicodeEscapeStart =>\n        switch c {\n        | '{' => parse(UnicodeCodePointEscape, i + 1, 0)\n        | _ => parse(UnicodeEscape, i + 1, 1)\n        }\n      | UnicodeEscape =>\n        if d === 3 {\n          let c0 = String.unsafe_get(s, i - 3)\n          let 
c1 = String.unsafe_get(s, i - 2)\n          let c2 = String.unsafe_get(s, i - 1)\n          let c3 = String.unsafe_get(s, i)\n          let c = 4096 * hexValue(c0) + 256 * hexValue(c1) + 16 * hexValue(c2) + hexValue(c3)\n          if Res_utf8.isValidCodePoint(c) {\n            let codePoint = Res_utf8.encodeCodePoint(c)\n            Buffer.add_string(b, codePoint)\n            parse(Start, i + 1, 0)\n          } else {\n            false\n          }\n        } else {\n          parse(UnicodeEscape, i + 1, d + 1)\n        }\n      | UnicodeCodePointEscape =>\n        switch c {\n        | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' => parse(UnicodeCodePointEscape, i + 1, d + 1)\n        | '}' =>\n          let x = ref(0)\n          for remaining in d downto 1 {\n            let ix = i - remaining\n            x := x.contents * 16 + hexValue(String.unsafe_get(s, ix))\n          }\n          let c = x.contents\n          if Res_utf8.isValidCodePoint(c) {\n            let codePoint = Res_utf8.encodeCodePoint(x.contents)\n            Buffer.add_string(b, codePoint)\n            parse(Start, i + 1, 0)\n          } else {\n            false\n          }\n        | _ => false\n        }\n      | EscapedLineBreak =>\n        switch c {\n        | ' ' | '\\t' => parse(EscapedLineBreak, i + 1, d)\n        | c =>\n          Buffer.add_char(b, c)\n          parse(Start, i + 1, d)\n        }\n      }\n    }\n\n  if parse(Start, 0, 0) {\n    Buffer.contents(b)\n  } else {\n    s\n  }\n}\n\nlet rec parseLident = p => {\n  let recoverLident = p =>\n    if Token.isKeyword(p.Parser.token) && p.Parser.prevEndPos.pos_lnum === p.startPos.pos_lnum {\n      Parser.err(p, Diagnostics.lident(p.Parser.token))\n      Parser.next(p)\n      None\n    } else {\n      let rec loop = p =>\n        if !Recover.shouldAbortListParse(p) {\n          Parser.next(p)\n          loop(p)\n        }\n\n      Parser.err(p, Diagnostics.lident(p.Parser.token))\n      Parser.next(p)\n      loop(p)\n      switch 
p.Parser.token {\n      | Lident(_) => Some()\n      | _ => None\n      }\n    }\n\n  let startPos = p.Parser.startPos\n  switch p.Parser.token {\n  | Lident(ident) =>\n    Parser.next(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    (ident, loc)\n  | _ =>\n    switch recoverLident(p) {\n    | Some() => parseLident(p)\n    | None => (\"_\", mkLoc(startPos, p.prevEndPos))\n    }\n  }\n}\n\nlet parseIdent = (~msg, ~startPos, p) =>\n  switch p.Parser.token {\n  | Lident(ident)\n  | Uident(ident) =>\n    Parser.next(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    (ident, loc)\n  | token if Token.isKeyword(token) && p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>\n    let tokenTxt = Token.toString(token)\n    let msg =\n      \"`\" ++\n      (tokenTxt ++\n      (\"` is a reserved keyword. Keywords need to be escaped: \\\\\\\"\" ++ (tokenTxt ++ \"\\\"\")))\n\n    Parser.err(~startPos, p, Diagnostics.message(msg))\n    Parser.next(p)\n    (tokenTxt, mkLoc(startPos, p.prevEndPos))\n  | _token =>\n    Parser.err(~startPos, p, Diagnostics.message(msg))\n    Parser.next(p)\n    (\"\", mkLoc(startPos, p.prevEndPos))\n  }\n\nlet parseHashIdent = (~startPos, p) => {\n  Parser.expect(Hash, p)\n  switch p.token {\n  | String(text) =>\n    let text = if p.mode == ParseForTypeChecker {\n      parseStringLiteral(text)\n    } else {\n      text\n    }\n    Parser.next(p)\n    (text, mkLoc(startPos, p.prevEndPos))\n  | Int({i, suffix}) =>\n    let () = switch suffix {\n    | Some(_) => Parser.err(p, Diagnostics.message(ErrorMessages.polyVarIntWithSuffix(i)))\n    | None => ()\n    }\n\n    Parser.next(p)\n    (i, mkLoc(startPos, p.prevEndPos))\n  | _ => parseIdent(~startPos, ~msg=ErrorMessages.variantIdent, p)\n  }\n}\n\n/* Ldot (Ldot (Lident \"Foo\", \"Bar\"), \"baz\") */\nlet parseValuePath = p => {\n  let startPos = p.Parser.startPos\n  let rec aux = (p, path) =>\n    switch p.Parser.token {\n    | Lident(ident) => Longident.Ldot(path, ident)\n    | Uident(uident) =>\n     
 Parser.next(p)\n      if p.Parser.token == Dot {\n        Parser.expect(Dot, p)\n        aux(p, Ldot(path, uident))\n      } else {\n        Parser.err(p, Diagnostics.unexpected(p.Parser.token, p.breadcrumbs))\n        path\n      }\n    | token =>\n      Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n      Longident.Ldot(path, \"_\")\n    }\n\n  let ident = switch p.Parser.token {\n  | Lident(ident) => Longident.Lident(ident)\n  | Uident(ident) =>\n    Parser.next(p)\n    if p.Parser.token == Dot {\n      Parser.expect(Dot, p)\n      aux(p, Lident(ident))\n    } else {\n      Parser.err(p, Diagnostics.unexpected(p.Parser.token, p.breadcrumbs))\n      Longident.Lident(ident)\n    }\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    Longident.Lident(\"_\")\n  }\n\n  Parser.next(p)\n  Location.mkloc(ident, mkLoc(startPos, p.prevEndPos))\n}\n\nlet parseValuePathAfterDot = p => {\n  let startPos = p.Parser.startPos\n  switch p.Parser.token {\n  | Lident(_)\n  | Uident(_) =>\n    parseValuePath(p)\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    Location.mkloc(Longident.Lident(\"_\"), mkLoc(startPos, p.prevEndPos))\n  }\n}\n\nlet parseValuePathTail = (p, startPos, ident) => {\n  let rec loop = (p, path) =>\n    switch p.Parser.token {\n    | Lident(ident) =>\n      Parser.next(p)\n      Location.mkloc(Longident.Ldot(path, ident), mkLoc(startPos, p.prevEndPos))\n    | Uident(ident) =>\n      Parser.next(p)\n      Parser.expect(Dot, p)\n      loop(p, Longident.Ldot(path, ident))\n    | token =>\n      Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n      Location.mkloc(Longident.Ldot(path, \"_\"), mkLoc(startPos, p.prevEndPos))\n    }\n\n  loop(p, ident)\n}\n\nlet parseModuleLongIdentTail = (~lowercase, p, startPos, ident) => {\n  let rec loop = (p, acc) =>\n    switch p.Parser.token {\n    | Lident(ident) if lowercase =>\n      Parser.next(p)\n      let lident = 
Longident.Ldot(acc, ident)\n      Location.mkloc(lident, mkLoc(startPos, p.prevEndPos))\n    | Uident(ident) =>\n      Parser.next(p)\n      let endPos = p.prevEndPos\n      let lident = Longident.Ldot(acc, ident)\n      switch p.Parser.token {\n      | Dot =>\n        Parser.next(p)\n        loop(p, lident)\n      | _ => Location.mkloc(lident, mkLoc(startPos, endPos))\n      }\n    | t =>\n      Parser.err(p, Diagnostics.uident(t))\n      Location.mkloc(Longident.Ldot(acc, \"_\"), mkLoc(startPos, p.prevEndPos))\n    }\n\n  loop(p, ident)\n}\n\n/* Parses module identifiers:\n     Foo\n     Foo.Bar */\nlet parseModuleLongIdent = (~lowercase, p) => {\n  /* Parser.leaveBreadcrumb p Reporting.ModuleLongIdent; */\n  let startPos = p.Parser.startPos\n  let moduleIdent = switch p.Parser.token {\n  | Lident(ident) if lowercase =>\n    let loc = mkLoc(startPos, p.endPos)\n    let lident = Longident.Lident(ident)\n    Parser.next(p)\n    Location.mkloc(lident, loc)\n  | Uident(ident) =>\n    let lident = Longident.Lident(ident)\n    let endPos = p.endPos\n    Parser.next(p)\n    switch p.Parser.token {\n    | Dot =>\n      Parser.next(p)\n      parseModuleLongIdentTail(~lowercase, p, startPos, lident)\n    | _ => Location.mkloc(lident, mkLoc(startPos, endPos))\n    }\n  | t =>\n    Parser.err(p, Diagnostics.uident(t))\n    Location.mkloc(Longident.Lident(\"_\"), mkLoc(startPos, p.prevEndPos))\n  }\n\n  /* Parser.eatBreadcrumb p; */\n  moduleIdent\n}\n\n/* `window.location` or `Math` or `Foo.Bar` */\nlet parseIdentPath = p => {\n  let rec loop = (p, acc) =>\n    switch p.Parser.token {\n    | Uident(ident) | Lident(ident) =>\n      Parser.next(p)\n      let lident = Longident.Ldot(acc, ident)\n      switch p.Parser.token {\n      | Dot =>\n        Parser.next(p)\n        loop(p, lident)\n      | _ => lident\n      }\n    | _t => acc\n    }\n\n  switch p.Parser.token {\n  | Lident(ident) | Uident(ident) =>\n    Parser.next(p)\n    switch p.Parser.token {\n    | Dot =>\n      
Parser.next(p)\n      loop(p, Longident.Lident(ident))\n    | _ => Longident.Lident(ident)\n    }\n  | _ => Longident.Lident(\"_\")\n  }\n}\n\nlet verifyJsxOpeningClosingName = (p, nameExpr) => {\n  let closing = switch p.Parser.token {\n  | Lident(lident) =>\n    Parser.next(p)\n    Longident.Lident(lident)\n  | Uident(_) => parseModuleLongIdent(~lowercase=true, p).txt\n  | _ => Longident.Lident(\"\")\n  }\n\n  switch nameExpr.Parsetree.pexp_desc {\n  | Pexp_ident(openingIdent) =>\n    let opening = {\n      let withoutCreateElement =\n        Longident.flatten(openingIdent.txt) |> List.filter(s => s != \"createElement\")\n\n      switch Longident.unflatten(withoutCreateElement) {\n      | Some(li) => li\n      | None => Longident.Lident(\"\")\n      }\n    }\n\n    opening == closing\n  | _ => assert false\n  }\n}\n\nlet string_of_pexp_ident = nameExpr =>\n  switch nameExpr.Parsetree.pexp_desc {\n  | Pexp_ident(openingIdent) =>\n    Longident.flatten(openingIdent.txt)\n    |> List.filter(s => s != \"createElement\")\n    |> String.concat(\".\")\n  | _ => \"\"\n  }\n\n/* open-def ::=\n *   | open module-path\n *   | open! 
module-path */\nlet parseOpenDescription = (~attrs, p) => {\n  Parser.leaveBreadcrumb(p, Grammar.OpenDescription)\n  let startPos = p.Parser.startPos\n  Parser.expect(Open, p)\n  let override = if Parser.optional(p, Token.Bang) {\n    Asttypes.Override\n  } else {\n    Asttypes.Fresh\n  }\n\n  let modident = parseModuleLongIdent(~lowercase=false, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Parser.eatBreadcrumb(p)\n  Ast_helper.Opn.mk(~loc, ~attrs, ~override, modident)\n}\n\nlet parseTemplateStringLiteral = s => {\n  let len = String.length(s)\n  let b = Buffer.create(len)\n\n  let rec loop = i =>\n    if i < len {\n      let c = String.unsafe_get(s, i)\n      switch c {\n      | '\\\\' as c =>\n        if i + 1 < len {\n          let nextChar = String.unsafe_get(s, i + 1)\n          switch nextChar {\n          | '\\\\' as c =>\n            Buffer.add_char(b, c)\n            loop(i + 2)\n          | '$' as c =>\n            Buffer.add_char(b, c)\n            loop(i + 2)\n          | '`' as c =>\n            Buffer.add_char(b, c)\n            loop(i + 2)\n          | '\\n' | '\\r' =>\n            /* line break */\n            loop(i + 2)\n          | c =>\n            Buffer.add_char(b, '\\\\')\n            Buffer.add_char(b, c)\n            loop(i + 2)\n          }\n        } else {\n          Buffer.add_char(b, c)\n        }\n\n      | c =>\n        Buffer.add_char(b, c)\n        loop(i + 1)\n      }\n    } else {\n      ()\n    }\n\n  loop(0)\n  Buffer.contents(b)\n}\n\n/* constant\t::=\tinteger-literal */\n/* ∣\t float-literal */\n/* ∣\t string-literal */\nlet parseConstant = p => {\n  let isNegative = switch p.Parser.token {\n  | Token.Minus =>\n    Parser.next(p)\n    true\n  | Plus =>\n    Parser.next(p)\n    false\n  | _ => false\n  }\n\n  let constant = switch p.Parser.token {\n  | Int({i, suffix}) =>\n    let intTxt = if isNegative {\n      \"-\" ++ i\n    } else {\n      i\n    }\n    Parsetree.Pconst_integer(intTxt, suffix)\n  | Float({f, suffix}) 
=>\n    let floatTxt = if isNegative {\n      \"-\" ++ f\n    } else {\n      f\n    }\n    Parsetree.Pconst_float(floatTxt, suffix)\n  | String(s) =>\n    if p.mode == ParseForTypeChecker {\n      Pconst_string(s, Some(\"js\"))\n    } else {\n      Pconst_string(s, None)\n    }\n  | Codepoint({c, original}) =>\n    if p.mode == ParseForTypeChecker {\n      Pconst_char(c)\n    } else {\n      /* Pconst_char char does not have enough information for formatting.\n       * When parsing for the printer, we encode the char contents as a string\n       * with a special prefix. */\n      Pconst_string(original, Some(\"INTERNAL_RES_CHAR_CONTENTS\"))\n    }\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    Pconst_string(\"\", None)\n  }\n\n  Parser.next(p)\n  constant\n}\n\nlet parseTemplateConstant = (~prefix, p: Parser.t) => {\n  /* Arrived at the ` char */\n  let startPos = p.startPos\n  Parser.nextTemplateLiteralToken(p)\n  switch p.token {\n  | TemplateTail(txt) =>\n    Parser.next(p)\n    let txt = if p.mode == ParseForTypeChecker {\n      parseTemplateStringLiteral(txt)\n    } else {\n      txt\n    }\n    Parsetree.Pconst_string(txt, prefix)\n  | _ =>\n    let rec skipTokens = () => {\n      Parser.next(p)\n      switch p.token {\n      | Backtick =>\n        Parser.next(p)\n        ()\n      | _ => skipTokens()\n      }\n    }\n\n    skipTokens()\n    Parser.err(\n      ~startPos,\n      ~endPos=p.prevEndPos,\n      p,\n      Diagnostics.message(ErrorMessages.stringInterpolationInPattern),\n    )\n    Pconst_string(\"\", None)\n  }\n}\n\nlet parseCommaDelimitedRegion = (p, ~grammar, ~closing, ~f) => {\n  Parser.leaveBreadcrumb(p, grammar)\n  let rec loop = nodes =>\n    switch f(p) {\n    | Some(node) =>\n      switch p.Parser.token {\n      | Comma =>\n        Parser.next(p)\n        loop(list{node, ...nodes})\n      | token if token == closing || token == Eof => List.rev(list{node, ...nodes})\n      | _ if 
Grammar.isListElement(grammar, p.token) =>\n        /* missing comma between nodes in the region and the current token\n         * looks like the start of something valid in the current region.\n         * Example:\n         *   type student<'extraInfo> = {\n         *     name: string,\n         *     age: int\n         *     otherInfo: 'extraInfo\n         *   }\n         * There is a missing comma between `int` and `otherInfo`.\n         * `otherInfo` looks like a valid start of the record declaration.\n         * We report the error here and then continue parsing the region.\n         */\n        Parser.expect(Comma, p)\n        loop(list{node, ...nodes})\n      | _ =>\n        if !(p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p))) {\n          Parser.expect(Comma, p)\n        }\n        if p.token == Semicolon {\n          Parser.next(p)\n        }\n        loop(list{node, ...nodes})\n      }\n    | None =>\n      if p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p)) {\n        List.rev(nodes)\n      } else {\n        Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs))\n        Parser.next(p)\n        loop(nodes)\n      }\n    }\n\n  let nodes = loop(list{})\n  Parser.eatBreadcrumb(p)\n  nodes\n}\n\nlet parseCommaDelimitedReversedList = (p, ~grammar, ~closing, ~f) => {\n  Parser.leaveBreadcrumb(p, grammar)\n  let rec loop = nodes =>\n    switch f(p) {\n    | Some(node) =>\n      switch p.Parser.token {\n      | Comma =>\n        Parser.next(p)\n        loop(list{node, ...nodes})\n      | token if token == closing || token == Eof => list{node, ...nodes}\n      | _ if Grammar.isListElement(grammar, p.token) =>\n        /* missing comma between nodes in the region and the current token\n         * looks like the start of something valid in the current region.\n         * Example:\n         *   type student<'extraInfo> = {\n         *     name: string,\n         *     age: int\n         *     otherInfo: 
'extraInfo\n         *   }\n         * There is a missing comma between `int` and `otherInfo`.\n         * `otherInfo` looks like a valid start of the record declaration.\n         * We report the error here and then continue parsing the region.\n         */\n        Parser.expect(Comma, p)\n        loop(list{node, ...nodes})\n      | _ =>\n        if !(p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p))) {\n          Parser.expect(Comma, p)\n        }\n        if p.token == Semicolon {\n          Parser.next(p)\n        }\n        loop(list{node, ...nodes})\n      }\n    | None =>\n      if p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p)) {\n        nodes\n      } else {\n        Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs))\n        Parser.next(p)\n        loop(nodes)\n      }\n    }\n\n  let nodes = loop(list{})\n  Parser.eatBreadcrumb(p)\n  nodes\n}\n\nlet parseDelimitedRegion = (p, ~grammar, ~closing, ~f) => {\n  Parser.leaveBreadcrumb(p, grammar)\n  let rec loop = nodes =>\n    switch f(p) {\n    | Some(node) => loop(list{node, ...nodes})\n    | None =>\n      if p.Parser.token == Token.Eof || (p.token == closing || Recover.shouldAbortListParse(p)) {\n        List.rev(nodes)\n      } else {\n        Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs))\n        Parser.next(p)\n        loop(nodes)\n      }\n    }\n\n  let nodes = loop(list{})\n  Parser.eatBreadcrumb(p)\n  nodes\n}\n\nlet parseRegion = (p, ~grammar, ~f) => {\n  Parser.leaveBreadcrumb(p, grammar)\n  let rec loop = nodes =>\n    switch f(p) {\n    | Some(node) => loop(list{node, ...nodes})\n    | None =>\n      if p.Parser.token == Token.Eof || Recover.shouldAbortListParse(p) {\n        List.rev(nodes)\n      } else {\n        Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs))\n        Parser.next(p)\n        loop(nodes)\n      }\n    }\n\n  let nodes = loop(list{})\n  Parser.eatBreadcrumb(p)\n  nodes\n}\n\n/* 
let-binding\t::=\tpattern =  expr */\n/* ∣\t value-name  { parameter }  [: typexpr]  [:> typexpr] =  expr */\n/* ∣\t value-name :  poly-typexpr =  expr */\n\n/* pattern\t::=\tvalue-name */\n/* ∣\t _ */\n/* ∣\t constant */\n/* ∣\t pattern as  value-name */\n/* ∣\t ( pattern ) */\n/* ∣\t ( pattern :  typexpr ) */\n/* ∣\t pattern |  pattern */\n/* ∣\t constr  pattern */\n/* ∣\t #variant variant-pattern */\n/* ∣\t #...type */\n/* ∣\t / pattern  { , pattern }+  / */\n/* ∣\t { field  [: typexpr]  [= pattern] { ; field  [: typexpr]  [= pattern] }  [; _ ] [ ; ] } */\n/* ∣\t [ pattern  { ; pattern }  [ ; ] ] */\n/* ∣\t pattern ::  pattern */\n/* ∣\t [| pattern  { ; pattern }  [ ; ] |] */\n/* ∣\t char-literal ..  char-literal */\n/* \t∣\t exception pattern */\nlet rec parsePattern = (~alias=true, ~or_=true, p) => {\n  let startPos = p.Parser.startPos\n  let attrs = parseAttributes(p)\n  let pat = switch p.Parser.token {\n  | (True | False) as token =>\n    let endPos = p.endPos\n    Parser.next(p)\n    let loc = mkLoc(startPos, endPos)\n    Ast_helper.Pat.construct(\n      ~loc,\n      Location.mkloc(Longident.Lident(Token.toString(token)), loc),\n      None,\n    )\n  | Int(_) | String(_) | Float(_) | Codepoint(_) | Minus | Plus =>\n    let c = parseConstant(p)\n    switch p.token {\n    | DotDot =>\n      Parser.next(p)\n      let c2 = parseConstant(p)\n      Ast_helper.Pat.interval(~loc=mkLoc(startPos, p.prevEndPos), c, c2)\n    | _ => Ast_helper.Pat.constant(~loc=mkLoc(startPos, p.prevEndPos), c)\n    }\n  | Backtick =>\n    let constant = parseTemplateConstant(~prefix=Some(\"js\"), p)\n    Ast_helper.Pat.constant(\n      ~attrs=list{templateLiteralAttr},\n      ~loc=mkLoc(startPos, p.prevEndPos),\n      constant,\n    )\n  | Lparen =>\n    Parser.next(p)\n    switch p.token {\n    | Rparen =>\n      Parser.next(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      let lid = Location.mkloc(Longident.Lident(\"()\"), loc)\n      Ast_helper.Pat.construct(~loc, lid, 
None)\n    | _ =>\n      let pat = parseConstrainedPattern(p)\n      switch p.token {\n      | Comma =>\n        Parser.next(p)\n        parseTuplePattern(~attrs, ~first=pat, ~startPos, p)\n      | _ =>\n        Parser.expect(Rparen, p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        {...pat, ppat_loc: loc}\n      }\n    }\n  | Lbracket => parseArrayPattern(~attrs, p)\n  | Lbrace => parseRecordPattern(~attrs, p)\n  | Underscore =>\n    let endPos = p.endPos\n    let loc = mkLoc(startPos, endPos)\n    Parser.next(p)\n    Ast_helper.Pat.any(~loc, ~attrs, ())\n  | Lident(ident) =>\n    let endPos = p.endPos\n    let loc = mkLoc(startPos, endPos)\n    Parser.next(p)\n    switch p.token {\n    | Backtick =>\n      let constant = parseTemplateConstant(~prefix=Some(ident), p)\n      Ast_helper.Pat.constant(~loc=mkLoc(startPos, p.prevEndPos), constant)\n    | _ => Ast_helper.Pat.var(~loc, ~attrs, Location.mkloc(ident, loc))\n    }\n  | Uident(_) =>\n    let constr = parseModuleLongIdent(~lowercase=false, p)\n    switch p.Parser.token {\n    | Lparen => parseConstructorPatternArgs(p, constr, startPos, attrs)\n    | _ => Ast_helper.Pat.construct(~loc=constr.loc, ~attrs, constr, None)\n    }\n  | Hash =>\n    Parser.next(p)\n    if p.Parser.token === DotDotDot {\n      Parser.next(p)\n      let ident = parseValuePath(p)\n      let loc = mkLoc(startPos, ident.loc.loc_end)\n      Ast_helper.Pat.type_(~loc, ~attrs, ident)\n    } else {\n      let (ident, loc) = switch p.token {\n      | String(text) =>\n        let text = if p.mode == ParseForTypeChecker {\n          parseStringLiteral(text)\n        } else {\n          text\n        }\n        Parser.next(p)\n        (text, mkLoc(startPos, p.prevEndPos))\n      | Int({i, suffix}) =>\n        let () = switch suffix {\n        | Some(_) => Parser.err(p, Diagnostics.message(ErrorMessages.polyVarIntWithSuffix(i)))\n        | None => ()\n        }\n\n        Parser.next(p)\n        (i, mkLoc(startPos, p.prevEndPos))\n      
| _ => parseIdent(~msg=ErrorMessages.variantIdent, ~startPos, p)\n      }\n\n      switch p.Parser.token {\n      | Lparen => parseVariantPatternArgs(p, ident, startPos, attrs)\n      | _ => Ast_helper.Pat.variant(~loc, ~attrs, ident, None)\n      }\n    }\n  | Exception =>\n    Parser.next(p)\n    let pat = parsePattern(~alias=false, ~or_=false, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Pat.exception_(~loc, ~attrs, pat)\n  | Lazy =>\n    Parser.next(p)\n    let pat = parsePattern(~alias=false, ~or_=false, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Pat.lazy_(~loc, ~attrs, pat)\n  | List =>\n    Parser.next(p)\n    parseListPattern(~startPos, ~attrs, p)\n  | Module => parseModulePattern(~attrs, p)\n  | Percent =>\n    let extension = parseExtension(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Pat.extension(~loc, ~attrs, extension)\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    switch skipTokensAndMaybeRetry(p, ~isStartOfGrammar=Grammar.isAtomicPatternStart) {\n    | None => Recover.defaultPattern()\n    | Some() => parsePattern(p)\n    }\n  }\n\n  let pat = if alias {\n    parseAliasPattern(~attrs, pat, p)\n  } else {\n    pat\n  }\n  if or_ {\n    parseOrPattern(pat, p)\n  } else {\n    pat\n  }\n}\n\nand skipTokensAndMaybeRetry = (p, ~isStartOfGrammar) =>\n  if Token.isKeyword(p.Parser.token) && p.Parser.prevEndPos.pos_lnum === p.startPos.pos_lnum {\n    Parser.next(p)\n    None\n  } else if Recover.shouldAbortListParse(p) {\n    if isStartOfGrammar(p.Parser.token) {\n      Parser.next(p)\n      Some()\n    } else {\n      None\n    }\n  } else {\n    Parser.next(p)\n    let rec loop = p =>\n      if !Recover.shouldAbortListParse(p) {\n        Parser.next(p)\n        loop(p)\n      }\n    loop(p)\n    if isStartOfGrammar(p.Parser.token) {\n      Some()\n    } else {\n      None\n    }\n  }\n\n/* alias ::= pattern as lident */\nand parseAliasPattern = (~attrs, 
pattern, p) =>\n  switch p.Parser.token {\n  | As =>\n    Parser.next(p)\n    let (name, loc) = parseLident(p)\n    let name = Location.mkloc(name, loc)\n    Ast_helper.Pat.alias(~loc={...pattern.ppat_loc, loc_end: p.prevEndPos}, ~attrs, pattern, name)\n  | _ => pattern\n  }\n\n/* or ::= pattern | pattern\n * precedence: Red | Blue | Green is interpreted as (Red | Blue) | Green */\nand parseOrPattern = (pattern1, p) => {\n  let rec loop = pattern1 =>\n    switch p.Parser.token {\n    | Bar =>\n      Parser.next(p)\n      let pattern2 = parsePattern(~or_=false, p)\n      let loc = {\n        ...pattern1.Parsetree.ppat_loc,\n        loc_end: pattern2.ppat_loc.loc_end,\n      }\n      loop(Ast_helper.Pat.or_(~loc, pattern1, pattern2))\n    | _ => pattern1\n    }\n\n  loop(pattern1)\n}\n\nand parseNonSpreadPattern = (~msg, p) => {\n  let () = switch p.Parser.token {\n  | DotDotDot =>\n    Parser.err(p, Diagnostics.message(msg))\n    Parser.next(p)\n  | _ => ()\n  }\n\n  switch p.Parser.token {\n  | token if Grammar.isPatternStart(token) =>\n    let pat = parsePattern(p)\n    switch p.Parser.token {\n    | Colon =>\n      Parser.next(p)\n      let typ = parseTypExpr(p)\n      let loc = mkLoc(pat.ppat_loc.loc_start, typ.Parsetree.ptyp_loc.loc_end)\n      Some(Ast_helper.Pat.constraint_(~loc, pat, typ))\n    | _ => Some(pat)\n    }\n  | _ => None\n  }\n}\n\nand parseConstrainedPattern = p => {\n  let pat = parsePattern(p)\n  switch p.Parser.token {\n  | Colon =>\n    Parser.next(p)\n    let typ = parseTypExpr(p)\n    let loc = mkLoc(pat.ppat_loc.loc_start, typ.Parsetree.ptyp_loc.loc_end)\n    Ast_helper.Pat.constraint_(~loc, pat, typ)\n  | _ => pat\n  }\n}\n\nand parseConstrainedPatternRegion = p =>\n  switch p.Parser.token {\n  | token if Grammar.isPatternStart(token) => Some(parseConstrainedPattern(p))\n  | _ => None\n  }\n\n/* field ::=\n *   | longident\n *   | longident : pattern\n *   | longident as lident\n *\n *  row ::=\n *\t | field ,\n *\t | field , _\n *\t | 
field , _,\n */\nand parseRecordPatternField = p => {\n  let label = parseValuePath(p)\n  let pattern = switch p.Parser.token {\n  | Colon =>\n    Parser.next(p)\n    parsePattern(p)\n  | _ => Ast_helper.Pat.var(~loc=label.loc, Location.mkloc(Longident.last(label.txt), label.loc))\n  }\n\n  (label, pattern)\n}\n\n/* TODO: there are better representations than PatField|Underscore ? */\nand parseRecordPatternItem = p =>\n  switch p.Parser.token {\n  | DotDotDot =>\n    Parser.next(p)\n    Some(true, PatField(parseRecordPatternField(p)))\n  | Uident(_) | Lident(_) => Some(false, PatField(parseRecordPatternField(p)))\n  | Underscore =>\n    Parser.next(p)\n    Some(false, PatUnderscore)\n  | _ => None\n  }\n\nand parseRecordPattern = (~attrs, p) => {\n  let startPos = p.startPos\n  Parser.expect(Lbrace, p)\n  let rawFields = parseCommaDelimitedReversedList(\n    p,\n    ~grammar=PatternRecord,\n    ~closing=Rbrace,\n    ~f=parseRecordPatternItem,\n  )\n\n  Parser.expect(Rbrace, p)\n  let (fields, closedFlag) = {\n    let (rawFields, flag) = switch rawFields {\n    | list{(_hasSpread, PatUnderscore), ...rest} => (rest, Asttypes.Open)\n    | rawFields => (rawFields, Asttypes.Closed)\n    }\n\n    List.fold_left(((fields, flag), curr) => {\n      let (hasSpread, field) = curr\n      switch field {\n      | PatField(field) =>\n        if hasSpread {\n          let (_, pattern) = field\n          Parser.err(\n            ~startPos=pattern.Parsetree.ppat_loc.loc_start,\n            p,\n            Diagnostics.message(ErrorMessages.recordPatternSpread),\n          )\n        }\n        (list{field, ...fields}, flag)\n      | PatUnderscore => (fields, flag)\n      }\n    }, (list{}, flag), rawFields)\n  }\n\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Pat.record(~loc, ~attrs, fields, closedFlag)\n}\n\nand parseTuplePattern = (~attrs, ~first, ~startPos, p) => {\n  let patterns = list{\n    first,\n    ...parseCommaDelimitedRegion(\n      p,\n      
~grammar=Grammar.PatternList,\n      ~closing=Rparen,\n      ~f=parseConstrainedPatternRegion,\n    ),\n  }\n\n  Parser.expect(Rparen, p)\n  let () = switch patterns {\n  | list{_} =>\n    Parser.err(\n      ~startPos,\n      ~endPos=p.prevEndPos,\n      p,\n      Diagnostics.message(ErrorMessages.tupleSingleElement),\n    )\n  | _ => ()\n  }\n\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Pat.tuple(~loc, ~attrs, patterns)\n}\n\nand parsePatternRegion = p =>\n  switch p.Parser.token {\n  | DotDotDot =>\n    Parser.next(p)\n    Some(true, parseConstrainedPattern(p))\n  | token if Grammar.isPatternStart(token) => Some(false, parseConstrainedPattern(p))\n  | _ => None\n  }\n\nand parseModulePattern = (~attrs, p) => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Module, p)\n  Parser.expect(Lparen, p)\n  let uident = switch p.token {\n  | Uident(uident) =>\n    let loc = mkLoc(p.startPos, p.endPos)\n    Parser.next(p)\n    Location.mkloc(uident, loc)\n  | _ =>\n    /* TODO: error recovery */\n    Location.mknoloc(\"_\")\n  }\n\n  switch p.token {\n  | Colon =>\n    let colonStart = p.Parser.startPos\n    Parser.next(p)\n    let packageTypAttrs = parseAttributes(p)\n    let packageType = parsePackageType(~startPos=colonStart, ~attrs=packageTypAttrs, p)\n    Parser.expect(Rparen, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    let unpack = Ast_helper.Pat.unpack(~loc=uident.loc, uident)\n    Ast_helper.Pat.constraint_(~loc, ~attrs, unpack, packageType)\n  | _ =>\n    Parser.expect(Rparen, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Pat.unpack(~loc, ~attrs, uident)\n  }\n}\n\nand parseListPattern = (~startPos, ~attrs, p) => {\n  let listPatterns = parseCommaDelimitedReversedList(\n    p,\n    ~grammar=Grammar.PatternOcamlList,\n    ~closing=Rbrace,\n    ~f=parsePatternRegion,\n  )\n\n  Parser.expect(Rbrace, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  let filterSpread = ((hasSpread, pattern)) =>\n    if hasSpread {\n      
Parser.err(\n        ~startPos=pattern.Parsetree.ppat_loc.loc_start,\n        p,\n        Diagnostics.message(ErrorMessages.listPatternSpread),\n      )\n      pattern\n    } else {\n      pattern\n    }\n\n  switch listPatterns {\n  | list{(true, pattern), ...patterns} =>\n    let patterns = patterns |> List.map(filterSpread) |> List.rev\n    let pat = makeListPattern(loc, patterns, Some(pattern))\n    {...pat, ppat_loc: loc, ppat_attributes: attrs}\n  | patterns =>\n    let patterns = patterns |> List.map(filterSpread) |> List.rev\n    let pat = makeListPattern(loc, patterns, None)\n    {...pat, ppat_loc: loc, ppat_attributes: attrs}\n  }\n}\n\nand parseArrayPattern = (~attrs, p) => {\n  let startPos = p.startPos\n  Parser.expect(Lbracket, p)\n  let patterns = parseCommaDelimitedRegion(\n    p,\n    ~grammar=Grammar.PatternList,\n    ~closing=Rbracket,\n    ~f=parseNonSpreadPattern(~msg=ErrorMessages.arrayPatternSpread),\n  )\n\n  Parser.expect(Rbracket, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Pat.array(~loc, ~attrs, patterns)\n}\n\nand parseConstructorPatternArgs = (p, constr, startPos, attrs) => {\n  let lparen = p.startPos\n  Parser.expect(Lparen, p)\n  let args = parseCommaDelimitedRegion(\n    p,\n    ~grammar=Grammar.PatternList,\n    ~closing=Rparen,\n    ~f=parseConstrainedPatternRegion,\n  )\n\n  Parser.expect(Rparen, p)\n  let args = switch args {\n  | list{} =>\n    let loc = mkLoc(lparen, p.prevEndPos)\n    Some(Ast_helper.Pat.construct(~loc, Location.mkloc(Longident.Lident(\"()\"), loc), None))\n  | list{{ppat_desc: Ppat_tuple(_)} as pat} as patterns =>\n    if p.mode == ParseForTypeChecker {\n      /* Some(1, 2) for type-checker */\n      Some(pat)\n    } else {\n      /* Some((1, 2)) for printer */\n      Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns))\n    }\n  | list{pattern} => Some(pattern)\n  | patterns => Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns))\n  }\n\n  
Ast_helper.Pat.construct(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, constr, args)\n}\n\nand parseVariantPatternArgs = (p, ident, startPos, attrs) => {\n  let lparen = p.startPos\n  Parser.expect(Lparen, p)\n  let patterns = parseCommaDelimitedRegion(\n    p,\n    ~grammar=Grammar.PatternList,\n    ~closing=Rparen,\n    ~f=parseConstrainedPatternRegion,\n  )\n  let args = switch patterns {\n  | list{} =>\n    let loc = mkLoc(lparen, p.prevEndPos)\n    Some(Ast_helper.Pat.construct(~loc, Location.mkloc(Longident.Lident(\"()\"), loc), None))\n  | list{{ppat_desc: Ppat_tuple(_)} as pat} as patterns =>\n    if p.mode == ParseForTypeChecker {\n      /* #ident(1, 2) for type-checker */\n      Some(pat)\n    } else {\n      /* #ident((1, 2)) for printer */\n      Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns))\n    }\n  | list{pattern} => Some(pattern)\n  | patterns => Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns))\n  }\n\n  Parser.expect(Rparen, p)\n  Ast_helper.Pat.variant(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, ident, args)\n}\n\nand parseExpr = (~context=OrdinaryExpr, p) => {\n  let expr = parseOperandExpr(~context, p)\n  let expr = parseBinaryExpr(~context, ~a=expr, p, 1)\n  parseTernaryExpr(expr, p)\n}\n\n/* expr ? 
expr : expr */\nand parseTernaryExpr = (leftOperand, p) =>\n  switch p.Parser.token {\n  | Question =>\n    Parser.leaveBreadcrumb(p, Grammar.Ternary)\n    Parser.next(p)\n    let trueBranch = parseExpr(~context=TernaryTrueBranchExpr, p)\n    Parser.expect(Colon, p)\n    let falseBranch = parseExpr(p)\n    Parser.eatBreadcrumb(p)\n    let loc = {\n      ...leftOperand.Parsetree.pexp_loc,\n      loc_start: leftOperand.pexp_loc.loc_start,\n      loc_end: falseBranch.Parsetree.pexp_loc.loc_end,\n    }\n    Ast_helper.Exp.ifthenelse(\n      ~attrs=list{ternaryAttr},\n      ~loc,\n      leftOperand,\n      trueBranch,\n      Some(falseBranch),\n    )\n  | _ => leftOperand\n  }\n\nand parseEs6ArrowExpression = (~context=?, ~parameters=?, p) => {\n  let startPos = p.Parser.startPos\n  Parser.leaveBreadcrumb(p, Grammar.Es6ArrowExpr)\n  let parameters = switch parameters {\n  | Some(params) => params\n  | None => parseParameters(p)\n  }\n\n  let returnType = switch p.Parser.token {\n  | Colon =>\n    Parser.next(p)\n    Some(parseTypExpr(~es6Arrow=false, p))\n  | _ => None\n  }\n\n  Parser.expect(EqualGreater, p)\n  let body = {\n    let expr = parseExpr(~context?, p)\n    switch returnType {\n    | Some(typ) =>\n      Ast_helper.Exp.constraint_(\n        ~loc=mkLoc(expr.pexp_loc.loc_start, typ.Parsetree.ptyp_loc.loc_end),\n        expr,\n        typ,\n      )\n    | None => expr\n    }\n  }\n\n  Parser.eatBreadcrumb(p)\n  let endPos = p.prevEndPos\n  let arrowExpr = List.fold_right((parameter, expr) =>\n    switch parameter {\n    | TermParameter({uncurried, attrs, label: lbl, expr: defaultExpr, pat, pos: startPos}) =>\n      let attrs = if uncurried {\n        list{uncurryAttr, ...attrs}\n      } else {\n        attrs\n      }\n      Ast_helper.Exp.fun_(~loc=mkLoc(startPos, endPos), ~attrs, lbl, defaultExpr, pat, expr)\n    | TypeParameter({uncurried, attrs, locs: newtypes, pos: startPos}) =>\n      let attrs = if uncurried {\n        list{uncurryAttr, ...attrs}\n      } 
else {\n        attrs\n      }\n      makeNewtypes(~attrs, ~loc=mkLoc(startPos, endPos), newtypes, expr)\n    }\n  , parameters, body)\n\n  {...arrowExpr, pexp_loc: {...arrowExpr.pexp_loc, loc_start: startPos}}\n}\n\n/*\n * uncurried_parameter ::=\n *   | . parameter\n *\n * parameter ::=\n *   | pattern\n *   | pattern : type\n *   | ~ labelName\n *   | ~ labelName as pattern\n *   | ~ labelName as pattern : type\n *   | ~ labelName = expr\n *   | ~ labelName as pattern = expr\n *   | ~ labelName as pattern : type = expr\n *   | ~ labelName = ?\n *   | ~ labelName as pattern = ?\n *   | ~ labelName as pattern : type = ?\n *\n * labelName ::= lident\n */\nand parseParameter = p =>\n  if (\n    p.Parser.token == Token.Typ ||\n      (p.token == Tilde ||\n      (p.token == Dot || Grammar.isPatternStart(p.token)))\n  ) {\n    let startPos = p.Parser.startPos\n    let uncurried = Parser.optional(p, Token.Dot)\n    /* two scenarios:\n     *   attrs ~lbl ...\n     *   attrs pattern\n     * Attributes before a labelled arg, indicate that it's on the whole arrow expr\n     * Otherwise it's part of the pattern\n     * */\n    let attrs = parseAttributes(p)\n    if p.Parser.token == Typ {\n      Parser.next(p)\n      let lidents = parseLidentList(p)\n      Some(TypeParameter({uncurried: uncurried, attrs: attrs, locs: lidents, pos: startPos}))\n    } else {\n      let (attrs, lbl, pat) = switch p.Parser.token {\n      | Tilde =>\n        Parser.next(p)\n        let (lblName, loc) = parseLident(p)\n        let propLocAttr = (Location.mkloc(\"ns.namedArgLoc\", loc), Parsetree.PStr(list{}))\n        switch p.Parser.token {\n        | Comma | Equal | Rparen =>\n          let loc = mkLoc(startPos, p.prevEndPos)\n          (\n            attrs,\n            Asttypes.Labelled(lblName),\n            Ast_helper.Pat.var(~attrs=list{propLocAttr}, ~loc, Location.mkloc(lblName, loc)),\n          )\n        | Colon =>\n          let lblEnd = p.prevEndPos\n          Parser.next(p)\n          
let typ = parseTypExpr(p)\n          let loc = mkLoc(startPos, lblEnd)\n          let pat = {\n            let pat = Ast_helper.Pat.var(~loc, Location.mkloc(lblName, loc))\n            let loc = mkLoc(startPos, p.prevEndPos)\n            Ast_helper.Pat.constraint_(~attrs=list{propLocAttr}, ~loc, pat, typ)\n          }\n          (attrs, Asttypes.Labelled(lblName), pat)\n        | As =>\n          Parser.next(p)\n          let pat = {\n            let pat = parseConstrainedPattern(p)\n            {...pat, ppat_attributes: list{propLocAttr, ...pat.ppat_attributes}}\n          }\n\n          (attrs, Asttypes.Labelled(lblName), pat)\n        | t =>\n          Parser.err(p, Diagnostics.unexpected(t, p.breadcrumbs))\n          let loc = mkLoc(startPos, p.prevEndPos)\n          (\n            attrs,\n            Asttypes.Labelled(lblName),\n            Ast_helper.Pat.var(~loc, Location.mkloc(lblName, loc)),\n          )\n        }\n      | _ =>\n        let pattern = parseConstrainedPattern(p)\n        let attrs = List.concat(list{attrs, pattern.ppat_attributes})\n        (list{}, Asttypes.Nolabel, {...pattern, ppat_attributes: attrs})\n      }\n\n      switch p.Parser.token {\n      | Equal =>\n        Parser.next(p)\n        let lbl = switch lbl {\n        | Asttypes.Labelled(lblName) => Asttypes.Optional(lblName)\n        | Asttypes.Nolabel =>\n          let lblName = switch pat.ppat_desc {\n          | Ppat_var(var) => var.txt\n          | _ => \"\"\n          }\n          Parser.err(\n            ~startPos,\n            ~endPos=p.prevEndPos,\n            p,\n            Diagnostics.message(ErrorMessages.missingTildeLabeledParameter(lblName)),\n          )\n          Asttypes.Optional(lblName)\n        | lbl => lbl\n        }\n\n        switch p.Parser.token {\n        | Question =>\n          Parser.next(p)\n          Some(\n            TermParameter({\n              uncurried: uncurried,\n              attrs: attrs,\n              label: lbl,\n              expr: 
None,\n              pat: pat,\n              pos: startPos,\n            }),\n          )\n        | _ =>\n          let expr = parseConstrainedOrCoercedExpr(p)\n          Some(\n            TermParameter({\n              uncurried: uncurried,\n              attrs: attrs,\n              label: lbl,\n              expr: Some(expr),\n              pat: pat,\n              pos: startPos,\n            }),\n          )\n        }\n      | _ =>\n        Some(\n          TermParameter({\n            uncurried: uncurried,\n            attrs: attrs,\n            label: lbl,\n            expr: None,\n            pat: pat,\n            pos: startPos,\n          }),\n        )\n      }\n    }\n  } else {\n    None\n  }\n\nand parseParameterList = p => {\n  let parameters = parseCommaDelimitedRegion(\n    ~grammar=Grammar.ParameterList,\n    ~f=parseParameter,\n    ~closing=Rparen,\n    p,\n  )\n\n  Parser.expect(Rparen, p)\n  parameters\n}\n\n/* parameters ::=\n *   | _\n *   | lident\n *   | ()\n *   | (.)\n *   | ( parameter {, parameter} [,] )\n */\nand parseParameters = p => {\n  let startPos = p.Parser.startPos\n  switch p.Parser.token {\n  | Lident(ident) =>\n    Parser.next(p)\n    let loc = mkLoc(startPos, p.Parser.prevEndPos)\n    list{\n      TermParameter({\n        uncurried: false,\n        attrs: list{},\n        label: Asttypes.Nolabel,\n        expr: None,\n        pat: Ast_helper.Pat.var(~loc, Location.mkloc(ident, loc)),\n        pos: startPos,\n      }),\n    }\n  | Underscore =>\n    Parser.next(p)\n    let loc = mkLoc(startPos, p.Parser.prevEndPos)\n    list{\n      TermParameter({\n        uncurried: false,\n        attrs: list{},\n        label: Asttypes.Nolabel,\n        expr: None,\n        pat: Ast_helper.Pat.any(~loc, ()),\n        pos: startPos,\n      }),\n    }\n  | Lparen =>\n    Parser.next(p)\n    switch p.Parser.token {\n    | Rparen =>\n      Parser.next(p)\n      let loc = mkLoc(startPos, p.Parser.prevEndPos)\n      let unitPattern = 
Ast_helper.Pat.construct(\n        ~loc,\n        Location.mkloc(Longident.Lident(\"()\"), loc),\n        None,\n      )\n\n      list{\n        TermParameter({\n          uncurried: false,\n          attrs: list{},\n          label: Asttypes.Nolabel,\n          expr: None,\n          pat: unitPattern,\n          pos: startPos,\n        }),\n      }\n    | Dot =>\n      Parser.next(p)\n      switch p.token {\n      | Rparen =>\n        Parser.next(p)\n        let loc = mkLoc(startPos, p.Parser.prevEndPos)\n        let unitPattern = Ast_helper.Pat.construct(\n          ~loc,\n          Location.mkloc(Longident.Lident(\"()\"), loc),\n          None,\n        )\n\n        list{\n          TermParameter({\n            uncurried: true,\n            attrs: list{},\n            label: Asttypes.Nolabel,\n            expr: None,\n            pat: unitPattern,\n            pos: startPos,\n          }),\n        }\n      | _ =>\n        switch parseParameterList(p) {\n        | list{\n            TermParameter({attrs, label: lbl, expr: defaultExpr, pat: pattern, pos: startPos}),\n            ...rest,\n          } => list{\n            TermParameter({\n              uncurried: true,\n              attrs: attrs,\n              label: lbl,\n              expr: defaultExpr,\n              pat: pattern,\n              pos: startPos,\n            }),\n            ...rest,\n          }\n        | parameters => parameters\n        }\n      }\n    | _ => parseParameterList(p)\n    }\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    list{}\n  }\n}\n\nand parseCoercedExpr = (~expr: Parsetree.expression, p) => {\n  Parser.expect(ColonGreaterThan, p)\n  let typ = parseTypExpr(p)\n  let loc = mkLoc(expr.pexp_loc.loc_start, p.prevEndPos)\n  Ast_helper.Exp.coerce(~loc, expr, None, typ)\n}\n\nand parseConstrainedOrCoercedExpr = p => {\n  let expr = parseExpr(p)\n  switch p.Parser.token {\n  | ColonGreaterThan => parseCoercedExpr(~expr, p)\n  | Colon =>\n    
Parser.next(p)\n    switch p.token {\n    | _ =>\n      let typ = parseTypExpr(p)\n      let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end)\n      let expr = Ast_helper.Exp.constraint_(~loc, expr, typ)\n      switch p.token {\n      | ColonGreaterThan => parseCoercedExpr(~expr, p)\n      | _ => expr\n      }\n    }\n  | _ => expr\n  }\n}\n\nand parseConstrainedExprRegion = p =>\n  switch p.Parser.token {\n  | token if Grammar.isExprStart(token) =>\n    let expr = parseExpr(p)\n    switch p.Parser.token {\n    | Colon =>\n      Parser.next(p)\n      let typ = parseTypExpr(p)\n      let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end)\n      Some(Ast_helper.Exp.constraint_(~loc, expr, typ))\n    | _ => Some(expr)\n    }\n  | _ => None\n  }\n\n/* Atomic expressions represent unambiguous expressions.\n * This means that regardless of the context, these expressions\n * are always interpreted correctly. */\nand parseAtomicExpr = p => {\n  Parser.leaveBreadcrumb(p, Grammar.ExprOperand)\n  let startPos = p.Parser.startPos\n  let expr = switch p.Parser.token {\n  | (True | False) as token =>\n    Parser.next(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.construct(\n      ~loc,\n      Location.mkloc(Longident.Lident(Token.toString(token)), loc),\n      None,\n    )\n  | Int(_) | String(_) | Float(_) | Codepoint(_) =>\n    let c = parseConstant(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.constant(~loc, c)\n  | Backtick =>\n    let expr = parseTemplateExpr(p)\n    {...expr, pexp_loc: mkLoc(startPos, p.prevEndPos)}\n  | Uident(_) | Lident(_) => parseValueOrConstructor(p)\n  | Hash => parsePolyVariantExpr(p)\n  | Lparen =>\n    Parser.next(p)\n    switch p.Parser.token {\n    | Rparen =>\n      Parser.next(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident(\"()\"), loc), None)\n    | _t =>\n      let expr = parseConstrainedOrCoercedExpr(p)\n    
  switch p.token {\n      | Comma =>\n        Parser.next(p)\n        parseTupleExpr(~startPos, ~first=expr, p)\n      | _ =>\n        Parser.expect(Rparen, p)\n        expr\n      /* {expr with pexp_loc = mkLoc startPos p.prevEndPos}\n       * What does this location mean here? It means that when there's\n       * a parenthesized we keep the location here for whitespace interleaving.\n       * Without the closing paren in the location there will always be an extra\n       * line. For now we don't include it, because it does weird things\n       * with for comments. */\n      }\n    }\n  | List =>\n    Parser.next(p)\n    parseListExpr(~startPos, p)\n  | Module =>\n    Parser.next(p)\n    parseFirstClassModuleExpr(~startPos, p)\n  | Lbracket => parseArrayExp(p)\n  | Lbrace => parseBracedOrRecordExpr(p)\n  | LessThan => parseJsx(p)\n  | Percent =>\n    let extension = parseExtension(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.extension(~loc, extension)\n  | Underscore as token =>\n    /* This case is for error recovery. 
Not sure if it's the correct place */\n    Parser.err(p, Diagnostics.lident(token))\n    Parser.next(p)\n    Recover.defaultExpr()\n  | token =>\n    let errPos = p.prevEndPos\n    Parser.err(~startPos=errPos, p, Diagnostics.unexpected(token, p.breadcrumbs))\n    switch skipTokensAndMaybeRetry(p, ~isStartOfGrammar=Grammar.isAtomicExprStart) {\n    | None => Recover.defaultExpr()\n    | Some() => parseAtomicExpr(p)\n    }\n  }\n\n  Parser.eatBreadcrumb(p)\n  expr\n}\n\n/* module(module-expr)\n * module(module-expr : package-type) */\nand parseFirstClassModuleExpr = (~startPos, p) => {\n  Parser.expect(Lparen, p)\n\n  let modExpr = parseModuleExpr(p)\n  let modEndLoc = p.prevEndPos\n  switch p.Parser.token {\n  | Colon =>\n    let colonStart = p.Parser.startPos\n    Parser.next(p)\n    let attrs = parseAttributes(p)\n    let packageType = parsePackageType(~startPos=colonStart, ~attrs, p)\n    Parser.expect(Rparen, p)\n    let loc = mkLoc(startPos, modEndLoc)\n    let firstClassModule = Ast_helper.Exp.pack(~loc, modExpr)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.constraint_(~loc, firstClassModule, packageType)\n  | _ =>\n    Parser.expect(Rparen, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.pack(~loc, modExpr)\n  }\n}\n\nand parseBracketAccess = (p, expr, startPos) => {\n  Parser.leaveBreadcrumb(p, Grammar.ExprArrayAccess)\n  let lbracket = p.startPos\n  Parser.next(p)\n  let stringStart = p.startPos\n  switch p.Parser.token {\n  | String(s) =>\n    let s = if p.mode == ParseForTypeChecker {\n      parseStringLiteral(s)\n    } else {\n      s\n    }\n    Parser.next(p)\n    let stringEnd = p.prevEndPos\n    Parser.expect(Rbracket, p)\n    Parser.eatBreadcrumb(p)\n    let rbracket = p.prevEndPos\n    let e = {\n      let identLoc = mkLoc(stringStart, stringEnd)\n      let loc = mkLoc(startPos, rbracket)\n      Ast_helper.Exp.send(~loc, expr, Location.mkloc(s, identLoc))\n    }\n\n    let e = parsePrimaryExpr(~operand=e, 
p)\n    let equalStart = p.startPos\n    switch p.token {\n    | Equal =>\n      Parser.next(p)\n      let equalEnd = p.prevEndPos\n      let rhsExpr = parseExpr(p)\n      let loc = mkLoc(startPos, rhsExpr.pexp_loc.loc_end)\n      let operatorLoc = mkLoc(equalStart, equalEnd)\n      Ast_helper.Exp.apply(\n        ~loc,\n        Ast_helper.Exp.ident(~loc=operatorLoc, Location.mkloc(Longident.Lident(\"#=\"), operatorLoc)),\n        list{(Nolabel, e), (Nolabel, rhsExpr)},\n      )\n    | _ => e\n    }\n  | _ =>\n    let accessExpr = parseConstrainedOrCoercedExpr(p)\n    Parser.expect(Rbracket, p)\n    Parser.eatBreadcrumb(p)\n    let rbracket = p.prevEndPos\n    let arrayLoc = mkLoc(lbracket, rbracket)\n    switch p.token {\n    | Equal =>\n      Parser.leaveBreadcrumb(p, ExprArrayMutation)\n      Parser.next(p)\n      let rhsExpr = parseExpr(p)\n      let arraySet = Location.mkloc(Longident.Ldot(Lident(\"Array\"), \"set\"), arrayLoc)\n\n      let endPos = p.prevEndPos\n      let arraySet = Ast_helper.Exp.apply(\n        ~loc=mkLoc(startPos, endPos),\n        Ast_helper.Exp.ident(~loc=arrayLoc, arraySet),\n        list{(Nolabel, expr), (Nolabel, accessExpr), (Nolabel, rhsExpr)},\n      )\n\n      Parser.eatBreadcrumb(p)\n      arraySet\n    | _ =>\n      let endPos = p.prevEndPos\n      let e = Ast_helper.Exp.apply(\n        ~loc=mkLoc(startPos, endPos),\n        Ast_helper.Exp.ident(\n          ~loc=arrayLoc,\n          Location.mkloc(Longident.Ldot(Lident(\"Array\"), \"get\"), arrayLoc),\n        ),\n        list{(Nolabel, expr), (Nolabel, accessExpr)},\n      )\n\n      parsePrimaryExpr(~operand=e, p)\n    }\n  }\n}\n\n/* * A primary expression represents\n *  - atomic-expr\n *  - john.age\n *  - array[0]\n *  - applyFunctionTo(arg1, arg2)\n *\n *  The \"operand\" represents the expression that is operated on\n */\nand parsePrimaryExpr = (~operand, ~noCall=false, p) => {\n  let startPos = operand.pexp_loc.loc_start\n  let rec loop = (p, expr) =>\n    switch 
p.Parser.token {\n    | Dot =>\n      Parser.next(p)\n      let lident = parseValuePathAfterDot(p)\n      switch p.Parser.token {\n      | Equal if noCall == false =>\n        Parser.leaveBreadcrumb(p, Grammar.ExprSetField)\n        Parser.next(p)\n        let targetExpr = parseExpr(p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let setfield = Ast_helper.Exp.setfield(~loc, expr, lident, targetExpr)\n        Parser.eatBreadcrumb(p)\n        setfield\n      | _ =>\n        let endPos = p.prevEndPos\n        let loc = mkLoc(startPos, endPos)\n        loop(p, Ast_helper.Exp.field(~loc, expr, lident))\n      }\n    | Lbracket if noCall == false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>\n      parseBracketAccess(p, expr, startPos)\n    | Lparen if noCall == false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>\n      loop(p, parseCallExpr(p, expr))\n    | Backtick if noCall == false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>\n      switch expr.pexp_desc {\n      | Pexp_ident({txt: Longident.Lident(ident)}) => parseTemplateExpr(~prefix=ident, p)\n      | _ =>\n        Parser.err(\n          ~startPos=expr.pexp_loc.loc_start,\n          ~endPos=expr.pexp_loc.loc_end,\n          p,\n          Diagnostics.message(\n            \"Tagged template literals are currently restricted to names like: json`null`.\",\n          ),\n        )\n        parseTemplateExpr(p)\n      }\n    | _ => expr\n    }\n\n  loop(p, operand)\n}\n\n/* a unary expression is an expression with only one operand and\n * unary operator. Examples:\n *   -1\n *   !condition\n *   -. 
1.6\n */\nand parseUnaryExpr = p => {\n  let startPos = p.Parser.startPos\n  switch p.Parser.token {\n  | (Minus | MinusDot | Plus | PlusDot | Bang) as token =>\n    Parser.leaveBreadcrumb(p, Grammar.ExprUnary)\n    let tokenEnd = p.endPos\n    Parser.next(p)\n    let operand = parseUnaryExpr(p)\n    let unaryExpr = makeUnaryExpr(startPos, tokenEnd, token, operand)\n    Parser.eatBreadcrumb(p)\n    unaryExpr\n  | _ => parsePrimaryExpr(~operand=parseAtomicExpr(p), p)\n  }\n}\n\n/* Represents an \"operand\" in a binary expression.\n * If you have `a + b`, `a` and `b` both represent\n * the operands of the binary expression with opeartor `+` */\nand parseOperandExpr = (~context, p) => {\n  let startPos = p.Parser.startPos\n  let attrs = parseAttributes(p)\n  let expr = switch p.Parser.token {\n  | Assert =>\n    Parser.next(p)\n    let expr = parseUnaryExpr(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.assert_(~loc, expr)\n  | Lazy =>\n    Parser.next(p)\n    let expr = parseUnaryExpr(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.lazy_(~loc, expr)\n  | Try => parseTryExpression(p)\n  | If => parseIfOrIfLetExpression(p)\n  | For => parseForExpression(p)\n  | While => parseWhileExpression(p)\n  | Switch => parseSwitchExpression(p)\n  | _ =>\n    if (\n      context !== WhenExpr && isEs6ArrowExpression(~inTernary=context == TernaryTrueBranchExpr, p)\n    ) {\n      parseEs6ArrowExpression(~context, p)\n    } else {\n      parseUnaryExpr(p)\n    }\n  }\n\n  /* let endPos = p.Parser.prevEndPos in */\n  {\n    ...expr,\n    pexp_attributes: List.concat(list{expr.Parsetree.pexp_attributes, attrs}),\n    /* pexp_loc = mkLoc startPos endPos */\n  }\n}\n\n/* a binary expression is an expression that combines two expressions with an\n * operator. 
Examples:\n *    a + b\n *    f(x) |> g(y)\n */\nand parseBinaryExpr = (~context=OrdinaryExpr, ~a=?, p, prec) => {\n  let a = switch a {\n  | Some(e) => e\n  | None => parseOperandExpr(~context, p)\n  }\n\n  let rec loop = a => {\n    let token = p.Parser.token\n    let tokenPrec = switch token {\n    /* Can the minus be interpreted as a binary operator? Or is it a unary?\n     * let w = {\n     *   x\n     *   -10\n     * }\n     * vs\n     * let w = {\n     *   width\n     *   - gap\n     * }\n     *\n     * First case is unary, second is a binary operator.\n     * See Scanner.isBinaryOp */\n    | Minus | MinusDot | LessThan\n      if !Scanner.isBinaryOp(p.scanner.src, p.startPos.pos_cnum, p.endPos.pos_cnum) &&\n      p.startPos.pos_lnum > p.prevEndPos.pos_lnum => -1\n    | token => Token.precedence(token)\n    }\n\n    if tokenPrec < prec {\n      a\n    } else {\n      Parser.leaveBreadcrumb(p, Grammar.ExprBinaryAfterOp(token))\n      let startPos = p.startPos\n      Parser.next(p)\n      let endPos = p.prevEndPos\n      let b = parseBinaryExpr(~context, p, tokenPrec + 1)\n      let loc = mkLoc(a.Parsetree.pexp_loc.loc_start, b.pexp_loc.loc_end)\n      let expr = Ast_helper.Exp.apply(\n        ~loc,\n        makeInfixOperator(p, token, startPos, endPos),\n        list{(Nolabel, a), (Nolabel, b)},\n      )\n\n      Parser.eatBreadcrumb(p)\n      loop(expr)\n    }\n  }\n\n  loop(a)\n}\n\n/* If we even need this, determines if < might be the start of jsx. Not 100% complete */\n/* and isStartOfJsx p = */\n/* Parser.lookahead p (fun p -> */\n/* match p.Parser.token with */\n/* | LessThan -> */\n/* Parser.next p; */\n/* begin match p.token with */\n/* | GreaterThan (* <> *) -> true */\n/* | Lident _ | Uident _ | List -> */\n/* ignore (parseJsxName p); */\n/* begin match p.token with */\n/* | GreaterThan (* <div> *) -> true */\n/* | Question (*<Component ? 
*) -> true */\n/* | Lident _ | List -> */\n/* Parser.next p; */\n/* begin match p.token with */\n/* | Equal (* <Component handleClick= *) -> true */\n/* | _ -> false (* TODO *) */\n/* end */\n/* | Forwardslash (* <Component / *)-> */\n/* Parser.next p; */\n/* begin match p.token with */\n/* | GreaterThan (* <Component /> *) -> true */\n/* | _ -> false */\n/* end */\n/* | _ -> */\n/* false */\n/* end */\n/* | _ -> false */\n/* end */\n/* | _ -> false */\n/* ) */\n\nand parseTemplateExpr = (~prefix=\"js\", p) => {\n  let hiddenOperator = {\n    let op = Location.mknoloc(Longident.Lident(\"^\"))\n    Ast_helper.Exp.ident(op)\n  }\n\n  let rec parseParts = acc => {\n    let startPos = p.Parser.startPos\n    Parser.nextTemplateLiteralToken(p)\n    switch p.token {\n    | TemplateTail(txt) =>\n      Parser.next(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      let txt = if p.mode == ParseForTypeChecker {\n        parseTemplateStringLiteral(txt)\n      } else {\n        txt\n      }\n      let str = Ast_helper.Exp.constant(\n        ~attrs=list{templateLiteralAttr},\n        ~loc,\n        Pconst_string(txt, Some(prefix)),\n      )\n      Ast_helper.Exp.apply(\n        ~attrs=list{templateLiteralAttr},\n        ~loc,\n        hiddenOperator,\n        list{(Nolabel, acc), (Nolabel, str)},\n      )\n    | TemplatePart(txt) =>\n      Parser.next(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      let expr = parseExprBlock(p)\n      let fullLoc = mkLoc(startPos, p.prevEndPos)\n      let txt = if p.mode == ParseForTypeChecker {\n        parseTemplateStringLiteral(txt)\n      } else {\n        txt\n      }\n      let str = Ast_helper.Exp.constant(\n        ~attrs=list{templateLiteralAttr},\n        ~loc,\n        Pconst_string(txt, Some(prefix)),\n      )\n      let next = {\n        let a = Ast_helper.Exp.apply(\n          ~attrs=list{templateLiteralAttr},\n          ~loc=fullLoc,\n          hiddenOperator,\n          list{(Nolabel, acc), (Nolabel, str)},\n        
)\n        Ast_helper.Exp.apply(~loc=fullLoc, hiddenOperator, list{(Nolabel, a), (Nolabel, expr)})\n      }\n\n      parseParts(next)\n    | token =>\n      Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n      Ast_helper.Exp.constant(Pconst_string(\"\", None))\n    }\n  }\n\n  let startPos = p.startPos\n  Parser.nextTemplateLiteralToken(p)\n  switch p.token {\n  | TemplateTail(txt) =>\n    Parser.next(p)\n    let txt = if p.mode == ParseForTypeChecker {\n      parseTemplateStringLiteral(txt)\n    } else {\n      txt\n    }\n    Ast_helper.Exp.constant(\n      ~attrs=list{templateLiteralAttr},\n      ~loc=mkLoc(startPos, p.prevEndPos),\n      Pconst_string(txt, Some(prefix)),\n    )\n  | TemplatePart(txt) =>\n    Parser.next(p)\n    let constantLoc = mkLoc(startPos, p.prevEndPos)\n    let expr = parseExprBlock(p)\n    let fullLoc = mkLoc(startPos, p.prevEndPos)\n    let txt = if p.mode == ParseForTypeChecker {\n      parseTemplateStringLiteral(txt)\n    } else {\n      txt\n    }\n    let str = Ast_helper.Exp.constant(\n      ~attrs=list{templateLiteralAttr},\n      ~loc=constantLoc,\n      Pconst_string(txt, Some(prefix)),\n    )\n    let next = Ast_helper.Exp.apply(\n      ~attrs=list{templateLiteralAttr},\n      ~loc=fullLoc,\n      hiddenOperator,\n      list{(Nolabel, str), (Nolabel, expr)},\n    )\n\n    parseParts(next)\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    Ast_helper.Exp.constant(Pconst_string(\"\", None))\n  }\n}\n\n/* Overparse: let f = a : int => a + 1, is it (a : int) => or (a): int =>\n * Also overparse constraints:\n *  let x = {\n *    let a = 1\n *    a + pi: int\n *  }\n *\n *  We want to give a nice error message in these cases\n * */\nand overParseConstrainedOrCoercedOrArrowExpression = (p, expr) =>\n  switch p.Parser.token {\n  | ColonGreaterThan => parseCoercedExpr(~expr, p)\n  | Colon =>\n    Parser.next(p)\n    let typ = parseTypExpr(~es6Arrow=false, p)\n    switch p.Parser.token {\n  
  | EqualGreater =>\n      Parser.next(p)\n      let body = parseExpr(p)\n      let pat = switch expr.pexp_desc {\n      | Pexp_ident(longident) =>\n        Ast_helper.Pat.var(\n          ~loc=expr.pexp_loc,\n          Location.mkloc(Longident.flatten(longident.txt) |> String.concat(\".\"), longident.loc),\n        )\n      /* TODO: can we convert more expressions to patterns? */\n      | _ => Ast_helper.Pat.var(~loc=expr.pexp_loc, Location.mkloc(\"pattern\", expr.pexp_loc))\n      }\n\n      let arrow1 = Ast_helper.Exp.fun_(\n        ~loc=mkLoc(expr.pexp_loc.loc_start, body.pexp_loc.loc_end),\n        Asttypes.Nolabel,\n        None,\n        pat,\n        Ast_helper.Exp.constraint_(body, typ),\n      )\n\n      let arrow2 = Ast_helper.Exp.fun_(\n        ~loc=mkLoc(expr.pexp_loc.loc_start, body.pexp_loc.loc_end),\n        Asttypes.Nolabel,\n        None,\n        Ast_helper.Pat.constraint_(pat, typ),\n        body,\n      )\n\n      let msg =\n        Doc.breakableGroup(\n          ~forceBreak=true,\n          Doc.concat(list{\n            Doc.text(\"Did you mean to annotate the parameter type or the return type?\"),\n            Doc.indent(\n              Doc.concat(list{\n                Doc.line,\n                Doc.text(\"1) \"),\n                ResPrinter.printExpression(arrow1, CommentTable.empty),\n                Doc.line,\n                Doc.text(\"2) \"),\n                ResPrinter.printExpression(arrow2, CommentTable.empty),\n              }),\n            ),\n          }),\n        ) |> Doc.toString(~width=80)\n\n      Parser.err(\n        ~startPos=expr.pexp_loc.loc_start,\n        ~endPos=body.pexp_loc.loc_end,\n        p,\n        Diagnostics.message(msg),\n      )\n      arrow1\n    | _ =>\n      let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end)\n      let expr = Ast_helper.Exp.constraint_(~loc, expr, typ)\n      let () = Parser.err(\n        ~startPos=expr.pexp_loc.loc_start,\n        ~endPos=typ.ptyp_loc.loc_end,\n        p,\n    
    Diagnostics.message(\n          Doc.breakableGroup(\n            ~forceBreak=true,\n            Doc.concat(list{\n              Doc.text(\"Expressions with type constraints need to be wrapped in parens:\"),\n              Doc.indent(\n                Doc.concat(list{\n                  Doc.line,\n                  ResPrinter.addParens(ResPrinter.printExpression(expr, CommentTable.empty)),\n                }),\n              ),\n            }),\n          ) |> Doc.toString(~width=80),\n        ),\n      )\n\n      expr\n    }\n  | _ => expr\n  }\n\nand parseLetBindingBody = (~startPos, ~attrs, p) => {\n  Parser.beginRegion(p)\n  Parser.leaveBreadcrumb(p, Grammar.LetBinding)\n  let (pat, exp) = {\n    Parser.leaveBreadcrumb(p, Grammar.Pattern)\n    let pat = parsePattern(p)\n    Parser.eatBreadcrumb(p)\n    switch p.Parser.token {\n    | Colon =>\n      Parser.next(p)\n      switch p.token {\n      | Typ =>\n        /* locally abstract types */\n        Parser.next(p)\n        let newtypes = parseLidentList(p)\n        Parser.expect(Dot, p)\n        let typ = parseTypExpr(p)\n        Parser.expect(Equal, p)\n        let expr = parseExpr(p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let (exp, poly) = wrapTypeAnnotation(~loc, newtypes, typ, expr)\n        let pat = Ast_helper.Pat.constraint_(~loc, pat, poly)\n        (pat, exp)\n      | _ =>\n        let polyType = parsePolyTypeExpr(p)\n        let loc = {...pat.ppat_loc, loc_end: polyType.Parsetree.ptyp_loc.loc_end}\n        let pat = Ast_helper.Pat.constraint_(~loc, pat, polyType)\n        Parser.expect(Token.Equal, p)\n        let exp = parseExpr(p)\n        let exp = overParseConstrainedOrCoercedOrArrowExpression(p, exp)\n        (pat, exp)\n      }\n    | _ =>\n      Parser.expect(Token.Equal, p)\n      let exp = overParseConstrainedOrCoercedOrArrowExpression(p, parseExpr(p))\n      (pat, exp)\n    }\n  }\n\n  let loc = mkLoc(startPos, p.prevEndPos)\n  let vb = Ast_helper.Vb.mk(~loc, ~attrs, 
pat, exp)\n  Parser.eatBreadcrumb(p)\n  Parser.endRegion(p)\n  vb\n}\n\n/* TODO: find a better way? Is it possible?\n * let a = 1\n * @attr\n * and b = 2\n *\n * The problem is that without semi we need a lookahead to determine\n * if the attr is on the letbinding or the start of a new thing\n *\n * let a = 1\n * @attr\n * let b = 1\n *\n * Here @attr should attach to something \"new\": `let b = 1`\n * The parser state is forked, which is quite expensive…\n */\nand parseAttributesAndBinding = (p: Parser.t) => {\n  let err = p.scanner.err\n  let ch = p.scanner.ch\n  let offset = p.scanner.offset\n  let lineOffset = p.scanner.lineOffset\n  let lnum = p.scanner.lnum\n  let mode = p.scanner.mode\n  let token = p.token\n  let startPos = p.startPos\n  let endPos = p.endPos\n  let prevEndPos = p.prevEndPos\n  let breadcrumbs = p.breadcrumbs\n  let errors = p.errors\n  let diagnostics = p.diagnostics\n  let comments = p.comments\n\n  switch p.Parser.token {\n  | At =>\n    let attrs = parseAttributes(p)\n    switch p.Parser.token {\n    | And => attrs\n    | _ =>\n      p.scanner.err = err\n      p.scanner.ch = ch\n      p.scanner.offset = offset\n      p.scanner.lineOffset = lineOffset\n      p.scanner.lnum = lnum\n      p.scanner.mode = mode\n      p.token = token\n      p.startPos = startPos\n      p.endPos = endPos\n      p.prevEndPos = prevEndPos\n      p.breadcrumbs = breadcrumbs\n      p.errors = errors\n      p.diagnostics = diagnostics\n      p.comments = comments\n      list{}\n    }\n  | _ => list{}\n  }\n}\n\n/* definition\t::=\tlet [rec] let-binding  { and let-binding } */\nand parseLetBindings = (~attrs, p) => {\n  let startPos = p.Parser.startPos\n  Parser.optional(p, Let) |> ignore\n  let recFlag = if Parser.optional(p, Token.Rec) {\n    Asttypes.Recursive\n  } else {\n    Asttypes.Nonrecursive\n  }\n\n  let first = parseLetBindingBody(~startPos, ~attrs, p)\n\n  let rec loop = (p, bindings) => {\n    let startPos = p.Parser.startPos\n    let attrs = 
parseAttributesAndBinding(p)\n    switch p.Parser.token {\n    | And =>\n      Parser.next(p)\n      let attrs = switch p.token {\n      | Export =>\n        let exportLoc = mkLoc(p.startPos, p.endPos)\n        Parser.next(p)\n        let genTypeAttr = (Location.mkloc(\"genType\", exportLoc), Parsetree.PStr(list{}))\n        list{genTypeAttr, ...attrs}\n      | _ => attrs\n      }\n\n      ignore(Parser.optional(p, Let)) /* overparse for fault tolerance */\n      let letBinding = parseLetBindingBody(~startPos, ~attrs, p)\n      loop(p, list{letBinding, ...bindings})\n    | _ => List.rev(bindings)\n    }\n  }\n\n  (recFlag, loop(p, list{first}))\n}\n\n/*\n * div -> div\n * Foo -> Foo.createElement\n * Foo.Bar -> Foo.Bar.createElement\n */\nand parseJsxName = p => {\n  let longident = switch p.Parser.token {\n  | Lident(ident) =>\n    let identStart = p.startPos\n    let identEnd = p.endPos\n    Parser.next(p)\n    let loc = mkLoc(identStart, identEnd)\n    Location.mkloc(Longident.Lident(ident), loc)\n  | Uident(_) =>\n    let longident = parseModuleLongIdent(~lowercase=true, p)\n    Location.mkloc(Longident.Ldot(longident.txt, \"createElement\"), longident.loc)\n  | _ =>\n    let msg = \"A jsx name must be a lowercase or uppercase name, like: div in <div /> or Navbar in <Navbar />\"\n\n    Parser.err(p, Diagnostics.message(msg))\n    Location.mknoloc(Longident.Lident(\"_\"))\n  }\n\n  Ast_helper.Exp.ident(~loc=longident.loc, longident)\n}\n\nand parseJsxOpeningOrSelfClosingElement = (~startPos, p) => {\n  let jsxStartPos = p.Parser.startPos\n  let name = parseJsxName(p)\n  let jsxProps = parseJsxProps(p)\n  let children = switch p.Parser.token {\n  | Forwardslash =>\n    /* <foo a=b /> */\n    let childrenStartPos = p.Parser.startPos\n    Parser.next(p)\n    let childrenEndPos = p.Parser.startPos\n    Parser.expect(GreaterThan, p)\n    let loc = mkLoc(childrenStartPos, childrenEndPos)\n    makeListExpression(loc, list{}, None) /* no children */\n  | GreaterThan 
=>\n    /* <foo a=b> bar </foo> */\n    let childrenStartPos = p.Parser.startPos\n    Scanner.setJsxMode(p.scanner)\n    Parser.next(p)\n    let (spread, children) = parseJsxChildren(p)\n    let childrenEndPos = p.Parser.startPos\n    let () = switch p.token {\n    | LessThanSlash => Parser.next(p)\n    | LessThan =>\n      Parser.next(p)\n      Parser.expect(Forwardslash, p)\n    | token if Grammar.isStructureItemStart(token) => ()\n    | _ => Parser.expect(LessThanSlash, p)\n    }\n\n    switch p.Parser.token {\n    | Lident(_) | Uident(_) if verifyJsxOpeningClosingName(p, name) =>\n      Parser.expect(GreaterThan, p)\n      let loc = mkLoc(childrenStartPos, childrenEndPos)\n      switch (spread, children) {\n      | (true, list{child, ..._}) => child\n      | _ => makeListExpression(loc, children, None)\n      }\n    | token =>\n      let () = if Grammar.isStructureItemStart(token) {\n        let closing = \"</\" ++ (string_of_pexp_ident(name) ++ \">\")\n        let msg = Diagnostics.message(\"Missing \" ++ closing)\n        Parser.err(~startPos, ~endPos=p.prevEndPos, p, msg)\n      } else {\n        let opening = \"</\" ++ (string_of_pexp_ident(name) ++ \">\")\n        let msg =\n          \"Closing jsx name should be the same as the opening name. 
Did you mean \" ++\n          (opening ++\n          \" ?\")\n        Parser.err(~startPos, ~endPos=p.prevEndPos, p, Diagnostics.message(msg))\n        Parser.expect(GreaterThan, p)\n      }\n\n      let loc = mkLoc(childrenStartPos, childrenEndPos)\n      switch (spread, children) {\n      | (true, list{child, ..._}) => child\n      | _ => makeListExpression(loc, children, None)\n      }\n    }\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    makeListExpression(Location.none, list{}, None)\n  }\n\n  let jsxEndPos = p.prevEndPos\n  let loc = mkLoc(jsxStartPos, jsxEndPos)\n  Ast_helper.Exp.apply(\n    ~loc,\n    name,\n    List.concat(list{\n      jsxProps,\n      list{\n        (Asttypes.Labelled(\"children\"), children),\n        (\n          Asttypes.Nolabel,\n          Ast_helper.Exp.construct(Location.mknoloc(Longident.Lident(\"()\")), None),\n        ),\n      },\n    }),\n  )\n}\n\n/*\n *  jsx ::=\n *    | <> jsx-children </>\n *    | <element-name {jsx-prop} />\n *    | <element-name {jsx-prop}> jsx-children </element-name>\n *\n *  jsx-children ::= primary-expr*          * => 0 or more\n */\nand parseJsx = p => {\n  Parser.leaveBreadcrumb(p, Grammar.Jsx)\n  let startPos = p.Parser.startPos\n  Parser.expect(LessThan, p)\n  let jsxExpr = switch p.Parser.token {\n  | Lident(_) | Uident(_) => parseJsxOpeningOrSelfClosingElement(~startPos, p)\n  | GreaterThan =>\n    /* fragment: <> foo </> */\n    parseJsxFragment(p)\n  | _ => parseJsxName(p)\n  }\n\n  Parser.eatBreadcrumb(p)\n  {...jsxExpr, pexp_attributes: list{jsxAttr}}\n}\n\n/*\n * jsx-fragment ::=\n *  | <> </>\n *  | <> jsx-children </>\n */\nand parseJsxFragment = p => {\n  let childrenStartPos = p.Parser.startPos\n  Scanner.setJsxMode(p.scanner)\n  Parser.expect(GreaterThan, p)\n  let (_spread, children) = parseJsxChildren(p)\n  let childrenEndPos = p.Parser.startPos\n  Parser.expect(LessThanSlash, p)\n  Parser.expect(GreaterThan, p)\n  let loc = 
mkLoc(childrenStartPos, childrenEndPos)\n  makeListExpression(loc, children, None)\n}\n\n/*\n * jsx-prop ::=\n *   |  lident\n *   | ?lident\n *   |  lident =  jsx_expr\n *   |  lident = ?jsx_expr\n */\nand parseJsxProp = p =>\n  switch p.Parser.token {\n  | Question | Lident(_) =>\n    let optional = Parser.optional(p, Question)\n    let (name, loc) = parseLident(p)\n    let propLocAttr = (Location.mkloc(\"ns.namedArgLoc\", loc), Parsetree.PStr(list{}))\n    /* optional punning: <foo ?a /> */\n    if optional {\n      Some(\n        Asttypes.Optional(name),\n        Ast_helper.Exp.ident(\n          ~attrs=list{propLocAttr},\n          ~loc,\n          Location.mkloc(Longident.Lident(name), loc),\n        ),\n      )\n    } else {\n      switch p.Parser.token {\n      | Equal =>\n        Parser.next(p)\n        /* no punning */\n        let optional = Parser.optional(p, Question)\n        let attrExpr = {\n          let e = parsePrimaryExpr(~operand=parseAtomicExpr(p), p)\n          {...e, pexp_attributes: list{propLocAttr, ...e.pexp_attributes}}\n        }\n\n        let label = if optional {\n          Asttypes.Optional(name)\n        } else {\n          Asttypes.Labelled(name)\n        }\n\n        Some(label, attrExpr)\n      | _ =>\n        let attrExpr = Ast_helper.Exp.ident(\n          ~loc,\n          ~attrs=list{propLocAttr},\n          Location.mkloc(Longident.Lident(name), loc),\n        )\n        let label = if optional {\n          Asttypes.Optional(name)\n        } else {\n          Asttypes.Labelled(name)\n        }\n\n        Some(label, attrExpr)\n      }\n    }\n  | _ => None\n  }\n\nand parseJsxProps = p => parseRegion(~grammar=Grammar.JsxAttribute, ~f=parseJsxProp, p)\n\nand parseJsxChildren = p => {\n  let rec loop = (p, children) =>\n    switch p.Parser.token {\n    | Token.Eof | LessThanSlash =>\n      Scanner.popMode(p.scanner, Jsx)\n      List.rev(children)\n    | LessThan =>\n      /* Imagine: <div> <Navbar /> <\n       * is `<` the start 
of a jsx-child? <div …\n       * or is it the start of a closing tag?  </div>\n       * reconsiderLessThan peeks at the next token and\n       * determines the correct token to disambiguate */\n      let token = Scanner.reconsiderLessThan(p.scanner)\n      if token == LessThan {\n        let child = parsePrimaryExpr(~operand=parseAtomicExpr(p), ~noCall=true, p)\n        loop(p, list{child, ...children})\n      } else {\n        /* LessThanSlash */\n        let () = p.token = token\n        let () = Scanner.popMode(p.scanner, Jsx)\n        List.rev(children)\n      }\n    | token if Grammar.isJsxChildStart(token) =>\n      let () = Scanner.popMode(p.scanner, Jsx)\n      let child = parsePrimaryExpr(~operand=parseAtomicExpr(p), ~noCall=true, p)\n      loop(p, list{child, ...children})\n    | _ =>\n      Scanner.popMode(p.scanner, Jsx)\n      List.rev(children)\n    }\n\n  switch p.Parser.token {\n  | DotDotDot =>\n    Parser.next(p)\n    (true, list{parsePrimaryExpr(~operand=parseAtomicExpr(p), ~noCall=true, p)})\n  | _ => (false, loop(p, list{}))\n  }\n}\n\nand parseBracedOrRecordExpr = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Lbrace, p)\n  switch p.Parser.token {\n  | Rbrace =>\n    Parser.err(p, Diagnostics.unexpected(Rbrace, p.breadcrumbs))\n    Parser.next(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    let braces = makeBracesAttr(loc)\n    Ast_helper.Exp.construct(\n      ~attrs=list{braces},\n      ~loc,\n      Location.mkloc(Longident.Lident(\"()\"), loc),\n      None,\n    )\n  | DotDotDot =>\n    /* beginning of record spread, parse record */\n    Parser.next(p)\n    let spreadExpr = parseConstrainedOrCoercedExpr(p)\n    Parser.expect(Comma, p)\n    let expr = parseRecordExpr(~startPos, ~spread=Some(spreadExpr), list{}, p)\n    Parser.expect(Rbrace, p)\n    expr\n  | String(s) =>\n    let s = if p.mode == ParseForTypeChecker {\n      parseStringLiteral(s)\n    } else {\n      s\n    }\n    let field = {\n      let loc = 
mkLoc(p.startPos, p.endPos)\n      Parser.next(p)\n      Location.mkloc(Longident.Lident(s), loc)\n    }\n\n    switch p.Parser.token {\n    | Colon =>\n      Parser.next(p)\n      let fieldExpr = parseExpr(p)\n      Parser.optional(p, Comma) |> ignore\n      let expr = parseRecordExprWithStringKeys(~startPos, (field, fieldExpr), p)\n      Parser.expect(Rbrace, p)\n      expr\n    | _ =>\n      let tag = if p.mode == ParseForTypeChecker {\n        Some(\"js\")\n      } else {\n        None\n      }\n      let constant = Ast_helper.Exp.constant(~loc=field.loc, Parsetree.Pconst_string(s, tag))\n      let a = parsePrimaryExpr(~operand=constant, p)\n      let e = parseBinaryExpr(~a, p, 1)\n      let e = parseTernaryExpr(e, p)\n      switch p.Parser.token {\n      | Semicolon =>\n        let expr = parseExprBlock(~first=e, p)\n        Parser.expect(Rbrace, p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let braces = makeBracesAttr(loc)\n        {...expr, Parsetree.pexp_attributes: list{braces, ...expr.Parsetree.pexp_attributes}}\n      | Rbrace =>\n        Parser.next(p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let braces = makeBracesAttr(loc)\n        {...e, pexp_attributes: list{braces, ...e.pexp_attributes}}\n      | _ =>\n        let expr = parseExprBlock(~first=e, p)\n        Parser.expect(Rbrace, p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let braces = makeBracesAttr(loc)\n        {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n      }\n    }\n  | Uident(_) | Lident(_) =>\n    let startToken = p.token\n    let valueOrConstructor = parseValueOrConstructor(p)\n    switch valueOrConstructor.pexp_desc {\n    | Pexp_ident(pathIdent) =>\n      let identEndPos = p.prevEndPos\n      switch p.Parser.token {\n      | Comma =>\n        Parser.next(p)\n        let valueOrConstructor = switch startToken {\n        | Uident(_) => removeModuleNameFromPunnedFieldValue(valueOrConstructor)\n        | _ => 
valueOrConstructor\n        }\n\n        let expr = parseRecordExpr(~startPos, list{(pathIdent, valueOrConstructor)}, p)\n        Parser.expect(Rbrace, p)\n        expr\n      | Colon =>\n        Parser.next(p)\n        let fieldExpr = parseExpr(p)\n        switch p.token {\n        | Rbrace =>\n          Parser.next(p)\n          let loc = mkLoc(startPos, p.prevEndPos)\n          Ast_helper.Exp.record(~loc, list{(pathIdent, fieldExpr)}, None)\n        | _ =>\n          Parser.expect(Comma, p)\n          let expr = parseRecordExpr(~startPos, list{(pathIdent, fieldExpr)}, p)\n          Parser.expect(Rbrace, p)\n          expr\n        }\n      /* error case */\n      | Lident(_) =>\n        if p.prevEndPos.pos_lnum < p.startPos.pos_lnum {\n          Parser.expect(Comma, p)\n          let expr = parseRecordExpr(~startPos, list{(pathIdent, valueOrConstructor)}, p)\n          Parser.expect(Rbrace, p)\n          expr\n        } else {\n          Parser.expect(Colon, p)\n          let expr = parseRecordExpr(~startPos, list{(pathIdent, valueOrConstructor)}, p)\n          Parser.expect(Rbrace, p)\n          expr\n        }\n      | Semicolon =>\n        let expr = parseExprBlock(~first=Ast_helper.Exp.ident(pathIdent), p)\n        Parser.expect(Rbrace, p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let braces = makeBracesAttr(loc)\n        {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n      | Rbrace =>\n        Parser.next(p)\n        let expr = Ast_helper.Exp.ident(~loc=pathIdent.loc, pathIdent)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let braces = makeBracesAttr(loc)\n        {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n      | EqualGreater =>\n        let loc = mkLoc(startPos, identEndPos)\n        let ident = Location.mkloc(Longident.last(pathIdent.txt), loc)\n        let a = parseEs6ArrowExpression(\n          ~parameters=list{\n            TermParameter({\n              uncurried: false,\n       
       attrs: list{},\n              label: Asttypes.Nolabel,\n              expr: None,\n              pat: Ast_helper.Pat.var(ident),\n              pos: startPos,\n            }),\n          },\n          p,\n        )\n\n        let e = parseBinaryExpr(~a, p, 1)\n        let e = parseTernaryExpr(e, p)\n        switch p.Parser.token {\n        | Semicolon =>\n          let expr = parseExprBlock(~first=e, p)\n          Parser.expect(Rbrace, p)\n          let loc = mkLoc(startPos, p.prevEndPos)\n          let braces = makeBracesAttr(loc)\n          {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n        | Rbrace =>\n          Parser.next(p)\n          let loc = mkLoc(startPos, p.prevEndPos)\n          let braces = makeBracesAttr(loc)\n          {...e, pexp_attributes: list{braces, ...e.pexp_attributes}}\n        | _ =>\n          let expr = parseExprBlock(~first=e, p)\n          Parser.expect(Rbrace, p)\n          let loc = mkLoc(startPos, p.prevEndPos)\n          let braces = makeBracesAttr(loc)\n          {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n        }\n      | _ =>\n        Parser.leaveBreadcrumb(p, Grammar.ExprBlock)\n        let a = parsePrimaryExpr(~operand=Ast_helper.Exp.ident(~loc=pathIdent.loc, pathIdent), p)\n        let e = parseBinaryExpr(~a, p, 1)\n        let e = parseTernaryExpr(e, p)\n        Parser.eatBreadcrumb(p)\n        switch p.Parser.token {\n        | Semicolon =>\n          let expr = parseExprBlock(~first=e, p)\n          Parser.expect(Rbrace, p)\n          let loc = mkLoc(startPos, p.prevEndPos)\n          let braces = makeBracesAttr(loc)\n          {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n        | Rbrace =>\n          Parser.next(p)\n          let loc = mkLoc(startPos, p.prevEndPos)\n          let braces = makeBracesAttr(loc)\n          {...e, pexp_attributes: list{braces, ...e.pexp_attributes}}\n        | _ =>\n          let expr = parseExprBlock(~first=e, p)\n  
        Parser.expect(Rbrace, p)\n          let loc = mkLoc(startPos, p.prevEndPos)\n          let braces = makeBracesAttr(loc)\n          {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n        }\n      }\n    | _ =>\n      Parser.leaveBreadcrumb(p, Grammar.ExprBlock)\n      let a = parsePrimaryExpr(~operand=valueOrConstructor, p)\n      let e = parseBinaryExpr(~a, p, 1)\n      let e = parseTernaryExpr(e, p)\n      Parser.eatBreadcrumb(p)\n      switch p.Parser.token {\n      | Semicolon =>\n        let expr = parseExprBlock(~first=e, p)\n        Parser.expect(Rbrace, p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let braces = makeBracesAttr(loc)\n        {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n      | Rbrace =>\n        Parser.next(p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let braces = makeBracesAttr(loc)\n        {...e, pexp_attributes: list{braces, ...e.pexp_attributes}}\n      | _ =>\n        let expr = parseExprBlock(~first=e, p)\n        Parser.expect(Rbrace, p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let braces = makeBracesAttr(loc)\n        {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n      }\n    }\n  | _ =>\n    let expr = parseExprBlock(p)\n    Parser.expect(Rbrace, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    let braces = makeBracesAttr(loc)\n    {...expr, pexp_attributes: list{braces, ...expr.pexp_attributes}}\n  }\n}\n\nand parseRecordRowWithStringKey = p =>\n  switch p.Parser.token {\n  | String(s) =>\n    let loc = mkLoc(p.startPos, p.endPos)\n    Parser.next(p)\n    let field = Location.mkloc(Longident.Lident(s), loc)\n    switch p.Parser.token {\n    | Colon =>\n      Parser.next(p)\n      let fieldExpr = parseExpr(p)\n      Some(field, fieldExpr)\n    | _ => Some(field, Ast_helper.Exp.ident(~loc=field.loc, field))\n    }\n  | _ => None\n  }\n\nand parseRecordRow = p => {\n  let () = switch p.Parser.token {\n  | 
Token.DotDotDot =>\n    Parser.err(p, Diagnostics.message(ErrorMessages.recordExprSpread))\n    Parser.next(p)\n  | _ => ()\n  }\n\n  switch p.Parser.token {\n  | Lident(_) | Uident(_) =>\n    let startToken = p.token\n    let field = parseValuePath(p)\n    switch p.Parser.token {\n    | Colon =>\n      Parser.next(p)\n      let fieldExpr = parseExpr(p)\n      Some(field, fieldExpr)\n    | _ =>\n      let value = Ast_helper.Exp.ident(~loc=field.loc, field)\n      let value = switch startToken {\n      | Uident(_) => removeModuleNameFromPunnedFieldValue(value)\n      | _ => value\n      }\n\n      Some(field, value)\n    }\n  | _ => None\n  }\n}\n\nand parseRecordExprWithStringKeys = (~startPos, firstRow, p) => {\n  let rows = list{\n    firstRow,\n    ...parseCommaDelimitedRegion(\n      ~grammar=Grammar.RecordRowsStringKey,\n      ~closing=Rbrace,\n      ~f=parseRecordRowWithStringKey,\n      p,\n    ),\n  }\n  let loc = mkLoc(startPos, p.endPos)\n  let recordStrExpr = Ast_helper.Str.eval(~loc, Ast_helper.Exp.record(~loc, rows, None))\n  Ast_helper.Exp.extension(~loc, (Location.mkloc(\"obj\", loc), Parsetree.PStr(list{recordStrExpr})))\n}\n\nand parseRecordExpr = (~startPos, ~spread=None, rows, p) => {\n  let exprs = parseCommaDelimitedRegion(\n    ~grammar=Grammar.RecordRows,\n    ~closing=Rbrace,\n    ~f=parseRecordRow,\n    p,\n  )\n\n  let rows = List.concat(list{rows, exprs})\n  let () = switch rows {\n  | list{} =>\n    let msg = \"Record spread needs at least one field that's updated\"\n    Parser.err(p, Diagnostics.message(msg))\n  | _rows => ()\n  }\n\n  let loc = mkLoc(startPos, p.endPos)\n  Ast_helper.Exp.record(~loc, rows, spread)\n}\n\nand parseNewlineOrSemicolonExprBlock = p =>\n  switch p.Parser.token {\n  | Semicolon => Parser.next(p)\n  | token if Grammar.isBlockExprStart(token) =>\n    if p.prevEndPos.pos_lnum < p.startPos.pos_lnum {\n      ()\n    } else {\n      Parser.err(\n        ~startPos=p.prevEndPos,\n        ~endPos=p.endPos,\n        
p,\n        Diagnostics.message(\n          \"consecutive expressions on a line must be separated by ';' or a newline\",\n        ),\n      )\n    }\n  | _ => ()\n  }\n\nand parseExprBlockItem = p => {\n  let startPos = p.Parser.startPos\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | Module =>\n    Parser.next(p)\n    switch p.token {\n    | Lparen =>\n      let expr = parseFirstClassModuleExpr(~startPos, p)\n      let a = parsePrimaryExpr(~operand=expr, p)\n      let expr = parseBinaryExpr(~a, p, 1)\n      parseTernaryExpr(expr, p)\n    | _ =>\n      let name = switch p.Parser.token {\n      | Uident(ident) =>\n        let loc = mkLoc(p.startPos, p.endPos)\n        Parser.next(p)\n        Location.mkloc(ident, loc)\n      | t =>\n        Parser.err(p, Diagnostics.uident(t))\n        Location.mknoloc(\"_\")\n      }\n\n      let body = parseModuleBindingBody(p)\n      parseNewlineOrSemicolonExprBlock(p)\n      let expr = parseExprBlock(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Ast_helper.Exp.letmodule(~loc, name, body, expr)\n    }\n  | Exception =>\n    let extensionConstructor = parseExceptionDef(~attrs, p)\n    parseNewlineOrSemicolonExprBlock(p)\n    let blockExpr = parseExprBlock(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.letexception(~loc, extensionConstructor, blockExpr)\n  | Open =>\n    let od = parseOpenDescription(~attrs, p)\n    parseNewlineOrSemicolonExprBlock(p)\n    let blockExpr = parseExprBlock(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.open_(~loc, od.popen_override, od.popen_lid, blockExpr)\n  | Let =>\n    let (recFlag, letBindings) = parseLetBindings(~attrs, p)\n    parseNewlineOrSemicolonExprBlock(p)\n    let next = if Grammar.isBlockExprStart(p.Parser.token) {\n      parseExprBlock(p)\n    } else {\n      let loc = mkLoc(p.startPos, p.endPos)\n      Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident(\"()\"), loc), None)\n    }\n\n    let loc = 
mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.let_(~loc, recFlag, letBindings, next)\n  | _ =>\n    let e1 = {\n      let expr = parseExpr(p)\n      {...expr, pexp_attributes: List.concat(list{attrs, expr.pexp_attributes})}\n    }\n\n    parseNewlineOrSemicolonExprBlock(p)\n    if Grammar.isBlockExprStart(p.Parser.token) {\n      let e2 = parseExprBlock(p)\n      let loc = {...e1.pexp_loc, loc_end: e2.pexp_loc.loc_end}\n      Ast_helper.Exp.sequence(~loc, e1, e2)\n    } else {\n      e1\n    }\n  }\n}\n\n/* blockExpr ::= expr\n *            |  expr          ;\n *            |  expr          ; blockExpr\n *            |  module    ... ; blockExpr\n *            |  open      ... ; blockExpr\n *            |  exception ... ; blockExpr\n *            |  let       ...\n *            |  let       ... ;\n *            |  let       ... ; blockExpr\n *\n *  note: semi should be made optional\n *  a block of expression is always\n */\nand parseExprBlock = (~first=?, p) => {\n  Parser.leaveBreadcrumb(p, Grammar.ExprBlock)\n  let item = switch first {\n  | Some(e) => e\n  | None => parseExprBlockItem(p)\n  }\n\n  parseNewlineOrSemicolonExprBlock(p)\n  let blockExpr = if Grammar.isBlockExprStart(p.Parser.token) {\n    let next = parseExprBlockItem(p)\n    let loc = {...item.pexp_loc, loc_end: next.pexp_loc.loc_end}\n    Ast_helper.Exp.sequence(~loc, item, next)\n  } else {\n    item\n  }\n\n  Parser.eatBreadcrumb(p)\n  overParseConstrainedOrCoercedOrArrowExpression(p, blockExpr)\n}\n\nand parseTryExpression = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Try, p)\n  let expr = parseExpr(~context=WhenExpr, p)\n  Parser.expect(Res_token.catch, p)\n  Parser.expect(Lbrace, p)\n  let cases = parsePatternMatching(p)\n  Parser.expect(Rbrace, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Exp.try_(~loc, expr, cases)\n}\n\nand parseIfCondition = p => {\n  Parser.leaveBreadcrumb(p, Grammar.IfCondition)\n  /* doesn't make sense to try es6 arrow here? 
*/\n  let conditionExpr = parseExpr(~context=WhenExpr, p)\n  Parser.eatBreadcrumb(p)\n  conditionExpr\n}\n\nand parseThenBranch = p => {\n  Parser.leaveBreadcrumb(p, IfBranch)\n  Parser.expect(Lbrace, p)\n  let thenExpr = parseExprBlock(p)\n  Parser.expect(Rbrace, p)\n  Parser.eatBreadcrumb(p)\n  thenExpr\n}\n\nand parseElseBranch = p => {\n  Parser.expect(Lbrace, p)\n  let blockExpr = parseExprBlock(p)\n  Parser.expect(Rbrace, p)\n  blockExpr\n}\n\nand parseIfExpr = (startPos, p) => {\n  let conditionExpr = parseIfCondition(p)\n  let thenExpr = parseThenBranch(p)\n  let elseExpr = switch p.Parser.token {\n  | Else =>\n    Parser.endRegion(p)\n    Parser.leaveBreadcrumb(p, Grammar.ElseBranch)\n    Parser.next(p)\n    Parser.beginRegion(p)\n    let elseExpr = switch p.token {\n    | If => parseIfOrIfLetExpression(p)\n    | _ => parseElseBranch(p)\n    }\n\n    Parser.eatBreadcrumb(p)\n    Parser.endRegion(p)\n    Some(elseExpr)\n  | _ =>\n    Parser.endRegion(p)\n    None\n  }\n\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Exp.ifthenelse(~loc, conditionExpr, thenExpr, elseExpr)\n}\n\nand parseIfLetExpr = (startPos, p) => {\n  let pattern = parsePattern(p)\n  Parser.expect(Equal, p)\n  let conditionExpr = parseIfCondition(p)\n  let thenExpr = parseThenBranch(p)\n  let elseExpr = switch p.Parser.token {\n  | Else =>\n    Parser.endRegion(p)\n    Parser.leaveBreadcrumb(p, Grammar.ElseBranch)\n    Parser.next(p)\n    Parser.beginRegion(p)\n    let elseExpr = switch p.token {\n    | If => parseIfOrIfLetExpression(p)\n    | _ => parseElseBranch(p)\n    }\n\n    Parser.eatBreadcrumb(p)\n    Parser.endRegion(p)\n    elseExpr\n  | _ =>\n    Parser.endRegion(p)\n    let startPos = p.Parser.startPos\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident(\"()\"), loc), None)\n  }\n\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Exp.match_(\n    ~attrs=list{ifLetAttr, 
suppressFragileMatchWarningAttr},\n    ~loc,\n    conditionExpr,\n    list{\n      Ast_helper.Exp.case(pattern, thenExpr),\n      Ast_helper.Exp.case(Ast_helper.Pat.any(), elseExpr),\n    },\n  )\n}\n\nand parseIfOrIfLetExpression = p => {\n  Parser.beginRegion(p)\n  Parser.leaveBreadcrumb(p, Grammar.ExprIf)\n  let startPos = p.Parser.startPos\n  Parser.expect(If, p)\n  let expr = switch p.Parser.token {\n  | Let =>\n    Parser.next(p)\n    let ifLetExpr = parseIfLetExpr(startPos, p)\n    Parser.err(\n      ~startPos=ifLetExpr.pexp_loc.loc_start,\n      ~endPos=ifLetExpr.pexp_loc.loc_end,\n      p,\n      Diagnostics.message(ErrorMessages.experimentalIfLet(ifLetExpr)),\n    )\n    ifLetExpr\n  | _ => parseIfExpr(startPos, p)\n  }\n\n  Parser.eatBreadcrumb(p)\n  expr\n}\n\nand parseForRest = (hasOpeningParen, pattern, startPos, p) => {\n  Parser.expect(In, p)\n  let e1 = parseExpr(p)\n  let direction = switch p.Parser.token {\n  | Lident(\"to\") => Asttypes.Upto\n  | Lident(\"downto\") => Asttypes.Downto\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    Asttypes.Upto\n  }\n\n  Parser.next(p)\n  let e2 = parseExpr(~context=WhenExpr, p)\n  if hasOpeningParen {\n    Parser.expect(Rparen, p)\n  }\n  Parser.expect(Lbrace, p)\n  let bodyExpr = parseExprBlock(p)\n  Parser.expect(Rbrace, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Exp.for_(~loc, pattern, e1, e2, direction, bodyExpr)\n}\n\nand parseForExpression = p => {\n  let startPos = p.Parser.startPos\n  Parser.leaveBreadcrumb(p, Grammar.ExprFor)\n  Parser.expect(For, p)\n  Parser.beginRegion(p)\n  let forExpr = switch p.token {\n  | Lparen =>\n    let lparen = p.startPos\n    Parser.next(p)\n    switch p.token {\n    | Rparen =>\n      Parser.next(p)\n      let unitPattern = {\n        let loc = mkLoc(lparen, p.prevEndPos)\n        let lid = Location.mkloc(Longident.Lident(\"()\"), loc)\n        Ast_helper.Pat.construct(lid, None)\n      }\n\n      parseForRest(false, 
parseAliasPattern(~attrs=list{}, unitPattern, p), startPos, p)\n    | _ =>\n      Parser.leaveBreadcrumb(p, Grammar.Pattern)\n      let pat = parsePattern(p)\n      Parser.eatBreadcrumb(p)\n      switch p.token {\n      | Comma =>\n        Parser.next(p)\n        let tuplePattern = parseTuplePattern(~attrs=list{}, ~startPos=lparen, ~first=pat, p)\n\n        let pattern = parseAliasPattern(~attrs=list{}, tuplePattern, p)\n        parseForRest(false, pattern, startPos, p)\n      | _ => parseForRest(true, pat, startPos, p)\n      }\n    }\n  | _ =>\n    Parser.leaveBreadcrumb(p, Grammar.Pattern)\n    let pat = parsePattern(p)\n    Parser.eatBreadcrumb(p)\n    parseForRest(false, pat, startPos, p)\n  }\n\n  Parser.eatBreadcrumb(p)\n  Parser.endRegion(p)\n  forExpr\n}\n\nand parseWhileExpression = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(While, p)\n  let expr1 = parseExpr(~context=WhenExpr, p)\n  Parser.expect(Lbrace, p)\n  let expr2 = parseExprBlock(p)\n  Parser.expect(Rbrace, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Exp.while_(~loc, expr1, expr2)\n}\n\nand parsePatternGuard = p =>\n  switch p.Parser.token {\n  | When | If =>\n    Parser.next(p)\n    Some(parseExpr(~context=WhenExpr, p))\n  | _ => None\n  }\n\nand parsePatternMatchCase = p => {\n  Parser.beginRegion(p)\n  Parser.leaveBreadcrumb(p, Grammar.PatternMatchCase)\n  switch p.Parser.token {\n  | Token.Bar =>\n    Parser.next(p)\n    Parser.leaveBreadcrumb(p, Grammar.Pattern)\n    let lhs = parsePattern(p)\n    Parser.eatBreadcrumb(p)\n    let guard = parsePatternGuard(p)\n    let () = switch p.token {\n    | EqualGreater => Parser.next(p)\n    | _ => Recover.recoverEqualGreater(p)\n    }\n\n    let rhs = parseExprBlock(p)\n    Parser.endRegion(p)\n    Parser.eatBreadcrumb(p)\n    Some(Ast_helper.Exp.case(lhs, ~guard?, rhs))\n  | _ =>\n    Parser.endRegion(p)\n    Parser.eatBreadcrumb(p)\n    None\n  }\n}\n\nand parsePatternMatching = p => {\n  let cases = 
parseDelimitedRegion(\n    ~grammar=Grammar.PatternMatching,\n    ~closing=Rbrace,\n    ~f=parsePatternMatchCase,\n    p,\n  )\n\n  let () = switch cases {\n  | list{} =>\n    Parser.err(\n      ~startPos=p.prevEndPos,\n      p,\n      Diagnostics.message(\"Pattern matching needs at least one case\"),\n    )\n  | _ => ()\n  }\n\n  cases\n}\n\nand parseSwitchExpression = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Switch, p)\n  let switchExpr = parseExpr(~context=WhenExpr, p)\n  Parser.expect(Lbrace, p)\n  let cases = parsePatternMatching(p)\n  Parser.expect(Rbrace, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Exp.match_(~loc, switchExpr, cases)\n}\n\n/*\n * argument ::=\n *   | _                            (* syntax sugar *)\n *   | expr\n *   | expr : type\n *   | ~ label-name\n *   | ~ label-name\n *   | ~ label-name ?\n *   | ~ label-name =   expr\n *   | ~ label-name =   _           (* syntax sugar *)\n *   | ~ label-name =   expr : type\n *   | ~ label-name = ? expr\n *   | ~ label-name = ? _           (* syntax sugar *)\n *   | ~ label-name = ? expr : type\n *\n *  uncurried_argument ::=\n *   | . argument\n */\nand parseArgument = p =>\n  if (\n    p.Parser.token == Token.Tilde ||\n      (p.token == Dot ||\n      (p.token == Underscore || Grammar.isExprStart(p.token)))\n  ) {\n    switch p.Parser.token {\n    | Dot =>\n      let uncurried = true\n      Parser.next(p)\n      switch p.token {\n      /* apply(.) 
*/\n      | Rparen =>\n        let unitExpr = Ast_helper.Exp.construct(Location.mknoloc(Longident.Lident(\"()\")), None)\n\n        Some(uncurried, Asttypes.Nolabel, unitExpr)\n      | _ => parseArgument2(p, ~uncurried)\n      }\n    | _ => parseArgument2(p, ~uncurried=false)\n    }\n  } else {\n    None\n  }\n\nand parseArgument2 = (p, ~uncurried) =>\n  switch p.Parser.token {\n  /* foo(_), do not confuse with foo(_ => x), TODO: performance */\n  | Underscore if !isEs6ArrowExpression(~inTernary=false, p) =>\n    let loc = mkLoc(p.startPos, p.endPos)\n    Parser.next(p)\n    let exp = Ast_helper.Exp.ident(~loc, Location.mkloc(Longident.Lident(\"_\"), loc))\n    Some(uncurried, Asttypes.Nolabel, exp)\n  | Tilde =>\n    Parser.next(p)\n    /* TODO: nesting of pattern matches not intuitive for error recovery */\n    switch p.Parser.token {\n    | Lident(ident) =>\n      let startPos = p.startPos\n      Parser.next(p)\n      let endPos = p.prevEndPos\n      let loc = mkLoc(startPos, endPos)\n      let propLocAttr = (Location.mkloc(\"ns.namedArgLoc\", loc), Parsetree.PStr(list{}))\n      let identExpr = Ast_helper.Exp.ident(\n        ~attrs=list{propLocAttr},\n        ~loc,\n        Location.mkloc(Longident.Lident(ident), loc),\n      )\n      switch p.Parser.token {\n      | Question =>\n        Parser.next(p)\n        Some(uncurried, Asttypes.Optional(ident), identExpr)\n      | Equal =>\n        Parser.next(p)\n        let label = switch p.Parser.token {\n        | Question =>\n          Parser.next(p)\n          Asttypes.Optional(ident)\n        | _ => Labelled(ident)\n        }\n\n        let expr = switch p.Parser.token {\n        | Underscore if !isEs6ArrowExpression(~inTernary=false, p) =>\n          let loc = mkLoc(p.startPos, p.endPos)\n          Parser.next(p)\n          Ast_helper.Exp.ident(~loc, Location.mkloc(Longident.Lident(\"_\"), loc))\n        | _ =>\n          let expr = parseConstrainedOrCoercedExpr(p)\n          {...expr, pexp_attributes: 
list{propLocAttr, ...expr.pexp_attributes}}\n        }\n\n        Some(uncurried, label, expr)\n      | Colon =>\n        Parser.next(p)\n        let typ = parseTypExpr(p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let expr = Ast_helper.Exp.constraint_(~attrs=list{propLocAttr}, ~loc, identExpr, typ)\n        Some(uncurried, Labelled(ident), expr)\n      | _ => Some(uncurried, Labelled(ident), identExpr)\n      }\n    | t =>\n      Parser.err(p, Diagnostics.lident(t))\n      Some(uncurried, Nolabel, Recover.defaultExpr())\n    }\n  | _ => Some(uncurried, Nolabel, parseConstrainedOrCoercedExpr(p))\n  }\n\nand parseCallExpr = (p, funExpr) => {\n  Parser.expect(Lparen, p)\n  let startPos = p.Parser.startPos\n  Parser.leaveBreadcrumb(p, Grammar.ExprCall)\n  let args = parseCommaDelimitedRegion(\n    ~grammar=Grammar.ArgumentList,\n    ~closing=Rparen,\n    ~f=parseArgument,\n    p,\n  )\n\n  Parser.expect(Rparen, p)\n  let args = switch args {\n  | list{} =>\n    let loc = mkLoc(startPos, p.prevEndPos)\n    /* No args -> unit sugar: `foo()` */\n    list{\n      (\n        false,\n        Asttypes.Nolabel,\n        Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident(\"()\"), loc), None),\n      ),\n    }\n  | list{(\n      true,\n      Asttypes.Nolabel,\n      {\n        pexp_desc: Pexp_construct({txt: Longident.Lident(\"()\")}, None),\n        pexp_loc: loc,\n        pexp_attributes: list{},\n      } as expr,\n    )}\n    if !loc.loc_ghost &&\n    p.mode == ParseForTypeChecker => /* Since there is no syntax space for arity zero vs arity one,\n     *  we expand\n     *    `fn(. ())` into\n     *    `fn(. 
{let __res_unit = (); __res_unit})`\n     *  when the parsetree is intended for type checking\n     *\n     *  Note:\n     *    `fn(.)` is treated as zero arity application.\n     *  The invisible unit expression here has loc_ghost === true\n     *\n     *  Related: https://github.com/rescript-lang/syntax/issues/138\n     */\n    list{\n      (\n        true,\n        Asttypes.Nolabel,\n        Ast_helper.Exp.let_(\n          Asttypes.Nonrecursive,\n          list{Ast_helper.Vb.mk(Ast_helper.Pat.var(Location.mknoloc(\"__res_unit\")), expr)},\n          Ast_helper.Exp.ident(Location.mknoloc(Longident.Lident(\"__res_unit\"))),\n        ),\n      ),\n    }\n  | args => args\n  }\n\n  let loc = {...funExpr.pexp_loc, loc_end: p.prevEndPos}\n  let args = switch args {\n  | list{(u, lbl, expr), ...args} =>\n    let group = ((grp, acc), (uncurried, lbl, expr)) => {\n      let (_u, grp) = grp\n      if uncurried === true {\n        ((true, list{(lbl, expr)}), list{(_u, List.rev(grp)), ...acc})\n      } else {\n        ((_u, list{(lbl, expr), ...grp}), acc)\n      }\n    }\n\n    let ((_u, grp), acc) = List.fold_left(group, ((u, list{(lbl, expr)}), list{}), args)\n    List.rev(list{(_u, List.rev(grp)), ...acc})\n  | list{} => list{}\n  }\n\n  let apply = List.fold_left((callBody, group) => {\n    let (uncurried, args) = group\n    let (args, wrap) = processUnderscoreApplication(args)\n    let exp = if uncurried {\n      let attrs = list{uncurryAttr}\n      Ast_helper.Exp.apply(~loc, ~attrs, callBody, args)\n    } else {\n      Ast_helper.Exp.apply(~loc, callBody, args)\n    }\n\n    wrap(exp)\n  }, funExpr, args)\n\n  Parser.eatBreadcrumb(p)\n  apply\n}\n\nand parseValueOrConstructor = p => {\n  let startPos = p.Parser.startPos\n  let rec aux = (p, acc) =>\n    switch p.Parser.token {\n    | Uident(ident) =>\n      let endPosLident = p.endPos\n      Parser.next(p)\n      switch p.Parser.token {\n      | Dot =>\n        Parser.next(p)\n        aux(p, list{ident, ...acc})\n    
  | Lparen if p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>\n        let lparen = p.startPos\n        let args = parseConstructorArgs(p)\n        let rparen = p.prevEndPos\n        let lident = buildLongident(list{ident, ...acc})\n        let tail = switch args {\n        | list{} => None\n        | list{{Parsetree.pexp_desc: Pexp_tuple(_)} as arg} as args =>\n          let loc = mkLoc(lparen, rparen)\n          if p.mode == ParseForTypeChecker {\n            /* Some(1, 2) for type-checker */\n            Some(arg)\n          } else {\n            /* Some((1, 2)) for printer */\n            Some(Ast_helper.Exp.tuple(~loc, args))\n          }\n        | list{arg} => Some(arg)\n        | args =>\n          let loc = mkLoc(lparen, rparen)\n          Some(Ast_helper.Exp.tuple(~loc, args))\n        }\n\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let identLoc = mkLoc(startPos, endPosLident)\n        Ast_helper.Exp.construct(~loc, Location.mkloc(lident, identLoc), tail)\n      | _ =>\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let lident = buildLongident(list{ident, ...acc})\n        Ast_helper.Exp.construct(~loc, Location.mkloc(lident, loc), None)\n      }\n    | Lident(ident) =>\n      Parser.next(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      let lident = buildLongident(list{ident, ...acc})\n      Ast_helper.Exp.ident(~loc, Location.mkloc(lident, loc))\n    | token =>\n      if acc == list{} {\n        Parser.next(p)\n        Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n        Recover.defaultExpr()\n      } else {\n        let loc = mkLoc(startPos, p.prevEndPos)\n        Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n        let lident = buildLongident(list{\"_\", ...acc})\n        Ast_helper.Exp.ident(~loc, Location.mkloc(lident, loc))\n      }\n    }\n\n  aux(p, list{})\n}\n\nand parsePolyVariantExpr = p => {\n  let startPos = p.startPos\n  let (ident, _loc) = parseHashIdent(~startPos, p)\n  
switch p.Parser.token {\n  | Lparen if p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>\n    let lparen = p.startPos\n    let args = parseConstructorArgs(p)\n    let rparen = p.prevEndPos\n    let loc_paren = mkLoc(lparen, rparen)\n    let tail = switch args {\n    | list{} => None\n    | list{{Parsetree.pexp_desc: Pexp_tuple(_)} as expr} as args =>\n      if p.mode == ParseForTypeChecker {\n        /* #a(1, 2) for type-checker */\n        Some(expr)\n      } else {\n        /* #a((1, 2)) for type-checker */\n        Some(Ast_helper.Exp.tuple(~loc=loc_paren, args))\n      }\n    | list{arg} => Some(arg)\n    | args =>\n      /* #a((1, 2)) for printer */\n      Some(Ast_helper.Exp.tuple(~loc=loc_paren, args))\n    }\n\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.variant(~loc, ident, tail)\n  | _ =>\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Exp.variant(~loc, ident, None)\n  }\n}\n\nand parseConstructorArgs = p => {\n  let lparen = p.Parser.startPos\n  Parser.expect(Lparen, p)\n  let args = parseCommaDelimitedRegion(\n    ~grammar=Grammar.ExprList,\n    ~f=parseConstrainedExprRegion,\n    ~closing=Rparen,\n    p,\n  )\n\n  Parser.expect(Rparen, p)\n  switch args {\n  | list{} =>\n    let loc = mkLoc(lparen, p.prevEndPos)\n    list{Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident(\"()\"), loc), None)}\n  | args => args\n  }\n}\n\nand parseTupleExpr = (~first, ~startPos, p) => {\n  let exprs = list{\n    first,\n    ...parseCommaDelimitedRegion(\n      p,\n      ~grammar=Grammar.ExprList,\n      ~closing=Rparen,\n      ~f=parseConstrainedExprRegion,\n    ),\n  }\n\n  Parser.expect(Rparen, p)\n  let () = switch exprs {\n  | list{_} =>\n    Parser.err(\n      ~startPos,\n      ~endPos=p.prevEndPos,\n      p,\n      Diagnostics.message(ErrorMessages.tupleSingleElement),\n    )\n  | _ => ()\n  }\n\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Exp.tuple(~loc, exprs)\n}\n\nand parseSpreadExprRegion = p =>\n  
switch p.Parser.token {\n  | DotDotDot =>\n    Parser.next(p)\n    let expr = parseConstrainedOrCoercedExpr(p)\n    Some(true, expr)\n  | token if Grammar.isExprStart(token) => Some(false, parseConstrainedOrCoercedExpr(p))\n  | _ => None\n  }\n\nand parseListExpr = (~startPos, p) => {\n  let listExprs = parseCommaDelimitedReversedList(\n    p,\n    ~grammar=Grammar.ListExpr,\n    ~closing=Rbrace,\n    ~f=parseSpreadExprRegion,\n  )\n\n  Parser.expect(Rbrace, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  switch listExprs {\n  | list{(true, expr), ...exprs} =>\n    let exprs = exprs |> List.map(snd) |> List.rev\n    makeListExpression(loc, exprs, Some(expr))\n  | exprs =>\n    let exprs =\n      exprs\n      |> List.map(((spread, expr)) => {\n        if spread {\n          Parser.err(p, Diagnostics.message(ErrorMessages.listExprSpread))\n        }\n        expr\n      })\n      |> List.rev\n\n    makeListExpression(loc, exprs, None)\n  }\n}\n\n/* Overparse ... and give a nice error message */\nand parseNonSpreadExp = (~msg, p) => {\n  let () = switch p.Parser.token {\n  | DotDotDot =>\n    Parser.err(p, Diagnostics.message(msg))\n    Parser.next(p)\n  | _ => ()\n  }\n\n  switch p.Parser.token {\n  | token if Grammar.isExprStart(token) =>\n    let expr = parseExpr(p)\n    switch p.Parser.token {\n    | Colon =>\n      Parser.next(p)\n      let typ = parseTypExpr(p)\n      let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end)\n      Some(Ast_helper.Exp.constraint_(~loc, expr, typ))\n    | _ => Some(expr)\n    }\n  | _ => None\n  }\n}\n\nand parseArrayExp = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Lbracket, p)\n  let exprs = parseCommaDelimitedRegion(\n    p,\n    ~grammar=Grammar.ExprList,\n    ~closing=Rbracket,\n    ~f=parseNonSpreadExp(~msg=ErrorMessages.arrayExprSpread),\n  )\n\n  Parser.expect(Rbracket, p)\n  Ast_helper.Exp.array(~loc=mkLoc(startPos, p.prevEndPos), exprs)\n}\n\n/* TODO: check attributes in the case of poly type 
vars,\n * might be context dependend: parseFieldDeclaration (see ocaml) */\nand parsePolyTypeExpr = p => {\n  let startPos = p.Parser.startPos\n  switch p.Parser.token {\n  | SingleQuote =>\n    let vars = parseTypeVarList(p)\n    switch vars {\n    | list{_v1, _v2, ..._} =>\n      Parser.expect(Dot, p)\n      let typ = parseTypExpr(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Ast_helper.Typ.poly(~loc, vars, typ)\n    | list{var} =>\n      switch p.Parser.token {\n      | Dot =>\n        Parser.next(p)\n        let typ = parseTypExpr(p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        Ast_helper.Typ.poly(~loc, vars, typ)\n      | EqualGreater =>\n        Parser.next(p)\n        let typ = Ast_helper.Typ.var(~loc=var.loc, var.txt)\n        let returnType = parseTypExpr(~alias=false, p)\n        let loc = mkLoc(typ.Parsetree.ptyp_loc.loc_start, p.prevEndPos)\n        Ast_helper.Typ.arrow(~loc, Asttypes.Nolabel, typ, returnType)\n      | _ => Ast_helper.Typ.var(~loc=var.loc, var.txt)\n      }\n    | _ => assert false\n    }\n  | _ => parseTypExpr(p)\n  }\n}\n\n/* 'a 'b 'c */\nand parseTypeVarList = p => {\n  let rec loop = (p, vars) =>\n    switch p.Parser.token {\n    | SingleQuote =>\n      Parser.next(p)\n      let (lident, loc) = parseLident(p)\n      let var = Location.mkloc(lident, loc)\n      loop(p, list{var, ...vars})\n    | _ => List.rev(vars)\n    }\n\n  loop(p, list{})\n}\n\nand parseLidentList = p => {\n  let rec loop = (p, ls) =>\n    switch p.Parser.token {\n    | Lident(lident) =>\n      let loc = mkLoc(p.startPos, p.endPos)\n      Parser.next(p)\n      loop(p, list{Location.mkloc(lident, loc), ...ls})\n    | _ => List.rev(ls)\n    }\n\n  loop(p, list{})\n}\n\nand parseAtomicTypExpr = (~attrs, p) => {\n  Parser.leaveBreadcrumb(p, Grammar.AtomicTypExpr)\n  let startPos = p.Parser.startPos\n  let typ = switch p.Parser.token {\n  | SingleQuote =>\n    Parser.next(p)\n    let (ident, loc) = parseIdent(~msg=ErrorMessages.typeVar, 
~startPos=p.startPos, p)\n    Ast_helper.Typ.var(~loc, ~attrs, ident)\n  | Underscore =>\n    let endPos = p.endPos\n    Parser.next(p)\n    Ast_helper.Typ.any(~loc=mkLoc(startPos, endPos), ~attrs, ())\n  | Lparen =>\n    Parser.next(p)\n    switch p.Parser.token {\n    | Rparen =>\n      Parser.next(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      let unitConstr = Location.mkloc(Longident.Lident(\"unit\"), loc)\n      Ast_helper.Typ.constr(~attrs, unitConstr, list{})\n    | _ =>\n      let t = parseTypExpr(p)\n      switch p.token {\n      | Comma =>\n        Parser.next(p)\n        parseTupleType(~attrs, ~first=t, ~startPos, p)\n      | _ =>\n        Parser.expect(Rparen, p)\n        {\n          ...t,\n          ptyp_loc: mkLoc(startPos, p.prevEndPos),\n          ptyp_attributes: List.concat(list{attrs, t.ptyp_attributes}),\n        }\n      }\n    }\n  | Lbracket => parsePolymorphicVariantType(~attrs, p)\n  | Uident(_) | Lident(_) =>\n    let constr = parseValuePath(p)\n    let args = parseTypeConstructorArgs(~constrName=constr, p)\n    Ast_helper.Typ.constr(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, constr, args)\n  | Module =>\n    Parser.next(p)\n    Parser.expect(Lparen, p)\n    let packageType = parsePackageType(~startPos, ~attrs, p)\n    Parser.expect(Rparen, p)\n    {...packageType, ptyp_loc: mkLoc(startPos, p.prevEndPos)}\n  | Percent =>\n    let extension = parseExtension(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Typ.extension(~attrs, ~loc, extension)\n  | Lbrace => parseRecordOrObjectType(~attrs, p)\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    switch skipTokensAndMaybeRetry(p, ~isStartOfGrammar=Grammar.isAtomicTypExprStart) {\n    | Some() => parseAtomicTypExpr(~attrs, p)\n    | None =>\n      Parser.err(~startPos=p.prevEndPos, p, Diagnostics.unexpected(token, p.breadcrumbs))\n      Recover.defaultType()\n    }\n  }\n\n  Parser.eatBreadcrumb(p)\n  typ\n}\n\n/* package-type\t::=\n    
| modtype-path\n    ∣ modtype-path with package-constraint  { and package-constraint }\n */\nand parsePackageType = (~startPos, ~attrs, p) => {\n  let modTypePath = parseModuleLongIdent(~lowercase=true, p)\n  switch p.Parser.token {\n  | Lident(\"with\") =>\n    Parser.next(p)\n    let constraints = parsePackageConstraints(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Typ.package(~loc, ~attrs, modTypePath, constraints)\n  | _ =>\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Typ.package(~loc, ~attrs, modTypePath, list{})\n  }\n}\n\n/* package-constraint  { and package-constraint } */\nand parsePackageConstraints = p => {\n  let first = {\n    Parser.expect(Typ, p)\n    let typeConstr = parseValuePath(p)\n    Parser.expect(Equal, p)\n    let typ = parseTypExpr(p)\n    (typeConstr, typ)\n  }\n\n  let rest = parseRegion(~grammar=Grammar.PackageConstraint, ~f=parsePackageConstraint, p)\n\n  list{first, ...rest}\n}\n\n/* and type typeconstr = typexpr */\nand parsePackageConstraint = p =>\n  switch p.Parser.token {\n  | And =>\n    Parser.next(p)\n    Parser.expect(Typ, p)\n    let typeConstr = parseValuePath(p)\n    Parser.expect(Equal, p)\n    let typ = parseTypExpr(p)\n    Some(typeConstr, typ)\n  | _ => None\n  }\n\nand parseRecordOrObjectType = (~attrs, p) => {\n  /* for inline record in constructor */\n  let startPos = p.Parser.startPos\n  Parser.expect(Lbrace, p)\n  let closedFlag = switch p.token {\n  | DotDot =>\n    Parser.next(p)\n    Asttypes.Open\n  | Dot =>\n    Parser.next(p)\n    Asttypes.Closed\n  | _ => Asttypes.Closed\n  }\n\n  let () = switch p.token {\n  | Lident(_) => Parser.err(p, Diagnostics.message(ErrorMessages.forbiddenInlineRecordDeclaration))\n  | _ => ()\n  }\n\n  let startFirstField = p.startPos\n  let fields = parseCommaDelimitedRegion(\n    ~grammar=Grammar.StringFieldDeclarations,\n    ~closing=Rbrace,\n    ~f=parseStringFieldDeclaration,\n    p,\n  )\n\n  let () = switch fields {\n  | 
list{Parsetree.Oinherit({ptyp_loc})} =>\n    /* {...x}, spread without extra fields */\n    Parser.err(\n      p,\n      ~startPos=startFirstField,\n      ~endPos=ptyp_loc.loc_end,\n      Diagnostics.message(ErrorMessages.sameTypeSpread),\n    )\n  | _ => ()\n  }\n\n  Parser.expect(Rbrace, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Typ.object_(~loc, ~attrs, fields, closedFlag)\n}\n\n/* TODO: check associativity in combination with attributes */\nand parseTypeAlias = (p, typ) =>\n  switch p.Parser.token {\n  | As =>\n    Parser.next(p)\n    Parser.expect(SingleQuote, p)\n    let (ident, _loc) = parseLident(p)\n    /* TODO: how do we parse attributes here? */\n    Ast_helper.Typ.alias(~loc=mkLoc(typ.Parsetree.ptyp_loc.loc_start, p.prevEndPos), typ, ident)\n  | _ => typ\n  }\n\n/* type_parameter ::=\n *  | type_expr\n *  | ~ident: type_expr\n *  | ~ident: type_expr=?\n *\n * note:\n *  | attrs ~ident: type_expr    -> attrs are on the arrow\n *  | attrs type_expr            -> attrs are here part of the type_expr\n *\n * uncurried_type_parameter ::=\n *  | . 
type_parameter\n */\nand parseTypeParameter = p =>\n  if p.Parser.token == Token.Tilde || (p.token == Dot || Grammar.isTypExprStart(p.token)) {\n    let startPos = p.Parser.startPos\n    let uncurried = Parser.optional(p, Dot)\n    let attrs = parseAttributes(p)\n    switch p.Parser.token {\n    | Tilde =>\n      Parser.next(p)\n      let (name, loc) = parseLident(p)\n      let lblLocAttr = (Location.mkloc(\"ns.namedArgLoc\", loc), Parsetree.PStr(list{}))\n      Parser.expect(~grammar=Grammar.TypeExpression, Colon, p)\n      let typ = {\n        let typ = parseTypExpr(p)\n        {...typ, ptyp_attributes: list{lblLocAttr, ...typ.ptyp_attributes}}\n      }\n\n      switch p.Parser.token {\n      | Equal =>\n        Parser.next(p)\n        Parser.expect(Question, p)\n        Some(uncurried, attrs, Asttypes.Optional(name), typ, startPos)\n      | _ => Some(uncurried, attrs, Asttypes.Labelled(name), typ, startPos)\n      }\n    | Lident(_) =>\n      let (name, loc) = parseLident(p)\n      switch p.token {\n      | Colon =>\n        let () = {\n          let error = Diagnostics.message(ErrorMessages.missingTildeLabeledParameter(name))\n          Parser.err(~startPos=loc.loc_start, ~endPos=loc.loc_end, p, error)\n        }\n\n        Parser.next(p)\n        let typ = parseTypExpr(p)\n        switch p.Parser.token {\n        | Equal =>\n          Parser.next(p)\n          Parser.expect(Question, p)\n          Some(uncurried, attrs, Asttypes.Optional(name), typ, startPos)\n        | _ => Some(uncurried, attrs, Asttypes.Labelled(name), typ, startPos)\n        }\n      | _ =>\n        let constr = Location.mkloc(Longident.Lident(name), loc)\n        let args = parseTypeConstructorArgs(~constrName=constr, p)\n        let typ = Ast_helper.Typ.constr(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, constr, args)\n\n        let typ = parseArrowTypeRest(~es6Arrow=true, ~startPos, typ, p)\n        let typ = parseTypeAlias(p, typ)\n        Some(uncurried, list{}, Asttypes.Nolabel, typ, 
startPos)\n      }\n    | _ =>\n      let typ = parseTypExpr(p)\n      let typWithAttributes = {\n        ...typ,\n        ptyp_attributes: List.concat(list{attrs, typ.ptyp_attributes}),\n      }\n      Some(uncurried, list{}, Asttypes.Nolabel, typWithAttributes, startPos)\n    }\n  } else {\n    None\n  }\n\n/* (int, ~x:string, float) */\nand parseTypeParameters = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Lparen, p)\n  switch p.Parser.token {\n  | Rparen =>\n    Parser.next(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    let unitConstr = Location.mkloc(Longident.Lident(\"unit\"), loc)\n    let typ = Ast_helper.Typ.constr(unitConstr, list{})\n    list{(false, list{}, Asttypes.Nolabel, typ, startPos)}\n  | _ =>\n    let params = parseCommaDelimitedRegion(\n      ~grammar=Grammar.TypeParameters,\n      ~closing=Rparen,\n      ~f=parseTypeParameter,\n      p,\n    )\n\n    Parser.expect(Rparen, p)\n    params\n  }\n}\n\nand parseEs6ArrowType = (~attrs, p) => {\n  let startPos = p.Parser.startPos\n  switch p.Parser.token {\n  | Tilde =>\n    Parser.next(p)\n    let (name, loc) = parseLident(p)\n    let lblLocAttr = (Location.mkloc(\"ns.namedArgLoc\", loc), Parsetree.PStr(list{}))\n    Parser.expect(~grammar=Grammar.TypeExpression, Colon, p)\n    let typ = {\n      let typ = parseTypExpr(~alias=false, ~es6Arrow=false, p)\n      {...typ, ptyp_attributes: list{lblLocAttr, ...typ.ptyp_attributes}}\n    }\n\n    let arg = switch p.Parser.token {\n    | Equal =>\n      Parser.next(p)\n      Parser.expect(Question, p)\n      Asttypes.Optional(name)\n    | _ => Asttypes.Labelled(name)\n    }\n\n    Parser.expect(EqualGreater, p)\n    let returnType = parseTypExpr(~alias=false, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Typ.arrow(~loc, ~attrs, arg, typ, returnType)\n  | _ =>\n    let parameters = parseTypeParameters(p)\n    Parser.expect(EqualGreater, p)\n    let returnType = parseTypExpr(~alias=false, p)\n    let endPos = 
p.prevEndPos\n    let typ = List.fold_right(((uncurried, attrs, argLbl, typ, startPos), t) => {\n      let attrs = if uncurried {\n        list{uncurryAttr, ...attrs}\n      } else {\n        attrs\n      }\n      Ast_helper.Typ.arrow(~loc=mkLoc(startPos, endPos), ~attrs, argLbl, typ, t)\n    }, parameters, returnType)\n\n    {\n      ...typ,\n      ptyp_attributes: List.concat(list{typ.ptyp_attributes, attrs}),\n      ptyp_loc: mkLoc(startPos, p.prevEndPos),\n    }\n  }\n}\n\n/*\n * typexpr ::=\n *  | 'ident\n *  | _\n *  | (typexpr)\n *  | typexpr => typexpr            --> es6 arrow\n *  | (typexpr, typexpr) => typexpr --> es6 arrow\n *  | /typexpr, typexpr, typexpr/  --> tuple\n *  | typeconstr\n *  | typeconstr<typexpr>\n *  | typeconstr<typexpr, typexpr,>\n *  | typexpr as 'ident\n *  | %attr-id                      --> extension\n *  | %attr-id(payload)             --> extension\n *\n * typeconstr ::=\n *  | lident\n *  | uident.lident\n *  | uident.uident.lident     --> long module path\n */\nand parseTypExpr = (~attrs=?, ~es6Arrow=true, ~alias=true, p) => {\n  /* Parser.leaveBreadcrumb p Grammar.TypeExpression; */\n  let startPos = p.Parser.startPos\n  let attrs = switch attrs {\n  | Some(attrs) => attrs\n  | None => parseAttributes(p)\n  }\n  let typ = if es6Arrow && isEs6ArrowType(p) {\n    parseEs6ArrowType(~attrs, p)\n  } else {\n    let typ = parseAtomicTypExpr(~attrs, p)\n    parseArrowTypeRest(~es6Arrow, ~startPos, typ, p)\n  }\n\n  let typ = if alias {\n    parseTypeAlias(p, typ)\n  } else {\n    typ\n  }\n\n  /* Parser.eatBreadcrumb p; */\n  typ\n}\n\nand parseArrowTypeRest = (~es6Arrow, ~startPos, typ, p) =>\n  switch p.Parser.token {\n  | (EqualGreater | MinusGreater) as token if es6Arrow === true =>\n    /* error recovery */\n    if token == MinusGreater {\n      Parser.expect(EqualGreater, p)\n    }\n    Parser.next(p)\n    let returnType = parseTypExpr(~alias=false, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    
Ast_helper.Typ.arrow(~loc, Asttypes.Nolabel, typ, returnType)\n  | _ => typ\n  }\n\nand parseTypExprRegion = p =>\n  if Grammar.isTypExprStart(p.Parser.token) {\n    Some(parseTypExpr(p))\n  } else {\n    None\n  }\n\nand parseTupleType = (~attrs, ~first, ~startPos, p) => {\n  let typexprs = list{\n    first,\n    ...parseCommaDelimitedRegion(\n      ~grammar=Grammar.TypExprList,\n      ~closing=Rparen,\n      ~f=parseTypExprRegion,\n      p,\n    ),\n  }\n\n  Parser.expect(Rparen, p)\n  let () = switch typexprs {\n  | list{_} =>\n    Parser.err(\n      ~startPos,\n      ~endPos=p.prevEndPos,\n      p,\n      Diagnostics.message(ErrorMessages.tupleSingleElement),\n    )\n  | _ => ()\n  }\n\n  let tupleLoc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Typ.tuple(~attrs, ~loc=tupleLoc, typexprs)\n}\n\nand parseTypeConstructorArgRegion = p =>\n  if Grammar.isTypExprStart(p.Parser.token) {\n    Some(parseTypExpr(p))\n  } else if p.token == LessThan {\n    Parser.next(p)\n    parseTypeConstructorArgRegion(p)\n  } else {\n    None\n  }\n\n/* Js.Nullable.value<'a> */\nand parseTypeConstructorArgs = (~constrName, p) => {\n  let opening = p.Parser.token\n  let openingStartPos = p.startPos\n  switch opening {\n  | LessThan | Lparen =>\n    Scanner.setDiamondMode(p.scanner)\n    Parser.next(p)\n    let typeArgs = /* TODO: change Grammar.TypExprList to TypArgList!!! Why did I wrote this? 
*/\n    parseCommaDelimitedRegion(\n      ~grammar=Grammar.TypExprList,\n      ~closing=GreaterThan,\n      ~f=parseTypeConstructorArgRegion,\n      p,\n    )\n\n    let () = switch p.token {\n    | Rparen if opening == Token.Lparen =>\n      let typ = Ast_helper.Typ.constr(constrName, typeArgs)\n      let msg =\n        Doc.breakableGroup(\n          ~forceBreak=true,\n          Doc.concat(list{\n            Doc.text(\"Type parameters require angle brackets:\"),\n            Doc.indent(\n              Doc.concat(list{Doc.line, ResPrinter.printTypExpr(typ, CommentTable.empty)}),\n            ),\n          }),\n        ) |> Doc.toString(~width=80)\n\n      Parser.err(~startPos=openingStartPos, p, Diagnostics.message(msg))\n      Parser.next(p)\n    | _ => Parser.expect(GreaterThan, p)\n    }\n\n    Scanner.popMode(p.scanner, Diamond)\n    typeArgs\n  | _ => list{}\n  }\n}\n\n/* string-field-decl ::=\n *  | string: poly-typexpr\n *  | attributes string-field-decl */\nand parseStringFieldDeclaration = p => {\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | String(name) =>\n    let nameStartPos = p.startPos\n    let nameEndPos = p.endPos\n    Parser.next(p)\n    let fieldName = Location.mkloc(name, mkLoc(nameStartPos, nameEndPos))\n    Parser.expect(~grammar=Grammar.TypeExpression, Colon, p)\n    let typ = parsePolyTypeExpr(p)\n    Some(Parsetree.Otag(fieldName, attrs, typ))\n  | DotDotDot =>\n    Parser.next(p)\n    let typ = parseTypExpr(p)\n    Some(Parsetree.Oinherit(typ))\n  | Lident(name) =>\n    let nameLoc = mkLoc(p.startPos, p.endPos)\n    Parser.err(p, Diagnostics.message(ErrorMessages.objectQuotedFieldName(name)))\n    Parser.next(p)\n    let fieldName = Location.mkloc(name, nameLoc)\n    Parser.expect(~grammar=Grammar.TypeExpression, Colon, p)\n    let typ = parsePolyTypeExpr(p)\n    Some(Parsetree.Otag(fieldName, attrs, typ))\n  | _token => None\n  }\n}\n\n/* field-decl\t::=\n *  | [mutable] field-name : poly-typexpr\n *  | attributes 
field-decl */\nand parseFieldDeclaration = p => {\n  let startPos = p.Parser.startPos\n  let attrs = parseAttributes(p)\n  let mut = if Parser.optional(p, Token.Mutable) {\n    Asttypes.Mutable\n  } else {\n    Asttypes.Immutable\n  }\n\n  let (lident, loc) = switch p.token {\n  | _ => parseLident(p)\n  }\n\n  let name = Location.mkloc(lident, loc)\n  let typ = switch p.Parser.token {\n  | Colon =>\n    Parser.next(p)\n    parsePolyTypeExpr(p)\n  | _ => Ast_helper.Typ.constr(~loc=name.loc, {...name, txt: Lident(name.txt)}, list{})\n  }\n\n  let loc = mkLoc(startPos, typ.ptyp_loc.loc_end)\n  Ast_helper.Type.field(~attrs, ~loc, ~mut, name, typ)\n}\n\nand parseFieldDeclarationRegion = p => {\n  let startPos = p.Parser.startPos\n  let attrs = parseAttributes(p)\n  let mut = if Parser.optional(p, Token.Mutable) {\n    Asttypes.Mutable\n  } else {\n    Asttypes.Immutable\n  }\n\n  switch p.token {\n  | Lident(_) =>\n    let (lident, loc) = parseLident(p)\n    let name = Location.mkloc(lident, loc)\n    let typ = switch p.Parser.token {\n    | Colon =>\n      Parser.next(p)\n      parsePolyTypeExpr(p)\n    | _ => Ast_helper.Typ.constr(~loc=name.loc, {...name, txt: Lident(name.txt)}, list{})\n    }\n\n    let loc = mkLoc(startPos, typ.ptyp_loc.loc_end)\n    Some(Ast_helper.Type.field(~attrs, ~loc, ~mut, name, typ))\n  | _ => None\n  }\n}\n\n/* record-decl ::=\n *  | { field-decl }\n *  | { field-decl, field-decl }\n *  | { field-decl, field-decl, field-decl, }\n */\nand parseRecordDeclaration = p => {\n  Parser.leaveBreadcrumb(p, Grammar.RecordDecl)\n  Parser.expect(Lbrace, p)\n  let rows = parseCommaDelimitedRegion(\n    ~grammar=Grammar.RecordDecl,\n    ~closing=Rbrace,\n    ~f=parseFieldDeclarationRegion,\n    p,\n  )\n\n  Parser.expect(Rbrace, p)\n  Parser.eatBreadcrumb(p)\n  rows\n}\n\n/* constr-args ::=\n *  | (typexpr)\n *  | (typexpr, typexpr)\n *  | (typexpr, typexpr, typexpr,)\n *  | (record-decl)\n *\n * TODO: should we overparse inline-records in every 
position?\n * Give a good error message afterwards?\n */\nand parseConstrDeclArgs = p => {\n  let constrArgs = switch p.Parser.token {\n  | Lparen =>\n    Parser.next(p)\n    /* TODO: this could use some cleanup/stratification */\n    switch p.Parser.token {\n    | Lbrace =>\n      let lbrace = p.startPos\n      Parser.next(p)\n      let startPos = p.Parser.startPos\n      switch p.Parser.token {\n      | DotDot | Dot =>\n        let closedFlag = switch p.token {\n        | DotDot =>\n          Parser.next(p)\n          Asttypes.Open\n        | Dot =>\n          Parser.next(p)\n          Asttypes.Closed\n        | _ => Asttypes.Closed\n        }\n\n        let fields = parseCommaDelimitedRegion(\n          ~grammar=Grammar.StringFieldDeclarations,\n          ~closing=Rbrace,\n          ~f=parseStringFieldDeclaration,\n          p,\n        )\n\n        Parser.expect(Rbrace, p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let typ = Ast_helper.Typ.object_(~loc, ~attrs=list{}, fields, closedFlag)\n        Parser.optional(p, Comma) |> ignore\n        let moreArgs = parseCommaDelimitedRegion(\n          ~grammar=Grammar.TypExprList,\n          ~closing=Rparen,\n          ~f=parseTypExprRegion,\n          p,\n        )\n\n        Parser.expect(Rparen, p)\n        Parsetree.Pcstr_tuple(list{typ, ...moreArgs})\n      | DotDotDot =>\n        let dotdotdotStart = p.startPos\n        let dotdotdotEnd = p.endPos\n        /* start of object type spreading, e.g. 
`User({...a, \"u\": int})` */\n        Parser.next(p)\n        let typ = parseTypExpr(p)\n        let () = switch p.token {\n        | Rbrace =>\n          /* {...x}, spread without extra fields */\n          Parser.err(\n            ~startPos=dotdotdotStart,\n            ~endPos=dotdotdotEnd,\n            p,\n            Diagnostics.message(ErrorMessages.sameTypeSpread),\n          )\n          Parser.next(p)\n        | _ => Parser.expect(Comma, p)\n        }\n\n        let () = switch p.token {\n        | Lident(_) =>\n          Parser.err(\n            ~startPos=dotdotdotStart,\n            ~endPos=dotdotdotEnd,\n            p,\n            Diagnostics.message(ErrorMessages.spreadInRecordDeclaration),\n          )\n        | _ => ()\n        }\n\n        let fields = list{\n          Parsetree.Oinherit(typ),\n          ...parseCommaDelimitedRegion(\n            ~grammar=Grammar.StringFieldDeclarations,\n            ~closing=Rbrace,\n            ~f=parseStringFieldDeclaration,\n            p,\n          ),\n        }\n\n        Parser.expect(Rbrace, p)\n        let loc = mkLoc(startPos, p.prevEndPos)\n        let typ = Ast_helper.Typ.object_(~loc, fields, Asttypes.Closed) |> parseTypeAlias(p)\n\n        let typ = parseArrowTypeRest(~es6Arrow=true, ~startPos, typ, p)\n        Parser.optional(p, Comma) |> ignore\n        let moreArgs = parseCommaDelimitedRegion(\n          ~grammar=Grammar.TypExprList,\n          ~closing=Rparen,\n          ~f=parseTypExprRegion,\n          p,\n        )\n\n        Parser.expect(Rparen, p)\n        Parsetree.Pcstr_tuple(list{typ, ...moreArgs})\n      | _ =>\n        let attrs = parseAttributes(p)\n        switch p.Parser.token {\n        | String(_) =>\n          let closedFlag = Asttypes.Closed\n          let fields = switch attrs {\n          | list{} =>\n            parseCommaDelimitedRegion(\n              ~grammar=Grammar.StringFieldDeclarations,\n              ~closing=Rbrace,\n              ~f=parseStringFieldDeclaration,\n  
            p,\n            )\n          | attrs =>\n            let first = {\n              Parser.leaveBreadcrumb(p, Grammar.StringFieldDeclarations)\n              let field = switch parseStringFieldDeclaration(p) {\n              | Some(field) => field\n              | None => assert false\n              }\n\n              /* parse comma after first */\n              let () = switch p.Parser.token {\n              | Rbrace | Eof => ()\n              | Comma => Parser.next(p)\n              | _ => Parser.expect(Comma, p)\n              }\n\n              Parser.eatBreadcrumb(p)\n              switch field {\n              | Parsetree.Otag(label, _, ct) => Parsetree.Otag(label, attrs, ct)\n              | Oinherit(ct) => Oinherit(ct)\n              }\n            }\n\n            list{\n              first,\n              ...parseCommaDelimitedRegion(\n                ~grammar=Grammar.StringFieldDeclarations,\n                ~closing=Rbrace,\n                ~f=parseStringFieldDeclaration,\n                p,\n              ),\n            }\n          }\n          Parser.expect(Rbrace, p)\n          let loc = mkLoc(startPos, p.prevEndPos)\n          let typ =\n            Ast_helper.Typ.object_(~loc, ~attrs=list{}, fields, closedFlag) |> parseTypeAlias(p)\n\n          let typ = parseArrowTypeRest(~es6Arrow=true, ~startPos, typ, p)\n          Parser.optional(p, Comma) |> ignore\n          let moreArgs = parseCommaDelimitedRegion(\n            ~grammar=Grammar.TypExprList,\n            ~closing=Rparen,\n            ~f=parseTypExprRegion,\n            p,\n          )\n\n          Parser.expect(Rparen, p)\n          Parsetree.Pcstr_tuple(list{typ, ...moreArgs})\n        | _ =>\n          let fields = switch attrs {\n          | list{} =>\n            parseCommaDelimitedRegion(\n              ~grammar=Grammar.FieldDeclarations,\n              ~closing=Rbrace,\n              ~f=parseFieldDeclarationRegion,\n              p,\n            )\n          | attrs =>\n     
       let first = {\n              let field = parseFieldDeclaration(p)\n              Parser.expect(Comma, p)\n              {...field, Parsetree.pld_attributes: attrs}\n            }\n\n            list{\n              first,\n              ...parseCommaDelimitedRegion(\n                ~grammar=Grammar.FieldDeclarations,\n                ~closing=Rbrace,\n                ~f=parseFieldDeclarationRegion,\n                p,\n              ),\n            }\n          }\n\n          let () = switch fields {\n          | list{} =>\n            Parser.err(\n              ~startPos=lbrace,\n              p,\n              Diagnostics.message(\"An inline record declaration needs at least one field\"),\n            )\n          | _ => ()\n          }\n\n          Parser.expect(Rbrace, p)\n          Parser.optional(p, Comma) |> ignore\n          Parser.expect(Rparen, p)\n          Parsetree.Pcstr_record(fields)\n        }\n      }\n    | _ =>\n      let args = parseCommaDelimitedRegion(\n        ~grammar=Grammar.TypExprList,\n        ~closing=Rparen,\n        ~f=parseTypExprRegion,\n        p,\n      )\n\n      Parser.expect(Rparen, p)\n      Parsetree.Pcstr_tuple(args)\n    }\n  | _ => Pcstr_tuple(list{})\n  }\n\n  let res = switch p.Parser.token {\n  | Colon =>\n    Parser.next(p)\n    Some(parseTypExpr(p))\n  | _ => None\n  }\n\n  (constrArgs, res)\n}\n\n/* constr-decl ::=\n *  | constr-name\n *  | attrs constr-name\n *  | constr-name const-args\n *  | attrs constr-name const-args */\nand parseTypeConstructorDeclarationWithBar = p =>\n  switch p.Parser.token {\n  | Bar =>\n    let startPos = p.Parser.startPos\n    Parser.next(p)\n    Some(parseTypeConstructorDeclaration(~startPos, p))\n  | _ => None\n  }\n\nand parseTypeConstructorDeclaration = (~startPos, p) => {\n  Parser.leaveBreadcrumb(p, Grammar.ConstructorDeclaration)\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | Uident(uident) =>\n    let uidentLoc = mkLoc(p.startPos, p.endPos)\n    
Parser.next(p)\n    let (args, res) = parseConstrDeclArgs(p)\n    Parser.eatBreadcrumb(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Type.constructor(~loc, ~attrs, ~res?, ~args, Location.mkloc(uident, uidentLoc))\n  | t =>\n    Parser.err(p, Diagnostics.uident(t))\n    Ast_helper.Type.constructor(Location.mknoloc(\"_\"))\n  }\n}\n\n/* [|] constr-decl  { | constr-decl } */\nand parseTypeConstructorDeclarations = (~first=?, p) => {\n  let firstConstrDecl = switch first {\n  | None =>\n    let startPos = p.Parser.startPos\n    ignore(Parser.optional(p, Token.Bar))\n    parseTypeConstructorDeclaration(~startPos, p)\n  | Some(firstConstrDecl) => firstConstrDecl\n  }\n\n  list{\n    firstConstrDecl,\n    ...parseRegion(\n      ~grammar=Grammar.ConstructorDeclaration,\n      ~f=parseTypeConstructorDeclarationWithBar,\n      p,\n    ),\n  }\n}\n\n/*\n * type-representation ::=\n *  ∣\t = [ | ] constr-decl  { | constr-decl }\n *  ∣\t = private [ | ] constr-decl  { | constr-decl }\n *  |  = |\n *  ∣\t = private |\n *  ∣\t = record-decl\n *  ∣\t = private record-decl\n *  |  = ..\n */\nand parseTypeRepresentation = p => {\n  Parser.leaveBreadcrumb(p, Grammar.TypeRepresentation)\n  /* = consumed */\n  let privateFlag = if Parser.optional(p, Token.Private) {\n    Asttypes.Private\n  } else {\n    Asttypes.Public\n  }\n\n  let kind = switch p.Parser.token {\n  | Bar | Uident(_) => Parsetree.Ptype_variant(parseTypeConstructorDeclarations(p))\n  | Lbrace => Parsetree.Ptype_record(parseRecordDeclaration(p))\n  | DotDot =>\n    Parser.next(p)\n    Ptype_open\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    /* TODO: I have no idea if this is even remotely a good idea */\n    Parsetree.Ptype_variant(list{})\n  }\n\n  Parser.eatBreadcrumb(p)\n  (privateFlag, kind)\n}\n\n/* type-param\t::=\n *  | variance 'lident\n *  | variance 'uident\n *  | variance _\n *\n * variance ::=\n *   | +\n *   | -\n *   | (* empty *)\n */\nand 
parseTypeParam = p => {\n  let variance = switch p.Parser.token {\n  | Plus =>\n    Parser.next(p)\n    Asttypes.Covariant\n  | Minus =>\n    Parser.next(p)\n    Contravariant\n  | _ => Invariant\n  }\n\n  switch p.Parser.token {\n  | SingleQuote =>\n    Parser.next(p)\n    let (ident, loc) = parseIdent(~msg=ErrorMessages.typeParam, ~startPos=p.startPos, p)\n    Some(Ast_helper.Typ.var(~loc, ident), variance)\n  | Underscore =>\n    let loc = mkLoc(p.startPos, p.endPos)\n    Parser.next(p)\n    Some(Ast_helper.Typ.any(~loc, ()), variance)\n  | (Uident(_) | Lident(_)) as token =>\n    Parser.err(\n      p,\n      Diagnostics.message(\"Type params start with a singlequote: '\" ++ Token.toString(token)),\n    )\n    let (ident, loc) = parseIdent(~msg=ErrorMessages.typeParam, ~startPos=p.startPos, p)\n    Some(Ast_helper.Typ.var(~loc, ident), variance)\n  | _token => None\n  }\n}\n\n/* type-params\t::=\n *  | <type-param>\n *  ∣\t<type-param, type-param>\n *  ∣\t<type-param, type-param, type-param>\n *  ∣\t<type-param, type-param, type-param,>\n *\n *  TODO: when we have pretty-printer show an error\n *  with the actual code corrected. 
*/\nand parseTypeParams = (~parent, p) => {\n  let opening = p.Parser.token\n  switch opening {\n  | LessThan | Lparen if p.startPos.pos_lnum === p.prevEndPos.pos_lnum =>\n    Scanner.setDiamondMode(p.scanner)\n    let openingStartPos = p.startPos\n    Parser.leaveBreadcrumb(p, Grammar.TypeParams)\n    Parser.next(p)\n    let params = parseCommaDelimitedRegion(\n      ~grammar=Grammar.TypeParams,\n      ~closing=GreaterThan,\n      ~f=parseTypeParam,\n      p,\n    )\n\n    let () = switch p.token {\n    | Rparen if opening == Token.Lparen =>\n      let msg =\n        Doc.breakableGroup(\n          ~forceBreak=true,\n          Doc.concat(list{\n            Doc.text(\"Type parameters require angle brackets:\"),\n            Doc.indent(\n              Doc.concat(list{\n                Doc.line,\n                Doc.concat(list{\n                  ResPrinter.printLongident(parent.Location.txt),\n                  ResPrinter.printTypeParams(params, CommentTable.empty),\n                }),\n              }),\n            ),\n          }),\n        ) |> Doc.toString(~width=80)\n\n      Parser.err(~startPos=openingStartPos, p, Diagnostics.message(msg))\n      Parser.next(p)\n    | _ => Parser.expect(GreaterThan, p)\n    }\n\n    Scanner.popMode(p.scanner, Diamond)\n    Parser.eatBreadcrumb(p)\n    params\n  | _ => list{}\n  }\n}\n\n/* type-constraint\t::=\tconstraint ' ident =  typexpr */\nand parseTypeConstraint = p => {\n  let startPos = p.Parser.startPos\n  switch p.Parser.token {\n  | Token.Constraint =>\n    Parser.next(p)\n    Parser.expect(SingleQuote, p)\n    switch p.Parser.token {\n    | Lident(ident) =>\n      let identLoc = mkLoc(startPos, p.endPos)\n      Parser.next(p)\n      Parser.expect(Equal, p)\n      let typ = parseTypExpr(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Some(Ast_helper.Typ.var(~loc=identLoc, ident), typ, loc)\n    | t =>\n      Parser.err(p, Diagnostics.lident(t))\n      let loc = mkLoc(startPos, p.prevEndPos)\n      
Some(Ast_helper.Typ.any(), parseTypExpr(p), loc)\n    }\n  | _ => None\n  }\n}\n\n/* type-constraints ::=\n *  | (* empty *)\n *  | type-constraint\n *  | type-constraint type-constraint\n *  | type-constraint type-constraint type-constraint (* 0 or more *)\n */\nand parseTypeConstraints = p =>\n  parseRegion(~grammar=Grammar.TypeConstraint, ~f=parseTypeConstraint, p)\n\nand parseTypeEquationOrConstrDecl = p => {\n  let uidentStartPos = p.Parser.startPos\n  switch p.Parser.token {\n  | Uident(uident) =>\n    Parser.next(p)\n    switch p.Parser.token {\n    | Dot =>\n      Parser.next(p)\n      let typeConstr = parseValuePathTail(p, uidentStartPos, Longident.Lident(uident))\n\n      let loc = mkLoc(uidentStartPos, p.prevEndPos)\n      let typ = parseTypeAlias(\n        p,\n        Ast_helper.Typ.constr(\n          ~loc,\n          typeConstr,\n          parseTypeConstructorArgs(~constrName=typeConstr, p),\n        ),\n      )\n      switch p.token {\n      | Equal =>\n        Parser.next(p)\n        let (priv, kind) = parseTypeRepresentation(p)\n        (Some(typ), priv, kind)\n      | EqualGreater =>\n        Parser.next(p)\n        let returnType = parseTypExpr(~alias=false, p)\n        let loc = mkLoc(uidentStartPos, p.prevEndPos)\n        let arrowType = Ast_helper.Typ.arrow(~loc, Asttypes.Nolabel, typ, returnType)\n        let typ = parseTypeAlias(p, arrowType)\n        (Some(typ), Asttypes.Public, Parsetree.Ptype_abstract)\n      | _ => (Some(typ), Asttypes.Public, Parsetree.Ptype_abstract)\n      }\n    | _ =>\n      let uidentEndPos = p.prevEndPos\n      let (args, res) = parseConstrDeclArgs(p)\n      let first = Some({\n        let uidentLoc = mkLoc(uidentStartPos, uidentEndPos)\n        Ast_helper.Type.constructor(\n          ~loc=mkLoc(uidentStartPos, p.prevEndPos),\n          ~res?,\n          ~args,\n          Location.mkloc(uident, uidentLoc),\n        )\n      })\n      (None, Asttypes.Public, 
Parsetree.Ptype_variant(parseTypeConstructorDeclarations(p, ~first?)))\n    }\n  | t =>\n    Parser.err(p, Diagnostics.uident(t))\n    /* TODO: is this a good idea? */\n    (None, Asttypes.Public, Parsetree.Ptype_abstract)\n  }\n}\n\nand parseRecordOrObjectDecl = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Lbrace, p)\n  switch p.Parser.token {\n  | DotDot | Dot =>\n    let closedFlag = switch p.token {\n    | DotDot =>\n      Parser.next(p)\n      Asttypes.Open\n    | Dot =>\n      Parser.next(p)\n      Asttypes.Closed\n    | _ => Asttypes.Closed\n    }\n\n    let fields = parseCommaDelimitedRegion(\n      ~grammar=Grammar.StringFieldDeclarations,\n      ~closing=Rbrace,\n      ~f=parseStringFieldDeclaration,\n      p,\n    )\n\n    Parser.expect(Rbrace, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    let typ = Ast_helper.Typ.object_(~loc, ~attrs=list{}, fields, closedFlag) |> parseTypeAlias(p)\n\n    let typ = parseArrowTypeRest(~es6Arrow=true, ~startPos, typ, p)\n    (Some(typ), Asttypes.Public, Parsetree.Ptype_abstract)\n  | DotDotDot =>\n    let dotdotdotStart = p.startPos\n    let dotdotdotEnd = p.endPos\n    /* start of object type spreading, e.g. 
`type u = {...a, \"u\": int}` */\n    Parser.next(p)\n    let typ = parseTypExpr(p)\n    let () = switch p.token {\n    | Rbrace =>\n      /* {...x}, spread without extra fields */\n      Parser.err(\n        ~startPos=dotdotdotStart,\n        ~endPos=dotdotdotEnd,\n        p,\n        Diagnostics.message(ErrorMessages.sameTypeSpread),\n      )\n      Parser.next(p)\n    | _ => Parser.expect(Comma, p)\n    }\n\n    let () = switch p.token {\n    | Lident(_) =>\n      Parser.err(\n        ~startPos=dotdotdotStart,\n        ~endPos=dotdotdotEnd,\n        p,\n        Diagnostics.message(ErrorMessages.spreadInRecordDeclaration),\n      )\n    | _ => ()\n    }\n\n    let fields = list{\n      Parsetree.Oinherit(typ),\n      ...parseCommaDelimitedRegion(\n        ~grammar=Grammar.StringFieldDeclarations,\n        ~closing=Rbrace,\n        ~f=parseStringFieldDeclaration,\n        p,\n      ),\n    }\n\n    Parser.expect(Rbrace, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    let typ = Ast_helper.Typ.object_(~loc, fields, Asttypes.Closed) |> parseTypeAlias(p)\n\n    let typ = parseArrowTypeRest(~es6Arrow=true, ~startPos, typ, p)\n    (Some(typ), Asttypes.Public, Parsetree.Ptype_abstract)\n  | _ =>\n    let attrs = parseAttributes(p)\n    switch p.Parser.token {\n    | String(_) =>\n      let closedFlag = Asttypes.Closed\n      let fields = switch attrs {\n      | list{} =>\n        parseCommaDelimitedRegion(\n          ~grammar=Grammar.StringFieldDeclarations,\n          ~closing=Rbrace,\n          ~f=parseStringFieldDeclaration,\n          p,\n        )\n      | attrs =>\n        let first = {\n          Parser.leaveBreadcrumb(p, Grammar.StringFieldDeclarations)\n          let field = switch parseStringFieldDeclaration(p) {\n          | Some(field) => field\n          | None => assert false\n          }\n\n          /* parse comma after first */\n          let () = switch p.Parser.token {\n          | Rbrace | Eof => ()\n          | Comma => Parser.next(p)\n          
| _ => Parser.expect(Comma, p)\n          }\n\n          Parser.eatBreadcrumb(p)\n          switch field {\n          | Parsetree.Otag(label, _, ct) => Parsetree.Otag(label, attrs, ct)\n          | Oinherit(ct) => Oinherit(ct)\n          }\n        }\n\n        list{\n          first,\n          ...parseCommaDelimitedRegion(\n            ~grammar=Grammar.StringFieldDeclarations,\n            ~closing=Rbrace,\n            ~f=parseStringFieldDeclaration,\n            p,\n          ),\n        }\n      }\n\n      Parser.expect(Rbrace, p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      let typ = Ast_helper.Typ.object_(~loc, ~attrs=list{}, fields, closedFlag) |> parseTypeAlias(p)\n\n      let typ = parseArrowTypeRest(~es6Arrow=true, ~startPos, typ, p)\n      (Some(typ), Asttypes.Public, Parsetree.Ptype_abstract)\n    | _ =>\n      Parser.leaveBreadcrumb(p, Grammar.RecordDecl)\n      let fields = switch attrs {\n      | list{} =>\n        parseCommaDelimitedRegion(\n          ~grammar=Grammar.FieldDeclarations,\n          ~closing=Rbrace,\n          ~f=parseFieldDeclarationRegion,\n          p,\n        )\n      | list{attr, ..._} as attrs =>\n        let first = {\n          let field = parseFieldDeclaration(p)\n          Parser.optional(p, Comma) |> ignore\n          {\n            ...field,\n            Parsetree.pld_attributes: attrs,\n            pld_loc: {\n              ...field.Parsetree.pld_loc,\n              loc_start: (attr |> fst).loc.loc_start,\n            },\n          }\n        }\n\n        list{\n          first,\n          ...parseCommaDelimitedRegion(\n            ~grammar=Grammar.FieldDeclarations,\n            ~closing=Rbrace,\n            ~f=parseFieldDeclarationRegion,\n            p,\n          ),\n        }\n      }\n\n      let () = switch fields {\n      | list{} => Parser.err(~startPos, p, Diagnostics.message(\"A record needs at least one field\"))\n      | _ => ()\n      }\n\n      Parser.expect(Rbrace, p)\n      
Parser.eatBreadcrumb(p)\n      (None, Asttypes.Public, Parsetree.Ptype_record(fields))\n    }\n  }\n}\n\nand parsePrivateEqOrRepr = p => {\n  Parser.expect(Private, p)\n  switch p.Parser.token {\n  | Lbrace =>\n    let (manifest, _, kind) = parseRecordOrObjectDecl(p)\n    (manifest, Asttypes.Private, kind)\n  | Uident(_) =>\n    let (manifest, _, kind) = parseTypeEquationOrConstrDecl(p)\n    (manifest, Asttypes.Private, kind)\n  | Bar | DotDot =>\n    let (_, kind) = parseTypeRepresentation(p)\n    (None, Asttypes.Private, kind)\n  | t if Grammar.isTypExprStart(t) => (\n      Some(parseTypExpr(p)),\n      Asttypes.Private,\n      Parsetree.Ptype_abstract,\n    )\n  | _ =>\n    let (_, kind) = parseTypeRepresentation(p)\n    (None, Asttypes.Private, kind)\n  }\n}\n\n/*\n  polymorphic-variant-type\t::=\n                            | [ tag-spec-first  { | tag-spec } ]\n                            | [> [ tag-spec ]  { | tag-spec } ]\n                            | [< [|] tag-spec-full  { | tag-spec-full }  [ > { `tag-name }+ ] ]\n\n            tag-spec-first\t::=\t`tag-name  [ of typexpr ]\n                            |\t[ typexpr ] |  tag-spec\n\n                  tag-spec\t::=\t`tag-name  [ of typexpr ]\n                            |\ttypexpr\n\n              tag-spec-full\t::=\t`tag-name  [ of [&] typexpr  { & typexpr } ]\n                             |\ttypexpr\n*/\nand parsePolymorphicVariantType = (~attrs, p) => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Lbracket, p)\n  switch p.token {\n  | GreaterThan =>\n    Parser.next(p)\n    let rowFields = switch p.token {\n    | Rbracket => list{}\n    | Bar => parseTagSpecs(p)\n    | _ =>\n      let rowField = parseTagSpec(p)\n      list{rowField, ...parseTagSpecs(p)}\n    }\n\n    let variant = {\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Ast_helper.Typ.variant(~attrs, ~loc, rowFields, Open, None)\n    }\n    Parser.expect(Rbracket, p)\n    variant\n  | LessThan =>\n    Parser.next(p)\n    
Parser.optional(p, Bar) |> ignore\n    let rowField = parseTagSpecFull(p)\n    let rowFields = parseTagSpecFulls(p)\n    let tagNames = parseTagNames(p)\n    let variant = {\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Ast_helper.Typ.variant(~attrs, ~loc, list{rowField, ...rowFields}, Closed, Some(tagNames))\n    }\n    Parser.expect(Rbracket, p)\n    variant\n  | _ =>\n    let rowFields1 = parseTagSpecFirst(p)\n    let rowFields2 = parseTagSpecs(p)\n    let variant = {\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Ast_helper.Typ.variant(~attrs, ~loc, \\\"@\"(rowFields1, rowFields2), Closed, None)\n    }\n    Parser.expect(Rbracket, p)\n    variant\n  }\n}\n\nand parseTagName = p =>\n  switch p.Parser.token {\n  | Hash =>\n    let (ident, _loc) = parseHashIdent(~startPos=p.startPos, p)\n    Some(ident)\n  | _ => None\n  }\n\nand parseTagNames = p =>\n  if p.Parser.token === GreaterThan {\n    Parser.next(p)\n    parseRegion(p, ~grammar=Grammar.TagNames, ~f=parseTagName)\n  } else {\n    list{}\n  }\n\nand parseTagSpecFulls = p =>\n  switch p.Parser.token {\n  | Rbracket => list{}\n  | GreaterThan => list{}\n  | Bar =>\n    Parser.next(p)\n    let rowField = parseTagSpecFull(p)\n    list{rowField, ...parseTagSpecFulls(p)}\n  | _ => list{}\n  }\n\nand parseTagSpecFull = p => {\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | Hash => parsePolymorphicVariantTypeSpecHash(~attrs, ~full=true, p)\n  | _ =>\n    let typ = parseTypExpr(~attrs, p)\n    Parsetree.Rinherit(typ)\n  }\n}\n\nand parseTagSpecs = p =>\n  switch p.Parser.token {\n  | Bar =>\n    Parser.next(p)\n    let rowField = parseTagSpec(p)\n    list{rowField, ...parseTagSpecs(p)}\n  | _ => list{}\n  }\n\nand parseTagSpec = p => {\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | Hash => parsePolymorphicVariantTypeSpecHash(~attrs, ~full=false, p)\n  | _ =>\n    let typ = parseTypExpr(~attrs, p)\n    Parsetree.Rinherit(typ)\n  }\n}\n\nand parseTagSpecFirst = p => 
{\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | Bar =>\n    Parser.next(p)\n    list{parseTagSpec(p)}\n  | Hash => list{parsePolymorphicVariantTypeSpecHash(~attrs, ~full=false, p)}\n  | _ =>\n    let typ = parseTypExpr(~attrs, p)\n    switch p.token {\n    | Rbracket => /* example: [ListStyleType.t] */\n      list{Parsetree.Rinherit(typ)}\n    | _ =>\n      Parser.expect(Bar, p)\n      list{Parsetree.Rinherit(typ), parseTagSpec(p)}\n    }\n  }\n}\n\nand parsePolymorphicVariantTypeSpecHash = (~attrs, ~full, p): Parsetree.row_field => {\n  let startPos = p.Parser.startPos\n  let (ident, loc) = parseHashIdent(~startPos, p)\n  let rec loop = p =>\n    switch p.Parser.token {\n    | Band if full =>\n      Parser.next(p)\n      let rowField = parsePolymorphicVariantTypeArgs(p)\n      list{rowField, ...loop(p)}\n    | _ => list{}\n    }\n\n  let (firstTuple, tagContainsAConstantEmptyConstructor) = switch p.Parser.token {\n  | Band if full =>\n    Parser.next(p)\n    (list{parsePolymorphicVariantTypeArgs(p)}, true)\n  | Lparen => (list{parsePolymorphicVariantTypeArgs(p)}, false)\n  | _ => (list{}, true)\n  }\n\n  let tuples = \\\"@\"(firstTuple, loop(p))\n  Parsetree.Rtag(Location.mkloc(ident, loc), attrs, tagContainsAConstantEmptyConstructor, tuples)\n}\n\nand parsePolymorphicVariantTypeArgs = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Lparen, p)\n  let args = parseCommaDelimitedRegion(\n    ~grammar=Grammar.TypExprList,\n    ~closing=Rparen,\n    ~f=parseTypExprRegion,\n    p,\n  )\n\n  Parser.expect(Rparen, p)\n  let attrs = list{}\n  let loc = mkLoc(startPos, p.prevEndPos)\n  switch args {\n  | list{{ptyp_desc: Ptyp_tuple(_)} as typ} as types =>\n    if p.mode == ParseForTypeChecker {\n      typ\n    } else {\n      Ast_helper.Typ.tuple(~loc, ~attrs, types)\n    }\n  | list{typ} => typ\n  | types => Ast_helper.Typ.tuple(~loc, ~attrs, types)\n  }\n}\n\nand parseTypeEquationAndRepresentation = p =>\n  switch p.Parser.token {\n  | 
(Equal | Bar) as token =>\n    if token == Bar {\n      Parser.expect(Equal, p)\n    }\n    Parser.next(p)\n    switch p.Parser.token {\n    | Uident(_) => parseTypeEquationOrConstrDecl(p)\n    | Lbrace => parseRecordOrObjectDecl(p)\n    | Private => parsePrivateEqOrRepr(p)\n    | Bar | DotDot =>\n      let (priv, kind) = parseTypeRepresentation(p)\n      (None, priv, kind)\n    | _ =>\n      let manifest = Some(parseTypExpr(p))\n      switch p.Parser.token {\n      | Equal =>\n        Parser.next(p)\n        let (priv, kind) = parseTypeRepresentation(p)\n        (manifest, priv, kind)\n      | _ => (manifest, Public, Parsetree.Ptype_abstract)\n      }\n    }\n  | _ => (None, Public, Parsetree.Ptype_abstract)\n  }\n\n/* type-definition\t::=\ttype [rec] typedef  { and typedef }\n * typedef\t::=\ttypeconstr-name [type-params] type-information\n * type-information\t::=\t[type-equation]  [type-representation]  { type-constraint }\n * type-equation\t::=\t= typexpr */\nand parseTypeDef = (~attrs, ~startPos, p) => {\n  Parser.leaveBreadcrumb(p, Grammar.TypeDef)\n  /* let attrs = match attrs with | Some attrs -> attrs | None -> parseAttributes p in */\n  Parser.leaveBreadcrumb(p, Grammar.TypeConstrName)\n  let (name, loc) = parseLident(p)\n  let typeConstrName = Location.mkloc(name, loc)\n  Parser.eatBreadcrumb(p)\n  let params = {\n    let constrName = Location.mkloc(Longident.Lident(name), loc)\n    parseTypeParams(~parent=constrName, p)\n  }\n  let typeDef = {\n    let (manifest, priv, kind) = parseTypeEquationAndRepresentation(p)\n    let cstrs = parseTypeConstraints(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Type.mk(~loc, ~attrs, ~priv, ~kind, ~params, ~cstrs, ~manifest?, typeConstrName)\n  }\n\n  Parser.eatBreadcrumb(p)\n  typeDef\n}\n\nand parseTypeExtension = (~params, ~attrs, ~name, p) => {\n  Parser.expect(PlusEqual, p)\n  let priv = if Parser.optional(p, Token.Private) {\n    Asttypes.Private\n  } else {\n    Asttypes.Public\n  }\n\n  let 
constrStart = p.Parser.startPos\n  Parser.optional(p, Bar) |> ignore\n  let first = {\n    let (attrs, name, kind) = switch p.Parser.token {\n    | Bar =>\n      Parser.next(p)\n      parseConstrDef(~parseAttrs=true, p)\n    | _ => parseConstrDef(~parseAttrs=true, p)\n    }\n\n    let loc = mkLoc(constrStart, p.prevEndPos)\n    Ast_helper.Te.constructor(~loc, ~attrs, name, kind)\n  }\n\n  let rec loop = (p, cs) =>\n    switch p.Parser.token {\n    | Bar =>\n      let startPos = p.Parser.startPos\n      Parser.next(p)\n      let (attrs, name, kind) = parseConstrDef(~parseAttrs=true, p)\n      let extConstr = Ast_helper.Te.constructor(\n        ~attrs,\n        ~loc=mkLoc(startPos, p.prevEndPos),\n        name,\n        kind,\n      )\n\n      loop(p, list{extConstr, ...cs})\n    | _ => List.rev(cs)\n    }\n\n  let constructors = loop(p, list{first})\n  Ast_helper.Te.mk(~attrs, ~params, ~priv, name, constructors)\n}\n\nand parseTypeDefinitions = (~attrs, ~name, ~params, ~startPos, p) => {\n  let typeDef = {\n    let (manifest, priv, kind) = parseTypeEquationAndRepresentation(p)\n    let cstrs = parseTypeConstraints(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Type.mk(\n      ~loc,\n      ~attrs,\n      ~priv,\n      ~kind,\n      ~params,\n      ~cstrs,\n      ~manifest?,\n      {...name, txt: lidentOfPath(name.Location.txt)},\n    )\n  }\n\n  let rec loop = (p, defs) => {\n    let startPos = p.Parser.startPos\n    let attrs = parseAttributesAndBinding(p)\n    switch p.Parser.token {\n    | And =>\n      Parser.next(p)\n      let attrs = switch p.token {\n      | Export =>\n        let exportLoc = mkLoc(p.startPos, p.endPos)\n        Parser.next(p)\n        let genTypeAttr = (Location.mkloc(\"genType\", exportLoc), Parsetree.PStr(list{}))\n        list{genTypeAttr, ...attrs}\n      | _ => attrs\n      }\n\n      let typeDef = parseTypeDef(~attrs, ~startPos, p)\n      loop(p, list{typeDef, ...defs})\n    | _ => List.rev(defs)\n    }\n  }\n\n  
loop(p, list{typeDef})\n}\n\n/* TODO: decide if we really want type extensions (eg. type x += Blue)\n * It adds quite a bit of complexity that can be avoided,\n * implemented for now. Needed to get a feel for the complexities of\n * this territory of the grammar */\nand parseTypeDefinitionOrExtension = (~attrs, p) => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Token.Typ, p)\n  let recFlag = switch p.token {\n  | Rec =>\n    Parser.next(p)\n    Asttypes.Recursive\n  | Lident(\"nonrec\") =>\n    Parser.next(p)\n    Asttypes.Nonrecursive\n  | _ => Asttypes.Nonrecursive\n  }\n\n  let name = parseValuePath(p)\n  let params = parseTypeParams(~parent=name, p)\n  switch p.Parser.token {\n  | PlusEqual => TypeExt(parseTypeExtension(~params, ~attrs, ~name, p))\n  | _ =>\n    /* shape of type name should be Lident, i.e. `t` is accepted. `User.t` not */\n    let () = switch name.Location.txt {\n    | Lident(_) => ()\n    | longident =>\n      Parser.err(\n        ~startPos=name.loc.loc_start,\n        ~endPos=name.loc.loc_end,\n        p,\n        longident |> ErrorMessages.typeDeclarationNameLongident |> Diagnostics.message,\n      )\n    }\n\n    let typeDefs = parseTypeDefinitions(~attrs, ~name, ~params, ~startPos, p)\n    TypeDef({recFlag: recFlag, types: typeDefs})\n  }\n}\n\n/* external value-name : typexp = external-declaration */\nand parseExternalDef = (~attrs, ~startPos, p) => {\n  Parser.leaveBreadcrumb(p, Grammar.External)\n  Parser.expect(Token.External, p)\n  let (name, loc) = parseLident(p)\n  let name = Location.mkloc(name, loc)\n  Parser.expect(~grammar=Grammar.TypeExpression, Colon, p)\n  let typExpr = parseTypExpr(p)\n  let equalStart = p.startPos\n  let equalEnd = p.endPos\n  Parser.expect(Equal, p)\n  let prim = switch p.token {\n  | String(s) =>\n    Parser.next(p)\n    list{s}\n  | _ =>\n    Parser.err(\n      ~startPos=equalStart,\n      ~endPos=equalEnd,\n      p,\n      Diagnostics.message(\n        \"An external requires the name of the JS 
value you're referring to, like \\\"\" ++\n        (name.txt ++\n        \"\\\".\"),\n      ),\n    )\n    list{}\n  }\n\n  let loc = mkLoc(startPos, p.prevEndPos)\n  let vb = Ast_helper.Val.mk(~loc, ~attrs, ~prim, name, typExpr)\n  Parser.eatBreadcrumb(p)\n  vb\n}\n\n/* constr-def ::=\n *  | constr-decl\n *  | constr-name = constr\n *\n *  constr-decl ::= constr-name constr-args\n *  constr-name ::= uident\n *  constr      ::= path-uident */\nand parseConstrDef = (~parseAttrs, p) => {\n  let attrs = if parseAttrs {\n    parseAttributes(p)\n  } else {\n    list{}\n  }\n  let name = switch p.Parser.token {\n  | Uident(name) =>\n    let loc = mkLoc(p.startPos, p.endPos)\n    Parser.next(p)\n    Location.mkloc(name, loc)\n  | t =>\n    Parser.err(p, Diagnostics.uident(t))\n    Location.mknoloc(\"_\")\n  }\n\n  let kind = switch p.Parser.token {\n  | Lparen =>\n    let (args, res) = parseConstrDeclArgs(p)\n    Parsetree.Pext_decl(args, res)\n  | Equal =>\n    Parser.next(p)\n    let longident = parseModuleLongIdent(~lowercase=false, p)\n    Parsetree.Pext_rebind(longident)\n  | Colon =>\n    Parser.next(p)\n    let typ = parseTypExpr(p)\n    Parsetree.Pext_decl(Pcstr_tuple(list{}), Some(typ))\n  | _ => Parsetree.Pext_decl(Pcstr_tuple(list{}), None)\n  }\n\n  (attrs, name, kind)\n}\n\n/*\n * exception-definition\t::=\n *  | exception constr-decl\n *  ∣\texception constr-name = constr\n *\n *  constr-name ::= uident\n *  constr ::= long_uident */\nand parseExceptionDef = (~attrs, p) => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Token.Exception, p)\n  let (_, name, kind) = parseConstrDef(~parseAttrs=false, p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Te.constructor(~loc, ~attrs, name, kind)\n}\n\nand parseNewlineOrSemicolonStructure = p =>\n  switch p.Parser.token {\n  | Semicolon => Parser.next(p)\n  | token if Grammar.isStructureItemStart(token) =>\n    if p.prevEndPos.pos_lnum < p.startPos.pos_lnum {\n      ()\n    } else {\n      
Parser.err(\n        ~startPos=p.prevEndPos,\n        ~endPos=p.endPos,\n        p,\n        Diagnostics.message(\n          \"consecutive statements on a line must be separated by ';' or a newline\",\n        ),\n      )\n    }\n  | _ => ()\n  }\n\n@progress((Parser.next, Parser.expect, Parser.checkProgress))\nand parseStructureItemRegion = p => {\n  let startPos = p.Parser.startPos\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | Open =>\n    let openDescription = parseOpenDescription(~attrs, p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Str.open_(~loc, openDescription))\n  | Let =>\n    let (recFlag, letBindings) = parseLetBindings(~attrs, p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Str.value(~loc, recFlag, letBindings))\n  | Typ =>\n    Parser.beginRegion(p)\n    switch parseTypeDefinitionOrExtension(~attrs, p) {\n    | TypeDef({recFlag, types}) =>\n      parseNewlineOrSemicolonStructure(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Parser.endRegion(p)\n      Some(Ast_helper.Str.type_(~loc, recFlag, types))\n    | TypeExt(ext) =>\n      parseNewlineOrSemicolonStructure(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Parser.endRegion(p)\n      Some(Ast_helper.Str.type_extension(~loc, ext))\n    }\n  | External =>\n    let externalDef = parseExternalDef(~attrs, ~startPos, p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Str.primitive(~loc, externalDef))\n  | Import =>\n    let importDescr = parseJsImport(~startPos, ~attrs, p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    let structureItem = JsFfi.toParsetree(importDescr)\n    Some({...structureItem, pstr_loc: loc})\n  | Exception =>\n    let exceptionDef = parseExceptionDef(~attrs, p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = 
mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Str.exception_(~loc, exceptionDef))\n  | Include =>\n    let includeStatement = parseIncludeStatement(~attrs, p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Str.include_(~loc, includeStatement))\n  | Export =>\n    let structureItem = parseJsExport(~attrs, p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some({...structureItem, pstr_loc: loc})\n  | Module =>\n    Parser.beginRegion(p)\n    let structureItem = parseModuleOrModuleTypeImplOrPackExpr(~attrs, p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Parser.endRegion(p)\n    Some({...structureItem, pstr_loc: loc})\n  | AtAt =>\n    let attr = parseStandaloneAttribute(p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Str.attribute(~loc, attr))\n  | PercentPercent =>\n    let extension = parseExtension(~moduleLanguage=true, p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Str.extension(~attrs, ~loc, extension))\n  | token if Grammar.isExprStart(token) =>\n    let prevEndPos = p.Parser.endPos\n    let exp = parseExpr(p)\n    parseNewlineOrSemicolonStructure(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Parser.checkProgress(~prevEndPos, ~result=Ast_helper.Str.eval(~loc, ~attrs, exp), p)\n  | _ =>\n    switch attrs {\n    | list{({Asttypes.loc: attrLoc}, _) as attr, ..._} =>\n      Parser.err(\n        ~startPos=attrLoc.loc_start,\n        ~endPos=attrLoc.loc_end,\n        p,\n        Diagnostics.message(ErrorMessages.attributeWithoutNode(attr)),\n      )\n      let expr = parseExpr(p)\n      Some(Ast_helper.Str.eval(~loc=mkLoc(p.startPos, p.prevEndPos), ~attrs, expr))\n    | _ => None\n    }\n  }\n}\n\nand parseJsImport = (~startPos, ~attrs, p) => {\n  Parser.expect(Token.Import, p)\n  let 
importSpec = switch p.Parser.token {\n  | Token.Lident(_) | Token.At =>\n    let decl = switch parseJsFfiDeclaration(p) {\n    | Some(decl) => decl\n    | None => assert false\n    }\n\n    JsFfi.Default(decl)\n  | _ => JsFfi.Spec(parseJsFfiDeclarations(p))\n  }\n\n  let scope = parseJsFfiScope(p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  JsFfi.importDescr(~attrs, ~importSpec, ~scope, ~loc)\n}\n\nand parseJsExport = (~attrs, p) => {\n  let exportStart = p.Parser.startPos\n  Parser.expect(Token.Export, p)\n  let exportLoc = mkLoc(exportStart, p.prevEndPos)\n  let genTypeAttr = (Location.mkloc(\"genType\", exportLoc), Parsetree.PStr(list{}))\n  let attrs = list{genTypeAttr, ...attrs}\n  switch p.Parser.token {\n  | Typ =>\n    switch parseTypeDefinitionOrExtension(~attrs, p) {\n    | TypeDef({recFlag, types}) => Ast_helper.Str.type_(recFlag, types)\n    | TypeExt(ext) => Ast_helper.Str.type_extension(ext)\n    }\n  /* Let */ | _ =>\n    let (recFlag, letBindings) = parseLetBindings(~attrs, p)\n    Ast_helper.Str.value(recFlag, letBindings)\n  }\n}\n\nand parseSignJsExport = (~attrs, p) => {\n  let exportStart = p.Parser.startPos\n  Parser.expect(Token.Export, p)\n  let exportLoc = mkLoc(exportStart, p.prevEndPos)\n  let genTypeAttr = (Location.mkloc(\"genType\", exportLoc), Parsetree.PStr(list{}))\n  let attrs = list{genTypeAttr, ...attrs}\n  switch p.Parser.token {\n  | Typ =>\n    switch parseTypeDefinitionOrExtension(~attrs, p) {\n    | TypeDef({recFlag, types}) =>\n      let loc = mkLoc(exportStart, p.prevEndPos)\n      Ast_helper.Sig.type_(recFlag, types, ~loc)\n    | TypeExt(ext) =>\n      let loc = mkLoc(exportStart, p.prevEndPos)\n      Ast_helper.Sig.type_extension(ext, ~loc)\n    }\n  /* Let */ | _ =>\n    let valueDesc = parseSignLetDesc(~attrs, p)\n    let loc = mkLoc(exportStart, p.prevEndPos)\n    Ast_helper.Sig.value(valueDesc, ~loc)\n  }\n}\n\nand parseJsFfiScope = p =>\n  switch p.Parser.token {\n  | Token.Lident(\"from\") =>\n    
Parser.next(p)\n    switch p.token {\n    | String(s) =>\n      Parser.next(p)\n      JsFfi.Module(s)\n    | Uident(_) | Lident(_) =>\n      let value = parseIdentPath(p)\n      JsFfi.Scope(value)\n    | _ => JsFfi.Global\n    }\n  | _ => JsFfi.Global\n  }\n\nand parseJsFfiDeclarations = p => {\n  Parser.expect(Token.Lbrace, p)\n  let decls = parseCommaDelimitedRegion(\n    ~grammar=Grammar.JsFfiImport,\n    ~closing=Rbrace,\n    ~f=parseJsFfiDeclaration,\n    p,\n  )\n\n  Parser.expect(Rbrace, p)\n  decls\n}\n\nand parseJsFfiDeclaration = p => {\n  let startPos = p.Parser.startPos\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | Lident(_) =>\n    let (ident, _) = parseLident(p)\n    let alias = switch p.token {\n    | As =>\n      Parser.next(p)\n      let (ident, _) = parseLident(p)\n      ident\n    | _ => ident\n    }\n\n    Parser.expect(Token.Colon, p)\n    let typ = parseTypExpr(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(JsFfi.decl(~loc, ~alias, ~attrs, ~name=ident, ~typ))\n  | _ => None\n  }\n}\n\n/* include-statement ::= include module-expr */\nand parseIncludeStatement = (~attrs, p) => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Token.Include, p)\n  let modExpr = parseModuleExpr(p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Incl.mk(~loc, ~attrs, modExpr)\n}\n\nand parseAtomicModuleExpr = p => {\n  let startPos = p.Parser.startPos\n  switch p.Parser.token {\n  | Uident(_ident) =>\n    let longident = parseModuleLongIdent(~lowercase=false, p)\n    Ast_helper.Mod.ident(~loc=longident.loc, longident)\n  | Lbrace =>\n    Parser.next(p)\n    let structure = Ast_helper.Mod.structure(\n      parseDelimitedRegion(\n        ~grammar=Grammar.Structure,\n        ~closing=Rbrace,\n        ~f=parseStructureItemRegion,\n        p,\n      ),\n    )\n    Parser.expect(Rbrace, p)\n    let endPos = p.prevEndPos\n    {...structure, pmod_loc: mkLoc(startPos, endPos)}\n  | Lparen =>\n    Parser.next(p)\n    let modExpr = 
switch p.token {\n    | Rparen => Ast_helper.Mod.structure(~loc=mkLoc(startPos, p.prevEndPos), list{})\n    | _ => parseConstrainedModExpr(p)\n    }\n\n    Parser.expect(Rparen, p)\n    modExpr\n  | Lident(\"unpack\") =>\n    /* TODO: should this be made a keyword?? */\n    Parser.next(p)\n    Parser.expect(Lparen, p)\n    let expr = parseExpr(p)\n    switch p.Parser.token {\n    | Colon =>\n      let colonStart = p.Parser.startPos\n      Parser.next(p)\n      let attrs = parseAttributes(p)\n      let packageType = parsePackageType(~startPos=colonStart, ~attrs, p)\n      Parser.expect(Rparen, p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      let constraintExpr = Ast_helper.Exp.constraint_(~loc, expr, packageType)\n\n      Ast_helper.Mod.unpack(~loc, constraintExpr)\n    | _ =>\n      Parser.expect(Rparen, p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Ast_helper.Mod.unpack(~loc, expr)\n    }\n  | Percent =>\n    let extension = parseExtension(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Mod.extension(~loc, extension)\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    Recover.defaultModuleExpr()\n  }\n}\n\nand parsePrimaryModExpr = p => {\n  let startPos = p.Parser.startPos\n  let modExpr = parseAtomicModuleExpr(p)\n  let rec loop = (p, modExpr) =>\n    switch p.Parser.token {\n    | Lparen if p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>\n      loop(p, parseModuleApplication(p, modExpr))\n    | _ => modExpr\n    }\n\n  let modExpr = loop(p, modExpr)\n  {...modExpr, pmod_loc: mkLoc(startPos, p.prevEndPos)}\n}\n\n/*\n * functor-arg ::=\n *  | uident : modtype\n *  | _ : modtype\n *  | modtype           --> \"punning\" for _ : modtype\n *  | attributes functor-arg\n */\nand parseFunctorArg = p => {\n  let startPos = p.Parser.startPos\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | Uident(ident) =>\n    Parser.next(p)\n    let uidentEndPos = p.prevEndPos\n    switch p.Parser.token 
{\n    | Colon =>\n      Parser.next(p)\n      let moduleType = parseModuleType(p)\n      let loc = mkLoc(startPos, uidentEndPos)\n      let argName = Location.mkloc(ident, loc)\n      Some(attrs, argName, Some(moduleType), startPos)\n    | Dot =>\n      Parser.next(p)\n      let moduleType = {\n        let moduleLongIdent = parseModuleLongIdentTail(\n          ~lowercase=false,\n          p,\n          startPos,\n          Longident.Lident(ident),\n        )\n        Ast_helper.Mty.ident(~loc=moduleLongIdent.loc, moduleLongIdent)\n      }\n\n      let argName = Location.mknoloc(\"_\")\n      Some(attrs, argName, Some(moduleType), startPos)\n    | _ =>\n      let loc = mkLoc(startPos, uidentEndPos)\n      let modIdent = Location.mkloc(Longident.Lident(ident), loc)\n      let moduleType = Ast_helper.Mty.ident(~loc, modIdent)\n      let argName = Location.mknoloc(\"_\")\n      Some(attrs, argName, Some(moduleType), startPos)\n    }\n  | Underscore =>\n    Parser.next(p)\n    let argName = Location.mkloc(\"_\", mkLoc(startPos, p.prevEndPos))\n    Parser.expect(Colon, p)\n    let moduleType = parseModuleType(p)\n    Some(attrs, argName, Some(moduleType), startPos)\n  | Lparen =>\n    Parser.next(p)\n    Parser.expect(Rparen, p)\n    let argName = Location.mkloc(\"*\", mkLoc(startPos, p.prevEndPos))\n    Some(attrs, argName, None, startPos)\n  | _ => None\n  }\n}\n\nand parseFunctorArgs = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Lparen, p)\n  let args = parseCommaDelimitedRegion(\n    ~grammar=Grammar.FunctorArgs,\n    ~closing=Rparen,\n    ~f=parseFunctorArg,\n    p,\n  )\n\n  Parser.expect(Rparen, p)\n  switch args {\n  | list{} => list{(list{}, Location.mkloc(\"*\", mkLoc(startPos, p.prevEndPos)), None, startPos)}\n  | args => args\n  }\n}\n\nand parseFunctorModuleExpr = p => {\n  let startPos = p.Parser.startPos\n  let args = parseFunctorArgs(p)\n  let returnType = switch p.Parser.token {\n  | Colon =>\n    Parser.next(p)\n    
Some(parseModuleType(~es6Arrow=false, p))\n  | _ => None\n  }\n\n  Parser.expect(EqualGreater, p)\n  let rhsModuleExpr = {\n    let modExpr = parseModuleExpr(p)\n    switch returnType {\n    | Some(modType) =>\n      Ast_helper.Mod.constraint_(\n        ~loc=mkLoc(modExpr.pmod_loc.loc_start, modType.Parsetree.pmty_loc.loc_end),\n        modExpr,\n        modType,\n      )\n    | None => modExpr\n    }\n  }\n\n  let endPos = p.prevEndPos\n  let modExpr = List.fold_right(\n    ((attrs, name, moduleType, startPos), acc) =>\n      Ast_helper.Mod.functor_(~loc=mkLoc(startPos, endPos), ~attrs, name, moduleType, acc),\n    args,\n    rhsModuleExpr,\n  )\n\n  {...modExpr, pmod_loc: mkLoc(startPos, endPos)}\n}\n\n/* module-expr\t::=\n *  | module-path\n *  ∣\t{ structure-items }\n *  ∣\tfunctorArgs =>  module-expr\n *  ∣\tmodule-expr(module-expr)\n *  ∣\t( module-expr )\n *  ∣\t( module-expr : module-type )\n *  | extension\n *  | attributes module-expr */\nand parseModuleExpr = p => {\n  let attrs = parseAttributes(p)\n  let modExpr = if isEs6ArrowFunctor(p) {\n    parseFunctorModuleExpr(p)\n  } else {\n    parsePrimaryModExpr(p)\n  }\n\n  {...modExpr, pmod_attributes: List.concat(list{modExpr.pmod_attributes, attrs})}\n}\n\nand parseConstrainedModExpr = p => {\n  let modExpr = parseModuleExpr(p)\n  switch p.Parser.token {\n  | Colon =>\n    Parser.next(p)\n    let modType = parseModuleType(p)\n    let loc = mkLoc(modExpr.pmod_loc.loc_start, modType.pmty_loc.loc_end)\n    Ast_helper.Mod.constraint_(~loc, modExpr, modType)\n  | _ => modExpr\n  }\n}\n\nand parseConstrainedModExprRegion = p =>\n  if Grammar.isModExprStart(p.Parser.token) {\n    Some(parseConstrainedModExpr(p))\n  } else {\n    None\n  }\n\nand parseModuleApplication = (p, modExpr) => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Lparen, p)\n  let args = parseCommaDelimitedRegion(\n    ~grammar=Grammar.ModExprList,\n    ~closing=Rparen,\n    ~f=parseConstrainedModExprRegion,\n    p,\n  )\n\n  
Parser.expect(Rparen, p)\n  let args = switch args {\n  | list{} =>\n    let loc = mkLoc(startPos, p.prevEndPos)\n    list{Ast_helper.Mod.structure(~loc, list{})}\n  | args => args\n  }\n\n  List.fold_left(\n    (modExpr, arg) =>\n      Ast_helper.Mod.apply(\n        ~loc=mkLoc(modExpr.Parsetree.pmod_loc.loc_start, arg.Parsetree.pmod_loc.loc_end),\n        modExpr,\n        arg,\n      ),\n    modExpr,\n    args,\n  )\n}\n\nand parseModuleOrModuleTypeImplOrPackExpr = (~attrs, p) => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Module, p)\n  switch p.Parser.token {\n  | Typ => parseModuleTypeImpl(~attrs, startPos, p)\n  | Lparen =>\n    let expr = parseFirstClassModuleExpr(~startPos, p)\n    let a = parsePrimaryExpr(~operand=expr, p)\n    let expr = parseBinaryExpr(~a, p, 1)\n    let expr = parseTernaryExpr(expr, p)\n    Ast_helper.Str.eval(~attrs, expr)\n  | _ => parseMaybeRecModuleBinding(~attrs, ~startPos, p)\n  }\n}\n\nand parseModuleTypeImpl = (~attrs, startPos, p) => {\n  Parser.expect(Typ, p)\n  let nameStart = p.Parser.startPos\n  let name = switch p.Parser.token {\n  | Lident(ident) =>\n    Parser.next(p)\n    let loc = mkLoc(nameStart, p.prevEndPos)\n    Location.mkloc(ident, loc)\n  | Uident(ident) =>\n    Parser.next(p)\n    let loc = mkLoc(nameStart, p.prevEndPos)\n    Location.mkloc(ident, loc)\n  | t =>\n    Parser.err(p, Diagnostics.uident(t))\n    Location.mknoloc(\"_\")\n  }\n\n  Parser.expect(Equal, p)\n  let moduleType = parseModuleType(p)\n  let moduleTypeDeclaration = Ast_helper.Mtd.mk(\n    ~attrs,\n    ~loc=mkLoc(nameStart, p.prevEndPos),\n    ~typ=moduleType,\n    name,\n  )\n\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Str.modtype(~loc, moduleTypeDeclaration)\n}\n\n/* definition\t::=\n  ∣\t module rec module-name :  module-type =  module-expr   { and module-name\n  :  module-type =  module-expr } */\nand parseMaybeRecModuleBinding = (~attrs, ~startPos, p) =>\n  switch p.Parser.token {\n  | Token.Rec =>\n    
Parser.next(p)\n    Ast_helper.Str.rec_module(parseModuleBindings(~startPos, ~attrs, p))\n  | _ => Ast_helper.Str.module_(parseModuleBinding(~attrs, ~startPos=p.Parser.startPos, p))\n  }\n\nand parseModuleBinding = (~attrs, ~startPos, p) => {\n  let name = switch p.Parser.token {\n  | Uident(ident) =>\n    let startPos = p.Parser.startPos\n    Parser.next(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Location.mkloc(ident, loc)\n  | t =>\n    Parser.err(p, Diagnostics.uident(t))\n    Location.mknoloc(\"_\")\n  }\n\n  let body = parseModuleBindingBody(p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Mb.mk(~attrs, ~loc, name, body)\n}\n\nand parseModuleBindingBody = p => {\n  /* TODO: make required with good error message when rec module binding */\n  let returnModType = switch p.Parser.token {\n  | Colon =>\n    Parser.next(p)\n    Some(parseModuleType(p))\n  | _ => None\n  }\n\n  Parser.expect(Equal, p)\n  let modExpr = parseModuleExpr(p)\n  switch returnModType {\n  | Some(modType) =>\n    Ast_helper.Mod.constraint_(\n      ~loc=mkLoc(modType.pmty_loc.loc_start, modExpr.pmod_loc.loc_end),\n      modExpr,\n      modType,\n    )\n  | None => modExpr\n  }\n}\n\n/* module-name :  module-type =  module-expr\n * { and module-name :  module-type =  module-expr } */\nand parseModuleBindings = (~attrs, ~startPos, p) => {\n  let rec loop = (p, acc) => {\n    let startPos = p.Parser.startPos\n    let attrs = parseAttributesAndBinding(p)\n    switch p.Parser.token {\n    | And =>\n      Parser.next(p)\n      ignore(Parser.optional(p, Module)) /* over-parse for fault-tolerance */\n      let modBinding = parseModuleBinding(~attrs, ~startPos, p)\n      loop(p, list{modBinding, ...acc})\n    | _ => List.rev(acc)\n    }\n  }\n\n  let first = parseModuleBinding(~attrs, ~startPos, p)\n  loop(p, list{first})\n}\n\nand parseAtomicModuleType = p => {\n  let startPos = p.Parser.startPos\n  let moduleType = switch p.Parser.token {\n  | Uident(_) | Lident(_) =>\n    /* 
Ocaml allows module types to end with lowercase: module Foo : bar = { ... }\n     * lets go with uppercase terminal for now */\n    let moduleLongIdent = parseModuleLongIdent(~lowercase=true, p)\n    Ast_helper.Mty.ident(~loc=moduleLongIdent.loc, moduleLongIdent)\n  | Lparen =>\n    Parser.next(p)\n    let mty = parseModuleType(p)\n    Parser.expect(Rparen, p)\n    {...mty, pmty_loc: mkLoc(startPos, p.prevEndPos)}\n  | Lbrace =>\n    Parser.next(p)\n    let spec = parseDelimitedRegion(\n      ~grammar=Grammar.Signature,\n      ~closing=Rbrace,\n      ~f=parseSignatureItemRegion,\n      p,\n    )\n\n    Parser.expect(Rbrace, p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Mty.signature(~loc, spec)\n  | Module =>\n    /* TODO: check if this is still atomic when implementing first class modules */\n    parseModuleTypeOf(p)\n  | Percent =>\n    let extension = parseExtension(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Ast_helper.Mty.extension(~loc, extension)\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    Recover.defaultModuleType()\n  }\n\n  let moduleTypeLoc = mkLoc(startPos, p.prevEndPos)\n  {...moduleType, pmty_loc: moduleTypeLoc}\n}\n\nand parseFunctorModuleType = p => {\n  let startPos = p.Parser.startPos\n  let args = parseFunctorArgs(p)\n  Parser.expect(EqualGreater, p)\n  let rhs = parseModuleType(p)\n  let endPos = p.prevEndPos\n  let modType = List.fold_right(\n    ((attrs, name, moduleType, startPos), acc) =>\n      Ast_helper.Mty.functor_(~loc=mkLoc(startPos, endPos), ~attrs, name, moduleType, acc),\n    args,\n    rhs,\n  )\n\n  {...modType, pmty_loc: mkLoc(startPos, endPos)}\n}\n\n/* Module types are the module-level equivalent of type expressions: they\n * specify the general shape and type properties of modules.\n *\n * module-type ::=\n *  | modtype-path\n *  | { signature }\n *  | ( module-type )               --> parenthesized module-type\n *  | functor-args => module-type   --> functor\n 
*  | module-type => module-type    --> functor\n *  | module type of module-expr\n *  | attributes module-type\n *  | module-type with-mod-constraints\n *  | extension\n */\nand parseModuleType = (~es6Arrow=true, ~with_=true, p) => {\n  let attrs = parseAttributes(p)\n  let modty = if es6Arrow && isEs6ArrowFunctor(p) {\n    parseFunctorModuleType(p)\n  } else {\n    let modty = parseAtomicModuleType(p)\n    switch p.Parser.token {\n    | EqualGreater if es6Arrow === true =>\n      Parser.next(p)\n      let rhs = parseModuleType(~with_=false, p)\n      let str = Location.mknoloc(\"_\")\n      let loc = mkLoc(modty.pmty_loc.loc_start, p.prevEndPos)\n      Ast_helper.Mty.functor_(~loc, str, Some(modty), rhs)\n    | _ => modty\n    }\n  }\n\n  let moduleType = {\n    ...modty,\n    pmty_attributes: List.concat(list{modty.pmty_attributes, attrs}),\n  }\n  if with_ {\n    parseWithConstraints(moduleType, p)\n  } else {\n    moduleType\n  }\n}\n\nand parseWithConstraints = (moduleType, p) =>\n  switch p.Parser.token {\n  | Lident(\"with\") =>\n    Parser.next(p)\n    let first = parseWithConstraint(p)\n    let rec loop = (p, acc) =>\n      switch p.Parser.token {\n      | And =>\n        Parser.next(p)\n        loop(p, list{parseWithConstraint(p), ...acc})\n      | _ => List.rev(acc)\n      }\n\n    let constraints = loop(p, list{first})\n    let loc = mkLoc(moduleType.pmty_loc.loc_start, p.prevEndPos)\n    Ast_helper.Mty.with_(~loc, moduleType, constraints)\n  | _ => moduleType\n  }\n\n/* mod-constraint\t::=\n *  |  type typeconstr<type-params> type-equation type-constraints?\n *  ∣\t type typeconstr-name<type-params> := typexpr\n *  ∣\t module module-path = extended-module-path\n *  ∣\t module module-path :=  extended-module-path\n *\n *  TODO: split this up into multiple functions, better errors */\nand parseWithConstraint = p =>\n  switch p.Parser.token {\n  | Module =>\n    Parser.next(p)\n    let modulePath = parseModuleLongIdent(~lowercase=false, p)\n    switch 
p.Parser.token {\n    | ColonEqual =>\n      Parser.next(p)\n      let lident = parseModuleLongIdent(~lowercase=false, p)\n      Parsetree.Pwith_modsubst(modulePath, lident)\n    | Equal =>\n      Parser.next(p)\n      let lident = parseModuleLongIdent(~lowercase=false, p)\n      Parsetree.Pwith_module(modulePath, lident)\n    | token =>\n      /* TODO: revisit */\n      Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n      let lident = parseModuleLongIdent(~lowercase=false, p)\n      Parsetree.Pwith_modsubst(modulePath, lident)\n    }\n  | Typ =>\n    Parser.next(p)\n    let typeConstr = parseValuePath(p)\n    let params = parseTypeParams(~parent=typeConstr, p)\n    switch p.Parser.token {\n    | ColonEqual =>\n      Parser.next(p)\n      let typExpr = parseTypExpr(p)\n      Parsetree.Pwith_typesubst(\n        typeConstr,\n        Ast_helper.Type.mk(\n          ~loc=typeConstr.loc,\n          ~params,\n          ~manifest=typExpr,\n          Location.mkloc(Longident.last(typeConstr.txt), typeConstr.loc),\n        ),\n      )\n    | Equal =>\n      Parser.next(p)\n      let typExpr = parseTypExpr(p)\n      let typeConstraints = parseTypeConstraints(p)\n      Parsetree.Pwith_type(\n        typeConstr,\n        Ast_helper.Type.mk(\n          ~loc=typeConstr.loc,\n          ~params,\n          ~manifest=typExpr,\n          ~cstrs=typeConstraints,\n          Location.mkloc(Longident.last(typeConstr.txt), typeConstr.loc),\n        ),\n      )\n    | token =>\n      /* TODO: revisit */\n      Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n      let typExpr = parseTypExpr(p)\n      let typeConstraints = parseTypeConstraints(p)\n      Parsetree.Pwith_type(\n        typeConstr,\n        Ast_helper.Type.mk(\n          ~loc=typeConstr.loc,\n          ~params,\n          ~manifest=typExpr,\n          ~cstrs=typeConstraints,\n          Location.mkloc(Longident.last(typeConstr.txt), typeConstr.loc),\n        ),\n      )\n    }\n  | token =>\n    /* 
TODO: implement recovery strategy */\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    Parsetree.Pwith_type(\n      Location.mknoloc(Longident.Lident(\"\")),\n      Ast_helper.Type.mk(\n        ~params=list{},\n        ~manifest=Recover.defaultType(),\n        ~cstrs=list{},\n        Location.mknoloc(\"\"),\n      ),\n    )\n  }\n\nand parseModuleTypeOf = p => {\n  let startPos = p.Parser.startPos\n  Parser.expect(Module, p)\n  Parser.expect(Typ, p)\n  Parser.expect(Of, p)\n  let moduleExpr = parseModuleExpr(p)\n  Ast_helper.Mty.typeof_(~loc=mkLoc(startPos, p.prevEndPos), moduleExpr)\n}\n\nand parseNewlineOrSemicolonSignature = p =>\n  switch p.Parser.token {\n  | Semicolon => Parser.next(p)\n  | token if Grammar.isSignatureItemStart(token) =>\n    if p.prevEndPos.pos_lnum < p.startPos.pos_lnum {\n      ()\n    } else {\n      Parser.err(\n        ~startPos=p.prevEndPos,\n        ~endPos=p.endPos,\n        p,\n        Diagnostics.message(\n          \"consecutive specifications on a line must be separated by ';' or a newline\",\n        ),\n      )\n    }\n  | _ => ()\n  }\n\n@progress((Parser.next, Parser.expect, Parser.checkProgress))\nand parseSignatureItemRegion = p => {\n  let startPos = p.Parser.startPos\n  let attrs = parseAttributes(p)\n  switch p.Parser.token {\n  | Let =>\n    Parser.beginRegion(p)\n    let valueDesc = parseSignLetDesc(~attrs, p)\n    parseNewlineOrSemicolonSignature(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Parser.endRegion(p)\n    Some(Ast_helper.Sig.value(~loc, valueDesc))\n  | Typ =>\n    Parser.beginRegion(p)\n    switch parseTypeDefinitionOrExtension(~attrs, p) {\n    | TypeDef({recFlag, types}) =>\n      parseNewlineOrSemicolonSignature(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Parser.endRegion(p)\n      Some(Ast_helper.Sig.type_(~loc, recFlag, types))\n    | TypeExt(ext) =>\n      parseNewlineOrSemicolonSignature(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      
Parser.endRegion(p)\n      Some(Ast_helper.Sig.type_extension(~loc, ext))\n    }\n  | External =>\n    let externalDef = parseExternalDef(~attrs, ~startPos, p)\n    parseNewlineOrSemicolonSignature(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Sig.value(~loc, externalDef))\n  | Export =>\n    let signatureItem = parseSignJsExport(~attrs, p)\n    parseNewlineOrSemicolonSignature(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some({...signatureItem, psig_loc: loc})\n  | Exception =>\n    let exceptionDef = parseExceptionDef(~attrs, p)\n    parseNewlineOrSemicolonSignature(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Sig.exception_(~loc, exceptionDef))\n  | Open =>\n    let openDescription = parseOpenDescription(~attrs, p)\n    parseNewlineOrSemicolonSignature(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Sig.open_(~loc, openDescription))\n  | Include =>\n    Parser.next(p)\n    let moduleType = parseModuleType(p)\n    let includeDescription = Ast_helper.Incl.mk(\n      ~loc=mkLoc(startPos, p.prevEndPos),\n      ~attrs,\n      moduleType,\n    )\n\n    parseNewlineOrSemicolonSignature(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Sig.include_(~loc, includeDescription))\n  | Module =>\n    Parser.beginRegion(p)\n    Parser.next(p)\n    switch p.Parser.token {\n    | Uident(_) =>\n      let modDecl = parseModuleDeclarationOrAlias(~attrs, p)\n      parseNewlineOrSemicolonSignature(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Parser.endRegion(p)\n      Some(Ast_helper.Sig.module_(~loc, modDecl))\n    | Rec =>\n      let recModule = parseRecModuleSpec(~attrs, ~startPos, p)\n      parseNewlineOrSemicolonSignature(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Parser.endRegion(p)\n      Some(Ast_helper.Sig.rec_module(~loc, recModule))\n    | Typ =>\n      let modTypeDecl = parseModuleTypeDeclaration(~attrs, ~startPos, p)\n      Parser.endRegion(p)\n      
Some(modTypeDecl)\n    | _t =>\n      let modDecl = parseModuleDeclarationOrAlias(~attrs, p)\n      parseNewlineOrSemicolonSignature(p)\n      let loc = mkLoc(startPos, p.prevEndPos)\n      Parser.endRegion(p)\n      Some(Ast_helper.Sig.module_(~loc, modDecl))\n    }\n  | AtAt =>\n    let attr = parseStandaloneAttribute(p)\n    parseNewlineOrSemicolonSignature(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Sig.attribute(~loc, attr))\n  | PercentPercent =>\n    let extension = parseExtension(~moduleLanguage=true, p)\n    parseNewlineOrSemicolonSignature(p)\n    let loc = mkLoc(startPos, p.prevEndPos)\n    Some(Ast_helper.Sig.extension(~attrs, ~loc, extension))\n  | Import =>\n    Parser.next(p)\n    parseSignatureItemRegion(p)\n  | _ =>\n    switch attrs {\n    | list{({Asttypes.loc: attrLoc}, _) as attr, ..._} =>\n      Parser.err(\n        ~startPos=attrLoc.loc_start,\n        ~endPos=attrLoc.loc_end,\n        p,\n        Diagnostics.message(ErrorMessages.attributeWithoutNode(attr)),\n      )\n      Some(Recover.defaultSignatureItem)\n    | _ => None\n    }\n  }\n}\n\n/* module rec module-name :  module-type  { and module-name:  module-type } */\nand parseRecModuleSpec = (~attrs, ~startPos, p) => {\n  Parser.expect(Rec, p)\n  let rec loop = (p, spec) => {\n    let startPos = p.Parser.startPos\n    let attrs = parseAttributesAndBinding(p)\n    switch p.Parser.token {\n    | And =>\n      /* TODO: give a good error message when with constraint, no parens\n       * and ASet: (Set.S with type elt = A.t)\n       * and BTree: (Btree.S with type elt = A.t)\n       * Without parens, the `and` signals the start of another\n       * `with-constraint`\n       */\n      Parser.expect(And, p)\n      let decl = parseRecModuleDeclaration(~attrs, ~startPos, p)\n      loop(p, list{decl, ...spec})\n    | _ => List.rev(spec)\n    }\n  }\n\n  let first = parseRecModuleDeclaration(~attrs, ~startPos, p)\n  loop(p, list{first})\n}\n\n/* module-name : module-type 
*/\nand parseRecModuleDeclaration = (~attrs, ~startPos, p) => {\n  let name = switch p.Parser.token {\n  | Uident(modName) =>\n    let loc = mkLoc(p.startPos, p.endPos)\n    Parser.next(p)\n    Location.mkloc(modName, loc)\n  | t =>\n    Parser.err(p, Diagnostics.uident(t))\n    Location.mknoloc(\"_\")\n  }\n\n  Parser.expect(Colon, p)\n  let modType = parseModuleType(p)\n  Ast_helper.Md.mk(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, name, modType)\n}\n\nand parseModuleDeclarationOrAlias = (~attrs, p) => {\n  let startPos = p.Parser.startPos\n  let moduleName = switch p.Parser.token {\n  | Uident(ident) =>\n    let loc = mkLoc(p.Parser.startPos, p.endPos)\n    Parser.next(p)\n    Location.mkloc(ident, loc)\n  | t =>\n    Parser.err(p, Diagnostics.uident(t))\n    Location.mknoloc(\"_\")\n  }\n\n  let body = switch p.Parser.token {\n  | Colon =>\n    Parser.next(p)\n    parseModuleType(p)\n  | Equal =>\n    Parser.next(p)\n    let lident = parseModuleLongIdent(~lowercase=false, p)\n    Ast_helper.Mty.alias(lident)\n  | token =>\n    Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n    Recover.defaultModuleType()\n  }\n\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Md.mk(~loc, ~attrs, moduleName, body)\n}\n\nand parseModuleTypeDeclaration = (~attrs, ~startPos, p) => {\n  Parser.expect(Typ, p)\n  let moduleName = switch p.Parser.token {\n  | Uident(ident) =>\n    let loc = mkLoc(p.startPos, p.endPos)\n    Parser.next(p)\n    Location.mkloc(ident, loc)\n  | Lident(ident) =>\n    let loc = mkLoc(p.startPos, p.endPos)\n    Parser.next(p)\n    Location.mkloc(ident, loc)\n  | t =>\n    Parser.err(p, Diagnostics.uident(t))\n    Location.mknoloc(\"_\")\n  }\n\n  let typ = switch p.Parser.token {\n  | Equal =>\n    Parser.next(p)\n    Some(parseModuleType(p))\n  | _ => None\n  }\n\n  let moduleDecl = Ast_helper.Mtd.mk(~attrs, ~typ?, moduleName)\n  Ast_helper.Sig.modtype(~loc=mkLoc(startPos, p.prevEndPos), moduleDecl)\n}\n\nand parseSignLetDesc = (~attrs, 
p) => {\n  let startPos = p.Parser.startPos\n  Parser.optional(p, Let) |> ignore\n  let (name, loc) = parseLident(p)\n  let name = Location.mkloc(name, loc)\n  Parser.expect(Colon, p)\n  let typExpr = parsePolyTypeExpr(p)\n  let loc = mkLoc(startPos, p.prevEndPos)\n  Ast_helper.Val.mk(~loc, ~attrs, name, typExpr)\n}\n\n/* attr-id\t::=\tlowercase-ident\n∣\t  capitalized-ident\n∣\t  attr-id .  attr-id */\nand parseAttributeId = (~startPos, p) => {\n  let rec loop = (p, acc) =>\n    switch p.Parser.token {\n    | Lident(ident) | Uident(ident) =>\n      Parser.next(p)\n      let id = acc ++ ident\n      switch p.Parser.token {\n      | Dot =>\n        Parser.next(p)\n        loop(p, id ++ \".\")\n      | _ => id\n      }\n    | token if Token.isKeyword(token) =>\n      Parser.next(p)\n      let id = acc ++ Token.toString(token)\n      switch p.Parser.token {\n      | Dot =>\n        Parser.next(p)\n        loop(p, id ++ \".\")\n      | _ => id\n      }\n    | token =>\n      Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))\n      acc\n    }\n\n  let id = loop(p, \"\")\n  let endPos = p.prevEndPos\n  Location.mkloc(id, mkLoc(startPos, endPos))\n}\n\n/*\n * payload ::=  empty\n *          |  ( structure-item )\n *\n * TODO: what about multiple structure items?\n * @attr({let x = 1; let x = 2})\n *\n * Also what about type-expressions and specifications?\n * @attr(:myType) ???\n */\nand parsePayload = p =>\n  switch p.Parser.token {\n  | Lparen if p.startPos.pos_cnum == p.prevEndPos.pos_cnum =>\n    Parser.leaveBreadcrumb(p, Grammar.AttributePayload)\n    Parser.next(p)\n    switch p.token {\n    | Colon =>\n      Parser.next(p)\n      let payload = if Grammar.isSignatureItemStart(p.token) {\n        Parsetree.PSig(\n          parseDelimitedRegion(\n            ~grammar=Grammar.Signature,\n            ~closing=Rparen,\n            ~f=parseSignatureItemRegion,\n            p,\n          ),\n        )\n      } else {\n        Parsetree.PTyp(parseTypExpr(p))\n     
 }\n\n      Parser.expect(Rparen, p)\n      Parser.eatBreadcrumb(p)\n      payload\n    | Question =>\n      Parser.next(p)\n      let pattern = parsePattern(p)\n      let expr = switch p.token {\n      | When | If =>\n        Parser.next(p)\n        Some(parseExpr(p))\n      | _ => None\n      }\n\n      Parser.expect(Rparen, p)\n      Parser.eatBreadcrumb(p)\n      Parsetree.PPat(pattern, expr)\n    | _ =>\n      let items = parseDelimitedRegion(\n        ~grammar=Grammar.Structure,\n        ~closing=Rparen,\n        ~f=parseStructureItemRegion,\n        p,\n      )\n\n      Parser.expect(Rparen, p)\n      Parser.eatBreadcrumb(p)\n      Parsetree.PStr(items)\n    }\n  | _ => Parsetree.PStr(list{})\n  }\n\n/* type attribute = string loc * payload */\nand parseAttribute = p =>\n  switch p.Parser.token {\n  | At =>\n    let startPos = p.startPos\n    Parser.next(p)\n    let attrId = parseAttributeId(~startPos, p)\n    let payload = parsePayload(p)\n    Some(attrId, payload)\n  | _ => None\n  }\n\nand parseAttributes = p => parseRegion(p, ~grammar=Grammar.Attribute, ~f=parseAttribute)\n\n/*\n * standalone-attribute ::=\n *  | @@ atribute-id\n *  | @@ attribute-id ( structure-item )\n */\nand parseStandaloneAttribute = p => {\n  let startPos = p.startPos\n  Parser.expect(AtAt, p)\n  let attrId = parseAttributeId(~startPos, p)\n  let payload = parsePayload(p)\n  (attrId, payload)\n}\n\n/* extension\t::=\t% attr-id  attr-payload\n *              | %% attr-id(\n *  expr\t::=\t ...\n *    ∣\t extension\n *\n *  typexpr\t::=\t ...\n *    ∣\t extension\n *\n *  pattern\t::=\t ...\n *    ∣\t extension\n *\n *  module-expr\t::=\t ...\n *    ∣\t extension\n *\n *  module-type\t::=\t ...\n *    ∣\t extension\n *\n *  class-expr\t::=\t ...\n *    ∣\t extension\n *\n *  class-type\t::=\t ...\n *    ∣\t extension\n *\n *\n * item extension nodes usable in structures and signature\n *\n * item-extension ::= %% attr-id\n *                  | %% attr-id(structure-item)\n *\n *  
attr-payload ::= structure-item\n *\n *  ~moduleLanguage represents whether we're on the module level or not\n */\nand parseExtension = (~moduleLanguage=false, p) => {\n  let startPos = p.Parser.startPos\n  if moduleLanguage {\n    Parser.expect(PercentPercent, p)\n  } else {\n    Parser.expect(Percent, p)\n  }\n  let attrId = parseAttributeId(~startPos, p)\n  let payload = parsePayload(p)\n  (attrId, payload)\n}\n\n/* module signature on the file level */\nlet parseSpecification = (p): Parsetree.signature =>\n  parseRegion(p, ~grammar=Grammar.Specification, ~f=parseSignatureItemRegion)\n\n/* module structure on the file level */\nlet parseImplementation = (p): Parsetree.structure =>\n  parseRegion(p, ~grammar=Grammar.Implementation, ~f=parseStructureItemRegion)\n\n\nlet _ = parseImplementation"
  },
  {
    "path": "analysis/examples/larger-project/src/res_diagnostics.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Format from \"./format.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Res_token from \"./res_token.js\";\nimport * as Res_grammar from \"./res_grammar.js\";\nimport * as Res_diagnostics_printing_utils from \"./res_diagnostics_printing_utils.js\";\n\nfunction getStartPos(t) {\n  return t.startPos;\n}\n\nfunction getEndPos(t) {\n  return t.endPos;\n}\n\nfunction defaultUnexpected(token) {\n  return \"I'm not sure what to parse here when looking at \\\"\" + (Res_token.toString(token) + \"\\\".\");\n}\n\nfunction reservedKeyword(token) {\n  var tokenTxt = Res_token.toString(token);\n  return \"`\" + (tokenTxt + (\"` is a reserved keyword. Keywords need to be escaped: \\\\\\\"\" + (tokenTxt + \"\\\"\")));\n}\n\nfunction explain(t) {\n  var currentToken = t.category;\n  if (typeof currentToken === \"number\") {\n    switch (currentToken) {\n      case /* UnclosedString */0 :\n          return \"This string is missing a double quote at the end\";\n      case /* UnclosedTemplate */1 :\n          return \"Did you forget to close this template expression with a backtick?\";\n      case /* UnclosedComment */2 :\n          return \"This comment seems to be missing a closing `*/`\";\n      \n    }\n  } else {\n    switch (currentToken.TAG | 0) {\n      case /* Unexpected */0 :\n          var breadcrumbs = currentToken.context;\n          var t$1 = currentToken.token;\n          var name = Res_token.toString(t$1);\n          if (breadcrumbs) {\n            var match = breadcrumbs.hd[0];\n            if (typeof match === \"number\") {\n              if (match >= 32) {\n                if (match !== 52) {\n                  if (match === 55) {\n                    var breadcrumbs$1 = breadcrumbs.tl;\n                    var exit = 0;\n                    if (typeof 
t$1 === \"number\") {\n                      if (t$1 !== 14) {\n                        if (t$1 !== 53) {\n                          if (t$1 !== 57 || !breadcrumbs$1) {\n                            exit = 2;\n                          } else {\n                            if (breadcrumbs$1.hd[0] === 23) {\n                              return \"I was expecting a pattern to match on before the `=>`\";\n                            }\n                            exit = 2;\n                          }\n                        } else if (breadcrumbs$1) {\n                          if (breadcrumbs$1.hd[0] === 16) {\n                            return \"A for-loop has the following form: `for i in 0 to 10`. Did you forget to supply a name before `in`?\";\n                          }\n                          exit = 2;\n                        } else {\n                          exit = 2;\n                        }\n                      } else if (breadcrumbs$1) {\n                        if (breadcrumbs$1.hd[0] === 24) {\n                          return \"I was expecting a name for this let-binding. 
Example: `let message = \\\"hello\\\"`\";\n                        }\n                        exit = 2;\n                      } else {\n                        exit = 2;\n                      }\n                    } else {\n                      exit = 2;\n                    }\n                    if (exit === 2) {\n                      if (Res_token.isKeyword(t$1)) {\n                        return reservedKeyword(t$1);\n                      } else {\n                        return defaultUnexpected(t$1);\n                      }\n                    }\n                    \n                  }\n                  \n                } else {\n                  var breadcrumbs$2 = breadcrumbs.tl;\n                  var exit$1 = 0;\n                  if (breadcrumbs$2) {\n                    var match$1 = breadcrumbs$2.hd[0];\n                    if (typeof match$1 === \"number\" && (match$1 === 38 || match$1 === 37)) {\n                      if (typeof t$1 === \"number\") {\n                        switch (t$1) {\n                          case /* Rbrace */23 :\n                          case /* Comma */25 :\n                          case /* Eof */26 :\n                          case /* At */75 :\n                              return \"I'm missing a type here\";\n                          default:\n                            exit$1 = 2;\n                        }\n                      } else {\n                        if (t$1.TAG === /* String */3) {\n                          return \"I'm missing a type here\";\n                        }\n                        exit$1 = 2;\n                      }\n                    } else {\n                      exit$1 = 2;\n                    }\n                  } else {\n                    exit$1 = 2;\n                  }\n                  if (exit$1 === 2) {\n                    if (Res_grammar.isStructureItemStart(t$1) || t$1 === /* Eof */26) {\n                      return \"Missing a type here\";\n            
        } else {\n                      return defaultUnexpected(t$1);\n                    }\n                  }\n                  \n                }\n              } else if (match !== 7) {\n                if (match >= 31) {\n                  if (typeof t$1 === \"number\") {\n                    return \"I'm not sure what to parse here when looking at \\\"\" + (name + \"\\\".\");\n                  } else if (t$1.TAG === /* Lident */4) {\n                    return \"Did you mean '\" + (t$1._0 + \"? A Type parameter starts with a quote.\");\n                  } else {\n                    return \"I'm not sure what to parse here when looking at \\\"\" + (name + \"\\\".\");\n                  }\n                }\n                \n              } else {\n                var breadcrumbs$3 = breadcrumbs.tl;\n                var exit$2 = 0;\n                if (breadcrumbs$3) {\n                  var match$2 = breadcrumbs$3.hd[0];\n                  var exit$3 = 0;\n                  if (typeof match$2 !== \"number\") {\n                    return \"Did you forget to write an expression here?\";\n                  }\n                  switch (match$2) {\n                    case /* ExprUnary */8 :\n                        return \"Did you forget to write an expression here?\";\n                    case /* ExprSetField */9 :\n                        return \"It seems that this record field mutation misses an expression\";\n                    case /* ExprBlock */10 :\n                        if (typeof t$1 === \"number\") {\n                          if (t$1 === 17) {\n                            return \"Looks like there might be an expression missing here\";\n                          }\n                          if (t$1 === 23) {\n                            return \"It seems that this expression block is empty\";\n                          }\n                          exit$3 = 3;\n                        } else {\n                          exit$3 = 3;\n      
                  }\n                        break;\n                    case /* ExprArrayMutation */14 :\n                        return \"Seems that an expression is missing, with what do I mutate the array?\";\n                    case /* ExprCall */11 :\n                    case /* ExprList */12 :\n                    case /* ExprArrayAccess */13 :\n                    case /* ExprIf */15 :\n                    case /* ExprFor */16 :\n                    case /* IfCondition */17 :\n                    case /* IfBranch */18 :\n                    case /* ElseBranch */19 :\n                    case /* TypeExpression */20 :\n                    case /* External */21 :\n                    case /* PatternMatching */22 :\n                    case /* PatternMatchCase */23 :\n                        exit$3 = 3;\n                        break;\n                    case /* LetBinding */24 :\n                        return \"This let-binding misses an expression\";\n                    default:\n                      exit$3 = 3;\n                  }\n                  if (exit$3 === 3) {\n                    if (typeof t$1 === \"number\") {\n                      switch (t$1) {\n                        case /* Lbrace */22 :\n                        case /* Colon */24 :\n                        case /* Comma */25 :\n                            exit$2 = 2;\n                            break;\n                        case /* Rbracket */21 :\n                        case /* Rbrace */23 :\n                        case /* Eof */26 :\n                            return \"Missing expression\";\n                        default:\n                          exit$2 = 2;\n                      }\n                    } else {\n                      exit$2 = 2;\n                    }\n                  }\n                  \n                } else {\n                  exit$2 = 2;\n                }\n                if (exit$2 === 2) {\n                  return \"I'm not sure what to 
parse here when looking at \\\"\" + (name + \"\\\".\");\n                }\n                \n              }\n            }\n            \n          }\n          if (Res_token.isKeyword(t$1)) {\n            return \"`\" + (name + (\"` is a reserved keyword. Keywords need to be escaped: \\\\\\\"\" + (Res_token.toString(t$1) + \"\\\"\")));\n          } else {\n            return \"I'm not sure what to parse here when looking at \\\"\" + (name + \"\\\".\");\n          }\n      case /* Expected */1 :\n          var context = currentToken.context;\n          var hint = context !== undefined ? \" It signals the start of \" + Res_grammar.toString(context) : \"\";\n          return \"Did you forget a `\" + (Res_token.toString(currentToken.token) + (\"` here?\" + hint));\n      case /* Message */2 :\n          return currentToken._0;\n      case /* Uident */3 :\n          var currentToken$1 = currentToken._0;\n          if (typeof currentToken$1 !== \"number\" && currentToken$1.TAG === /* Lident */4) {\n            var lident = currentToken$1._0;\n            var guess = $$String.capitalize_ascii(lident);\n            return \"Did you mean `\" + (guess + (\"` instead of `\" + (lident + \"`?\")));\n          }\n          if (!Res_token.isKeyword(currentToken$1)) {\n            return \"At this point, I'm looking for an uppercased name like `Belt` or `Array`\";\n          }\n          var token = Res_token.toString(currentToken$1);\n          return \"`\" + (token + \"` is a reserved keyword.\");\n          break;\n      case /* Lident */4 :\n          var currentToken$2 = currentToken._0;\n          if (typeof currentToken$2 !== \"number\" && currentToken$2.TAG === /* Uident */5) {\n            var uident = currentToken$2._0;\n            var guess$1 = $$String.uncapitalize_ascii(uident);\n            return \"Did you mean `\" + (guess$1 + (\"` instead of `\" + (uident + \"`?\")));\n          }\n          if (!Res_token.isKeyword(currentToken$2)) {\n            if 
(currentToken$2 === 12) {\n              return \"`_` isn't a valid name.\";\n            } else {\n              return \"I'm expecting a lowercase name like `user or `age`\";\n            }\n          }\n          var token$1 = Res_token.toString(currentToken$2);\n          return \"`\" + (token$1 + (\"` is a reserved keyword. Keywords need to be escaped: \\\\\\\"\" + (token$1 + \"\\\"\")));\n          break;\n      case /* UnknownUchar */5 :\n          if (currentToken._0 !== 94) {\n            return \"Not sure what to do with this character.\";\n          } else {\n            return \"Not sure what to do with this character.\\n  If you're trying to dereference a mutable value, use `myValue.contents` instead.\\n  To concatenate strings, use `\\\"a\\\" ++ \\\"b\\\"` instead.\";\n          }\n      \n    }\n  }\n}\n\nfunction make(startPos, endPos, category) {\n  return {\n          startPos: startPos,\n          endPos: endPos,\n          category: category\n        };\n}\n\nfunction printReport(diagnostics, src) {\n  var print = function (_diagnostics, src) {\n    while(true) {\n      var diagnostics = _diagnostics;\n      if (!diagnostics) {\n        return ;\n      }\n      var rest = diagnostics.tl;\n      var d = diagnostics.hd;\n      Res_diagnostics_printing_utils.Super_location.super_error_reporter(Format.err_formatter, src, {\n            loc: {\n              loc_start: d.startPos,\n              loc_end: d.endPos,\n              loc_ghost: false\n            },\n            msg: explain(d),\n            sub: /* [] */0,\n            if_highlight: \"\"\n          });\n      if (rest) {\n        Curry._1(Format.fprintf(Format.err_formatter), \"@.\");\n      }\n      _diagnostics = rest;\n      continue ;\n    };\n  };\n  Curry._1(Format.fprintf(Format.err_formatter), \"@[<v>\");\n  print(List.rev(diagnostics), src);\n  return Curry._1(Format.fprintf(Format.err_formatter), \"@]@.\");\n}\n\nfunction unexpected(token, context) {\n  return {\n          TAG: 
/* Unexpected */0,\n          token: token,\n          context: context\n        };\n}\n\nfunction expected(grammar, pos, token) {\n  return {\n          TAG: /* Expected */1,\n          context: grammar,\n          pos: pos,\n          token: token\n        };\n}\n\nfunction uident(currentToken) {\n  return {\n          TAG: /* Uident */3,\n          _0: currentToken\n        };\n}\n\nfunction lident(currentToken) {\n  return {\n          TAG: /* Lident */4,\n          _0: currentToken\n        };\n}\n\nfunction unknownUchar(code) {\n  return {\n          TAG: /* UnknownUchar */5,\n          _0: code\n        };\n}\n\nfunction message(txt) {\n  return {\n          TAG: /* Message */2,\n          _0: txt\n        };\n}\n\nvar Grammar;\n\nvar Token;\n\nvar unclosedString = /* UnclosedString */0;\n\nvar unclosedComment = /* UnclosedComment */2;\n\nvar unclosedTemplate = /* UnclosedTemplate */1;\n\nexport {\n  Grammar ,\n  Token ,\n  getStartPos ,\n  getEndPos ,\n  defaultUnexpected ,\n  reservedKeyword ,\n  explain ,\n  make ,\n  printReport ,\n  unexpected ,\n  expected ,\n  uident ,\n  lident ,\n  unclosedString ,\n  unclosedComment ,\n  unclosedTemplate ,\n  unknownUchar ,\n  message ,\n  \n}\n/* Format Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_diagnostics.res",
    "content": "module Grammar = Res_grammar\nmodule Token = Res_token\n\ntype category =\n  | Unexpected({token: Token.t, context: list<(Grammar.t, Lexing.position)>})\n  | Expected({\n      context: option<Grammar.t>,\n      pos: Lexing.position /* prev token end */,\n      token: Token.t,\n    })\n  | Message(string)\n  | Uident(Token.t)\n  | Lident(Token.t)\n  | UnclosedString\n  | UnclosedTemplate\n  | UnclosedComment\n  | UnknownUchar(Char.t)\n\ntype t = {\n  startPos: Lexing.position,\n  endPos: Lexing.position,\n  category: category,\n}\n\ntype report = list<t>\n\nlet getStartPos = t => t.startPos\nlet getEndPos = t => t.endPos\n\nlet defaultUnexpected = token =>\n  \"I'm not sure what to parse here when looking at \\\"\" ++ (Token.toString(token) ++ \"\\\".\")\n\nlet reservedKeyword = token => {\n  let tokenTxt = Token.toString(token)\n  \"`\" ++\n  (tokenTxt ++\n  (\"` is a reserved keyword. Keywords need to be escaped: \\\\\\\"\" ++ (tokenTxt ++ \"\\\"\")))\n}\n\nlet explain = t =>\n  switch t.category {\n  | Uident(currentToken) =>\n    switch currentToken {\n    | Lident(lident) =>\n      let guess = String.capitalize_ascii(lident)\n      \"Did you mean `\" ++ (guess ++ (\"` instead of `\" ++ (lident ++ \"`?\")))\n    | t if Token.isKeyword(t) =>\n      let token = Token.toString(t)\n      \"`\" ++ (token ++ \"` is a reserved keyword.\")\n    | _ => \"At this point, I'm looking for an uppercased name like `Belt` or `Array`\"\n    }\n  | Lident(currentToken) =>\n    switch currentToken {\n    | Uident(uident) =>\n      let guess = String.uncapitalize_ascii(uident)\n      \"Did you mean `\" ++ (guess ++ (\"` instead of `\" ++ (uident ++ \"`?\")))\n    | t if Token.isKeyword(t) =>\n      let token = Token.toString(t)\n      \"`\" ++\n      (token ++\n      (\"` is a reserved keyword. 
Keywords need to be escaped: \\\\\\\"\" ++ (token ++ \"\\\"\")))\n    | Underscore => \"`_` isn't a valid name.\"\n    | _ => \"I'm expecting a lowercase name like `user or `age`\"\n    }\n  | Message(txt) => txt\n  | UnclosedString => \"This string is missing a double quote at the end\"\n  | UnclosedTemplate => \"Did you forget to close this template expression with a backtick?\"\n  | UnclosedComment => \"This comment seems to be missing a closing `*/`\"\n  | UnknownUchar(uchar) =>\n    switch uchar {\n    | '^' =>\n      \"Not sure what to do with this character.\\n\" ++\n      (\"  If you're trying to dereference a mutable value, use `myValue.contents` instead.\\n\" ++\n      \"  To concatenate strings, use `\\\"a\\\" ++ \\\"b\\\"` instead.\")\n    | _ => \"Not sure what to do with this character.\"\n    }\n  | Expected({context, token: t}) =>\n    let hint = switch context {\n    | Some(grammar) => \" It signals the start of \" ++ Grammar.toString(grammar)\n    | None => \"\"\n    }\n\n    \"Did you forget a `\" ++ (Token.toString(t) ++ (\"` here?\" ++ hint))\n  | Unexpected({token: t, context: breadcrumbs}) =>\n    let name = Token.toString(t)\n    switch breadcrumbs {\n    | list{(AtomicTypExpr, _), ...breadcrumbs} =>\n      switch (breadcrumbs, t) {\n      | (\n          list{(StringFieldDeclarations | FieldDeclarations, _), ..._},\n          String(_) | At | Rbrace | Comma | Eof,\n        ) => \"I'm missing a type here\"\n      | (_, t) if Grammar.isStructureItemStart(t) || t == Eof => \"Missing a type here\"\n      | _ => defaultUnexpected(t)\n      }\n    | list{(ExprOperand, _), ...breadcrumbs} =>\n      switch (breadcrumbs, t) {\n      | (list{(ExprBlock, _), ..._}, Rbrace) => \"It seems that this expression block is empty\"\n      | (list{(ExprBlock, _), ..._}, Bar) => /* Pattern matching */\n        \"Looks like there might be an expression missing here\"\n      | (\n          list{(ExprSetField, _), ..._},\n          _,\n        ) => \"It seems that 
this record field mutation misses an expression\"\n      | (\n          list{(ExprArrayMutation, _), ..._},\n          _,\n        ) => \"Seems that an expression is missing, with what do I mutate the array?\"\n      | (\n          list{(ExprBinaryAfterOp(_) | ExprUnary, _), ..._},\n          _,\n        ) => \"Did you forget to write an expression here?\"\n      | (list{(Grammar.LetBinding, _), ..._}, _) => \"This let-binding misses an expression\"\n      | (list{_, ..._}, Rbracket | Rbrace | Eof) => \"Missing expression\"\n      | _ => \"I'm not sure what to parse here when looking at \\\"\" ++ (name ++ \"\\\".\")\n      }\n    | list{(TypeParam, _), ..._} =>\n      switch t {\n      | Lident(ident) => \"Did you mean '\" ++ (ident ++ \"? A Type parameter starts with a quote.\")\n      | _ => \"I'm not sure what to parse here when looking at \\\"\" ++ (name ++ \"\\\".\")\n      }\n    | list{(Pattern, _), ...breadcrumbs} =>\n      switch (t, breadcrumbs) {\n      | (\n          Equal,\n          list{(LetBinding, _), ..._},\n        ) => \"I was expecting a name for this let-binding. Example: `let message = \\\"hello\\\"`\"\n      | (\n          In,\n          list{(ExprFor, _), ..._},\n        ) => \"A for-loop has the following form: `for i in 0 to 10`. Did you forget to supply a name before `in`?\"\n      | (\n          EqualGreater,\n          list{(PatternMatchCase, _), ..._},\n        ) => \"I was expecting a pattern to match on before the `=>`\"\n      | (token, _) if Token.isKeyword(t) => reservedKeyword(token)\n      | (token, _) => defaultUnexpected(token)\n      }\n    | _ =>\n      /* TODO: match on circumstance to verify Lident needed ? */\n      if Token.isKeyword(t) {\n        \"`\" ++\n        (name ++\n        (\"` is a reserved keyword. 
Keywords need to be escaped: \\\\\\\"\" ++\n        (Token.toString(t) ++\n        \"\\\"\")))\n      } else {\n        \"I'm not sure what to parse here when looking at \\\"\" ++ (name ++ \"\\\".\")\n      }\n    }\n  }\n\nlet make = (~startPos, ~endPos, category) => {\n  startPos: startPos,\n  endPos: endPos,\n  category: category,\n}\n\nlet printReport = (diagnostics, src) => {\n  let rec print = (diagnostics, src) =>\n    switch diagnostics {\n    | list{} => ()\n    | list{d, ...rest} =>\n      Res_diagnostics_printing_utils.Super_location.super_error_reporter(\n        Format.err_formatter,\n        src,\n        {\n          open Location\n          {\n            loc: {loc_start: d.startPos, loc_end: d.endPos, loc_ghost: false},\n            msg: explain(d),\n            sub: list{},\n            if_highlight: \"\",\n          }\n        },\n      )\n      switch rest {\n      | list{} => ()\n      | _ => Format.fprintf(Format.err_formatter, \"@.\")\n      }\n      print(rest, src)\n    }\n\n  Format.fprintf(Format.err_formatter, \"@[<v>\")\n  print(List.rev(diagnostics), src)\n  Format.fprintf(Format.err_formatter, \"@]@.\")\n}\n\nlet unexpected = (token, context) => Unexpected({token: token, context: context})\n\nlet expected = (~grammar=?, pos, token) => Expected({context: grammar, pos: pos, token: token})\n\nlet uident = currentToken => Uident(currentToken)\nlet lident = currentToken => Lident(currentToken)\nlet unclosedString = UnclosedString\nlet unclosedComment = UnclosedComment\nlet unclosedTemplate = UnclosedTemplate\nlet unknownUchar = code => UnknownUchar(code)\nlet message = txt => Message(txt)\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_diagnostics_printing_utils.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as P from \"./P.js\";\nimport * as Caml from \"rescript/lib/es6/caml.js\";\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as $$Buffer from \"rescript/lib/es6/buffer.js\";\nimport * as Format from \"./format.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Caml_sys from \"rescript/lib/es6/caml_sys.js\";\nimport * as $$Location from \"./location.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as Caml_string from \"rescript/lib/es6/caml_string.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\nimport * as Caml_external_polyfill from \"rescript/lib/es6/caml_external_polyfill.js\";\n\nfunction digits_count(n) {\n  var n$1 = Pervasives.abs(n);\n  var _base = 1;\n  var _count = 0;\n  while(true) {\n    var count = _count;\n    var base = _base;\n    if (n$1 < base) {\n      return count;\n    }\n    _count = count + 1 | 0;\n    _base = Math.imul(base, 10);\n    continue ;\n  };\n}\n\nfunction seek_2_lines_before(src, pos) {\n  var original_line = pos.pos_lnum;\n  var _current_line = 1;\n  var _current_char = 0;\n  while(true) {\n    var current_char = _current_char;\n    var current_line = _current_line;\n    if ((current_line + 2 | 0) >= original_line) {\n      return [\n              current_char,\n              current_line\n            ];\n    }\n    _current_char = current_char + 1 | 0;\n    _current_line = Caml_string.get(src, current_char) === /* '\\n' */10 ? 
current_line + 1 | 0 : current_line;\n    continue ;\n  };\n}\n\nfunction seek_2_lines_after(src, pos) {\n  var original_line = pos.pos_lnum;\n  var _current_line = original_line;\n  var _current_char = pos.pos_cnum;\n  while(true) {\n    var current_char = _current_char;\n    var current_line = _current_line;\n    if (current_char === src.length) {\n      return [\n              current_char,\n              current_line\n            ];\n    }\n    var match = Caml_string.get(src, current_char);\n    if (match !== 10) {\n      _current_char = current_char + 1 | 0;\n      continue ;\n    }\n    if (current_line === (original_line + 2 | 0)) {\n      return [\n              current_char,\n              current_line\n            ];\n    }\n    _current_char = current_char + 1 | 0;\n    _current_line = current_line + 1 | 0;\n    continue ;\n  };\n}\n\nfunction leading_space_count(str) {\n  var _i = 0;\n  var _count = 0;\n  while(true) {\n    var count = _count;\n    var i = _i;\n    if (i === str.length) {\n      return count;\n    }\n    if (Caml_string.get(str, i) !== /* ' ' */32) {\n      return count;\n    }\n    _count = count + 1 | 0;\n    _i = i + 1 | 0;\n    continue ;\n  };\n}\n\nfunction break_long_line(max_width, line) {\n  var loop = function (_pos, _accum) {\n    while(true) {\n      var accum = _accum;\n      var pos = _pos;\n      if (pos === line.length) {\n        return accum;\n      }\n      var chunk_length = Caml.caml_int_min(max_width, line.length - pos | 0);\n      var chunk = $$String.sub(line, pos, chunk_length);\n      _accum = {\n        hd: chunk,\n        tl: accum\n      };\n      _pos = pos + chunk_length | 0;\n      continue ;\n    };\n  };\n  return List.rev(loop(0, /* [] */0));\n}\n\nfunction filter_mapi(f, l) {\n  var loop = function (f, _l, _i, _accum) {\n    while(true) {\n      var accum = _accum;\n      var i = _i;\n      var l = _l;\n      if (!l) {\n        return accum;\n      }\n      var result = Curry._2(f, i, l.hd);\n      
var accum$1 = result !== undefined ? ({\n            hd: Caml_option.valFromOption(result),\n            tl: accum\n          }) : accum;\n      _accum = accum$1;\n      _i = i + 1 | 0;\n      _l = l.tl;\n      continue ;\n    };\n  };\n  return List.rev(loop(f, l, 0, /* [] */0));\n}\n\nvar dim = \"\\x11[2m\";\n\nvar err = \"\\x11[1;31m\";\n\nvar warn = \"\\x11[1;33m\";\n\nvar reset = \"\\x11[0m\";\n\nfunction should_enable_color(param) {\n  var term;\n  try {\n    term = Caml_sys.caml_sys_getenv(\"TERM\");\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      term = \"\";\n    } else {\n      throw exn;\n    }\n  }\n  if (term !== \"dumb\" && term !== \"\") {\n    return Caml_external_polyfill.resolve(\"caml_sys_isatty\")(P.stderr);\n  } else {\n    return false;\n  }\n}\n\nvar color_enabled = {\n  contents: true\n};\n\nvar first = {\n  contents: true\n};\n\nfunction setup(o) {\n  if (first.contents) {\n    first.contents = false;\n    var tmp;\n    if (o !== undefined) {\n      switch (o) {\n        case /* Auto */0 :\n            tmp = should_enable_color(undefined);\n            break;\n        case /* Always */1 :\n            tmp = true;\n            break;\n        case /* Never */2 :\n            tmp = false;\n            break;\n        \n      }\n    } else {\n      tmp = should_enable_color(undefined);\n    }\n    color_enabled.contents = tmp;\n  }\n  \n}\n\nvar Color = {\n  dim: dim,\n  err: err,\n  warn: warn,\n  reset: reset,\n  should_enable_color: should_enable_color,\n  color_enabled: color_enabled,\n  setup: setup\n};\n\nfunction print(is_warning, src, startPos, endPos) {\n  var highlight_line_start_line = startPos.pos_lnum;\n  var highlight_line_end_line = endPos.pos_lnum;\n  var match = seek_2_lines_before(src, startPos);\n  var first_shown_line = match[1];\n  var start_line_line_offset = match[0];\n  var match$1 = seek_2_lines_after(src, endPos);\n  var 
more_than_5_highlighted_lines = ((highlight_line_end_line - highlight_line_start_line | 0) + 1 | 0) > 5;\n  var max_line_digits_count = digits_count(match$1[1]);\n  var line_width = ((78 - max_line_digits_count | 0) - 2 | 0) - 3 | 0;\n  var lines = filter_mapi((function (i, line) {\n          var line_number = i + first_shown_line | 0;\n          if (more_than_5_highlighted_lines) {\n            if (line_number === (highlight_line_start_line + 2 | 0)) {\n              return [\n                      /* Elided */0,\n                      line\n                    ];\n            } else if (line_number > (highlight_line_start_line + 2 | 0) && line_number < (highlight_line_end_line - 1 | 0)) {\n              return ;\n            } else {\n              return [\n                      /* Number */{\n                        _0: line_number\n                      },\n                      line\n                    ];\n            }\n          } else {\n            return [\n                    /* Number */{\n                      _0: line_number\n                    },\n                    line\n                  ];\n          }\n        }), $$String.split_on_char(/* '\\n' */10, $$String.sub(src, start_line_line_offset, match$1[0] - start_line_line_offset | 0)));\n  var leading_space_to_cut = List.fold_left((function (current_max, param) {\n          var line = param[1];\n          var leading_spaces = leading_space_count(line);\n          if (line.length === leading_spaces || leading_spaces >= current_max) {\n            return current_max;\n          } else {\n            return leading_spaces;\n          }\n        }), 99999, lines);\n  var separator = leading_space_to_cut === 0 ? \"\\xe2\\x94\\x82\" : \"\\xe2\\x94\\x86\";\n  var stripped_lines = List.map((function (param) {\n          var line = param[1];\n          var gutter = param[0];\n          var new_content = line.length <= leading_space_to_cut ? 
({\n                hd: {\n                  s: \"\",\n                  start: 0,\n                  end_: 0\n                },\n                tl: /* [] */0\n              }) : List.mapi((function (i, line) {\n                    if (!gutter) {\n                      return {\n                              s: line,\n                              start: 0,\n                              end_: 0\n                            };\n                    }\n                    var line_number = gutter._0;\n                    var highlight_line_start_offset = startPos.pos_cnum - startPos.pos_bol | 0;\n                    var highlight_line_end_offset = endPos.pos_cnum - endPos.pos_bol | 0;\n                    var start = i === 0 && line_number === highlight_line_start_line ? highlight_line_start_offset - leading_space_to_cut | 0 : 0;\n                    var end_ = line_number < highlight_line_start_line ? 0 : (\n                        line_number === highlight_line_start_line && line_number === highlight_line_end_line ? highlight_line_end_offset - leading_space_to_cut | 0 : (\n                            line_number === highlight_line_start_line ? line.length : (\n                                line_number > highlight_line_start_line && line_number < highlight_line_end_line ? line.length : (\n                                    line_number === highlight_line_end_line ? 
highlight_line_end_offset - leading_space_to_cut | 0 : 0\n                                  )\n                              )\n                          )\n                      );\n                    return {\n                            s: line,\n                            start: start,\n                            end_: end_\n                          };\n                  }), break_long_line(line_width, $$String.sub(line, leading_space_to_cut, line.length - leading_space_to_cut | 0)));\n          return {\n                  gutter: gutter,\n                  content: new_content\n                };\n        }), lines);\n  var buf = $$Buffer.create(100);\n  var last_color = {\n    contents: /* NoColor */3\n  };\n  var add_ch = function (color, ch) {\n    if (!color_enabled.contents || last_color.contents === color) {\n      return $$Buffer.add_char(buf, ch);\n    }\n    var match = last_color.contents;\n    var ansi;\n    var exit = 0;\n    if (match >= 3) {\n      switch (color) {\n        case /* Dim */0 :\n            ansi = dim;\n            break;\n        case /* Err */1 :\n            ansi = err;\n            break;\n        case /* Warn */2 :\n            ansi = warn;\n            break;\n        case /* NoColor */3 :\n            exit = 1;\n            break;\n        \n      }\n    } else {\n      exit = 1;\n    }\n    if (exit === 1) {\n      switch (color) {\n        case /* Dim */0 :\n            ansi = \"\\x11[0m\\x11[2m\";\n            break;\n        case /* Err */1 :\n            ansi = \"\\x11[0m\\x11[1;31m\";\n            break;\n        case /* Warn */2 :\n            ansi = \"\\x11[0m\\x11[1;33m\";\n            break;\n        case /* NoColor */3 :\n            ansi = reset;\n            break;\n        \n      }\n    }\n    $$Buffer.add_string(buf, ansi);\n    $$Buffer.add_char(buf, ch);\n    last_color.contents = color;\n    \n  };\n  var draw_gutter = function (color, s) {\n    for(var _i = 1 ,_i_finish = (max_line_digits_count + 2 | 
0) - s.length | 0; _i <= _i_finish; ++_i){\n      add_ch(/* NoColor */3, /* ' ' */32);\n    }\n    $$String.iter((function (param) {\n            return add_ch(color, param);\n          }), s);\n    add_ch(/* NoColor */3, /* ' ' */32);\n    $$String.iter((function (param) {\n            return add_ch(/* Dim */0, param);\n          }), separator);\n    return add_ch(/* NoColor */3, /* ' ' */32);\n  };\n  List.iter((function (param) {\n          var gutter = param.gutter;\n          if (gutter) {\n            var line_number = gutter._0;\n            return List.iteri((function (i, line) {\n                          var gutter_content = i === 0 ? String(line_number) : \"\";\n                          var gutter_color = i === 0 && line_number >= highlight_line_start_line && line_number <= highlight_line_end_line ? (\n                              is_warning ? /* Warn */2 : /* Err */1\n                            ) : /* NoColor */3;\n                          draw_gutter(gutter_color, gutter_content);\n                          $$String.iteri((function (ii, ch) {\n                                  var c = ii >= line.start && ii < line.end_ ? (\n                                      is_warning ? /* Warn */2 : /* Err */1\n                                    ) : /* NoColor */3;\n                                  return add_ch(c, ch);\n                                }), line.s);\n                          return add_ch(/* NoColor */3, /* '\\n' */10);\n                        }), param.content);\n          }\n          draw_gutter(/* Dim */0, \".\");\n          add_ch(/* Dim */0, /* '.' */46);\n          add_ch(/* Dim */0, /* '.' */46);\n          add_ch(/* Dim */0, /* '.' 
*/46);\n          return add_ch(/* NoColor */3, /* '\\n' */10);\n        }), stripped_lines);\n  return $$Buffer.contents(buf);\n}\n\nvar Super_code_frame = {\n  digits_count: digits_count,\n  seek_2_lines_before: seek_2_lines_before,\n  seek_2_lines_after: seek_2_lines_after,\n  leading_space_count: leading_space_count,\n  break_long_line: break_long_line,\n  filter_mapi: filter_mapi,\n  Color: Color,\n  setup: setup,\n  print: print\n};\n\nfunction print$1(message_kind, intro, src, ppf, loc) {\n  if (message_kind === \"warning_as_error\") {\n    Curry._2(Format.fprintf(ppf), \"@[@{<error>%s@} (configured as error) @]@,\", intro);\n  } else if (message_kind === \"warning\") {\n    Curry._2(Format.fprintf(ppf), \"@[@{<info>%s@}@]@,\", intro);\n  } else {\n    Curry._2(Format.fprintf(ppf), \"@[@{<error>%s@}@]@,\", intro);\n  }\n  var match = $$Location.get_pos_info(loc.loc_start);\n  var start_char = match[2];\n  var start_line = match[1];\n  var match$1 = $$Location.get_pos_info(loc.loc_end);\n  var end_char = match$1[2];\n  var end_line = match$1[1];\n  var normalizedRange;\n  if (start_char === -1 || end_char === -1) {\n    normalizedRange = undefined;\n  } else if (start_line === end_line && start_char >= end_char) {\n    var same_char = start_char + 1 | 0;\n    normalizedRange = [\n      [\n        start_line,\n        same_char\n      ],\n      [\n        end_line,\n        same_char\n      ]\n    ];\n  } else {\n    normalizedRange = [\n      [\n        start_line,\n        start_char + 1 | 0\n      ],\n      [\n        end_line,\n        end_char\n      ]\n    ];\n  }\n  if (normalizedRange === undefined) {\n    return ;\n  }\n  try {\n    return Curry._2(Format.fprintf(ppf), \"@,%s\", print(message_kind === \"warning\", src, loc.loc_start, loc.loc_end));\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === P.Sys_error) {\n      return ;\n    }\n    throw exn;\n  }\n}\n\nfunction 
super_error_reporter(ppf, src, param) {\n  return Curry._4(Format.fprintf(ppf), \"@[<v>@,  %a@,  %s@,@]\", (function (param, param$1) {\n                return print$1(\"error\", \"Syntax error!\", src, param, param$1);\n              }), param.loc, param.msg);\n}\n\nvar Super_location = {\n  fprintf: Format.fprintf,\n  print_filename: $$Location.print_filename,\n  print: print$1,\n  super_error_reporter: super_error_reporter\n};\n\nexport {\n  Super_code_frame ,\n  Super_location ,\n  \n}\n/* P Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_diagnostics_printing_utils.res",
    "content": "/*\n  This file is taken from ReScript's super_code_frame.ml and super_location.ml\n  We're copying the look of ReScript's terminal error reporting.\n  See https://github.com/rescript-lang/syntax/pull/77 for the rationale.\n  A few lines have been commented out and swapped for their tweaked version.\n*/\n\n/* ===== super_code_frame.ml */\n\nopen P\n\nmodule Super_code_frame = {\n  let digits_count = n => {\n    let rec loop = (n, base, count) =>\n      if n >= base {\n        loop(n, base * 10, count + 1)\n      } else {\n        count\n      }\n\n    loop(abs(n), 1, 0)\n  }\n\n  let seek_2_lines_before = (src, pos) => {\n    open Lexing\n    let original_line = pos.pos_lnum\n    let rec loop = (current_line, current_char) =>\n      if current_line + 2 >= original_line {\n        (current_char, current_line)\n      } else {\n        loop(\n          if @doesNotRaise String.get(src, current_char) == '\\n' {\n            current_line + 1\n          } else {\n            current_line\n          },\n          current_char + 1,\n        )\n      }\n\n    loop(1, 0)\n  }\n\n  let seek_2_lines_after = (src, pos) => {\n    open Lexing\n    let original_line = pos.pos_lnum\n    let rec loop = (current_line, current_char) =>\n      if current_char == String.length(src) {\n        (current_char, current_line)\n      } else {\n        switch @doesNotRaise\n        String.get(src, current_char) {\n        | '\\n' if current_line == original_line + 2 => (current_char, current_line)\n        | '\\n' => loop(current_line + 1, current_char + 1)\n        | _ => loop(current_line, current_char + 1)\n        }\n      }\n\n    loop(original_line, pos.pos_cnum)\n  }\n\n  let leading_space_count = str => {\n    let rec loop = (i, count) =>\n      if i == String.length(str) {\n        count\n      } else if @doesNotRaise String.get(str, i) !== ' ' {\n        count\n      } else {\n        loop(i + 1, count + 1)\n      }\n\n    loop(0, 0)\n  }\n\n  let break_long_line = 
(max_width, line) => {\n    let rec loop = (pos, accum) =>\n      if pos == String.length(line) {\n        accum\n      } else {\n        let chunk_length = min(max_width, String.length(line) - pos)\n        let chunk = (@doesNotRaise String.sub)(line, pos, chunk_length)\n        loop(pos + chunk_length, list{chunk, ...accum})\n      }\n\n    loop(0, list{}) |> List.rev\n  }\n\n  let filter_mapi = (f, l) => {\n    let rec loop = (f, l, i, accum) =>\n      switch l {\n      | list{} => accum\n      | list{head, ...rest} =>\n        let accum = switch f(i, head) {\n        | None => accum\n        | Some(result) => list{result, ...accum}\n        }\n\n        loop(f, rest, i + 1, accum)\n      }\n\n    loop(f, l, 0, list{}) |> List.rev\n  }\n\n  /* Spiritual equivalent of\n  https://github.com/ocaml/ocaml/blob/414bdec9ae387129b8102cc6bf3c0b6ae173eeb9/utils/misc.ml#L601\n*/\n  module Color = {\n    type color =\n      | Dim\n      /* | Filename */\n      | Err\n      | Warn\n      | NoColor\n\n    let dim = \"\\x1b[2m\"\n    /* let filename = \"\\x1b[46m\" */\n    let err = \"\\x1b[1;31m\"\n    let warn = \"\\x1b[1;33m\"\n    let reset = \"\\x1b[0m\"\n\n    external isatty: out_channel => bool = \"caml_sys_isatty\"\n    /* reasonable heuristic on whether colors should be enabled */\n    let should_enable_color = () => {\n      let term = try Sys.getenv(\"TERM\") catch {\n      | Not_found => \"\"\n      }\n      term != \"dumb\" && (term != \"\" && isatty(stderr))\n    }\n\n    let color_enabled = ref(true)\n\n    let setup = {\n      let first = ref(true) /* initialize only once */\n      o => {\n        if first.contents {\n          first := false\n          color_enabled :=\n            switch o {\n            | Some(Misc.Color.Always) => true\n            | Some(Auto) => should_enable_color()\n            | Some(Never) => false\n            | None => should_enable_color()\n            }\n        }\n        ()\n      }\n    }\n  }\n\n  let setup = Color.setup\n\n  
type gutter = Number(int) | Elided\n  type highlighted_string = {s: string, start: int, end_: int}\n  type line = {\n    gutter: gutter,\n    content: list<highlighted_string>,\n  }\n  /*\n  Features:\n  - display a line gutter\n  - break long line into multiple for terminal display\n  - peek 2 lines before & after for context\n  - center snippet when it's heavily indented\n  - ellide intermediate lines when the reported range is huge\n*/\n  let print = (~is_warning, ~src, ~startPos, ~endPos) => {\n    open Lexing\n\n    let indent = 2\n    let highlight_line_start_line = startPos.pos_lnum\n    let highlight_line_end_line = endPos.pos_lnum\n    let (start_line_line_offset, first_shown_line) = seek_2_lines_before(src, startPos)\n    let (end_line_line_end_offset, last_shown_line) = seek_2_lines_after(src, endPos)\n\n    let more_than_5_highlighted_lines = highlight_line_end_line - highlight_line_start_line + 1 > 5\n\n    let max_line_digits_count = digits_count(last_shown_line)\n    /* TODO: change this back to a fixed 100? 
*/\n    /* 3 for separator + the 2 spaces around it */\n    let line_width = 78 - max_line_digits_count - indent - 3\n    let lines =\n      (@doesNotRaise String.sub)(\n        src,\n        start_line_line_offset,\n        end_line_line_end_offset - start_line_line_offset,\n      )\n      |> String.split_on_char('\\n')\n      |> filter_mapi((i, line) => {\n        let line_number = i + first_shown_line\n        if more_than_5_highlighted_lines {\n          if line_number == highlight_line_start_line + 2 {\n            Some(Elided, line)\n          } else if (\n            line_number > highlight_line_start_line + 2 && line_number < highlight_line_end_line - 1\n          ) {\n            None\n          } else {\n            Some(Number(line_number), line)\n          }\n        } else {\n          Some(Number(line_number), line)\n        }\n      })\n\n    let leading_space_to_cut = lines |> List.fold_left((current_max, (_, line)) => {\n      let leading_spaces = leading_space_count(line)\n      if String.length(line) == leading_spaces {\n        /* the line's nothing but spaces. 
Doesn't count */\n        current_max\n      } else {\n        min(leading_spaces, current_max)\n      }\n    }, 99999)\n\n    let separator = if leading_space_to_cut == 0 {\n      \"│\"\n    } else {\n      \"┆\"\n    }\n    let stripped_lines = lines |> List.map(((gutter, line)) => {\n      let new_content = if String.length(line) <= leading_space_to_cut {\n        list{{s: \"\", start: 0, end_: 0}}\n      } else {\n        (@doesNotRaise String.sub)(\n          line,\n          leading_space_to_cut,\n          String.length(line) - leading_space_to_cut,\n        )\n        |> break_long_line(line_width)\n        |> List.mapi((i, line) =>\n          switch gutter {\n          | Elided => {s: line, start: 0, end_: 0}\n          | Number(line_number) =>\n            let highlight_line_start_offset = startPos.pos_cnum - startPos.pos_bol\n            let highlight_line_end_offset = endPos.pos_cnum - endPos.pos_bol\n            let start = if i == 0 && line_number == highlight_line_start_line {\n              highlight_line_start_offset - leading_space_to_cut\n            } else {\n              0\n            }\n\n            let end_ = if line_number < highlight_line_start_line {\n              0\n            } else if (\n              line_number == highlight_line_start_line && line_number == highlight_line_end_line\n            ) {\n              highlight_line_end_offset - leading_space_to_cut\n            } else if line_number == highlight_line_start_line {\n              String.length(line)\n            } else if (\n              line_number > highlight_line_start_line && line_number < highlight_line_end_line\n            ) {\n              String.length(line)\n            } else if line_number == highlight_line_end_line {\n              highlight_line_end_offset - leading_space_to_cut\n            } else {\n              0\n            }\n\n            {s: line, start: start, end_: end_}\n          }\n        )\n      }\n\n      {gutter: gutter, content: 
new_content}\n    })\n\n    let buf = Buffer.create(100)\n    open Color\n    let add_ch = {\n      let last_color = ref(NoColor)\n      (color, ch) =>\n        if !Color.color_enabled.contents || last_color.contents == color {\n          Buffer.add_char(buf, ch)\n        } else {\n          let ansi = switch (last_color.contents, color) {\n          | (NoColor, Dim) => dim\n          /* | NoColor, Filename -> filename */\n          | (NoColor, Err) => err\n          | (NoColor, Warn) => warn\n          | (_, NoColor) => reset\n          | (_, Dim) => reset ++ dim\n          /* | _, Filename -> reset ^ filename */\n          | (_, Err) => reset ++ err\n          | (_, Warn) => reset ++ warn\n          }\n\n          Buffer.add_string(buf, ansi)\n          Buffer.add_char(buf, ch)\n          last_color := color\n        }\n    }\n\n    let draw_gutter = (color, s) => {\n      for _i in 1 to max_line_digits_count + indent - String.length(s) {\n        add_ch(NoColor, ' ')\n      }\n      s |> String.iter(add_ch(color))\n      add_ch(NoColor, ' ')\n      separator |> String.iter(add_ch(Dim))\n      add_ch(NoColor, ' ')\n    }\n\n    stripped_lines |> List.iter(({gutter, content}) =>\n      switch gutter {\n      | Elided =>\n        draw_gutter(Dim, \".\")\n        add_ch(Dim, '.')\n        add_ch(Dim, '.')\n        add_ch(Dim, '.')\n        add_ch(NoColor, '\\n')\n      | Number(line_number) =>\n        content |> List.iteri((i, line) => {\n          let gutter_content = if i == 0 {\n            string_of_int(line_number)\n          } else {\n            \"\"\n          }\n          let gutter_color = if (\n            i == 0 &&\n              (line_number >= highlight_line_start_line &&\n              line_number <= highlight_line_end_line)\n          ) {\n            if is_warning {\n              Warn\n            } else {\n              Err\n            }\n          } else {\n            NoColor\n          }\n\n          draw_gutter(gutter_color, 
gutter_content)\n\n          line.s |> String.iteri((ii, ch) => {\n            let c = if ii >= line.start && ii < line.end_ {\n              if is_warning {\n                Warn\n              } else {\n                Err\n              }\n            } else {\n              NoColor\n            }\n            add_ch(c, ch)\n          })\n          add_ch(NoColor, '\\n')\n        })\n      }\n    )\n    Buffer.contents(buf)\n  }\n}\n\n/* ===== super_location.ml */\nmodule Super_location = {\n  let fprintf = Format.fprintf\n\n  let print_filename = Location.print_filename\n\n  /* let print ~message_kind intro ppf (loc : Location.t) = */\n  let print = (~message_kind, intro, src, ppf, loc: Location.t) => {\n    switch message_kind {\n    | #warning => fprintf(ppf, \"@[@{<info>%s@}@]@,\", intro)\n    | #warning_as_error => fprintf(ppf, \"@[@{<error>%s@} (configured as error) @]@,\", intro)\n    | #error => fprintf(ppf, \"@[@{<error>%s@}@]@,\", intro)\n    }\n    /* ocaml's reported line/col numbering is horrible and super error-prone\n     when being handled programmatically (or humanly for that matter. If you're\n     an ocaml contributor reading this: who the heck reads the character count\n     starting from the first erroring character?) */\n    /* let (file, start_line, start_char) = Location.get_pos_info loc.loc_start in */\n    let (_file, start_line, start_char) = Location.get_pos_info(loc.loc_start)\n    let (_, end_line, end_char) = Location.get_pos_info(loc.loc_end)\n    /* line is 1-indexed, column is 0-indexed. We convert all of them to 1-indexed to avoid confusion */\n    /* start_char is inclusive, end_char is exclusive */\n    let normalizedRange = /* TODO: lots of the handlings here aren't needed anymore because the new\n      rescript syntax has much stronger invariants regarding positions, e.g.\n      no -1 */\n    if start_char === -1 || end_char === -1 {\n      /* happens sometimes. 
Syntax error for example */\n      None\n    } else if start_line == end_line && start_char >= end_char {\n      /* in some errors, starting char and ending char can be the same. But\n         since ending char was supposed to be exclusive, here it might end up\n         smaller than the starting char if we naively did start_char + 1 to\n         just the starting char and forget ending char */\n      let same_char = start_char + 1\n      Some((start_line, same_char), (end_line, same_char))\n    } else {\n      /* again: end_char is exclusive, so +1-1=0 */\n      Some((start_line, start_char + 1), (end_line, end_char))\n    }\n\n    switch normalizedRange {\n    | None => ()\n    | Some(_) =>\n      try /* let src = Ext_io.load_file file in */\n      /* we're putting the line break `@,` here rather than above, because this\n           branch might not be reached (aka no inline file content display) so\n           we don't wanna end up with two line breaks in the the consequent */\n      fprintf(\n        ppf,\n        \"@,%s\",\n        Super_code_frame.print(\n          ~is_warning=message_kind == #warning,\n          ~src,\n          ~startPos=loc.loc_start,\n          ~endPos=loc.loc_end,\n        ),\n      ) catch {\n      /* this might happen if the file is e.g. \"\", \"_none_\" or any of the fake file name placeholders.\n       we've already printed the location above, so nothing more to do here. */\n      | Sys_error(_) => ()\n      }\n    }\n  }\n\n  /* taken from https://github.com/rescript-lang/ocaml/blob/d4144647d1bf9bc7dc3aadc24c25a7efa3a67915/parsing/location.ml#L380 */\n  /* This is the error report entry point. We'll replace the default reporter with this one. */\n  /* let rec super_error_reporter ppf ({loc; msg; sub} : Location.error) = */\n  let super_error_reporter = (ppf, src, {loc, msg}: Location.error) =>\n    /* open a vertical box. 
Everything in our message is indented 2 spaces */\n    /* Format.fprintf ppf \"@[<v>@,  %a@,  %s@,@]\" (print ~message_kind:`error \"We've found a bug for you!\") src loc msg; */\n    Format.fprintf(\n      ppf,\n      \"@[<v>@,  %a@,  %s@,@]\",\n      print(~message_kind=#error, \"Syntax error!\", src),\n      loc,\n      msg,\n    )\n  /* List.iter (Format.fprintf ppf \"@,@[%a@]\" super_error_reporter) sub */\n  /* no need to flush here; location's report_exception (which uses this ultimately) flushes */\n}\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_doc.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Res_minibuffer from \"./res_minibuffer.js\";\n\nvar line = {\n  TAG: /* LineBreak */5,\n  _0: /* Classic */0\n};\n\nvar softLine = {\n  TAG: /* LineBreak */5,\n  _0: /* Soft */1\n};\n\nfunction text(s) {\n  return {\n          TAG: /* Text */0,\n          _0: s\n        };\n}\n\nfunction _concat(_acc, _l) {\n  while(true) {\n    var l = _l;\n    var acc = _acc;\n    if (!l) {\n      return acc;\n    }\n    var s1 = l.hd;\n    if (typeof s1 === \"number\") {\n      if (s1 === /* Nil */0) {\n        _l = l.tl;\n        continue ;\n      }\n      \n    } else {\n      switch (s1.TAG | 0) {\n        case /* Text */0 :\n            var match = l.tl;\n            if (match) {\n              var s2 = match.hd;\n              if (typeof s2 !== \"number\" && s2.TAG === /* Text */0) {\n                return {\n                        hd: {\n                          TAG: /* Text */0,\n                          _0: s1._0 + s2._0\n                        },\n                        tl: _concat(acc, match.tl)\n                      };\n              }\n              \n            }\n            break;\n        case /* Concat */1 :\n            _l = s1._0;\n            _acc = _concat(acc, l.tl);\n            continue ;\n        default:\n          \n      }\n    }\n    var rest = l.tl;\n    var rest1 = _concat(acc, rest);\n    if (rest1 === rest) {\n      return l;\n    } else {\n      return {\n              hd: s1,\n              tl: rest1\n            };\n    }\n  };\n}\n\nfunction concat(l) {\n  return {\n          TAG: /* Concat */1,\n          _0: _concat(/* [] */0, l)\n        };\n}\n\nfunction indent(d) {\n  return {\n          TAG: /* Indent */2,\n          _0: d\n        };\n}\n\nfunction ifBreaks(t, f) {\n  return 
{\n          TAG: /* IfBreaks */3,\n          yes: t,\n          no: f,\n          broken: false\n        };\n}\n\nfunction lineSuffix(d) {\n  return {\n          TAG: /* LineSuffix */4,\n          _0: d\n        };\n}\n\nfunction group(d) {\n  return {\n          TAG: /* Group */6,\n          shouldBreak: false,\n          doc: d\n        };\n}\n\nfunction breakableGroup(forceBreak, d) {\n  return {\n          TAG: /* Group */6,\n          shouldBreak: forceBreak,\n          doc: d\n        };\n}\n\nfunction customLayout(gs) {\n  return {\n          TAG: /* CustomLayout */7,\n          _0: gs\n        };\n}\n\nvar comma = {\n  TAG: /* Text */0,\n  _0: \",\"\n};\n\nvar trailingComma = {\n  TAG: /* IfBreaks */3,\n  yes: comma,\n  no: /* Nil */0,\n  broken: false\n};\n\nfunction propagateForcedBreaks(doc) {\n  var walk = function (_doc) {\n    while(true) {\n      var doc = _doc;\n      if (typeof doc === \"number\") {\n        if (doc === /* BreakParent */1) {\n          return true;\n        } else {\n          return false;\n        }\n      }\n      switch (doc.TAG | 0) {\n        case /* Concat */1 :\n            return List.fold_left((function (forceBreak, child) {\n                          var childForcesBreak = walk(child);\n                          if (forceBreak) {\n                            return true;\n                          } else {\n                            return childForcesBreak;\n                          }\n                        }), false, doc._0);\n        case /* Indent */2 :\n            _doc = doc._0;\n            continue ;\n        case /* IfBreaks */3 :\n            var trueDoc = doc.yes;\n            var falseForceBreak = walk(doc.no);\n            if (falseForceBreak) {\n              walk(trueDoc);\n              doc.broken = true;\n              return true;\n            }\n            _doc = trueDoc;\n            continue ;\n        case /* LineBreak */5 :\n            return doc._0 >= 2;\n        case /* Group */6 :\n       
     var forceBreak = doc.shouldBreak;\n            var childForcesBreak = walk(doc.doc);\n            var shouldBreak = forceBreak || childForcesBreak;\n            doc.shouldBreak = shouldBreak;\n            return shouldBreak;\n        case /* CustomLayout */7 :\n            walk({\n                  TAG: /* Concat */1,\n                  _0: doc._0\n                });\n            return false;\n        default:\n          return false;\n      }\n    };\n  };\n  walk(doc);\n  \n}\n\nfunction willBreak(_doc) {\n  while(true) {\n    var doc = _doc;\n    if (typeof doc === \"number\") {\n      if (doc === /* BreakParent */1) {\n        return true;\n      } else {\n        return false;\n      }\n    }\n    switch (doc.TAG | 0) {\n      case /* Concat */1 :\n          return List.exists(willBreak, doc._0);\n      case /* Indent */2 :\n          _doc = doc._0;\n          continue ;\n      case /* IfBreaks */3 :\n          if (willBreak(doc.yes)) {\n            return true;\n          }\n          _doc = doc.no;\n          continue ;\n      case /* LineBreak */5 :\n          return doc._0 >= 2;\n      case /* Group */6 :\n          var match = doc.shouldBreak;\n          if (match) {\n            return true;\n          }\n          _doc = doc.doc;\n          continue ;\n      case /* CustomLayout */7 :\n          var match$1 = doc._0;\n          if (!match$1) {\n            return false;\n          }\n          _doc = match$1.hd;\n          continue ;\n      default:\n        return false;\n    }\n  };\n}\n\nfunction join(sep, docs) {\n  var loop = function (_acc, sep, _docs) {\n    while(true) {\n      var docs = _docs;\n      var acc = _acc;\n      if (!docs) {\n        return List.rev(acc);\n      }\n      var xs = docs.tl;\n      var x = docs.hd;\n      if (!xs) {\n        return List.rev({\n                    hd: x,\n                    tl: acc\n                  });\n      }\n      _docs = xs;\n      _acc = {\n        hd: sep,\n        tl: {\n          hd: 
x,\n          tl: acc\n        }\n      };\n      continue ;\n    };\n  };\n  var l = loop(/* [] */0, sep, docs);\n  return {\n          TAG: /* Concat */1,\n          _0: _concat(/* [] */0, l)\n        };\n}\n\nfunction fits(w, stack) {\n  var width = {\n    contents: w\n  };\n  var result = {\n    contents: undefined\n  };\n  var calculate = function (_indent, _mode, _doc) {\n    while(true) {\n      var doc = _doc;\n      var mode = _mode;\n      var indent = _indent;\n      if (result.contents !== undefined) {\n        return ;\n      }\n      if (width.contents < 0) {\n        result.contents = false;\n        return ;\n      }\n      if (typeof doc === \"number\") {\n        return ;\n      }\n      switch (doc.TAG | 0) {\n        case /* Text */0 :\n            width.contents = width.contents - doc._0.length | 0;\n            return ;\n        case /* Concat */1 :\n            var _docs = doc._0;\n            while(true) {\n              var docs = _docs;\n              if (result.contents !== undefined) {\n                return ;\n              }\n              if (!docs) {\n                return ;\n              }\n              calculate(indent, mode, docs.hd);\n              _docs = docs.tl;\n              continue ;\n            };\n        case /* Indent */2 :\n            _doc = doc._0;\n            _indent = indent + 2 | 0;\n            continue ;\n        case /* IfBreaks */3 :\n            var match = doc.broken;\n            if (match) {\n              _doc = doc.yes;\n              continue ;\n            }\n            break;\n        case /* LineBreak */5 :\n            break;\n        case /* Group */6 :\n            var match$1 = doc.shouldBreak;\n            if (match$1) {\n              _doc = doc.doc;\n              _mode = /* Break */0;\n              continue ;\n            }\n            _doc = doc.doc;\n            continue ;\n        case /* CustomLayout */7 :\n            var match$2 = doc._0;\n            if (!match$2) {\n         
     return ;\n            }\n            _doc = match$2.hd;\n            continue ;\n        default:\n          return ;\n      }\n      if (mode) {\n        if (typeof doc !== \"number\") {\n          if (doc.TAG === /* IfBreaks */3) {\n            _doc = doc.no;\n            continue ;\n          }\n          var match$3 = doc._0;\n          if (match$3 !== 1) {\n            if (match$3 !== 0) {\n              result.contents = true;\n            } else {\n              width.contents = width.contents - 1 | 0;\n            }\n            return ;\n          } else {\n            return ;\n          }\n        }\n        \n      } else if (typeof doc !== \"number\") {\n        if (doc.TAG !== /* IfBreaks */3) {\n          result.contents = true;\n          return ;\n        }\n        _doc = doc.yes;\n        continue ;\n      }\n      \n    };\n  };\n  var _stack = stack;\n  while(true) {\n    var stack$1 = _stack;\n    var match = result.contents;\n    if (match !== undefined) {\n      return match;\n    }\n    if (!stack$1) {\n      return width.contents >= 0;\n    }\n    var match$1 = stack$1.hd;\n    calculate(match$1[0], match$1[1], match$1[2]);\n    _stack = stack$1.tl;\n    continue ;\n  };\n}\n\nfunction toString(width, doc) {\n  propagateForcedBreaks(doc);\n  var buffer = Res_minibuffer.create(1000);\n  var $$process = function (_pos, _lineSuffices, _stack) {\n    while(true) {\n      var stack = _stack;\n      var lineSuffices = _lineSuffices;\n      var pos = _pos;\n      if (stack) {\n        var rest = stack.tl;\n        var cmd = stack.hd;\n        var doc = cmd[2];\n        var mode = cmd[1];\n        var ind = cmd[0];\n        if (typeof doc === \"number\") {\n          if (doc === /* Nil */0) {\n            _stack = rest;\n            continue ;\n          }\n          _stack = rest;\n          continue ;\n        } else {\n          switch (doc.TAG | 0) {\n            case /* Text */0 :\n                var txt = doc._0;\n                
Res_minibuffer.add_string(buffer, txt);\n                _stack = rest;\n                _pos = txt.length + pos | 0;\n                continue ;\n            case /* Concat */1 :\n                var ops = List.map((function(ind,mode){\n                    return function (doc) {\n                      return [\n                              ind,\n                              mode,\n                              doc\n                            ];\n                    }\n                    }(ind,mode)), doc._0);\n                _stack = List.append(ops, rest);\n                continue ;\n            case /* Indent */2 :\n                _stack = {\n                  hd: [\n                    ind + 2 | 0,\n                    mode,\n                    doc._0\n                  ],\n                  tl: rest\n                };\n                continue ;\n            case /* IfBreaks */3 :\n                var breakDoc = doc.yes;\n                var match = doc.broken;\n                if (match) {\n                  _stack = {\n                    hd: [\n                      ind,\n                      mode,\n                      breakDoc\n                    ],\n                    tl: rest\n                  };\n                  continue ;\n                }\n                if (mode === /* Break */0) {\n                  _stack = {\n                    hd: [\n                      ind,\n                      mode,\n                      breakDoc\n                    ],\n                    tl: rest\n                  };\n                  continue ;\n                }\n                _stack = {\n                  hd: [\n                    ind,\n                    mode,\n                    doc.no\n                  ],\n                  tl: rest\n                };\n                continue ;\n            case /* LineSuffix */4 :\n                _stack = rest;\n                _lineSuffices = {\n                  hd: [\n                    ind,\n  
                  mode,\n                    doc._0\n                  ],\n                  tl: lineSuffices\n                };\n                continue ;\n            case /* LineBreak */5 :\n                var lineStyle = doc._0;\n                if (mode === /* Break */0) {\n                  if (lineSuffices) {\n                    _stack = List.concat({\n                          hd: List.rev(lineSuffices),\n                          tl: {\n                            hd: {\n                              hd: cmd,\n                              tl: rest\n                            },\n                            tl: /* [] */0\n                          }\n                        });\n                    _lineSuffices = /* [] */0;\n                    _pos = ind;\n                    continue ;\n                  }\n                  if (lineStyle === /* Literal */3) {\n                    Res_minibuffer.add_char(buffer, /* '\\n' */10);\n                    _stack = rest;\n                    _lineSuffices = /* [] */0;\n                    _pos = 0;\n                    continue ;\n                  }\n                  Res_minibuffer.flush_newline(buffer);\n                  Res_minibuffer.add_string(buffer, $$String.make(ind, /* ' ' */32));\n                  _stack = rest;\n                  _lineSuffices = /* [] */0;\n                  _pos = ind;\n                  continue ;\n                }\n                var pos$1;\n                switch (lineStyle) {\n                  case /* Classic */0 :\n                      Res_minibuffer.add_string(buffer, \" \");\n                      pos$1 = pos + 1 | 0;\n                      break;\n                  case /* Soft */1 :\n                      pos$1 = pos;\n                      break;\n                  case /* Hard */2 :\n                      Res_minibuffer.flush_newline(buffer);\n                      pos$1 = 0;\n                      break;\n                  case /* Literal */3 :\n              
        Res_minibuffer.add_char(buffer, /* '\\n' */10);\n                      pos$1 = 0;\n                      break;\n                  \n                }\n                _stack = rest;\n                _pos = pos$1;\n                continue ;\n            case /* Group */6 :\n                var shouldBreak = doc.shouldBreak;\n                var doc$1 = doc.doc;\n                if (shouldBreak || !fits(width - pos | 0, {\n                        hd: [\n                          ind,\n                          /* Flat */1,\n                          doc$1\n                        ],\n                        tl: rest\n                      })) {\n                  _stack = {\n                    hd: [\n                      ind,\n                      /* Break */0,\n                      doc$1\n                    ],\n                    tl: rest\n                  };\n                  continue ;\n                }\n                _stack = {\n                  hd: [\n                    ind,\n                    /* Flat */1,\n                    doc$1\n                  ],\n                  tl: rest\n                };\n                continue ;\n            case /* CustomLayout */7 :\n                var findGroupThatFits = (function(pos,ind,rest){\n                return function findGroupThatFits(_groups) {\n                  while(true) {\n                    var groups = _groups;\n                    if (!groups) {\n                      return /* Nil */0;\n                    }\n                    var docs = groups.tl;\n                    var lastGroup = groups.hd;\n                    if (!docs) {\n                      return lastGroup;\n                    }\n                    if (fits(width - pos | 0, {\n                            hd: [\n                              ind,\n                              /* Flat */1,\n                              lastGroup\n                            ],\n                            tl: rest\n               
           })) {\n                      return lastGroup;\n                    }\n                    _groups = docs;\n                    continue ;\n                  };\n                }\n                }(pos,ind,rest));\n                var doc$2 = findGroupThatFits(doc._0);\n                _stack = {\n                  hd: [\n                    ind,\n                    /* Flat */1,\n                    doc$2\n                  ],\n                  tl: rest\n                };\n                continue ;\n            \n          }\n        }\n      } else {\n        if (!lineSuffices) {\n          return ;\n        }\n        _stack = List.rev(lineSuffices);\n        _lineSuffices = /* [] */0;\n        _pos = 0;\n        continue ;\n      }\n    };\n  };\n  $$process(0, /* [] */0, {\n        hd: [\n          0,\n          /* Flat */1,\n          doc\n        ],\n        tl: /* [] */0\n      });\n  return Res_minibuffer.contents(buffer);\n}\n\nfunction debug(t) {\n  var toDoc = function (_x) {\n    while(true) {\n      var x = _x;\n      if (typeof x === \"number\") {\n        if (x === /* Nil */0) {\n          return {\n                  TAG: /* Text */0,\n                  _0: \"nil\"\n                };\n        } else {\n          return {\n                  TAG: /* Text */0,\n                  _0: \"breakparent\"\n                };\n        }\n      }\n      switch (x.TAG | 0) {\n        case /* Text */0 :\n            return {\n                    TAG: /* Text */0,\n                    _0: \"text(\\\"\" + (x._0 + \"\\\")\")\n                  };\n        case /* Concat */1 :\n            var docs = x._0;\n            if (!docs) {\n              return {\n                      TAG: /* Text */0,\n                      _0: \"concat()\"\n                    };\n            }\n            var l_0 = {\n              TAG: /* Text */0,\n              _0: \",\"\n            };\n            var l_1 = {\n              hd: line,\n              tl: /* [] */0\n   
         };\n            var l = {\n              hd: l_0,\n              tl: l_1\n            };\n            var l_1$1 = {\n              hd: join({\n                    TAG: /* Concat */1,\n                    _0: _concat(/* [] */0, l)\n                  }, List.map(toDoc, docs)),\n              tl: /* [] */0\n            };\n            var l$1 = {\n              hd: line,\n              tl: l_1$1\n            };\n            var l_0$1 = {\n              TAG: /* Text */0,\n              _0: \"concat(\"\n            };\n            var l_1$2 = {\n              hd: {\n                TAG: /* Indent */2,\n                _0: {\n                  TAG: /* Concat */1,\n                  _0: _concat(/* [] */0, l$1)\n                }\n              },\n              tl: {\n                hd: line,\n                tl: {\n                  hd: {\n                    TAG: /* Text */0,\n                    _0: \")\"\n                  },\n                  tl: /* [] */0\n                }\n              }\n            };\n            var l$2 = {\n              hd: l_0$1,\n              tl: l_1$2\n            };\n            return {\n                    TAG: /* Group */6,\n                    shouldBreak: false,\n                    doc: {\n                      TAG: /* Concat */1,\n                      _0: _concat(/* [] */0, l$2)\n                    }\n                  };\n        case /* Indent */2 :\n            var l_0$2 = {\n              TAG: /* Text */0,\n              _0: \"indent(\"\n            };\n            var l_1$3 = {\n              hd: softLine,\n              tl: {\n                hd: toDoc(x._0),\n                tl: {\n                  hd: softLine,\n                  tl: {\n                    hd: {\n                      TAG: /* Text */0,\n                      _0: \")\"\n                    },\n                    tl: /* [] */0\n                  }\n                }\n              }\n            };\n            var l$3 = {\n              hd: 
l_0$2,\n              tl: l_1$3\n            };\n            return {\n                    TAG: /* Concat */1,\n                    _0: _concat(/* [] */0, l$3)\n                  };\n        case /* IfBreaks */3 :\n            var trueDoc = x.yes;\n            var match = x.broken;\n            if (match) {\n              _x = trueDoc;\n              continue ;\n            }\n            var l_0$3 = {\n              TAG: /* Text */0,\n              _0: \",\"\n            };\n            var l_1$4 = {\n              hd: line,\n              tl: /* [] */0\n            };\n            var l$4 = {\n              hd: l_0$3,\n              tl: l_1$4\n            };\n            var l_1$5 = {\n              hd: toDoc(trueDoc),\n              tl: {\n                hd: {\n                  TAG: /* Concat */1,\n                  _0: _concat(/* [] */0, l$4)\n                },\n                tl: {\n                  hd: toDoc(x.no),\n                  tl: /* [] */0\n                }\n              }\n            };\n            var l$5 = {\n              hd: line,\n              tl: l_1$5\n            };\n            var l_0$4 = {\n              TAG: /* Text */0,\n              _0: \"ifBreaks(\"\n            };\n            var l_1$6 = {\n              hd: {\n                TAG: /* Indent */2,\n                _0: {\n                  TAG: /* Concat */1,\n                  _0: _concat(/* [] */0, l$5)\n                }\n              },\n              tl: {\n                hd: line,\n                tl: {\n                  hd: {\n                    TAG: /* Text */0,\n                    _0: \")\"\n                  },\n                  tl: /* [] */0\n                }\n              }\n            };\n            var l$6 = {\n              hd: l_0$4,\n              tl: l_1$6\n            };\n            return {\n                    TAG: /* Group */6,\n                    shouldBreak: false,\n                    doc: {\n                      TAG: /* Concat */1,\n    
                  _0: _concat(/* [] */0, l$6)\n                    }\n                  };\n        case /* LineSuffix */4 :\n            var l_1$7 = {\n              hd: toDoc(x._0),\n              tl: /* [] */0\n            };\n            var l$7 = {\n              hd: line,\n              tl: l_1$7\n            };\n            var l_0$5 = {\n              TAG: /* Text */0,\n              _0: \"linesuffix(\"\n            };\n            var l_1$8 = {\n              hd: {\n                TAG: /* Indent */2,\n                _0: {\n                  TAG: /* Concat */1,\n                  _0: _concat(/* [] */0, l$7)\n                }\n              },\n              tl: {\n                hd: line,\n                tl: {\n                  hd: {\n                    TAG: /* Text */0,\n                    _0: \")\"\n                  },\n                  tl: /* [] */0\n                }\n              }\n            };\n            var l$8 = {\n              hd: l_0$5,\n              tl: l_1$8\n            };\n            return {\n                    TAG: /* Group */6,\n                    shouldBreak: false,\n                    doc: {\n                      TAG: /* Concat */1,\n                      _0: _concat(/* [] */0, l$8)\n                    }\n                  };\n        case /* LineBreak */5 :\n            var breakTxt;\n            switch (x._0) {\n              case /* Classic */0 :\n                  breakTxt = \"Classic\";\n                  break;\n              case /* Soft */1 :\n                  breakTxt = \"Soft\";\n                  break;\n              case /* Hard */2 :\n                  breakTxt = \"Hard\";\n                  break;\n              case /* Literal */3 :\n                  breakTxt = \"Liteal\";\n                  break;\n              \n            }\n            return {\n                    TAG: /* Text */0,\n                    _0: \"LineBreak(\" + (breakTxt + \")\")\n                  };\n        case /* Group */6 
:\n            var shouldBreak = x.shouldBreak;\n            var l_0$6 = {\n              TAG: /* Text */0,\n              _0: \",\"\n            };\n            var l_1$9 = {\n              hd: line,\n              tl: /* [] */0\n            };\n            var l$9 = {\n              hd: l_0$6,\n              tl: l_1$9\n            };\n            var l_1$10 = {\n              hd: {\n                TAG: /* Text */0,\n                _0: \"{shouldBreak: \" + (Pervasives.string_of_bool(shouldBreak) + \"}\")\n              },\n              tl: {\n                hd: {\n                  TAG: /* Concat */1,\n                  _0: _concat(/* [] */0, l$9)\n                },\n                tl: {\n                  hd: toDoc(x.doc),\n                  tl: /* [] */0\n                }\n              }\n            };\n            var l$10 = {\n              hd: line,\n              tl: l_1$10\n            };\n            var l_0$7 = {\n              TAG: /* Text */0,\n              _0: \"Group(\"\n            };\n            var l_1$11 = {\n              hd: {\n                TAG: /* Indent */2,\n                _0: {\n                  TAG: /* Concat */1,\n                  _0: _concat(/* [] */0, l$10)\n                }\n              },\n              tl: {\n                hd: line,\n                tl: {\n                  hd: {\n                    TAG: /* Text */0,\n                    _0: \")\"\n                  },\n                  tl: /* [] */0\n                }\n              }\n            };\n            var l$11 = {\n              hd: l_0$7,\n              tl: l_1$11\n            };\n            return {\n                    TAG: /* Group */6,\n                    shouldBreak: false,\n                    doc: {\n                      TAG: /* Concat */1,\n                      _0: _concat(/* [] */0, l$11)\n                    }\n                  };\n        case /* CustomLayout */7 :\n            var l_0$8 = {\n              TAG: /* Text */0,\n       
       _0: \",\"\n            };\n            var l_1$12 = {\n              hd: line,\n              tl: /* [] */0\n            };\n            var l$12 = {\n              hd: l_0$8,\n              tl: l_1$12\n            };\n            var l_1$13 = {\n              hd: join({\n                    TAG: /* Concat */1,\n                    _0: _concat(/* [] */0, l$12)\n                  }, List.map(toDoc, x._0)),\n              tl: /* [] */0\n            };\n            var l$13 = {\n              hd: line,\n              tl: l_1$13\n            };\n            var l_0$9 = {\n              TAG: /* Text */0,\n              _0: \"customLayout(\"\n            };\n            var l_1$14 = {\n              hd: {\n                TAG: /* Indent */2,\n                _0: {\n                  TAG: /* Concat */1,\n                  _0: _concat(/* [] */0, l$13)\n                }\n              },\n              tl: {\n                hd: line,\n                tl: {\n                  hd: {\n                    TAG: /* Text */0,\n                    _0: \")\"\n                  },\n                  tl: /* [] */0\n                }\n              }\n            };\n            var l$14 = {\n              hd: l_0$9,\n              tl: l_1$14\n            };\n            return {\n                    TAG: /* Group */6,\n                    shouldBreak: false,\n                    doc: {\n                      TAG: /* Concat */1,\n                      _0: _concat(/* [] */0, l$14)\n                    }\n                  };\n        \n      }\n    };\n  };\n  var doc = toDoc(t);\n  console.log(toString(10, doc));\n  \n}\n\nvar MiniBuffer;\n\nvar nil = /* Nil */0;\n\nvar hardLine = {\n  TAG: /* LineBreak */5,\n  _0: /* Hard */2\n};\n\nvar literalLine = {\n  TAG: /* LineBreak */5,\n  _0: /* Literal */3\n};\n\nvar breakParent = /* BreakParent */1;\n\nvar space = {\n  TAG: /* Text */0,\n  _0: \" \"\n};\n\nvar dot = {\n  TAG: /* Text */0,\n  _0: \".\"\n};\n\nvar dotdot = {\n  TAG: 
/* Text */0,\n  _0: \"..\"\n};\n\nvar dotdotdot = {\n  TAG: /* Text */0,\n  _0: \"...\"\n};\n\nvar lessThan = {\n  TAG: /* Text */0,\n  _0: \"<\"\n};\n\nvar greaterThan = {\n  TAG: /* Text */0,\n  _0: \">\"\n};\n\nvar lbrace = {\n  TAG: /* Text */0,\n  _0: \"{\"\n};\n\nvar rbrace = {\n  TAG: /* Text */0,\n  _0: \"}\"\n};\n\nvar lparen = {\n  TAG: /* Text */0,\n  _0: \"(\"\n};\n\nvar rparen = {\n  TAG: /* Text */0,\n  _0: \")\"\n};\n\nvar lbracket = {\n  TAG: /* Text */0,\n  _0: \"[\"\n};\n\nvar rbracket = {\n  TAG: /* Text */0,\n  _0: \"]\"\n};\n\nvar question = {\n  TAG: /* Text */0,\n  _0: \"?\"\n};\n\nvar tilde = {\n  TAG: /* Text */0,\n  _0: \"~\"\n};\n\nvar equal = {\n  TAG: /* Text */0,\n  _0: \"=\"\n};\n\nvar doubleQuote = {\n  TAG: /* Text */0,\n  _0: \"\\\"\"\n};\n\nexport {\n  MiniBuffer ,\n  nil ,\n  line ,\n  hardLine ,\n  softLine ,\n  literalLine ,\n  text ,\n  _concat ,\n  concat ,\n  indent ,\n  ifBreaks ,\n  lineSuffix ,\n  group ,\n  breakableGroup ,\n  customLayout ,\n  breakParent ,\n  space ,\n  comma ,\n  dot ,\n  dotdot ,\n  dotdotdot ,\n  lessThan ,\n  greaterThan ,\n  lbrace ,\n  rbrace ,\n  lparen ,\n  rparen ,\n  lbracket ,\n  rbracket ,\n  question ,\n  tilde ,\n  equal ,\n  trailingComma ,\n  doubleQuote ,\n  propagateForcedBreaks ,\n  willBreak ,\n  join ,\n  fits ,\n  toString ,\n  debug ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_doc.res",
    "content": "module MiniBuffer = Res_minibuffer\n\ntype mode = Break | Flat\n\ntype lineStyle =\n  | Classic /* fits? -> replace with space */\n  | Soft /* fits? -> replaced with nothing */\n  | Hard /* always included, forces breaks in parents */\n  /* always included, forces breaks in parents, but doesn't increase indentation\n   use case: template literals, multiline string content */\n  | Literal\n\ntype rec t =\n  | Nil\n  | Text(string)\n  | Concat(list<t>)\n  | Indent(t)\n  | IfBreaks({\n      yes: t,\n      no: t,\n      mutable broken: bool,\n    }) /* when broken is true, treat as the yes branch */\n  | LineSuffix(t)\n  | LineBreak(lineStyle)\n  | Group({mutable shouldBreak: bool, doc: t})\n  | CustomLayout(list<t>)\n  | BreakParent\n\nlet nil = Nil\nlet line = LineBreak(Classic)\nlet hardLine = LineBreak(Hard)\nlet softLine = LineBreak(Soft)\nlet literalLine = LineBreak(Literal)\nlet text = s => Text(s)\n\n/* Optimization. We eagerly collapse and reduce whatever allocation we can */\nlet rec _concat = (acc, l) =>\n  switch l {\n  | list{Text(s1), Text(s2), ...rest} => list{Text(s1 ++ s2), ..._concat(acc, rest)}\n  | list{Nil, ...rest} => _concat(acc, rest)\n  | list{Concat(l2), ...rest} => _concat(_concat(acc, rest), l2) /* notice the order here */\n  | list{x, ...rest} =>\n    let rest1 = _concat(acc, rest)\n    if rest1 === rest {\n      l\n    } else {\n      list{x, ...rest1}\n    }\n  | list{} => acc\n  }\n\nlet concat = l => Concat(_concat(list{}, l))\n\nlet indent = d => Indent(d)\nlet ifBreaks = (t, f) => IfBreaks({yes: t, no: f, broken: false})\nlet lineSuffix = d => LineSuffix(d)\nlet group = d => Group({shouldBreak: false, doc: d})\nlet breakableGroup = (~forceBreak, d) => Group({shouldBreak: forceBreak, doc: d})\nlet customLayout = gs => CustomLayout(gs)\nlet breakParent = BreakParent\n\nlet space = Text(\" \")\nlet comma = Text(\",\")\nlet dot = Text(\".\")\nlet dotdot = Text(\"..\")\nlet dotdotdot = Text(\"...\")\nlet lessThan = 
Text(\"<\")\nlet greaterThan = Text(\">\")\nlet lbrace = Text(\"{\")\nlet rbrace = Text(\"}\")\nlet lparen = Text(\"(\")\nlet rparen = Text(\")\")\nlet lbracket = Text(\"[\")\nlet rbracket = Text(\"]\")\nlet question = Text(\"?\")\nlet tilde = Text(\"~\")\nlet equal = Text(\"=\")\nlet trailingComma = ifBreaks(comma, nil)\nlet doubleQuote = Text(\"\\\"\")\n\nlet propagateForcedBreaks = doc => {\n  let rec walk = doc =>\n    switch doc {\n    | Text(_) | Nil | LineSuffix(_) => false\n    | BreakParent => true\n    | LineBreak(Hard | Literal) => true\n    | LineBreak(Classic | Soft) => false\n    | Indent(children) =>\n      let childForcesBreak = walk(children)\n      childForcesBreak\n    | IfBreaks({yes: trueDoc, no: falseDoc} as ib) =>\n      let falseForceBreak = walk(falseDoc)\n      if falseForceBreak {\n        let _ = walk(trueDoc)\n        ib.broken = true\n        true\n      } else {\n        let forceBreak = walk(trueDoc)\n        forceBreak\n      }\n    | Group({shouldBreak: forceBreak, doc: children} as gr) =>\n      let childForcesBreak = walk(children)\n      let shouldBreak = forceBreak || childForcesBreak\n      gr.shouldBreak = shouldBreak\n      shouldBreak\n    | Concat(children) => List.fold_left((forceBreak, child) => {\n        let childForcesBreak = walk(child)\n        forceBreak || childForcesBreak\n      }, false, children)\n    | CustomLayout(children) =>\n      /* When using CustomLayout, we don't want to propagate forced breaks\n       * from the children up. By definition it picks the first layout that fits\n       * otherwise it takes the last of the list.\n       * However we do want to propagate forced breaks in the sublayouts. They\n       * might need to be broken. 
We just don't propagate them any higher here */\n      let _ = walk(Concat(children))\n      false\n    }\n\n  let _ = walk(doc)\n}\n\n/* See documentation in interface file */\nlet rec willBreak = doc =>\n  switch doc {\n  | LineBreak(Hard | Literal) | BreakParent | Group({shouldBreak: true}) => true\n  | Group({doc}) | Indent(doc) | CustomLayout(list{doc, ..._}) => willBreak(doc)\n  | Concat(docs) => List.exists(willBreak, docs)\n  | IfBreaks({yes, no}) => willBreak(yes) || willBreak(no)\n  | _ => false\n  }\n\nlet join = (~sep, docs) => {\n  let rec loop = (acc, sep, docs) =>\n    switch docs {\n    | list{} => List.rev(acc)\n    | list{x} => List.rev(list{x, ...acc})\n    | list{x, ...xs} => loop(list{sep, x, ...acc}, sep, xs)\n    }\n\n  concat(loop(list{}, sep, docs))\n}\n\nlet fits = (w, stack) => {\n  let width = ref(w)\n  let result = ref(None)\n\n  let rec calculate = (indent, mode, doc) =>\n    switch (mode, doc) {\n    | _ if result.contents !== None => ()\n    | _ if width.contents < 0 => result := Some(false)\n    | (_, Nil)\n    | (_, LineSuffix(_))\n    | (_, BreakParent) => ()\n    | (_, Text(txt)) => width := width.contents - String.length(txt)\n    | (_, Indent(doc)) => calculate(indent + 2, mode, doc)\n    | (Flat, LineBreak(Hard))\n    | (Flat, LineBreak(Literal)) =>\n      result := Some(true)\n    | (Flat, LineBreak(Classic)) => width := width.contents - 1\n    | (Flat, LineBreak(Soft)) => ()\n    | (Break, LineBreak(_)) => result := Some(true)\n    | (_, Group({shouldBreak: true, doc})) => calculate(indent, Break, doc)\n    | (_, Group({doc})) => calculate(indent, mode, doc)\n    | (_, IfBreaks({yes: breakDoc, broken: true})) => calculate(indent, mode, breakDoc)\n    | (Break, IfBreaks({yes: breakDoc})) => calculate(indent, mode, breakDoc)\n    | (Flat, IfBreaks({no: flatDoc})) => calculate(indent, mode, flatDoc)\n    | (_, Concat(docs)) => calculateConcat(indent, mode, docs)\n    | (_, CustomLayout(list{hd, ..._})) =>\n      /* TODO: if we 
have nested custom layouts, what we should do here? */\n      calculate(indent, mode, hd)\n    | (_, CustomLayout(list{})) => ()\n    }\n  and calculateConcat = (indent, mode, docs) =>\n    if result.contents === None {\n      switch docs {\n      | list{} => ()\n      | list{doc, ...rest} =>\n        calculate(indent, mode, doc)\n        calculateConcat(indent, mode, rest)\n      }\n    }\n\n  let rec calculateAll = stack =>\n    switch (result.contents, stack) {\n    | (Some(r), _) => r\n    | (None, list{}) => width.contents >= 0\n    | (None, list{(indent, mode, doc), ...rest}) =>\n      calculate(indent, mode, doc)\n      calculateAll(rest)\n    }\n\n  calculateAll(stack)\n}\n\nlet toString = (~width, doc) => {\n  propagateForcedBreaks(doc)\n  let buffer = MiniBuffer.create(1000)\n\n  let rec process = (~pos, lineSuffices, stack) =>\n    switch stack {\n    | list{(ind, mode, doc) as cmd, ...rest} =>\n      switch doc {\n      | Nil | BreakParent => process(~pos, lineSuffices, rest)\n      | Text(txt) =>\n        MiniBuffer.add_string(buffer, txt)\n        process(~pos=String.length(txt) + pos, lineSuffices, rest)\n      | LineSuffix(doc) => process(~pos, list{(ind, mode, doc), ...lineSuffices}, rest)\n      | Concat(docs) =>\n        let ops = List.map(doc => (ind, mode, doc), docs)\n        process(~pos, lineSuffices, List.append(ops, rest))\n      | Indent(doc) => process(~pos, lineSuffices, list{(ind + 2, mode, doc), ...rest})\n      | IfBreaks({yes: breakDoc, broken: true}) =>\n        process(~pos, lineSuffices, list{(ind, mode, breakDoc), ...rest})\n      | IfBreaks({yes: breakDoc, no: flatDoc}) =>\n        if mode == Break {\n          process(~pos, lineSuffices, list{(ind, mode, breakDoc), ...rest})\n        } else {\n          process(~pos, lineSuffices, list{(ind, mode, flatDoc), ...rest})\n        }\n      | LineBreak(lineStyle) =>\n        if mode == Break {\n          switch lineSuffices {\n          | list{} =>\n            if lineStyle == 
Literal {\n              MiniBuffer.add_char(buffer, '\\n')\n              process(~pos=0, list{}, rest)\n            } else {\n              MiniBuffer.flush_newline(buffer)\n              MiniBuffer.add_string(buffer, @doesNotRaise String.make(ind, ' '))\n              process(~pos=ind, list{}, rest)\n            }\n          | _docs =>\n            process(~pos=ind, list{}, List.concat(list{List.rev(lineSuffices), list{cmd, ...rest}}))\n          }\n        } else {\n          /* mode = Flat */\n          let pos = switch lineStyle {\n          | Classic =>\n            MiniBuffer.add_string(buffer, \" \")\n            pos + 1\n          | Hard =>\n            MiniBuffer.flush_newline(buffer)\n            0\n          | Literal =>\n            MiniBuffer.add_char(buffer, '\\n')\n            0\n          | Soft => pos\n          }\n\n          process(~pos, lineSuffices, rest)\n        }\n      | Group({shouldBreak, doc}) =>\n        if shouldBreak || !fits(width - pos, list{(ind, Flat, doc), ...rest}) {\n          process(~pos, lineSuffices, list{(ind, Break, doc), ...rest})\n        } else {\n          process(~pos, lineSuffices, list{(ind, Flat, doc), ...rest})\n        }\n      | CustomLayout(docs) =>\n        let rec findGroupThatFits = groups =>\n          switch groups {\n          | list{} => Nil\n          | list{lastGroup} => lastGroup\n          | list{doc, ...docs} =>\n            if fits(width - pos, list{(ind, Flat, doc), ...rest}) {\n              doc\n            } else {\n              findGroupThatFits(docs)\n            }\n          }\n\n        let doc = findGroupThatFits(docs)\n        process(~pos, lineSuffices, list{(ind, Flat, doc), ...rest})\n      }\n    | list{} =>\n      switch lineSuffices {\n      | list{} => ()\n      | suffices => process(~pos=0, list{}, List.rev(suffices))\n      }\n    }\n\n  process(~pos=0, list{}, list{(0, Flat, doc)})\n  MiniBuffer.contents(buffer)\n}\n\n@live\nlet debug = t => {\n  let rec toDoc = x =>\n    
switch x {\n    | Nil => text(\"nil\")\n    | BreakParent => text(\"breakparent\")\n    | Text(txt) => text(\"text(\\\"\" ++ (txt ++ \"\\\")\"))\n    | LineSuffix(doc) =>\n      group(\n        concat(list{text(\"linesuffix(\"), indent(concat(list{line, toDoc(doc)})), line, text(\")\")}),\n      )\n    | Concat(list{}) => text(\"concat()\")\n    | Concat(docs) =>\n      group(\n        concat(list{\n          text(\"concat(\"),\n          indent(\n            concat(list{line, join(~sep=concat(list{text(\",\"), line}), List.map(toDoc, docs))}),\n          ),\n          line,\n          text(\")\"),\n        }),\n      )\n    | CustomLayout(docs) =>\n      group(\n        concat(list{\n          text(\"customLayout(\"),\n          indent(\n            concat(list{line, join(~sep=concat(list{text(\",\"), line}), List.map(toDoc, docs))}),\n          ),\n          line,\n          text(\")\"),\n        }),\n      )\n    | Indent(doc) => concat(list{text(\"indent(\"), softLine, toDoc(doc), softLine, text(\")\")})\n    | IfBreaks({yes: trueDoc, broken: true}) => toDoc(trueDoc)\n    | IfBreaks({yes: trueDoc, no: falseDoc}) =>\n      group(\n        concat(list{\n          text(\"ifBreaks(\"),\n          indent(\n            concat(list{line, toDoc(trueDoc), concat(list{text(\",\"), line}), toDoc(falseDoc)}),\n          ),\n          line,\n          text(\")\"),\n        }),\n      )\n    | LineBreak(break) =>\n      let breakTxt = switch break {\n      | Classic => \"Classic\"\n      | Soft => \"Soft\"\n      | Hard => \"Hard\"\n      | Literal => \"Liteal\"\n      }\n\n      text(\"LineBreak(\" ++ (breakTxt ++ \")\"))\n    | Group({shouldBreak, doc}) =>\n      group(\n        concat(list{\n          text(\"Group(\"),\n          indent(\n            concat(list{\n              line,\n              text(\"{shouldBreak: \" ++ (string_of_bool(shouldBreak) ++ \"}\")),\n              concat(list{text(\",\"), line}),\n              toDoc(doc),\n            }),\n          ),\n  
        line,\n          text(\")\"),\n        }),\n      )\n    }\n\n  let doc = toDoc(t)\n  toString(~width=10, doc) |> print_endline\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_grammar.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Res_token from \"./res_token.js\";\n\nfunction toString(x) {\n  if (typeof x !== \"number\") {\n    return \"an expression after the operator \\\"\" + (Res_token.toString(x._0) + \"\\\"\");\n  }\n  switch (x) {\n    case /* OpenDescription */0 :\n        return \"an open description\";\n    case /* ModuleLongIdent */1 :\n        return \"a module path\";\n    case /* Ternary */2 :\n        return \"a ternary expression\";\n    case /* Es6ArrowExpr */3 :\n        return \"an es6 arrow function\";\n    case /* Jsx */4 :\n        return \"a jsx expression\";\n    case /* JsxAttribute */5 :\n        return \"a jsx attribute\";\n    case /* JsxChild */6 :\n        return \"jsx child\";\n    case /* ExprOperand */7 :\n        return \"a basic expression\";\n    case /* ExprUnary */8 :\n        return \"a unary expression\";\n    case /* ExprSetField */9 :\n        return \"a record field mutation\";\n    case /* ExprBlock */10 :\n        return \"a block with expressions\";\n    case /* ExprCall */11 :\n        return \"a function application\";\n    case /* ExprList */12 :\n        return \"multiple expressions\";\n    case /* ExprArrayAccess */13 :\n        return \"an array access expression\";\n    case /* ExprArrayMutation */14 :\n        return \"an array mutation\";\n    case /* ExprIf */15 :\n        return \"an if expression\";\n    case /* ExprFor */16 :\n        return \"a for expression\";\n    case /* IfCondition */17 :\n        return \"the condition of an if expression\";\n    case /* IfBranch */18 :\n        return \"the true-branch of an if expression\";\n    case /* ElseBranch */19 :\n        return \"the else-branch of an if expression\";\n    case /* External */21 :\n        return \"an external\";\n    case /* PatternMatching */22 :\n        return \"the cases of a pattern match\";\n    case /* PatternMatchCase */23 :\n        return \"a pattern match case\";\n    case /* 
LetBinding */24 :\n        return \"a let binding\";\n    case /* PatternList */25 :\n        return \"multiple patterns\";\n    case /* PatternOcamlList */26 :\n        return \"a list pattern\";\n    case /* PatternRecord */27 :\n        return \"a record pattern\";\n    case /* TypeDef */28 :\n        return \"a type definition\";\n    case /* TypeConstrName */29 :\n        return \"a type-constructor name\";\n    case /* TypeParams */30 :\n        return \"type parameters\";\n    case /* TypeParam */31 :\n        return \"a type parameter\";\n    case /* PackageConstraint */32 :\n        return \"a package constraint\";\n    case /* TypeRepresentation */33 :\n        return \"a type representation\";\n    case /* RecordDecl */34 :\n        return \"a record declaration\";\n    case /* ConstructorDeclaration */35 :\n        return \"a constructor declaration\";\n    case /* ParameterList */36 :\n        return \"parameters\";\n    case /* StringFieldDeclarations */37 :\n        return \"string field declarations\";\n    case /* FieldDeclarations */38 :\n        return \"field declarations\";\n    case /* TypExprList */39 :\n        return \"list of types\";\n    case /* FunctorArgs */40 :\n        return \"functor arguments\";\n    case /* ModExprList */41 :\n        return \"list of module expressions\";\n    case /* TypeParameters */42 :\n        return \"list of type parameters\";\n    case /* RecordRows */43 :\n        return \"rows of a record\";\n    case /* RecordRowsStringKey */44 :\n        return \"rows of a record with string keys\";\n    case /* ArgumentList */45 :\n        return \"arguments\";\n    case /* Signature */46 :\n        return \"signature\";\n    case /* Specification */47 :\n        return \"specification\";\n    case /* Structure */48 :\n        return \"structure\";\n    case /* Implementation */49 :\n        return \"implementation\";\n    case /* Attribute */50 :\n        return \"an attribute\";\n    case /* TypeConstraint */51 
:\n        return \"constraints on a type\";\n    case /* TypeExpression */20 :\n    case /* AtomicTypExpr */52 :\n        return \"a type\";\n    case /* ListExpr */53 :\n        return \"an ocaml list expr\";\n    case /* JsFfiImport */54 :\n        return \"js ffi import\";\n    case /* Pattern */55 :\n        return \"pattern\";\n    case /* AttributePayload */56 :\n        return \"an attribute payload\";\n    case /* TagNames */57 :\n        return \"tag names\";\n    \n  }\n}\n\nfunction isSignatureItemStart(x) {\n  if (typeof x !== \"number\") {\n    return false;\n  }\n  if (x < 10) {\n    return x > 8 || x < 1;\n  }\n  if (x === 27) {\n    return true;\n  }\n  if (x < 59) {\n    return false;\n  }\n  switch (x) {\n    case /* Private */61 :\n    case /* Mutable */62 :\n    case /* Constraint */63 :\n    case /* Of */66 :\n    case /* Land */67 :\n    case /* Lor */68 :\n    case /* Band */69 :\n    case /* BangEqual */70 :\n    case /* BangEqualEqual */71 :\n    case /* LessEqual */72 :\n    case /* GreaterEqual */73 :\n    case /* ColonEqual */74 :\n    case /* Percent */77 :\n    case /* List */79 :\n    case /* Backtick */80 :\n    case /* BarGreater */81 :\n    case /* Try */82 :\n    case /* Import */83 :\n        return false;\n    case /* External */59 :\n    case /* Typ */60 :\n    case /* Include */64 :\n    case /* Module */65 :\n    case /* At */75 :\n    case /* AtAt */76 :\n    case /* PercentPercent */78 :\n    case /* Export */84 :\n        return true;\n    \n  }\n}\n\nfunction isAtomicPatternStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* Underscore */12 :\n      case /* Lparen */18 :\n      case /* Lbracket */20 :\n      case /* Lbrace */22 :\n      case /* Exception */27 :\n      case /* Lazy */47 :\n      case /* Percent */77 :\n      case /* List */79 :\n      case /* Backtick */80 :\n          return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* 
Codepoint */0 :\n      case /* Int */1 :\n      case /* String */3 :\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isAtomicExprStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* True */1 :\n      case /* False */2 :\n      case /* Lparen */18 :\n      case /* Lbracket */20 :\n      case /* Lbrace */22 :\n      case /* LessThan */42 :\n      case /* Hash */44 :\n      case /* Module */65 :\n      case /* Percent */77 :\n      case /* List */79 :\n      case /* Backtick */80 :\n          return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Codepoint */0 :\n      case /* Int */1 :\n      case /* Float */2 :\n      case /* String */3 :\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isAtomicTypExprStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* Underscore */12 :\n      case /* SingleQuote */13 :\n      case /* Lparen */18 :\n      case /* Lbrace */22 :\n      case /* Percent */77 :\n          return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isExprStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* True */1 :\n      case /* False */2 :\n      case /* Bang */7 :\n      case /* Underscore */12 :\n      case /* Lparen */18 :\n      case /* Lbracket */20 :\n      case /* Lbrace */22 :\n      case /* Minus */34 :\n      case /* MinusDot */35 :\n      case /* Plus */36 :\n      case /* PlusDot */37 :\n      case /* LessThan */42 :\n      case /* Hash */44 :\n      case /* Assert */46 :\n      case /* Lazy */47 :\n      case /* If */50 :\n      case /* 
For */52 :\n      case /* While */54 :\n      case /* Switch */55 :\n      case /* Module */65 :\n      case /* At */75 :\n      case /* Percent */77 :\n      case /* List */79 :\n      case /* Backtick */80 :\n      case /* Try */82 :\n          return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Codepoint */0 :\n      case /* Int */1 :\n      case /* Float */2 :\n      case /* String */3 :\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isJsxAttributeStart(x) {\n  if (typeof x === \"number\") {\n    if (x === /* Question */49) {\n      return true;\n    } else {\n      return false;\n    }\n  } else if (x.TAG === /* Lident */4) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isStructureItemStart(x) {\n  if (typeof x === \"number\") {\n    if (x >= 10) {\n      if (x === 27) {\n        return true;\n      }\n      if (x >= 59) {\n        switch (x) {\n          case /* Private */61 :\n          case /* Mutable */62 :\n          case /* Constraint */63 :\n          case /* Of */66 :\n          case /* Land */67 :\n          case /* Lor */68 :\n          case /* Band */69 :\n          case /* BangEqual */70 :\n          case /* BangEqualEqual */71 :\n          case /* LessEqual */72 :\n          case /* GreaterEqual */73 :\n          case /* ColonEqual */74 :\n          case /* Percent */77 :\n          case /* List */79 :\n          case /* Backtick */80 :\n          case /* BarGreater */81 :\n          case /* Try */82 :\n              break;\n          case /* External */59 :\n          case /* Typ */60 :\n          case /* Include */64 :\n          case /* Module */65 :\n          case /* At */75 :\n          case /* AtAt */76 :\n          case /* PercentPercent */78 :\n          case /* Import */83 :\n          case /* Export */84 :\n              return true;\n          \n        
}\n      }\n      \n    } else if (x > 8 || x < 1) {\n      return true;\n    }\n    \n  }\n  if (isExprStart(x)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isPatternStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* True */1 :\n      case /* False */2 :\n      case /* Underscore */12 :\n      case /* Lparen */18 :\n      case /* Lbracket */20 :\n      case /* Lbrace */22 :\n      case /* Exception */27 :\n      case /* Minus */34 :\n      case /* Plus */36 :\n      case /* Hash */44 :\n      case /* Lazy */47 :\n      case /* Module */65 :\n      case /* At */75 :\n      case /* Percent */77 :\n      case /* List */79 :\n      case /* Backtick */80 :\n          return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Codepoint */0 :\n      case /* Int */1 :\n      case /* Float */2 :\n      case /* String */3 :\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isParameterStart(x) {\n  if (typeof x === \"number\") {\n    if (x > 48 || x < 4) {\n      if (x === 60) {\n        return true;\n      }\n      \n    } else if (x > 47 || x < 5) {\n      return true;\n    }\n    \n  }\n  if (isPatternStart(x)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isStringFieldDeclStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* DotDotDot */6 :\n      case /* At */75 :\n          return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* String */3 :\n      case /* Lident */4 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isFieldDeclStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* Mutable */62 :\n      case /* At */75 :\n          return true;\n      default:\n        if (Res_token.isKeyword(x)) {\n     
     return true;\n        } else {\n          return false;\n        }\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return true;\n      default:\n        if (Res_token.isKeyword(x)) {\n          return true;\n        } else {\n          return false;\n        }\n    }\n  }\n}\n\nfunction isRecordDeclStart(x) {\n  if (typeof x !== \"number\") {\n    if (x.TAG === /* Lident */4) {\n      return true;\n    } else {\n      return false;\n    }\n  }\n  switch (x) {\n    case /* Mutable */62 :\n    case /* At */75 :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction isTypExprStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* Underscore */12 :\n      case /* SingleQuote */13 :\n      case /* Lparen */18 :\n      case /* Lbracket */20 :\n      case /* Lbrace */22 :\n      case /* Module */65 :\n      case /* At */75 :\n      case /* Percent */77 :\n          return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isTypeParameterStart(x) {\n  if (typeof x === \"number\") {\n    if (x !== 4 && x !== 48 && !isTypExprStart(x)) {\n      return false;\n    } else {\n      return true;\n    }\n  } else if (isTypExprStart(x)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isTypeParamStart(x) {\n  if (typeof x === \"number\") {\n    if (x > 34 || x < 12) {\n      return x === 36;\n    } else {\n      return x > 33 || x < 14;\n    }\n  } else {\n    return false;\n  }\n}\n\nfunction isFunctorArgStart(x) {\n  if (typeof x !== \"number\") {\n    if (x.TAG === /* Uident */5) {\n      return true;\n    } else {\n      return false;\n    }\n  }\n  switch (x) {\n    case /* Underscore */12 :\n    case /* Lparen */18 :\n    case /* Lbrace */22 :\n    case /* At 
*/75 :\n    case /* Percent */77 :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction isModExprStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* Lparen */18 :\n      case /* Lbrace */22 :\n      case /* At */75 :\n      case /* Percent */77 :\n          return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Lident */4 :\n          if (x._0 === \"unpack\") {\n            return true;\n          } else {\n            return false;\n          }\n      case /* Uident */5 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isRecordRowStart(x) {\n  if (typeof x === \"number\") {\n    if (x === /* DotDotDot */6 || Res_token.isKeyword(x)) {\n      return true;\n    } else {\n      return false;\n    }\n  }\n  switch (x.TAG | 0) {\n    case /* Lident */4 :\n    case /* Uident */5 :\n        return true;\n    default:\n      if (Res_token.isKeyword(x)) {\n        return true;\n      } else {\n        return false;\n      }\n  }\n}\n\nfunction isRecordRowStringKeyStart(x) {\n  if (typeof x === \"number\" || x.TAG !== /* String */3) {\n    return false;\n  } else {\n    return true;\n  }\n}\n\nfunction isArgumentStart(x) {\n  if (typeof x === \"number\") {\n    if (x > 12 || x < 4) {\n      if (x === 48) {\n        return true;\n      }\n      \n    } else if (x > 11 || x < 5) {\n      return true;\n    }\n    \n  }\n  if (isExprStart(x)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isPatternMatchStart(x) {\n  if (x === 17 || isPatternStart(x)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isPatternOcamlListStart(x) {\n  if (x === 6 || isPatternStart(x)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isPatternRecordItemStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* DotDotDot */6 :\n      case /* Underscore */12 :\n          
return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isAttributeStart(x) {\n  return x === 75;\n}\n\nfunction isJsFfiImportStart(x) {\n  if (typeof x === \"number\") {\n    if (x === /* At */75) {\n      return true;\n    } else {\n      return false;\n    }\n  } else if (x.TAG === /* Lident */4) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isBlockExprStart(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* Open */0 :\n      case /* True */1 :\n      case /* False */2 :\n      case /* Bang */7 :\n      case /* Let */9 :\n      case /* Underscore */12 :\n      case /* Lparen */18 :\n      case /* Lbracket */20 :\n      case /* Lbrace */22 :\n      case /* Exception */27 :\n      case /* Forwardslash */29 :\n      case /* Minus */34 :\n      case /* MinusDot */35 :\n      case /* Plus */36 :\n      case /* PlusDot */37 :\n      case /* LessThan */42 :\n      case /* Hash */44 :\n      case /* Assert */46 :\n      case /* Lazy */47 :\n      case /* If */50 :\n      case /* For */52 :\n      case /* While */54 :\n      case /* Switch */55 :\n      case /* Module */65 :\n      case /* At */75 :\n      case /* Percent */77 :\n      case /* List */79 :\n      case /* Backtick */80 :\n      case /* Try */82 :\n          return true;\n      default:\n        return false;\n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Codepoint */0 :\n      case /* Int */1 :\n      case /* Float */2 :\n      case /* String */3 :\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return true;\n      default:\n        return false;\n    }\n  }\n}\n\nfunction isListElement(grammar, token) {\n  if (typeof grammar !== \"number\") {\n    return false;\n  }\n  switch (grammar) {\n    case /* JsxAttribute */5 :\n        return 
isJsxAttributeStart(token);\n    case /* PatternMatching */22 :\n        return isPatternMatchStart(token);\n    case /* PatternList */25 :\n        if (token === /* DotDotDot */6) {\n          return true;\n        } else {\n          return isPatternStart(token);\n        }\n    case /* PatternOcamlList */26 :\n        return isPatternOcamlListStart(token);\n    case /* PatternRecord */27 :\n        return isPatternRecordItemStart(token);\n    case /* TypeParams */30 :\n        return isTypeParamStart(token);\n    case /* PackageConstraint */32 :\n        return token === /* And */10;\n    case /* RecordDecl */34 :\n        return isRecordDeclStart(token);\n    case /* ConstructorDeclaration */35 :\n        return token === /* Bar */17;\n    case /* ParameterList */36 :\n        return isParameterStart(token);\n    case /* StringFieldDeclarations */37 :\n        return isStringFieldDeclStart(token);\n    case /* FieldDeclarations */38 :\n        return isFieldDeclStart(token);\n    case /* TypExprList */39 :\n        if (isTypExprStart(token)) {\n          return true;\n        } else {\n          return token === /* LessThan */42;\n        }\n    case /* FunctorArgs */40 :\n        return isFunctorArgStart(token);\n    case /* ModExprList */41 :\n        return isModExprStart(token);\n    case /* TypeParameters */42 :\n        return isTypeParameterStart(token);\n    case /* RecordRows */43 :\n        return isRecordRowStart(token);\n    case /* RecordRowsStringKey */44 :\n        return isRecordRowStringKeyStart(token);\n    case /* ArgumentList */45 :\n        return isArgumentStart(token);\n    case /* Signature */46 :\n    case /* Specification */47 :\n        return isSignatureItemStart(token);\n    case /* Structure */48 :\n    case /* Implementation */49 :\n        return isStructureItemStart(token);\n    case /* Attribute */50 :\n        return token === 75;\n    case /* TypeConstraint */51 :\n        return token === /* Constraint */63;\n    case /* 
ExprList */12 :\n    case /* ListExpr */53 :\n        if (token === /* DotDotDot */6) {\n          return true;\n        } else {\n          return isExprStart(token);\n        }\n    case /* JsFfiImport */54 :\n        return isJsFfiImportStart(token);\n    case /* OpenDescription */0 :\n    case /* ModuleLongIdent */1 :\n    case /* Ternary */2 :\n    case /* Es6ArrowExpr */3 :\n    case /* Jsx */4 :\n    case /* JsxChild */6 :\n    case /* ExprOperand */7 :\n    case /* ExprUnary */8 :\n    case /* ExprSetField */9 :\n    case /* ExprBlock */10 :\n    case /* ExprCall */11 :\n    case /* ExprArrayAccess */13 :\n    case /* ExprArrayMutation */14 :\n    case /* ExprIf */15 :\n    case /* ExprFor */16 :\n    case /* IfCondition */17 :\n    case /* IfBranch */18 :\n    case /* ElseBranch */19 :\n    case /* TypeExpression */20 :\n    case /* External */21 :\n    case /* PatternMatchCase */23 :\n    case /* LetBinding */24 :\n    case /* TypeDef */28 :\n    case /* TypeConstrName */29 :\n    case /* TypeParam */31 :\n    case /* TypeRepresentation */33 :\n    case /* AtomicTypExpr */52 :\n    case /* Pattern */55 :\n        return false;\n    case /* AttributePayload */56 :\n        return token === /* Lparen */18;\n    case /* TagNames */57 :\n        return token === /* Hash */44;\n    \n  }\n}\n\nfunction isListTerminator(grammar, token) {\n  var exit = 0;\n  if (typeof grammar === \"number\") {\n    var exit$1 = 0;\n    switch (grammar) {\n      case /* JsxAttribute */5 :\n          if (typeof token === \"number\") {\n            if (token === 29) {\n              return true;\n            }\n            if (token === 41) {\n              return true;\n            }\n            exit = 2;\n          } else {\n            exit = 2;\n          }\n          break;\n      case /* ExprList */12 :\n          if (typeof token === \"number\") {\n            switch (token) {\n              case /* Lbracket */20 :\n              case /* Lbrace */22 :\n              case 
/* Rbrace */23 :\n              case /* Colon */24 :\n              case /* Comma */25 :\n              case /* Eof */26 :\n              case /* Exception */27 :\n              case /* Backslash */28 :\n                  exit = 2;\n                  break;\n              case /* Rparen */19 :\n              case /* Rbracket */21 :\n              case /* Forwardslash */29 :\n                  return true;\n              default:\n                exit = 2;\n            }\n          } else {\n            exit = 2;\n          }\n          break;\n      case /* ParameterList */36 :\n          if (typeof token === \"number\") {\n            if (token === 22) {\n              return true;\n            }\n            if (token === 57) {\n              return true;\n            }\n            exit = 2;\n          } else {\n            exit = 2;\n          }\n          break;\n      case /* TypExprList */39 :\n          if (typeof token === \"number\") {\n            if (token >= 20) {\n              if (token === 29) {\n                return true;\n              }\n              if (token === 41) {\n                return true;\n              }\n              exit = 2;\n            } else {\n              if (token === 14) {\n                return true;\n              }\n              if (token >= 19) {\n                return true;\n              }\n              exit = 2;\n            }\n          } else {\n            exit = 2;\n          }\n          break;\n      case /* TypeParams */30 :\n      case /* ModExprList */41 :\n      case /* ArgumentList */45 :\n      case /* ListExpr */53 :\n          exit$1 = 4;\n          break;\n      case /* ExprBlock */10 :\n      case /* StringFieldDeclarations */37 :\n      case /* JsFfiImport */54 :\n          exit$1 = 3;\n          break;\n      case /* OpenDescription */0 :\n      case /* ModuleLongIdent */1 :\n      case /* Ternary */2 :\n      case /* Es6ArrowExpr */3 :\n      case /* Jsx */4 :\n      case /* JsxChild */6 
:\n      case /* ExprOperand */7 :\n      case /* ExprUnary */8 :\n      case /* ExprSetField */9 :\n      case /* ExprCall */11 :\n      case /* ExprArrayAccess */13 :\n      case /* ExprArrayMutation */14 :\n      case /* ExprIf */15 :\n      case /* ExprFor */16 :\n      case /* IfCondition */17 :\n      case /* IfBranch */18 :\n      case /* ElseBranch */19 :\n      case /* TypeExpression */20 :\n      case /* External */21 :\n      case /* PatternMatching */22 :\n      case /* PatternMatchCase */23 :\n      case /* LetBinding */24 :\n      case /* PatternList */25 :\n      case /* PatternOcamlList */26 :\n      case /* PatternRecord */27 :\n      case /* TypeDef */28 :\n      case /* TypeConstrName */29 :\n      case /* TypeParam */31 :\n      case /* PackageConstraint */32 :\n      case /* TypeRepresentation */33 :\n      case /* RecordDecl */34 :\n      case /* ConstructorDeclaration */35 :\n      case /* FieldDeclarations */38 :\n      case /* FunctorArgs */40 :\n      case /* TypeParameters */42 :\n      case /* RecordRows */43 :\n      case /* RecordRowsStringKey */44 :\n      case /* Signature */46 :\n      case /* Specification */47 :\n      case /* Structure */48 :\n      case /* Implementation */49 :\n      case /* Attribute */50 :\n      case /* TypeConstraint */51 :\n      case /* AtomicTypExpr */52 :\n      case /* Pattern */55 :\n          exit = 2;\n          break;\n      case /* AttributePayload */56 :\n          if (token === 19) {\n            return true;\n          }\n          exit = 2;\n          break;\n      case /* TagNames */57 :\n          if (token === 21) {\n            return true;\n          }\n          exit = 2;\n          break;\n      \n    }\n    switch (exit$1) {\n      case 3 :\n          if (token === 23) {\n            return true;\n          }\n          exit = 2;\n          break;\n      case 4 :\n          if (token === 19) {\n            return true;\n          }\n          exit = 2;\n          break;\n      \n    
}\n  } else {\n    exit = 2;\n  }\n  if (exit === 2) {\n    if (token === 26) {\n      return true;\n    }\n    if (typeof grammar !== \"number\") {\n      return false;\n    }\n    if (grammar < 25) {\n      return false;\n    }\n    switch (grammar) {\n      case /* PatternList */25 :\n      case /* PatternOcamlList */26 :\n      case /* PatternRecord */27 :\n          break;\n      case /* PackageConstraint */32 :\n          return token !== /* And */10;\n      case /* ConstructorDeclaration */35 :\n          return token !== /* Bar */17;\n      case /* Signature */46 :\n      case /* Structure */48 :\n          return token === 23;\n      case /* Attribute */50 :\n          return token !== /* At */75;\n      case /* TypeConstraint */51 :\n          return token !== /* Constraint */63;\n      case /* TypeDef */28 :\n      case /* TypeConstrName */29 :\n      case /* TypeParams */30 :\n      case /* TypeParam */31 :\n      case /* TypeRepresentation */33 :\n      case /* RecordDecl */34 :\n      case /* ParameterList */36 :\n      case /* StringFieldDeclarations */37 :\n      case /* FieldDeclarations */38 :\n      case /* TypExprList */39 :\n      case /* FunctorArgs */40 :\n      case /* ModExprList */41 :\n      case /* TypeParameters */42 :\n      case /* RecordRows */43 :\n      case /* RecordRowsStringKey */44 :\n      case /* ArgumentList */45 :\n      case /* Specification */47 :\n      case /* Implementation */49 :\n      case /* AtomicTypExpr */52 :\n      case /* ListExpr */53 :\n      case /* JsFfiImport */54 :\n      case /* Pattern */55 :\n      case /* AttributePayload */56 :\n      case /* TagNames */57 :\n          return false;\n      \n    }\n  }\n  if (typeof token !== \"number\") {\n    return false;\n  }\n  if (token >= 30) {\n    if (token !== 53) {\n      return token === 57;\n    } else {\n      return true;\n    }\n  }\n  if (token === 14) {\n    return true;\n  }\n  if (token < 19) {\n    return false;\n  }\n  switch (token) {\n    
case /* Lbracket */20 :\n    case /* Lbrace */22 :\n    case /* Rbrace */23 :\n    case /* Colon */24 :\n    case /* Comma */25 :\n    case /* Eof */26 :\n    case /* Exception */27 :\n    case /* Backslash */28 :\n        return false;\n    case /* Rparen */19 :\n    case /* Rbracket */21 :\n    case /* Forwardslash */29 :\n        return true;\n    \n  }\n}\n\nfunction isPartOfList(grammar, token) {\n  if (isListElement(grammar, token)) {\n    return true;\n  } else {\n    return isListTerminator(grammar, token);\n  }\n}\n\nvar Token;\n\nvar isJsxChildStart = isAtomicExprStart;\n\nexport {\n  Token ,\n  toString ,\n  isSignatureItemStart ,\n  isAtomicPatternStart ,\n  isAtomicExprStart ,\n  isAtomicTypExprStart ,\n  isExprStart ,\n  isJsxAttributeStart ,\n  isStructureItemStart ,\n  isPatternStart ,\n  isParameterStart ,\n  isStringFieldDeclStart ,\n  isFieldDeclStart ,\n  isRecordDeclStart ,\n  isTypExprStart ,\n  isTypeParameterStart ,\n  isTypeParamStart ,\n  isFunctorArgStart ,\n  isModExprStart ,\n  isRecordRowStart ,\n  isRecordRowStringKeyStart ,\n  isArgumentStart ,\n  isPatternMatchStart ,\n  isPatternOcamlListStart ,\n  isPatternRecordItemStart ,\n  isAttributeStart ,\n  isJsFfiImportStart ,\n  isJsxChildStart ,\n  isBlockExprStart ,\n  isListElement ,\n  isListTerminator ,\n  isPartOfList ,\n  \n}\n/* Res_token Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_grammar.res",
    "content": "module Token = Res_token\n\ntype t =\n  | OpenDescription /* open Belt */\n  | @live ModuleLongIdent /* Foo or Foo.Bar */\n  | Ternary /* condExpr ? trueExpr : falseExpr */\n  | Es6ArrowExpr\n  | Jsx\n  | JsxAttribute\n  | @live JsxChild\n  | ExprOperand\n  | ExprUnary\n  | ExprSetField\n  | ExprBinaryAfterOp(Token.t)\n  | ExprBlock\n  | ExprCall\n  | ExprList\n  | ExprArrayAccess\n  | ExprArrayMutation\n  | ExprIf\n  | ExprFor\n  | IfCondition\n  | IfBranch\n  | ElseBranch\n  | TypeExpression\n  | External\n  | PatternMatching\n  | PatternMatchCase\n  | LetBinding\n  | PatternList\n  | PatternOcamlList\n  | PatternRecord\n\n  | TypeDef\n  | TypeConstrName\n  | TypeParams\n  | @live TypeParam\n  | PackageConstraint\n  | TypeRepresentation\n  | RecordDecl\n  | ConstructorDeclaration\n  | ParameterList\n  | StringFieldDeclarations\n  | FieldDeclarations\n  | TypExprList\n  | FunctorArgs\n  | ModExprList\n  | TypeParameters\n  | RecordRows\n  | RecordRowsStringKey\n  | ArgumentList\n  | Signature\n  | Specification\n  | Structure\n  | Implementation\n  | Attribute\n  | TypeConstraint\n  | AtomicTypExpr\n  | ListExpr\n  | JsFfiImport\n  | Pattern\n  | AttributePayload\n  | TagNames\n\nlet toString = x =>\n  switch x {\n  | OpenDescription => \"an open description\"\n  | ModuleLongIdent => \"a module path\"\n  | Ternary => \"a ternary expression\"\n  | Es6ArrowExpr => \"an es6 arrow function\"\n  | Jsx => \"a jsx expression\"\n  | JsxAttribute => \"a jsx attribute\"\n  | ExprOperand => \"a basic expression\"\n  | ExprUnary => \"a unary expression\"\n  | ExprBinaryAfterOp(op) => \"an expression after the operator \\\"\" ++ (Token.toString(op) ++ \"\\\"\")\n  | ExprIf => \"an if expression\"\n  | IfCondition => \"the condition of an if expression\"\n  | IfBranch => \"the true-branch of an if expression\"\n  | ElseBranch => \"the else-branch of an if expression\"\n  | TypeExpression => \"a type\"\n  | External => \"an external\"\n  | PatternMatching => 
\"the cases of a pattern match\"\n  | ExprBlock => \"a block with expressions\"\n  | ExprSetField => \"a record field mutation\"\n  | ExprCall => \"a function application\"\n  | ExprArrayAccess => \"an array access expression\"\n  | ExprArrayMutation => \"an array mutation\"\n  | LetBinding => \"a let binding\"\n  | TypeDef => \"a type definition\"\n  | TypeParams => \"type parameters\"\n  | TypeParam => \"a type parameter\"\n  | TypeConstrName => \"a type-constructor name\"\n  | TypeRepresentation => \"a type representation\"\n  | RecordDecl => \"a record declaration\"\n  | PatternMatchCase => \"a pattern match case\"\n  | ConstructorDeclaration => \"a constructor declaration\"\n  | ExprList => \"multiple expressions\"\n  | PatternList => \"multiple patterns\"\n  | PatternOcamlList => \"a list pattern\"\n  | PatternRecord => \"a record pattern\"\n  | ParameterList => \"parameters\"\n  | StringFieldDeclarations => \"string field declarations\"\n  | FieldDeclarations => \"field declarations\"\n  | TypExprList => \"list of types\"\n  | FunctorArgs => \"functor arguments\"\n  | ModExprList => \"list of module expressions\"\n  | TypeParameters => \"list of type parameters\"\n  | RecordRows => \"rows of a record\"\n  | RecordRowsStringKey => \"rows of a record with string keys\"\n  | ArgumentList => \"arguments\"\n  | Signature => \"signature\"\n  | Specification => \"specification\"\n  | Structure => \"structure\"\n  | Implementation => \"implementation\"\n  | Attribute => \"an attribute\"\n  | TypeConstraint => \"constraints on a type\"\n  | AtomicTypExpr => \"a type\"\n  | ListExpr => \"an ocaml list expr\"\n  | PackageConstraint => \"a package constraint\"\n  | JsFfiImport => \"js ffi import\"\n  | JsxChild => \"jsx child\"\n  | Pattern => \"pattern\"\n  | ExprFor => \"a for expression\"\n  | AttributePayload => \"an attribute payload\"\n  | TagNames => \"tag names\"\n  }\n\nlet isSignatureItemStart = x =>\n  switch x {\n  | Token.At\n  | Let\n  | Typ\n  | 
External\n  | Exception\n  | Open\n  | Include\n  | Module\n  | AtAt\n  | Export\n  | PercentPercent => true\n  | _ => false\n  }\n\nlet isAtomicPatternStart = x =>\n  switch x {\n  | Token.Int(_)\n  | String(_)\n  | Codepoint(_)\n  | Backtick\n  | Lparen\n  | Lbracket\n  | Lbrace\n  | Underscore\n  | Lident(_)\n  | Uident(_)\n  | List\n  | Exception\n  | Lazy\n  | Percent => true\n  | _ => false\n  }\n\nlet isAtomicExprStart = x =>\n  switch x {\n  | Token.True\n  | False\n  | Int(_)\n  | String(_)\n  | Float(_)\n  | Codepoint(_)\n  | Backtick\n  | Uident(_)\n  | Lident(_)\n  | Hash\n  | Lparen\n  | List\n  | Lbracket\n  | Lbrace\n  | LessThan\n  | Module\n  | Percent => true\n  | _ => false\n  }\n\nlet isAtomicTypExprStart = x =>\n  switch x {\n  | Token.SingleQuote\n  | Underscore\n  | Lparen\n  | Lbrace\n  | Uident(_)\n  | Lident(_)\n  | Percent => true\n  | _ => false\n  }\n\nlet isExprStart = x =>\n  switch x {\n  | Token.True\n  | False\n  | Int(_)\n  | String(_)\n  | Float(_)\n  | Codepoint(_)\n  | Backtick\n  | Underscore\n  | Uident(_)\n  | Lident(_)\n  | Hash\n  | Lparen\n  | List\n  | Module\n  | Lbracket\n  | Lbrace\n  | LessThan\n  | Minus\n  | MinusDot\n  | Plus\n  | PlusDot\n  | Bang\n  | Percent\n  | At\n  | If\n  | Switch\n  | While\n  | For\n  | Assert\n  | Lazy\n  | Try => true\n  | _ => false\n  }\n\nlet isJsxAttributeStart = x =>\n  switch x {\n  | Token.Lident(_) | Question => true\n  | _ => false\n  }\n\nlet isStructureItemStart = x =>\n  switch x {\n  | Token.Open\n  | Let\n  | Typ\n  | External\n  | Import\n  | Export\n  | Exception\n  | Include\n  | Module\n  | AtAt\n  | PercentPercent\n  | At => true\n  | t if isExprStart(t) => true\n  | _ => false\n  }\n\nlet isPatternStart = x =>\n  switch x {\n  | Token.Int(_)\n  | Float(_)\n  | String(_)\n  | Codepoint(_)\n  | Backtick\n  | True\n  | False\n  | Minus\n  | Plus\n  | Lparen\n  | Lbracket\n  | Lbrace\n  | List\n  | Underscore\n  | Lident(_)\n  | Uident(_)\n  | Hash\n  | Exception\n  | 
Lazy\n  | Percent\n  | Module\n  | At => true\n  | _ => false\n  }\n\nlet isParameterStart = x =>\n  switch x {\n  | Token.Typ | Tilde | Dot => true\n  | token if isPatternStart(token) => true\n  | _ => false\n  }\n\n/* TODO: overparse Uident ? */\nlet isStringFieldDeclStart = x =>\n  switch x {\n  | Token.String(_) | Lident(_) | At | DotDotDot => true\n  | _ => false\n  }\n\n/* TODO: overparse Uident ? */\nlet isFieldDeclStart = x =>\n  switch x {\n  | Token.At | Mutable | Lident(_) => true\n  /* recovery, TODO: this is not ideal… */\n  | Uident(_) => true\n  | t if Token.isKeyword(t) => true\n  | _ => false\n  }\n\nlet isRecordDeclStart = x =>\n  switch x {\n  | Token.At\n  | Mutable\n  | Lident(_) => true\n  | _ => false\n  }\n\nlet isTypExprStart = x =>\n  switch x {\n  | Token.At\n  | SingleQuote\n  | Underscore\n  | Lparen\n  | Lbracket\n  | Uident(_)\n  | Lident(_)\n  | Module\n  | Percent\n  | Lbrace => true\n  | _ => false\n  }\n\nlet isTypeParameterStart = x =>\n  switch x {\n  | Token.Tilde | Dot => true\n  | token if isTypExprStart(token) => true\n  | _ => false\n  }\n\nlet isTypeParamStart = x =>\n  switch x {\n  | Token.Plus | Minus | SingleQuote | Underscore => true\n  | _ => false\n  }\n\nlet isFunctorArgStart = x =>\n  switch x {\n  | Token.At\n  | Uident(_)\n  | Underscore\n  | Percent\n  | Lbrace\n  | Lparen => true\n  | _ => false\n  }\n\nlet isModExprStart = x =>\n  switch x {\n  | Token.At\n  | Percent\n  | Uident(_)\n  | Lbrace\n  | Lparen\n  | Lident(\"unpack\") => true\n  | _ => false\n  }\n\nlet isRecordRowStart = x =>\n  switch x {\n  | Token.DotDotDot => true\n  | Token.Uident(_) | Lident(_) => true\n  /* TODO */\n  | t if Token.isKeyword(t) => true\n  | _ => false\n  }\n\nlet isRecordRowStringKeyStart = x =>\n  switch x {\n  | Token.String(_) => true\n  | _ => false\n  }\n\nlet isArgumentStart = x =>\n  switch x {\n  | Token.Tilde | Dot | Underscore => true\n  | t if isExprStart(t) => true\n  | _ => false\n  }\n\nlet isPatternMatchStart 
= x =>\n  switch x {\n  | Token.Bar => true\n  | t if isPatternStart(t) => true\n  | _ => false\n  }\n\nlet isPatternOcamlListStart = x =>\n  switch x {\n  | Token.DotDotDot => true\n  | t if isPatternStart(t) => true\n  | _ => false\n  }\n\nlet isPatternRecordItemStart = x =>\n  switch x {\n  | Token.DotDotDot | Uident(_) | Lident(_) | Underscore => true\n  | _ => false\n  }\n\nlet isAttributeStart = x =>\n  switch x {\n  | Token.At => true\n  | _ => false\n  }\n\nlet isJsFfiImportStart = x =>\n  switch x {\n  | Token.Lident(_) | At => true\n  | _ => false\n  }\n\nlet isJsxChildStart = isAtomicExprStart\n\nlet isBlockExprStart = x =>\n  switch x {\n  | Token.At\n  | Hash\n  | Percent\n  | Minus\n  | MinusDot\n  | Plus\n  | PlusDot\n  | Bang\n  | True\n  | False\n  | Float(_)\n  | Int(_)\n  | String(_)\n  | Codepoint(_)\n  | Lident(_)\n  | Uident(_)\n  | Lparen\n  | List\n  | Lbracket\n  | Lbrace\n  | Forwardslash\n  | Assert\n  | Lazy\n  | If\n  | For\n  | While\n  | Switch\n  | Open\n  | Module\n  | Exception\n  | Let\n  | LessThan\n  | Backtick\n  | Try\n  | Underscore => true\n  | _ => false\n  }\n\nlet isListElement = (grammar, token) =>\n  switch grammar {\n  | ExprList => token == Token.DotDotDot || isExprStart(token)\n  | ListExpr => token == DotDotDot || isExprStart(token)\n  | PatternList => token == DotDotDot || isPatternStart(token)\n  | ParameterList => isParameterStart(token)\n  | StringFieldDeclarations => isStringFieldDeclStart(token)\n  | FieldDeclarations => isFieldDeclStart(token)\n  | RecordDecl => isRecordDeclStart(token)\n  | TypExprList => isTypExprStart(token) || token == Token.LessThan\n  | TypeParams => isTypeParamStart(token)\n  | FunctorArgs => isFunctorArgStart(token)\n  | ModExprList => isModExprStart(token)\n  | TypeParameters => isTypeParameterStart(token)\n  | RecordRows => isRecordRowStart(token)\n  | RecordRowsStringKey => isRecordRowStringKeyStart(token)\n  | ArgumentList => isArgumentStart(token)\n  | Signature | Specification 
=> isSignatureItemStart(token)\n  | Structure | Implementation => isStructureItemStart(token)\n  | PatternMatching => isPatternMatchStart(token)\n  | PatternOcamlList => isPatternOcamlListStart(token)\n  | PatternRecord => isPatternRecordItemStart(token)\n  | Attribute => isAttributeStart(token)\n  | TypeConstraint => token == Constraint\n  | PackageConstraint => token == And\n  | ConstructorDeclaration => token == Bar\n  | JsxAttribute => isJsxAttributeStart(token)\n  | JsFfiImport => isJsFfiImportStart(token)\n  | AttributePayload => token == Lparen\n  | TagNames => token == Hash\n  | _ => false\n  }\n\nlet isListTerminator = (grammar, token) =>\n  switch (grammar, token) {\n  | (_, Token.Eof)\n  | (ExprList, Rparen | Forwardslash | Rbracket)\n  | (ListExpr, Rparen)\n  | (ArgumentList, Rparen)\n  | (TypExprList, Rparen | Forwardslash | GreaterThan | Equal)\n  | (ModExprList, Rparen)\n  | (\n    PatternList | PatternOcamlList | PatternRecord,\n    Forwardslash | Rbracket | Rparen | EqualGreater | In | Equal /* let {x} = foo */,\n  )\n  | (ExprBlock, Rbrace)\n  | (Structure | Signature, Rbrace)\n  | (TypeParams, Rparen)\n  | (ParameterList, EqualGreater | Lbrace)\n  | (JsxAttribute, Forwardslash | GreaterThan)\n  | (JsFfiImport, Rbrace)\n  | (StringFieldDeclarations, Rbrace) => true\n\n  | (Attribute, token) if token != At => true\n  | (TypeConstraint, token) if token != Constraint => true\n  | (PackageConstraint, token) if token != And => true\n  | (ConstructorDeclaration, token) if token != Bar => true\n  | (AttributePayload, Rparen) => true\n  | (TagNames, Rbracket) => true\n\n  | _ => false\n  }\n\nlet isPartOfList = (grammar, token) =>\n  isListElement(grammar, token) || isListTerminator(grammar, token)\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_js_ffi.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as $$Location from \"./location.js\";\nimport * as Longident from \"./longident.js\";\nimport * as Ast_helper from \"./ast_helper.js\";\n\nfunction decl(attrs, loc, name, alias, typ) {\n  return {\n          jld_attributes: attrs,\n          jld_name: name,\n          jld_alias: alias,\n          jld_type: typ,\n          jld_loc: loc\n        };\n}\n\nfunction importDescr(attrs, scope, importSpec, loc) {\n  return {\n          jid_loc: loc,\n          jid_spec: importSpec,\n          jid_scope: scope,\n          jid_attributes: attrs\n        };\n}\n\nfunction toParsetree(importDescr) {\n  var bsVal_0 = $$Location.mknoloc(\"val\");\n  var bsVal_1 = {\n    TAG: /* PStr */0,\n    _0: /* [] */0\n  };\n  var bsVal = [\n    bsVal_0,\n    bsVal_1\n  ];\n  var s = importDescr.jid_scope;\n  var attrs;\n  if (typeof s === \"number\") {\n    attrs = {\n      hd: bsVal,\n      tl: /* [] */0\n    };\n  } else if (s.TAG === /* Module */0) {\n    var arg = Ast_helper.Str.$$eval;\n    var arg$1 = Ast_helper.Exp.constant;\n    var structure_0 = Curry._3(arg, undefined, undefined, Curry._3(arg$1, undefined, undefined, {\n              TAG: /* Pconst_string */2,\n              _0: s._0,\n              _1: undefined\n            }));\n    var structure = {\n      hd: structure_0,\n      tl: /* [] */0\n    };\n    var genType_0 = $$Location.mknoloc(\"genType.import\");\n    var genType_1 = {\n      TAG: /* PStr */0,\n      _0: structure\n    };\n    var genType = [\n      genType_0,\n      genType_1\n    ];\n    attrs = {\n      hd: genType,\n      tl: /* [] */0\n    };\n  } else {\n    var match = List.map((function (s) {\n            return Ast_helper.Exp.constant(undefined, undefined, {\n                        TAG: /* Pconst_string */2,\n                        _0: s,\n                        _1: 
undefined\n                      });\n          }), Longident.flatten(s._0));\n    var expr;\n    var exit = 0;\n    var exprs;\n    if (match && !match.tl) {\n      expr = match.hd;\n    } else {\n      exprs = match;\n      exit = 1;\n    }\n    if (exit === 1) {\n      var arg$2 = Ast_helper.Exp.tuple;\n      expr = Curry._3(arg$2, undefined, undefined, exprs);\n    }\n    var structureItem = Ast_helper.Str.$$eval(undefined, undefined, expr);\n    var bsScope_0 = $$Location.mknoloc(\"scope\");\n    var bsScope_1 = {\n      TAG: /* PStr */0,\n      _0: {\n        hd: structureItem,\n        tl: /* [] */0\n      }\n    };\n    var bsScope = [\n      bsScope_0,\n      bsScope_1\n    ];\n    attrs = {\n      hd: bsVal,\n      tl: {\n        hd: bsScope,\n        tl: /* [] */0\n      }\n    };\n  }\n  var decl = importDescr.jid_spec;\n  var valueDescrs;\n  if (decl.TAG === /* Default */0) {\n    var decl$1 = decl._0;\n    var prim_0 = decl$1.jld_name;\n    var prim = {\n      hd: prim_0,\n      tl: /* [] */0\n    };\n    var allAttrs = List.map((function (attr) {\n            var id = attr[0];\n            if (id.txt !== \"genType.import\") {\n              return attr;\n            }\n            var match = attr[1];\n            if (match.TAG !== /* PStr */0) {\n              return attr;\n            }\n            var match$1 = match._0;\n            if (!match$1) {\n              return attr;\n            }\n            var match$2 = match$1.hd.pstr_desc;\n            if (match$2.TAG !== /* Pstr_eval */0) {\n              return attr;\n            }\n            if (match$1.tl) {\n              return attr;\n            }\n            var arg = Ast_helper.Exp.constant;\n            var $$default = Curry._3(arg, undefined, undefined, {\n                  TAG: /* Pconst_string */2,\n                  _0: \"default\",\n                  _1: undefined\n                });\n            var arg$1 = Ast_helper.Str.$$eval;\n            var arg$2 = 
Ast_helper.Exp.tuple;\n            var structureItem = Curry._3(arg$1, undefined, undefined, Curry._3(arg$2, undefined, undefined, {\n                      hd: match$2._0,\n                      tl: {\n                        hd: $$default,\n                        tl: /* [] */0\n                      }\n                    }));\n            return [\n                    id,\n                    {\n                      TAG: /* PStr */0,\n                      _0: {\n                        hd: structureItem,\n                        tl: /* [] */0\n                      }\n                    }\n                  ];\n          }), List.concat({\n              hd: attrs,\n              tl: {\n                hd: importDescr.jid_attributes,\n                tl: /* [] */0\n              }\n            }));\n    var arg$3 = Ast_helper.Str.primitive;\n    valueDescrs = {\n      hd: Curry._2(arg$3, undefined, Ast_helper.Val.mk(importDescr.jid_loc, allAttrs, undefined, prim, $$Location.mknoloc(decl$1.jld_alias), decl$1.jld_type)),\n      tl: /* [] */0\n    };\n  } else {\n    valueDescrs = List.map((function (decl) {\n            var prim_0 = decl.jld_name;\n            var prim = {\n              hd: prim_0,\n              tl: /* [] */0\n            };\n            var allAttrs = List.concat({\n                  hd: attrs,\n                  tl: {\n                    hd: decl.jld_attributes,\n                    tl: /* [] */0\n                  }\n                });\n            return Ast_helper.Str.primitive(decl.jld_loc, Ast_helper.Val.mk(importDescr.jid_loc, allAttrs, undefined, prim, $$Location.mknoloc(decl.jld_alias), decl.jld_type));\n          }), decl._0);\n  }\n  var jsFfiAttr_0 = $$Location.mknoloc(\"ns.jsFfi\");\n  var jsFfiAttr_1 = {\n    TAG: /* PStr */0,\n    _0: /* [] */0\n  };\n  var jsFfiAttr = [\n    jsFfiAttr_0,\n    jsFfiAttr_1\n  ];\n  var partial_arg = {\n    hd: jsFfiAttr,\n    tl: /* [] */0\n  };\n  var partial_arg$1 = importDescr.jid_loc;\n  
var arg$4 = function (param, param$1) {\n    return Ast_helper.Incl.mk(partial_arg$1, partial_arg, param, param$1);\n  };\n  return Ast_helper.Str.include_(importDescr.jid_loc, Curry._2(arg$4, undefined, Ast_helper.Mod.structure(importDescr.jid_loc, undefined, valueDescrs)));\n}\n\nexport {\n  decl ,\n  importDescr ,\n  toParsetree ,\n  \n}\n/* Location Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_js_ffi.res",
    "content": "/* AST for js externals */\ntype scope =\n  | Global\n  | Module(string) /* bs.module(\"path\") */\n  | Scope(Longident.t) /* bs.scope(/\"window\", \"location\"/) */\n\ntype label_declaration = {\n  @live jld_attributes: Parsetree.attributes,\n  jld_name: string,\n  jld_alias: string,\n  jld_type: Parsetree.core_type,\n  jld_loc: Location.t,\n}\n\ntype importSpec =\n  | Default(label_declaration)\n  | Spec(list<label_declaration>)\n\ntype import_description = {\n  jid_loc: Location.t,\n  jid_spec: importSpec,\n  jid_scope: scope,\n  jid_attributes: Parsetree.attributes,\n}\n\nlet decl = (~attrs, ~loc, ~name, ~alias, ~typ) => {\n  jld_loc: loc,\n  jld_attributes: attrs,\n  jld_name: name,\n  jld_alias: alias,\n  jld_type: typ,\n}\n\nlet importDescr = (~attrs, ~scope, ~importSpec, ~loc) => {\n  jid_loc: loc,\n  jid_spec: importSpec,\n  jid_scope: scope,\n  jid_attributes: attrs,\n}\n\nlet toParsetree = importDescr => {\n  let bsVal = (Location.mknoloc(\"val\"), Parsetree.PStr(list{}))\n  let attrs = switch importDescr.jid_scope {\n  | Global => list{bsVal}\n  /* @genType.import(\"./MyMath\"),\n   * @genType.import(/\"./MyMath\", \"default\"/) */\n  | Module(s) =>\n    let structure = list{\n      Parsetree.Pconst_string(s, None) |> Ast_helper.Exp.constant |> Ast_helper.Str.eval,\n    }\n    let genType = (Location.mknoloc(\"genType.import\"), Parsetree.PStr(structure))\n    list{genType}\n  | Scope(longident) =>\n    let structureItem = {\n      let expr = switch Longident.flatten(longident) |> List.map(s =>\n        Ast_helper.Exp.constant(Parsetree.Pconst_string(s, None))\n      ) {\n      | list{expr} => expr\n      | list{} as exprs | _ as exprs => exprs |> Ast_helper.Exp.tuple\n      }\n\n      Ast_helper.Str.eval(expr)\n    }\n\n    let bsScope = (Location.mknoloc(\"scope\"), Parsetree.PStr(list{structureItem}))\n    list{bsVal, bsScope}\n  }\n\n  let valueDescrs = switch importDescr.jid_spec {\n  | Default(decl) =>\n    let prim = 
list{decl.jld_name}\n    let allAttrs = List.concat(list{attrs, importDescr.jid_attributes}) |> List.map(attr =>\n      switch attr {\n      | (\n          {Location.txt: \"genType.import\"} as id,\n          Parsetree.PStr(list{{pstr_desc: Parsetree.Pstr_eval(moduleName, _)}}),\n        ) =>\n        let default = Parsetree.Pconst_string(\"default\", None) |> Ast_helper.Exp.constant\n\n        let structureItem = list{moduleName, default} |> Ast_helper.Exp.tuple |> Ast_helper.Str.eval\n\n        (id, Parsetree.PStr(list{structureItem}))\n      | attr => attr\n      }\n    )\n\n    list{\n      Ast_helper.Val.mk(\n        ~loc=importDescr.jid_loc,\n        ~prim,\n        ~attrs=allAttrs,\n        Location.mknoloc(decl.jld_alias),\n        decl.jld_type,\n      ) |> Ast_helper.Str.primitive,\n    }\n  | Spec(decls) => List.map(decl => {\n      let prim = list{decl.jld_name}\n      let allAttrs = List.concat(list{attrs, decl.jld_attributes})\n      Ast_helper.Val.mk(\n        ~loc=importDescr.jid_loc,\n        ~prim,\n        ~attrs=allAttrs,\n        Location.mknoloc(decl.jld_alias),\n        decl.jld_type,\n      ) |> Ast_helper.Str.primitive(~loc=decl.jld_loc)\n    }, decls)\n  }\n\n  let jsFfiAttr = (Location.mknoloc(\"ns.jsFfi\"), Parsetree.PStr(list{}))\n  Ast_helper.Mod.structure(~loc=importDescr.jid_loc, valueDescrs)\n  |> Ast_helper.Incl.mk(~attrs=list{jsFfiAttr}, ~loc=importDescr.jid_loc)\n  |> Ast_helper.Str.include_(~loc=importDescr.jid_loc)\n}\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_minibuffer.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Sys from \"rescript/lib/es6/sys.js\";\nimport * as Bytes from \"rescript/lib/es6/bytes.js\";\nimport * as Caml_bytes from \"rescript/lib/es6/caml_bytes.js\";\n\nfunction create(n) {\n  var n$1 = n < 1 ? 1 : n;\n  var s = Caml_bytes.caml_create_bytes(n$1);\n  return {\n          buffer: s,\n          position: 0,\n          length: n$1\n        };\n}\n\nfunction contents(b) {\n  return Bytes.sub_string(b.buffer, 0, b.position);\n}\n\nfunction resize_internal(b, more) {\n  var len = b.length;\n  var new_len = len;\n  while((b.position + more | 0) > new_len) {\n    new_len = (new_len << 1);\n  };\n  if (new_len > Sys.max_string_length && (b.position + more | 0) <= Sys.max_string_length) {\n    new_len = Sys.max_string_length;\n  }\n  var new_buffer = Caml_bytes.caml_create_bytes(new_len);\n  Bytes.blit(b.buffer, 0, new_buffer, 0, b.position);\n  b.buffer = new_buffer;\n  b.length = new_len;\n  \n}\n\nfunction add_char(b, c) {\n  var pos = b.position;\n  if (pos >= b.length) {\n    resize_internal(b, 1);\n  }\n  b.buffer[pos] = c;\n  b.position = pos + 1 | 0;\n  \n}\n\nfunction add_string(b, s) {\n  var len = s.length;\n  var new_position = b.position + len | 0;\n  if (new_position > b.length) {\n    resize_internal(b, len);\n  }\n  Bytes.blit_string(s, 0, b.buffer, b.position, len);\n  b.position = new_position;\n  \n}\n\nfunction flush_newline(b) {\n  var position = b.position;\n  while(b.buffer[position - 1 | 0] === /* ' ' */32 && position >= 0) {\n    position = position - 1 | 0;\n  };\n  b.position = position;\n  return add_char(b, /* '\\n' */10);\n}\n\nexport {\n  create ,\n  contents ,\n  resize_internal ,\n  add_char ,\n  add_string ,\n  flush_newline ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_minibuffer.res",
    "content": "type t = {\n  mutable buffer: bytes,\n  mutable position: int,\n  mutable length: int,\n}\n\nlet create = n => {\n  let n = if n < 1 {\n    1\n  } else {\n    n\n  }\n  let s = (@doesNotRaise Bytes.create)(n)\n  {buffer: s, position: 0, length: n}\n}\n\nlet contents = b => (@doesNotRaise Bytes.sub_string)(b.buffer, 0, b.position)\n\n/* Can't be called directly, don't add to the interface */\nlet resize_internal = (b, more) => {\n  let len = b.length\n  let new_len = ref(len)\n  while b.position + more > new_len.contents {\n    new_len := 2 * new_len.contents\n  }\n  if new_len.contents > Sys.max_string_length {\n    if b.position + more <= Sys.max_string_length {\n      new_len := Sys.max_string_length\n    }\n  }\n  let new_buffer = (@doesNotRaise Bytes.create)(new_len.contents)\n  /* PR#6148: let's keep using [blit] rather than [unsafe_blit] in\n   this tricky function that is slow anyway. */\n\n  @doesNotRaise\n  Bytes.blit(b.buffer, 0, new_buffer, 0, b.position)\n  b.buffer = new_buffer\n  b.length = new_len.contents\n}\n\nlet add_char = (b, c) => {\n  let pos = b.position\n  if pos >= b.length {\n    resize_internal(b, 1)\n  }\n  Bytes.unsafe_set(b.buffer, pos, c)\n  b.position = pos + 1\n}\n\nlet add_string = (b, s) => {\n  let len = String.length(s)\n  let new_position = b.position + len\n  if new_position > b.length {\n    resize_internal(b, len)\n  }\n\n  @doesNotRaise\n  Bytes.blit_string(s, 0, b.buffer, b.position, len)\n  b.position = new_position\n}\n\n/* adds newline and trims all preceding whitespace */\nlet flush_newline = b => {\n  let position = ref(b.position)\n  while Bytes.unsafe_get(b.buffer, position.contents - 1) == ' ' && position.contents >= 0 {\n    position := position.contents - 1\n  }\n  b.position = position.contents\n  add_char(b, '\\n')\n}\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_parens.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_string from \"rescript/lib/es6/caml_string.js\";\nimport * as Res_parsetree_viewer from \"./res_parsetree_viewer.js\";\n\nfunction expr(expr$1) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr$1);\n  var optBraces = match[0];\n  if (optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  }\n  var match$1 = expr$1.pexp_desc;\n  if (typeof match$1 === \"number\") {\n    return /* Nothing */1;\n  }\n  if (match$1.TAG !== /* Pexp_constraint */19) {\n    return /* Nothing */1;\n  }\n  var tmp = match$1._0.pexp_desc;\n  if (typeof tmp === \"number\") {\n    return /* Parenthesized */0;\n  }\n  if (tmp.TAG !== /* Pexp_pack */32) {\n    return /* Parenthesized */0;\n  }\n  var tmp$1 = match$1._1.ptyp_desc;\n  if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n    return /* Parenthesized */0;\n  } else {\n    return /* Nothing */1;\n  }\n}\n\nfunction callExpr(expr) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr);\n  var optBraces = match[0];\n  if (optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  }\n  var match$1 = expr.pexp_desc;\n  var match$2 = Res_parsetree_viewer.filterParsingAttrs(expr.pexp_attributes);\n  if (match$2 ? 
true : false) {\n    return /* Parenthesized */0;\n  }\n  if (Res_parsetree_viewer.isUnaryExpression(expr) || Res_parsetree_viewer.isBinaryExpression(expr)) {\n    return /* Parenthesized */0;\n  }\n  if (typeof match$1 === \"number\") {\n    return /* Nothing */1;\n  }\n  switch (match$1.TAG | 0) {\n    case /* Pexp_fun */4 :\n        if (Res_parsetree_viewer.isUnderscoreApplySugar(expr)) {\n          return /* Nothing */1;\n        } else {\n          return /* Parenthesized */0;\n        }\n    case /* Pexp_constraint */19 :\n        var tmp = match$1._0.pexp_desc;\n        if (typeof tmp === \"number\") {\n          return /* Parenthesized */0;\n        }\n        if (tmp.TAG !== /* Pexp_pack */32) {\n          return /* Parenthesized */0;\n        }\n        var tmp$1 = match$1._1.ptyp_desc;\n        if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n          return /* Parenthesized */0;\n        } else {\n          return /* Nothing */1;\n        }\n    case /* Pexp_function */3 :\n    case /* Pexp_match */6 :\n    case /* Pexp_try */7 :\n    case /* Pexp_setfield */13 :\n    case /* Pexp_ifthenelse */15 :\n    case /* Pexp_while */17 :\n    case /* Pexp_for */18 :\n    case /* Pexp_assert */27 :\n    case /* Pexp_lazy */28 :\n    case /* Pexp_newtype */31 :\n        return /* Parenthesized */0;\n    default:\n      return /* Nothing */1;\n  }\n}\n\nfunction structureExpr(expr) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr);\n  var optBraces = match[0];\n  if (optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  }\n  var match$1 = expr.pexp_desc;\n  if (Res_parsetree_viewer.hasAttributes(expr.pexp_attributes) && !Res_parsetree_viewer.isJsxExpression(expr)) {\n    return /* Parenthesized */0;\n  }\n  if (typeof match$1 === \"number\") {\n    return /* Nothing */1;\n  }\n  if (match$1.TAG !== /* Pexp_constraint */19) {\n    return /* Nothing */1;\n  }\n  var tmp = 
match$1._0.pexp_desc;\n  if (typeof tmp === \"number\") {\n    return /* Parenthesized */0;\n  }\n  if (tmp.TAG !== /* Pexp_pack */32) {\n    return /* Parenthesized */0;\n  }\n  var tmp$1 = match$1._1.ptyp_desc;\n  if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n    return /* Parenthesized */0;\n  } else {\n    return /* Nothing */1;\n  }\n}\n\nfunction unaryExprOperand(expr) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr);\n  var optBraces = match[0];\n  if (optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  }\n  var match$1 = expr.pexp_desc;\n  var match$2 = Res_parsetree_viewer.filterParsingAttrs(expr.pexp_attributes);\n  if (match$2 ? true : false) {\n    return /* Parenthesized */0;\n  }\n  if (Res_parsetree_viewer.isUnaryExpression(expr) || Res_parsetree_viewer.isBinaryExpression(expr)) {\n    return /* Parenthesized */0;\n  }\n  if (typeof match$1 === \"number\") {\n    return /* Nothing */1;\n  }\n  switch (match$1.TAG | 0) {\n    case /* Pexp_fun */4 :\n        if (Res_parsetree_viewer.isUnderscoreApplySugar(expr)) {\n          return /* Nothing */1;\n        } else {\n          return /* Parenthesized */0;\n        }\n    case /* Pexp_constraint */19 :\n        var tmp = match$1._0.pexp_desc;\n        if (typeof tmp === \"number\") {\n          return /* Parenthesized */0;\n        }\n        if (tmp.TAG !== /* Pexp_pack */32) {\n          return /* Parenthesized */0;\n        }\n        var tmp$1 = match$1._1.ptyp_desc;\n        if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n          return /* Parenthesized */0;\n        } else {\n          return /* Nothing */1;\n        }\n    case /* Pexp_function */3 :\n    case /* Pexp_match */6 :\n    case /* Pexp_try */7 :\n    case /* Pexp_setfield */13 :\n    case /* Pexp_ifthenelse */15 :\n    case /* Pexp_while */17 :\n    case /* Pexp_for */18 :\n    case /* Pexp_assert */27 :\n    case /* 
Pexp_lazy */28 :\n    case /* Pexp_newtype */31 :\n    case /* Pexp_extension */34 :\n        return /* Parenthesized */0;\n    default:\n      return /* Nothing */1;\n  }\n}\n\nfunction binaryExprOperand(isLhs, expr) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr);\n  var optBraces = match[0];\n  if (optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  }\n  var match$1 = expr.pexp_desc;\n  var exit = 0;\n  if (typeof match$1 === \"number\") {\n    exit = 2;\n  } else {\n    switch (match$1.TAG | 0) {\n      case /* Pexp_fun */4 :\n          if (Res_parsetree_viewer.isUnderscoreApplySugar(expr)) {\n            return /* Nothing */1;\n          } else {\n            return /* Parenthesized */0;\n          }\n      case /* Pexp_constraint */19 :\n          var tmp = match$1._0.pexp_desc;\n          if (typeof tmp === \"number\") {\n            return /* Parenthesized */0;\n          }\n          if (tmp.TAG !== /* Pexp_pack */32) {\n            return /* Parenthesized */0;\n          }\n          var tmp$1 = match$1._1.ptyp_desc;\n          if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n            return /* Parenthesized */0;\n          } else {\n            return /* Nothing */1;\n          }\n      case /* Pexp_function */3 :\n      case /* Pexp_newtype */31 :\n          return /* Parenthesized */0;\n      default:\n        exit = 2;\n    }\n  }\n  if (exit === 2) {\n    if (Res_parsetree_viewer.isBinaryExpression(expr)) {\n      return /* Parenthesized */0;\n    }\n    if (Res_parsetree_viewer.isTernaryExpr(expr)) {\n      return /* Parenthesized */0;\n    }\n    if (typeof match$1 !== \"number\") {\n      switch (match$1.TAG | 0) {\n        case /* Pexp_assert */27 :\n        case /* Pexp_lazy */28 :\n            if (isLhs) {\n              return /* Parenthesized */0;\n            }\n            break;\n        default:\n          \n      }\n    }\n    \n  }\n  if 
(Res_parsetree_viewer.hasPrintableAttributes(expr.pexp_attributes)) {\n    return /* Parenthesized */0;\n  } else {\n    return /* Nothing */1;\n  }\n}\n\nfunction subBinaryExprOperand(parentOperator, childOperator) {\n  var precParent = Res_parsetree_viewer.operatorPrecedence(parentOperator);\n  var precChild = Res_parsetree_viewer.operatorPrecedence(childOperator);\n  if (precParent > precChild || precParent === precChild && !Res_parsetree_viewer.flattenableOperators(parentOperator, childOperator)) {\n    return true;\n  } else if (parentOperator === \"||\") {\n    return childOperator === \"&&\";\n  } else {\n    return false;\n  }\n}\n\nfunction rhsBinaryExprOperand(parentOperator, rhs) {\n  var match = rhs.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Pexp_apply */5) {\n    return false;\n  }\n  var match$1 = match._0;\n  var match$2 = match$1.pexp_desc;\n  if (typeof match$2 === \"number\") {\n    return false;\n  }\n  if (match$2.TAG !== /* Pexp_ident */0) {\n    return false;\n  }\n  var match$3 = match$2._0;\n  var operator = match$3.txt;\n  switch (operator.TAG | 0) {\n    case /* Lident */0 :\n        if (match$1.pexp_attributes) {\n          return false;\n        }\n        var match$4 = match._1;\n        if (!match$4) {\n          return false;\n        }\n        var match$5 = match$4.tl;\n        if (!match$5) {\n          return false;\n        }\n        if (match$5.tl) {\n          return false;\n        }\n        var operator$1 = operator._0;\n        if (!(Res_parsetree_viewer.isBinaryOperator(operator$1) && !(match$3.loc.loc_ghost && operator$1 === \"^\"))) {\n          return false;\n        }\n        var precParent = Res_parsetree_viewer.operatorPrecedence(parentOperator);\n        var precChild = Res_parsetree_viewer.operatorPrecedence(operator$1);\n        return precParent === precChild;\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        return false;\n    \n  }\n}\n\nfunction 
flattenOperandRhs(parentOperator, rhs) {\n  var match = rhs.pexp_desc;\n  if (typeof match !== \"number\") {\n    switch (match.TAG | 0) {\n      case /* Pexp_fun */4 :\n          if (Res_parsetree_viewer.isUnderscoreApplySugar(rhs)) {\n            return false;\n          } else {\n            return true;\n          }\n      case /* Pexp_apply */5 :\n          var match$1 = match._0.pexp_desc;\n          if (typeof match$1 !== \"number\" && match$1.TAG === /* Pexp_ident */0) {\n            var match$2 = match$1._0;\n            var operator = match$2.txt;\n            switch (operator.TAG | 0) {\n              case /* Lident */0 :\n                  var match$3 = match._1;\n                  if (match$3) {\n                    var match$4 = match$3.tl;\n                    if (match$4 && !match$4.tl) {\n                      var operator$1 = operator._0;\n                      if (Res_parsetree_viewer.isBinaryOperator(operator$1) && !(match$2.loc.loc_ghost && operator$1 === \"^\")) {\n                        var precParent = Res_parsetree_viewer.operatorPrecedence(parentOperator);\n                        var precChild = Res_parsetree_viewer.operatorPrecedence(operator$1);\n                        if (precParent >= precChild) {\n                          return true;\n                        } else {\n                          return rhs.pexp_attributes !== /* [] */0;\n                        }\n                      }\n                      \n                    }\n                    \n                  }\n                  break;\n              case /* Ldot */1 :\n              case /* Lapply */2 :\n                  break;\n              \n            }\n          }\n          break;\n      case /* Pexp_constraint */19 :\n          var tmp = match._0.pexp_desc;\n          if (typeof tmp === \"number\") {\n            return true;\n          }\n          if (tmp.TAG !== /* Pexp_pack */32) {\n            return true;\n          }\n          var tmp$1 = 
match._1.ptyp_desc;\n          if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n            return true;\n          } else {\n            return false;\n          }\n      case /* Pexp_setfield */13 :\n      case /* Pexp_newtype */31 :\n          return true;\n      default:\n        \n    }\n  }\n  if (Res_parsetree_viewer.isTernaryExpr(rhs)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction lazyOrAssertExprRhs(expr) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr);\n  var optBraces = match[0];\n  if (optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  }\n  var match$1 = expr.pexp_desc;\n  var match$2 = Res_parsetree_viewer.filterParsingAttrs(expr.pexp_attributes);\n  if (match$2 ? true : false) {\n    return /* Parenthesized */0;\n  }\n  if (Res_parsetree_viewer.isBinaryExpression(expr)) {\n    return /* Parenthesized */0;\n  }\n  if (typeof match$1 === \"number\") {\n    return /* Nothing */1;\n  }\n  switch (match$1.TAG | 0) {\n    case /* Pexp_fun */4 :\n        if (Res_parsetree_viewer.isUnderscoreApplySugar(expr)) {\n          return /* Nothing */1;\n        } else {\n          return /* Parenthesized */0;\n        }\n    case /* Pexp_constraint */19 :\n        var tmp = match$1._0.pexp_desc;\n        if (typeof tmp === \"number\") {\n          return /* Parenthesized */0;\n        }\n        if (tmp.TAG !== /* Pexp_pack */32) {\n          return /* Parenthesized */0;\n        }\n        var tmp$1 = match$1._1.ptyp_desc;\n        if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n          return /* Parenthesized */0;\n        } else {\n          return /* Nothing */1;\n        }\n    case /* Pexp_function */3 :\n    case /* Pexp_match */6 :\n    case /* Pexp_try */7 :\n    case /* Pexp_setfield */13 :\n    case /* Pexp_ifthenelse */15 :\n    case /* Pexp_while */17 :\n    case /* Pexp_for */18 :\n    case /* Pexp_assert */27 
:\n    case /* Pexp_lazy */28 :\n    case /* Pexp_newtype */31 :\n        return /* Parenthesized */0;\n    default:\n      return /* Nothing */1;\n  }\n}\n\nfunction isNegativeConstant(constant) {\n  var isNeg = function (txt) {\n    var len = txt.length;\n    if (len > 0) {\n      return Caml_string.get(txt, 0) === /* '-' */45;\n    } else {\n      return false;\n    }\n  };\n  switch (constant.TAG | 0) {\n    case /* Pconst_char */1 :\n    case /* Pconst_string */2 :\n        return false;\n    case /* Pconst_integer */0 :\n    case /* Pconst_float */3 :\n        if (isNeg(constant._0)) {\n          return true;\n        } else {\n          return false;\n        }\n    \n  }\n}\n\nfunction fieldExpr(expr) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr);\n  var optBraces = match[0];\n  if (optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  }\n  var c = expr.pexp_desc;\n  var match$1 = Res_parsetree_viewer.filterParsingAttrs(expr.pexp_attributes);\n  if (match$1 ? 
true : false) {\n    return /* Parenthesized */0;\n  }\n  if (Res_parsetree_viewer.isBinaryExpression(expr) || Res_parsetree_viewer.isUnaryExpression(expr)) {\n    return /* Parenthesized */0;\n  }\n  if (typeof c === \"number\") {\n    return /* Nothing */1;\n  }\n  switch (c.TAG | 0) {\n    case /* Pexp_constant */1 :\n        if (isNegativeConstant(c._0)) {\n          return /* Parenthesized */0;\n        } else {\n          return /* Nothing */1;\n        }\n    case /* Pexp_fun */4 :\n        if (Res_parsetree_viewer.isUnderscoreApplySugar(expr)) {\n          return /* Nothing */1;\n        } else {\n          return /* Parenthesized */0;\n        }\n    case /* Pexp_constraint */19 :\n        var tmp = c._0.pexp_desc;\n        if (typeof tmp === \"number\") {\n          return /* Parenthesized */0;\n        }\n        if (tmp.TAG !== /* Pexp_pack */32) {\n          return /* Parenthesized */0;\n        }\n        var tmp$1 = c._1.ptyp_desc;\n        if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n          return /* Parenthesized */0;\n        } else {\n          return /* Nothing */1;\n        }\n    case /* Pexp_function */3 :\n    case /* Pexp_match */6 :\n    case /* Pexp_try */7 :\n    case /* Pexp_setfield */13 :\n    case /* Pexp_ifthenelse */15 :\n    case /* Pexp_while */17 :\n    case /* Pexp_for */18 :\n    case /* Pexp_assert */27 :\n    case /* Pexp_lazy */28 :\n    case /* Pexp_newtype */31 :\n    case /* Pexp_extension */34 :\n        return /* Parenthesized */0;\n    default:\n      return /* Nothing */1;\n  }\n}\n\nfunction setFieldExprRhs(expr) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr);\n  var optBraces = match[0];\n  if (optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  }\n  var match$1 = expr.pexp_desc;\n  if (typeof match$1 === \"number\") {\n    return /* Nothing */1;\n  }\n  if (match$1.TAG !== /* Pexp_constraint */19) {\n    return /* 
Nothing */1;\n  }\n  var tmp = match$1._0.pexp_desc;\n  if (typeof tmp === \"number\") {\n    return /* Parenthesized */0;\n  }\n  if (tmp.TAG !== /* Pexp_pack */32) {\n    return /* Parenthesized */0;\n  }\n  var tmp$1 = match$1._1.ptyp_desc;\n  if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n    return /* Parenthesized */0;\n  } else {\n    return /* Nothing */1;\n  }\n}\n\nfunction ternaryOperand(expr) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr);\n  var optBraces = match[0];\n  if (optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  }\n  var match$1 = expr.pexp_desc;\n  if (typeof match$1 === \"number\") {\n    return /* Nothing */1;\n  }\n  switch (match$1.TAG | 0) {\n    case /* Pexp_constraint */19 :\n        var tmp = match$1._0.pexp_desc;\n        if (typeof tmp === \"number\") {\n          return /* Parenthesized */0;\n        }\n        if (tmp.TAG !== /* Pexp_pack */32) {\n          return /* Parenthesized */0;\n        }\n        var tmp$1 = match$1._1.ptyp_desc;\n        if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n          return /* Parenthesized */0;\n        } else {\n          return /* Nothing */1;\n        }\n    case /* Pexp_fun */4 :\n    case /* Pexp_newtype */31 :\n        break;\n    default:\n      return /* Nothing */1;\n  }\n  var match$2 = Res_parsetree_viewer.funExpr(expr);\n  var match$3 = match$2[2].pexp_desc;\n  if (typeof match$3 === \"number\" || match$3.TAG !== /* Pexp_constraint */19) {\n    return /* Nothing */1;\n  } else {\n    return /* Parenthesized */0;\n  }\n}\n\nfunction startsWithMinus(txt) {\n  var len = txt.length;\n  if (len === 0) {\n    return false;\n  }\n  var s = Caml_string.get(txt, 0);\n  return s === /* '-' */45;\n}\n\nfunction jsxPropExpr(expr) {\n  var match = expr.pexp_desc;\n  var exit = 0;\n  if (typeof match === \"number\") {\n    exit = 1;\n  } else {\n    switch (match.TAG | 0) 
{\n      case /* Pexp_let */2 :\n      case /* Pexp_sequence */16 :\n      case /* Pexp_letmodule */25 :\n      case /* Pexp_letexception */26 :\n      case /* Pexp_open */33 :\n          return /* Nothing */1;\n      default:\n        exit = 1;\n    }\n  }\n  if (exit === 1) {\n    var match$1 = Res_parsetree_viewer.processBracesAttr(expr);\n    var optBraces = match$1[0];\n    if (optBraces !== undefined) {\n      return /* Braced */{\n              _0: optBraces[0].loc\n            };\n    }\n    var match$2 = expr.pexp_desc;\n    var exit$1 = 0;\n    if (typeof match$2 === \"number\") {\n      return /* Parenthesized */0;\n    }\n    switch (match$2.TAG | 0) {\n      case /* Pexp_constant */1 :\n          var match$3 = match$2._0;\n          var exit$2 = 0;\n          switch (match$3.TAG | 0) {\n            case /* Pconst_char */1 :\n            case /* Pconst_string */2 :\n                exit$1 = 2;\n                break;\n            case /* Pconst_integer */0 :\n            case /* Pconst_float */3 :\n                exit$2 = 3;\n                break;\n            \n          }\n          if (exit$2 === 3) {\n            if (expr.pexp_attributes) {\n              exit$1 = 2;\n            } else {\n              if (startsWithMinus(match$3._0)) {\n                return /* Parenthesized */0;\n              }\n              exit$1 = 2;\n            }\n          }\n          break;\n      case /* Pexp_constraint */19 :\n          var tmp = match$2._0.pexp_desc;\n          if (typeof tmp === \"number\") {\n            return /* Parenthesized */0;\n          }\n          if (tmp.TAG !== /* Pexp_pack */32) {\n            return /* Parenthesized */0;\n          }\n          var tmp$1 = match$2._1.ptyp_desc;\n          if (typeof tmp$1 === \"number\" || !(tmp$1.TAG === /* Ptyp_package */9 && !expr.pexp_attributes)) {\n            return /* Parenthesized */0;\n          } else {\n            return /* Nothing */1;\n          }\n      case /* Pexp_ident */0 :\n     
 case /* Pexp_let */2 :\n      case /* Pexp_tuple */8 :\n      case /* Pexp_construct */9 :\n      case /* Pexp_variant */10 :\n      case /* Pexp_record */11 :\n      case /* Pexp_field */12 :\n      case /* Pexp_array */14 :\n      case /* Pexp_sequence */16 :\n      case /* Pexp_letmodule */25 :\n      case /* Pexp_letexception */26 :\n      case /* Pexp_pack */32 :\n      case /* Pexp_open */33 :\n      case /* Pexp_extension */34 :\n          exit$1 = 2;\n          break;\n      default:\n        return /* Parenthesized */0;\n    }\n    if (exit$1 === 2) {\n      if (expr.pexp_attributes) {\n        return /* Parenthesized */0;\n      } else {\n        return /* Nothing */1;\n      }\n    }\n    \n  }\n  \n}\n\nfunction jsxChildExpr(expr) {\n  var match = expr.pexp_desc;\n  var exit = 0;\n  if (typeof match === \"number\") {\n    exit = 1;\n  } else {\n    switch (match.TAG | 0) {\n      case /* Pexp_let */2 :\n      case /* Pexp_sequence */16 :\n      case /* Pexp_letmodule */25 :\n      case /* Pexp_letexception */26 :\n      case /* Pexp_open */33 :\n          return /* Nothing */1;\n      default:\n        exit = 1;\n    }\n  }\n  if (exit === 1) {\n    var match$1 = Res_parsetree_viewer.processBracesAttr(expr);\n    var optBraces = match$1[0];\n    if (optBraces !== undefined) {\n      return /* Braced */{\n              _0: optBraces[0].loc\n            };\n    }\n    var match$2 = expr.pexp_desc;\n    var exit$1 = 0;\n    var exit$2 = 0;\n    if (typeof match$2 === \"number\") {\n      exit$1 = 2;\n    } else {\n      switch (match$2.TAG | 0) {\n        case /* Pexp_constant */1 :\n            var match$3 = match$2._0;\n            var exit$3 = 0;\n            switch (match$3.TAG | 0) {\n              case /* Pconst_char */1 :\n              case /* Pconst_string */2 :\n                  exit$2 = 3;\n                  break;\n              case /* Pconst_integer */0 :\n              case /* Pconst_float */3 :\n                  exit$3 = 4;\n             
     break;\n              \n            }\n            if (exit$3 === 4) {\n              if (expr.pexp_attributes) {\n                exit$2 = 3;\n              } else {\n                if (startsWithMinus(match$3._0)) {\n                  return /* Parenthesized */0;\n                }\n                exit$2 = 3;\n              }\n            }\n            break;\n        case /* Pexp_constraint */19 :\n            var tmp = match$2._0.pexp_desc;\n            if (typeof tmp === \"number\" || tmp.TAG !== /* Pexp_pack */32) {\n              exit$1 = 2;\n            } else {\n              var tmp$1 = match$2._1.ptyp_desc;\n              if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n                exit$1 = 2;\n              } else {\n                if (!expr.pexp_attributes) {\n                  return /* Nothing */1;\n                }\n                exit$1 = 2;\n              }\n            }\n            break;\n        case /* Pexp_ident */0 :\n        case /* Pexp_let */2 :\n        case /* Pexp_construct */9 :\n        case /* Pexp_variant */10 :\n        case /* Pexp_record */11 :\n        case /* Pexp_field */12 :\n        case /* Pexp_array */14 :\n        case /* Pexp_sequence */16 :\n        case /* Pexp_letmodule */25 :\n        case /* Pexp_letexception */26 :\n        case /* Pexp_pack */32 :\n        case /* Pexp_open */33 :\n        case /* Pexp_extension */34 :\n            exit$2 = 3;\n            break;\n        default:\n          exit$1 = 2;\n      }\n    }\n    if (exit$2 === 3) {\n      if (!expr.pexp_attributes) {\n        return /* Nothing */1;\n      }\n      exit$1 = 2;\n    }\n    if (exit$1 === 2) {\n      if (Res_parsetree_viewer.isJsxExpression(expr)) {\n        return /* Nothing */1;\n      } else {\n        return /* Parenthesized */0;\n      }\n    }\n    \n  }\n  \n}\n\nfunction binaryExpr(expr) {\n  var match = Res_parsetree_viewer.processBracesAttr(expr);\n  var optBraces = match[0];\n  if 
(optBraces !== undefined) {\n    return /* Braced */{\n            _0: optBraces[0].loc\n          };\n  } else if (expr.pexp_attributes && Res_parsetree_viewer.isBinaryExpression(expr)) {\n    return /* Parenthesized */0;\n  } else {\n    return /* Nothing */1;\n  }\n}\n\nfunction modTypeFunctorReturn(modType) {\n  if (modType.pmty_desc.TAG === /* Pmty_with */3) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction modTypeWithOperand(modType) {\n  switch (modType.pmty_desc.TAG | 0) {\n    case /* Pmty_functor */2 :\n    case /* Pmty_with */3 :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction modExprFunctorConstraint(modType) {\n  switch (modType.pmty_desc.TAG | 0) {\n    case /* Pmty_functor */2 :\n    case /* Pmty_with */3 :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction bracedExpr(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Pexp_constraint */19) {\n    return false;\n  }\n  var tmp = match._0.pexp_desc;\n  if (typeof tmp === \"number\") {\n    return true;\n  }\n  if (tmp.TAG !== /* Pexp_pack */32) {\n    return true;\n  }\n  var tmp$1 = match._1.ptyp_desc;\n  if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction includeModExpr(modExpr) {\n  var match = modExpr.pmod_desc;\n  if (match.TAG === /* Pmod_constraint */4) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction arrowReturnTypExpr(typExpr) {\n  var match = typExpr.ptyp_desc;\n  if (typeof match === \"number\" || match.TAG !== /* Ptyp_arrow */1) {\n    return false;\n  } else {\n    return true;\n  }\n}\n\nfunction patternRecordRowRhs(pattern) {\n  var match = pattern.ppat_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Ppat_constraint */10) {\n    return false;\n  }\n  var tmp = match._0.ppat_desc;\n  if 
(typeof tmp === \"number\") {\n    return true;\n  }\n  if (tmp.TAG !== /* Ppat_unpack */13) {\n    return true;\n  }\n  var tmp$1 = match._1.ptyp_desc;\n  if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nvar ParsetreeViewer;\n\nexport {\n  ParsetreeViewer ,\n  expr ,\n  callExpr ,\n  structureExpr ,\n  unaryExprOperand ,\n  binaryExprOperand ,\n  subBinaryExprOperand ,\n  rhsBinaryExprOperand ,\n  flattenOperandRhs ,\n  lazyOrAssertExprRhs ,\n  isNegativeConstant ,\n  fieldExpr ,\n  setFieldExprRhs ,\n  ternaryOperand ,\n  startsWithMinus ,\n  jsxPropExpr ,\n  jsxChildExpr ,\n  binaryExpr ,\n  modTypeFunctorReturn ,\n  modTypeWithOperand ,\n  modExprFunctorConstraint ,\n  bracedExpr ,\n  includeModExpr ,\n  arrowReturnTypExpr ,\n  patternRecordRowRhs ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_parens.res",
    "content": "module ParsetreeViewer = Res_parsetree_viewer\ntype kind = Parenthesized | Braced(Location.t) | Nothing\n\nlet expr = expr => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | _ =>\n    switch expr {\n    | {\n        Parsetree.pexp_desc: Pexp_constraint(\n          {pexp_desc: Pexp_pack(_)},\n          {ptyp_desc: Ptyp_package(_)},\n        ),\n      } =>\n      Nothing\n    | {pexp_desc: Pexp_constraint(_)} => Parenthesized\n    | _ => Nothing\n    }\n  }\n}\n\nlet callExpr = expr => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | _ =>\n    switch expr {\n    | {Parsetree.pexp_attributes: attrs}\n      if switch ParsetreeViewer.filterParsingAttrs(attrs) {\n      | list{_, ..._} => true\n      | list{} => false\n      } =>\n      Parenthesized\n    | _ if ParsetreeViewer.isUnaryExpression(expr) || ParsetreeViewer.isBinaryExpression(expr) =>\n      Parenthesized\n    | {\n        Parsetree.pexp_desc: Pexp_constraint(\n          {pexp_desc: Pexp_pack(_)},\n          {ptyp_desc: Ptyp_package(_)},\n        ),\n      } =>\n      Nothing\n    | {pexp_desc: Pexp_fun(_)} if ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing\n    | {\n        pexp_desc:\n          Pexp_lazy(_)\n          | Pexp_assert(_)\n          | Pexp_fun(_)\n          | Pexp_newtype(_)\n          | Pexp_function(_)\n          | Pexp_constraint(_)\n          | Pexp_setfield(_)\n          | Pexp_match(_)\n          | Pexp_try(_)\n          | Pexp_while(_)\n          | Pexp_for(_)\n          | Pexp_ifthenelse(_),\n      } =>\n      Parenthesized\n    | _ => Nothing\n    }\n  }\n}\n\nlet structureExpr = expr => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | None =>\n    
switch expr {\n    | _\n      if ParsetreeViewer.hasAttributes(expr.pexp_attributes) &&\n      !ParsetreeViewer.isJsxExpression(expr) =>\n      Parenthesized\n    | {\n        Parsetree.pexp_desc: Pexp_constraint(\n          {pexp_desc: Pexp_pack(_)},\n          {ptyp_desc: Ptyp_package(_)},\n        ),\n      } =>\n      Nothing\n    | {pexp_desc: Pexp_constraint(_)} => Parenthesized\n    | _ => Nothing\n    }\n  }\n}\n\nlet unaryExprOperand = expr => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | None =>\n    switch expr {\n    | {Parsetree.pexp_attributes: attrs}\n      if switch ParsetreeViewer.filterParsingAttrs(attrs) {\n      | list{_, ..._} => true\n      | list{} => false\n      } =>\n      Parenthesized\n    | expr if ParsetreeViewer.isUnaryExpression(expr) || ParsetreeViewer.isBinaryExpression(expr) =>\n      Parenthesized\n    | {pexp_desc: Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)})} =>\n      Nothing\n    | {pexp_desc: Pexp_fun(_)} if ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing\n    | {\n        pexp_desc:\n          Pexp_lazy(_)\n          | Pexp_assert(_)\n          | Pexp_fun(_)\n          | Pexp_newtype(_)\n          | Pexp_function(_)\n          | Pexp_constraint(_)\n          | Pexp_setfield(_)\n          | Pexp_extension(_)\n          | Pexp_match(_)\n          | Pexp_try(_)\n          | Pexp_while(_)\n          | Pexp_for(_)\n          | Pexp_ifthenelse(_),\n      } =>\n      Parenthesized\n    | _ => Nothing\n    }\n  }\n}\n\nlet binaryExprOperand = (~isLhs, expr) => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | None =>\n    switch expr {\n    | {\n        Parsetree.pexp_desc: Pexp_constraint(\n          {pexp_desc: Pexp_pack(_)},\n          {ptyp_desc: Ptyp_package(_)},\n        ),\n      } 
=>\n      Nothing\n    | {pexp_desc: Pexp_fun(_)} if ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing\n    | {pexp_desc: Pexp_constraint(_) | Pexp_fun(_) | Pexp_function(_) | Pexp_newtype(_)} =>\n      Parenthesized\n    | expr if ParsetreeViewer.isBinaryExpression(expr) => Parenthesized\n    | expr if ParsetreeViewer.isTernaryExpr(expr) => Parenthesized\n    | {\n        pexp_desc:\n          Pexp_lazy(_)\n          | Pexp_assert(_),\n      } if isLhs =>\n      Parenthesized\n    | {Parsetree.pexp_attributes: attrs} =>\n      if ParsetreeViewer.hasPrintableAttributes(attrs) {\n        Parenthesized\n      } else {\n        Nothing\n      }\n    }\n  }\n}\n\nlet subBinaryExprOperand = (parentOperator, childOperator) => {\n  let precParent = ParsetreeViewer.operatorPrecedence(parentOperator)\n  let precChild = ParsetreeViewer.operatorPrecedence(childOperator)\n  precParent > precChild ||\n    ((precParent === precChild &&\n      !ParsetreeViewer.flattenableOperators(parentOperator, childOperator)) ||\n    /* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… */\n    parentOperator == \"||\" && childOperator == \"&&\")\n}\n\nlet rhsBinaryExprOperand = (parentOperator, rhs) =>\n  switch rhs.Parsetree.pexp_desc {\n  | Parsetree.Pexp_apply(\n      {\n        pexp_attributes: list{},\n        pexp_desc: Pexp_ident({txt: Longident.Lident(operator), loc: operatorLoc}),\n      },\n      list{(_, _left), (_, _right)},\n    )\n    if ParsetreeViewer.isBinaryOperator(operator) && !(operatorLoc.loc_ghost && operator == \"^\") =>\n    let precParent = ParsetreeViewer.operatorPrecedence(parentOperator)\n    let precChild = ParsetreeViewer.operatorPrecedence(operator)\n    precParent === precChild\n  | _ => false\n  }\n\nlet flattenOperandRhs = (parentOperator, rhs) =>\n  switch rhs.Parsetree.pexp_desc {\n  | Parsetree.Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident(operator), loc: operatorLoc})},\n      list{(_, _left), 
(_, _right)},\n    )\n    if ParsetreeViewer.isBinaryOperator(operator) && !(operatorLoc.loc_ghost && operator == \"^\") =>\n    let precParent = ParsetreeViewer.operatorPrecedence(parentOperator)\n    let precChild = ParsetreeViewer.operatorPrecedence(operator)\n    precParent >= precChild || rhs.pexp_attributes != list{}\n  | Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}) => false\n  | Pexp_fun(_) if ParsetreeViewer.isUnderscoreApplySugar(rhs) => false\n  | Pexp_fun(_)\n  | Pexp_newtype(_)\n  | Pexp_setfield(_)\n  | Pexp_constraint(_) => true\n  | _ if ParsetreeViewer.isTernaryExpr(rhs) => true\n  | _ => false\n  }\n\nlet lazyOrAssertExprRhs = expr => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | None =>\n    switch expr {\n    | {Parsetree.pexp_attributes: attrs}\n      if switch ParsetreeViewer.filterParsingAttrs(attrs) {\n      | list{_, ..._} => true\n      | list{} => false\n      } =>\n      Parenthesized\n    | expr if ParsetreeViewer.isBinaryExpression(expr) => Parenthesized\n    | {pexp_desc: Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)})} =>\n      Nothing\n    | {pexp_desc: Pexp_fun(_)} if ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing\n    | {\n        pexp_desc:\n          Pexp_lazy(_)\n          | Pexp_assert(_)\n          | Pexp_fun(_)\n          | Pexp_newtype(_)\n          | Pexp_function(_)\n          | Pexp_constraint(_)\n          | Pexp_setfield(_)\n          | Pexp_match(_)\n          | Pexp_try(_)\n          | Pexp_while(_)\n          | Pexp_for(_)\n          | Pexp_ifthenelse(_),\n      } =>\n      Parenthesized\n    | _ => Nothing\n    }\n  }\n}\n\nlet isNegativeConstant = constant => {\n  let isNeg = txt => {\n    let len = String.length(txt)\n    len > 0 && (@doesNotRaise String.get)(txt, 0) == '-'\n  }\n\n  switch constant {\n  | Parsetree.Pconst_integer(i, _) | 
Pconst_float(i, _) if isNeg(i) => true\n  | _ => false\n  }\n}\n\nlet fieldExpr = expr => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | None =>\n    switch expr {\n    | {Parsetree.pexp_attributes: attrs}\n      if switch ParsetreeViewer.filterParsingAttrs(attrs) {\n      | list{_, ..._} => true\n      | list{} => false\n      } =>\n      Parenthesized\n    | expr if ParsetreeViewer.isBinaryExpression(expr) || ParsetreeViewer.isUnaryExpression(expr) =>\n      Parenthesized\n    | {pexp_desc: Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)})} =>\n      Nothing\n    | {pexp_desc: Pexp_constant(c)} if isNegativeConstant(c) => Parenthesized\n    | {pexp_desc: Pexp_fun(_)} if ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing\n    | {\n        pexp_desc:\n          Pexp_lazy(_)\n          | Pexp_assert(_)\n          | Pexp_extension(_)\n          | Pexp_fun(_)\n          | Pexp_newtype(_)\n          | Pexp_function(_)\n          | Pexp_constraint(_)\n          | Pexp_setfield(_)\n          | Pexp_match(_)\n          | Pexp_try(_)\n          | Pexp_while(_)\n          | Pexp_for(_)\n          | Pexp_ifthenelse(_),\n      } =>\n      Parenthesized\n    | _ => Nothing\n    }\n  }\n}\n\nlet setFieldExprRhs = expr => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | None =>\n    switch expr {\n    | {\n        Parsetree.pexp_desc: Pexp_constraint(\n          {pexp_desc: Pexp_pack(_)},\n          {ptyp_desc: Ptyp_package(_)},\n        ),\n      } =>\n      Nothing\n    | {pexp_desc: Pexp_constraint(_)} => Parenthesized\n    | _ => Nothing\n    }\n  }\n}\n\nlet ternaryOperand = expr => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | None 
=>\n    switch expr {\n    | {\n        Parsetree.pexp_desc: Pexp_constraint(\n          {pexp_desc: Pexp_pack(_)},\n          {ptyp_desc: Ptyp_package(_)},\n        ),\n      } =>\n      Nothing\n    | {pexp_desc: Pexp_constraint(_)} => Parenthesized\n    | {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)} =>\n      let (_attrsOnArrow, _parameters, returnExpr) = ParsetreeViewer.funExpr(expr)\n      switch returnExpr.pexp_desc {\n      | Pexp_constraint(_) => Parenthesized\n      | _ => Nothing\n      }\n    | _ => Nothing\n    }\n  }\n}\n\nlet startsWithMinus = txt => {\n  let len = String.length(txt)\n  if len === 0 {\n    false\n  } else {\n    let s = (@doesNotRaise String.get)(txt, 0)\n    s == '-'\n  }\n}\n\nlet jsxPropExpr = expr =>\n  switch expr.Parsetree.pexp_desc {\n  | Parsetree.Pexp_let(_)\n  | Pexp_sequence(_)\n  | Pexp_letexception(_)\n  | Pexp_letmodule(_)\n  | Pexp_open(_) =>\n    Nothing\n  | _ =>\n    let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n    switch optBraces {\n    | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n    | None =>\n      switch expr {\n      | {\n          Parsetree.pexp_desc: Pexp_constant(Pconst_integer(x, _) | Pconst_float(x, _)),\n          pexp_attributes: list{},\n        } if startsWithMinus(x) =>\n        Parenthesized\n      | {\n          Parsetree.pexp_desc:\n            Pexp_ident(_)\n            | Pexp_constant(_)\n            | Pexp_field(_)\n            | Pexp_construct(_)\n            | Pexp_variant(_)\n            | Pexp_array(_)\n            | Pexp_pack(_)\n            | Pexp_record(_)\n            | Pexp_extension(_)\n            | Pexp_letmodule(_)\n            | Pexp_letexception(_)\n            | Pexp_open(_)\n            | Pexp_sequence(_)\n            | Pexp_let(_)\n            | Pexp_tuple(_),\n          pexp_attributes: list{},\n        } =>\n        Nothing\n      | {\n          Parsetree.pexp_desc: Pexp_constraint(\n            {pexp_desc: Pexp_pack(_)},\n            {ptyp_desc: 
Ptyp_package(_)},\n          ),\n          pexp_attributes: list{},\n        } =>\n        Nothing\n      | _ => Parenthesized\n      }\n    }\n  }\n\nlet jsxChildExpr = expr =>\n  switch expr.Parsetree.pexp_desc {\n  | Parsetree.Pexp_let(_)\n  | Pexp_sequence(_)\n  | Pexp_letexception(_)\n  | Pexp_letmodule(_)\n  | Pexp_open(_) =>\n    Nothing\n  | _ =>\n    let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n    switch optBraces {\n    | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n    | _ =>\n      switch expr {\n      | {\n          Parsetree.pexp_desc: Pexp_constant(Pconst_integer(x, _) | Pconst_float(x, _)),\n          pexp_attributes: list{},\n        } if startsWithMinus(x) =>\n        Parenthesized\n      | {\n          Parsetree.pexp_desc:\n            Pexp_ident(_)\n            | Pexp_constant(_)\n            | Pexp_field(_)\n            | Pexp_construct(_)\n            | Pexp_variant(_)\n            | Pexp_array(_)\n            | Pexp_pack(_)\n            | Pexp_record(_)\n            | Pexp_extension(_)\n            | Pexp_letmodule(_)\n            | Pexp_letexception(_)\n            | Pexp_open(_)\n            | Pexp_sequence(_)\n            | Pexp_let(_),\n          pexp_attributes: list{},\n        } =>\n        Nothing\n      | {\n          Parsetree.pexp_desc: Pexp_constraint(\n            {pexp_desc: Pexp_pack(_)},\n            {ptyp_desc: Ptyp_package(_)},\n          ),\n          pexp_attributes: list{},\n        } =>\n        Nothing\n      | expr if ParsetreeViewer.isJsxExpression(expr) => Nothing\n      | _ => Parenthesized\n      }\n    }\n  }\n\nlet binaryExpr = expr => {\n  let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)\n  switch optBraces {\n  | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)\n  | None =>\n    switch expr {\n    | {Parsetree.pexp_attributes: list{_, ..._}} as expr\n      if ParsetreeViewer.isBinaryExpression(expr) =>\n      Parenthesized\n    | _ => Nothing\n    }\n  
}\n}\n\nlet modTypeFunctorReturn = modType =>\n  switch modType {\n  | {Parsetree.pmty_desc: Pmty_with(_)} => true\n  | _ => false\n  }\n\n/* Add parens for readability:\n       module type Functor = SetLike => Set with type t = A.t\n     This is actually:\n       module type Functor = (SetLike => Set) with type t = A.t\n */\nlet modTypeWithOperand = modType =>\n  switch modType {\n  | {Parsetree.pmty_desc: Pmty_functor(_) | Pmty_with(_)} => true\n  | _ => false\n  }\n\nlet modExprFunctorConstraint = modType =>\n  switch modType {\n  | {Parsetree.pmty_desc: Pmty_functor(_) | Pmty_with(_)} => true\n  | _ => false\n  }\n\nlet bracedExpr = expr =>\n  switch expr.Parsetree.pexp_desc {\n  | Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}) => false\n  | Pexp_constraint(_) => true\n  | _ => false\n  }\n\nlet includeModExpr = modExpr =>\n  switch modExpr.Parsetree.pmod_desc {\n  | Parsetree.Pmod_constraint(_) => true\n  | _ => false\n  }\n\nlet arrowReturnTypExpr = typExpr =>\n  switch typExpr.Parsetree.ptyp_desc {\n  | Parsetree.Ptyp_arrow(_) => true\n  | _ => false\n  }\n\nlet patternRecordRowRhs = (pattern: Parsetree.pattern) =>\n  switch pattern.ppat_desc {\n  | Ppat_constraint({ppat_desc: Ppat_unpack(_)}, {ptyp_desc: Ptyp_package(_)}) => false\n  | Ppat_constraint(_) => true\n  | _ => false\n  }\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_parser.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Lexing from \"rescript/lib/es6/lexing.js\";\nimport * as Caml_obj from \"rescript/lib/es6/caml_obj.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as Res_comment from \"./res_comment.js\";\nimport * as Res_scanner from \"./res_scanner.js\";\nimport * as Res_diagnostics from \"./res_diagnostics.js\";\n\nfunction err(startPos, endPos, p, error) {\n  var match = p.regions;\n  if (!match) {\n    return ;\n  }\n  var region = match.hd;\n  var match$1 = region.contents;\n  if (match$1) {\n    return ;\n  }\n  var d = Res_diagnostics.make(startPos !== undefined ? startPos : p.startPos, endPos !== undefined ? endPos : p.endPos, error);\n  p.diagnostics = {\n    hd: d,\n    tl: p.diagnostics\n  };\n  region.contents = /* Silent */1;\n  \n}\n\nfunction beginRegion(p) {\n  p.regions = {\n    hd: {\n      contents: /* Report */0\n    },\n    tl: p.regions\n  };\n  \n}\n\nfunction endRegion(p) {\n  var match = p.regions;\n  if (match) {\n    p.regions = match.tl;\n    return ;\n  }\n  \n}\n\nfunction next(_prevEndPos, p) {\n  while(true) {\n    var prevEndPos = _prevEndPos;\n    if (p.token === /* Eof */26) {\n      throw {\n            RE_EXN_ID: \"Assert_failure\",\n            _1: [\n              \"res_parser.res\",\n              59,\n              4\n            ],\n            Error: new Error()\n          };\n    }\n    var prevEndPos$1 = prevEndPos !== undefined ? 
prevEndPos : p.endPos;\n    var match = Res_scanner.scan(p.scanner);\n    var token = match[2];\n    var endPos = match[1];\n    if (typeof token !== \"number\" && token.TAG === /* Comment */6) {\n      var c = token._0;\n      Res_comment.setPrevTokEndPos(c, p.endPos);\n      p.comments = {\n        hd: c,\n        tl: p.comments\n      };\n      p.prevEndPos = p.endPos;\n      p.endPos = endPos;\n      _prevEndPos = prevEndPos$1;\n      continue ;\n    }\n    p.token = token;\n    p.prevEndPos = prevEndPos$1;\n    p.startPos = match[0];\n    p.endPos = endPos;\n    return ;\n  };\n}\n\nfunction nextUnsafe(p) {\n  if (p.token !== /* Eof */26) {\n    return next(undefined, p);\n  }\n  \n}\n\nfunction nextTemplateLiteralToken(p) {\n  var match = Res_scanner.scanTemplateLiteralToken(p.scanner);\n  p.token = match[2];\n  p.prevEndPos = p.endPos;\n  p.startPos = match[0];\n  p.endPos = match[1];\n  \n}\n\nfunction checkProgress(prevEndPos, result, p) {\n  if (p.endPos === prevEndPos) {\n    return ;\n  } else {\n    return Caml_option.some(result);\n  }\n}\n\nfunction make(modeOpt, src, filename) {\n  var mode = modeOpt !== undefined ? 
modeOpt : /* ParseForTypeChecker */0;\n  var scanner = Res_scanner.make(filename, src);\n  var parserState = {\n    mode: mode,\n    scanner: scanner,\n    token: /* Semicolon */8,\n    startPos: Lexing.dummy_pos,\n    endPos: Lexing.dummy_pos,\n    prevEndPos: Lexing.dummy_pos,\n    breadcrumbs: /* [] */0,\n    errors: /* [] */0,\n    diagnostics: /* [] */0,\n    comments: /* [] */0,\n    regions: {\n      hd: {\n        contents: /* Report */0\n      },\n      tl: /* [] */0\n    }\n  };\n  parserState.scanner.err = (function (startPos, endPos, error) {\n      var diagnostic = Res_diagnostics.make(startPos, endPos, error);\n      parserState.diagnostics = {\n        hd: diagnostic,\n        tl: parserState.diagnostics\n      };\n      \n    });\n  next(undefined, parserState);\n  return parserState;\n}\n\nfunction leaveBreadcrumb(p, circumstance) {\n  var crumb_1 = p.startPos;\n  var crumb = [\n    circumstance,\n    crumb_1\n  ];\n  p.breadcrumbs = {\n    hd: crumb,\n    tl: p.breadcrumbs\n  };\n  \n}\n\nfunction eatBreadcrumb(p) {\n  var match = p.breadcrumbs;\n  if (match) {\n    p.breadcrumbs = match.tl;\n    return ;\n  }\n  \n}\n\nfunction optional(p, token) {\n  if (Caml_obj.caml_equal(p.token, token)) {\n    next(undefined, p);\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction expect(grammar, token, p) {\n  if (Caml_obj.caml_equal(p.token, token)) {\n    return next(undefined, p);\n  }\n  var error = Res_diagnostics.expected(grammar, p.prevEndPos, token);\n  return err(p.prevEndPos, undefined, p, error);\n}\n\nfunction lookahead(p, callback) {\n  var err = p.scanner.err;\n  var ch = p.scanner.ch;\n  var offset = p.scanner.offset;\n  var lineOffset = p.scanner.lineOffset;\n  var lnum = p.scanner.lnum;\n  var mode = p.scanner.mode;\n  var token = p.token;\n  var startPos = p.startPos;\n  var endPos = p.endPos;\n  var prevEndPos = p.prevEndPos;\n  var breadcrumbs = p.breadcrumbs;\n  var errors = p.errors;\n  var diagnostics = 
p.diagnostics;\n  var comments = p.comments;\n  var res = Curry._1(callback, p);\n  p.scanner.err = err;\n  p.scanner.ch = ch;\n  p.scanner.offset = offset;\n  p.scanner.lineOffset = lineOffset;\n  p.scanner.lnum = lnum;\n  p.scanner.mode = mode;\n  p.token = token;\n  p.startPos = startPos;\n  p.endPos = endPos;\n  p.prevEndPos = prevEndPos;\n  p.breadcrumbs = breadcrumbs;\n  p.errors = errors;\n  p.diagnostics = diagnostics;\n  p.comments = comments;\n  return res;\n}\n\nvar Scanner;\n\nvar Diagnostics;\n\nvar Token;\n\nvar Grammar;\n\nvar Reporting;\n\nvar $$Comment;\n\nexport {\n  Scanner ,\n  Diagnostics ,\n  Token ,\n  Grammar ,\n  Reporting ,\n  $$Comment ,\n  err ,\n  beginRegion ,\n  endRegion ,\n  next ,\n  nextUnsafe ,\n  nextTemplateLiteralToken ,\n  checkProgress ,\n  make ,\n  leaveBreadcrumb ,\n  eatBreadcrumb ,\n  optional ,\n  expect ,\n  lookahead ,\n  \n}\n/* Res_comment Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_parser.res",
    "content": "module Scanner = Res_scanner\nmodule Diagnostics = Res_diagnostics\nmodule Token = Res_token\nmodule Grammar = Res_grammar\nmodule Reporting = Res_reporting\n\nmodule Comment = Res_comment\n\ntype mode = ParseForTypeChecker | Default\n\ntype regionStatus = Report | Silent\n\ntype t = {\n  mode: mode,\n  mutable scanner: Scanner.t,\n  mutable token: Token.t,\n  mutable startPos: Lexing.position,\n  mutable endPos: Lexing.position,\n  mutable prevEndPos: Lexing.position,\n  mutable breadcrumbs: list<(Grammar.t, Lexing.position)>,\n  mutable errors: list<Reporting.parseError>,\n  mutable diagnostics: list<Diagnostics.t>,\n  mutable comments: list<Comment.t>,\n  mutable regions: list<ref<regionStatus>>,\n}\n\nlet err = (~startPos=?, ~endPos=?, p, error) =>\n  switch p.regions {\n  | list{{contents: Report} as region, ..._} =>\n    let d = Diagnostics.make(\n      ~startPos=switch startPos {\n      | Some(pos) => pos\n      | None => p.startPos\n      },\n      ~endPos=switch endPos {\n      | Some(pos) => pos\n      | None => p.endPos\n      },\n      error,\n    )\n\n    p.diagnostics = list{d, ...p.diagnostics}\n    region := Silent\n  | _ => ()\n  }\n\nlet beginRegion = p => p.regions = list{ref(Report), ...p.regions}\nlet endRegion = p =>\n  switch p.regions {\n  | list{} => ()\n  | list{_, ...rest} => p.regions = rest\n  }\n\n/* Advance to the next non-comment token and store any encountered comment\n * in the parser's state. 
Every comment contains the end position of its\n * previous token to facilite comment interleaving */\nlet rec next = (~prevEndPos=?, p) => {\n  if p.token == Eof {\n    assert false\n  }\n  let prevEndPos = switch prevEndPos {\n  | Some(pos) => pos\n  | None => p.endPos\n  }\n  let (startPos, endPos, token) = Scanner.scan(p.scanner)\n  switch token {\n  | Comment(c) =>\n    Comment.setPrevTokEndPos(c, p.endPos)\n    p.comments = list{c, ...p.comments}\n    p.prevEndPos = p.endPos\n    p.endPos = endPos\n    next(~prevEndPos, p)\n  | _ =>\n    p.token = token\n\n    /* p.prevEndPos <- prevEndPos; */\n    p.prevEndPos = prevEndPos\n    p.startPos = startPos\n    p.endPos = endPos\n  }\n}\n\nlet nextUnsafe = p =>\n  if p.token != Eof {\n    next(p)\n  }\n\nlet nextTemplateLiteralToken = p => {\n  let (startPos, endPos, token) = Scanner.scanTemplateLiteralToken(p.scanner)\n  p.token = token\n  p.prevEndPos = p.endPos\n  p.startPos = startPos\n  p.endPos = endPos\n}\n\nlet checkProgress = (~prevEndPos, ~result, p) =>\n  if p.endPos === prevEndPos {\n    None\n  } else {\n    Some(result)\n  }\n\nlet make = (~mode=ParseForTypeChecker, src, filename) => {\n  let scanner = Scanner.make(~filename, src)\n  let parserState = {\n    mode: mode,\n    scanner: scanner,\n    token: Token.Semicolon,\n    startPos: Lexing.dummy_pos,\n    prevEndPos: Lexing.dummy_pos,\n    endPos: Lexing.dummy_pos,\n    breadcrumbs: list{},\n    errors: list{},\n    diagnostics: list{},\n    comments: list{},\n    regions: list{ref(Report)},\n  }\n  parserState.scanner.err = (~startPos, ~endPos, error) => {\n    let diagnostic = Diagnostics.make(~startPos, ~endPos, error)\n\n    parserState.diagnostics = list{diagnostic, ...parserState.diagnostics}\n  }\n  next(parserState)\n  parserState\n}\n\nlet leaveBreadcrumb = (p, circumstance) => {\n  let crumb = (circumstance, p.startPos)\n  p.breadcrumbs = list{crumb, ...p.breadcrumbs}\n}\n\nlet eatBreadcrumb = p =>\n  switch p.breadcrumbs {\n  | list{} => 
()\n  | list{_, ...crumbs} => p.breadcrumbs = crumbs\n  }\n\nlet optional = (p, token) =>\n  if p.token == token {\n    let () = next(p)\n    true\n  } else {\n    false\n  }\n\nlet expect = (~grammar=?, token, p) =>\n  if p.token == token {\n    next(p)\n  } else {\n    let error = Diagnostics.expected(~grammar?, p.prevEndPos, token)\n    err(~startPos=p.prevEndPos, p, error)\n  }\n\n/* Don't use immutable copies here, it trashes certain heuristics\n * in the ocaml compiler, resulting in massive slowdowns of the parser */\nlet lookahead = (p, callback) => {\n  let err = p.scanner.err\n  let ch = p.scanner.ch\n  let offset = p.scanner.offset\n  let lineOffset = p.scanner.lineOffset\n  let lnum = p.scanner.lnum\n  let mode = p.scanner.mode\n  let token = p.token\n  let startPos = p.startPos\n  let endPos = p.endPos\n  let prevEndPos = p.prevEndPos\n  let breadcrumbs = p.breadcrumbs\n  let errors = p.errors\n  let diagnostics = p.diagnostics\n  let comments = p.comments\n\n  let res = callback(p)\n\n  p.scanner.err = err\n  p.scanner.ch = ch\n  p.scanner.offset = offset\n  p.scanner.lineOffset = lineOffset\n  p.scanner.lnum = lnum\n  p.scanner.mode = mode\n  p.token = token\n  p.startPos = startPos\n  p.endPos = endPos\n  p.prevEndPos = prevEndPos\n  p.breadcrumbs = breadcrumbs\n  p.errors = errors\n  p.diagnostics = diagnostics\n  p.comments = comments\n\n  res\n}\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_parsetree_viewer.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\n\nfunction arrowType(ct) {\n  var $$process = function (attrsBefore, _acc, _typ) {\n    while(true) {\n      var typ = _typ;\n      var acc = _acc;\n      var match = typ.ptyp_desc;\n      if (typeof match === \"number\") {\n        return [\n                attrsBefore,\n                List.rev(acc),\n                typ\n              ];\n      }\n      if (match.TAG !== /* Ptyp_arrow */1) {\n        return [\n                attrsBefore,\n                List.rev(acc),\n                typ\n              ];\n      }\n      var lbl = match._0;\n      if (typeof lbl === \"number\") {\n        var attrs = typ.ptyp_attributes;\n        var typ2 = match._2;\n        var typ1 = match._1;\n        if (attrs) {\n          if (attrs.hd[0].txt === \"bs\" && !attrs.tl) {\n            var arg = [\n              attrs,\n              lbl,\n              typ1\n            ];\n            _typ = typ2;\n            _acc = {\n              hd: arg,\n              tl: acc\n            };\n            continue ;\n          }\n          \n        } else {\n          var arg$1 = [\n            /* [] */0,\n            lbl,\n            typ1\n          ];\n          _typ = typ2;\n          _acc = {\n            hd: arg$1,\n            tl: acc\n          };\n          continue ;\n        }\n        var args = List.rev(acc);\n        return [\n                attrsBefore,\n                args,\n                typ\n              ];\n      }\n      var arg_0 = typ.ptyp_attributes;\n      var arg_2 = match._1;\n      var arg$2 = [\n        arg_0,\n        lbl,\n        arg_2\n      ];\n      _typ = match._2;\n      _acc = {\n        hd: arg$2,\n        tl: acc\n      };\n      continue ;\n    };\n  };\n  var match = ct.ptyp_desc;\n  if (typeof match === \"number\" || !(match.TAG === /* Ptyp_arrow */1 && typeof match._0 === \"number\")) {\n    return $$process(/* [] 
*/0, /* [] */0, ct);\n  } else {\n    return $$process(ct.ptyp_attributes, /* [] */0, {\n                ptyp_desc: ct.ptyp_desc,\n                ptyp_loc: ct.ptyp_loc,\n                ptyp_attributes: /* [] */0\n              });\n  }\n}\n\nfunction functorType(modtype) {\n  var _acc = /* [] */0;\n  var _modtype = modtype;\n  while(true) {\n    var modtype$1 = _modtype;\n    var acc = _acc;\n    var match = modtype$1.pmty_desc;\n    if (match.TAG !== /* Pmty_functor */2) {\n      return [\n              List.rev(acc),\n              modtype$1\n            ];\n    }\n    var arg_0 = modtype$1.pmty_attributes;\n    var arg_1 = match._0;\n    var arg_2 = match._1;\n    var arg = [\n      arg_0,\n      arg_1,\n      arg_2\n    ];\n    _modtype = match._2;\n    _acc = {\n      hd: arg,\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction processUncurriedAttribute(attrs) {\n  var _uncurriedSpotted = false;\n  var _acc = /* [] */0;\n  var _attrs = attrs;\n  while(true) {\n    var attrs$1 = _attrs;\n    var acc = _acc;\n    var uncurriedSpotted = _uncurriedSpotted;\n    if (!attrs$1) {\n      return [\n              uncurriedSpotted,\n              List.rev(acc)\n            ];\n    }\n    var attr = attrs$1.hd;\n    if (attr[0].txt === \"bs\") {\n      _attrs = attrs$1.tl;\n      _uncurriedSpotted = true;\n      continue ;\n    }\n    _attrs = attrs$1.tl;\n    _acc = {\n      hd: attr,\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction collectListExpressions(expr) {\n  var _acc = /* [] */0;\n  var _expr = expr;\n  while(true) {\n    var expr$1 = _expr;\n    var acc = _acc;\n    var match = expr$1.pexp_desc;\n    if (typeof match !== \"number\" && match.TAG === /* Pexp_construct */9) {\n      var match$1 = match._0.txt;\n      switch (match$1.TAG | 0) {\n        case /* Lident */0 :\n            switch (match$1._0) {\n              case \"::\" :\n                  var match$2 = match._1;\n                  if (match$2 !== undefined) {\n                   
 var match$3 = match$2.pexp_desc;\n                    if (typeof match$3 !== \"number\" && match$3.TAG === /* Pexp_tuple */8) {\n                      var match$4 = match$3._0;\n                      if (match$4) {\n                        var match$5 = match$4.tl;\n                        if (match$5 && !match$5.tl) {\n                          _expr = match$5.hd;\n                          _acc = {\n                            hd: match$4.hd,\n                            tl: acc\n                          };\n                          continue ;\n                        }\n                        \n                      }\n                      \n                    }\n                    \n                  }\n                  break;\n              case \"[]\" :\n                  return [\n                          List.rev(acc),\n                          undefined\n                        ];\n              default:\n                \n            }\n            break;\n        case /* Ldot */1 :\n        case /* Lapply */2 :\n            break;\n        \n      }\n    }\n    return [\n            List.rev(acc),\n            expr$1\n          ];\n  };\n}\n\nfunction rewriteUnderscoreApply(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return expr;\n  }\n  if (match.TAG !== /* Pexp_fun */4) {\n    return expr;\n  }\n  if (typeof match._0 !== \"number\") {\n    return expr;\n  }\n  if (match._1 !== undefined) {\n    return expr;\n  }\n  var match$1 = match._2.ppat_desc;\n  if (typeof match$1 === \"number\") {\n    return expr;\n  }\n  if (match$1.TAG !== /* Ppat_var */0) {\n    return expr;\n  }\n  if (match$1._0.txt !== \"__x\") {\n    return expr;\n  }\n  var e = match._3;\n  var match$2 = e.pexp_desc;\n  if (typeof match$2 === \"number\") {\n    return expr;\n  }\n  if (match$2.TAG !== /* Pexp_apply */5) {\n    return expr;\n  }\n  var newArgs = List.map((function (arg) {\n          var argExpr = arg[1];\n          var lid 
= argExpr.pexp_desc;\n          if (typeof lid === \"number\") {\n            return arg;\n          }\n          if (lid.TAG !== /* Pexp_ident */0) {\n            return arg;\n          }\n          var lid$1 = lid._0;\n          var match = lid$1.txt;\n          switch (match.TAG | 0) {\n            case /* Lident */0 :\n                if (match._0 === \"__x\") {\n                  return [\n                          arg[0],\n                          {\n                            pexp_desc: {\n                              TAG: /* Pexp_ident */0,\n                              _0: {\n                                txt: {\n                                  TAG: /* Lident */0,\n                                  _0: \"_\"\n                                },\n                                loc: lid$1.loc\n                              }\n                            },\n                            pexp_loc: argExpr.pexp_loc,\n                            pexp_attributes: argExpr.pexp_attributes\n                          }\n                        ];\n                } else {\n                  return arg;\n                }\n            case /* Ldot */1 :\n            case /* Lapply */2 :\n                return arg;\n            \n          }\n        }), match$2._1);\n  return {\n          pexp_desc: {\n            TAG: /* Pexp_apply */5,\n            _0: match$2._0,\n            _1: newArgs\n          },\n          pexp_loc: e.pexp_loc,\n          pexp_attributes: e.pexp_attributes\n        };\n}\n\nfunction funExpr(expr) {\n  var collectNewTypes = function (_acc, _returnExpr) {\n    while(true) {\n      var returnExpr = _returnExpr;\n      var acc = _acc;\n      var match = returnExpr.pexp_desc;\n      if (typeof match === \"number\") {\n        return [\n                List.rev(acc),\n                returnExpr\n              ];\n      }\n      if (match.TAG !== /* Pexp_newtype */31) {\n        return [\n                List.rev(acc),\n                
returnExpr\n              ];\n      }\n      if (returnExpr.pexp_attributes) {\n        return [\n                List.rev(acc),\n                returnExpr\n              ];\n      }\n      _returnExpr = match._1;\n      _acc = {\n        hd: match._0,\n        tl: acc\n      };\n      continue ;\n    };\n  };\n  var collect = function (attrsBefore, _acc, _expr) {\n    while(true) {\n      var expr = _expr;\n      var acc = _acc;\n      var match = expr.pexp_desc;\n      if (typeof match !== \"number\") {\n        switch (match.TAG | 0) {\n          case /* Pexp_fun */4 :\n              var lbl = match._0;\n              var exit = 0;\n              var exit$1 = 0;\n              if (typeof lbl === \"number\" && match._1 === undefined) {\n                var match$1 = match._2.ppat_desc;\n                if (typeof match$1 === \"number\" || !(match$1.TAG === /* Ppat_var */0 && match$1._0.txt === \"__x\")) {\n                  exit$1 = 3;\n                } else {\n                  var tmp = match._3.pexp_desc;\n                  if (typeof tmp === \"number\") {\n                    exit$1 = 3;\n                  } else {\n                    if (tmp.TAG === /* Pexp_apply */5) {\n                      return [\n                              attrsBefore,\n                              List.rev(acc),\n                              rewriteUnderscoreApply(expr)\n                            ];\n                    }\n                    exit$1 = 3;\n                  }\n                }\n              } else {\n                exit$1 = 3;\n              }\n              if (exit$1 === 3) {\n                var attrs = expr.pexp_attributes;\n                var returnExpr = match._3;\n                var pattern = match._2;\n                var defaultExpr = match._1;\n                if (attrs) {\n                  if (attrs.hd[0].txt === \"bs\" && !attrs.tl) {\n                    var parameter = {\n                      TAG: /* Parameter */0,\n                      
attrs: attrs,\n                      lbl: lbl,\n                      defaultExpr: defaultExpr,\n                      pat: pattern\n                    };\n                    _expr = returnExpr;\n                    _acc = {\n                      hd: parameter,\n                      tl: acc\n                    };\n                    continue ;\n                  }\n                  exit = 2;\n                } else {\n                  var parameter$1 = {\n                    TAG: /* Parameter */0,\n                    attrs: /* [] */0,\n                    lbl: lbl,\n                    defaultExpr: defaultExpr,\n                    pat: pattern\n                  };\n                  _expr = returnExpr;\n                  _acc = {\n                    hd: parameter$1,\n                    tl: acc\n                  };\n                  continue ;\n                }\n              }\n              if (exit === 2 && typeof lbl !== \"number\") {\n                var parameter_0 = expr.pexp_attributes;\n                var parameter_2 = match._1;\n                var parameter_3 = match._2;\n                var parameter$2 = {\n                  TAG: /* Parameter */0,\n                  attrs: parameter_0,\n                  lbl: lbl,\n                  defaultExpr: parameter_2,\n                  pat: parameter_3\n                };\n                _expr = match._3;\n                _acc = {\n                  hd: parameter$2,\n                  tl: acc\n                };\n                continue ;\n              }\n              break;\n          case /* Pexp_newtype */31 :\n              var match$2 = collectNewTypes({\n                    hd: match._0,\n                    tl: /* [] */0\n                  }, match._1);\n              var param_0 = expr.pexp_attributes;\n              var param_1 = match$2[0];\n              var param = {\n                TAG: /* NewTypes */1,\n                attrs: param_0,\n                locs: param_1\n            
  };\n              _expr = match$2[1];\n              _acc = {\n                hd: param,\n                tl: acc\n              };\n              continue ;\n          default:\n            \n        }\n      }\n      return [\n              attrsBefore,\n              List.rev(acc),\n              expr\n            ];\n    };\n  };\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\" || !(match.TAG === /* Pexp_fun */4 && typeof match._0 === \"number\")) {\n    return collect(/* [] */0, /* [] */0, expr);\n  } else {\n    return collect(expr.pexp_attributes, /* [] */0, {\n                pexp_desc: expr.pexp_desc,\n                pexp_loc: expr.pexp_loc,\n                pexp_attributes: /* [] */0\n              });\n  }\n}\n\nfunction processBracesAttr(expr) {\n  var match = expr.pexp_attributes;\n  if (!match) {\n    return [\n            undefined,\n            expr\n          ];\n  }\n  var attr = match.hd;\n  if (attr[0].txt === \"ns.braces\") {\n    return [\n            attr,\n            {\n              pexp_desc: expr.pexp_desc,\n              pexp_loc: expr.pexp_loc,\n              pexp_attributes: match.tl\n            }\n          ];\n  } else {\n    return [\n            undefined,\n            expr\n          ];\n  }\n}\n\nfunction filterParsingAttrs(attrs) {\n  return List.filter(function (attr) {\n                switch (attr[0].txt) {\n                  case \"bs\" :\n                  case \"ns.braces\" :\n                  case \"ns.iflet\" :\n                  case \"ns.namedArgLoc\" :\n                  case \"ns.ternary\" :\n                  case \"res.template\" :\n                      return false;\n                  default:\n                    return true;\n                }\n              })(attrs);\n}\n\nfunction isBlockExpr(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  switch (match.TAG | 0) {\n    case /* Pexp_let */2 :\n    case /* Pexp_sequence */16 :\n    
case /* Pexp_letmodule */25 :\n    case /* Pexp_letexception */26 :\n    case /* Pexp_open */33 :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction isBracedExpr(expr) {\n  var match = processBracesAttr(expr);\n  return match[0] !== undefined;\n}\n\nfunction isMultilineText(txt) {\n  var len = txt.length;\n  var _i = 0;\n  while(true) {\n    var i = _i;\n    if (i >= len) {\n      return false;\n    }\n    var c = txt.charCodeAt(i);\n    if (c > 13 || c < 10) {\n      if (c !== 92) {\n        _i = i + 1 | 0;\n        continue ;\n      }\n      if ((i + 2 | 0) === len) {\n        return false;\n      }\n      _i = i + 2 | 0;\n      continue ;\n    }\n    if (!(c === 12 || c === 11)) {\n      return true;\n    }\n    _i = i + 1 | 0;\n    continue ;\n  };\n}\n\nfunction isHuggableExpression(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match !== \"number\") {\n    switch (match.TAG | 0) {\n      case /* Pexp_constant */1 :\n          var match$1 = match._0;\n          if (match$1.TAG === /* Pconst_string */2 && match$1._1 !== undefined) {\n            return true;\n          }\n          break;\n      case /* Pexp_construct */9 :\n          var match$2 = match._0.txt;\n          switch (match$2.TAG | 0) {\n            case /* Lident */0 :\n                switch (match$2._0) {\n                  case \"::\" :\n                  case \"[]\" :\n                      return true;\n                  default:\n                    \n                }\n                break;\n            case /* Ldot */1 :\n            case /* Lapply */2 :\n                break;\n            \n          }\n          break;\n      case /* Pexp_tuple */8 :\n      case /* Pexp_record */11 :\n      case /* Pexp_array */14 :\n          return true;\n      case /* Pexp_extension */34 :\n          switch (match._0[0].txt) {\n            case \"bs.obj\" :\n            case \"obj\" :\n                return true;\n            default:\n              \n          }\n   
       break;\n      default:\n        \n    }\n  }\n  if (isBlockExpr(expr)) {\n    return true;\n  }\n  if (isBracedExpr(expr)) {\n    return true;\n  }\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Pexp_constant */1) {\n    return false;\n  }\n  var match$3 = match._0;\n  if (match$3.TAG === /* Pconst_string */2 && isMultilineText(match$3._0)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isHuggableRhs(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match !== \"number\") {\n    switch (match.TAG | 0) {\n      case /* Pexp_construct */9 :\n          var match$1 = match._0.txt;\n          switch (match$1.TAG | 0) {\n            case /* Lident */0 :\n                switch (match$1._0) {\n                  case \"::\" :\n                  case \"[]\" :\n                      return true;\n                  default:\n                    \n                }\n                break;\n            case /* Ldot */1 :\n            case /* Lapply */2 :\n                break;\n            \n          }\n          break;\n      case /* Pexp_tuple */8 :\n      case /* Pexp_record */11 :\n      case /* Pexp_array */14 :\n          return true;\n      case /* Pexp_extension */34 :\n          switch (match._0[0].txt) {\n            case \"bs.obj\" :\n            case \"obj\" :\n                return true;\n            default:\n              \n          }\n          break;\n      default:\n        \n    }\n  }\n  if (isBracedExpr(expr)) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction isHuggablePattern(pattern) {\n  var match = pattern.ppat_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  switch (match.TAG | 0) {\n    case /* Ppat_tuple */4 :\n    case /* Ppat_construct */5 :\n    case /* Ppat_variant */6 :\n    case /* Ppat_record */7 :\n    case /* Ppat_array */8 :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction 
operatorPrecedence(operator) {\n  switch (operator) {\n    case \"&&\" :\n        return 3;\n    case \"**\" :\n        return 7;\n    case \"*\" :\n    case \"*.\" :\n    case \"/\" :\n    case \"/.\" :\n        return 6;\n    case \":=\" :\n        return 1;\n    case \"+\" :\n    case \"+.\" :\n    case \"-\" :\n    case \"-.\" :\n    case \"^\" :\n        return 5;\n    case \"#\" :\n    case \"##\" :\n    case \"|.\" :\n        return 8;\n    case \"!=\" :\n    case \"!==\" :\n    case \"<\" :\n    case \"<=\" :\n    case \"<>\" :\n    case \"=\" :\n    case \"==\" :\n    case \">\" :\n    case \">=\" :\n    case \"|>\" :\n        return 4;\n    case \"||\" :\n        return 2;\n    default:\n      return 0;\n  }\n}\n\nfunction isUnaryOperator(operator) {\n  switch (operator) {\n    case \"not\" :\n    case \"~+\" :\n    case \"~+.\" :\n    case \"~-\" :\n    case \"~-.\" :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction isUnaryExpression(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Pexp_apply */5) {\n    return false;\n  }\n  var match$1 = match._0.pexp_desc;\n  if (typeof match$1 === \"number\") {\n    return false;\n  }\n  if (match$1.TAG !== /* Pexp_ident */0) {\n    return false;\n  }\n  var operator = match$1._0.txt;\n  switch (operator.TAG | 0) {\n    case /* Lident */0 :\n        var match$2 = match._1;\n        if (match$2 && typeof match$2.hd[0] === \"number\" && !(match$2.tl || !isUnaryOperator(operator._0))) {\n          return true;\n        } else {\n          return false;\n        }\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        return false;\n    \n  }\n}\n\nfunction isBinaryOperator(operator) {\n  switch (operator) {\n    case \"!=\" :\n    case \"!==\" :\n    case \"&&\" :\n    case \"*\" :\n    case \"**\" :\n    case \"*.\" :\n    case \"+\" :\n    case \"+.\" :\n    case \"-\" :\n    case \"-.\" :\n    case \"/\" :\n    
case \"/.\" :\n    case \":=\" :\n    case \"<\" :\n    case \"<=\" :\n    case \"<>\" :\n    case \"=\" :\n    case \"==\" :\n    case \">\" :\n    case \">=\" :\n    case \"^\" :\n    case \"|.\" :\n    case \"|>\" :\n    case \"||\" :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction isBinaryExpression(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Pexp_apply */5) {\n    return false;\n  }\n  var match$1 = match._0.pexp_desc;\n  if (typeof match$1 === \"number\") {\n    return false;\n  }\n  if (match$1.TAG !== /* Pexp_ident */0) {\n    return false;\n  }\n  var match$2 = match$1._0;\n  var operator = match$2.txt;\n  switch (operator.TAG | 0) {\n    case /* Lident */0 :\n        var match$3 = match._1;\n        if (!match$3) {\n          return false;\n        }\n        if (typeof match$3.hd[0] !== \"number\") {\n          return false;\n        }\n        var match$4 = match$3.tl;\n        if (!match$4) {\n          return false;\n        }\n        if (typeof match$4.hd[0] !== \"number\") {\n          return false;\n        }\n        if (match$4.tl) {\n          return false;\n        }\n        var operator$1 = operator._0;\n        if (isBinaryOperator(operator$1)) {\n          return !(match$2.loc.loc_ghost && operator$1 === \"^\");\n        } else {\n          return false;\n        }\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        return false;\n    \n  }\n}\n\nfunction isEqualityOperator(operator) {\n  switch (operator) {\n    case \"!=\" :\n    case \"<>\" :\n    case \"=\" :\n    case \"==\" :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction flattenableOperators(parentOperator, childOperator) {\n  var precParent = operatorPrecedence(parentOperator);\n  var precChild = operatorPrecedence(childOperator);\n  if (precParent === precChild) {\n    return !(isEqualityOperator(parentOperator) && 
isEqualityOperator(childOperator));\n  } else {\n    return false;\n  }\n}\n\nfunction hasIfLetAttribute(_attrs) {\n  while(true) {\n    var attrs = _attrs;\n    if (!attrs) {\n      return false;\n    }\n    if (attrs.hd[0].txt === \"ns.iflet\") {\n      return true;\n    }\n    _attrs = attrs.tl;\n    continue ;\n  };\n}\n\nfunction isIfLetExpr(expr) {\n  var tmp = expr.pexp_desc;\n  if (typeof tmp === \"number\" || !(tmp.TAG === /* Pexp_match */6 && hasIfLetAttribute(expr.pexp_attributes))) {\n    return false;\n  } else {\n    return true;\n  }\n}\n\nfunction hasAttributes(attrs) {\n  return List.exists((function (attr) {\n                switch (attr[0].txt) {\n                  case \"bs\" :\n                  case \"ns.braces\" :\n                  case \"ns.iflet\" :\n                  case \"ns.ternary\" :\n                  case \"res.template\" :\n                      return false;\n                  case \"warning\" :\n                      var match = attr[1];\n                      if (match.TAG !== /* PStr */0) {\n                        return true;\n                      }\n                      var match$1 = match._0;\n                      if (!match$1) {\n                        return true;\n                      }\n                      var match$2 = match$1.hd.pstr_desc;\n                      if (match$2.TAG !== /* Pstr_eval */0) {\n                        return true;\n                      }\n                      var match$3 = match$2._0.pexp_desc;\n                      if (typeof match$3 === \"number\") {\n                        return true;\n                      }\n                      if (match$3.TAG !== /* Pexp_constant */1) {\n                        return true;\n                      }\n                      var match$4 = match$3._0;\n                      if (match$4.TAG === /* Pconst_string */2 && match$4._0 === \"-4\" && !(match$4._1 !== undefined || match$1.tl)) {\n                        return 
!hasIfLetAttribute(attrs);\n                      } else {\n                        return true;\n                      }\n                  default:\n                    return true;\n                }\n              }), attrs);\n}\n\nfunction isArrayAccess(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Pexp_apply */5) {\n    return false;\n  }\n  var match$1 = match._0.pexp_desc;\n  if (typeof match$1 === \"number\") {\n    return false;\n  }\n  if (match$1.TAG !== /* Pexp_ident */0) {\n    return false;\n  }\n  var match$2 = match$1._0.txt;\n  switch (match$2.TAG | 0) {\n    case /* Ldot */1 :\n        var match$3 = match$2._0;\n        switch (match$3.TAG | 0) {\n          case /* Lident */0 :\n              if (match$3._0 !== \"Array\") {\n                return false;\n              }\n              if (match$2._1 !== \"get\") {\n                return false;\n              }\n              var match$4 = match._1;\n              if (!match$4) {\n                return false;\n              }\n              if (typeof match$4.hd[0] !== \"number\") {\n                return false;\n              }\n              var match$5 = match$4.tl;\n              if (match$5 && typeof match$5.hd[0] === \"number\" && !match$5.tl) {\n                return true;\n              } else {\n                return false;\n              }\n          case /* Ldot */1 :\n          case /* Lapply */2 :\n              return false;\n          \n        }\n    case /* Lident */0 :\n    case /* Lapply */2 :\n        return false;\n    \n  }\n}\n\nfunction collectIfExpressions(expr) {\n  var _acc = /* [] */0;\n  var _expr = expr;\n  while(true) {\n    var expr$1 = _expr;\n    var acc = _acc;\n    var match = expr$1.pexp_desc;\n    if (typeof match !== \"number\") {\n      switch (match.TAG | 0) {\n        case /* Pexp_match */6 :\n            var match$1 = match._1;\n            if (match$1) {\n              
var match$2 = match$1.hd;\n              if (match$2.pc_guard === undefined) {\n                var match$3 = match$1.tl;\n                if (match$3) {\n                  var elseExpr = match$3.hd.pc_rhs;\n                  var match$4 = elseExpr.pexp_desc;\n                  var thenExpr = match$2.pc_rhs;\n                  var pattern = match$2.pc_lhs;\n                  var condition = match._0;\n                  var exit = 0;\n                  if (typeof match$4 === \"number\" || match$4.TAG !== /* Pexp_construct */9) {\n                    exit = 2;\n                  } else {\n                    var match$5 = match$4._0.txt;\n                    switch (match$5.TAG | 0) {\n                      case /* Lident */0 :\n                          if (match$5._0 === \"()\") {\n                            if (!match$3.tl) {\n                              if (isIfLetExpr(expr$1)) {\n                                var ifs = List.rev({\n                                      hd: [\n                                        {\n                                          TAG: /* IfLet */1,\n                                          _0: pattern,\n                                          _1: condition\n                                        },\n                                        thenExpr\n                                      ],\n                                      tl: acc\n                                    });\n                                return [\n                                        ifs,\n                                        undefined\n                                      ];\n                              }\n                              exit = 2;\n                            }\n                            \n                          } else {\n                            exit = 2;\n                          }\n                          break;\n                      case /* Ldot */1 :\n                      case /* Lapply */2 :\n                    
      exit = 2;\n                          break;\n                      \n                    }\n                  }\n                  if (exit === 2 && !match$3.tl && isIfLetExpr(expr$1)) {\n                    _expr = elseExpr;\n                    _acc = {\n                      hd: [\n                        {\n                          TAG: /* IfLet */1,\n                          _0: pattern,\n                          _1: condition\n                        },\n                        thenExpr\n                      ],\n                      tl: acc\n                    };\n                    continue ;\n                  }\n                  \n                }\n                \n              }\n              \n            }\n            break;\n        case /* Pexp_ifthenelse */15 :\n            var elseExpr$1 = match._2;\n            var thenExpr$1 = match._1;\n            var ifExpr = match._0;\n            if (elseExpr$1 !== undefined) {\n              _expr = elseExpr$1;\n              _acc = {\n                hd: [\n                  {\n                    TAG: /* If */0,\n                    _0: ifExpr\n                  },\n                  thenExpr$1\n                ],\n                tl: acc\n              };\n              continue ;\n            }\n            var ifs$1 = List.rev({\n                  hd: [\n                    {\n                      TAG: /* If */0,\n                      _0: ifExpr\n                    },\n                    thenExpr$1\n                  ],\n                  tl: acc\n                });\n            return [\n                    ifs$1,\n                    elseExpr$1\n                  ];\n        default:\n          \n      }\n    }\n    return [\n            List.rev(acc),\n            expr$1\n          ];\n  };\n}\n\nfunction hasTernaryAttribute(_attrs) {\n  while(true) {\n    var attrs = _attrs;\n    if (!attrs) {\n      return false;\n    }\n    if (attrs.hd[0].txt === \"ns.ternary\") {\n      
return true;\n    }\n    _attrs = attrs.tl;\n    continue ;\n  };\n}\n\nfunction isTernaryExpr(expr) {\n  var tmp = expr.pexp_desc;\n  if (typeof tmp === \"number\" || !(tmp.TAG === /* Pexp_ifthenelse */15 && hasTernaryAttribute(expr.pexp_attributes))) {\n    return false;\n  } else {\n    return true;\n  }\n}\n\nfunction collectTernaryParts(expr) {\n  var _acc = /* [] */0;\n  var _expr = expr;\n  while(true) {\n    var expr$1 = _expr;\n    var acc = _acc;\n    var match = expr$1.pexp_desc;\n    if (typeof match === \"number\") {\n      return [\n              List.rev(acc),\n              expr$1\n            ];\n    }\n    if (match.TAG !== /* Pexp_ifthenelse */15) {\n      return [\n              List.rev(acc),\n              expr$1\n            ];\n    }\n    var alternate = match._2;\n    if (alternate === undefined) {\n      return [\n              List.rev(acc),\n              expr$1\n            ];\n    }\n    if (!hasTernaryAttribute(expr$1.pexp_attributes)) {\n      return [\n              List.rev(acc),\n              expr$1\n            ];\n    }\n    _expr = alternate;\n    _acc = {\n      hd: [\n        match._0,\n        match._1\n      ],\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction parametersShouldHug(parameters) {\n  if (!parameters) {\n    return false;\n  }\n  var match = parameters.hd;\n  if (match.TAG === /* Parameter */0 && !(match.attrs || !(typeof match.lbl === \"number\" && !(match.defaultExpr !== undefined || parameters.tl || !isHuggablePattern(match.pat))))) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction filterTernaryAttributes(attrs) {\n  return List.filter(function (attr) {\n                if (attr[0].txt === \"ns.ternary\") {\n                  return false;\n                } else {\n                  return true;\n                }\n              })(attrs);\n}\n\nfunction filterFragileMatchAttributes(attrs) {\n  return List.filter(function (attr) {\n                if (attr[0].txt !== 
\"warning\") {\n                  return true;\n                }\n                var match = attr[1];\n                if (match.TAG !== /* PStr */0) {\n                  return true;\n                }\n                var match$1 = match._0;\n                if (!match$1) {\n                  return true;\n                }\n                var match$2 = match$1.hd.pstr_desc;\n                if (match$2.TAG !== /* Pstr_eval */0) {\n                  return true;\n                }\n                var match$3 = match$2._0.pexp_desc;\n                if (typeof match$3 === \"number\") {\n                  return true;\n                }\n                if (match$3.TAG !== /* Pexp_constant */1) {\n                  return true;\n                }\n                var match$4 = match$3._0;\n                if (match$4.TAG === /* Pconst_string */2 && match$4._0 === \"-4\" && !match$1.tl) {\n                  return false;\n                } else {\n                  return true;\n                }\n              })(attrs);\n}\n\nfunction isJsxExpression(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\" || match.TAG !== /* Pexp_apply */5) {\n    return false;\n  } else {\n    var _attrs = expr.pexp_attributes;\n    while(true) {\n      var attrs = _attrs;\n      if (!attrs) {\n        return false;\n      }\n      if (attrs.hd[0].txt === \"JSX\") {\n        return true;\n      }\n      _attrs = attrs.tl;\n      continue ;\n    };\n  }\n}\n\nfunction hasJsxAttribute(attributes) {\n  var _attrs = attributes;\n  while(true) {\n    var attrs = _attrs;\n    if (!attrs) {\n      return false;\n    }\n    if (attrs.hd[0].txt === \"JSX\") {\n      return true;\n    }\n    _attrs = attrs.tl;\n    continue ;\n  };\n}\n\nfunction shouldIndentBinaryExpr(expr) {\n  var samePrecedenceSubExpression = function (operator, subExpression) {\n    var match = subExpression.pexp_desc;\n    if (typeof match === \"number\") {\n      return true;\n    }\n    if 
(match.TAG !== /* Pexp_apply */5) {\n      return true;\n    }\n    var match$1 = match._0.pexp_desc;\n    if (typeof match$1 === \"number\") {\n      return true;\n    }\n    if (match$1.TAG !== /* Pexp_ident */0) {\n      return true;\n    }\n    var subOperator = match$1._0.txt;\n    switch (subOperator.TAG | 0) {\n      case /* Lident */0 :\n          var match$2 = match._1;\n          if (!match$2) {\n            return true;\n          }\n          if (typeof match$2.hd[0] !== \"number\") {\n            return true;\n          }\n          var match$3 = match$2.tl;\n          if (!match$3) {\n            return true;\n          }\n          if (typeof match$3.hd[0] !== \"number\") {\n            return true;\n          }\n          if (match$3.tl) {\n            return true;\n          }\n          var subOperator$1 = subOperator._0;\n          if (isBinaryOperator(subOperator$1)) {\n            return flattenableOperators(operator, subOperator$1);\n          } else {\n            return true;\n          }\n      case /* Ldot */1 :\n      case /* Lapply */2 :\n          return true;\n      \n    }\n  };\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Pexp_apply */5) {\n    return false;\n  }\n  var match$1 = match._0.pexp_desc;\n  if (typeof match$1 === \"number\") {\n    return false;\n  }\n  if (match$1.TAG !== /* Pexp_ident */0) {\n    return false;\n  }\n  var operator = match$1._0.txt;\n  switch (operator.TAG | 0) {\n    case /* Lident */0 :\n        var match$2 = match._1;\n        if (!match$2) {\n          return false;\n        }\n        var match$3 = match$2.hd;\n        if (typeof match$3[0] !== \"number\") {\n          return false;\n        }\n        var match$4 = match$2.tl;\n        if (!match$4) {\n          return false;\n        }\n        if (typeof match$4.hd[0] !== \"number\") {\n          return false;\n        }\n        if (match$4.tl) {\n          return false;\n   
     }\n        var operator$1 = operator._0;\n        if (isBinaryOperator(operator$1)) {\n          if (isEqualityOperator(operator$1) || !samePrecedenceSubExpression(operator$1, match$3[1])) {\n            return true;\n          } else {\n            return operator$1 === \":=\";\n          }\n        } else {\n          return false;\n        }\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        return false;\n    \n  }\n}\n\nfunction shouldInlineRhsBinaryExpr(rhs) {\n  var match = rhs.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  switch (match.TAG | 0) {\n    case /* Pexp_constant */1 :\n    case /* Pexp_let */2 :\n    case /* Pexp_try */7 :\n    case /* Pexp_record */11 :\n    case /* Pexp_array */14 :\n    case /* Pexp_ifthenelse */15 :\n    case /* Pexp_sequence */16 :\n    case /* Pexp_while */17 :\n    case /* Pexp_for */18 :\n    case /* Pexp_letmodule */25 :\n    case /* Pexp_letexception */26 :\n    case /* Pexp_open */33 :\n        return true;\n    default:\n      return false;\n  }\n}\n\nfunction isPrintableAttribute(attr) {\n  switch (attr[0].txt) {\n    case \"JSX\" :\n    case \"bs\" :\n    case \"ns.braces\" :\n    case \"ns.iflet\" :\n    case \"ns.ternary\" :\n    case \"res.template\" :\n        return false;\n    default:\n      return true;\n  }\n}\n\nfunction hasPrintableAttributes(attrs) {\n  return List.exists(isPrintableAttribute, attrs);\n}\n\nfunction filterPrintableAttributes(attrs) {\n  return List.filter(isPrintableAttribute)(attrs);\n}\n\nfunction partitionPrintableAttributes(attrs) {\n  return List.partition(isPrintableAttribute, attrs);\n}\n\nfunction requiresSpecialCallbackPrintingLastArg(args) {\n  var _args = args;\n  while(true) {\n    var args$1 = _args;\n    if (!args$1) {\n      return false;\n    }\n    var tmp = args$1.hd[1].pexp_desc;\n    if (typeof tmp === \"number\") {\n      _args = args$1.tl;\n      continue ;\n    }\n    switch (tmp.TAG | 0) {\n      case /* Pexp_fun */4 :\n  
    case /* Pexp_newtype */31 :\n          if (args$1.tl) {\n            return false;\n          } else {\n            return true;\n          }\n      default:\n        _args = args$1.tl;\n        continue ;\n    }\n  };\n}\n\nfunction requiresSpecialCallbackPrintingFirstArg(args) {\n  if (!args) {\n    return false;\n  }\n  var tmp = args.hd[1].pexp_desc;\n  if (typeof tmp === \"number\") {\n    return false;\n  }\n  switch (tmp.TAG | 0) {\n    case /* Pexp_fun */4 :\n    case /* Pexp_newtype */31 :\n        break;\n    default:\n      return false;\n  }\n  var rest = args.tl;\n  if (rest) {\n    var _args = rest;\n    while(true) {\n      var args$1 = _args;\n      if (!args$1) {\n        return true;\n      }\n      var tmp$1 = args$1.hd[1].pexp_desc;\n      if (typeof tmp$1 === \"number\") {\n        _args = args$1.tl;\n        continue ;\n      }\n      switch (tmp$1.TAG | 0) {\n        case /* Pexp_fun */4 :\n        case /* Pexp_newtype */31 :\n            return false;\n        default:\n          _args = args$1.tl;\n          continue ;\n      }\n    };\n  } else {\n    return false;\n  }\n}\n\nfunction modExprApply(modExpr) {\n  var _acc = /* [] */0;\n  var _modExpr = modExpr;\n  while(true) {\n    var modExpr$1 = _modExpr;\n    var acc = _acc;\n    var match = modExpr$1.pmod_desc;\n    if (match.TAG !== /* Pmod_apply */3) {\n      return [\n              acc,\n              modExpr$1\n            ];\n    }\n    _modExpr = match._0;\n    _acc = {\n      hd: match._1,\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction modExprFunctor(modExpr) {\n  var _acc = /* [] */0;\n  var _modExpr = modExpr;\n  while(true) {\n    var modExpr$1 = _modExpr;\n    var acc = _acc;\n    var match = modExpr$1.pmod_desc;\n    if (match.TAG !== /* Pmod_functor */2) {\n      return [\n              List.rev(acc),\n              modExpr$1\n            ];\n    }\n    var param_0 = modExpr$1.pmod_attributes;\n    var param_1 = match._0;\n    var param_2 = match._1;\n    
var param = [\n      param_0,\n      param_1,\n      param_2\n    ];\n    _modExpr = match._2;\n    _acc = {\n      hd: param,\n      tl: acc\n    };\n    continue ;\n  };\n}\n\nfunction collectPatternsFromListConstruct(_acc, _pattern) {\n  while(true) {\n    var pattern = _pattern;\n    var acc = _acc;\n    var match = pattern.ppat_desc;\n    if (typeof match === \"number\") {\n      return [\n              List.rev(acc),\n              pattern\n            ];\n    }\n    if (match.TAG !== /* Ppat_construct */5) {\n      return [\n              List.rev(acc),\n              pattern\n            ];\n    }\n    var match$1 = match._0.txt;\n    switch (match$1.TAG | 0) {\n      case /* Lident */0 :\n          if (match$1._0 !== \"::\") {\n            return [\n                    List.rev(acc),\n                    pattern\n                  ];\n          }\n          var match$2 = match._1;\n          if (match$2 === undefined) {\n            return [\n                    List.rev(acc),\n                    pattern\n                  ];\n          }\n          var match$3 = match$2.ppat_desc;\n          if (typeof match$3 === \"number\") {\n            return [\n                    List.rev(acc),\n                    pattern\n                  ];\n          }\n          if (match$3.TAG !== /* Ppat_tuple */4) {\n            return [\n                    List.rev(acc),\n                    pattern\n                  ];\n          }\n          var match$4 = match$3._0;\n          if (!match$4) {\n            return [\n                    List.rev(acc),\n                    pattern\n                  ];\n          }\n          var match$5 = match$4.tl;\n          if (!match$5) {\n            return [\n                    List.rev(acc),\n                    pattern\n                  ];\n          }\n          if (match$5.tl) {\n            return [\n                    List.rev(acc),\n                    pattern\n                  ];\n          }\n          _pattern = 
match$5.hd;\n          _acc = {\n            hd: match$4.hd,\n            tl: acc\n          };\n          continue ;\n      case /* Ldot */1 :\n      case /* Lapply */2 :\n          return [\n                  List.rev(acc),\n                  pattern\n                ];\n      \n    }\n  };\n}\n\nfunction hasTemplateLiteralAttr(attrs) {\n  return List.exists((function (attr) {\n                if (attr[0].txt === \"res.template\") {\n                  return true;\n                } else {\n                  return false;\n                }\n              }), attrs);\n}\n\nfunction isTemplateLiteral(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  switch (match.TAG | 0) {\n    case /* Pexp_constant */1 :\n        var match$1 = match._0;\n        if (match$1.TAG === /* Pconst_string */2) {\n          var match$2 = match$1._1;\n          if (match$2 === \"\") {\n            return true;\n          }\n          \n        }\n        if (hasTemplateLiteralAttr(expr.pexp_attributes)) {\n          return true;\n        } else {\n          return false;\n        }\n    case /* Pexp_apply */5 :\n        var match$3 = match._0.pexp_desc;\n        if (typeof match$3 === \"number\") {\n          return false;\n        }\n        if (match$3.TAG !== /* Pexp_ident */0) {\n          return false;\n        }\n        var match$4 = match$3._0.txt;\n        switch (match$4.TAG | 0) {\n          case /* Lident */0 :\n              if (match$4._0 !== \"^\") {\n                return false;\n              }\n              var match$5 = match._1;\n              if (!match$5) {\n                return false;\n              }\n              if (typeof match$5.hd[0] !== \"number\") {\n                return false;\n              }\n              var match$6 = match$5.tl;\n              if (match$6 && typeof match$6.hd[0] === \"number\" && !(match$6.tl || !hasTemplateLiteralAttr(expr.pexp_attributes))) {\n                return 
true;\n              } else {\n                return false;\n              }\n          case /* Ldot */1 :\n          case /* Lapply */2 :\n              return false;\n          \n        }\n    default:\n      return false;\n  }\n}\n\nfunction collectOrPatternChain(pat) {\n  var _pattern = pat;\n  var _chain = /* [] */0;\n  while(true) {\n    var chain = _chain;\n    var pattern = _pattern;\n    var match = pattern.ppat_desc;\n    if (typeof match === \"number\") {\n      return {\n              hd: pattern,\n              tl: chain\n            };\n    }\n    if (match.TAG !== /* Ppat_or */9) {\n      return {\n              hd: pattern,\n              tl: chain\n            };\n    }\n    _chain = {\n      hd: match._1,\n      tl: chain\n    };\n    _pattern = match._0;\n    continue ;\n  };\n}\n\nfunction isSinglePipeExpr(expr) {\n  var isPipeExpr = function (expr) {\n    var match = expr.pexp_desc;\n    if (typeof match === \"number\") {\n      return false;\n    }\n    if (match.TAG !== /* Pexp_apply */5) {\n      return false;\n    }\n    var match$1 = match._0.pexp_desc;\n    if (typeof match$1 === \"number\") {\n      return false;\n    }\n    if (match$1.TAG !== /* Pexp_ident */0) {\n      return false;\n    }\n    var match$2 = match$1._0.txt;\n    switch (match$2.TAG | 0) {\n      case /* Lident */0 :\n          switch (match$2._0) {\n            case \"|.\" :\n            case \"|>\" :\n                break;\n            default:\n              return false;\n          }\n          var match$3 = match._1;\n          if (!match$3) {\n            return false;\n          }\n          if (typeof match$3.hd[0] !== \"number\") {\n            return false;\n          }\n          var match$4 = match$3.tl;\n          if (match$4 && typeof match$4.hd[0] === \"number\" && !match$4.tl) {\n            return true;\n          } else {\n            return false;\n          }\n          break;\n      case /* Ldot */1 :\n      case /* Lapply */2 :\n          
return false;\n      \n    }\n  };\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Pexp_apply */5) {\n    return false;\n  }\n  var match$1 = match._0.pexp_desc;\n  if (typeof match$1 === \"number\") {\n    return false;\n  }\n  if (match$1.TAG !== /* Pexp_ident */0) {\n    return false;\n  }\n  var match$2 = match$1._0.txt;\n  switch (match$2.TAG | 0) {\n    case /* Lident */0 :\n        switch (match$2._0) {\n          case \"|.\" :\n          case \"|>\" :\n              break;\n          default:\n            return false;\n        }\n        var match$3 = match._1;\n        if (!match$3) {\n          return false;\n        }\n        var match$4 = match$3.hd;\n        if (typeof match$4[0] !== \"number\") {\n          return false;\n        }\n        var match$5 = match$3.tl;\n        if (match$5 && typeof match$5.hd[0] === \"number\" && !match$5.tl) {\n          return !isPipeExpr(match$4[1]);\n        } else {\n          return false;\n        }\n        break;\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        return false;\n    \n  }\n}\n\nfunction isUnderscoreApplySugar(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if (match.TAG !== /* Pexp_fun */4) {\n    return false;\n  }\n  if (typeof match._0 !== \"number\") {\n    return false;\n  }\n  if (match._1 !== undefined) {\n    return false;\n  }\n  var match$1 = match._2.ppat_desc;\n  if (typeof match$1 === \"number\") {\n    return false;\n  }\n  if (match$1.TAG !== /* Ppat_var */0) {\n    return false;\n  }\n  if (match$1._0.txt !== \"__x\") {\n    return false;\n  }\n  var tmp = match._3.pexp_desc;\n  if (typeof tmp === \"number\" || tmp.TAG !== /* Pexp_apply */5) {\n    return false;\n  } else {\n    return true;\n  }\n}\n\nfunction isRewrittenUnderscoreApplySugar(expr) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return false;\n  }\n  if 
(match.TAG !== /* Pexp_ident */0) {\n    return false;\n  }\n  var match$1 = match._0.txt;\n  switch (match$1.TAG | 0) {\n    case /* Lident */0 :\n        if (match$1._0 === \"_\") {\n          return true;\n        } else {\n          return false;\n        }\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        return false;\n    \n  }\n}\n\nexport {\n  arrowType ,\n  functorType ,\n  processUncurriedAttribute ,\n  collectListExpressions ,\n  rewriteUnderscoreApply ,\n  funExpr ,\n  processBracesAttr ,\n  filterParsingAttrs ,\n  isBlockExpr ,\n  isBracedExpr ,\n  isMultilineText ,\n  isHuggableExpression ,\n  isHuggableRhs ,\n  isHuggablePattern ,\n  operatorPrecedence ,\n  isUnaryOperator ,\n  isUnaryExpression ,\n  isBinaryOperator ,\n  isBinaryExpression ,\n  isEqualityOperator ,\n  flattenableOperators ,\n  hasIfLetAttribute ,\n  isIfLetExpr ,\n  hasAttributes ,\n  isArrayAccess ,\n  collectIfExpressions ,\n  hasTernaryAttribute ,\n  isTernaryExpr ,\n  collectTernaryParts ,\n  parametersShouldHug ,\n  filterTernaryAttributes ,\n  filterFragileMatchAttributes ,\n  isJsxExpression ,\n  hasJsxAttribute ,\n  shouldIndentBinaryExpr ,\n  shouldInlineRhsBinaryExpr ,\n  isPrintableAttribute ,\n  hasPrintableAttributes ,\n  filterPrintableAttributes ,\n  partitionPrintableAttributes ,\n  requiresSpecialCallbackPrintingLastArg ,\n  requiresSpecialCallbackPrintingFirstArg ,\n  modExprApply ,\n  modExprFunctor ,\n  collectPatternsFromListConstruct ,\n  hasTemplateLiteralAttr ,\n  isTemplateLiteral ,\n  collectOrPatternChain ,\n  isSinglePipeExpr ,\n  isUnderscoreApplySugar ,\n  isRewrittenUnderscoreApplySugar ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_parsetree_viewer.res",
    "content": "open Parsetree\n\nlet arrowType = ct => {\n  let rec process = (attrsBefore, acc, typ) =>\n    switch typ {\n    | {ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2), ptyp_attributes: list{}} =>\n      let arg = (list{}, lbl, typ1)\n      process(attrsBefore, list{arg, ...acc}, typ2)\n    | {\n        ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2),\n        ptyp_attributes: list{({txt: \"bs\"}, _)} as attrs,\n      } =>\n      let arg = (attrs, lbl, typ1)\n      process(attrsBefore, list{arg, ...acc}, typ2)\n    | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: _attrs} as returnType =>\n      let args = List.rev(acc)\n      (attrsBefore, args, returnType)\n    | {\n        ptyp_desc: Ptyp_arrow((Labelled(_) | Optional(_)) as lbl, typ1, typ2),\n        ptyp_attributes: attrs,\n      } =>\n      let arg = (attrs, lbl, typ1)\n      process(attrsBefore, list{arg, ...acc}, typ2)\n    | typ => (attrsBefore, List.rev(acc), typ)\n    }\n\n  switch ct {\n  | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: attrs} as typ =>\n    process(attrs, list{}, {...typ, ptyp_attributes: list{}})\n  | typ => process(list{}, list{}, typ)\n  }\n}\n\nlet functorType = modtype => {\n  let rec process = (acc, modtype) =>\n    switch modtype {\n    | {pmty_desc: Pmty_functor(lbl, argType, returnType), pmty_attributes: attrs} =>\n      let arg = (attrs, lbl, argType)\n      process(list{arg, ...acc}, returnType)\n    | modType => (List.rev(acc), modType)\n    }\n\n  process(list{}, modtype)\n}\n\nlet processUncurriedAttribute = attrs => {\n  let rec process = (uncurriedSpotted, acc, attrs) =>\n    switch attrs {\n    | list{} => (uncurriedSpotted, List.rev(acc))\n    | list{({Location.txt: \"bs\"}, _), ...rest} => process(true, acc, rest)\n    | list{attr, ...rest} => process(uncurriedSpotted, list{attr, ...acc}, rest)\n    }\n\n  process(false, list{}, attrs)\n}\n\nlet collectListExpressions = expr => {\n  let rec collect = (acc, expr) =>\n   
 switch expr.pexp_desc {\n    | Pexp_construct({txt: Longident.Lident(\"[]\")}, _) => (List.rev(acc), None)\n    | Pexp_construct(\n        {txt: Longident.Lident(\"::\")},\n        Some({pexp_desc: Pexp_tuple(list{hd, tail})}),\n      ) =>\n      collect(list{hd, ...acc}, tail)\n    | _ => (List.rev(acc), Some(expr))\n    }\n\n  collect(list{}, expr)\n}\n\n/* (__x) => f(a, __x, c) -----> f(a, _, c) */\nlet rewriteUnderscoreApply = expr =>\n  switch expr.pexp_desc {\n  | Pexp_fun(\n      Nolabel,\n      None,\n      {ppat_desc: Ppat_var({txt: \"__x\"})},\n      {pexp_desc: Pexp_apply(callExpr, args)} as e,\n    ) =>\n    let newArgs = List.map(arg =>\n      switch arg {\n      | (lbl, {pexp_desc: Pexp_ident({txt: Longident.Lident(\"__x\")} as lid)} as argExpr) => (\n          lbl,\n          {...argExpr, pexp_desc: Pexp_ident({...lid, txt: Longident.Lident(\"_\")})},\n        )\n      | arg => arg\n      }\n    , args)\n    {...e, pexp_desc: Pexp_apply(callExpr, newArgs)}\n  | _ => expr\n  }\n\ntype funParamKind =\n  | Parameter({\n      attrs: Parsetree.attributes,\n      lbl: Asttypes.arg_label,\n      defaultExpr: option<Parsetree.expression>,\n      pat: Parsetree.pattern,\n    })\n  | NewTypes({attrs: Parsetree.attributes, locs: list<Asttypes.loc<string>>})\n\nlet funExpr = expr => {\n  /* Turns (type t, type u, type z) into \"type t u z\" */\n  let rec collectNewTypes = (acc, returnExpr) =>\n    switch returnExpr {\n    | {pexp_desc: Pexp_newtype(stringLoc, returnExpr), pexp_attributes: list{}} =>\n      collectNewTypes(list{stringLoc, ...acc}, returnExpr)\n    | returnExpr => (List.rev(acc), returnExpr)\n    }\n\n  let rec collect = (attrsBefore, acc, expr) =>\n    switch expr {\n    | {\n        pexp_desc: Pexp_fun(\n          Nolabel,\n          None,\n          {ppat_desc: Ppat_var({txt: \"__x\"})},\n          {pexp_desc: Pexp_apply(_)},\n        ),\n      } => (attrsBefore, List.rev(acc), rewriteUnderscoreApply(expr))\n    | {pexp_desc: Pexp_fun(lbl, 
defaultExpr, pattern, returnExpr), pexp_attributes: list{}} =>\n      let parameter = Parameter({\n        attrs: list{},\n        lbl: lbl,\n        defaultExpr: defaultExpr,\n        pat: pattern,\n      })\n      collect(attrsBefore, list{parameter, ...acc}, returnExpr)\n    | {pexp_desc: Pexp_newtype(stringLoc, rest), pexp_attributes: attrs} =>\n      let (stringLocs, returnExpr) = collectNewTypes(list{stringLoc}, rest)\n      let param = NewTypes({attrs: attrs, locs: stringLocs})\n      collect(attrsBefore, list{param, ...acc}, returnExpr)\n    | {\n        pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr),\n        pexp_attributes: list{({txt: \"bs\"}, _)} as attrs,\n      } =>\n      let parameter = Parameter({\n        attrs: attrs,\n        lbl: lbl,\n        defaultExpr: defaultExpr,\n        pat: pattern,\n      })\n      collect(attrsBefore, list{parameter, ...acc}, returnExpr)\n    | {\n        pexp_desc: Pexp_fun((Labelled(_) | Optional(_)) as lbl, defaultExpr, pattern, returnExpr),\n        pexp_attributes: attrs,\n      } =>\n      let parameter = Parameter({\n        attrs: attrs,\n        lbl: lbl,\n        defaultExpr: defaultExpr,\n        pat: pattern,\n      })\n      collect(attrsBefore, list{parameter, ...acc}, returnExpr)\n    | expr => (attrsBefore, List.rev(acc), expr)\n    }\n\n  switch expr {\n  | {\n      pexp_desc: Pexp_fun(Nolabel, _defaultExpr, _pattern, _returnExpr),\n      pexp_attributes: attrs,\n    } as expr =>\n    collect(attrs, list{}, {...expr, pexp_attributes: list{}})\n  | expr => collect(list{}, list{}, expr)\n  }\n}\n\nlet processBracesAttr = expr =>\n  switch expr.pexp_attributes {\n  | list{({txt: \"ns.braces\"}, _) as attr, ...attrs} => (\n      Some(attr),\n      {...expr, pexp_attributes: attrs},\n    )\n  | _ => (None, expr)\n  }\n\nlet filterParsingAttrs = attrs => List.filter(attr =>\n    switch attr {\n    | (\n        {\n          Location.txt:\n            \"ns.ternary\" | \"ns.braces\" | 
\"res.template\" | \"bs\" | \"ns.iflet\" | \"ns.namedArgLoc\",\n        },\n        _,\n      ) => false\n    | _ => true\n    }\n  , attrs)\n\nlet isBlockExpr = expr =>\n  switch expr.pexp_desc {\n  | Pexp_letmodule(_)\n  | Pexp_letexception(_)\n  | Pexp_let(_)\n  | Pexp_open(_)\n  | Pexp_sequence(_) => true\n  | _ => false\n  }\n\nlet isBracedExpr = expr =>\n  switch processBracesAttr(expr) {\n  | (Some(_), _) => true\n  | _ => false\n  }\n\nlet isMultilineText = txt => {\n  let len = String.length(txt)\n  let rec check = i =>\n    if i >= len {\n      false\n    } else {\n      let c = String.unsafe_get(txt, i)\n      switch c {\n      | '\\n' | '\\r' => true\n      | '\\\\' =>\n        if i + 2 == len {\n          false\n        } else {\n          check(i + 2)\n        }\n      | _ => check(i + 1)\n      }\n    }\n\n  check(0)\n}\n\nlet isHuggableExpression = expr =>\n  switch expr.pexp_desc {\n  | Pexp_array(_)\n  | Pexp_tuple(_)\n  | Pexp_constant(Pconst_string(_, Some(_)))\n  | Pexp_construct({txt: Longident.Lident(\"::\" | \"[]\")}, _)\n  | Pexp_extension({txt: \"bs.obj\" | \"obj\"}, _)\n  | Pexp_record(_) => true\n  | _ if isBlockExpr(expr) => true\n  | _ if isBracedExpr(expr) => true\n  | Pexp_constant(Pconst_string(txt, None)) if isMultilineText(txt) => true\n  | _ => false\n  }\n\nlet isHuggableRhs = expr =>\n  switch expr.pexp_desc {\n  | Pexp_array(_)\n  | Pexp_tuple(_)\n  | Pexp_construct({txt: Longident.Lident(\"::\" | \"[]\")}, _)\n  | Pexp_extension({txt: \"bs.obj\" | \"obj\"}, _)\n  | Pexp_record(_) => true\n  | _ if isBracedExpr(expr) => true\n  | _ => false\n  }\n\nlet isHuggablePattern = pattern =>\n  switch pattern.ppat_desc {\n  | Ppat_array(_)\n  | Ppat_tuple(_)\n  | Ppat_record(_)\n  | Ppat_variant(_)\n  | Ppat_construct(_) => true\n  | _ => false\n  }\n\nlet operatorPrecedence = operator =>\n  switch operator {\n  | \":=\" => 1\n  | \"||\" => 2\n  | \"&&\" => 3\n  | \"=\" | \"==\" | \"<\" | \">\" | \"!=\" | \"<>\" | \"!==\" | \"<=\" | 
\">=\" | \"|>\" => 4\n  | \"+\" | \"+.\" | \"-\" | \"-.\" | \"^\" => 5\n  | \"*\" | \"*.\" | \"/\" | \"/.\" => 6\n  | \"**\" => 7\n  | \"#\" | \"##\" | \"|.\" => 8\n  | _ => 0\n  }\n\nlet isUnaryOperator = operator =>\n  switch operator {\n  | \"~+\" | \"~+.\" | \"~-\" | \"~-.\" | \"not\" => true\n  | _ => false\n  }\n\nlet isUnaryExpression = expr =>\n  switch expr.pexp_desc {\n  | Pexp_apply({pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, list{(Nolabel, _arg)})\n    if isUnaryOperator(operator) => true\n  | _ => false\n  }\n\n/* TODO: tweak this to check for ghost ^ as template literal */\nlet isBinaryOperator = operator =>\n  switch operator {\n  | \":=\"\n  | \"||\"\n  | \"&&\"\n  | \"=\"\n  | \"==\"\n  | \"<\"\n  | \">\"\n  | \"!=\"\n  | \"!==\"\n  | \"<=\"\n  | \">=\"\n  | \"|>\"\n  | \"+\"\n  | \"+.\"\n  | \"-\"\n  | \"-.\"\n  | \"^\"\n  | \"*\"\n  | \"*.\"\n  | \"/\"\n  | \"/.\"\n  | \"**\"\n  | \"|.\"\n  | \"<>\" => true\n  | _ => false\n  }\n\nlet isBinaryExpression = expr =>\n  switch expr.pexp_desc {\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident(operator), loc: operatorLoc})},\n      list{(Nolabel, _operand1), (Nolabel, _operand2)},\n    )\n    if isBinaryOperator(operator) &&\n    !(operatorLoc.loc_ghost && operator == \"^\") /* template literal */ => true\n  | _ => false\n  }\n\nlet isEqualityOperator = operator =>\n  switch operator {\n  | \"=\" | \"==\" | \"<>\" | \"!=\" => true\n  | _ => false\n  }\n\nlet flattenableOperators = (parentOperator, childOperator) => {\n  let precParent = operatorPrecedence(parentOperator)\n  let precChild = operatorPrecedence(childOperator)\n  if precParent === precChild {\n    !(isEqualityOperator(parentOperator) && isEqualityOperator(childOperator))\n  } else {\n    false\n  }\n}\n\nlet rec hasIfLetAttribute = attrs =>\n  switch attrs {\n  | list{} => false\n  | list{({Location.txt: \"ns.iflet\"}, _), ..._} => true\n  | list{_, ...attrs} => hasIfLetAttribute(attrs)\n  }\n\nlet 
isIfLetExpr = expr =>\n  switch expr {\n  | {pexp_attributes: attrs, pexp_desc: Pexp_match(_)} if hasIfLetAttribute(attrs) => true\n  | _ => false\n  }\n\nlet hasAttributes = attrs => List.exists(attr =>\n    switch attr {\n    | ({Location.txt: \"bs\" | \"res.template\" | \"ns.ternary\" | \"ns.braces\" | \"ns.iflet\"}, _) => false\n    /* Remove the fragile pattern warning for iflet expressions */\n    | (\n        {Location.txt: \"warning\"},\n        PStr(list{{\n          pstr_desc: Pstr_eval({pexp_desc: Pexp_constant(Pconst_string(\"-4\", None))}, _),\n        }}),\n      ) =>\n      !hasIfLetAttribute(attrs)\n    | _ => true\n    }\n  , attrs)\n\nlet isArrayAccess = expr =>\n  switch expr.pexp_desc {\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident(\"Array\"), \"get\")})},\n      list{(Nolabel, _parentExpr), (Nolabel, _memberExpr)},\n    ) => true\n  | _ => false\n  }\n\ntype ifConditionKind =\n  | If(Parsetree.expression)\n  | IfLet(Parsetree.pattern, Parsetree.expression)\n\nlet collectIfExpressions = expr => {\n  let rec collect = (acc, expr) =>\n    switch expr.pexp_desc {\n    | Pexp_ifthenelse(ifExpr, thenExpr, Some(elseExpr)) =>\n      collect(list{(If(ifExpr), thenExpr), ...acc}, elseExpr)\n    | Pexp_ifthenelse(ifExpr, thenExpr, None as elseExpr) =>\n      let ifs = List.rev(list{(If(ifExpr), thenExpr), ...acc})\n      (ifs, elseExpr)\n    | Pexp_match(\n        condition,\n        list{\n          {pc_lhs: pattern, pc_guard: None, pc_rhs: thenExpr},\n          {pc_rhs: {pexp_desc: Pexp_construct({txt: Longident.Lident(\"()\")}, _)}},\n        },\n      ) if isIfLetExpr(expr) =>\n      let ifs = List.rev(list{(IfLet(pattern, condition), thenExpr), ...acc})\n      (ifs, None)\n    | Pexp_match(\n        condition,\n        list{{pc_lhs: pattern, pc_guard: None, pc_rhs: thenExpr}, {pc_rhs: elseExpr}},\n      ) if isIfLetExpr(expr) =>\n      collect(list{(IfLet(pattern, condition), thenExpr), ...acc}, elseExpr)\n    | _ => 
(List.rev(acc), Some(expr))\n    }\n\n  collect(list{}, expr)\n}\n\nlet rec hasTernaryAttribute = attrs =>\n  switch attrs {\n  | list{} => false\n  | list{({Location.txt: \"ns.ternary\"}, _), ..._} => true\n  | list{_, ...attrs} => hasTernaryAttribute(attrs)\n  }\n\nlet isTernaryExpr = expr =>\n  switch expr {\n  | {pexp_attributes: attrs, pexp_desc: Pexp_ifthenelse(_)} if hasTernaryAttribute(attrs) => true\n  | _ => false\n  }\n\nlet collectTernaryParts = expr => {\n  let rec collect = (acc, expr) =>\n    switch expr {\n    | {pexp_attributes: attrs, pexp_desc: Pexp_ifthenelse(condition, consequent, Some(alternate))}\n      if hasTernaryAttribute(attrs) =>\n      collect(list{(condition, consequent), ...acc}, alternate)\n    | alternate => (List.rev(acc), alternate)\n    }\n\n  collect(list{}, expr)\n}\n\nlet parametersShouldHug = parameters =>\n  switch parameters {\n  | list{Parameter({attrs: list{}, lbl: Asttypes.Nolabel, defaultExpr: None, pat})}\n    if isHuggablePattern(pat) => true\n  | _ => false\n  }\n\nlet filterTernaryAttributes = attrs => List.filter(attr =>\n    switch attr {\n    | ({Location.txt: \"ns.ternary\"}, _) => false\n    | _ => true\n    }\n  , attrs)\n\nlet filterFragileMatchAttributes = attrs => List.filter(attr =>\n    switch attr {\n    | (\n        {Location.txt: \"warning\"},\n        PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_constant(Pconst_string(\"-4\", _))}, _)}}),\n      ) => false\n    | _ => true\n    }\n  , attrs)\n\nlet isJsxExpression = expr => {\n  let rec loop = attrs =>\n    switch attrs {\n    | list{} => false\n    | list{({Location.txt: \"JSX\"}, _), ..._} => true\n    | list{_, ...attrs} => loop(attrs)\n    }\n\n  switch expr.pexp_desc {\n  | Pexp_apply(_) => loop(expr.Parsetree.pexp_attributes)\n  | _ => false\n  }\n}\n\nlet hasJsxAttribute = attributes => {\n  let rec loop = attrs =>\n    switch attrs {\n    | list{} => false\n    | list{({Location.txt: \"JSX\"}, _), ..._} => true\n    | list{_, ...attrs} => 
loop(attrs)\n    }\n\n  loop(attributes)\n}\n\nlet shouldIndentBinaryExpr = expr => {\n  let samePrecedenceSubExpression = (operator, subExpression) =>\n    switch subExpression {\n    | {\n        pexp_desc: Pexp_apply(\n          {pexp_desc: Pexp_ident({txt: Longident.Lident(subOperator)})},\n          list{(Nolabel, _lhs), (Nolabel, _rhs)},\n        ),\n      } if isBinaryOperator(subOperator) =>\n      flattenableOperators(operator, subOperator)\n    | _ => true\n    }\n\n  switch expr {\n  | {\n      pexp_desc: Pexp_apply(\n        {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},\n        list{(Nolabel, lhs), (Nolabel, _rhs)},\n      ),\n    } if isBinaryOperator(operator) =>\n    isEqualityOperator(operator) ||\n    (!samePrecedenceSubExpression(operator, lhs) ||\n    operator == \":=\")\n  | _ => false\n  }\n}\n\nlet shouldInlineRhsBinaryExpr = rhs =>\n  switch rhs.pexp_desc {\n  | Parsetree.Pexp_constant(_)\n  | Pexp_let(_)\n  | Pexp_letmodule(_)\n  | Pexp_letexception(_)\n  | Pexp_sequence(_)\n  | Pexp_open(_)\n  | Pexp_ifthenelse(_)\n  | Pexp_for(_)\n  | Pexp_while(_)\n  | Pexp_try(_)\n  | Pexp_array(_)\n  | Pexp_record(_) => true\n  | _ => false\n  }\n\nlet isPrintableAttribute = attr =>\n  switch attr {\n  | (\n      {Location.txt: \"bs\" | \"res.template\" | \"ns.ternary\" | \"ns.braces\" | \"ns.iflet\" | \"JSX\"},\n      _,\n    ) => false\n  | _ => true\n  }\n\nlet hasPrintableAttributes = attrs => List.exists(isPrintableAttribute, attrs)\n\nlet filterPrintableAttributes = attrs => List.filter(isPrintableAttribute, attrs)\n\nlet partitionPrintableAttributes = attrs => List.partition(isPrintableAttribute, attrs)\n\nlet requiresSpecialCallbackPrintingLastArg = args => {\n  let rec loop = args =>\n    switch args {\n    | list{} => false\n    | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)})} => true\n    | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)}), ..._} => false\n    | list{_, ...rest} => loop(rest)\n    }\n\n  
loop(args)\n}\n\nlet requiresSpecialCallbackPrintingFirstArg = args => {\n  let rec loop = args =>\n    switch args {\n    | list{} => true\n    | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)}), ..._} => false\n    | list{_, ...rest} => loop(rest)\n    }\n\n  switch args {\n  | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)})} => false\n  | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)}), ...rest} => loop(rest)\n  | _ => false\n  }\n}\n\nlet modExprApply = modExpr => {\n  let rec loop = (acc, modExpr) =>\n    switch modExpr {\n    | {pmod_desc: Pmod_apply(next, arg)} => loop(list{arg, ...acc}, next)\n    | _ => (acc, modExpr)\n    }\n\n  loop(list{}, modExpr)\n}\n\nlet modExprFunctor = modExpr => {\n  let rec loop = (acc, modExpr) =>\n    switch modExpr {\n    | {pmod_desc: Pmod_functor(lbl, modType, returnModExpr), pmod_attributes: attrs} =>\n      let param = (attrs, lbl, modType)\n      loop(list{param, ...acc}, returnModExpr)\n    | returnModExpr => (List.rev(acc), returnModExpr)\n    }\n\n  loop(list{}, modExpr)\n}\n\nlet rec collectPatternsFromListConstruct = (acc, pattern) => {\n  open Parsetree\n  switch pattern.ppat_desc {\n  | Ppat_construct({txt: Longident.Lident(\"::\")}, Some({ppat_desc: Ppat_tuple(list{pat, rest})})) =>\n    collectPatternsFromListConstruct(list{pat, ...acc}, rest)\n  | _ => (List.rev(acc), pattern)\n  }\n}\n\nlet hasTemplateLiteralAttr = attrs => List.exists(attr =>\n    switch attr {\n    | ({Location.txt: \"res.template\"}, _) => true\n    | _ => false\n    }\n  , attrs)\n\nlet isTemplateLiteral = expr =>\n  switch expr.pexp_desc {\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident(\"^\")})},\n      list{(Nolabel, _), (Nolabel, _)},\n    ) if hasTemplateLiteralAttr(expr.pexp_attributes) => true\n  | Pexp_constant(Pconst_string(_, Some(\"\"))) => true\n  | Pexp_constant(_) if hasTemplateLiteralAttr(expr.pexp_attributes) => true\n  | _ => false\n  }\n\n/* Blue | Red | Green -> [Blue; Red; Green] 
*/\nlet collectOrPatternChain = pat => {\n  let rec loop = (pattern, chain) =>\n    switch pattern.ppat_desc {\n    | Ppat_or(left, right) => loop(left, list{right, ...chain})\n    | _ => list{pattern, ...chain}\n    }\n\n  loop(pat, list{})\n}\n\nlet isSinglePipeExpr = expr => {\n  /* handles:\n   *   x\n   *   ->Js.Dict.get(\"wm-property\")\n   *   ->Option.flatMap(Js.Json.decodeString)\n   *   ->Option.flatMap(x =>\n   *     switch x {\n   *     | \"like-of\" => Some(#like)\n   *     | \"repost-of\" => Some(#repost)\n   *     | _ => None\n   *     }\n   *   )\n   */\n  let isPipeExpr = expr =>\n    switch expr.pexp_desc {\n    | Pexp_apply(\n        {pexp_desc: Pexp_ident({txt: Longident.Lident(\"|.\" | \"|>\")})},\n        list{(Nolabel, _operand1), (Nolabel, _operand2)},\n      ) => true\n    | _ => false\n    }\n\n  switch expr.pexp_desc {\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident(\"|.\" | \"|>\")})},\n      list{(Nolabel, operand1), (Nolabel, _operand2)},\n    ) if !isPipeExpr(operand1) => true\n  | _ => false\n  }\n}\n\nlet isUnderscoreApplySugar = expr =>\n  switch expr.pexp_desc {\n  | Pexp_fun(Nolabel, None, {ppat_desc: Ppat_var({txt: \"__x\"})}, {pexp_desc: Pexp_apply(_)}) => true\n  | _ => false\n  }\n\nlet isRewrittenUnderscoreApplySugar = expr =>\n  switch expr.pexp_desc {\n  | Pexp_ident({txt: Longident.Lident(\"_\")}) => true\n  | _ => false\n  }\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_printer.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Char from \"rescript/lib/es6/char.js\";\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Bytes from \"rescript/lib/es6/bytes.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Hashtbl from \"rescript/lib/es6/hashtbl.js\";\nimport * as Res_doc from \"./res_doc.js\";\nimport * as $$Location from \"./location.js\";\nimport * as Res_utf8 from \"./res_utf8.js\";\nimport * as Longident from \"./longident.js\";\nimport * as Res_token from \"./res_token.js\";\nimport * as Res_parens from \"./res_parens.js\";\nimport * as Res_comment from \"./res_comment.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\nimport * as Res_comments_table from \"./res_comments_table.js\";\nimport * as Res_parsetree_viewer from \"./res_parsetree_viewer.js\";\n\nfunction convertBsExternalAttribute(x) {\n  switch (x) {\n    case \"bs.as\" :\n        return \"as\";\n    case \"bs.deriving\" :\n        return \"deriving\";\n    case \"bs.get\" :\n        return \"get\";\n    case \"bs.get_index\" :\n        return \"get_index\";\n    case \"bs.ignore\" :\n        return \"ignore\";\n    case \"bs.inline\" :\n        return \"inline\";\n    case \"bs.int\" :\n        return \"int\";\n    case \"bs.meth\" :\n        return \"meth\";\n    case \"bs.module\" :\n        return \"module\";\n    case \"bs.new\" :\n        return \"new\";\n    case \"bs.obj\" :\n        return \"obj\";\n    case \"bs.optional\" :\n        return \"optional\";\n    case \"bs.return\" :\n        return \"return\";\n    case \"bs.scope\" :\n        return \"scope\";\n    case \"bs.send\" :\n        return \"send\";\n    case \"bs.set\" :\n        return \"set\";\n    case \"bs.set_index\" :\n        return \"set_index\";\n    case \"bs.string\" :\n        return \"string\";\n    case \"bs.this\" :\n        return 
\"this\";\n    case \"bs.uncurry\" :\n        return \"uncurry\";\n    case \"bs.unwrap\" :\n        return \"unwrap\";\n    case \"bs.val\" :\n        return \"val\";\n    case \"bs.splice\" :\n    case \"bs.variadic\" :\n        return \"variadic\";\n    default:\n      return x;\n  }\n}\n\nfunction convertBsExtension(x) {\n  switch (x) {\n    case \"bs.debugger\" :\n        return \"debugger\";\n    case \"bs.obj\" :\n        return \"obj\";\n    case \"bs.external\" :\n    case \"bs.raw\" :\n        return \"raw\";\n    case \"bs.re\" :\n        return \"re\";\n    default:\n      return x;\n  }\n}\n\nfunction addParens(doc) {\n  return Res_doc.group(Res_doc.concat({\n                  hd: Res_doc.lparen,\n                  tl: {\n                    hd: Res_doc.indent(Res_doc.concat({\n                              hd: Res_doc.softLine,\n                              tl: {\n                                hd: doc,\n                                tl: /* [] */0\n                              }\n                            })),\n                    tl: {\n                      hd: Res_doc.softLine,\n                      tl: {\n                        hd: Res_doc.rparen,\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction addBraces(doc) {\n  return Res_doc.group(Res_doc.concat({\n                  hd: Res_doc.lbrace,\n                  tl: {\n                    hd: Res_doc.indent(Res_doc.concat({\n                              hd: Res_doc.softLine,\n                              tl: {\n                                hd: doc,\n                                tl: /* [] */0\n                              }\n                            })),\n                    tl: {\n                      hd: Res_doc.softLine,\n                      tl: {\n                        hd: Res_doc.rbrace,\n                        tl: /* [] */0\n                      }\n                    }\n   
               }\n                }));\n}\n\nfunction getFirstLeadingComment(tbl, loc) {\n  var val;\n  try {\n    val = Hashtbl.find(tbl.leading, loc);\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return ;\n    }\n    throw exn;\n  }\n  if (val) {\n    return val.hd;\n  }\n  \n}\n\nfunction hasLeadingLineComment(tbl, loc) {\n  var comment = getFirstLeadingComment(tbl, loc);\n  if (comment !== undefined) {\n    return Res_comment.isSingleLineComment(comment);\n  } else {\n    return false;\n  }\n}\n\nfunction hasCommentBelow(tbl, loc) {\n  var val;\n  try {\n    val = Hashtbl.find(tbl.trailing, loc);\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return false;\n    }\n    throw exn;\n  }\n  if (!val) {\n    return false;\n  }\n  var commentLoc = Res_comment.loc(val.hd);\n  return commentLoc.loc_start.pos_lnum > loc.loc_end.pos_lnum;\n}\n\nfunction printMultilineCommentContent(txt) {\n  var indentStars = function (_lines, _acc) {\n    while(true) {\n      var acc = _acc;\n      var lines = _lines;\n      if (!lines) {\n        return Res_doc.nil;\n      }\n      var lines$1 = lines.tl;\n      var lastLine = lines.hd;\n      if (lines$1) {\n        var line = $$String.trim(lastLine);\n        if (line !== \"\" && line[0] === \"*\") {\n          var doc = Res_doc.text(\" \" + line);\n          _acc = {\n            hd: Res_doc.hardLine,\n            tl: {\n              hd: doc,\n              tl: acc\n            }\n          };\n          _lines = lines$1;\n          continue ;\n        }\n        var len = txt.length;\n        var trailingSpace = len > 0 && txt[len - 1 | 0] === \" \" ? 
Res_doc.space : Res_doc.nil;\n        var content = Res_comment.trimSpaces(txt);\n        return Res_doc.concat({\n                    hd: Res_doc.text(content),\n                    tl: {\n                      hd: trailingSpace,\n                      tl: /* [] */0\n                    }\n                  });\n      }\n      var line$1 = $$String.trim(lastLine);\n      var doc$1 = Res_doc.text(\" \" + line$1);\n      var trailingSpace$1 = line$1 === \"\" ? Res_doc.nil : Res_doc.space;\n      return Res_doc.concat(List.rev({\n                      hd: trailingSpace$1,\n                      tl: {\n                        hd: doc$1,\n                        tl: acc\n                      }\n                    }));\n    };\n  };\n  var lines = $$String.split_on_char(/* '\\n' */10, txt);\n  if (!lines) {\n    return Res_doc.text(\"/* */\");\n  }\n  var rest = lines.tl;\n  var line = lines.hd;\n  if (!rest) {\n    return Res_doc.concat({\n                hd: Res_doc.text(\"/* \"),\n                tl: {\n                  hd: Res_doc.text(Res_comment.trimSpaces(line)),\n                  tl: {\n                    hd: Res_doc.text(\" */\"),\n                    tl: /* [] */0\n                  }\n                }\n              });\n  }\n  var firstLine = Res_comment.trimSpaces(line);\n  var tmp;\n  switch (firstLine) {\n    case \"\" :\n    case \"*\" :\n        tmp = Res_doc.nil;\n        break;\n    default:\n      tmp = Res_doc.space;\n  }\n  return Res_doc.concat({\n              hd: Res_doc.text(\"/*\"),\n              tl: {\n                hd: tmp,\n                tl: {\n                  hd: indentStars(rest, {\n                        hd: Res_doc.hardLine,\n                        tl: {\n                          hd: Res_doc.text(firstLine),\n                          tl: /* [] */0\n                        }\n                      }),\n                  tl: {\n                    hd: Res_doc.text(\"*/\"),\n                    tl: /* [] */0\n              
    }\n                }\n              }\n            });\n}\n\nfunction printTrailingComment(prevLoc, nodeLoc, comment) {\n  var singleLine = Res_comment.isSingleLineComment(comment);\n  var txt = Res_comment.txt(comment);\n  var content = singleLine ? Res_doc.text(\"//\" + txt) : printMultilineCommentContent(txt);\n  var cmtStart = Res_comment.loc(comment).loc_start;\n  var diff = cmtStart.pos_lnum - prevLoc.loc_end.pos_lnum | 0;\n  var isBelow = Res_comment.loc(comment).loc_start.pos_lnum > nodeLoc.loc_end.pos_lnum;\n  if (diff > 0 || isBelow) {\n    return Res_doc.concat({\n                hd: Res_doc.breakParent,\n                tl: {\n                  hd: Res_doc.lineSuffix(Res_doc.concat({\n                            hd: Res_doc.hardLine,\n                            tl: {\n                              hd: diff > 1 ? Res_doc.hardLine : Res_doc.nil,\n                              tl: {\n                                hd: content,\n                                tl: /* [] */0\n                              }\n                            }\n                          })),\n                  tl: /* [] */0\n                }\n              });\n  } else if (singleLine) {\n    return Res_doc.lineSuffix(Res_doc.concat({\n                    hd: Res_doc.space,\n                    tl: {\n                      hd: content,\n                      tl: /* [] */0\n                    }\n                  }));\n  } else {\n    return Res_doc.concat({\n                hd: Res_doc.space,\n                tl: {\n                  hd: content,\n                  tl: /* [] */0\n                }\n              });\n  }\n}\n\nfunction printLeadingComment(nextComment, comment) {\n  var singleLine = Res_comment.isSingleLineComment(comment);\n  var txt = Res_comment.txt(comment);\n  var content = singleLine ? 
Res_doc.text(\"//\" + txt) : printMultilineCommentContent(txt);\n  var tmp;\n  if (nextComment !== undefined) {\n    var nextLoc = Res_comment.loc(nextComment);\n    var currLoc = Res_comment.loc(comment);\n    var diff = nextLoc.loc_start.pos_lnum - currLoc.loc_end.pos_lnum | 0;\n    var nextSingleLine = Res_comment.isSingleLineComment(nextComment);\n    tmp = singleLine && nextSingleLine ? (\n        diff > 1 ? Res_doc.hardLine : Res_doc.nil\n      ) : (\n        singleLine && !nextSingleLine ? (\n            diff > 1 ? Res_doc.hardLine : Res_doc.nil\n          ) : (\n            diff > 1 ? Res_doc.concat({\n                    hd: Res_doc.hardLine,\n                    tl: {\n                      hd: Res_doc.hardLine,\n                      tl: /* [] */0\n                    }\n                  }) : (\n                diff === 1 ? Res_doc.hardLine : Res_doc.space\n              )\n          )\n      );\n  } else {\n    tmp = Res_doc.nil;\n  }\n  var separator = Res_doc.concat({\n        hd: singleLine ? 
Res_doc.concat({\n                hd: Res_doc.hardLine,\n                tl: {\n                  hd: Res_doc.breakParent,\n                  tl: /* [] */0\n                }\n              }) : Res_doc.nil,\n        tl: {\n          hd: tmp,\n          tl: /* [] */0\n        }\n      });\n  return Res_doc.concat({\n              hd: content,\n              tl: {\n                hd: separator,\n                tl: /* [] */0\n              }\n            });\n}\n\nfunction printCommentsInside(cmtTbl, loc) {\n  var loop = function (_acc, _comments) {\n    while(true) {\n      var comments = _comments;\n      var acc = _acc;\n      if (!comments) {\n        return Res_doc.nil;\n      }\n      var rest = comments.tl;\n      var comment = comments.hd;\n      if (rest) {\n        var cmtDoc = printLeadingComment(rest.hd, comment);\n        _comments = rest;\n        _acc = {\n          hd: cmtDoc,\n          tl: acc\n        };\n        continue ;\n      }\n      var cmtDoc$1 = printLeadingComment(undefined, comment);\n      return Res_doc.group(Res_doc.concat({\n                      hd: Res_doc.concat(List.rev({\n                                hd: cmtDoc$1,\n                                tl: acc\n                              })),\n                      tl: /* [] */0\n                    }));\n    };\n  };\n  var comments;\n  try {\n    comments = Hashtbl.find(cmtTbl.inside, loc);\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return Res_doc.nil;\n    }\n    throw exn;\n  }\n  Hashtbl.remove(cmtTbl.inside, loc);\n  return Res_doc.group(loop(/* [] */0, comments));\n}\n\nfunction printLeadingComments(node, tbl, loc) {\n  var comments;\n  try {\n    comments = Hashtbl.find(tbl, loc);\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return node;\n    }\n    throw exn;\n  }\n  
Hashtbl.remove(tbl, loc);\n  var _acc = /* [] */0;\n  var _comments = comments;\n  while(true) {\n    var comments$1 = _comments;\n    var acc = _acc;\n    if (!comments$1) {\n      return node;\n    }\n    var rest = comments$1.tl;\n    var comment = comments$1.hd;\n    if (rest) {\n      var cmtDoc = printLeadingComment(rest.hd, comment);\n      _comments = rest;\n      _acc = {\n        hd: cmtDoc,\n        tl: acc\n      };\n      continue ;\n    }\n    var cmtDoc$1 = printLeadingComment(undefined, comment);\n    var diff = loc.loc_start.pos_lnum - Res_comment.loc(comment).loc_end.pos_lnum | 0;\n    var separator = Res_comment.isSingleLineComment(comment) ? (\n        diff > 1 ? Res_doc.hardLine : Res_doc.nil\n      ) : (\n        diff === 0 ? Res_doc.space : (\n            diff > 1 ? Res_doc.concat({\n                    hd: Res_doc.hardLine,\n                    tl: {\n                      hd: Res_doc.hardLine,\n                      tl: /* [] */0\n                    }\n                  }) : Res_doc.hardLine\n          )\n      );\n    return Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.concat(List.rev({\n                              hd: cmtDoc$1,\n                              tl: acc\n                            })),\n                    tl: {\n                      hd: separator,\n                      tl: {\n                        hd: node,\n                        tl: /* [] */0\n                      }\n                    }\n                  }));\n  };\n}\n\nfunction printTrailingComments(node, tbl, loc) {\n  var loop = function (_prev, _acc, _comments) {\n    while(true) {\n      var comments = _comments;\n      var acc = _acc;\n      var prev = _prev;\n      if (!comments) {\n        return Res_doc.concat(List.rev(acc));\n      }\n      var comment = comments.hd;\n      var cmtDoc = printTrailingComment(prev, loc, comment);\n      _comments = comments.tl;\n      _acc = {\n        hd: cmtDoc,\n        tl: acc\n      };\n      
_prev = Res_comment.loc(comment);\n      continue ;\n    };\n  };\n  var comments;\n  try {\n    comments = Hashtbl.find(tbl, loc);\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return node;\n    }\n    throw exn;\n  }\n  if (!comments) {\n    return node;\n  }\n  Hashtbl.remove(tbl, loc);\n  var cmtsDoc = loop(loc, /* [] */0, comments);\n  return Res_doc.concat({\n              hd: node,\n              tl: {\n                hd: cmtsDoc,\n                tl: /* [] */0\n              }\n            });\n}\n\nfunction printComments(doc, tbl, loc) {\n  var docWithLeadingComments = printLeadingComments(doc, tbl.leading, loc);\n  return printTrailingComments(docWithLeadingComments, tbl.trailing, loc);\n}\n\nfunction printList(getLoc, nodes, print, forceBreakOpt, t) {\n  var forceBreak = forceBreakOpt !== undefined ? forceBreakOpt : false;\n  var loop = function (_prevLoc, _acc, _nodes) {\n    while(true) {\n      var nodes = _nodes;\n      var acc = _acc;\n      var prevLoc = _prevLoc;\n      if (!nodes) {\n        return [\n                prevLoc,\n                Res_doc.concat(List.rev(acc))\n              ];\n      }\n      var node = nodes.hd;\n      var loc = Curry._1(getLoc, node);\n      var comment = getFirstLeadingComment(t, loc);\n      var startPos = comment !== undefined ? Res_comment.loc(comment).loc_start : loc.loc_start;\n      var sep = (startPos.pos_lnum - prevLoc.loc_end.pos_lnum | 0) > 1 ? 
Res_doc.concat({\n              hd: Res_doc.hardLine,\n              tl: {\n                hd: Res_doc.hardLine,\n                tl: /* [] */0\n              }\n            }) : Res_doc.hardLine;\n      var doc = printComments(Curry._2(print, node, t), t, loc);\n      _nodes = nodes.tl;\n      _acc = {\n        hd: doc,\n        tl: {\n          hd: sep,\n          tl: acc\n        }\n      };\n      _prevLoc = loc;\n      continue ;\n    };\n  };\n  if (!nodes) {\n    return Res_doc.nil;\n  }\n  var node = nodes.hd;\n  var firstLoc = Curry._1(getLoc, node);\n  var doc = printComments(Curry._2(print, node, t), t, firstLoc);\n  var match = loop(firstLoc, {\n        hd: doc,\n        tl: /* [] */0\n      }, nodes.tl);\n  var forceBreak$1 = forceBreak || firstLoc.loc_start.pos_lnum !== match[0].loc_end.pos_lnum;\n  return Res_doc.breakableGroup(forceBreak$1, match[1]);\n}\n\nfunction printListi(getLoc, nodes, print, forceBreakOpt, t) {\n  var forceBreak = forceBreakOpt !== undefined ? forceBreakOpt : false;\n  var loop = function (_i, _prevLoc, _acc, _nodes) {\n    while(true) {\n      var nodes = _nodes;\n      var acc = _acc;\n      var prevLoc = _prevLoc;\n      var i = _i;\n      if (!nodes) {\n        return [\n                prevLoc,\n                Res_doc.concat(List.rev(acc))\n              ];\n      }\n      var node = nodes.hd;\n      var loc = Curry._1(getLoc, node);\n      var comment = getFirstLeadingComment(t, loc);\n      var startPos = comment !== undefined ? Res_comment.loc(comment).loc_start : loc.loc_start;\n      var sep = (startPos.pos_lnum - prevLoc.loc_end.pos_lnum | 0) > 1 ? 
Res_doc.concat({\n              hd: Res_doc.hardLine,\n              tl: {\n                hd: Res_doc.hardLine,\n                tl: /* [] */0\n              }\n            }) : Res_doc.line;\n      var doc = printComments(Curry._3(print, node, t, i), t, loc);\n      _nodes = nodes.tl;\n      _acc = {\n        hd: doc,\n        tl: {\n          hd: sep,\n          tl: acc\n        }\n      };\n      _prevLoc = loc;\n      _i = i + 1 | 0;\n      continue ;\n    };\n  };\n  if (!nodes) {\n    return Res_doc.nil;\n  }\n  var node = nodes.hd;\n  var firstLoc = Curry._1(getLoc, node);\n  var doc = printComments(Curry._3(print, node, t, 0), t, firstLoc);\n  var match = loop(1, firstLoc, {\n        hd: doc,\n        tl: /* [] */0\n      }, nodes.tl);\n  var forceBreak$1 = forceBreak || firstLoc.loc_start.pos_lnum !== match[0].loc_end.pos_lnum;\n  return Res_doc.breakableGroup(forceBreak$1, match[1]);\n}\n\nfunction printLongidentAux(_accu, _x) {\n  while(true) {\n    var x = _x;\n    var accu = _accu;\n    switch (x.TAG | 0) {\n      case /* Lident */0 :\n          return {\n                  hd: Res_doc.text(x._0),\n                  tl: accu\n                };\n      case /* Ldot */1 :\n          _x = x._0;\n          _accu = {\n            hd: Res_doc.text(x._1),\n            tl: accu\n          };\n          continue ;\n      case /* Lapply */2 :\n          var d1 = Res_doc.join(Res_doc.dot, printLongidentAux(/* [] */0, x._0));\n          var d2 = Res_doc.join(Res_doc.dot, printLongidentAux(/* [] */0, x._1));\n          return {\n                  hd: Res_doc.concat({\n                        hd: d1,\n                        tl: {\n                          hd: Res_doc.lparen,\n                          tl: {\n                            hd: d2,\n                            tl: {\n                              hd: Res_doc.rparen,\n                              tl: /* [] */0\n                            }\n                          }\n                        }\n     
                 }),\n                  tl: accu\n                };\n      \n    }\n  };\n}\n\nfunction printLongident(x) {\n  switch (x.TAG | 0) {\n    case /* Lident */0 :\n        return Res_doc.text(x._0);\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        return Res_doc.join(Res_doc.dot, printLongidentAux(/* [] */0, x));\n    \n  }\n}\n\nfunction classifyIdentContent(allowUidentOpt, txt) {\n  var allowUident = allowUidentOpt !== undefined ? allowUidentOpt : false;\n  if (Res_token.isKeywordTxt(txt)) {\n    return /* ExoticIdent */0;\n  }\n  var len = txt.length;\n  var _i = 0;\n  while(true) {\n    var i = _i;\n    if (i === len) {\n      return /* NormalIdent */1;\n    }\n    if (i === 0) {\n      var match = txt.charCodeAt(i);\n      if (match > 122 || match < 95) {\n        if (match > 90 || match < 65) {\n          return /* ExoticIdent */0;\n        }\n        if (!allowUident) {\n          return /* ExoticIdent */0;\n        }\n        _i = i + 1 | 0;\n        continue ;\n      }\n      if (match === 96) {\n        return /* ExoticIdent */0;\n      }\n      _i = i + 1 | 0;\n      continue ;\n    }\n    var match$1 = txt.charCodeAt(i);\n    if (match$1 >= 65) {\n      if (match$1 > 96 || match$1 < 91) {\n        if (match$1 >= 123) {\n          return /* ExoticIdent */0;\n        }\n        _i = i + 1 | 0;\n        continue ;\n      }\n      if (match$1 !== 95) {\n        return /* ExoticIdent */0;\n      }\n      _i = i + 1 | 0;\n      continue ;\n    }\n    if (match$1 >= 48) {\n      if (match$1 >= 58) {\n        return /* ExoticIdent */0;\n      }\n      _i = i + 1 | 0;\n      continue ;\n    }\n    if (match$1 !== 39) {\n      return /* ExoticIdent */0;\n    }\n    _i = i + 1 | 0;\n    continue ;\n  };\n}\n\nfunction printIdentLike(allowUident, txt) {\n  var match = classifyIdentContent(allowUident, txt);\n  if (match) {\n    return Res_doc.text(txt);\n  } else {\n    return Res_doc.concat({\n                hd: Res_doc.text(\"\\\\\\\"\"),\n 
               tl: {\n                  hd: Res_doc.text(txt),\n                  tl: {\n                    hd: Res_doc.text(\"\\\"\"),\n                    tl: /* [] */0\n                  }\n                }\n              });\n  }\n}\n\nfunction unsafe_for_all_range(s, _start, finish, p) {\n  while(true) {\n    var start = _start;\n    if (start > finish) {\n      return true;\n    }\n    if (!Curry._1(p, s.charCodeAt(start))) {\n      return false;\n    }\n    _start = start + 1 | 0;\n    continue ;\n  };\n}\n\nfunction for_all_from(s, start, p) {\n  var len = s.length;\n  return unsafe_for_all_range(s, start, len - 1 | 0, p);\n}\n\nfunction isValidNumericPolyvarNumber(x) {\n  var len = x.length;\n  if (len <= 0) {\n    return false;\n  }\n  var a = x.charCodeAt(0);\n  if (a <= 57) {\n    if (len > 1) {\n      if (a > 48) {\n        return for_all_from(x, 1, (function (x) {\n                      return !(x > 57 || x < 48);\n                    }));\n      } else {\n        return false;\n      }\n    } else {\n      return a >= 48;\n    }\n  } else {\n    return false;\n  }\n}\n\nfunction printPolyVarIdent(txt) {\n  if (isValidNumericPolyvarNumber(txt)) {\n    return Res_doc.text(txt);\n  }\n  var match = classifyIdentContent(true, txt);\n  if (match && txt !== \"\") {\n    return Res_doc.text(txt);\n  } else {\n    return Res_doc.concat({\n                hd: Res_doc.text(\"\\\"\"),\n                tl: {\n                  hd: Res_doc.text(txt),\n                  tl: {\n                    hd: Res_doc.text(\"\\\"\"),\n                    tl: /* [] */0\n                  }\n                }\n              });\n  }\n}\n\nfunction printLident(l) {\n  var flatLidOpt = function (lid) {\n    var _accu = /* [] */0;\n    var _x = lid;\n    while(true) {\n      var x = _x;\n      var accu = _accu;\n      switch (x.TAG | 0) {\n        case /* Lident */0 :\n            return {\n                    hd: x._0,\n                    tl: accu\n                  };\n     
   case /* Ldot */1 :\n            _x = x._0;\n            _accu = {\n              hd: x._1,\n              tl: accu\n            };\n            continue ;\n        case /* Lapply */2 :\n            return ;\n        \n      }\n    };\n  };\n  switch (l.TAG | 0) {\n    case /* Lident */0 :\n        return printIdentLike(undefined, l._0);\n    case /* Ldot */1 :\n        var txts = flatLidOpt(l._0);\n        if (txts !== undefined) {\n          return Res_doc.concat({\n                      hd: Res_doc.join(Res_doc.dot, List.map(Res_doc.text, txts)),\n                      tl: {\n                        hd: Res_doc.dot,\n                        tl: {\n                          hd: printIdentLike(undefined, l._1),\n                          tl: /* [] */0\n                        }\n                      }\n                    });\n        } else {\n          return Res_doc.text(\"printLident: Longident.Lapply is not supported\");\n        }\n    case /* Lapply */2 :\n        return Res_doc.text(\"printLident: Longident.Lapply is not supported\");\n    \n  }\n}\n\nfunction printLongidentLocation(l, cmtTbl) {\n  var doc = printLongident(l.txt);\n  return printComments(doc, cmtTbl, l.loc);\n}\n\nfunction printLidentPath(path, cmtTbl) {\n  var doc = printLident(path.txt);\n  return printComments(doc, cmtTbl, path.loc);\n}\n\nfunction printIdentPath(path, cmtTbl) {\n  var doc = printLident(path.txt);\n  return printComments(doc, cmtTbl, path.loc);\n}\n\nfunction printStringLoc(sloc, cmtTbl) {\n  var doc = printIdentLike(undefined, sloc.txt);\n  return printComments(doc, cmtTbl, sloc.loc);\n}\n\nfunction printStringContents(txt) {\n  var lines = $$String.split_on_char(/* '\\n' */10, txt);\n  return Res_doc.join(Res_doc.literalLine, List.map(Res_doc.text, lines));\n}\n\nfunction printConstant(templateLiteralOpt, c) {\n  var templateLiteral = templateLiteralOpt !== undefined ? 
templateLiteralOpt : false;\n  switch (c.TAG | 0) {\n    case /* Pconst_integer */0 :\n        var suffix = c._1;\n        var s = c._0;\n        if (suffix !== undefined) {\n          return Res_doc.text(s + Char.escaped(suffix));\n        } else {\n          return Res_doc.text(s);\n        }\n    case /* Pconst_char */1 :\n        var c$1 = c._0;\n        var str;\n        var exit = 0;\n        if (c$1 >= 40) {\n          if (c$1 !== 92) {\n            if (c$1 >= 127) {\n              str = Res_utf8.encodeCodePoint(c$1);\n            } else {\n              exit = 1;\n            }\n          } else {\n            str = \"\\\\\\\\\";\n          }\n        } else if (c$1 >= 32) {\n          if (c$1 >= 39) {\n            str = \"\\\\'\";\n          } else {\n            exit = 1;\n          }\n        } else if (c$1 >= 14) {\n          str = Res_utf8.encodeCodePoint(c$1);\n        } else {\n          switch (c$1) {\n            case 8 :\n                str = \"\\\\b\";\n                break;\n            case 9 :\n                str = \"\\\\t\";\n                break;\n            case 10 :\n                str = \"\\\\n\";\n                break;\n            case 0 :\n            case 1 :\n            case 2 :\n            case 3 :\n            case 4 :\n            case 5 :\n            case 6 :\n            case 7 :\n            case 11 :\n            case 12 :\n                str = Res_utf8.encodeCodePoint(c$1);\n                break;\n            case 13 :\n                str = \"\\\\r\";\n                break;\n            \n          }\n        }\n        if (exit === 1) {\n          var s$1 = [0];\n          s$1[0] = c$1;\n          str = Bytes.unsafe_to_string(s$1);\n        }\n        return Res_doc.text(\"'\" + (str + \"'\"));\n    case /* Pconst_string */2 :\n        var prefix = c._1;\n        var txt = c._0;\n        if (prefix === undefined) {\n          return Res_doc.concat({\n                      hd: Res_doc.text(\"\\\"\"),\n           
           tl: {\n                        hd: printStringContents(txt),\n                        tl: {\n                          hd: Res_doc.text(\"\\\"\"),\n                          tl: /* [] */0\n                        }\n                      }\n                    });\n        }\n        if (prefix === \"INTERNAL_RES_CHAR_CONTENTS\") {\n          return Res_doc.concat({\n                      hd: Res_doc.text(\"'\"),\n                      tl: {\n                        hd: Res_doc.text(txt),\n                        tl: {\n                          hd: Res_doc.text(\"'\"),\n                          tl: /* [] */0\n                        }\n                      }\n                    });\n        }\n        var match = templateLiteral ? [\n            \"`\",\n            \"`\"\n          ] : [\n            \"\\\"\",\n            \"\\\"\"\n          ];\n        return Res_doc.concat({\n                    hd: prefix === \"js\" ? Res_doc.nil : Res_doc.text(prefix),\n                    tl: {\n                      hd: Res_doc.text(match[0]),\n                      tl: {\n                        hd: printStringContents(txt),\n                        tl: {\n                          hd: Res_doc.text(match[1]),\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  });\n    case /* Pconst_float */3 :\n        return Res_doc.text(c._0);\n    \n  }\n}\n\nfunction printStructure(s, t) {\n  if (s) {\n    return printList((function (s) {\n                  return s.pstr_loc;\n                }), s, printStructureItem, undefined, t);\n  } else {\n    return printCommentsInside(t, $$Location.none);\n  }\n}\n\nfunction printStructureItem(si, cmtTbl) {\n  var valueDescription = si.pstr_desc;\n  switch (valueDescription.TAG | 0) {\n    case /* Pstr_eval */0 :\n        var expr = valueDescription._0;\n        var doc = printExpressionWithComments(expr, cmtTbl);\n        var braces = 
Res_parens.structureExpr(expr);\n        var exprDoc = typeof braces === \"number\" ? (\n            braces !== 0 ? doc : addParens(doc)\n          ) : printBraces(doc, expr, braces._0);\n        return Res_doc.concat({\n                    hd: printAttributes(undefined, undefined, valueDescription._1, cmtTbl),\n                    tl: {\n                      hd: exprDoc,\n                      tl: /* [] */0\n                    }\n                  });\n    case /* Pstr_value */1 :\n        var recFlag = valueDescription._0 ? Res_doc.text(\"rec \") : Res_doc.nil;\n        return printValueBindings(recFlag, valueDescription._1, cmtTbl);\n    case /* Pstr_primitive */2 :\n        return printValueDescription(valueDescription._0, cmtTbl);\n    case /* Pstr_type */3 :\n        var recFlag$1 = valueDescription._0 ? Res_doc.text(\"rec \") : Res_doc.nil;\n        return printTypeDeclarations(recFlag$1, valueDescription._1, cmtTbl);\n    case /* Pstr_typext */4 :\n        return printTypeExtension(valueDescription._0, cmtTbl);\n    case /* Pstr_exception */5 :\n        return printExceptionDef(valueDescription._0, cmtTbl);\n    case /* Pstr_module */6 :\n        return printModuleBinding(false, valueDescription._0, cmtTbl, 0);\n    case /* Pstr_recmodule */7 :\n        return printListi((function (mb) {\n                      return mb.pmb_loc;\n                    }), valueDescription._0, (function (param, param$1, param$2) {\n                      return printModuleBinding(true, param, param$1, param$2);\n                    }), undefined, cmtTbl);\n    case /* Pstr_modtype */8 :\n        return printModuleTypeDeclaration(valueDescription._0, cmtTbl);\n    case /* Pstr_open */9 :\n        return printOpenDescription(valueDescription._0, cmtTbl);\n    case /* Pstr_class */10 :\n    case /* Pstr_class_type */11 :\n        return Res_doc.nil;\n    case /* Pstr_include */12 :\n        return printIncludeDeclaration(valueDescription._0, cmtTbl);\n    case /* Pstr_attribute 
*/13 :\n        return Res_doc.concat({\n                    hd: Res_doc.text(\"@\"),\n                    tl: {\n                      hd: printAttribute(valueDescription._0, cmtTbl),\n                      tl: /* [] */0\n                    }\n                  });\n    case /* Pstr_extension */14 :\n        return Res_doc.concat({\n                    hd: printAttributes(undefined, undefined, valueDescription._1, cmtTbl),\n                    tl: {\n                      hd: Res_doc.concat({\n                            hd: printExtension(true, valueDescription._0, cmtTbl),\n                            tl: /* [] */0\n                          }),\n                      tl: /* [] */0\n                    }\n                  });\n    \n  }\n}\n\nfunction printTypeExtension(te, cmtTbl) {\n  var prefix = Res_doc.text(\"type \");\n  var name = printLidentPath(te.ptyext_path, cmtTbl);\n  var typeParams = printTypeParams(te.ptyext_params, cmtTbl);\n  var ecs = te.ptyext_constructors;\n  var match = List.rev(ecs);\n  var forceBreak;\n  if (ecs && match) {\n    var first = ecs.hd;\n    forceBreak = first.pext_loc.loc_start.pos_lnum > te.ptyext_path.loc.loc_end.pos_lnum || first.pext_loc.loc_start.pos_lnum < match.hd.pext_loc.loc_end.pos_lnum;\n  } else {\n    forceBreak = false;\n  }\n  var match$1 = te.ptyext_private;\n  var privateFlag = match$1 ? 
Res_doc.nil : Res_doc.concat({\n          hd: Res_doc.text(\"private\"),\n          tl: {\n            hd: Res_doc.line,\n            tl: /* [] */0\n          }\n        });\n  var rows = printListi((function (n) {\n          return n.pext_loc;\n        }), ecs, printExtensionConstructor, forceBreak, cmtTbl);\n  var extensionConstructors = Res_doc.breakableGroup(forceBreak, Res_doc.indent(Res_doc.concat({\n                hd: Res_doc.line,\n                tl: {\n                  hd: privateFlag,\n                  tl: {\n                    hd: rows,\n                    tl: /* [] */0\n                  }\n                }\n              })));\n  return Res_doc.group(Res_doc.concat({\n                  hd: printAttributes(te.ptyext_path.loc, undefined, te.ptyext_attributes, cmtTbl),\n                  tl: {\n                    hd: prefix,\n                    tl: {\n                      hd: name,\n                      tl: {\n                        hd: typeParams,\n                        tl: {\n                          hd: Res_doc.text(\" +=\"),\n                          tl: {\n                            hd: extensionConstructors,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printModuleBinding(isRec, moduleBinding, cmtTbl, i) {\n  var prefix = i === 0 ? Res_doc.concat({\n          hd: Res_doc.text(\"module \"),\n          tl: {\n            hd: isRec ? Res_doc.text(\"rec \") : Res_doc.nil,\n            tl: /* [] */0\n          }\n        }) : Res_doc.text(\"and \");\n  var modExpr = moduleBinding.pmb_expr;\n  var match = modExpr.pmod_desc;\n  var match$1;\n  match$1 = match.TAG === /* Pmod_constraint */4 ? 
[\n      printModExpr(match._0, cmtTbl),\n      Res_doc.concat({\n            hd: Res_doc.text(\": \"),\n            tl: {\n              hd: printModType(match._1, cmtTbl),\n              tl: /* [] */0\n            }\n          })\n    ] : [\n      printModExpr(modExpr, cmtTbl),\n      Res_doc.nil\n    ];\n  var doc = Res_doc.text(moduleBinding.pmb_name.txt);\n  var modName = printComments(doc, cmtTbl, moduleBinding.pmb_name.loc);\n  var doc$1 = Res_doc.concat({\n        hd: printAttributes(moduleBinding.pmb_name.loc, undefined, moduleBinding.pmb_attributes, cmtTbl),\n        tl: {\n          hd: prefix,\n          tl: {\n            hd: modName,\n            tl: {\n              hd: match$1[1],\n              tl: {\n                hd: Res_doc.text(\" = \"),\n                tl: {\n                  hd: match$1[0],\n                  tl: /* [] */0\n                }\n              }\n            }\n          }\n        }\n      });\n  return printComments(doc$1, cmtTbl, moduleBinding.pmb_loc);\n}\n\nfunction printModuleTypeDeclaration(modTypeDecl, cmtTbl) {\n  var doc = Res_doc.text(modTypeDecl.pmtd_name.txt);\n  var modName = printComments(doc, cmtTbl, modTypeDecl.pmtd_name.loc);\n  var modType = modTypeDecl.pmtd_type;\n  return Res_doc.concat({\n              hd: printAttributes(undefined, undefined, modTypeDecl.pmtd_attributes, cmtTbl),\n              tl: {\n                hd: Res_doc.text(\"module type \"),\n                tl: {\n                  hd: modName,\n                  tl: {\n                    hd: modType !== undefined ? 
Res_doc.concat({\n                            hd: Res_doc.text(\" = \"),\n                            tl: {\n                              hd: printModType(modType, cmtTbl),\n                              tl: /* [] */0\n                            }\n                          }) : Res_doc.nil,\n                    tl: /* [] */0\n                  }\n                }\n              }\n            });\n}\n\nfunction printModType(modType, cmtTbl) {\n  var longident = modType.pmty_desc;\n  var modTypeDoc;\n  switch (longident.TAG | 0) {\n    case /* Pmty_ident */0 :\n        var longident$1 = longident._0;\n        modTypeDoc = Res_doc.concat({\n              hd: printAttributes(longident$1.loc, undefined, modType.pmty_attributes, cmtTbl),\n              tl: {\n                hd: printLongidentLocation(longident$1, cmtTbl),\n                tl: /* [] */0\n              }\n            });\n        break;\n    case /* Pmty_signature */1 :\n        var signature = longident._0;\n        if (signature) {\n          var signatureDoc = Res_doc.breakableGroup(true, Res_doc.concat({\n                    hd: Res_doc.lbrace,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.line,\n                                tl: {\n                                  hd: printSignature(signature, cmtTbl),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.line,\n                        tl: {\n                          hd: Res_doc.rbrace,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }));\n          modTypeDoc = Res_doc.concat({\n                hd: printAttributes(undefined, undefined, modType.pmty_attributes, cmtTbl),\n                tl: {\n                  hd: signatureDoc,\n              
    tl: /* [] */0\n                }\n              });\n        } else {\n          var shouldBreak = modType.pmty_loc.loc_start.pos_lnum < modType.pmty_loc.loc_end.pos_lnum;\n          modTypeDoc = Res_doc.breakableGroup(shouldBreak, Res_doc.concat({\n                    hd: Res_doc.lbrace,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: printCommentsInside(cmtTbl, modType.pmty_loc),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.softLine,\n                        tl: {\n                          hd: Res_doc.rbrace,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }));\n        }\n        break;\n    case /* Pmty_functor */2 :\n        var match = Res_parsetree_viewer.functorType(modType);\n        var returnType = match[1];\n        var parameters = match[0];\n        var parametersDoc;\n        var exit = 0;\n        if (parameters) {\n          var match$1 = parameters.hd;\n          var match$2 = match$1[1];\n          if (match$2.txt === \"_\") {\n            var modType$1 = match$1[2];\n            if (modType$1 !== undefined && !parameters.tl) {\n              var loc = match$2.loc;\n              var cmtLoc_loc_start = loc.loc_start;\n              var cmtLoc_loc_end = modType$1.pmty_loc.loc_end;\n              var cmtLoc_loc_ghost = loc.loc_ghost;\n              var cmtLoc = {\n                loc_start: cmtLoc_loc_start,\n                loc_end: cmtLoc_loc_end,\n                loc_ghost: cmtLoc_loc_ghost\n              };\n              var attrs = printAttributes(undefined, undefined, match$1[0], cmtTbl);\n              var doc = Res_doc.concat({\n               
     hd: attrs,\n                    tl: {\n                      hd: printModType(modType$1, cmtTbl),\n                      tl: /* [] */0\n                    }\n                  });\n              parametersDoc = printComments(doc, cmtTbl, cmtLoc);\n            } else {\n              exit = 1;\n            }\n          } else {\n            exit = 1;\n          }\n        } else {\n          parametersDoc = Res_doc.nil;\n        }\n        if (exit === 1) {\n          parametersDoc = Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.lparen,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: Res_doc.join(Res_doc.concat({\n                                            hd: Res_doc.comma,\n                                            tl: {\n                                              hd: Res_doc.line,\n                                              tl: /* [] */0\n                                            }\n                                          }), List.map((function (param) {\n                                              var modType = param[2];\n                                              var lbl = param[1];\n                                              var cmtLoc;\n                                              if (modType !== undefined) {\n                                                var init = lbl.loc;\n                                                cmtLoc = {\n                                                  loc_start: init.loc_start,\n                                                  loc_end: modType.pmty_loc.loc_end,\n                                                  loc_ghost: init.loc_ghost\n                                                };\n                                              } else {\n                                                cmtLoc = lbl.loc;\n  
                                            }\n                                              var attrs = printAttributes(undefined, undefined, param[0], cmtTbl);\n                                              var lblDoc;\n                                              if (lbl.txt === \"_\" || lbl.txt === \"*\") {\n                                                lblDoc = Res_doc.nil;\n                                              } else {\n                                                var doc = Res_doc.text(lbl.txt);\n                                                lblDoc = printComments(doc, cmtTbl, lbl.loc);\n                                              }\n                                              var doc$1 = Res_doc.concat({\n                                                    hd: attrs,\n                                                    tl: {\n                                                      hd: lblDoc,\n                                                      tl: {\n                                                        hd: modType !== undefined ? Res_doc.concat({\n                                                                hd: lbl.txt === \"_\" ? 
Res_doc.nil : Res_doc.text(\": \"),\n                                                                tl: {\n                                                                  hd: printModType(modType, cmtTbl),\n                                                                  tl: /* [] */0\n                                                                }\n                                                              }) : Res_doc.nil,\n                                                        tl: /* [] */0\n                                                      }\n                                                    }\n                                                  });\n                                              return printComments(doc$1, cmtTbl, cmtLoc);\n                                            }), parameters)),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.trailingComma,\n                        tl: {\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.rparen,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }));\n        }\n        var doc$1 = printModType(returnType, cmtTbl);\n        var returnDoc = Res_parens.modTypeFunctorReturn(returnType) ? 
addParens(doc$1) : doc$1;\n        modTypeDoc = Res_doc.group(Res_doc.concat({\n                  hd: parametersDoc,\n                  tl: {\n                    hd: Res_doc.group(Res_doc.concat({\n                              hd: Res_doc.text(\" =>\"),\n                              tl: {\n                                hd: Res_doc.line,\n                                tl: {\n                                  hd: returnDoc,\n                                  tl: /* [] */0\n                                }\n                              }\n                            })),\n                    tl: /* [] */0\n                  }\n                }));\n        break;\n    case /* Pmty_with */3 :\n        var modType$2 = longident._0;\n        var doc$2 = printModType(modType$2, cmtTbl);\n        var operand = Res_parens.modTypeWithOperand(modType$2) ? addParens(doc$2) : doc$2;\n        modTypeDoc = Res_doc.group(Res_doc.concat({\n                  hd: operand,\n                  tl: {\n                    hd: Res_doc.indent(Res_doc.concat({\n                              hd: Res_doc.line,\n                              tl: {\n                                hd: printWithConstraints(longident._1, cmtTbl),\n                                tl: /* [] */0\n                              }\n                            })),\n                    tl: /* [] */0\n                  }\n                }));\n        break;\n    case /* Pmty_typeof */4 :\n        modTypeDoc = Res_doc.concat({\n              hd: Res_doc.text(\"module type of \"),\n              tl: {\n                hd: printModExpr(longident._0, cmtTbl),\n                tl: /* [] */0\n              }\n            });\n        break;\n    case /* Pmty_extension */5 :\n        modTypeDoc = printExtension(false, longident._0, cmtTbl);\n        break;\n    case /* Pmty_alias */6 :\n        modTypeDoc = Res_doc.concat({\n              hd: Res_doc.text(\"module \"),\n              tl: {\n                hd: 
printLongidentLocation(longident._0, cmtTbl),\n                tl: /* [] */0\n              }\n            });\n        break;\n    \n  }\n  var match$3 = modType.pmty_desc;\n  var attrsAlreadyPrinted;\n  switch (match$3.TAG | 0) {\n    case /* Pmty_ident */0 :\n    case /* Pmty_signature */1 :\n    case /* Pmty_functor */2 :\n        attrsAlreadyPrinted = true;\n        break;\n    default:\n      attrsAlreadyPrinted = false;\n  }\n  var doc$3 = Res_doc.concat({\n        hd: attrsAlreadyPrinted ? Res_doc.nil : printAttributes(undefined, undefined, modType.pmty_attributes, cmtTbl),\n        tl: {\n          hd: modTypeDoc,\n          tl: /* [] */0\n        }\n      });\n  return printComments(doc$3, cmtTbl, modType.pmty_loc);\n}\n\nfunction printWithConstraints(withConstraints, cmtTbl) {\n  var rows = List.mapi((function (i, withConstraint) {\n          return Res_doc.group(Res_doc.concat({\n                          hd: i === 0 ? Res_doc.text(\"with \") : Res_doc.text(\"and \"),\n                          tl: {\n                            hd: printWithConstraint(withConstraint, cmtTbl),\n                            tl: /* [] */0\n                          }\n                        }));\n        }), withConstraints);\n  return Res_doc.join(Res_doc.line, rows);\n}\n\nfunction printWithConstraint(withConstraint, cmtTbl) {\n  switch (withConstraint.TAG | 0) {\n    case /* Pwith_type */0 :\n        return Res_doc.group(printTypeDeclaration(printLidentPath(withConstraint._0, cmtTbl), \"=\", Res_doc.nil, 0, withConstraint._1, Res_comments_table.empty));\n    case /* Pwith_module */1 :\n        return Res_doc.concat({\n                    hd: Res_doc.text(\"module \"),\n                    tl: {\n                      hd: printLongident(withConstraint._0.txt),\n                      tl: {\n                        hd: Res_doc.text(\" =\"),\n                        tl: {\n                          hd: Res_doc.indent(Res_doc.concat({\n                                    
hd: Res_doc.line,\n                                    tl: {\n                                      hd: printLongident(withConstraint._1.txt),\n                                      tl: /* [] */0\n                                    }\n                                  })),\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  });\n    case /* Pwith_typesubst */2 :\n        return Res_doc.group(printTypeDeclaration(printLidentPath(withConstraint._0, cmtTbl), \":=\", Res_doc.nil, 0, withConstraint._1, Res_comments_table.empty));\n    case /* Pwith_modsubst */3 :\n        return Res_doc.concat({\n                    hd: Res_doc.text(\"module \"),\n                    tl: {\n                      hd: printLongident(withConstraint._0.txt),\n                      tl: {\n                        hd: Res_doc.text(\" :=\"),\n                        tl: {\n                          hd: Res_doc.indent(Res_doc.concat({\n                                    hd: Res_doc.line,\n                                    tl: {\n                                      hd: printLongident(withConstraint._1.txt),\n                                      tl: /* [] */0\n                                    }\n                                  })),\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  });\n    \n  }\n}\n\nfunction printSignature(signature, cmtTbl) {\n  if (signature) {\n    return printList((function (s) {\n                  return s.psig_loc;\n                }), signature, printSignatureItem, undefined, cmtTbl);\n  } else {\n    return printCommentsInside(cmtTbl, $$Location.none);\n  }\n}\n\nfunction printSignatureItem(si, cmtTbl) {\n  var valueDescription = si.psig_desc;\n  switch (valueDescription.TAG | 0) {\n    case /* Psig_value */0 :\n        return printValueDescription(valueDescription._0, cmtTbl);\n    case /* 
Psig_type */1 :\n        var recFlag = valueDescription._0 ? Res_doc.text(\"rec \") : Res_doc.nil;\n        return printTypeDeclarations(recFlag, valueDescription._1, cmtTbl);\n    case /* Psig_typext */2 :\n        return printTypeExtension(valueDescription._0, cmtTbl);\n    case /* Psig_exception */3 :\n        return printExceptionDef(valueDescription._0, cmtTbl);\n    case /* Psig_module */4 :\n        return printModuleDeclaration(valueDescription._0, cmtTbl);\n    case /* Psig_recmodule */5 :\n        return printRecModuleDeclarations(valueDescription._0, cmtTbl);\n    case /* Psig_modtype */6 :\n        return printModuleTypeDeclaration(valueDescription._0, cmtTbl);\n    case /* Psig_open */7 :\n        return printOpenDescription(valueDescription._0, cmtTbl);\n    case /* Psig_include */8 :\n        return printIncludeDescription(valueDescription._0, cmtTbl);\n    case /* Psig_class */9 :\n    case /* Psig_class_type */10 :\n        return Res_doc.nil;\n    case /* Psig_attribute */11 :\n        return Res_doc.concat({\n                    hd: Res_doc.text(\"@\"),\n                    tl: {\n                      hd: printAttribute(valueDescription._0, cmtTbl),\n                      tl: /* [] */0\n                    }\n                  });\n    case /* Psig_extension */12 :\n        return Res_doc.concat({\n                    hd: printAttributes(undefined, undefined, valueDescription._1, cmtTbl),\n                    tl: {\n                      hd: Res_doc.concat({\n                            hd: printExtension(true, valueDescription._0, cmtTbl),\n                            tl: /* [] */0\n                          }),\n                      tl: /* [] */0\n                    }\n                  });\n    \n  }\n}\n\nfunction printRecModuleDeclarations(moduleDeclarations, cmtTbl) {\n  return printListi((function (n) {\n                return n.pmd_loc;\n              }), moduleDeclarations, printRecModuleDeclaration, undefined, cmtTbl);\n}\n\nfunction 
printRecModuleDeclaration(md, cmtTbl, i) {\n  var longident = md.pmd_type.pmty_desc;\n  var body;\n  if (longident.TAG === /* Pmty_alias */6) {\n    body = Res_doc.concat({\n          hd: Res_doc.text(\" = \"),\n          tl: {\n            hd: printLongidentLocation(longident._0, cmtTbl),\n            tl: /* [] */0\n          }\n        });\n  } else {\n    var match = md.pmd_type.pmty_desc;\n    var needsParens;\n    needsParens = match.TAG === /* Pmty_with */3 ? true : false;\n    var doc = printModType(md.pmd_type, cmtTbl);\n    var modTypeDoc = needsParens ? addParens(doc) : doc;\n    body = Res_doc.concat({\n          hd: Res_doc.text(\": \"),\n          tl: {\n            hd: modTypeDoc,\n            tl: /* [] */0\n          }\n        });\n  }\n  var prefix = i < 1 ? \"module rec \" : \"and \";\n  return Res_doc.concat({\n              hd: printAttributes(md.pmd_name.loc, undefined, md.pmd_attributes, cmtTbl),\n              tl: {\n                hd: Res_doc.text(prefix),\n                tl: {\n                  hd: printComments(Res_doc.text(md.pmd_name.txt), cmtTbl, md.pmd_name.loc),\n                  tl: {\n                    hd: body,\n                    tl: /* [] */0\n                  }\n                }\n              }\n            });\n}\n\nfunction printModuleDeclaration(md, cmtTbl) {\n  var longident = md.pmd_type.pmty_desc;\n  var body;\n  body = longident.TAG === /* Pmty_alias */6 ? 
Res_doc.concat({\n          hd: Res_doc.text(\" = \"),\n          tl: {\n            hd: printLongidentLocation(longident._0, cmtTbl),\n            tl: /* [] */0\n          }\n        }) : Res_doc.concat({\n          hd: Res_doc.text(\": \"),\n          tl: {\n            hd: printModType(md.pmd_type, cmtTbl),\n            tl: /* [] */0\n          }\n        });\n  return Res_doc.concat({\n              hd: printAttributes(md.pmd_name.loc, undefined, md.pmd_attributes, cmtTbl),\n              tl: {\n                hd: Res_doc.text(\"module \"),\n                tl: {\n                  hd: printComments(Res_doc.text(md.pmd_name.txt), cmtTbl, md.pmd_name.loc),\n                  tl: {\n                    hd: body,\n                    tl: /* [] */0\n                  }\n                }\n              }\n            });\n}\n\nfunction printOpenDescription(openDescription, cmtTbl) {\n  var match = openDescription.popen_override;\n  return Res_doc.concat({\n              hd: printAttributes(undefined, undefined, openDescription.popen_attributes, cmtTbl),\n              tl: {\n                hd: Res_doc.text(\"open\"),\n                tl: {\n                  hd: match ? Res_doc.space : Res_doc.text(\"! 
\"),\n                  tl: {\n                    hd: printLongidentLocation(openDescription.popen_lid, cmtTbl),\n                    tl: /* [] */0\n                  }\n                }\n              }\n            });\n}\n\nfunction printIncludeDescription(includeDescription, cmtTbl) {\n  return Res_doc.concat({\n              hd: printAttributes(undefined, undefined, includeDescription.pincl_attributes, cmtTbl),\n              tl: {\n                hd: Res_doc.text(\"include \"),\n                tl: {\n                  hd: printModType(includeDescription.pincl_mod, cmtTbl),\n                  tl: /* [] */0\n                }\n              }\n            });\n}\n\nfunction printIncludeDeclaration(includeDeclaration, cmtTbl) {\n  var includeDoc = printModExpr(includeDeclaration.pincl_mod, cmtTbl);\n  return Res_doc.concat({\n              hd: printAttributes(undefined, undefined, includeDeclaration.pincl_attributes, cmtTbl),\n              tl: {\n                hd: Res_doc.text(\"include \"),\n                tl: {\n                  hd: Res_parens.includeModExpr(includeDeclaration.pincl_mod) ? addParens(includeDoc) : includeDoc,\n                  tl: /* [] */0\n                }\n              }\n            });\n}\n\nfunction printValueBindings(recFlag, vbs, cmtTbl) {\n  return printListi((function (vb) {\n                return vb.pvb_loc;\n              }), vbs, (function (param, param$1, param$2) {\n                return printValueBinding(recFlag, param, param$1, param$2);\n              }), undefined, cmtTbl);\n}\n\nfunction printValueDescription(valueDescription, cmtTbl) {\n  var match = valueDescription.pval_prim;\n  var isExternal = match ? true : false;\n  var attrs = printAttributes(valueDescription.pval_name.loc, undefined, valueDescription.pval_attributes, cmtTbl);\n  var header = isExternal ? 
\"external \" : \"let \";\n  return Res_doc.group(Res_doc.concat({\n                  hd: attrs,\n                  tl: {\n                    hd: Res_doc.text(header),\n                    tl: {\n                      hd: printComments(printIdentLike(undefined, valueDescription.pval_name.txt), cmtTbl, valueDescription.pval_name.loc),\n                      tl: {\n                        hd: Res_doc.text(\": \"),\n                        tl: {\n                          hd: printTypExpr(valueDescription.pval_type, cmtTbl),\n                          tl: {\n                            hd: isExternal ? Res_doc.group(Res_doc.concat({\n                                        hd: Res_doc.text(\" =\"),\n                                        tl: {\n                                          hd: Res_doc.indent(Res_doc.concat({\n                                                    hd: Res_doc.line,\n                                                    tl: {\n                                                      hd: Res_doc.join(Res_doc.line, List.map((function (s) {\n                                                                  return Res_doc.concat({\n                                                                              hd: Res_doc.text(\"\\\"\"),\n                                                                              tl: {\n                                                                                hd: Res_doc.text(s),\n                                                                                tl: {\n                                                                                  hd: Res_doc.text(\"\\\"\"),\n                                                                                  tl: /* [] */0\n                                                                                }\n                                                                              }\n                                                                            });\n 
                                                               }), valueDescription.pval_prim)),\n                                                      tl: /* [] */0\n                                                    }\n                                                  })),\n                                          tl: /* [] */0\n                                        }\n                                      })) : Res_doc.nil,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printTypeDeclarations(recFlag, typeDeclarations, cmtTbl) {\n  return printListi((function (n) {\n                return n.ptype_loc;\n              }), typeDeclarations, (function (param, param$1, param$2) {\n                return printTypeDeclaration2(recFlag, param, param$1, param$2);\n              }), undefined, cmtTbl);\n}\n\nfunction printTypeDeclaration(name, equalSign, recFlag, i, td, cmtTbl) {\n  var attrs = printAttributes(td.ptype_loc, undefined, td.ptype_attributes, cmtTbl);\n  var prefix = i > 0 ? Res_doc.text(\"and \") : Res_doc.concat({\n          hd: Res_doc.text(\"type \"),\n          tl: {\n            hd: recFlag,\n            tl: /* [] */0\n          }\n        });\n  var typeParams = printTypeParams(td.ptype_params, cmtTbl);\n  var lds = td.ptype_kind;\n  var manifestAndKind;\n  if (typeof lds === \"number\") {\n    if (lds === /* Ptype_abstract */0) {\n      var typ = td.ptype_manifest;\n      manifestAndKind = typ !== undefined ? 
Res_doc.concat({\n              hd: Res_doc.concat({\n                    hd: Res_doc.space,\n                    tl: {\n                      hd: Res_doc.text(equalSign),\n                      tl: {\n                        hd: Res_doc.space,\n                        tl: /* [] */0\n                      }\n                    }\n                  }),\n              tl: {\n                hd: printPrivateFlag(td.ptype_private),\n                tl: {\n                  hd: printTypExpr(typ, cmtTbl),\n                  tl: /* [] */0\n                }\n              }\n            }) : Res_doc.nil;\n    } else {\n      manifestAndKind = Res_doc.concat({\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: /* [] */0\n                    }\n                  }\n                }),\n            tl: {\n              hd: printPrivateFlag(td.ptype_private),\n              tl: {\n                hd: Res_doc.text(\"..\"),\n                tl: /* [] */0\n              }\n            }\n          });\n    }\n  } else if (lds.TAG === /* Ptype_variant */0) {\n    var typ$1 = td.ptype_manifest;\n    var manifest = typ$1 !== undefined ? 
Res_doc.concat({\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: /* [] */0\n                    }\n                  }\n                }),\n            tl: {\n              hd: printTypExpr(typ$1, cmtTbl),\n              tl: /* [] */0\n            }\n          }) : Res_doc.nil;\n    manifestAndKind = Res_doc.concat({\n          hd: manifest,\n          tl: {\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: /* [] */0\n                  }\n                }),\n            tl: {\n              hd: printConstructorDeclarations(td.ptype_private, lds._0, cmtTbl),\n              tl: /* [] */0\n            }\n          }\n        });\n  } else {\n    var typ$2 = td.ptype_manifest;\n    var manifest$1 = typ$2 !== undefined ? 
Res_doc.concat({\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: /* [] */0\n                    }\n                  }\n                }),\n            tl: {\n              hd: printTypExpr(typ$2, cmtTbl),\n              tl: /* [] */0\n            }\n          }) : Res_doc.nil;\n    manifestAndKind = Res_doc.concat({\n          hd: manifest$1,\n          tl: {\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: /* [] */0\n                    }\n                  }\n                }),\n            tl: {\n              hd: printPrivateFlag(td.ptype_private),\n              tl: {\n                hd: printRecordDeclaration(lds._0, cmtTbl),\n                tl: /* [] */0\n              }\n            }\n          }\n        });\n  }\n  var constraints = printTypeDefinitionConstraints(td.ptype_cstrs);\n  return Res_doc.group(Res_doc.concat({\n                  hd: attrs,\n                  tl: {\n                    hd: prefix,\n                    tl: {\n                      hd: name,\n                      tl: {\n                        hd: typeParams,\n                        tl: {\n                          hd: manifestAndKind,\n                          tl: {\n                            hd: constraints,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printTypeDeclaration2(recFlag, td, cmtTbl, i) {\n  var doc = printIdentLike(undefined, td.ptype_name.txt);\n  var name = printComments(doc, cmtTbl, td.ptype_name.loc);\n  var 
equalSign = \"=\";\n  var attrs = printAttributes(td.ptype_loc, undefined, td.ptype_attributes, cmtTbl);\n  var prefix = i > 0 ? Res_doc.text(\"and \") : Res_doc.concat({\n          hd: Res_doc.text(\"type \"),\n          tl: {\n            hd: recFlag,\n            tl: /* [] */0\n          }\n        });\n  var typeParams = printTypeParams(td.ptype_params, cmtTbl);\n  var lds = td.ptype_kind;\n  var manifestAndKind;\n  if (typeof lds === \"number\") {\n    if (lds === /* Ptype_abstract */0) {\n      var typ = td.ptype_manifest;\n      manifestAndKind = typ !== undefined ? Res_doc.concat({\n              hd: Res_doc.concat({\n                    hd: Res_doc.space,\n                    tl: {\n                      hd: Res_doc.text(equalSign),\n                      tl: {\n                        hd: Res_doc.space,\n                        tl: /* [] */0\n                      }\n                    }\n                  }),\n              tl: {\n                hd: printPrivateFlag(td.ptype_private),\n                tl: {\n                  hd: printTypExpr(typ, cmtTbl),\n                  tl: /* [] */0\n                }\n              }\n            }) : Res_doc.nil;\n    } else {\n      manifestAndKind = Res_doc.concat({\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: /* [] */0\n                    }\n                  }\n                }),\n            tl: {\n              hd: printPrivateFlag(td.ptype_private),\n              tl: {\n                hd: Res_doc.text(\"..\"),\n                tl: /* [] */0\n              }\n            }\n          });\n    }\n  } else if (lds.TAG === /* Ptype_variant */0) {\n    var typ$1 = td.ptype_manifest;\n    var manifest = typ$1 !== undefined ? 
Res_doc.concat({\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: /* [] */0\n                    }\n                  }\n                }),\n            tl: {\n              hd: printTypExpr(typ$1, cmtTbl),\n              tl: /* [] */0\n            }\n          }) : Res_doc.nil;\n    manifestAndKind = Res_doc.concat({\n          hd: manifest,\n          tl: {\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: /* [] */0\n                  }\n                }),\n            tl: {\n              hd: printConstructorDeclarations(td.ptype_private, lds._0, cmtTbl),\n              tl: /* [] */0\n            }\n          }\n        });\n  } else {\n    var typ$2 = td.ptype_manifest;\n    var manifest$1 = typ$2 !== undefined ? 
Res_doc.concat({\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: /* [] */0\n                    }\n                  }\n                }),\n            tl: {\n              hd: printTypExpr(typ$2, cmtTbl),\n              tl: /* [] */0\n            }\n          }) : Res_doc.nil;\n    manifestAndKind = Res_doc.concat({\n          hd: manifest$1,\n          tl: {\n            hd: Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: Res_doc.text(equalSign),\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: /* [] */0\n                    }\n                  }\n                }),\n            tl: {\n              hd: printPrivateFlag(td.ptype_private),\n              tl: {\n                hd: printRecordDeclaration(lds._0, cmtTbl),\n                tl: /* [] */0\n              }\n            }\n          }\n        });\n  }\n  var constraints = printTypeDefinitionConstraints(td.ptype_cstrs);\n  return Res_doc.group(Res_doc.concat({\n                  hd: attrs,\n                  tl: {\n                    hd: prefix,\n                    tl: {\n                      hd: name,\n                      tl: {\n                        hd: typeParams,\n                        tl: {\n                          hd: manifestAndKind,\n                          tl: {\n                            hd: constraints,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printTypeDefinitionConstraints(cstrs) {\n  if (cstrs) {\n    return Res_doc.indent(Res_doc.group(Res_doc.concat({\n                        hd: Res_doc.line,\n                        
tl: {\n                          hd: Res_doc.group(Res_doc.join(Res_doc.line, List.map(printTypeDefinitionConstraint, cstrs))),\n                          tl: /* [] */0\n                        }\n                      })));\n  } else {\n    return Res_doc.nil;\n  }\n}\n\nfunction printTypeDefinitionConstraint(param) {\n  return Res_doc.concat({\n              hd: Res_doc.text(\"constraint \"),\n              tl: {\n                hd: printTypExpr(param[0], Res_comments_table.empty),\n                tl: {\n                  hd: Res_doc.text(\" = \"),\n                  tl: {\n                    hd: printTypExpr(param[1], Res_comments_table.empty),\n                    tl: /* [] */0\n                  }\n                }\n              }\n            });\n}\n\nfunction printPrivateFlag(flag) {\n  if (flag) {\n    return Res_doc.nil;\n  } else {\n    return Res_doc.text(\"private \");\n  }\n}\n\nfunction printTypeParams(typeParams, cmtTbl) {\n  if (typeParams) {\n    return Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.lessThan,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: Res_doc.join(Res_doc.concat({\n                                            hd: Res_doc.comma,\n                                            tl: {\n                                              hd: Res_doc.line,\n                                              tl: /* [] */0\n                                            }\n                                          }), List.map((function (typeParam) {\n                                              var doc = printTypeParam(typeParam, cmtTbl);\n                                              return printComments(doc, cmtTbl, typeParam[0].ptyp_loc);\n                                            }), typeParams)),\n                                  tl: /* [] */0\n        
                        }\n                              })),\n                      tl: {\n                        hd: Res_doc.trailingComma,\n                        tl: {\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.greaterThan,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }));\n  } else {\n    return Res_doc.nil;\n  }\n}\n\nfunction printTypeParam(param, cmtTbl) {\n  var printedVariance;\n  switch (param[1]) {\n    case /* Covariant */0 :\n        printedVariance = Res_doc.text(\"+\");\n        break;\n    case /* Contravariant */1 :\n        printedVariance = Res_doc.text(\"-\");\n        break;\n    case /* Invariant */2 :\n        printedVariance = Res_doc.nil;\n        break;\n    \n  }\n  return Res_doc.concat({\n              hd: printedVariance,\n              tl: {\n                hd: printTypExpr(param[0], cmtTbl),\n                tl: /* [] */0\n              }\n            });\n}\n\nfunction printRecordDeclaration(lds, cmtTbl) {\n  var match = List.rev(lds);\n  var forceBreak = lds && match ? 
lds.hd.pld_loc.loc_start.pos_lnum < match.hd.pld_loc.loc_end.pos_lnum : false;\n  return Res_doc.breakableGroup(forceBreak, Res_doc.concat({\n                  hd: Res_doc.lbrace,\n                  tl: {\n                    hd: Res_doc.indent(Res_doc.concat({\n                              hd: Res_doc.softLine,\n                              tl: {\n                                hd: Res_doc.join(Res_doc.concat({\n                                          hd: Res_doc.comma,\n                                          tl: {\n                                            hd: Res_doc.line,\n                                            tl: /* [] */0\n                                          }\n                                        }), List.map((function (ld) {\n                                            var doc = printLabelDeclaration(ld, cmtTbl);\n                                            return printComments(doc, cmtTbl, ld.pld_loc);\n                                          }), lds)),\n                                tl: /* [] */0\n                              }\n                            })),\n                    tl: {\n                      hd: Res_doc.trailingComma,\n                      tl: {\n                        hd: Res_doc.softLine,\n                        tl: {\n                          hd: Res_doc.rbrace,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printConstructorDeclarations(privateFlag, cds, cmtTbl) {\n  var match = List.rev(cds);\n  var forceBreak = cds && match ? cds.hd.pcd_loc.loc_start.pos_lnum < match.hd.pcd_loc.loc_end.pos_lnum : false;\n  var privateFlag$1 = privateFlag ? 
Res_doc.nil : Res_doc.concat({\n          hd: Res_doc.text(\"private\"),\n          tl: {\n            hd: Res_doc.line,\n            tl: /* [] */0\n          }\n        });\n  var rows = printListi((function (cd) {\n          return cd.pcd_loc;\n        }), cds, (function (cd, cmtTbl, i) {\n          var doc = printConstructorDeclaration2(i, cd, cmtTbl);\n          return printComments(doc, cmtTbl, cd.pcd_loc);\n        }), forceBreak, cmtTbl);\n  return Res_doc.breakableGroup(forceBreak, Res_doc.indent(Res_doc.concat({\n                      hd: Res_doc.line,\n                      tl: {\n                        hd: privateFlag$1,\n                        tl: {\n                          hd: rows,\n                          tl: /* [] */0\n                        }\n                      }\n                    })));\n}\n\nfunction printConstructorDeclaration2(i, cd, cmtTbl) {\n  var attrs = printAttributes(undefined, undefined, cd.pcd_attributes, cmtTbl);\n  var bar = i > 0 || cd.pcd_attributes !== /* [] */0 ? Res_doc.text(\"| \") : Res_doc.ifBreaks(Res_doc.text(\"| \"), Res_doc.nil);\n  var doc = Res_doc.text(cd.pcd_name.txt);\n  var constrName = printComments(doc, cmtTbl, cd.pcd_name.loc);\n  var constrArgs = printConstructorArguments(true, cd.pcd_args, cmtTbl);\n  var typ = cd.pcd_res;\n  var gadt = typ !== undefined ? 
Res_doc.indent(Res_doc.concat({\n              hd: Res_doc.text(\": \"),\n              tl: {\n                hd: printTypExpr(typ, cmtTbl),\n                tl: /* [] */0\n              }\n            })) : Res_doc.nil;\n  return Res_doc.concat({\n              hd: bar,\n              tl: {\n                hd: Res_doc.group(Res_doc.concat({\n                          hd: attrs,\n                          tl: {\n                            hd: constrName,\n                            tl: {\n                              hd: constrArgs,\n                              tl: {\n                                hd: gadt,\n                                tl: /* [] */0\n                              }\n                            }\n                          }\n                        })),\n                tl: /* [] */0\n              }\n            });\n}\n\nfunction printConstructorArguments(indent, cdArgs, cmtTbl) {\n  if (cdArgs.TAG === /* Pcstr_tuple */0) {\n    var types = cdArgs._0;\n    if (!types) {\n      return Res_doc.nil;\n    }\n    var args = Res_doc.concat({\n          hd: Res_doc.lparen,\n          tl: {\n            hd: Res_doc.indent(Res_doc.concat({\n                      hd: Res_doc.softLine,\n                      tl: {\n                        hd: Res_doc.join(Res_doc.concat({\n                                  hd: Res_doc.comma,\n                                  tl: {\n                                    hd: Res_doc.line,\n                                    tl: /* [] */0\n                                  }\n                                }), List.map((function (typexpr) {\n                                    return printTypExpr(typexpr, cmtTbl);\n                                  }), types)),\n                        tl: /* [] */0\n                      }\n                    })),\n            tl: {\n              hd: Res_doc.trailingComma,\n              tl: {\n                hd: Res_doc.softLine,\n                tl: {\n                  hd: 
Res_doc.rparen,\n                  tl: /* [] */0\n                }\n              }\n            }\n          }\n        });\n    return Res_doc.group(indent ? Res_doc.indent(args) : args);\n  }\n  var args$1 = Res_doc.concat({\n        hd: Res_doc.lparen,\n        tl: {\n          hd: Res_doc.lbrace,\n          tl: {\n            hd: Res_doc.indent(Res_doc.concat({\n                      hd: Res_doc.softLine,\n                      tl: {\n                        hd: Res_doc.join(Res_doc.concat({\n                                  hd: Res_doc.comma,\n                                  tl: {\n                                    hd: Res_doc.line,\n                                    tl: /* [] */0\n                                  }\n                                }), List.map((function (ld) {\n                                    var doc = printLabelDeclaration(ld, cmtTbl);\n                                    return printComments(doc, cmtTbl, ld.pld_loc);\n                                  }), cdArgs._0)),\n                        tl: /* [] */0\n                      }\n                    })),\n            tl: {\n              hd: Res_doc.trailingComma,\n              tl: {\n                hd: Res_doc.softLine,\n                tl: {\n                  hd: Res_doc.rbrace,\n                  tl: {\n                    hd: Res_doc.rparen,\n                    tl: /* [] */0\n                  }\n                }\n              }\n            }\n          }\n        }\n      });\n  if (indent) {\n    return Res_doc.indent(args$1);\n  } else {\n    return args$1;\n  }\n}\n\nfunction printLabelDeclaration(ld, cmtTbl) {\n  var attrs = printAttributes(ld.pld_name.loc, undefined, ld.pld_attributes, cmtTbl);\n  var match = ld.pld_mutable;\n  var mutableFlag = match ? 
Res_doc.text(\"mutable \") : Res_doc.nil;\n  var doc = printIdentLike(undefined, ld.pld_name.txt);\n  var name = printComments(doc, cmtTbl, ld.pld_name.loc);\n  return Res_doc.group(Res_doc.concat({\n                  hd: attrs,\n                  tl: {\n                    hd: mutableFlag,\n                    tl: {\n                      hd: name,\n                      tl: {\n                        hd: Res_doc.text(\": \"),\n                        tl: {\n                          hd: printTypExpr(ld.pld_type, cmtTbl),\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printTypExpr(typExpr, cmtTbl) {\n  var $$var = typExpr.ptyp_desc;\n  var renderedType;\n  if (typeof $$var === \"number\") {\n    renderedType = Res_doc.text(\"_\");\n  } else {\n    switch ($$var.TAG | 0) {\n      case /* Ptyp_var */0 :\n          renderedType = Res_doc.concat({\n                hd: Res_doc.text(\"'\"),\n                tl: {\n                  hd: printIdentLike(true, $$var._0),\n                  tl: /* [] */0\n                }\n              });\n          break;\n      case /* Ptyp_arrow */1 :\n          var match = Res_parsetree_viewer.arrowType(typExpr);\n          var returnType = match[2];\n          var args = match[1];\n          var attrsBefore = match[0];\n          var match$1 = returnType.ptyp_desc;\n          var returnTypeNeedsParens;\n          returnTypeNeedsParens = typeof match$1 === \"number\" || match$1.TAG !== /* Ptyp_alias */6 ? false : true;\n          var doc = printTypExpr(returnType, cmtTbl);\n          var returnDoc = returnTypeNeedsParens ? 
Res_doc.concat({\n                  hd: Res_doc.lparen,\n                  tl: {\n                    hd: doc,\n                    tl: {\n                      hd: Res_doc.rparen,\n                      tl: /* [] */0\n                    }\n                  }\n                }) : doc;\n          var match$2 = Res_parsetree_viewer.processUncurriedAttribute(attrsBefore);\n          var attrs = match$2[1];\n          var isUncurried = match$2[0];\n          var exit = 0;\n          if (args) {\n            var match$3 = args.hd;\n            if (match$3[0] || !(typeof match$3[1] === \"number\" && !(args.tl || isUncurried))) {\n              exit = 1;\n            } else {\n              var n = match$3[2];\n              var hasAttrsBefore = attrs !== /* [] */0;\n              var attrs$1 = hasAttrsBefore ? printAttributes(undefined, true, attrsBefore, cmtTbl) : Res_doc.nil;\n              var doc$1 = printTypExpr(n, cmtTbl);\n              var match$4 = n.ptyp_desc;\n              var typDoc;\n              if (typeof match$4 === \"number\") {\n                typDoc = doc$1;\n              } else {\n                switch (match$4.TAG | 0) {\n                  case /* Ptyp_arrow */1 :\n                  case /* Ptyp_tuple */2 :\n                  case /* Ptyp_alias */6 :\n                      typDoc = addParens(doc$1);\n                      break;\n                  default:\n                    typDoc = doc$1;\n                }\n              }\n              renderedType = Res_doc.group(Res_doc.concat({\n                        hd: Res_doc.group(attrs$1),\n                        tl: {\n                          hd: Res_doc.group(hasAttrsBefore ? 
Res_doc.concat({\n                                      hd: Res_doc.lparen,\n                                      tl: {\n                                        hd: Res_doc.indent(Res_doc.concat({\n                                                  hd: Res_doc.softLine,\n                                                  tl: {\n                                                    hd: typDoc,\n                                                    tl: {\n                                                      hd: Res_doc.text(\" => \"),\n                                                      tl: {\n                                                        hd: returnDoc,\n                                                        tl: /* [] */0\n                                                      }\n                                                    }\n                                                  }\n                                                })),\n                                        tl: {\n                                          hd: Res_doc.softLine,\n                                          tl: {\n                                            hd: Res_doc.rparen,\n                                            tl: /* [] */0\n                                          }\n                                        }\n                                      }\n                                    }) : Res_doc.concat({\n                                      hd: typDoc,\n                                      tl: {\n                                        hd: Res_doc.text(\" => \"),\n                                        tl: {\n                                          hd: returnDoc,\n                                          tl: /* [] */0\n                                        }\n                                      }\n                                    })),\n                          tl: /* [] */0\n                        }\n                      }));\n            }\n    
      } else {\n            renderedType = Res_doc.nil;\n          }\n          if (exit === 1) {\n            var attrs$2 = printAttributes(undefined, true, attrs, cmtTbl);\n            var renderedArgs = Res_doc.concat({\n                  hd: attrs$2,\n                  tl: {\n                    hd: Res_doc.text(\"(\"),\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: isUncurried ? Res_doc.concat({\n                                          hd: Res_doc.dot,\n                                          tl: {\n                                            hd: Res_doc.space,\n                                            tl: /* [] */0\n                                          }\n                                        }) : Res_doc.nil,\n                                  tl: {\n                                    hd: Res_doc.join(Res_doc.concat({\n                                              hd: Res_doc.comma,\n                                              tl: {\n                                                hd: Res_doc.line,\n                                                tl: /* [] */0\n                                              }\n                                            }), List.map((function (tp) {\n                                                return printTypeParameter(tp, cmtTbl);\n                                              }), args)),\n                                    tl: /* [] */0\n                                  }\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.trailingComma,\n                        tl: {\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.text(\")\"),\n                            tl: /* [] */0\n     
                     }\n                        }\n                      }\n                    }\n                  }\n                });\n            renderedType = Res_doc.group(Res_doc.concat({\n                      hd: renderedArgs,\n                      tl: {\n                        hd: Res_doc.text(\" => \"),\n                        tl: {\n                          hd: returnDoc,\n                          tl: /* [] */0\n                        }\n                      }\n                    }));\n          }\n          break;\n      case /* Ptyp_tuple */2 :\n          renderedType = printTupleType(false, $$var._0, cmtTbl);\n          break;\n      case /* Ptyp_constr */3 :\n          var constrArgs = $$var._1;\n          var longidentLoc = $$var._0;\n          var exit$1 = 0;\n          if (constrArgs) {\n            var tuple = constrArgs.hd.ptyp_desc;\n            if (typeof tuple === \"number\") {\n              exit$1 = 1;\n            } else {\n              switch (tuple.TAG | 0) {\n                case /* Ptyp_tuple */2 :\n                    if (constrArgs.tl) {\n                      exit$1 = 1;\n                    } else {\n                      var constrName = printLidentPath(longidentLoc, cmtTbl);\n                      renderedType = Res_doc.group(Res_doc.concat({\n                                hd: constrName,\n                                tl: {\n                                  hd: Res_doc.lessThan,\n                                  tl: {\n                                    hd: printTupleType(true, tuple._0, cmtTbl),\n                                    tl: {\n                                      hd: Res_doc.greaterThan,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }));\n                    }\n                    break;\n                case /* Ptyp_object */4 :\n                  
  if (constrArgs.tl) {\n                      exit$1 = 1;\n                    } else {\n                      var constrName$1 = printLidentPath(longidentLoc, cmtTbl);\n                      renderedType = Res_doc.concat({\n                            hd: constrName$1,\n                            tl: {\n                              hd: Res_doc.lessThan,\n                              tl: {\n                                hd: printObject(true, tuple._0, tuple._1, cmtTbl),\n                                tl: {\n                                  hd: Res_doc.greaterThan,\n                                  tl: /* [] */0\n                                }\n                              }\n                            }\n                          });\n                    }\n                    break;\n                default:\n                  exit$1 = 1;\n              }\n            }\n          } else {\n            exit$1 = 1;\n          }\n          if (exit$1 === 1) {\n            var constrName$2 = printLidentPath(longidentLoc, cmtTbl);\n            renderedType = constrArgs ? 
Res_doc.group(Res_doc.concat({\n                        hd: constrName$2,\n                        tl: {\n                          hd: Res_doc.lessThan,\n                          tl: {\n                            hd: Res_doc.indent(Res_doc.concat({\n                                      hd: Res_doc.softLine,\n                                      tl: {\n                                        hd: Res_doc.join(Res_doc.concat({\n                                                  hd: Res_doc.comma,\n                                                  tl: {\n                                                    hd: Res_doc.line,\n                                                    tl: /* [] */0\n                                                  }\n                                                }), List.map((function (typexpr) {\n                                                    return printTypExpr(typexpr, cmtTbl);\n                                                  }), constrArgs)),\n                                        tl: /* [] */0\n                                      }\n                                    })),\n                            tl: {\n                              hd: Res_doc.trailingComma,\n                              tl: {\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: Res_doc.greaterThan,\n                                  tl: /* [] */0\n                                }\n                              }\n                            }\n                          }\n                        }\n                      })) : constrName$2;\n          }\n          break;\n      case /* Ptyp_object */4 :\n          renderedType = printObject(false, $$var._0, $$var._1, cmtTbl);\n          break;\n      case /* Ptyp_class */5 :\n          renderedType = Res_doc.text(\"classes are not supported in types\");\n          break;\n      case /* Ptyp_alias */6 :\n          var 
typ = $$var._0;\n          var match$5 = typ.ptyp_desc;\n          var needsParens;\n          needsParens = typeof match$5 === \"number\" || match$5.TAG !== /* Ptyp_arrow */1 ? false : true;\n          var doc$2 = printTypExpr(typ, cmtTbl);\n          var typ$1 = needsParens ? Res_doc.concat({\n                  hd: Res_doc.lparen,\n                  tl: {\n                    hd: doc$2,\n                    tl: {\n                      hd: Res_doc.rparen,\n                      tl: /* [] */0\n                    }\n                  }\n                }) : doc$2;\n          renderedType = Res_doc.concat({\n                hd: typ$1,\n                tl: {\n                  hd: Res_doc.text(\" as \"),\n                  tl: {\n                    hd: Res_doc.concat({\n                          hd: Res_doc.text(\"'\"),\n                          tl: {\n                            hd: printIdentLike(undefined, $$var._1),\n                            tl: /* [] */0\n                          }\n                        }),\n                    tl: /* [] */0\n                  }\n                }\n              });\n          break;\n      case /* Ptyp_variant */7 :\n          var labelsOpt = $$var._2;\n          var forceBreak = typExpr.ptyp_loc.loc_start.pos_lnum < typExpr.ptyp_loc.loc_end.pos_lnum;\n          var printRowField = function (x) {\n            if (x.TAG !== /* Rtag */0) {\n              return printTypExpr(x._0, cmtTbl);\n            }\n            var truth = x._2;\n            var attrs = x._1;\n            var txt = x._0.txt;\n            if (truth && !x._3) {\n              return Res_doc.group(Res_doc.concat({\n                              hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n                              tl: {\n                                hd: Res_doc.concat({\n                                      hd: Res_doc.text(\"#\"),\n                                      tl: {\n                                        hd: 
printPolyVarIdent(txt),\n                                        tl: /* [] */0\n                                      }\n                                    }),\n                                tl: /* [] */0\n                              }\n                            }));\n            }\n            var doType = function (t) {\n              var match = t.ptyp_desc;\n              if (typeof match !== \"number\" && match.TAG === /* Ptyp_tuple */2) {\n                return printTypExpr(t, cmtTbl);\n              }\n              return Res_doc.concat({\n                          hd: Res_doc.lparen,\n                          tl: {\n                            hd: printTypExpr(t, cmtTbl),\n                            tl: {\n                              hd: Res_doc.rparen,\n                              tl: /* [] */0\n                            }\n                          }\n                        });\n            };\n            var printedTypes = List.map(doType, x._3);\n            var cases = Res_doc.join(Res_doc.concat({\n                      hd: Res_doc.line,\n                      tl: {\n                        hd: Res_doc.text(\"& \"),\n                        tl: /* [] */0\n                      }\n                    }), printedTypes);\n            var cases$1 = truth ? 
Res_doc.concat({\n                    hd: Res_doc.line,\n                    tl: {\n                      hd: Res_doc.text(\"& \"),\n                      tl: {\n                        hd: cases,\n                        tl: /* [] */0\n                      }\n                    }\n                  }) : cases;\n            return Res_doc.group(Res_doc.concat({\n                            hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n                            tl: {\n                              hd: Res_doc.concat({\n                                    hd: Res_doc.text(\"#\"),\n                                    tl: {\n                                      hd: printPolyVarIdent(txt),\n                                      tl: /* [] */0\n                                    }\n                                  }),\n                              tl: {\n                                hd: cases$1,\n                                tl: /* [] */0\n                              }\n                            }\n                          }));\n          };\n          var docs = List.map(printRowField, $$var._0);\n          var cases = Res_doc.join(Res_doc.concat({\n                    hd: Res_doc.line,\n                    tl: {\n                      hd: Res_doc.text(\"| \"),\n                      tl: /* [] */0\n                    }\n                  }), docs);\n          var cases$1 = docs === /* [] */0 ? cases : Res_doc.concat({\n                  hd: Res_doc.ifBreaks(Res_doc.text(\"| \"), Res_doc.nil),\n                  tl: {\n                    hd: cases,\n                    tl: /* [] */0\n                  }\n                });\n          var openingSymbol = $$var._1 === /* Open */1 ? Res_doc.concat({\n                  hd: Res_doc.greaterThan,\n                  tl: {\n                    hd: Res_doc.line,\n                    tl: /* [] */0\n                  }\n                }) : (\n              labelsOpt === undefined ? 
Res_doc.softLine : Res_doc.concat({\n                      hd: Res_doc.lessThan,\n                      tl: {\n                        hd: Res_doc.line,\n                        tl: /* [] */0\n                      }\n                    })\n            );\n          var labels = labelsOpt !== undefined && labelsOpt ? Res_doc.concat(List.map((function (label) {\n                        return Res_doc.concat({\n                                    hd: Res_doc.line,\n                                    tl: {\n                                      hd: Res_doc.text(\"#\"),\n                                      tl: {\n                                        hd: printPolyVarIdent(label),\n                                        tl: /* [] */0\n                                      }\n                                    }\n                                  });\n                      }), labelsOpt)) : Res_doc.nil;\n          var closingSymbol = labelsOpt !== undefined && labelsOpt ? Res_doc.text(\" >\") : Res_doc.nil;\n          renderedType = Res_doc.breakableGroup(forceBreak, Res_doc.concat({\n                    hd: Res_doc.lbracket,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: openingSymbol,\n                                tl: {\n                                  hd: cases$1,\n                                  tl: {\n                                    hd: closingSymbol,\n                                    tl: {\n                                      hd: labels,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.softLine,\n                        tl: {\n                          hd: Res_doc.rbracket,\n                          tl: /* [] */0\n                        }\n             
         }\n                    }\n                  }));\n          break;\n      case /* Ptyp_poly */8 :\n          var stringLocs = $$var._0;\n          renderedType = stringLocs ? Res_doc.concat({\n                  hd: Res_doc.join(Res_doc.space, List.map((function (param) {\n                              var doc = Res_doc.concat({\n                                    hd: Res_doc.text(\"'\"),\n                                    tl: {\n                                      hd: Res_doc.text(param.txt),\n                                      tl: /* [] */0\n                                    }\n                                  });\n                              return printComments(doc, cmtTbl, param.loc);\n                            }), stringLocs)),\n                  tl: {\n                    hd: Res_doc.dot,\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: {\n                        hd: printTypExpr($$var._1, cmtTbl),\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                }) : printTypExpr($$var._1, cmtTbl);\n          break;\n      case /* Ptyp_package */9 :\n          renderedType = printPackageType(true, $$var._0, cmtTbl);\n          break;\n      case /* Ptyp_extension */10 :\n          renderedType = printExtension(false, $$var._0, cmtTbl);\n          break;\n      \n    }\n  }\n  var match$6 = typExpr.ptyp_desc;\n  var shouldPrintItsOwnAttributes;\n  shouldPrintItsOwnAttributes = typeof match$6 === \"number\" || match$6.TAG !== /* Ptyp_arrow */1 ? false : true;\n  var attrs$3 = typExpr.ptyp_attributes;\n  var doc$3 = attrs$3 && !shouldPrintItsOwnAttributes ? 
Res_doc.group(Res_doc.concat({\n              hd: printAttributes(undefined, undefined, attrs$3, cmtTbl),\n              tl: {\n                hd: renderedType,\n                tl: /* [] */0\n              }\n            })) : renderedType;\n  return printComments(doc$3, cmtTbl, typExpr.ptyp_loc);\n}\n\nfunction printObject(inline, fields, openFlag, cmtTbl) {\n  var doc;\n  if (fields) {\n    var tmp;\n    tmp = openFlag ? (\n        fields && fields.hd.TAG !== /* Otag */0 ? Res_doc.text(\".. \") : Res_doc.dotdot\n      ) : Res_doc.nil;\n    doc = Res_doc.concat({\n          hd: Res_doc.lbrace,\n          tl: {\n            hd: tmp,\n            tl: {\n              hd: Res_doc.indent(Res_doc.concat({\n                        hd: Res_doc.softLine,\n                        tl: {\n                          hd: Res_doc.join(Res_doc.concat({\n                                    hd: Res_doc.comma,\n                                    tl: {\n                                      hd: Res_doc.line,\n                                      tl: /* [] */0\n                                    }\n                                  }), List.map((function (field) {\n                                      return printObjectField(field, cmtTbl);\n                                    }), fields)),\n                          tl: /* [] */0\n                        }\n                      })),\n              tl: {\n                hd: Res_doc.trailingComma,\n                tl: {\n                  hd: Res_doc.softLine,\n                  tl: {\n                    hd: Res_doc.rbrace,\n                    tl: /* [] */0\n                  }\n                }\n              }\n            }\n          }\n        });\n  } else {\n    doc = Res_doc.concat({\n          hd: Res_doc.lbrace,\n          tl: {\n            hd: openFlag ? 
Res_doc.dotdot : Res_doc.dot,\n            tl: {\n              hd: Res_doc.rbrace,\n              tl: /* [] */0\n            }\n          }\n        });\n  }\n  if (inline) {\n    return doc;\n  } else {\n    return Res_doc.group(doc);\n  }\n}\n\nfunction printTupleType(inline, types, cmtTbl) {\n  var tuple = Res_doc.concat({\n        hd: Res_doc.lparen,\n        tl: {\n          hd: Res_doc.indent(Res_doc.concat({\n                    hd: Res_doc.softLine,\n                    tl: {\n                      hd: Res_doc.join(Res_doc.concat({\n                                hd: Res_doc.comma,\n                                tl: {\n                                  hd: Res_doc.line,\n                                  tl: /* [] */0\n                                }\n                              }), List.map((function (typexpr) {\n                                  return printTypExpr(typexpr, cmtTbl);\n                                }), types)),\n                      tl: /* [] */0\n                    }\n                  })),\n          tl: {\n            hd: Res_doc.trailingComma,\n            tl: {\n              hd: Res_doc.softLine,\n              tl: {\n                hd: Res_doc.rparen,\n                tl: /* [] */0\n              }\n            }\n          }\n        }\n      });\n  if (inline === false) {\n    return Res_doc.group(tuple);\n  } else {\n    return tuple;\n  }\n}\n\nfunction printObjectField(field, cmtTbl) {\n  if (field.TAG !== /* Otag */0) {\n    return Res_doc.concat({\n                hd: Res_doc.dotdotdot,\n                tl: {\n                  hd: printTypExpr(field._0, cmtTbl),\n                  tl: /* [] */0\n                }\n              });\n  }\n  var typ = field._2;\n  var labelLoc = field._0;\n  var doc = Res_doc.text(\"\\\"\" + (labelLoc.txt + \"\\\"\"));\n  var lbl = printComments(doc, cmtTbl, labelLoc.loc);\n  var doc$1 = Res_doc.concat({\n        hd: printAttributes(labelLoc.loc, undefined, field._1, cmtTbl),\n     
   tl: {\n          hd: lbl,\n          tl: {\n            hd: Res_doc.text(\": \"),\n            tl: {\n              hd: printTypExpr(typ, cmtTbl),\n              tl: /* [] */0\n            }\n          }\n        }\n      });\n  var init = labelLoc.loc;\n  var cmtLoc_loc_start = init.loc_start;\n  var cmtLoc_loc_end = typ.ptyp_loc.loc_end;\n  var cmtLoc_loc_ghost = init.loc_ghost;\n  var cmtLoc = {\n    loc_start: cmtLoc_loc_start,\n    loc_end: cmtLoc_loc_end,\n    loc_ghost: cmtLoc_loc_ghost\n  };\n  return printComments(doc$1, cmtTbl, cmtLoc);\n}\n\nfunction printTypeParameter(param, cmtTbl) {\n  var typ = param[2];\n  var lbl = param[1];\n  var match = Res_parsetree_viewer.processUncurriedAttribute(param[0]);\n  var uncurried = match[0] ? Res_doc.concat({\n          hd: Res_doc.dot,\n          tl: {\n            hd: Res_doc.space,\n            tl: /* [] */0\n          }\n        }) : Res_doc.nil;\n  var attrs = printAttributes(undefined, undefined, match[1], cmtTbl);\n  var label;\n  label = typeof lbl === \"number\" ? Res_doc.nil : Res_doc.concat({\n          hd: Res_doc.text(\"~\"),\n          tl: {\n            hd: printIdentLike(undefined, lbl._0),\n            tl: {\n              hd: Res_doc.text(\": \"),\n              tl: /* [] */0\n            }\n          }\n        });\n  var optionalIndicator;\n  optionalIndicator = typeof lbl === \"number\" || lbl.TAG === /* Labelled */0 ? 
Res_doc.nil : Res_doc.text(\"=?\");\n  var match$1 = typ.ptyp_attributes;\n  var match$2;\n  if (match$1) {\n    var match$3 = match$1.hd[0];\n    if (match$3.txt === \"ns.namedArgLoc\") {\n      var loc = match$3.loc;\n      match$2 = [\n        {\n          loc_start: loc.loc_start,\n          loc_end: typ.ptyp_loc.loc_end,\n          loc_ghost: loc.loc_ghost\n        },\n        {\n          ptyp_desc: typ.ptyp_desc,\n          ptyp_loc: typ.ptyp_loc,\n          ptyp_attributes: match$1.tl\n        }\n      ];\n    } else {\n      match$2 = [\n        typ.ptyp_loc,\n        typ\n      ];\n    }\n  } else {\n    match$2 = [\n      typ.ptyp_loc,\n      typ\n    ];\n  }\n  var doc = Res_doc.group(Res_doc.concat({\n            hd: uncurried,\n            tl: {\n              hd: attrs,\n              tl: {\n                hd: label,\n                tl: {\n                  hd: printTypExpr(match$2[1], cmtTbl),\n                  tl: {\n                    hd: optionalIndicator,\n                    tl: /* [] */0\n                  }\n                }\n              }\n            }\n          }));\n  return printComments(doc, cmtTbl, match$2[0]);\n}\n\nfunction printValueBinding(recFlag, vb, cmtTbl, i) {\n  var attrs = printAttributes(vb.pvb_pat.ppat_loc, undefined, vb.pvb_attributes, cmtTbl);\n  var header = i === 0 ? 
Res_doc.concat({\n          hd: Res_doc.text(\"let \"),\n          tl: {\n            hd: recFlag,\n            tl: /* [] */0\n          }\n        }) : Res_doc.text(\"and \");\n  var match = vb.pvb_pat.ppat_desc;\n  var exit = 0;\n  if (typeof match === \"number\" || match.TAG !== /* Ppat_constraint */10) {\n    exit = 1;\n  } else {\n    var patTyp = match._1;\n    var tmp = patTyp.ptyp_desc;\n    if (typeof tmp === \"number\" || tmp.TAG !== /* Ptyp_poly */8) {\n      exit = 1;\n    } else {\n      var expr = vb.pvb_expr;\n      var tmp$1 = expr.pexp_desc;\n      if (typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Pexp_newtype */31) {\n        exit = 1;\n      } else {\n        var pattern = match._0;\n        var match$1 = Res_parsetree_viewer.funExpr(expr);\n        var parameters = match$1[1];\n        var abstractType;\n        if (parameters) {\n          var match$2 = parameters.hd;\n          abstractType = match$2.TAG === /* Parameter */0 || parameters.tl ? Res_doc.nil : Res_doc.concat({\n                  hd: Res_doc.text(\"type \"),\n                  tl: {\n                    hd: Res_doc.join(Res_doc.space, List.map((function ($$var) {\n                                return Res_doc.text($$var.txt);\n                              }), match$2.locs)),\n                    tl: {\n                      hd: Res_doc.dot,\n                      tl: /* [] */0\n                    }\n                  }\n                });\n        } else {\n          abstractType = Res_doc.nil;\n        }\n        var match$3 = match$1[2].pexp_desc;\n        var exit$1 = 0;\n        if (typeof match$3 === \"number\") {\n          exit$1 = 2;\n        } else {\n          if (match$3.TAG === /* Pexp_constraint */19) {\n            return Res_doc.group(Res_doc.concat({\n                            hd: attrs,\n                            tl: {\n                              hd: header,\n                              tl: {\n                                hd: 
printPattern(pattern, cmtTbl),\n                                tl: {\n                                  hd: Res_doc.text(\":\"),\n                                  tl: {\n                                    hd: Res_doc.indent(Res_doc.concat({\n                                              hd: Res_doc.line,\n                                              tl: {\n                                                hd: abstractType,\n                                                tl: {\n                                                  hd: Res_doc.space,\n                                                  tl: {\n                                                    hd: printTypExpr(match$3._1, cmtTbl),\n                                                    tl: {\n                                                      hd: Res_doc.text(\" =\"),\n                                                      tl: {\n                                                        hd: Res_doc.concat({\n                                                              hd: Res_doc.line,\n                                                              tl: {\n                                                                hd: printExpressionWithComments(match$3._0, cmtTbl),\n                                                                tl: /* [] */0\n                                                              }\n                                                            }),\n                                                        tl: /* [] */0\n                                                      }\n                                                    }\n                                                  }\n                                                }\n                                              }\n                                            })),\n                                    tl: /* [] */0\n                                  }\n                                }\n                              
}\n                            }\n                          }));\n          }\n          exit$1 = 2;\n        }\n        if (exit$1 === 2) {\n          return Res_doc.group(Res_doc.concat({\n                          hd: attrs,\n                          tl: {\n                            hd: header,\n                            tl: {\n                              hd: printPattern(pattern, cmtTbl),\n                              tl: {\n                                hd: Res_doc.text(\":\"),\n                                tl: {\n                                  hd: Res_doc.indent(Res_doc.concat({\n                                            hd: Res_doc.line,\n                                            tl: {\n                                              hd: abstractType,\n                                              tl: {\n                                                hd: Res_doc.space,\n                                                tl: {\n                                                  hd: printTypExpr(patTyp, cmtTbl),\n                                                  tl: {\n                                                    hd: Res_doc.text(\" =\"),\n                                                    tl: {\n                                                      hd: Res_doc.concat({\n                                                            hd: Res_doc.line,\n                                                            tl: {\n                                                              hd: printExpressionWithComments(expr, cmtTbl),\n                                                              tl: /* [] */0\n                                                            }\n                                                          }),\n                                                      tl: /* [] */0\n                                                    }\n                                                  }\n                                            
    }\n                                              }\n                                            }\n                                          })),\n                                  tl: /* [] */0\n                                }\n                              }\n                            }\n                          }\n                        }));\n        }\n        \n      }\n    }\n  }\n  if (exit === 1) {\n    var match$4 = Res_parsetree_viewer.processBracesAttr(vb.pvb_expr);\n    var expr$1 = match$4[1];\n    var doc = printExpressionWithComments(vb.pvb_expr, cmtTbl);\n    var braces = Res_parens.expr(vb.pvb_expr);\n    var printedExpr = typeof braces === \"number\" ? (\n        braces !== 0 ? doc : addParens(doc)\n      ) : printBraces(doc, expr$1, braces._0);\n    var patternDoc = printPattern(vb.pvb_pat, cmtTbl);\n    if (Res_parsetree_viewer.isSinglePipeExpr(vb.pvb_expr)) {\n      return Res_doc.customLayout({\n                  hd: Res_doc.group(Res_doc.concat({\n                            hd: attrs,\n                            tl: {\n                              hd: header,\n                              tl: {\n                                hd: patternDoc,\n                                tl: {\n                                  hd: Res_doc.text(\" =\"),\n                                  tl: {\n                                    hd: Res_doc.space,\n                                    tl: {\n                                      hd: printedExpr,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }\n                            }\n                          })),\n                  tl: {\n                    hd: Res_doc.group(Res_doc.concat({\n                              hd: attrs,\n                              tl: {\n                                hd: header,\n                                tl: {\n 
                                 hd: patternDoc,\n                                  tl: {\n                                    hd: Res_doc.text(\" =\"),\n                                    tl: {\n                                      hd: Res_doc.indent(Res_doc.concat({\n                                                hd: Res_doc.line,\n                                                tl: {\n                                                  hd: printedExpr,\n                                                  tl: /* [] */0\n                                                }\n                                              })),\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }\n                            })),\n                    tl: /* [] */0\n                  }\n                });\n    }\n    var shouldIndent;\n    if (match$4[0] !== undefined) {\n      shouldIndent = false;\n    } else if (Res_parsetree_viewer.isBinaryExpression(expr$1)) {\n      shouldIndent = true;\n    } else {\n      var e = vb.pvb_expr;\n      var match$5 = e.pexp_desc;\n      var tmp$2;\n      var exit$2 = 0;\n      if (typeof match$5 === \"number\") {\n        exit$2 = 2;\n      } else {\n        switch (match$5.TAG | 0) {\n          case /* Pexp_ifthenelse */15 :\n              var match$6 = e.pexp_attributes;\n              if (match$6) {\n                var ifExpr = match$5._0;\n                if (match$6.hd[0].txt === \"ns.ternary\" && !match$6.tl) {\n                  tmp$2 = Res_parsetree_viewer.isBinaryExpression(ifExpr) || Res_parsetree_viewer.hasAttributes(ifExpr.pexp_attributes);\n                } else {\n                  exit$2 = 2;\n                }\n              } else {\n                exit$2 = 2;\n              }\n              break;\n          case /* Pexp_newtype */31 :\n              tmp$2 = false;\n              break;\n  
        default:\n            exit$2 = 2;\n        }\n      }\n      if (exit$2 === 2) {\n        tmp$2 = Res_parsetree_viewer.hasAttributes(e.pexp_attributes) || Res_parsetree_viewer.isArrayAccess(e);\n      }\n      shouldIndent = tmp$2;\n    }\n    return Res_doc.group(Res_doc.concat({\n                    hd: attrs,\n                    tl: {\n                      hd: header,\n                      tl: {\n                        hd: patternDoc,\n                        tl: {\n                          hd: Res_doc.text(\" =\"),\n                          tl: {\n                            hd: shouldIndent ? Res_doc.indent(Res_doc.concat({\n                                        hd: Res_doc.line,\n                                        tl: {\n                                          hd: printedExpr,\n                                          tl: /* [] */0\n                                        }\n                                      })) : Res_doc.concat({\n                                    hd: Res_doc.space,\n                                    tl: {\n                                      hd: printedExpr,\n                                      tl: /* [] */0\n                                    }\n                                  }),\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }));\n  }\n  \n}\n\nfunction printPackageType(printModuleKeywordAndParens, packageType, cmtTbl) {\n  var packageConstraints = packageType[1];\n  var longidentLoc = packageType[0];\n  var doc = packageConstraints ? 
Res_doc.group(Res_doc.concat({\n              hd: printLongidentLocation(longidentLoc, cmtTbl),\n              tl: {\n                hd: printPackageConstraints(packageConstraints, cmtTbl),\n                tl: {\n                  hd: Res_doc.softLine,\n                  tl: /* [] */0\n                }\n              }\n            })) : Res_doc.group(Res_doc.concat({\n              hd: printLongidentLocation(longidentLoc, cmtTbl),\n              tl: /* [] */0\n            }));\n  if (printModuleKeywordAndParens) {\n    return Res_doc.concat({\n                hd: Res_doc.text(\"module(\"),\n                tl: {\n                  hd: doc,\n                  tl: {\n                    hd: Res_doc.rparen,\n                    tl: /* [] */0\n                  }\n                }\n              });\n  } else {\n    return doc;\n  }\n}\n\nfunction printPackageConstraints(packageConstraints, cmtTbl) {\n  return Res_doc.concat({\n              hd: Res_doc.text(\" with\"),\n              tl: {\n                hd: Res_doc.indent(Res_doc.concat({\n                          hd: Res_doc.line,\n                          tl: {\n                            hd: Res_doc.join(Res_doc.line, List.mapi((function (i, pc) {\n                                        var init = pc[0].loc;\n                                        var cmtLoc_loc_start = init.loc_start;\n                                        var cmtLoc_loc_end = pc[1].ptyp_loc.loc_end;\n                                        var cmtLoc_loc_ghost = init.loc_ghost;\n                                        var cmtLoc = {\n                                          loc_start: cmtLoc_loc_start,\n                                          loc_end: cmtLoc_loc_end,\n                                          loc_ghost: cmtLoc_loc_ghost\n                                        };\n                                        var doc = printPackageConstraint(i, cmtTbl, pc);\n                                        return 
printComments(doc, cmtTbl, cmtLoc);\n                                      }), packageConstraints)),\n                            tl: /* [] */0\n                          }\n                        })),\n                tl: /* [] */0\n              }\n            });\n}\n\nfunction printPackageConstraint(i, cmtTbl, param) {\n  var prefix = i === 0 ? Res_doc.text(\"type \") : Res_doc.text(\"and type \");\n  return Res_doc.concat({\n              hd: prefix,\n              tl: {\n                hd: printLongidentLocation(param[0], cmtTbl),\n                tl: {\n                  hd: Res_doc.text(\" = \"),\n                  tl: {\n                    hd: printTypExpr(param[1], cmtTbl),\n                    tl: /* [] */0\n                  }\n                }\n              }\n            });\n}\n\nfunction printExtension(atModuleLvl, param, cmtTbl) {\n  var stringLoc = param[0];\n  var txt = convertBsExtension(stringLoc.txt);\n  var doc = Res_doc.concat({\n        hd: Res_doc.text(\"%\"),\n        tl: {\n          hd: atModuleLvl ? 
Res_doc.text(\"%\") : Res_doc.nil,\n          tl: {\n            hd: Res_doc.text(txt),\n            tl: /* [] */0\n          }\n        }\n      });\n  var extName = printComments(doc, cmtTbl, stringLoc.loc);\n  return Res_doc.group(Res_doc.concat({\n                  hd: extName,\n                  tl: {\n                    hd: printPayload(param[1], cmtTbl),\n                    tl: /* [] */0\n                  }\n                }));\n}\n\nfunction printPattern(p, cmtTbl) {\n  var $$var = p.ppat_desc;\n  var patternWithoutAttributes;\n  if (typeof $$var === \"number\") {\n    patternWithoutAttributes = Res_doc.text(\"_\");\n  } else {\n    switch ($$var.TAG | 0) {\n      case /* Ppat_var */0 :\n          patternWithoutAttributes = printIdentLike(undefined, $$var._0.txt);\n          break;\n      case /* Ppat_alias */1 :\n          var p$1 = $$var._0;\n          var match = p$1.ppat_desc;\n          var needsParens;\n          if (typeof match === \"number\") {\n            needsParens = false;\n          } else {\n            switch (match.TAG | 0) {\n              case /* Ppat_alias */1 :\n              case /* Ppat_or */9 :\n                  needsParens = true;\n                  break;\n              default:\n                needsParens = false;\n            }\n          }\n          var p$2 = printPattern(p$1, cmtTbl);\n          var renderedPattern = needsParens ? 
Res_doc.concat({\n                  hd: Res_doc.text(\"(\"),\n                  tl: {\n                    hd: p$2,\n                    tl: {\n                      hd: Res_doc.text(\")\"),\n                      tl: /* [] */0\n                    }\n                  }\n                }) : p$2;\n          patternWithoutAttributes = Res_doc.concat({\n                hd: renderedPattern,\n                tl: {\n                  hd: Res_doc.text(\" as \"),\n                  tl: {\n                    hd: printStringLoc($$var._1, cmtTbl),\n                    tl: /* [] */0\n                  }\n                }\n              });\n          break;\n      case /* Ppat_constant */2 :\n          var templateLiteral = Res_parsetree_viewer.hasTemplateLiteralAttr(p.ppat_attributes);\n          patternWithoutAttributes = printConstant(templateLiteral, $$var._0);\n          break;\n      case /* Ppat_interval */3 :\n          patternWithoutAttributes = Res_doc.concat({\n                hd: printConstant(undefined, $$var._0),\n                tl: {\n                  hd: Res_doc.text(\" .. 
\"),\n                  tl: {\n                    hd: printConstant(undefined, $$var._1),\n                    tl: /* [] */0\n                  }\n                }\n              });\n          break;\n      case /* Ppat_tuple */4 :\n          patternWithoutAttributes = Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.lparen,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: Res_doc.join(Res_doc.concat({\n                                            hd: Res_doc.text(\",\"),\n                                            tl: {\n                                              hd: Res_doc.line,\n                                              tl: /* [] */0\n                                            }\n                                          }), List.map((function (pat) {\n                                              return printPattern(pat, cmtTbl);\n                                            }), $$var._0)),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.trailingComma,\n                        tl: {\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.rparen,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }));\n          break;\n      case /* Ppat_construct */5 :\n          var constrName = $$var._0;\n          var match$1 = constrName.txt;\n          var exit = 0;\n          switch (match$1.TAG | 0) {\n            case /* Lident */0 :\n                switch (match$1._0) {\n                  case \"()\" :\n                      patternWithoutAttributes = 
Res_doc.concat({\n                            hd: Res_doc.lparen,\n                            tl: {\n                              hd: printCommentsInside(cmtTbl, p.ppat_loc),\n                              tl: {\n                                hd: Res_doc.rparen,\n                                tl: /* [] */0\n                              }\n                            }\n                          });\n                      break;\n                  case \"::\" :\n                      var match$2 = Res_parsetree_viewer.collectPatternsFromListConstruct(/* [] */0, p);\n                      var tail = match$2[1];\n                      var patterns = match$2[0];\n                      var shouldHug;\n                      if (patterns && !patterns.tl) {\n                        var match$3 = tail.ppat_desc;\n                        if (typeof match$3 === \"number\" || match$3.TAG !== /* Ppat_construct */5) {\n                          shouldHug = false;\n                        } else {\n                          var match$4 = match$3._0.txt;\n                          switch (match$4.TAG | 0) {\n                            case /* Lident */0 :\n                                shouldHug = match$4._0 === \"[]\" && Res_parsetree_viewer.isHuggablePattern(patterns.hd) ? 
true : false;\n                                break;\n                            case /* Ldot */1 :\n                            case /* Lapply */2 :\n                                shouldHug = false;\n                                break;\n                            \n                          }\n                        }\n                      } else {\n                        shouldHug = false;\n                      }\n                      var match$5 = tail.ppat_desc;\n                      var tmp;\n                      var exit$1 = 0;\n                      if (typeof match$5 === \"number\" || match$5.TAG !== /* Ppat_construct */5) {\n                        exit$1 = 2;\n                      } else {\n                        var match$6 = match$5._0.txt;\n                        switch (match$6.TAG | 0) {\n                          case /* Lident */0 :\n                              if (match$6._0 === \"[]\") {\n                                tmp = Res_doc.nil;\n                              } else {\n                                exit$1 = 2;\n                              }\n                              break;\n                          case /* Ldot */1 :\n                          case /* Lapply */2 :\n                              exit$1 = 2;\n                              break;\n                          \n                        }\n                      }\n                      if (exit$1 === 2) {\n                        var doc = Res_doc.concat({\n                              hd: Res_doc.text(\"...\"),\n                              tl: {\n                                hd: printPattern(tail, cmtTbl),\n                                tl: /* [] */0\n                              }\n                            });\n                        var tail$1 = printComments(doc, cmtTbl, tail.ppat_loc);\n                        tmp = Res_doc.concat({\n                              hd: Res_doc.text(\",\"),\n                              tl: {\n      
                          hd: Res_doc.line,\n                                tl: {\n                                  hd: tail$1,\n                                  tl: /* [] */0\n                                }\n                              }\n                            });\n                      }\n                      var children = Res_doc.concat({\n                            hd: shouldHug ? Res_doc.nil : Res_doc.softLine,\n                            tl: {\n                              hd: Res_doc.join(Res_doc.concat({\n                                        hd: Res_doc.text(\",\"),\n                                        tl: {\n                                          hd: Res_doc.line,\n                                          tl: /* [] */0\n                                        }\n                                      }), List.map((function (pat) {\n                                          return printPattern(pat, cmtTbl);\n                                        }), patterns)),\n                              tl: {\n                                hd: tmp,\n                                tl: /* [] */0\n                              }\n                            }\n                          });\n                      patternWithoutAttributes = Res_doc.group(Res_doc.concat({\n                                hd: Res_doc.text(\"list{\"),\n                                tl: {\n                                  hd: shouldHug ? 
children : Res_doc.concat({\n                                          hd: Res_doc.indent(children),\n                                          tl: {\n                                            hd: Res_doc.ifBreaks(Res_doc.text(\",\"), Res_doc.nil),\n                                            tl: {\n                                              hd: Res_doc.softLine,\n                                              tl: /* [] */0\n                                            }\n                                          }\n                                        }),\n                                  tl: {\n                                    hd: Res_doc.rbrace,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }));\n                      break;\n                  case \"[]\" :\n                      patternWithoutAttributes = Res_doc.concat({\n                            hd: Res_doc.text(\"list{\"),\n                            tl: {\n                              hd: printCommentsInside(cmtTbl, p.ppat_loc),\n                              tl: {\n                                hd: Res_doc.rbrace,\n                                tl: /* [] */0\n                              }\n                            }\n                          });\n                      break;\n                  default:\n                    exit = 1;\n                }\n                break;\n            case /* Ldot */1 :\n            case /* Lapply */2 :\n                exit = 1;\n                break;\n            \n          }\n          if (exit === 1) {\n            var constructorArgs = $$var._1;\n            var constrName$1 = printLongidentLocation(constrName, cmtTbl);\n            var argsDoc;\n            if (constructorArgs !== undefined) {\n              var patterns$1 = constructorArgs.ppat_desc;\n              var exit$2 = 0;\n              if (typeof patterns$1 === 
\"number\") {\n                exit$2 = 2;\n              } else {\n                switch (patterns$1.TAG | 0) {\n                  case /* Ppat_tuple */4 :\n                      var patterns$2 = patterns$1._0;\n                      var exit$3 = 0;\n                      if (patterns$2) {\n                        var arg = patterns$2.hd;\n                        var tmp$1 = arg.ppat_desc;\n                        if (typeof tmp$1 === \"number\" || !(tmp$1.TAG === /* Ppat_tuple */4 && !patterns$2.tl)) {\n                          exit$3 = 3;\n                        } else {\n                          argsDoc = Res_doc.concat({\n                                hd: Res_doc.lparen,\n                                tl: {\n                                  hd: printPattern(arg, cmtTbl),\n                                  tl: {\n                                    hd: Res_doc.rparen,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              });\n                        }\n                      } else {\n                        argsDoc = Res_doc.concat({\n                              hd: Res_doc.lparen,\n                              tl: {\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: printCommentsInside(cmtTbl, constructorArgs.ppat_loc),\n                                  tl: {\n                                    hd: Res_doc.rparen,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }\n                            });\n                      }\n                      if (exit$3 === 3) {\n                        argsDoc = Res_doc.concat({\n                              hd: Res_doc.lparen,\n                              tl: {\n                                hd: 
Res_doc.indent(Res_doc.concat({\n                                          hd: Res_doc.softLine,\n                                          tl: {\n                                            hd: Res_doc.join(Res_doc.concat({\n                                                      hd: Res_doc.comma,\n                                                      tl: {\n                                                        hd: Res_doc.line,\n                                                        tl: /* [] */0\n                                                      }\n                                                    }), List.map((function (pat) {\n                                                        return printPattern(pat, cmtTbl);\n                                                      }), patterns$2)),\n                                            tl: /* [] */0\n                                          }\n                                        })),\n                                tl: {\n                                  hd: Res_doc.trailingComma,\n                                  tl: {\n                                    hd: Res_doc.softLine,\n                                    tl: {\n                                      hd: Res_doc.rparen,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }\n                            });\n                      }\n                      break;\n                  case /* Ppat_construct */5 :\n                      var match$7 = patterns$1._0.txt;\n                      switch (match$7.TAG | 0) {\n                        case /* Lident */0 :\n                            if (match$7._0 === \"()\") {\n                              argsDoc = Res_doc.concat({\n                                    hd: Res_doc.lparen,\n                                    tl: {\n                                  
    hd: printCommentsInside(cmtTbl, constructorArgs.ppat_loc),\n                                      tl: {\n                                        hd: Res_doc.rparen,\n                                        tl: /* [] */0\n                                      }\n                                    }\n                                  });\n                            } else {\n                              exit$2 = 2;\n                            }\n                            break;\n                        case /* Ldot */1 :\n                        case /* Lapply */2 :\n                            exit$2 = 2;\n                            break;\n                        \n                      }\n                      break;\n                  default:\n                    exit$2 = 2;\n                }\n              }\n              if (exit$2 === 2) {\n                var argDoc = printPattern(constructorArgs, cmtTbl);\n                var shouldHug$1 = Res_parsetree_viewer.isHuggablePattern(constructorArgs);\n                argsDoc = Res_doc.concat({\n                      hd: Res_doc.lparen,\n                      tl: {\n                        hd: shouldHug$1 ? 
argDoc : Res_doc.concat({\n                                hd: Res_doc.indent(Res_doc.concat({\n                                          hd: Res_doc.softLine,\n                                          tl: {\n                                            hd: argDoc,\n                                            tl: /* [] */0\n                                          }\n                                        })),\n                                tl: {\n                                  hd: Res_doc.trailingComma,\n                                  tl: {\n                                    hd: Res_doc.softLine,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }),\n                        tl: {\n                          hd: Res_doc.rparen,\n                          tl: /* [] */0\n                        }\n                      }\n                    });\n              }\n              \n            } else {\n              argsDoc = Res_doc.nil;\n            }\n            patternWithoutAttributes = Res_doc.group(Res_doc.concat({\n                      hd: constrName$1,\n                      tl: {\n                        hd: argsDoc,\n                        tl: /* [] */0\n                      }\n                    }));\n          }\n          break;\n      case /* Ppat_variant */6 :\n          var variantArgs = $$var._1;\n          var label = $$var._0;\n          if (variantArgs !== undefined) {\n            var variantName = Res_doc.concat({\n                  hd: Res_doc.text(\"#\"),\n                  tl: {\n                    hd: printPolyVarIdent(label),\n                    tl: /* [] */0\n                  }\n                });\n            var argsDoc$1;\n            if (variantArgs !== undefined) {\n              var patterns$3 = variantArgs.ppat_desc;\n              var exit$4 = 0;\n              if (typeof patterns$3 === \"number\") {\n          
      exit$4 = 1;\n              } else {\n                switch (patterns$3.TAG | 0) {\n                  case /* Ppat_tuple */4 :\n                      var patterns$4 = patterns$3._0;\n                      var exit$5 = 0;\n                      if (patterns$4) {\n                        var arg$1 = patterns$4.hd;\n                        var tmp$2 = arg$1.ppat_desc;\n                        if (typeof tmp$2 === \"number\" || !(tmp$2.TAG === /* Ppat_tuple */4 && !patterns$4.tl)) {\n                          exit$5 = 2;\n                        } else {\n                          argsDoc$1 = Res_doc.concat({\n                                hd: Res_doc.lparen,\n                                tl: {\n                                  hd: printPattern(arg$1, cmtTbl),\n                                  tl: {\n                                    hd: Res_doc.rparen,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              });\n                        }\n                      } else {\n                        argsDoc$1 = Res_doc.concat({\n                              hd: Res_doc.lparen,\n                              tl: {\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: printCommentsInside(cmtTbl, variantArgs.ppat_loc),\n                                  tl: {\n                                    hd: Res_doc.rparen,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }\n                            });\n                      }\n                      if (exit$5 === 2) {\n                        argsDoc$1 = Res_doc.concat({\n                              hd: Res_doc.lparen,\n                              tl: {\n                                hd: Res_doc.indent(Res_doc.concat({\n             
                             hd: Res_doc.softLine,\n                                          tl: {\n                                            hd: Res_doc.join(Res_doc.concat({\n                                                      hd: Res_doc.comma,\n                                                      tl: {\n                                                        hd: Res_doc.line,\n                                                        tl: /* [] */0\n                                                      }\n                                                    }), List.map((function (pat) {\n                                                        return printPattern(pat, cmtTbl);\n                                                      }), patterns$4)),\n                                            tl: /* [] */0\n                                          }\n                                        })),\n                                tl: {\n                                  hd: Res_doc.trailingComma,\n                                  tl: {\n                                    hd: Res_doc.softLine,\n                                    tl: {\n                                      hd: Res_doc.rparen,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }\n                            });\n                      }\n                      break;\n                  case /* Ppat_construct */5 :\n                      var match$8 = patterns$3._0.txt;\n                      switch (match$8.TAG | 0) {\n                        case /* Lident */0 :\n                            if (match$8._0 === \"()\") {\n                              argsDoc$1 = Res_doc.text(\"()\");\n                            } else {\n                              exit$4 = 1;\n                            }\n                            break;\n                        
case /* Ldot */1 :\n                        case /* Lapply */2 :\n                            exit$4 = 1;\n                            break;\n                        \n                      }\n                      break;\n                  default:\n                    exit$4 = 1;\n                }\n              }\n              if (exit$4 === 1) {\n                var argDoc$1 = printPattern(variantArgs, cmtTbl);\n                var shouldHug$2 = Res_parsetree_viewer.isHuggablePattern(variantArgs);\n                argsDoc$1 = Res_doc.concat({\n                      hd: Res_doc.lparen,\n                      tl: {\n                        hd: shouldHug$2 ? argDoc$1 : Res_doc.concat({\n                                hd: Res_doc.indent(Res_doc.concat({\n                                          hd: Res_doc.softLine,\n                                          tl: {\n                                            hd: argDoc$1,\n                                            tl: /* [] */0\n                                          }\n                                        })),\n                                tl: {\n                                  hd: Res_doc.trailingComma,\n                                  tl: {\n                                    hd: Res_doc.softLine,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }),\n                        tl: {\n                          hd: Res_doc.rparen,\n                          tl: /* [] */0\n                        }\n                      }\n                    });\n              }\n              \n            } else {\n              argsDoc$1 = Res_doc.nil;\n            }\n            patternWithoutAttributes = Res_doc.group(Res_doc.concat({\n                      hd: variantName,\n                      tl: {\n                        hd: argsDoc$1,\n                        tl: /* [] */0\n                      
}\n                    }));\n          } else {\n            patternWithoutAttributes = Res_doc.concat({\n                  hd: Res_doc.text(\"#\"),\n                  tl: {\n                    hd: printPolyVarIdent(label),\n                    tl: /* [] */0\n                  }\n                });\n          }\n          break;\n      case /* Ppat_record */7 :\n          patternWithoutAttributes = Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.lbrace,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: Res_doc.join(Res_doc.concat({\n                                            hd: Res_doc.text(\",\"),\n                                            tl: {\n                                              hd: Res_doc.line,\n                                              tl: /* [] */0\n                                            }\n                                          }), List.map((function (row) {\n                                              return printPatternRecordRow(row, cmtTbl);\n                                            }), $$var._0)),\n                                  tl: {\n                                    hd: $$var._1 ? 
Res_doc.concat({\n                                            hd: Res_doc.text(\",\"),\n                                            tl: {\n                                              hd: Res_doc.line,\n                                              tl: {\n                                                hd: Res_doc.text(\"_\"),\n                                                tl: /* [] */0\n                                              }\n                                            }\n                                          }) : Res_doc.nil,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.ifBreaks(Res_doc.text(\",\"), Res_doc.nil),\n                        tl: {\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.rbrace,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }));\n          break;\n      case /* Ppat_array */8 :\n          var patterns$5 = $$var._0;\n          patternWithoutAttributes = patterns$5 ? 
Res_doc.group(Res_doc.concat({\n                      hd: Res_doc.text(\"[\"),\n                      tl: {\n                        hd: Res_doc.indent(Res_doc.concat({\n                                  hd: Res_doc.softLine,\n                                  tl: {\n                                    hd: Res_doc.join(Res_doc.concat({\n                                              hd: Res_doc.text(\",\"),\n                                              tl: {\n                                                hd: Res_doc.line,\n                                                tl: /* [] */0\n                                              }\n                                            }), List.map((function (pat) {\n                                                return printPattern(pat, cmtTbl);\n                                              }), patterns$5)),\n                                    tl: /* [] */0\n                                  }\n                                })),\n                        tl: {\n                          hd: Res_doc.trailingComma,\n                          tl: {\n                            hd: Res_doc.softLine,\n                            tl: {\n                              hd: Res_doc.text(\"]\"),\n                              tl: /* [] */0\n                            }\n                          }\n                        }\n                      }\n                    })) : Res_doc.concat({\n                  hd: Res_doc.lbracket,\n                  tl: {\n                    hd: printCommentsInside(cmtTbl, p.ppat_loc),\n                    tl: {\n                      hd: Res_doc.rbracket,\n                      tl: /* [] */0\n                    }\n                  }\n                });\n          break;\n      case /* Ppat_or */9 :\n          var orChain = Res_parsetree_viewer.collectOrPatternChain(p);\n          var docs = List.mapi((function (i, pat) {\n                  var patternDoc = printPattern(pat, cmtTbl);\n     
             var match = pat.ppat_desc;\n                  var tmp;\n                  tmp = typeof match === \"number\" || match.TAG !== /* Ppat_or */9 ? patternDoc : addParens(patternDoc);\n                  return Res_doc.concat({\n                              hd: i === 0 ? Res_doc.nil : Res_doc.concat({\n                                      hd: Res_doc.line,\n                                      tl: {\n                                        hd: Res_doc.text(\"| \"),\n                                        tl: /* [] */0\n                                      }\n                                    }),\n                              tl: {\n                                hd: tmp,\n                                tl: /* [] */0\n                              }\n                            });\n                }), orChain);\n          var match$9 = List.rev(orChain);\n          var isSpreadOverMultipleLines = orChain && match$9 ? orChain.hd.ppat_loc.loc_start.pos_lnum < match$9.hd.ppat_loc.loc_end.pos_lnum : false;\n          patternWithoutAttributes = Res_doc.breakableGroup(isSpreadOverMultipleLines, Res_doc.concat(docs));\n          break;\n      case /* Ppat_constraint */10 :\n          var pattern = $$var._0;\n          var stringLoc = pattern.ppat_desc;\n          var exit$6 = 0;\n          if (typeof stringLoc === \"number\" || stringLoc.TAG !== /* Ppat_unpack */13) {\n            exit$6 = 1;\n          } else {\n            var match$10 = $$var._1;\n            var packageType = match$10.ptyp_desc;\n            if (typeof packageType === \"number\" || packageType.TAG !== /* Ptyp_package */9) {\n              exit$6 = 1;\n            } else {\n              var stringLoc$1 = stringLoc._0;\n              patternWithoutAttributes = Res_doc.concat({\n                    hd: Res_doc.text(\"module(\"),\n                    tl: {\n                      hd: printComments(Res_doc.text(stringLoc$1.txt), cmtTbl, stringLoc$1.loc),\n                      tl: {\n       
                 hd: Res_doc.text(\": \"),\n                        tl: {\n                          hd: printComments(printPackageType(false, packageType._0, cmtTbl), cmtTbl, match$10.ptyp_loc),\n                          tl: {\n                            hd: Res_doc.rparen,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  });\n            }\n          }\n          if (exit$6 === 1) {\n            patternWithoutAttributes = Res_doc.concat({\n                  hd: printPattern(pattern, cmtTbl),\n                  tl: {\n                    hd: Res_doc.text(\": \"),\n                    tl: {\n                      hd: printTypExpr($$var._1, cmtTbl),\n                      tl: /* [] */0\n                    }\n                  }\n                });\n          }\n          break;\n      case /* Ppat_type */11 :\n          patternWithoutAttributes = Res_doc.concat({\n                hd: Res_doc.text(\"#...\"),\n                tl: {\n                  hd: printIdentPath($$var._0, cmtTbl),\n                  tl: /* [] */0\n                }\n              });\n          break;\n      case /* Ppat_lazy */12 :\n          var p$3 = $$var._0;\n          var match$11 = p$3.ppat_desc;\n          var needsParens$1;\n          if (typeof match$11 === \"number\") {\n            needsParens$1 = false;\n          } else {\n            switch (match$11.TAG | 0) {\n              case /* Ppat_alias */1 :\n              case /* Ppat_or */9 :\n                  needsParens$1 = true;\n                  break;\n              default:\n                needsParens$1 = false;\n            }\n          }\n          var p$4 = printPattern(p$3, cmtTbl);\n          var pat = needsParens$1 ? 
Res_doc.concat({\n                  hd: Res_doc.text(\"(\"),\n                  tl: {\n                    hd: p$4,\n                    tl: {\n                      hd: Res_doc.text(\")\"),\n                      tl: /* [] */0\n                    }\n                  }\n                }) : p$4;\n          patternWithoutAttributes = Res_doc.concat({\n                hd: Res_doc.text(\"lazy \"),\n                tl: {\n                  hd: pat,\n                  tl: /* [] */0\n                }\n              });\n          break;\n      case /* Ppat_unpack */13 :\n          var stringLoc$2 = $$var._0;\n          patternWithoutAttributes = Res_doc.concat({\n                hd: Res_doc.text(\"module(\"),\n                tl: {\n                  hd: printComments(Res_doc.text(stringLoc$2.txt), cmtTbl, stringLoc$2.loc),\n                  tl: {\n                    hd: Res_doc.rparen,\n                    tl: /* [] */0\n                  }\n                }\n              });\n          break;\n      case /* Ppat_exception */14 :\n          var p$5 = $$var._0;\n          var match$12 = p$5.ppat_desc;\n          var needsParens$2;\n          if (typeof match$12 === \"number\") {\n            needsParens$2 = false;\n          } else {\n            switch (match$12.TAG | 0) {\n              case /* Ppat_alias */1 :\n              case /* Ppat_or */9 :\n                  needsParens$2 = true;\n                  break;\n              default:\n                needsParens$2 = false;\n            }\n          }\n          var p$6 = printPattern(p$5, cmtTbl);\n          var pat$1 = needsParens$2 ? 
Res_doc.concat({\n                  hd: Res_doc.text(\"(\"),\n                  tl: {\n                    hd: p$6,\n                    tl: {\n                      hd: Res_doc.text(\")\"),\n                      tl: /* [] */0\n                    }\n                  }\n                }) : p$6;\n          patternWithoutAttributes = Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.text(\"exception\"),\n                    tl: {\n                      hd: Res_doc.line,\n                      tl: {\n                        hd: pat$1,\n                        tl: /* [] */0\n                      }\n                    }\n                  }));\n          break;\n      case /* Ppat_extension */15 :\n          patternWithoutAttributes = printExtension(false, $$var._0, cmtTbl);\n          break;\n      case /* Ppat_open */16 :\n          patternWithoutAttributes = Res_doc.nil;\n          break;\n      \n    }\n  }\n  var attrs = p.ppat_attributes;\n  var doc$1 = attrs ? Res_doc.group(Res_doc.concat({\n              hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n              tl: {\n                hd: patternWithoutAttributes,\n                tl: /* [] */0\n              }\n            })) : patternWithoutAttributes;\n  return printComments(doc$1, cmtTbl, p.ppat_loc);\n}\n\nfunction printPatternRecordRow(row, cmtTbl) {\n  var longident = row[0];\n  var ident = longident.txt;\n  switch (ident.TAG | 0) {\n    case /* Lident */0 :\n        var match = row[1].ppat_desc;\n        if (typeof match !== \"number\" && match.TAG === /* Ppat_var */0 && ident._0 === match._0.txt) {\n          return printLidentPath(longident, cmtTbl);\n        }\n        break;\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        break;\n    \n  }\n  var pattern = row[1];\n  var init = longident.loc;\n  var locForComments_loc_start = init.loc_start;\n  var locForComments_loc_end = pattern.ppat_loc.loc_end;\n  var locForComments_loc_ghost = init.loc_ghost;\n  var 
locForComments = {\n    loc_start: locForComments_loc_start,\n    loc_end: locForComments_loc_end,\n    loc_ghost: locForComments_loc_ghost\n  };\n  var doc = printPattern(pattern, cmtTbl);\n  var rhsDoc = Res_parens.patternRecordRowRhs(pattern) ? addParens(doc) : doc;\n  var doc$1 = Res_doc.group(Res_doc.concat({\n            hd: printLidentPath(longident, cmtTbl),\n            tl: {\n              hd: Res_doc.text(\":\"),\n              tl: {\n                hd: Res_parsetree_viewer.isHuggablePattern(pattern) ? Res_doc.concat({\n                        hd: Res_doc.space,\n                        tl: {\n                          hd: rhsDoc,\n                          tl: /* [] */0\n                        }\n                      }) : Res_doc.indent(Res_doc.concat({\n                            hd: Res_doc.line,\n                            tl: {\n                              hd: rhsDoc,\n                              tl: /* [] */0\n                            }\n                          })),\n                tl: /* [] */0\n              }\n            }\n          }));\n  return printComments(doc$1, cmtTbl, locForComments);\n}\n\nfunction printExpressionWithComments(expr, cmtTbl) {\n  var doc = printExpression(expr, cmtTbl);\n  return printComments(doc, cmtTbl, expr.pexp_loc);\n}\n\nfunction printIfChain(pexp_attributes, ifs, elseExpr, cmtTbl) {\n  var ifDocs = Res_doc.join(Res_doc.space, List.mapi((function (i, param) {\n              var thenExpr = param[1];\n              var ifExpr = param[0];\n              var ifTxt = i > 0 ? 
Res_doc.text(\"else if \") : Res_doc.text(\"if \");\n              if (ifExpr.TAG === /* If */0) {\n                var ifExpr$1 = ifExpr._0;\n                var condition;\n                if (Res_parsetree_viewer.isBlockExpr(ifExpr$1)) {\n                  condition = printExpressionBlock(true, ifExpr$1, cmtTbl);\n                } else {\n                  var doc = printExpressionWithComments(ifExpr$1, cmtTbl);\n                  var braces = Res_parens.expr(ifExpr$1);\n                  condition = typeof braces === \"number\" ? (\n                      braces !== 0 ? Res_doc.ifBreaks(addParens(doc), doc) : addParens(doc)\n                    ) : printBraces(doc, ifExpr$1, braces._0);\n                }\n                var match = Res_parsetree_viewer.processBracesAttr(thenExpr);\n                var thenExpr$1 = match[0] !== undefined ? match[1] : thenExpr;\n                return Res_doc.concat({\n                            hd: ifTxt,\n                            tl: {\n                              hd: Res_doc.group(condition),\n                              tl: {\n                                hd: Res_doc.space,\n                                tl: {\n                                  hd: printExpressionBlock(true, thenExpr$1, cmtTbl),\n                                  tl: /* [] */0\n                                }\n                              }\n                            }\n                          });\n              }\n              var conditionExpr = ifExpr._1;\n              var doc$1 = printExpressionWithComments(conditionExpr, cmtTbl);\n              var braces$1 = Res_parens.expr(conditionExpr);\n              var conditionDoc = typeof braces$1 === \"number\" ? (\n                  braces$1 !== 0 ? 
doc$1 : addParens(doc$1)\n                ) : printBraces(doc$1, conditionExpr, braces$1._0);\n              return Res_doc.concat({\n                          hd: ifTxt,\n                          tl: {\n                            hd: Res_doc.text(\"let \"),\n                            tl: {\n                              hd: printPattern(ifExpr._0, cmtTbl),\n                              tl: {\n                                hd: Res_doc.text(\" = \"),\n                                tl: {\n                                  hd: conditionDoc,\n                                  tl: {\n                                    hd: Res_doc.space,\n                                    tl: {\n                                      hd: printExpressionBlock(true, thenExpr, cmtTbl),\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }\n                            }\n                          }\n                        });\n            }), ifs));\n  var elseDoc = elseExpr !== undefined ? 
Res_doc.concat({\n          hd: Res_doc.text(\" else \"),\n          tl: {\n            hd: printExpressionBlock(true, elseExpr, cmtTbl),\n            tl: /* [] */0\n          }\n        }) : Res_doc.nil;\n  var attrs = Res_parsetree_viewer.filterFragileMatchAttributes(pexp_attributes);\n  return Res_doc.concat({\n              hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n              tl: {\n                hd: ifDocs,\n                tl: {\n                  hd: elseDoc,\n                  tl: /* [] */0\n                }\n              }\n            });\n}\n\nfunction printExpression(e, cmtTbl) {\n  var c = e.pexp_desc;\n  var printedExpression;\n  var exit = 0;\n  if (typeof c === \"number\") {\n    printedExpression = Res_doc.dot;\n  } else {\n    switch (c.TAG | 0) {\n      case /* Pexp_ident */0 :\n          printedExpression = printLidentPath(c._0, cmtTbl);\n          break;\n      case /* Pexp_constant */1 :\n          printedExpression = printConstant(Res_parsetree_viewer.isTemplateLiteral(e), c._0);\n          break;\n      case /* Pexp_function */3 :\n          printedExpression = Res_doc.concat({\n                hd: Res_doc.text(\"x => switch x \"),\n                tl: {\n                  hd: printCases(c._0, cmtTbl),\n                  tl: /* [] */0\n                }\n              });\n          break;\n      case /* Pexp_fun */4 :\n          if (typeof c._0 === \"number\" && c._1 === undefined) {\n            var match = c._2.ppat_desc;\n            if (typeof match === \"number\" || !(match.TAG === /* Ppat_var */0 && match._0.txt === \"__x\")) {\n              exit = 1;\n            } else {\n              var tmp = c._3.pexp_desc;\n              if (typeof tmp === \"number\" || tmp.TAG !== /* Pexp_apply */5) {\n                exit = 1;\n              } else {\n                printedExpression = printExpressionWithComments(Res_parsetree_viewer.rewriteUnderscoreApply(e), cmtTbl);\n              }\n            }\n          } 
else {\n            exit = 1;\n          }\n          break;\n      case /* Pexp_apply */5 :\n          printedExpression = Res_parsetree_viewer.isUnaryExpression(e) ? printUnaryExpression(e, cmtTbl) : (\n              Res_parsetree_viewer.isTemplateLiteral(e) ? printTemplateLiteral(e, cmtTbl) : (\n                  Res_parsetree_viewer.isBinaryExpression(e) ? printBinaryExpression(e, cmtTbl) : printPexpApply(e, cmtTbl)\n                )\n            );\n          break;\n      case /* Pexp_match */6 :\n          var cases = c._1;\n          var expr = c._0;\n          var exit$1 = 0;\n          if (cases) {\n            var match$1 = cases.tl;\n            if (match$1 && !(match$1.tl || !Res_parsetree_viewer.isIfLetExpr(e))) {\n              var match$2 = Res_parsetree_viewer.collectIfExpressions(e);\n              printedExpression = printIfChain(e.pexp_attributes, match$2[0], match$2[1], cmtTbl);\n            } else {\n              exit$1 = 2;\n            }\n          } else {\n            exit$1 = 2;\n          }\n          if (exit$1 === 2) {\n            var doc = printExpressionWithComments(expr, cmtTbl);\n            var braces = Res_parens.expr(expr);\n            var exprDoc = typeof braces === \"number\" ? (\n                braces !== 0 ? 
doc : addParens(doc)\n              ) : printBraces(doc, expr, braces._0);\n            printedExpression = Res_doc.concat({\n                  hd: Res_doc.text(\"switch \"),\n                  tl: {\n                    hd: exprDoc,\n                    tl: {\n                      hd: Res_doc.space,\n                      tl: {\n                        hd: printCases(cases, cmtTbl),\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                });\n          }\n          break;\n      case /* Pexp_try */7 :\n          var expr$1 = c._0;\n          var doc$1 = printExpressionWithComments(expr$1, cmtTbl);\n          var braces$1 = Res_parens.expr(expr$1);\n          var exprDoc$1 = typeof braces$1 === \"number\" ? (\n              braces$1 !== 0 ? doc$1 : addParens(doc$1)\n            ) : printBraces(doc$1, expr$1, braces$1._0);\n          printedExpression = Res_doc.concat({\n                hd: Res_doc.text(\"try \"),\n                tl: {\n                  hd: exprDoc$1,\n                  tl: {\n                    hd: Res_doc.text(\" catch \"),\n                    tl: {\n                      hd: printCases(c._1, cmtTbl),\n                      tl: /* [] */0\n                    }\n                  }\n                }\n              });\n          break;\n      case /* Pexp_tuple */8 :\n          printedExpression = Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.lparen,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: Res_doc.join(Res_doc.concat({\n                                            hd: Res_doc.text(\",\"),\n                                            tl: {\n                                              hd: Res_doc.line,\n                                              tl: /* [] */0\n                    
                        }\n                                          }), List.map((function (expr) {\n                                              var doc = printExpressionWithComments(expr, cmtTbl);\n                                              var braces = Res_parens.expr(expr);\n                                              if (typeof braces === \"number\") {\n                                                if (braces !== 0) {\n                                                  return doc;\n                                                } else {\n                                                  return addParens(doc);\n                                                }\n                                              } else {\n                                                return printBraces(doc, expr, braces._0);\n                                              }\n                                            }), c._0)),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.ifBreaks(Res_doc.text(\",\"), Res_doc.nil),\n                        tl: {\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.rparen,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }));\n          break;\n      case /* Pexp_construct */9 :\n          var longidentLoc = c._0;\n          var match$3 = longidentLoc.txt;\n          var exit$2 = 0;\n          if (Res_parsetree_viewer.hasJsxAttribute(e.pexp_attributes)) {\n            printedExpression = printJsxFragment(e, cmtTbl);\n          } else {\n            switch (match$3.TAG | 0) {\n              case /* Lident */0 :\n                  switch (match$3._0) {\n                    case \"()\" :\n                        
printedExpression = Res_doc.text(\"()\");\n                        break;\n                    case \"::\" :\n                        var match$4 = Res_parsetree_viewer.collectListExpressions(e);\n                        var spread = match$4[1];\n                        var spreadDoc;\n                        if (spread !== undefined) {\n                          var doc$2 = printExpressionWithComments(spread, cmtTbl);\n                          var braces$2 = Res_parens.expr(spread);\n                          spreadDoc = Res_doc.concat({\n                                hd: Res_doc.text(\",\"),\n                                tl: {\n                                  hd: Res_doc.line,\n                                  tl: {\n                                    hd: Res_doc.dotdotdot,\n                                    tl: {\n                                      hd: typeof braces$2 === \"number\" ? (\n                                          braces$2 !== 0 ? doc$2 : addParens(doc$2)\n                                        ) : printBraces(doc$2, spread, braces$2._0),\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              });\n                        } else {\n                          spreadDoc = Res_doc.nil;\n                        }\n                        printedExpression = Res_doc.group(Res_doc.concat({\n                                  hd: Res_doc.text(\"list{\"),\n                                  tl: {\n                                    hd: Res_doc.indent(Res_doc.concat({\n                                              hd: Res_doc.softLine,\n                                              tl: {\n                                                hd: Res_doc.join(Res_doc.concat({\n                                                          hd: Res_doc.text(\",\"),\n                                                       
   tl: {\n                                                            hd: Res_doc.line,\n                                                            tl: /* [] */0\n                                                          }\n                                                        }), List.map((function (expr) {\n                                                            var doc = printExpressionWithComments(expr, cmtTbl);\n                                                            var braces = Res_parens.expr(expr);\n                                                            if (typeof braces === \"number\") {\n                                                              if (braces !== 0) {\n                                                                return doc;\n                                                              } else {\n                                                                return addParens(doc);\n                                                              }\n                                                            } else {\n                                                              return printBraces(doc, expr, braces._0);\n                                                            }\n                                                          }), match$4[0])),\n                                                tl: {\n                                                  hd: spreadDoc,\n                                                  tl: /* [] */0\n                                                }\n                                              }\n                                            })),\n                                    tl: {\n                                      hd: Res_doc.trailingComma,\n                                      tl: {\n                                        hd: Res_doc.softLine,\n                                        tl: {\n                                          hd: Res_doc.rbrace,\n               
                           tl: /* [] */0\n                                        }\n                                      }\n                                    }\n                                  }\n                                }));\n                        break;\n                    case \"[]\" :\n                        printedExpression = Res_doc.concat({\n                              hd: Res_doc.text(\"list{\"),\n                              tl: {\n                                hd: printCommentsInside(cmtTbl, e.pexp_loc),\n                                tl: {\n                                  hd: Res_doc.rbrace,\n                                  tl: /* [] */0\n                                }\n                              }\n                            });\n                        break;\n                    default:\n                      exit$2 = 2;\n                  }\n                  break;\n              case /* Ldot */1 :\n              case /* Lapply */2 :\n                  exit$2 = 2;\n                  break;\n              \n            }\n          }\n          if (exit$2 === 2) {\n            var args = c._1;\n            var constr = printLongidentLocation(longidentLoc, cmtTbl);\n            var args$1;\n            if (args !== undefined) {\n              var args$2 = args.pexp_desc;\n              var exit$3 = 0;\n              if (typeof args$2 === \"number\") {\n                exit$3 = 3;\n              } else {\n                switch (args$2.TAG | 0) {\n                  case /* Pexp_tuple */8 :\n                      var args$3 = args$2._0;\n                      var exit$4 = 0;\n                      if (args$3) {\n                        var arg = args$3.hd;\n                        var tmp$1 = arg.pexp_desc;\n                        if (typeof tmp$1 === \"number\" || !(tmp$1.TAG === /* Pexp_tuple */8 && !args$3.tl)) {\n                          exit$4 = 4;\n                        } else {\n                          
var doc$3 = printExpressionWithComments(arg, cmtTbl);\n                          var braces$3 = Res_parens.expr(arg);\n                          args$1 = Res_doc.concat({\n                                hd: Res_doc.lparen,\n                                tl: {\n                                  hd: typeof braces$3 === \"number\" ? (\n                                      braces$3 !== 0 ? doc$3 : addParens(doc$3)\n                                    ) : printBraces(doc$3, arg, braces$3._0),\n                                  tl: {\n                                    hd: Res_doc.rparen,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              });\n                        }\n                      } else {\n                        exit$4 = 4;\n                      }\n                      if (exit$4 === 4) {\n                        args$1 = Res_doc.concat({\n                              hd: Res_doc.lparen,\n                              tl: {\n                                hd: Res_doc.indent(Res_doc.concat({\n                                          hd: Res_doc.softLine,\n                                          tl: {\n                                            hd: Res_doc.join(Res_doc.concat({\n                                                      hd: Res_doc.comma,\n                                                      tl: {\n                                                        hd: Res_doc.line,\n                                                        tl: /* [] */0\n                                                      }\n                                                    }), List.map((function (expr) {\n                                                        var doc = printExpressionWithComments(expr, cmtTbl);\n                                                        var braces = Res_parens.expr(expr);\n                                                       
 if (typeof braces === \"number\") {\n                                                          if (braces !== 0) {\n                                                            return doc;\n                                                          } else {\n                                                            return addParens(doc);\n                                                          }\n                                                        } else {\n                                                          return printBraces(doc, expr, braces._0);\n                                                        }\n                                                      }), args$3)),\n                                            tl: /* [] */0\n                                          }\n                                        })),\n                                tl: {\n                                  hd: Res_doc.trailingComma,\n                                  tl: {\n                                    hd: Res_doc.softLine,\n                                    tl: {\n                                      hd: Res_doc.rparen,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }\n                            });\n                      }\n                      break;\n                  case /* Pexp_construct */9 :\n                      var match$5 = args$2._0.txt;\n                      switch (match$5.TAG | 0) {\n                        case /* Lident */0 :\n                            if (match$5._0 === \"()\") {\n                              args$1 = Res_doc.text(\"()\");\n                            } else {\n                              exit$3 = 3;\n                            }\n                            break;\n                        case /* Ldot */1 :\n                        case /* Lapply */2 :\n          
                  exit$3 = 3;\n                            break;\n                        \n                      }\n                      break;\n                  default:\n                    exit$3 = 3;\n                }\n              }\n              if (exit$3 === 3) {\n                var doc$4 = printExpressionWithComments(args, cmtTbl);\n                var braces$4 = Res_parens.expr(args);\n                var argDoc = typeof braces$4 === \"number\" ? (\n                    braces$4 !== 0 ? doc$4 : addParens(doc$4)\n                  ) : printBraces(doc$4, args, braces$4._0);\n                var shouldHug = Res_parsetree_viewer.isHuggableExpression(args);\n                args$1 = Res_doc.concat({\n                      hd: Res_doc.lparen,\n                      tl: {\n                        hd: shouldHug ? argDoc : Res_doc.concat({\n                                hd: Res_doc.indent(Res_doc.concat({\n                                          hd: Res_doc.softLine,\n                                          tl: {\n                                            hd: argDoc,\n                                            tl: /* [] */0\n                                          }\n                                        })),\n                                tl: {\n                                  hd: Res_doc.trailingComma,\n                                  tl: {\n                                    hd: Res_doc.softLine,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }),\n                        tl: {\n                          hd: Res_doc.rparen,\n                          tl: /* [] */0\n                        }\n                      }\n                    });\n              }\n              \n            } else {\n              args$1 = Res_doc.nil;\n            }\n            printedExpression = Res_doc.group(Res_doc.concat({\n                      
hd: constr,\n                      tl: {\n                        hd: args$1,\n                        tl: /* [] */0\n                      }\n                    }));\n          }\n          break;\n      case /* Pexp_variant */10 :\n          var args$4 = c._1;\n          var variantName = Res_doc.concat({\n                hd: Res_doc.text(\"#\"),\n                tl: {\n                  hd: printPolyVarIdent(c._0),\n                  tl: /* [] */0\n                }\n              });\n          var args$5;\n          if (args$4 !== undefined) {\n            var args$6 = args$4.pexp_desc;\n            var exit$5 = 0;\n            if (typeof args$6 === \"number\") {\n              exit$5 = 2;\n            } else {\n              switch (args$6.TAG | 0) {\n                case /* Pexp_tuple */8 :\n                    var args$7 = args$6._0;\n                    var exit$6 = 0;\n                    if (args$7) {\n                      var arg$1 = args$7.hd;\n                      var tmp$2 = arg$1.pexp_desc;\n                      if (typeof tmp$2 === \"number\" || !(tmp$2.TAG === /* Pexp_tuple */8 && !args$7.tl)) {\n                        exit$6 = 3;\n                      } else {\n                        var doc$5 = printExpressionWithComments(arg$1, cmtTbl);\n                        var braces$5 = Res_parens.expr(arg$1);\n                        args$5 = Res_doc.concat({\n                              hd: Res_doc.lparen,\n                              tl: {\n                                hd: typeof braces$5 === \"number\" ? (\n                                    braces$5 !== 0 ? 
doc$5 : addParens(doc$5)\n                                  ) : printBraces(doc$5, arg$1, braces$5._0),\n                                tl: {\n                                  hd: Res_doc.rparen,\n                                  tl: /* [] */0\n                                }\n                              }\n                            });\n                      }\n                    } else {\n                      exit$6 = 3;\n                    }\n                    if (exit$6 === 3) {\n                      args$5 = Res_doc.concat({\n                            hd: Res_doc.lparen,\n                            tl: {\n                              hd: Res_doc.indent(Res_doc.concat({\n                                        hd: Res_doc.softLine,\n                                        tl: {\n                                          hd: Res_doc.join(Res_doc.concat({\n                                                    hd: Res_doc.comma,\n                                                    tl: {\n                                                      hd: Res_doc.line,\n                                                      tl: /* [] */0\n                                                    }\n                                                  }), List.map((function (expr) {\n                                                      var doc = printExpressionWithComments(expr, cmtTbl);\n                                                      var braces = Res_parens.expr(expr);\n                                                      if (typeof braces === \"number\") {\n                                                        if (braces !== 0) {\n                                                          return doc;\n                                                        } else {\n                                                          return addParens(doc);\n                                                        }\n                                                      
} else {\n                                                        return printBraces(doc, expr, braces._0);\n                                                      }\n                                                    }), args$7)),\n                                          tl: /* [] */0\n                                        }\n                                      })),\n                              tl: {\n                                hd: Res_doc.trailingComma,\n                                tl: {\n                                  hd: Res_doc.softLine,\n                                  tl: {\n                                    hd: Res_doc.rparen,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }\n                            }\n                          });\n                    }\n                    break;\n                case /* Pexp_construct */9 :\n                    var match$6 = args$6._0.txt;\n                    switch (match$6.TAG | 0) {\n                      case /* Lident */0 :\n                          if (match$6._0 === \"()\") {\n                            args$5 = Res_doc.text(\"()\");\n                          } else {\n                            exit$5 = 2;\n                          }\n                          break;\n                      case /* Ldot */1 :\n                      case /* Lapply */2 :\n                          exit$5 = 2;\n                          break;\n                      \n                    }\n                    break;\n                default:\n                  exit$5 = 2;\n              }\n            }\n            if (exit$5 === 2) {\n              var doc$6 = printExpressionWithComments(args$4, cmtTbl);\n              var braces$6 = Res_parens.expr(args$4);\n              var argDoc$1 = typeof braces$6 === \"number\" ? (\n                  braces$6 !== 0 ? 
doc$6 : addParens(doc$6)\n                ) : printBraces(doc$6, args$4, braces$6._0);\n              var shouldHug$1 = Res_parsetree_viewer.isHuggableExpression(args$4);\n              args$5 = Res_doc.concat({\n                    hd: Res_doc.lparen,\n                    tl: {\n                      hd: shouldHug$1 ? argDoc$1 : Res_doc.concat({\n                              hd: Res_doc.indent(Res_doc.concat({\n                                        hd: Res_doc.softLine,\n                                        tl: {\n                                          hd: argDoc$1,\n                                          tl: /* [] */0\n                                        }\n                                      })),\n                              tl: {\n                                hd: Res_doc.trailingComma,\n                                tl: {\n                                  hd: Res_doc.softLine,\n                                  tl: /* [] */0\n                                }\n                              }\n                            }),\n                      tl: {\n                        hd: Res_doc.rparen,\n                        tl: /* [] */0\n                      }\n                    }\n                  });\n            }\n            \n          } else {\n            args$5 = Res_doc.nil;\n          }\n          printedExpression = Res_doc.group(Res_doc.concat({\n                    hd: variantName,\n                    tl: {\n                      hd: args$5,\n                      tl: /* [] */0\n                    }\n                  }));\n          break;\n      case /* Pexp_record */11 :\n          var spreadExpr = c._1;\n          var rows = c._0;\n          var spread$1;\n          if (spreadExpr !== undefined) {\n            var doc$7 = printExpressionWithComments(spreadExpr, cmtTbl);\n            var braces$7 = Res_parens.expr(spreadExpr);\n            spread$1 = Res_doc.concat({\n                  hd: Res_doc.dotdotdot,\n      
            tl: {\n                    hd: typeof braces$7 === \"number\" ? (\n                        braces$7 !== 0 ? doc$7 : addParens(doc$7)\n                      ) : printBraces(doc$7, spreadExpr, braces$7._0),\n                    tl: {\n                      hd: Res_doc.comma,\n                      tl: {\n                        hd: Res_doc.line,\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                });\n          } else {\n            spread$1 = Res_doc.nil;\n          }\n          var forceBreak = e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum;\n          var punningAllowed = spreadExpr !== undefined || !(rows && !rows.tl) ? true : false;\n          printedExpression = Res_doc.breakableGroup(forceBreak, Res_doc.concat({\n                    hd: Res_doc.lbrace,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: spread$1,\n                                  tl: {\n                                    hd: Res_doc.join(Res_doc.concat({\n                                              hd: Res_doc.text(\",\"),\n                                              tl: {\n                                                hd: Res_doc.line,\n                                                tl: /* [] */0\n                                              }\n                                            }), List.map((function (row) {\n                                                return printRecordRow(row, cmtTbl, punningAllowed);\n                                              }), rows)),\n                                    tl: /* [] */0\n                                  }\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.trailingComma,\n            
            tl: {\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.rbrace,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  }));\n          break;\n      case /* Pexp_field */12 :\n          var expr$2 = c._0;\n          var doc$8 = printExpressionWithComments(expr$2, cmtTbl);\n          var braces$8 = Res_parens.fieldExpr(expr$2);\n          var lhs = typeof braces$8 === \"number\" ? (\n              braces$8 !== 0 ? doc$8 : addParens(doc$8)\n            ) : printBraces(doc$8, expr$2, braces$8._0);\n          printedExpression = Res_doc.concat({\n                hd: lhs,\n                tl: {\n                  hd: Res_doc.dot,\n                  tl: {\n                    hd: printLidentPath(c._1, cmtTbl),\n                    tl: /* [] */0\n                  }\n                }\n              });\n          break;\n      case /* Pexp_setfield */13 :\n          printedExpression = printSetFieldExpr(e.pexp_attributes, c._0, c._1, c._2, e.pexp_loc, cmtTbl);\n          break;\n      case /* Pexp_array */14 :\n          var exprs = c._0;\n          printedExpression = exprs ? 
Res_doc.group(Res_doc.concat({\n                      hd: Res_doc.lbracket,\n                      tl: {\n                        hd: Res_doc.indent(Res_doc.concat({\n                                  hd: Res_doc.softLine,\n                                  tl: {\n                                    hd: Res_doc.join(Res_doc.concat({\n                                              hd: Res_doc.text(\",\"),\n                                              tl: {\n                                                hd: Res_doc.line,\n                                                tl: /* [] */0\n                                              }\n                                            }), List.map((function (expr) {\n                                                var doc = printExpressionWithComments(expr, cmtTbl);\n                                                var braces = Res_parens.expr(expr);\n                                                if (typeof braces === \"number\") {\n                                                  if (braces !== 0) {\n                                                    return doc;\n                                                  } else {\n                                                    return addParens(doc);\n                                                  }\n                                                } else {\n                                                  return printBraces(doc, expr, braces._0);\n                                                }\n                                              }), exprs)),\n                                    tl: /* [] */0\n                                  }\n                                })),\n                        tl: {\n                          hd: Res_doc.trailingComma,\n                          tl: {\n                            hd: Res_doc.softLine,\n                            tl: {\n                              hd: Res_doc.rbracket,\n                              tl: /* [] 
*/0\n                            }\n                          }\n                        }\n                      }\n                    })) : Res_doc.concat({\n                  hd: Res_doc.lbracket,\n                  tl: {\n                    hd: printCommentsInside(cmtTbl, e.pexp_loc),\n                    tl: {\n                      hd: Res_doc.rbracket,\n                      tl: /* [] */0\n                    }\n                  }\n                });\n          break;\n      case /* Pexp_ifthenelse */15 :\n          if (Res_parsetree_viewer.isTernaryExpr(e)) {\n            var match$7 = Res_parsetree_viewer.collectTernaryParts(e);\n            var parts = match$7[0];\n            var ternaryDoc;\n            if (parts) {\n              var match$8 = parts.hd;\n              ternaryDoc = Res_doc.group(Res_doc.concat({\n                        hd: printTernaryOperand(match$8[0], cmtTbl),\n                        tl: {\n                          hd: Res_doc.indent(Res_doc.concat({\n                                    hd: Res_doc.line,\n                                    tl: {\n                                      hd: Res_doc.indent(Res_doc.concat({\n                                                hd: Res_doc.text(\"? 
\"),\n                                                tl: {\n                                                  hd: printTernaryOperand(match$8[1], cmtTbl),\n                                                  tl: /* [] */0\n                                                }\n                                              })),\n                                      tl: {\n                                        hd: Res_doc.concat(List.map((function (param) {\n                                                    return Res_doc.concat({\n                                                                hd: Res_doc.line,\n                                                                tl: {\n                                                                  hd: Res_doc.text(\": \"),\n                                                                  tl: {\n                                                                    hd: printTernaryOperand(param[0], cmtTbl),\n                                                                    tl: {\n                                                                      hd: Res_doc.line,\n                                                                      tl: {\n                                                                        hd: Res_doc.text(\"? 
\"),\n                                                                        tl: {\n                                                                          hd: printTernaryOperand(param[1], cmtTbl),\n                                                                          tl: /* [] */0\n                                                                        }\n                                                                      }\n                                                                    }\n                                                                  }\n                                                                }\n                                                              });\n                                                  }), parts.tl)),\n                                        tl: {\n                                          hd: Res_doc.line,\n                                          tl: {\n                                            hd: Res_doc.text(\": \"),\n                                            tl: {\n                                              hd: Res_doc.indent(printTernaryOperand(match$7[1], cmtTbl)),\n                                              tl: /* [] */0\n                                            }\n                                          }\n                                        }\n                                      }\n                                    }\n                                  })),\n                          tl: /* [] */0\n                        }\n                      }));\n            } else {\n              ternaryDoc = Res_doc.nil;\n            }\n            var attrs = Res_parsetree_viewer.filterTernaryAttributes(e.pexp_attributes);\n            var match$9 = Res_parsetree_viewer.filterParsingAttrs(attrs);\n            var needsParens = match$9 ? 
true : false;\n            printedExpression = Res_doc.concat({\n                  hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n                  tl: {\n                    hd: needsParens ? addParens(ternaryDoc) : ternaryDoc,\n                    tl: /* [] */0\n                  }\n                });\n          } else {\n            var match$10 = Res_parsetree_viewer.collectIfExpressions(e);\n            printedExpression = printIfChain(e.pexp_attributes, match$10[0], match$10[1], cmtTbl);\n          }\n          break;\n      case /* Pexp_while */17 :\n          var expr1 = c._0;\n          var doc$9 = printExpressionWithComments(expr1, cmtTbl);\n          var braces$9 = Res_parens.expr(expr1);\n          var condition = typeof braces$9 === \"number\" ? (\n              braces$9 !== 0 ? doc$9 : addParens(doc$9)\n            ) : printBraces(doc$9, expr1, braces$9._0);\n          printedExpression = Res_doc.breakableGroup(true, Res_doc.concat({\n                    hd: Res_doc.text(\"while \"),\n                    tl: {\n                      hd: Res_parsetree_viewer.isBlockExpr(expr1) ? 
condition : Res_doc.group(Res_doc.ifBreaks(addParens(condition), condition)),\n                      tl: {\n                        hd: Res_doc.space,\n                        tl: {\n                          hd: printExpressionBlock(true, c._1, cmtTbl),\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }));\n          break;\n      case /* Pexp_for */18 :\n          var toExpr = c._2;\n          var fromExpr = c._1;\n          var doc$10 = printExpressionWithComments(fromExpr, cmtTbl);\n          var braces$10 = Res_parens.expr(fromExpr);\n          var doc$11 = printExpressionWithComments(toExpr, cmtTbl);\n          var braces$11 = Res_parens.expr(toExpr);\n          printedExpression = Res_doc.breakableGroup(true, Res_doc.concat({\n                    hd: Res_doc.text(\"for \"),\n                    tl: {\n                      hd: printPattern(c._0, cmtTbl),\n                      tl: {\n                        hd: Res_doc.text(\" in \"),\n                        tl: {\n                          hd: typeof braces$10 === \"number\" ? (\n                              braces$10 !== 0 ? doc$10 : addParens(doc$10)\n                            ) : printBraces(doc$10, fromExpr, braces$10._0),\n                          tl: {\n                            hd: printDirectionFlag(c._3),\n                            tl: {\n                              hd: typeof braces$11 === \"number\" ? (\n                                  braces$11 !== 0 ? 
doc$11 : addParens(doc$11)\n                                ) : printBraces(doc$11, toExpr, braces$11._0),\n                              tl: {\n                                hd: Res_doc.space,\n                                tl: {\n                                  hd: printExpressionBlock(true, c._4, cmtTbl),\n                                  tl: /* [] */0\n                                }\n                              }\n                            }\n                          }\n                        }\n                      }\n                    }\n                  }));\n          break;\n      case /* Pexp_constraint */19 :\n          var expr$3 = c._0;\n          var modExpr = expr$3.pexp_desc;\n          var exit$7 = 0;\n          if (typeof modExpr === \"number\" || modExpr.TAG !== /* Pexp_pack */32) {\n            exit$7 = 2;\n          } else {\n            var match$11 = c._1;\n            var packageType = match$11.ptyp_desc;\n            if (typeof packageType === \"number\" || packageType.TAG !== /* Ptyp_package */9) {\n              exit$7 = 2;\n            } else {\n              printedExpression = Res_doc.group(Res_doc.concat({\n                        hd: Res_doc.text(\"module(\"),\n                        tl: {\n                          hd: Res_doc.indent(Res_doc.concat({\n                                    hd: Res_doc.softLine,\n                                    tl: {\n                                      hd: printModExpr(modExpr._0, cmtTbl),\n                                      tl: {\n                                        hd: Res_doc.text(\": \"),\n                                        tl: {\n                                          hd: printComments(printPackageType(false, packageType._0, cmtTbl), cmtTbl, match$11.ptyp_loc),\n                                          tl: /* [] */0\n                                        }\n                                      }\n                                    }\n                  
                })),\n                          tl: {\n                            hd: Res_doc.softLine,\n                            tl: {\n                              hd: Res_doc.rparen,\n                              tl: /* [] */0\n                            }\n                          }\n                        }\n                      }));\n            }\n          }\n          if (exit$7 === 2) {\n            var doc$12 = printExpressionWithComments(expr$3, cmtTbl);\n            var braces$12 = Res_parens.expr(expr$3);\n            var exprDoc$2 = typeof braces$12 === \"number\" ? (\n                braces$12 !== 0 ? doc$12 : addParens(doc$12)\n              ) : printBraces(doc$12, expr$3, braces$12._0);\n            printedExpression = Res_doc.concat({\n                  hd: exprDoc$2,\n                  tl: {\n                    hd: Res_doc.text(\": \"),\n                    tl: {\n                      hd: printTypExpr(c._1, cmtTbl),\n                      tl: /* [] */0\n                    }\n                  }\n                });\n          }\n          break;\n      case /* Pexp_coerce */20 :\n          var typOpt = c._1;\n          var docExpr = printExpressionWithComments(c._0, cmtTbl);\n          var docTyp = printTypExpr(c._2, cmtTbl);\n          var ofType = typOpt !== undefined ? 
Res_doc.concat({\n                  hd: Res_doc.text(\": \"),\n                  tl: {\n                    hd: printTypExpr(typOpt, cmtTbl),\n                    tl: /* [] */0\n                  }\n                }) : Res_doc.nil;\n          printedExpression = Res_doc.concat({\n                hd: Res_doc.lparen,\n                tl: {\n                  hd: docExpr,\n                  tl: {\n                    hd: ofType,\n                    tl: {\n                      hd: Res_doc.text(\" :> \"),\n                      tl: {\n                        hd: docTyp,\n                        tl: {\n                          hd: Res_doc.rparen,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }\n                }\n              });\n          break;\n      case /* Pexp_send */21 :\n          var label = c._1;\n          var parentExpr = c._0;\n          var doc$13 = printExpressionWithComments(parentExpr, cmtTbl);\n          var braces$13 = Res_parens.unaryExprOperand(parentExpr);\n          var parentDoc = typeof braces$13 === \"number\" ? (\n              braces$13 !== 0 ? 
doc$13 : addParens(doc$13)\n            ) : printBraces(doc$13, parentExpr, braces$13._0);\n          var memberDoc = printComments(Res_doc.text(label.txt), cmtTbl, label.loc);\n          var member = Res_doc.concat({\n                hd: Res_doc.text(\"\\\"\"),\n                tl: {\n                  hd: memberDoc,\n                  tl: {\n                    hd: Res_doc.text(\"\\\"\"),\n                    tl: /* [] */0\n                  }\n                }\n              });\n          printedExpression = Res_doc.group(Res_doc.concat({\n                    hd: parentDoc,\n                    tl: {\n                      hd: Res_doc.lbracket,\n                      tl: {\n                        hd: member,\n                        tl: {\n                          hd: Res_doc.rbracket,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }));\n          break;\n      case /* Pexp_new */22 :\n          printedExpression = Res_doc.text(\"Pexp_new not impemented in printer\");\n          break;\n      case /* Pexp_setinstvar */23 :\n          printedExpression = Res_doc.text(\"Pexp_setinstvar not impemented in printer\");\n          break;\n      case /* Pexp_override */24 :\n          printedExpression = Res_doc.text(\"Pexp_override not impemented in printer\");\n          break;\n      case /* Pexp_assert */27 :\n          var expr$4 = c._0;\n          var doc$14 = printExpressionWithComments(expr$4, cmtTbl);\n          var braces$14 = Res_parens.lazyOrAssertExprRhs(expr$4);\n          var rhs = typeof braces$14 === \"number\" ? (\n              braces$14 !== 0 ? 
doc$14 : addParens(doc$14)\n            ) : printBraces(doc$14, expr$4, braces$14._0);\n          printedExpression = Res_doc.concat({\n                hd: Res_doc.text(\"assert \"),\n                tl: {\n                  hd: rhs,\n                  tl: /* [] */0\n                }\n              });\n          break;\n      case /* Pexp_lazy */28 :\n          var expr$5 = c._0;\n          var doc$15 = printExpressionWithComments(expr$5, cmtTbl);\n          var braces$15 = Res_parens.lazyOrAssertExprRhs(expr$5);\n          var rhs$1 = typeof braces$15 === \"number\" ? (\n              braces$15 !== 0 ? doc$15 : addParens(doc$15)\n            ) : printBraces(doc$15, expr$5, braces$15._0);\n          printedExpression = Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.text(\"lazy \"),\n                    tl: {\n                      hd: rhs$1,\n                      tl: /* [] */0\n                    }\n                  }));\n          break;\n      case /* Pexp_poly */29 :\n          printedExpression = Res_doc.text(\"Pexp_poly not impemented in printer\");\n          break;\n      case /* Pexp_object */30 :\n          printedExpression = Res_doc.text(\"Pexp_object not impemented in printer\");\n          break;\n      case /* Pexp_newtype */31 :\n          exit = 1;\n          break;\n      case /* Pexp_pack */32 :\n          printedExpression = Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.text(\"module(\"),\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: printModExpr(c._0, cmtTbl),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.softLine,\n                        tl: {\n                          hd: Res_doc.rparen,\n       
                   tl: /* [] */0\n                        }\n                      }\n                    }\n                  }));\n          break;\n      case /* Pexp_extension */34 :\n          var extension = c._0;\n          var exit$8 = 0;\n          switch (extension[0].txt) {\n            case \"bs.obj\" :\n            case \"obj\" :\n                exit$8 = 2;\n                break;\n            default:\n              printedExpression = printExtension(false, extension, cmtTbl);\n          }\n          if (exit$8 === 2) {\n            var match$12 = extension[1];\n            if (match$12.TAG === /* PStr */0) {\n              var match$13 = match$12._0;\n              if (match$13) {\n                var match$14 = match$13.hd;\n                var match$15 = match$14.pstr_desc;\n                if (match$15.TAG === /* Pstr_eval */0) {\n                  var match$16 = match$15._0.pexp_desc;\n                  if (typeof match$16 === \"number\" || !(match$16.TAG === /* Pexp_record */11 && !(match$15._1 || match$13.tl))) {\n                    printedExpression = printExtension(false, extension, cmtTbl);\n                  } else {\n                    var loc = match$14.pstr_loc;\n                    var forceBreak$1 = loc.loc_start.pos_lnum < loc.loc_end.pos_lnum;\n                    printedExpression = Res_doc.breakableGroup(forceBreak$1, Res_doc.concat({\n                              hd: Res_doc.lbrace,\n                              tl: {\n                                hd: Res_doc.indent(Res_doc.concat({\n                                          hd: Res_doc.softLine,\n                                          tl: {\n                                            hd: Res_doc.join(Res_doc.concat({\n                                                      hd: Res_doc.text(\",\"),\n                                                      tl: {\n                                                        hd: Res_doc.line,\n                                       
                 tl: /* [] */0\n                                                      }\n                                                    }), List.map((function (row) {\n                                                        return printBsObjectRow(row, cmtTbl);\n                                                      }), match$16._0)),\n                                            tl: /* [] */0\n                                          }\n                                        })),\n                                tl: {\n                                  hd: Res_doc.trailingComma,\n                                  tl: {\n                                    hd: Res_doc.softLine,\n                                    tl: {\n                                      hd: Res_doc.rbrace,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }\n                            }));\n                  }\n                } else {\n                  printedExpression = printExtension(false, extension, cmtTbl);\n                }\n              } else {\n                printedExpression = printExtension(false, extension, cmtTbl);\n              }\n            } else {\n              printedExpression = printExtension(false, extension, cmtTbl);\n            }\n          }\n          break;\n      default:\n        printedExpression = printExpressionBlock(true, e, cmtTbl);\n    }\n  }\n  if (exit === 1) {\n    var match$17 = Res_parsetree_viewer.funExpr(e);\n    var returnExpr = match$17[2];\n    var match$18 = Res_parsetree_viewer.processUncurriedAttribute(match$17[0]);\n    var match$19 = returnExpr.pexp_desc;\n    var match$20;\n    if (typeof match$19 === \"number\" || match$19.TAG !== /* Pexp_constraint */19) {\n      match$20 = [\n        returnExpr,\n        undefined\n      ];\n    } else {\n      var expr$6 = match$19._0;\n      
match$20 = [\n        {\n          pexp_desc: expr$6.pexp_desc,\n          pexp_loc: expr$6.pexp_loc,\n          pexp_attributes: List.concat({\n                hd: expr$6.pexp_attributes,\n                tl: {\n                  hd: returnExpr.pexp_attributes,\n                  tl: /* [] */0\n                }\n              })\n        },\n        match$19._1\n      ];\n    }\n    var typConstraint = match$20[1];\n    var returnExpr$1 = match$20[0];\n    var hasConstraint = typConstraint !== undefined;\n    var parametersDoc = printExprFunParameters(/* NoCallback */0, match$18[0], hasConstraint, match$17[1], cmtTbl);\n    var match$21 = Res_parsetree_viewer.processBracesAttr(returnExpr$1);\n    var match$22 = returnExpr$1.pexp_desc;\n    var shouldInline;\n    if (match$21[0] !== undefined) {\n      shouldInline = true;\n    } else if (typeof match$22 === \"number\") {\n      shouldInline = false;\n    } else {\n      switch (match$22.TAG | 0) {\n        case /* Pexp_construct */9 :\n            shouldInline = match$22._1 !== undefined;\n            break;\n        case /* Pexp_tuple */8 :\n        case /* Pexp_record */11 :\n        case /* Pexp_array */14 :\n            shouldInline = true;\n            break;\n        default:\n          shouldInline = false;\n      }\n    }\n    var match$23 = returnExpr$1.pexp_desc;\n    var shouldIndent;\n    if (typeof match$23 === \"number\") {\n      shouldIndent = true;\n    } else {\n      switch (match$23.TAG | 0) {\n        case /* Pexp_let */2 :\n        case /* Pexp_sequence */16 :\n        case /* Pexp_letmodule */25 :\n        case /* Pexp_letexception */26 :\n        case /* Pexp_open */33 :\n            shouldIndent = false;\n            break;\n        default:\n          shouldIndent = true;\n      }\n    }\n    var doc$16 = printExpressionWithComments(returnExpr$1, cmtTbl);\n    var braces$16 = Res_parens.expr(returnExpr$1);\n    var returnDoc = typeof braces$16 === \"number\" ? 
(\n        braces$16 !== 0 ? doc$16 : addParens(doc$16)\n      ) : printBraces(doc$16, returnExpr$1, braces$16._0);\n    var returnExprDoc = shouldInline ? Res_doc.concat({\n            hd: Res_doc.space,\n            tl: {\n              hd: returnDoc,\n              tl: /* [] */0\n            }\n          }) : Res_doc.group(shouldIndent ? Res_doc.indent(Res_doc.concat({\n                      hd: Res_doc.line,\n                      tl: {\n                        hd: returnDoc,\n                        tl: /* [] */0\n                      }\n                    })) : Res_doc.concat({\n                  hd: Res_doc.space,\n                  tl: {\n                    hd: returnDoc,\n                    tl: /* [] */0\n                  }\n                }));\n    var typConstraintDoc;\n    if (typConstraint !== undefined) {\n      var doc$17 = printTypExpr(typConstraint, cmtTbl);\n      var typDoc = Res_parens.arrowReturnTypExpr(typConstraint) ? addParens(doc$17) : doc$17;\n      typConstraintDoc = Res_doc.concat({\n            hd: Res_doc.text(\": \"),\n            tl: {\n              hd: typDoc,\n              tl: /* [] */0\n            }\n          });\n    } else {\n      typConstraintDoc = Res_doc.nil;\n    }\n    var attrs$1 = printAttributes(undefined, undefined, match$18[1], cmtTbl);\n    printedExpression = Res_doc.group(Res_doc.concat({\n              hd: attrs$1,\n              tl: {\n                hd: parametersDoc,\n                tl: {\n                  hd: typConstraintDoc,\n                  tl: {\n                    hd: Res_doc.text(\" =>\"),\n                    tl: {\n                      hd: returnExprDoc,\n                      tl: /* [] */0\n                    }\n                  }\n                }\n              }\n            }));\n  }\n  var match$24 = e.pexp_desc;\n  var shouldPrintItsOwnAttributes;\n  if (typeof match$24 === \"number\") {\n    shouldPrintItsOwnAttributes = false;\n  } else {\n    switch (match$24.TAG | 0) {\n  
    case /* Pexp_match */6 :\n          shouldPrintItsOwnAttributes = Res_parsetree_viewer.isIfLetExpr(e) ? true : false;\n          break;\n      case /* Pexp_construct */9 :\n          shouldPrintItsOwnAttributes = Res_parsetree_viewer.hasJsxAttribute(e.pexp_attributes) ? true : false;\n          break;\n      case /* Pexp_fun */4 :\n      case /* Pexp_apply */5 :\n      case /* Pexp_setfield */13 :\n      case /* Pexp_ifthenelse */15 :\n      case /* Pexp_newtype */31 :\n          shouldPrintItsOwnAttributes = true;\n          break;\n      default:\n        shouldPrintItsOwnAttributes = false;\n    }\n  }\n  var attrs$2 = e.pexp_attributes;\n  if (attrs$2 && !shouldPrintItsOwnAttributes) {\n    return Res_doc.group(Res_doc.concat({\n                    hd: printAttributes(undefined, undefined, attrs$2, cmtTbl),\n                    tl: {\n                      hd: printedExpression,\n                      tl: /* [] */0\n                    }\n                  }));\n  } else {\n    return printedExpression;\n  }\n}\n\nfunction printPexpFun(inCallback, e, cmtTbl) {\n  var match = Res_parsetree_viewer.funExpr(e);\n  var returnExpr = match[2];\n  var match$1 = Res_parsetree_viewer.processUncurriedAttribute(match[0]);\n  var match$2 = returnExpr.pexp_desc;\n  var match$3;\n  if (typeof match$2 === \"number\" || match$2.TAG !== /* Pexp_constraint */19) {\n    match$3 = [\n      returnExpr,\n      undefined\n    ];\n  } else {\n    var expr = match$2._0;\n    match$3 = [\n      {\n        pexp_desc: expr.pexp_desc,\n        pexp_loc: expr.pexp_loc,\n        pexp_attributes: List.concat({\n              hd: expr.pexp_attributes,\n              tl: {\n                hd: returnExpr.pexp_attributes,\n                tl: /* [] */0\n              }\n            })\n      },\n      match$2._1\n    ];\n  }\n  var typConstraint = match$3[1];\n  var returnExpr$1 = match$3[0];\n  var parametersDoc = printExprFunParameters(inCallback, match$1[0], typConstraint !== undefined, 
match[1], cmtTbl);\n  var match$4 = returnExpr$1.pexp_desc;\n  var returnShouldIndent;\n  if (typeof match$4 === \"number\") {\n    returnShouldIndent = true;\n  } else {\n    switch (match$4.TAG | 0) {\n      case /* Pexp_let */2 :\n      case /* Pexp_sequence */16 :\n      case /* Pexp_letmodule */25 :\n      case /* Pexp_letexception */26 :\n      case /* Pexp_open */33 :\n          returnShouldIndent = false;\n          break;\n      default:\n        returnShouldIndent = true;\n    }\n  }\n  var match$5 = Res_parsetree_viewer.processBracesAttr(returnExpr$1);\n  var match$6 = returnExpr$1.pexp_desc;\n  var shouldInline;\n  if (match$5[0] !== undefined) {\n    shouldInline = true;\n  } else if (typeof match$6 === \"number\") {\n    shouldInline = false;\n  } else {\n    switch (match$6.TAG | 0) {\n      case /* Pexp_construct */9 :\n          shouldInline = match$6._1 !== undefined;\n          break;\n      case /* Pexp_tuple */8 :\n      case /* Pexp_record */11 :\n      case /* Pexp_array */14 :\n          shouldInline = true;\n          break;\n      default:\n        shouldInline = false;\n    }\n  }\n  var doc = printExpressionWithComments(returnExpr$1, cmtTbl);\n  var braces = Res_parens.expr(returnExpr$1);\n  var returnDoc = typeof braces === \"number\" ? (\n      braces !== 0 ? doc : addParens(doc)\n    ) : printBraces(doc, returnExpr$1, braces._0);\n  var returnExprDoc = shouldInline ? Res_doc.concat({\n          hd: Res_doc.space,\n          tl: {\n            hd: returnDoc,\n            tl: /* [] */0\n          }\n        }) : Res_doc.group(returnShouldIndent ? Res_doc.concat({\n                hd: Res_doc.indent(Res_doc.concat({\n                          hd: Res_doc.line,\n                          tl: {\n                            hd: returnDoc,\n                            tl: /* [] */0\n                          }\n                        })),\n                tl: {\n                  hd: inCallback !== 0 ? 
Res_doc.softLine : Res_doc.nil,\n                  tl: /* [] */0\n                }\n              }) : Res_doc.concat({\n                hd: Res_doc.space,\n                tl: {\n                  hd: returnDoc,\n                  tl: /* [] */0\n                }\n              }));\n  var typConstraintDoc = typConstraint !== undefined ? Res_doc.concat({\n          hd: Res_doc.text(\": \"),\n          tl: {\n            hd: printTypExpr(typConstraint, cmtTbl),\n            tl: /* [] */0\n          }\n        }) : Res_doc.nil;\n  return Res_doc.concat({\n              hd: printAttributes(undefined, undefined, match$1[1], cmtTbl),\n              tl: {\n                hd: parametersDoc,\n                tl: {\n                  hd: typConstraintDoc,\n                  tl: {\n                    hd: Res_doc.text(\" =>\"),\n                    tl: {\n                      hd: returnExprDoc,\n                      tl: /* [] */0\n                    }\n                  }\n                }\n              }\n            });\n}\n\nfunction printTernaryOperand(expr, cmtTbl) {\n  var doc = printExpressionWithComments(expr, cmtTbl);\n  var braces = Res_parens.ternaryOperand(expr);\n  if (typeof braces === \"number\") {\n    if (braces !== 0) {\n      return doc;\n    } else {\n      return addParens(doc);\n    }\n  } else {\n    return printBraces(doc, expr, braces._0);\n  }\n}\n\nfunction printSetFieldExpr(attrs, lhs, longidentLoc, rhs, loc, cmtTbl) {\n  var doc = printExpressionWithComments(rhs, cmtTbl);\n  var braces = Res_parens.setFieldExprRhs(rhs);\n  var rhsDoc = typeof braces === \"number\" ? (\n      braces !== 0 ? doc : addParens(doc)\n    ) : printBraces(doc, rhs, braces._0);\n  var doc$1 = printExpressionWithComments(lhs, cmtTbl);\n  var braces$1 = Res_parens.fieldExpr(lhs);\n  var lhsDoc = typeof braces$1 === \"number\" ? (\n      braces$1 !== 0 ? 
doc$1 : addParens(doc$1)\n    ) : printBraces(doc$1, lhs, braces$1._0);\n  var shouldIndent = Res_parsetree_viewer.isBinaryExpression(rhs);\n  var doc$2 = Res_doc.group(Res_doc.concat({\n            hd: lhsDoc,\n            tl: {\n              hd: Res_doc.dot,\n              tl: {\n                hd: printLidentPath(longidentLoc, cmtTbl),\n                tl: {\n                  hd: Res_doc.text(\" =\"),\n                  tl: {\n                    hd: shouldIndent ? Res_doc.group(Res_doc.indent(Res_doc.concat({\n                                    hd: Res_doc.line,\n                                    tl: {\n                                      hd: rhsDoc,\n                                      tl: /* [] */0\n                                    }\n                                  }))) : Res_doc.concat({\n                            hd: Res_doc.space,\n                            tl: {\n                              hd: rhsDoc,\n                              tl: /* [] */0\n                            }\n                          }),\n                    tl: /* [] */0\n                  }\n                }\n              }\n            }\n          }));\n  var doc$3 = attrs ? 
Res_doc.group(Res_doc.concat({\n              hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n              tl: {\n                hd: doc$2,\n                tl: /* [] */0\n              }\n            })) : doc$2;\n  return printComments(doc$3, cmtTbl, loc);\n}\n\nfunction printTemplateLiteral(expr, cmtTbl) {\n  var tag = {\n    contents: \"js\"\n  };\n  var walkExpr = function (expr) {\n    var match = expr.pexp_desc;\n    if (typeof match !== \"number\") {\n      switch (match.TAG | 0) {\n        case /* Pexp_constant */1 :\n            var match$1 = match._0;\n            if (match$1.TAG === /* Pconst_string */2) {\n              var prefix = match$1._1;\n              if (prefix !== undefined) {\n                tag.contents = prefix;\n                return printStringContents(match$1._0);\n              }\n              \n            }\n            break;\n        case /* Pexp_apply */5 :\n            var match$2 = match._0.pexp_desc;\n            if (typeof match$2 !== \"number\" && match$2.TAG === /* Pexp_ident */0) {\n              var match$3 = match$2._0.txt;\n              switch (match$3.TAG | 0) {\n                case /* Lident */0 :\n                    if (match$3._0 === \"^\") {\n                      var match$4 = match._1;\n                      if (match$4) {\n                        var match$5 = match$4.hd;\n                        if (typeof match$5[0] === \"number\") {\n                          var match$6 = match$4.tl;\n                          if (match$6) {\n                            var match$7 = match$6.hd;\n                            if (typeof match$7[0] === \"number\" && !match$6.tl) {\n                              var lhs = walkExpr(match$5[1]);\n                              var rhs = walkExpr(match$7[1]);\n                              return Res_doc.concat({\n                                          hd: lhs,\n                                          tl: {\n                                            hd: 
rhs,\n                                            tl: /* [] */0\n                                          }\n                                        });\n                            }\n                            \n                          }\n                          \n                        }\n                        \n                      }\n                      \n                    }\n                    break;\n                case /* Ldot */1 :\n                case /* Lapply */2 :\n                    break;\n                \n              }\n            }\n            break;\n        default:\n          \n      }\n    }\n    var doc = printExpressionWithComments(expr, cmtTbl);\n    return Res_doc.group(Res_doc.concat({\n                    hd: Res_doc.text(\"${\"),\n                    tl: {\n                      hd: Res_doc.indent(doc),\n                      tl: {\n                        hd: Res_doc.rbrace,\n                        tl: /* [] */0\n                      }\n                    }\n                  }));\n  };\n  var content = walkExpr(expr);\n  return Res_doc.concat({\n              hd: tag.contents === \"js\" ? 
Res_doc.nil : Res_doc.text(tag.contents),\n              tl: {\n                hd: Res_doc.text(\"`\"),\n                tl: {\n                  hd: content,\n                  tl: {\n                    hd: Res_doc.text(\"`\"),\n                    tl: /* [] */0\n                  }\n                }\n              }\n            });\n}\n\nfunction printUnaryExpression(expr, cmtTbl) {\n  var printUnaryOperator = function (op) {\n    var tmp;\n    switch (op) {\n      case \"not\" :\n          tmp = \"!\";\n          break;\n      case \"~+\" :\n          tmp = \"+\";\n          break;\n      case \"~+.\" :\n          tmp = \"+.\";\n          break;\n      case \"~-\" :\n          tmp = \"-\";\n          break;\n      case \"~-.\" :\n          tmp = \"-.\";\n          break;\n      default:\n        throw {\n              RE_EXN_ID: \"Assert_failure\",\n              _1: [\n                \"res_printer.res\",\n                3472,\n                13\n              ],\n              Error: new Error()\n            };\n    }\n    return Res_doc.text(tmp);\n  };\n  var match = expr.pexp_desc;\n  if (typeof match !== \"number\" && match.TAG === /* Pexp_apply */5) {\n    var match$1 = match._0.pexp_desc;\n    if (typeof match$1 !== \"number\" && match$1.TAG === /* Pexp_ident */0) {\n      var operator = match$1._0.txt;\n      switch (operator.TAG | 0) {\n        case /* Lident */0 :\n            var match$2 = match._1;\n            if (match$2) {\n              var match$3 = match$2.hd;\n              if (typeof match$3[0] === \"number\" && !match$2.tl) {\n                var operand = match$3[1];\n                var doc = printExpressionWithComments(operand, cmtTbl);\n                var braces = Res_parens.unaryExprOperand(operand);\n                var printedOperand = typeof braces === \"number\" ? (\n                    braces !== 0 ? 
doc : addParens(doc)\n                  ) : printBraces(doc, operand, braces._0);\n                var doc$1 = Res_doc.concat({\n                      hd: printUnaryOperator(operator._0),\n                      tl: {\n                        hd: printedOperand,\n                        tl: /* [] */0\n                      }\n                    });\n                return printComments(doc$1, cmtTbl, expr.pexp_loc);\n              }\n              \n            }\n            break;\n        case /* Ldot */1 :\n        case /* Lapply */2 :\n            break;\n        \n      }\n    }\n    \n  }\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"res_printer.res\",\n          3491,\n          9\n        ],\n        Error: new Error()\n      };\n}\n\nfunction printBinaryExpression(expr, cmtTbl) {\n  var printBinaryOperator = function (inlineRhs, operator) {\n    var operatorTxt;\n    switch (operator) {\n      case \"!=\" :\n          operatorTxt = \"!==\";\n          break;\n      case \"<>\" :\n          operatorTxt = \"!=\";\n          break;\n      case \"=\" :\n          operatorTxt = \"==\";\n          break;\n      case \"==\" :\n          operatorTxt = \"===\";\n          break;\n      case \"^\" :\n          operatorTxt = \"++\";\n          break;\n      case \"|.\" :\n          operatorTxt = \"->\";\n          break;\n      default:\n        operatorTxt = operator;\n    }\n    var spacingBeforeOperator = operator === \"|.\" ? Res_doc.softLine : (\n        operator === \"|>\" ? Res_doc.line : Res_doc.space\n      );\n    var spacingAfterOperator = operator === \"|.\" ? Res_doc.nil : (\n        operator === \"|>\" || inlineRhs ? 
Res_doc.space : Res_doc.line\n      );\n    return Res_doc.concat({\n                hd: spacingBeforeOperator,\n                tl: {\n                  hd: Res_doc.text(operatorTxt),\n                  tl: {\n                    hd: spacingAfterOperator,\n                    tl: /* [] */0\n                  }\n                }\n              });\n  };\n  var printOperand = function (isLhs, expr, parentOperator) {\n    var flatten = function (isLhs, expr, parentOperator) {\n      if (Res_parsetree_viewer.isBinaryExpression(expr)) {\n        var match = expr.pexp_desc;\n        if (typeof match !== \"number\" && match.TAG === /* Pexp_apply */5) {\n          var match$1 = match._0.pexp_desc;\n          if (typeof match$1 !== \"number\" && match$1.TAG === /* Pexp_ident */0) {\n            var operator = match$1._0.txt;\n            switch (operator.TAG | 0) {\n              case /* Lident */0 :\n                  var match$2 = match._1;\n                  if (match$2) {\n                    var match$3 = match$2.tl;\n                    if (match$3 && !match$3.tl) {\n                      var right = match$3.hd[1];\n                      var operator$1 = operator._0;\n                      if (Res_parsetree_viewer.flattenableOperators(parentOperator, operator$1) && !Res_parsetree_viewer.hasAttributes(expr.pexp_attributes)) {\n                        var leftPrinted = flatten(true, match$2.hd[1], operator$1);\n                        var match$4 = Res_parsetree_viewer.partitionPrintableAttributes(right.pexp_attributes);\n                        var doc = printExpressionWithComments({\n                              pexp_desc: right.pexp_desc,\n                              pexp_loc: right.pexp_loc,\n                              pexp_attributes: match$4[1]\n                            }, cmtTbl);\n                        var doc$1 = Res_parens.flattenOperandRhs(parentOperator, right) ? 
Res_doc.concat({\n                                hd: Res_doc.lparen,\n                                tl: {\n                                  hd: doc,\n                                  tl: {\n                                    hd: Res_doc.rparen,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }) : doc;\n                        var printableAttrs = Res_parsetree_viewer.filterPrintableAttributes(right.pexp_attributes);\n                        var doc$2 = Res_doc.concat({\n                              hd: printAttributes(undefined, undefined, printableAttrs, cmtTbl),\n                              tl: {\n                                hd: doc$1,\n                                tl: /* [] */0\n                              }\n                            });\n                        var rightPrinted = printableAttrs ? addParens(doc$2) : doc$2;\n                        var doc$3 = Res_doc.concat({\n                              hd: leftPrinted,\n                              tl: {\n                                hd: printBinaryOperator(false, operator$1),\n                                tl: {\n                                  hd: rightPrinted,\n                                  tl: /* [] */0\n                                }\n                              }\n                            });\n                        var doc$4 = !isLhs && Res_parens.rhsBinaryExprOperand(operator$1, expr) ? 
Res_doc.concat({\n                                hd: Res_doc.lparen,\n                                tl: {\n                                  hd: doc$3,\n                                  tl: {\n                                    hd: Res_doc.rparen,\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }) : doc$3;\n                        return printComments(doc$4, cmtTbl, expr.pexp_loc);\n                      }\n                      var doc$5 = printExpressionWithComments({\n                            pexp_desc: expr.pexp_desc,\n                            pexp_loc: expr.pexp_loc,\n                            pexp_attributes: /* [] */0\n                          }, cmtTbl);\n                      var doc$6 = Res_parens.subBinaryExprOperand(parentOperator, operator$1) || expr.pexp_attributes !== /* [] */0 && (Res_parsetree_viewer.isBinaryExpression(expr) || Res_parsetree_viewer.isTernaryExpr(expr)) ? 
Res_doc.concat({\n                              hd: Res_doc.lparen,\n                              tl: {\n                                hd: doc$5,\n                                tl: {\n                                  hd: Res_doc.rparen,\n                                  tl: /* [] */0\n                                }\n                              }\n                            }) : doc$5;\n                      return Res_doc.concat({\n                                  hd: printAttributes(undefined, undefined, expr.pexp_attributes, cmtTbl),\n                                  tl: {\n                                    hd: doc$6,\n                                    tl: /* [] */0\n                                  }\n                                });\n                    }\n                    \n                  }\n                  break;\n              case /* Ldot */1 :\n              case /* Lapply */2 :\n                  break;\n              \n            }\n          }\n          \n        }\n        throw {\n              RE_EXN_ID: \"Assert_failure\",\n              _1: [\n                \"res_printer.res\",\n                3590,\n                15\n              ],\n              Error: new Error()\n            };\n      }\n      var match$5 = expr.pexp_desc;\n      if (typeof match$5 !== \"number\") {\n        switch (match$5.TAG | 0) {\n          case /* Pexp_apply */5 :\n              var match$6 = match$5._0.pexp_desc;\n              if (typeof match$6 !== \"number\" && match$6.TAG === /* Pexp_ident */0) {\n                var match$7 = match$6._0;\n                var match$8 = match$7.txt;\n                switch (match$8.TAG | 0) {\n                  case /* Lident */0 :\n                      switch (match$8._0) {\n                        case \"#=\" :\n                            var match$9 = match$5._1;\n                            if (match$9) {\n                              var match$10 = match$9.hd;\n                           
   if (typeof match$10[0] === \"number\") {\n                                var match$11 = match$9.tl;\n                                if (match$11) {\n                                  var match$12 = match$11.hd;\n                                  if (typeof match$12[0] === \"number\" && !match$11.tl) {\n                                    var rhs = match$12[1];\n                                    var rhsDoc = printExpressionWithComments(rhs, cmtTbl);\n                                    var lhsDoc = printExpressionWithComments(match$10[1], cmtTbl);\n                                    var shouldIndent = Res_parsetree_viewer.isBinaryExpression(rhs);\n                                    var doc$7 = Res_doc.group(Res_doc.concat({\n                                              hd: lhsDoc,\n                                              tl: {\n                                                hd: Res_doc.text(\" =\"),\n                                                tl: {\n                                                  hd: shouldIndent ? 
Res_doc.group(Res_doc.indent(Res_doc.concat({\n                                                                  hd: Res_doc.line,\n                                                                  tl: {\n                                                                    hd: rhsDoc,\n                                                                    tl: /* [] */0\n                                                                  }\n                                                                }))) : Res_doc.concat({\n                                                          hd: Res_doc.space,\n                                                          tl: {\n                                                            hd: rhsDoc,\n                                                            tl: /* [] */0\n                                                          }\n                                                        }),\n                                                  tl: /* [] */0\n                                                }\n                                              }\n                                            }));\n                                    var attrs = expr.pexp_attributes;\n                                    var doc$8 = attrs ? 
Res_doc.group(Res_doc.concat({\n                                                hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n                                                tl: {\n                                                  hd: doc$7,\n                                                  tl: /* [] */0\n                                                }\n                                              })) : doc$7;\n                                    if (isLhs) {\n                                      return addParens(doc$8);\n                                    } else {\n                                      return doc$8;\n                                    }\n                                  }\n                                  \n                                }\n                                \n                              }\n                              \n                            }\n                            break;\n                        case \"^\" :\n                            var match$13 = match$5._1;\n                            if (match$13 && typeof match$13.hd[0] === \"number\") {\n                              var match$14 = match$13.tl;\n                              if (match$14 && typeof match$14.hd[0] === \"number\" && !match$14.tl && match$7.loc.loc_ghost) {\n                                var doc$9 = printTemplateLiteral(expr, cmtTbl);\n                                return printComments(doc$9, cmtTbl, expr.pexp_loc);\n                              }\n                              \n                            }\n                            break;\n                        default:\n                          \n                      }\n                      break;\n                  case /* Ldot */1 :\n                  case /* Lapply */2 :\n                      break;\n                  \n                }\n              }\n              break;\n          case /* Pexp_setfield */13 :\n              var doc$10 = 
printSetFieldExpr(expr.pexp_attributes, match$5._0, match$5._1, match$5._2, expr.pexp_loc, cmtTbl);\n              if (isLhs) {\n                return addParens(doc$10);\n              } else {\n                return doc$10;\n              }\n          default:\n            \n        }\n      }\n      var doc$11 = printExpressionWithComments(expr, cmtTbl);\n      var braces = Res_parens.binaryExprOperand(isLhs, expr);\n      if (typeof braces === \"number\") {\n        if (braces !== 0) {\n          return doc$11;\n        } else {\n          return addParens(doc$11);\n        }\n      } else {\n        return printBraces(doc$11, expr, braces._0);\n      }\n    };\n    return flatten(isLhs, expr, parentOperator);\n  };\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    return Res_doc.nil;\n  }\n  if (match.TAG !== /* Pexp_apply */5) {\n    return Res_doc.nil;\n  }\n  var match$1 = match._0.pexp_desc;\n  if (typeof match$1 === \"number\") {\n    return Res_doc.nil;\n  }\n  if (match$1.TAG !== /* Pexp_ident */0) {\n    return Res_doc.nil;\n  }\n  var op = match$1._0.txt;\n  switch (op.TAG | 0) {\n    case /* Lident */0 :\n        var op$1 = op._0;\n        var exit = 0;\n        switch (op$1) {\n          case \"|.\" :\n          case \"|>\" :\n              exit = 2;\n              break;\n          default:\n            \n        }\n        if (exit === 2) {\n          var match$2 = match._1;\n          if (!match$2) {\n            return Res_doc.nil;\n          }\n          var match$3 = match$2.hd;\n          if (typeof match$3[0] !== \"number\") {\n            return Res_doc.nil;\n          }\n          var match$4 = match$2.tl;\n          if (!match$4) {\n            return Res_doc.nil;\n          }\n          var match$5 = match$4.hd;\n          if (typeof match$5[0] !== \"number\") {\n            return Res_doc.nil;\n          }\n          if (match$4.tl) {\n            return Res_doc.nil;\n          }\n          var rhs = 
match$5[1];\n          var lhs = match$3[1];\n          if (!(Res_parsetree_viewer.isBinaryExpression(lhs) || Res_parsetree_viewer.isBinaryExpression(rhs))) {\n            var lhsHasCommentBelow = hasCommentBelow(cmtTbl, lhs.pexp_loc);\n            var lhsDoc = printOperand(true, lhs, op$1);\n            var rhsDoc = printOperand(false, rhs, op$1);\n            var tmp;\n            if (lhsHasCommentBelow) {\n              switch (op$1) {\n                case \"|.\" :\n                    tmp = Res_doc.concat({\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.text(\"->\"),\n                            tl: /* [] */0\n                          }\n                        });\n                    break;\n                case \"|>\" :\n                    tmp = Res_doc.concat({\n                          hd: Res_doc.line,\n                          tl: {\n                            hd: Res_doc.text(\"|> \"),\n                            tl: /* [] */0\n                          }\n                        });\n                    break;\n                default:\n                  tmp = Res_doc.nil;\n              }\n            } else {\n              switch (op$1) {\n                case \"|.\" :\n                    tmp = Res_doc.text(\"->\");\n                    break;\n                case \"|>\" :\n                    tmp = Res_doc.text(\" |> \");\n                    break;\n                default:\n                  tmp = Res_doc.nil;\n              }\n            }\n            return Res_doc.group(Res_doc.concat({\n                            hd: lhsDoc,\n                            tl: {\n                              hd: tmp,\n                              tl: {\n                                hd: rhsDoc,\n                                tl: /* [] */0\n                              }\n                            }\n                          }));\n          }\n          \n        }\n 
       var match$6 = match._1;\n        if (!match$6) {\n          return Res_doc.nil;\n        }\n        var match$7 = match$6.hd;\n        if (typeof match$7[0] !== \"number\") {\n          return Res_doc.nil;\n        }\n        var match$8 = match$6.tl;\n        if (!match$8) {\n          return Res_doc.nil;\n        }\n        var match$9 = match$8.hd;\n        if (typeof match$9[0] !== \"number\") {\n          return Res_doc.nil;\n        }\n        if (match$8.tl) {\n          return Res_doc.nil;\n        }\n        var rhs$1 = match$9[1];\n        var rhsDoc$1 = printOperand(false, rhs$1, op$1);\n        var operatorWithRhs = Res_doc.concat({\n              hd: printBinaryOperator(Res_parsetree_viewer.shouldInlineRhsBinaryExpr(rhs$1), op$1),\n              tl: {\n                hd: rhsDoc$1,\n                tl: /* [] */0\n              }\n            });\n        var right = Res_parsetree_viewer.shouldIndentBinaryExpr(expr) ? Res_doc.group(Res_doc.indent(operatorWithRhs)) : operatorWithRhs;\n        var doc = Res_doc.group(Res_doc.concat({\n                  hd: printOperand(true, match$7[1], op$1),\n                  tl: {\n                    hd: right,\n                    tl: /* [] */0\n                  }\n                }));\n        var bracesLoc = Res_parens.binaryExpr({\n              pexp_desc: expr.pexp_desc,\n              pexp_loc: expr.pexp_loc,\n              pexp_attributes: List.filter(function (attr) {\n                      if (attr[0].txt === \"ns.braces\") {\n                        return false;\n                      } else {\n                        return true;\n                      }\n                    })(expr.pexp_attributes)\n            });\n        return Res_doc.group(Res_doc.concat({\n                        hd: printAttributes(undefined, undefined, expr.pexp_attributes, cmtTbl),\n                        tl: {\n                          hd: typeof bracesLoc === \"number\" ? 
(\n                              bracesLoc !== 0 ? doc : addParens(doc)\n                            ) : printBraces(doc, expr, bracesLoc._0),\n                          tl: /* [] */0\n                        }\n                      }));\n        break;\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        return Res_doc.nil;\n    \n  }\n}\n\nfunction printPexpApply(expr, cmtTbl) {\n  var match = expr.pexp_desc;\n  if (typeof match === \"number\") {\n    throw {\n          RE_EXN_ID: \"Assert_failure\",\n          _1: [\n            \"res_printer.res\",\n            3945,\n            9\n          ],\n          Error: new Error()\n        };\n  }\n  if (match.TAG === /* Pexp_apply */5) {\n    var callExpr = match._0;\n    var lident = callExpr.pexp_desc;\n    if (typeof lident !== \"number\" && lident.TAG === /* Pexp_ident */0) {\n      var lident$1 = lident._0;\n      var match$1 = lident$1.txt;\n      var exit = 0;\n      switch (match$1.TAG | 0) {\n        case /* Lident */0 :\n            switch (match$1._0) {\n              case \"##\" :\n                  var match$2 = match._1;\n                  if (match$2) {\n                    var match$3 = match$2.hd;\n                    if (typeof match$3[0] === \"number\") {\n                      var match$4 = match$2.tl;\n                      if (match$4) {\n                        var match$5 = match$4.hd;\n                        if (typeof match$5[0] === \"number\" && !match$4.tl) {\n                          var memberExpr = match$5[1];\n                          var parentExpr = match$3[1];\n                          var doc = printExpressionWithComments(parentExpr, cmtTbl);\n                          var braces = Res_parens.unaryExprOperand(parentExpr);\n                          var parentDoc = typeof braces === \"number\" ? (\n                              braces !== 0 ? 
doc : addParens(doc)\n                            ) : printBraces(doc, parentExpr, braces._0);\n                          var lident$2 = memberExpr.pexp_desc;\n                          var memberDoc;\n                          memberDoc = typeof lident$2 === \"number\" || lident$2.TAG !== /* Pexp_ident */0 ? printExpressionWithComments(memberExpr, cmtTbl) : printComments(printLongident(lident$2._0.txt), cmtTbl, memberExpr.pexp_loc);\n                          var member = Res_doc.concat({\n                                hd: Res_doc.text(\"\\\"\"),\n                                tl: {\n                                  hd: memberDoc,\n                                  tl: {\n                                    hd: Res_doc.text(\"\\\"\"),\n                                    tl: /* [] */0\n                                  }\n                                }\n                              });\n                          return Res_doc.group(Res_doc.concat({\n                                          hd: printAttributes(undefined, undefined, expr.pexp_attributes, cmtTbl),\n                                          tl: {\n                                            hd: parentDoc,\n                                            tl: {\n                                              hd: Res_doc.lbracket,\n                                              tl: {\n                                                hd: member,\n                                                tl: {\n                                                  hd: Res_doc.rbracket,\n                                                  tl: /* [] */0\n                                                }\n                                              }\n                                            }\n                                          }\n                                        }));\n                        }\n                        exit = 2;\n                      } else {\n                        exit = 2;\n      
                }\n                    } else {\n                      exit = 2;\n                    }\n                  } else {\n                    exit = 2;\n                  }\n                  break;\n              case \"#=\" :\n                  var match$6 = match._1;\n                  if (match$6) {\n                    var match$7 = match$6.hd;\n                    if (typeof match$7[0] === \"number\") {\n                      var match$8 = match$6.tl;\n                      if (match$8) {\n                        var match$9 = match$8.hd;\n                        if (typeof match$9[0] === \"number\" && !match$8.tl) {\n                          var rhs = match$9[1];\n                          var doc$1 = printExpressionWithComments(rhs, cmtTbl);\n                          var braces$1 = Res_parens.expr(rhs);\n                          var rhsDoc = typeof braces$1 === \"number\" ? (\n                              braces$1 !== 0 ? doc$1 : addParens(doc$1)\n                            ) : printBraces(doc$1, rhs, braces$1._0);\n                          var shouldIndent = !Res_parsetree_viewer.isBracedExpr(rhs) && Res_parsetree_viewer.isBinaryExpression(rhs);\n                          var doc$2 = Res_doc.group(Res_doc.concat({\n                                    hd: printExpressionWithComments(match$7[1], cmtTbl),\n                                    tl: {\n                                      hd: Res_doc.text(\" =\"),\n                                      tl: {\n                                        hd: shouldIndent ? 
Res_doc.group(Res_doc.indent(Res_doc.concat({\n                                                        hd: Res_doc.line,\n                                                        tl: {\n                                                          hd: rhsDoc,\n                                                          tl: /* [] */0\n                                                        }\n                                                      }))) : Res_doc.concat({\n                                                hd: Res_doc.space,\n                                                tl: {\n                                                  hd: rhsDoc,\n                                                  tl: /* [] */0\n                                                }\n                                              }),\n                                        tl: /* [] */0\n                                      }\n                                    }\n                                  }));\n                          var attrs = expr.pexp_attributes;\n                          if (attrs) {\n                            return Res_doc.group(Res_doc.concat({\n                                            hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n                                            tl: {\n                                              hd: doc$2,\n                                              tl: /* [] */0\n                                            }\n                                          }));\n                          } else {\n                            return doc$2;\n                          }\n                        }\n                        exit = 2;\n                      } else {\n                        exit = 2;\n                      }\n                    } else {\n                      exit = 2;\n                    }\n                  } else {\n                    exit = 2;\n                  }\n                  break;\n              
default:\n                exit = 2;\n            }\n            break;\n        case /* Ldot */1 :\n            var match$10 = match$1._0;\n            switch (match$10.TAG | 0) {\n              case /* Lident */0 :\n                  if (match$10._0 === \"Array\") {\n                    switch (match$1._1) {\n                      case \"get\" :\n                          var match$11 = match._1;\n                          if (match$11) {\n                            var match$12 = match$11.hd;\n                            if (typeof match$12[0] === \"number\") {\n                              var match$13 = match$11.tl;\n                              if (match$13) {\n                                var match$14 = match$13.hd;\n                                if (typeof match$14[0] === \"number\" && !match$13.tl) {\n                                  var memberExpr$1 = match$14[1];\n                                  var parentExpr$1 = match$12[1];\n                                  if (Res_parsetree_viewer.isRewrittenUnderscoreApplySugar(parentExpr$1)) {\n                                    exit = 2;\n                                  } else {\n                                    var doc$3 = printExpressionWithComments(memberExpr$1, cmtTbl);\n                                    var braces$2 = Res_parens.expr(memberExpr$1);\n                                    var memberDoc$1 = typeof braces$2 === \"number\" ? (\n                                        braces$2 !== 0 ? 
doc$3 : addParens(doc$3)\n                                      ) : printBraces(doc$3, memberExpr$1, braces$2._0);\n                                    var match$15 = memberExpr$1.pexp_desc;\n                                    var shouldInline;\n                                    if (typeof match$15 === \"number\") {\n                                      shouldInline = false;\n                                    } else {\n                                      switch (match$15.TAG | 0) {\n                                        case /* Pexp_ident */0 :\n                                        case /* Pexp_constant */1 :\n                                            shouldInline = true;\n                                            break;\n                                        default:\n                                          shouldInline = false;\n                                      }\n                                    }\n                                    var member$1 = shouldInline ? 
memberDoc$1 : Res_doc.concat({\n                                            hd: Res_doc.indent(Res_doc.concat({\n                                                      hd: Res_doc.softLine,\n                                                      tl: {\n                                                        hd: memberDoc$1,\n                                                        tl: /* [] */0\n                                                      }\n                                                    })),\n                                            tl: {\n                                              hd: Res_doc.softLine,\n                                              tl: /* [] */0\n                                            }\n                                          });\n                                    var doc$4 = printExpressionWithComments(parentExpr$1, cmtTbl);\n                                    var braces$3 = Res_parens.unaryExprOperand(parentExpr$1);\n                                    var parentDoc$1 = typeof braces$3 === \"number\" ? (\n                                        braces$3 !== 0 ? 
doc$4 : addParens(doc$4)\n                                      ) : printBraces(doc$4, parentExpr$1, braces$3._0);\n                                    return Res_doc.group(Res_doc.concat({\n                                                    hd: printAttributes(undefined, undefined, expr.pexp_attributes, cmtTbl),\n                                                    tl: {\n                                                      hd: parentDoc$1,\n                                                      tl: {\n                                                        hd: Res_doc.lbracket,\n                                                        tl: {\n                                                          hd: member$1,\n                                                          tl: {\n                                                            hd: Res_doc.rbracket,\n                                                            tl: /* [] */0\n                                                          }\n                                                        }\n                                                      }\n                                                    }\n                                                  }));\n                                  }\n                                } else {\n                                  exit = 2;\n                                }\n                              } else {\n                                exit = 2;\n                              }\n                            } else {\n                              exit = 2;\n                            }\n                          } else {\n                            exit = 2;\n                          }\n                          break;\n                      case \"set\" :\n                          var match$16 = match._1;\n                          if (match$16) {\n                            var match$17 = match$16.hd;\n                            if (typeof match$17[0] 
=== \"number\") {\n                              var match$18 = match$16.tl;\n                              if (match$18) {\n                                var match$19 = match$18.hd;\n                                if (typeof match$19[0] === \"number\") {\n                                  var match$20 = match$18.tl;\n                                  if (match$20) {\n                                    var match$21 = match$20.hd;\n                                    if (typeof match$21[0] === \"number\" && !match$20.tl) {\n                                      var targetExpr = match$21[1];\n                                      var memberExpr$2 = match$19[1];\n                                      var parentExpr$2 = match$17[1];\n                                      var doc$5 = printExpressionWithComments(memberExpr$2, cmtTbl);\n                                      var braces$4 = Res_parens.expr(memberExpr$2);\n                                      var memberDoc$2 = typeof braces$4 === \"number\" ? (\n                                          braces$4 !== 0 ? 
doc$5 : addParens(doc$5)\n                                        ) : printBraces(doc$5, memberExpr$2, braces$4._0);\n                                      var match$22 = memberExpr$2.pexp_desc;\n                                      var shouldInline$1;\n                                      if (typeof match$22 === \"number\") {\n                                        shouldInline$1 = false;\n                                      } else {\n                                        switch (match$22.TAG | 0) {\n                                          case /* Pexp_ident */0 :\n                                          case /* Pexp_constant */1 :\n                                              shouldInline$1 = true;\n                                              break;\n                                          default:\n                                            shouldInline$1 = false;\n                                        }\n                                      }\n                                      var member$2 = shouldInline$1 ? 
memberDoc$2 : Res_doc.concat({\n                                              hd: Res_doc.indent(Res_doc.concat({\n                                                        hd: Res_doc.softLine,\n                                                        tl: {\n                                                          hd: memberDoc$2,\n                                                          tl: /* [] */0\n                                                        }\n                                                      })),\n                                              tl: {\n                                                hd: Res_doc.softLine,\n                                                tl: /* [] */0\n                                              }\n                                            });\n                                      var shouldIndentTargetExpr;\n                                      if (Res_parsetree_viewer.isBracedExpr(targetExpr)) {\n                                        shouldIndentTargetExpr = false;\n                                      } else if (Res_parsetree_viewer.isBinaryExpression(targetExpr)) {\n                                        shouldIndentTargetExpr = true;\n                                      } else {\n                                        var match$23 = targetExpr.pexp_desc;\n                                        var tmp;\n                                        var exit$1 = 0;\n                                        if (typeof match$23 === \"number\") {\n                                          exit$1 = 3;\n                                        } else {\n                                          switch (match$23.TAG | 0) {\n                                            case /* Pexp_ifthenelse */15 :\n                                                var match$24 = targetExpr.pexp_attributes;\n                                                if (match$24) {\n                                                  var 
ifExpr = match$23._0;\n                                                  if (match$24.hd[0].txt === \"ns.ternary\" && !match$24.tl) {\n                                                    tmp = Res_parsetree_viewer.isBinaryExpression(ifExpr) || Res_parsetree_viewer.hasAttributes(ifExpr.pexp_attributes);\n                                                  } else {\n                                                    exit$1 = 3;\n                                                  }\n                                                } else {\n                                                  exit$1 = 3;\n                                                }\n                                                break;\n                                            case /* Pexp_newtype */31 :\n                                                tmp = false;\n                                                break;\n                                            default:\n                                              exit$1 = 3;\n                                          }\n                                        }\n                                        if (exit$1 === 3) {\n                                          tmp = Res_parsetree_viewer.hasAttributes(targetExpr.pexp_attributes) || Res_parsetree_viewer.isArrayAccess(targetExpr);\n                                        }\n                                        shouldIndentTargetExpr = tmp;\n                                      }\n                                      var doc$6 = printExpressionWithComments(targetExpr, cmtTbl);\n                                      var braces$5 = Res_parens.expr(targetExpr);\n                                      var targetExpr$1 = typeof braces$5 === \"number\" ? (\n                                          braces$5 !== 0 ? 
doc$6 : addParens(doc$6)\n                                        ) : printBraces(doc$6, targetExpr, braces$5._0);\n                                      var doc$7 = printExpressionWithComments(parentExpr$2, cmtTbl);\n                                      var braces$6 = Res_parens.unaryExprOperand(parentExpr$2);\n                                      var parentDoc$2 = typeof braces$6 === \"number\" ? (\n                                          braces$6 !== 0 ? doc$7 : addParens(doc$7)\n                                        ) : printBraces(doc$7, parentExpr$2, braces$6._0);\n                                      return Res_doc.group(Res_doc.concat({\n                                                      hd: printAttributes(undefined, undefined, expr.pexp_attributes, cmtTbl),\n                                                      tl: {\n                                                        hd: parentDoc$2,\n                                                        tl: {\n                                                          hd: Res_doc.lbracket,\n                                                          tl: {\n                                                            hd: member$2,\n                                                            tl: {\n                                                              hd: Res_doc.rbracket,\n                                                              tl: {\n                                                                hd: Res_doc.text(\" =\"),\n                                                                tl: {\n                                                                  hd: shouldIndentTargetExpr ? 
Res_doc.indent(Res_doc.concat({\n                                                                              hd: Res_doc.line,\n                                                                              tl: {\n                                                                                hd: targetExpr$1,\n                                                                                tl: /* [] */0\n                                                                              }\n                                                                            })) : Res_doc.concat({\n                                                                          hd: Res_doc.space,\n                                                                          tl: {\n                                                                            hd: targetExpr$1,\n                                                                            tl: /* [] */0\n                                                                          }\n                                                                        }),\n                                                                  tl: /* [] */0\n                                                                }\n                                                              }\n                                                            }\n                                                          }\n                                                        }\n                                                      }\n                                                    }));\n                                    }\n                                    exit = 2;\n                                  } else {\n                                    exit = 2;\n                                  }\n                                } else {\n                                  exit = 2;\n                                }\n                              } else {\n        
                        exit = 2;\n                              }\n                            } else {\n                              exit = 2;\n                            }\n                          } else {\n                            exit = 2;\n                          }\n                          break;\n                      default:\n                        exit = 2;\n                    }\n                  } else {\n                    exit = 2;\n                  }\n                  break;\n              case /* Ldot */1 :\n              case /* Lapply */2 :\n                  exit = 2;\n                  break;\n              \n            }\n            break;\n        case /* Lapply */2 :\n            exit = 2;\n            break;\n        \n      }\n      if (exit === 2 && Res_parsetree_viewer.isJsxExpression(expr)) {\n        return printJsxExpression(lident$1, match._1, cmtTbl);\n      }\n      \n    }\n    var args = List.map((function (param) {\n            return [\n                    param[0],\n                    Res_parsetree_viewer.rewriteUnderscoreApply(param[1])\n                  ];\n          }), match._1);\n    var match$25 = Res_parsetree_viewer.processUncurriedAttribute(expr.pexp_attributes);\n    var attrs$1 = match$25[1];\n    var uncurried = match$25[0];\n    var doc$8 = printExpressionWithComments(callExpr, cmtTbl);\n    var braces$7 = Res_parens.callExpr(callExpr);\n    var callExprDoc = typeof braces$7 === \"number\" ? (\n        braces$7 !== 0 ? 
doc$8 : addParens(doc$8)\n      ) : printBraces(doc$8, callExpr, braces$7._0);\n    if (Res_parsetree_viewer.requiresSpecialCallbackPrintingFirstArg(args)) {\n      var argsDoc = printArgumentsWithCallbackInFirstPosition(uncurried, args, cmtTbl);\n      return Res_doc.concat({\n                  hd: printAttributes(undefined, undefined, attrs$1, cmtTbl),\n                  tl: {\n                    hd: callExprDoc,\n                    tl: {\n                      hd: argsDoc,\n                      tl: /* [] */0\n                    }\n                  }\n                });\n    }\n    if (Res_parsetree_viewer.requiresSpecialCallbackPrintingLastArg(args)) {\n      var argsDoc$1 = printArgumentsWithCallbackInLastPosition(uncurried, args, cmtTbl);\n      var maybeBreakParent = Res_doc.willBreak(argsDoc$1) ? Res_doc.breakParent : Res_doc.nil;\n      return Res_doc.concat({\n                  hd: maybeBreakParent,\n                  tl: {\n                    hd: printAttributes(undefined, undefined, attrs$1, cmtTbl),\n                    tl: {\n                      hd: callExprDoc,\n                      tl: {\n                        hd: argsDoc$1,\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                });\n    }\n    var argsDoc$2 = printArguments(uncurried, args, cmtTbl);\n    return Res_doc.concat({\n                hd: printAttributes(undefined, undefined, attrs$1, cmtTbl),\n                tl: {\n                  hd: callExprDoc,\n                  tl: {\n                    hd: argsDoc$2,\n                    tl: /* [] */0\n                  }\n                }\n              });\n  }\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"res_printer.res\",\n          3945,\n          9\n        ],\n        Error: new Error()\n      };\n}\n\nfunction printJsxExpression(lident, args, cmtTbl) {\n  var name = printJsxName(lident);\n  var match = printJsxProps(args, 
cmtTbl);\n  var children = match[1];\n  var isSelfClosing;\n  if (children !== undefined) {\n    var match$1 = children.pexp_desc;\n    if (typeof match$1 === \"number\" || match$1.TAG !== /* Pexp_construct */9) {\n      isSelfClosing = false;\n    } else {\n      var match$2 = match$1._0.txt;\n      switch (match$2.TAG | 0) {\n        case /* Lident */0 :\n            isSelfClosing = match$2._0 === \"[]\" ? match$1._1 === undefined : false;\n            break;\n        case /* Ldot */1 :\n        case /* Lapply */2 :\n            isSelfClosing = false;\n            break;\n        \n      }\n    }\n  } else {\n    isSelfClosing = false;\n  }\n  return Res_doc.group(Res_doc.concat({\n                  hd: Res_doc.group(Res_doc.concat({\n                            hd: printComments(Res_doc.concat({\n                                      hd: Res_doc.lessThan,\n                                      tl: {\n                                        hd: name,\n                                        tl: /* [] */0\n                                      }\n                                    }), cmtTbl, lident.loc),\n                            tl: {\n                              hd: match[0],\n                              tl: {\n                                hd: isSelfClosing ? Res_doc.concat({\n                                        hd: Res_doc.line,\n                                        tl: {\n                                          hd: Res_doc.text(\"/>\"),\n                                          tl: /* [] */0\n                                        }\n                                      }) : Res_doc.nil,\n                                tl: /* [] */0\n                              }\n                            }\n                          })),\n                  tl: {\n                    hd: isSelfClosing ? 
Res_doc.nil : Res_doc.concat({\n                            hd: Res_doc.greaterThan,\n                            tl: {\n                              hd: Res_doc.indent(Res_doc.concat({\n                                        hd: Res_doc.line,\n                                        tl: {\n                                          hd: children !== undefined ? printJsxChildren(children, cmtTbl) : Res_doc.nil,\n                                          tl: /* [] */0\n                                        }\n                                      })),\n                              tl: {\n                                hd: Res_doc.line,\n                                tl: {\n                                  hd: Res_doc.text(\"</\"),\n                                  tl: {\n                                    hd: name,\n                                    tl: {\n                                      hd: Res_doc.greaterThan,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }\n                            }\n                          }),\n                    tl: /* [] */0\n                  }\n                }));\n}\n\nfunction printJsxFragment(expr, cmtTbl) {\n  var opening = Res_doc.text(\"<>\");\n  var closing = Res_doc.text(\"</>\");\n  var match = expr.pexp_desc;\n  var tmp;\n  var exit = 0;\n  if (typeof match === \"number\" || match.TAG !== /* Pexp_construct */9) {\n    exit = 1;\n  } else {\n    var match$1 = match._0.txt;\n    switch (match$1.TAG | 0) {\n      case /* Lident */0 :\n          if (match$1._0 === \"[]\" && match._1 === undefined) {\n            tmp = Res_doc.nil;\n          } else {\n            exit = 1;\n          }\n          break;\n      case /* Ldot */1 :\n      case /* Lapply */2 :\n          exit = 1;\n          break;\n      \n    }\n  }\n  if (exit === 1) {\n    tmp = 
Res_doc.indent(Res_doc.concat({\n              hd: Res_doc.line,\n              tl: {\n                hd: printJsxChildren(expr, cmtTbl),\n                tl: /* [] */0\n              }\n            }));\n  }\n  return Res_doc.group(Res_doc.concat({\n                  hd: opening,\n                  tl: {\n                    hd: tmp,\n                    tl: {\n                      hd: Res_doc.line,\n                      tl: {\n                        hd: closing,\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printJsxChildren(childrenExpr, cmtTbl) {\n  var match = childrenExpr.pexp_desc;\n  var exit = 0;\n  if (typeof match === \"number\" || match.TAG !== /* Pexp_construct */9) {\n    exit = 1;\n  } else {\n    var match$1 = match._0.txt;\n    switch (match$1.TAG | 0) {\n      case /* Lident */0 :\n          if (match$1._0 === \"::\") {\n            var match$2 = Res_parsetree_viewer.collectListExpressions(childrenExpr);\n            return Res_doc.group(Res_doc.join(Res_doc.line, List.map((function (expr) {\n                                  var leadingLineCommentPresent = hasLeadingLineComment(cmtTbl, expr.pexp_loc);\n                                  var exprDoc = printExpressionWithComments(expr, cmtTbl);\n                                  var match = Res_parens.jsxChildExpr(expr);\n                                  if (typeof match === \"number\" && match !== 0) {\n                                    return exprDoc;\n                                  }\n                                  var innerDoc = Res_parens.bracedExpr(expr) ? 
addParens(exprDoc) : exprDoc;\n                                  if (leadingLineCommentPresent) {\n                                    return addBraces(innerDoc);\n                                  } else {\n                                    return Res_doc.concat({\n                                                hd: Res_doc.lbrace,\n                                                tl: {\n                                                  hd: innerDoc,\n                                                  tl: {\n                                                    hd: Res_doc.rbrace,\n                                                    tl: /* [] */0\n                                                  }\n                                                }\n                                              });\n                                  }\n                                }), match$2[0])));\n          }\n          exit = 1;\n          break;\n      case /* Ldot */1 :\n      case /* Lapply */2 :\n          exit = 1;\n          break;\n      \n    }\n  }\n  if (exit === 1) {\n    var leadingLineCommentPresent = hasLeadingLineComment(cmtTbl, childrenExpr.pexp_loc);\n    var exprDoc = printExpressionWithComments(childrenExpr, cmtTbl);\n    var match$3 = Res_parens.jsxChildExpr(childrenExpr);\n    var tmp;\n    var exit$1 = 0;\n    if (typeof match$3 === \"number\" && match$3 !== 0) {\n      tmp = exprDoc;\n    } else {\n      exit$1 = 2;\n    }\n    if (exit$1 === 2) {\n      var innerDoc = Res_parens.bracedExpr(childrenExpr) ? addParens(exprDoc) : exprDoc;\n      tmp = leadingLineCommentPresent ? 
addBraces(innerDoc) : Res_doc.concat({\n              hd: Res_doc.lbrace,\n              tl: {\n                hd: innerDoc,\n                tl: {\n                  hd: Res_doc.rbrace,\n                  tl: /* [] */0\n                }\n              }\n            });\n    }\n    return Res_doc.concat({\n                hd: Res_doc.dotdotdot,\n                tl: {\n                  hd: tmp,\n                  tl: /* [] */0\n                }\n              });\n  }\n  \n}\n\nfunction printJsxProps(args, cmtTbl) {\n  var _props = /* [] */0;\n  var _args = args;\n  while(true) {\n    var args$1 = _args;\n    var props = _props;\n    if (!args$1) {\n      return [\n              Res_doc.nil,\n              undefined\n            ];\n    }\n    var arg = args$1.hd;\n    var match = arg[0];\n    if (typeof match !== \"number\" && match.TAG === /* Labelled */0 && match._0 === \"children\") {\n      var match$1 = args$1.tl;\n      if (match$1) {\n        var match$2 = match$1.hd;\n        if (typeof match$2[0] === \"number\") {\n          var match$3 = match$2[1].pexp_desc;\n          if (typeof match$3 !== \"number\" && match$3.TAG === /* Pexp_construct */9) {\n            var match$4 = match$3._0.txt;\n            switch (match$4.TAG | 0) {\n              case /* Lident */0 :\n                  if (match$4._0 === \"()\" && match$3._1 === undefined && !match$1.tl) {\n                    var formattedProps = Res_doc.indent(props ? 
Res_doc.concat({\n                                hd: Res_doc.line,\n                                tl: {\n                                  hd: Res_doc.group(Res_doc.join(Res_doc.line, List.rev(props))),\n                                  tl: /* [] */0\n                                }\n                              }) : Res_doc.nil);\n                    return [\n                            formattedProps,\n                            arg[1]\n                          ];\n                  }\n                  break;\n              case /* Ldot */1 :\n              case /* Lapply */2 :\n                  break;\n              \n            }\n          }\n          \n        }\n        \n      }\n      \n    }\n    var propDoc = printJsxProp(arg, cmtTbl);\n    _args = args$1.tl;\n    _props = {\n      hd: propDoc,\n      tl: props\n    };\n    continue ;\n  };\n}\n\nfunction printJsxProp(arg, cmtTbl) {\n  var lbl = arg[0];\n  var exit = 0;\n  var exit$1 = 0;\n  if (typeof lbl === \"number\") {\n    exit = 1;\n  } else {\n    var match = arg[1];\n    var match$1 = match.pexp_desc;\n    if (typeof match$1 === \"number\" || match$1.TAG !== /* Pexp_ident */0) {\n      exit$1 = 2;\n    } else {\n      var ident = match$1._0.txt;\n      switch (ident.TAG | 0) {\n        case /* Lident */0 :\n            var match$2 = match.pexp_attributes;\n            if (match$2) {\n              var match$3 = match$2.hd[0];\n              var ident$1 = ident._0;\n              if (match$3.txt === \"ns.namedArgLoc\" && !(match$2.tl || lbl._0 !== ident$1)) {\n                var argLoc = match$3.loc;\n                if (typeof lbl === \"number\") {\n                  return Res_doc.nil;\n                }\n                if (lbl.TAG === /* Labelled */0) {\n                  return printComments(printIdentLike(undefined, ident$1), cmtTbl, argLoc);\n                }\n                var doc = Res_doc.concat({\n                      hd: Res_doc.question,\n                      tl: 
{\n                        hd: printIdentLike(undefined, ident$1),\n                        tl: /* [] */0\n                      }\n                    });\n                return printComments(doc, cmtTbl, argLoc);\n              } else {\n                exit = 1;\n              }\n            } else {\n              exit$1 = 2;\n            }\n            break;\n        case /* Ldot */1 :\n        case /* Lapply */2 :\n            exit = 1;\n            break;\n        \n      }\n    }\n  }\n  if (exit$1 === 2) {\n    var match$4 = arg[1];\n    var match$5 = match$4.pexp_desc;\n    if (typeof match$5 === \"number\" || match$5.TAG !== /* Pexp_ident */0) {\n      exit = 1;\n    } else {\n      var ident$2 = match$5._0.txt;\n      switch (ident$2.TAG | 0) {\n        case /* Lident */0 :\n            if (match$4.pexp_attributes) {\n              exit = 1;\n            } else {\n              var ident$3 = ident$2._0;\n              if (lbl._0 === ident$3) {\n                if (typeof lbl === \"number\") {\n                  return Res_doc.nil;\n                } else if (lbl.TAG === /* Labelled */0) {\n                  return printIdentLike(undefined, ident$3);\n                } else {\n                  return Res_doc.concat({\n                              hd: Res_doc.question,\n                              tl: {\n                                hd: printIdentLike(undefined, ident$3),\n                                tl: /* [] */0\n                              }\n                            });\n                }\n              }\n              exit = 1;\n            }\n            break;\n        case /* Ldot */1 :\n        case /* Lapply */2 :\n            exit = 1;\n            break;\n        \n      }\n    }\n  }\n  if (exit === 1) {\n    var expr = arg[1];\n    var match$6 = expr.pexp_attributes;\n    var match$7;\n    if (match$6) {\n      var match$8 = match$6.hd[0];\n      match$7 = match$8.txt === \"ns.namedArgLoc\" ? 
[\n          match$8.loc,\n          {\n            pexp_desc: expr.pexp_desc,\n            pexp_loc: expr.pexp_loc,\n            pexp_attributes: match$6.tl\n          }\n        ] : [\n          $$Location.none,\n          expr\n        ];\n    } else {\n      match$7 = [\n        $$Location.none,\n        expr\n      ];\n    }\n    var expr$1 = match$7[1];\n    var argLoc$1 = match$7[0];\n    var lblDoc;\n    if (typeof lbl === \"number\") {\n      lblDoc = Res_doc.nil;\n    } else if (lbl.TAG === /* Labelled */0) {\n      var lbl$1 = printComments(printIdentLike(undefined, lbl._0), cmtTbl, argLoc$1);\n      lblDoc = Res_doc.concat({\n            hd: lbl$1,\n            tl: {\n              hd: Res_doc.equal,\n              tl: /* [] */0\n            }\n          });\n    } else {\n      var lbl$2 = printComments(printIdentLike(undefined, lbl._0), cmtTbl, argLoc$1);\n      lblDoc = Res_doc.concat({\n            hd: lbl$2,\n            tl: {\n              hd: Res_doc.equal,\n              tl: {\n                hd: Res_doc.question,\n                tl: /* [] */0\n              }\n            }\n          });\n    }\n    var leadingLineCommentPresent = hasLeadingLineComment(cmtTbl, expr$1.pexp_loc);\n    var doc$1 = printExpressionWithComments(expr$1, cmtTbl);\n    var match$9 = Res_parens.jsxPropExpr(expr$1);\n    var exprDoc;\n    var exit$2 = 0;\n    if (typeof match$9 === \"number\" && match$9 !== 0) {\n      exprDoc = doc$1;\n    } else {\n      exit$2 = 2;\n    }\n    if (exit$2 === 2) {\n      var innerDoc = Res_parens.bracedExpr(expr$1) ? addParens(doc$1) : doc$1;\n      exprDoc = leadingLineCommentPresent ? 
addBraces(innerDoc) : Res_doc.concat({\n              hd: Res_doc.lbrace,\n              tl: {\n                hd: innerDoc,\n                tl: {\n                  hd: Res_doc.rbrace,\n                  tl: /* [] */0\n                }\n              }\n            });\n    }\n    var fullLoc_loc_start = argLoc$1.loc_start;\n    var fullLoc_loc_end = expr$1.pexp_loc.loc_end;\n    var fullLoc_loc_ghost = argLoc$1.loc_ghost;\n    var fullLoc = {\n      loc_start: fullLoc_loc_start,\n      loc_end: fullLoc_loc_end,\n      loc_ghost: fullLoc_loc_ghost\n    };\n    return printComments(Res_doc.concat({\n                    hd: lblDoc,\n                    tl: {\n                      hd: exprDoc,\n                      tl: /* [] */0\n                    }\n                  }), cmtTbl, fullLoc);\n  }\n  \n}\n\nfunction printJsxName(param) {\n  var lident = param.txt;\n  var flatten = function (_acc, _lident) {\n    while(true) {\n      var lident = _lident;\n      var acc = _acc;\n      switch (lident.TAG | 0) {\n        case /* Lident */0 :\n            return {\n                    hd: lident._0,\n                    tl: acc\n                  };\n        case /* Ldot */1 :\n            var txt = lident._1;\n            var acc$1 = txt === \"createElement\" ? 
acc : ({\n                  hd: txt,\n                  tl: acc\n                });\n            _lident = lident._0;\n            _acc = acc$1;\n            continue ;\n        case /* Lapply */2 :\n            return acc;\n        \n      }\n    };\n  };\n  switch (lident.TAG | 0) {\n    case /* Lident */0 :\n        return Res_doc.text(lident._0);\n    case /* Ldot */1 :\n    case /* Lapply */2 :\n        break;\n    \n  }\n  var segments = flatten(/* [] */0, lident);\n  return Res_doc.join(Res_doc.dot, List.map(Res_doc.text, segments));\n}\n\nfunction printArgumentsWithCallbackInFirstPosition(uncurried, args, cmtTbl) {\n  var cmtTblCopy = Res_comments_table.copy(cmtTbl);\n  var match;\n  if (args) {\n    var match$1 = args.hd;\n    var expr = match$1[1];\n    var lbl = match$1[0];\n    var lblDoc;\n    lblDoc = typeof lbl === \"number\" ? Res_doc.nil : (\n        lbl.TAG === /* Labelled */0 ? Res_doc.concat({\n                hd: Res_doc.tilde,\n                tl: {\n                  hd: printIdentLike(undefined, lbl._0),\n                  tl: {\n                    hd: Res_doc.equal,\n                    tl: /* [] */0\n                  }\n                }\n              }) : Res_doc.concat({\n                hd: Res_doc.tilde,\n                tl: {\n                  hd: printIdentLike(undefined, lbl._0),\n                  tl: {\n                    hd: Res_doc.equal,\n                    tl: {\n                      hd: Res_doc.question,\n                      tl: /* [] */0\n                    }\n                  }\n                }\n              })\n      );\n    var callback = Res_doc.concat({\n          hd: lblDoc,\n          tl: {\n            hd: printPexpFun(/* FitsOnOneLine */1, expr, cmtTbl),\n            tl: /* [] */0\n          }\n        });\n    var callback$1 = printComments(callback, cmtTbl, expr.pexp_loc);\n    var printedArgs = Res_doc.join(Res_doc.concat({\n              hd: Res_doc.comma,\n              tl: {\n                hd: 
Res_doc.line,\n                tl: /* [] */0\n              }\n            }), List.map((function (arg) {\n                return printArgument(arg, cmtTbl);\n              }), args.tl));\n    match = [\n      callback$1,\n      printedArgs\n    ];\n  } else {\n    throw {\n          RE_EXN_ID: \"Assert_failure\",\n          _1: [\n            \"res_printer.res\",\n            4197,\n            9\n          ],\n          Error: new Error()\n        };\n  }\n  var printedArgs$1 = match[1];\n  var fitsOnOneLine = Res_doc.concat({\n        hd: uncurried ? Res_doc.text(\"(. \") : Res_doc.lparen,\n        tl: {\n          hd: match[0],\n          tl: {\n            hd: Res_doc.comma,\n            tl: {\n              hd: Res_doc.line,\n              tl: {\n                hd: printedArgs$1,\n                tl: {\n                  hd: Res_doc.rparen,\n                  tl: /* [] */0\n                }\n              }\n            }\n          }\n        }\n      });\n  var breakAllArgs = printArguments(uncurried, args, cmtTblCopy);\n  if (Res_doc.willBreak(printedArgs$1)) {\n    return breakAllArgs;\n  } else {\n    return Res_doc.customLayout({\n                hd: fitsOnOneLine,\n                tl: {\n                  hd: breakAllArgs,\n                  tl: /* [] */0\n                }\n              });\n  }\n}\n\nfunction printArgumentsWithCallbackInLastPosition(uncurried, args, cmtTbl) {\n  var cmtTblCopy = Res_comments_table.copy(cmtTbl);\n  var cmtTblCopy2 = Res_comments_table.copy(cmtTbl);\n  var loop = function (_acc, _args) {\n    while(true) {\n      var args = _args;\n      var acc = _acc;\n      if (!args) {\n        return [\n                Res_doc.nil,\n                Res_doc.nil,\n                Res_doc.nil\n              ];\n      }\n      var args$1 = args.tl;\n      var arg = args.hd;\n      var expr = arg[1];\n      var lbl = arg[0];\n      if (args$1) {\n        var argDoc = printArgument(arg, cmtTbl);\n        _args = args$1;\n        _acc 
= {\n          hd: Res_doc.line,\n          tl: {\n            hd: Res_doc.comma,\n            tl: {\n              hd: argDoc,\n              tl: acc\n            }\n          }\n        };\n        continue ;\n      }\n      var lblDoc;\n      lblDoc = typeof lbl === \"number\" ? Res_doc.nil : (\n          lbl.TAG === /* Labelled */0 ? Res_doc.concat({\n                  hd: Res_doc.tilde,\n                  tl: {\n                    hd: printIdentLike(undefined, lbl._0),\n                    tl: {\n                      hd: Res_doc.equal,\n                      tl: /* [] */0\n                    }\n                  }\n                }) : Res_doc.concat({\n                  hd: Res_doc.tilde,\n                  tl: {\n                    hd: printIdentLike(undefined, lbl._0),\n                    tl: {\n                      hd: Res_doc.equal,\n                      tl: {\n                        hd: Res_doc.question,\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                })\n        );\n      var pexpFunDoc = printPexpFun(/* FitsOnOneLine */1, expr, cmtTbl);\n      var doc = Res_doc.concat({\n            hd: lblDoc,\n            tl: {\n              hd: pexpFunDoc,\n              tl: /* [] */0\n            }\n          });\n      var callbackFitsOnOneLine = printComments(doc, cmtTbl, expr.pexp_loc);\n      var pexpFunDoc$1 = printPexpFun(/* ArgumentsFitOnOneLine */2, expr, cmtTblCopy);\n      var doc$1 = Res_doc.concat({\n            hd: lblDoc,\n            tl: {\n              hd: pexpFunDoc$1,\n              tl: /* [] */0\n            }\n          });\n      var callbackArgumentsFitsOnOneLine = printComments(doc$1, cmtTblCopy, expr.pexp_loc);\n      return [\n              Res_doc.concat(List.rev(acc)),\n              callbackFitsOnOneLine,\n              callbackArgumentsFitsOnOneLine\n            ];\n    };\n  };\n  var match = loop(/* [] */0, args);\n  var printedArgs = match[0];\n  
var fitsOnOneLine = Res_doc.concat({\n        hd: uncurried ? Res_doc.text(\"(.\") : Res_doc.lparen,\n        tl: {\n          hd: printedArgs,\n          tl: {\n            hd: match[1],\n            tl: {\n              hd: Res_doc.rparen,\n              tl: /* [] */0\n            }\n          }\n        }\n      });\n  var arugmentsFitOnOneLine = Res_doc.concat({\n        hd: uncurried ? Res_doc.text(\"(.\") : Res_doc.lparen,\n        tl: {\n          hd: printedArgs,\n          tl: {\n            hd: Res_doc.breakableGroup(true, match[2]),\n            tl: {\n              hd: Res_doc.rparen,\n              tl: /* [] */0\n            }\n          }\n        }\n      });\n  var breakAllArgs = printArguments(uncurried, args, cmtTblCopy2);\n  if (Res_doc.willBreak(printedArgs)) {\n    return breakAllArgs;\n  } else {\n    return Res_doc.customLayout({\n                hd: fitsOnOneLine,\n                tl: {\n                  hd: arugmentsFitOnOneLine,\n                  tl: {\n                    hd: breakAllArgs,\n                    tl: /* [] */0\n                  }\n                }\n              });\n  }\n}\n\nfunction printArguments(uncurried, args, cmtTbl) {\n  if (args) {\n    var match = args.hd;\n    if (typeof match[0] === \"number\") {\n      var arg = match[1];\n      var match$1 = arg.pexp_desc;\n      var exit = 0;\n      if (typeof match$1 === \"number\" || match$1.TAG !== /* Pexp_construct */9) {\n        exit = 2;\n      } else {\n        var match$2 = match$1._0.txt;\n        switch (match$2.TAG | 0) {\n          case /* Lident */0 :\n              if (match$2._0 === \"()\") {\n                if (!args.tl) {\n                  var match$3 = arg.pexp_loc.loc_ghost;\n                  if (uncurried) {\n                    if (match$3) {\n                      return Res_doc.text(\"(.)\");\n                    } else {\n                      return Res_doc.text(\"(. 
())\");\n                    }\n                  } else {\n                    return Res_doc.text(\"()\");\n                  }\n                }\n                \n              } else {\n                exit = 2;\n              }\n              break;\n          case /* Ldot */1 :\n          case /* Lapply */2 :\n              exit = 2;\n              break;\n          \n        }\n      }\n      if (exit === 2 && !args.tl && Res_parsetree_viewer.isHuggableExpression(arg)) {\n        var doc = printExpressionWithComments(arg, cmtTbl);\n        var braces = Res_parens.expr(arg);\n        var argDoc = typeof braces === \"number\" ? (\n            braces !== 0 ? doc : addParens(doc)\n          ) : printBraces(doc, arg, braces._0);\n        return Res_doc.concat({\n                    hd: uncurried ? Res_doc.text(\"(. \") : Res_doc.lparen,\n                    tl: {\n                      hd: argDoc,\n                      tl: {\n                        hd: Res_doc.rparen,\n                        tl: /* [] */0\n                      }\n                    }\n                  });\n      }\n      \n    }\n    \n  }\n  return Res_doc.group(Res_doc.concat({\n                  hd: uncurried ? Res_doc.text(\"(.\") : Res_doc.lparen,\n                  tl: {\n                    hd: Res_doc.indent(Res_doc.concat({\n                              hd: uncurried ? 
Res_doc.line : Res_doc.softLine,\n                              tl: {\n                                hd: Res_doc.join(Res_doc.concat({\n                                          hd: Res_doc.comma,\n                                          tl: {\n                                            hd: Res_doc.line,\n                                            tl: /* [] */0\n                                          }\n                                        }), List.map((function (arg) {\n                                            return printArgument(arg, cmtTbl);\n                                          }), args)),\n                                tl: /* [] */0\n                              }\n                            })),\n                    tl: {\n                      hd: Res_doc.trailingComma,\n                      tl: {\n                        hd: Res_doc.softLine,\n                        tl: {\n                          hd: Res_doc.rparen,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printArgument(param, cmtTbl) {\n  var arg = param[1];\n  var argLbl = param[0];\n  if (typeof argLbl !== \"number\") {\n    if (argLbl.TAG === /* Labelled */0) {\n      var match = arg.pexp_desc;\n      var lbl = argLbl._0;\n      if (typeof match !== \"number\") {\n        switch (match.TAG | 0) {\n          case /* Pexp_ident */0 :\n              var name = match._0.txt;\n              switch (name.TAG | 0) {\n                case /* Lident */0 :\n                    var match$1 = arg.pexp_attributes;\n                    var exit = 0;\n                    if (!(match$1 && !(match$1.hd[0].txt === \"ns.namedArgLoc\" && !match$1.tl))) {\n                      exit = 2;\n                    }\n                    if (exit === 2 && lbl === name._0 && !Res_parsetree_viewer.isBracedExpr(arg)) {\n                      var match$2 = 
arg.pexp_attributes;\n                      var loc;\n                      if (match$2) {\n                        var match$3 = match$2.hd[0];\n                        loc = match$3.txt === \"ns.namedArgLoc\" ? match$3.loc : arg.pexp_loc;\n                      } else {\n                        loc = arg.pexp_loc;\n                      }\n                      var doc = Res_doc.concat({\n                            hd: Res_doc.tilde,\n                            tl: {\n                              hd: printIdentLike(undefined, lbl),\n                              tl: /* [] */0\n                            }\n                          });\n                      return printComments(doc, cmtTbl, loc);\n                    }\n                    break;\n                case /* Ldot */1 :\n                case /* Lapply */2 :\n                    break;\n                \n              }\n              break;\n          case /* Pexp_constraint */19 :\n              var argExpr = match._0;\n              var match$4 = argExpr.pexp_desc;\n              if (typeof match$4 !== \"number\" && match$4.TAG === /* Pexp_ident */0) {\n                var name$1 = match$4._0.txt;\n                switch (name$1.TAG | 0) {\n                  case /* Lident */0 :\n                      var attrs = arg.pexp_attributes;\n                      var exit$1 = 0;\n                      if (!(attrs && !(attrs.hd[0].txt === \"ns.namedArgLoc\" && !attrs.tl))) {\n                        exit$1 = 2;\n                      }\n                      if (exit$1 === 2 && lbl === name$1._0 && !Res_parsetree_viewer.isBracedExpr(argExpr)) {\n                        var loc$1;\n                        if (attrs) {\n                          var match$5 = attrs.hd[0];\n                          if (match$5.txt === \"ns.namedArgLoc\") {\n                            var loc$2 = match$5.loc;\n                            loc$1 = {\n                              loc_start: loc$2.loc_start,\n               
               loc_end: arg.pexp_loc.loc_end,\n                              loc_ghost: loc$2.loc_ghost\n                            };\n                          } else {\n                            loc$1 = arg.pexp_loc;\n                          }\n                        } else {\n                          loc$1 = arg.pexp_loc;\n                        }\n                        var doc$1 = Res_doc.concat({\n                              hd: Res_doc.tilde,\n                              tl: {\n                                hd: printIdentLike(undefined, lbl),\n                                tl: {\n                                  hd: Res_doc.text(\": \"),\n                                  tl: {\n                                    hd: printTypExpr(match._1, cmtTbl),\n                                    tl: /* [] */0\n                                  }\n                                }\n                              }\n                            });\n                        return printComments(doc$1, cmtTbl, loc$1);\n                      }\n                      break;\n                  case /* Ldot */1 :\n                  case /* Lapply */2 :\n                      break;\n                  \n                }\n              }\n              break;\n          default:\n            \n        }\n      }\n      \n    } else {\n      var match$6 = arg.pexp_desc;\n      if (typeof match$6 !== \"number\" && match$6.TAG === /* Pexp_ident */0) {\n        var name$2 = match$6._0.txt;\n        var lbl$1 = argLbl._0;\n        switch (name$2.TAG | 0) {\n          case /* Lident */0 :\n              var match$7 = arg.pexp_attributes;\n              var exit$2 = 0;\n              if (!(match$7 && !(match$7.hd[0].txt === \"ns.namedArgLoc\" && !match$7.tl))) {\n                exit$2 = 2;\n              }\n              if (exit$2 === 2 && lbl$1 === name$2._0) {\n                var match$8 = arg.pexp_attributes;\n                var loc$3;\n                if 
(match$8) {\n                  var match$9 = match$8.hd[0];\n                  loc$3 = match$9.txt === \"ns.namedArgLoc\" ? match$9.loc : arg.pexp_loc;\n                } else {\n                  loc$3 = arg.pexp_loc;\n                }\n                var doc$2 = Res_doc.concat({\n                      hd: Res_doc.tilde,\n                      tl: {\n                        hd: printIdentLike(undefined, lbl$1),\n                        tl: {\n                          hd: Res_doc.question,\n                          tl: /* [] */0\n                        }\n                      }\n                    });\n                return printComments(doc$2, cmtTbl, loc$3);\n              }\n              break;\n          case /* Ldot */1 :\n          case /* Lapply */2 :\n              break;\n          \n        }\n      }\n      \n    }\n  }\n  var match$10 = arg.pexp_attributes;\n  var match$11;\n  if (match$10) {\n    var match$12 = match$10.hd[0];\n    match$11 = match$12.txt === \"ns.namedArgLoc\" ? 
[\n        match$12.loc,\n        {\n          pexp_desc: arg.pexp_desc,\n          pexp_loc: arg.pexp_loc,\n          pexp_attributes: match$10.tl\n        }\n      ] : [\n        arg.pexp_loc,\n        arg\n      ];\n  } else {\n    match$11 = [\n      arg.pexp_loc,\n      arg\n    ];\n  }\n  var expr = match$11[1];\n  var argLoc = match$11[0];\n  var printedLbl;\n  if (typeof argLbl === \"number\") {\n    printedLbl = Res_doc.nil;\n  } else if (argLbl.TAG === /* Labelled */0) {\n    var doc$3 = Res_doc.concat({\n          hd: Res_doc.tilde,\n          tl: {\n            hd: printIdentLike(undefined, argLbl._0),\n            tl: {\n              hd: Res_doc.equal,\n              tl: /* [] */0\n            }\n          }\n        });\n    printedLbl = printComments(doc$3, cmtTbl, argLoc);\n  } else {\n    var doc$4 = Res_doc.concat({\n          hd: Res_doc.tilde,\n          tl: {\n            hd: printIdentLike(undefined, argLbl._0),\n            tl: {\n              hd: Res_doc.equal,\n              tl: {\n                hd: Res_doc.question,\n                tl: /* [] */0\n              }\n            }\n          }\n        });\n    printedLbl = printComments(doc$4, cmtTbl, argLoc);\n  }\n  var doc$5 = printExpressionWithComments(expr, cmtTbl);\n  var braces = Res_parens.expr(expr);\n  var printedExpr = typeof braces === \"number\" ? (\n      braces !== 0 ? 
doc$5 : addParens(doc$5)\n    ) : printBraces(doc$5, expr, braces._0);\n  var loc_loc_start = argLoc.loc_start;\n  var loc_loc_end = expr.pexp_loc.loc_end;\n  var loc_loc_ghost = argLoc.loc_ghost;\n  var loc$4 = {\n    loc_start: loc_loc_start,\n    loc_end: loc_loc_end,\n    loc_ghost: loc_loc_ghost\n  };\n  var doc$6 = Res_doc.concat({\n        hd: printedLbl,\n        tl: {\n          hd: printedExpr,\n          tl: /* [] */0\n        }\n      });\n  return printComments(doc$6, cmtTbl, loc$4);\n}\n\nfunction printCases(cases, cmtTbl) {\n  return Res_doc.breakableGroup(true, Res_doc.concat({\n                  hd: Res_doc.lbrace,\n                  tl: {\n                    hd: Res_doc.concat({\n                          hd: Res_doc.line,\n                          tl: {\n                            hd: printList((function (n) {\n                                    var init = n.pc_lhs.ppat_loc;\n                                    return {\n                                            loc_start: init.loc_start,\n                                            loc_end: n.pc_rhs.pexp_loc.loc_end,\n                                            loc_ghost: init.loc_ghost\n                                          };\n                                  }), cases, printCase, undefined, cmtTbl),\n                            tl: /* [] */0\n                          }\n                        }),\n                    tl: {\n                      hd: Res_doc.line,\n                      tl: {\n                        hd: Res_doc.rbrace,\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printCase($$case, cmtTbl) {\n  var match = $$case.pc_rhs.pexp_desc;\n  var rhs;\n  var exit = 0;\n  if (typeof match === \"number\") {\n    exit = 1;\n  } else {\n    switch (match.TAG | 0) {\n      case /* Pexp_let */2 :\n      case /* Pexp_sequence */16 :\n      case /* Pexp_letmodule */25 :\n      case 
/* Pexp_letexception */26 :\n      case /* Pexp_open */33 :\n          exit = 2;\n          break;\n      default:\n        exit = 1;\n    }\n  }\n  switch (exit) {\n    case 1 :\n        var doc = printExpressionWithComments($$case.pc_rhs, cmtTbl);\n        var match$1 = Res_parens.expr($$case.pc_rhs);\n        rhs = match$1 === 0 ? addParens(doc) : doc;\n        break;\n    case 2 :\n        rhs = printExpressionBlock(Res_parsetree_viewer.isBracedExpr($$case.pc_rhs), $$case.pc_rhs, cmtTbl);\n        break;\n    \n  }\n  var expr = $$case.pc_guard;\n  var guard = expr !== undefined ? Res_doc.group(Res_doc.concat({\n              hd: Res_doc.line,\n              tl: {\n                hd: Res_doc.text(\"if \"),\n                tl: {\n                  hd: printExpressionWithComments(expr, cmtTbl),\n                  tl: /* [] */0\n                }\n              }\n            })) : Res_doc.nil;\n  var match$2 = $$case.pc_rhs.pexp_desc;\n  var shouldInlineRhs;\n  var exit$1 = 0;\n  if (typeof match$2 === \"number\") {\n    exit$1 = 1;\n  } else {\n    switch (match$2.TAG | 0) {\n      case /* Pexp_ident */0 :\n      case /* Pexp_constant */1 :\n          shouldInlineRhs = true;\n          break;\n      case /* Pexp_construct */9 :\n          var match$3 = match$2._0.txt;\n          switch (match$3.TAG | 0) {\n            case /* Lident */0 :\n                switch (match$3._0) {\n                  case \"()\" :\n                  case \"false\" :\n                  case \"true\" :\n                      shouldInlineRhs = true;\n                      break;\n                  default:\n                    exit$1 = 1;\n                }\n                break;\n            case /* Ldot */1 :\n            case /* Lapply */2 :\n                exit$1 = 1;\n                break;\n            \n          }\n          break;\n      default:\n        exit$1 = 1;\n    }\n  }\n  if (exit$1 === 1) {\n    shouldInlineRhs = Res_parsetree_viewer.isHuggableRhs($$case.pc_rhs) 
? true : false;\n  }\n  var match$4 = $$case.pc_lhs.ppat_desc;\n  var shouldIndentPattern;\n  shouldIndentPattern = typeof match$4 === \"number\" || match$4.TAG !== /* Ppat_or */9 ? true : false;\n  var doc$1 = printPattern($$case.pc_lhs, cmtTbl);\n  var match$5 = $$case.pc_lhs.ppat_desc;\n  var patternDoc;\n  patternDoc = typeof match$5 === \"number\" || match$5.TAG !== /* Ppat_constraint */10 ? doc$1 : addParens(doc$1);\n  var content = Res_doc.concat({\n        hd: shouldIndentPattern ? Res_doc.indent(patternDoc) : patternDoc,\n        tl: {\n          hd: Res_doc.indent(guard),\n          tl: {\n            hd: Res_doc.text(\" =>\"),\n            tl: {\n              hd: Res_doc.indent(Res_doc.concat({\n                        hd: shouldInlineRhs ? Res_doc.space : Res_doc.line,\n                        tl: {\n                          hd: rhs,\n                          tl: /* [] */0\n                        }\n                      })),\n              tl: /* [] */0\n            }\n          }\n        }\n      });\n  return Res_doc.group(Res_doc.concat({\n                  hd: Res_doc.text(\"| \"),\n                  tl: {\n                    hd: content,\n                    tl: /* [] */0\n                  }\n                }));\n}\n\nfunction printExprFunParameters(inCallback, uncurried, hasConstraint, parameters, cmtTbl) {\n  if (parameters) {\n    var match = parameters.hd;\n    if (match.TAG === /* Parameter */0 && !match.attrs && typeof match.lbl === \"number\" && match.defaultExpr === undefined) {\n      var stringLoc = match.pat.ppat_desc;\n      if (typeof stringLoc === \"number\") {\n        if (!parameters.tl && !uncurried) {\n          if (hasConstraint) {\n            return Res_doc.text(\"(_)\");\n          } else {\n            return Res_doc.text(\"_\");\n          }\n        }\n        \n      } else {\n        switch (stringLoc.TAG | 0) {\n          case /* Ppat_var */0 :\n              if (!parameters.tl && !uncurried) {\n                
var stringLoc$1 = stringLoc._0;\n                var $$var = printIdentLike(undefined, stringLoc$1.txt);\n                var txtDoc = hasConstraint ? addParens($$var) : $$var;\n                return printComments(txtDoc, cmtTbl, stringLoc$1.loc);\n              }\n              break;\n          case /* Ppat_construct */5 :\n              var match$1 = stringLoc._0.txt;\n              switch (match$1.TAG | 0) {\n                case /* Lident */0 :\n                    if (match$1._0 === \"()\" && stringLoc._1 === undefined && !parameters.tl && !uncurried) {\n                      return Res_doc.text(\"()\");\n                    }\n                    break;\n                case /* Ldot */1 :\n                case /* Lapply */2 :\n                    break;\n                \n              }\n              break;\n          default:\n            \n        }\n      }\n    }\n    \n  }\n  var inCallback$1 = inCallback === 1;\n  var lparen = uncurried ? Res_doc.text(\"(. \") : Res_doc.lparen;\n  var shouldHug = Res_parsetree_viewer.parametersShouldHug(parameters);\n  var printedParamaters = Res_doc.concat({\n        hd: shouldHug || inCallback$1 ? Res_doc.nil : Res_doc.softLine,\n        tl: {\n          hd: Res_doc.join(Res_doc.concat({\n                    hd: Res_doc.comma,\n                    tl: {\n                      hd: Res_doc.line,\n                      tl: /* [] */0\n                    }\n                  }), List.map((function (p) {\n                      return printExpFunParameter(p, cmtTbl);\n                    }), parameters)),\n          tl: /* [] */0\n        }\n      });\n  return Res_doc.group(Res_doc.concat({\n                  hd: lparen,\n                  tl: {\n                    hd: shouldHug || inCallback$1 ? 
printedParamaters : Res_doc.concat({\n                            hd: Res_doc.indent(printedParamaters),\n                            tl: {\n                              hd: Res_doc.trailingComma,\n                              tl: {\n                                hd: Res_doc.softLine,\n                                tl: /* [] */0\n                              }\n                            }\n                          }),\n                    tl: {\n                      hd: Res_doc.rparen,\n                      tl: /* [] */0\n                    }\n                  }\n                }));\n}\n\nfunction printExpFunParameter(parameter, cmtTbl) {\n  if (parameter.TAG !== /* Parameter */0) {\n    return Res_doc.group(Res_doc.concat({\n                    hd: printAttributes(undefined, undefined, parameter.attrs, cmtTbl),\n                    tl: {\n                      hd: Res_doc.text(\"type \"),\n                      tl: {\n                        hd: Res_doc.join(Res_doc.space, List.map((function (lbl) {\n                                    return printComments(printIdentLike(undefined, lbl.txt), cmtTbl, lbl.loc);\n                                  }), parameter.locs)),\n                        tl: /* [] */0\n                      }\n                    }\n                  }));\n  }\n  var pattern = parameter.pat;\n  var defaultExpr = parameter.defaultExpr;\n  var lbl = parameter.lbl;\n  var match = Res_parsetree_viewer.processUncurriedAttribute(parameter.attrs);\n  var uncurried = match[0] ? Res_doc.concat({\n          hd: Res_doc.dot,\n          tl: {\n            hd: Res_doc.space,\n            tl: /* [] */0\n          }\n        }) : Res_doc.nil;\n  var attrs = printAttributes(undefined, undefined, match[1], cmtTbl);\n  var defaultExprDoc = defaultExpr !== undefined ? 
Res_doc.concat({\n          hd: Res_doc.text(\"=\"),\n          tl: {\n            hd: printExpressionWithComments(defaultExpr, cmtTbl),\n            tl: /* [] */0\n          }\n        }) : Res_doc.nil;\n  var labelWithPattern;\n  var exit = 0;\n  if (typeof lbl === \"number\") {\n    labelWithPattern = printPattern(pattern, cmtTbl);\n  } else {\n    var lbl$1 = lbl._0;\n    var stringLoc = pattern.ppat_desc;\n    if (typeof stringLoc === \"number\") {\n      exit = 1;\n    } else {\n      switch (stringLoc.TAG | 0) {\n        case /* Ppat_var */0 :\n            var match$1 = pattern.ppat_attributes;\n            var exit$1 = 0;\n            if (match$1 && !(match$1.hd[0].txt === \"ns.namedArgLoc\" && !match$1.tl)) {\n              exit = 1;\n            } else {\n              exit$1 = 2;\n            }\n            if (exit$1 === 2) {\n              if (lbl$1 === stringLoc._0.txt) {\n                labelWithPattern = Res_doc.concat({\n                      hd: Res_doc.text(\"~\"),\n                      tl: {\n                        hd: printIdentLike(undefined, lbl$1),\n                        tl: /* [] */0\n                      }\n                    });\n              } else {\n                exit = 1;\n              }\n            }\n            break;\n        case /* Ppat_constraint */10 :\n            var lbl$2 = lbl._0;\n            var match$2 = pattern.ppat_desc;\n            var match$3 = match$2._0.ppat_desc;\n            if (typeof match$3 === \"number\" || match$3.TAG !== /* Ppat_var */0) {\n              exit = 1;\n            } else {\n              var match$4 = pattern.ppat_attributes;\n              var exit$2 = 0;\n              if (match$4 && !(match$4.hd[0].txt === \"ns.namedArgLoc\" && !match$4.tl)) {\n                exit = 1;\n              } else {\n                exit$2 = 2;\n              }\n              if (exit$2 === 2) {\n                if (lbl$2 === match$3._0.txt) {\n                  labelWithPattern = Res_doc.concat({\n  
                      hd: Res_doc.text(\"~\"),\n                        tl: {\n                          hd: printIdentLike(undefined, lbl$2),\n                          tl: {\n                            hd: Res_doc.text(\": \"),\n                            tl: {\n                              hd: printTypExpr(match$2._1, cmtTbl),\n                              tl: /* [] */0\n                            }\n                          }\n                        }\n                      });\n                } else {\n                  exit = 1;\n                }\n              }\n              \n            }\n            break;\n        default:\n          exit = 1;\n      }\n    }\n  }\n  if (exit === 1) {\n    labelWithPattern = Res_doc.concat({\n          hd: Res_doc.text(\"~\"),\n          tl: {\n            hd: printIdentLike(undefined, lbl._0),\n            tl: {\n              hd: Res_doc.text(\" as \"),\n              tl: {\n                hd: printPattern(pattern, cmtTbl),\n                tl: /* [] */0\n              }\n            }\n          }\n        });\n  }\n  var optionalLabelSuffix;\n  optionalLabelSuffix = typeof lbl === \"number\" || lbl.TAG === /* Labelled */0 || defaultExpr !== undefined ? Res_doc.nil : Res_doc.text(\"=?\");\n  var doc = Res_doc.group(Res_doc.concat({\n            hd: uncurried,\n            tl: {\n              hd: attrs,\n              tl: {\n                hd: labelWithPattern,\n                tl: {\n                  hd: defaultExprDoc,\n                  tl: {\n                    hd: optionalLabelSuffix,\n                    tl: /* [] */0\n                  }\n                }\n              }\n            }\n          }));\n  var cmtLoc;\n  if (defaultExpr !== undefined) {\n    var match$5 = pattern.ppat_attributes;\n    var startPos;\n    if (match$5) {\n      var match$6 = match$5.hd[0];\n      startPos = match$6.txt === \"ns.namedArgLoc\" ? 
match$6.loc.loc_start : pattern.ppat_loc.loc_start;\n    } else {\n      startPos = pattern.ppat_loc.loc_start;\n    }\n    var init = pattern.ppat_loc;\n    cmtLoc = {\n      loc_start: startPos,\n      loc_end: defaultExpr.pexp_loc.loc_end,\n      loc_ghost: init.loc_ghost\n    };\n  } else {\n    var match$7 = pattern.ppat_attributes;\n    if (match$7) {\n      var match$8 = match$7.hd[0];\n      if (match$8.txt === \"ns.namedArgLoc\") {\n        var loc = match$8.loc;\n        cmtLoc = {\n          loc_start: loc.loc_start,\n          loc_end: pattern.ppat_loc.loc_end,\n          loc_ghost: loc.loc_ghost\n        };\n      } else {\n        cmtLoc = pattern.ppat_loc;\n      }\n    } else {\n      cmtLoc = pattern.ppat_loc;\n    }\n  }\n  return printComments(doc, cmtTbl, cmtLoc);\n}\n\nfunction printExpressionBlock(braces, expr, cmtTbl) {\n  var collectRows = function (_acc, _expr) {\n    while(true) {\n      var expr = _expr;\n      var acc = _acc;\n      var match = expr.pexp_desc;\n      if (typeof match !== \"number\") {\n        switch (match.TAG | 0) {\n          case /* Pexp_let */2 :\n              var expr2 = match._2;\n              var valueBindings = match._1;\n              var match$1 = List.rev(valueBindings);\n              var loc;\n              if (valueBindings && match$1) {\n                var init = valueBindings.hd.pvb_loc;\n                loc = {\n                  loc_start: init.loc_start,\n                  loc_end: match$1.hd.pvb_loc.loc_end,\n                  loc_ghost: init.loc_ghost\n                };\n              } else {\n                loc = $$Location.none;\n              }\n              var comment = getFirstLeadingComment(cmtTbl, loc);\n              var loc$1;\n              if (comment !== undefined) {\n                var cmtLoc = Res_comment.loc(comment);\n                loc$1 = {\n                  loc_start: cmtLoc.loc_start,\n                  loc_end: loc.loc_end,\n                  loc_ghost: 
cmtLoc.loc_ghost\n                };\n              } else {\n                loc$1 = loc;\n              }\n              var recFlag = match._0 ? Res_doc.text(\"rec \") : Res_doc.nil;\n              var letDoc = printValueBindings(recFlag, valueBindings, cmtTbl);\n              var match$2 = expr2.pexp_desc;\n              var exit = 0;\n              if (typeof match$2 === \"number\" || match$2.TAG !== /* Pexp_construct */9) {\n                exit = 2;\n              } else {\n                var match$3 = match$2._0.txt;\n                switch (match$3.TAG | 0) {\n                  case /* Lident */0 :\n                      if (match$3._0 === \"()\") {\n                        return List.rev({\n                                    hd: [\n                                      loc$1,\n                                      letDoc\n                                    ],\n                                    tl: acc\n                                  });\n                      }\n                      exit = 2;\n                      break;\n                  case /* Ldot */1 :\n                  case /* Lapply */2 :\n                      exit = 2;\n                      break;\n                  \n                }\n              }\n              if (exit === 2) {\n                _expr = expr2;\n                _acc = {\n                  hd: [\n                    loc$1,\n                    letDoc\n                  ],\n                  tl: acc\n                };\n                continue ;\n              }\n              break;\n          case /* Pexp_sequence */16 :\n              var expr1 = match._0;\n              var doc = printExpression(expr1, cmtTbl);\n              var braces = Res_parens.expr(expr1);\n              var exprDoc = typeof braces === \"number\" ? (\n                  braces !== 0 ? 
doc : addParens(doc)\n                ) : printBraces(doc, expr1, braces._0);\n              var loc$2 = expr1.pexp_loc;\n              _expr = match._1;\n              _acc = {\n                hd: [\n                  loc$2,\n                  exprDoc\n                ],\n                tl: acc\n              };\n              continue ;\n          case /* Pexp_letmodule */25 :\n              var modExpr = match._1;\n              var modName = match._0;\n              var doc$1 = Res_doc.text(modName.txt);\n              var name = printComments(doc$1, cmtTbl, modName.loc);\n              var letModuleDoc = Res_doc.concat({\n                    hd: Res_doc.text(\"module \"),\n                    tl: {\n                      hd: name,\n                      tl: {\n                        hd: Res_doc.text(\" = \"),\n                        tl: {\n                          hd: printModExpr(modExpr, cmtTbl),\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  });\n              var init$1 = expr.pexp_loc;\n              var loc_loc_start = init$1.loc_start;\n              var loc_loc_end = modExpr.pmod_loc.loc_end;\n              var loc_loc_ghost = init$1.loc_ghost;\n              var loc$3 = {\n                loc_start: loc_loc_start,\n                loc_end: loc_loc_end,\n                loc_ghost: loc_loc_ghost\n              };\n              _expr = match._2;\n              _acc = {\n                hd: [\n                  loc$3,\n                  letModuleDoc\n                ],\n                tl: acc\n              };\n              continue ;\n          case /* Pexp_letexception */26 :\n              var extensionConstructor = match._0;\n              var init$2 = expr.pexp_loc;\n              var loc_loc_start$1 = init$2.loc_start;\n              var loc_loc_end$1 = extensionConstructor.pext_loc.loc_end;\n              var loc_loc_ghost$1 = init$2.loc_ghost;\n         
     var loc$4 = {\n                loc_start: loc_loc_start$1,\n                loc_end: loc_loc_end$1,\n                loc_ghost: loc_loc_ghost$1\n              };\n              var comment$1 = getFirstLeadingComment(cmtTbl, loc$4);\n              var loc$5;\n              if (comment$1 !== undefined) {\n                var cmtLoc$1 = Res_comment.loc(comment$1);\n                loc$5 = {\n                  loc_start: cmtLoc$1.loc_start,\n                  loc_end: loc_loc_end$1,\n                  loc_ghost: cmtLoc$1.loc_ghost\n                };\n              } else {\n                loc$5 = loc$4;\n              }\n              var letExceptionDoc = printExceptionDef(extensionConstructor, cmtTbl);\n              _expr = match._1;\n              _acc = {\n                hd: [\n                  loc$5,\n                  letExceptionDoc\n                ],\n                tl: acc\n              };\n              continue ;\n          case /* Pexp_open */33 :\n              var longidentLoc = match._1;\n              var openDoc = Res_doc.concat({\n                    hd: Res_doc.text(\"open\"),\n                    tl: {\n                      hd: printOverrideFlag(match._0),\n                      tl: {\n                        hd: Res_doc.space,\n                        tl: {\n                          hd: printLongidentLocation(longidentLoc, cmtTbl),\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  });\n              var init$3 = expr.pexp_loc;\n              var loc_loc_start$2 = init$3.loc_start;\n              var loc_loc_end$2 = longidentLoc.loc.loc_end;\n              var loc_loc_ghost$2 = init$3.loc_ghost;\n              var loc$6 = {\n                loc_start: loc_loc_start$2,\n                loc_end: loc_loc_end$2,\n                loc_ghost: loc_loc_ghost$2\n              };\n              _expr = match._2;\n              _acc = {\n                hd: [\n   
               loc$6,\n                  openDoc\n                ],\n                tl: acc\n              };\n              continue ;\n          default:\n            \n        }\n      }\n      var doc$2 = printExpression(expr, cmtTbl);\n      var braces$1 = Res_parens.expr(expr);\n      var exprDoc$1 = typeof braces$1 === \"number\" ? (\n          braces$1 !== 0 ? doc$2 : addParens(doc$2)\n        ) : printBraces(doc$2, expr, braces$1._0);\n      return List.rev({\n                  hd: [\n                    expr.pexp_loc,\n                    exprDoc$1\n                  ],\n                  tl: acc\n                });\n    };\n  };\n  var rows = collectRows(/* [] */0, expr);\n  var block = printList((function (prim) {\n          return prim[0];\n        }), rows, (function (param, param$1) {\n          return param[1];\n        }), true, cmtTbl);\n  return Res_doc.breakableGroup(true, braces ? Res_doc.concat({\n                    hd: Res_doc.lbrace,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.line,\n                                tl: {\n                                  hd: block,\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.line,\n                        tl: {\n                          hd: Res_doc.rbrace,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }) : block);\n}\n\nfunction printBraces(doc, expr, bracesLoc) {\n  var overMultipleLines = bracesLoc.loc_end.pos_lnum > bracesLoc.loc_start.pos_lnum;\n  var match = expr.pexp_desc;\n  if (typeof match !== \"number\") {\n    switch (match.TAG | 0) {\n      case /* Pexp_let */2 :\n      case /* Pexp_sequence */16 :\n      case /* Pexp_letmodule */25 :\n      case /* Pexp_letexception */26 :\n    
  case /* Pexp_open */33 :\n          return doc;\n      default:\n        \n    }\n  }\n  return Res_doc.breakableGroup(overMultipleLines, Res_doc.concat({\n                  hd: Res_doc.lbrace,\n                  tl: {\n                    hd: Res_doc.indent(Res_doc.concat({\n                              hd: Res_doc.softLine,\n                              tl: {\n                                hd: Res_parens.bracedExpr(expr) ? addParens(doc) : doc,\n                                tl: /* [] */0\n                              }\n                            })),\n                    tl: {\n                      hd: Res_doc.softLine,\n                      tl: {\n                        hd: Res_doc.rbrace,\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printOverrideFlag(overrideFlag) {\n  if (overrideFlag) {\n    return Res_doc.nil;\n  } else {\n    return Res_doc.text(\"!\");\n  }\n}\n\nfunction printDirectionFlag(flag) {\n  if (flag) {\n    return Res_doc.text(\" downto \");\n  } else {\n    return Res_doc.text(\" to \");\n  }\n}\n\nfunction printRecordRow(param, cmtTbl, punningAllowed) {\n  var expr = param[1];\n  var lbl = param[0];\n  var init = lbl.loc;\n  var cmtLoc_loc_start = init.loc_start;\n  var cmtLoc_loc_end = expr.pexp_loc.loc_end;\n  var cmtLoc_loc_ghost = init.loc_ghost;\n  var cmtLoc = {\n    loc_start: cmtLoc_loc_start,\n    loc_end: cmtLoc_loc_end,\n    loc_ghost: cmtLoc_loc_ghost\n  };\n  var match = expr.pexp_desc;\n  var tmp;\n  var exit = 0;\n  if (typeof match === \"number\" || match.TAG !== /* Pexp_ident */0) {\n    exit = 1;\n  } else {\n    var match$1 = match._0;\n    var key = match$1.txt;\n    switch (key.TAG | 0) {\n      case /* Lident */0 :\n          if (punningAllowed && Longident.last(lbl.txt) === key._0 && lbl.loc.loc_start.pos_cnum === match$1.loc.loc_start.pos_cnum) {\n            tmp = printLidentPath(lbl, cmtTbl);\n          } 
else {\n            exit = 1;\n          }\n          break;\n      case /* Ldot */1 :\n      case /* Lapply */2 :\n          exit = 1;\n          break;\n      \n    }\n  }\n  if (exit === 1) {\n    var doc = printExpressionWithComments(expr, cmtTbl);\n    var braces = Res_parens.expr(expr);\n    tmp = Res_doc.concat({\n          hd: printLidentPath(lbl, cmtTbl),\n          tl: {\n            hd: Res_doc.text(\": \"),\n            tl: {\n              hd: typeof braces === \"number\" ? (\n                  braces !== 0 ? doc : addParens(doc)\n                ) : printBraces(doc, expr, braces._0),\n              tl: /* [] */0\n            }\n          }\n        });\n  }\n  var doc$1 = Res_doc.group(tmp);\n  return printComments(doc$1, cmtTbl, cmtLoc);\n}\n\nfunction printBsObjectRow(param, cmtTbl) {\n  var expr = param[1];\n  var lbl = param[0];\n  var init = lbl.loc;\n  var cmtLoc_loc_start = init.loc_start;\n  var cmtLoc_loc_end = expr.pexp_loc.loc_end;\n  var cmtLoc_loc_ghost = init.loc_ghost;\n  var cmtLoc = {\n    loc_start: cmtLoc_loc_start,\n    loc_end: cmtLoc_loc_end,\n    loc_ghost: cmtLoc_loc_ghost\n  };\n  var doc = Res_doc.concat({\n        hd: Res_doc.text(\"\\\"\"),\n        tl: {\n          hd: printLongident(lbl.txt),\n          tl: {\n            hd: Res_doc.text(\"\\\"\"),\n            tl: /* [] */0\n          }\n        }\n      });\n  var lblDoc = printComments(doc, cmtTbl, lbl.loc);\n  var doc$1 = printExpressionWithComments(expr, cmtTbl);\n  var braces = Res_parens.expr(expr);\n  var doc$2 = Res_doc.concat({\n        hd: lblDoc,\n        tl: {\n          hd: Res_doc.text(\": \"),\n          tl: {\n            hd: typeof braces === \"number\" ? (\n                braces !== 0 ? 
doc$1 : addParens(doc$1)\n              ) : printBraces(doc$1, expr, braces._0),\n            tl: /* [] */0\n          }\n        }\n      });\n  return printComments(doc$2, cmtTbl, cmtLoc);\n}\n\nfunction printAttributes(loc, inlineOpt, attrs, cmtTbl) {\n  var inline = inlineOpt !== undefined ? inlineOpt : false;\n  var attrs$1 = Res_parsetree_viewer.filterParsingAttrs(attrs);\n  if (!attrs$1) {\n    return Res_doc.nil;\n  }\n  var lineBreak;\n  if (loc !== undefined) {\n    var match = List.rev(attrs$1);\n    lineBreak = match && loc.loc_start.pos_lnum > match.hd[0].loc.loc_end.pos_lnum ? Res_doc.hardLine : Res_doc.line;\n  } else {\n    lineBreak = Res_doc.line;\n  }\n  return Res_doc.concat({\n              hd: Res_doc.group(Res_doc.join(Res_doc.line, List.map((function (attr) {\n                              return printAttribute(attr, cmtTbl);\n                            }), attrs$1))),\n              tl: {\n                hd: inline ? Res_doc.space : lineBreak,\n                tl: /* [] */0\n              }\n            });\n}\n\nfunction printPayload(payload, cmtTbl) {\n  switch (payload.TAG | 0) {\n    case /* PStr */0 :\n        var structure = payload._0;\n        if (!structure) {\n          return Res_doc.nil;\n        }\n        var si = structure.hd;\n        var match = si.pstr_desc;\n        switch (match.TAG | 0) {\n          case /* Pstr_eval */0 :\n              if (structure.tl) {\n                return addParens(printStructure(structure, cmtTbl));\n              }\n              var attrs = match._1;\n              var expr = match._0;\n              var exprDoc = printExpressionWithComments(expr, cmtTbl);\n              var needsParens = attrs ? 
true : false;\n              var shouldHug = Res_parsetree_viewer.isHuggableExpression(expr);\n              if (shouldHug) {\n                return Res_doc.concat({\n                            hd: Res_doc.lparen,\n                            tl: {\n                              hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n                              tl: {\n                                hd: needsParens ? addParens(exprDoc) : exprDoc,\n                                tl: {\n                                  hd: Res_doc.rparen,\n                                  tl: /* [] */0\n                                }\n                              }\n                            }\n                          });\n              } else {\n                return Res_doc.concat({\n                            hd: Res_doc.lparen,\n                            tl: {\n                              hd: Res_doc.indent(Res_doc.concat({\n                                        hd: Res_doc.softLine,\n                                        tl: {\n                                          hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n                                          tl: {\n                                            hd: needsParens ? 
addParens(exprDoc) : exprDoc,\n                                            tl: /* [] */0\n                                          }\n                                        }\n                                      })),\n                              tl: {\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: Res_doc.rparen,\n                                  tl: /* [] */0\n                                }\n                              }\n                            }\n                          });\n              }\n          case /* Pstr_value */1 :\n              if (structure.tl) {\n                return addParens(printStructure(structure, cmtTbl));\n              } else {\n                return addParens(printStructureItem(si, cmtTbl));\n              }\n          default:\n            return addParens(printStructure(structure, cmtTbl));\n        }\n    case /* PSig */1 :\n        return Res_doc.concat({\n                    hd: Res_doc.lparen,\n                    tl: {\n                      hd: Res_doc.text(\":\"),\n                      tl: {\n                        hd: Res_doc.indent(Res_doc.concat({\n                                  hd: Res_doc.line,\n                                  tl: {\n                                    hd: printSignature(payload._0, cmtTbl),\n                                    tl: /* [] */0\n                                  }\n                                })),\n                        tl: {\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.rparen,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  });\n    case /* PTyp */2 :\n        return Res_doc.concat({\n                    hd: Res_doc.lparen,\n                    tl: {\n                      hd: 
Res_doc.text(\":\"),\n                      tl: {\n                        hd: Res_doc.indent(Res_doc.concat({\n                                  hd: Res_doc.line,\n                                  tl: {\n                                    hd: printTypExpr(payload._0, cmtTbl),\n                                    tl: /* [] */0\n                                  }\n                                })),\n                        tl: {\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.rparen,\n                            tl: /* [] */0\n                          }\n                        }\n                      }\n                    }\n                  });\n    case /* PPat */3 :\n        var optExpr = payload._1;\n        var whenDoc = optExpr !== undefined ? Res_doc.concat({\n                hd: Res_doc.line,\n                tl: {\n                  hd: Res_doc.text(\"if \"),\n                  tl: {\n                    hd: printExpressionWithComments(optExpr, cmtTbl),\n                    tl: /* [] */0\n                  }\n                }\n              }) : Res_doc.nil;\n        return Res_doc.concat({\n                    hd: Res_doc.lparen,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: Res_doc.text(\"? 
\"),\n                                  tl: {\n                                    hd: printPattern(payload._0, cmtTbl),\n                                    tl: {\n                                      hd: whenDoc,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.softLine,\n                        tl: {\n                          hd: Res_doc.rparen,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  });\n    \n  }\n}\n\nfunction printAttribute(param, cmtTbl) {\n  return Res_doc.group(Res_doc.concat({\n                  hd: Res_doc.text(\"@\"),\n                  tl: {\n                    hd: Res_doc.text(convertBsExternalAttribute(param[0].txt)),\n                    tl: {\n                      hd: printPayload(param[1], cmtTbl),\n                      tl: /* [] */0\n                    }\n                  }\n                }));\n}\n\nfunction printModExpr(modExpr, cmtTbl) {\n  var longidentLoc = modExpr.pmod_desc;\n  var doc;\n  switch (longidentLoc.TAG | 0) {\n    case /* Pmod_ident */0 :\n        doc = printLongidentLocation(longidentLoc._0, cmtTbl);\n        break;\n    case /* Pmod_structure */1 :\n        var structure = longidentLoc._0;\n        if (structure) {\n          doc = Res_doc.breakableGroup(true, Res_doc.concat({\n                    hd: Res_doc.lbrace,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: printStructure(structure, cmtTbl),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: 
{\n                        hd: Res_doc.softLine,\n                        tl: {\n                          hd: Res_doc.rbrace,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }));\n        } else {\n          var shouldBreak = modExpr.pmod_loc.loc_start.pos_lnum < modExpr.pmod_loc.loc_end.pos_lnum;\n          doc = Res_doc.breakableGroup(shouldBreak, Res_doc.concat({\n                    hd: Res_doc.lbrace,\n                    tl: {\n                      hd: Res_doc.indent(Res_doc.concat({\n                                hd: Res_doc.softLine,\n                                tl: {\n                                  hd: printCommentsInside(cmtTbl, modExpr.pmod_loc),\n                                  tl: /* [] */0\n                                }\n                              })),\n                      tl: {\n                        hd: Res_doc.softLine,\n                        tl: {\n                          hd: Res_doc.rbrace,\n                          tl: /* [] */0\n                        }\n                      }\n                    }\n                  }));\n        }\n        break;\n    case /* Pmod_functor */2 :\n        doc = printModFunctor(modExpr, cmtTbl);\n        break;\n    case /* Pmod_apply */3 :\n        var match = Res_parsetree_viewer.modExprApply(modExpr);\n        var args = match[0];\n        var isUnitSugar;\n        if (args) {\n          var match$1 = args.hd.pmod_desc;\n          isUnitSugar = match$1.TAG === /* Pmod_structure */1 && !(match$1._0 || args.tl) ? true : false;\n        } else {\n          isUnitSugar = false;\n        }\n        var shouldHug = args && args.hd.pmod_desc.TAG === /* Pmod_structure */1 && !args.tl ? true : false;\n        doc = Res_doc.group(Res_doc.concat({\n                  hd: printModExpr(match[1], cmtTbl),\n                  tl: {\n                    hd: isUnitSugar ? 
printModApplyArg(List.hd(args), cmtTbl) : Res_doc.concat({\n                            hd: Res_doc.lparen,\n                            tl: {\n                              hd: shouldHug ? printModApplyArg(List.hd(args), cmtTbl) : Res_doc.indent(Res_doc.concat({\n                                          hd: Res_doc.softLine,\n                                          tl: {\n                                            hd: Res_doc.join(Res_doc.concat({\n                                                      hd: Res_doc.comma,\n                                                      tl: {\n                                                        hd: Res_doc.line,\n                                                        tl: /* [] */0\n                                                      }\n                                                    }), List.map((function (modArg) {\n                                                        return printModApplyArg(modArg, cmtTbl);\n                                                      }), args)),\n                                            tl: /* [] */0\n                                          }\n                                        })),\n                              tl: {\n                                hd: shouldHug ? 
Res_doc.nil : Res_doc.concat({\n                                        hd: Res_doc.trailingComma,\n                                        tl: {\n                                          hd: Res_doc.softLine,\n                                          tl: /* [] */0\n                                        }\n                                      }),\n                                tl: {\n                                  hd: Res_doc.rparen,\n                                  tl: /* [] */0\n                                }\n                              }\n                            }\n                          }),\n                    tl: /* [] */0\n                  }\n                }));\n        break;\n    case /* Pmod_constraint */4 :\n        doc = Res_doc.concat({\n              hd: printModExpr(longidentLoc._0, cmtTbl),\n              tl: {\n                hd: Res_doc.text(\": \"),\n                tl: {\n                  hd: printModType(longidentLoc._1, cmtTbl),\n                  tl: /* [] */0\n                }\n              }\n            });\n        break;\n    case /* Pmod_unpack */5 :\n        var expr = longidentLoc._0;\n        var match$2 = expr.pexp_desc;\n        var shouldHug$1;\n        if (typeof match$2 === \"number\") {\n          shouldHug$1 = false;\n        } else {\n          switch (match$2.TAG | 0) {\n            case /* Pexp_let */2 :\n                shouldHug$1 = true;\n                break;\n            case /* Pexp_constraint */19 :\n                var tmp = match$2._0.pexp_desc;\n                if (typeof tmp === \"number\" || tmp.TAG !== /* Pexp_let */2) {\n                  shouldHug$1 = false;\n                } else {\n                  var tmp$1 = match$2._1.ptyp_desc;\n                  shouldHug$1 = typeof tmp$1 === \"number\" || tmp$1.TAG !== /* Ptyp_package */9 ? 
false : true;\n                }\n                break;\n            default:\n              shouldHug$1 = false;\n          }\n        }\n        var match$3 = expr.pexp_desc;\n        var match$4;\n        if (typeof match$3 === \"number\" || match$3.TAG !== /* Pexp_constraint */19) {\n          match$4 = [\n            expr,\n            Res_doc.nil\n          ];\n        } else {\n          var match$5 = match$3._1;\n          var packageType = match$5.ptyp_desc;\n          if (typeof packageType === \"number\" || packageType.TAG !== /* Ptyp_package */9) {\n            match$4 = [\n              expr,\n              Res_doc.nil\n            ];\n          } else {\n            var doc$1 = printPackageType(false, packageType._0, cmtTbl);\n            var packageDoc = printComments(doc$1, cmtTbl, match$5.ptyp_loc);\n            var typeDoc = Res_doc.group(Res_doc.concat({\n                      hd: Res_doc.text(\":\"),\n                      tl: {\n                        hd: Res_doc.indent(Res_doc.concat({\n                                  hd: Res_doc.line,\n                                  tl: {\n                                    hd: packageDoc,\n                                    tl: /* [] */0\n                                  }\n                                })),\n                        tl: /* [] */0\n                      }\n                    }));\n            match$4 = [\n              match$3._0,\n              typeDoc\n            ];\n          }\n        }\n        var unpackDoc = Res_doc.group(Res_doc.concat({\n                  hd: printExpressionWithComments(match$4[0], cmtTbl),\n                  tl: {\n                    hd: match$4[1],\n                    tl: /* [] */0\n                  }\n                }));\n        doc = Res_doc.group(Res_doc.concat({\n                  hd: Res_doc.text(\"unpack(\"),\n                  tl: {\n                    hd: shouldHug$1 ? 
unpackDoc : Res_doc.concat({\n                            hd: Res_doc.indent(Res_doc.concat({\n                                      hd: Res_doc.softLine,\n                                      tl: {\n                                        hd: unpackDoc,\n                                        tl: /* [] */0\n                                      }\n                                    })),\n                            tl: {\n                              hd: Res_doc.softLine,\n                              tl: /* [] */0\n                            }\n                          }),\n                    tl: {\n                      hd: Res_doc.rparen,\n                      tl: /* [] */0\n                    }\n                  }\n                }));\n        break;\n    case /* Pmod_extension */6 :\n        doc = printExtension(false, longidentLoc._0, cmtTbl);\n        break;\n    \n  }\n  return printComments(doc, cmtTbl, modExpr.pmod_loc);\n}\n\nfunction printModFunctor(modExpr, cmtTbl) {\n  var match = Res_parsetree_viewer.modExprFunctor(modExpr);\n  var returnModExpr = match[1];\n  var parameters = match[0];\n  var match$1 = returnModExpr.pmod_desc;\n  var match$2;\n  if (match$1.TAG === /* Pmod_constraint */4) {\n    var modType = match$1._1;\n    var doc = printModType(modType, cmtTbl);\n    var constraintDoc = Res_parens.modExprFunctorConstraint(modType) ? 
addParens(doc) : doc;\n    var modConstraint = Res_doc.concat({\n          hd: Res_doc.text(\": \"),\n          tl: {\n            hd: constraintDoc,\n            tl: /* [] */0\n          }\n        });\n    match$2 = [\n      modConstraint,\n      printModExpr(match$1._0, cmtTbl)\n    ];\n  } else {\n    match$2 = [\n      Res_doc.nil,\n      printModExpr(returnModExpr, cmtTbl)\n    ];\n  }\n  var parametersDoc;\n  var exit = 0;\n  if (parameters) {\n    var match$3 = parameters.hd;\n    var attrs = match$3[0];\n    if (match$3[1].txt === \"*\") {\n      if (match$3[2] !== undefined || parameters.tl) {\n        exit = 1;\n      } else {\n        parametersDoc = Res_doc.group(Res_doc.concat({\n                  hd: printAttributes(undefined, undefined, attrs, cmtTbl),\n                  tl: {\n                    hd: Res_doc.text(\"()\"),\n                    tl: /* [] */0\n                  }\n                }));\n      }\n    } else if (attrs || match$3[2] !== undefined || parameters.tl) {\n      exit = 1;\n    } else {\n      parametersDoc = Res_doc.text(match$3[1].txt);\n    }\n  } else {\n    exit = 1;\n  }\n  if (exit === 1) {\n    parametersDoc = Res_doc.group(Res_doc.concat({\n              hd: Res_doc.lparen,\n              tl: {\n                hd: Res_doc.indent(Res_doc.concat({\n                          hd: Res_doc.softLine,\n                          tl: {\n                            hd: Res_doc.join(Res_doc.concat({\n                                      hd: Res_doc.comma,\n                                      tl: {\n                                        hd: Res_doc.line,\n                                        tl: /* [] */0\n                                      }\n                                    }), List.map((function (param) {\n                                        return printModFunctorParam(param, cmtTbl);\n                                      }), parameters)),\n                            tl: /* [] */0\n                          
}\n                        })),\n                tl: {\n                  hd: Res_doc.trailingComma,\n                  tl: {\n                    hd: Res_doc.softLine,\n                    tl: {\n                      hd: Res_doc.rparen,\n                      tl: /* [] */0\n                    }\n                  }\n                }\n              }\n            }));\n  }\n  return Res_doc.group(Res_doc.concat({\n                  hd: parametersDoc,\n                  tl: {\n                    hd: match$2[0],\n                    tl: {\n                      hd: Res_doc.text(\" => \"),\n                      tl: {\n                        hd: match$2[1],\n                        tl: /* [] */0\n                      }\n                    }\n                  }\n                }));\n}\n\nfunction printModFunctorParam(param, cmtTbl) {\n  var optModType = param[2];\n  var lbl = param[1];\n  var cmtLoc;\n  if (optModType !== undefined) {\n    var init = lbl.loc;\n    cmtLoc = {\n      loc_start: init.loc_start,\n      loc_end: optModType.pmty_loc.loc_end,\n      loc_ghost: init.loc_ghost\n    };\n  } else {\n    cmtLoc = lbl.loc;\n  }\n  var attrs = printAttributes(undefined, undefined, param[0], cmtTbl);\n  var doc = lbl.txt === \"*\" ? Res_doc.text(\"()\") : Res_doc.text(lbl.txt);\n  var lblDoc = printComments(doc, cmtTbl, lbl.loc);\n  var doc$1 = Res_doc.group(Res_doc.concat({\n            hd: attrs,\n            tl: {\n              hd: lblDoc,\n              tl: {\n                hd: optModType !== undefined ? 
Res_doc.concat({\n                        hd: Res_doc.text(\": \"),\n                        tl: {\n                          hd: printModType(optModType, cmtTbl),\n                          tl: /* [] */0\n                        }\n                      }) : Res_doc.nil,\n                tl: /* [] */0\n              }\n            }\n          }));\n  return printComments(doc$1, cmtTbl, cmtLoc);\n}\n\nfunction printModApplyArg(modExpr, cmtTbl) {\n  var match = modExpr.pmod_desc;\n  if (match.TAG === /* Pmod_structure */1 && !match._0) {\n    return Res_doc.text(\"()\");\n  } else {\n    return printModExpr(modExpr, cmtTbl);\n  }\n}\n\nfunction printExceptionDef(constr, cmtTbl) {\n  var longident = constr.pext_kind;\n  var kind;\n  if (longident.TAG === /* Pext_decl */0) {\n    var args = longident._0;\n    var exit = 0;\n    if (args.TAG === /* Pcstr_tuple */0 && !(args._0 || longident._1 !== undefined)) {\n      kind = Res_doc.nil;\n    } else {\n      exit = 1;\n    }\n    if (exit === 1) {\n      var gadt = longident._1;\n      var gadtDoc = gadt !== undefined ? 
Res_doc.concat({\n              hd: Res_doc.text(\": \"),\n              tl: {\n                hd: printTypExpr(gadt, cmtTbl),\n                tl: /* [] */0\n              }\n            }) : Res_doc.nil;\n      kind = Res_doc.concat({\n            hd: printConstructorArguments(false, args, cmtTbl),\n            tl: {\n              hd: gadtDoc,\n              tl: /* [] */0\n            }\n          });\n    }\n    \n  } else {\n    kind = Res_doc.indent(Res_doc.concat({\n              hd: Res_doc.text(\" =\"),\n              tl: {\n                hd: Res_doc.line,\n                tl: {\n                  hd: printLongidentLocation(longident._0, cmtTbl),\n                  tl: /* [] */0\n                }\n              }\n            }));\n  }\n  var name = printComments(Res_doc.text(constr.pext_name.txt), cmtTbl, constr.pext_name.loc);\n  var doc = Res_doc.group(Res_doc.concat({\n            hd: printAttributes(undefined, undefined, constr.pext_attributes, cmtTbl),\n            tl: {\n              hd: Res_doc.text(\"exception \"),\n              tl: {\n                hd: name,\n                tl: {\n                  hd: kind,\n                  tl: /* [] */0\n                }\n              }\n            }\n          }));\n  return printComments(doc, cmtTbl, constr.pext_loc);\n}\n\nfunction printExtensionConstructor(constr, cmtTbl, i) {\n  var attrs = printAttributes(undefined, undefined, constr.pext_attributes, cmtTbl);\n  var bar = i > 0 ? Res_doc.text(\"| \") : Res_doc.ifBreaks(Res_doc.text(\"| \"), Res_doc.nil);\n  var longident = constr.pext_kind;\n  var kind;\n  if (longident.TAG === /* Pext_decl */0) {\n    var args = longident._0;\n    var exit = 0;\n    if (args.TAG === /* Pcstr_tuple */0 && !(args._0 || longident._1 !== undefined)) {\n      kind = Res_doc.nil;\n    } else {\n      exit = 1;\n    }\n    if (exit === 1) {\n      var gadt = longident._1;\n      var gadtDoc = gadt !== undefined ? 
Res_doc.concat({\n              hd: Res_doc.text(\": \"),\n              tl: {\n                hd: printTypExpr(gadt, cmtTbl),\n                tl: /* [] */0\n              }\n            }) : Res_doc.nil;\n      kind = Res_doc.concat({\n            hd: printConstructorArguments(false, args, cmtTbl),\n            tl: {\n              hd: gadtDoc,\n              tl: /* [] */0\n            }\n          });\n    }\n    \n  } else {\n    kind = Res_doc.indent(Res_doc.concat({\n              hd: Res_doc.text(\" =\"),\n              tl: {\n                hd: Res_doc.line,\n                tl: {\n                  hd: printLongidentLocation(longident._0, cmtTbl),\n                  tl: /* [] */0\n                }\n              }\n            }));\n  }\n  var name = printComments(Res_doc.text(constr.pext_name.txt), cmtTbl, constr.pext_name.loc);\n  return Res_doc.concat({\n              hd: bar,\n              tl: {\n                hd: Res_doc.group(Res_doc.concat({\n                          hd: attrs,\n                          tl: {\n                            hd: name,\n                            tl: {\n                              hd: kind,\n                              tl: /* [] */0\n                            }\n                          }\n                        })),\n                tl: /* [] */0\n              }\n            });\n}\n\nfunction printImplementation(width, s, comments) {\n  var cmtTbl = Res_comments_table.make(undefined);\n  Res_comments_table.walkStructure(s, cmtTbl, comments);\n  var doc = printStructure(s, cmtTbl);\n  return Res_doc.toString(width, doc) + \"\\n\";\n}\n\nfunction printInterface(width, s, comments) {\n  var cmtTbl = Res_comments_table.make(undefined);\n  Res_comments_table.walkSignature(s, cmtTbl, comments);\n  return Res_doc.toString(width, printSignature(s, cmtTbl)) + \"\\n\";\n}\n\nvar Doc;\n\nvar CommentTable;\n\nvar $$Comment;\n\nvar Token;\n\nvar Parens;\n\nvar ParsetreeViewer;\n\nexport {\n  Doc ,\n  CommentTable 
,\n  $$Comment ,\n  Token ,\n  Parens ,\n  ParsetreeViewer ,\n  convertBsExternalAttribute ,\n  convertBsExtension ,\n  addParens ,\n  addBraces ,\n  getFirstLeadingComment ,\n  hasLeadingLineComment ,\n  hasCommentBelow ,\n  printMultilineCommentContent ,\n  printTrailingComment ,\n  printLeadingComment ,\n  printCommentsInside ,\n  printLeadingComments ,\n  printTrailingComments ,\n  printComments ,\n  printList ,\n  printListi ,\n  printLongidentAux ,\n  printLongident ,\n  classifyIdentContent ,\n  printIdentLike ,\n  unsafe_for_all_range ,\n  for_all_from ,\n  isValidNumericPolyvarNumber ,\n  printPolyVarIdent ,\n  printLident ,\n  printLongidentLocation ,\n  printLidentPath ,\n  printIdentPath ,\n  printStringLoc ,\n  printStringContents ,\n  printConstant ,\n  printStructure ,\n  printStructureItem ,\n  printTypeExtension ,\n  printModuleBinding ,\n  printModuleTypeDeclaration ,\n  printModType ,\n  printWithConstraints ,\n  printWithConstraint ,\n  printSignature ,\n  printSignatureItem ,\n  printRecModuleDeclarations ,\n  printRecModuleDeclaration ,\n  printModuleDeclaration ,\n  printOpenDescription ,\n  printIncludeDescription ,\n  printIncludeDeclaration ,\n  printValueBindings ,\n  printValueDescription ,\n  printTypeDeclarations ,\n  printTypeDeclaration ,\n  printTypeDeclaration2 ,\n  printTypeDefinitionConstraints ,\n  printTypeDefinitionConstraint ,\n  printPrivateFlag ,\n  printTypeParams ,\n  printTypeParam ,\n  printRecordDeclaration ,\n  printConstructorDeclarations ,\n  printConstructorDeclaration2 ,\n  printConstructorArguments ,\n  printLabelDeclaration ,\n  printTypExpr ,\n  printObject ,\n  printTupleType ,\n  printObjectField ,\n  printTypeParameter ,\n  printValueBinding ,\n  printPackageType ,\n  printPackageConstraints ,\n  printPackageConstraint ,\n  printExtension ,\n  printPattern ,\n  printPatternRecordRow ,\n  printExpressionWithComments ,\n  printIfChain ,\n  printExpression ,\n  printPexpFun ,\n  printTernaryOperand ,\n  
printSetFieldExpr ,\n  printTemplateLiteral ,\n  printUnaryExpression ,\n  printBinaryExpression ,\n  printPexpApply ,\n  printJsxExpression ,\n  printJsxFragment ,\n  printJsxChildren ,\n  printJsxProps ,\n  printJsxProp ,\n  printJsxName ,\n  printArgumentsWithCallbackInFirstPosition ,\n  printArgumentsWithCallbackInLastPosition ,\n  printArguments ,\n  printArgument ,\n  printCases ,\n  printCase ,\n  printExprFunParameters ,\n  printExpFunParameter ,\n  printExpressionBlock ,\n  printBraces ,\n  printOverrideFlag ,\n  printDirectionFlag ,\n  printRecordRow ,\n  printBsObjectRow ,\n  printAttributes ,\n  printPayload ,\n  printAttribute ,\n  printModExpr ,\n  printModFunctor ,\n  printModFunctorParam ,\n  printModApplyArg ,\n  printExceptionDef ,\n  printExtensionConstructor ,\n  printImplementation ,\n  printInterface ,\n  \n}\n/* Location Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_printer.res",
    "content": "module Doc = Res_doc\nmodule CommentTable = Res_comments_table\nmodule Comment = Res_comment\nmodule Token = Res_token\nmodule Parens = Res_parens\nmodule ParsetreeViewer = Res_parsetree_viewer\n\ntype callbackStyle =\n  /* regular arrow function, example: `let f = x => x + 1` */\n  | NoCallback\n  /* `Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument))` */\n  | FitsOnOneLine\n  /* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) =>\n   *   MyModuleBlah.toList(argument)\n   * )\n   */\n  | ArgumentsFitOnOneLine\n\n/* Since compiler version 8.3, the bs. prefix is no longer needed */\n/* Synced from\n https://github.com/rescript-lang/rescript-compiler/blob/29174de1a5fde3b16cf05d10f5ac109cfac5c4ca/jscomp/frontend/ast_external_process.ml#L291-L367 */\nlet convertBsExternalAttribute = x =>\n  switch x {\n  | \"bs.as\" => \"as\"\n  | \"bs.deriving\" => \"deriving\"\n  | \"bs.get\" => \"get\"\n  | \"bs.get_index\" => \"get_index\"\n  | \"bs.ignore\" => \"ignore\"\n  | \"bs.inline\" => \"inline\"\n  | \"bs.int\" => \"int\"\n  | \"bs.meth\" => \"meth\"\n  | \"bs.module\" => \"module\"\n  | \"bs.new\" => \"new\"\n  | \"bs.obj\" => \"obj\"\n  | \"bs.optional\" => \"optional\"\n  | \"bs.return\" => \"return\"\n  | \"bs.send\" => \"send\"\n  | \"bs.scope\" => \"scope\"\n  | \"bs.set\" => \"set\"\n  | \"bs.set_index\" => \"set_index\"\n  | \"bs.splice\" | \"bs.variadic\" => \"variadic\"\n  | \"bs.string\" => \"string\"\n  | \"bs.this\" => \"this\"\n  | \"bs.uncurry\" => \"uncurry\"\n  | \"bs.unwrap\" => \"unwrap\"\n  | \"bs.val\" => \"val\"\n  /* bs.send.pipe shouldn't be transformed */\n  | txt => txt\n  }\n\n/* These haven't been needed for a long time now */\n/* Synced from\n https://github.com/rescript-lang/rescript-compiler/blob/29174de1a5fde3b16cf05d10f5ac109cfac5c4ca/jscomp/frontend/ast_exp_extension.ml */\nlet convertBsExtension = x =>\n  switch x {\n  | \"bs.debugger\" => \"debugger\"\n  | \"bs.external\" => \"raw\"\n  /* We should 
never see this one since we use the sugared object form, but still */\n  | \"bs.obj\" => \"obj\"\n  | \"bs.raw\" => \"raw\"\n  | \"bs.re\" => \"re\"\n  /* TODO: what about bs.time and bs.node? */\n  | txt => txt\n  }\n\nlet addParens = doc =>\n  Doc.group(\n    Doc.concat(list{\n      Doc.lparen,\n      Doc.indent(Doc.concat(list{Doc.softLine, doc})),\n      Doc.softLine,\n      Doc.rparen,\n    }),\n  )\n\nlet addBraces = doc =>\n  Doc.group(\n    Doc.concat(list{\n      Doc.lbrace,\n      Doc.indent(Doc.concat(list{Doc.softLine, doc})),\n      Doc.softLine,\n      Doc.rbrace,\n    }),\n  )\n\nlet getFirstLeadingComment = (tbl, loc) =>\n  switch Hashtbl.find(tbl.CommentTable.leading, loc) {\n  | list{comment, ..._} => Some(comment)\n  | list{} => None\n  | exception Not_found => None\n  }\n\n/* Checks if `loc` has a leading line comment, i.e. `// comment above` */\nlet hasLeadingLineComment = (tbl, loc) =>\n  switch getFirstLeadingComment(tbl, loc) {\n  | Some(comment) => Comment.isSingleLineComment(comment)\n  | None => false\n  }\n\nlet hasCommentBelow = (tbl, loc) =>\n  switch Hashtbl.find(tbl.CommentTable.trailing, loc) {\n  | list{comment, ..._} =>\n    let commentLoc = Comment.loc(comment)\n    commentLoc.Location.loc_start.pos_lnum > loc.Location.loc_end.pos_lnum\n  | list{} => false\n  | exception Not_found => false\n  }\n\nlet printMultilineCommentContent = txt => {\n  /* Turns\n   *         |* first line\n   *  * second line\n   *      * third line *|\n   * Into\n   * |* first line\n   *  * second line\n   *  * third line *|\n   *\n   * What makes a comment suitable for this kind of indentation?\n   *  ->  multiple lines + every line starts with a star\n   */\n  let rec indentStars = (lines, acc) =>\n    switch lines {\n    | list{} => Doc.nil\n    | list{lastLine} =>\n      let line = String.trim(lastLine)\n      let doc = Doc.text(\" \" ++ line)\n      let trailingSpace = if line == \"\" {\n        Doc.nil\n      } else {\n        Doc.space\n      }\n  
    List.rev(list{trailingSpace, doc, ...acc}) |> Doc.concat\n    | list{line, ...lines} =>\n      let line = String.trim(line)\n      if line !== \"\" && String.unsafe_get(line, 0) === '*' {\n        let doc = Doc.text(\" \" ++ line)\n        indentStars(lines, list{Doc.hardLine, doc, ...acc})\n      } else {\n        let trailingSpace = {\n          let len = String.length(txt)\n          if len > 0 && String.unsafe_get(txt, len - 1) == ' ' {\n            Doc.space\n          } else {\n            Doc.nil\n          }\n        }\n\n        let content = Comment.trimSpaces(txt)\n        Doc.concat(list{Doc.text(content), trailingSpace})\n      }\n    }\n\n  let lines = String.split_on_char('\\n', txt)\n  switch lines {\n  | list{} => Doc.text(\"/* */\")\n  | list{line} =>\n    Doc.concat(list{Doc.text(\"/* \"), Doc.text(Comment.trimSpaces(line)), Doc.text(\" */\")})\n  | list{first, ...rest} =>\n    let firstLine = Comment.trimSpaces(first)\n    Doc.concat(list{\n      Doc.text(\"/*\"),\n      switch firstLine {\n      | \"\" | \"*\" => Doc.nil\n      | _ => Doc.space\n      },\n      indentStars(rest, list{Doc.hardLine, Doc.text(firstLine)}),\n      Doc.text(\"*/\"),\n    })\n  }\n}\n\nlet printTrailingComment = (prevLoc: Location.t, nodeLoc: Location.t, comment) => {\n  let singleLine = Comment.isSingleLineComment(comment)\n  let content = {\n    let txt = Comment.txt(comment)\n    if singleLine {\n      Doc.text(\"//\" ++ txt)\n    } else {\n      printMultilineCommentContent(txt)\n    }\n  }\n\n  let diff = {\n    let cmtStart = Comment.loc(comment).loc_start\n    cmtStart.pos_lnum - prevLoc.loc_end.pos_lnum\n  }\n\n  let isBelow = Comment.loc(comment).loc_start.pos_lnum > nodeLoc.loc_end.pos_lnum\n  if diff > 0 || isBelow {\n    Doc.concat(list{\n      Doc.breakParent,\n      Doc.lineSuffix(\n        Doc.concat(list{\n          Doc.hardLine,\n          if diff > 1 {\n            Doc.hardLine\n          } else {\n            Doc.nil\n          },\n          
content,\n        }),\n      ),\n    })\n  } else if !singleLine {\n    Doc.concat(list{Doc.space, content})\n  } else {\n    Doc.lineSuffix(Doc.concat(list{Doc.space, content}))\n  }\n}\n\nlet printLeadingComment = (~nextComment=?, comment) => {\n  let singleLine = Comment.isSingleLineComment(comment)\n  let content = {\n    let txt = Comment.txt(comment)\n    if singleLine {\n      Doc.text(\"//\" ++ txt)\n    } else {\n      printMultilineCommentContent(txt)\n    }\n  }\n\n  let separator = Doc.concat(list{\n    if singleLine {\n      Doc.concat(list{Doc.hardLine, Doc.breakParent})\n    } else {\n      Doc.nil\n    },\n    switch nextComment {\n    | Some(next) =>\n      let nextLoc = Comment.loc(next)\n      let currLoc = Comment.loc(comment)\n      let diff = nextLoc.Location.loc_start.pos_lnum - currLoc.Location.loc_end.pos_lnum\n\n      let nextSingleLine = Comment.isSingleLineComment(next)\n      if singleLine && nextSingleLine {\n        if diff > 1 {\n          Doc.hardLine\n        } else {\n          Doc.nil\n        }\n      } else if singleLine && !nextSingleLine {\n        if diff > 1 {\n          Doc.hardLine\n        } else {\n          Doc.nil\n        }\n      } else if diff > 1 {\n        Doc.concat(list{Doc.hardLine, Doc.hardLine})\n      } else if diff === 1 {\n        Doc.hardLine\n      } else {\n        Doc.space\n      }\n    | None => Doc.nil\n    },\n  })\n\n  Doc.concat(list{content, separator})\n}\n\nlet printCommentsInside = (cmtTbl, loc) => {\n  let rec loop = (acc, comments) =>\n    switch comments {\n    | list{} => Doc.nil\n    | list{comment} =>\n      let cmtDoc = printLeadingComment(comment)\n      let doc = Doc.group(Doc.concat(list{Doc.concat(List.rev(list{cmtDoc, ...acc}))}))\n\n      doc\n    | list{comment, ...list{nextComment, ..._comments} as rest} =>\n      let cmtDoc = printLeadingComment(~nextComment, comment)\n      loop(list{cmtDoc, ...acc}, rest)\n    }\n\n  switch Hashtbl.find(cmtTbl.CommentTable.inside, loc) {\n  
| exception Not_found => Doc.nil\n  | comments =>\n    Hashtbl.remove(cmtTbl.inside, loc)\n    Doc.group(loop(list{}, comments))\n  }\n}\n\nlet printLeadingComments = (node, tbl, loc) => {\n  let rec loop = (acc, comments) =>\n    switch comments {\n    | list{} => node\n    | list{comment} =>\n      let cmtDoc = printLeadingComment(comment)\n      let diff = loc.Location.loc_start.pos_lnum - Comment.loc(comment).Location.loc_end.pos_lnum\n\n      let separator = if Comment.isSingleLineComment(comment) {\n        if diff > 1 {\n          Doc.hardLine\n        } else {\n          Doc.nil\n        }\n      } else if diff === 0 {\n        Doc.space\n      } else if diff > 1 {\n        Doc.concat(list{Doc.hardLine, Doc.hardLine})\n      } else {\n        Doc.hardLine\n      }\n\n      let doc = Doc.group(\n        Doc.concat(list{Doc.concat(List.rev(list{cmtDoc, ...acc})), separator, node}),\n      )\n\n      doc\n    | list{comment, ...list{nextComment, ..._comments} as rest} =>\n      let cmtDoc = printLeadingComment(~nextComment, comment)\n      loop(list{cmtDoc, ...acc}, rest)\n    }\n\n  switch Hashtbl.find(tbl, loc) {\n  | exception Not_found => node\n  | comments =>\n    /* Remove comments from tbl: Some ast nodes have the same location.\n     * We only want to print comments once */\n    Hashtbl.remove(tbl, loc)\n    loop(list{}, comments)\n  }\n}\n\nlet printTrailingComments = (node, tbl, loc) => {\n  let rec loop = (prev, acc, comments) =>\n    switch comments {\n    | list{} => Doc.concat(List.rev(acc))\n    | list{comment, ...comments} =>\n      let cmtDoc = printTrailingComment(prev, loc, comment)\n      loop(Comment.loc(comment), list{cmtDoc, ...acc}, comments)\n    }\n\n  switch Hashtbl.find(tbl, loc) {\n  | exception Not_found => node\n  | list{} => node\n  | list{_first, ..._} as comments =>\n    /* Remove comments from tbl: Some ast nodes have the same location.\n     * We only want to print comments once */\n    Hashtbl.remove(tbl, loc)\n    let 
cmtsDoc = loop(loc, list{}, comments)\n    Doc.concat(list{node, cmtsDoc})\n  }\n}\n\nlet printComments = (doc, tbl: CommentTable.t, loc) => {\n  let docWithLeadingComments = printLeadingComments(doc, tbl.leading, loc)\n  printTrailingComments(docWithLeadingComments, tbl.trailing, loc)\n}\n\nlet printList = (~getLoc, ~nodes, ~print, ~forceBreak=false, t) => {\n  let rec loop = (prevLoc: Location.t, acc, nodes) =>\n    switch nodes {\n    | list{} => (prevLoc, Doc.concat(List.rev(acc)))\n    | list{node, ...nodes} =>\n      let loc = getLoc(node)\n      let startPos = switch getFirstLeadingComment(t, loc) {\n      | None => loc.loc_start\n      | Some(comment) => Comment.loc(comment).loc_start\n      }\n\n      let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 {\n        Doc.concat(list{Doc.hardLine, Doc.hardLine})\n      } else {\n        Doc.hardLine\n      }\n\n      let doc = printComments(print(node, t), t, loc)\n      loop(loc, list{doc, sep, ...acc}, nodes)\n    }\n\n  switch nodes {\n  | list{} => Doc.nil\n  | list{node, ...nodes} =>\n    let firstLoc = getLoc(node)\n    let doc = printComments(print(node, t), t, firstLoc)\n    let (lastLoc, docs) = loop(firstLoc, list{doc}, nodes)\n    let forceBreak = forceBreak || firstLoc.loc_start.pos_lnum !== lastLoc.loc_end.pos_lnum\n\n    Doc.breakableGroup(~forceBreak, docs)\n  }\n}\n\nlet printListi = (~getLoc, ~nodes, ~print, ~forceBreak=false, t) => {\n  let rec loop = (i, prevLoc: Location.t, acc, nodes) =>\n    switch nodes {\n    | list{} => (prevLoc, Doc.concat(List.rev(acc)))\n    | list{node, ...nodes} =>\n      let loc = getLoc(node)\n      let startPos = switch getFirstLeadingComment(t, loc) {\n      | None => loc.loc_start\n      | Some(comment) => Comment.loc(comment).loc_start\n      }\n\n      let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 {\n        Doc.concat(list{Doc.hardLine, Doc.hardLine})\n      } else {\n        Doc.line\n      }\n\n      let doc = 
printComments(print(node, t, i), t, loc)\n      loop(i + 1, loc, list{doc, sep, ...acc}, nodes)\n    }\n\n  switch nodes {\n  | list{} => Doc.nil\n  | list{node, ...nodes} =>\n    let firstLoc = getLoc(node)\n    let doc = printComments(print(node, t, 0), t, firstLoc)\n    let (lastLoc, docs) = loop(1, firstLoc, list{doc}, nodes)\n    let forceBreak = forceBreak || firstLoc.loc_start.pos_lnum !== lastLoc.loc_end.pos_lnum\n\n    Doc.breakableGroup(~forceBreak, docs)\n  }\n}\n\nlet rec printLongidentAux = (accu, x) =>\n  switch x {\n  | Longident.Lident(s) => list{Doc.text(s), ...accu}\n  | Ldot(lid, s) => printLongidentAux(list{Doc.text(s), ...accu}, lid)\n  | Lapply(lid1, lid2) =>\n    let d1 = Doc.join(~sep=Doc.dot, printLongidentAux(list{}, lid1))\n    let d2 = Doc.join(~sep=Doc.dot, printLongidentAux(list{}, lid2))\n    list{Doc.concat(list{d1, Doc.lparen, d2, Doc.rparen}), ...accu}\n  }\n\nlet printLongident = x =>\n  switch x {\n  | Longident.Lident(txt) => Doc.text(txt)\n  | lid => Doc.join(~sep=Doc.dot, printLongidentAux(list{}, lid))\n  }\n\ntype identifierStyle =\n  | ExoticIdent\n  | NormalIdent\n\nlet classifyIdentContent = (~allowUident=false, txt) =>\n  if Token.isKeywordTxt(txt) {\n    ExoticIdent\n  } else {\n    let len = String.length(txt)\n    let rec loop = i =>\n      if i === len {\n        NormalIdent\n      } else if i === 0 {\n        switch String.unsafe_get(txt, i) {\n        | 'A' .. 'Z' if allowUident => loop(i + 1)\n        | 'a' .. 'z' | '_' => loop(i + 1)\n        | _ => ExoticIdent\n        }\n      } else {\n        switch String.unsafe_get(txt, i) {\n        | 'A' .. 'Z' | 'a' .. 'z' | '0' .. 
'9' | '\\'' | '_' => loop(i + 1)\n        | _ => ExoticIdent\n        }\n      }\n\n    loop(0)\n  }\n\nlet printIdentLike = (~allowUident=?, txt) =>\n  switch classifyIdentContent(~allowUident?, txt) {\n  | ExoticIdent => Doc.concat(list{Doc.text(\"\\\\\\\"\"), Doc.text(txt), Doc.text(\"\\\"\")})\n  | NormalIdent => Doc.text(txt)\n  }\n\nlet rec unsafe_for_all_range = (s, ~start, ~finish, p) =>\n  start > finish ||\n    (p(String.unsafe_get(s, start)) && unsafe_for_all_range(s, ~start=start + 1, ~finish, p))\n\nlet for_all_from = (s, start, p) => {\n  let len = String.length(s)\n  unsafe_for_all_range(s, ~start, ~finish=len - 1, p)\n}\n\n/* See https://github.com/rescript-lang/rescript-compiler/blob/726cfa534314b586e5b5734471bc2023ad99ebd9/jscomp/ext/ext_string.ml#L510 */\nlet isValidNumericPolyvarNumber = (x: string) => {\n  let len = String.length(x)\n  len > 0 && {\n      let a = Char.code(String.unsafe_get(x, 0))\n      a <= 57 && if len > 1 {\n          a > 48 &&\n            for_all_from(x, 1, x =>\n              switch x {\n              | '0' .. 
'9' => true\n              | _ => false\n              }\n            )\n        } else {\n          a >= 48\n        }\n    }\n}\n\n/* Exotic identifiers in poly-vars have a \"lighter\" syntax: #\"ease-in\" */\nlet printPolyVarIdent = txt =>\n  /* numeric poly-vars don't need quotes: #644 */\n  if isValidNumericPolyvarNumber(txt) {\n    Doc.text(txt)\n  } else {\n    switch classifyIdentContent(~allowUident=true, txt) {\n    | ExoticIdent => Doc.concat(list{Doc.text(\"\\\"\"), Doc.text(txt), Doc.text(\"\\\"\")})\n    | NormalIdent =>\n      switch txt {\n      | \"\" => Doc.concat(list{Doc.text(\"\\\"\"), Doc.text(txt), Doc.text(\"\\\"\")})\n      | _ => Doc.text(txt)\n      }\n    }\n  }\n\nlet printLident = l => {\n  let flatLidOpt = lid => {\n    let rec flat = (accu, x) =>\n      switch x {\n      | Longident.Lident(s) => Some(list{s, ...accu})\n      | Ldot(lid, s) => flat(list{s, ...accu}, lid)\n      | Lapply(_, _) => None\n      }\n\n    flat(list{}, lid)\n  }\n\n  switch l {\n  | Longident.Lident(txt) => printIdentLike(txt)\n  | Longident.Ldot(path, txt) =>\n    let doc = switch flatLidOpt(path) {\n    | Some(txts) =>\n      Doc.concat(list{\n        Doc.join(~sep=Doc.dot, List.map(Doc.text, txts)),\n        Doc.dot,\n        printIdentLike(txt),\n      })\n    | None => Doc.text(\"printLident: Longident.Lapply is not supported\")\n    }\n\n    doc\n  | Lapply(_, _) => Doc.text(\"printLident: Longident.Lapply is not supported\")\n  }\n}\n\nlet printLongidentLocation = (l, cmtTbl) => {\n  let doc = printLongident(l.Location.txt)\n  printComments(doc, cmtTbl, l.loc)\n}\n\n/* Module.SubModule.x */\nlet printLidentPath = (path, cmtTbl) => {\n  let doc = printLident(path.Location.txt)\n  printComments(doc, cmtTbl, path.loc)\n}\n\n/* Module.SubModule.x or Module.SubModule.X */\nlet printIdentPath = (path, cmtTbl) => {\n  let doc = printLident(path.Location.txt)\n  printComments(doc, cmtTbl, path.loc)\n}\n\nlet printStringLoc = (sloc, cmtTbl) => {\n  let doc = 
printIdentLike(sloc.Location.txt)\n  printComments(doc, cmtTbl, sloc.loc)\n}\n\nlet printStringContents = txt => {\n  let lines = String.split_on_char('\\n', txt)\n  Doc.join(~sep=Doc.literalLine, List.map(Doc.text, lines))\n}\n\nlet printConstant = (~templateLiteral=false, c) =>\n  switch c {\n  | Parsetree.Pconst_integer(s, suffix) =>\n    switch suffix {\n    | Some(c) => Doc.text(s ++ Char.escaped(c))\n    | None => Doc.text(s)\n    }\n  | Pconst_string(txt, None) =>\n    Doc.concat(list{Doc.text(\"\\\"\"), printStringContents(txt), Doc.text(\"\\\"\")})\n  | Pconst_string(txt, Some(prefix)) =>\n    if prefix == \"INTERNAL_RES_CHAR_CONTENTS\" {\n      Doc.concat(list{Doc.text(\"'\"), Doc.text(txt), Doc.text(\"'\")})\n    } else {\n      let (lquote, rquote) = if templateLiteral {\n        (\"`\", \"`\")\n      } else {\n        (\"\\\"\", \"\\\"\")\n      }\n\n      Doc.concat(list{\n        if prefix == \"js\" {\n          Doc.nil\n        } else {\n          Doc.text(prefix)\n        },\n        Doc.text(lquote),\n        printStringContents(txt),\n        Doc.text(rquote),\n      })\n    }\n  | Pconst_float(s, _) => Doc.text(s)\n  | Pconst_char(c) =>\n    let str = switch c {\n    | '\\'' => \"\\\\'\"\n    | '\\\\' => \"\\\\\\\\\"\n    | '\\n' => \"\\\\n\"\n    | '\\t' => \"\\\\t\"\n    | '\\r' => \"\\\\r\"\n    | '\\b' => \"\\\\b\"\n    | ' ' .. 
'~' as c =>\n      let s = (@doesNotRaise Bytes.create)(1)\n      Bytes.unsafe_set(s, 0, c)\n      Bytes.unsafe_to_string(s)\n    | c => Res_utf8.encodeCodePoint(Obj.magic(c))\n    }\n\n    Doc.text(\"'\" ++ (str ++ \"'\"))\n  }\n\nlet rec printStructure = (s: Parsetree.structure, t) =>\n  switch s {\n  | list{} => printCommentsInside(t, Location.none)\n  | structure =>\n    printList(~getLoc=s => s.Parsetree.pstr_loc, ~nodes=structure, ~print=printStructureItem, t)\n  }\n\nand printStructureItem = (si: Parsetree.structure_item, cmtTbl) =>\n  switch si.pstr_desc {\n  | Pstr_value(rec_flag, valueBindings) =>\n    let recFlag = switch rec_flag {\n    | Asttypes.Nonrecursive => Doc.nil\n    | Asttypes.Recursive => Doc.text(\"rec \")\n    }\n\n    printValueBindings(~recFlag, valueBindings, cmtTbl)\n  | Pstr_type(recFlag, typeDeclarations) =>\n    let recFlag = switch recFlag {\n    | Asttypes.Nonrecursive => Doc.nil\n    | Asttypes.Recursive => Doc.text(\"rec \")\n    }\n\n    printTypeDeclarations(~recFlag, typeDeclarations, cmtTbl)\n  | Pstr_primitive(valueDescription) => printValueDescription(valueDescription, cmtTbl)\n  | Pstr_eval(expr, attrs) =>\n    let exprDoc = {\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.structureExpr(expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.concat(list{printAttributes(attrs, cmtTbl), exprDoc})\n  | Pstr_attribute(attr) => Doc.concat(list{Doc.text(\"@\"), printAttribute(attr, cmtTbl)})\n  | Pstr_extension(extension, attrs) =>\n    Doc.concat(list{\n      printAttributes(attrs, cmtTbl),\n      Doc.concat(list{printExtension(~atModuleLvl=true, extension, cmtTbl)}),\n    })\n  | Pstr_include(includeDeclaration) => printIncludeDeclaration(includeDeclaration, cmtTbl)\n  | Pstr_open(openDescription) => printOpenDescription(openDescription, cmtTbl)\n  | Pstr_modtype(modTypeDecl) => 
printModuleTypeDeclaration(modTypeDecl, cmtTbl)\n  | Pstr_module(moduleBinding) => printModuleBinding(~isRec=false, moduleBinding, cmtTbl, 0)\n  | Pstr_recmodule(moduleBindings) =>\n    printListi(\n      ~getLoc=mb => mb.Parsetree.pmb_loc,\n      ~nodes=moduleBindings,\n      ~print=printModuleBinding(~isRec=true),\n      cmtTbl,\n    )\n  | Pstr_exception(extensionConstructor) => printExceptionDef(extensionConstructor, cmtTbl)\n  | Pstr_typext(typeExtension) => printTypeExtension(typeExtension, cmtTbl)\n  | Pstr_class(_) | Pstr_class_type(_) => Doc.nil\n  }\n\nand printTypeExtension = (te: Parsetree.type_extension, cmtTbl) => {\n  let prefix = Doc.text(\"type \")\n  let name = printLidentPath(te.ptyext_path, cmtTbl)\n  let typeParams = printTypeParams(te.ptyext_params, cmtTbl)\n  let extensionConstructors = {\n    let ecs = te.ptyext_constructors\n    let forceBreak = switch (ecs, List.rev(ecs)) {\n    | (list{first, ..._}, list{last, ..._}) =>\n      first.pext_loc.loc_start.pos_lnum > te.ptyext_path.loc.loc_end.pos_lnum ||\n        first.pext_loc.loc_start.pos_lnum < last.pext_loc.loc_end.pos_lnum\n    | _ => false\n    }\n\n    let privateFlag = switch te.ptyext_private {\n    | Asttypes.Private => Doc.concat(list{Doc.text(\"private\"), Doc.line})\n    | Public => Doc.nil\n    }\n\n    let rows = printListi(\n      ~getLoc=n => n.Parsetree.pext_loc,\n      ~print=printExtensionConstructor,\n      ~nodes=ecs,\n      ~forceBreak,\n      cmtTbl,\n    )\n\n    Doc.breakableGroup(\n      ~forceBreak,\n      Doc.indent(\n        Doc.concat(list{\n          Doc.line,\n          privateFlag,\n          rows,\n          /* Doc.join ~sep:Doc.line ( */\n          /* List.mapi printExtensionConstructor ecs */\n          /* ) */\n        }),\n      ),\n    )\n  }\n\n  Doc.group(\n    Doc.concat(list{\n      printAttributes(~loc=te.ptyext_path.loc, te.ptyext_attributes, cmtTbl),\n      prefix,\n      name,\n      typeParams,\n      Doc.text(\" +=\"),\n      
extensionConstructors,\n    }),\n  )\n}\n\nand printModuleBinding = (~isRec, moduleBinding, cmtTbl, i) => {\n  let prefix = if i == 0 {\n    Doc.concat(list{\n      Doc.text(\"module \"),\n      if isRec {\n        Doc.text(\"rec \")\n      } else {\n        Doc.nil\n      },\n    })\n  } else {\n    Doc.text(\"and \")\n  }\n\n  let (modExprDoc, modConstraintDoc) = switch moduleBinding.pmb_expr {\n  | {pmod_desc: Pmod_constraint(modExpr, modType)} => (\n      printModExpr(modExpr, cmtTbl),\n      Doc.concat(list{Doc.text(\": \"), printModType(modType, cmtTbl)}),\n    )\n  | modExpr => (printModExpr(modExpr, cmtTbl), Doc.nil)\n  }\n\n  let modName = {\n    let doc = Doc.text(moduleBinding.pmb_name.Location.txt)\n    printComments(doc, cmtTbl, moduleBinding.pmb_name.loc)\n  }\n\n  let doc = Doc.concat(list{\n    printAttributes(~loc=moduleBinding.pmb_name.loc, moduleBinding.pmb_attributes, cmtTbl),\n    prefix,\n    modName,\n    modConstraintDoc,\n    Doc.text(\" = \"),\n    modExprDoc,\n  })\n  printComments(doc, cmtTbl, moduleBinding.pmb_loc)\n}\n\nand printModuleTypeDeclaration = (modTypeDecl: Parsetree.module_type_declaration, cmtTbl) => {\n  let modName = {\n    let doc = Doc.text(modTypeDecl.pmtd_name.txt)\n    printComments(doc, cmtTbl, modTypeDecl.pmtd_name.loc)\n  }\n\n  Doc.concat(list{\n    printAttributes(modTypeDecl.pmtd_attributes, cmtTbl),\n    Doc.text(\"module type \"),\n    modName,\n    switch modTypeDecl.pmtd_type {\n    | None => Doc.nil\n    | Some(modType) => Doc.concat(list{Doc.text(\" = \"), printModType(modType, cmtTbl)})\n    },\n  })\n}\n\nand printModType = (modType, cmtTbl) => {\n  let modTypeDoc = switch modType.pmty_desc {\n  | Parsetree.Pmty_ident(longident) =>\n    Doc.concat(list{\n      printAttributes(~loc=longident.loc, modType.pmty_attributes, cmtTbl),\n      printLongidentLocation(longident, cmtTbl),\n    })\n  | Pmty_signature(list{}) =>\n    let shouldBreak = modType.pmty_loc.loc_start.pos_lnum < 
modType.pmty_loc.loc_end.pos_lnum\n\n    Doc.breakableGroup(\n      ~forceBreak=shouldBreak,\n      Doc.concat(list{\n        Doc.lbrace,\n        Doc.indent(Doc.concat(list{Doc.softLine, printCommentsInside(cmtTbl, modType.pmty_loc)})),\n        Doc.softLine,\n        Doc.rbrace,\n      }),\n    )\n  | Pmty_signature(signature) =>\n    let signatureDoc = Doc.breakableGroup(\n      ~forceBreak=true,\n      Doc.concat(list{\n        Doc.lbrace,\n        Doc.indent(Doc.concat(list{Doc.line, printSignature(signature, cmtTbl)})),\n        Doc.line,\n        Doc.rbrace,\n      }),\n    )\n    Doc.concat(list{printAttributes(modType.pmty_attributes, cmtTbl), signatureDoc})\n  | Pmty_functor(_) =>\n    let (parameters, returnType) = ParsetreeViewer.functorType(modType)\n    let parametersDoc = switch parameters {\n    | list{} => Doc.nil\n    | list{(attrs, {Location.txt: \"_\", loc}, Some(modType))} =>\n      let cmtLoc = {...loc, loc_end: modType.Parsetree.pmty_loc.loc_end}\n\n      let attrs = printAttributes(attrs, cmtTbl)\n      let doc = Doc.concat(list{attrs, printModType(modType, cmtTbl)})\n      printComments(doc, cmtTbl, cmtLoc)\n    | params =>\n      Doc.group(\n        Doc.concat(list{\n          Doc.lparen,\n          Doc.indent(\n            Doc.concat(list{\n              Doc.softLine,\n              Doc.join(\n                ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n                List.map(((attrs, lbl, modType)) => {\n                  let cmtLoc = switch modType {\n                  | None => lbl.Asttypes.loc\n                  | Some(modType) => {\n                      ...lbl.Asttypes.loc,\n                      loc_end: modType.Parsetree.pmty_loc.loc_end,\n                    }\n                  }\n\n                  let attrs = printAttributes(attrs, cmtTbl)\n                  let lblDoc = if lbl.Location.txt == \"_\" || lbl.txt == \"*\" {\n                    Doc.nil\n                  } else {\n                    let doc = 
Doc.text(lbl.txt)\n                    printComments(doc, cmtTbl, lbl.loc)\n                  }\n\n                  let doc = Doc.concat(list{\n                    attrs,\n                    lblDoc,\n                    switch modType {\n                    | None => Doc.nil\n                    | Some(modType) =>\n                      Doc.concat(list{\n                        if lbl.txt == \"_\" {\n                          Doc.nil\n                        } else {\n                          Doc.text(\": \")\n                        },\n                        printModType(modType, cmtTbl),\n                      })\n                    },\n                  })\n                  printComments(doc, cmtTbl, cmtLoc)\n                }, params),\n              ),\n            }),\n          ),\n          Doc.trailingComma,\n          Doc.softLine,\n          Doc.rparen,\n        }),\n      )\n    }\n\n    let returnDoc = {\n      let doc = printModType(returnType, cmtTbl)\n      if Parens.modTypeFunctorReturn(returnType) {\n        addParens(doc)\n      } else {\n        doc\n      }\n    }\n\n    Doc.group(\n      Doc.concat(list{\n        parametersDoc,\n        Doc.group(Doc.concat(list{Doc.text(\" =>\"), Doc.line, returnDoc})),\n      }),\n    )\n  | Pmty_typeof(modExpr) =>\n    Doc.concat(list{Doc.text(\"module type of \"), printModExpr(modExpr, cmtTbl)})\n  | Pmty_extension(extension) => printExtension(~atModuleLvl=false, extension, cmtTbl)\n  | Pmty_alias(longident) =>\n    Doc.concat(list{Doc.text(\"module \"), printLongidentLocation(longident, cmtTbl)})\n  | Pmty_with(modType, withConstraints) =>\n    let operand = {\n      let doc = printModType(modType, cmtTbl)\n      if Parens.modTypeWithOperand(modType) {\n        addParens(doc)\n      } else {\n        doc\n      }\n    }\n\n    Doc.group(\n      Doc.concat(list{\n        operand,\n        Doc.indent(Doc.concat(list{Doc.line, printWithConstraints(withConstraints, cmtTbl)})),\n      }),\n    )\n  
}\n\n  let attrsAlreadyPrinted = switch modType.pmty_desc {\n  | Pmty_functor(_) | Pmty_signature(_) | Pmty_ident(_) => true\n  | _ => false\n  }\n\n  let doc = Doc.concat(list{\n    if attrsAlreadyPrinted {\n      Doc.nil\n    } else {\n      printAttributes(modType.pmty_attributes, cmtTbl)\n    },\n    modTypeDoc,\n  })\n  printComments(doc, cmtTbl, modType.pmty_loc)\n}\n\nand printWithConstraints = (withConstraints, cmtTbl) => {\n  let rows = List.mapi((i, withConstraint) =>\n    Doc.group(\n      Doc.concat(list{\n        if i === 0 {\n          Doc.text(\"with \")\n        } else {\n          Doc.text(\"and \")\n        },\n        printWithConstraint(withConstraint, cmtTbl),\n      }),\n    )\n  , withConstraints)\n\n  Doc.join(~sep=Doc.line, rows)\n}\n\nand printWithConstraint = (withConstraint: Parsetree.with_constraint, cmtTbl) =>\n  switch withConstraint {\n  /* with type X.t = ... */\n  | Pwith_type(longident, typeDeclaration) =>\n    Doc.group(\n      printTypeDeclaration(\n        ~name=printLidentPath(longident, cmtTbl),\n        ~equalSign=\"=\",\n        ~recFlag=Doc.nil,\n        0,\n        typeDeclaration,\n        CommentTable.empty,\n      ),\n    )\n  /* with module X.Y = Z */\n  | Pwith_module({txt: longident1}, {txt: longident2}) =>\n    Doc.concat(list{\n      Doc.text(\"module \"),\n      printLongident(longident1),\n      Doc.text(\" =\"),\n      Doc.indent(Doc.concat(list{Doc.line, printLongident(longident2)})),\n    })\n  /* with type X.t := ..., same format as [Pwith_type] */\n  | Pwith_typesubst(longident, typeDeclaration) =>\n    Doc.group(\n      printTypeDeclaration(\n        ~name=printLidentPath(longident, cmtTbl),\n        ~equalSign=\":=\",\n        ~recFlag=Doc.nil,\n        0,\n        typeDeclaration,\n        CommentTable.empty,\n      ),\n    )\n  | Pwith_modsubst({txt: longident1}, {txt: longident2}) =>\n    Doc.concat(list{\n      Doc.text(\"module \"),\n      printLongident(longident1),\n      Doc.text(\" :=\"),\n      
Doc.indent(Doc.concat(list{Doc.line, printLongident(longident2)})),\n    })\n  }\n\nand printSignature = (signature, cmtTbl) =>\n  switch signature {\n  | list{} => printCommentsInside(cmtTbl, Location.none)\n  | signature =>\n    printList(\n      ~getLoc=s => s.Parsetree.psig_loc,\n      ~nodes=signature,\n      ~print=printSignatureItem,\n      cmtTbl,\n    )\n  }\n\nand printSignatureItem = (si: Parsetree.signature_item, cmtTbl) =>\n  switch si.psig_desc {\n  | Parsetree.Psig_value(valueDescription) => printValueDescription(valueDescription, cmtTbl)\n  | Psig_type(recFlag, typeDeclarations) =>\n    let recFlag = switch recFlag {\n    | Asttypes.Nonrecursive => Doc.nil\n    | Asttypes.Recursive => Doc.text(\"rec \")\n    }\n\n    printTypeDeclarations(~recFlag, typeDeclarations, cmtTbl)\n  | Psig_typext(typeExtension) => printTypeExtension(typeExtension, cmtTbl)\n  | Psig_exception(extensionConstructor) => printExceptionDef(extensionConstructor, cmtTbl)\n  | Psig_module(moduleDeclaration) => printModuleDeclaration(moduleDeclaration, cmtTbl)\n  | Psig_recmodule(moduleDeclarations) => printRecModuleDeclarations(moduleDeclarations, cmtTbl)\n  | Psig_modtype(modTypeDecl) => printModuleTypeDeclaration(modTypeDecl, cmtTbl)\n  | Psig_open(openDescription) => printOpenDescription(openDescription, cmtTbl)\n  | Psig_include(includeDescription) => printIncludeDescription(includeDescription, cmtTbl)\n  | Psig_attribute(attr) => Doc.concat(list{Doc.text(\"@\"), printAttribute(attr, cmtTbl)})\n  | Psig_extension(extension, attrs) =>\n    Doc.concat(list{\n      printAttributes(attrs, cmtTbl),\n      Doc.concat(list{printExtension(~atModuleLvl=true, extension, cmtTbl)}),\n    })\n  | Psig_class(_) | Psig_class_type(_) => Doc.nil\n  }\n\nand printRecModuleDeclarations = (moduleDeclarations, cmtTbl) =>\n  printListi(\n    ~getLoc=n => n.Parsetree.pmd_loc,\n    ~nodes=moduleDeclarations,\n    ~print=printRecModuleDeclaration,\n    cmtTbl,\n  )\n\nand printRecModuleDeclaration = 
(md, cmtTbl, i) => {\n  let body = switch md.pmd_type.pmty_desc {\n  | Parsetree.Pmty_alias(longident) =>\n    Doc.concat(list{Doc.text(\" = \"), printLongidentLocation(longident, cmtTbl)})\n  | _ =>\n    let needsParens = switch md.pmd_type.pmty_desc {\n    | Pmty_with(_) => true\n    | _ => false\n    }\n\n    let modTypeDoc = {\n      let doc = printModType(md.pmd_type, cmtTbl)\n      if needsParens {\n        addParens(doc)\n      } else {\n        doc\n      }\n    }\n\n    Doc.concat(list{Doc.text(\": \"), modTypeDoc})\n  }\n\n  let prefix = if i < 1 {\n    \"module rec \"\n  } else {\n    \"and \"\n  }\n  Doc.concat(list{\n    printAttributes(~loc=md.pmd_name.loc, md.pmd_attributes, cmtTbl),\n    Doc.text(prefix),\n    printComments(Doc.text(md.pmd_name.txt), cmtTbl, md.pmd_name.loc),\n    body,\n  })\n}\n\nand printModuleDeclaration = (md: Parsetree.module_declaration, cmtTbl) => {\n  let body = switch md.pmd_type.pmty_desc {\n  | Parsetree.Pmty_alias(longident) =>\n    Doc.concat(list{Doc.text(\" = \"), printLongidentLocation(longident, cmtTbl)})\n  | _ => Doc.concat(list{Doc.text(\": \"), printModType(md.pmd_type, cmtTbl)})\n  }\n\n  Doc.concat(list{\n    printAttributes(~loc=md.pmd_name.loc, md.pmd_attributes, cmtTbl),\n    Doc.text(\"module \"),\n    printComments(Doc.text(md.pmd_name.txt), cmtTbl, md.pmd_name.loc),\n    body,\n  })\n}\n\nand printOpenDescription = (openDescription: Parsetree.open_description, cmtTbl) =>\n  Doc.concat(list{\n    printAttributes(openDescription.popen_attributes, cmtTbl),\n    Doc.text(\"open\"),\n    switch openDescription.popen_override {\n    | Asttypes.Fresh => Doc.space\n    | Asttypes.Override => Doc.text(\"! 
\")\n    },\n    printLongidentLocation(openDescription.popen_lid, cmtTbl),\n  })\n\nand printIncludeDescription = (includeDescription: Parsetree.include_description, cmtTbl) =>\n  Doc.concat(list{\n    printAttributes(includeDescription.pincl_attributes, cmtTbl),\n    Doc.text(\"include \"),\n    printModType(includeDescription.pincl_mod, cmtTbl),\n  })\n\nand printIncludeDeclaration = (includeDeclaration: Parsetree.include_declaration, cmtTbl) =>\n  Doc.concat(list{\n    printAttributes(includeDeclaration.pincl_attributes, cmtTbl),\n    Doc.text(\"include \"),\n    {\n      let includeDoc = printModExpr(includeDeclaration.pincl_mod, cmtTbl)\n\n      if Parens.includeModExpr(includeDeclaration.pincl_mod) {\n        addParens(includeDoc)\n      } else {\n        includeDoc\n      }\n    },\n  })\n\nand printValueBindings = (~recFlag, vbs: list<Parsetree.value_binding>, cmtTbl) =>\n  printListi(\n    ~getLoc=vb => vb.Parsetree.pvb_loc,\n    ~nodes=vbs,\n    ~print=printValueBinding(~recFlag),\n    cmtTbl,\n  )\n\nand printValueDescription = (valueDescription, cmtTbl) => {\n  let isExternal = switch valueDescription.pval_prim {\n  | list{} => false\n  | _ => true\n  }\n\n  let attrs = printAttributes(\n    ~loc=valueDescription.pval_name.loc,\n    valueDescription.pval_attributes,\n    cmtTbl,\n  )\n\n  let header = if isExternal {\n    \"external \"\n  } else {\n    \"let \"\n  }\n  Doc.group(\n    Doc.concat(list{\n      attrs,\n      Doc.text(header),\n      printComments(\n        printIdentLike(valueDescription.pval_name.txt),\n        cmtTbl,\n        valueDescription.pval_name.loc,\n      ),\n      Doc.text(\": \"),\n      printTypExpr(valueDescription.pval_type, cmtTbl),\n      if isExternal {\n        Doc.group(\n          Doc.concat(list{\n            Doc.text(\" =\"),\n            Doc.indent(\n              Doc.concat(list{\n                Doc.line,\n                Doc.join(\n                  ~sep=Doc.line,\n                  List.map(\n                 
   s => Doc.concat(list{Doc.text(\"\\\"\"), Doc.text(s), Doc.text(\"\\\"\")}),\n                    valueDescription.pval_prim,\n                  ),\n                ),\n              }),\n            ),\n          }),\n        )\n      } else {\n        Doc.nil\n      },\n    }),\n  )\n}\n\nand printTypeDeclarations = (~recFlag, typeDeclarations, cmtTbl) =>\n  printListi(\n    ~getLoc=n => n.Parsetree.ptype_loc,\n    ~nodes=typeDeclarations,\n    ~print=printTypeDeclaration2(~recFlag),\n    cmtTbl,\n  )\n\n/*\n * type_declaration = {\n *    ptype_name: string loc;\n *    ptype_params: (core_type * variance) list;\n *          (* ('a1,...'an) t; None represents  _*)\n *    ptype_cstrs: (core_type * core_type * Location.t) list;\n *          (* ... constraint T1=T1'  ... constraint Tn=Tn' *)\n *    ptype_kind: type_kind;\n *    ptype_private: private_flag;   (* = private ... *)\n *    ptype_manifest: core_type option;  (* = T *)\n *    ptype_attributes: attributes;   (* ... [@@id1] [@@id2] *)\n *    ptype_loc: Location.t;\n * }\n *\n *\n *  type t                     (abstract, no manifest)\n *  type t = T0                (abstract, manifest=T0)\n *  type t = C of T | ...      (variant,  no manifest)\n *  type t = T0 = C of T | ... (variant,  manifest=T0)\n *  type t = {l: T; ...}       (record,   no manifest)\n *  type t = T0 = {l : T; ...} (record,   manifest=T0)\n *  type t = ..                
(open,     no manifest)\n *\n *\n * and type_kind =\n *  | Ptype_abstract\n *  | Ptype_variant of constructor_declaration list\n *        (* Invariant: non-empty list *)\n *  | Ptype_record of label_declaration list\n *        (* Invariant: non-empty list *)\n *  | Ptype_open\n */\nand printTypeDeclaration = (\n  ~name,\n  ~equalSign,\n  ~recFlag,\n  i,\n  td: Parsetree.type_declaration,\n  cmtTbl,\n) => {\n  let attrs = printAttributes(~loc=td.ptype_loc, td.ptype_attributes, cmtTbl)\n  let prefix = if i > 0 {\n    Doc.text(\"and \")\n  } else {\n    Doc.concat(list{Doc.text(\"type \"), recFlag})\n  }\n\n  let typeName = name\n  let typeParams = printTypeParams(td.ptype_params, cmtTbl)\n  let manifestAndKind = switch td.ptype_kind {\n  | Ptype_abstract =>\n    switch td.ptype_manifest {\n    | None => Doc.nil\n    | Some(typ) =>\n      Doc.concat(list{\n        Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n        printPrivateFlag(td.ptype_private),\n        printTypExpr(typ, cmtTbl),\n      })\n    }\n  | Ptype_open =>\n    Doc.concat(list{\n      Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n      printPrivateFlag(td.ptype_private),\n      Doc.text(\"..\"),\n    })\n  | Ptype_record(lds) =>\n    let manifest = switch td.ptype_manifest {\n    | None => Doc.nil\n    | Some(typ) =>\n      Doc.concat(list{\n        Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n        printTypExpr(typ, cmtTbl),\n      })\n    }\n\n    Doc.concat(list{\n      manifest,\n      Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n      printPrivateFlag(td.ptype_private),\n      printRecordDeclaration(lds, cmtTbl),\n    })\n  | Ptype_variant(cds) =>\n    let manifest = switch td.ptype_manifest {\n    | None => Doc.nil\n    | Some(typ) =>\n      Doc.concat(list{\n        Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n        printTypExpr(typ, cmtTbl),\n      })\n    }\n\n    Doc.concat(list{\n      manifest,\n      
Doc.concat(list{Doc.space, Doc.text(equalSign)}),\n      printConstructorDeclarations(~privateFlag=td.ptype_private, cds, cmtTbl),\n    })\n  }\n\n  let constraints = printTypeDefinitionConstraints(td.ptype_cstrs)\n  Doc.group(Doc.concat(list{attrs, prefix, typeName, typeParams, manifestAndKind, constraints}))\n}\n\nand printTypeDeclaration2 = (~recFlag, td: Parsetree.type_declaration, cmtTbl, i) => {\n  let name = {\n    let doc = printIdentLike(td.Parsetree.ptype_name.txt)\n    printComments(doc, cmtTbl, td.ptype_name.loc)\n  }\n\n  let equalSign = \"=\"\n  let attrs = printAttributes(~loc=td.ptype_loc, td.ptype_attributes, cmtTbl)\n  let prefix = if i > 0 {\n    Doc.text(\"and \")\n  } else {\n    Doc.concat(list{Doc.text(\"type \"), recFlag})\n  }\n\n  let typeName = name\n  let typeParams = printTypeParams(td.ptype_params, cmtTbl)\n  let manifestAndKind = switch td.ptype_kind {\n  | Ptype_abstract =>\n    switch td.ptype_manifest {\n    | None => Doc.nil\n    | Some(typ) =>\n      Doc.concat(list{\n        Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n        printPrivateFlag(td.ptype_private),\n        printTypExpr(typ, cmtTbl),\n      })\n    }\n  | Ptype_open =>\n    Doc.concat(list{\n      Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n      printPrivateFlag(td.ptype_private),\n      Doc.text(\"..\"),\n    })\n  | Ptype_record(lds) =>\n    let manifest = switch td.ptype_manifest {\n    | None => Doc.nil\n    | Some(typ) =>\n      Doc.concat(list{\n        Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n        printTypExpr(typ, cmtTbl),\n      })\n    }\n\n    Doc.concat(list{\n      manifest,\n      Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n      printPrivateFlag(td.ptype_private),\n      printRecordDeclaration(lds, cmtTbl),\n    })\n  | Ptype_variant(cds) =>\n    let manifest = switch td.ptype_manifest {\n    | None => Doc.nil\n    | Some(typ) =>\n      Doc.concat(list{\n        
Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),\n        printTypExpr(typ, cmtTbl),\n      })\n    }\n\n    Doc.concat(list{\n      manifest,\n      Doc.concat(list{Doc.space, Doc.text(equalSign)}),\n      printConstructorDeclarations(~privateFlag=td.ptype_private, cds, cmtTbl),\n    })\n  }\n\n  let constraints = printTypeDefinitionConstraints(td.ptype_cstrs)\n  Doc.group(Doc.concat(list{attrs, prefix, typeName, typeParams, manifestAndKind, constraints}))\n}\n\nand printTypeDefinitionConstraints = cstrs =>\n  switch cstrs {\n  | list{} => Doc.nil\n  | cstrs =>\n    Doc.indent(\n      Doc.group(\n        Doc.concat(list{\n          Doc.line,\n          Doc.group(Doc.join(~sep=Doc.line, List.map(printTypeDefinitionConstraint, cstrs))),\n        }),\n      ),\n    )\n  }\n\nand printTypeDefinitionConstraint = (\n  (typ1, typ2, _loc): (Parsetree.core_type, Parsetree.core_type, Location.t),\n) =>\n  Doc.concat(list{\n    Doc.text(\"constraint \"),\n    printTypExpr(typ1, CommentTable.empty),\n    Doc.text(\" = \"),\n    printTypExpr(typ2, CommentTable.empty),\n  })\n\nand printPrivateFlag = (flag: Asttypes.private_flag) =>\n  switch flag {\n  | Private => Doc.text(\"private \")\n  | Public => Doc.nil\n  }\n\nand printTypeParams = (typeParams, cmtTbl) =>\n  switch typeParams {\n  | list{} => Doc.nil\n  | typeParams =>\n    Doc.group(\n      Doc.concat(list{\n        Doc.lessThan,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(typeParam => {\n                let doc = printTypeParam(typeParam, cmtTbl)\n                printComments(doc, cmtTbl, fst(typeParam).Parsetree.ptyp_loc)\n              }, typeParams)),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.greaterThan,\n      }),\n    )\n  }\n\nand printTypeParam = (param: (Parsetree.core_type, Asttypes.variance), cmtTbl) => {\n  let (typ, variance) = param\n  
let printedVariance = switch variance {\n  | Covariant => Doc.text(\"+\")\n  | Contravariant => Doc.text(\"-\")\n  | Invariant => Doc.nil\n  }\n\n  Doc.concat(list{printedVariance, printTypExpr(typ, cmtTbl)})\n}\n\nand printRecordDeclaration = (lds: list<Parsetree.label_declaration>, cmtTbl) => {\n  let forceBreak = switch (lds, List.rev(lds)) {\n  | (list{first, ..._}, list{last, ..._}) =>\n    first.pld_loc.loc_start.pos_lnum < last.pld_loc.loc_end.pos_lnum\n  | _ => false\n  }\n\n  Doc.breakableGroup(\n    ~forceBreak,\n    Doc.concat(list{\n      Doc.lbrace,\n      Doc.indent(\n        Doc.concat(list{\n          Doc.softLine,\n          Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(ld => {\n              let doc = printLabelDeclaration(ld, cmtTbl)\n              printComments(doc, cmtTbl, ld.Parsetree.pld_loc)\n            }, lds)),\n        }),\n      ),\n      Doc.trailingComma,\n      Doc.softLine,\n      Doc.rbrace,\n    }),\n  )\n}\n\nand printConstructorDeclarations = (\n  ~privateFlag,\n  cds: list<Parsetree.constructor_declaration>,\n  cmtTbl,\n) => {\n  let forceBreak = switch (cds, List.rev(cds)) {\n  | (list{first, ..._}, list{last, ..._}) =>\n    first.pcd_loc.loc_start.pos_lnum < last.pcd_loc.loc_end.pos_lnum\n  | _ => false\n  }\n\n  let privateFlag = switch privateFlag {\n  | Asttypes.Private => Doc.concat(list{Doc.text(\"private\"), Doc.line})\n  | Public => Doc.nil\n  }\n\n  let rows = printListi(\n    ~getLoc=cd => cd.Parsetree.pcd_loc,\n    ~nodes=cds,\n    ~print=(cd, cmtTbl, i) => {\n      let doc = printConstructorDeclaration2(i, cd, cmtTbl)\n      printComments(doc, cmtTbl, cd.Parsetree.pcd_loc)\n    },\n    ~forceBreak,\n    cmtTbl,\n  )\n\n  Doc.breakableGroup(~forceBreak, Doc.indent(Doc.concat(list{Doc.line, privateFlag, rows})))\n}\n\nand printConstructorDeclaration2 = (i, cd: Parsetree.constructor_declaration, cmtTbl) => {\n  let attrs = printAttributes(cd.pcd_attributes, cmtTbl)\n  let bar = if i > 0 || 
cd.pcd_attributes != list{} {\n    Doc.text(\"| \")\n  } else {\n    Doc.ifBreaks(Doc.text(\"| \"), Doc.nil)\n  }\n\n  let constrName = {\n    let doc = Doc.text(cd.pcd_name.txt)\n    printComments(doc, cmtTbl, cd.pcd_name.loc)\n  }\n\n  let constrArgs = printConstructorArguments(~indent=true, cd.pcd_args, cmtTbl)\n  let gadt = switch cd.pcd_res {\n  | None => Doc.nil\n  | Some(typ) => Doc.indent(Doc.concat(list{Doc.text(\": \"), printTypExpr(typ, cmtTbl)}))\n  }\n\n  Doc.concat(list{\n    bar,\n    Doc.group(\n      Doc.concat(list{\n        attrs /* TODO: fix parsing of attributes, so when can print them above the bar? */,\n        constrName,\n        constrArgs,\n        gadt,\n      }),\n    ),\n  })\n}\n\nand printConstructorArguments = (~indent, cdArgs: Parsetree.constructor_arguments, cmtTbl) =>\n  switch cdArgs {\n  | Pcstr_tuple(list{}) => Doc.nil\n  | Pcstr_tuple(types) =>\n    let args = Doc.concat(list{\n      Doc.lparen,\n      Doc.indent(\n        Doc.concat(list{\n          Doc.softLine,\n          Doc.join(\n            ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n            List.map(typexpr => printTypExpr(typexpr, cmtTbl), types),\n          ),\n        }),\n      ),\n      Doc.trailingComma,\n      Doc.softLine,\n      Doc.rparen,\n    })\n    Doc.group(\n      if indent {\n        Doc.indent(args)\n      } else {\n        args\n      },\n    )\n  | Pcstr_record(lds) =>\n    let args = Doc.concat(list{\n      Doc.lparen,\n      /* manually inline the printRecordDeclaration, gives better layout */\n      Doc.lbrace,\n      Doc.indent(\n        Doc.concat(list{\n          Doc.softLine,\n          Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(ld => {\n              let doc = printLabelDeclaration(ld, cmtTbl)\n              printComments(doc, cmtTbl, ld.Parsetree.pld_loc)\n            }, lds)),\n        }),\n      ),\n      Doc.trailingComma,\n      Doc.softLine,\n      Doc.rbrace,\n      Doc.rparen,\n    })\n    if indent {\n      
Doc.indent(args)\n    } else {\n      args\n    }\n  }\n\nand printLabelDeclaration = (ld: Parsetree.label_declaration, cmtTbl) => {\n  let attrs = printAttributes(~loc=ld.pld_name.loc, ld.pld_attributes, cmtTbl)\n  let mutableFlag = switch ld.pld_mutable {\n  | Mutable => Doc.text(\"mutable \")\n  | Immutable => Doc.nil\n  }\n\n  let name = {\n    let doc = printIdentLike(ld.pld_name.txt)\n    printComments(doc, cmtTbl, ld.pld_name.loc)\n  }\n\n  Doc.group(\n    Doc.concat(list{attrs, mutableFlag, name, Doc.text(\": \"), printTypExpr(ld.pld_type, cmtTbl)}),\n  )\n}\n\nand printTypExpr = (typExpr: Parsetree.core_type, cmtTbl) => {\n  let renderedType = switch typExpr.ptyp_desc {\n  | Ptyp_any => Doc.text(\"_\")\n  | Ptyp_var(var) => Doc.concat(list{Doc.text(\"'\"), printIdentLike(~allowUident=true, var)})\n  | Ptyp_extension(extension) => printExtension(~atModuleLvl=false, extension, cmtTbl)\n  | Ptyp_alias(typ, alias) =>\n    let typ = {\n      /* Technically type t = (string, float) => unit as 'x, doesn't require\n       * parens around the arrow expression. This is very confusing though.\n       * Is the \"as\" part of \"unit\" or \"(string, float) => unit\". By printing\n       * parens we guide the user towards its meaning. 
*/\n      let needsParens = switch typ.ptyp_desc {\n      | Ptyp_arrow(_) => true\n      | _ => false\n      }\n\n      let doc = printTypExpr(typ, cmtTbl)\n      if needsParens {\n        Doc.concat(list{Doc.lparen, doc, Doc.rparen})\n      } else {\n        doc\n      }\n    }\n\n    Doc.concat(list{typ, Doc.text(\" as \"), Doc.concat(list{Doc.text(\"'\"), printIdentLike(alias)})})\n\n  /* object printings */\n  | Ptyp_object(fields, openFlag) => printObject(~inline=false, fields, openFlag, cmtTbl)\n  | Ptyp_constr(longidentLoc, list{{ptyp_desc: Ptyp_object(fields, openFlag)}}) =>\n    /* for foo<{\"a\": b}>, when the object is long and needs a line break, we\n     want the <{ and }> to stay hugged together */\n    let constrName = printLidentPath(longidentLoc, cmtTbl)\n    Doc.concat(list{\n      constrName,\n      Doc.lessThan,\n      printObject(~inline=true, fields, openFlag, cmtTbl),\n      Doc.greaterThan,\n    })\n\n  | Ptyp_constr(longidentLoc, list{{ptyp_desc: Parsetree.Ptyp_tuple(tuple)}}) =>\n    let constrName = printLidentPath(longidentLoc, cmtTbl)\n    Doc.group(\n      Doc.concat(list{\n        constrName,\n        Doc.lessThan,\n        printTupleType(~inline=true, tuple, cmtTbl),\n        Doc.greaterThan,\n      }),\n    )\n  | Ptyp_constr(longidentLoc, constrArgs) =>\n    let constrName = printLidentPath(longidentLoc, cmtTbl)\n    switch constrArgs {\n    | list{} => constrName\n    | _args =>\n      Doc.group(\n        Doc.concat(list{\n          constrName,\n          Doc.lessThan,\n          Doc.indent(\n            Doc.concat(list{\n              Doc.softLine,\n              Doc.join(\n                ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n                List.map(typexpr => printTypExpr(typexpr, cmtTbl), constrArgs),\n              ),\n            }),\n          ),\n          Doc.trailingComma,\n          Doc.softLine,\n          Doc.greaterThan,\n        }),\n      )\n    }\n  | Ptyp_arrow(_) =>\n    let (attrsBefore, args, 
returnType) = ParsetreeViewer.arrowType(typExpr)\n    let returnTypeNeedsParens = switch returnType.ptyp_desc {\n    | Ptyp_alias(_) => true\n    | _ => false\n    }\n\n    let returnDoc = {\n      let doc = printTypExpr(returnType, cmtTbl)\n      if returnTypeNeedsParens {\n        Doc.concat(list{Doc.lparen, doc, Doc.rparen})\n      } else {\n        doc\n      }\n    }\n\n    let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrsBefore)\n\n    switch args {\n    | list{} => Doc.nil\n    | list{(list{}, Nolabel, n)} if !isUncurried =>\n      let hasAttrsBefore = !(attrs == list{})\n      let attrs = if hasAttrsBefore {\n        printAttributes(~inline=true, attrsBefore, cmtTbl)\n      } else {\n        Doc.nil\n      }\n\n      let typDoc = {\n        let doc = printTypExpr(n, cmtTbl)\n        switch n.ptyp_desc {\n        | Ptyp_arrow(_) | Ptyp_tuple(_) | Ptyp_alias(_) => addParens(doc)\n        | _ => doc\n        }\n      }\n\n      Doc.group(\n        Doc.concat(list{\n          Doc.group(attrs),\n          Doc.group(\n            if hasAttrsBefore {\n              Doc.concat(list{\n                Doc.lparen,\n                Doc.indent(Doc.concat(list{Doc.softLine, typDoc, Doc.text(\" => \"), returnDoc})),\n                Doc.softLine,\n                Doc.rparen,\n              })\n            } else {\n              Doc.concat(list{typDoc, Doc.text(\" => \"), returnDoc})\n            },\n          ),\n        }),\n      )\n    | args =>\n      let attrs = printAttributes(~inline=true, attrs, cmtTbl)\n      let renderedArgs = Doc.concat(list{\n        attrs,\n        Doc.text(\"(\"),\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            if isUncurried {\n              Doc.concat(list{Doc.dot, Doc.space})\n            } else {\n              Doc.nil\n            },\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n              List.map(tp => printTypeParameter(tp, 
cmtTbl), args),\n            ),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.text(\")\"),\n      })\n      Doc.group(Doc.concat(list{renderedArgs, Doc.text(\" => \"), returnDoc}))\n    }\n  | Ptyp_tuple(types) => printTupleType(~inline=false, types, cmtTbl)\n  | Ptyp_poly(list{}, typ) => printTypExpr(typ, cmtTbl)\n  | Ptyp_poly(stringLocs, typ) =>\n    Doc.concat(list{Doc.join(~sep=Doc.space, List.map(({Location.txt: txt, loc}) => {\n          let doc = Doc.concat(list{Doc.text(\"'\"), Doc.text(txt)})\n          printComments(doc, cmtTbl, loc)\n        }, stringLocs)), Doc.dot, Doc.space, printTypExpr(typ, cmtTbl)})\n  | Ptyp_package(packageType) =>\n    printPackageType(~printModuleKeywordAndParens=true, packageType, cmtTbl)\n  | Ptyp_class(_) => Doc.text(\"classes are not supported in types\")\n  | Ptyp_variant(rowFields, closedFlag, labelsOpt) =>\n    let forceBreak =\n      typExpr.ptyp_loc.Location.loc_start.pos_lnum < typExpr.ptyp_loc.loc_end.pos_lnum\n    let printRowField = x =>\n      switch x {\n      | Parsetree.Rtag({txt}, attrs, true, list{}) =>\n        Doc.group(\n          Doc.concat(list{\n            printAttributes(attrs, cmtTbl),\n            Doc.concat(list{Doc.text(\"#\"), printPolyVarIdent(txt)}),\n          }),\n        )\n      | Rtag({txt}, attrs, truth, types) =>\n        let doType = t =>\n          switch t.Parsetree.ptyp_desc {\n          | Ptyp_tuple(_) => printTypExpr(t, cmtTbl)\n          | _ => Doc.concat(list{Doc.lparen, printTypExpr(t, cmtTbl), Doc.rparen})\n          }\n\n        let printedTypes = List.map(doType, types)\n        let cases = Doc.join(~sep=Doc.concat(list{Doc.line, Doc.text(\"& \")}), printedTypes)\n        let cases = if truth {\n          Doc.concat(list{Doc.line, Doc.text(\"& \"), cases})\n        } else {\n          cases\n        }\n        Doc.group(\n          Doc.concat(list{\n            printAttributes(attrs, cmtTbl),\n            
Doc.concat(list{Doc.text(\"#\"), printPolyVarIdent(txt)}),\n            cases,\n          }),\n        )\n      | Rinherit(coreType) => printTypExpr(coreType, cmtTbl)\n      }\n\n    let docs = List.map(printRowField, rowFields)\n    let cases = Doc.join(~sep=Doc.concat(list{Doc.line, Doc.text(\"| \")}), docs)\n    let cases = if docs == list{} {\n      cases\n    } else {\n      Doc.concat(list{Doc.ifBreaks(Doc.text(\"| \"), Doc.nil), cases})\n    }\n\n    let openingSymbol = if closedFlag == Open {\n      Doc.concat(list{Doc.greaterThan, Doc.line})\n    } else if labelsOpt == None {\n      Doc.softLine\n    } else {\n      Doc.concat(list{Doc.lessThan, Doc.line})\n    }\n    let labels = switch labelsOpt {\n    | None\n    | Some(list{}) => Doc.nil\n    | Some(labels) =>\n      Doc.concat(\n        List.map(\n          label => Doc.concat(list{Doc.line, Doc.text(\"#\"), printPolyVarIdent(label)}),\n          labels,\n        ),\n      )\n    }\n\n    let closingSymbol = switch labelsOpt {\n    | None | Some(list{}) => Doc.nil\n    | _ => Doc.text(\" >\")\n    }\n\n    Doc.breakableGroup(\n      ~forceBreak,\n      Doc.concat(list{\n        Doc.lbracket,\n        Doc.indent(Doc.concat(list{openingSymbol, cases, closingSymbol, labels})),\n        Doc.softLine,\n        Doc.rbracket,\n      }),\n    )\n  }\n\n  let shouldPrintItsOwnAttributes = switch typExpr.ptyp_desc {\n  | Ptyp_arrow(_) /* es6 arrow types print their own attributes */ => true\n  | _ => false\n  }\n\n  let doc = switch typExpr.ptyp_attributes {\n  | list{_, ..._} as attrs if !shouldPrintItsOwnAttributes =>\n    Doc.group(Doc.concat(list{printAttributes(attrs, cmtTbl), renderedType}))\n  | _ => renderedType\n  }\n\n  printComments(doc, cmtTbl, typExpr.ptyp_loc)\n}\n\nand printObject = (~inline, fields, openFlag, cmtTbl) => {\n  let doc = switch fields {\n  | list{} =>\n    Doc.concat(list{\n      Doc.lbrace,\n      switch openFlag {\n      | Asttypes.Closed => Doc.dot\n      | Open => Doc.dotdot\n  
    },\n      Doc.rbrace,\n    })\n  | fields =>\n    Doc.concat(list{\n      Doc.lbrace,\n      switch openFlag {\n      | Asttypes.Closed => Doc.nil\n      | Open =>\n        switch fields {\n        /* handle `type t = {.. ...objType, \"x\": int}`\n         * .. and ... should have a space in between */\n        | list{Oinherit(_), ..._} => Doc.text(\".. \")\n        | _ => Doc.dotdot\n        }\n      },\n      Doc.indent(\n        Doc.concat(list{\n          Doc.softLine,\n          Doc.join(\n            ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n            List.map(field => printObjectField(field, cmtTbl), fields),\n          ),\n        }),\n      ),\n      Doc.trailingComma,\n      Doc.softLine,\n      Doc.rbrace,\n    })\n  }\n\n  if inline {\n    doc\n  } else {\n    Doc.group(doc)\n  }\n}\n\nand printTupleType = (~inline, types: list<Parsetree.core_type>, cmtTbl) => {\n  let tuple = Doc.concat(list{\n    Doc.lparen,\n    Doc.indent(\n      Doc.concat(list{\n        Doc.softLine,\n        Doc.join(\n          ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n          List.map(typexpr => printTypExpr(typexpr, cmtTbl), types),\n        ),\n      }),\n    ),\n    Doc.trailingComma,\n    Doc.softLine,\n    Doc.rparen,\n  })\n\n  if inline === false {\n    Doc.group(tuple)\n  } else {\n    tuple\n  }\n}\n\nand printObjectField = (field: Parsetree.object_field, cmtTbl) =>\n  switch field {\n  | Otag(labelLoc, attrs, typ) =>\n    let lbl = {\n      let doc = Doc.text(\"\\\"\" ++ (labelLoc.txt ++ \"\\\"\"))\n      printComments(doc, cmtTbl, labelLoc.loc)\n    }\n\n    let doc = Doc.concat(list{\n      printAttributes(~loc=labelLoc.loc, attrs, cmtTbl),\n      lbl,\n      Doc.text(\": \"),\n      printTypExpr(typ, cmtTbl),\n    })\n    let cmtLoc = {...labelLoc.loc, loc_end: typ.ptyp_loc.loc_end}\n    printComments(doc, cmtTbl, cmtLoc)\n  | Oinherit(typexpr) => Doc.concat(list{Doc.dotdotdot, printTypExpr(typexpr, cmtTbl)})\n  }\n\n/* es6 arrow type arg\n * type t = 
(~foo: string, ~bar: float=?, unit) => unit\n * i.e. ~foo: string, ~bar: float */\nand printTypeParameter = ((attrs, lbl, typ), cmtTbl) => {\n  let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrs)\n  let uncurried = if isUncurried {\n    Doc.concat(list{Doc.dot, Doc.space})\n  } else {\n    Doc.nil\n  }\n  let attrs = printAttributes(attrs, cmtTbl)\n  let label = switch lbl {\n  | Asttypes.Nolabel => Doc.nil\n  | Labelled(lbl) => Doc.concat(list{Doc.text(\"~\"), printIdentLike(lbl), Doc.text(\": \")})\n  | Optional(lbl) => Doc.concat(list{Doc.text(\"~\"), printIdentLike(lbl), Doc.text(\": \")})\n  }\n\n  let optionalIndicator = switch lbl {\n  | Asttypes.Nolabel\n  | Labelled(_) => Doc.nil\n  | Optional(_lbl) => Doc.text(\"=?\")\n  }\n\n  let (loc, typ) = switch typ.ptyp_attributes {\n  | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ...attrs} => (\n      {...loc, loc_end: typ.ptyp_loc.loc_end},\n      {...typ, ptyp_attributes: attrs},\n    )\n  | _ => (typ.ptyp_loc, typ)\n  }\n\n  let doc = Doc.group(\n    Doc.concat(list{uncurried, attrs, label, printTypExpr(typ, cmtTbl), optionalIndicator}),\n  )\n  printComments(doc, cmtTbl, loc)\n}\n\nand printValueBinding = (~recFlag, vb, cmtTbl, i) => {\n  let attrs = printAttributes(~loc=vb.pvb_pat.ppat_loc, vb.pvb_attributes, cmtTbl)\n  let header = if i === 0 {\n    Doc.concat(list{Doc.text(\"let \"), recFlag})\n  } else {\n    Doc.text(\"and \")\n  }\n\n  switch vb {\n  | {\n      pvb_pat: {ppat_desc: Ppat_constraint(pattern, {ptyp_desc: Ptyp_poly(_)} as patTyp)},\n      pvb_expr: {pexp_desc: Pexp_newtype(_)} as expr,\n    } =>\n    let (_attrs, parameters, returnExpr) = ParsetreeViewer.funExpr(expr)\n    let abstractType = switch parameters {\n    | list{NewTypes({locs: vars})} =>\n      Doc.concat(list{\n        Doc.text(\"type \"),\n        Doc.join(~sep=Doc.space, List.map(var => Doc.text(var.Asttypes.txt), vars)),\n        Doc.dot,\n      })\n    | _ => Doc.nil\n    }\n\n    switch 
returnExpr.pexp_desc {\n    | Pexp_constraint(expr, typ) =>\n      Doc.group(\n        Doc.concat(list{\n          attrs,\n          header,\n          printPattern(pattern, cmtTbl),\n          Doc.text(\":\"),\n          Doc.indent(\n            Doc.concat(list{\n              Doc.line,\n              abstractType,\n              Doc.space,\n              printTypExpr(typ, cmtTbl),\n              Doc.text(\" =\"),\n              Doc.concat(list{Doc.line, printExpressionWithComments(expr, cmtTbl)}),\n            }),\n          ),\n        }),\n      )\n    | _ =>\n      /* Example:\n       * let cancel_and_collect_callbacks:\n       *   'a 'u 'c. (list<packed_callbacks>, promise<'a, 'u, 'c>) => list<packed_callbacks> =         *  (type x, callbacks_accumulator, p: promise<_, _, c>)\n       */\n      Doc.group(\n        Doc.concat(list{\n          attrs,\n          header,\n          printPattern(pattern, cmtTbl),\n          Doc.text(\":\"),\n          Doc.indent(\n            Doc.concat(list{\n              Doc.line,\n              abstractType,\n              Doc.space,\n              printTypExpr(patTyp, cmtTbl),\n              Doc.text(\" =\"),\n              Doc.concat(list{Doc.line, printExpressionWithComments(expr, cmtTbl)}),\n            }),\n          ),\n        }),\n      )\n    }\n  | _ =>\n    let (optBraces, expr) = ParsetreeViewer.processBracesAttr(vb.pvb_expr)\n    let printedExpr = {\n      let doc = printExpressionWithComments(vb.pvb_expr, cmtTbl)\n      switch Parens.expr(vb.pvb_expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, braces)\n      | Nothing => doc\n      }\n    }\n\n    let patternDoc = printPattern(vb.pvb_pat, cmtTbl)\n    /*\n     * we want to optimize the layout of one pipe:\n     *   let tbl = data->Js.Array2.reduce((map, curr) => {\n     *     ...\n     *   })\n     * important is that we don't do this for multiple pipes:\n     *   let decoratorTags =\n     *     items\n     
*     ->Js.Array2.filter(items => {items.category === Decorators})\n     *     ->Belt.Array.map(...)\n     * Multiple pipes chained together lend themselves more towards the last layout.\n     */\n    if ParsetreeViewer.isSinglePipeExpr(vb.pvb_expr) {\n      Doc.customLayout(list{\n        Doc.group(\n          Doc.concat(list{attrs, header, patternDoc, Doc.text(\" =\"), Doc.space, printedExpr}),\n        ),\n        Doc.group(\n          Doc.concat(list{\n            attrs,\n            header,\n            patternDoc,\n            Doc.text(\" =\"),\n            Doc.indent(Doc.concat(list{Doc.line, printedExpr})),\n          }),\n        ),\n      })\n    } else {\n      let shouldIndent = switch optBraces {\n      | Some(_) => false\n      | _ =>\n        ParsetreeViewer.isBinaryExpression(expr) ||\n        switch vb.pvb_expr {\n        | {\n            pexp_attributes: list{({Location.txt: \"ns.ternary\"}, _)},\n            pexp_desc: Pexp_ifthenelse(ifExpr, _, _),\n          } =>\n          ParsetreeViewer.isBinaryExpression(ifExpr) ||\n          ParsetreeViewer.hasAttributes(ifExpr.pexp_attributes)\n        | {pexp_desc: Pexp_newtype(_)} => false\n        | e => ParsetreeViewer.hasAttributes(e.pexp_attributes) || ParsetreeViewer.isArrayAccess(e)\n        }\n      }\n\n      Doc.group(\n        Doc.concat(list{\n          attrs,\n          header,\n          patternDoc,\n          Doc.text(\" =\"),\n          if shouldIndent {\n            Doc.indent(Doc.concat(list{Doc.line, printedExpr}))\n          } else {\n            Doc.concat(list{Doc.space, printedExpr})\n          },\n        }),\n      )\n    }\n  }\n}\n\nand printPackageType = (\n  ~printModuleKeywordAndParens,\n  packageType: Parsetree.package_type,\n  cmtTbl,\n) => {\n  let doc = switch packageType {\n  | (longidentLoc, list{}) =>\n    Doc.group(Doc.concat(list{printLongidentLocation(longidentLoc, cmtTbl)}))\n  | (longidentLoc, packageConstraints) =>\n    Doc.group(\n      Doc.concat(list{\n       
 printLongidentLocation(longidentLoc, cmtTbl),\n        printPackageConstraints(packageConstraints, cmtTbl),\n        Doc.softLine,\n      }),\n    )\n  }\n\n  if printModuleKeywordAndParens {\n    Doc.concat(list{Doc.text(\"module(\"), doc, Doc.rparen})\n  } else {\n    doc\n  }\n}\n\nand printPackageConstraints = (packageConstraints, cmtTbl) =>\n  Doc.concat(list{\n    Doc.text(\" with\"),\n    Doc.indent(Doc.concat(list{Doc.line, Doc.join(~sep=Doc.line, List.mapi((i, pc) => {\n            let (longident, typexpr) = pc\n            let cmtLoc = {\n              ...longident.Asttypes.loc,\n              loc_end: typexpr.Parsetree.ptyp_loc.loc_end,\n            }\n            let doc = printPackageConstraint(i, cmtTbl, pc)\n            printComments(doc, cmtTbl, cmtLoc)\n          }, packageConstraints))})),\n  })\n\nand printPackageConstraint = (i, cmtTbl, (longidentLoc, typ)) => {\n  let prefix = if i === 0 {\n    Doc.text(\"type \")\n  } else {\n    Doc.text(\"and type \")\n  }\n  Doc.concat(list{\n    prefix,\n    printLongidentLocation(longidentLoc, cmtTbl),\n    Doc.text(\" = \"),\n    printTypExpr(typ, cmtTbl),\n  })\n}\n\nand printExtension = (~atModuleLvl, (stringLoc, payload), cmtTbl) => {\n  let txt = convertBsExtension(stringLoc.Location.txt)\n  let extName = {\n    let doc = Doc.concat(list{\n      Doc.text(\"%\"),\n      if atModuleLvl {\n        Doc.text(\"%\")\n      } else {\n        Doc.nil\n      },\n      Doc.text(txt),\n    })\n    printComments(doc, cmtTbl, stringLoc.Location.loc)\n  }\n\n  Doc.group(Doc.concat(list{extName, printPayload(payload, cmtTbl)}))\n}\n\nand printPattern = (p: Parsetree.pattern, cmtTbl) => {\n  let patternWithoutAttributes = switch p.ppat_desc {\n  | Ppat_any => Doc.text(\"_\")\n  | Ppat_var(var) => printIdentLike(var.txt)\n  | Ppat_constant(c) =>\n    let templateLiteral = ParsetreeViewer.hasTemplateLiteralAttr(p.ppat_attributes)\n    printConstant(~templateLiteral, c)\n  | Ppat_tuple(patterns) =>\n    Doc.group(\n   
   Doc.concat(list{\n        Doc.lparen,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.text(\",\"), Doc.line}),\n              List.map(pat => printPattern(pat, cmtTbl), patterns),\n            ),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rparen,\n      }),\n    )\n  | Ppat_array(list{}) =>\n    Doc.concat(list{Doc.lbracket, printCommentsInside(cmtTbl, p.ppat_loc), Doc.rbracket})\n  | Ppat_array(patterns) =>\n    Doc.group(\n      Doc.concat(list{\n        Doc.text(\"[\"),\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.text(\",\"), Doc.line}),\n              List.map(pat => printPattern(pat, cmtTbl), patterns),\n            ),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.text(\"]\"),\n      }),\n    )\n  | Ppat_construct({txt: Longident.Lident(\"()\")}, _) =>\n    Doc.concat(list{Doc.lparen, printCommentsInside(cmtTbl, p.ppat_loc), Doc.rparen})\n  | Ppat_construct({txt: Longident.Lident(\"[]\")}, _) =>\n    Doc.concat(list{Doc.text(\"list{\"), printCommentsInside(cmtTbl, p.ppat_loc), Doc.rbrace})\n  | Ppat_construct({txt: Longident.Lident(\"::\")}, _) =>\n    let (patterns, tail) = ParsetreeViewer.collectPatternsFromListConstruct(list{}, p)\n    let shouldHug = switch (patterns, tail) {\n    | (list{pat}, {ppat_desc: Ppat_construct({txt: Longident.Lident(\"[]\")}, _)})\n      if ParsetreeViewer.isHuggablePattern(pat) => true\n    | _ => false\n    }\n\n    let children = Doc.concat(list{\n      if shouldHug {\n        Doc.nil\n      } else {\n        Doc.softLine\n      },\n      Doc.join(\n        ~sep=Doc.concat(list{Doc.text(\",\"), Doc.line}),\n        List.map(pat => printPattern(pat, cmtTbl), patterns),\n      ),\n      switch tail.Parsetree.ppat_desc {\n      | Ppat_construct({txt: 
Longident.Lident(\"[]\")}, _) => Doc.nil\n      | _ =>\n        let doc = Doc.concat(list{Doc.text(\"...\"), printPattern(tail, cmtTbl)})\n        let tail = printComments(doc, cmtTbl, tail.ppat_loc)\n        Doc.concat(list{Doc.text(\",\"), Doc.line, tail})\n      },\n    })\n    Doc.group(\n      Doc.concat(list{\n        Doc.text(\"list{\"),\n        if shouldHug {\n          children\n        } else {\n          Doc.concat(list{Doc.indent(children), Doc.ifBreaks(Doc.text(\",\"), Doc.nil), Doc.softLine})\n        },\n        Doc.rbrace,\n      }),\n    )\n  | Ppat_construct(constrName, constructorArgs) =>\n    let constrName = printLongidentLocation(constrName, cmtTbl)\n    let argsDoc = switch constructorArgs {\n    | None => Doc.nil\n    | Some({ppat_loc, ppat_desc: Ppat_construct({txt: Longident.Lident(\"()\")}, _)}) =>\n      Doc.concat(list{Doc.lparen, printCommentsInside(cmtTbl, ppat_loc), Doc.rparen})\n    | Some({ppat_desc: Ppat_tuple(list{}), ppat_loc: loc}) =>\n      Doc.concat(list{Doc.lparen, Doc.softLine, printCommentsInside(cmtTbl, loc), Doc.rparen})\n    /* Some((1, 2) */\n    | Some({ppat_desc: Ppat_tuple(list{{ppat_desc: Ppat_tuple(_)} as arg})}) =>\n      Doc.concat(list{Doc.lparen, printPattern(arg, cmtTbl), Doc.rparen})\n    | Some({ppat_desc: Ppat_tuple(patterns)}) =>\n      Doc.concat(list{\n        Doc.lparen,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n              List.map(pat => printPattern(pat, cmtTbl), patterns),\n            ),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rparen,\n      })\n    | Some(arg) =>\n      let argDoc = printPattern(arg, cmtTbl)\n      let shouldHug = ParsetreeViewer.isHuggablePattern(arg)\n      Doc.concat(list{\n        Doc.lparen,\n        if shouldHug {\n          argDoc\n        } else {\n          Doc.concat(list{\n            
Doc.indent(Doc.concat(list{Doc.softLine, argDoc})),\n            Doc.trailingComma,\n            Doc.softLine,\n          })\n        },\n        Doc.rparen,\n      })\n    }\n\n    Doc.group(Doc.concat(list{constrName, argsDoc}))\n  | Ppat_variant(label, None) => Doc.concat(list{Doc.text(\"#\"), printPolyVarIdent(label)})\n  | Ppat_variant(label, variantArgs) =>\n    let variantName = Doc.concat(list{Doc.text(\"#\"), printPolyVarIdent(label)})\n    let argsDoc = switch variantArgs {\n    | None => Doc.nil\n    | Some({ppat_desc: Ppat_construct({txt: Longident.Lident(\"()\")}, _)}) => Doc.text(\"()\")\n    | Some({ppat_desc: Ppat_tuple(list{}), ppat_loc: loc}) =>\n      Doc.concat(list{Doc.lparen, Doc.softLine, printCommentsInside(cmtTbl, loc), Doc.rparen})\n    /* Some((1, 2) */\n    | Some({ppat_desc: Ppat_tuple(list{{ppat_desc: Ppat_tuple(_)} as arg})}) =>\n      Doc.concat(list{Doc.lparen, printPattern(arg, cmtTbl), Doc.rparen})\n    | Some({ppat_desc: Ppat_tuple(patterns)}) =>\n      Doc.concat(list{\n        Doc.lparen,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n              List.map(pat => printPattern(pat, cmtTbl), patterns),\n            ),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rparen,\n      })\n    | Some(arg) =>\n      let argDoc = printPattern(arg, cmtTbl)\n      let shouldHug = ParsetreeViewer.isHuggablePattern(arg)\n      Doc.concat(list{\n        Doc.lparen,\n        if shouldHug {\n          argDoc\n        } else {\n          Doc.concat(list{\n            Doc.indent(Doc.concat(list{Doc.softLine, argDoc})),\n            Doc.trailingComma,\n            Doc.softLine,\n          })\n        },\n        Doc.rparen,\n      })\n    }\n\n    Doc.group(Doc.concat(list{variantName, argsDoc}))\n  | Ppat_type(ident) => Doc.concat(list{Doc.text(\"#...\"), printIdentPath(ident, cmtTbl)})\n  | 
Ppat_record(rows, openFlag) =>\n    Doc.group(\n      Doc.concat(list{\n        Doc.lbrace,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.text(\",\"), Doc.line}),\n              List.map(row => printPatternRecordRow(row, cmtTbl), rows),\n            ),\n            switch openFlag {\n            | Open => Doc.concat(list{Doc.text(\",\"), Doc.line, Doc.text(\"_\")})\n            | Closed => Doc.nil\n            },\n          }),\n        ),\n        Doc.ifBreaks(Doc.text(\",\"), Doc.nil),\n        Doc.softLine,\n        Doc.rbrace,\n      }),\n    )\n\n  | Ppat_exception(p) =>\n    let needsParens = switch p.ppat_desc {\n    | Ppat_or(_, _) | Ppat_alias(_, _) => true\n    | _ => false\n    }\n\n    let pat = {\n      let p = printPattern(p, cmtTbl)\n      if needsParens {\n        Doc.concat(list{Doc.text(\"(\"), p, Doc.text(\")\")})\n      } else {\n        p\n      }\n    }\n\n    Doc.group(Doc.concat(list{Doc.text(\"exception\"), Doc.line, pat}))\n  | Ppat_or(_) =>\n    /* Blue | Red | Green -> [Blue; Red; Green] */\n    let orChain = ParsetreeViewer.collectOrPatternChain(p)\n    let docs = List.mapi((i, pat) => {\n      let patternDoc = printPattern(pat, cmtTbl)\n      Doc.concat(list{\n        if i === 0 {\n          Doc.nil\n        } else {\n          Doc.concat(list{Doc.line, Doc.text(\"| \")})\n        },\n        switch pat.ppat_desc {\n        /* (Blue | Red) | (Green | Black) | White */\n        | Ppat_or(_) => addParens(patternDoc)\n        | _ => patternDoc\n        },\n      })\n    }, orChain)\n    let isSpreadOverMultipleLines = switch (orChain, List.rev(orChain)) {\n    | (list{first, ..._}, list{last, ..._}) =>\n      first.ppat_loc.loc_start.pos_lnum < last.ppat_loc.loc_end.pos_lnum\n    | _ => false\n    }\n\n    Doc.breakableGroup(~forceBreak=isSpreadOverMultipleLines, Doc.concat(docs))\n  | Ppat_extension(ext) => printExtension(~atModuleLvl=false, ext, 
cmtTbl)\n  | Ppat_lazy(p) =>\n    let needsParens = switch p.ppat_desc {\n    | Ppat_or(_, _) | Ppat_alias(_, _) => true\n    | _ => false\n    }\n\n    let pat = {\n      let p = printPattern(p, cmtTbl)\n      if needsParens {\n        Doc.concat(list{Doc.text(\"(\"), p, Doc.text(\")\")})\n      } else {\n        p\n      }\n    }\n\n    Doc.concat(list{Doc.text(\"lazy \"), pat})\n  | Ppat_alias(p, aliasLoc) =>\n    let needsParens = switch p.ppat_desc {\n    | Ppat_or(_, _) | Ppat_alias(_, _) => true\n    | _ => false\n    }\n\n    let renderedPattern = {\n      let p = printPattern(p, cmtTbl)\n      if needsParens {\n        Doc.concat(list{Doc.text(\"(\"), p, Doc.text(\")\")})\n      } else {\n        p\n      }\n    }\n\n    Doc.concat(list{renderedPattern, Doc.text(\" as \"), printStringLoc(aliasLoc, cmtTbl)})\n\n  /* Note: module(P : S) is represented as */\n  /* Ppat_constraint(Ppat_unpack, Ptyp_package) */\n  | Ppat_constraint(\n      {ppat_desc: Ppat_unpack(stringLoc)},\n      {ptyp_desc: Ptyp_package(packageType), ptyp_loc},\n    ) =>\n    Doc.concat(list{\n      Doc.text(\"module(\"),\n      printComments(Doc.text(stringLoc.txt), cmtTbl, stringLoc.loc),\n      Doc.text(\": \"),\n      printComments(\n        printPackageType(~printModuleKeywordAndParens=false, packageType, cmtTbl),\n        cmtTbl,\n        ptyp_loc,\n      ),\n      Doc.rparen,\n    })\n  | Ppat_constraint(pattern, typ) =>\n    Doc.concat(list{printPattern(pattern, cmtTbl), Doc.text(\": \"), printTypExpr(typ, cmtTbl)})\n\n  /* Note: module(P : S) is represented as */\n  /* Ppat_constraint(Ppat_unpack, Ptyp_package) */\n  | Ppat_unpack(stringLoc) =>\n    Doc.concat(list{\n      Doc.text(\"module(\"),\n      printComments(Doc.text(stringLoc.txt), cmtTbl, stringLoc.loc),\n      Doc.rparen,\n    })\n  | Ppat_interval(a, b) => Doc.concat(list{printConstant(a), Doc.text(\" .. 
\"), printConstant(b)})\n  | Ppat_open(_) => Doc.nil\n  }\n\n  let doc = switch p.ppat_attributes {\n  | list{} => patternWithoutAttributes\n  | attrs => Doc.group(Doc.concat(list{printAttributes(attrs, cmtTbl), patternWithoutAttributes}))\n  }\n\n  printComments(doc, cmtTbl, p.ppat_loc)\n}\n\nand printPatternRecordRow = (row, cmtTbl) =>\n  switch row {\n  /* punned {x} */\n  | (\n      {Location.txt: Longident.Lident(ident)} as longident,\n      {Parsetree.ppat_desc: Ppat_var({txt, _})},\n    ) if ident == txt =>\n    printLidentPath(longident, cmtTbl)\n  | (longident, pattern) =>\n    let locForComments = {\n      ...longident.loc,\n      loc_end: pattern.Parsetree.ppat_loc.loc_end,\n    }\n    let rhsDoc = {\n      let doc = printPattern(pattern, cmtTbl)\n      if Parens.patternRecordRowRhs(pattern) {\n        addParens(doc)\n      } else {\n        doc\n      }\n    }\n\n    let doc = Doc.group(\n      Doc.concat(list{\n        printLidentPath(longident, cmtTbl),\n        Doc.text(\":\"),\n        if ParsetreeViewer.isHuggablePattern(pattern) {\n          Doc.concat(list{Doc.space, rhsDoc})\n        } else {\n          Doc.indent(Doc.concat(list{Doc.line, rhsDoc}))\n        },\n      }),\n    )\n    printComments(doc, cmtTbl, locForComments)\n  }\n\nand printExpressionWithComments = (expr, cmtTbl) => {\n  let doc = printExpression(expr, cmtTbl)\n  printComments(doc, cmtTbl, expr.Parsetree.pexp_loc)\n}\n\nand printIfChain = (pexp_attributes, ifs, elseExpr, cmtTbl) => {\n  let ifDocs = Doc.join(~sep=Doc.space, List.mapi((i, (ifExpr, thenExpr)) => {\n      let ifTxt = if i > 0 {\n        Doc.text(\"else if \")\n      } else {\n        Doc.text(\"if \")\n      }\n      switch ifExpr {\n      | ParsetreeViewer.If(ifExpr) =>\n        let condition = if ParsetreeViewer.isBlockExpr(ifExpr) {\n          printExpressionBlock(~braces=true, ifExpr, cmtTbl)\n        } else {\n          let doc = printExpressionWithComments(ifExpr, cmtTbl)\n          switch 
Parens.expr(ifExpr) {\n          | Parens.Parenthesized => addParens(doc)\n          | Braced(braces) => printBraces(doc, ifExpr, braces)\n          | Nothing => Doc.ifBreaks(addParens(doc), doc)\n          }\n        }\n\n        Doc.concat(list{\n          ifTxt,\n          Doc.group(condition),\n          Doc.space,\n          {\n            let thenExpr = switch ParsetreeViewer.processBracesAttr(thenExpr) {\n            /* This case only happens when coming from Reason, we strip braces */\n            | (Some(_), expr) => expr\n            | _ => thenExpr\n            }\n\n            printExpressionBlock(~braces=true, thenExpr, cmtTbl)\n          },\n        })\n      | IfLet(pattern, conditionExpr) =>\n        let conditionDoc = {\n          let doc = printExpressionWithComments(conditionExpr, cmtTbl)\n          switch Parens.expr(conditionExpr) {\n          | Parens.Parenthesized => addParens(doc)\n          | Braced(braces) => printBraces(doc, conditionExpr, braces)\n          | Nothing => doc\n          }\n        }\n\n        Doc.concat(list{\n          ifTxt,\n          Doc.text(\"let \"),\n          printPattern(pattern, cmtTbl),\n          Doc.text(\" = \"),\n          conditionDoc,\n          Doc.space,\n          printExpressionBlock(~braces=true, thenExpr, cmtTbl),\n        })\n      }\n    }, ifs))\n  let elseDoc = switch elseExpr {\n  | None => Doc.nil\n  | Some(expr) =>\n    Doc.concat(list{Doc.text(\" else \"), printExpressionBlock(~braces=true, expr, cmtTbl)})\n  }\n\n  let attrs = ParsetreeViewer.filterFragileMatchAttributes(pexp_attributes)\n  Doc.concat(list{printAttributes(attrs, cmtTbl), ifDocs, elseDoc})\n}\n\nand printExpression = (e: Parsetree.expression, cmtTbl) => {\n  let printedExpression = switch e.pexp_desc {\n  | Parsetree.Pexp_constant(c) =>\n    printConstant(~templateLiteral=ParsetreeViewer.isTemplateLiteral(e), c)\n  | Pexp_construct(_) if ParsetreeViewer.hasJsxAttribute(e.pexp_attributes) =>\n    printJsxFragment(e, 
cmtTbl)\n  | Pexp_construct({txt: Longident.Lident(\"()\")}, _) => Doc.text(\"()\")\n  | Pexp_construct({txt: Longident.Lident(\"[]\")}, _) =>\n    Doc.concat(list{Doc.text(\"list{\"), printCommentsInside(cmtTbl, e.pexp_loc), Doc.rbrace})\n  | Pexp_construct({txt: Longident.Lident(\"::\")}, _) =>\n    let (expressions, spread) = ParsetreeViewer.collectListExpressions(e)\n    let spreadDoc = switch spread {\n    | Some(expr) =>\n      Doc.concat(list{\n        Doc.text(\",\"),\n        Doc.line,\n        Doc.dotdotdot,\n        {\n          let doc = printExpressionWithComments(expr, cmtTbl)\n          switch Parens.expr(expr) {\n          | Parens.Parenthesized => addParens(doc)\n          | Braced(braces) => printBraces(doc, expr, braces)\n          | Nothing => doc\n          }\n        },\n      })\n    | None => Doc.nil\n    }\n\n    Doc.group(\n      Doc.concat(list{\n        Doc.text(\"list{\"),\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(~sep=Doc.concat(list{Doc.text(\",\"), Doc.line}), List.map(expr => {\n                let doc = printExpressionWithComments(expr, cmtTbl)\n                switch Parens.expr(expr) {\n                | Parens.Parenthesized => addParens(doc)\n                | Braced(braces) => printBraces(doc, expr, braces)\n                | Nothing => doc\n                }\n              }, expressions)),\n            spreadDoc,\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rbrace,\n      }),\n    )\n  | Pexp_construct(longidentLoc, args) =>\n    let constr = printLongidentLocation(longidentLoc, cmtTbl)\n    let args = switch args {\n    | None => Doc.nil\n    | Some({pexp_desc: Pexp_construct({txt: Longident.Lident(\"()\")}, _)}) => Doc.text(\"()\")\n    /* Some((1, 2)) */\n    | Some({pexp_desc: Pexp_tuple(list{{pexp_desc: Pexp_tuple(_)} as arg})}) =>\n      Doc.concat(list{\n        Doc.lparen,\n        {\n          let doc = 
printExpressionWithComments(arg, cmtTbl)\n          switch Parens.expr(arg) {\n          | Parens.Parenthesized => addParens(doc)\n          | Braced(braces) => printBraces(doc, arg, braces)\n          | Nothing => doc\n          }\n        },\n        Doc.rparen,\n      })\n    | Some({pexp_desc: Pexp_tuple(args)}) =>\n      Doc.concat(list{\n        Doc.lparen,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(expr => {\n                let doc = printExpressionWithComments(expr, cmtTbl)\n                switch Parens.expr(expr) {\n                | Parens.Parenthesized => addParens(doc)\n                | Braced(braces) => printBraces(doc, expr, braces)\n                | Nothing => doc\n                }\n              }, args)),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rparen,\n      })\n    | Some(arg) =>\n      let argDoc = {\n        let doc = printExpressionWithComments(arg, cmtTbl)\n        switch Parens.expr(arg) {\n        | Parens.Parenthesized => addParens(doc)\n        | Braced(braces) => printBraces(doc, arg, braces)\n        | Nothing => doc\n        }\n      }\n\n      let shouldHug = ParsetreeViewer.isHuggableExpression(arg)\n      Doc.concat(list{\n        Doc.lparen,\n        if shouldHug {\n          argDoc\n        } else {\n          Doc.concat(list{\n            Doc.indent(Doc.concat(list{Doc.softLine, argDoc})),\n            Doc.trailingComma,\n            Doc.softLine,\n          })\n        },\n        Doc.rparen,\n      })\n    }\n\n    Doc.group(Doc.concat(list{constr, args}))\n  | Pexp_ident(path) => printLidentPath(path, cmtTbl)\n  | Pexp_tuple(exprs) =>\n    Doc.group(\n      Doc.concat(list{\n        Doc.lparen,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(~sep=Doc.concat(list{Doc.text(\",\"), Doc.line}), List.map(expr => {\n       
         let doc = printExpressionWithComments(expr, cmtTbl)\n                switch Parens.expr(expr) {\n                | Parens.Parenthesized => addParens(doc)\n                | Braced(braces) => printBraces(doc, expr, braces)\n                | Nothing => doc\n                }\n              }, exprs)),\n          }),\n        ),\n        Doc.ifBreaks(Doc.text(\",\"), Doc.nil),\n        Doc.softLine,\n        Doc.rparen,\n      }),\n    )\n  | Pexp_array(list{}) =>\n    Doc.concat(list{Doc.lbracket, printCommentsInside(cmtTbl, e.pexp_loc), Doc.rbracket})\n  | Pexp_array(exprs) =>\n    Doc.group(\n      Doc.concat(list{\n        Doc.lbracket,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(~sep=Doc.concat(list{Doc.text(\",\"), Doc.line}), List.map(expr => {\n                let doc = printExpressionWithComments(expr, cmtTbl)\n                switch Parens.expr(expr) {\n                | Parens.Parenthesized => addParens(doc)\n                | Braced(braces) => printBraces(doc, expr, braces)\n                | Nothing => doc\n                }\n              }, exprs)),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rbracket,\n      }),\n    )\n  | Pexp_variant(label, args) =>\n    let variantName = Doc.concat(list{Doc.text(\"#\"), printPolyVarIdent(label)})\n    let args = switch args {\n    | None => Doc.nil\n    | Some({pexp_desc: Pexp_construct({txt: Longident.Lident(\"()\")}, _)}) => Doc.text(\"()\")\n    /* #poly((1, 2) */\n    | Some({pexp_desc: Pexp_tuple(list{{pexp_desc: Pexp_tuple(_)} as arg})}) =>\n      Doc.concat(list{\n        Doc.lparen,\n        {\n          let doc = printExpressionWithComments(arg, cmtTbl)\n          switch Parens.expr(arg) {\n          | Parens.Parenthesized => addParens(doc)\n          | Braced(braces) => printBraces(doc, arg, braces)\n          | Nothing => doc\n          }\n        },\n        Doc.rparen,\n      })\n    | 
Some({pexp_desc: Pexp_tuple(args)}) =>\n      Doc.concat(list{\n        Doc.lparen,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(expr => {\n                let doc = printExpressionWithComments(expr, cmtTbl)\n                switch Parens.expr(expr) {\n                | Parens.Parenthesized => addParens(doc)\n                | Braced(braces) => printBraces(doc, expr, braces)\n                | Nothing => doc\n                }\n              }, args)),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rparen,\n      })\n    | Some(arg) =>\n      let argDoc = {\n        let doc = printExpressionWithComments(arg, cmtTbl)\n        switch Parens.expr(arg) {\n        | Parens.Parenthesized => addParens(doc)\n        | Braced(braces) => printBraces(doc, arg, braces)\n        | Nothing => doc\n        }\n      }\n\n      let shouldHug = ParsetreeViewer.isHuggableExpression(arg)\n      Doc.concat(list{\n        Doc.lparen,\n        if shouldHug {\n          argDoc\n        } else {\n          Doc.concat(list{\n            Doc.indent(Doc.concat(list{Doc.softLine, argDoc})),\n            Doc.trailingComma,\n            Doc.softLine,\n          })\n        },\n        Doc.rparen,\n      })\n    }\n\n    Doc.group(Doc.concat(list{variantName, args}))\n  | Pexp_record(rows, spreadExpr) =>\n    let spread = switch spreadExpr {\n    | None => Doc.nil\n    | Some(expr) =>\n      Doc.concat(list{\n        Doc.dotdotdot,\n        {\n          let doc = printExpressionWithComments(expr, cmtTbl)\n          switch Parens.expr(expr) {\n          | Parens.Parenthesized => addParens(doc)\n          | Braced(braces) => printBraces(doc, expr, braces)\n          | Nothing => doc\n          }\n        },\n        Doc.comma,\n        Doc.line,\n      })\n    }\n\n    /* If the record is written over multiple lines, break automatically\n     * `let x 
= {a: 1, b: 3}` -> same line, break when line-width exceeded\n     * `let x = {\n     *   a: 1,\n     *   b: 2,\n     *  }` -> record is written on multiple lines, break the group */\n    let forceBreak = e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum\n\n    let punningAllowed = switch (spreadExpr, rows) {\n    | (None, list{_}) => false /* disallow punning for single-element records */\n    | _ => true\n    }\n\n    Doc.breakableGroup(\n      ~forceBreak,\n      Doc.concat(list{\n        Doc.lbrace,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            spread,\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.text(\",\"), Doc.line}),\n              List.map(row => printRecordRow(row, cmtTbl, punningAllowed), rows),\n            ),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rbrace,\n      }),\n    )\n  | Pexp_extension(extension) =>\n    switch extension {\n    | (\n        {txt: \"bs.obj\" | \"obj\"},\n        PStr(list{{\n          pstr_loc: loc,\n          pstr_desc: Pstr_eval({pexp_desc: Pexp_record(rows, _)}, list{}),\n        }}),\n      ) =>\n      /* If the object is written over multiple lines, break automatically\n       * `let x = {\"a\": 1, \"b\": 3}` -> same line, break when line-width exceeded\n       * `let x = {\n       *   \"a\": 1,\n       *   \"b\": 2,\n       *  }` -> object is written on multiple lines, break the group */\n      let forceBreak = loc.loc_start.pos_lnum < loc.loc_end.pos_lnum\n\n      Doc.breakableGroup(\n        ~forceBreak,\n        Doc.concat(list{\n          Doc.lbrace,\n          Doc.indent(\n            Doc.concat(list{\n              Doc.softLine,\n              Doc.join(\n                ~sep=Doc.concat(list{Doc.text(\",\"), Doc.line}),\n                List.map(row => printBsObjectRow(row, cmtTbl), rows),\n              ),\n            }),\n          ),\n          Doc.trailingComma,\n          Doc.softLine,\n      
    Doc.rbrace,\n        }),\n      )\n    | extension => printExtension(~atModuleLvl=false, extension, cmtTbl)\n    }\n  | Pexp_apply(_) =>\n    if ParsetreeViewer.isUnaryExpression(e) {\n      printUnaryExpression(e, cmtTbl)\n    } else if ParsetreeViewer.isTemplateLiteral(e) {\n      printTemplateLiteral(e, cmtTbl)\n    } else if ParsetreeViewer.isBinaryExpression(e) {\n      printBinaryExpression(e, cmtTbl)\n    } else {\n      printPexpApply(e, cmtTbl)\n    }\n  | Pexp_unreachable => Doc.dot\n  | Pexp_field(expr, longidentLoc) =>\n    let lhs = {\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.fieldExpr(expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.concat(list{lhs, Doc.dot, printLidentPath(longidentLoc, cmtTbl)})\n  | Pexp_setfield(expr1, longidentLoc, expr2) =>\n    printSetFieldExpr(e.pexp_attributes, expr1, longidentLoc, expr2, e.pexp_loc, cmtTbl)\n  | Pexp_ifthenelse(_ifExpr, _thenExpr, _elseExpr) if ParsetreeViewer.isTernaryExpr(e) =>\n    let (parts, alternate) = ParsetreeViewer.collectTernaryParts(e)\n    let ternaryDoc = switch parts {\n    | list{(condition1, consequent1), ...rest} =>\n      Doc.group(\n        Doc.concat(list{\n          printTernaryOperand(condition1, cmtTbl),\n          Doc.indent(\n            Doc.concat(list{\n              Doc.line,\n              Doc.indent(\n                Doc.concat(list{Doc.text(\"? \"), printTernaryOperand(consequent1, cmtTbl)}),\n              ),\n              Doc.concat(\n                List.map(\n                  ((condition, consequent)) =>\n                    Doc.concat(list{\n                      Doc.line,\n                      Doc.text(\": \"),\n                      printTernaryOperand(condition, cmtTbl),\n                      Doc.line,\n                      Doc.text(\"? 
\"),\n                      printTernaryOperand(consequent, cmtTbl),\n                    }),\n                  rest,\n                ),\n              ),\n              Doc.line,\n              Doc.text(\": \"),\n              Doc.indent(printTernaryOperand(alternate, cmtTbl)),\n            }),\n          ),\n        }),\n      )\n    | _ => Doc.nil\n    }\n\n    let attrs = ParsetreeViewer.filterTernaryAttributes(e.pexp_attributes)\n    let needsParens = switch ParsetreeViewer.filterParsingAttrs(attrs) {\n    | list{} => false\n    | _ => true\n    }\n\n    Doc.concat(list{\n      printAttributes(attrs, cmtTbl),\n      if needsParens {\n        addParens(ternaryDoc)\n      } else {\n        ternaryDoc\n      },\n    })\n  | Pexp_ifthenelse(_ifExpr, _thenExpr, _elseExpr) =>\n    let (ifs, elseExpr) = ParsetreeViewer.collectIfExpressions(e)\n    printIfChain(e.pexp_attributes, ifs, elseExpr, cmtTbl)\n  | Pexp_while(expr1, expr2) =>\n    let condition = {\n      let doc = printExpressionWithComments(expr1, cmtTbl)\n      switch Parens.expr(expr1) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr1, braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.breakableGroup(\n      ~forceBreak=true,\n      Doc.concat(list{\n        Doc.text(\"while \"),\n        if ParsetreeViewer.isBlockExpr(expr1) {\n          condition\n        } else {\n          Doc.group(Doc.ifBreaks(addParens(condition), condition))\n        },\n        Doc.space,\n        printExpressionBlock(~braces=true, expr2, cmtTbl),\n      }),\n    )\n  | Pexp_for(pattern, fromExpr, toExpr, directionFlag, body) =>\n    Doc.breakableGroup(\n      ~forceBreak=true,\n      Doc.concat(list{\n        Doc.text(\"for \"),\n        printPattern(pattern, cmtTbl),\n        Doc.text(\" in \"),\n        {\n          let doc = printExpressionWithComments(fromExpr, cmtTbl)\n          switch Parens.expr(fromExpr) {\n          | Parens.Parenthesized => addParens(doc)\n    
      | Braced(braces) => printBraces(doc, fromExpr, braces)\n          | Nothing => doc\n          }\n        },\n        printDirectionFlag(directionFlag),\n        {\n          let doc = printExpressionWithComments(toExpr, cmtTbl)\n          switch Parens.expr(toExpr) {\n          | Parens.Parenthesized => addParens(doc)\n          | Braced(braces) => printBraces(doc, toExpr, braces)\n          | Nothing => doc\n          }\n        },\n        Doc.space,\n        printExpressionBlock(~braces=true, body, cmtTbl),\n      }),\n    )\n  | Pexp_constraint(\n      {pexp_desc: Pexp_pack(modExpr)},\n      {ptyp_desc: Ptyp_package(packageType), ptyp_loc},\n    ) =>\n    Doc.group(\n      Doc.concat(list{\n        Doc.text(\"module(\"),\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            printModExpr(modExpr, cmtTbl),\n            Doc.text(\": \"),\n            printComments(\n              printPackageType(~printModuleKeywordAndParens=false, packageType, cmtTbl),\n              cmtTbl,\n              ptyp_loc,\n            ),\n          }),\n        ),\n        Doc.softLine,\n        Doc.rparen,\n      }),\n    )\n\n  | Pexp_constraint(expr, typ) =>\n    let exprDoc = {\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.expr(expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.concat(list{exprDoc, Doc.text(\": \"), printTypExpr(typ, cmtTbl)})\n  | Pexp_letmodule({txt: _modName}, _modExpr, _expr) =>\n    printExpressionBlock(~braces=true, e, cmtTbl)\n  | Pexp_letexception(_extensionConstructor, _expr) => printExpressionBlock(~braces=true, e, cmtTbl)\n  | Pexp_assert(expr) =>\n    let rhs = {\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.lazyOrAssertExprRhs(expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, 
braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.concat(list{Doc.text(\"assert \"), rhs})\n  | Pexp_lazy(expr) =>\n    let rhs = {\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.lazyOrAssertExprRhs(expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.group(Doc.concat(list{Doc.text(\"lazy \"), rhs}))\n  | Pexp_open(_overrideFlag, _longidentLoc, _expr) => printExpressionBlock(~braces=true, e, cmtTbl)\n  | Pexp_pack(modExpr) =>\n    Doc.group(\n      Doc.concat(list{\n        Doc.text(\"module(\"),\n        Doc.indent(Doc.concat(list{Doc.softLine, printModExpr(modExpr, cmtTbl)})),\n        Doc.softLine,\n        Doc.rparen,\n      }),\n    )\n  | Pexp_sequence(_) => printExpressionBlock(~braces=true, e, cmtTbl)\n  | Pexp_let(_) => printExpressionBlock(~braces=true, e, cmtTbl)\n  | Pexp_fun(Nolabel, None, {ppat_desc: Ppat_var({txt: \"__x\"})}, {pexp_desc: Pexp_apply(_)}) =>\n    /* (__x) => f(a, __x, c) -----> f(a, _, c) */\n    printExpressionWithComments(ParsetreeViewer.rewriteUnderscoreApply(e), cmtTbl)\n  | Pexp_fun(_) | Pexp_newtype(_) =>\n    let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr(e)\n    let (uncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrsOnArrow)\n\n    let (returnExpr, typConstraint) = switch returnExpr.pexp_desc {\n    | Pexp_constraint(expr, typ) => (\n        {\n          ...expr,\n          pexp_attributes: List.concat(list{expr.pexp_attributes, returnExpr.pexp_attributes}),\n        },\n        Some(typ),\n      )\n    | _ => (returnExpr, None)\n    }\n\n    let hasConstraint = switch typConstraint {\n    | Some(_) => true\n    | None => false\n    }\n    let parametersDoc = printExprFunParameters(\n      ~inCallback=NoCallback,\n      ~uncurried,\n      ~hasConstraint,\n      parameters,\n      cmtTbl,\n    )\n\n    let returnExprDoc = {\n      let 
(optBraces, _) = ParsetreeViewer.processBracesAttr(returnExpr)\n      let shouldInline = switch (returnExpr.pexp_desc, optBraces) {\n      | (_, Some(_)) => true\n      | (\n          Pexp_array(_)\n          | Pexp_tuple(_)\n          | Pexp_construct(_, Some(_))\n          | Pexp_record(_),\n          _,\n        ) => true\n      | _ => false\n      }\n\n      let shouldIndent = switch returnExpr.pexp_desc {\n      | Pexp_sequence(_)\n      | Pexp_let(_)\n      | Pexp_letmodule(_)\n      | Pexp_letexception(_)\n      | Pexp_open(_) => false\n      | _ => true\n      }\n\n      let returnDoc = {\n        let doc = printExpressionWithComments(returnExpr, cmtTbl)\n        switch Parens.expr(returnExpr) {\n        | Parens.Parenthesized => addParens(doc)\n        | Braced(braces) => printBraces(doc, returnExpr, braces)\n        | Nothing => doc\n        }\n      }\n\n      if shouldInline {\n        Doc.concat(list{Doc.space, returnDoc})\n      } else {\n        Doc.group(\n          if shouldIndent {\n            Doc.indent(Doc.concat(list{Doc.line, returnDoc}))\n          } else {\n            Doc.concat(list{Doc.space, returnDoc})\n          },\n        )\n      }\n    }\n\n    let typConstraintDoc = switch typConstraint {\n    | Some(typ) =>\n      let typDoc = {\n        let doc = printTypExpr(typ, cmtTbl)\n        if Parens.arrowReturnTypExpr(typ) {\n          addParens(doc)\n        } else {\n          doc\n        }\n      }\n\n      Doc.concat(list{Doc.text(\": \"), typDoc})\n    | _ => Doc.nil\n    }\n\n    let attrs = printAttributes(attrs, cmtTbl)\n    Doc.group(\n      Doc.concat(list{attrs, parametersDoc, typConstraintDoc, Doc.text(\" =>\"), returnExprDoc}),\n    )\n  | Pexp_try(expr, cases) =>\n    let exprDoc = {\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.expr(expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, braces)\n      | Nothing => doc\n      }\n    }\n\n 
   Doc.concat(list{Doc.text(\"try \"), exprDoc, Doc.text(\" catch \"), printCases(cases, cmtTbl)})\n  | Pexp_match(_, list{_, _}) if ParsetreeViewer.isIfLetExpr(e) =>\n    let (ifs, elseExpr) = ParsetreeViewer.collectIfExpressions(e)\n    printIfChain(e.pexp_attributes, ifs, elseExpr, cmtTbl)\n  | Pexp_match(expr, cases) =>\n    let exprDoc = {\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.expr(expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.concat(list{Doc.text(\"switch \"), exprDoc, Doc.space, printCases(cases, cmtTbl)})\n  | Pexp_function(cases) => Doc.concat(list{Doc.text(\"x => switch x \"), printCases(cases, cmtTbl)})\n  | Pexp_coerce(expr, typOpt, typ) =>\n    let docExpr = printExpressionWithComments(expr, cmtTbl)\n    let docTyp = printTypExpr(typ, cmtTbl)\n    let ofType = switch typOpt {\n    | None => Doc.nil\n    | Some(typ1) => Doc.concat(list{Doc.text(\": \"), printTypExpr(typ1, cmtTbl)})\n    }\n\n    Doc.concat(list{Doc.lparen, docExpr, ofType, Doc.text(\" :> \"), docTyp, Doc.rparen})\n  | Pexp_send(parentExpr, label) =>\n    let parentDoc = {\n      let doc = printExpressionWithComments(parentExpr, cmtTbl)\n      switch Parens.unaryExprOperand(parentExpr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, parentExpr, braces)\n      | Nothing => doc\n      }\n    }\n\n    let member = {\n      let memberDoc = printComments(Doc.text(label.txt), cmtTbl, label.loc)\n      Doc.concat(list{Doc.text(\"\\\"\"), memberDoc, Doc.text(\"\\\"\")})\n    }\n\n    Doc.group(Doc.concat(list{parentDoc, Doc.lbracket, member, Doc.rbracket}))\n  | Pexp_new(_) => Doc.text(\"Pexp_new not impemented in printer\")\n  | Pexp_setinstvar(_) => Doc.text(\"Pexp_setinstvar not impemented in printer\")\n  | Pexp_override(_) => Doc.text(\"Pexp_override not impemented in printer\")\n  | 
Pexp_poly(_) => Doc.text(\"Pexp_poly not impemented in printer\")\n  | Pexp_object(_) => Doc.text(\"Pexp_object not impemented in printer\")\n  }\n\n  let shouldPrintItsOwnAttributes = switch e.pexp_desc {\n  | Pexp_apply(_)\n  | Pexp_fun(_)\n  | Pexp_newtype(_)\n  | Pexp_setfield(_)\n  | Pexp_ifthenelse(_) => true\n  | Pexp_match(_) if ParsetreeViewer.isIfLetExpr(e) => true\n  | Pexp_construct(_) if ParsetreeViewer.hasJsxAttribute(e.pexp_attributes) => true\n  | _ => false\n  }\n\n  switch e.pexp_attributes {\n  | list{} => printedExpression\n  | attrs if !shouldPrintItsOwnAttributes =>\n    Doc.group(Doc.concat(list{printAttributes(attrs, cmtTbl), printedExpression}))\n  | _ => printedExpression\n  }\n}\n\nand printPexpFun = (~inCallback, e, cmtTbl) => {\n  let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr(e)\n  let (uncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrsOnArrow)\n\n  let (returnExpr, typConstraint) = switch returnExpr.pexp_desc {\n  | Pexp_constraint(expr, typ) => (\n      {\n        ...expr,\n        pexp_attributes: List.concat(list{expr.pexp_attributes, returnExpr.pexp_attributes}),\n      },\n      Some(typ),\n    )\n  | _ => (returnExpr, None)\n  }\n\n  let parametersDoc = printExprFunParameters(\n    ~inCallback,\n    ~uncurried,\n    ~hasConstraint=switch typConstraint {\n    | Some(_) => true\n    | None => false\n    },\n    parameters,\n    cmtTbl,\n  )\n  let returnShouldIndent = switch returnExpr.pexp_desc {\n  | Pexp_sequence(_)\n  | Pexp_let(_)\n  | Pexp_letmodule(_)\n  | Pexp_letexception(_)\n  | Pexp_open(_) => false\n  | _ => true\n  }\n\n  let returnExprDoc = {\n    let (optBraces, _) = ParsetreeViewer.processBracesAttr(returnExpr)\n    let shouldInline = switch (returnExpr.pexp_desc, optBraces) {\n    | (_, Some(_)) => true\n    | (\n        Pexp_array(_)\n        | Pexp_tuple(_)\n        | Pexp_construct(_, Some(_))\n        | Pexp_record(_),\n        _,\n      ) => true\n    | _ => false\n    
}\n\n    let returnDoc = {\n      let doc = printExpressionWithComments(returnExpr, cmtTbl)\n      switch Parens.expr(returnExpr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, returnExpr, braces)\n      | Nothing => doc\n      }\n    }\n\n    if shouldInline {\n      Doc.concat(list{Doc.space, returnDoc})\n    } else {\n      Doc.group(\n        if returnShouldIndent {\n          Doc.concat(list{\n            Doc.indent(Doc.concat(list{Doc.line, returnDoc})),\n            switch inCallback {\n            | FitsOnOneLine | ArgumentsFitOnOneLine => Doc.softLine\n            | _ => Doc.nil\n            },\n          })\n        } else {\n          Doc.concat(list{Doc.space, returnDoc})\n        },\n      )\n    }\n  }\n\n  let typConstraintDoc = switch typConstraint {\n  | Some(typ) => Doc.concat(list{Doc.text(\": \"), printTypExpr(typ, cmtTbl)})\n  | _ => Doc.nil\n  }\n\n  Doc.concat(list{\n    printAttributes(attrs, cmtTbl),\n    parametersDoc,\n    typConstraintDoc,\n    Doc.text(\" =>\"),\n    returnExprDoc,\n  })\n}\n\nand printTernaryOperand = (expr, cmtTbl) => {\n  let doc = printExpressionWithComments(expr, cmtTbl)\n  switch Parens.ternaryOperand(expr) {\n  | Parens.Parenthesized => addParens(doc)\n  | Braced(braces) => printBraces(doc, expr, braces)\n  | Nothing => doc\n  }\n}\n\nand printSetFieldExpr = (attrs, lhs, longidentLoc, rhs, loc, cmtTbl) => {\n  let rhsDoc = {\n    let doc = printExpressionWithComments(rhs, cmtTbl)\n    switch Parens.setFieldExprRhs(rhs) {\n    | Parens.Parenthesized => addParens(doc)\n    | Braced(braces) => printBraces(doc, rhs, braces)\n    | Nothing => doc\n    }\n  }\n\n  let lhsDoc = {\n    let doc = printExpressionWithComments(lhs, cmtTbl)\n    switch Parens.fieldExpr(lhs) {\n    | Parens.Parenthesized => addParens(doc)\n    | Braced(braces) => printBraces(doc, lhs, braces)\n    | Nothing => doc\n    }\n  }\n\n  let shouldIndent = ParsetreeViewer.isBinaryExpression(rhs)\n  let 
doc = Doc.group(\n    Doc.concat(list{\n      lhsDoc,\n      Doc.dot,\n      printLidentPath(longidentLoc, cmtTbl),\n      Doc.text(\" =\"),\n      if shouldIndent {\n        Doc.group(Doc.indent(Doc.concat(list{Doc.line, rhsDoc})))\n      } else {\n        Doc.concat(list{Doc.space, rhsDoc})\n      },\n    }),\n  )\n  let doc = switch attrs {\n  | list{} => doc\n  | attrs => Doc.group(Doc.concat(list{printAttributes(attrs, cmtTbl), doc}))\n  }\n\n  printComments(doc, cmtTbl, loc)\n}\n\nand printTemplateLiteral = (expr, cmtTbl) => {\n  let tag = ref(\"js\")\n  let rec walkExpr = expr => {\n    open Parsetree\n    switch expr.pexp_desc {\n    | Pexp_apply(\n        {pexp_desc: Pexp_ident({txt: Longident.Lident(\"^\")})},\n        list{(Nolabel, arg1), (Nolabel, arg2)},\n      ) =>\n      let lhs = walkExpr(arg1)\n      let rhs = walkExpr(arg2)\n      Doc.concat(list{lhs, rhs})\n    | Pexp_constant(Pconst_string(txt, Some(prefix))) =>\n      tag := prefix\n      printStringContents(txt)\n    | _ =>\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      Doc.group(Doc.concat(list{Doc.text(\"${\"), Doc.indent(doc), Doc.rbrace}))\n    }\n  }\n\n  let content = walkExpr(expr)\n  Doc.concat(list{\n    if tag.contents == \"js\" {\n      Doc.nil\n    } else {\n      Doc.text(tag.contents)\n    },\n    Doc.text(\"`\"),\n    content,\n    Doc.text(\"`\"),\n  })\n}\n\nand printUnaryExpression = (expr, cmtTbl) => {\n  let printUnaryOperator = op =>\n    Doc.text(\n      switch op {\n      | \"~+\" => \"+\"\n      | \"~+.\" => \"+.\"\n      | \"~-\" => \"-\"\n      | \"~-.\" => \"-.\"\n      | \"not\" => \"!\"\n      | _ => assert false\n      },\n    )\n  switch expr.pexp_desc {\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},\n      list{(Nolabel, operand)},\n    ) =>\n    let printedOperand = {\n      let doc = printExpressionWithComments(operand, cmtTbl)\n      switch Parens.unaryExprOperand(operand) {\n      | 
Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, operand, braces)\n      | Nothing => doc\n      }\n    }\n\n    let doc = Doc.concat(list{printUnaryOperator(operator), printedOperand})\n    printComments(doc, cmtTbl, expr.pexp_loc)\n  | _ => assert false\n  }\n}\n\nand printBinaryExpression = (expr: Parsetree.expression, cmtTbl) => {\n  let printBinaryOperator = (~inlineRhs, operator) => {\n    let operatorTxt = switch operator {\n    | \"|.\" => \"->\"\n    | \"^\" => \"++\"\n    | \"=\" => \"==\"\n    | \"==\" => \"===\"\n    | \"<>\" => \"!=\"\n    | \"!=\" => \"!==\"\n    | txt => txt\n    }\n\n    let spacingBeforeOperator = if operator == \"|.\" {\n      Doc.softLine\n    } else if operator == \"|>\" {\n      Doc.line\n    } else {\n      Doc.space\n    }\n\n    let spacingAfterOperator = if operator == \"|.\" {\n      Doc.nil\n    } else if operator == \"|>\" {\n      Doc.space\n    } else if inlineRhs {\n      Doc.space\n    } else {\n      Doc.line\n    }\n\n    Doc.concat(list{spacingBeforeOperator, Doc.text(operatorTxt), spacingAfterOperator})\n  }\n\n  let printOperand = (~isLhs, expr, parentOperator) => {\n    let rec flatten = (~isLhs, expr, parentOperator) =>\n      if ParsetreeViewer.isBinaryExpression(expr) {\n        switch expr {\n        | {\n            pexp_desc: Pexp_apply(\n              {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},\n              list{(_, left), (_, right)},\n            ),\n          } =>\n          if (\n            ParsetreeViewer.flattenableOperators(parentOperator, operator) &&\n            !ParsetreeViewer.hasAttributes(expr.pexp_attributes)\n          ) {\n            let leftPrinted = flatten(~isLhs=true, left, operator)\n            let rightPrinted = {\n              let (_, rightAttrs) = ParsetreeViewer.partitionPrintableAttributes(\n                right.pexp_attributes,\n              )\n\n              let doc = printExpressionWithComments({...right, 
pexp_attributes: rightAttrs}, cmtTbl)\n\n              let doc = if Parens.flattenOperandRhs(parentOperator, right) {\n                Doc.concat(list{Doc.lparen, doc, Doc.rparen})\n              } else {\n                doc\n              }\n\n              let printableAttrs = ParsetreeViewer.filterPrintableAttributes(right.pexp_attributes)\n\n              let doc = Doc.concat(list{printAttributes(printableAttrs, cmtTbl), doc})\n              switch printableAttrs {\n              | list{} => doc\n              | _ => addParens(doc)\n              }\n            }\n\n            let doc = Doc.concat(list{\n              leftPrinted,\n              printBinaryOperator(~inlineRhs=false, operator),\n              rightPrinted,\n            })\n            let doc = if !isLhs && Parens.rhsBinaryExprOperand(operator, expr) {\n              Doc.concat(list{Doc.lparen, doc, Doc.rparen})\n            } else {\n              doc\n            }\n\n            printComments(doc, cmtTbl, expr.pexp_loc)\n          } else {\n            let doc = printExpressionWithComments({...expr, pexp_attributes: list{}}, cmtTbl)\n            let doc = if (\n              Parens.subBinaryExprOperand(parentOperator, operator) ||\n              (expr.pexp_attributes != list{} &&\n                (ParsetreeViewer.isBinaryExpression(expr) || ParsetreeViewer.isTernaryExpr(expr)))\n            ) {\n              Doc.concat(list{Doc.lparen, doc, Doc.rparen})\n            } else {\n              doc\n            }\n            Doc.concat(list{printAttributes(expr.pexp_attributes, cmtTbl), doc})\n          }\n        | _ => assert false\n        }\n      } else {\n        switch expr.pexp_desc {\n        | Pexp_apply(\n            {pexp_desc: Pexp_ident({txt: Longident.Lident(\"^\"), loc})},\n            list{(Nolabel, _), (Nolabel, _)},\n          ) if loc.loc_ghost =>\n          let doc = printTemplateLiteral(expr, cmtTbl)\n          printComments(doc, cmtTbl, expr.Parsetree.pexp_loc)\n        
| Pexp_setfield(lhs, field, rhs) =>\n          let doc = printSetFieldExpr(expr.pexp_attributes, lhs, field, rhs, expr.pexp_loc, cmtTbl)\n          if isLhs {\n            addParens(doc)\n          } else {\n            doc\n          }\n        | Pexp_apply(\n            {pexp_desc: Pexp_ident({txt: Longident.Lident(\"#=\")})},\n            list{(Nolabel, lhs), (Nolabel, rhs)},\n          ) =>\n          let rhsDoc = printExpressionWithComments(rhs, cmtTbl)\n          let lhsDoc = printExpressionWithComments(lhs, cmtTbl)\n          /* TODO: unify indentation of \"=\" */\n          let shouldIndent = ParsetreeViewer.isBinaryExpression(rhs)\n          let doc = Doc.group(\n            Doc.concat(list{\n              lhsDoc,\n              Doc.text(\" =\"),\n              if shouldIndent {\n                Doc.group(Doc.indent(Doc.concat(list{Doc.line, rhsDoc})))\n              } else {\n                Doc.concat(list{Doc.space, rhsDoc})\n              },\n            }),\n          )\n          let doc = switch expr.pexp_attributes {\n          | list{} => doc\n          | attrs => Doc.group(Doc.concat(list{printAttributes(attrs, cmtTbl), doc}))\n          }\n\n          if isLhs {\n            addParens(doc)\n          } else {\n            doc\n          }\n        | _ =>\n          let doc = printExpressionWithComments(expr, cmtTbl)\n          switch Parens.binaryExprOperand(~isLhs, expr) {\n          | Parens.Parenthesized => addParens(doc)\n          | Braced(braces) => printBraces(doc, expr, braces)\n          | Nothing => doc\n          }\n        }\n      }\n\n    flatten(~isLhs, expr, parentOperator)\n  }\n\n  switch expr.pexp_desc {\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident((\"|.\" | \"|>\") as op)})},\n      list{(Nolabel, lhs), (Nolabel, rhs)},\n    ) if !(ParsetreeViewer.isBinaryExpression(lhs) || ParsetreeViewer.isBinaryExpression(rhs)) =>\n    let lhsHasCommentBelow = hasCommentBelow(cmtTbl, lhs.pexp_loc)\n    let lhsDoc = 
printOperand(~isLhs=true, lhs, op)\n    let rhsDoc = printOperand(~isLhs=false, rhs, op)\n    Doc.group(\n      Doc.concat(list{\n        lhsDoc,\n        switch (lhsHasCommentBelow, op) {\n        | (true, \"|.\") => Doc.concat(list{Doc.softLine, Doc.text(\"->\")})\n        | (false, \"|.\") => Doc.text(\"->\")\n        | (true, \"|>\") => Doc.concat(list{Doc.line, Doc.text(\"|> \")})\n        | (false, \"|>\") => Doc.text(\" |> \")\n        | _ => Doc.nil\n        },\n        rhsDoc,\n      }),\n    )\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},\n      list{(Nolabel, lhs), (Nolabel, rhs)},\n    ) =>\n    let right = {\n      let operatorWithRhs = {\n        let rhsDoc = printOperand(~isLhs=false, rhs, operator)\n        Doc.concat(list{\n          printBinaryOperator(~inlineRhs=ParsetreeViewer.shouldInlineRhsBinaryExpr(rhs), operator),\n          rhsDoc,\n        })\n      }\n      if ParsetreeViewer.shouldIndentBinaryExpr(expr) {\n        Doc.group(Doc.indent(operatorWithRhs))\n      } else {\n        operatorWithRhs\n      }\n    }\n\n    let doc = Doc.group(Doc.concat(list{printOperand(~isLhs=true, lhs, operator), right}))\n    Doc.group(\n      Doc.concat(list{\n        printAttributes(expr.pexp_attributes, cmtTbl),\n        switch Parens.binaryExpr({\n          ...expr,\n          pexp_attributes: List.filter(attr =>\n            switch attr {\n            | ({Location.txt: \"ns.braces\"}, _) => false\n            | _ => true\n            }\n          , expr.pexp_attributes),\n        }) {\n        | Braced(bracesLoc) => printBraces(doc, expr, bracesLoc)\n        | Parenthesized => addParens(doc)\n        | Nothing => doc\n        },\n      }),\n    )\n  | _ => Doc.nil\n  }\n}\n\n/* callExpr(arg1, arg2) */\nand printPexpApply = (expr, cmtTbl) =>\n  switch expr.pexp_desc {\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident(\"##\")})},\n      list{(Nolabel, parentExpr), (Nolabel, memberExpr)},\n    ) 
=>\n    let parentDoc = {\n      let doc = printExpressionWithComments(parentExpr, cmtTbl)\n      switch Parens.unaryExprOperand(parentExpr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, parentExpr, braces)\n      | Nothing => doc\n      }\n    }\n\n    let member = {\n      let memberDoc = switch memberExpr.pexp_desc {\n      | Pexp_ident(lident) => printComments(printLongident(lident.txt), cmtTbl, memberExpr.pexp_loc)\n      | _ => printExpressionWithComments(memberExpr, cmtTbl)\n      }\n\n      Doc.concat(list{Doc.text(\"\\\"\"), memberDoc, Doc.text(\"\\\"\")})\n    }\n\n    Doc.group(\n      Doc.concat(list{\n        printAttributes(expr.pexp_attributes, cmtTbl),\n        parentDoc,\n        Doc.lbracket,\n        member,\n        Doc.rbracket,\n      }),\n    )\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Lident(\"#=\")})},\n      list{(Nolabel, lhs), (Nolabel, rhs)},\n    ) =>\n    let rhsDoc = {\n      let doc = printExpressionWithComments(rhs, cmtTbl)\n      switch Parens.expr(rhs) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, rhs, braces)\n      | Nothing => doc\n      }\n    }\n\n    /* TODO: unify indentation of \"=\" */\n    let shouldIndent = !ParsetreeViewer.isBracedExpr(rhs) && ParsetreeViewer.isBinaryExpression(rhs)\n    let doc = Doc.group(\n      Doc.concat(list{\n        printExpressionWithComments(lhs, cmtTbl),\n        Doc.text(\" =\"),\n        if shouldIndent {\n          Doc.group(Doc.indent(Doc.concat(list{Doc.line, rhsDoc})))\n        } else {\n          Doc.concat(list{Doc.space, rhsDoc})\n        },\n      }),\n    )\n    switch expr.pexp_attributes {\n    | list{} => doc\n    | attrs => Doc.group(Doc.concat(list{printAttributes(attrs, cmtTbl), doc}))\n    }\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident(\"Array\"), \"get\")})},\n      list{(Nolabel, parentExpr), (Nolabel, memberExpr)},\n    ) if 
!ParsetreeViewer.isRewrittenUnderscoreApplySugar(parentExpr) =>\n    /* Don't print the Array.get(_, 0) sugar a.k.a. (__x) => Array.get(__x, 0) as _[0] */\n    let member = {\n      let memberDoc = {\n        let doc = printExpressionWithComments(memberExpr, cmtTbl)\n        switch Parens.expr(memberExpr) {\n        | Parens.Parenthesized => addParens(doc)\n        | Braced(braces) => printBraces(doc, memberExpr, braces)\n        | Nothing => doc\n        }\n      }\n\n      let shouldInline = switch memberExpr.pexp_desc {\n      | Pexp_constant(_) | Pexp_ident(_) => true\n      | _ => false\n      }\n\n      if shouldInline {\n        memberDoc\n      } else {\n        Doc.concat(list{Doc.indent(Doc.concat(list{Doc.softLine, memberDoc})), Doc.softLine})\n      }\n    }\n\n    let parentDoc = {\n      let doc = printExpressionWithComments(parentExpr, cmtTbl)\n      switch Parens.unaryExprOperand(parentExpr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, parentExpr, braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.group(\n      Doc.concat(list{\n        printAttributes(expr.pexp_attributes, cmtTbl),\n        parentDoc,\n        Doc.lbracket,\n        member,\n        Doc.rbracket,\n      }),\n    )\n  | Pexp_apply(\n      {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident(\"Array\"), \"set\")})},\n      list{(Nolabel, parentExpr), (Nolabel, memberExpr), (Nolabel, targetExpr)},\n    ) =>\n    let member = {\n      let memberDoc = {\n        let doc = printExpressionWithComments(memberExpr, cmtTbl)\n        switch Parens.expr(memberExpr) {\n        | Parens.Parenthesized => addParens(doc)\n        | Braced(braces) => printBraces(doc, memberExpr, braces)\n        | Nothing => doc\n        }\n      }\n\n      let shouldInline = switch memberExpr.pexp_desc {\n      | Pexp_constant(_) | Pexp_ident(_) => true\n      | _ => false\n      }\n\n      if shouldInline {\n        memberDoc\n      } else {\n        
Doc.concat(list{Doc.indent(Doc.concat(list{Doc.softLine, memberDoc})), Doc.softLine})\n      }\n    }\n\n    let shouldIndentTargetExpr = if ParsetreeViewer.isBracedExpr(targetExpr) {\n      false\n    } else {\n      ParsetreeViewer.isBinaryExpression(targetExpr) ||\n      switch targetExpr {\n      | {\n          pexp_attributes: list{({Location.txt: \"ns.ternary\"}, _)},\n          pexp_desc: Pexp_ifthenelse(ifExpr, _, _),\n        } =>\n        ParsetreeViewer.isBinaryExpression(ifExpr) ||\n        ParsetreeViewer.hasAttributes(ifExpr.pexp_attributes)\n      | {pexp_desc: Pexp_newtype(_)} => false\n      | e => ParsetreeViewer.hasAttributes(e.pexp_attributes) || ParsetreeViewer.isArrayAccess(e)\n      }\n    }\n\n    let targetExpr = {\n      let doc = printExpressionWithComments(targetExpr, cmtTbl)\n      switch Parens.expr(targetExpr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, targetExpr, braces)\n      | Nothing => doc\n      }\n    }\n\n    let parentDoc = {\n      let doc = printExpressionWithComments(parentExpr, cmtTbl)\n      switch Parens.unaryExprOperand(parentExpr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, parentExpr, braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.group(\n      Doc.concat(list{\n        printAttributes(expr.pexp_attributes, cmtTbl),\n        parentDoc,\n        Doc.lbracket,\n        member,\n        Doc.rbracket,\n        Doc.text(\" =\"),\n        if shouldIndentTargetExpr {\n          Doc.indent(Doc.concat(list{Doc.line, targetExpr}))\n        } else {\n          Doc.concat(list{Doc.space, targetExpr})\n        },\n      }),\n    )\n  /* TODO: cleanup, are those branches even remotely performant? 
*/\n  | Pexp_apply({pexp_desc: Pexp_ident(lident)}, args) if ParsetreeViewer.isJsxExpression(expr) =>\n    printJsxExpression(lident, args, cmtTbl)\n  | Pexp_apply(callExpr, args) =>\n    let args = List.map(((lbl, arg)) => (lbl, ParsetreeViewer.rewriteUnderscoreApply(arg)), args)\n\n    let (uncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(expr.pexp_attributes)\n\n    let callExprDoc = {\n      let doc = printExpressionWithComments(callExpr, cmtTbl)\n      switch Parens.callExpr(callExpr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, callExpr, braces)\n      | Nothing => doc\n      }\n    }\n\n    if ParsetreeViewer.requiresSpecialCallbackPrintingFirstArg(args) {\n      let argsDoc = printArgumentsWithCallbackInFirstPosition(~uncurried, args, cmtTbl)\n\n      Doc.concat(list{printAttributes(attrs, cmtTbl), callExprDoc, argsDoc})\n    } else if ParsetreeViewer.requiresSpecialCallbackPrintingLastArg(args) {\n      let argsDoc = printArgumentsWithCallbackInLastPosition(~uncurried, args, cmtTbl)\n\n      /*\n       * Fixes the following layout (the `[` and `]` should break):\n       *   [fn(x => {\n       *     let _ = x\n       *   }), fn(y => {\n       *     let _ = y\n       *   }), fn(z => {\n       *     let _ = z\n       *   })]\n       * See `Doc.willBreak documentation in interface file for more context.\n       * Context:\n       *  https://github.com/rescript-lang/syntax/issues/111\n       *  https://github.com/rescript-lang/syntax/issues/166\n       */\n      let maybeBreakParent = if Doc.willBreak(argsDoc) {\n        Doc.breakParent\n      } else {\n        Doc.nil\n      }\n\n      Doc.concat(list{maybeBreakParent, printAttributes(attrs, cmtTbl), callExprDoc, argsDoc})\n    } else {\n      let argsDoc = printArguments(~uncurried, args, cmtTbl)\n      Doc.concat(list{printAttributes(attrs, cmtTbl), callExprDoc, argsDoc})\n    }\n  | _ => assert false\n  }\n\nand printJsxExpression = (lident, 
args, cmtTbl) => {\n  let name = printJsxName(lident)\n  let (formattedProps, children) = printJsxProps(args, cmtTbl)\n  /* <div className=\"test\" /> */\n  let isSelfClosing = switch children {\n  | Some({Parsetree.pexp_desc: Pexp_construct({txt: Longident.Lident(\"[]\")}, None)}) => true\n  | _ => false\n  }\n\n  Doc.group(\n    Doc.concat(list{\n      Doc.group(\n        Doc.concat(list{\n          printComments(Doc.concat(list{Doc.lessThan, name}), cmtTbl, lident.Asttypes.loc),\n          formattedProps,\n          if isSelfClosing {\n            Doc.concat(list{Doc.line, Doc.text(\"/>\")})\n          } else {\n            Doc.nil\n          },\n        }),\n      ),\n      if isSelfClosing {\n        Doc.nil\n      } else {\n        Doc.concat(list{\n          Doc.greaterThan,\n          Doc.indent(\n            Doc.concat(list{\n              Doc.line,\n              switch children {\n              | Some(childrenExpression) => printJsxChildren(childrenExpression, cmtTbl)\n              | None => Doc.nil\n              },\n            }),\n          ),\n          Doc.line,\n          Doc.text(\"</\"),\n          name,\n          Doc.greaterThan,\n        })\n      },\n    }),\n  )\n}\n\nand printJsxFragment = (expr, cmtTbl) => {\n  let opening = Doc.text(\"<>\")\n  let closing = Doc.text(\"</>\")\n  /* let (children, _) = ParsetreeViewer.collectListExpressions expr in */\n  Doc.group(\n    Doc.concat(list{\n      opening,\n      switch expr.pexp_desc {\n      | Pexp_construct({txt: Longident.Lident(\"[]\")}, None) => Doc.nil\n      | _ => Doc.indent(Doc.concat(list{Doc.line, printJsxChildren(expr, cmtTbl)}))\n      },\n      Doc.line,\n      closing,\n    }),\n  )\n}\n\nand printJsxChildren = (childrenExpr: Parsetree.expression, cmtTbl) =>\n  switch childrenExpr.pexp_desc {\n  | Pexp_construct({txt: Longident.Lident(\"::\")}, _) =>\n    let (children, _) = ParsetreeViewer.collectListExpressions(childrenExpr)\n    Doc.group(Doc.join(~sep=Doc.line, 
List.map((expr: Parsetree.expression) => {\n          let leadingLineCommentPresent = hasLeadingLineComment(cmtTbl, expr.pexp_loc)\n          let exprDoc = printExpressionWithComments(expr, cmtTbl)\n          switch Parens.jsxChildExpr(expr) {\n          | Parenthesized | Braced(_) =>\n            /* {(20: int)} make sure that we also protect the expression inside */\n            let innerDoc = if Parens.bracedExpr(expr) {\n              addParens(exprDoc)\n            } else {\n              exprDoc\n            }\n            if leadingLineCommentPresent {\n              addBraces(innerDoc)\n            } else {\n              Doc.concat(list{Doc.lbrace, innerDoc, Doc.rbrace})\n            }\n          | Nothing => exprDoc\n          }\n        }, children)))\n  | _ =>\n    let leadingLineCommentPresent = hasLeadingLineComment(cmtTbl, childrenExpr.pexp_loc)\n    let exprDoc = printExpressionWithComments(childrenExpr, cmtTbl)\n    Doc.concat(list{\n      Doc.dotdotdot,\n      switch Parens.jsxChildExpr(childrenExpr) {\n      | Parenthesized | Braced(_) =>\n        let innerDoc = if Parens.bracedExpr(childrenExpr) {\n          addParens(exprDoc)\n        } else {\n          exprDoc\n        }\n        if leadingLineCommentPresent {\n          addBraces(innerDoc)\n        } else {\n          Doc.concat(list{Doc.lbrace, innerDoc, Doc.rbrace})\n        }\n      | Nothing => exprDoc\n      },\n    })\n  }\n\nand printJsxProps = (args, cmtTbl): (Doc.t, option<Parsetree.expression>) => {\n  let rec loop = (props, args) =>\n    switch args {\n    | list{} => (Doc.nil, None)\n    | list{\n        (Asttypes.Labelled(\"children\"), children),\n        (\n          Asttypes.Nolabel,\n          {Parsetree.pexp_desc: Pexp_construct({txt: Longident.Lident(\"()\")}, None)},\n        ),\n      } =>\n      let formattedProps = Doc.indent(\n        switch props {\n        | list{} => Doc.nil\n        | props => Doc.concat(list{Doc.line, Doc.group(Doc.join(~sep=Doc.line, props |> 
List.rev))})\n        },\n      )\n      (formattedProps, Some(children))\n    | list{arg, ...args} =>\n      let propDoc = printJsxProp(arg, cmtTbl)\n      loop(list{propDoc, ...props}, args)\n    }\n\n  loop(list{}, args)\n}\n\nand printJsxProp = (arg, cmtTbl) =>\n  switch arg {\n  | (\n      (Asttypes.Labelled(lblTxt) | Optional(lblTxt)) as lbl,\n      {\n        Parsetree.pexp_attributes: list{({Location.txt: \"ns.namedArgLoc\", loc: argLoc}, _)},\n        pexp_desc: Pexp_ident({txt: Longident.Lident(ident)}),\n      },\n    ) if lblTxt == ident /* jsx punning */ =>\n    switch lbl {\n    | Nolabel => Doc.nil\n    | Labelled(_lbl) => printComments(printIdentLike(ident), cmtTbl, argLoc)\n    | Optional(_lbl) =>\n      let doc = Doc.concat(list{Doc.question, printIdentLike(ident)})\n      printComments(doc, cmtTbl, argLoc)\n    }\n  | (\n      (Asttypes.Labelled(lblTxt) | Optional(lblTxt)) as lbl,\n      {Parsetree.pexp_attributes: list{}, pexp_desc: Pexp_ident({txt: Longident.Lident(ident)})},\n    ) if lblTxt == ident /* jsx punning when printing from Reason */ =>\n    switch lbl {\n    | Nolabel => Doc.nil\n    | Labelled(_lbl) => printIdentLike(ident)\n    | Optional(_lbl) => Doc.concat(list{Doc.question, printIdentLike(ident)})\n    }\n  | (lbl, expr) =>\n    let (argLoc, expr) = switch expr.pexp_attributes {\n    | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ...attrs} => (\n        loc,\n        {...expr, pexp_attributes: attrs},\n      )\n    | _ => (Location.none, expr)\n    }\n\n    let lblDoc = switch lbl {\n    | Asttypes.Labelled(lbl) =>\n      let lbl = printComments(printIdentLike(lbl), cmtTbl, argLoc)\n      Doc.concat(list{lbl, Doc.equal})\n    | Asttypes.Optional(lbl) =>\n      let lbl = printComments(printIdentLike(lbl), cmtTbl, argLoc)\n      Doc.concat(list{lbl, Doc.equal, Doc.question})\n    | Nolabel => Doc.nil\n    }\n\n    let exprDoc = {\n      let leadingLineCommentPresent = hasLeadingLineComment(cmtTbl, expr.pexp_loc)\n      let 
doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.jsxPropExpr(expr) {\n      | Parenthesized | Braced(_) =>\n        /* {(20: int)} make sure that we also protect the expression inside */\n        let innerDoc = if Parens.bracedExpr(expr) {\n          addParens(doc)\n        } else {\n          doc\n        }\n        if leadingLineCommentPresent {\n          addBraces(innerDoc)\n        } else {\n          Doc.concat(list{Doc.lbrace, innerDoc, Doc.rbrace})\n        }\n      | _ => doc\n      }\n    }\n\n    let fullLoc = {...argLoc, loc_end: expr.pexp_loc.loc_end}\n    printComments(Doc.concat(list{lblDoc, exprDoc}), cmtTbl, fullLoc)\n  }\n\n/* div -> div.\n * Navabar.createElement -> Navbar\n * Staff.Users.createElement -> Staff.Users */\nand printJsxName = ({txt: lident}) => {\n  let rec flatten = (acc, lident) =>\n    switch lident {\n    | Longident.Lident(txt) => list{txt, ...acc}\n    | Ldot(lident, txt) =>\n      let acc = if txt == \"createElement\" {\n        acc\n      } else {\n        list{txt, ...acc}\n      }\n      flatten(acc, lident)\n    | _ => acc\n    }\n\n  switch lident {\n  | Longident.Lident(txt) => Doc.text(txt)\n  | _ as lident =>\n    let segments = flatten(list{}, lident)\n    Doc.join(~sep=Doc.dot, List.map(Doc.text, segments))\n  }\n}\n\nand printArgumentsWithCallbackInFirstPosition = (~uncurried, args, cmtTbl) => {\n  /* Because the same subtree gets printed twice, we need to copy the cmtTbl.\n   * consumed comments need to be marked not-consumed and reprinted…\n   * Cheng's different comment algorithm will solve this. 
*/\n  let cmtTblCopy = CommentTable.copy(cmtTbl)\n  let (callback, printedArgs) = switch args {\n  | list{(lbl, expr), ...args} =>\n    let lblDoc = switch lbl {\n    | Asttypes.Nolabel => Doc.nil\n    | Asttypes.Labelled(txt) => Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal})\n    | Asttypes.Optional(txt) =>\n      Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal, Doc.question})\n    }\n\n    let callback = Doc.concat(list{lblDoc, printPexpFun(~inCallback=FitsOnOneLine, expr, cmtTbl)})\n    let callback = printComments(callback, cmtTbl, expr.pexp_loc)\n    let printedArgs = Doc.join(\n      ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n      List.map(arg => printArgument(arg, cmtTbl), args),\n    )\n\n    (callback, printedArgs)\n  | _ => assert false\n  }\n\n  /* Thing.map((arg1, arg2) => MyModuleBlah.toList(argument), foo) */\n  /* Thing.map((arg1, arg2) => {\n   *   MyModuleBlah.toList(argument)\n   * }, longArgumet, veryLooooongArgument)\n   */\n  let fitsOnOneLine = Doc.concat(list{\n    if uncurried {\n      Doc.text(\"(. 
\")\n    } else {\n      Doc.lparen\n    },\n    callback,\n    Doc.comma,\n    Doc.line,\n    printedArgs,\n    Doc.rparen,\n  })\n\n  /* Thing.map(\n   *   (param1, parm2) => doStuff(param1, parm2),\n   *   arg1,\n   *   arg2,\n   *   arg3,\n   * )\n   */\n  let breakAllArgs = printArguments(~uncurried, args, cmtTblCopy)\n\n  /* Sometimes one of the non-callback arguments will break.\n   * There might be a single line comment in there, or a multiline string etc.\n   * showDialog(\n   *   ~onConfirm={() => ()},\n   *   `\n   *   Do you really want to leave this workspace?\n   *   Some more text with detailed explanations...\n   *   `,\n   *   ~danger=true,\n   *   // comment   --> here a single line comment\n   *   ~confirmText=\"Yes, I am sure!\",\n   *  )\n   * In this case, we always want the arguments broken over multiple lines,\n   * like a normal function call.\n   */\n  if Doc.willBreak(printedArgs) {\n    breakAllArgs\n  } else {\n    Doc.customLayout(list{fitsOnOneLine, breakAllArgs})\n  }\n}\n\nand printArgumentsWithCallbackInLastPosition = (~uncurried, args, cmtTbl) => {\n  /* Because the same subtree gets printed twice, we need to copy the cmtTbl.\n   * consumed comments need to be marked not-consumed and reprinted…\n   * Cheng's different comment algorithm will solve this. 
*/\n  let cmtTblCopy = CommentTable.copy(cmtTbl)\n  let cmtTblCopy2 = CommentTable.copy(cmtTbl)\n  let rec loop = (acc, args) =>\n    switch args {\n    | list{} => (Doc.nil, Doc.nil, Doc.nil)\n    | list{(lbl, expr)} =>\n      let lblDoc = switch lbl {\n      | Asttypes.Nolabel => Doc.nil\n      | Asttypes.Labelled(txt) => Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal})\n      | Asttypes.Optional(txt) =>\n        Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal, Doc.question})\n      }\n\n      let callbackFitsOnOneLine = {\n        let pexpFunDoc = printPexpFun(~inCallback=FitsOnOneLine, expr, cmtTbl)\n        let doc = Doc.concat(list{lblDoc, pexpFunDoc})\n        printComments(doc, cmtTbl, expr.pexp_loc)\n      }\n\n      let callbackArgumentsFitsOnOneLine = {\n        let pexpFunDoc = printPexpFun(~inCallback=ArgumentsFitOnOneLine, expr, cmtTblCopy)\n        let doc = Doc.concat(list{lblDoc, pexpFunDoc})\n        printComments(doc, cmtTblCopy, expr.pexp_loc)\n      }\n\n      (Doc.concat(List.rev(acc)), callbackFitsOnOneLine, callbackArgumentsFitsOnOneLine)\n    | list{arg, ...args} =>\n      let argDoc = printArgument(arg, cmtTbl)\n      loop(list{Doc.line, Doc.comma, argDoc, ...acc}, args)\n    }\n\n  let (printedArgs, callback, callback2) = loop(list{}, args)\n\n  /* Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument)) */\n  let fitsOnOneLine = Doc.concat(list{\n    if uncurried {\n      Doc.text(\"(.\")\n    } else {\n      Doc.lparen\n    },\n    printedArgs,\n    callback,\n    Doc.rparen,\n  })\n\n  /* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) =>\n   *   MyModuleBlah.toList(argument)\n   * )\n   */\n  let arugmentsFitOnOneLine = Doc.concat(list{\n    if uncurried {\n      Doc.text(\"(.\")\n    } else {\n      Doc.lparen\n    },\n    printedArgs,\n    Doc.breakableGroup(~forceBreak=true, callback2),\n    Doc.rparen,\n  })\n\n  /* Thing.map(\n   *   arg1,\n   *   arg2,\n   *   arg3,\n   *   (param1, parm2) => 
doStuff(param1, parm2)\n   * )\n   */\n  let breakAllArgs = printArguments(~uncurried, args, cmtTblCopy2)\n\n  /* Sometimes one of the non-callback arguments will break.\n   * There might be a single line comment in there, or a multiline string etc.\n   * showDialog(\n   *   `\n   *   Do you really want to leave this workspace?\n   *   Some more text with detailed explanations...\n   *   `,\n   *   ~danger=true,\n   *   // comment   --> here a single line comment\n   *   ~confirmText=\"Yes, I am sure!\",\n   *   ~onConfirm={() => ()},\n   *  )\n   * In this case, we always want the arguments broken over multiple lines,\n   * like a normal function call.\n   */\n  if Doc.willBreak(printedArgs) {\n    breakAllArgs\n  } else {\n    Doc.customLayout(list{fitsOnOneLine, arugmentsFitOnOneLine, breakAllArgs})\n  }\n}\n\nand printArguments = (~uncurried, args: list<(Asttypes.arg_label, Parsetree.expression)>, cmtTbl) =>\n  switch args {\n  | list{(Nolabel, {pexp_desc: Pexp_construct({txt: Longident.Lident(\"()\")}, _), pexp_loc: loc})} =>\n    /* See \"parseCallExpr\", ghost unit expression is used the implement\n     * arity zero vs arity one syntax.\n     * Related: https://github.com/rescript-lang/syntax/issues/138 */\n    switch (uncurried, loc.loc_ghost) {\n    | (true, true) => Doc.text(\"(.)\") /* arity zero */\n    | (true, false) => Doc.text(\"(. ())\") /* arity one */\n    | _ => Doc.text(\"()\")\n    }\n  | list{(Nolabel, arg)} if ParsetreeViewer.isHuggableExpression(arg) =>\n    let argDoc = {\n      let doc = printExpressionWithComments(arg, cmtTbl)\n      switch Parens.expr(arg) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, arg, braces)\n      | Nothing => doc\n      }\n    }\n\n    Doc.concat(list{\n      if uncurried {\n        Doc.text(\"(. 
\")\n      } else {\n        Doc.lparen\n      },\n      argDoc,\n      Doc.rparen,\n    })\n  | args =>\n    Doc.group(\n      Doc.concat(list{\n        if uncurried {\n          Doc.text(\"(.\")\n        } else {\n          Doc.lparen\n        },\n        Doc.indent(\n          Doc.concat(list{\n            if uncurried {\n              Doc.line\n            } else {\n              Doc.softLine\n            },\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n              List.map(arg => printArgument(arg, cmtTbl), args),\n            ),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rparen,\n      }),\n    )\n  }\n\n/*\n * argument ::=\n *   | _                            (* syntax sugar *)\n *   | expr\n *   | expr : type\n *   | ~ label-name\n *   | ~ label-name\n *   | ~ label-name ?\n *   | ~ label-name =   expr\n *   | ~ label-name =   _           (* syntax sugar *)\n *   | ~ label-name =   expr : type\n *   | ~ label-name = ? expr\n *   | ~ label-name = ? _           (* syntax sugar *)\n *   | ~ label-name = ? 
expr : type */\nand printArgument = ((argLbl, arg), cmtTbl) =>\n  switch (argLbl, arg) {\n  /* ~a (punned) */\n  | (\n      Asttypes.Labelled(lbl),\n      {\n        pexp_desc: Pexp_ident({txt: Longident.Lident(name)}),\n        pexp_attributes: list{} | list{({Location.txt: \"ns.namedArgLoc\"}, _)},\n      } as argExpr,\n    ) if lbl == name && !ParsetreeViewer.isBracedExpr(argExpr) =>\n    let loc = switch arg.pexp_attributes {\n    | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ..._} => loc\n    | _ => arg.pexp_loc\n    }\n\n    let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl)})\n    printComments(doc, cmtTbl, loc)\n\n  /* ~a: int (punned) */\n  | (\n      Asttypes.Labelled(lbl),\n      {\n        pexp_desc: Pexp_constraint(\n          {pexp_desc: Pexp_ident({txt: Longident.Lident(name)})} as argExpr,\n          typ,\n        ),\n        pexp_loc,\n        pexp_attributes: (list{} | list{({Location.txt: \"ns.namedArgLoc\"}, _)}) as attrs,\n      },\n    ) if lbl == name && !ParsetreeViewer.isBracedExpr(argExpr) =>\n    let loc = switch attrs {\n    | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ..._} => {...loc, loc_end: pexp_loc.loc_end}\n    | _ => arg.pexp_loc\n    }\n\n    let doc = Doc.concat(list{\n      Doc.tilde,\n      printIdentLike(lbl),\n      Doc.text(\": \"),\n      printTypExpr(typ, cmtTbl),\n    })\n    printComments(doc, cmtTbl, loc)\n  /* ~a? 
(optional lbl punned) */\n  | (\n      Asttypes.Optional(lbl),\n      {\n        pexp_desc: Pexp_ident({txt: Longident.Lident(name)}),\n        pexp_attributes: list{} | list{({Location.txt: \"ns.namedArgLoc\"}, _)},\n      },\n    ) if lbl == name =>\n    let loc = switch arg.pexp_attributes {\n    | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ..._} => loc\n    | _ => arg.pexp_loc\n    }\n\n    let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl), Doc.question})\n    printComments(doc, cmtTbl, loc)\n  | (_lbl, expr) =>\n    let (argLoc, expr) = switch expr.pexp_attributes {\n    | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ...attrs} => (\n        loc,\n        {...expr, pexp_attributes: attrs},\n      )\n    | _ => (expr.pexp_loc, expr)\n    }\n\n    let printedLbl = switch argLbl {\n    | Asttypes.Nolabel => Doc.nil\n    | Asttypes.Labelled(lbl) =>\n      let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl), Doc.equal})\n      printComments(doc, cmtTbl, argLoc)\n    | Asttypes.Optional(lbl) =>\n      let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl), Doc.equal, Doc.question})\n      printComments(doc, cmtTbl, argLoc)\n    }\n\n    let printedExpr = {\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.expr(expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, braces)\n      | Nothing => doc\n      }\n    }\n\n    let loc = {...argLoc, loc_end: expr.pexp_loc.loc_end}\n    let doc = Doc.concat(list{printedLbl, printedExpr})\n    printComments(doc, cmtTbl, loc)\n  }\n\nand printCases = (cases: list<Parsetree.case>, cmtTbl) =>\n  Doc.breakableGroup(\n    ~forceBreak=true,\n    Doc.concat(list{\n      Doc.lbrace,\n      Doc.concat(list{\n        Doc.line,\n        printList(\n          ~getLoc=n => {...n.Parsetree.pc_lhs.ppat_loc, loc_end: n.pc_rhs.pexp_loc.loc_end},\n          ~print=printCase,\n          ~nodes=cases,\n          cmtTbl,\n        ),\n      
}),\n      Doc.line,\n      Doc.rbrace,\n    }),\n  )\n\nand printCase = (case: Parsetree.case, cmtTbl) => {\n  let rhs = switch case.pc_rhs.pexp_desc {\n  | Pexp_let(_)\n  | Pexp_letmodule(_)\n  | Pexp_letexception(_)\n  | Pexp_open(_)\n  | Pexp_sequence(_) =>\n    printExpressionBlock(~braces=ParsetreeViewer.isBracedExpr(case.pc_rhs), case.pc_rhs, cmtTbl)\n  | _ =>\n    let doc = printExpressionWithComments(case.pc_rhs, cmtTbl)\n    switch Parens.expr(case.pc_rhs) {\n    | Parenthesized => addParens(doc)\n    | _ => doc\n    }\n  }\n\n  let guard = switch case.pc_guard {\n  | None => Doc.nil\n  | Some(expr) =>\n    Doc.group(\n      Doc.concat(list{Doc.line, Doc.text(\"if \"), printExpressionWithComments(expr, cmtTbl)}),\n    )\n  }\n\n  let shouldInlineRhs = switch case.pc_rhs.pexp_desc {\n  | Pexp_construct({txt: Longident.Lident(\"()\" | \"true\" | \"false\")}, _)\n  | Pexp_constant(_)\n  | Pexp_ident(_) => true\n  | _ if ParsetreeViewer.isHuggableRhs(case.pc_rhs) => true\n  | _ => false\n  }\n\n  let shouldIndentPattern = switch case.pc_lhs.ppat_desc {\n  | Ppat_or(_) => false\n  | _ => true\n  }\n\n  let patternDoc = {\n    let doc = printPattern(case.pc_lhs, cmtTbl)\n    switch case.pc_lhs.ppat_desc {\n    | Ppat_constraint(_) => addParens(doc)\n    | _ => doc\n    }\n  }\n\n  let content = Doc.concat(list{\n    if shouldIndentPattern {\n      Doc.indent(patternDoc)\n    } else {\n      patternDoc\n    },\n    Doc.indent(guard),\n    Doc.text(\" =>\"),\n    Doc.indent(\n      Doc.concat(list{\n        if shouldInlineRhs {\n          Doc.space\n        } else {\n          Doc.line\n        },\n        rhs,\n      }),\n    ),\n  })\n  Doc.group(Doc.concat(list{Doc.text(\"| \"), content}))\n}\n\nand printExprFunParameters = (~inCallback, ~uncurried, ~hasConstraint, parameters, cmtTbl) =>\n  switch parameters {\n  /* let f = _ => () */\n  | list{ParsetreeViewer.Parameter({\n      attrs: list{},\n      lbl: Asttypes.Nolabel,\n      defaultExpr: None,\n      pat: 
{Parsetree.ppat_desc: Ppat_any},\n    })} if !uncurried =>\n    if hasConstraint {\n      Doc.text(\"(_)\")\n    } else {\n      Doc.text(\"_\")\n    }\n  /* let f = a => () */\n  | list{ParsetreeViewer.Parameter({\n      attrs: list{},\n      lbl: Asttypes.Nolabel,\n      defaultExpr: None,\n      pat: {Parsetree.ppat_desc: Ppat_var(stringLoc)},\n    })} if !uncurried =>\n    let txtDoc = {\n      let var = printIdentLike(stringLoc.txt)\n      if hasConstraint {\n        addParens(var)\n      } else {\n        var\n      }\n    }\n\n    printComments(txtDoc, cmtTbl, stringLoc.loc)\n  /* let f = () => () */\n  | list{ParsetreeViewer.Parameter({\n      attrs: list{},\n      lbl: Asttypes.Nolabel,\n      defaultExpr: None,\n      pat: {ppat_desc: Ppat_construct({txt: Longident.Lident(\"()\")}, None)},\n    })} if !uncurried =>\n    Doc.text(\"()\")\n  /* let f = (~greeting, ~from as hometown, ~x=?) => () */\n  | parameters =>\n    let inCallback = switch inCallback {\n    | FitsOnOneLine => true\n    | _ => false\n    }\n\n    let lparen = if uncurried {\n      Doc.text(\"(. 
\")\n    } else {\n      Doc.lparen\n    }\n    let shouldHug = ParsetreeViewer.parametersShouldHug(parameters)\n    let printedParamaters = Doc.concat(list{\n      if shouldHug || inCallback {\n        Doc.nil\n      } else {\n        Doc.softLine\n      },\n      Doc.join(\n        ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n        List.map(p => printExpFunParameter(p, cmtTbl), parameters),\n      ),\n    })\n    Doc.group(\n      Doc.concat(list{\n        lparen,\n        if shouldHug || inCallback {\n          printedParamaters\n        } else {\n          Doc.concat(list{Doc.indent(printedParamaters), Doc.trailingComma, Doc.softLine})\n        },\n        Doc.rparen,\n      }),\n    )\n  }\n\nand printExpFunParameter = (parameter, cmtTbl) =>\n  switch parameter {\n  | ParsetreeViewer.NewTypes({attrs, locs: lbls}) =>\n    Doc.group(\n      Doc.concat(list{\n        printAttributes(attrs, cmtTbl),\n        Doc.text(\"type \"),\n        Doc.join(\n          ~sep=Doc.space,\n          List.map(\n            lbl => printComments(printIdentLike(lbl.Asttypes.txt), cmtTbl, lbl.Asttypes.loc),\n            lbls,\n          ),\n        ),\n      }),\n    )\n  | Parameter({attrs, lbl, defaultExpr, pat: pattern}) =>\n    let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrs)\n    let uncurried = if isUncurried {\n      Doc.concat(list{Doc.dot, Doc.space})\n    } else {\n      Doc.nil\n    }\n    let attrs = printAttributes(attrs, cmtTbl)\n    /* =defaultValue */\n    let defaultExprDoc = switch defaultExpr {\n    | Some(expr) => Doc.concat(list{Doc.text(\"=\"), printExpressionWithComments(expr, cmtTbl)})\n    | None => Doc.nil\n    }\n\n    /* ~from as hometown\n     * ~from                   ->  punning */\n    let labelWithPattern = switch (lbl, pattern) {\n    | (Asttypes.Nolabel, pattern) => printPattern(pattern, cmtTbl)\n    | (\n        Asttypes.Labelled(lbl) | Optional(lbl),\n        {\n          ppat_desc: Ppat_var(stringLoc),\n          
ppat_attributes: list{} | list{({Location.txt: \"ns.namedArgLoc\"}, _)},\n        },\n      ) if lbl == stringLoc.txt =>\n      /* ~d */\n      Doc.concat(list{Doc.text(\"~\"), printIdentLike(lbl)})\n    | (\n        Asttypes.Labelled(lbl) | Optional(lbl),\n        {\n          ppat_desc: Ppat_constraint({ppat_desc: Ppat_var({txt})}, typ),\n          ppat_attributes: list{} | list{({Location.txt: \"ns.namedArgLoc\"}, _)},\n        },\n      ) if lbl == txt =>\n      /* ~d: e */\n      Doc.concat(list{\n        Doc.text(\"~\"),\n        printIdentLike(lbl),\n        Doc.text(\": \"),\n        printTypExpr(typ, cmtTbl),\n      })\n    | (Asttypes.Labelled(lbl) | Optional(lbl), pattern) =>\n      /* ~b as c */\n      Doc.concat(list{\n        Doc.text(\"~\"),\n        printIdentLike(lbl),\n        Doc.text(\" as \"),\n        printPattern(pattern, cmtTbl),\n      })\n    }\n\n    let optionalLabelSuffix = switch (lbl, defaultExpr) {\n    | (Asttypes.Optional(_), None) => Doc.text(\"=?\")\n    | _ => Doc.nil\n    }\n\n    let doc = Doc.group(\n      Doc.concat(list{uncurried, attrs, labelWithPattern, defaultExprDoc, optionalLabelSuffix}),\n    )\n    let cmtLoc = switch defaultExpr {\n    | None =>\n      switch pattern.ppat_attributes {\n      | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ..._} => {\n          ...loc,\n          loc_end: pattern.ppat_loc.loc_end,\n        }\n      | _ => pattern.ppat_loc\n      }\n    | Some(expr) =>\n      let startPos = switch pattern.ppat_attributes {\n      | list{({Location.txt: \"ns.namedArgLoc\", loc}, _), ..._} => loc.loc_start\n      | _ => pattern.ppat_loc.loc_start\n      }\n      {\n        ...pattern.ppat_loc,\n        loc_start: startPos,\n        loc_end: expr.pexp_loc.loc_end,\n      }\n    }\n\n    printComments(doc, cmtTbl, cmtLoc)\n  }\n\nand printExpressionBlock = (~braces, expr, cmtTbl) => {\n  let rec collectRows = (acc, expr) =>\n    switch expr.Parsetree.pexp_desc {\n    | 
Parsetree.Pexp_letmodule(modName, modExpr, expr2) =>\n      let name = {\n        let doc = Doc.text(modName.txt)\n        printComments(doc, cmtTbl, modName.loc)\n      }\n\n      let letModuleDoc = Doc.concat(list{\n        Doc.text(\"module \"),\n        name,\n        Doc.text(\" = \"),\n        printModExpr(modExpr, cmtTbl),\n      })\n      let loc = {...expr.pexp_loc, loc_end: modExpr.pmod_loc.loc_end}\n      collectRows(list{(loc, letModuleDoc), ...acc}, expr2)\n    | Pexp_letexception(extensionConstructor, expr2) =>\n      let loc = {\n        let loc = {...expr.pexp_loc, loc_end: extensionConstructor.pext_loc.loc_end}\n        switch getFirstLeadingComment(cmtTbl, loc) {\n        | None => loc\n        | Some(comment) =>\n          let cmtLoc = Comment.loc(comment)\n          {...cmtLoc, loc_end: loc.loc_end}\n        }\n      }\n\n      let letExceptionDoc = printExceptionDef(extensionConstructor, cmtTbl)\n      collectRows(list{(loc, letExceptionDoc), ...acc}, expr2)\n    | Pexp_open(overrideFlag, longidentLoc, expr2) =>\n      let openDoc = Doc.concat(list{\n        Doc.text(\"open\"),\n        printOverrideFlag(overrideFlag),\n        Doc.space,\n        printLongidentLocation(longidentLoc, cmtTbl),\n      })\n      let loc = {...expr.pexp_loc, loc_end: longidentLoc.loc.loc_end}\n      collectRows(list{(loc, openDoc), ...acc}, expr2)\n    | Pexp_sequence(expr1, expr2) =>\n      let exprDoc = {\n        let doc = printExpression(expr1, cmtTbl)\n        switch Parens.expr(expr1) {\n        | Parens.Parenthesized => addParens(doc)\n        | Braced(braces) => printBraces(doc, expr1, braces)\n        | Nothing => doc\n        }\n      }\n\n      let loc = expr1.pexp_loc\n      collectRows(list{(loc, exprDoc), ...acc}, expr2)\n    | Pexp_let(recFlag, valueBindings, expr2) =>\n      let loc = {\n        let loc = switch (valueBindings, List.rev(valueBindings)) {\n        | (list{vb, ..._}, list{lastVb, ..._}) => {...vb.pvb_loc, loc_end: 
lastVb.pvb_loc.loc_end}\n        | _ => Location.none\n        }\n\n        switch getFirstLeadingComment(cmtTbl, loc) {\n        | None => loc\n        | Some(comment) =>\n          let cmtLoc = Comment.loc(comment)\n          {...cmtLoc, loc_end: loc.loc_end}\n        }\n      }\n\n      let recFlag = switch recFlag {\n      | Asttypes.Nonrecursive => Doc.nil\n      | Asttypes.Recursive => Doc.text(\"rec \")\n      }\n\n      let letDoc = printValueBindings(~recFlag, valueBindings, cmtTbl)\n      /* let () = {\n       *   let () = foo()\n       *   ()\n       * }\n       * We don't need to print the () on the last line of the block\n       */\n      switch expr2.pexp_desc {\n      | Pexp_construct({txt: Longident.Lident(\"()\")}, _) => List.rev(list{(loc, letDoc), ...acc})\n      | _ => collectRows(list{(loc, letDoc), ...acc}, expr2)\n      }\n    | _ =>\n      let exprDoc = {\n        let doc = printExpression(expr, cmtTbl)\n        switch Parens.expr(expr) {\n        | Parens.Parenthesized => addParens(doc)\n        | Braced(braces) => printBraces(doc, expr, braces)\n        | Nothing => doc\n        }\n      }\n\n      List.rev(list{(expr.pexp_loc, exprDoc), ...acc})\n    }\n\n  let rows = collectRows(list{}, expr)\n  let block = printList(\n    ~getLoc=fst,\n    ~nodes=rows,\n    ~print=((_, doc), _) => doc,\n    ~forceBreak=true,\n    cmtTbl,\n  )\n\n  Doc.breakableGroup(\n    ~forceBreak=true,\n    if braces {\n      Doc.concat(list{\n        Doc.lbrace,\n        Doc.indent(Doc.concat(list{Doc.line, block})),\n        Doc.line,\n        Doc.rbrace,\n      })\n    } else {\n      block\n    },\n  )\n}\n\n/*\n * // user types:\n * let f = (a, b) => { a + b }\n *\n * // printer: everything is on one line\n * let f = (a, b) => { a + b }\n *\n * // user types: over multiple lines\n * let f = (a, b) => {\n *   a + b\n * }\n *\n * // printer: over multiple lines\n * let f = (a, b) => {\n *   a + b\n * }\n */\nand printBraces = (doc, expr, bracesLoc) => {\n  let 
overMultipleLines = {\n    open Location\n    bracesLoc.loc_end.pos_lnum > bracesLoc.loc_start.pos_lnum\n  }\n\n  switch expr.Parsetree.pexp_desc {\n  | Pexp_letmodule(_)\n  | Pexp_letexception(_)\n  | Pexp_let(_)\n  | Pexp_open(_)\n  | Pexp_sequence(_) => /* already has braces */\n    doc\n  | _ =>\n    Doc.breakableGroup(\n      ~forceBreak=overMultipleLines,\n      Doc.concat(list{\n        Doc.lbrace,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            if Parens.bracedExpr(expr) {\n              addParens(doc)\n            } else {\n              doc\n            },\n          }),\n        ),\n        Doc.softLine,\n        Doc.rbrace,\n      }),\n    )\n  }\n}\n\nand printOverrideFlag = overrideFlag =>\n  switch overrideFlag {\n  | Asttypes.Override => Doc.text(\"!\")\n  | Fresh => Doc.nil\n  }\n\nand printDirectionFlag = flag =>\n  switch flag {\n  | Asttypes.Downto => Doc.text(\" downto \")\n  | Asttypes.Upto => Doc.text(\" to \")\n  }\n\nand printRecordRow = ((lbl, expr), cmtTbl, punningAllowed) => {\n  let cmtLoc = {...lbl.loc, loc_end: expr.pexp_loc.loc_end}\n  let doc = Doc.group(\n    switch expr.pexp_desc {\n    | Pexp_ident({txt: Lident(key), loc: keyLoc})\n      if punningAllowed &&\n      (Longident.last(lbl.txt) == key &&\n      lbl.loc.loc_start.pos_cnum === keyLoc.loc_start.pos_cnum) =>\n      /* print punned field */\n      printLidentPath(lbl, cmtTbl)\n    | _ =>\n      Doc.concat(list{\n        printLidentPath(lbl, cmtTbl),\n        Doc.text(\": \"),\n        {\n          let doc = printExpressionWithComments(expr, cmtTbl)\n          switch Parens.expr(expr) {\n          | Parens.Parenthesized => addParens(doc)\n          | Braced(braces) => printBraces(doc, expr, braces)\n          | Nothing => doc\n          }\n        },\n      })\n    },\n  )\n  printComments(doc, cmtTbl, cmtLoc)\n}\n\nand printBsObjectRow = ((lbl, expr), cmtTbl) => {\n  let cmtLoc = {...lbl.loc, loc_end: expr.pexp_loc.loc_end}\n  let 
lblDoc = {\n    let doc = Doc.concat(list{Doc.text(\"\\\"\"), printLongident(lbl.txt), Doc.text(\"\\\"\")})\n    printComments(doc, cmtTbl, lbl.loc)\n  }\n\n  let doc = Doc.concat(list{\n    lblDoc,\n    Doc.text(\": \"),\n    {\n      let doc = printExpressionWithComments(expr, cmtTbl)\n      switch Parens.expr(expr) {\n      | Parens.Parenthesized => addParens(doc)\n      | Braced(braces) => printBraces(doc, expr, braces)\n      | Nothing => doc\n      }\n    },\n  })\n  printComments(doc, cmtTbl, cmtLoc)\n}\n\n/* The optional loc indicates whether we need to print the attributes in\n * relation to some location. In practise this means the following:\n *  `@attr type t = string` -> on the same line, print on the same line\n *  `@attr\n *   type t = string` -> attr is on prev line, print the attributes\n *   with a line break between, we respect the users' original layout */\nand printAttributes = (~loc=?, ~inline=false, attrs: Parsetree.attributes, cmtTbl) =>\n  switch ParsetreeViewer.filterParsingAttrs(attrs) {\n  | list{} => Doc.nil\n  | attrs =>\n    let lineBreak = switch loc {\n    | None => Doc.line\n    | Some(loc) =>\n      switch List.rev(attrs) {\n      | list{({loc: firstLoc}, _), ..._}\n        if loc.loc_start.pos_lnum > firstLoc.loc_end.pos_lnum => Doc.hardLine\n      | _ => Doc.line\n      }\n    }\n\n    Doc.concat(list{\n      Doc.group(Doc.join(~sep=Doc.line, List.map(attr => printAttribute(attr, cmtTbl), attrs))),\n      if inline {\n        Doc.space\n      } else {\n        lineBreak\n      },\n    })\n  }\n\nand printPayload = (payload: Parsetree.payload, cmtTbl) =>\n  switch payload {\n  | PStr(list{}) => Doc.nil\n  | PStr(list{{pstr_desc: Pstr_eval(expr, attrs)}}) =>\n    let exprDoc = printExpressionWithComments(expr, cmtTbl)\n    let needsParens = switch attrs {\n    | list{} => false\n    | _ => true\n    }\n    let shouldHug = ParsetreeViewer.isHuggableExpression(expr)\n    if shouldHug {\n      Doc.concat(list{\n        Doc.lparen,\n  
      printAttributes(attrs, cmtTbl),\n        if needsParens {\n          addParens(exprDoc)\n        } else {\n          exprDoc\n        },\n        Doc.rparen,\n      })\n    } else {\n      Doc.concat(list{\n        Doc.lparen,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            printAttributes(attrs, cmtTbl),\n            if needsParens {\n              addParens(exprDoc)\n            } else {\n              exprDoc\n            },\n          }),\n        ),\n        Doc.softLine,\n        Doc.rparen,\n      })\n    }\n  | PStr(list{{pstr_desc: Pstr_value(_recFlag, _bindings)} as si}) =>\n    addParens(printStructureItem(si, cmtTbl))\n  | PStr(structure) => addParens(printStructure(structure, cmtTbl))\n  | PTyp(typ) =>\n    Doc.concat(list{\n      Doc.lparen,\n      Doc.text(\":\"),\n      Doc.indent(Doc.concat(list{Doc.line, printTypExpr(typ, cmtTbl)})),\n      Doc.softLine,\n      Doc.rparen,\n    })\n  | PPat(pat, optExpr) =>\n    let whenDoc = switch optExpr {\n    | Some(expr) =>\n      Doc.concat(list{Doc.line, Doc.text(\"if \"), printExpressionWithComments(expr, cmtTbl)})\n    | None => Doc.nil\n    }\n\n    Doc.concat(list{\n      Doc.lparen,\n      Doc.indent(\n        Doc.concat(list{Doc.softLine, Doc.text(\"? 
\"), printPattern(pat, cmtTbl), whenDoc}),\n      ),\n      Doc.softLine,\n      Doc.rparen,\n    })\n  | PSig(signature) =>\n    Doc.concat(list{\n      Doc.lparen,\n      Doc.text(\":\"),\n      Doc.indent(Doc.concat(list{Doc.line, printSignature(signature, cmtTbl)})),\n      Doc.softLine,\n      Doc.rparen,\n    })\n  }\n\nand printAttribute = ((id, payload): Parsetree.attribute, cmtTbl) =>\n  Doc.group(\n    Doc.concat(list{\n      Doc.text(\"@\"),\n      Doc.text(convertBsExternalAttribute(id.txt)),\n      printPayload(payload, cmtTbl),\n    }),\n  )\n\nand printModExpr = (modExpr, cmtTbl) => {\n  let doc = switch modExpr.pmod_desc {\n  | Pmod_ident(longidentLoc) => printLongidentLocation(longidentLoc, cmtTbl)\n  | Pmod_structure(list{}) =>\n    let shouldBreak = modExpr.pmod_loc.loc_start.pos_lnum < modExpr.pmod_loc.loc_end.pos_lnum\n\n    Doc.breakableGroup(\n      ~forceBreak=shouldBreak,\n      Doc.concat(list{\n        Doc.lbrace,\n        Doc.indent(Doc.concat(list{Doc.softLine, printCommentsInside(cmtTbl, modExpr.pmod_loc)})),\n        Doc.softLine,\n        Doc.rbrace,\n      }),\n    )\n  | Pmod_structure(structure) =>\n    Doc.breakableGroup(\n      ~forceBreak=true,\n      Doc.concat(list{\n        Doc.lbrace,\n        Doc.indent(Doc.concat(list{Doc.softLine, printStructure(structure, cmtTbl)})),\n        Doc.softLine,\n        Doc.rbrace,\n      }),\n    )\n  | Pmod_unpack(expr) =>\n    let shouldHug = switch expr.pexp_desc {\n    | Pexp_let(_) => true\n    | Pexp_constraint({pexp_desc: Pexp_let(_)}, {ptyp_desc: Ptyp_package(_packageType)}) => true\n    | _ => false\n    }\n\n    let (expr, moduleConstraint) = switch expr.pexp_desc {\n    | Pexp_constraint(expr, {ptyp_desc: Ptyp_package(packageType), ptyp_loc}) =>\n      let packageDoc = {\n        let doc = printPackageType(~printModuleKeywordAndParens=false, packageType, cmtTbl)\n        printComments(doc, cmtTbl, ptyp_loc)\n      }\n\n      let typeDoc = Doc.group(\n        
Doc.concat(list{Doc.text(\":\"), Doc.indent(Doc.concat(list{Doc.line, packageDoc}))}),\n      )\n      (expr, typeDoc)\n    | _ => (expr, Doc.nil)\n    }\n\n    let unpackDoc = Doc.group(\n      Doc.concat(list{printExpressionWithComments(expr, cmtTbl), moduleConstraint}),\n    )\n    Doc.group(\n      Doc.concat(list{\n        Doc.text(\"unpack(\"),\n        if shouldHug {\n          unpackDoc\n        } else {\n          Doc.concat(list{Doc.indent(Doc.concat(list{Doc.softLine, unpackDoc})), Doc.softLine})\n        },\n        Doc.rparen,\n      }),\n    )\n  | Pmod_extension(extension) => printExtension(~atModuleLvl=false, extension, cmtTbl)\n  | Pmod_apply(_) =>\n    let (args, callExpr) = ParsetreeViewer.modExprApply(modExpr)\n    let isUnitSugar = switch args {\n    | list{{pmod_desc: Pmod_structure(list{})}} => true\n    | _ => false\n    }\n\n    let shouldHug = switch args {\n    | list{{pmod_desc: Pmod_structure(_)}} => true\n    | _ => false\n    }\n\n    Doc.group(\n      Doc.concat(list{\n        printModExpr(callExpr, cmtTbl),\n        if isUnitSugar {\n          printModApplyArg(@doesNotRaise List.hd(args), cmtTbl)\n        } else {\n          Doc.concat(list{\n            Doc.lparen,\n            if shouldHug {\n              printModApplyArg(@doesNotRaise List.hd(args), cmtTbl)\n            } else {\n              Doc.indent(\n                Doc.concat(list{\n                  Doc.softLine,\n                  Doc.join(\n                    ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n                    List.map(modArg => printModApplyArg(modArg, cmtTbl), args),\n                  ),\n                }),\n              )\n            },\n            if !shouldHug {\n              Doc.concat(list{Doc.trailingComma, Doc.softLine})\n            } else {\n              Doc.nil\n            },\n            Doc.rparen,\n          })\n        },\n      }),\n    )\n  | Pmod_constraint(modExpr, modType) =>\n    Doc.concat(list{printModExpr(modExpr, cmtTbl), 
Doc.text(\": \"), printModType(modType, cmtTbl)})\n  | Pmod_functor(_) => printModFunctor(modExpr, cmtTbl)\n  }\n\n  printComments(doc, cmtTbl, modExpr.pmod_loc)\n}\n\nand printModFunctor = (modExpr, cmtTbl) => {\n  let (parameters, returnModExpr) = ParsetreeViewer.modExprFunctor(modExpr)\n  /* let shouldInline = match returnModExpr.pmod_desc with */\n  /* | Pmod_structure _ | Pmod_ident _ -> true */\n  /* | Pmod_constraint ({pmod_desc = Pmod_structure _}, _) -> true */\n  /* | _ -> false */\n  /* in */\n  let (returnConstraint, returnModExpr) = switch returnModExpr.pmod_desc {\n  | Pmod_constraint(modExpr, modType) =>\n    let constraintDoc = {\n      let doc = printModType(modType, cmtTbl)\n      if Parens.modExprFunctorConstraint(modType) {\n        addParens(doc)\n      } else {\n        doc\n      }\n    }\n\n    let modConstraint = Doc.concat(list{Doc.text(\": \"), constraintDoc})\n    (modConstraint, printModExpr(modExpr, cmtTbl))\n  | _ => (Doc.nil, printModExpr(returnModExpr, cmtTbl))\n  }\n\n  let parametersDoc = switch parameters {\n  | list{(attrs, {txt: \"*\"}, None)} =>\n    Doc.group(Doc.concat(list{printAttributes(attrs, cmtTbl), Doc.text(\"()\")}))\n  | list{(list{}, {txt: lbl}, None)} => Doc.text(lbl)\n  | parameters =>\n    Doc.group(\n      Doc.concat(list{\n        Doc.lparen,\n        Doc.indent(\n          Doc.concat(list{\n            Doc.softLine,\n            Doc.join(\n              ~sep=Doc.concat(list{Doc.comma, Doc.line}),\n              List.map(param => printModFunctorParam(param, cmtTbl), parameters),\n            ),\n          }),\n        ),\n        Doc.trailingComma,\n        Doc.softLine,\n        Doc.rparen,\n      }),\n    )\n  }\n\n  Doc.group(Doc.concat(list{parametersDoc, returnConstraint, Doc.text(\" => \"), returnModExpr}))\n}\n\nand printModFunctorParam = ((attrs, lbl, optModType), cmtTbl) => {\n  let cmtLoc = switch optModType {\n  | None => lbl.Asttypes.loc\n  | Some(modType) => {\n      ...lbl.loc,\n      loc_end: 
modType.Parsetree.pmty_loc.loc_end,\n    }\n  }\n\n  let attrs = printAttributes(attrs, cmtTbl)\n  let lblDoc = {\n    let doc = if lbl.txt == \"*\" {\n      Doc.text(\"()\")\n    } else {\n      Doc.text(lbl.txt)\n    }\n    printComments(doc, cmtTbl, lbl.loc)\n  }\n\n  let doc = Doc.group(\n    Doc.concat(list{\n      attrs,\n      lblDoc,\n      switch optModType {\n      | None => Doc.nil\n      | Some(modType) => Doc.concat(list{Doc.text(\": \"), printModType(modType, cmtTbl)})\n      },\n    }),\n  )\n  printComments(doc, cmtTbl, cmtLoc)\n}\n\nand printModApplyArg = (modExpr, cmtTbl) =>\n  switch modExpr.pmod_desc {\n  | Pmod_structure(list{}) => Doc.text(\"()\")\n  | _ => printModExpr(modExpr, cmtTbl)\n  }\n\nand printExceptionDef = (constr: Parsetree.extension_constructor, cmtTbl) => {\n  let kind = switch constr.pext_kind {\n  | Pext_rebind(longident) =>\n    Doc.indent(\n      Doc.concat(list{Doc.text(\" =\"), Doc.line, printLongidentLocation(longident, cmtTbl)}),\n    )\n  | Pext_decl(Pcstr_tuple(list{}), None) => Doc.nil\n  | Pext_decl(args, gadt) =>\n    let gadtDoc = switch gadt {\n    | Some(typ) => Doc.concat(list{Doc.text(\": \"), printTypExpr(typ, cmtTbl)})\n    | None => Doc.nil\n    }\n\n    Doc.concat(list{printConstructorArguments(~indent=false, args, cmtTbl), gadtDoc})\n  }\n\n  let name = printComments(Doc.text(constr.pext_name.txt), cmtTbl, constr.pext_name.loc)\n\n  let doc = Doc.group(\n    Doc.concat(list{\n      printAttributes(constr.pext_attributes, cmtTbl),\n      Doc.text(\"exception \"),\n      name,\n      kind,\n    }),\n  )\n  printComments(doc, cmtTbl, constr.pext_loc)\n}\n\nand printExtensionConstructor = (constr: Parsetree.extension_constructor, cmtTbl, i) => {\n  let attrs = printAttributes(constr.pext_attributes, cmtTbl)\n  let bar = if i > 0 {\n    Doc.text(\"| \")\n  } else {\n    Doc.ifBreaks(Doc.text(\"| \"), Doc.nil)\n  }\n\n  let kind = switch constr.pext_kind {\n  | Pext_rebind(longident) =>\n    Doc.indent(\n      
Doc.concat(list{Doc.text(\" =\"), Doc.line, printLongidentLocation(longident, cmtTbl)}),\n    )\n  | Pext_decl(Pcstr_tuple(list{}), None) => Doc.nil\n  | Pext_decl(args, gadt) =>\n    let gadtDoc = switch gadt {\n    | Some(typ) => Doc.concat(list{Doc.text(\": \"), printTypExpr(typ, cmtTbl)})\n    | None => Doc.nil\n    }\n\n    Doc.concat(list{printConstructorArguments(~indent=false, args, cmtTbl), gadtDoc})\n  }\n\n  let name = printComments(Doc.text(constr.pext_name.txt), cmtTbl, constr.pext_name.loc)\n\n  Doc.concat(list{bar, Doc.group(Doc.concat(list{attrs, name, kind}))})\n}\n\nlet printImplementation = (~width, s: Parsetree.structure, ~comments) => {\n  let cmtTbl = CommentTable.make()\n  CommentTable.walkStructure(s, cmtTbl, comments)\n  /* CommentTable.log cmtTbl; */\n  let doc = printStructure(s, cmtTbl)\n  /* Doc.debug doc; */\n  Doc.toString(~width, doc) ++ \"\\n\"\n}\n\nlet printInterface = (~width, s: Parsetree.signature, ~comments) => {\n  let cmtTbl = CommentTable.make()\n  CommentTable.walkSignature(s, cmtTbl, comments)\n  Doc.toString(~width, printSignature(s, cmtTbl)) ++ \"\\n\"\n}\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_reporting.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar Token;\n\nvar Grammar;\n\nexport {\n  Token ,\n  Grammar ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_reporting.res",
    "content": "module Token = Res_token\nmodule Grammar = Res_grammar\n\ntype problem =\n  | @live Unexpected(Token.t)\n  | @live Expected({token: Token.t, pos: Lexing.position, context: option<Grammar.t>})\n  | @live Message(string)\n  | @live Uident\n  | @live Lident\n  | @live Unbalanced(Token.t)\n\ntype parseError = (Lexing.position, problem)\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_scanner.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as P from \"./P.js\";\nimport * as Caml from \"rescript/lib/es6/caml.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as $$Buffer from \"rescript/lib/es6/buffer.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Res_utf8 from \"./res_utf8.js\";\nimport * as Res_token from \"./res_token.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Res_comment from \"./res_comment.js\";\nimport * as Res_diagnostics from \"./res_diagnostics.js\";\n\nfunction setDiamondMode(scanner) {\n  scanner.mode = {\n    hd: /* Diamond */1,\n    tl: scanner.mode\n  };\n  \n}\n\nfunction setJsxMode(scanner) {\n  scanner.mode = {\n    hd: /* Jsx */0,\n    tl: scanner.mode\n  };\n  \n}\n\nfunction popMode(scanner, mode) {\n  var match = scanner.mode;\n  if (match && match.hd === mode) {\n    scanner.mode = match.tl;\n    return ;\n  }\n  \n}\n\nfunction inDiamondMode(scanner) {\n  var match = scanner.mode;\n  if (match && match.hd) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction inJsxMode(scanner) {\n  var match = scanner.mode;\n  if (match && !match.hd) {\n    return true;\n  } else {\n    return false;\n  }\n}\n\nfunction position(scanner) {\n  return {\n          pos_fname: scanner.filename,\n          pos_lnum: scanner.lnum,\n          pos_bol: scanner.lineOffset,\n          pos_cnum: scanner.offset\n        };\n}\n\nfunction _printDebug(startPos, endPos, scanner, token) {\n  Pervasives.print_string(scanner.src);\n  Pervasives.print_string($$String.make(startPos.pos_cnum, /* ' ' */32));\n  P.print_char(/* '^' */94);\n  var n = endPos.pos_cnum - startPos.pos_cnum | 0;\n  if (n !== 0) {\n    if (n !== 1) {\n      Pervasives.print_string($$String.make(n - 2 | 0, /* '-' */45));\n      P.print_char(/* '^' */94);\n    }\n    \n  } else if (token !== /* Eof */26) {\n    throw {\n          RE_EXN_ID: 
\"Assert_failure\",\n          _1: [\n            \"res_scanner.res\",\n            89,\n            6\n          ],\n          Error: new Error()\n        };\n  }\n  P.print_char(/* ' ' */32);\n  Pervasives.print_string(Res_token.toString(token));\n  P.print_char(/* ' ' */32);\n  Pervasives.print_int(startPos.pos_cnum);\n  P.print_char(/* '-' */45);\n  Pervasives.print_int(endPos.pos_cnum);\n  console.log(\"\");\n  \n}\n\nfunction next(scanner) {\n  var nextOffset = scanner.offset + 1 | 0;\n  var match = scanner.ch;\n  if (match !== 10) {\n    \n  } else {\n    scanner.lineOffset = nextOffset;\n    scanner.lnum = scanner.lnum + 1 | 0;\n  }\n  if (nextOffset < scanner.src.length) {\n    scanner.offset = nextOffset;\n    scanner.ch = scanner.src.charCodeAt(scanner.offset);\n  } else {\n    scanner.offset = scanner.src.length;\n    scanner.ch = -1;\n  }\n  \n}\n\nfunction next2(scanner) {\n  next(scanner);\n  return next(scanner);\n}\n\nfunction next3(scanner) {\n  next(scanner);\n  next(scanner);\n  return next(scanner);\n}\n\nfunction peek(scanner) {\n  if ((scanner.offset + 1 | 0) < scanner.src.length) {\n    return scanner.src.charCodeAt(scanner.offset + 1 | 0);\n  } else {\n    return -1;\n  }\n}\n\nfunction peek2(scanner) {\n  if ((scanner.offset + 2 | 0) < scanner.src.length) {\n    return scanner.src.charCodeAt(scanner.offset + 2 | 0);\n  } else {\n    return -1;\n  }\n}\n\nfunction make(filename, src) {\n  return {\n          filename: filename,\n          src: src,\n          err: (function (param, param$1, param$2) {\n              \n            }),\n          ch: src === \"\" ? 
-1 : src.charCodeAt(0),\n          offset: 0,\n          lineOffset: 0,\n          lnum: 1,\n          mode: /* [] */0\n        };\n}\n\nfunction isWhitespace(ch) {\n  if (ch > 13 || ch < 9) {\n    return ch === 32;\n  } else {\n    return !(ch === 12 || ch === 11);\n  }\n}\n\nfunction skipWhitespace(scanner) {\n  while(true) {\n    if (!isWhitespace(scanner.ch)) {\n      return ;\n    }\n    next(scanner);\n    continue ;\n  };\n}\n\nfunction digitValue(ch) {\n  if (ch >= 65) {\n    if (ch >= 97) {\n      if (ch >= 103) {\n        return 16;\n      } else {\n        return (ch - /* 'a' */97 | 0) + 10 | 0;\n      }\n    } else if (ch >= 71) {\n      return 16;\n    } else {\n      return ((ch + 32 | 0) - /* 'a' */97 | 0) + 10 | 0;\n    }\n  } else if (ch > 57 || ch < 48) {\n    return 16;\n  } else {\n    return ch - 48 | 0;\n  }\n}\n\nfunction skipLowerCaseChars(scanner) {\n  while(true) {\n    var match = scanner.ch;\n    if (match > 122 || match < 97) {\n      return ;\n    }\n    next(scanner);\n    continue ;\n  };\n}\n\nfunction scanIdentifier(scanner) {\n  var startOff = scanner.offset;\n  var skipGoodChars = function (scanner) {\n    while(true) {\n      var match = scanner.ch;\n      if (match >= 65) {\n        if (match > 96 || match < 91) {\n          if (match >= 123) {\n            return ;\n          }\n          next(scanner);\n          continue ;\n        }\n        if (match !== 95) {\n          return ;\n        }\n        next(scanner);\n        continue ;\n      }\n      if (match >= 48) {\n        if (match >= 58) {\n          return ;\n        }\n        next(scanner);\n        continue ;\n      }\n      if (match !== 39) {\n        return ;\n      }\n      next(scanner);\n      continue ;\n    };\n  };\n  skipGoodChars(scanner);\n  var str = $$String.sub(scanner.src, startOff, scanner.offset - startOff | 0);\n  if (/* '{' */123 === scanner.ch && str === \"list\") {\n    next(scanner);\n    return Res_token.lookupKeyword(\"list{\");\n  } else 
{\n    return Res_token.lookupKeyword(str);\n  }\n}\n\nfunction scanDigits(scanner, base) {\n  if (base <= 10) {\n    while(true) {\n      var match = scanner.ch;\n      if (match >= 58) {\n        if (match !== 95) {\n          return ;\n        }\n        next(scanner);\n        continue ;\n      }\n      if (match < 48) {\n        return ;\n      }\n      next(scanner);\n      continue ;\n    };\n  }\n  while(true) {\n    var match$1 = scanner.ch;\n    if (match$1 >= 71) {\n      if (match$1 >= 97) {\n        if (match$1 >= 103) {\n          return ;\n        }\n        next(scanner);\n        continue ;\n      }\n      if (match$1 !== 95) {\n        return ;\n      }\n      next(scanner);\n      continue ;\n    }\n    if (match$1 >= 58) {\n      if (match$1 < 65) {\n        return ;\n      }\n      next(scanner);\n      continue ;\n    }\n    if (match$1 < 48) {\n      return ;\n    }\n    next(scanner);\n    continue ;\n  };\n}\n\nfunction scanNumber(scanner) {\n  var startOff = scanner.offset;\n  var match = scanner.ch;\n  var base;\n  if (match !== 48) {\n    base = 10;\n  } else {\n    var match$1 = peek(scanner);\n    if (match$1 >= 89) {\n      if (match$1 !== 98) {\n        if (match$1 !== 111) {\n          if (match$1 !== 120) {\n            next(scanner);\n            base = 8;\n          } else {\n            next(scanner);\n            next(scanner);\n            base = 16;\n          }\n        } else {\n          next(scanner);\n          next(scanner);\n          base = 8;\n        }\n      } else {\n        next(scanner);\n        next(scanner);\n        base = 2;\n      }\n    } else if (match$1 !== 66) {\n      if (match$1 !== 79) {\n        if (match$1 >= 88) {\n          next(scanner);\n          next(scanner);\n          base = 16;\n        } else {\n          next(scanner);\n          base = 8;\n        }\n      } else {\n        next(scanner);\n        next(scanner);\n        base = 8;\n      }\n    } else {\n      next(scanner);\n      
next(scanner);\n      base = 2;\n    }\n  }\n  scanDigits(scanner, base);\n  var isFloat = /* '.' */46 === scanner.ch ? (next(scanner), scanDigits(scanner, base), true) : false;\n  var match$2 = scanner.ch;\n  var isFloat$1;\n  var exit = 0;\n  if (match$2 >= 81) {\n    if (match$2 !== 101 && match$2 !== 112) {\n      isFloat$1 = isFloat;\n    } else {\n      exit = 1;\n    }\n  } else if (match$2 !== 69 && match$2 < 80) {\n    isFloat$1 = isFloat;\n  } else {\n    exit = 1;\n  }\n  if (exit === 1) {\n    var match$3 = peek(scanner);\n    if (match$3 !== 43 && match$3 !== 45) {\n      next(scanner);\n    } else {\n      next(scanner);\n      next(scanner);\n    }\n    scanDigits(scanner, base);\n    isFloat$1 = true;\n  }\n  var literal = $$String.sub(scanner.src, startOff, scanner.offset - startOff | 0);\n  var ch = scanner.ch;\n  var suffix;\n  if (ch > 122 || ch < 103) {\n    if (ch > 90 || ch < 71) {\n      suffix = undefined;\n    } else {\n      next(scanner);\n      suffix = ch;\n    }\n  } else if (ch !== 110) {\n    next(scanner);\n    suffix = ch;\n  } else {\n    var msg = \"Unsupported number type (nativeint). 
Did you mean `\" + (literal + \"`?\");\n    var pos = position(scanner);\n    Curry._3(scanner.err, pos, pos, Res_diagnostics.message(msg));\n    next(scanner);\n    suffix = /* 'n' */110;\n  }\n  if (isFloat$1) {\n    return {\n            TAG: /* Float */2,\n            f: literal,\n            suffix: suffix\n          };\n  } else {\n    return {\n            TAG: /* Int */1,\n            i: literal,\n            suffix: suffix\n          };\n  }\n}\n\nfunction scanExoticIdentifier(scanner) {\n  next(scanner);\n  var buffer = $$Buffer.create(20);\n  var startPos = position(scanner);\n  var scan = function (_param) {\n    while(true) {\n      var ch = scanner.ch;\n      if (ch > 13 || ch < 10) {\n        if (ch === 34) {\n          return next(scanner);\n        }\n        \n      } else if (!(ch === 12 || ch === 11)) {\n        var endPos = position(scanner);\n        Curry._3(scanner.err, startPos, endPos, Res_diagnostics.message(\"A quoted identifier can't contain line breaks.\"));\n        return next(scanner);\n      }\n      if (ch === -1) {\n        var endPos$1 = position(scanner);\n        return Curry._3(scanner.err, startPos, endPos$1, Res_diagnostics.message(\"Did you forget a \\\" here?\"));\n      }\n      $$Buffer.add_char(buffer, ch);\n      next(scanner);\n      _param = undefined;\n      continue ;\n    };\n  };\n  scan(undefined);\n  return {\n          TAG: /* Lident */4,\n          _0: $$Buffer.contents(buffer)\n        };\n}\n\nfunction scanStringEscapeSequence(startPos, scanner) {\n  var scan = function (n, base, max) {\n    var loop = function (_n, _x) {\n      while(true) {\n        var x = _x;\n        var n = _n;\n        if (n === 0) {\n          return x;\n        }\n        var d = digitValue(scanner.ch);\n        if (d >= base) {\n          var pos = position(scanner);\n          var msg = scanner.ch === -1 ? 
\"unclosed escape sequence\" : \"unknown escape sequence\";\n          Curry._3(scanner.err, startPos, pos, Res_diagnostics.message(msg));\n          return -1;\n        }\n        next(scanner);\n        _x = Math.imul(x, base) + d | 0;\n        _n = n - 1 | 0;\n        continue ;\n      };\n    };\n    var x = loop(n, 0);\n    if (!(x > max || 55296 <= x && x < 57344)) {\n      return ;\n    }\n    var pos = position(scanner);\n    return Curry._3(scanner.err, startPos, pos, Res_diagnostics.message(\"escape sequence is invalid unicode code point\"));\n  };\n  var match = scanner.ch;\n  if (match >= 48) {\n    if (match < 92) {\n      if (match >= 58) {\n        return ;\n      } else {\n        return scan(3, 10, 255);\n      }\n    }\n    if (match >= 121) {\n      return ;\n    }\n    switch (match) {\n      case 111 :\n          next(scanner);\n          return scan(3, 8, 255);\n      case 92 :\n      case 98 :\n      case 110 :\n      case 114 :\n      case 116 :\n          return next(scanner);\n      case 117 :\n          next(scanner);\n          var match$1 = scanner.ch;\n          if (match$1 !== 123) {\n            return scan(4, 16, Res_utf8.max);\n          }\n          next(scanner);\n          var x = 0;\n          while((function () {\n                  var match = scanner.ch;\n                  if (match > 70 || match < 48) {\n                    return !(match > 102 || match < 97);\n                  } else {\n                    return match > 64 || match < 58;\n                  }\n                })()) {\n            x = (x << 4) + digitValue(scanner.ch) | 0;\n            next(scanner);\n          };\n          var match$2 = scanner.ch;\n          if (match$2 !== 125) {\n            return ;\n          } else {\n            return next(scanner);\n          }\n      case 93 :\n      case 94 :\n      case 95 :\n      case 96 :\n      case 97 :\n      case 99 :\n      case 100 :\n      case 101 :\n      case 102 :\n      case 103 :\n      case 
104 :\n      case 105 :\n      case 106 :\n      case 107 :\n      case 108 :\n      case 109 :\n      case 112 :\n      case 113 :\n      case 115 :\n      case 118 :\n      case 119 :\n          return ;\n      case 120 :\n          next(scanner);\n          return scan(2, 16, 255);\n      \n    }\n  } else {\n    switch (match) {\n      case 33 :\n      case 35 :\n      case 36 :\n      case 37 :\n      case 38 :\n          return ;\n      case 32 :\n      case 34 :\n      case 39 :\n          return next(scanner);\n      default:\n        return ;\n    }\n  }\n}\n\nfunction scanString(scanner) {\n  var startPosWithQuote = position(scanner);\n  next(scanner);\n  var firstCharOffset = scanner.offset;\n  var scan = function (_param) {\n    while(true) {\n      var ch = scanner.ch;\n      if (ch !== 34) {\n        if (ch !== 92) {\n          if (ch === -1) {\n            var endPos = position(scanner);\n            Curry._3(scanner.err, startPosWithQuote, endPos, Res_diagnostics.unclosedString);\n            return $$String.sub(scanner.src, firstCharOffset, scanner.offset - firstCharOffset | 0);\n          }\n          next(scanner);\n          _param = undefined;\n          continue ;\n        }\n        var startPos = position(scanner);\n        next(scanner);\n        scanStringEscapeSequence(startPos, scanner);\n        _param = undefined;\n        continue ;\n      }\n      var lastCharOffset = scanner.offset;\n      next(scanner);\n      return $$String.sub(scanner.src, firstCharOffset, lastCharOffset - firstCharOffset | 0);\n    };\n  };\n  return {\n          TAG: /* String */3,\n          _0: scan(undefined)\n        };\n}\n\nfunction scanEscape(scanner) {\n  var offset = scanner.offset - 1 | 0;\n  var convertNumber = function (scanner, n, base) {\n    var x = 0;\n    for(var _for = n; _for >= 1; --_for){\n      var d = digitValue(scanner.ch);\n      x = Math.imul(x, base) + d | 0;\n      next(scanner);\n    }\n    var c = x;\n    if 
(Res_utf8.isValidCodePoint(c)) {\n      return c;\n    } else {\n      return Res_utf8.repl;\n    }\n  };\n  var ch = scanner.ch;\n  var codepoint;\n  if (ch >= 58) {\n    switch (ch) {\n      case 98 :\n          next(scanner);\n          codepoint = /* '\\b' */8;\n          break;\n      case 110 :\n          next(scanner);\n          codepoint = /* '\\n' */10;\n          break;\n      case 111 :\n          next(scanner);\n          codepoint = convertNumber(scanner, 3, 8);\n          break;\n      case 114 :\n          next(scanner);\n          codepoint = /* '\\r' */13;\n          break;\n      case 116 :\n          next(scanner);\n          codepoint = /* '\\t' */9;\n          break;\n      case 117 :\n          next(scanner);\n          var match = scanner.ch;\n          if (match !== 123) {\n            codepoint = convertNumber(scanner, 4, 16);\n          } else {\n            next(scanner);\n            var x = 0;\n            while((function () {\n                    var match = scanner.ch;\n                    if (match > 70 || match < 48) {\n                      return !(match > 102 || match < 97);\n                    } else {\n                      return match > 64 || match < 58;\n                    }\n                  })()) {\n              x = (x << 4) + digitValue(scanner.ch) | 0;\n              next(scanner);\n            };\n            var match$1 = scanner.ch;\n            if (match$1 !== 125) {\n              \n            } else {\n              next(scanner);\n            }\n            var c = x;\n            codepoint = Res_utf8.isValidCodePoint(c) ? 
c : Res_utf8.repl;\n          }\n          break;\n      case 99 :\n      case 100 :\n      case 101 :\n      case 102 :\n      case 103 :\n      case 104 :\n      case 105 :\n      case 106 :\n      case 107 :\n      case 108 :\n      case 109 :\n      case 112 :\n      case 113 :\n      case 115 :\n      case 118 :\n      case 119 :\n          next(scanner);\n          codepoint = ch;\n          break;\n      case 120 :\n          next(scanner);\n          codepoint = convertNumber(scanner, 2, 16);\n          break;\n      default:\n        next(scanner);\n        codepoint = ch;\n    }\n  } else if (ch >= 48) {\n    codepoint = convertNumber(scanner, 3, 10);\n  } else {\n    next(scanner);\n    codepoint = ch;\n  }\n  var contents = $$String.sub(scanner.src, offset, scanner.offset - offset | 0);\n  next(scanner);\n  return {\n          TAG: /* Codepoint */0,\n          c: codepoint,\n          original: contents\n        };\n}\n\nfunction scanSingleLineComment(scanner) {\n  var startOff = scanner.offset;\n  var startPos = position(scanner);\n  var skip = function (scanner) {\n    while(true) {\n      var ch = scanner.ch;\n      if (ch === 10) {\n        return ;\n      }\n      if (ch === 13) {\n        return ;\n      }\n      if (ch === -1) {\n        return ;\n      }\n      next(scanner);\n      continue ;\n    };\n  };\n  skip(scanner);\n  var endPos = position(scanner);\n  return {\n          TAG: /* Comment */6,\n          _0: Res_comment.makeSingleLineComment({\n                loc_start: startPos,\n                loc_end: endPos,\n                loc_ghost: false\n              }, $$String.sub(scanner.src, startOff, scanner.offset - startOff | 0))\n        };\n}\n\nfunction scanMultiLineComment(scanner) {\n  var contentStartOff = scanner.offset + 2 | 0;\n  var startPos = position(scanner);\n  var scan = function (_depth) {\n    while(true) {\n      var depth = _depth;\n      var match = scanner.ch;\n      var match$1 = peek(scanner);\n      if (match 
!== 42) {\n        if (match === 47 && match$1 === 42) {\n          next(scanner);\n          next(scanner);\n          _depth = depth + 1 | 0;\n          continue ;\n        }\n        \n      } else if (match$1 === 47) {\n        next(scanner);\n        next(scanner);\n        if (depth <= 1) {\n          return ;\n        }\n        _depth = depth - 1 | 0;\n        continue ;\n      }\n      if (match === -1) {\n        var endPos = position(scanner);\n        return Curry._3(scanner.err, startPos, endPos, Res_diagnostics.unclosedComment);\n      }\n      next(scanner);\n      continue ;\n    };\n  };\n  scan(0);\n  var length = (scanner.offset - 2 | 0) - contentStartOff | 0;\n  var length$1 = length < 0 ? 0 : length;\n  return {\n          TAG: /* Comment */6,\n          _0: Res_comment.makeMultiLineComment({\n                loc_start: startPos,\n                loc_end: position(scanner),\n                loc_ghost: false\n              }, $$String.sub(scanner.src, contentStartOff, length$1))\n        };\n}\n\nfunction scanTemplateLiteralToken(scanner) {\n  var startOff = scanner.offset;\n  if (scanner.ch === /* '}' */125) {\n    next(scanner);\n  }\n  var startPos = position(scanner);\n  var scan = function (_param) {\n    while(true) {\n      var ch = scanner.ch;\n      if (ch !== 36) {\n        if (ch !== 92) {\n          if (ch !== 96) {\n            if (ch === -1) {\n              var endPos = position(scanner);\n              Curry._3(scanner.err, startPos, endPos, Res_diagnostics.unclosedTemplate);\n              return {\n                      TAG: /* TemplateTail */7,\n                      _0: $$String.sub(scanner.src, startOff, Caml.caml_int_max((scanner.offset - 1 | 0) - startOff | 0, 0))\n                    };\n            }\n            next(scanner);\n            _param = undefined;\n            continue ;\n          }\n          next(scanner);\n          return {\n                  TAG: /* TemplateTail */7,\n                  _0: 
$$String.sub(scanner.src, startOff, (scanner.offset - 1 | 0) - startOff | 0)\n                };\n        }\n        var match = peek(scanner);\n        if (match >= 36) {\n          if (match > 95 || match < 37) {\n            if (match >= 97) {\n              next(scanner);\n              _param = undefined;\n              continue ;\n            }\n            next(scanner);\n            next(scanner);\n            _param = undefined;\n            continue ;\n          }\n          if (match !== 92) {\n            next(scanner);\n            _param = undefined;\n            continue ;\n          }\n          next(scanner);\n          next(scanner);\n          _param = undefined;\n          continue ;\n        }\n        if (match !== 10) {\n          if (match !== 13) {\n            next(scanner);\n            _param = undefined;\n            continue ;\n          }\n          next(scanner);\n          next(scanner);\n          _param = undefined;\n          continue ;\n        }\n        next(scanner);\n        next(scanner);\n        _param = undefined;\n        continue ;\n      }\n      var match$1 = peek(scanner);\n      if (match$1 !== 123) {\n        next(scanner);\n        _param = undefined;\n        continue ;\n      }\n      next(scanner);\n      next(scanner);\n      var contents = $$String.sub(scanner.src, startOff, (scanner.offset - 2 | 0) - startOff | 0);\n      return {\n              TAG: /* TemplatePart */8,\n              _0: contents\n            };\n    };\n  };\n  var token = scan(undefined);\n  var endPos = position(scanner);\n  return [\n          startPos,\n          endPos,\n          token\n        ];\n}\n\nfunction scan(scanner) {\n  skipWhitespace(scanner);\n  var startPos = position(scanner);\n  var ch = scanner.ch;\n  var token;\n  var exit = 0;\n  switch (ch) {\n    case 33 :\n        var match = peek(scanner);\n        var match$1 = peek2(scanner);\n        if (match !== 61) {\n          next(scanner);\n          token = /* Bang 
*/7;\n        } else if (match$1 !== 61) {\n          next(scanner);\n          next(scanner);\n          token = /* BangEqual */70;\n        } else {\n          next3(scanner);\n          token = /* BangEqualEqual */71;\n        }\n        break;\n    case 34 :\n        token = scanString(scanner);\n        break;\n    case 35 :\n        var match$2 = peek(scanner);\n        if (match$2 !== 61) {\n          next(scanner);\n          token = /* Hash */44;\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* HashEqual */45;\n        }\n        break;\n    case 37 :\n        var match$3 = peek(scanner);\n        if (match$3 !== 37) {\n          next(scanner);\n          token = /* Percent */77;\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* PercentPercent */78;\n        }\n        break;\n    case 38 :\n        var match$4 = peek(scanner);\n        if (match$4 !== 38) {\n          next(scanner);\n          token = /* Band */69;\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* Land */67;\n        }\n        break;\n    case 39 :\n        var match$5 = peek(scanner);\n        var match$6 = peek2(scanner);\n        if (match$5 !== 92) {\n          if (match$6 !== 39) {\n            next(scanner);\n            var offset = scanner.offset;\n            var match$7 = Res_utf8.decodeCodePoint(scanner.offset, scanner.src, scanner.src.length);\n            var length = match$7[1];\n            for(var _for = 0; _for < length; ++_for){\n              next(scanner);\n            }\n            if (scanner.ch === /* '\\'' */39) {\n              var contents = $$String.sub(scanner.src, offset, length);\n              next(scanner);\n              token = {\n                TAG: /* Codepoint */0,\n                c: match$7[0],\n                original: contents\n              };\n            } else {\n              scanner.ch = match$5;\n              
scanner.offset = offset;\n              token = /* SingleQuote */13;\n            }\n          } else {\n            var offset$1 = scanner.offset + 1 | 0;\n            next3(scanner);\n            token = {\n              TAG: /* Codepoint */0,\n              c: match$5,\n              original: $$String.sub(scanner.src, offset$1, 1)\n            };\n          }\n        } else if (match$6 !== 34) {\n          next(scanner);\n          next(scanner);\n          token = scanEscape(scanner);\n        } else {\n          next(scanner);\n          token = /* SingleQuote */13;\n        }\n        break;\n    case 40 :\n        next(scanner);\n        token = /* Lparen */18;\n        break;\n    case 41 :\n        next(scanner);\n        token = /* Rparen */19;\n        break;\n    case 42 :\n        var match$8 = peek(scanner);\n        if (match$8 !== 42) {\n          if (match$8 !== 46) {\n            next(scanner);\n            token = /* Asterisk */31;\n          } else {\n            next(scanner);\n            next(scanner);\n            token = /* AsteriskDot */32;\n          }\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* Exponentiation */33;\n        }\n        break;\n    case 43 :\n        var match$9 = peek(scanner);\n        if (match$9 !== 43) {\n          if (match$9 !== 46) {\n            if (match$9 !== 61) {\n              next(scanner);\n              token = /* Plus */36;\n            } else {\n              next(scanner);\n              next(scanner);\n              token = /* PlusEqual */39;\n            }\n          } else {\n            next(scanner);\n            next(scanner);\n            token = /* PlusDot */37;\n          }\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* PlusPlus */38;\n        }\n        break;\n    case 44 :\n        next(scanner);\n        token = /* Comma */25;\n        break;\n    case 45 :\n        var match$10 = peek(scanner);\n 
       if (match$10 !== 46) {\n          if (match$10 !== 62) {\n            next(scanner);\n            token = /* Minus */34;\n          } else {\n            next(scanner);\n            next(scanner);\n            token = /* MinusGreater */58;\n          }\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* MinusDot */35;\n        }\n        break;\n    case 46 :\n        var match$11 = peek(scanner);\n        var match$12 = peek2(scanner);\n        if (match$11 !== 46) {\n          next(scanner);\n          token = /* Dot */4;\n        } else if (match$12 !== 46) {\n          next(scanner);\n          next(scanner);\n          token = /* DotDot */5;\n        } else {\n          next3(scanner);\n          token = /* DotDotDot */6;\n        }\n        break;\n    case 47 :\n        var match$13 = peek(scanner);\n        switch (match$13) {\n          case 42 :\n              token = scanMultiLineComment(scanner);\n              break;\n          case 43 :\n          case 44 :\n          case 45 :\n              next(scanner);\n              token = /* Forwardslash */29;\n              break;\n          case 46 :\n              next(scanner);\n              next(scanner);\n              token = /* ForwardslashDot */30;\n              break;\n          case 47 :\n              next(scanner);\n              next(scanner);\n              token = scanSingleLineComment(scanner);\n              break;\n          default:\n            next(scanner);\n            token = /* Forwardslash */29;\n        }\n        break;\n    case 48 :\n    case 49 :\n    case 50 :\n    case 51 :\n    case 52 :\n    case 53 :\n    case 54 :\n    case 55 :\n    case 56 :\n    case 57 :\n        token = scanNumber(scanner);\n        break;\n    case 58 :\n        var match$14 = peek(scanner);\n        if (match$14 !== 61) {\n          if (match$14 !== 62) {\n            next(scanner);\n            token = /* Colon */24;\n          } else {\n            
next(scanner);\n            next(scanner);\n            token = /* ColonGreaterThan */40;\n          }\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* ColonEqual */74;\n        }\n        break;\n    case 59 :\n        next(scanner);\n        token = /* Semicolon */8;\n        break;\n    case 60 :\n        if (inJsxMode(scanner)) {\n          next(scanner);\n          skipWhitespace(scanner);\n          var match$15 = scanner.ch;\n          if (match$15 !== 47) {\n            if (match$15 !== 61) {\n              token = /* LessThan */42;\n            } else {\n              next(scanner);\n              token = /* LessEqual */72;\n            }\n          } else {\n            next(scanner);\n            token = /* LessThanSlash */43;\n          }\n        } else {\n          var match$16 = peek(scanner);\n          if (match$16 !== 61) {\n            next(scanner);\n            token = /* LessThan */42;\n          } else {\n            next(scanner);\n            next(scanner);\n            token = /* LessEqual */72;\n          }\n        }\n        break;\n    case 61 :\n        var match$17 = peek(scanner);\n        var match$18 = peek2(scanner);\n        if (match$17 !== 61) {\n          if (match$17 !== 62) {\n            next(scanner);\n            token = /* Equal */14;\n          } else {\n            next(scanner);\n            next(scanner);\n            token = /* EqualGreater */57;\n          }\n        } else if (match$18 !== 61) {\n          next(scanner);\n          next(scanner);\n          token = /* EqualEqual */15;\n        } else {\n          next3(scanner);\n          token = /* EqualEqualEqual */16;\n        }\n        break;\n    case 62 :\n        var match$19 = peek(scanner);\n        if (match$19 !== 61 || inDiamondMode(scanner)) {\n          next(scanner);\n          token = /* GreaterThan */41;\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* 
GreaterEqual */73;\n        }\n        break;\n    case 63 :\n        next(scanner);\n        token = /* Question */49;\n        break;\n    case 64 :\n        var match$20 = peek(scanner);\n        if (match$20 !== 64) {\n          next(scanner);\n          token = /* At */75;\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* AtAt */76;\n        }\n        break;\n    case 91 :\n        next(scanner);\n        token = /* Lbracket */20;\n        break;\n    case 92 :\n        next(scanner);\n        token = scanExoticIdentifier(scanner);\n        break;\n    case 93 :\n        next(scanner);\n        token = /* Rbracket */21;\n        break;\n    case 36 :\n    case 94 :\n        exit = 1;\n        break;\n    case 95 :\n        var match$21 = peek(scanner);\n        if (match$21 >= 91) {\n          if (match$21 >= 97) {\n            if (match$21 >= 123) {\n              next(scanner);\n              token = /* Underscore */12;\n            } else {\n              token = scanIdentifier(scanner);\n            }\n          } else if (match$21 !== 95) {\n            next(scanner);\n            token = /* Underscore */12;\n          } else {\n            token = scanIdentifier(scanner);\n          }\n        } else if (match$21 >= 58) {\n          if (match$21 >= 65) {\n            token = scanIdentifier(scanner);\n          } else {\n            next(scanner);\n            token = /* Underscore */12;\n          }\n        } else if (match$21 >= 48) {\n          token = scanIdentifier(scanner);\n        } else {\n          next(scanner);\n          token = /* Underscore */12;\n        }\n        break;\n    case 96 :\n        next(scanner);\n        token = /* Backtick */80;\n        break;\n    case 65 :\n    case 66 :\n    case 67 :\n    case 68 :\n    case 69 :\n    case 70 :\n    case 71 :\n    case 72 :\n    case 73 :\n    case 74 :\n    case 75 :\n    case 76 :\n    case 77 :\n    case 78 :\n    case 79 :\n    case 80 
:\n    case 81 :\n    case 82 :\n    case 83 :\n    case 84 :\n    case 85 :\n    case 86 :\n    case 87 :\n    case 88 :\n    case 89 :\n    case 90 :\n    case 97 :\n    case 98 :\n    case 99 :\n    case 100 :\n    case 101 :\n    case 102 :\n    case 103 :\n    case 104 :\n    case 105 :\n    case 106 :\n    case 107 :\n    case 108 :\n    case 109 :\n    case 110 :\n    case 111 :\n    case 112 :\n    case 113 :\n    case 114 :\n    case 115 :\n    case 116 :\n    case 117 :\n    case 118 :\n    case 119 :\n    case 120 :\n    case 121 :\n    case 122 :\n        token = scanIdentifier(scanner);\n        break;\n    case 123 :\n        next(scanner);\n        token = /* Lbrace */22;\n        break;\n    case 124 :\n        var match$22 = peek(scanner);\n        if (match$22 !== 62) {\n          if (match$22 !== 124) {\n            next(scanner);\n            token = /* Bar */17;\n          } else {\n            next(scanner);\n            next(scanner);\n            token = /* Lor */68;\n          }\n        } else {\n          next(scanner);\n          next(scanner);\n          token = /* BarGreater */81;\n        }\n        break;\n    case 125 :\n        next(scanner);\n        token = /* Rbrace */23;\n        break;\n    case 126 :\n        next(scanner);\n        token = /* Tilde */48;\n        break;\n    default:\n      exit = 1;\n  }\n  if (exit === 1) {\n    next(scanner);\n    if (ch === -1) {\n      token = /* Eof */26;\n    } else {\n      var endPos = position(scanner);\n      Curry._3(scanner.err, startPos, endPos, Res_diagnostics.unknownUchar(ch));\n      token = scan(scanner)[2];\n    }\n  }\n  var endPos$1 = position(scanner);\n  return [\n          startPos,\n          endPos$1,\n          token\n        ];\n}\n\nfunction reconsiderLessThan(scanner) {\n  skipWhitespace(scanner);\n  if (scanner.ch === /* '/' */47) {\n    next(scanner);\n    return /* LessThanSlash */43;\n  } else {\n    return /* LessThan */42;\n  }\n}\n\nfunction 
isBinaryOp(src, startCnum, endCnum) {\n  if (startCnum === 0) {\n    return false;\n  }\n  if (endCnum < 0) {\n    throw {\n          RE_EXN_ID: \"Assert_failure\",\n          _1: [\n            \"res_scanner.res\",\n            989,\n            4\n          ],\n          Error: new Error()\n        };\n  }\n  if (!(startCnum > 0 && startCnum < src.length)) {\n    throw {\n          RE_EXN_ID: \"Assert_failure\",\n          _1: [\n            \"res_scanner.res\",\n            990,\n            4\n          ],\n          Error: new Error()\n        };\n  }\n  var leftOk = isWhitespace(src.charCodeAt(startCnum - 1 | 0));\n  var rightOk = endCnum >= src.length || isWhitespace(src.charCodeAt(endCnum));\n  if (leftOk) {\n    return rightOk;\n  } else {\n    return false;\n  }\n}\n\nfunction tryAdvanceQuotedString(scanner) {\n  var scanContents = function (tag) {\n    while(true) {\n      var ch = scanner.ch;\n      if (ch !== 124) {\n        if (ch === -1) {\n          return ;\n        }\n        next(scanner);\n        continue ;\n      }\n      next(scanner);\n      var match = scanner.ch;\n      if (match >= 123) {\n        if (match === 125) {\n          return next(scanner);\n        }\n        continue ;\n      }\n      if (match >= 97) {\n        var startOff = scanner.offset;\n        skipLowerCaseChars(scanner);\n        var suffix = $$String.sub(scanner.src, startOff, scanner.offset - startOff | 0);\n        if (tag === suffix) {\n          if (scanner.ch === /* '}' */125) {\n            return next(scanner);\n          }\n          continue ;\n        }\n        continue ;\n      }\n      continue ;\n    };\n  };\n  var match = scanner.ch;\n  if (match >= 123) {\n    if (match !== 124) {\n      return ;\n    } else {\n      return scanContents(\"\");\n    }\n  }\n  if (match < 97) {\n    return ;\n  }\n  var startOff = scanner.offset;\n  skipLowerCaseChars(scanner);\n  var tag = $$String.sub(scanner.src, startOff, scanner.offset - startOff | 0);\n  if 
(scanner.ch === /* '|' */124) {\n    return scanContents(tag);\n  }\n  \n}\n\nvar Diagnostics;\n\nvar Token;\n\nvar $$Comment;\n\nvar hackyEOFChar = -1;\n\nexport {\n  Diagnostics ,\n  Token ,\n  $$Comment ,\n  hackyEOFChar ,\n  setDiamondMode ,\n  setJsxMode ,\n  popMode ,\n  inDiamondMode ,\n  inJsxMode ,\n  position ,\n  _printDebug ,\n  next ,\n  next2 ,\n  next3 ,\n  peek ,\n  peek2 ,\n  make ,\n  isWhitespace ,\n  skipWhitespace ,\n  digitValue ,\n  skipLowerCaseChars ,\n  scanIdentifier ,\n  scanDigits ,\n  scanNumber ,\n  scanExoticIdentifier ,\n  scanStringEscapeSequence ,\n  scanString ,\n  scanEscape ,\n  scanSingleLineComment ,\n  scanMultiLineComment ,\n  scanTemplateLiteralToken ,\n  scan ,\n  reconsiderLessThan ,\n  isBinaryOp ,\n  tryAdvanceQuotedString ,\n  \n}\n/* P Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_scanner.res",
    "content": "open P\n\nmodule Diagnostics = Res_diagnostics\nmodule Token = Res_token\nmodule Comment = Res_comment\n\ntype mode = Jsx | Diamond\n\n/* We hide the implementation detail of the scanner reading character. Our char\nwill also contain the special -1 value to indicate end-of-file. This isn't\nideal; we should clean this up */\nlet hackyEOFChar = Char.unsafe_chr(-1)\ntype charEncoding = Char.t\n\ntype t = {\n  filename: string,\n  src: string,\n  mutable err: (~startPos: Lexing.position, ~endPos: Lexing.position, Diagnostics.category) => unit,\n  mutable ch: charEncoding /* current character */,\n  mutable offset: int /* character offset */,\n  mutable lineOffset: int /* current line offset */,\n  mutable lnum: int /* current line number */,\n  mutable mode: list<mode>,\n}\n\nlet setDiamondMode = scanner => scanner.mode = list{Diamond, ...scanner.mode}\n\nlet setJsxMode = scanner => scanner.mode = list{Jsx, ...scanner.mode}\n\nlet popMode = (scanner, mode) =>\n  switch scanner.mode {\n  | list{m, ...ms} if m == mode => scanner.mode = ms\n  | _ => ()\n  }\n\nlet inDiamondMode = scanner =>\n  switch scanner.mode {\n  | list{Diamond, ..._} => true\n  | _ => false\n  }\n\nlet inJsxMode = scanner =>\n  switch scanner.mode {\n  | list{Jsx, ..._} => true\n  | _ => false\n  }\n\nlet position = scanner => {\n  open Lexing\n  {\n    pos_fname: scanner.filename,\n    /* line number */\n    pos_lnum: scanner.lnum,\n    /* offset of the beginning of the line (number\n     of characters between the beginning of the scanner and the beginning\n     of the line) */\n    pos_bol: scanner.lineOffset,\n    /* [pos_cnum] is the offset of the position (number of\n     characters between the beginning of the scanner and the position). 
*/\n    pos_cnum: scanner.offset,\n  }\n}\n\n/* Small debugging util\n❯ echo 'let msg = \"hello\"' | ./lib/rescript.exe\nlet msg = \"hello\"\n^-^ let 0-3\nlet msg = \"hello\"\n    ^-^ msg 4-7\nlet msg = \"hello\"\n        ^ = 8-9\nlet msg = \"hello\"\n          ^-----^ string \"hello\" 10-17\nlet msg = \"hello\"\n                  ^ eof 18-18\nlet msg = \"hello\"\n*/\n@live\nlet _printDebug = (~startPos, ~endPos, scanner, token) => {\n  open Lexing\n  print_string(scanner.src)\n  print_string((@doesNotRaise String.make)(startPos.pos_cnum, ' '))\n  print_char('^')\n  switch endPos.pos_cnum - startPos.pos_cnum {\n  | 0 =>\n    if token == Token.Eof {\n      ()\n    } else {\n      assert false\n    }\n  | 1 => ()\n  | n =>\n    print_string((@doesNotRaise String.make)(n - 2, '-'))\n    print_char('^')\n  }\n  print_char(' ')\n  print_string(Res_token.toString(token))\n  print_char(' ')\n  print_int(startPos.pos_cnum)\n  print_char('-')\n  print_int(endPos.pos_cnum)\n  print_endline(\"\")\n}\n\nlet next = scanner => {\n  let nextOffset = scanner.offset + 1\n  switch scanner.ch {\n  | '\\n' =>\n    scanner.lineOffset = nextOffset\n    scanner.lnum = scanner.lnum + 1\n  /* What about CRLF (\\r + \\n) on windows?\n   * \\r\\n will always be terminated by a \\n\n   * -> we can just bump the line count on \\n */\n  | _ => ()\n  }\n  if nextOffset < String.length(scanner.src) {\n    scanner.offset = nextOffset\n    scanner.ch = String.unsafe_get(scanner.src, scanner.offset)\n  } else {\n    scanner.offset = String.length(scanner.src)\n    scanner.ch = hackyEOFChar\n  }\n}\n\nlet next2 = scanner => {\n  next(scanner)\n  next(scanner)\n}\n\nlet next3 = scanner => {\n  next(scanner)\n  next(scanner)\n  next(scanner)\n}\n\nlet peek = scanner =>\n  if scanner.offset + 1 < String.length(scanner.src) {\n    String.unsafe_get(scanner.src, scanner.offset + 1)\n  } else {\n    hackyEOFChar\n  }\n\nlet peek2 = scanner =>\n  if scanner.offset + 2 < String.length(scanner.src) {\n    
String.unsafe_get(scanner.src, scanner.offset + 2)\n  } else {\n    hackyEOFChar\n  }\n\nlet make = (~filename, src) => {\n  filename: filename,\n  src: src,\n  err: (~startPos as _, ~endPos as _, _) => (),\n  ch: if src == \"\" {\n    hackyEOFChar\n  } else {\n    String.unsafe_get(src, 0)\n  },\n  offset: 0,\n  lineOffset: 0,\n  lnum: 1,\n  mode: list{},\n}\n\n/* generic helpers */\n\nlet isWhitespace = ch =>\n  switch ch {\n  | ' ' | '\\t' | '\\n' | '\\r' => true\n  | _ => false\n  }\n\nlet rec skipWhitespace = scanner =>\n  if isWhitespace(scanner.ch) {\n    next(scanner)\n    skipWhitespace(scanner)\n  }\n\nlet digitValue = ch =>\n  switch ch {\n  | '0' .. '9' => Char.code(ch) - 48\n  | 'a' .. 'f' => Char.code(ch) - Char.code('a') + 10\n  | 'A' .. 'F' => Char.code(ch) + 32 - Char.code('a') + 10\n  | _ => 16\n  } /* larger than any legal value */\n\nlet rec skipLowerCaseChars = scanner =>\n  switch scanner.ch {\n  | 'a' .. 'z' =>\n    next(scanner)\n    skipLowerCaseChars(scanner)\n  | _ => ()\n  }\n\n/* scanning helpers */\n\nlet scanIdentifier = scanner => {\n  let startOff = scanner.offset\n  let rec skipGoodChars = scanner =>\n    switch scanner.ch {\n    | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '_' | '\\'' =>\n      next(scanner)\n      skipGoodChars(scanner)\n    | _ => ()\n    }\n\n  skipGoodChars(scanner)\n  let str = (@doesNotRaise String.sub)(scanner.src, startOff, scanner.offset - startOff)\n  if '{' === scanner.ch && str == \"list\" {\n    next(scanner)\n    /* TODO: this isn't great */\n    Token.lookupKeyword(\"list{\")\n  } else {\n    Token.lookupKeyword(str)\n  }\n}\n\nlet scanDigits = (scanner, ~base) =>\n  if base <= 10 {\n    let rec loop = scanner =>\n      switch scanner.ch {\n      | '0' .. '9' | '_' =>\n        next(scanner)\n        loop(scanner)\n      | _ => ()\n      }\n    loop(scanner)\n  } else {\n    let rec loop = scanner =>\n      switch scanner.ch {\n      /* hex */\n      | '0' .. '9' | 'a' .. 'f' | 'A' .. 
'F' | '_' =>\n        next(scanner)\n        loop(scanner)\n      | _ => ()\n      }\n    loop(scanner)\n  }\n\n/* float: (0…9) { 0…9∣ _ } [. { 0…9∣ _ }] [(e∣ E) [+∣ -] (0…9) { 0…9∣ _ }] */\nlet scanNumber = scanner => {\n  let startOff = scanner.offset\n\n  /* integer part */\n  let base = switch scanner.ch {\n  | '0' =>\n    switch peek(scanner) {\n    | 'x' | 'X' =>\n      next2(scanner)\n      16\n    | 'o' | 'O' =>\n      next2(scanner)\n      8\n    | 'b' | 'B' =>\n      next2(scanner)\n      2\n    | _ =>\n      next(scanner)\n      8\n    }\n  | _ => 10\n  }\n\n  scanDigits(scanner, ~base)\n\n  /*  */\n  let isFloat = if '.' === scanner.ch {\n    next(scanner)\n    scanDigits(scanner, ~base)\n    true\n  } else {\n    false\n  }\n\n  /* exponent part */\n  let isFloat = switch scanner.ch {\n  | 'e' | 'E' | 'p' | 'P' =>\n    switch peek(scanner) {\n    | '+' | '-' => next2(scanner)\n    | _ => next(scanner)\n    }\n    scanDigits(scanner, ~base)\n    true\n  | _ => isFloat\n  }\n\n  let literal = (@doesNotRaise String.sub)(scanner.src, startOff, scanner.offset - startOff)\n\n  /* suffix */\n  let suffix = switch scanner.ch {\n  | 'n' =>\n    let msg = \"Unsupported number type (nativeint). Did you mean `\" ++ (literal ++ \"`?\")\n\n    let pos = position(scanner)\n    scanner.err(~startPos=pos, ~endPos=pos, Diagnostics.message(msg))\n    next(scanner)\n    Some('n')\n  | ('g' .. 'z' | 'G' .. 'Z') as ch =>\n    next(scanner)\n    Some(ch)\n  | _ => None\n  }\n\n  if isFloat {\n    Token.Float({f: literal, suffix: suffix})\n  } else {\n    Token.Int({i: literal, suffix: suffix})\n  }\n}\n\nlet scanExoticIdentifier = scanner => {\n  /* TODO: are we disregarding the current char...? 
Should be a quote */\n  next(scanner)\n  let buffer = Buffer.create(20)\n  let startPos = position(scanner)\n\n  let rec scan = () =>\n    switch scanner.ch {\n    | '\"' => next(scanner)\n    | '\\n' | '\\r' =>\n      /* line break */\n      let endPos = position(scanner)\n      scanner.err(\n        ~startPos,\n        ~endPos,\n        Diagnostics.message(\"A quoted identifier can't contain line breaks.\"),\n      )\n      next(scanner)\n    | ch if ch === hackyEOFChar =>\n      let endPos = position(scanner)\n      scanner.err(~startPos, ~endPos, Diagnostics.message(\"Did you forget a \\\" here?\"))\n    | ch =>\n      Buffer.add_char(buffer, ch)\n      next(scanner)\n      scan()\n    }\n\n  scan()\n  /* TODO: do we really need to create a new buffer instead of substring once? */\n  Token.Lident(Buffer.contents(buffer))\n}\n\nlet scanStringEscapeSequence = (~startPos, scanner) => {\n  let scan = (~n, ~base, ~max) => {\n    let rec loop = (n, x) =>\n      if n === 0 {\n        x\n      } else {\n        let d = digitValue(scanner.ch)\n        if d >= base {\n          let pos = position(scanner)\n          let msg = if scanner.ch === hackyEOFChar {\n            \"unclosed escape sequence\"\n          } else {\n            \"unknown escape sequence\"\n          }\n\n          scanner.err(~startPos, ~endPos=pos, Diagnostics.message(msg))\n          -1\n        } else {\n          let () = next(scanner)\n          loop(n - 1, x * base + d)\n        }\n      }\n\n    let x = loop(n, 0)\n    if x > max || (0xD800 <= x && x < 0xE000) {\n      let pos = position(scanner)\n      let msg = \"escape sequence is invalid unicode code point\"\n      scanner.err(~startPos, ~endPos=pos, Diagnostics.message(msg))\n    }\n  }\n\n  switch scanner.ch {\n  /* \\ already consumed */\n  | 'n' | 't' | 'b' | 'r' | '\\\\' | ' ' | '\\'' | '\"' => next(scanner)\n  | '0' .. 
'9' =>\n    /* decimal */\n    scan(~n=3, ~base=10, ~max=255)\n  | 'o' =>\n    /* octal */\n    next(scanner)\n    scan(~n=3, ~base=8, ~max=255)\n  | 'x' =>\n    /* hex */\n    next(scanner)\n    scan(~n=2, ~base=16, ~max=255)\n  | 'u' =>\n    next(scanner)\n    switch scanner.ch {\n    | '{' =>\n      /* unicode code point escape sequence: '\\u{7A}', one or more hex digits */\n      next(scanner)\n      let x = ref(0)\n      while (\n        switch scanner.ch {\n        | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' => true\n        | _ => false\n        }\n      ) {\n        x := x.contents * 16 + digitValue(scanner.ch)\n        next(scanner)\n      }\n      /* consume '}' in '\\u{7A}' */\n      switch scanner.ch {\n      | '}' => next(scanner)\n      | _ => ()\n      }\n    | _ => scan(~n=4, ~base=16, ~max=Res_utf8.max)\n    }\n  | _ => /* unknown escape sequence\n     * TODO: we should warn the user here. Let's not make it a hard error for now, for reason compat */\n    /*\n      let pos = position scanner in\n      let msg =\n        if ch == -1 then \"unclosed escape sequence\"\n        else \"unknown escape sequence\"\n      in\n      scanner.err ~startPos ~endPos:pos (Diagnostics.message msg)\n */\n    ()\n  }\n}\n\nlet scanString = scanner => {\n  /* assumption: we've just matched a quote */\n\n  let startPosWithQuote = position(scanner)\n  next(scanner)\n  let firstCharOffset = scanner.offset\n\n  let rec scan = () =>\n    switch scanner.ch {\n    | '\"' =>\n      let lastCharOffset = scanner.offset\n      next(scanner)\n      (@doesNotRaise String.sub)(scanner.src, firstCharOffset, lastCharOffset - firstCharOffset)\n    | '\\\\' =>\n      let startPos = position(scanner)\n      next(scanner)\n      scanStringEscapeSequence(~startPos, scanner)\n      scan()\n    | ch if ch === hackyEOFChar =>\n      let endPos = position(scanner)\n      scanner.err(~startPos=startPosWithQuote, ~endPos, Diagnostics.unclosedString)\n      (@doesNotRaise String.sub)(scanner.src, 
firstCharOffset, scanner.offset - firstCharOffset)\n    | _ =>\n      next(scanner)\n      scan()\n    }\n\n  Token.String(scan())\n}\n\nlet scanEscape = scanner => {\n  /* '\\' consumed */\n  let offset = scanner.offset - 1\n  let convertNumber = (scanner, ~n, ~base) => {\n    let x = ref(0)\n    for _ in n downto 1 {\n      let d = digitValue(scanner.ch)\n      x := x.contents * base + d\n      next(scanner)\n    }\n    let c = x.contents\n    if Res_utf8.isValidCodePoint(c) {\n      Char.unsafe_chr(c)\n    } else {\n      Char.unsafe_chr(Res_utf8.repl)\n    }\n  }\n\n  let codepoint = switch scanner.ch {\n  | '0' .. '9' => convertNumber(scanner, ~n=3, ~base=10)\n  | 'b' =>\n    next(scanner)\n    '\\b'\n  | 'n' =>\n    next(scanner)\n    '\\n'\n  | 'r' =>\n    next(scanner)\n    '\\r'\n  | 't' =>\n    next(scanner)\n    '\\t'\n  | 'x' =>\n    next(scanner)\n    convertNumber(scanner, ~n=2, ~base=16)\n  | 'o' =>\n    next(scanner)\n    convertNumber(scanner, ~n=3, ~base=8)\n  | 'u' =>\n    next(scanner)\n    switch scanner.ch {\n    | '{' =>\n      /* unicode code point escape sequence: '\\u{7A}', one or more hex digits */\n      next(scanner)\n      let x = ref(0)\n      while (\n        switch scanner.ch {\n        | '0' .. '9' | 'a' .. 'f' | 'A' .. 
'F' => true\n        | _ => false\n        }\n      ) {\n        x := x.contents * 16 + digitValue(scanner.ch)\n        next(scanner)\n      }\n      /* consume '}' in '\\u{7A}' */\n      switch scanner.ch {\n      | '}' => next(scanner)\n      | _ => ()\n      }\n      let c = x.contents\n      if Res_utf8.isValidCodePoint(c) {\n        Char.unsafe_chr(c)\n      } else {\n        Char.unsafe_chr(Res_utf8.repl)\n      }\n    | _ =>\n      /* unicode escape sequence: '\\u007A', exactly 4 hex digits */\n      convertNumber(scanner, ~n=4, ~base=16)\n    }\n  | ch =>\n    next(scanner)\n    ch\n  }\n\n  let contents = (@doesNotRaise String.sub)(scanner.src, offset, scanner.offset - offset)\n  next(scanner) /* Consume \\' */\n  /* TODO: do we know it's \\' ? */\n  Token.Codepoint({c: codepoint, original: contents})\n}\n\nlet scanSingleLineComment = scanner => {\n  let startOff = scanner.offset\n  let startPos = position(scanner)\n  let rec skip = scanner =>\n    switch scanner.ch {\n    | '\\n' | '\\r' => ()\n    | ch if ch === hackyEOFChar => ()\n    | _ =>\n      next(scanner)\n      skip(scanner)\n    }\n\n  skip(scanner)\n  let endPos = position(scanner)\n  Token.Comment(\n    Comment.makeSingleLineComment(\n      ~loc={\n        open Location\n        {loc_start: startPos, loc_end: endPos, loc_ghost: false}\n      },\n      (@doesNotRaise String.sub)(scanner.src, startOff, scanner.offset - startOff),\n    ),\n  )\n}\n\nlet scanMultiLineComment = scanner => {\n  /* assumption: we're only ever using this helper in `scan` after detecting a comment */\n  let contentStartOff = scanner.offset + 2\n  let startPos = position(scanner)\n  let rec scan = (~depth) =>\n    /* invariant: depth > 0 right after this match. 
See assumption */\n    switch (scanner.ch, peek(scanner)) {\n    | ('/', '*') =>\n      next2(scanner)\n      scan(~depth=depth + 1)\n    | ('*', '/') =>\n      next2(scanner)\n      if depth > 1 {\n        scan(~depth=depth - 1)\n      }\n    | (ch, _) if ch === hackyEOFChar =>\n      let endPos = position(scanner)\n      scanner.err(~startPos, ~endPos, Diagnostics.unclosedComment)\n    | _ =>\n      next(scanner)\n      scan(~depth)\n    }\n\n  scan(~depth=0)\n  let length = scanner.offset - 2 - contentStartOff\n  let length = if length < 0 /* in case of EOF */ {\n    0\n  } else {\n    length\n  }\n  Token.Comment(\n    Comment.makeMultiLineComment(\n      ~loc={\n        open Location\n        {loc_start: startPos, loc_end: position(scanner), loc_ghost: false}\n      },\n      (@doesNotRaise String.sub)(scanner.src, contentStartOff, length),\n    ),\n  )\n}\n\nlet scanTemplateLiteralToken = scanner => {\n  let startOff = scanner.offset\n\n  /* if starting } here, consume it */\n  if scanner.ch === '}' {\n    next(scanner)\n  }\n\n  let startPos = position(scanner)\n\n  let rec scan = () =>\n    switch scanner.ch {\n    | '`' =>\n      next(scanner)\n      Token.TemplateTail(\n        (@doesNotRaise String.sub)(scanner.src, startOff, scanner.offset - 1 - startOff),\n      )\n    | '$' =>\n      switch peek(scanner) {\n      | '{' =>\n        next2(scanner)\n        let contents = (@doesNotRaise String.sub)(\n          scanner.src,\n          startOff,\n          scanner.offset - 2 - startOff,\n        )\n\n        Token.TemplatePart(contents)\n      | _ =>\n        next(scanner)\n        scan()\n      }\n    | '\\\\' =>\n      switch peek(scanner) {\n      | '`'\n      | '\\\\'\n      | '$'\n      | '\\n'\n      | '\\r' =>\n        /* line break */\n        next2(scanner)\n        scan()\n      | _ =>\n        next(scanner)\n        scan()\n      }\n    | ch if ch == hackyEOFChar =>\n      let endPos = position(scanner)\n      scanner.err(~startPos, ~endPos, 
Diagnostics.unclosedTemplate)\n      Token.TemplateTail(\n        (@doesNotRaise String.sub)(scanner.src, startOff, max(scanner.offset - 1 - startOff, 0)),\n      )\n    | _ =>\n      next(scanner)\n      scan()\n    }\n\n  let token = scan()\n  let endPos = position(scanner)\n  (startPos, endPos, token)\n}\n\nlet rec scan = scanner => {\n  skipWhitespace(scanner)\n  let startPos = position(scanner)\n\n  let token = switch scanner.ch {\n  /* peeking 0 char */\n  | 'A' .. 'Z' | 'a' .. 'z' => scanIdentifier(scanner)\n  | '0' .. '9' => scanNumber(scanner)\n  | '`' =>\n    next(scanner)\n    Token.Backtick\n  | '~' =>\n    next(scanner)\n    Token.Tilde\n  | '?' =>\n    next(scanner)\n    Token.Question\n  | ';' =>\n    next(scanner)\n    Token.Semicolon\n  | '(' =>\n    next(scanner)\n    Token.Lparen\n  | ')' =>\n    next(scanner)\n    Token.Rparen\n  | '[' =>\n    next(scanner)\n    Token.Lbracket\n  | ']' =>\n    next(scanner)\n    Token.Rbracket\n  | '{' =>\n    next(scanner)\n    Token.Lbrace\n  | '}' =>\n    next(scanner)\n    Token.Rbrace\n  | ',' =>\n    next(scanner)\n    Token.Comma\n  | '\"' => scanString(scanner)\n\n  /* peeking 1 char */\n  | '_' =>\n    switch peek(scanner) {\n    | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '_' => scanIdentifier(scanner)\n    | _ =>\n      next(scanner)\n      Token.Underscore\n    }\n  | '#' =>\n    switch peek(scanner) {\n    | '=' =>\n      next2(scanner)\n      Token.HashEqual\n    | _ =>\n      next(scanner)\n      Token.Hash\n    }\n  | '*' =>\n    switch peek(scanner) {\n    | '*' =>\n      next2(scanner)\n      Token.Exponentiation\n    | '.' 
=>\n      next2(scanner)\n      Token.AsteriskDot\n    | _ =>\n      next(scanner)\n      Token.Asterisk\n    }\n  | '@' =>\n    switch peek(scanner) {\n    | '@' =>\n      next2(scanner)\n      Token.AtAt\n    | _ =>\n      next(scanner)\n      Token.At\n    }\n  | '%' =>\n    switch peek(scanner) {\n    | '%' =>\n      next2(scanner)\n      Token.PercentPercent\n    | _ =>\n      next(scanner)\n      Token.Percent\n    }\n  | '|' =>\n    switch peek(scanner) {\n    | '|' =>\n      next2(scanner)\n      Token.Lor\n    | '>' =>\n      next2(scanner)\n      Token.BarGreater\n    | _ =>\n      next(scanner)\n      Token.Bar\n    }\n  | '&' =>\n    switch peek(scanner) {\n    | '&' =>\n      next2(scanner)\n      Token.Land\n    | _ =>\n      next(scanner)\n      Token.Band\n    }\n  | ':' =>\n    switch peek(scanner) {\n    | '=' =>\n      next2(scanner)\n      Token.ColonEqual\n    | '>' =>\n      next2(scanner)\n      Token.ColonGreaterThan\n    | _ =>\n      next(scanner)\n      Token.Colon\n    }\n  | '\\\\' =>\n    next(scanner)\n    scanExoticIdentifier(scanner)\n  | '/' =>\n    switch peek(scanner) {\n    | '/' =>\n      next2(scanner)\n      scanSingleLineComment(scanner)\n    | '*' => scanMultiLineComment(scanner)\n    | '.' =>\n      next2(scanner)\n      Token.ForwardslashDot\n    | _ =>\n      next(scanner)\n      Token.Forwardslash\n    }\n  | '-' =>\n    switch peek(scanner) {\n    | '.' =>\n      next2(scanner)\n      Token.MinusDot\n    | '>' =>\n      next2(scanner)\n      Token.MinusGreater\n    | _ =>\n      next(scanner)\n      Token.Minus\n    }\n  | '+' =>\n    switch peek(scanner) {\n    | '.' 
=>\n      next2(scanner)\n      Token.PlusDot\n    | '+' =>\n      next2(scanner)\n      Token.PlusPlus\n    | '=' =>\n      next2(scanner)\n      Token.PlusEqual\n    | _ =>\n      next(scanner)\n      Token.Plus\n    }\n  | '>' =>\n    switch peek(scanner) {\n    | '=' if !inDiamondMode(scanner) =>\n      next2(scanner)\n      Token.GreaterEqual\n    | _ =>\n      next(scanner)\n      Token.GreaterThan\n    }\n  | '<' if !inJsxMode(scanner) =>\n    switch peek(scanner) {\n    | '=' =>\n      next2(scanner)\n      Token.LessEqual\n    | _ =>\n      next(scanner)\n      Token.LessThan\n    }\n  /* special handling for JSX < */\n  | '<' =>\n    /* Imagine the following: <div><\n     * < indicates the start of a new jsx-element, the parser expects\n     * the name of a new element after the <\n     * Example: <div> <div\n     * But what if we have a / here: example </ in  <div></div>\n     * This signals a closing element. To simulate the two-token lookahead,\n     * the </ is emitted as a single new token LessThanSlash */\n    next(scanner)\n    skipWhitespace(scanner)\n    switch scanner.ch {\n    | '/' =>\n      next(scanner)\n      Token.LessThanSlash\n    | '=' =>\n      next(scanner)\n      Token.LessEqual\n    | _ => Token.LessThan\n    }\n\n  /* peeking 2 chars */\n  | '.' =>\n    switch (peek(scanner), peek2(scanner)) {\n    | ('.', '.') =>\n      next3(scanner)\n      Token.DotDotDot\n    | ('.', _) =>\n      next2(scanner)\n      Token.DotDot\n    | _ =>\n      next(scanner)\n      Token.Dot\n    }\n  | '\\'' =>\n    switch (peek(scanner), peek2(scanner)) {\n    | ('\\\\', '\"') =>\n      /* careful with this one! 
We're next-ing _once_ (not twice),\n       then relying on matching on the quote */\n      next(scanner)\n      SingleQuote\n    | ('\\\\', _) =>\n      next2(scanner)\n      scanEscape(scanner)\n    | (ch, '\\'') =>\n      let offset = scanner.offset + 1\n      next3(scanner)\n      Token.Codepoint({c: ch, original: (@doesNotRaise String.sub)(scanner.src, offset, 1)})\n    | (ch, _) =>\n      next(scanner)\n      let offset = scanner.offset\n      let (codepoint, length) = Res_utf8.decodeCodePoint(\n        scanner.offset,\n        scanner.src,\n        String.length(scanner.src),\n      )\n      for _ in 0 to length - 1 {\n        next(scanner)\n      }\n      if scanner.ch == '\\'' {\n        let contents = (@doesNotRaise String.sub)(scanner.src, offset, length)\n        next(scanner)\n        Token.Codepoint({c: Obj.magic(codepoint), original: contents})\n      } else {\n        scanner.ch = ch\n        scanner.offset = offset\n        SingleQuote\n      }\n    }\n  | '!' =>\n    switch (peek(scanner), peek2(scanner)) {\n    | ('=', '=') =>\n      next3(scanner)\n      Token.BangEqualEqual\n    | ('=', _) =>\n      next2(scanner)\n      Token.BangEqual\n    | _ =>\n      next(scanner)\n      Token.Bang\n    }\n  | '=' =>\n    switch (peek(scanner), peek2(scanner)) {\n    | ('=', '=') =>\n      next3(scanner)\n      Token.EqualEqualEqual\n    | ('=', _) =>\n      next2(scanner)\n      Token.EqualEqual\n    | ('>', _) =>\n      next2(scanner)\n      Token.EqualGreater\n    | _ =>\n      next(scanner)\n      Token.Equal\n    }\n\n  /* special cases */\n  | ch if ch === hackyEOFChar =>\n    next(scanner)\n    Token.Eof\n  | ch =>\n    /* if we arrive here, we're dealing with an unknown character,\n     * report the error and continue scanning… */\n    next(scanner)\n    let endPos = position(scanner)\n    scanner.err(~startPos, ~endPos, Diagnostics.unknownUchar(ch))\n    let (_, _, token) = scan(scanner)\n    token\n  }\n\n  let endPos = position(scanner)\n  /* 
_printDebug ~startPos ~endPos scanner token; */\n  (startPos, endPos, token)\n}\n\n/* misc helpers used elsewhere */\n\n/* Imagine: <div> <Navbar /> <\n * is `<` the start of a jsx-child? <div …\n * or is it the start of a closing tag?  </div>\n * reconsiderLessThan peeks at the next token and\n * determines the correct token to disambiguate */\nlet reconsiderLessThan = scanner => {\n  /* < consumed */\n  skipWhitespace(scanner)\n  if scanner.ch === '/' {\n    let () = next(scanner)\n    Token.LessThanSlash\n  } else {\n    Token.LessThan\n  }\n}\n\n/* If an operator has whitespace around both sides, it's a binary operator */\n/* TODO: this helper seems out of place */\nlet isBinaryOp = (src, startCnum, endCnum) =>\n  if startCnum === 0 {\n    false\n  } else {\n    /* we're gonna put some assertions and invariant checks here because this is\n     used outside of the scanner's normal invariant assumptions */\n    assert (endCnum >= 0)\n    assert (startCnum > 0 && startCnum < String.length(src))\n    let leftOk = isWhitespace(String.unsafe_get(src, startCnum - 1))\n    /* we need some stronger confidence that endCnum is ok */\n    let rightOk = endCnum >= String.length(src) || isWhitespace(String.unsafe_get(src, endCnum))\n    leftOk && rightOk\n  }\n\n/* Assume `{` consumed, advances the scanner towards the ends of Reason quoted strings. (for conversion)\n * In {| foo bar |} the scanner will be advanced until after the `|}` */\nlet tryAdvanceQuotedString = scanner => {\n  let rec scanContents = tag =>\n    switch scanner.ch {\n    | '|' =>\n      next(scanner)\n      switch scanner.ch {\n      | 'a' .. 
'z' =>\n        let startOff = scanner.offset\n        skipLowerCaseChars(scanner)\n        let suffix = (@doesNotRaise String.sub)(scanner.src, startOff, scanner.offset - startOff)\n        if tag == suffix {\n          if scanner.ch == '}' {\n            next(scanner)\n          } else {\n            scanContents(tag)\n          }\n        } else {\n          scanContents(tag)\n        }\n      | '}' => next(scanner)\n      | _ => scanContents(tag)\n      }\n    | ch if ch === hackyEOFChar => /* TODO: why is this place checking EOF and not others? */\n      ()\n    | _ =>\n      next(scanner)\n      scanContents(tag)\n    }\n\n  switch scanner.ch {\n  | 'a' .. 'z' =>\n    let startOff = scanner.offset\n    skipLowerCaseChars(scanner)\n    let tag = (@doesNotRaise String.sub)(scanner.src, startOff, scanner.offset - startOff)\n    if scanner.ch == '|' {\n      scanContents(tag)\n    }\n  | '|' => scanContents(\"\")\n  | _ => ()\n  }\n}\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_token.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_string from \"rescript/lib/es6/caml_string.js\";\nimport * as Res_comment from \"./res_comment.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nfunction precedence(x) {\n  if (typeof x !== \"number\") {\n    return 0;\n  }\n  if (x < 17) {\n    if (x !== 4) {\n      if (x >= 14) {\n        return 4;\n      } else {\n        return 0;\n      }\n    } else {\n      return 9;\n    }\n  }\n  if (x >= 46) {\n    if (x < 58) {\n      return 0;\n    }\n    switch (x) {\n      case /* MinusGreater */58 :\n          return 8;\n      case /* Land */67 :\n          return 3;\n      case /* Lor */68 :\n          return 2;\n      case /* ColonEqual */74 :\n          return 1;\n      case /* BangEqual */70 :\n      case /* BangEqualEqual */71 :\n      case /* LessEqual */72 :\n      case /* GreaterEqual */73 :\n      case /* BarGreater */81 :\n          return 4;\n      case /* External */59 :\n      case /* Typ */60 :\n      case /* Private */61 :\n      case /* Mutable */62 :\n      case /* Constraint */63 :\n      case /* Include */64 :\n      case /* Module */65 :\n      case /* Of */66 :\n      case /* Band */69 :\n      case /* At */75 :\n      case /* AtAt */76 :\n      case /* Percent */77 :\n      case /* PercentPercent */78 :\n      case /* List */79 :\n      case /* Backtick */80 :\n      case /* Try */82 :\n      case /* Import */83 :\n      case /* Export */84 :\n          return 0;\n      \n    }\n  } else {\n    if (x < 29) {\n      return 0;\n    }\n    switch (x) {\n      case /* Forwardslash */29 :\n      case /* ForwardslashDot */30 :\n      case /* Asterisk */31 :\n      case /* AsteriskDot */32 :\n          return 6;\n      case /* Exponentiation */33 :\n          return 7;\n      case /* Minus */34 :\n      case /* MinusDot */35 :\n      case /* Plus */36 :\n      case /* PlusDot */37 :\n      case /* PlusPlus */38 :\n          return 
5;\n      case /* GreaterThan */41 :\n      case /* LessThan */42 :\n          return 4;\n      case /* PlusEqual */39 :\n      case /* ColonGreaterThan */40 :\n      case /* LessThanSlash */43 :\n      case /* Hash */44 :\n          return 0;\n      case /* HashEqual */45 :\n          return 1;\n      \n    }\n  }\n}\n\nfunction toString(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* Open */0 :\n          return \"open\";\n      case /* True */1 :\n          return \"true\";\n      case /* False */2 :\n          return \"false\";\n      case /* As */3 :\n          return \"as\";\n      case /* Dot */4 :\n          return \".\";\n      case /* DotDot */5 :\n          return \"..\";\n      case /* DotDotDot */6 :\n          return \"...\";\n      case /* Bang */7 :\n          return \"!\";\n      case /* Semicolon */8 :\n          return \";\";\n      case /* Let */9 :\n          return \"let\";\n      case /* And */10 :\n          return \"and\";\n      case /* Rec */11 :\n          return \"rec\";\n      case /* Underscore */12 :\n          return \"_\";\n      case /* SingleQuote */13 :\n          return \"'\";\n      case /* Equal */14 :\n          return \"=\";\n      case /* EqualEqual */15 :\n          return \"==\";\n      case /* EqualEqualEqual */16 :\n          return \"===\";\n      case /* Bar */17 :\n          return \"|\";\n      case /* Lparen */18 :\n          return \"(\";\n      case /* Rparen */19 :\n          return \")\";\n      case /* Lbracket */20 :\n          return \"[\";\n      case /* Rbracket */21 :\n          return \"]\";\n      case /* Lbrace */22 :\n          return \"{\";\n      case /* Rbrace */23 :\n          return \"}\";\n      case /* Colon */24 :\n          return \":\";\n      case /* Comma */25 :\n          return \",\";\n      case /* Eof */26 :\n          return \"eof\";\n      case /* Exception */27 :\n          return \"exception\";\n      case /* Backslash */28 :\n          return \"\\\\\";\n   
   case /* Forwardslash */29 :\n          return \"/\";\n      case /* ForwardslashDot */30 :\n          return \"/.\";\n      case /* Asterisk */31 :\n          return \"*\";\n      case /* AsteriskDot */32 :\n          return \"*.\";\n      case /* Exponentiation */33 :\n          return \"**\";\n      case /* Minus */34 :\n          return \"-\";\n      case /* MinusDot */35 :\n          return \"-.\";\n      case /* Plus */36 :\n          return \"+\";\n      case /* PlusDot */37 :\n          return \"+.\";\n      case /* PlusPlus */38 :\n          return \"++\";\n      case /* PlusEqual */39 :\n          return \"+=\";\n      case /* ColonGreaterThan */40 :\n          return \":>\";\n      case /* GreaterThan */41 :\n          return \">\";\n      case /* LessThan */42 :\n          return \"<\";\n      case /* LessThanSlash */43 :\n          return \"</\";\n      case /* Hash */44 :\n          return \"#\";\n      case /* HashEqual */45 :\n          return \"#=\";\n      case /* Assert */46 :\n          return \"assert\";\n      case /* Lazy */47 :\n          return \"lazy\";\n      case /* Tilde */48 :\n          return \"tilde\";\n      case /* Question */49 :\n          return \"?\";\n      case /* If */50 :\n          return \"if\";\n      case /* Else */51 :\n          return \"else\";\n      case /* For */52 :\n          return \"for\";\n      case /* In */53 :\n          return \"in\";\n      case /* While */54 :\n          return \"while\";\n      case /* Switch */55 :\n          return \"switch\";\n      case /* When */56 :\n          return \"when\";\n      case /* EqualGreater */57 :\n          return \"=>\";\n      case /* MinusGreater */58 :\n          return \"->\";\n      case /* External */59 :\n          return \"external\";\n      case /* Typ */60 :\n          return \"type\";\n      case /* Private */61 :\n          return \"private\";\n      case /* Mutable */62 :\n          return \"mutable\";\n      case /* Constraint */63 :\n          
return \"constraint\";\n      case /* Include */64 :\n          return \"include\";\n      case /* Module */65 :\n          return \"module\";\n      case /* Of */66 :\n          return \"of\";\n      case /* Land */67 :\n          return \"&&\";\n      case /* Lor */68 :\n          return \"||\";\n      case /* Band */69 :\n          return \"&\";\n      case /* BangEqual */70 :\n          return \"!=\";\n      case /* BangEqualEqual */71 :\n          return \"!==\";\n      case /* LessEqual */72 :\n          return \"<=\";\n      case /* GreaterEqual */73 :\n          return \">=\";\n      case /* ColonEqual */74 :\n          return \":=\";\n      case /* At */75 :\n          return \"@\";\n      case /* AtAt */76 :\n          return \"@@\";\n      case /* Percent */77 :\n          return \"%\";\n      case /* PercentPercent */78 :\n          return \"%%\";\n      case /* List */79 :\n          return \"list{\";\n      case /* Backtick */80 :\n          return \"`\";\n      case /* BarGreater */81 :\n          return \"|>\";\n      case /* Try */82 :\n          return \"try\";\n      case /* Import */83 :\n          return \"import\";\n      case /* Export */84 :\n          return \"export\";\n      \n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Codepoint */0 :\n          return \"codepoint '\" + (x.original + \"'\");\n      case /* Int */1 :\n          return \"int \" + x.i;\n      case /* Float */2 :\n          return \"Float: \" + x.f;\n      case /* String */3 :\n          return \"string \\\"\" + (x._0 + \"\\\"\");\n      case /* Lident */4 :\n      case /* Uident */5 :\n          return x._0;\n      case /* Comment */6 :\n          return \"Comment\" + Res_comment.toString(x._0);\n      case /* TemplateTail */7 :\n          return \"TemplateTail(\" + (x._0 + \")\");\n      case /* TemplatePart */8 :\n          return x._0 + \"${\";\n      \n    }\n  }\n}\n\nfunction keywordTable(x) {\n  switch (x) {\n    case \"and\" :\n        return /* And 
*/10;\n    case \"as\" :\n        return /* As */3;\n    case \"assert\" :\n        return /* Assert */46;\n    case \"constraint\" :\n        return /* Constraint */63;\n    case \"else\" :\n        return /* Else */51;\n    case \"exception\" :\n        return /* Exception */27;\n    case \"export\" :\n        return /* Export */84;\n    case \"external\" :\n        return /* External */59;\n    case \"false\" :\n        return /* False */2;\n    case \"for\" :\n        return /* For */52;\n    case \"if\" :\n        return /* If */50;\n    case \"import\" :\n        return /* Import */83;\n    case \"in\" :\n        return /* In */53;\n    case \"include\" :\n        return /* Include */64;\n    case \"lazy\" :\n        return /* Lazy */47;\n    case \"let\" :\n        return /* Let */9;\n    case \"list{\" :\n        return /* List */79;\n    case \"module\" :\n        return /* Module */65;\n    case \"mutable\" :\n        return /* Mutable */62;\n    case \"of\" :\n        return /* Of */66;\n    case \"open\" :\n        return /* Open */0;\n    case \"private\" :\n        return /* Private */61;\n    case \"rec\" :\n        return /* Rec */11;\n    case \"switch\" :\n        return /* Switch */55;\n    case \"true\" :\n        return /* True */1;\n    case \"try\" :\n        return /* Try */82;\n    case \"type\" :\n        return /* Typ */60;\n    case \"when\" :\n        return /* When */56;\n    case \"while\" :\n        return /* While */54;\n    default:\n      throw {\n            RE_EXN_ID: \"Not_found\",\n            Error: new Error()\n          };\n  }\n}\n\nfunction isKeyword(x) {\n  if (typeof x === \"number\") {\n    if (x >= 48) {\n      if (x >= 69) {\n        if (x !== 79) {\n          return x >= 82;\n        } else {\n          return true;\n        }\n      } else if (x >= 57) {\n        return x >= 59;\n      } else {\n        return x >= 50;\n      }\n    } else if (x > 45 || x < 12) {\n      return x > 8 || x < 4;\n    } else {\n      
return x === 27;\n    }\n  } else {\n    return false;\n  }\n}\n\nfunction lookupKeyword(str) {\n  try {\n    return keywordTable(str);\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      var match = Caml_string.get(str, 0);\n      if (match > 90 || match < 65) {\n        return {\n                TAG: /* Lident */4,\n                _0: str\n              };\n      } else {\n        return {\n                TAG: /* Uident */5,\n                _0: str\n              };\n      }\n    }\n    throw exn;\n  }\n}\n\nfunction isKeywordTxt(str) {\n  try {\n    keywordTable(str);\n    return true;\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === \"Not_found\") {\n      return false;\n    }\n    throw exn;\n  }\n}\n\nvar $$Comment;\n\nvar $$catch = {\n  TAG: /* Lident */4,\n  _0: \"catch\"\n};\n\nexport {\n  $$Comment ,\n  precedence ,\n  toString ,\n  keywordTable ,\n  isKeyword ,\n  lookupKeyword ,\n  isKeywordTxt ,\n  $$catch ,\n  \n}\n/* Res_comment Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_token.res",
    "content": "module Comment = Res_comment\n\ntype t =\n  | Open\n  | True\n  | False\n  | Codepoint({c: char, original: string})\n  | Int({i: string, suffix: option<char>})\n  | Float({f: string, suffix: option<char>})\n  | String(string)\n  | Lident(string)\n  | Uident(string)\n  | As\n  | Dot\n  | DotDot\n  | DotDotDot\n  | Bang\n  | Semicolon\n  | Let\n  | And\n  | Rec\n  | Underscore\n  | SingleQuote\n  | Equal\n  | EqualEqual\n  | EqualEqualEqual\n  | Bar\n  | Lparen\n  | Rparen\n  | Lbracket\n  | Rbracket\n  | Lbrace\n  | Rbrace\n  | Colon\n  | Comma\n  | Eof\n  | Exception\n  | @live Backslash\n  | Forwardslash\n  | ForwardslashDot\n  | Asterisk\n  | AsteriskDot\n  | Exponentiation\n  | Minus\n  | MinusDot\n  | Plus\n  | PlusDot\n  | PlusPlus\n  | PlusEqual\n  | ColonGreaterThan\n  | GreaterThan\n  | LessThan\n  | LessThanSlash\n  | Hash\n  | HashEqual\n  | Assert\n  | Lazy\n  | Tilde\n  | Question\n  | If\n  | Else\n  | For\n  | In\n  | While\n  | Switch\n  | When\n  | EqualGreater\n  | MinusGreater\n  | External\n  | Typ\n  | Private\n  | Mutable\n  | Constraint\n  | Include\n  | Module\n  | Of\n  | Land\n  | Lor\n  | Band /* Bitwise and: & */\n  | BangEqual\n  | BangEqualEqual\n  | LessEqual\n  | GreaterEqual\n  | ColonEqual\n  | At\n  | AtAt\n  | Percent\n  | PercentPercent\n  | Comment(Comment.t)\n  | List\n  | TemplateTail(string)\n  | TemplatePart(string)\n  | Backtick\n  | BarGreater\n  | Try\n  | Import\n  | Export\n\nlet precedence = x =>\n  switch x {\n  | HashEqual | ColonEqual => 1\n  | Lor => 2\n  | Land => 3\n  | Equal\n  | EqualEqual\n  | EqualEqualEqual\n  | LessThan\n  | GreaterThan\n  | BangEqual\n  | BangEqualEqual\n  | LessEqual\n  | GreaterEqual\n  | BarGreater => 4\n  | Plus | PlusDot | Minus | MinusDot | PlusPlus => 5\n  | Asterisk | AsteriskDot | Forwardslash | ForwardslashDot => 6\n  | Exponentiation => 7\n  | MinusGreater => 8\n  | Dot => 9\n  | _ => 0\n  }\n\nlet toString = x =>\n  switch x {\n  | Open => \"open\"\n  | True => 
\"true\"\n  | False => \"false\"\n  | Codepoint({original}) => \"codepoint '\" ++ (original ++ \"'\")\n  | String(s) => \"string \\\"\" ++ (s ++ \"\\\"\")\n  | Lident(str) => str\n  | Uident(str) => str\n  | Dot => \".\"\n  | DotDot => \"..\"\n  | DotDotDot => \"...\"\n  | Int({i}) => \"int \" ++ i\n  | Float({f}) => \"Float: \" ++ f\n  | Bang => \"!\"\n  | Semicolon => \";\"\n  | Let => \"let\"\n  | And => \"and\"\n  | Rec => \"rec\"\n  | Underscore => \"_\"\n  | SingleQuote => \"'\"\n  | Equal => \"=\"\n  | EqualEqual => \"==\"\n  | EqualEqualEqual => \"===\"\n  | Eof => \"eof\"\n  | Bar => \"|\"\n  | As => \"as\"\n  | Lparen => \"(\"\n  | Rparen => \")\"\n  | Lbracket => \"[\"\n  | Rbracket => \"]\"\n  | Lbrace => \"{\"\n  | Rbrace => \"}\"\n  | ColonGreaterThan => \":>\"\n  | Colon => \":\"\n  | Comma => \",\"\n  | Minus => \"-\"\n  | MinusDot => \"-.\"\n  | Plus => \"+\"\n  | PlusDot => \"+.\"\n  | PlusPlus => \"++\"\n  | PlusEqual => \"+=\"\n  | Backslash => \"\\\\\"\n  | Forwardslash => \"/\"\n  | ForwardslashDot => \"/.\"\n  | Exception => \"exception\"\n  | Hash => \"#\"\n  | HashEqual => \"#=\"\n  | GreaterThan => \">\"\n  | LessThan => \"<\"\n  | LessThanSlash => \"</\"\n  | Asterisk => \"*\"\n  | AsteriskDot => \"*.\"\n  | Exponentiation => \"**\"\n  | Assert => \"assert\"\n  | Lazy => \"lazy\"\n  | Tilde => \"tilde\"\n  | Question => \"?\"\n  | If => \"if\"\n  | Else => \"else\"\n  | For => \"for\"\n  | In => \"in\"\n  | While => \"while\"\n  | Switch => \"switch\"\n  | When => \"when\"\n  | EqualGreater => \"=>\"\n  | MinusGreater => \"->\"\n  | External => \"external\"\n  | Typ => \"type\"\n  | Private => \"private\"\n  | Constraint => \"constraint\"\n  | Mutable => \"mutable\"\n  | Include => \"include\"\n  | Module => \"module\"\n  | Of => \"of\"\n  | Lor => \"||\"\n  | Band => \"&\"\n  | Land => \"&&\"\n  | BangEqual => \"!=\"\n  | BangEqualEqual => \"!==\"\n  | GreaterEqual => \">=\"\n  | LessEqual => \"<=\"\n  | ColonEqual => \":=\"\n  | At => 
\"@\"\n  | AtAt => \"@@\"\n  | Percent => \"%\"\n  | PercentPercent => \"%%\"\n  | Comment(c) => \"Comment\" ++ Comment.toString(c)\n  | List => \"list{\"\n  | TemplatePart(text) => text ++ \"${\"\n  | TemplateTail(text) => \"TemplateTail(\" ++ (text ++ \")\")\n  | Backtick => \"`\"\n  | BarGreater => \"|>\"\n  | Try => \"try\"\n  | Import => \"import\"\n  | Export => \"export\"\n  }\n\n@raises(Not_found)\nlet keywordTable = x =>\n  switch x {\n  | \"and\" => And\n  | \"as\" => As\n  | \"assert\" => Assert\n  | \"constraint\" => Constraint\n  | \"else\" => Else\n  | \"exception\" => Exception\n  | \"export\" => Export\n  | \"external\" => External\n  | \"false\" => False\n  | \"for\" => For\n  | \"if\" => If\n  | \"import\" => Import\n  | \"in\" => In\n  | \"include\" => Include\n  | \"lazy\" => Lazy\n  | \"let\" => Let\n  | \"list{\" => List\n  | \"module\" => Module\n  | \"mutable\" => Mutable\n  | \"of\" => Of\n  | \"open\" => Open\n  | \"private\" => Private\n  | \"rec\" => Rec\n  | \"switch\" => Switch\n  | \"true\" => True\n  | \"try\" => Try\n  | \"type\" => Typ\n  | \"when\" => When\n  | \"while\" => While\n  | _ => raise(Not_found)\n  }\n\nlet isKeyword = x =>\n  switch x {\n  | And\n  | As\n  | Assert\n  | Constraint\n  | Else\n  | Exception\n  | Export\n  | External\n  | False\n  | For\n  | If\n  | Import\n  | In\n  | Include\n  | Land\n  | Lazy\n  | Let\n  | List\n  | Lor\n  | Module\n  | Mutable\n  | Of\n  | Open\n  | Private\n  | Rec\n  | Switch\n  | True\n  | Try\n  | Typ\n  | When\n  | While => true\n  | _ => false\n  }\n\nlet lookupKeyword = str =>\n  try keywordTable(str) catch {\n  | Not_found =>\n    switch @doesNotRaise\n    String.get(str, 0) {\n    | 'A' .. 'Z' => Uident(str)\n    | _ => Lident(str)\n    }\n  }\n\nlet isKeywordTxt = str =>\n  try {\n    let _ = keywordTable(str)\n    true\n  } catch {\n  | Not_found => false\n  }\n\nlet catch = Lident(\"catch\")\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_utf8.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Bytes from \"rescript/lib/es6/bytes.js\";\n\nvar categoryTable = [\n  {\n    low: -1,\n    high: -1,\n    size: 1\n  },\n  {\n    low: 1,\n    high: -1,\n    size: 1\n  },\n  {\n    low: 128,\n    high: 191,\n    size: 2\n  },\n  {\n    low: 160,\n    high: 191,\n    size: 3\n  },\n  {\n    low: 128,\n    high: 191,\n    size: 3\n  },\n  {\n    low: 128,\n    high: 159,\n    size: 3\n  },\n  {\n    low: 144,\n    high: 191,\n    size: 4\n  },\n  {\n    low: 128,\n    high: 191,\n    size: 4\n  },\n  {\n    low: 128,\n    high: 143,\n    size: 4\n  }\n];\n\nvar categories = [\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  
2,\n  2,\n  3,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  5,\n  4,\n  4,\n  6,\n  7,\n  7,\n  7,\n  8,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0\n];\n\nfunction decodeCodePoint(i, s, len) {\n  if (len < 1) {\n    return [\n            65533,\n            1\n          ];\n  }\n  var first = s.charCodeAt(i);\n  if (first < 128) {\n    return [\n            first,\n            1\n          ];\n  }\n  var index = categories[first];\n  if (index === 0) {\n    return [\n            65533,\n            1\n          ];\n  }\n  var cat = categoryTable[index];\n  if (len < (i + cat.size | 0)) {\n    return [\n            65533,\n            1\n          ];\n  }\n  if (cat.size === 2) {\n    var c1 = s.charCodeAt(i + 1 | 0);\n    if (c1 < cat.low || cat.high < c1) {\n      return [\n              65533,\n              1\n            ];\n    }\n    var i1 = c1 & 63;\n    var i0 = ((first & 31) << 6);\n    var uc = i0 | i1;\n    return [\n            uc,\n            2\n          ];\n  }\n  if (cat.size === 3) {\n    var c1$1 = s.charCodeAt(i + 1 | 0);\n    var c2 = s.charCodeAt(i + 2 | 0);\n    if (c1$1 < cat.low || cat.high < c1$1 || c2 < 128 || 191 < c2) {\n      return [\n              65533,\n              1\n            ];\n    }\n    var i0$1 = ((first & 15) << 12);\n    var i1$1 = ((c1$1 & 63) << 6);\n    var i2 = c2 & 63;\n    var uc$1 = i0$1 | i1$1 | i2;\n    return [\n            uc$1,\n            3\n          ];\n  }\n  var c1$2 = s.charCodeAt(i + 1 | 0);\n  var c2$1 = s.charCodeAt(i + 2 | 0);\n  var c3 = s.charCodeAt(i + 3 | 0);\n  if (c1$2 < cat.low || cat.high < c1$2 || c2$1 < 128 || 191 < c2$1 || c3 < 128 || 191 < c3) {\n    return [\n            65533,\n            1\n          ];\n  }\n  var i1$2 = ((c1$2 & 63) << 12);\n  var i2$1 = ((c2$1 & 63) << 6);\n  var i3 = c3 & 63;\n  var i0$2 = ((first & 7) << 18);\n  var uc$2 = i0$2 | i3 | i2$1 | i1$2;\n  return [\n          uc$2,\n          4\n        
];\n}\n\nfunction encodeCodePoint(c) {\n  if (c <= 127) {\n    var bytes = [0];\n    bytes[0] = c;\n    return Bytes.unsafe_to_string(bytes);\n  }\n  if (c <= 2047) {\n    var bytes$1 = [\n      0,\n      0\n    ];\n    bytes$1[0] = 192 | (c >>> 6);\n    bytes$1[1] = 128 | c & 63;\n    return Bytes.unsafe_to_string(bytes$1);\n  }\n  if (c <= 65535) {\n    var bytes$2 = [\n      0,\n      0,\n      0\n    ];\n    bytes$2[0] = 224 | (c >>> 12);\n    bytes$2[1] = 128 | (c >>> 6) & 63;\n    bytes$2[2] = 128 | c & 63;\n    return Bytes.unsafe_to_string(bytes$2);\n  }\n  var bytes$3 = [\n    0,\n    0,\n    0,\n    0\n  ];\n  bytes$3[0] = 240 | (c >>> 18);\n  bytes$3[1] = 128 | (c >>> 12) & 63;\n  bytes$3[2] = 128 | (c >>> 6) & 63;\n  bytes$3[3] = 128 | c & 63;\n  return Bytes.unsafe_to_string(bytes$3);\n}\n\nfunction isValidCodePoint(c) {\n  if (0 <= c && c < 55296) {\n    return true;\n  } else if (57343 < c) {\n    return c <= 1114111;\n  } else {\n    return false;\n  }\n}\n\nvar repl = 65533;\n\nvar max = 1114111;\n\nvar surrogateMin = 55296;\n\nvar surrogateMax = 57343;\n\nvar h2 = 192;\n\nvar h3 = 224;\n\nvar h4 = 240;\n\nvar cont_mask = 63;\n\nvar locb = 128;\n\nvar hicb = 191;\n\nexport {\n  repl ,\n  max ,\n  surrogateMin ,\n  surrogateMax ,\n  h2 ,\n  h3 ,\n  h4 ,\n  cont_mask ,\n  locb ,\n  hicb ,\n  categoryTable ,\n  categories ,\n  decodeCodePoint ,\n  encodeCodePoint ,\n  isValidCodePoint ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/res_utf8.res",
    "content": "/* https://tools.ietf.org/html/rfc3629#section-10 */\n/* let bom = 0xFEFF */\n\nlet repl = 0xFFFD\n\n/* let min = 0x0000 */\nlet max = 0x10FFFF\n\nlet surrogateMin = 0xD800\nlet surrogateMax = 0xDFFF\n\n/*\n * Char. number range  |        UTF-8 octet sequence\n *       (hexadecimal)    |              (binary)\n *    --------------------+---------------------------------------------\n *    0000 0000-0000 007F | 0xxxxxxx\n *    0000 0080-0000 07FF | 110xxxxx 10xxxxxx\n *    0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx\n *    0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx\n */\nlet h2 = 0b1100_0000\nlet h3 = 0b1110_0000\nlet h4 = 0b1111_0000\n\nlet cont_mask = 0b0011_1111\n\ntype category = {\n  low: int,\n  high: int,\n  size: int,\n}\n\nlet locb = 0b1000_0000\nlet hicb = 0b1011_1111\n\nlet categoryTable = [\n  /* 0 */ {low: -1, high: -1, size: 1} /* invalid */,\n  /* 1 */ {low: 1, high: -1, size: 1} /* ascii */,\n  /* 2 */ {low: locb, high: hicb, size: 2},\n  /* 3 */ {low: 0xA0, high: hicb, size: 3},\n  /* 4 */ {low: locb, high: hicb, size: 3},\n  /* 5 */ {low: locb, high: 0x9F, size: 3},\n  /* 6 */ {low: 0x90, high: hicb, size: 4},\n  /* 7 */ {low: locb, high: hicb, size: 4},\n  /* 8 */ {low: locb, high: 0x8F, size: 4},\n]\n\nlet categories = [\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n 
 1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  1,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  /* surrogate range U+D800 - U+DFFFF = 55296 - 917503 */\n  0,\n  0,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  2,\n  3,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  4,\n  5,\n  4,\n  4,\n  6,\n  7,\n  7,\n  7,\n  8,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n  0,\n]\n\nlet decodeCodePoint = (i, s, len) =>\n  if len < 1 {\n    (repl, 1)\n  } else {\n    let first = int_of_char(String.unsafe_get(s, i))\n    if first < 128 {\n      (first, 1)\n    } else {\n      let index = Array.unsafe_get(categories, first)\n      if index == 0 {\n        (repl, 1)\n      } else {\n        let cat = Array.unsafe_get(categoryTable, index)\n        if len < i + cat.size {\n          (repl, 1)\n        } else if cat.size === 2 {\n          let c1 = int_of_char(String.unsafe_get(s, i + 1))\n          if c1 < cat.low || cat.high < c1 {\n            (repl, 1)\n          } else {\n            let i1 = land(c1, 0b00111111)\n            let i0 = lsl(land(first, 0b00011111), 6)\n            let uc = lor(i0, i1)\n            (uc, 2)\n          }\n        } else if cat.size === 3 {\n          let c1 = int_of_char(String.unsafe_get(s, i + 1))\n          let c2 = int_of_char(String.unsafe_get(s, i + 2))\n          if c1 < cat.low || (cat.high < c1 || (c2 < locb || hicb < c2)) {\n            (repl, 1)\n          } else {\n            let i0 = lsl(land(first, 0b00001111), 12)\n      
      let i1 = lsl(land(c1, 0b00111111), 6)\n            let i2 = land(c2, 0b00111111)\n            let uc = lor(lor(i0, i1), i2)\n            (uc, 3)\n          }\n        } else {\n          let c1 = int_of_char(String.unsafe_get(s, i + 1))\n          let c2 = int_of_char(String.unsafe_get(s, i + 2))\n          let c3 = int_of_char(String.unsafe_get(s, i + 3))\n          if (\n            c1 < cat.low ||\n              (cat.high < c1 ||\n              (c2 < locb || (hicb < c2 || (c3 < locb || hicb < c3))))\n          ) {\n            (repl, 1)\n          } else {\n            let i1 = lsl(land(c1, 0x3f), 12)\n            let i2 = lsl(land(c2, 0x3f), 6)\n            let i3 = land(c3, 0x3f)\n            let i0 = lsl(land(first, 0x07), 18)\n            let uc = lor(lor(lor(i0, i3), i2), i1)\n            (uc, 4)\n          }\n        }\n      }\n    }\n  }\n\nlet encodeCodePoint = c =>\n  if c <= 127 {\n    let bytes = (@doesNotRaise Bytes.create)(1)\n    Bytes.unsafe_set(bytes, 0, Char.unsafe_chr(c))\n    Bytes.unsafe_to_string(bytes)\n  } else if c <= 2047 {\n    let bytes = (@doesNotRaise Bytes.create)(2)\n    Bytes.unsafe_set(bytes, 0, Char.unsafe_chr(lor(h2, lsr(c, 6))))\n    Bytes.unsafe_set(bytes, 1, Char.unsafe_chr(lor(0b1000_0000, land(c, cont_mask))))\n    Bytes.unsafe_to_string(bytes)\n  } else if c <= 65535 {\n    let bytes = (@doesNotRaise Bytes.create)(3)\n    Bytes.unsafe_set(bytes, 0, Char.unsafe_chr(lor(h3, lsr(c, 12))))\n    Bytes.unsafe_set(bytes, 1, Char.unsafe_chr(lor(0b1000_0000, land(lsr(c, 6), cont_mask))))\n    Bytes.unsafe_set(bytes, 2, Char.unsafe_chr(lor(0b1000_0000, land(c, cont_mask))))\n    Bytes.unsafe_to_string(bytes)\n  } else {\n    /* if c <= max then */\n    let bytes = (@doesNotRaise Bytes.create)(4)\n    Bytes.unsafe_set(bytes, 0, Char.unsafe_chr(lor(h4, lsr(c, 18))))\n    Bytes.unsafe_set(bytes, 1, Char.unsafe_chr(lor(0b1000_0000, land(lsr(c, 12), cont_mask))))\n    Bytes.unsafe_set(bytes, 2, Char.unsafe_chr(lor(0b1000_0000, 
land(lsr(c, 6), cont_mask))))\n    Bytes.unsafe_set(bytes, 3, Char.unsafe_chr(lor(0b1000_0000, land(c, cont_mask))))\n    Bytes.unsafe_to_string(bytes)\n  }\n\nlet isValidCodePoint = c => (0 <= c && c < surrogateMin) || (surrogateMax < c && c <= max)\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/syntaxerr.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Printf from \"./printf.js\";\nimport * as $$Location from \"./location.js\";\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nvar $$Error = /* @__PURE__ */Caml_exceptions.create(\"Syntaxerr.Error\");\n\nvar Escape_error = /* @__PURE__ */Caml_exceptions.create(\"Syntaxerr.Escape_error\");\n\nfunction prepare_error(x) {\n  switch (x.TAG | 0) {\n    case /* Unclosed */0 :\n        var closing = x._3;\n        var opening = x._1;\n        return Curry._1($$Location.errorf(x._2, {\n                        hd: Curry._1($$Location.errorf(x._0, undefined, undefined, \"This '%s' might be unmatched\"), opening),\n                        tl: /* [] */0\n                      }, Curry._2(Printf.sprintf(\"Syntax error: '%s' expected, the highlighted '%s' might be unmatched\"), closing, opening), \"Syntax error: '%s' expected\"), closing);\n    case /* Expecting */1 :\n        return Curry._1($$Location.errorf(x._0, undefined, undefined, \"Syntax error: %s expected.\"), x._1);\n    case /* Not_expecting */2 :\n        return Curry._1($$Location.errorf(x._0, undefined, undefined, \"Syntax error: %s not expected.\"), x._1);\n    case /* Applicative_path */3 :\n        return $$Location.errorf(x._0, undefined, undefined, \"Syntax error: applicative paths of the form F(X).t are not supported when the option -no-app-func is set.\");\n    case /* Variable_in_scope */4 :\n        var $$var = x._1;\n        return Curry._2($$Location.errorf(x._0, undefined, undefined, \"In this scoped type, variable '%s is reserved for the local type %s.\"), $$var, $$var);\n    case /* Other */5 :\n        return $$Location.errorf(x._0, undefined, undefined, \"Syntax error\");\n    case /* Ill_formed_ast */6 :\n        return Curry._1($$Location.errorf(x._0, undefined, undefined, \"broken invariant in parsetree: %s\"), x._1);\n    case /* 
Invalid_package_type */7 :\n        return Curry._1($$Location.errorf(x._0, undefined, undefined, \"invalid package type: %s\"), x._1);\n    \n  }\n}\n\n$$Location.register_error_of_exn(function (x) {\n      if (x.RE_EXN_ID === $$Error) {\n        return prepare_error(x._1);\n      }\n      \n    });\n\nfunction report_error(ppf, err) {\n  \n}\n\nfunction location_of_error(x) {\n  return x._0;\n}\n\nfunction ill_formed_ast(loc, s) {\n  throw {\n        RE_EXN_ID: $$Error,\n        _1: {\n          TAG: /* Ill_formed_ast */6,\n          _0: loc,\n          _1: s\n        },\n        Error: new Error()\n      };\n}\n\nexport {\n  $$Error ,\n  Escape_error ,\n  prepare_error ,\n  report_error ,\n  location_of_error ,\n  ill_formed_ast ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/syntaxerr.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Xavier Leroy, projet Cristal, INRIA Rocquencourt */\n/*  */\n/* Copyright 1997 Institut National de Recherche en Informatique et */\n/* en Automatique. */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\n/* Auxiliary type for reporting syntax errors */\n\ntype error =\n  | Unclosed(Location.t, string, Location.t, string)\n  | Expecting(Location.t, string)\n  | Not_expecting(Location.t, string)\n  | Applicative_path(Location.t)\n  | Variable_in_scope(Location.t, string)\n  | Other(Location.t)\n  | Ill_formed_ast(Location.t, string)\n  | Invalid_package_type(Location.t, string)\n\nexception Error(error)\nexception Escape_error\n\nlet prepare_error = x =>\n  switch x {\n  | Unclosed(opening_loc, opening, closing_loc, closing) =>\n    Location.errorf(\n      ~loc=closing_loc,\n      ~sub=list{Location.errorf(~loc=opening_loc, \"This '%s' might be unmatched\", opening)},\n      ~if_highlight=Printf.sprintf(\n        \"Syntax error: '%s' expected, \\\n                           the highlighted '%s' might be unmatched\",\n        closing,\n        opening,\n      ),\n      \"Syntax error: '%s' expected\",\n      closing,\n    )\n\n  | Expecting(loc, nonterm) => Location.errorf(~loc, \"Syntax error: %s expected.\", nonterm)\n  | Not_expecting(loc, nonterm) => Location.errorf(~loc, \"Syntax error: %s not expected.\", nonterm)\n  | Applicative_path(loc) =>\n    Location.errorf(\n      ~loc,\n      \"Syntax error: applicative paths of the form F(X).t \\\n         are not supported when the option -no-app-func is set.\",\n    )\n  | Variable_in_scope(loc, var) =>\n    Location.errorf(\n      ~loc,\n 
     \"In this scoped type, variable '%s \\\n         is reserved for the local type %s.\",\n      var,\n      var,\n    )\n  | Other(loc) => Location.errorf(~loc, \"Syntax error\")\n  | Ill_formed_ast(loc, s) => Location.errorf(~loc, \"broken invariant in parsetree: %s\", s)\n  | Invalid_package_type(loc, s) => Location.errorf(~loc, \"invalid package type: %s\", s)\n  }\n\nlet () = Location.register_error_of_exn(x =>\n  switch x {\n  | Error(err) => Some(prepare_error(err))\n  | _ => None\n  }\n)\n\nlet report_error = (ppf, err) => ()\n\nlet location_of_error = x =>\n  switch x {\n  | Unclosed(l, _, _, _)\n  | Applicative_path(l)\n  | Variable_in_scope(l, _)\n  | Other(l)\n  | Not_expecting(l, _)\n  | Ill_formed_ast(l, _)\n  | Invalid_package_type(l, _)\n  | Expecting(l, _) => l\n  }\n\nlet ill_formed_ast = (loc, s) => raise(Error(Ill_formed_ast(loc, s)))\n\n"
  },
  {
    "path": "analysis/examples/larger-project/src/warnings.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Arg from \"rescript/lib/es6/arg.js\";\nimport * as Caml from \"rescript/lib/es6/caml.js\";\nimport * as Char from \"rescript/lib/es6/char.js\";\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Misc from \"./misc.js\";\nimport * as $$Array from \"rescript/lib/es6/array.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Printf from \"./printf.js\";\nimport * as $$String from \"rescript/lib/es6/string.js\";\nimport * as Caml_array from \"rescript/lib/es6/caml_array.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Caml_string from \"rescript/lib/es6/caml_string.js\";\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nfunction number(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* Comment_start */0 :\n          return 1;\n      case /* Comment_not_end */1 :\n          return 2;\n      case /* Partial_application */2 :\n          return 5;\n      case /* Statement_type */3 :\n          return 10;\n      case /* Unused_match */4 :\n          return 11;\n      case /* Unused_pat */5 :\n          return 12;\n      case /* Illegal_backslash */6 :\n          return 14;\n      case /* Unerasable_optional_argument */7 :\n          return 16;\n      case /* Unused_argument */8 :\n          return 20;\n      case /* Nonreturning_statement */9 :\n          return 21;\n      case /* Useless_record_with */10 :\n          return 23;\n      case /* All_clauses_guarded */11 :\n          return 8;\n      case /* Wildcard_arg_to_constant_constr */12 :\n          return 28;\n      case /* Eol_in_string */13 :\n          return 29;\n      case /* Unused_rec_flag */14 :\n          return 39;\n      case /* Expect_tailcall */15 :\n          return 51;\n      case /* Fragile_literal_pattern */16 :\n          return 52;\n      case /* Unreachable_case */17 :\n          return 56;\n      case 
/* Assignment_to_non_mutable_value */18 :\n          return 59;\n      case /* Constraint_on_gadt */19 :\n          return 62;\n      \n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Deprecated */0 :\n          return 3;\n      case /* Fragile_match */1 :\n          return 4;\n      case /* Labels_omitted */2 :\n          return 6;\n      case /* Method_override */3 :\n          return 7;\n      case /* Partial_match */4 :\n          return 8;\n      case /* Non_closed_record_pattern */5 :\n          return 9;\n      case /* Instance_variable_override */6 :\n          return 13;\n      case /* Implicit_public_methods */7 :\n          return 15;\n      case /* Undeclared_virtual_method */8 :\n          return 17;\n      case /* Not_principal */9 :\n          return 18;\n      case /* Without_principality */10 :\n          return 19;\n      case /* Preprocessor */11 :\n          return 22;\n      case /* Bad_module_name */12 :\n          return 24;\n      case /* Unused_var */13 :\n          return 26;\n      case /* Unused_var_strict */14 :\n          return 27;\n      case /* Duplicate_definitions */15 :\n          return 30;\n      case /* Multiple_definition */16 :\n          return 31;\n      case /* Unused_value_declaration */17 :\n          return 32;\n      case /* Unused_open */18 :\n          return 33;\n      case /* Unused_type_declaration */19 :\n          return 34;\n      case /* Unused_for_index */20 :\n          return 35;\n      case /* Unused_ancestor */21 :\n          return 36;\n      case /* Unused_constructor */22 :\n          return 37;\n      case /* Unused_extension */23 :\n          return 38;\n      case /* Name_out_of_scope */24 :\n          return 40;\n      case /* Ambiguous_name */25 :\n          return 41;\n      case /* Disambiguated_name */26 :\n          return 42;\n      case /* Nonoptional_label */27 :\n          return 43;\n      case /* Open_shadow_identifier */28 :\n          return 44;\n      case /* 
Open_shadow_label_constructor */29 :\n          return 45;\n      case /* Bad_env_variable */30 :\n          return 46;\n      case /* Attribute_payload */31 :\n          return 47;\n      case /* Eliminated_optional_arguments */32 :\n          return 48;\n      case /* No_cmi_file */33 :\n          return 49;\n      case /* Bad_docstring */34 :\n          return 50;\n      case /* Misplaced_attribute */35 :\n          return 53;\n      case /* Duplicated_attribute */36 :\n          return 54;\n      case /* Inlining_impossible */37 :\n          return 55;\n      case /* Ambiguous_pattern */38 :\n          return 57;\n      case /* No_cmx_file */39 :\n          return 58;\n      case /* Unused_module */40 :\n          return 60;\n      case /* Unboxable_type_in_prim_decl */41 :\n          return 61;\n      \n    }\n  }\n}\n\nfunction letter(x) {\n  switch (x) {\n    case 97 :\n        var loop = function (i) {\n          if (i === 0) {\n            return /* [] */0;\n          } else {\n            return {\n                    hd: i,\n                    tl: loop(i - 1 | 0)\n                  };\n          }\n        };\n        return loop(62);\n    case 99 :\n        return {\n                hd: 1,\n                tl: {\n                  hd: 2,\n                  tl: /* [] */0\n                }\n              };\n    case 100 :\n        return {\n                hd: 3,\n                tl: /* [] */0\n              };\n    case 101 :\n        return {\n                hd: 4,\n                tl: /* [] */0\n              };\n    case 102 :\n        return {\n                hd: 5,\n                tl: /* [] */0\n              };\n    case 107 :\n        return {\n                hd: 32,\n                tl: {\n                  hd: 33,\n                  tl: {\n                    hd: 34,\n                    tl: {\n                      hd: 35,\n                      tl: {\n                        hd: 36,\n                        tl: {\n                       
   hd: 37,\n                          tl: {\n                            hd: 38,\n                            tl: {\n                              hd: 39,\n                              tl: /* [] */0\n                            }\n                          }\n                        }\n                      }\n                    }\n                  }\n                }\n              };\n    case 108 :\n        return {\n                hd: 6,\n                tl: /* [] */0\n              };\n    case 109 :\n        return {\n                hd: 7,\n                tl: /* [] */0\n              };\n    case 112 :\n        return {\n                hd: 8,\n                tl: /* [] */0\n              };\n    case 114 :\n        return {\n                hd: 9,\n                tl: /* [] */0\n              };\n    case 115 :\n        return {\n                hd: 10,\n                tl: /* [] */0\n              };\n    case 117 :\n        return {\n                hd: 11,\n                tl: {\n                  hd: 12,\n                  tl: /* [] */0\n                }\n              };\n    case 118 :\n        return {\n                hd: 13,\n                tl: /* [] */0\n              };\n    case 98 :\n    case 103 :\n    case 104 :\n    case 105 :\n    case 106 :\n    case 110 :\n    case 111 :\n    case 113 :\n    case 116 :\n    case 119 :\n        return /* [] */0;\n    case 120 :\n        return {\n                hd: 14,\n                tl: {\n                  hd: 15,\n                  tl: {\n                    hd: 16,\n                    tl: {\n                      hd: 17,\n                      tl: {\n                        hd: 18,\n                        tl: {\n                          hd: 19,\n                          tl: {\n                            hd: 20,\n                            tl: {\n                              hd: 21,\n                              tl: {\n                                hd: 22,\n                          
      tl: {\n                                  hd: 23,\n                                  tl: {\n                                    hd: 24,\n                                    tl: {\n                                      hd: 30,\n                                      tl: /* [] */0\n                                    }\n                                  }\n                                }\n                              }\n                            }\n                          }\n                        }\n                      }\n                    }\n                  }\n                }\n              };\n    case 121 :\n        return {\n                hd: 26,\n                tl: /* [] */0\n              };\n    case 122 :\n        return {\n                hd: 27,\n                tl: /* [] */0\n              };\n    default:\n      throw {\n            RE_EXN_ID: \"Assert_failure\",\n            _1: [\n              \"warnings.res\",\n              204,\n              9\n            ],\n            Error: new Error()\n          };\n  }\n}\n\nvar current = {\n  contents: {\n    active: Caml_array.make(63, true),\n    error: Caml_array.make(63, false)\n  }\n};\n\nvar disabled = {\n  contents: false\n};\n\nfunction without_warnings(f) {\n  return Misc.protect_refs({\n              hd: /* R */{\n                _0: disabled,\n                _1: true\n              },\n              tl: /* [] */0\n            }, f);\n}\n\nfunction backup(param) {\n  return current.contents;\n}\n\nfunction restore(x) {\n  current.contents = x;\n  \n}\n\nfunction is_active(x) {\n  if (disabled.contents) {\n    return false;\n  } else {\n    return Caml_array.get(current.contents.active, number(x));\n  }\n}\n\nfunction is_error(x) {\n  if (disabled.contents) {\n    return false;\n  } else {\n    return Caml_array.get(current.contents.error, number(x));\n  }\n}\n\nfunction mk_lazy(f) {\n  var state = current.contents;\n  return {\n          LAZY_DONE: false,\n          VAL: 
(function () {\n              var prev = current.contents;\n              current.contents = state;\n              try {\n                var r = Curry._1(f, undefined);\n                current.contents = prev;\n                return r;\n              }\n              catch (exn){\n                current.contents = prev;\n                throw exn;\n              }\n            })\n        };\n}\n\nfunction parse_opt(error, active, flags, s) {\n  var set = function (i) {\n    return Caml_array.set(flags, i, true);\n  };\n  var clear = function (i) {\n    return Caml_array.set(flags, i, false);\n  };\n  var set_all = function (i) {\n    Caml_array.set(active, i, true);\n    return Caml_array.set(error, i, true);\n  };\n  var get_num = function (_n, _i) {\n    while(true) {\n      var i = _i;\n      var n = _n;\n      if (i >= s.length) {\n        return [\n                i,\n                n\n              ];\n      }\n      var match = Caml_string.get(s, i);\n      if (match > 57 || match < 48) {\n        return [\n                i,\n                n\n              ];\n      }\n      _i = i + 1 | 0;\n      _n = (Math.imul(10, n) + Caml_string.get(s, i) | 0) - /* '0' */48 | 0;\n      continue ;\n    };\n  };\n  var get_range = function (i) {\n    var match = get_num(0, i);\n    var n1 = match[1];\n    var i$1 = match[0];\n    if (!((i$1 + 2 | 0) < s.length && Caml_string.get(s, i$1) === /* '.' */46 && Caml_string.get(s, i$1 + 1 | 0) === /* '.' 
*/46)) {\n      return [\n              i$1,\n              n1,\n              n1\n            ];\n    }\n    var match$1 = get_num(0, i$1 + 2 | 0);\n    var n2 = match$1[1];\n    if (n2 < n1) {\n      throw {\n            RE_EXN_ID: Arg.Bad,\n            _1: \"Ill-formed list of warnings\",\n            Error: new Error()\n          };\n    }\n    return [\n            match$1[0],\n            n1,\n            n2\n          ];\n  };\n  var loop = function (_i) {\n    while(true) {\n      var i = _i;\n      if (i >= s.length) {\n        return ;\n      }\n      var match = Caml_string.get(s, i);\n      if (match >= 65) {\n        if (match >= 97) {\n          if (match >= 123) {\n            throw {\n                  RE_EXN_ID: Arg.Bad,\n                  _1: \"Ill-formed list of warnings\",\n                  Error: new Error()\n                };\n          }\n          List.iter(clear, letter(Caml_string.get(s, i)));\n          _i = i + 1 | 0;\n          continue ;\n        }\n        if (match >= 91) {\n          throw {\n                RE_EXN_ID: Arg.Bad,\n                _1: \"Ill-formed list of warnings\",\n                Error: new Error()\n              };\n        }\n        List.iter(set, letter(Char.lowercase_ascii(Caml_string.get(s, i))));\n        _i = i + 1 | 0;\n        continue ;\n      }\n      if (match >= 46) {\n        if (match >= 64) {\n          return loop_letter_num(set_all, i + 1 | 0);\n        }\n        throw {\n              RE_EXN_ID: Arg.Bad,\n              _1: \"Ill-formed list of warnings\",\n              Error: new Error()\n            };\n      }\n      if (match >= 43) {\n        switch (match) {\n          case 43 :\n              return loop_letter_num(set, i + 1 | 0);\n          case 44 :\n              throw {\n                    RE_EXN_ID: Arg.Bad,\n                    _1: \"Ill-formed list of warnings\",\n                    Error: new Error()\n                  };\n          case 45 :\n              return 
loop_letter_num(clear, i + 1 | 0);\n          \n        }\n      } else {\n        throw {\n              RE_EXN_ID: Arg.Bad,\n              _1: \"Ill-formed list of warnings\",\n              Error: new Error()\n            };\n      }\n    };\n  };\n  var loop_letter_num = function (myset, i) {\n    if (i >= s.length) {\n      throw {\n            RE_EXN_ID: Arg.Bad,\n            _1: \"Ill-formed list of warnings\",\n            Error: new Error()\n          };\n    }\n    var match = Caml_string.get(s, i);\n    if (match >= 65) {\n      if (match >= 97) {\n        if (match >= 123) {\n          throw {\n                RE_EXN_ID: Arg.Bad,\n                _1: \"Ill-formed list of warnings\",\n                Error: new Error()\n              };\n        }\n        List.iter(myset, letter(Caml_string.get(s, i)));\n        return loop(i + 1 | 0);\n      }\n      if (match >= 91) {\n        throw {\n              RE_EXN_ID: Arg.Bad,\n              _1: \"Ill-formed list of warnings\",\n              Error: new Error()\n            };\n      }\n      List.iter(myset, letter(Char.lowercase_ascii(Caml_string.get(s, i))));\n      return loop(i + 1 | 0);\n    }\n    if (match > 57 || match < 48) {\n      throw {\n            RE_EXN_ID: Arg.Bad,\n            _1: \"Ill-formed list of warnings\",\n            Error: new Error()\n          };\n    }\n    var match$1 = get_range(i);\n    for(var n = match$1[1] ,n_finish = Caml.caml_int_min(match$1[2], 62); n <= n_finish; ++n){\n      Curry._1(myset, n);\n    }\n    return loop(match$1[0]);\n  };\n  return loop(0);\n}\n\nfunction parse_options(errflag, s) {\n  var error = $$Array.copy(current.contents.error);\n  var active = $$Array.copy(current.contents.active);\n  parse_opt(error, active, errflag ? 
error : active, s);\n  current.contents = {\n    active: active,\n    error: error\n  };\n  \n}\n\nvar defaults_w = \"+a-4-6-7-9-27-29-32..42-44-45-48-50-60\";\n\nvar defaults_warn_error = \"-a+31\";\n\nparse_options(false, defaults_w);\n\nparse_options(true, defaults_warn_error);\n\nfunction message(x) {\n  if (typeof x === \"number\") {\n    switch (x) {\n      case /* Comment_start */0 :\n          return \"this is the start of a comment.\";\n      case /* Comment_not_end */1 :\n          return \"this is not the end of a comment.\";\n      case /* Partial_application */2 :\n          return \"this function application is partial,\\nmaybe some arguments are missing.\";\n      case /* Statement_type */3 :\n          return \"this expression should have type unit.\";\n      case /* Unused_match */4 :\n          return \"this match case is unused.\";\n      case /* Unused_pat */5 :\n          return \"this sub-pattern is unused.\";\n      case /* Illegal_backslash */6 :\n          return \"illegal backslash escape in string.\";\n      case /* Unerasable_optional_argument */7 :\n          return \"this optional argument cannot be erased.\";\n      case /* Unused_argument */8 :\n          return \"this argument will not be used by the function.\";\n      case /* Nonreturning_statement */9 :\n          return \"this statement never returns (or has an unsound type.)\";\n      case /* Useless_record_with */10 :\n          return \"all the fields are explicitly listed in this record:\\nthe 'with' clause is useless.\";\n      case /* All_clauses_guarded */11 :\n          return \"this pattern-matching is not exhaustive.\\nAll clauses in this pattern-matching are guarded.\";\n      case /* Wildcard_arg_to_constant_constr */12 :\n          return \"wildcard pattern given as argument to a constant constructor\";\n      case /* Eol_in_string */13 :\n          return \"unescaped end-of-line in a string constant (non-portable code)\";\n      case /* Unused_rec_flag */14 :\n     
     return \"unused rec flag.\";\n      case /* Expect_tailcall */15 :\n          return Printf.sprintf(\"expected tailcall\");\n      case /* Fragile_literal_pattern */16 :\n          return Printf.sprintf(\"Code should not depend on the actual values of\\nthis constructor's arguments. They are only for information\\nand may change in future versions. (See manual section 8.5)\");\n      case /* Unreachable_case */17 :\n          return \"this match case is unreachable.\\nConsider replacing it with a refutation case '<pat> -> .'\";\n      case /* Assignment_to_non_mutable_value */18 :\n          return \"A potential assignment to a non-mutable value was detected \\nin this source file.  Such assignments may generate incorrect code \\nwhen using Flambda.\";\n      case /* Constraint_on_gadt */19 :\n          return \"Type constraints do not apply to GADT cases of variant types.\";\n      \n    }\n  } else {\n    switch (x.TAG | 0) {\n      case /* Deprecated */0 :\n          return \"deprecated: \" + Misc.normalise_eol(x._0);\n      case /* Fragile_match */1 :\n          var s = x._0;\n          if (s === \"\") {\n            return \"this pattern-matching is fragile.\";\n          } else {\n            return \"this pattern-matching is fragile.\\nIt will remain exhaustive when constructors are added to type \" + (s + \".\");\n          }\n      case /* Labels_omitted */2 :\n          var ls = x._0;\n          if (ls) {\n            if (ls.tl) {\n              return \"labels \" + ($$String.concat(\", \", ls) + \" were omitted in the application of this function.\");\n            } else {\n              return \"label \" + (ls.hd + \" was omitted in the application of this function.\");\n            }\n          }\n          throw {\n                RE_EXN_ID: \"Assert_failure\",\n                _1: [\n                  \"warnings.res\",\n                  360,\n                  30\n                ],\n                Error: new Error()\n              };\n      
case /* Method_override */3 :\n          var match = x._0;\n          if (match) {\n            var slist = match.tl;\n            var lab = match.hd;\n            if (slist) {\n              return $$String.concat(\" \", {\n                          hd: \"the following methods are overridden by the class\",\n                          tl: {\n                            hd: lab,\n                            tl: {\n                              hd: \":\\n \",\n                              tl: slist\n                            }\n                          }\n                        });\n            } else {\n              return \"the method \" + (lab + \" is overridden.\");\n            }\n          }\n          throw {\n                RE_EXN_ID: \"Assert_failure\",\n                _1: [\n                  \"warnings.res\",\n                  371,\n                  31\n                ],\n                Error: new Error()\n              };\n      case /* Partial_match */4 :\n          var s$1 = x._0;\n          if (s$1 === \"\") {\n            return \"this pattern-matching is not exhaustive.\";\n          } else {\n            return \"this pattern-matching is not exhaustive.\\nHere is an example of a case that is not matched:\\n\" + s$1;\n          }\n      case /* Non_closed_record_pattern */5 :\n          return \"the following labels are not bound in this record pattern:\\n\" + (x._0 + \"\\nEither bind these labels explicitly or add '; _' to the pattern.\");\n      case /* Instance_variable_override */6 :\n          var match$1 = x._0;\n          if (match$1) {\n            var slist$1 = match$1.tl;\n            var lab$1 = match$1.hd;\n            if (slist$1) {\n              return $$String.concat(\" \", {\n                          hd: \"the following instance variables are overridden by the class\",\n                          tl: {\n                            hd: lab$1,\n                            tl: {\n                              hd: \":\\n 
\",\n                              tl: slist$1\n                            }\n                          }\n                        }) + \"\\nThe behaviour changed in ocaml 3.10 (previous behaviour was hiding.)\";\n            } else {\n              return \"the instance variable \" + (lab$1 + \" is overridden.\\nThe behaviour changed in ocaml 3.10 (previous behaviour was hiding.)\");\n            }\n          }\n          throw {\n                RE_EXN_ID: \"Assert_failure\",\n                _1: [\n                  \"warnings.res\",\n                  393,\n                  42\n                ],\n                Error: new Error()\n              };\n      case /* Implicit_public_methods */7 :\n          return \"the following private methods were made public implicitly:\\n \" + ($$String.concat(\" \", x._0) + \".\");\n      case /* Undeclared_virtual_method */8 :\n          return \"the virtual method \" + (x._0 + \" is not declared.\");\n      case /* Not_principal */9 :\n          return x._0 + \" is not principal.\";\n      case /* Without_principality */10 :\n          return x._0 + \" without principality.\";\n      case /* Preprocessor */11 :\n          return x._0;\n      case /* Bad_module_name */12 :\n          return \"bad source file name: \\\"\" + (x._0 + \"\\\" is not a valid module name.\");\n      case /* Unused_var */13 :\n      case /* Unused_var_strict */14 :\n          return \"unused variable \" + (x._0 + \".\");\n      case /* Duplicate_definitions */15 :\n          return Curry._4(Printf.sprintf(\"the %s %s is defined in both types %s and %s.\"), x._0, x._1, x._2, x._3);\n      case /* Multiple_definition */16 :\n          return Curry._3(Printf.sprintf(\"files %s and %s both define a module named %s\"), x._1, x._2, x._0);\n      case /* Unused_value_declaration */17 :\n          return \"unused value \" + (x._0 + \".\");\n      case /* Unused_open */18 :\n          return \"unused open \" + (x._0 + \".\");\n      case /* 
Unused_type_declaration */19 :\n          return \"unused type \" + (x._0 + \".\");\n      case /* Unused_for_index */20 :\n          return \"unused for-loop index \" + (x._0 + \".\");\n      case /* Unused_ancestor */21 :\n          return \"unused ancestor variable \" + (x._0 + \".\");\n      case /* Unused_constructor */22 :\n          var s$2 = x._0;\n          if (x._1) {\n            return \"constructor \" + (s$2 + \" is never used to build values.\\n(However, this constructor appears in patterns.)\");\n          } else if (x._2) {\n            return \"constructor \" + (s$2 + \" is never used to build values.\\nIts type is exported as a private type.\");\n          } else {\n            return \"unused constructor \" + (s$2 + \".\");\n          }\n      case /* Unused_extension */23 :\n          var kind = x._1 ? \"exception\" : \"extension constructor\";\n          var name = kind + (\" \" + x._0);\n          if (x._2) {\n            return name + \" is never used to build values.\\n(However, this constructor appears in patterns.)\";\n          } else if (x._3) {\n            return name + \" is never used to build values.\\nIt is exported or rebound as a private extension.\";\n          } else {\n            return \"unused \" + name;\n          }\n      case /* Name_out_of_scope */24 :\n          var slist$2 = x._1;\n          var ty = x._0;\n          if (slist$2 && !slist$2.tl && !x._2) {\n            return slist$2.hd + (\" was selected from type \" + (ty + \".\\nIt is not visible in the current scope, and will not \\nbe selected if the type becomes unknown.\"));\n          }\n          if (x._2) {\n            return \"this record of type \" + (ty + (\" contains fields that are \\nnot visible in the current scope: \" + ($$String.concat(\" \", slist$2) + \".\\nThey will not be selected if the type becomes unknown.\")));\n          }\n          throw {\n                RE_EXN_ID: \"Assert_failure\",\n                _1: [\n                  
\"warnings.res\",\n                  457,\n                  38\n                ],\n                Error: new Error()\n              };\n          break;\n      case /* Ambiguous_name */25 :\n          var _slist = x._0;\n          if (_slist && !_slist.tl && !x._2) {\n            return _slist.hd + (\" belongs to several types: \" + ($$String.concat(\" \", x._1) + \"\\nThe first one was selected. Please disambiguate if this is wrong.\"));\n          }\n          if (x._2) {\n            return \"these field labels belong to several types: \" + ($$String.concat(\" \", x._1) + \"\\nThe first one was selected. Please disambiguate if this is wrong.\");\n          }\n          throw {\n                RE_EXN_ID: \"Assert_failure\",\n                _1: [\n                  \"warnings.res\",\n                  473,\n                  35\n                ],\n                Error: new Error()\n              };\n          break;\n      case /* Disambiguated_name */26 :\n          return \"this use of \" + (x._0 + \" relies on type-directed disambiguation,\\nit will not compile with OCaml 4.00 or earlier.\");\n      case /* Nonoptional_label */27 :\n          return \"the label \" + (x._0 + \" is not optional.\");\n      case /* Open_shadow_identifier */28 :\n          return Curry._2(Printf.sprintf(\"this open statement shadows the %s identifier %s (which is later used)\"), x._0, x._1);\n      case /* Open_shadow_label_constructor */29 :\n          return Curry._2(Printf.sprintf(\"this open statement shadows the %s %s (which is later used)\"), x._0, x._1);\n      case /* Bad_env_variable */30 :\n          return Curry._2(Printf.sprintf(\"illegal environment variable %s : %s\"), x._0, x._1);\n      case /* Attribute_payload */31 :\n          return Curry._2(Printf.sprintf(\"illegal payload for attribute '%s'.\\n%s\"), x._0, x._1);\n      case /* Eliminated_optional_arguments */32 :\n          var sl = x._0;\n          return Curry._2(Printf.sprintf(\"implicit elimination 
of optional argument%s %s\"), List.length(sl) === 1 ? \"\" : \"s\", $$String.concat(\", \", sl));\n      case /* No_cmi_file */33 :\n          var msg = x._1;\n          var name$1 = x._0;\n          if (msg !== undefined) {\n            return Curry._2(Printf.sprintf(\"no valid cmi file was found in path for module %s. %s\"), name$1, msg);\n          } else {\n            return \"no cmi file was found in path for module \" + name$1;\n          }\n      case /* Bad_docstring */34 :\n          if (x._0) {\n            return \"unattached documentation comment (ignored)\";\n          } else {\n            return \"ambiguous documentation comment\";\n          }\n      case /* Misplaced_attribute */35 :\n          return Curry._1(Printf.sprintf(\"the %S attribute cannot appear in this context\"), x._0);\n      case /* Duplicated_attribute */36 :\n          return Curry._1(Printf.sprintf(\"the %S attribute is used more than once on this expression\"), x._0);\n      case /* Inlining_impossible */37 :\n          return Curry._1(Printf.sprintf(\"Cannot inline: %s\"), x._0);\n      case /* Ambiguous_pattern */38 :\n          var vars = List.sort($$String.compare, x._0);\n          var tmp;\n          if (vars) {\n            tmp = vars.tl ? \"variables \" + $$String.concat(\",\", vars) : \"variable \" + vars.hd;\n          } else {\n            throw {\n                  RE_EXN_ID: \"Assert_failure\",\n                  _1: [\n                    \"warnings.res\",\n                    535,\n                    18\n                  ],\n                  Error: new Error()\n                };\n          }\n          return Curry._1(Printf.sprintf(\"Ambiguous or-pattern variables under guard;\\n%s may match different arguments. 
(See manual section 8.5)\"), tmp);\n      case /* No_cmx_file */39 :\n          return Curry._1(Printf.sprintf(\"no cmx file was found in path for module %s, and its interface was not compiled with -opaque\"), x._0);\n      case /* Unused_module */40 :\n          return \"unused module \" + (x._0 + \".\");\n      case /* Unboxable_type_in_prim_decl */41 :\n          var t = x._0;\n          return Curry._2(Printf.sprintf(\"This primitive declaration uses type %s, which is unannotated and\\nunboxable. The representation of such types may change in future\\nversions. You should annotate the declaration of %s with [@@boxed]\\nor [@@unboxed].\"), t, t);\n      \n    }\n  }\n}\n\nfunction sub_locs(x) {\n  if (typeof x === \"number\" || x.TAG !== /* Deprecated */0) {\n    return /* [] */0;\n  } else {\n    return {\n            hd: [\n              x._1,\n              \"Definition\"\n            ],\n            tl: {\n              hd: [\n                x._2,\n                \"Expected signature\"\n              ],\n              tl: /* [] */0\n            }\n          };\n  }\n}\n\nvar nerrors = {\n  contents: 0\n};\n\nfunction report(w) {\n  if (is_active(w)) {\n    if (is_error(w)) {\n      nerrors.contents = nerrors.contents + 1 | 0;\n    }\n    return {\n            NAME: \"Active\",\n            VAL: {\n              number: number(w),\n              message: message(w),\n              is_error: is_error(w),\n              sub_locs: sub_locs(w)\n            }\n          };\n  } else {\n    return \"Inactive\";\n  }\n}\n\nvar Errors = /* @__PURE__ */Caml_exceptions.create(\"Warnings.Errors\");\n\nfunction reset_fatal(param) {\n  nerrors.contents = 0;\n  \n}\n\nfunction check_fatal(param) {\n  if (nerrors.contents <= 0) {\n    return ;\n  }\n  nerrors.contents = 0;\n  throw {\n        RE_EXN_ID: Errors,\n        Error: new Error()\n      };\n}\n\nvar descriptions = {\n  hd: [\n    1,\n    \"Suspicious-looking start-of-comment mark.\"\n  ],\n  tl: {\n    hd: [\n    
  2,\n      \"Suspicious-looking end-of-comment mark.\"\n    ],\n    tl: {\n      hd: [\n        3,\n        \"Deprecated feature.\"\n      ],\n      tl: {\n        hd: [\n          4,\n          \"Fragile pattern matching: matching that will remain complete even\\n    if additional constructors are added to one of the variant types\\n    matched.\"\n        ],\n        tl: {\n          hd: [\n            5,\n            \"Partially applied function: expression whose result has function\\n    type and is ignored.\"\n          ],\n          tl: {\n            hd: [\n              6,\n              \"Label omitted in function application.\"\n            ],\n            tl: {\n              hd: [\n                7,\n                \"Method overridden.\"\n              ],\n              tl: {\n                hd: [\n                  8,\n                  \"Partial match: missing cases in pattern-matching.\"\n                ],\n                tl: {\n                  hd: [\n                    9,\n                    \"Missing fields in a record pattern.\"\n                  ],\n                  tl: {\n                    hd: [\n                      10,\n                      \"Expression on the left-hand side of a sequence that doesn't have type\\n    \\\"unit\\\" (and that is not a function, see warning number 5).\"\n                    ],\n                    tl: {\n                      hd: [\n                        11,\n                        \"Redundant case in a pattern matching (unused match case).\"\n                      ],\n                      tl: {\n                        hd: [\n                          12,\n                          \"Redundant sub-pattern in a pattern-matching.\"\n                        ],\n                        tl: {\n                          hd: [\n                            13,\n                            \"Instance variable overridden.\"\n                          ],\n                          tl: {\n                 
           hd: [\n                              14,\n                              \"Illegal backslash escape in a string constant.\"\n                            ],\n                            tl: {\n                              hd: [\n                                15,\n                                \"Private method made public implicitly.\"\n                              ],\n                              tl: {\n                                hd: [\n                                  16,\n                                  \"Unerasable optional argument.\"\n                                ],\n                                tl: {\n                                  hd: [\n                                    17,\n                                    \"Undeclared virtual method.\"\n                                  ],\n                                  tl: {\n                                    hd: [\n                                      18,\n                                      \"Non-principal type.\"\n                                    ],\n                                    tl: {\n                                      hd: [\n                                        19,\n                                        \"Type without principality.\"\n                                      ],\n                                      tl: {\n                                        hd: [\n                                          20,\n                                          \"Unused function argument.\"\n                                        ],\n                                        tl: {\n                                          hd: [\n                                            21,\n                                            \"Non-returning statement.\"\n                                          ],\n                                          tl: {\n                                            hd: [\n                                              22,\n                  
                            \"Preprocessor warning.\"\n                                            ],\n                                            tl: {\n                                              hd: [\n                                                23,\n                                                \"Useless record \\\"with\\\" clause.\"\n                                              ],\n                                              tl: {\n                                                hd: [\n                                                  24,\n                                                  \"Bad module name: the source file name is not a valid OCaml module name.\"\n                                                ],\n                                                tl: {\n                                                  hd: [\n                                                    25,\n                                                    \"Deprecated: now part of warning 8.\"\n                                                  ],\n                                                  tl: {\n                                                    hd: [\n                                                      26,\n                                                      \"Suspicious unused variable: unused variable that is bound\\n    with \\\"let\\\" or \\\"as\\\", and doesn't start with an underscore (\\\"_\\\")\\n    character.\"\n                                                    ],\n                                                    tl: {\n                                                      hd: [\n                                                        27,\n                                                        \"Innocuous unused variable: unused variable that is not bound with\\n    \\\"let\\\" nor \\\"as\\\", and doesn't start with an underscore (\\\"_\\\")\\n    character.\"\n                                                      ],\n                          
                            tl: {\n                                                        hd: [\n                                                          28,\n                                                          \"Wildcard pattern given as argument to a constant constructor.\"\n                                                        ],\n                                                        tl: {\n                                                          hd: [\n                                                            29,\n                                                            \"Unescaped end-of-line in a string constant (non-portable code).\"\n                                                          ],\n                                                          tl: {\n                                                            hd: [\n                                                              30,\n                                                              \"Two labels or constructors of the same name are defined in two\\n    mutually recursive types.\"\n                                                            ],\n                                                            tl: {\n                                                              hd: [\n                                                                31,\n                                                                \"A module is linked twice in the same executable.\"\n                                                              ],\n                                                              tl: {\n                                                                hd: [\n                                                                  32,\n                                                                  \"Unused value declaration.\"\n                                                                ],\n                                                                tl: {\n         
                                                         hd: [\n                                                                    33,\n                                                                    \"Unused open statement.\"\n                                                                  ],\n                                                                  tl: {\n                                                                    hd: [\n                                                                      34,\n                                                                      \"Unused type declaration.\"\n                                                                    ],\n                                                                    tl: {\n                                                                      hd: [\n                                                                        35,\n                                                                        \"Unused for-loop index.\"\n                                                                      ],\n                                                                      tl: {\n                                                                        hd: [\n                                                                          36,\n                                                                          \"Unused ancestor variable.\"\n                                                                        ],\n                                                                        tl: {\n                                                                          hd: [\n                                                                            37,\n                                                                            \"Unused constructor.\"\n                                                                          ],\n                                                               
           tl: {\n                                                                            hd: [\n                                                                              38,\n                                                                              \"Unused extension constructor.\"\n                                                                            ],\n                                                                            tl: {\n                                                                              hd: [\n                                                                                39,\n                                                                                \"Unused rec flag.\"\n                                                                              ],\n                                                                              tl: {\n                                                                                hd: [\n                                                                                  40,\n                                                                                  \"Constructor or label name used out of scope.\"\n                                                                                ],\n                                                                                tl: {\n                                                                                  hd: [\n                                                                                    41,\n                                                                                    \"Ambiguous constructor or label name.\"\n                                                                                  ],\n                                                                                  tl: {\n                                                                                    hd: [\n                                                      
                                42,\n                                                                                      \"Disambiguated constructor or label name (compatibility warning).\"\n                                                                                    ],\n                                                                                    tl: {\n                                                                                      hd: [\n                                                                                        43,\n                                                                                        \"Nonoptional label applied as optional.\"\n                                                                                      ],\n                                                                                      tl: {\n                                                                                        hd: [\n                                                                                          44,\n                                                                                          \"Open statement shadows an already defined identifier.\"\n                                                                                        ],\n                                                                                        tl: {\n                                                                                          hd: [\n                                                                                            45,\n                                                                                            \"Open statement shadows an already defined label or constructor.\"\n                                                                                          ],\n                                                                                          tl: {\n                                                       
                                     hd: [\n                                                                                              46,\n                                                                                              \"Error in environment variable.\"\n                                                                                            ],\n                                                                                            tl: {\n                                                                                              hd: [\n                                                                                                47,\n                                                                                                \"Illegal attribute payload.\"\n                                                                                              ],\n                                                                                              tl: {\n                                                                                                hd: [\n                                                                                                  48,\n                                                                                                  \"Implicit elimination of optional arguments.\"\n                                                                                                ],\n                                                                                                tl: {\n                                                                                                  hd: [\n                                                                                                    49,\n                                                                                                    \"Absent cmi file when looking up module alias.\"\n                                                                                      
            ],\n                                                                                                  tl: {\n                                                                                                    hd: [\n                                                                                                      50,\n                                                                                                      \"Unexpected documentation comment.\"\n                                                                                                    ],\n                                                                                                    tl: {\n                                                                                                      hd: [\n                                                                                                        51,\n                                                                                                        \"Warning on non-tail calls if @tailcall present.\"\n                                                                                                      ],\n                                                                                                      tl: {\n                                                                                                        hd: [\n                                                                                                          52,\n                                                                                                          \"Fragile constant pattern.\"\n                                                                                                        ],\n                                                                                                        tl: {\n                                                                                                          hd: [\n                                
                                                                            53,\n                                                                                                            \"Attribute cannot appear in this context\"\n                                                                                                          ],\n                                                                                                          tl: {\n                                                                                                            hd: [\n                                                                                                              54,\n                                                                                                              \"Attribute used more than once on an expression\"\n                                                                                                            ],\n                                                                                                            tl: {\n                                                                                                              hd: [\n                                                                                                                55,\n                                                                                                                \"Inlining impossible\"\n                                                                                                              ],\n                                                                                                              tl: {\n                                                                                                                hd: [\n                                                                                                                  56,\n                                                                                    
                              \"Unreachable case in a pattern-matching (based on type information).\"\n                                                                                                                ],\n                                                                                                                tl: {\n                                                                                                                  hd: [\n                                                                                                                    57,\n                                                                                                                    \"Ambiguous or-pattern variables under guard\"\n                                                                                                                  ],\n                                                                                                                  tl: {\n                                                                                                                    hd: [\n                                                                                                                      58,\n                                                                                                                      \"Missing cmx file\"\n                                                                                                                    ],\n                                                                                                                    tl: {\n                                                                                                                      hd: [\n                                                                                                                        59,\n                                                                                                                        \"Assignment to 
non-mutable value\"\n                                                                                                                      ],\n                                                                                                                      tl: {\n                                                                                                                        hd: [\n                                                                                                                          60,\n                                                                                                                          \"Unused module declaration\"\n                                                                                                                        ],\n                                                                                                                        tl: {\n                                                                                                                          hd: [\n                                                                                                                            61,\n                                                                                                                            \"Unboxable type in primitive declaration\"\n                                                                                                                          ],\n                                                                                                                          tl: {\n                                                                                                                            hd: [\n                                                                                                                              62,\n                                                                                                                              
\"Type constraint on GADT type declaration\"\n                                                                                                                            ],\n                                                                                                                            tl: /* [] */0\n                                                                                                                          }\n                                                                                                                        }\n                                                                                                                      }\n                                                                                                                    }\n                                                                                                                  }\n                                                                                                                }\n                                                                                                              }\n                                                                                                            }\n                                                                                                          }\n                                                                                                        }\n                                                                                                      }\n                                                                                                    }\n                                                                                                  }\n                                                                                                }\n                                                                                              }\n                      
                                                                      }\n                                                                                          }\n                                                                                        }\n                                                                                      }\n                                                                                    }\n                                                                                  }\n                                                                                }\n                                                                              }\n                                                                            }\n                                                                          }\n                                                                        }\n                                                                      }\n                                                                    }\n                                                                  }\n                                                                }\n                                                              }\n                                                            }\n                                                          }\n                                                        }\n                                                      }\n                                                    }\n                                                  }\n                                                }\n                                              }\n                                            }\n                                          }\n                                        }\n                                      }\n                                    }\n                                  }\n                                }\n       
                       }\n                            }\n                          }\n                        }\n                      }\n                    }\n                  }\n                }\n              }\n            }\n          }\n        }\n      }\n    }\n  }\n};\n\nfunction help_warnings(param) {\n  List.iter((function (param) {\n          return Curry._2(Printf.printf(\"%3i %s\\n\"), param[0], param[1]);\n        }), descriptions);\n  console.log(\"  A all warnings\");\n  for(var i = /* 'b' */98; i <= /* 'z' */122; ++i){\n    var c = Char.chr(i);\n    var l = letter(c);\n    if (l) {\n      if (l.tl) {\n        Curry._2(Printf.printf(\"  %c warnings %s.\\n\"), Char.uppercase_ascii(c), $$String.concat(\", \", List.map((function (prim) {\n                        return String(prim);\n                      }), l)));\n      } else {\n        Curry._2(Printf.printf(\"  %c Alias for warning %i.\\n\"), Char.uppercase_ascii(c), l.hd);\n      }\n    }\n    \n  }\n  return Pervasives.exit(0);\n}\n\nvar last_warning_number = 62;\n\nexport {\n  number ,\n  last_warning_number ,\n  letter ,\n  current ,\n  disabled ,\n  without_warnings ,\n  backup ,\n  restore ,\n  is_active ,\n  is_error ,\n  mk_lazy ,\n  parse_opt ,\n  parse_options ,\n  defaults_w ,\n  defaults_warn_error ,\n  message ,\n  sub_locs ,\n  nerrors ,\n  report ,\n  Errors ,\n  reset_fatal ,\n  check_fatal ,\n  descriptions ,\n  help_warnings ,\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/larger-project/src/warnings.res",
    "content": "/* ************************************************************************ */\n/*  */\n/* OCaml */\n/*  */\n/* Pierre Weis && Damien Doligez, INRIA Rocquencourt */\n/*  */\n/* Copyright 1998 Institut National de Recherche en Informatique et */\n/* en Automatique. */\n/*  */\n/* All rights reserved.  This file is distributed under the terms of */\n/* the GNU Lesser General Public License version 2.1, with the */\n/* special exception on linking described in the file LICENSE. */\n/*  */\n/* ************************************************************************ */\n\n/* When you change this, you need to update the documentation:\n   - man/ocamlc.m\n   - man/ocamlopt.m\n   - manual/manual/cmds/comp.etex\n   - manual/manual/cmds/native.etex\n*/\n\ntype loc = {\n  loc_start: Lexing.position,\n  loc_end: Lexing.position,\n  loc_ghost: bool,\n}\n\ntype t =\n  | Comment_start /* 1 */\n  | Comment_not_end /* 2 */\n  | Deprecated(string, loc, loc) /* 3 */\n  | Fragile_match(string) /* 4 */\n  | Partial_application /* 5 */\n  | Labels_omitted(list<string>) /* 6 */\n  | Method_override(list<string>) /* 7 */\n  | Partial_match(string) /* 8 */\n  | Non_closed_record_pattern(string) /* 9 */\n  | Statement_type /* 10 */\n  | Unused_match /* 11 */\n  | Unused_pat /* 12 */\n  | Instance_variable_override(list<string>) /* 13 */\n  | Illegal_backslash /* 14 */\n  | Implicit_public_methods(list<string>) /* 15 */\n  | Unerasable_optional_argument /* 16 */\n  | Undeclared_virtual_method(string) /* 17 */\n  | Not_principal(string) /* 18 */\n  | Without_principality(string) /* 19 */\n  | Unused_argument /* 20 */\n  | Nonreturning_statement /* 21 */\n  | Preprocessor(string) /* 22 */\n  | Useless_record_with /* 23 */\n  | Bad_module_name(string) /* 24 */\n  | All_clauses_guarded /* 8, used to be 25 */\n  | Unused_var(string) /* 26 */\n  | Unused_var_strict(string) /* 27 */\n  | Wildcard_arg_to_constant_constr /* 28 */\n  | Eol_in_string /* 29 */\n  | 
Duplicate_definitions(string, string, string, string) /* 30 */\n  | Multiple_definition(string, string, string) /* 31 */\n  | Unused_value_declaration(string) /* 32 */\n  | Unused_open(string) /* 33 */\n  | Unused_type_declaration(string) /* 34 */\n  | Unused_for_index(string) /* 35 */\n  | Unused_ancestor(string) /* 36 */\n  | Unused_constructor(string, bool, bool) /* 37 */\n  | Unused_extension(string, bool, bool, bool) /* 38 */\n  | Unused_rec_flag /* 39 */\n  | Name_out_of_scope(string, list<string>, bool) /* 40 */\n  | Ambiguous_name(list<string>, list<string>, bool) /* 41 */\n  | Disambiguated_name(string) /* 42 */\n  | Nonoptional_label(string) /* 43 */\n  | Open_shadow_identifier(string, string) /* 44 */\n  | Open_shadow_label_constructor(string, string) /* 45 */\n  | Bad_env_variable(string, string) /* 46 */\n  | Attribute_payload(string, string) /* 47 */\n  | Eliminated_optional_arguments(list<string>) /* 48 */\n  | No_cmi_file(string, option<string>) /* 49 */\n  | Bad_docstring(bool) /* 50 */\n  | Expect_tailcall /* 51 */\n  | Fragile_literal_pattern /* 52 */\n  | Misplaced_attribute(string) /* 53 */\n  | Duplicated_attribute(string) /* 54 */\n  | Inlining_impossible(string) /* 55 */\n  | Unreachable_case /* 56 */\n  | Ambiguous_pattern(list<string>) /* 57 */\n  | No_cmx_file(string) /* 58 */\n  | Assignment_to_non_mutable_value /* 59 */\n  | Unused_module(string) /* 60 */\n  | Unboxable_type_in_prim_decl(string) /* 61 */\n  | Constraint_on_gadt /* 62 */\n\n/* If you remove a warning, leave a hole in the numbering.  
NEVER change\n   the numbers of existing warnings.\n   If you add a new warning, add it at the end with a new number;\n   do NOT reuse one of the holes.\n*/\n\nlet number = x =>\n  switch x {\n  | Comment_start => 1\n  | Comment_not_end => 2\n  | Deprecated(_) => 3\n  | Fragile_match(_) => 4\n  | Partial_application => 5\n  | Labels_omitted(_) => 6\n  | Method_override(_) => 7\n  | Partial_match(_) => 8\n  | Non_closed_record_pattern(_) => 9\n  | Statement_type => 10\n  | Unused_match => 11\n  | Unused_pat => 12\n  | Instance_variable_override(_) => 13\n  | Illegal_backslash => 14\n  | Implicit_public_methods(_) => 15\n  | Unerasable_optional_argument => 16\n  | Undeclared_virtual_method(_) => 17\n  | Not_principal(_) => 18\n  | Without_principality(_) => 19\n  | Unused_argument => 20\n  | Nonreturning_statement => 21\n  | Preprocessor(_) => 22\n  | Useless_record_with => 23\n  | Bad_module_name(_) => 24\n  | All_clauses_guarded => 8 /* used to be 25 */\n  | Unused_var(_) => 26\n  | Unused_var_strict(_) => 27\n  | Wildcard_arg_to_constant_constr => 28\n  | Eol_in_string => 29\n  | Duplicate_definitions(_) => 30\n  | Multiple_definition(_) => 31\n  | Unused_value_declaration(_) => 32\n  | Unused_open(_) => 33\n  | Unused_type_declaration(_) => 34\n  | Unused_for_index(_) => 35\n  | Unused_ancestor(_) => 36\n  | Unused_constructor(_) => 37\n  | Unused_extension(_) => 38\n  | Unused_rec_flag => 39\n  | Name_out_of_scope(_) => 40\n  | Ambiguous_name(_) => 41\n  | Disambiguated_name(_) => 42\n  | Nonoptional_label(_) => 43\n  | Open_shadow_identifier(_) => 44\n  | Open_shadow_label_constructor(_) => 45\n  | Bad_env_variable(_) => 46\n  | Attribute_payload(_) => 47\n  | Eliminated_optional_arguments(_) => 48\n  | No_cmi_file(_) => 49\n  | Bad_docstring(_) => 50\n  | Expect_tailcall => 51\n  | Fragile_literal_pattern => 52\n  | Misplaced_attribute(_) => 53\n  | Duplicated_attribute(_) => 54\n  | Inlining_impossible(_) => 55\n  | Unreachable_case => 56\n  | 
Ambiguous_pattern(_) => 57\n  | No_cmx_file(_) => 58\n  | Assignment_to_non_mutable_value => 59\n  | Unused_module(_) => 60\n  | Unboxable_type_in_prim_decl(_) => 61\n  | Constraint_on_gadt => 62\n  }\n\nlet last_warning_number = 62\n\n/* Must be the max number returned by the [number] function. */\n\nlet letter = x =>\n  switch x {\n  | 'a' =>\n    let rec loop = i =>\n      if i == 0 {\n        list{}\n      } else {\n        list{i, ...loop(i - 1)}\n      }\n    loop(last_warning_number)\n  | 'b' => list{}\n  | 'c' => list{1, 2}\n  | 'd' => list{3}\n  | 'e' => list{4}\n  | 'f' => list{5}\n  | 'g' => list{}\n  | 'h' => list{}\n  | 'i' => list{}\n  | 'j' => list{}\n  | 'k' => list{32, 33, 34, 35, 36, 37, 38, 39}\n  | 'l' => list{6}\n  | 'm' => list{7}\n  | 'n' => list{}\n  | 'o' => list{}\n  | 'p' => list{8}\n  | 'q' => list{}\n  | 'r' => list{9}\n  | 's' => list{10}\n  | 't' => list{}\n  | 'u' => list{11, 12}\n  | 'v' => list{13}\n  | 'w' => list{}\n  | 'x' => list{14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 30}\n  | 'y' => list{26}\n  | 'z' => list{27}\n  | _ => assert false\n  }\n\ntype state = {\n  active: array<bool>,\n  error: array<bool>,\n}\n\nlet current = ref({\n  active: Array.make(last_warning_number + 1, true),\n  error: Array.make(last_warning_number + 1, false),\n})\n\nlet disabled = ref(false)\n\nlet without_warnings = f => Misc.protect_refs(list{Misc.R(disabled, true)}, f)\n\nlet backup = () => current.contents\n\nlet restore = x => current := x\n\nlet is_active = x => !disabled.contents && current.contents.active[number(x)]\nlet is_error = x => !disabled.contents && current.contents.error[number(x)]\n\nlet mk_lazy = f => {\n  let state = backup()\n  lazy {\n    let prev = backup()\n    restore(state)\n    try {\n      let r = f()\n      restore(prev)\n      r\n    } catch {\n    | exn =>\n      restore(prev)\n      raise(exn)\n    }\n  }\n}\n\nlet parse_opt = (error, active, flags, s) => {\n  let set = i => flags[i] = true\n  let clear = i => 
flags[i] = false\n  let set_all = i => {\n    active[i] = true\n    error[i] = true\n  }\n  let error = () => raise(Arg.Bad(\"Ill-formed list of warnings\"))\n  let rec get_num = (n, i) =>\n    if i >= String.length(s) {\n      (i, n)\n    } else {\n      switch String.get(s, i) {\n      | '0' .. '9' => get_num(10 * n + Char.code(String.get(s, i)) - Char.code('0'), i + 1)\n      | _ => (i, n)\n      }\n    }\n\n  let get_range = i => {\n    let (i, n1) = get_num(0, i)\n    if i + 2 < String.length(s) && (String.get(s, i) == '.' && String.get(s, i + 1) == '.') {\n      let (i, n2) = get_num(0, i + 2)\n      if n2 < n1 {\n        error()\n      }\n      (i, n1, n2)\n    } else {\n      (i, n1, n1)\n    }\n  }\n\n  let rec loop = i =>\n    if i >= String.length(s) {\n      ()\n    } else {\n      switch String.get(s, i) {\n      | 'A' .. 'Z' =>\n        List.iter(set, letter(Char.lowercase_ascii(String.get(s, i))))\n        loop(i + 1)\n      | 'a' .. 'z' =>\n        List.iter(clear, letter(String.get(s, i)))\n        loop(i + 1)\n      | '+' => loop_letter_num(set, i + 1)\n      | '-' => loop_letter_num(clear, i + 1)\n      | '@' => loop_letter_num(set_all, i + 1)\n      | _ => error()\n      }\n    }\n  and loop_letter_num = (myset, i) =>\n    if i >= String.length(s) {\n      error()\n    } else {\n      switch String.get(s, i) {\n      | '0' .. '9' =>\n        let (i, n1, n2) = get_range(i)\n        for n in n1 to min(n2, last_warning_number) {\n          myset(n)\n        }\n        loop(i)\n      | 'A' .. 'Z' =>\n        List.iter(myset, letter(Char.lowercase_ascii(String.get(s, i))))\n        loop(i + 1)\n      | 'a' .. 
'z' =>\n        List.iter(myset, letter(String.get(s, i)))\n        loop(i + 1)\n      | _ => error()\n      }\n    }\n\n  loop(0)\n}\n\nlet parse_options = (errflag, s) => {\n  let error = Array.copy(current.contents.error)\n  let active = Array.copy(current.contents.active)\n  parse_opt(\n    error,\n    active,\n    if errflag {\n      error\n    } else {\n      active\n    },\n    s,\n  )\n  current := {error: error, active: active}\n}\n\n/* If you change these, don't forget to change them in man/ocamlc.m */\nlet defaults_w = \"+a-4-6-7-9-27-29-32..42-44-45-48-50-60\"\nlet defaults_warn_error = \"-a+31\"\n\nlet () = parse_options(false, defaults_w)\nlet () = parse_options(true, defaults_warn_error)\n\nlet message = x =>\n  switch x {\n  | Comment_start => \"this is the start of a comment.\"\n  | Comment_not_end => \"this is not the end of a comment.\"\n  | Deprecated(s, _, _) =>\n    /* Reduce \\r\\n to \\n:\n           - Prevents any \\r characters being printed on Unix when processing\n             Windows sources\n           - Prevents \\r\\r\\n being generated on Windows, which affects the\n             testsuite\n */\n    \"deprecated: \" ++ Misc.normalise_eol(s)\n  | Fragile_match(\"\") => \"this pattern-matching is fragile.\"\n  | Fragile_match(s) =>\n    \"this pattern-matching is fragile.\\n\\\n       It will remain exhaustive when constructors are added to type \" ++\n    (s ++\n    \".\")\n  | Partial_application => \"this function application is partial,\\n\\\n       maybe some arguments are missing.\"\n  | Labels_omitted(list{}) => assert false\n  | Labels_omitted(list{l}) =>\n    \"label \" ++ (l ++ \" was omitted in the application of this function.\")\n  | Labels_omitted(ls) =>\n    \"labels \" ++ (String.concat(\", \", ls) ++ \" were omitted in the application of this function.\")\n  | Method_override(list{lab}) => \"the method \" ++ (lab ++ \" is overridden.\")\n  | Method_override(list{cname, ...slist}) =>\n    String.concat(\n      \" \",\n  
    list{\"the following methods are overridden by the class\", cname, \":\\n \", ...slist},\n    )\n  | Method_override(list{}) => assert false\n  | Partial_match(\"\") => \"this pattern-matching is not exhaustive.\"\n  | Partial_match(s) =>\n    \"this pattern-matching is not exhaustive.\\n\\\n       Here is an example of a case that is not matched:\\n\" ++\n    s\n  | Non_closed_record_pattern(s) =>\n    \"the following labels are not bound in this record pattern:\\n\" ++\n    (s ++\n    \"\\nEither bind these labels explicitly or add '; _' to the pattern.\")\n  | Statement_type => \"this expression should have type unit.\"\n  | Unused_match => \"this match case is unused.\"\n  | Unused_pat => \"this sub-pattern is unused.\"\n  | Instance_variable_override(list{lab}) =>\n    \"the instance variable \" ++\n    (lab ++\n    (\" is overridden.\\n\" ++ \"The behaviour changed in ocaml 3.10 (previous behaviour was hiding.)\"))\n  | Instance_variable_override(list{cname, ...slist}) =>\n    String.concat(\n      \" \",\n      list{\"the following instance variables are overridden by the class\", cname, \":\\n \", ...slist},\n    ) ++ \"\\nThe behaviour changed in ocaml 3.10 (previous behaviour was hiding.)\"\n  | Instance_variable_override(list{}) => assert false\n  | Illegal_backslash => \"illegal backslash escape in string.\"\n  | Implicit_public_methods(l) =>\n    \"the following private methods were made public implicitly:\\n \" ++\n    (String.concat(\" \", l) ++\n    \".\")\n  | Unerasable_optional_argument => \"this optional argument cannot be erased.\"\n  | Undeclared_virtual_method(m) => \"the virtual method \" ++ (m ++ \" is not declared.\")\n  | Not_principal(s) => s ++ \" is not principal.\"\n  | Without_principality(s) => s ++ \" without principality.\"\n  | Unused_argument => \"this argument will not be used by the function.\"\n  | Nonreturning_statement => \"this statement never returns (or has an unsound type.)\"\n  | Preprocessor(s) => s\n  | 
Useless_record_with => \"all the fields are explicitly listed in this record:\\n\\\n       the 'with' clause is useless.\"\n  | Bad_module_name(modname) =>\n    \"bad source file name: \\\"\" ++ (modname ++ \"\\\" is not a valid module name.\")\n  | All_clauses_guarded => \"this pattern-matching is not exhaustive.\\n\\\n       All clauses in this pattern-matching are guarded.\"\n  | Unused_var(v) | Unused_var_strict(v) => \"unused variable \" ++ (v ++ \".\")\n  | Wildcard_arg_to_constant_constr => \"wildcard pattern given as argument to a constant constructor\"\n  | Eol_in_string => \"unescaped end-of-line in a string constant (non-portable code)\"\n  | Duplicate_definitions(kind, cname, tc1, tc2) =>\n    Printf.sprintf(\"the %s %s is defined in both types %s and %s.\", kind, cname, tc1, tc2)\n  | Multiple_definition(modname, file1, file2) =>\n    Printf.sprintf(\"files %s and %s both define a module named %s\", file1, file2, modname)\n  | Unused_value_declaration(v) => \"unused value \" ++ (v ++ \".\")\n  | Unused_open(s) => \"unused open \" ++ (s ++ \".\")\n  | Unused_type_declaration(s) => \"unused type \" ++ (s ++ \".\")\n  | Unused_for_index(s) => \"unused for-loop index \" ++ (s ++ \".\")\n  | Unused_ancestor(s) => \"unused ancestor variable \" ++ (s ++ \".\")\n  | Unused_constructor(s, false, false) => \"unused constructor \" ++ (s ++ \".\")\n  | Unused_constructor(s, true, _) =>\n    \"constructor \" ++\n    (s ++\n    \" is never used to build values.\\n\\\n        (However, this constructor appears in patterns.)\")\n  | Unused_constructor(s, false, true) =>\n    \"constructor \" ++\n    (s ++\n    \" is never used to build values.\\n\\\n        Its type is exported as a private type.\")\n  | Unused_extension(s, is_exception, cu_pattern, cu_privatize) =>\n    let kind = if is_exception {\n      \"exception\"\n    } else {\n      \"extension constructor\"\n    }\n    let name = kind ++ (\" \" ++ s)\n    switch (cu_pattern, cu_privatize) {\n    | (false, 
false) => \"unused \" ++ name\n    | (true, _) =>\n      name ++ \" is never used to build values.\\n\\\n           (However, this constructor appears in patterns.)\"\n    | (false, true) =>\n      name ++ \" is never used to build values.\\n\\\n            It is exported or rebound as a private extension.\"\n    }\n  | Unused_rec_flag => \"unused rec flag.\"\n  | Name_out_of_scope(ty, list{nm}, false) =>\n    nm ++\n    (\" was selected from type \" ++\n    (ty ++ \".\\nIt is not visible in the current scope, and will not \\n\\\n       be selected if the type becomes unknown.\"))\n  | Name_out_of_scope(_, _, false) => assert false\n  | Name_out_of_scope(ty, slist, true) =>\n    \"this record of type \" ++\n    (ty ++\n    (\" contains fields that are \\n\\\n       not visible in the current scope: \" ++\n    (String.concat(\" \", slist) ++\n    \".\\n\\\n       They will not be selected if the type becomes unknown.\")))\n  | Ambiguous_name(list{s}, tl, false) =>\n    s ++\n    (\" belongs to several types: \" ++\n    (String.concat(\n      \" \",\n      tl,\n    ) ++ \"\\nThe first one was selected. Please disambiguate if this is wrong.\"))\n  | Ambiguous_name(_, _, false) => assert false\n  | Ambiguous_name(_slist, tl, true) =>\n    \"these field labels belong to several types: \" ++\n    (String.concat(\" \", tl) ++\n    \"\\nThe first one was selected. 
Please disambiguate if this is wrong.\")\n  | Disambiguated_name(s) =>\n    \"this use of \" ++\n    (s ++\n    \" relies on type-directed disambiguation,\\n\\\n       it will not compile with OCaml 4.00 or earlier.\")\n  | Nonoptional_label(s) => \"the label \" ++ (s ++ \" is not optional.\")\n  | Open_shadow_identifier(kind, s) =>\n    Printf.sprintf(\n      \"this open statement shadows the %s identifier %s (which is later used)\",\n      kind,\n      s,\n    )\n  | Open_shadow_label_constructor(kind, s) =>\n    Printf.sprintf(\"this open statement shadows the %s %s (which is later used)\", kind, s)\n  | Bad_env_variable(var, s) => Printf.sprintf(\"illegal environment variable %s : %s\", var, s)\n  | Attribute_payload(a, s) => Printf.sprintf(\"illegal payload for attribute '%s'.\\n%s\", a, s)\n  | Eliminated_optional_arguments(sl) =>\n    Printf.sprintf(\n      \"implicit elimination of optional argument%s %s\",\n      if List.length(sl) == 1 {\n        \"\"\n      } else {\n        \"s\"\n      },\n      String.concat(\", \", sl),\n    )\n  | No_cmi_file(name, None) => \"no cmi file was found in path for module \" ++ name\n  | No_cmi_file(name, Some(msg)) =>\n    Printf.sprintf(\"no valid cmi file was found in path for module %s. %s\", name, msg)\n  | Bad_docstring(unattached) =>\n    if unattached {\n      \"unattached documentation comment (ignored)\"\n    } else {\n      \"ambiguous documentation comment\"\n    }\n  | Expect_tailcall => Printf.sprintf(\"expected tailcall\")\n  | Fragile_literal_pattern =>\n    Printf.sprintf(\n      \"Code should not depend on the actual values of\\n\\\n         this constructor's arguments. They are only for information\\n\\\n         and may change in future versions. 
(See manual section 8.5)\",\n    )\n  | Unreachable_case => \"this match case is unreachable.\\n\\\n       Consider replacing it with a refutation case '<pat> -> .'\"\n  | Misplaced_attribute(attr_name) =>\n    Printf.sprintf(\"the %S attribute cannot appear in this context\", attr_name)\n  | Duplicated_attribute(attr_name) =>\n    Printf.sprintf(\n      \"the %S attribute is used more than once on this \\\n          expression\",\n      attr_name,\n    )\n  | Inlining_impossible(reason) => Printf.sprintf(\"Cannot inline: %s\", reason)\n  | Ambiguous_pattern(vars) =>\n    let msg = {\n      let vars = List.sort(String.compare, vars)\n      switch vars {\n      | list{} => assert false\n      | list{x} => \"variable \" ++ x\n      | list{_, ..._} => \"variables \" ++ String.concat(\",\", vars)\n      }\n    }\n    Printf.sprintf(\n      \"Ambiguous or-pattern variables under guard;\\n\\\n         %s may match different arguments. (See manual section 8.5)\",\n      msg,\n    )\n  | No_cmx_file(name) =>\n    Printf.sprintf(\n      \"no cmx file was found in path for module %s, \\\n         and its interface was not compiled with -opaque\",\n      name,\n    )\n  | Assignment_to_non_mutable_value => \"A potential assignment to a non-mutable value was detected \\n\\\n        in this source file.  Such assignments may generate incorrect code \\n\\\n        when using Flambda.\"\n  | Unused_module(s) => \"unused module \" ++ (s ++ \".\")\n  | Unboxable_type_in_prim_decl(t) =>\n    Printf.sprintf(\n      \"This primitive declaration uses type %s, which is unannotated and\\n\\\n         unboxable. The representation of such types may change in future\\n\\\n         versions. 
You should annotate the declaration of %s with [@@boxed]\\n\\\n         or [@@unboxed].\",\n      t,\n      t,\n    )\n  | Constraint_on_gadt => \"Type constraints do not apply to GADT cases of variant types.\"\n  }\n\nlet sub_locs = x =>\n  switch x {\n  | Deprecated(_, def, use) => list{(def, \"Definition\"), (use, \"Expected signature\")}\n  | _ => list{}\n  }\n\nlet nerrors = ref(0)\n\ntype reporting_information = {\n  number: int,\n  message: string,\n  is_error: bool,\n  sub_locs: list<(loc, string)>,\n}\n\nlet report = w =>\n  switch is_active(w) {\n  | false => #Inactive\n  | true =>\n    if is_error(w) {\n      incr(nerrors)\n    }\n    #Active({\n      number: number(w),\n      message: message(w),\n      is_error: is_error(w),\n      sub_locs: sub_locs(w),\n    })\n  }\n\nexception Errors\n\nlet reset_fatal = () => nerrors := 0\n\nlet check_fatal = () =>\n  if nerrors.contents > 0 {\n    nerrors := 0\n    raise(Errors)\n  }\n\nlet descriptions = list{\n  (1, \"Suspicious-looking start-of-comment mark.\"),\n  (2, \"Suspicious-looking end-of-comment mark.\"),\n  (3, \"Deprecated feature.\"),\n  (\n    4,\n    \"Fragile pattern matching: matching that will remain complete even\\n\\\n   \\    if additional constructors are added to one of the variant types\\n\\\n   \\    matched.\",\n  ),\n  (\n    5,\n    \"Partially applied function: expression whose result has function\\n\\\n   \\    type and is ignored.\",\n  ),\n  (6, \"Label omitted in function application.\"),\n  (7, \"Method overridden.\"),\n  (8, \"Partial match: missing cases in pattern-matching.\"),\n  (9, \"Missing fields in a record pattern.\"),\n  (\n    10,\n    \"Expression on the left-hand side of a sequence that doesn't have \\\n      type\\n\\\n   \\    \\\"unit\\\" (and that is not a function, see warning number 5).\",\n  ),\n  (11, \"Redundant case in a pattern matching (unused match case).\"),\n  (12, \"Redundant sub-pattern in a pattern-matching.\"),\n  (13, \"Instance variable 
overridden.\"),\n  (14, \"Illegal backslash escape in a string constant.\"),\n  (15, \"Private method made public implicitly.\"),\n  (16, \"Unerasable optional argument.\"),\n  (17, \"Undeclared virtual method.\"),\n  (18, \"Non-principal type.\"),\n  (19, \"Type without principality.\"),\n  (20, \"Unused function argument.\"),\n  (21, \"Non-returning statement.\"),\n  (22, \"Preprocessor warning.\"),\n  (23, \"Useless record \\\"with\\\" clause.\"),\n  (\n    24,\n    \"Bad module name: the source file name is not a valid OCaml module \\\n        name.\",\n  ),\n  (25, \"Deprecated: now part of warning 8.\"),\n  (\n    26,\n    \"Suspicious unused variable: unused variable that is bound\\n\\\n   \\    with \\\"let\\\" or \\\"as\\\", and doesn't start with an underscore (\\\"_\\\")\\n\\\n   \\    character.\",\n  ),\n  (\n    27,\n    \"Innocuous unused variable: unused variable that is not bound with\\n\\\n   \\    \\\"let\\\" nor \\\"as\\\", and doesn't start with an underscore (\\\"_\\\")\\n\\\n   \\    character.\",\n  ),\n  (28, \"Wildcard pattern given as argument to a constant constructor.\"),\n  (29, \"Unescaped end-of-line in a string constant (non-portable code).\"),\n  (\n    30,\n    \"Two labels or constructors of the same name are defined in two\\n\\\n   \\    mutually recursive types.\",\n  ),\n  (31, \"A module is linked twice in the same executable.\"),\n  (32, \"Unused value declaration.\"),\n  (33, \"Unused open statement.\"),\n  (34, \"Unused type declaration.\"),\n  (35, \"Unused for-loop index.\"),\n  (36, \"Unused ancestor variable.\"),\n  (37, \"Unused constructor.\"),\n  (38, \"Unused extension constructor.\"),\n  (39, \"Unused rec flag.\"),\n  (40, \"Constructor or label name used out of scope.\"),\n  (41, \"Ambiguous constructor or label name.\"),\n  (42, \"Disambiguated constructor or label name (compatibility warning).\"),\n  (43, \"Nonoptional label applied as optional.\"),\n  (44, \"Open statement shadows an already defined 
identifier.\"),\n  (45, \"Open statement shadows an already defined label or constructor.\"),\n  (46, \"Error in environment variable.\"),\n  (47, \"Illegal attribute payload.\"),\n  (48, \"Implicit elimination of optional arguments.\"),\n  (49, \"Absent cmi file when looking up module alias.\"),\n  (50, \"Unexpected documentation comment.\"),\n  (51, \"Warning on non-tail calls if @tailcall present.\"),\n  (52, \"Fragile constant pattern.\"),\n  (53, \"Attribute cannot appear in this context\"),\n  (54, \"Attribute used more than once on an expression\"),\n  (55, \"Inlining impossible\"),\n  (56, \"Unreachable case in a pattern-matching (based on type information).\"),\n  (57, \"Ambiguous or-pattern variables under guard\"),\n  (58, \"Missing cmx file\"),\n  (59, \"Assignment to non-mutable value\"),\n  (60, \"Unused module declaration\"),\n  (61, \"Unboxable type in primitive declaration\"),\n  (62, \"Type constraint on GADT type declaration\"),\n}\n\nlet help_warnings = () => {\n  List.iter(((i, s)) => Printf.printf(\"%3i %s\\n\", i, s), descriptions)\n  print_endline(\"  A all warnings\")\n  for i in Char.code('b') to Char.code('z') {\n    let c = Char.chr(i)\n    switch letter(c) {\n    | list{} => ()\n    | list{n} => Printf.printf(\"  %c Alias for warning %i.\\n\", Char.uppercase_ascii(c), n)\n    | l =>\n      Printf.printf(\n        \"  %c warnings %s.\\n\",\n        Char.uppercase_ascii(c),\n        String.concat(\", \", List.map(string_of_int, l)),\n      )\n    }\n  }\n  exit(0)\n}\n\n"
  },
  {
    "path": "analysis/examples/monorepo-project/.gitignore",
    "content": "lib\n.merlin\n"
  },
  {
    "path": "analysis/examples/monorepo-project/.vscode/settings.json",
    "content": "{\n  \"rescript.settings.logLevel\": \"log\"\n}\n"
  },
  {
    "path": "analysis/examples/monorepo-project/package.json",
    "content": "{\n  \"name\": \"monorepo-project\",\n  \"private\": true,\n  \"workspaces\": [\n    \"packages/app\",\n    \"packages/lib\"\n  ],\n  \"dependencies\": {\n    \"rescript\": \"12.1.0\"\n  },\n  \"scripts\": {\n    \"build\": \"rescript build\",\n    \"watch\": \"rescript watch\",\n    \"clean\": \"rescript clean\"\n  }\n}\n"
  },
  {
    "path": "analysis/examples/monorepo-project/packages/app/.vscode/settings.json",
    "content": "{\n  \"rescript.settings.logLevel\": \"log\"\n}\n"
  },
  {
    "path": "analysis/examples/monorepo-project/packages/app/package.json",
    "content": "{\n  \"name\": \"@monorepo/app\",\n  \"version\": \"0.0.1\",\n  \"scripts\": {\n    \"build\": \"rescript build\",\n    \"clean\": \"rescript clean\",\n    \"watch\": \"rescript watch\"\n  },\n  \"dependencies\": {\n    \"@monorepo/lib\": \"*\"\n  }\n}\n"
  },
  {
    "path": "analysis/examples/monorepo-project/packages/app/rescript.json",
    "content": "{\n  \"name\": \"@monorepo/app\",\n  \"sources\": {\n    \"dir\": \"src\",\n    \"subdirs\": true\n  },\n  \"package-specs\": {\n    \"module\": \"esmodule\",\n    \"in-source\": true\n  },\n  \"suffix\": \".mjs\",\n  \"dependencies\": [\"@monorepo/lib\"]\n}\n"
  },
  {
    "path": "analysis/examples/monorepo-project/packages/app/src/App.mjs",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Lib from \"@monorepo/lib/src/Lib.mjs\";\n\nfunction main() {\n  let greeting = Lib.greet(\"World\");\n  console.log(greeting);\n  let sum = Lib.add(1, 2);\n  console.log(\"Sum: \" + sum.toString());\n}\n\nfunction unusedAppFunction() {\n  return \"Unused in app\";\n}\n\nmain();\n\nexport {\n  main,\n  unusedAppFunction,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/monorepo-project/packages/app/src/App.res",
    "content": "/* monorepo subpackage test */\n// App module - main application\n\nlet main = () => {\n  let greeting = Lib.greet(\"World\")\n  Console.log(greeting)\n\n  let sum = Lib.add(1, 2)\n  Console.log(\"Sum: \" ++ Int.toString(sum))\n}\n\n// This function is never used (dead code)\nlet unusedAppFunction = () => \"Unused in app\"\n\n// Run main\nlet _ = main()\n"
  },
  {
    "path": "analysis/examples/monorepo-project/rescript.json",
    "content": "{\n  \"name\": \"monorepo-project\",\n  \"sources\": {\n    \"dir\": \"src\",\n    \"subdirs\": true\n  },\n  \"package-specs\": {\n    \"module\": \"esmodule\",\n    \"in-source\": true\n  },\n  \"suffix\": \".mjs\",\n  \"dependencies\": [\n    \"@monorepo/app\",\n    \"@monorepo/lib\"\n  ]\n}\n"
  },
  {
    "path": "analysis/examples/monorepo-project/src/Root.mjs",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as App from \"@monorepo/app/src/App.mjs\";\n\nApp.main();\n\nlet rootValue = \"Root package\";\n\nlet unusedRootValue = 123;\n\nexport {\n  rootValue,\n  unusedRootValue,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/examples/monorepo-project/src/Root.res",
    "content": "// Root module - monorepo root\n\nlet rootValue = \"Root package\"\n\n// Use something from the app package\nlet _ = App.main()\n\n// This is unused (dead code)\nlet unusedRootValue = 123\n\n// let _ = App.unusedAppFunction"
  },
  {
    "path": "analysis/examples/rescript9-project/.gitignore",
    "content": "lib\n.merlin\n"
  },
  {
    "path": "analysis/examples/rescript9-project/.vscode/settings.json",
    "content": "{\n  \"rescript.settings.logLevel\": \"log\"\n}\n"
  },
  {
    "path": "analysis/examples/rescript9-project/bsconfig.json",
    "content": "{\n  \"name\": \"rescript9-project\",\n  \"sources\": [\"src\"],\n  \"package-specs\": {\n    \"module\": \"es6\",\n    \"in-source\": true\n  },\n  \"suffix\": \".bs.js\"\n}\n"
  },
  {
    "path": "analysis/examples/rescript9-project/package.json",
    "content": "{\n  \"name\": \"rescript9-project\",\n  \"version\": \"1.0.0\",\n  \"scripts\": {\n    \"build\": \"rescript build\",\n    \"watch\": \"rescript build -w\",\n    \"clean\": \"rescript clean\"\n  },\n  \"dependencies\": {\n    \"rescript\": \"9.1.4\"\n  }\n}\n"
  },
  {
    "path": "analysis/examples/rescript9-project/src/Hello.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction add(a, b) {\n  return a + b | 0;\n}\n\nvar greeting = \"Hello from ReScript 9!\";\n\nvar result = 3;\n\nexport {\n  greeting ,\n  add ,\n  result ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/rescript9-project/src/Hello.res",
    "content": "// Simple ReScript 9 test file\nlet greeting = \"Hello from ReScript 9!\"\n\nlet add = (a, b) => a + b\n\nlet result = add(1, 2)\n"
  },
  {
    "path": "analysis/examples/workspace-project/.gitignore",
    "content": "*.exe\n*.obj\n*.out\n*.compile\n*.native\n*.byte\n*.cmo\n*.annot\n*.cmi\n*.cmx\n*.cmt\n*.cmti\n*.cma\n*.a\n*.cmxa\n*.obj\n*~\n*.annot\n*.cmj\n*.bak\nlib\n*.mlast\n*.mliast\n.vscode\n.merlin\n.bsb.lock\n/node_modules/\n"
  },
  {
    "path": "analysis/examples/workspace-project/README.md",
    "content": "# ReScript workspaces example (monorepo)\n\n```\nnpm install\nnpm run build\nnpm run start\n```\n"
  },
  {
    "path": "analysis/examples/workspace-project/app/bsconfig.json",
    "content": "{\n  \"name\": \"app\",\n  \"bsc-flags\": [\"-open Common\"],\n  \"bs-dependencies\": [\"common\", \"myplugin\"],\n  \"sources\": [\n    {\n      \"dir\": \"src\",\n      \"subdirs\": true\n    }\n  ]\n}\n"
  },
  {
    "path": "analysis/examples/workspace-project/app/package.json",
    "content": "{\n\t\"name\": \"app\",\n\t\"version\": \"0.1.0\"\n}"
  },
  {
    "path": "analysis/examples/workspace-project/app/src/App.res",
    "content": "let _ = Myplugin.Promise.resolve(Utils.printError(\"Oh no!\"))\n"
  },
  {
    "path": "analysis/examples/workspace-project/bsconfig.json",
    "content": "{\n  \"name\": \"workspace-project\",\n  \"version\": \"0.1.0\",\n  \"sources\": [],\n  \"package-specs\": {\n    \"module\": \"es6\",\n    \"in-source\": true\n  },\n  \"suffix\": \".mjs\",\n  \"pinned-dependencies\": [\"common\", \"myplugin\", \"app\"],\n  \"bs-dependencies\": [\"common\", \"myplugin\", \"app\"]\n}\n"
  },
  {
    "path": "analysis/examples/workspace-project/common/bsconfig.json",
    "content": "{\n  \"name\": \"common\",\n  \"namespace\": true,\n  \"sources\": [\n    {\n      \"dir\": \"src\",\n      \"subdirs\": true\n    }\n  ]\n}\n"
  },
  {
    "path": "analysis/examples/workspace-project/common/package.json",
    "content": "{\n\t\"name\": \"common\",\n\t\"version\": \"0.1.0\"\n}"
  },
  {
    "path": "analysis/examples/workspace-project/common/src/Utils.mjs",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction printError(error) {\n  var errorMessage = \"\\u001b[31m\" + error;\n  console.log(errorMessage);\n  \n}\n\nexport {\n  printError ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/workspace-project/common/src/Utils.res",
    "content": "let printError = error => {\n  let errorMessage = `\\u001b[31m${error}`\n  Js.log(errorMessage)\n}\n"
  },
  {
    "path": "analysis/examples/workspace-project/myplugin/bsconfig.json",
    "content": "{\n  \"name\": \"myplugin\",\n  \"namespace\": true,\n  \"sources\": [\n    {\n      \"dir\": \"src\",\n      \"subdirs\": true\n    }\n  ]\n}\n"
  },
  {
    "path": "analysis/examples/workspace-project/myplugin/package.json",
    "content": "{\n\t\"name\": \"myplugin\",\n\t\"version\": \"0.1.0\"\n}"
  },
  {
    "path": "analysis/examples/workspace-project/myplugin/src/Promise.mjs",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Js_exn from \"rescript/lib/es6/js_exn.js\";\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nvar JsError = /* @__PURE__ */Caml_exceptions.create(\"Promise-Myplugin.JsError\");\n\nfunction $$catch(promise, callback) {\n  return promise.catch(function (err) {\n              return Curry._1(callback, Js_exn.anyToExnInternal(err));\n            });\n}\n\nexport {\n  JsError ,\n  $$catch ,\n  \n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/examples/workspace-project/myplugin/src/Promise.res",
    "content": "type t<+'a> = Js.Promise.t<'a>\n\nexception JsError(Js.Exn.t)\nexternal unsafeToJsExn: exn => Js.Exn.t = \"%identity\"\n\n@new\nexternal make: ((@uncurry (. 'a) => unit, (. 'e) => unit) => unit) => t<'a> = \"Promise\"\n\n@val @scope(\"Promise\")\nexternal resolve: 'a => t<'a> = \"resolve\"\n\n@send external then: (t<'a>, @uncurry ('a => t<'b>)) => t<'b> = \"then\"\n\n@send\nexternal thenResolve: (t<'a>, @uncurry ('a => 'b)) => t<'b> = \"then\"\n\n@send external finally: (t<'a>, unit => unit) => t<'a> = \"finally\"\n\n@scope(\"Promise\") @val\nexternal reject: exn => t<_> = \"reject\"\n\n@scope(\"Promise\") @val\nexternal all: array<t<'a>> => t<array<'a>> = \"all\"\n\n@scope(\"Promise\") @val\nexternal all2: ((t<'a>, t<'b>)) => t<('a, 'b)> = \"all\"\n\n@scope(\"Promise\") @val\nexternal all3: ((t<'a>, t<'b>, t<'c>)) => t<('a, 'b, 'c)> = \"all\"\n\n@scope(\"Promise\") @val\nexternal all4: ((t<'a>, t<'b>, t<'c>, t<'d>)) => t<('a, 'b, 'c, 'd)> = \"all\"\n\n@scope(\"Promise\") @val\nexternal all5: ((t<'a>, t<'b>, t<'c>, t<'d>, t<'e>)) => t<('a, 'b, 'c, 'd, 'e)> = \"all\"\n\n@scope(\"Promise\") @val\nexternal all6: ((t<'a>, t<'b>, t<'c>, t<'d>, t<'e>, t<'f>)) => t<('a, 'b, 'c, 'd, 'e, 'f)> = \"all\"\n\n@send\nexternal _catch: (t<'a>, @uncurry (exn => t<'a>)) => t<'a> = \"catch\"\n\nlet catch = (promise, callback) => _catch(promise, err => callback(Js.Exn.anyToExnInternal(err)))\n\n@scope(\"Promise\") @val\nexternal race: array<t<'a>> => t<'a> = \"race\"\n"
  },
  {
    "path": "analysis/examples/workspace-project/package.json",
    "content": "{\n  \"name\": \"workspace-project\",\n  \"private\": true,\n  \"version\": \"0.1.0\",\n  \"scripts\": {\n    \"clean\": \"rescript clean\",\n    \"build\": \"rescript build -with-deps\",\n    \"start\": \"node app/src/App.mjs\",\n    \"watch\": \"rescript build -w\"\n  },\n  \"keywords\": [\n    \"ReScript\"\n  ],\n  \"author\": \"\",\n  \"license\": \"MIT\",\n  \"devDependencies\": {\n    \"rescript\": \"^10.0.0-beta.3\"\n  },\n  \"workspaces\": {\n    \"packages\": [\n      \"app\",\n      \"common\",\n      \"myplugin\"\n    ]\n  }\n}\n"
  },
  {
    "path": "analysis/reanalyze/Makefile",
    "content": "SHELL = /bin/bash\n\nbuild:\n\tmake -C examples/deadcode build\n\tmake -C examples/termination build\n\ntest:\n\tmake -C examples/deadcode test\n\tmake -C examples/termination test\n\nclean:\n\tmake -C examples/deadcode clean\n\tmake -C examples/termination clean\n\n.DEFAULT_GOAL := build\n\n.PHONY: build clean test\n"
  },
  {
    "path": "analysis/reanalyze/dune",
    "content": "(dirs src)\n"
  },
  {
    "path": "analysis/reanalyze/examples/.gitignore",
    "content": ".merlin\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/.gitignore",
    "content": "/node_modules\n/lib"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/.watchmanconfig",
    "content": ""
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/Makefile",
    "content": "SHELL = /bin/bash\n\nnode_modules/.bin/rescript:\n\tnpm install\n\nbuild: node_modules/.bin/rescript\n\tnode_modules/.bin/rescript\n\ntest: build node_modules/.bin/rescript\n\t./test.sh\n\nclean:\n\trm -r node_modules lib\n\n.DEFAULT_GOAL := build\n\n.PHONY: build clean test\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/bsconfig.json",
    "content": "{\n  \"reanalyze\": {\n    \"analysis\": [\"dce\"],\n    \"suppress\": [],\n    \"unsuppress\": [],\n    \"transitive\": true\n  },\n  \"name\": \"sample-typescript-app\",\n  \"bsc-flags\": [\"-bs-super-errors -w a\"],\n  \"jsx\": { \"version\": 3 },\n  \"bs-dependencies\": [\"@rescript/react\", \"@glennsl/bs-json\"],\n  \"sources\": [\n    {\n      \"dir\": \"src\",\n      \"subdirs\": true\n    }\n  ],\n  \"package-specs\": {\n    \"module\": \"es6\",\n    \"in-source\": true\n  },\n  \"suffix\": \".bs.js\"\n}\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/expected/deadcode.txt",
    "content": "\n  Scanning AutoAnnotate.cmt Source:AutoAnnotate.res\n  addVariantCaseDeclaration R AutoAnnotate.res:1:15 path:+AutoAnnotate.variant\n  addRecordLabelDeclaration variant AutoAnnotate.res:4:15 path:+AutoAnnotate.record\n  addRecordLabelDeclaration r2 AutoAnnotate.res:6:11 path:+AutoAnnotate.r2\n  addRecordLabelDeclaration r3 AutoAnnotate.res:8:11 path:+AutoAnnotate.r3\n  addRecordLabelDeclaration r4 AutoAnnotate.res:10:11 path:+AutoAnnotate.r4\n  addVariantCaseDeclaration R2 AutoAnnotate.res:14:2 path:+AutoAnnotate.annotatedVariant\n  addVariantCaseDeclaration R4 AutoAnnotate.res:15:2 path:+AutoAnnotate.annotatedVariant\n  Scanning BootloaderResource.cmt Source:BootloaderResource.res\n  Scanning BucklescriptAnnotations.cmt Source:BucklescriptAnnotations.res\n  addValueDeclaration +bar BucklescriptAnnotations.res:25:4 path:+BucklescriptAnnotations\n  addValueDeclaration +f BucklescriptAnnotations.res:26:6 path:+BucklescriptAnnotations\n  addValueReference BucklescriptAnnotations.res:26:6 --> BucklescriptAnnotations.res:25:11\n  addValueReference BucklescriptAnnotations.res:25:4 --> BucklescriptAnnotations.res:26:6\n  Scanning ComponentAsProp.cmt Source:ComponentAsProp.res\n  addValueDeclaration +make ComponentAsProp.res:6:4 path:+ComponentAsProp\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: ComponentAsProp.res:7:3\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: ComponentAsProp.res:8:5\n  addValueReference ComponentAsProp.res:9:6 --> ComponentAsProp.res:6:12\n  addValueReference ComponentAsProp.res:10:6 --> ComponentAsProp.res:6:20\n  addValueReference ComponentAsProp.res:12:24 --> ComponentAsProp.res:12:13\n  addValueReference ComponentAsProp.res:13:16 --> React.res:3:0\n  addValueReference ComponentAsProp.res:11:14 --> ComponentAsProp.res:6:34\n  addValueReference ComponentAsProp.res:8:5 --> ReactDOMRe.res:8:0\n  
addValueReference ComponentAsProp.res:7:3 --> ReactDOMRe.res:8:0\n  Scanning CreateErrorHandler1.cmt Source:CreateErrorHandler1.res\n  addValueDeclaration +notification CreateErrorHandler1.res:3:6 path:+CreateErrorHandler1.Error1\n  addValueReference CreateErrorHandler1.res:3:6 --> CreateErrorHandler1.res:3:21\n  addValueReference CreateErrorHandler1.res:3:6 --> CreateErrorHandler1.res:3:21\n  addValueReference CreateErrorHandler1.res:8:0 --> ErrorHandler.resi:7:2\n  addValueReference ErrorHandler.resi:3:2 --> CreateErrorHandler1.res:3:6\n  Scanning CreateErrorHandler2.cmt Source:CreateErrorHandler2.res\n  addValueDeclaration +notification CreateErrorHandler2.res:3:6 path:+CreateErrorHandler2.Error2\n  addValueReference CreateErrorHandler2.res:3:6 --> CreateErrorHandler2.res:3:21\n  addValueReference ErrorHandler.resi:3:2 --> CreateErrorHandler2.res:3:6\n  Scanning DeadCodeImplementation.cmt Source:DeadCodeImplementation.res\n  addValueDeclaration +x DeadCodeImplementation.res:2:6 path:+DeadCodeImplementation.M\n  addValueReference DeadCodeInterface.res:2:2 --> DeadCodeImplementation.res:2:6\n  Scanning DeadCodeInterface.cmt Source:DeadCodeInterface.res\n  Scanning DeadExn.cmt Source:DeadExn.res\n  addValueDeclaration +eToplevel DeadExn.res:8:4 path:+DeadExn\n  addValueDeclaration +eInside DeadExn.res:10:4 path:+DeadExn\n  addExceptionDeclaration Etoplevel DeadExn.res:1:0 path:+DeadExn\n  addExceptionDeclaration Einside DeadExn.res:4:2 path:+DeadExn.Inside\n  addExceptionDeclaration DeadE DeadExn.res:7:0 path:+DeadExn\n  addValueReference DeadExn.res:8:4 --> DeadExn.res:1:0\n  addTypeReference DeadExn.res:8:16 --> DeadExn.res:1:0\n  addValueReference DeadExn.res:10:4 --> DeadExn.res:4:2\n  addTypeReference DeadExn.res:10:14 --> DeadExn.res:4:2\n  addValueReference DeadExn.res:12:7 --> DeadExn.res:10:4\n  Scanning DeadExn.cmti Source:DeadExn.resi\n  Scanning DeadRT.cmt Source:DeadRT.res\n  addValueDeclaration +emitModuleAccessPath DeadRT.res:5:8 path:+DeadRT\n  
addVariantCaseDeclaration Root DeadRT.res:2:2 path:+DeadRT.moduleAccessPath\n  addVariantCaseDeclaration Kaboom DeadRT.res:3:2 path:+DeadRT.moduleAccessPath\n  addValueReference DeadRT.res:5:8 --> DeadRT.res:7:9\n  addValueReference DeadRT.res:5:8 --> DeadRT.res:5:31\n  addTypeReference DeadRT.res:11:16 --> DeadRT.res:3:2\n  Scanning DeadRT.cmti Source:DeadRT.resi\n  addVariantCaseDeclaration Root DeadRT.resi:2:2 path:DeadRT.moduleAccessPath\n  extendTypeDependencies DeadRT.res:2:2 --> DeadRT.resi:2:2\n  extendTypeDependencies DeadRT.resi:2:2 --> DeadRT.res:2:2\n  addVariantCaseDeclaration Kaboom DeadRT.resi:3:2 path:DeadRT.moduleAccessPath\n  extendTypeDependencies DeadRT.res:3:2 --> DeadRT.resi:3:2\n  extendTypeDependencies DeadRT.resi:3:2 --> DeadRT.res:3:2\n  addTypeReference DeadRT.res:3:2 --> DeadRT.resi:3:2\n  addTypeReference DeadRT.resi:3:2 --> DeadRT.res:3:2\n  addTypeReference DeadRT.res:2:2 --> DeadRT.resi:2:2\n  addTypeReference DeadRT.resi:2:2 --> DeadRT.res:2:2\n  Scanning DeadTest.cmt Source:DeadTest.res\n  addValueDeclaration +fortytwo DeadTest.res:2:4 path:+DeadTest\n  addValueDeclaration +fortyTwoButExported DeadTest.res:5:4 path:+DeadTest\n  addValueDeclaration +thisIsUsedOnce DeadTest.res:7:4 path:+DeadTest\n  addValueDeclaration +thisIsUsedTwice DeadTest.res:10:4 path:+DeadTest\n  addValueDeclaration +thisIsMarkedDead DeadTest.res:15:4 path:+DeadTest\n  addValueDeclaration +thisIsKeptAlive DeadTest.res:17:4 path:+DeadTest\n  addValueDeclaration +thisIsMarkedLive DeadTest.res:20:4 path:+DeadTest\n  addValueDeclaration +thisIsAlsoMarkedDead DeadTest.res:24:6 path:+DeadTest.Inner\n  addValueDeclaration +thisSignatureItemIsDead DeadTest.res:28:2 path:+DeadTest.M\n  addValueDeclaration +a DeadTest.res:36:2 path:+DeadTest.VariantUsedOnlyInImplementation\n  addValueDeclaration +x DeadTest.res:60:2 path:+DeadTest.MM\n  addValueDeclaration +y DeadTest.res:61:2 path:+DeadTest.MM\n  addValueDeclaration +unusedRec DeadTest.res:75:8 path:+DeadTest\n  
addValueDeclaration +split_map DeadTest.res:77:8 path:+DeadTest\n  addValueDeclaration +rec1 DeadTest.res:82:8 path:+DeadTest\n  addValueDeclaration +rec2 DeadTest.res:83:4 path:+DeadTest\n  addValueDeclaration +recWithCallback DeadTest.res:85:8 path:+DeadTest\n  addValueDeclaration +foo DeadTest.res:90:8 path:+DeadTest\n  addValueDeclaration +bar DeadTest.res:94:4 path:+DeadTest\n  addValueDeclaration +withDefaultValue DeadTest.res:96:4 path:+DeadTest\n  addValueDeclaration +reasonResource DeadTest.res:111:6 path:+DeadTest.LazyDynamicallyLoadedComponent2\n  addValueDeclaration +makeProps DeadTest.res:114:6 path:+DeadTest.LazyDynamicallyLoadedComponent2\n  addValueDeclaration +make DeadTest.res:115:6 path:+DeadTest.LazyDynamicallyLoadedComponent2\n  addValueDeclaration +zzz DeadTest.res:127:4 path:+DeadTest\n  addValueDeclaration +second DeadTest.res:135:4 path:+DeadTest\n  addValueDeclaration +minute DeadTest.res:136:4 path:+DeadTest\n  addValueDeclaration +deadRef DeadTest.res:138:4 path:+DeadTest\n  addValueDeclaration +make DeadTest.res:141:4 path:+DeadTest\n  addValueDeclaration +theSideEffectIsLogging DeadTest.res:145:4 path:+DeadTest\n  addValueDeclaration +stringLengthNoSideEffects DeadTest.res:147:4 path:+DeadTest\n  addValueDeclaration +globallyLive1 DeadTest.res:152:6 path:+DeadTest.GloobLive\n  addValueDeclaration +globallyLive2 DeadTest.res:153:6 path:+DeadTest.GloobLive\n  addValueDeclaration +globallyLive3 DeadTest.res:154:6 path:+DeadTest.GloobLive\n  addValueDeclaration +funWithInnerVars DeadTest.res:169:4 path:+DeadTest\n  addValueDeclaration +deadIncorrect DeadTest.res:178:4 path:+DeadTest\n  addValueDeclaration +ira DeadTest.res:184:4 path:+DeadTest\n  addValueReference DeadTest.res:1:15 --> ImmutableArray.resi:9:0\n  addValueReference DeadTest.res:8:7 --> DeadTest.res:7:4\n  addValueReference DeadTest.res:11:7 --> DeadTest.res:10:4\n  addValueReference DeadTest.res:12:7 --> DeadTest.res:10:4\n  addValueReference DeadTest.res:20:4 --> 
DeadTest.res:17:4\n  addValueDeclaration +thisSignatureItemIsDead DeadTest.res:31:6 path:+DeadTest.M\n  addVariantCaseDeclaration A DeadTest.res:35:11 path:+DeadTest.VariantUsedOnlyInImplementation.t\n  addVariantCaseDeclaration A DeadTest.res:38:11 path:+DeadTest.VariantUsedOnlyInImplementation.t\n  extendTypeDependencies DeadTest.res:38:11 --> DeadTest.res:35:11\n  extendTypeDependencies DeadTest.res:35:11 --> DeadTest.res:38:11\n  addValueDeclaration +a DeadTest.res:39:6 path:+DeadTest.VariantUsedOnlyInImplementation\n  addTypeReference DeadTest.res:39:10 --> DeadTest.res:38:11\n  addValueReference DeadTest.res:42:17 --> DeadTest.res:36:2\n  addValueReference DeadTest.res:42:14 --> DeadTest.res:42:9\n  addValueDeclaration +_ DeadTest.res:44:0 path:+DeadTest\n  addTypeReference DeadTest.res:44:8 --> DeadTypeTest.resi:8:2\n  addValueDeclaration +_ DeadTest.res:45:0 path:+DeadTest\n  addTypeReference DeadTest.res:45:8 --> DeadTypeTest.resi:9:2\n  addRecordLabelDeclaration xxx DeadTest.res:48:2 path:+DeadTest.record\n  addRecordLabelDeclaration yyy DeadTest.res:49:2 path:+DeadTest.record\n  addValueDeclaration +_ DeadTest.res:52:0 path:+DeadTest\n  addTypeReference DeadTest.res:52:13 --> DeadTest.res:48:2\n  addValueReference DeadTest.res:52:13 --> DeadTest.res:52:8\n  addValueDeclaration +_ DeadTest.res:53:0 path:+DeadTest\n  addValueReference DeadTest.res:53:19 --> DeadTest.res:53:10\n  addTypeReference DeadTest.res:53:9 --> DeadTest.res:49:2\n  addValueDeclaration +_ DeadTest.res:56:2 path:+DeadTest.UnderscoreInside\n  addValueDeclaration +y DeadTest.res:63:6 path:+DeadTest.MM\n  addValueDeclaration +x DeadTest.res:64:6 path:+DeadTest.MM\n  addValueReference DeadTest.res:64:6 --> DeadTest.res:63:6\n  addValueDeclaration +valueOnlyInImplementation DeadTest.res:65:6 path:+DeadTest.MM\n  addValueReference DeadTest.res:69:9 --> DeadTest.res:60:2\n  addValueReference DeadTest.res:73:16 --> DeadValueTest.resi:1:0\n  addValueReference DeadTest.res:75:8 --> 
DeadTest.res:75:8\n  addValueReference DeadTest.res:77:8 --> DeadTest.res:77:20\n  addValueReference DeadTest.res:77:8 --> DeadTest.res:77:8\n  addValueReference DeadTest.res:82:8 --> DeadTest.res:83:4\n  addValueReference DeadTest.res:83:4 --> DeadTest.res:82:8\n  addValueDeclaration +cb DeadTest.res:86:6 path:+DeadTest\n  addValueReference DeadTest.res:86:6 --> DeadTest.res:85:8\n  addValueReference DeadTest.res:85:8 --> DeadTest.res:86:6\n  addValueDeclaration +cb DeadTest.res:91:6 path:+DeadTest\n  addValueReference DeadTest.res:91:6 --> DeadTest.res:94:4\n  addValueReference DeadTest.res:90:8 --> DeadTest.res:91:6\n  addValueReference DeadTest.res:94:4 --> DeadTest.res:90:8\n  addValueReference DeadTest.res:96:4 --> DeadTest.res:96:42\n  addValueReference DeadTest.res:96:4 --> DeadTest.res:96:24\n  addValueReference DeadTest.res:96:4 --> DeadTest.res:96:45\n  addTypeReference DeadTest.res:106:16 --> DeadRT.resi:2:2\n  addValueReference DeadTest.res:111:6 --> JSResource.res:3:0\n  addValueReference DeadTest.res:115:6 --> DynamicallyLoadedComponent.res:2:4\n  addValueReference DeadTest.res:115:6 --> DeadTest.res:111:6\n  addValueReference DeadTest.res:115:6 --> BootloaderResource.res:3:0\n  addValueReference DeadTest.res:115:6 --> DeadTest.res:115:13\n  addValueReference DeadTest.res:115:6 --> React.res:18:0\n  addValueDeclaration +a1 DeadTest.res:128:6 path:+DeadTest\n  addValueDeclaration +a2 DeadTest.res:129:6 path:+DeadTest\n  addValueDeclaration +a3 DeadTest.res:130:6 path:+DeadTest\n  addValueReference DeadTest.res:133:17 --> DynamicallyLoadedComponent.res:2:4\n  addValueReference DeadTest.res:133:17 --> React.res:18:0\n  addValueReference DeadTest.res:136:4 --> DeadTest.res:135:4\n  addValueReference DeadTest.res:141:32 --> DeadTest.res:141:12\n  addValueReference DeadTest.res:141:19 --> React.res:7:0\n  addValueReference DeadTest.res:143:16 --> DeadTest.res:141:4\n  addVariantCaseDeclaration A DeadTest.res:158:11 path:+DeadTest.WithInclude.t\n  
addVariantCaseDeclaration A DeadTest.res:161:13 path:+DeadTest.WithInclude.T.t\n  addVariantCaseDeclaration A DeadTest.res:161:13 path:+DeadTest.WithInclude.t\n  extendTypeDependencies DeadTest.res:161:13 --> DeadTest.res:158:11\n  extendTypeDependencies DeadTest.res:158:11 --> DeadTest.res:161:13\n  addTypeReference DeadTest.res:166:7 --> DeadTest.res:158:11\n  addValueDeclaration +x DeadTest.res:170:6 path:+DeadTest\n  addValueDeclaration +y DeadTest.res:171:6 path:+DeadTest\n  addValueReference DeadTest.res:169:4 --> DeadTest.res:170:6\n  addValueReference DeadTest.res:169:4 --> DeadTest.res:171:6\n  addRecordLabelDeclaration a DeadTest.res:175:11 path:+DeadTest.rc\n  addValueDeclaration +_ DeadTest.res:180:0 path:+DeadTest\n  addValueReference DeadTest.res:180:8 --> DeadTest.res:178:4\n  addRecordLabelDeclaration IR.a DeadTest.res:182:24 path:+DeadTest.inlineRecord\n  addRecordLabelDeclaration IR.b DeadTest.res:182:32 path:+DeadTest.inlineRecord\n  addRecordLabelDeclaration IR.c DeadTest.res:182:40 path:+DeadTest.inlineRecord\n  addRecordLabelDeclaration IR.d DeadTest.res:182:51 path:+DeadTest.inlineRecord\n  addRecordLabelDeclaration IR.e DeadTest.res:182:65 path:+DeadTest.inlineRecord\n  addVariantCaseDeclaration IR DeadTest.res:182:20 path:+DeadTest.inlineRecord\n  addValueDeclaration +_ DeadTest.res:185:0 path:+DeadTest\n  addTypeReference DeadTest.res:187:20 --> DeadTest.res:182:20\n  addValueReference DeadTest.res:187:27 --> DeadTest.res:184:4\n  addTypeReference DeadTest.res:187:35 --> DeadTest.res:182:32\n  addValueReference DeadTest.res:187:35 --> DeadTest.res:187:7\n  addValueReference DeadTest.res:187:40 --> DeadTest.res:187:8\n  addTypeReference DeadTest.res:187:7 --> DeadTest.res:182:40\n  addValueReference DeadTest.res:186:9 --> DeadTest.res:185:8\n  addRecordLabelDeclaration IR2.a DeadTest.res:191:26 path:+DeadTest.inlineRecord2\n  addRecordLabelDeclaration IR2.b DeadTest.res:191:34 path:+DeadTest.inlineRecord2\n  addVariantCaseDeclaration IR2 
DeadTest.res:191:21 path:+DeadTest.inlineRecord2\n  addRecordLabelDeclaration IR3.a DeadTest.res:193:34 path:+DeadTest.inlineRecord3\n  addRecordLabelDeclaration IR3.b DeadTest.res:193:42 path:+DeadTest.inlineRecord3\n  addVariantCaseDeclaration IR3 DeadTest.res:193:21 path:+DeadTest.inlineRecord3\n  addValueReference DeadTest.res:28:2 --> DeadTest.res:31:6\n  addValueReference DeadTest.res:36:2 --> DeadTest.res:39:6\n  addValueReference DeadTest.res:60:2 --> DeadTest.res:64:6\n  addValueReference DeadTest.res:61:2 --> DeadTest.res:63:6\n  addValueReference DeadTest.res:101:2 --> DeadTest.res:103:2\n  addTypeReference DeadTest.res:161:13 --> DeadTest.res:158:11\n  addTypeReference DeadTest.res:158:11 --> DeadTest.res:161:13\n  addTypeReference DeadTest.res:38:11 --> DeadTest.res:35:11\n  addTypeReference DeadTest.res:35:11 --> DeadTest.res:38:11\n  Scanning DeadTestBlacklist.cmt Source:DeadTestBlacklist.res\n  addValueDeclaration +x DeadTestBlacklist.res:1:4 path:+DeadTestBlacklist\n  Scanning DeadTestWithInterface.cmt Source:DeadTestWithInterface.res\n  addValueDeclaration +x DeadTestWithInterface.res:2:2 path:+DeadTestWithInterface.Ext_buffer\n  addValueDeclaration +x DeadTestWithInterface.res:4:6 path:+DeadTestWithInterface.Ext_buffer\n  addValueReference DeadTestWithInterface.res:2:2 --> DeadTestWithInterface.res:4:6\n  Interface 0\n  Scanning DeadTypeTest.cmt Source:DeadTypeTest.res\n  addValueDeclaration +a DeadTypeTest.res:4:4 path:+DeadTypeTest\n  addVariantCaseDeclaration A DeadTypeTest.res:2:2 path:+DeadTypeTest.t\n  addVariantCaseDeclaration B DeadTypeTest.res:3:2 path:+DeadTypeTest.t\n  addTypeReference DeadTypeTest.res:4:8 --> DeadTypeTest.res:2:2\n  addVariantCaseDeclaration OnlyInImplementation DeadTypeTest.res:7:2 path:+DeadTypeTest.deadType\n  addVariantCaseDeclaration OnlyInInterface DeadTypeTest.res:8:2 path:+DeadTypeTest.deadType\n  addVariantCaseDeclaration InBoth DeadTypeTest.res:9:2 path:+DeadTypeTest.deadType\n  addVariantCaseDeclaration 
InNeither DeadTypeTest.res:10:2 path:+DeadTypeTest.deadType\n  addValueDeclaration +_ DeadTypeTest.res:12:0 path:+DeadTypeTest\n  addTypeReference DeadTypeTest.res:12:8 --> DeadTypeTest.res:7:2\n  addValueDeclaration +_ DeadTypeTest.res:13:0 path:+DeadTypeTest\n  addTypeReference DeadTypeTest.res:13:8 --> DeadTypeTest.res:9:2\n  addRecordLabelDeclaration x DeadTypeTest.res:16:15 path:+DeadTypeTest.record\n  addRecordLabelDeclaration y DeadTypeTest.res:16:23 path:+DeadTypeTest.record\n  addRecordLabelDeclaration z DeadTypeTest.res:16:34 path:+DeadTypeTest.record\n  addValueReference DeadTypeTest.resi:4:0 --> DeadTypeTest.res:4:4\n  Scanning DeadTypeTest.cmti Source:DeadTypeTest.resi\n  addVariantCaseDeclaration A DeadTypeTest.resi:2:2 path:DeadTypeTest.t\n  extendTypeDependencies DeadTypeTest.res:2:2 --> DeadTypeTest.resi:2:2\n  extendTypeDependencies DeadTypeTest.resi:2:2 --> DeadTypeTest.res:2:2\n  addVariantCaseDeclaration B DeadTypeTest.resi:3:2 path:DeadTypeTest.t\n  extendTypeDependencies DeadTypeTest.res:3:2 --> DeadTypeTest.resi:3:2\n  extendTypeDependencies DeadTypeTest.resi:3:2 --> DeadTypeTest.res:3:2\n  addValueDeclaration +a DeadTypeTest.resi:4:0 path:DeadTypeTest\n  addVariantCaseDeclaration OnlyInImplementation DeadTypeTest.resi:7:2 path:DeadTypeTest.deadType\n  extendTypeDependencies DeadTypeTest.res:7:2 --> DeadTypeTest.resi:7:2\n  extendTypeDependencies DeadTypeTest.resi:7:2 --> DeadTypeTest.res:7:2\n  addVariantCaseDeclaration OnlyInInterface DeadTypeTest.resi:8:2 path:DeadTypeTest.deadType\n  extendTypeDependencies DeadTypeTest.res:8:2 --> DeadTypeTest.resi:8:2\n  extendTypeDependencies DeadTypeTest.resi:8:2 --> DeadTypeTest.res:8:2\n  addVariantCaseDeclaration InBoth DeadTypeTest.resi:9:2 path:DeadTypeTest.deadType\n  extendTypeDependencies DeadTypeTest.res:9:2 --> DeadTypeTest.resi:9:2\n  extendTypeDependencies DeadTypeTest.resi:9:2 --> DeadTypeTest.res:9:2\n  addVariantCaseDeclaration InNeither DeadTypeTest.resi:10:2 
path:DeadTypeTest.deadType\n  extendTypeDependencies DeadTypeTest.res:10:2 --> DeadTypeTest.resi:10:2\n  extendTypeDependencies DeadTypeTest.resi:10:2 --> DeadTypeTest.res:10:2\n  addTypeReference DeadTypeTest.res:10:2 --> DeadTypeTest.resi:10:2\n  addTypeReference DeadTypeTest.resi:10:2 --> DeadTypeTest.res:10:2\n  addTypeReference DeadTypeTest.res:9:2 --> DeadTypeTest.resi:9:2\n  addTypeReference DeadTypeTest.resi:9:2 --> DeadTypeTest.res:9:2\n  addTypeReference DeadTypeTest.res:8:2 --> DeadTypeTest.resi:8:2\n  addTypeReference DeadTypeTest.resi:8:2 --> DeadTypeTest.res:8:2\n  addTypeReference DeadTypeTest.res:7:2 --> DeadTypeTest.resi:7:2\n  addTypeReference DeadTypeTest.resi:7:2 --> DeadTypeTest.res:7:2\n  addTypeReference DeadTypeTest.res:3:2 --> DeadTypeTest.resi:3:2\n  addTypeReference DeadTypeTest.resi:3:2 --> DeadTypeTest.res:3:2\n  addTypeReference DeadTypeTest.res:2:2 --> DeadTypeTest.resi:2:2\n  addTypeReference DeadTypeTest.resi:2:2 --> DeadTypeTest.res:2:2\n  Scanning DeadValueTest.cmt Source:DeadValueTest.res\n  addValueDeclaration +valueAlive DeadValueTest.res:1:4 path:+DeadValueTest\n  addValueDeclaration +valueDead DeadValueTest.res:2:4 path:+DeadValueTest\n  addValueDeclaration +valueOnlyInImplementation DeadValueTest.res:4:4 path:+DeadValueTest\n  addValueDeclaration +subList DeadValueTest.res:6:8 path:+DeadValueTest\n  addValueDeclaration +tail DeadValueTest.res:10:8 path:+DeadValueTest\n  addValueReference DeadValueTest.res:10:8 --> DeadValueTest.res:6:19\n  addValueReference DeadValueTest.res:10:8 --> DeadValueTest.res:6:22\n  addValueReference DeadValueTest.res:10:8 --> DeadValueTest.res:9:15\n  addValueReference DeadValueTest.res:10:8 --> DeadValueTest.res:6:8\n  addValueReference DeadValueTest.res:10:8 --> DeadValueTest.res:6:22\n  addValueReference DeadValueTest.res:6:8 --> DeadValueTest.res:9:9\n  addValueReference DeadValueTest.res:6:8 --> DeadValueTest.res:10:8\n  addValueReference DeadValueTest.res:6:8 --> DeadValueTest.res:10:8\n  
addValueReference DeadValueTest.res:6:8 --> DeadValueTest.res:6:19\n  addValueReference DeadValueTest.res:6:8 --> DeadValueTest.res:6:25\n  addValueReference DeadValueTest.resi:1:0 --> DeadValueTest.res:1:4\n  addValueReference DeadValueTest.resi:2:0 --> DeadValueTest.res:2:4\n  Scanning DeadValueTest.cmti Source:DeadValueTest.resi\n  addValueDeclaration +valueAlive DeadValueTest.resi:1:0 path:DeadValueTest\n  addValueDeclaration +valueDead DeadValueTest.resi:2:0 path:DeadValueTest\n  Scanning Docstrings.cmt Source:Docstrings.res\n  addValueDeclaration +flat Docstrings.res:2:4 path:+Docstrings\n  addValueDeclaration +signMessage Docstrings.res:12:4 path:+Docstrings\n  addValueDeclaration +one Docstrings.res:15:4 path:+Docstrings\n  addValueDeclaration +two Docstrings.res:18:4 path:+Docstrings\n  addValueDeclaration +tree Docstrings.res:21:4 path:+Docstrings\n  addValueDeclaration +oneU Docstrings.res:24:4 path:+Docstrings\n  addValueDeclaration +twoU Docstrings.res:27:4 path:+Docstrings\n  addValueDeclaration +treeU Docstrings.res:30:4 path:+Docstrings\n  addValueDeclaration +useParam Docstrings.res:33:4 path:+Docstrings\n  addValueDeclaration +useParamU Docstrings.res:36:4 path:+Docstrings\n  addValueDeclaration +unnamed1 Docstrings.res:39:4 path:+Docstrings\n  addValueDeclaration +unnamed1U Docstrings.res:42:4 path:+Docstrings\n  addValueDeclaration +unnamed2 Docstrings.res:45:4 path:+Docstrings\n  addValueDeclaration +unnamed2U Docstrings.res:48:4 path:+Docstrings\n  addValueDeclaration +grouped Docstrings.res:51:4 path:+Docstrings\n  addValueDeclaration +unitArgWithoutConversion Docstrings.res:54:4 path:+Docstrings\n  addValueDeclaration +unitArgWithoutConversionU Docstrings.res:57:4 path:+Docstrings\n  addValueDeclaration +unitArgWithConversion Docstrings.res:64:4 path:+Docstrings\n  addValueDeclaration +unitArgWithConversionU Docstrings.res:67:4 path:+Docstrings\n  addValueReference Docstrings.res:12:4 --> Docstrings.res:12:21\n  addValueReference 
Docstrings.res:12:4 --> Docstrings.res:12:30\n  addValueReference Docstrings.res:15:4 --> Docstrings.res:15:10\n  addValueReference Docstrings.res:18:4 --> Docstrings.res:18:11\n  addValueReference Docstrings.res:18:4 --> Docstrings.res:18:14\n  addValueReference Docstrings.res:21:4 --> Docstrings.res:21:12\n  addValueReference Docstrings.res:21:4 --> Docstrings.res:21:15\n  addValueReference Docstrings.res:21:4 --> Docstrings.res:21:18\n  addValueReference Docstrings.res:24:4 --> Docstrings.res:24:14\n  addValueReference Docstrings.res:27:4 --> Docstrings.res:27:14\n  addValueReference Docstrings.res:27:4 --> Docstrings.res:27:17\n  addValueReference Docstrings.res:30:4 --> Docstrings.res:30:15\n  addValueReference Docstrings.res:30:4 --> Docstrings.res:30:18\n  addValueReference Docstrings.res:30:4 --> Docstrings.res:30:21\n  addValueReference Docstrings.res:33:4 --> Docstrings.res:33:15\n  addValueReference Docstrings.res:36:4 --> Docstrings.res:36:19\n  addValueReference Docstrings.res:51:4 --> Docstrings.res:51:15\n  addValueReference Docstrings.res:51:4 --> Docstrings.res:51:19\n  addValueReference Docstrings.res:51:4 --> Docstrings.res:51:23\n  addValueReference Docstrings.res:51:4 --> Docstrings.res:51:26\n  addValueReference Docstrings.res:51:4 --> Docstrings.res:51:29\n  addValueReference Docstrings.res:51:4 --> Docstrings.res:51:32\n  addVariantCaseDeclaration A Docstrings.res:60:2 path:+Docstrings.t\n  addVariantCaseDeclaration B Docstrings.res:61:2 path:+Docstrings.t\n  addTypeReference Docstrings.res:64:34 --> Docstrings.res:60:2\n  addTypeReference Docstrings.res:67:39 --> Docstrings.res:60:2\n  Scanning DynamicallyLoadedComponent.cmt Source:DynamicallyLoadedComponent.res\n  addValueDeclaration +make DynamicallyLoadedComponent.res:2:4 path:+DynamicallyLoadedComponent\n  addValueReference DynamicallyLoadedComponent.res:2:32 --> DynamicallyLoadedComponent.res:2:12\n  addValueReference DynamicallyLoadedComponent.res:2:19 --> React.res:7:0\n  Scanning 
EmptyArray.cmt Source:EmptyArray.res\n  addValueDeclaration +make EmptyArray.res:5:6 path:+EmptyArray.Z\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: EmptyArray.res:6:5\n  addValueReference EmptyArray.res:6:5 --> ReactDOMRe.res:8:0\n  addValueReference EmptyArray.res:10:9 --> EmptyArray.res:5:6\n  addValueReference EmptyArray.res:10:9 --> React.res:18:0\n  Scanning ErrorHandler.cmt Source:ErrorHandler.res\n  addValueDeclaration +notify ErrorHandler.res:7:6 path:+ErrorHandler.Make\n  addValueDeclaration +x ErrorHandler.res:12:4 path:+ErrorHandler\n  addValueReference ErrorHandler.res:7:6 --> ErrorHandler.res:7:15\n  addValueReference ErrorHandler.res:7:6 --> ErrorHandler.res:3:2\n  addValueReference ErrorHandler.resi:3:2 --> ErrorHandler.res:3:2\n  addValueReference ErrorHandler.res:3:2 --> ErrorHandler.resi:3:2\n  addValueReference ErrorHandler.resi:7:2 --> ErrorHandler.res:7:6\n  addValueReference ErrorHandler.resi:10:0 --> ErrorHandler.res:12:4\n  Scanning ErrorHandler.cmti Source:ErrorHandler.resi\n  addValueDeclaration +notify ErrorHandler.resi:7:2 path:ErrorHandler.Make\n  addValueDeclaration +x ErrorHandler.resi:10:0 path:ErrorHandler\n  Scanning EverythingLiveHere.cmt Source:EverythingLiveHere.res\n  addValueDeclaration +x EverythingLiveHere.res:1:4 path:+EverythingLiveHere\n  addValueDeclaration +y EverythingLiveHere.res:3:4 path:+EverythingLiveHere\n  addValueDeclaration +z EverythingLiveHere.res:5:4 path:+EverythingLiveHere\n  Scanning FC.cmt Source:FC.res\n  addValueDeclaration +foo FC.res:6:4 path:+FC\n  addValueReference FC.res:7:13 --> FC.res:6:11\n  addValueReference FC.res:6:4 --> FC.res:2:2\n  addValueReference FC.res:6:4 --> FC.res:7:6\n  addValueReference FC.res:11:7 --> FC.res:6:4\n  Scanning FirstClassModules.cmt Source:FirstClassModules.res\n  addValueDeclaration +y FirstClassModules.res:23:6 path:+FirstClassModules.M\n  addValueDeclaration +k FirstClassModules.res:29:8 
path:+FirstClassModules.M.InnerModule2\n  addValueDeclaration +k3 FirstClassModules.res:33:8 path:+FirstClassModules.M.InnerModule3\n  addValueDeclaration +u FirstClassModules.res:40:8 path:+FirstClassModules.M.Z\n  addValueDeclaration +x FirstClassModules.res:44:6 path:+FirstClassModules.M\n  addValueDeclaration +firstClassModule FirstClassModules.res:51:4 path:+FirstClassModules\n  addValueDeclaration +testConvert FirstClassModules.res:54:4 path:+FirstClassModules\n  addValueDeclaration +someFunctorAsFunction FirstClassModules.res:65:4 path:+FirstClassModules\n  addValueReference FirstClassModules.res:33:8 --> FirstClassModules.res:33:13\n  addValueReference FirstClassModules.res:54:4 --> FirstClassModules.res:54:19\n  addValueDeclaration +ww FirstClassModules.res:61:6 path:+FirstClassModules.SomeFunctor\n  addValueReference FirstClassModules.res:61:6 --> FirstClassModules.res:20:2\n  addValueReference FirstClassModules.res:65:4 --> FirstClassModules.res:65:29\n  addValueReference FirstClassModules.res:2:2 --> FirstClassModules.res:44:6\n  addValueReference FirstClassModules.res:4:2 --> FirstClassModules.res:43:2\n  addValueReference FirstClassModules.res:10:4 --> FirstClassModules.res:29:8\n  addValueReference FirstClassModules.res:14:4 --> FirstClassModules.res:33:8\n  addValueReference FirstClassModules.res:17:4 --> FirstClassModules.res:37:4\n  addValueReference FirstClassModules.res:37:4 --> FirstClassModules.res:17:4\n  addValueReference FirstClassModules.res:37:4 --> FirstClassModules.res:40:8\n  addValueReference FirstClassModules.res:20:2 --> FirstClassModules.res:23:6\n  addValueReference FirstClassModules.res:57:2 --> FirstClassModules.res:61:6\n  Scanning FirstClassModulesInterface.cmt Source:FirstClassModulesInterface.res\n  addValueDeclaration +r FirstClassModulesInterface.res:6:4 path:+FirstClassModulesInterface\n  addRecordLabelDeclaration x FirstClassModulesInterface.res:2:2 path:+FirstClassModulesInterface.record\n  addRecordLabelDeclaration y 
FirstClassModulesInterface.res:3:2 path:+FirstClassModulesInterface.record\n  addValueReference FirstClassModulesInterface.resi:7:0 --> FirstClassModulesInterface.res:6:4\n  addValueReference FirstClassModulesInterface.resi:11:2 --> FirstClassModulesInterface.res:9:2\n  addValueReference FirstClassModulesInterface.res:9:2 --> FirstClassModulesInterface.resi:11:2\n  Scanning FirstClassModulesInterface.cmti Source:FirstClassModulesInterface.resi\n  addRecordLabelDeclaration x FirstClassModulesInterface.resi:3:2 path:FirstClassModulesInterface.record\n  extendTypeDependencies FirstClassModulesInterface.res:2:2 --> FirstClassModulesInterface.resi:3:2\n  extendTypeDependencies FirstClassModulesInterface.resi:3:2 --> FirstClassModulesInterface.res:2:2\n  addRecordLabelDeclaration y FirstClassModulesInterface.resi:4:2 path:FirstClassModulesInterface.record\n  extendTypeDependencies FirstClassModulesInterface.res:3:2 --> FirstClassModulesInterface.resi:4:2\n  extendTypeDependencies FirstClassModulesInterface.resi:4:2 --> FirstClassModulesInterface.res:3:2\n  addValueDeclaration +r FirstClassModulesInterface.resi:7:0 path:FirstClassModulesInterface\n  addTypeReference FirstClassModulesInterface.res:3:2 --> FirstClassModulesInterface.resi:4:2\n  addTypeReference FirstClassModulesInterface.resi:4:2 --> FirstClassModulesInterface.res:3:2\n  addTypeReference FirstClassModulesInterface.res:2:2 --> FirstClassModulesInterface.resi:3:2\n  addTypeReference FirstClassModulesInterface.resi:3:2 --> FirstClassModulesInterface.res:2:2\n  Scanning Hooks.cmt Source:Hooks.res\n  addValueDeclaration +make Hooks.res:4:4 path:+Hooks\n  addValueDeclaration +default Hooks.res:25:4 path:+Hooks\n  addValueDeclaration +anotherComponent Hooks.res:28:4 path:+Hooks\n  addValueDeclaration +make Hooks.res:35:6 path:+Hooks.Inner\n  addValueDeclaration +anotherComponent Hooks.res:38:6 path:+Hooks.Inner\n  addValueDeclaration +make Hooks.res:42:8 path:+Hooks.Inner.Inner2\n  addValueDeclaration 
+anotherComponent Hooks.res:45:8 path:+Hooks.Inner.Inner2\n  addValueDeclaration +make Hooks.res:52:6 path:+Hooks.NoProps\n  addValueDeclaration +functionWithRenamedArgs Hooks.res:58:4 path:+Hooks\n  addValueDeclaration +componentWithRenamedArgs Hooks.res:64:4 path:+Hooks\n  addValueDeclaration +makeWithRef Hooks.res:70:4 path:+Hooks\n  addValueDeclaration +testForwardRef Hooks.res:80:4 path:+Hooks\n  addValueDeclaration +input Hooks.res:85:4 path:+Hooks\n  addValueDeclaration +polymorphicComponent Hooks.res:100:4 path:+Hooks\n  addValueDeclaration +functionReturningReactElement Hooks.res:103:4 path:+Hooks\n  addValueDeclaration +make Hooks.res:107:6 path:+Hooks.RenderPropRequiresConversion\n  addValueDeclaration +aComponentWithChildren Hooks.res:114:4 path:+Hooks\n  addRecordLabelDeclaration name Hooks.res:1:16 path:+Hooks.vehicle\n  addValueReference Hooks.res:5:26 --> React.res:167:0\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: Hooks.res:7:3\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: Hooks.res:8:5\n  addTypeReference Hooks.res:10:29 --> Hooks.res:1:16\n  addValueReference Hooks.res:10:29 --> Hooks.res:4:12\n  addValueReference Hooks.res:10:76 --> Hooks.res:5:7\n  addValueReference Hooks.res:9:7 --> React.res:7:0\n  addValueReference Hooks.res:8:5 --> ReactDOMRe.res:8:0\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames:props argNamesMaybe: Hooks.res:13:5\n  addValueReference Hooks.res:13:40 --> Hooks.res:5:7\n  addValueReference Hooks.res:13:26 --> Hooks.res:5:14\n  addValueReference Hooks.res:13:54 --> React.res:7:0\n  addValueReference Hooks.res:13:5 --> ReactDOMRe.res:8:0\n  addValueReference Hooks.res:14:5 --> ImportHooks.res:13:0\n  DeadOptionalArgs.addReferences ImportHooks.makeProps called with optional argNames: argNamesMaybe: Hooks.res:14:5\n  addValueReference 
Hooks.res:14:5 --> React.res:3:0\n  addValueReference Hooks.res:14:63 --> React.res:7:0\n  addValueReference Hooks.res:14:5 --> ImportHooks.res:13:0\n  addValueReference Hooks.res:15:7 --> React.res:7:0\n  addValueReference Hooks.res:15:32 --> React.res:7:0\n  addValueReference Hooks.res:14:5 --> React.res:24:0\n  addValueReference Hooks.res:17:5 --> ImportHookDefault.res:6:0\n  DeadOptionalArgs.addReferences ImportHookDefault.makeProps called with optional argNames: argNamesMaybe: Hooks.res:17:5\n  addValueReference Hooks.res:17:5 --> React.res:3:0\n  addValueReference Hooks.res:18:61 --> React.res:7:0\n  addValueReference Hooks.res:17:5 --> ImportHookDefault.res:6:0\n  addValueReference Hooks.res:19:7 --> React.res:7:0\n  addValueReference Hooks.res:19:32 --> React.res:7:0\n  addValueReference Hooks.res:17:5 --> React.res:24:0\n  addValueReference Hooks.res:7:3 --> ReactDOMRe.res:8:0\n  addValueReference Hooks.res:25:4 --> Hooks.res:4:4\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: Hooks.res:30:3\n  addTypeReference Hooks.res:30:41 --> Hooks.res:1:16\n  addValueReference Hooks.res:30:41 --> Hooks.res:28:24\n  addValueReference Hooks.res:30:9 --> React.res:7:0\n  addValueReference Hooks.res:30:3 --> ReactDOMRe.res:8:0\n  addValueReference Hooks.res:29:2 --> Hooks.res:28:34\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: Hooks.res:35:28\n  addTypeReference Hooks.res:35:66 --> Hooks.res:1:16\n  addValueReference Hooks.res:35:66 --> Hooks.res:35:14\n  addValueReference Hooks.res:35:34 --> React.res:7:0\n  addValueReference Hooks.res:35:28 --> ReactDOMRe.res:8:0\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: Hooks.res:38:40\n  addTypeReference Hooks.res:38:78 --> Hooks.res:1:16\n  addValueReference Hooks.res:38:78 --> Hooks.res:38:26\n  addValueReference 
Hooks.res:38:46 --> React.res:7:0\n  addValueReference Hooks.res:38:40 --> ReactDOMRe.res:8:0\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: Hooks.res:42:30\n  addTypeReference Hooks.res:42:68 --> Hooks.res:1:16\n  addValueReference Hooks.res:42:68 --> Hooks.res:42:16\n  addValueReference Hooks.res:42:36 --> React.res:7:0\n  addValueReference Hooks.res:42:30 --> ReactDOMRe.res:8:0\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: Hooks.res:46:7\n  addTypeReference Hooks.res:46:45 --> Hooks.res:1:16\n  addValueReference Hooks.res:46:45 --> Hooks.res:45:28\n  addValueReference Hooks.res:46:13 --> React.res:7:0\n  addValueReference Hooks.res:46:7 --> ReactDOMRe.res:8:0\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: Hooks.res:52:20\n  addValueReference Hooks.res:52:25 --> React.res:3:0\n  addValueReference Hooks.res:52:20 --> ReactDOMRe.res:8:0\n  addTypeReference Hooks.res:60:2 --> Hooks.res:1:16\n  addValueReference Hooks.res:58:4 --> Hooks.res:58:31\n  addTypeReference Hooks.res:60:14 --> Hooks.res:1:16\n  addValueReference Hooks.res:58:4 --> Hooks.res:58:37\n  addValueReference Hooks.res:58:4 --> Hooks.res:58:31\n  addValueReference Hooks.res:58:4 --> Hooks.res:58:45\n  addTypeReference Hooks.res:66:15 --> Hooks.res:1:16\n  addValueReference Hooks.res:66:15 --> Hooks.res:64:32\n  addTypeReference Hooks.res:66:27 --> Hooks.res:1:16\n  addValueReference Hooks.res:66:27 --> Hooks.res:64:38\n  addValueReference Hooks.res:66:2 --> React.res:7:0\n  addValueReference Hooks.res:65:6 --> Hooks.res:64:32\n  addValueReference Hooks.res:65:2 --> Hooks.res:64:46\n  addValueDeclaration +_ Hooks.res:71:2 path:+Hooks\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames:props argNamesMaybe: Hooks.res:74:20\n  addValueReference 
Hooks.res:74:52 --> Hooks.res:74:11\n  addValueReference Hooks.res:74:32 --> ReactDOM.res:47:2\n  addTypeReference Hooks.res:74:73 --> Hooks.res:1:16\n  addValueReference Hooks.res:74:73 --> Hooks.res:70:19\n  addValueReference Hooks.res:74:60 --> React.res:7:0\n  addValueReference Hooks.res:74:20 --> ReactDOMRe.res:8:0\n  addValueReference Hooks.res:75:14 --> React.res:3:0\n  addValueReference Hooks.res:73:11 --> Hooks.res:72:2\n  addValueReference Hooks.res:80:4 --> Hooks.res:70:4\n  addValueReference Hooks.res:80:4 --> React.res:90:0\n  addRecordLabelDeclaration x Hooks.res:82:10 path:+Hooks.r\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames:props argNamesMaybe: Hooks.res:85:47\n  addValueReference Hooks.res:85:66 --> Hooks.res:85:38\n  addTypeReference Hooks.res:85:87 --> Hooks.res:82:10\n  addValueReference Hooks.res:85:87 --> Hooks.res:85:30\n  addValueReference Hooks.res:85:74 --> React.res:7:0\n  addValueReference Hooks.res:85:47 --> ReactDOMRe.res:8:0\n  addValueReference Hooks.res:85:4 --> React.res:90:0\n  addTypeReference Hooks.res:100:58 --> Hooks.res:1:16\n  addValueReference Hooks.res:100:58 --> Hooks.res:100:35\n  addValueReference Hooks.res:100:45 --> React.res:7:0\n  addValueReference Hooks.res:103:60 --> Hooks.res:103:37\n  addValueReference Hooks.res:103:47 --> React.res:7:0\n  addValueDeclaration +car Hooks.res:108:8 path:+Hooks.RenderPropRequiresConversion\n  addValueReference Hooks.res:109:30 --> Hooks.res:108:8\n  addValueReference Hooks.res:109:18 --> Hooks.res:109:18\n  addValueReference Hooks.res:109:4 --> Hooks.res:107:14\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic called with optional argNames: argNamesMaybe: Hooks.res:115:3\n  addTypeReference Hooks.res:115:41 --> Hooks.res:1:16\n  addValueReference Hooks.res:115:41 --> Hooks.res:114:30\n  addValueReference Hooks.res:115:9 --> React.res:7:0\n  DeadOptionalArgs.addReferences ReactDOMRe.createDOMElementVariadic 
called with optional argNames: argNamesMaybe: Hooks.res:115:57\n  addValueReference Hooks.res:115:62 --> Hooks.res:114:40\n  addValueReference Hooks.res:115:57 --> ReactDOMRe.res:8:0\n  addValueReference Hooks.res:115:3 --> ReactDOMRe.res:8:0\n  Scanning IgnoreInterface.cmt Source:IgnoreInterface.res\n  Scanning IgnoreInterface.cmti Source:IgnoreInterface.resi\n  Scanning ImmutableArray.cmt Source:ImmutableArray.res\n  addValueDeclaration +fromArray ImmutableArray.res:14:6 path:+ImmutableArray.Array\n  addValueDeclaration +toArray ImmutableArray.res:16:6 path:+ImmutableArray.Array\n  addValueDeclaration +length ImmutableArray.res:20:6 path:+ImmutableArray.Array\n  addValueDeclaration +size ImmutableArray.res:22:6 path:+ImmutableArray.Array\n  addValueDeclaration +get ImmutableArray.res:24:6 path:+ImmutableArray.Array\n  addValueDeclaration +getExn ImmutableArray.res:26:6 path:+ImmutableArray.Array\n  addValueDeclaration +getUnsafe ImmutableArray.res:28:6 path:+ImmutableArray.Array\n  addValueDeclaration +getUndefined ImmutableArray.res:30:6 path:+ImmutableArray.Array\n  addValueDeclaration +shuffle ImmutableArray.res:32:6 path:+ImmutableArray.Array\n  addValueDeclaration +reverse ImmutableArray.res:34:6 path:+ImmutableArray.Array\n  addValueDeclaration +makeUninitialized ImmutableArray.res:36:6 path:+ImmutableArray.Array\n  addValueDeclaration +makeUninitializedUnsafe ImmutableArray.res:38:6 path:+ImmutableArray.Array\n  addValueDeclaration +make ImmutableArray.res:40:6 path:+ImmutableArray.Array\n  addValueDeclaration +range ImmutableArray.res:42:6 path:+ImmutableArray.Array\n  addValueDeclaration +rangeBy ImmutableArray.res:44:6 path:+ImmutableArray.Array\n  addValueDeclaration +makeByU ImmutableArray.res:46:6 path:+ImmutableArray.Array\n  addValueDeclaration +makeBy ImmutableArray.res:47:6 path:+ImmutableArray.Array\n  addValueDeclaration +makeByAndShuffleU ImmutableArray.res:49:6 path:+ImmutableArray.Array\n  addValueDeclaration +makeByAndShuffle 
ImmutableArray.res:50:6 path:+ImmutableArray.Array\n  addValueDeclaration +zip ImmutableArray.res:52:6 path:+ImmutableArray.Array\n  addValueDeclaration +zipByU ImmutableArray.res:54:6 path:+ImmutableArray.Array\n  addValueDeclaration +zipBy ImmutableArray.res:55:6 path:+ImmutableArray.Array\n  addValueDeclaration +unzip ImmutableArray.res:57:6 path:+ImmutableArray.Array\n  addValueDeclaration +concat ImmutableArray.res:59:6 path:+ImmutableArray.Array\n  addValueDeclaration +concatMany ImmutableArray.res:61:6 path:+ImmutableArray.Array\n  addValueDeclaration +slice ImmutableArray.res:63:6 path:+ImmutableArray.Array\n  addValueDeclaration +sliceToEnd ImmutableArray.res:65:6 path:+ImmutableArray.Array\n  addValueDeclaration +copy ImmutableArray.res:67:6 path:+ImmutableArray.Array\n  addValueDeclaration +forEachU ImmutableArray.res:69:6 path:+ImmutableArray.Array\n  addValueDeclaration +forEach ImmutableArray.res:70:6 path:+ImmutableArray.Array\n  addValueDeclaration +mapU ImmutableArray.res:72:6 path:+ImmutableArray.Array\n  addValueDeclaration +map ImmutableArray.res:73:6 path:+ImmutableArray.Array\n  addValueDeclaration +keepWithIndexU ImmutableArray.res:75:6 path:+ImmutableArray.Array\n  addValueDeclaration +keepWithIndex ImmutableArray.res:76:6 path:+ImmutableArray.Array\n  addValueDeclaration +keepMapU ImmutableArray.res:78:6 path:+ImmutableArray.Array\n  addValueDeclaration +keepMap ImmutableArray.res:79:6 path:+ImmutableArray.Array\n  addValueDeclaration +forEachWithIndexU ImmutableArray.res:81:6 path:+ImmutableArray.Array\n  addValueDeclaration +forEachWithIndex ImmutableArray.res:82:6 path:+ImmutableArray.Array\n  addValueDeclaration +mapWithIndexU ImmutableArray.res:84:6 path:+ImmutableArray.Array\n  addValueDeclaration +mapWithIndex ImmutableArray.res:85:6 path:+ImmutableArray.Array\n  addValueDeclaration +partitionU ImmutableArray.res:87:6 path:+ImmutableArray.Array\n  addValueDeclaration +partition ImmutableArray.res:88:6 path:+ImmutableArray.Array\n  
addValueDeclaration +reduceU ImmutableArray.res:90:6 path:+ImmutableArray.Array\n  addValueDeclaration +reduce ImmutableArray.res:91:6 path:+ImmutableArray.Array\n  addValueDeclaration +reduceReverseU ImmutableArray.res:93:6 path:+ImmutableArray.Array\n  addValueDeclaration +reduceReverse ImmutableArray.res:94:6 path:+ImmutableArray.Array\n  addValueDeclaration +reduceReverse2U ImmutableArray.res:96:6 path:+ImmutableArray.Array\n  addValueDeclaration +reduceReverse2 ImmutableArray.res:97:6 path:+ImmutableArray.Array\n  addValueDeclaration +someU ImmutableArray.res:99:6 path:+ImmutableArray.Array\n  addValueDeclaration +some ImmutableArray.res:100:6 path:+ImmutableArray.Array\n  addValueDeclaration +everyU ImmutableArray.res:102:6 path:+ImmutableArray.Array\n  addValueDeclaration +every ImmutableArray.res:103:6 path:+ImmutableArray.Array\n  addValueDeclaration +every2U ImmutableArray.res:105:6 path:+ImmutableArray.Array\n  addValueDeclaration +every2 ImmutableArray.res:106:6 path:+ImmutableArray.Array\n  addValueDeclaration +some2U ImmutableArray.res:108:6 path:+ImmutableArray.Array\n  addValueDeclaration +some2 ImmutableArray.res:109:6 path:+ImmutableArray.Array\n  addValueDeclaration +cmpU ImmutableArray.res:111:6 path:+ImmutableArray.Array\n  addValueDeclaration +cmp ImmutableArray.res:112:6 path:+ImmutableArray.Array\n  addValueDeclaration +eqU ImmutableArray.res:114:6 path:+ImmutableArray.Array\n  addValueDeclaration +eq ImmutableArray.res:115:6 path:+ImmutableArray.Array\n  addValueDeclaration +fromArray ImmutableArray.res:14:6 path:+ImmutableArray\n  addValueDeclaration +toArray ImmutableArray.res:16:6 path:+ImmutableArray\n  addValueDeclaration +length ImmutableArray.res:20:6 path:+ImmutableArray\n  addValueDeclaration +size ImmutableArray.res:22:6 path:+ImmutableArray\n  addValueDeclaration +get ImmutableArray.res:24:6 path:+ImmutableArray\n  addValueDeclaration +getExn ImmutableArray.res:26:6 path:+ImmutableArray\n  addValueDeclaration +getUnsafe 
ImmutableArray.res:28:6 path:+ImmutableArray\n  addValueDeclaration +getUndefined ImmutableArray.res:30:6 path:+ImmutableArray\n  addValueDeclaration +shuffle ImmutableArray.res:32:6 path:+ImmutableArray\n  addValueDeclaration +reverse ImmutableArray.res:34:6 path:+ImmutableArray\n  addValueDeclaration +makeUninitialized ImmutableArray.res:36:6 path:+ImmutableArray\n  addValueDeclaration +makeUninitializedUnsafe ImmutableArray.res:38:6 path:+ImmutableArray\n  addValueDeclaration +make ImmutableArray.res:40:6 path:+ImmutableArray\n  addValueDeclaration +range ImmutableArray.res:42:6 path:+ImmutableArray\n  addValueDeclaration +rangeBy ImmutableArray.res:44:6 path:+ImmutableArray\n  addValueDeclaration +makeByU ImmutableArray.res:46:6 path:+ImmutableArray\n  addValueDeclaration +makeBy ImmutableArray.res:47:6 path:+ImmutableArray\n  addValueDeclaration +makeByAndShuffleU ImmutableArray.res:49:6 path:+ImmutableArray\n  addValueDeclaration +makeByAndShuffle ImmutableArray.res:50:6 path:+ImmutableArray\n  addValueDeclaration +zip ImmutableArray.res:52:6 path:+ImmutableArray\n  addValueDeclaration +zipByU ImmutableArray.res:54:6 path:+ImmutableArray\n  addValueDeclaration +zipBy ImmutableArray.res:55:6 path:+ImmutableArray\n  addValueDeclaration +unzip ImmutableArray.res:57:6 path:+ImmutableArray\n  addValueDeclaration +concat ImmutableArray.res:59:6 path:+ImmutableArray\n  addValueDeclaration +concatMany ImmutableArray.res:61:6 path:+ImmutableArray\n  addValueDeclaration +slice ImmutableArray.res:63:6 path:+ImmutableArray\n  addValueDeclaration +sliceToEnd ImmutableArray.res:65:6 path:+ImmutableArray\n  addValueDeclaration +copy ImmutableArray.res:67:6 path:+ImmutableArray\n  addValueDeclaration +forEachU ImmutableArray.res:69:6 path:+ImmutableArray\n  addValueDeclaration +forEach ImmutableArray.res:70:6 path:+ImmutableArray\n  addValueDeclaration +mapU ImmutableArray.res:72:6 path:+ImmutableArray\n  addValueDeclaration +map ImmutableArray.res:73:6 
path:+ImmutableArray\n  addValueDeclaration +keepWithIndexU ImmutableArray.res:75:6 path:+ImmutableArray\n  addValueDeclaration +keepWithIndex ImmutableArray.res:76:6 path:+ImmutableArray\n  addValueDeclaration +keepMapU ImmutableArray.res:78:6 path:+ImmutableArray\n  addValueDeclaration +keepMap ImmutableArray.res:79:6 path:+ImmutableArray\n  addValueDeclaration +forEachWithIndexU ImmutableArray.res:81:6 path:+ImmutableArray\n  addValueDeclaration +forEachWithIndex ImmutableArray.res:82:6 path:+ImmutableArray\n  addValueDeclaration +mapWithIndexU ImmutableArray.res:84:6 path:+ImmutableArray\n  addValueDeclaration +mapWithIndex ImmutableArray.res:85:6 path:+ImmutableArray\n  addValueDeclaration +partitionU ImmutableArray.res:87:6 path:+ImmutableArray\n  addValueDeclaration +partition ImmutableArray.res:88:6 path:+ImmutableArray\n  addValueDeclaration +reduceU ImmutableArray.res:90:6 path:+ImmutableArray\n  addValueDeclaration +reduce ImmutableArray.res:91:6 path:+ImmutableArray\n  addValueDeclaration +reduceReverseU ImmutableArray.res:93:6 path:+ImmutableArray\n  addValueDeclaration +reduceReverse ImmutableArray.res:94:6 path:+ImmutableArray\n  addValueDeclaration +reduceReverse2U ImmutableArray.res:96:6 path:+ImmutableArray\n  addValueDeclaration +reduceReverse2 ImmutableArray.res:97:6 path:+ImmutableArray\n  addValueDeclaration +someU ImmutableArray.res:99:6 path:+ImmutableArray\n  addValueDeclaration +some ImmutableArray.res:100:6 path:+ImmutableArray\n  addValueDeclaration +everyU ImmutableArray.res:102:6 path:+ImmutableArray\n  addValueDeclaration +every ImmutableArray.res:103:6 path:+ImmutableArray\n  addValueDeclaration +every2U ImmutableArray.res:105:6 path:+ImmutableArray\n  addValueDeclaration +every2 ImmutableArray.res:106:6 path:+ImmutableArray\n  addValueDeclaration +some2U ImmutableArray.res:108:6 path:+ImmutableArray\n  addValueDeclaration +some2 ImmutableArray.res:109:6 path:+ImmutableArray\n  addValueDeclaration +cmpU ImmutableArray.res:111:6 
path:+ImmutableArray\n  addValueDeclaration +cmp ImmutableArray.res:112:6 path:+ImmutableArray\n  addValueDeclaration +eqU ImmutableArray.res:114:6 path:+ImmutableArray\n  addValueDeclaration +eq ImmutableArray.res:115:6 path:+ImmutableArray\n  addValueReference ImmutableArray.res:14:6 --> ImmutableArray.res:14:18\n  addValueReference ImmutableArray.res:14:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:16:6 --> ImmutableArray.res:16:16\n  addValueReference ImmutableArray.res:16:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:20:6 --> ImmutableArray.res:20:15\n  addValueReference ImmutableArray.res:20:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:22:6 --> ImmutableArray.res:22:13\n  addValueReference ImmutableArray.res:22:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:24:6 --> ImmutableArray.res:24:13\n  addValueReference ImmutableArray.res:24:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:24:6 --> ImmutableArray.res:24:16\n  addValueReference ImmutableArray.res:26:6 --> ImmutableArray.res:26:16\n  addValueReference ImmutableArray.res:26:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:26:6 --> ImmutableArray.res:26:19\n  addValueReference ImmutableArray.res:28:6 --> ImmutableArray.res:28:19\n  addValueReference ImmutableArray.res:28:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:28:6 --> ImmutableArray.res:28:22\n  addValueReference ImmutableArray.res:30:6 --> ImmutableArray.res:30:22\n  addValueReference ImmutableArray.res:30:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:30:6 --> ImmutableArray.res:30:25\n  addValueReference ImmutableArray.res:32:6 --> ImmutableArray.res:32:16\n  addValueReference ImmutableArray.res:32:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:32:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:34:6 --> ImmutableArray.res:34:16\n 
 addValueReference ImmutableArray.res:34:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:34:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:36:6 --> ImmutableArray.res:36:26\n  addValueReference ImmutableArray.res:36:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:38:6 --> ImmutableArray.res:38:32\n  addValueReference ImmutableArray.res:38:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:40:6 --> ImmutableArray.res:40:14\n  addValueReference ImmutableArray.res:40:6 --> ImmutableArray.res:40:17\n  addValueReference ImmutableArray.res:40:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:42:6 --> ImmutableArray.res:42:15\n  addValueReference ImmutableArray.res:42:6 --> ImmutableArray.res:42:18\n  addValueReference ImmutableArray.res:42:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:44:6 --> ImmutableArray.res:44:17\n  addValueReference ImmutableArray.res:44:6 --> ImmutableArray.res:44:20\n  addValueReference ImmutableArray.res:44:6 --> ImmutableArray.res:44:23\n  addValueReference ImmutableArray.res:44:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:46:6 --> ImmutableArray.res:46:17\n  addValueReference ImmutableArray.res:46:6 --> ImmutableArray.res:46:20\n  addValueReference ImmutableArray.res:46:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:47:6 --> ImmutableArray.res:47:16\n  addValueReference ImmutableArray.res:47:6 --> ImmutableArray.res:47:19\n  addValueReference ImmutableArray.res:47:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:49:6 --> ImmutableArray.res:49:27\n  addValueReference ImmutableArray.res:49:6 --> ImmutableArray.res:49:30\n  addValueReference ImmutableArray.res:49:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:50:6 --> ImmutableArray.res:50:26\n  addValueReference ImmutableArray.res:50:6 --> ImmutableArray.res:50:29\n  addValueReference 
ImmutableArray.res:50:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:52:6 --> ImmutableArray.res:52:13\n  addValueReference ImmutableArray.res:52:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:52:6 --> ImmutableArray.res:52:17\n  addValueReference ImmutableArray.res:52:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:52:6 --> ImmutableArray.res:9:2\n  addValueReference ImmutableArray.res:54:6 --> ImmutableArray.res:54:16\n  addValueReference ImmutableArray.res:54:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:54:6 --> ImmutableArray.res:54:20\n  addValueReference ImmutableArray.res:54:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:54:6 --> ImmutableArray.res:54:24\n  addValueReference ImmutableArray.res:54:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:55:6 --> ImmutableArray.res:55:15\n  addValueReference ImmutableArray.res:55:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:55:6 --> ImmutableArray.res:55:19\n  addValueReference ImmutableArray.res:55:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:55:6 --> ImmutableArray.res:55:23\n  addValueReference ImmutableArray.res:55:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:57:6 --> ImmutableArray.res:57:14\n  addValueReference ImmutableArray.res:57:6 --> ImmutableArray.res:6:2\n  addValueReference ImmutableArray.res:57:6 --> ImmutableArray.res:10:2\n  addValueReference ImmutableArray.res:59:6 --> ImmutableArray.res:59:16\n  addValueReference ImmutableArray.res:59:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:59:6 --> ImmutableArray.res:59:20\n  addValueReference ImmutableArray.res:59:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:59:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:61:6 --> ImmutableArray.res:61:20\n  addValueReference ImmutableArray.res:61:6 --> 
ImmutableArray.res:7:2\n  addValueReference ImmutableArray.res:61:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:63:6 --> ImmutableArray.res:63:15\n  addValueReference ImmutableArray.res:63:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:63:6 --> ImmutableArray.res:63:18\n  addValueReference ImmutableArray.res:63:6 --> ImmutableArray.res:63:27\n  addValueReference ImmutableArray.res:63:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:65:6 --> ImmutableArray.res:65:20\n  addValueReference ImmutableArray.res:65:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:65:6 --> ImmutableArray.res:65:23\n  addValueReference ImmutableArray.res:65:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:67:6 --> ImmutableArray.res:67:13\n  addValueReference ImmutableArray.res:67:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:67:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:69:6 --> ImmutableArray.res:69:18\n  addValueReference ImmutableArray.res:69:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:69:6 --> ImmutableArray.res:69:21\n  addValueReference ImmutableArray.res:70:6 --> ImmutableArray.res:70:17\n  addValueReference ImmutableArray.res:70:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:70:6 --> ImmutableArray.res:70:20\n  addValueReference ImmutableArray.res:72:6 --> ImmutableArray.res:72:14\n  addValueReference ImmutableArray.res:72:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:72:6 --> ImmutableArray.res:72:17\n  addValueReference ImmutableArray.res:72:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:73:6 --> ImmutableArray.res:73:13\n  addValueReference ImmutableArray.res:73:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:73:6 --> ImmutableArray.res:73:16\n  addValueReference ImmutableArray.res:73:6 --> ImmutableArray.res:8:2\n  
addValueReference ImmutableArray.res:75:6 --> ImmutableArray.res:75:24\n  addValueReference ImmutableArray.res:75:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:75:6 --> ImmutableArray.res:75:27\n  addValueReference ImmutableArray.res:75:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:76:6 --> ImmutableArray.res:76:23\n  addValueReference ImmutableArray.res:76:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:76:6 --> ImmutableArray.res:76:26\n  addValueReference ImmutableArray.res:76:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:78:6 --> ImmutableArray.res:78:18\n  addValueReference ImmutableArray.res:78:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:78:6 --> ImmutableArray.res:78:21\n  addValueReference ImmutableArray.res:78:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:79:6 --> ImmutableArray.res:79:17\n  addValueReference ImmutableArray.res:79:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:79:6 --> ImmutableArray.res:79:20\n  addValueReference ImmutableArray.res:79:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:81:6 --> ImmutableArray.res:81:27\n  addValueReference ImmutableArray.res:81:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:81:6 --> ImmutableArray.res:81:30\n  addValueReference ImmutableArray.res:82:6 --> ImmutableArray.res:82:26\n  addValueReference ImmutableArray.res:82:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:82:6 --> ImmutableArray.res:82:29\n  addValueReference ImmutableArray.res:84:6 --> ImmutableArray.res:84:23\n  addValueReference ImmutableArray.res:84:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:84:6 --> ImmutableArray.res:84:26\n  addValueReference ImmutableArray.res:84:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:85:6 --> ImmutableArray.res:85:22\n  addValueReference 
ImmutableArray.res:85:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:85:6 --> ImmutableArray.res:85:25\n  addValueReference ImmutableArray.res:85:6 --> ImmutableArray.res:8:2\n  addValueReference ImmutableArray.res:87:6 --> ImmutableArray.res:87:20\n  addValueReference ImmutableArray.res:87:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:87:6 --> ImmutableArray.res:87:23\n  addValueReference ImmutableArray.res:87:6 --> ImmutableArray.res:10:2\n  addValueReference ImmutableArray.res:88:6 --> ImmutableArray.res:88:19\n  addValueReference ImmutableArray.res:88:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:88:6 --> ImmutableArray.res:88:22\n  addValueReference ImmutableArray.res:88:6 --> ImmutableArray.res:10:2\n  addValueReference ImmutableArray.res:90:6 --> ImmutableArray.res:90:17\n  addValueReference ImmutableArray.res:90:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:90:6 --> ImmutableArray.res:90:20\n  addValueReference ImmutableArray.res:90:6 --> ImmutableArray.res:90:23\n  addValueReference ImmutableArray.res:91:6 --> ImmutableArray.res:91:16\n  addValueReference ImmutableArray.res:91:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:91:6 --> ImmutableArray.res:91:19\n  addValueReference ImmutableArray.res:91:6 --> ImmutableArray.res:91:22\n  addValueReference ImmutableArray.res:93:6 --> ImmutableArray.res:93:24\n  addValueReference ImmutableArray.res:93:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:93:6 --> ImmutableArray.res:93:27\n  addValueReference ImmutableArray.res:93:6 --> ImmutableArray.res:93:30\n  addValueReference ImmutableArray.res:94:6 --> ImmutableArray.res:94:23\n  addValueReference ImmutableArray.res:94:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:94:6 --> ImmutableArray.res:94:26\n  addValueReference ImmutableArray.res:94:6 --> ImmutableArray.res:94:29\n  addValueReference 
ImmutableArray.res:96:6 --> ImmutableArray.res:96:25\n  addValueReference ImmutableArray.res:96:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:96:6 --> ImmutableArray.res:96:29\n  addValueReference ImmutableArray.res:96:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:96:6 --> ImmutableArray.res:96:33\n  addValueReference ImmutableArray.res:96:6 --> ImmutableArray.res:96:36\n  addValueReference ImmutableArray.res:97:6 --> ImmutableArray.res:97:24\n  addValueReference ImmutableArray.res:97:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:97:6 --> ImmutableArray.res:97:28\n  addValueReference ImmutableArray.res:97:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:97:6 --> ImmutableArray.res:97:32\n  addValueReference ImmutableArray.res:97:6 --> ImmutableArray.res:97:35\n  addValueReference ImmutableArray.res:99:6 --> ImmutableArray.res:99:15\n  addValueReference ImmutableArray.res:99:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:99:6 --> ImmutableArray.res:99:18\n  addValueReference ImmutableArray.res:100:6 --> ImmutableArray.res:100:14\n  addValueReference ImmutableArray.res:100:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:100:6 --> ImmutableArray.res:100:17\n  addValueReference ImmutableArray.res:102:6 --> ImmutableArray.res:102:16\n  addValueReference ImmutableArray.res:102:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:102:6 --> ImmutableArray.res:102:19\n  addValueReference ImmutableArray.res:103:6 --> ImmutableArray.res:103:15\n  addValueReference ImmutableArray.res:103:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:103:6 --> ImmutableArray.res:103:18\n  addValueReference ImmutableArray.res:105:6 --> ImmutableArray.res:105:17\n  addValueReference ImmutableArray.res:105:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:105:6 --> ImmutableArray.res:105:21\n  addValueReference 
ImmutableArray.res:105:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:105:6 --> ImmutableArray.res:105:25\n  addValueReference ImmutableArray.res:106:6 --> ImmutableArray.res:106:16\n  addValueReference ImmutableArray.res:106:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:106:6 --> ImmutableArray.res:106:20\n  addValueReference ImmutableArray.res:106:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:106:6 --> ImmutableArray.res:106:24\n  addValueReference ImmutableArray.res:108:6 --> ImmutableArray.res:108:16\n  addValueReference ImmutableArray.res:108:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:108:6 --> ImmutableArray.res:108:20\n  addValueReference ImmutableArray.res:108:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:108:6 --> ImmutableArray.res:108:24\n  addValueReference ImmutableArray.res:109:6 --> ImmutableArray.res:109:15\n  addValueReference ImmutableArray.res:109:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:109:6 --> ImmutableArray.res:109:19\n  addValueReference ImmutableArray.res:109:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:109:6 --> ImmutableArray.res:109:23\n  addValueReference ImmutableArray.res:111:6 --> ImmutableArray.res:111:14\n  addValueReference ImmutableArray.res:111:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:111:6 --> ImmutableArray.res:111:18\n  addValueReference ImmutableArray.res:111:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:111:6 --> ImmutableArray.res:111:22\n  addValueReference ImmutableArray.res:112:6 --> ImmutableArray.res:112:13\n  addValueReference ImmutableArray.res:112:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:112:6 --> ImmutableArray.res:112:17\n  addValueReference ImmutableArray.res:112:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:112:6 --> ImmutableArray.res:112:21\n 
 addValueReference ImmutableArray.res:114:6 --> ImmutableArray.res:114:13\n  addValueReference ImmutableArray.res:114:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:114:6 --> ImmutableArray.res:114:17\n  addValueReference ImmutableArray.res:114:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:114:6 --> ImmutableArray.res:114:21\n  addValueReference ImmutableArray.res:115:6 --> ImmutableArray.res:115:12\n  addValueReference ImmutableArray.res:115:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:115:6 --> ImmutableArray.res:115:16\n  addValueReference ImmutableArray.res:115:6 --> ImmutableArray.res:5:2\n  addValueReference ImmutableArray.res:115:6 --> ImmutableArray.res:115:20\n  addValueReference ImmutableArray.resi:6:2 --> ImmutableArray.res:24:6\n  addValueReference ImmutableArray.resi:9:0 --> ImmutableArray.res:14:6\n  addValueReference ImmutableArray.resi:12:0 --> ImmutableArray.res:16:6\n  addValueReference ImmutableArray.resi:14:0 --> ImmutableArray.res:20:6\n  addValueReference ImmutableArray.resi:17:0 --> ImmutableArray.res:22:6\n  addValueReference ImmutableArray.resi:19:0 --> ImmutableArray.res:24:6\n  addValueReference ImmutableArray.resi:21:0 --> ImmutableArray.res:26:6\n  addValueReference ImmutableArray.resi:23:0 --> ImmutableArray.res:28:6\n  addValueReference ImmutableArray.resi:25:0 --> ImmutableArray.res:30:6\n  addValueReference ImmutableArray.resi:27:0 --> ImmutableArray.res:32:6\n  addValueReference ImmutableArray.resi:29:0 --> ImmutableArray.res:34:6\n  addValueReference ImmutableArray.resi:31:0 --> ImmutableArray.res:36:6\n  addValueReference ImmutableArray.resi:33:0 --> ImmutableArray.res:38:6\n  addValueReference ImmutableArray.resi:35:0 --> ImmutableArray.res:40:6\n  addValueReference ImmutableArray.resi:37:0 --> ImmutableArray.res:42:6\n  addValueReference ImmutableArray.resi:39:0 --> ImmutableArray.res:44:6\n  addValueReference ImmutableArray.resi:41:0 --> 
ImmutableArray.res:46:6\n  addValueReference ImmutableArray.resi:42:0 --> ImmutableArray.res:47:6\n  addValueReference ImmutableArray.resi:44:0 --> ImmutableArray.res:49:6\n  addValueReference ImmutableArray.resi:45:0 --> ImmutableArray.res:50:6\n  addValueReference ImmutableArray.resi:47:0 --> ImmutableArray.res:52:6\n  addValueReference ImmutableArray.resi:49:0 --> ImmutableArray.res:54:6\n  addValueReference ImmutableArray.resi:50:0 --> ImmutableArray.res:55:6\n  addValueReference ImmutableArray.resi:52:0 --> ImmutableArray.res:57:6\n  addValueReference ImmutableArray.resi:54:0 --> ImmutableArray.res:59:6\n  addValueReference ImmutableArray.resi:56:0 --> ImmutableArray.res:61:6\n  addValueReference ImmutableArray.resi:58:0 --> ImmutableArray.res:63:6\n  addValueReference ImmutableArray.resi:60:0 --> ImmutableArray.res:65:6\n  addValueReference ImmutableArray.resi:62:0 --> ImmutableArray.res:67:6\n  addValueReference ImmutableArray.resi:64:0 --> ImmutableArray.res:69:6\n  addValueReference ImmutableArray.resi:65:0 --> ImmutableArray.res:70:6\n  addValueReference ImmutableArray.resi:67:0 --> ImmutableArray.res:72:6\n  addValueReference ImmutableArray.resi:68:0 --> ImmutableArray.res:73:6\n  addValueReference ImmutableArray.resi:70:0 --> ImmutableArray.res:75:6\n  addValueReference ImmutableArray.resi:71:0 --> ImmutableArray.res:76:6\n  addValueReference ImmutableArray.resi:73:0 --> ImmutableArray.res:78:6\n  addValueReference ImmutableArray.resi:74:0 --> ImmutableArray.res:79:6\n  addValueReference ImmutableArray.resi:76:0 --> ImmutableArray.res:81:6\n  addValueReference ImmutableArray.resi:77:0 --> ImmutableArray.res:82:6\n  addValueReference ImmutableArray.resi:79:0 --> ImmutableArray.res:84:6\n  addValueReference ImmutableArray.resi:80:0 --> ImmutableArray.res:85:6\n  addValueReference ImmutableArray.resi:82:0 --> ImmutableArray.res:87:6\n  addValueReference ImmutableArray.resi:83:0 --> ImmutableArray.res:88:6\n  addValueReference ImmutableArray.resi:85:0 --> 
ImmutableArray.res:90:6\n  addValueReference ImmutableArray.resi:86:0 --> ImmutableArray.res:91:6\n  addValueReference ImmutableArray.resi:88:0 --> ImmutableArray.res:93:6\n  addValueReference ImmutableArray.resi:89:0 --> ImmutableArray.res:94:6\n  addValueReference ImmutableArray.resi:91:0 --> ImmutableArray.res:96:6\n  addValueReference ImmutableArray.resi:92:0 --> ImmutableArray.res:97:6\n  addValueReference ImmutableArray.resi:94:0 --> ImmutableArray.res:99:6\n  addValueReference ImmutableArray.resi:95:0 --> ImmutableArray.res:100:6\n  addValueReference ImmutableArray.resi:97:0 --> ImmutableArray.res:102:6\n  addValueReference ImmutableArray.resi:98:0 --> ImmutableArray.res:103:6\n  addValueReference ImmutableArray.resi:100:0 --> ImmutableArray.res:105:6\n  addValueReference ImmutableArray.resi:101:0 --> ImmutableArray.res:106:6\n  addValueReference ImmutableArray.resi:103:0 --> ImmutableArray.res:108:6\n  addValueReference ImmutableArray.resi:104:0 --> ImmutableArray.res:109:6\n  addValueReference ImmutableArray.resi:106:0 --> ImmutableArray.res:111:6\n  addValueReference ImmutableArray.resi:107:0 --> ImmutableArray.res:112:6\n  addValueReference ImmutableArray.resi:109:0 --> ImmutableArray.res:114:6\n  addValueReference ImmutableArray.resi:110:0 --> ImmutableArray.res:115:6\n  Scanning ImmutableArray.cmti Source:ImmutableArray.resi\n  addValueDeclaration +get ImmutableArray.resi:6:2 path:ImmutableArray.Array\n  addValueDeclaration +fromArray ImmutableArray.resi:9:0 path:ImmutableArray\n  addValueDeclaration +toArray ImmutableArray.resi:12:0 path:ImmutableArray\n  addValueDeclaration +length ImmutableArray.resi:14:0 path:ImmutableArray\n  addValueDeclaration +size ImmutableArray.resi:17:0 path:ImmutableArray\n  addValueDeclaration +get ImmutableArray.resi:19:0 path:ImmutableArray\n  addValueDeclaration +getExn ImmutableArray.resi:21:0 path:ImmutableArray\n  addValueDeclaration +getUnsafe ImmutableArray.resi:23:0 path:ImmutableArray\n  addValueDeclaration 
+getUndefined ImmutableArray.resi:25:0 path:ImmutableArray\n  addValueDeclaration +shuffle ImmutableArray.resi:27:0 path:ImmutableArray\n  addValueDeclaration +reverse ImmutableArray.resi:29:0 path:ImmutableArray\n  addValueDeclaration +makeUninitialized ImmutableArray.resi:31:0 path:ImmutableArray\n  addValueDeclaration +makeUninitializedUnsafe ImmutableArray.resi:33:0 path:ImmutableArray\n  addValueDeclaration +make ImmutableArray.resi:35:0 path:ImmutableArray\n  addValueDeclaration +range ImmutableArray.resi:37:0 path:ImmutableArray\n  addValueDeclaration +rangeBy ImmutableArray.resi:39:0 path:ImmutableArray\n  addValueDeclaration +makeByU ImmutableArray.resi:41:0 path:ImmutableArray\n  addValueDeclaration +makeBy ImmutableArray.resi:42:0 path:ImmutableArray\n  addValueDeclaration +makeByAndShuffleU ImmutableArray.resi:44:0 path:ImmutableArray\n  addValueDeclaration +makeByAndShuffle ImmutableArray.resi:45:0 path:ImmutableArray\n  addValueDeclaration +zip ImmutableArray.resi:47:0 path:ImmutableArray\n  addValueDeclaration +zipByU ImmutableArray.resi:49:0 path:ImmutableArray\n  addValueDeclaration +zipBy ImmutableArray.resi:50:0 path:ImmutableArray\n  addValueDeclaration +unzip ImmutableArray.resi:52:0 path:ImmutableArray\n  addValueDeclaration +concat ImmutableArray.resi:54:0 path:ImmutableArray\n  addValueDeclaration +concatMany ImmutableArray.resi:56:0 path:ImmutableArray\n  addValueDeclaration +slice ImmutableArray.resi:58:0 path:ImmutableArray\n  addValueDeclaration +sliceToEnd ImmutableArray.resi:60:0 path:ImmutableArray\n  addValueDeclaration +copy ImmutableArray.resi:62:0 path:ImmutableArray\n  addValueDeclaration +forEachU ImmutableArray.resi:64:0 path:ImmutableArray\n  addValueDeclaration +forEach ImmutableArray.resi:65:0 path:ImmutableArray\n  addValueDeclaration +mapU ImmutableArray.resi:67:0 path:ImmutableArray\n  addValueDeclaration +map ImmutableArray.resi:68:0 path:ImmutableArray\n  addValueDeclaration +keepWithIndexU ImmutableArray.resi:70:0 
path:ImmutableArray\n  addValueDeclaration +keepWithIndex ImmutableArray.resi:71:0 path:ImmutableArray\n  addValueDeclaration +keepMapU ImmutableArray.resi:73:0 path:ImmutableArray\n  addValueDeclaration +keepMap ImmutableArray.resi:74:0 path:ImmutableArray\n  addValueDeclaration +forEachWithIndexU ImmutableArray.resi:76:0 path:ImmutableArray\n  addValueDeclaration +forEachWithIndex ImmutableArray.resi:77:0 path:ImmutableArray\n  addValueDeclaration +mapWithIndexU ImmutableArray.resi:79:0 path:ImmutableArray\n  addValueDeclaration +mapWithIndex ImmutableArray.resi:80:0 path:ImmutableArray\n  addValueDeclaration +partitionU ImmutableArray.resi:82:0 path:ImmutableArray\n  addValueDeclaration +partition ImmutableArray.resi:83:0 path:ImmutableArray\n  addValueDeclaration +reduceU ImmutableArray.resi:85:0 path:ImmutableArray\n  addValueDeclaration +reduce ImmutableArray.resi:86:0 path:ImmutableArray\n  addValueDeclaration +reduceReverseU ImmutableArray.resi:88:0 path:ImmutableArray\n  addValueDeclaration +reduceReverse ImmutableArray.resi:89:0 path:ImmutableArray\n  addValueDeclaration +reduceReverse2U ImmutableArray.resi:91:0 path:ImmutableArray\n  addValueDeclaration +reduceReverse2 ImmutableArray.resi:92:0 path:ImmutableArray\n  addValueDeclaration +someU ImmutableArray.resi:94:0 path:ImmutableArray\n  addValueDeclaration +some ImmutableArray.resi:95:0 path:ImmutableArray\n  addValueDeclaration +everyU ImmutableArray.resi:97:0 path:ImmutableArray\n  addValueDeclaration +every ImmutableArray.resi:98:0 path:ImmutableArray\n  addValueDeclaration +every2U ImmutableArray.resi:100:0 path:ImmutableArray\n  addValueDeclaration +every2 ImmutableArray.resi:101:0 path:ImmutableArray\n  addValueDeclaration +some2U ImmutableArray.resi:103:0 path:ImmutableArray\n  addValueDeclaration +some2 ImmutableArray.resi:104:0 path:ImmutableArray\n  addValueDeclaration +cmpU ImmutableArray.resi:106:0 path:ImmutableArray\n  addValueDeclaration +cmp ImmutableArray.resi:107:0 
path:ImmutableArray\n  addValueDeclaration +eqU ImmutableArray.resi:109:0 path:ImmutableArray\n  addValueDeclaration +eq ImmutableArray.resi:110:0 path:ImmutableArray\n  Scanning ImportHookDefault.cmt Source:ImportHookDefault.res\n  addValueDeclaration +make ImportHookDefault.res:6:0 path:+ImportHookDefault\n  addValueDeclaration +make2 ImportHookDefault.res:13:0 path:+ImportHookDefault\n  addRecordLabelDeclaration name ImportHookDefault.res:2:2 path:+ImportHookDefault.person\n  addRecordLabelDeclaration age ImportHookDefault.res:3:2 path:+ImportHookDefault.person\n  Scanning ImportHooks.cmt Source:ImportHooks.res\n  addValueDeclaration +make ImportHooks.res:13:0 path:+ImportHooks\n  addValueDeclaration +foo ImportHooks.res:20:0 path:+ImportHooks\n  addRecordLabelDeclaration name ImportHooks.res:3:2 path:+ImportHooks.person\n  addRecordLabelDeclaration age ImportHooks.res:4:2 path:+ImportHooks.person\n  Scanning ImportIndex.cmt Source:ImportIndex.res\n  addValueDeclaration +make ImportIndex.res:2:0 path:+ImportIndex\n  Scanning ImportJsValue.cmt Source:ImportJsValue.res\n  addValueDeclaration +round ImportJsValue.res:1:0 path:+ImportJsValue\n  addValueDeclaration +area ImportJsValue.res:15:0 path:+ImportJsValue\n  addValueDeclaration +returnMixedArray ImportJsValue.res:23:0 path:+ImportJsValue\n  addValueDeclaration +roundedNumber ImportJsValue.res:27:4 path:+ImportJsValue\n  addValueDeclaration +areaValue ImportJsValue.res:30:4 path:+ImportJsValue\n  addValueDeclaration +getAbs ImportJsValue.res:40:6 path:+ImportJsValue.AbsoluteValue\n  addValueDeclaration +useGetProp ImportJsValue.res:47:4 path:+ImportJsValue\n  addValueDeclaration +useGetAbs ImportJsValue.res:50:4 path:+ImportJsValue\n  addValueDeclaration +useColor ImportJsValue.res:58:0 path:+ImportJsValue\n  addValueDeclaration +higherOrder ImportJsValue.res:60:0 path:+ImportJsValue\n  addValueDeclaration +returnedFromHigherOrder ImportJsValue.res:64:4 path:+ImportJsValue\n  addValueDeclaration 
+convertVariant ImportJsValue.res:70:0 path:+ImportJsValue\n  addValueDeclaration +polymorphic ImportJsValue.res:73:0 path:+ImportJsValue\n  addValueDeclaration +default ImportJsValue.res:75:0 path:+ImportJsValue\n  addRecordLabelDeclaration x ImportJsValue.res:11:2 path:+ImportJsValue.point\n  addRecordLabelDeclaration y ImportJsValue.res:12:2 path:+ImportJsValue.point\n  addValueReference ImportJsValue.res:27:4 --> ImportJsValue.res:1:0\n  addValueReference ImportJsValue.res:30:4 --> ImportJsValue.res:15:0\n  addValueDeclaration +getAbs ImportJsValue.res:41:8 path:+ImportJsValue.AbsoluteValue\n  addValueReference ImportJsValue.res:41:8 --> ImportJsValue.res:40:16\n  addValueReference ImportJsValue.res:40:6 --> ImportJsValue.res:41:8\n  addValueReference ImportJsValue.res:47:4 --> ImportJsValue.res:47:18\n  addValueReference ImportJsValue.res:47:4 --> ImportJsValue.res:37:2\n  addValueReference ImportJsValue.res:50:4 --> ImportJsValue.res:50:17\n  addValueReference ImportJsValue.res:50:4 --> ImportJsValue.res:40:6\n  addValueReference ImportJsValue.res:64:4 --> ImportJsValue.res:60:0\n  addVariantCaseDeclaration I ImportJsValue.res:67:2 path:+ImportJsValue.variant\n  addVariantCaseDeclaration S ImportJsValue.res:68:2 path:+ImportJsValue.variant\n  Scanning ImportMyBanner.cmt Source:ImportMyBanner.res\n  addValueDeclaration +make ImportMyBanner.res:7:0 path:+ImportMyBanner\n  addValueDeclaration +make ImportMyBanner.res:12:4 path:+ImportMyBanner\n  addRecordLabelDeclaration text ImportMyBanner.res:5:16 path:+ImportMyBanner.message\n  addValueReference ImportMyBanner.res:12:4 --> ImportMyBanner.res:7:0\n  Scanning InnerModuleTypes.cmt Source:InnerModuleTypes.res\n  addVariantCaseDeclaration Foo InnerModuleTypes.res:2:11 path:+InnerModuleTypes.I.t\n  Scanning InnerModuleTypes.cmti Source:InnerModuleTypes.resi\n  addVariantCaseDeclaration Foo InnerModuleTypes.resi:2:11 path:InnerModuleTypes.I.t\n  extendTypeDependencies InnerModuleTypes.res:2:11 --> 
InnerModuleTypes.resi:2:11\n  extendTypeDependencies InnerModuleTypes.resi:2:11 --> InnerModuleTypes.res:2:11\n  addTypeReference InnerModuleTypes.res:2:11 --> InnerModuleTypes.resi:2:11\n  addTypeReference InnerModuleTypes.resi:2:11 --> InnerModuleTypes.res:2:11\n  Scanning JSResource.cmt Source:JSResource.res\n  Scanning JsxV4.cmt Source:JsxV4.res\n  addValueDeclaration +make JsxV4.res:4:23 path:+JsxV4.C\n  addValueReference JsxV4.res:4:36 --> React.res:3:0\n  addValueReference JsxV4.res:7:9 --> JsxV4.res:4:23\n  Scanning LetPrivate.cmt Source:LetPrivate.res\n  addValueDeclaration +y LetPrivate.res:7:4 path:+LetPrivate\n  addValueDeclaration +x LetPrivate.res:3:6 path:+LetPrivate.local_1\n  addValueReference LetPrivate.res:7:4 --> LetPrivate.res:3:6\n  Scanning ModuleAliases.cmt Source:ModuleAliases.res\n  addValueDeclaration +testNested ModuleAliases.res:22:4 path:+ModuleAliases\n  addValueDeclaration +testInner ModuleAliases.res:25:4 path:+ModuleAliases\n  addValueDeclaration +testInner2 ModuleAliases.res:28:4 path:+ModuleAliases\n  addRecordLabelDeclaration inner ModuleAliases.res:3:19 path:+ModuleAliases.Outer.Inner.innerT\n  addRecordLabelDeclaration nested ModuleAliases.res:11:16 path:+ModuleAliases.Outer2.Inner2.InnerNested.t\n  addValueReference ModuleAliases.res:22:4 --> ModuleAliases.res:22:18\n  addValueReference ModuleAliases.res:25:4 --> ModuleAliases.res:25:17\n  addValueReference ModuleAliases.res:28:4 --> ModuleAliases.res:28:18\n  Scanning ModuleAliases2.cmt Source:ModuleAliases2.res\n  addValueDeclaration +q ModuleAliases2.res:21:4 path:+ModuleAliases2\n  addRecordLabelDeclaration x ModuleAliases2.res:3:2 path:+ModuleAliases2.record\n  addRecordLabelDeclaration y ModuleAliases2.res:4:2 path:+ModuleAliases2.record\n  addRecordLabelDeclaration outer ModuleAliases2.res:9:16 path:+ModuleAliases2.Outer.outer\n  addRecordLabelDeclaration inner ModuleAliases2.res:13:18 path:+ModuleAliases2.Outer.Inner.inner\n  Scanning ModuleExceptionBug.cmt 
Source:ModuleExceptionBug.res\n  addValueDeclaration +customDouble ModuleExceptionBug.res:2:6 path:+ModuleExceptionBug.Dep\n  addValueDeclaration +ddjdj ModuleExceptionBug.res:7:4 path:+ModuleExceptionBug\n  addValueReference ModuleExceptionBug.res:2:6 --> ModuleExceptionBug.res:2:21\n  addExceptionDeclaration MyOtherException ModuleExceptionBug.res:5:0 path:+ModuleExceptionBug\n  addValueReference ModuleExceptionBug.res:8:7 --> ModuleExceptionBug.res:7:4\n  Scanning NestedModules.cmt Source:NestedModules.res\n  addValueDeclaration +notNested NestedModules.res:2:4 path:+NestedModules\n  addValueDeclaration +theAnswer NestedModules.res:6:6 path:+NestedModules.Universe\n  addValueDeclaration +notExported NestedModules.res:8:6 path:+NestedModules.Universe\n  addValueDeclaration +x NestedModules.res:14:8 path:+NestedModules.Universe.Nested2\n  addValueDeclaration +nested2Value NestedModules.res:17:8 path:+NestedModules.Universe.Nested2\n  addValueDeclaration +y NestedModules.res:19:8 path:+NestedModules.Universe.Nested2\n  addValueDeclaration +x NestedModules.res:25:10 path:+NestedModules.Universe.Nested2.Nested3\n  addValueDeclaration +y NestedModules.res:26:10 path:+NestedModules.Universe.Nested2.Nested3\n  addValueDeclaration +z NestedModules.res:27:10 path:+NestedModules.Universe.Nested2.Nested3\n  addValueDeclaration +w NestedModules.res:28:10 path:+NestedModules.Universe.Nested2.Nested3\n  addValueDeclaration +nested3Value NestedModules.res:34:10 path:+NestedModules.Universe.Nested2.Nested3\n  addValueDeclaration +nested3Function NestedModules.res:37:10 path:+NestedModules.Universe.Nested2.Nested3\n  addValueDeclaration +nested2Function NestedModules.res:41:8 path:+NestedModules.Universe.Nested2\n  addValueDeclaration +someString NestedModules.res:50:6 path:+NestedModules.Universe\n  addValueReference NestedModules.res:37:10 --> NestedModules.res:37:29\n  addValueReference NestedModules.res:41:8 --> NestedModules.res:41:27\n  addVariantCaseDeclaration A 
NestedModules.res:46:4 path:+NestedModules.Universe.variant\n  addVariantCaseDeclaration B NestedModules.res:47:4 path:+NestedModules.Universe.variant\n  Scanning NestedModulesInSignature.cmt Source:NestedModulesInSignature.res\n  addValueDeclaration +theAnswer NestedModulesInSignature.res:2:6 path:+NestedModulesInSignature.Universe\n  addValueReference NestedModulesInSignature.resi:2:2 --> NestedModulesInSignature.res:2:6\n  Scanning NestedModulesInSignature.cmti Source:NestedModulesInSignature.resi\n  addValueDeclaration +theAnswer NestedModulesInSignature.resi:2:2 path:NestedModulesInSignature.Universe\n  Scanning Newsyntax.cmt Source:Newsyntax.res\n  addValueDeclaration +x Newsyntax.res:1:4 path:+Newsyntax\n  addValueDeclaration +y Newsyntax.res:3:4 path:+Newsyntax\n  addRecordLabelDeclaration xxx Newsyntax.res:6:2 path:+Newsyntax.record\n  addRecordLabelDeclaration yyy Newsyntax.res:7:2 path:+Newsyntax.record\n  addVariantCaseDeclaration A Newsyntax.res:10:15 path:+Newsyntax.variant\n  addVariantCaseDeclaration B Newsyntax.res:10:17 path:+Newsyntax.variant\n  addVariantCaseDeclaration C Newsyntax.res:10:25 path:+Newsyntax.variant\n  addRecordLabelDeclaration xx Newsyntax.res:12:16 path:+Newsyntax.record2\n  addRecordLabelDeclaration yy Newsyntax.res:12:23 path:+Newsyntax.record2\n  Scanning Newton.cmt Source:Newton.res\n  addValueDeclaration +- Newton.res:1:4 path:+Newton\n  addValueDeclaration ++ Newton.res:2:4 path:+Newton\n  addValueDeclaration +* Newton.res:3:4 path:+Newton\n  addValueDeclaration +/ Newton.res:4:4 path:+Newton\n  addValueDeclaration +newton Newton.res:6:4 path:+Newton\n  addValueDeclaration +f Newton.res:25:4 path:+Newton\n  addValueDeclaration +fPrimed Newton.res:27:4 path:+Newton\n  addValueDeclaration +result Newton.res:29:4 path:+Newton\n  addValueDeclaration +current Newton.res:7:6 path:+Newton\n  addValueReference Newton.res:7:6 --> Newton.res:6:28\n  addValueDeclaration +iterateMore Newton.res:8:6 path:+Newton\n  addValueDeclaration 
+delta Newton.res:9:8 path:+Newton\n  addValueReference Newton.res:9:8 --> Newton.res:8:21\n  addValueReference Newton.res:9:8 --> Newton.res:8:31\n  addValueReference Newton.res:9:8 --> Newton.res:1:4\n  addValueReference Newton.res:9:8 --> Newton.res:8:31\n  addValueReference Newton.res:9:8 --> Newton.res:8:21\n  addValueReference Newton.res:9:8 --> Newton.res:1:4\n  addValueReference Newton.res:9:8 --> Newton.res:8:31\n  addValueReference Newton.res:9:8 --> Newton.res:8:21\n  addValueReference Newton.res:8:6 --> Newton.res:9:8\n  addValueReference Newton.res:8:6 --> Newton.res:6:38\n  addValueReference Newton.res:8:6 --> Newton.res:7:6\n  addValueReference Newton.res:8:6 --> Newton.res:8:31\n  addValueDeclaration +loop Newton.res:14:10 path:+Newton\n  addValueDeclaration +previous Newton.res:15:8 path:+Newton\n  addValueReference Newton.res:15:8 --> Newton.res:7:6\n  addValueDeclaration +next Newton.res:16:8 path:+Newton\n  addValueReference Newton.res:16:8 --> Newton.res:15:8\n  addValueReference Newton.res:16:8 --> Newton.res:15:8\n  addValueReference Newton.res:16:8 --> Newton.res:6:14\n  addValueReference Newton.res:16:8 --> Newton.res:15:8\n  addValueReference Newton.res:16:8 --> Newton.res:6:18\n  addValueReference Newton.res:16:8 --> Newton.res:4:4\n  addValueReference Newton.res:16:8 --> Newton.res:1:4\n  addValueReference Newton.res:14:10 --> Newton.res:7:6\n  addValueReference Newton.res:14:10 --> Newton.res:14:10\n  addValueReference Newton.res:14:10 --> Newton.res:15:8\n  addValueReference Newton.res:14:10 --> Newton.res:16:8\n  addValueReference Newton.res:14:10 --> Newton.res:8:6\n  addValueReference Newton.res:6:4 --> Newton.res:14:10\n  addValueReference Newton.res:25:4 --> Newton.res:25:8\n  addValueReference Newton.res:25:4 --> Newton.res:25:8\n  addValueReference Newton.res:25:4 --> Newton.res:3:4\n  addValueReference Newton.res:25:4 --> Newton.res:25:8\n  addValueReference Newton.res:25:4 --> Newton.res:3:4\n  addValueReference 
Newton.res:25:4 --> Newton.res:25:8\n  addValueReference Newton.res:25:4 --> Newton.res:3:4\n  addValueReference Newton.res:25:4 --> Newton.res:25:8\n  addValueReference Newton.res:25:4 --> Newton.res:3:4\n  addValueReference Newton.res:25:4 --> Newton.res:1:4\n  addValueReference Newton.res:25:4 --> Newton.res:25:8\n  addValueReference Newton.res:25:4 --> Newton.res:3:4\n  addValueReference Newton.res:25:4 --> Newton.res:1:4\n  addValueReference Newton.res:25:4 --> Newton.res:2:4\n  addValueReference Newton.res:27:4 --> Newton.res:27:14\n  addValueReference Newton.res:27:4 --> Newton.res:3:4\n  addValueReference Newton.res:27:4 --> Newton.res:27:14\n  addValueReference Newton.res:27:4 --> Newton.res:3:4\n  addValueReference Newton.res:27:4 --> Newton.res:27:14\n  addValueReference Newton.res:27:4 --> Newton.res:3:4\n  addValueReference Newton.res:27:4 --> Newton.res:1:4\n  addValueReference Newton.res:27:4 --> Newton.res:1:4\n  addValueReference Newton.res:29:4 --> Newton.res:25:4\n  addValueReference Newton.res:29:4 --> Newton.res:27:4\n  addValueReference Newton.res:29:4 --> Newton.res:6:4\n  addValueReference Newton.res:31:8 --> Newton.res:29:4\n  addValueReference Newton.res:31:18 --> Newton.res:29:4\n  addValueReference Newton.res:31:16 --> Newton.res:25:4\n  Scanning Opaque.cmt Source:Opaque.res\n  addValueDeclaration +noConversion Opaque.res:5:4 path:+Opaque\n  addValueDeclaration +testConvertNestedRecordFromOtherFile Opaque.res:11:4 path:+Opaque\n  addVariantCaseDeclaration A Opaque.res:2:25 path:+Opaque.opaqueFromRecords\n  addValueReference Opaque.res:5:4 --> Opaque.res:5:20\n  addValueReference Opaque.res:11:4 --> Opaque.res:11:44\n  Scanning OptArg.cmt Source:OptArg.res\n  addValueDeclaration +foo OptArg.res:1:4 path:+OptArg\n  addValueDeclaration +bar OptArg.res:3:4 path:+OptArg\n  addValueDeclaration +threeArgs OptArg.res:9:4 path:+OptArg\n  addValueDeclaration +twoArgs OptArg.res:14:4 path:+OptArg\n  addValueDeclaration +oneArg OptArg.res:18:4 
path:+OptArg\n  addValueDeclaration +wrapOneArg OptArg.res:20:4 path:+OptArg\n  addValueDeclaration +fourArgs OptArg.res:24:4 path:+OptArg\n  addValueDeclaration +wrapfourArgs OptArg.res:26:4 path:+OptArg\n  addValueReference OptArg.res:1:4 --> OptArg.res:1:14\n  addValueReference OptArg.res:1:4 --> OptArg.res:1:20\n  addValueReference OptArg.res:1:4 --> OptArg.res:1:26\n  addValueReference OptArg.res:1:4 --> OptArg.res:1:11\n  addValueReference OptArg.res:1:4 --> OptArg.res:1:17\n  addValueReference OptArg.res:1:4 --> OptArg.res:1:23\n  addValueReference OptArg.res:1:4 --> OptArg.res:1:29\n  addValueReference OptArg.res:3:4 --> OptArg.res:3:17\n  addValueReference OptArg.res:3:4 --> OptArg.res:3:27\n  DeadOptionalArgs.addReferences foo called with optional argNames:x argNamesMaybe: OptArg.res:5:7\n  addValueReference OptArg.res:5:7 --> OptArg.res:1:4\n  DeadOptionalArgs.addReferences bar called with optional argNames: argNamesMaybe: OptArg.res:7:7\n  addValueReference OptArg.res:7:7 --> OptArg.res:3:4\n  addValueReference OptArg.res:9:4 --> OptArg.res:9:20\n  addValueReference OptArg.res:9:4 --> OptArg.res:9:26\n  addValueReference OptArg.res:9:4 --> OptArg.res:9:32\n  addValueReference OptArg.res:9:4 --> OptArg.res:9:17\n  addValueReference OptArg.res:9:4 --> OptArg.res:9:23\n  addValueReference OptArg.res:9:4 --> OptArg.res:9:29\n  addValueReference OptArg.res:9:4 --> OptArg.res:9:35\n  DeadOptionalArgs.addReferences threeArgs called with optional argNames:c, a argNamesMaybe: OptArg.res:11:7\n  addValueReference OptArg.res:11:7 --> OptArg.res:9:4\n  DeadOptionalArgs.addReferences threeArgs called with optional argNames:a argNamesMaybe: OptArg.res:12:7\n  addValueReference OptArg.res:12:7 --> OptArg.res:9:4\n  addValueReference OptArg.res:14:4 --> OptArg.res:14:18\n  addValueReference OptArg.res:14:4 --> OptArg.res:14:24\n  addValueReference OptArg.res:14:4 --> OptArg.res:14:15\n  addValueReference OptArg.res:14:4 --> OptArg.res:14:21\n  addValueReference 
OptArg.res:14:4 --> OptArg.res:14:27\n  DeadOptionalArgs.addReferences twoArgs called with optional argNames: argNamesMaybe: OptArg.res:16:12\n  addValueReference OptArg.res:16:12 --> OptArg.res:14:4\n  addValueReference OptArg.res:18:4 --> OptArg.res:18:17\n  addValueReference OptArg.res:18:4 --> OptArg.res:18:14\n  addValueReference OptArg.res:18:4 --> OptArg.res:18:24\n  DeadOptionalArgs.addReferences oneArg called with optional argNames:a argNamesMaybe:a OptArg.res:20:30\n  addValueReference OptArg.res:20:4 --> OptArg.res:20:18\n  addValueReference OptArg.res:20:4 --> OptArg.res:20:24\n  addValueReference OptArg.res:20:4 --> OptArg.res:18:4\n  DeadOptionalArgs.addReferences wrapOneArg called with optional argNames:a argNamesMaybe: OptArg.res:22:7\n  addValueReference OptArg.res:22:7 --> OptArg.res:20:4\n  addValueReference OptArg.res:24:4 --> OptArg.res:24:19\n  addValueReference OptArg.res:24:4 --> OptArg.res:24:25\n  addValueReference OptArg.res:24:4 --> OptArg.res:24:31\n  addValueReference OptArg.res:24:4 --> OptArg.res:24:37\n  addValueReference OptArg.res:24:4 --> OptArg.res:24:16\n  addValueReference OptArg.res:24:4 --> OptArg.res:24:22\n  addValueReference OptArg.res:24:4 --> OptArg.res:24:28\n  addValueReference OptArg.res:24:4 --> OptArg.res:24:34\n  addValueReference OptArg.res:24:4 --> OptArg.res:24:40\n  DeadOptionalArgs.addReferences fourArgs called with optional argNames:c, b, a argNamesMaybe:c, b, a OptArg.res:26:44\n  addValueReference OptArg.res:26:4 --> OptArg.res:26:20\n  addValueReference OptArg.res:26:4 --> OptArg.res:26:26\n  addValueReference OptArg.res:26:4 --> OptArg.res:26:32\n  addValueReference OptArg.res:26:4 --> OptArg.res:26:38\n  addValueReference OptArg.res:26:4 --> OptArg.res:24:4\n  DeadOptionalArgs.addReferences wrapfourArgs called with optional argNames:c, a argNamesMaybe: OptArg.res:28:7\n  addValueReference OptArg.res:28:7 --> OptArg.res:26:4\n  DeadOptionalArgs.addReferences wrapfourArgs called with optional argNames:c, 
b argNamesMaybe: OptArg.res:29:7\n  addValueReference OptArg.res:29:7 --> OptArg.res:26:4\n  addValueReference OptArg.resi:1:0 --> OptArg.res:1:4\n  OptionalArgs.addFunctionReference OptArg.resi:1:0 OptArg.res:1:4\n  addValueReference OptArg.resi:2:0 --> OptArg.res:3:4\n  OptionalArgs.addFunctionReference OptArg.resi:2:0 OptArg.res:3:4\n  Scanning OptArg.cmti Source:OptArg.resi\n  addValueDeclaration +foo OptArg.resi:1:0 path:OptArg\n  addValueDeclaration +bar OptArg.resi:2:0 path:OptArg\n  Scanning Records.cmt Source:Records.res\n  addValueDeclaration +origin Records.res:11:4 path:+Records\n  addValueDeclaration +computeArea Records.res:14:4 path:+Records\n  addValueDeclaration +coord2d Records.res:20:4 path:+Records\n  addValueDeclaration +getOpt Records.res:36:4 path:+Records\n  addValueDeclaration +findAddress Records.res:39:4 path:+Records\n  addValueDeclaration +someBusiness Records.res:43:4 path:+Records\n  addValueDeclaration +findAllAddresses Records.res:46:4 path:+Records\n  addValueDeclaration +getPayload Records.res:65:4 path:+Records\n  addValueDeclaration +getPayloadRecord Records.res:74:4 path:+Records\n  addValueDeclaration +recordValue Records.res:77:4 path:+Records\n  addValueDeclaration +payloadValue Records.res:80:4 path:+Records\n  addValueDeclaration +getPayloadRecordPlusOne Records.res:83:4 path:+Records\n  addValueDeclaration +findAddress2 Records.res:96:4 path:+Records\n  addValueDeclaration +someBusiness2 Records.res:100:4 path:+Records\n  addValueDeclaration +computeArea3 Records.res:107:4 path:+Records\n  addValueDeclaration +computeArea4 Records.res:111:4 path:+Records\n  addValueDeclaration +testMyRec Records.res:127:4 path:+Records\n  addValueDeclaration +testMyRec2 Records.res:130:4 path:+Records\n  addValueDeclaration +testMyObj Records.res:133:4 path:+Records\n  addValueDeclaration +testMyObj2 Records.res:136:4 path:+Records\n  addValueDeclaration +testMyRecBsAs Records.res:145:4 path:+Records\n  addValueDeclaration +testMyRecBsAs2 
Records.res:148:4 path:+Records\n  addRecordLabelDeclaration x Records.res:5:2 path:+Records.coord\n  addRecordLabelDeclaration y Records.res:6:2 path:+Records.coord\n  addRecordLabelDeclaration z Records.res:7:2 path:+Records.coord\n  addValueReference Records.res:14:4 --> Records.res:14:20\n  addValueReference Records.res:14:4 --> Records.res:14:23\n  addValueReference Records.res:14:4 --> Records.res:14:26\n  addValueReference Records.res:14:4 --> Records.res:16:31\n  addTypeReference Records.res:14:19 --> Records.res:5:2\n  addTypeReference Records.res:14:19 --> Records.res:6:2\n  addTypeReference Records.res:14:19 --> Records.res:7:2\n  addValueReference Records.res:20:4 --> Records.res:20:15\n  addValueReference Records.res:20:4 --> Records.res:20:18\n  addRecordLabelDeclaration name Records.res:24:2 path:+Records.person\n  addRecordLabelDeclaration age Records.res:25:2 path:+Records.person\n  addRecordLabelDeclaration address Records.res:26:2 path:+Records.person\n  addRecordLabelDeclaration name Records.res:31:2 path:+Records.business\n  addRecordLabelDeclaration owner Records.res:32:2 path:+Records.business\n  addRecordLabelDeclaration address Records.res:33:2 path:+Records.business\n  addValueReference Records.res:36:4 --> Records.res:36:14\n  addValueReference Records.res:36:4 --> Records.res:36:19\n  addValueReference Records.res:36:4 --> Records.res:36:28\n  addTypeReference Records.res:40:2 --> Records.res:33:2\n  addValueReference Records.res:39:4 --> Records.res:39:19\n  addValueReference Records.res:39:4 --> Records.res:40:35\n  addValueReference Records.res:39:4 --> Records.res:36:4\n  addValueReference Records.res:46:4 --> Records.res:46:24\n  addTypeReference Records.res:50:6 --> Records.res:33:2\n  addValueReference Records.res:46:4 --> Records.res:48:14\n  addValueReference Records.res:46:4 --> Records.res:50:39\n  addValueReference Records.res:46:4 --> Records.res:36:4\n  addTypeReference Records.res:51:6 --> Records.res:32:2\n  
addValueReference Records.res:46:4 --> Records.res:48:14\n  addTypeReference Records.res:51:42 --> Records.res:26:2\n  addValueReference Records.res:46:4 --> Records.res:51:37\n  addValueReference Records.res:46:4 --> Records.res:51:68\n  addValueReference Records.res:46:4 --> Records.res:36:4\n  addValueReference Records.res:46:4 --> Records.res:36:4\n  addRecordLabelDeclaration num Records.res:60:2 path:+Records.payload\n  addRecordLabelDeclaration payload Records.res:61:2 path:+Records.payload\n  addValueReference Records.res:65:4 --> Records.res:65:19\n  addTypeReference Records.res:65:18 --> Records.res:61:2\n  addRecordLabelDeclaration v Records.res:69:2 path:+Records.record\n  addRecordLabelDeclaration w Records.res:70:2 path:+Records.record\n  addValueReference Records.res:74:4 --> Records.res:74:25\n  addTypeReference Records.res:74:24 --> Records.res:61:2\n  addValueReference Records.res:80:4 --> Records.res:77:4\n  addTypeReference Records.res:85:5 --> Records.res:69:2\n  addValueReference Records.res:83:4 --> Records.res:83:32\n  addValueReference Records.res:83:4 --> Records.res:83:32\n  addTypeReference Records.res:83:31 --> Records.res:61:2\n  addRecordLabelDeclaration name Records.res:90:2 path:+Records.business2\n  addRecordLabelDeclaration owner Records.res:91:2 path:+Records.business2\n  addRecordLabelDeclaration address2 Records.res:92:2 path:+Records.business2\n  addTypeReference Records.res:97:2 --> Records.res:92:2\n  addValueReference Records.res:96:4 --> Records.res:96:20\n  addValueReference Records.res:96:4 --> Records.res:97:58\n  addValueReference Records.res:96:4 --> Records.res:36:4\n  addValueReference Records.res:107:4 --> Records.res:107:20\n  addValueReference Records.res:107:4 --> Records.res:107:20\n  addValueReference Records.res:107:4 --> Records.res:107:20\n  addValueReference Records.res:107:4 --> Records.res:108:75\n  addValueReference Records.res:111:4 --> Records.res:111:20\n  addValueReference Records.res:111:4 --> 
Records.res:111:20\n  addValueReference Records.res:111:4 --> Records.res:111:20\n  addValueReference Records.res:111:4 --> Records.res:112:53\n  addRecordLabelDeclaration type_ Records.res:119:2 path:+Records.myRec\n  addTypeReference Records.res:127:30 --> Records.res:119:2\n  addValueReference Records.res:127:4 --> Records.res:127:17\n  addValueReference Records.res:130:4 --> Records.res:130:18\n  addValueReference Records.res:133:4 --> Records.res:133:17\n  addValueReference Records.res:136:4 --> Records.res:136:18\n  addRecordLabelDeclaration type_ Records.res:140:2 path:+Records.myRecBsAs\n  addTypeReference Records.res:145:38 --> Records.res:140:2\n  addValueReference Records.res:145:4 --> Records.res:145:21\n  addValueReference Records.res:148:4 --> Records.res:148:22\n  Scanning References.cmt Source:References.res\n  addValueDeclaration +create References.res:4:4 path:+References\n  addValueDeclaration +access References.res:7:4 path:+References\n  addValueDeclaration +update References.res:10:4 path:+References\n  addValueDeclaration +get References.res:17:2 path:+References.R\n  addValueDeclaration +make References.res:18:2 path:+References.R\n  addValueDeclaration +set References.res:19:2 path:+References.R\n  addValueDeclaration +get References.res:31:4 path:+References\n  addValueDeclaration +make References.res:34:4 path:+References\n  addValueDeclaration +set References.res:37:4 path:+References\n  addValueDeclaration +destroysRefIdentity References.res:43:4 path:+References\n  addValueDeclaration +preserveRefIdentity References.res:47:4 path:+References\n  addValueReference References.res:4:4 --> References.res:4:14\n  addValueReference References.res:7:4 --> References.res:7:13\n  addValueReference References.res:10:4 --> References.res:10:13\n  addValueReference References.res:10:4 --> References.res:10:13\n  addValueDeclaration +get References.res:22:6 path:+References.R\n  addValueReference References.res:22:6 --> References.res:22:12\n  
addValueDeclaration +make References.res:23:6 path:+References.R\n  addValueDeclaration +set References.res:24:6 path:+References.R\n  addValueReference References.res:24:6 --> References.res:24:16\n  addValueReference References.res:24:6 --> References.res:24:13\n  addValueReference References.res:31:4 --> References.res:17:2\n  addValueReference References.res:34:4 --> References.res:18:2\n  addValueReference References.res:37:4 --> References.res:19:2\n  addRecordLabelDeclaration x References.res:39:27 path:+References.requiresConversion\n  addValueReference References.res:43:4 --> References.res:43:27\n  addValueReference References.res:47:4 --> References.res:47:27\n  addValueReference References.res:17:2 --> References.res:22:6\n  addValueReference References.res:18:2 --> References.res:23:6\n  addValueReference References.res:19:2 --> References.res:24:6\n  Scanning RepeatedLabel.cmt Source:RepeatedLabel.res\n  addValueDeclaration +userData RepeatedLabel.res:12:4 path:+RepeatedLabel\n  addRecordLabelDeclaration a RepeatedLabel.res:2:2 path:+RepeatedLabel.userData\n  addRecordLabelDeclaration b RepeatedLabel.res:3:2 path:+RepeatedLabel.userData\n  addRecordLabelDeclaration a RepeatedLabel.res:7:2 path:+RepeatedLabel.tabState\n  addRecordLabelDeclaration b RepeatedLabel.res:8:2 path:+RepeatedLabel.tabState\n  addRecordLabelDeclaration f RepeatedLabel.res:9:2 path:+RepeatedLabel.tabState\n  addValueReference RepeatedLabel.res:12:4 --> RepeatedLabel.res:12:17\n  addValueReference RepeatedLabel.res:12:4 --> RepeatedLabel.res:12:20\n  addTypeReference RepeatedLabel.res:12:16 --> RepeatedLabel.res:7:2\n  addTypeReference RepeatedLabel.res:12:16 --> RepeatedLabel.res:8:2\n  addValueReference RepeatedLabel.res:14:7 --> RepeatedLabel.res:12:4\n  Scanning RequireCond.cmt Source:RequireCond.res\n  Scanning Shadow.cmt Source:Shadow.res\n  addValueDeclaration +test Shadow.res:2:4 path:+Shadow\n  addValueDeclaration +test Shadow.res:5:4 path:+Shadow\n  addValueDeclaration 
+test Shadow.res:11:6 path:+Shadow.M\n  addValueDeclaration +test Shadow.res:9:6 path:+Shadow.M\n  Scanning TestDeadExn.cmt Source:TestDeadExn.res\n  Scanning TestEmitInnerModules.cmt Source:TestEmitInnerModules.res\n  addValueDeclaration +x TestEmitInnerModules.res:3:6 path:+TestEmitInnerModules.Inner\n  addValueDeclaration +y TestEmitInnerModules.res:5:6 path:+TestEmitInnerModules.Inner\n  addValueDeclaration +y TestEmitInnerModules.res:12:10 path:+TestEmitInnerModules.Outer.Medium.Inner\n  Scanning TestFirstClassModules.cmt Source:TestFirstClassModules.res\n  addValueDeclaration +convert TestFirstClassModules.res:2:4 path:+TestFirstClassModules\n  addValueDeclaration +convertInterface TestFirstClassModules.res:5:4 path:+TestFirstClassModules\n  addValueDeclaration +convertRecord TestFirstClassModules.res:8:4 path:+TestFirstClassModules\n  addValueDeclaration +convertFirstClassModuleWithTypeEquations TestFirstClassModules.res:27:4 path:+TestFirstClassModules\n  addValueReference TestFirstClassModules.res:2:4 --> TestFirstClassModules.res:2:15\n  addValueReference TestFirstClassModules.res:5:4 --> TestFirstClassModules.res:5:24\n  addValueReference TestFirstClassModules.res:8:4 --> TestFirstClassModules.res:8:21\n  addValueReference TestFirstClassModules.res:27:4 --> TestFirstClassModules.res:29:2\n  Scanning TestImmutableArray.cmt Source:TestImmutableArray.res\n  addValueDeclaration +testImmutableArrayGet TestImmutableArray.res:2:4 path:+TestImmutableArray\n  addValueDeclaration +testBeltArrayGet TestImmutableArray.res:12:4 path:+TestImmutableArray\n  addValueDeclaration +testBeltArraySet TestImmutableArray.res:17:4 path:+TestImmutableArray\n  addValueReference TestImmutableArray.res:2:4 --> TestImmutableArray.res:2:28\n  addValueReference TestImmutableArray.res:2:4 --> ImmutableArray.resi:6:2\n  addValueReference TestImmutableArray.res:12:4 --> TestImmutableArray.res:12:23\n  addValueReference TestImmutableArray.res:17:4 --> TestImmutableArray.res:17:23\n  
Scanning TestImport.cmt Source:TestImport.res\n  addValueDeclaration +innerStuffContents TestImport.res:1:0 path:+TestImport\n  addValueDeclaration +innerStuffContentsAsEmptyObject TestImport.res:7:0 path:+TestImport\n  addValueDeclaration +innerStuffContents TestImport.res:13:4 path:+TestImport\n  addValueDeclaration +valueStartingWithUpperCaseLetter TestImport.res:15:0 path:+TestImport\n  addValueDeclaration +defaultValue TestImport.res:18:0 path:+TestImport\n  addValueDeclaration +make TestImport.res:24:0 path:+TestImport\n  addValueDeclaration +make TestImport.res:27:4 path:+TestImport\n  addValueDeclaration +defaultValue2 TestImport.res:29:0 path:+TestImport\n  addValueReference TestImport.res:13:4 --> TestImport.res:1:0\n  addRecordLabelDeclaration text TestImport.res:22:16 path:+TestImport.message\n  addValueReference TestImport.res:27:4 --> TestImport.res:24:0\n  Scanning TestInnedModuleTypes.cmt Source:TestInnedModuleTypes.res\n  addValueDeclaration +_ TestInnedModuleTypes.res:1:0 path:+TestInnedModuleTypes\n  addTypeReference TestInnedModuleTypes.res:1:8 --> InnerModuleTypes.resi:2:11\n  Scanning TestModuleAliases.cmt Source:TestModuleAliases.res\n  addValueDeclaration +testInner1 TestModuleAliases.res:32:4 path:+TestModuleAliases\n  addValueDeclaration +testInner1Expanded TestModuleAliases.res:35:4 path:+TestModuleAliases\n  addValueDeclaration +testInner2 TestModuleAliases.res:38:4 path:+TestModuleAliases\n  addValueDeclaration +testInner2Expanded TestModuleAliases.res:41:4 path:+TestModuleAliases\n  addValueReference TestModuleAliases.res:32:4 --> TestModuleAliases.res:32:18\n  addValueReference TestModuleAliases.res:35:4 --> TestModuleAliases.res:35:26\n  addValueReference TestModuleAliases.res:38:4 --> TestModuleAliases.res:38:18\n  addValueReference TestModuleAliases.res:41:4 --> TestModuleAliases.res:41:26\n  Scanning TestOptArg.cmt Source:TestOptArg.res\n  addValueDeclaration +foo TestOptArg.res:3:4 path:+TestOptArg\n  addValueDeclaration +bar 
TestOptArg.res:5:4 path:+TestOptArg\n  addValueDeclaration +notSuppressesOptArgs TestOptArg.res:9:4 path:+TestOptArg\n  addValueDeclaration +liveSuppressesOptArgs TestOptArg.res:14:4 path:+TestOptArg\n  DeadOptionalArgs.addReferences OptArg.bar called with optional argNames:z argNamesMaybe: TestOptArg.res:1:7\n  addValueReference TestOptArg.res:1:7 --> OptArg.resi:2:0\n  addValueReference TestOptArg.res:3:4 --> TestOptArg.res:3:14\n  addValueReference TestOptArg.res:3:4 --> TestOptArg.res:3:11\n  addValueReference TestOptArg.res:3:4 --> TestOptArg.res:3:17\n  DeadOptionalArgs.addReferences foo called with optional argNames:x argNamesMaybe: TestOptArg.res:5:16\n  addValueReference TestOptArg.res:5:4 --> TestOptArg.res:3:4\n  addValueReference TestOptArg.res:7:7 --> TestOptArg.res:5:4\n  addValueReference TestOptArg.res:9:4 --> TestOptArg.res:9:31\n  addValueReference TestOptArg.res:9:4 --> TestOptArg.res:9:37\n  addValueReference TestOptArg.res:9:4 --> TestOptArg.res:9:43\n  addValueReference TestOptArg.res:9:4 --> TestOptArg.res:9:28\n  addValueReference TestOptArg.res:9:4 --> TestOptArg.res:9:34\n  addValueReference TestOptArg.res:9:4 --> TestOptArg.res:9:40\n  addValueReference TestOptArg.res:9:4 --> TestOptArg.res:9:46\n  DeadOptionalArgs.addReferences notSuppressesOptArgs called with optional argNames: argNamesMaybe: TestOptArg.res:11:8\n  addValueReference TestOptArg.res:11:8 --> TestOptArg.res:9:4\n  addValueReference TestOptArg.res:14:4 --> TestOptArg.res:14:32\n  addValueReference TestOptArg.res:14:4 --> TestOptArg.res:14:38\n  addValueReference TestOptArg.res:14:4 --> TestOptArg.res:14:44\n  addValueReference TestOptArg.res:14:4 --> TestOptArg.res:14:29\n  addValueReference TestOptArg.res:14:4 --> TestOptArg.res:14:35\n  addValueReference TestOptArg.res:14:4 --> TestOptArg.res:14:41\n  addValueReference TestOptArg.res:14:4 --> TestOptArg.res:14:47\n  DeadOptionalArgs.addReferences liveSuppressesOptArgs called with optional argNames:x argNamesMaybe: 
TestOptArg.res:16:8\n  addValueReference TestOptArg.res:16:8 --> TestOptArg.res:14:4\n  Scanning TestPromise.cmt Source:TestPromise.res\n  addValueDeclaration +convert TestPromise.res:14:4 path:+TestPromise\n  addRecordLabelDeclaration x TestPromise.res:6:2 path:+TestPromise.fromPayload\n  addRecordLabelDeclaration s TestPromise.res:7:2 path:+TestPromise.fromPayload\n  addRecordLabelDeclaration result TestPromise.res:11:18 path:+TestPromise.toPayload\n  addValueReference TestPromise.res:14:4 --> TestPromise.res:14:33\n  addTypeReference TestPromise.res:14:32 --> TestPromise.res:7:2\n  Scanning ToSuppress.cmt Source:ToSuppress.res\n  addValueDeclaration +toSuppress ToSuppress.res:1:4 path:+ToSuppress\n  Scanning TransitiveType1.cmt Source:TransitiveType1.res\n  addValueDeclaration +convert TransitiveType1.res:2:4 path:+TransitiveType1\n  addValueDeclaration +convertAlias TransitiveType1.res:5:4 path:+TransitiveType1\n  addValueReference TransitiveType1.res:2:4 --> TransitiveType1.res:2:15\n  addValueReference TransitiveType1.res:5:4 --> TransitiveType1.res:5:20\n  Scanning TransitiveType2.cmt Source:TransitiveType2.res\n  addValueDeclaration +convertT2 TransitiveType2.res:7:4 path:+TransitiveType2\n  addValueReference TransitiveType2.res:7:4 --> TransitiveType2.res:7:17\n  Scanning TransitiveType3.cmt Source:TransitiveType3.res\n  addValueDeclaration +convertT3 TransitiveType3.res:8:4 path:+TransitiveType3\n  addRecordLabelDeclaration i TransitiveType3.res:3:2 path:+TransitiveType3.t3\n  addRecordLabelDeclaration s TransitiveType3.res:4:2 path:+TransitiveType3.t3\n  addValueReference TransitiveType3.res:8:4 --> TransitiveType3.res:8:17\n  Scanning Tuples.cmt Source:Tuples.res\n  addValueDeclaration +testTuple Tuples.res:4:4 path:+Tuples\n  addValueDeclaration +origin Tuples.res:10:4 path:+Tuples\n  addValueDeclaration +computeArea Tuples.res:13:4 path:+Tuples\n  addValueDeclaration +computeAreaWithIdent Tuples.res:19:4 path:+Tuples\n  addValueDeclaration 
+computeAreaNoConverters Tuples.res:25:4 path:+Tuples\n  addValueDeclaration +coord2d Tuples.res:28:4 path:+Tuples\n  addValueDeclaration +getFirstName Tuples.res:43:4 path:+Tuples\n  addValueDeclaration +marry Tuples.res:46:4 path:+Tuples\n  addValueDeclaration +changeSecondAge Tuples.res:49:4 path:+Tuples\n  addValueReference Tuples.res:4:4 --> Tuples.res:4:18\n  addValueReference Tuples.res:4:4 --> Tuples.res:4:21\n  addValueReference Tuples.res:13:4 --> Tuples.res:13:20\n  addValueReference Tuples.res:13:4 --> Tuples.res:13:23\n  addValueReference Tuples.res:13:4 --> Tuples.res:13:26\n  addValueReference Tuples.res:13:4 --> Tuples.res:15:31\n  addValueReference Tuples.res:19:4 --> Tuples.res:19:29\n  addValueReference Tuples.res:19:4 --> Tuples.res:19:32\n  addValueReference Tuples.res:19:4 --> Tuples.res:19:35\n  addValueReference Tuples.res:19:4 --> Tuples.res:21:31\n  addValueReference Tuples.res:25:4 --> Tuples.res:25:32\n  addValueReference Tuples.res:25:4 --> Tuples.res:25:40\n  addValueReference Tuples.res:28:4 --> Tuples.res:28:15\n  addValueReference Tuples.res:28:4 --> Tuples.res:28:18\n  addRecordLabelDeclaration name Tuples.res:35:2 path:+Tuples.person\n  addRecordLabelDeclaration age Tuples.res:36:2 path:+Tuples.person\n  addTypeReference Tuples.res:43:49 --> Tuples.res:35:2\n  addValueReference Tuples.res:43:4 --> Tuples.res:43:21\n  addValueReference Tuples.res:46:4 --> Tuples.res:46:13\n  addValueReference Tuples.res:46:4 --> Tuples.res:46:20\n  addValueReference Tuples.res:49:4 --> Tuples.res:49:24\n  addTypeReference Tuples.res:49:84 --> Tuples.res:36:2\n  addValueReference Tuples.res:49:4 --> Tuples.res:49:31\n  addValueReference Tuples.res:49:4 --> Tuples.res:49:31\n  Scanning TypeParams1.cmt Source:TypeParams1.res\n  addValueDeclaration +exportSomething TypeParams1.res:4:4 path:+TypeParams1\n  Scanning TypeParams2.cmt Source:TypeParams2.res\n  addValueDeclaration +exportSomething TypeParams2.res:10:4 path:+TypeParams2\n  
addRecordLabelDeclaration id TypeParams2.res:2:13 path:+TypeParams2.item\n  Scanning TypeParams3.cmt Source:TypeParams3.res\n  addValueDeclaration +test TypeParams3.res:2:4 path:+TypeParams3\n  addValueDeclaration +test2 TypeParams3.res:5:4 path:+TypeParams3\n  addValueReference TypeParams3.res:2:4 --> TypeParams3.res:2:12\n  addValueReference TypeParams3.res:5:4 --> TypeParams3.res:5:13\n  Scanning Types.cmt Source:Types.res\n  addValueDeclaration +someIntList Types.res:5:4 path:+Types\n  addValueDeclaration +map Types.res:8:4 path:+Types\n  addValueDeclaration +swap Types.res:23:8 path:+Types\n  addValueDeclaration +selfRecursiveConverter Types.res:42:4 path:+Types\n  addValueDeclaration +mutuallyRecursiveConverter Types.res:49:4 path:+Types\n  addValueDeclaration +testFunctionOnOptionsAsArgument Types.res:52:4 path:+Types\n  addValueDeclaration +stringT Types.res:60:4 path:+Types\n  addValueDeclaration +jsStringT Types.res:63:4 path:+Types\n  addValueDeclaration +jsString2T Types.res:66:4 path:+Types\n  addValueDeclaration +jsonStringify Types.res:78:4 path:+Types\n  addValueDeclaration +testConvertNull Types.res:92:4 path:+Types\n  addValueDeclaration +testMarshalFields Types.res:112:4 path:+Types\n  addValueDeclaration +setMatch Types.res:128:4 path:+Types\n  addValueDeclaration +testInstantiateTypeParameter Types.res:138:4 path:+Types\n  addValueDeclaration +currentTime Types.res:147:4 path:+Types\n  addValueDeclaration +i64Const Types.res:156:4 path:+Types\n  addValueDeclaration +optFunction Types.res:159:4 path:+Types\n  addVariantCaseDeclaration A Types.res:12:2 path:+Types.typeWithVars\n  addVariantCaseDeclaration B Types.res:13:2 path:+Types.typeWithVars\n  addValueReference Types.res:23:8 --> Types.res:23:16\n  addValueReference Types.res:23:8 --> Types.res:23:16\n  addValueReference Types.res:23:8 --> Types.res:23:8\n  addValueReference Types.res:23:8 --> Types.res:23:16\n  addValueReference Types.res:23:8 --> Types.res:23:8\n  addValueReference 
Types.res:23:8 --> Types.res:24:2\n  addRecordLabelDeclaration self Types.res:31:26 path:+Types.selfRecursive\n  addRecordLabelDeclaration b Types.res:34:31 path:+Types.mutuallyRecursiveA\n  addRecordLabelDeclaration a Types.res:35:26 path:+Types.mutuallyRecursiveB\n  addValueReference Types.res:42:4 --> Types.res:42:31\n  addTypeReference Types.res:42:30 --> Types.res:31:26\n  addValueReference Types.res:49:4 --> Types.res:49:35\n  addTypeReference Types.res:49:34 --> Types.res:34:31\n  addValueReference Types.res:52:4 --> Types.res:52:39\n  addValueReference Types.res:52:4 --> Types.res:52:54\n  addVariantCaseDeclaration A Types.res:56:2 path:+Types.opaqueVariant\n  addVariantCaseDeclaration B Types.res:57:2 path:+Types.opaqueVariant\n  addRecordLabelDeclaration i Types.res:87:2 path:+Types.record\n  addRecordLabelDeclaration s Types.res:88:2 path:+Types.record\n  addValueReference Types.res:92:4 --> Types.res:92:23\n  addValueReference Types.res:112:4 --> Types.res:112:39\n  addValueReference Types.res:128:4 --> Types.res:128:16\n  addRecordLabelDeclaration id Types.res:133:19 path:+Types.someRecord\n  addValueReference Types.res:138:4 --> Types.res:138:36\n  addValueDeclaration +x Types.res:166:6 path:+Types.ObjectId\n  Scanning Unboxed.cmt Source:Unboxed.res\n  addValueDeclaration +testV1 Unboxed.res:8:4 path:+Unboxed\n  addValueDeclaration +r2Test Unboxed.res:17:4 path:+Unboxed\n  addVariantCaseDeclaration A Unboxed.res:2:10 path:+Unboxed.v1\n  addVariantCaseDeclaration A Unboxed.res:5:10 path:+Unboxed.v2\n  addValueReference Unboxed.res:8:4 --> Unboxed.res:8:14\n  addRecordLabelDeclaration x Unboxed.res:11:11 path:+Unboxed.r1\n  addRecordLabelDeclaration B.g Unboxed.res:14:13 path:+Unboxed.r2\n  addVariantCaseDeclaration B Unboxed.res:14:10 path:+Unboxed.r2\n  addValueReference Unboxed.res:17:4 --> Unboxed.res:17:14\n  Scanning Uncurried.cmt Source:Uncurried.res\n  addValueDeclaration +uncurried0 Uncurried.res:14:4 path:+Uncurried\n  addValueDeclaration 
+uncurried1 Uncurried.res:17:4 path:+Uncurried\n  addValueDeclaration +uncurried2 Uncurried.res:20:4 path:+Uncurried\n  addValueDeclaration +uncurried3 Uncurried.res:23:4 path:+Uncurried\n  addValueDeclaration +curried3 Uncurried.res:26:4 path:+Uncurried\n  addValueDeclaration +callback Uncurried.res:29:4 path:+Uncurried\n  addValueDeclaration +callback2 Uncurried.res:35:4 path:+Uncurried\n  addValueDeclaration +callback2U Uncurried.res:38:4 path:+Uncurried\n  addValueDeclaration +sumU Uncurried.res:41:4 path:+Uncurried\n  addValueDeclaration +sumU2 Uncurried.res:44:4 path:+Uncurried\n  addValueDeclaration +sumCurried Uncurried.res:47:4 path:+Uncurried\n  addValueDeclaration +sumLblCurried Uncurried.res:53:4 path:+Uncurried\n  addValueReference Uncurried.res:17:4 --> Uncurried.res:17:20\n  addValueReference Uncurried.res:20:4 --> Uncurried.res:20:20\n  addValueReference Uncurried.res:20:4 --> Uncurried.res:20:23\n  addValueReference Uncurried.res:23:4 --> Uncurried.res:23:20\n  addValueReference Uncurried.res:23:4 --> Uncurried.res:23:23\n  addValueReference Uncurried.res:23:4 --> Uncurried.res:23:26\n  addValueReference Uncurried.res:26:4 --> Uncurried.res:26:16\n  addValueReference Uncurried.res:26:4 --> Uncurried.res:26:19\n  addValueReference Uncurried.res:26:4 --> Uncurried.res:26:22\n  addValueReference Uncurried.res:29:4 --> Uncurried.res:29:15\n  addRecordLabelDeclaration login Uncurried.res:31:13 path:+Uncurried.auth\n  addRecordLabelDeclaration loginU Uncurried.res:32:14 path:+Uncurried.authU\n  addTypeReference Uncurried.res:35:24 --> Uncurried.res:31:13\n  addValueReference Uncurried.res:35:4 --> Uncurried.res:35:16\n  addTypeReference Uncurried.res:38:25 --> Uncurried.res:32:14\n  addValueReference Uncurried.res:38:4 --> Uncurried.res:38:17\n  addValueReference Uncurried.res:41:4 --> Uncurried.res:41:17\n  addValueReference Uncurried.res:41:4 --> Uncurried.res:41:14\n  addValueReference Uncurried.res:41:4 --> Uncurried.res:41:17\n  addValueReference 
Uncurried.res:44:4 --> Uncurried.res:44:20\n  addValueReference Uncurried.res:44:4 --> Uncurried.res:44:15\n  addValueReference Uncurried.res:44:4 --> Uncurried.res:44:20\n  addValueReference Uncurried.res:47:4 --> Uncurried.res:49:2\n  addValueReference Uncurried.res:47:4 --> Uncurried.res:47:17\n  addValueReference Uncurried.res:47:4 --> Uncurried.res:49:2\n  addValueReference Uncurried.res:47:4 --> Uncurried.res:47:17\n  addValueReference Uncurried.res:53:4 --> Uncurried.res:55:3\n  addValueReference Uncurried.res:53:4 --> Uncurried.res:53:32\n  addValueReference Uncurried.res:53:4 --> Uncurried.res:55:3\n  addValueReference Uncurried.res:53:4 --> Uncurried.res:53:21\n  addValueReference Uncurried.res:53:4 --> Uncurried.res:53:32\n  Scanning Unison.cmt Source:Unison.res\n  addValueDeclaration +group Unison.res:17:4 path:+Unison\n  addValueDeclaration +fits Unison.res:19:8 path:+Unison\n  addValueDeclaration +toString Unison.res:26:8 path:+Unison\n  addVariantCaseDeclaration IfNeed Unison.res:4:2 path:+Unison.break\n  addVariantCaseDeclaration Never Unison.res:5:2 path:+Unison.break\n  addVariantCaseDeclaration Always Unison.res:6:2 path:+Unison.break\n  addRecordLabelDeclaration break Unison.res:9:2 path:+Unison.t\n  addRecordLabelDeclaration doc Unison.res:10:2 path:+Unison.t\n  addVariantCaseDeclaration Empty Unison.res:14:2 path:+Unison.stack\n  addVariantCaseDeclaration Cons Unison.res:15:2 path:+Unison.stack\n  addValueReference Unison.res:17:4 --> Unison.res:17:20\n  addTypeReference Unison.res:17:20 --> Unison.res:4:2\n  addValueReference Unison.res:17:4 --> Unison.res:17:13\n  addValueReference Unison.res:17:4 --> Unison.res:17:28\n  addValueReference Unison.res:19:8 --> Unison.res:19:16\n  addValueReference Unison.res:19:8 --> Unison.res:19:16\n  addValueReference Unison.res:19:8 --> Unison.res:23:10\n  addValueReference Unison.res:19:8 --> Unison.res:23:16\n  addValueReference Unison.res:19:8 --> Unison.res:19:8\n  addTypeReference Unison.res:23:9 --> 
Unison.res:10:2\n  addValueReference Unison.res:19:8 --> Unison.res:19:19\n  addValueReference Unison.res:26:8 --> Unison.res:26:20\n  addValueReference Unison.res:26:8 --> Unison.res:28:23\n  addValueReference Unison.res:26:8 --> Unison.res:19:8\n  addValueReference Unison.res:26:8 --> Unison.res:28:23\n  addValueReference Unison.res:26:8 --> Unison.res:26:20\n  addValueReference Unison.res:26:8 --> Unison.res:26:8\n  addValueReference Unison.res:26:8 --> Unison.res:28:17\n  addValueReference Unison.res:26:8 --> Unison.res:28:23\n  addValueReference Unison.res:26:8 --> Unison.res:26:20\n  addValueReference Unison.res:26:8 --> Unison.res:26:8\n  addValueReference Unison.res:26:8 --> Unison.res:28:17\n  addValueReference Unison.res:26:8 --> Unison.res:28:23\n  addValueReference Unison.res:26:8 --> Unison.res:26:20\n  addValueReference Unison.res:26:8 --> Unison.res:26:8\n  addValueReference Unison.res:26:8 --> Unison.res:28:10\n  addTypeReference Unison.res:28:9 --> Unison.res:9:2\n  addTypeReference Unison.res:28:9 --> Unison.res:10:2\n  addValueReference Unison.res:26:8 --> Unison.res:26:28\n  addTypeReference Unison.res:37:20 --> Unison.res:14:2\n  addValueReference Unison.res:37:0 --> Unison.res:26:8\n  addTypeReference Unison.res:38:20 --> Unison.res:15:2\n  DeadOptionalArgs.addReferences group called with optional argNames:break argNamesMaybe: Unison.res:38:25\n  addTypeReference Unison.res:38:38 --> Unison.res:5:2\n  addValueReference Unison.res:38:25 --> Unison.res:17:4\n  addTypeReference Unison.res:38:53 --> Unison.res:14:2\n  addValueReference Unison.res:38:0 --> Unison.res:26:8\n  addTypeReference Unison.res:39:20 --> Unison.res:15:2\n  DeadOptionalArgs.addReferences group called with optional argNames:break argNamesMaybe: Unison.res:39:25\n  addTypeReference Unison.res:39:38 --> Unison.res:6:2\n  addValueReference Unison.res:39:25 --> Unison.res:17:4\n  addTypeReference Unison.res:39:52 --> Unison.res:14:2\n  addValueReference Unison.res:39:0 --> 
Unison.res:26:8\n  Scanning UseImportJsValue.cmt Source:UseImportJsValue.res\n  addValueDeclaration +useGetProp UseImportJsValue.res:2:4 path:+UseImportJsValue\n  addValueDeclaration +useTypeImportedInOtherModule UseImportJsValue.res:5:4 path:+UseImportJsValue\n  addValueReference UseImportJsValue.res:2:4 --> UseImportJsValue.res:2:18\n  addValueReference UseImportJsValue.res:2:4 --> ImportJsValue.res:37:2\n  addValueReference UseImportJsValue.res:5:4 --> UseImportJsValue.res:5:36\n  Scanning Variants.cmt Source:Variants.res\n  addValueDeclaration +isWeekend Variants.res:13:4 path:+Variants\n  addValueDeclaration +monday Variants.res:21:4 path:+Variants\n  addValueDeclaration +saturday Variants.res:23:4 path:+Variants\n  addValueDeclaration +sunday Variants.res:25:4 path:+Variants\n  addValueDeclaration +onlySunday Variants.res:28:4 path:+Variants\n  addValueDeclaration +swap Variants.res:31:4 path:+Variants\n  addValueDeclaration +testConvert Variants.res:45:4 path:+Variants\n  addValueDeclaration +fortytwoOK Variants.res:48:4 path:+Variants\n  addValueDeclaration +fortytwoBAD Variants.res:52:4 path:+Variants\n  addValueDeclaration +testConvert2 Variants.res:64:4 path:+Variants\n  addValueDeclaration +testConvert3 Variants.res:76:4 path:+Variants\n  addValueDeclaration +testConvert2to3 Variants.res:80:4 path:+Variants\n  addValueDeclaration +id1 Variants.res:89:4 path:+Variants\n  addValueDeclaration +id2 Variants.res:92:4 path:+Variants\n  addValueDeclaration +polyWithOpt Variants.res:98:4 path:+Variants\n  addValueDeclaration +restResult1 Variants.res:112:4 path:+Variants\n  addValueDeclaration +restResult2 Variants.res:115:4 path:+Variants\n  addValueDeclaration +restResult3 Variants.res:118:4 path:+Variants\n  addValueReference Variants.res:13:4 --> Variants.res:13:17\n  addValueReference Variants.res:31:4 --> Variants.res:31:11\n  addValueReference Variants.res:45:4 --> Variants.res:45:19\n  addValueReference Variants.res:64:4 --> Variants.res:64:20\n  
addValueReference Variants.res:76:4 --> Variants.res:76:20\n  addValueReference Variants.res:80:4 --> Variants.res:80:23\n  addValueReference Variants.res:89:4 --> Variants.res:89:11\n  addValueReference Variants.res:92:4 --> Variants.res:92:11\n  addVariantCaseDeclaration Type Variants.res:95:13 path:+Variants.type_\n  addValueReference Variants.res:98:4 --> Variants.res:98:18\n  addValueReference Variants.res:98:4 --> Variants.res:98:18\n  addValueReference Variants.res:98:4 --> Variants.res:98:18\n  addVariantCaseDeclaration Ok Variants.res:102:2 path:+Variants.result1\n  addVariantCaseDeclaration Error Variants.res:103:2 path:+Variants.result1\n  addValueReference Variants.res:112:4 --> Variants.res:112:19\n  addValueReference Variants.res:115:4 --> Variants.res:115:19\n  addValueReference Variants.res:118:4 --> Variants.res:118:19\n  Scanning VariantsWithPayload.cmt Source:VariantsWithPayload.res\n  addValueDeclaration +testWithPayload VariantsWithPayload.res:16:4 path:+VariantsWithPayload\n  addValueDeclaration +printVariantWithPayload VariantsWithPayload.res:19:4 path:+VariantsWithPayload\n  addValueDeclaration +testManyPayloads VariantsWithPayload.res:37:4 path:+VariantsWithPayload\n  addValueDeclaration +printManyPayloads VariantsWithPayload.res:40:4 path:+VariantsWithPayload\n  addValueDeclaration +testSimpleVariant VariantsWithPayload.res:54:4 path:+VariantsWithPayload\n  addValueDeclaration +testVariantWithPayloads VariantsWithPayload.res:65:4 path:+VariantsWithPayload\n  addValueDeclaration +printVariantWithPayloads VariantsWithPayload.res:68:4 path:+VariantsWithPayload\n  addValueDeclaration +testVariant1Int VariantsWithPayload.res:93:4 path:+VariantsWithPayload\n  addValueDeclaration +testVariant1Object VariantsWithPayload.res:99:4 path:+VariantsWithPayload\n  addRecordLabelDeclaration x VariantsWithPayload.res:2:2 path:+VariantsWithPayload.payload\n  addRecordLabelDeclaration y VariantsWithPayload.res:3:2 path:+VariantsWithPayload.payload\n  
addValueReference VariantsWithPayload.res:16:4 --> VariantsWithPayload.res:16:23\n  addTypeReference VariantsWithPayload.res:26:57 --> VariantsWithPayload.res:2:2\n  addValueReference VariantsWithPayload.res:19:4 --> VariantsWithPayload.res:26:7\n  addTypeReference VariantsWithPayload.res:26:74 --> VariantsWithPayload.res:3:2\n  addValueReference VariantsWithPayload.res:19:4 --> VariantsWithPayload.res:26:7\n  addValueReference VariantsWithPayload.res:19:4 --> VariantsWithPayload.res:19:31\n  addValueReference VariantsWithPayload.res:37:4 --> VariantsWithPayload.res:37:24\n  addValueReference VariantsWithPayload.res:40:4 --> VariantsWithPayload.res:42:9\n  addValueReference VariantsWithPayload.res:40:4 --> VariantsWithPayload.res:43:9\n  addValueReference VariantsWithPayload.res:40:4 --> VariantsWithPayload.res:43:13\n  addTypeReference VariantsWithPayload.res:44:55 --> VariantsWithPayload.res:2:2\n  addValueReference VariantsWithPayload.res:40:4 --> VariantsWithPayload.res:44:11\n  addTypeReference VariantsWithPayload.res:44:72 --> VariantsWithPayload.res:3:2\n  addValueReference VariantsWithPayload.res:40:4 --> VariantsWithPayload.res:44:11\n  addValueReference VariantsWithPayload.res:40:4 --> VariantsWithPayload.res:40:25\n  addVariantCaseDeclaration A VariantsWithPayload.res:49:2 path:+VariantsWithPayload.simpleVariant\n  addVariantCaseDeclaration B VariantsWithPayload.res:50:2 path:+VariantsWithPayload.simpleVariant\n  addVariantCaseDeclaration C VariantsWithPayload.res:51:2 path:+VariantsWithPayload.simpleVariant\n  addValueReference VariantsWithPayload.res:54:4 --> VariantsWithPayload.res:54:25\n  addVariantCaseDeclaration A VariantsWithPayload.res:58:2 path:+VariantsWithPayload.variantWithPayloads\n  addVariantCaseDeclaration B VariantsWithPayload.res:59:2 path:+VariantsWithPayload.variantWithPayloads\n  addVariantCaseDeclaration C VariantsWithPayload.res:60:2 path:+VariantsWithPayload.variantWithPayloads\n  addVariantCaseDeclaration D 
VariantsWithPayload.res:61:2 path:+VariantsWithPayload.variantWithPayloads\n  addVariantCaseDeclaration E VariantsWithPayload.res:62:2 path:+VariantsWithPayload.variantWithPayloads\n  addValueReference VariantsWithPayload.res:65:4 --> VariantsWithPayload.res:65:31\n  addValueReference VariantsWithPayload.res:68:4 --> VariantsWithPayload.res:71:6\n  addValueReference VariantsWithPayload.res:68:4 --> VariantsWithPayload.res:72:6\n  addValueReference VariantsWithPayload.res:68:4 --> VariantsWithPayload.res:72:9\n  addValueReference VariantsWithPayload.res:68:4 --> VariantsWithPayload.res:77:7\n  addValueReference VariantsWithPayload.res:68:4 --> VariantsWithPayload.res:77:10\n  addValueReference VariantsWithPayload.res:68:4 --> VariantsWithPayload.res:82:6\n  addValueReference VariantsWithPayload.res:68:4 --> VariantsWithPayload.res:82:9\n  addValueReference VariantsWithPayload.res:68:4 --> VariantsWithPayload.res:82:12\n  addValueReference VariantsWithPayload.res:68:4 --> VariantsWithPayload.res:68:31\n  addVariantCaseDeclaration R VariantsWithPayload.res:90:19 path:+VariantsWithPayload.variant1Int\n  addValueReference VariantsWithPayload.res:93:4 --> VariantsWithPayload.res:93:23\n  addVariantCaseDeclaration R VariantsWithPayload.res:96:22 path:+VariantsWithPayload.variant1Object\n  addValueReference VariantsWithPayload.res:99:4 --> VariantsWithPayload.res:99:26\n  addValueReference TestDeadExn.res:1:7 --> DeadExn.res:1:0\n  \nFile References\n\n  AutoAnnotate.res -->> \n  BootloaderResource.res -->> \n  BucklescriptAnnotations.res -->> \n  ComponentAsProp.res -->> React.res, ReactDOMRe.res\n  CreateErrorHandler1.res -->> ErrorHandler.resi\n  CreateErrorHandler2.res -->> \n  DeadCodeImplementation.res -->> \n  DeadCodeInterface.res -->> \n  DeadExn.res -->> \n  DeadExn.resi -->> \n  DeadRT.res -->> \n  DeadRT.resi -->> \n  DeadTest.res -->> React.res, BootloaderResource.res, DeadValueTest.resi, DynamicallyLoadedComponent.res, ImmutableArray.resi, JSResource.res\n  
DeadTestBlacklist.res -->> \n  DeadTestWithInterface.res -->> \n  DeadTypeTest.res -->> \n  DeadTypeTest.resi -->> DeadTypeTest.res\n  DeadValueTest.res -->> \n  DeadValueTest.resi -->> DeadValueTest.res\n  Docstrings.res -->> \n  DynamicallyLoadedComponent.res -->> React.res\n  EmptyArray.res -->> React.res, ReactDOMRe.res\n  ErrorHandler.res -->> \n  ErrorHandler.resi -->> ErrorHandler.res\n  EverythingLiveHere.res -->> \n  FC.res -->> \n  FirstClassModules.res -->> \n  FirstClassModulesInterface.res -->> \n  FirstClassModulesInterface.resi -->> FirstClassModulesInterface.res\n  Hooks.res -->> React.res, ReactDOM.res, ReactDOMRe.res, ImportHookDefault.res, ImportHooks.res\n  IgnoreInterface.res -->> \n  IgnoreInterface.resi -->> \n  ImmutableArray.res -->> \n  ImmutableArray.resi -->> ImmutableArray.res\n  ImportHookDefault.res -->> \n  ImportHooks.res -->> \n  ImportIndex.res -->> \n  ImportJsValue.res -->> \n  ImportMyBanner.res -->> \n  InnerModuleTypes.res -->> \n  InnerModuleTypes.resi -->> \n  JSResource.res -->> \n  JsxV4.res -->> React.res\n  LetPrivate.res -->> \n  ModuleAliases.res -->> \n  ModuleAliases2.res -->> \n  ModuleExceptionBug.res -->> \n  NestedModules.res -->> \n  NestedModulesInSignature.res -->> \n  NestedModulesInSignature.resi -->> NestedModulesInSignature.res\n  Newsyntax.res -->> \n  Newton.res -->> \n  Opaque.res -->> \n  OptArg.res -->> \n  OptArg.resi -->> OptArg.res\n  Records.res -->> \n  References.res -->> \n  RepeatedLabel.res -->> \n  RequireCond.res -->> \n  Shadow.res -->> \n  TestDeadExn.res -->> DeadExn.res\n  TestEmitInnerModules.res -->> \n  TestFirstClassModules.res -->> \n  TestImmutableArray.res -->> ImmutableArray.resi\n  TestImport.res -->> \n  TestInnedModuleTypes.res -->> \n  TestModuleAliases.res -->> \n  TestOptArg.res -->> OptArg.resi\n  TestPromise.res -->> \n  ToSuppress.res -->> \n  TransitiveType1.res -->> \n  TransitiveType2.res -->> \n  TransitiveType3.res -->> \n  Tuples.res -->> \n  TypeParams1.res -->> 
\n  TypeParams2.res -->> \n  TypeParams3.res -->> \n  Types.res -->> \n  Unboxed.res -->> \n  Uncurried.res -->> \n  Unison.res -->> \n  UseImportJsValue.res -->> ImportJsValue.res\n  Variants.res -->> \n  VariantsWithPayload.res -->> \n  Dead VariantCase +AutoAnnotate.annotatedVariant.R4: 0 references () [0]\n  Dead VariantCase +AutoAnnotate.annotatedVariant.R2: 0 references () [0]\n  Dead RecordLabel +AutoAnnotate.r4.r4: 0 references () [0]\n  Dead RecordLabel +AutoAnnotate.r3.r3: 0 references () [0]\n  Dead RecordLabel +AutoAnnotate.r2.r2: 0 references () [0]\n  Dead RecordLabel +AutoAnnotate.record.variant: 0 references () [0]\n  Dead VariantCase +AutoAnnotate.variant.R: 0 references () [0]\n  Dead Value +BucklescriptAnnotations.+bar: 0 references () [1]\n  Dead Value +BucklescriptAnnotations.+f: 0 references () [0]\n  Live Value +ComponentAsProp.+make: 0 references () [0]\n  Live Value +CreateErrorHandler1.Error1.+notification: 1 references (ErrorHandler.resi:3:2) [0]\n  Live Value +CreateErrorHandler2.Error2.+notification: 1 references (ErrorHandler.resi:3:2) [0]\n  Live Value +DeadCodeImplementation.M.+x: 1 references (DeadCodeInterface.res:2:2) [0]\n  Dead Value +DeadRT.+emitModuleAccessPath: 0 references () [0]\n  Live VariantCase +DeadRT.moduleAccessPath.Kaboom: 1 references (DeadRT.res:11:16) [0]\n  Live VariantCase DeadRT.moduleAccessPath.Root: 1 references (DeadTest.res:106:16) [1]\n  Live VariantCase +DeadRT.moduleAccessPath.Root: 1 references (DeadRT.resi:2:2) [0]\n  Live VariantCase DeadRT.moduleAccessPath.Kaboom: 1 references (DeadRT.res:3:2) [0]\n  Dead RecordLabel +DeadTest.inlineRecord3.IR3.b: 0 references () [0]\n  Dead RecordLabel +DeadTest.inlineRecord3.IR3.a: 0 references () [0]\n  Dead VariantCase +DeadTest.inlineRecord3.IR3: 0 references () [0]\n  Dead RecordLabel +DeadTest.inlineRecord2.IR2.b: 0 references () [0]\n  Dead RecordLabel +DeadTest.inlineRecord2.IR2.a: 0 references () [0]\n  Dead VariantCase +DeadTest.inlineRecord2.IR2: 0 
references () [0]\n  Dead Value +DeadTest.+_: 0 references () [0]\n  Live Value +DeadTest.+ira: 1 references (DeadTest.res:187:27) [0]\n  Live RecordLabel +DeadTest.inlineRecord.IR.e: 0 references () [0]\n  Dead RecordLabel +DeadTest.inlineRecord.IR.d: 0 references () [0]\n  Live RecordLabel +DeadTest.inlineRecord.IR.c: 1 references (DeadTest.res:187:7) [0]\n  Live RecordLabel +DeadTest.inlineRecord.IR.b: 1 references (DeadTest.res:187:35) [0]\n  Dead RecordLabel +DeadTest.inlineRecord.IR.a: 0 references () [0]\n  Live VariantCase +DeadTest.inlineRecord.IR: 1 references (DeadTest.res:187:20) [0]\n  Dead Value +DeadTest.+_: 0 references () [0]\n  Live Value +DeadTest.+deadIncorrect: 1 references (DeadTest.res:180:8) [0]\n  Dead RecordLabel +DeadTest.rc.a: 0 references () [0]\n  Dead Value +DeadTest.+funWithInnerVars: 0 references () [1]\n  Dead Value +DeadTest.+y: 0 references () [0]\n  Dead Value +DeadTest.+x: 0 references () [0]\n  Live VariantCase +DeadTest.WithInclude.t.A: 1 references (DeadTest.res:166:7) [1]\n  Live VariantCase +DeadTest.WithInclude.t.A: 1 references (DeadTest.res:158:11) [0]\n  Live Value +DeadTest.GloobLive.+globallyLive3: 0 references () [0]\n  Live Value +DeadTest.GloobLive.+globallyLive2: 0 references () [0]\n  Live Value +DeadTest.GloobLive.+globallyLive1: 0 references () [0]\n  Dead Value +DeadTest.+stringLengthNoSideEffects: 0 references () [0]\n  Dead Value +DeadTest.+theSideEffectIsLogging: 0 references () [0]\n  Live Value +DeadTest.+make: 1 references (DeadTest.res:143:16) [0]\n  Dead Value +DeadTest.+deadRef: 0 references () [0]\n  Dead Value +DeadTest.+minute: 0 references () [0]\n  Dead Value +DeadTest.+second: 0 references () [0]\n  Dead Value +DeadTest.+a3: 0 references () [0]\n  Dead Value +DeadTest.+a2: 0 references () [0]\n  Dead Value +DeadTest.+a1: 0 references () [0]\n  Dead Value +DeadTest.+zzz: 0 references () [0]\n  Dead Value +DeadTest.LazyDynamicallyLoadedComponent2.+make: 0 references () [0]\n  Dead Value 
+DeadTest.LazyDynamicallyLoadedComponent2.+makeProps: 0 references () [0]\n  Dead Value +DeadTest.LazyDynamicallyLoadedComponent2.+reasonResource: 0 references () [0]\n  Dead Value +DeadTest.+withDefaultValue: 0 references () [0]\n  Dead Value +DeadTest.+bar: 0 references () [0]\n  Dead Value +DeadTest.+foo: 0 references () [1]\n  Dead Value +DeadTest.+cb: 0 references () [0]\n  Dead Value +DeadTest.+cb: 0 references () [0]\n  Dead Value +DeadTest.+recWithCallback: 0 references () [0]\n  Dead Value +DeadTest.+rec2: 0 references () [0]\n  Dead Value +DeadTest.+rec1: 0 references () [0]\n  Dead Value +DeadTest.+split_map: 0 references () [0]\n  Dead Value +DeadTest.+unusedRec: 0 references () [0]\n  Dead Value +DeadTest.MM.+valueOnlyInImplementation: 0 references () [0]\n  Live Value +DeadTest.MM.+x: 1 references (DeadTest.res:69:9) [1]\n  Live Value +DeadTest.MM.+x: 1 references (DeadTest.res:60:2) [0]\n  Dead Value +DeadTest.MM.+y: 0 references () [1]\n  Live Value +DeadTest.MM.+y: 1 references (DeadTest.res:64:6) [0]\n  Dead Value +DeadTest.UnderscoreInside.+_: 0 references () [0]\n  Dead Value +DeadTest.+_: 0 references () [0]\n  Dead Value +DeadTest.+_: 0 references () [0]\n  Live RecordLabel +DeadTest.record.yyy: 1 references (DeadTest.res:53:9) [0]\n  Live RecordLabel +DeadTest.record.xxx: 1 references (DeadTest.res:52:13) [0]\n  Dead Value +DeadTest.+_: 0 references () [0]\n  Dead Value +DeadTest.+_: 0 references () [0]\n  Live Value +DeadTest.VariantUsedOnlyInImplementation.+a: 1 references (DeadTest.res:42:17) [1]\n  Live Value +DeadTest.VariantUsedOnlyInImplementation.+a: 1 references (DeadTest.res:36:2) [0]\n  Live VariantCase +DeadTest.VariantUsedOnlyInImplementation.t.A: 1 references (DeadTest.res:39:10) [0]\n  Live VariantCase +DeadTest.VariantUsedOnlyInImplementation.t.A: 1 references (DeadTest.res:38:11) [0]\n  Dead Value +DeadTest.M.+thisSignatureItemIsDead: 0 references () [1]\n  Dead Value +DeadTest.M.+thisSignatureItemIsDead: 0 references () 
[0]\n  Dead Value +DeadTest.Inner.+thisIsAlsoMarkedDead: 0 references () [0]\n  Live Value +DeadTest.+thisIsMarkedLive: 0 references () [0]\n  Live Value +DeadTest.+thisIsKeptAlive: 1 references (DeadTest.res:20:4) [0]\n  Dead Value +DeadTest.+thisIsMarkedDead: 0 references () [0]\n  Live Value +DeadTest.+thisIsUsedTwice: 2 references (DeadTest.res:11:7, DeadTest.res:12:7) [0]\n  Live Value +DeadTest.+thisIsUsedOnce: 1 references (DeadTest.res:8:7) [0]\n  Live Value +DeadTest.+fortyTwoButExported: 0 references () [0]\n  Dead Value +DeadTest.+fortytwo: 0 references () [0]\n  Dead Value +DeadTestBlacklist.+x: 0 references () [0]\n  Dead Value +DeadTestWithInterface.Ext_buffer.+x: 0 references () [1]\n  Dead Value +DeadTestWithInterface.Ext_buffer.+x: 0 references () [0]\n  Dead VariantCase DeadTypeTest.deadType.InNeither: 0 references () [0]\n  Live VariantCase +DeadTypeTest.deadType.InBoth: 1 references (DeadTypeTest.res:13:8) [1]\n  Live VariantCase DeadTypeTest.deadType.InBoth: 2 references (DeadTest.res:45:8, DeadTypeTest.res:9:2) [0]\n  Live VariantCase DeadTypeTest.deadType.OnlyInInterface: 1 references (DeadTest.res:44:8) [0]\n  Live VariantCase +DeadTypeTest.deadType.OnlyInImplementation: 1 references (DeadTypeTest.res:12:8) [1]\n  Live VariantCase DeadTypeTest.deadType.OnlyInImplementation: 1 references (DeadTypeTest.res:7:2) [0]\n  Dead Value DeadTypeTest.+a: 0 references () [0]\n  Dead VariantCase DeadTypeTest.t.B: 0 references () [0]\n  Live VariantCase +DeadTypeTest.t.A: 1 references (DeadTypeTest.res:4:8) [1]\n  Live VariantCase DeadTypeTest.t.A: 1 references (DeadTypeTest.res:2:2) [0]\n  Live Value +Docstrings.+unitArgWithConversionU: 0 references () [0]\n  Live Value +Docstrings.+unitArgWithConversion: 0 references () [0]\n  Dead VariantCase +Docstrings.t.B: 0 references () [0]\n  Live VariantCase +Docstrings.t.A: 2 references (Docstrings.res:64:34, Docstrings.res:67:39) [0]\n  Live Value +Docstrings.+unitArgWithoutConversionU: 0 references () [0]\n  
Live Value +Docstrings.+unitArgWithoutConversion: 0 references () [0]\n  Live Value +Docstrings.+grouped: 0 references () [0]\n  Live Value +Docstrings.+unnamed2U: 0 references () [0]\n  Live Value +Docstrings.+unnamed2: 0 references () [0]\n  Live Value +Docstrings.+unnamed1U: 0 references () [0]\n  Live Value +Docstrings.+unnamed1: 0 references () [0]\n  Live Value +Docstrings.+useParamU: 0 references () [0]\n  Live Value +Docstrings.+useParam: 0 references () [0]\n  Live Value +Docstrings.+treeU: 0 references () [0]\n  Live Value +Docstrings.+twoU: 0 references () [0]\n  Live Value +Docstrings.+oneU: 0 references () [0]\n  Live Value +Docstrings.+tree: 0 references () [0]\n  Live Value +Docstrings.+two: 0 references () [0]\n  Live Value +Docstrings.+one: 0 references () [0]\n  Live Value +Docstrings.+signMessage: 0 references () [0]\n  Live Value +Docstrings.+flat: 0 references () [0]\n  Live Value +EmptyArray.Z.+make: 1 references (EmptyArray.res:10:9) [0]\n  Dead Value +EverythingLiveHere.+z: 0 references () [0]\n  Dead Value +EverythingLiveHere.+y: 0 references () [0]\n  Dead Value +EverythingLiveHere.+x: 0 references () [0]\n  Live Value +FC.+foo: 1 references (FC.res:11:7) [0]\n  Live Value +FirstClassModules.+someFunctorAsFunction: 0 references () [0]\n  Live Value +FirstClassModules.SomeFunctor.+ww: 1 references (FirstClassModules.res:57:2) [0]\n  Live Value +FirstClassModules.+testConvert: 0 references () [0]\n  Live Value +FirstClassModules.+firstClassModule: 0 references () [0]\n  Live Value +FirstClassModules.M.+x: 1 references (FirstClassModules.res:2:2) [0]\n  Live Value +FirstClassModules.M.Z.+u: 1 references (FirstClassModules.res:37:4) [0]\n  Live Value +FirstClassModules.M.InnerModule3.+k3: 1 references (FirstClassModules.res:14:4) [0]\n  Live Value +FirstClassModules.M.InnerModule2.+k: 1 references (FirstClassModules.res:10:4) [0]\n  Live Value +FirstClassModules.M.+y: 1 references (FirstClassModules.res:20:2) [0]\n  Dead Value 
FirstClassModulesInterface.+r: 0 references () [0]\n  Dead RecordLabel FirstClassModulesInterface.record.y: 0 references () [0]\n  Dead RecordLabel FirstClassModulesInterface.record.x: 0 references () [0]\n  Live Value +Hooks.+aComponentWithChildren: 0 references () [0]\n  Live Value +Hooks.RenderPropRequiresConversion.+car: 1 references (Hooks.res:109:30) [0]\n  Live Value +Hooks.RenderPropRequiresConversion.+make: 0 references () [0]\n  Live Value +Hooks.+functionReturningReactElement: 0 references () [0]\n  Live Value +Hooks.+polymorphicComponent: 0 references () [0]\n  Live Value +Hooks.+input: 0 references () [0]\n  Live RecordLabel +Hooks.r.x: 1 references (Hooks.res:85:87) [0]\n  Live Value +Hooks.+testForwardRef: 0 references () [0]\n  Dead Value +Hooks.+_: 0 references () [0]\n  Live Value +Hooks.+makeWithRef: 1 references (Hooks.res:80:4) [0]\n  Live Value +Hooks.+componentWithRenamedArgs: 0 references () [0]\n  Live Value +Hooks.+functionWithRenamedArgs: 0 references () [0]\n  Live Value +Hooks.NoProps.+make: 0 references () [0]\n  Live Value +Hooks.Inner.Inner2.+anotherComponent: 0 references () [0]\n  Live Value +Hooks.Inner.Inner2.+make: 0 references () [0]\n  Live Value +Hooks.Inner.+anotherComponent: 0 references () [0]\n  Live Value +Hooks.Inner.+make: 0 references () [0]\n  Live Value +Hooks.+anotherComponent: 0 references () [0]\n  Live Value +Hooks.+default: 0 references () [0]\n  Live Value +Hooks.+make: 1 references (Hooks.res:25:4) [0]\n  Live RecordLabel +Hooks.vehicle.name: 13 references (Hooks.res:10:29, Hooks.res:30:41, Hooks.res:35:66, Hooks.res:38:78, Hooks.res:42:68, Hooks.res:46:45, Hooks.res:60:2, Hooks.res:60:14, Hooks.res:66:15, Hooks.res:66:27, Hooks.res:74:73, Hooks.res:100:58, Hooks.res:115:41) [0]\n  Live Value +ImportIndex.+make: 0 references () [0]\n  Dead Value +ImportMyBanner.+make: 0 references () [0]\n  Live Value +ImportMyBanner.+make: 0 references () [0]\n  Dead RecordLabel +ImportMyBanner.message.text: 0 references () 
[0]\n  Live VariantCase InnerModuleTypes.I.t.Foo: 1 references (TestInnedModuleTypes.res:1:8) [1]\n  Live VariantCase +InnerModuleTypes.I.t.Foo: 1 references (InnerModuleTypes.resi:2:11) [0]\n  Live Value +JsxV4.C.+make: 1 references (JsxV4.res:7:9) [0]\n  Live Value +LetPrivate.+y: 0 references () [0]\n  Live Value +LetPrivate.local_1.+x: 1 references (LetPrivate.res:7:4) [0]\n  Live Value +ModuleAliases.+testInner2: 0 references () [0]\n  Live Value +ModuleAliases.+testInner: 0 references () [0]\n  Live Value +ModuleAliases.+testNested: 0 references () [0]\n  Dead RecordLabel +ModuleAliases.Outer2.Inner2.InnerNested.t.nested: 0 references () [0]\n  Dead RecordLabel +ModuleAliases.Outer.Inner.innerT.inner: 0 references () [0]\n  Dead Value +ModuleAliases2.+q: 0 references () [0]\n  Dead RecordLabel +ModuleAliases2.Outer.Inner.inner.inner: 0 references () [0]\n  Dead RecordLabel +ModuleAliases2.Outer.outer.outer: 0 references () [0]\n  Dead RecordLabel +ModuleAliases2.record.y: 0 references () [0]\n  Dead RecordLabel +ModuleAliases2.record.x: 0 references () [0]\n  Live Value +ModuleExceptionBug.+ddjdj: 1 references (ModuleExceptionBug.res:8:7) [0]\n  Dead Exception +ModuleExceptionBug.MyOtherException: 0 references () [0]\n  Dead Value +ModuleExceptionBug.Dep.+customDouble: 0 references () [0]\n  Live Value +NestedModules.Universe.+someString: 0 references () [0]\n  Dead VariantCase +NestedModules.Universe.variant.B: 0 references () [0]\n  Dead VariantCase +NestedModules.Universe.variant.A: 0 references () [0]\n  Live Value +NestedModules.Universe.Nested2.+nested2Function: 0 references () [0]\n  Live Value +NestedModules.Universe.Nested2.Nested3.+nested3Function: 0 references () [0]\n  Live Value +NestedModules.Universe.Nested2.Nested3.+nested3Value: 0 references () [0]\n  Dead Value +NestedModules.Universe.Nested2.Nested3.+w: 0 references () [0]\n  Dead Value +NestedModules.Universe.Nested2.Nested3.+z: 0 references () [0]\n  Dead Value 
+NestedModules.Universe.Nested2.Nested3.+y: 0 references () [0]\n  Dead Value +NestedModules.Universe.Nested2.Nested3.+x: 0 references () [0]\n  Dead Value +NestedModules.Universe.Nested2.+y: 0 references () [0]\n  Live Value +NestedModules.Universe.Nested2.+nested2Value: 0 references () [0]\n  Dead Value +NestedModules.Universe.Nested2.+x: 0 references () [0]\n  Dead Value +NestedModules.Universe.+notExported: 0 references () [0]\n  Live Value +NestedModules.Universe.+theAnswer: 0 references () [0]\n  Live Value +NestedModules.+notNested: 0 references () [0]\n  Live Value NestedModulesInSignature.Universe.+theAnswer: 0 references () [0]\n  Dead RecordLabel +Newsyntax.record2.yy: 0 references () [0]\n  Dead RecordLabel +Newsyntax.record2.xx: 0 references () [0]\n  Dead VariantCase +Newsyntax.variant.C: 0 references () [0]\n  Dead VariantCase +Newsyntax.variant.B: 0 references () [0]\n  Dead VariantCase +Newsyntax.variant.A: 0 references () [0]\n  Dead RecordLabel +Newsyntax.record.yyy: 0 references () [0]\n  Dead RecordLabel +Newsyntax.record.xxx: 0 references () [0]\n  Dead Value +Newsyntax.+y: 0 references () [0]\n  Dead Value +Newsyntax.+x: 0 references () [0]\n  Live Value +Newton.+result: 2 references (Newton.res:31:8, Newton.res:31:18) [0]\n  Live Value +Newton.+fPrimed: 1 references (Newton.res:29:4) [0]\n  Live Value +Newton.+f: 2 references (Newton.res:29:4, Newton.res:31:16) [0]\n  Live Value +Newton.+newton: 1 references (Newton.res:29:4) [2]\n  Live Value +Newton.+loop: 1 references (Newton.res:6:4) [1]\n  Live Value +Newton.+next: 1 references (Newton.res:14:10) [0]\n  Live Value +Newton.+previous: 2 references (Newton.res:14:10, Newton.res:16:8) [0]\n  Live Value +Newton.+iterateMore: 1 references (Newton.res:14:10) [1]\n  Live Value +Newton.+delta: 1 references (Newton.res:8:6) [0]\n  Live Value +Newton.+current: 3 references (Newton.res:8:6, Newton.res:14:10, Newton.res:15:8) [0]\n  Live Value +Newton.+/: 1 references (Newton.res:16:8) [0]\n  Live 
Value +Newton.+*: 2 references (Newton.res:25:4, Newton.res:27:4) [0]\n  Live Value +Newton.++: 1 references (Newton.res:25:4) [0]\n  Live Value +Newton.+-: 4 references (Newton.res:9:8, Newton.res:16:8, Newton.res:25:4, Newton.res:27:4) [0]\n  Live Value +Opaque.+testConvertNestedRecordFromOtherFile: 0 references () [0]\n  Live Value +Opaque.+noConversion: 0 references () [0]\n  Dead VariantCase +Opaque.opaqueFromRecords.A: 0 references () [0]\n  Live Value +Records.+testMyRecBsAs2: 0 references () [0]\n  Live Value +Records.+testMyRecBsAs: 0 references () [0]\n  Live RecordLabel +Records.myRecBsAs.type_: 1 references (Records.res:145:38) [0]\n  Live Value +Records.+testMyObj2: 0 references () [0]\n  Live Value +Records.+testMyObj: 0 references () [0]\n  Live Value +Records.+testMyRec2: 0 references () [0]\n  Live Value +Records.+testMyRec: 0 references () [0]\n  Live RecordLabel +Records.myRec.type_: 1 references (Records.res:127:30) [0]\n  Live Value +Records.+computeArea4: 0 references () [0]\n  Live Value +Records.+computeArea3: 0 references () [0]\n  Live Value +Records.+someBusiness2: 0 references () [0]\n  Live Value +Records.+findAddress2: 0 references () [0]\n  Live RecordLabel +Records.business2.address2: 1 references (Records.res:97:2) [0]\n  Dead RecordLabel +Records.business2.owner: 0 references () [0]\n  Dead RecordLabel +Records.business2.name: 0 references () [0]\n  Live Value +Records.+getPayloadRecordPlusOne: 0 references () [0]\n  Live Value +Records.+payloadValue: 0 references () [0]\n  Live Value +Records.+recordValue: 1 references (Records.res:80:4) [0]\n  Live Value +Records.+getPayloadRecord: 0 references () [0]\n  Dead RecordLabel +Records.record.w: 0 references () [0]\n  Live RecordLabel +Records.record.v: 1 references (Records.res:85:5) [0]\n  Live Value +Records.+getPayload: 0 references () [0]\n  Live RecordLabel +Records.payload.payload: 3 references (Records.res:65:18, Records.res:74:24, Records.res:83:31) [0]\n  Dead RecordLabel 
+Records.payload.num: 0 references () [0]\n  Live Value +Records.+findAllAddresses: 0 references () [0]\n  Live Value +Records.+someBusiness: 0 references () [0]\n  Live Value +Records.+findAddress: 0 references () [0]\n  Live Value +Records.+getOpt: 3 references (Records.res:39:4, Records.res:46:4, Records.res:96:4) [0]\n  Live RecordLabel +Records.business.address: 2 references (Records.res:40:2, Records.res:50:6) [0]\n  Live RecordLabel +Records.business.owner: 1 references (Records.res:51:6) [0]\n  Dead RecordLabel +Records.business.name: 0 references () [0]\n  Live RecordLabel +Records.person.address: 1 references (Records.res:51:42) [0]\n  Dead RecordLabel +Records.person.age: 0 references () [0]\n  Dead RecordLabel +Records.person.name: 0 references () [0]\n  Live Value +Records.+coord2d: 0 references () [0]\n  Live Value +Records.+computeArea: 0 references () [0]\n  Live Value +Records.+origin: 0 references () [0]\n  Live RecordLabel +Records.coord.z: 1 references (Records.res:14:19) [0]\n  Live RecordLabel +Records.coord.y: 1 references (Records.res:14:19) [0]\n  Live RecordLabel +Records.coord.x: 1 references (Records.res:14:19) [0]\n  Live Value +References.+preserveRefIdentity: 0 references () [0]\n  Live Value +References.+destroysRefIdentity: 0 references () [0]\n  Dead RecordLabel +References.requiresConversion.x: 0 references () [0]\n  Live Value +References.+set: 0 references () [0]\n  Live Value +References.+make: 0 references () [0]\n  Live Value +References.+get: 0 references () [0]\n  Live Value +References.R.+set: 1 references (References.res:37:4) [1]\n  Live Value +References.R.+set: 1 references (References.res:19:2) [0]\n  Live Value +References.R.+make: 1 references (References.res:34:4) [1]\n  Live Value +References.R.+make: 1 references (References.res:18:2) [0]\n  Live Value +References.R.+get: 1 references (References.res:31:4) [1]\n  Live Value +References.R.+get: 1 references (References.res:17:2) [0]\n  Live Value 
+References.+update: 0 references () [0]\n  Live Value +References.+access: 0 references () [0]\n  Live Value +References.+create: 0 references () [0]\n  Live Value +RepeatedLabel.+userData: 1 references (RepeatedLabel.res:14:7) [0]\n  Dead RecordLabel +RepeatedLabel.tabState.f: 0 references () [0]\n  Live RecordLabel +RepeatedLabel.tabState.b: 1 references (RepeatedLabel.res:12:16) [0]\n  Live RecordLabel +RepeatedLabel.tabState.a: 1 references (RepeatedLabel.res:12:16) [0]\n  Dead RecordLabel +RepeatedLabel.userData.b: 0 references () [0]\n  Dead RecordLabel +RepeatedLabel.userData.a: 0 references () [0]\n  Dead Value +Shadow.M.+test: 0 references () [0]\n  Live Value +Shadow.M.+test: 0 references () [0]\n  Live Value +Shadow.+test: 0 references () [0]\n  Live Value +Shadow.+test: 0 references () [0]\n  Live Value +TestEmitInnerModules.Outer.Medium.Inner.+y: 0 references () [0]\n  Live Value +TestEmitInnerModules.Inner.+y: 0 references () [0]\n  Live Value +TestEmitInnerModules.Inner.+x: 0 references () [0]\n  Live Value +TestFirstClassModules.+convertFirstClassModuleWithTypeEquations: 0 references () [0]\n  Live Value +TestFirstClassModules.+convertRecord: 0 references () [0]\n  Live Value +TestFirstClassModules.+convertInterface: 0 references () [0]\n  Live Value +TestFirstClassModules.+convert: 0 references () [0]\n  Dead Value +TestImmutableArray.+testBeltArraySet: 0 references () [0]\n  Dead Value +TestImmutableArray.+testBeltArrayGet: 0 references () [0]\n  Live Value +TestImmutableArray.+testImmutableArrayGet: 0 references () [0]\n  Live Value +TestImport.+defaultValue2: 0 references () [0]\n  Dead Value +TestImport.+make: 0 references () [0]\n  Live Value +TestImport.+make: 0 references () [0]\n  Dead RecordLabel +TestImport.message.text: 0 references () [0]\n  Live Value +TestImport.+defaultValue: 0 references () [0]\n  Live Value +TestImport.+valueStartingWithUpperCaseLetter: 0 references () [0]\n  Dead Value +TestImport.+innerStuffContents: 0 
references () [0]\n  Live Value +TestImport.+innerStuffContentsAsEmptyObject: 0 references () [0]\n  Live Value +TestImport.+innerStuffContents: 0 references () [0]\n  Dead Value +TestInnedModuleTypes.+_: 0 references () [0]\n  Live Value +TestModuleAliases.+testInner2Expanded: 0 references () [0]\n  Live Value +TestModuleAliases.+testInner2: 0 references () [0]\n  Live Value +TestModuleAliases.+testInner1Expanded: 0 references () [0]\n  Live Value +TestModuleAliases.+testInner1: 0 references () [0]\n  Live Value +TestOptArg.+liveSuppressesOptArgs: 1 references (TestOptArg.res:16:8) [0]\n  Live Value +TestOptArg.+notSuppressesOptArgs: 1 references (TestOptArg.res:11:8) [0]\n  Live Value +TestOptArg.+bar: 1 references (TestOptArg.res:7:7) [0]\n  Live Value +TestOptArg.+foo: 1 references (TestOptArg.res:5:4) [0]\n  Live Value +TestPromise.+convert: 0 references () [0]\n  Dead RecordLabel +TestPromise.toPayload.result: 0 references () [0]\n  Live RecordLabel +TestPromise.fromPayload.s: 1 references (TestPromise.res:14:32) [0]\n  Dead RecordLabel +TestPromise.fromPayload.x: 0 references () [0]\n  Dead Value +ToSuppress.+toSuppress: 0 references () [0]\n  Live Value +TransitiveType1.+convertAlias: 0 references () [0]\n  Live Value +TransitiveType1.+convert: 0 references () [0]\n  Dead Value +TransitiveType2.+convertT2: 0 references () [0]\n  Live Value +TransitiveType3.+convertT3: 0 references () [0]\n  Dead RecordLabel +TransitiveType3.t3.s: 0 references () [0]\n  Dead RecordLabel +TransitiveType3.t3.i: 0 references () [0]\n  Live Value +Tuples.+changeSecondAge: 0 references () [0]\n  Live Value +Tuples.+marry: 0 references () [0]\n  Live Value +Tuples.+getFirstName: 0 references () [0]\n  Live RecordLabel +Tuples.person.age: 1 references (Tuples.res:49:84) [0]\n  Live RecordLabel +Tuples.person.name: 1 references (Tuples.res:43:49) [0]\n  Live Value +Tuples.+coord2d: 0 references () [0]\n  Live Value +Tuples.+computeAreaNoConverters: 0 references () [0]\n  Live Value 
+Tuples.+computeAreaWithIdent: 0 references () [0]\n  Live Value +Tuples.+computeArea: 0 references () [0]\n  Live Value +Tuples.+origin: 0 references () [0]\n  Live Value +Tuples.+testTuple: 0 references () [0]\n  Dead Value +TypeParams1.+exportSomething: 0 references () [0]\n  Dead Value +TypeParams2.+exportSomething: 0 references () [0]\n  Dead RecordLabel +TypeParams2.item.id: 0 references () [0]\n  Live Value +TypeParams3.+test2: 0 references () [0]\n  Live Value +TypeParams3.+test: 0 references () [0]\n  Dead Value +Types.ObjectId.+x: 0 references () [0]\n  Live Value +Types.+optFunction: 0 references () [0]\n  Live Value +Types.+i64Const: 0 references () [0]\n  Live Value +Types.+currentTime: 0 references () [0]\n  Live Value +Types.+testInstantiateTypeParameter: 0 references () [0]\n  Dead RecordLabel +Types.someRecord.id: 0 references () [0]\n  Live Value +Types.+setMatch: 0 references () [0]\n  Live Value +Types.+testMarshalFields: 0 references () [0]\n  Live Value +Types.+testConvertNull: 0 references () [0]\n  Dead RecordLabel +Types.record.s: 0 references () [0]\n  Dead RecordLabel +Types.record.i: 0 references () [0]\n  Live Value +Types.+jsonStringify: 0 references () [0]\n  Live Value +Types.+jsString2T: 0 references () [0]\n  Live Value +Types.+jsStringT: 0 references () [0]\n  Live Value +Types.+stringT: 0 references () [0]\n  Dead VariantCase +Types.opaqueVariant.B: 0 references () [0]\n  Dead VariantCase +Types.opaqueVariant.A: 0 references () [0]\n  Live Value +Types.+testFunctionOnOptionsAsArgument: 0 references () [0]\n  Live Value +Types.+mutuallyRecursiveConverter: 0 references () [0]\n  Live Value +Types.+selfRecursiveConverter: 0 references () [0]\n  Dead RecordLabel +Types.mutuallyRecursiveB.a: 0 references () [0]\n  Live RecordLabel +Types.mutuallyRecursiveA.b: 1 references (Types.res:49:34) [0]\n  Live RecordLabel +Types.selfRecursive.self: 1 references (Types.res:42:30) [0]\n  Live Value +Types.+swap: 0 references () [0]\n  Dead 
VariantCase +Types.typeWithVars.B: 0 references () [0]\n  Dead VariantCase +Types.typeWithVars.A: 0 references () [0]\n  Live Value +Types.+map: 0 references () [0]\n  Live Value +Types.+someIntList: 0 references () [0]\n  Live Value +Unboxed.+r2Test: 0 references () [0]\n  Dead RecordLabel +Unboxed.r2.B.g: 0 references () [0]\n  Dead VariantCase +Unboxed.r2.B: 0 references () [0]\n  Dead RecordLabel +Unboxed.r1.x: 0 references () [0]\n  Live Value +Unboxed.+testV1: 0 references () [0]\n  Dead VariantCase +Unboxed.v2.A: 0 references () [0]\n  Dead VariantCase +Unboxed.v1.A: 0 references () [0]\n  Live Value +Uncurried.+sumLblCurried: 0 references () [0]\n  Live Value +Uncurried.+sumCurried: 0 references () [0]\n  Live Value +Uncurried.+sumU2: 0 references () [0]\n  Live Value +Uncurried.+sumU: 0 references () [0]\n  Live Value +Uncurried.+callback2U: 0 references () [0]\n  Live Value +Uncurried.+callback2: 0 references () [0]\n  Live RecordLabel +Uncurried.authU.loginU: 1 references (Uncurried.res:38:25) [0]\n  Live RecordLabel +Uncurried.auth.login: 1 references (Uncurried.res:35:24) [0]\n  Live Value +Uncurried.+callback: 0 references () [0]\n  Live Value +Uncurried.+curried3: 0 references () [0]\n  Live Value +Uncurried.+uncurried3: 0 references () [0]\n  Live Value +Uncurried.+uncurried2: 0 references () [0]\n  Live Value +Uncurried.+uncurried1: 0 references () [0]\n  Live Value +Uncurried.+uncurried0: 0 references () [0]\n  Live Value +Unison.+toString: 3 references (Unison.res:37:0, Unison.res:38:0, Unison.res:39:0) [0]\n  Live Value +Unison.+fits: 1 references (Unison.res:26:8) [0]\n  Live Value +Unison.+group: 2 references (Unison.res:38:25, Unison.res:39:25) [0]\n  Live VariantCase +Unison.stack.Cons: 2 references (Unison.res:38:20, Unison.res:39:20) [0]\n  Live VariantCase +Unison.stack.Empty: 3 references (Unison.res:37:20, Unison.res:38:53, Unison.res:39:52) [0]\n  Live RecordLabel +Unison.t.doc: 2 references (Unison.res:23:9, Unison.res:28:9) [0]\n  
Live RecordLabel +Unison.t.break: 1 references (Unison.res:28:9) [0]\n  Live VariantCase +Unison.break.Always: 1 references (Unison.res:39:38) [0]\n  Live VariantCase +Unison.break.Never: 1 references (Unison.res:38:38) [0]\n  Live VariantCase +Unison.break.IfNeed: 1 references (Unison.res:17:20) [0]\n  Live Value +UseImportJsValue.+useTypeImportedInOtherModule: 0 references () [0]\n  Live Value +UseImportJsValue.+useGetProp: 0 references () [0]\n  Live Value +Variants.+restResult3: 0 references () [0]\n  Live Value +Variants.+restResult2: 0 references () [0]\n  Live Value +Variants.+restResult1: 0 references () [0]\n  Dead VariantCase +Variants.result1.Error: 0 references () [0]\n  Dead VariantCase +Variants.result1.Ok: 0 references () [0]\n  Live Value +Variants.+polyWithOpt: 0 references () [0]\n  Dead VariantCase +Variants.type_.Type: 0 references () [0]\n  Live Value +Variants.+id2: 0 references () [0]\n  Live Value +Variants.+id1: 0 references () [0]\n  Live Value +Variants.+testConvert2to3: 0 references () [0]\n  Live Value +Variants.+testConvert3: 0 references () [0]\n  Live Value +Variants.+testConvert2: 0 references () [0]\n  Live Value +Variants.+fortytwoBAD: 0 references () [0]\n  Live Value +Variants.+fortytwoOK: 0 references () [0]\n  Live Value +Variants.+testConvert: 0 references () [0]\n  Live Value +Variants.+swap: 0 references () [0]\n  Live Value +Variants.+onlySunday: 0 references () [0]\n  Live Value +Variants.+sunday: 0 references () [0]\n  Live Value +Variants.+saturday: 0 references () [0]\n  Live Value +Variants.+monday: 0 references () [0]\n  Live Value +Variants.+isWeekend: 0 references () [0]\n  Live Value +VariantsWithPayload.+testVariant1Object: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.variant1Object.R: 0 references () [0]\n  Live Value +VariantsWithPayload.+testVariant1Int: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.variant1Int.R: 0 references () [0]\n  Live Value 
+VariantsWithPayload.+printVariantWithPayloads: 0 references () [0]\n  Live Value +VariantsWithPayload.+testVariantWithPayloads: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.variantWithPayloads.E: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.variantWithPayloads.D: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.variantWithPayloads.C: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.variantWithPayloads.B: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.variantWithPayloads.A: 0 references () [0]\n  Live Value +VariantsWithPayload.+testSimpleVariant: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.simpleVariant.C: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.simpleVariant.B: 0 references () [0]\n  Dead VariantCase +VariantsWithPayload.simpleVariant.A: 0 references () [0]\n  Live Value +VariantsWithPayload.+printManyPayloads: 0 references () [0]\n  Live Value +VariantsWithPayload.+testManyPayloads: 0 references () [0]\n  Live Value +VariantsWithPayload.+printVariantWithPayload: 0 references () [0]\n  Live Value +VariantsWithPayload.+testWithPayload: 0 references () [0]\n  Live RecordLabel +VariantsWithPayload.payload.y: 2 references (VariantsWithPayload.res:26:74, VariantsWithPayload.res:44:72) [0]\n  Live RecordLabel +VariantsWithPayload.payload.x: 2 references (VariantsWithPayload.res:26:57, VariantsWithPayload.res:44:55) [0]\n  Live Value +DeadExn.+eInside: 1 references (DeadExn.res:12:7) [0]\n  Dead Value +DeadExn.+eToplevel: 0 references () [0]\n  Dead Exception +DeadExn.DeadE: 0 references () [0]\n  Live Exception +DeadExn.Inside.Einside: 1 references (DeadExn.res:10:14) [0]\n  Live Exception +DeadExn.Etoplevel: 1 references (DeadExn.res:8:16) [0]\n  Live RecordLabel +DeadTypeTest.record.z: 0 references () [0]\n  Live RecordLabel +DeadTypeTest.record.y: 0 references () [0]\n  Live RecordLabel +DeadTypeTest.record.x: 0 references () [0]\n  Dead Value 
+DeadTypeTest.+_: 0 references () [0]\n  Dead Value +DeadTypeTest.+_: 0 references () [0]\n  Dead VariantCase +DeadTypeTest.deadType.InNeither: 0 references () [0]\n  Live VariantCase +DeadTypeTest.deadType.OnlyInInterface: 1 references (DeadTypeTest.resi:8:2) [0]\n  Dead Value +DeadTypeTest.+a: 0 references () [0]\n  Dead VariantCase +DeadTypeTest.t.B: 0 references () [0]\n  Dead Value DeadValueTest.+valueDead: 0 references () [0]\n  Live Value DeadValueTest.+valueAlive: 1 references (DeadTest.res:73:16) [0]\n  Live Value +DynamicallyLoadedComponent.+make: 1 references (DeadTest.res:133:17) [0]\n  Dead Value ErrorHandler.+x: 0 references () [0]\n  Live Value ErrorHandler.Make.+notify: 1 references (CreateErrorHandler1.res:8:0) [0]\n  Dead Value +FirstClassModulesInterface.+r: 0 references () [0]\n  Dead RecordLabel +FirstClassModulesInterface.record.y: 0 references () [0]\n  Dead RecordLabel +FirstClassModulesInterface.record.x: 0 references () [0]\n  Dead Value ImmutableArray.+eq: 0 references () [0]\n  Dead Value ImmutableArray.+eqU: 0 references () [0]\n  Dead Value ImmutableArray.+cmp: 0 references () [0]\n  Dead Value ImmutableArray.+cmpU: 0 references () [0]\n  Dead Value ImmutableArray.+some2: 0 references () [0]\n  Dead Value ImmutableArray.+some2U: 0 references () [0]\n  Dead Value ImmutableArray.+every2: 0 references () [0]\n  Dead Value ImmutableArray.+every2U: 0 references () [0]\n  Dead Value ImmutableArray.+every: 0 references () [0]\n  Dead Value ImmutableArray.+everyU: 0 references () [0]\n  Dead Value ImmutableArray.+some: 0 references () [0]\n  Dead Value ImmutableArray.+someU: 0 references () [0]\n  Dead Value ImmutableArray.+reduceReverse2: 0 references () [0]\n  Dead Value ImmutableArray.+reduceReverse2U: 0 references () [0]\n  Dead Value ImmutableArray.+reduceReverse: 0 references () [0]\n  Dead Value ImmutableArray.+reduceReverseU: 0 references () [0]\n  Dead Value ImmutableArray.+reduce: 0 references () [0]\n  Dead Value 
ImmutableArray.+reduceU: 0 references () [0]\n  Dead Value ImmutableArray.+partition: 0 references () [0]\n  Dead Value ImmutableArray.+partitionU: 0 references () [0]\n  Dead Value ImmutableArray.+mapWithIndex: 0 references () [0]\n  Dead Value ImmutableArray.+mapWithIndexU: 0 references () [0]\n  Dead Value ImmutableArray.+forEachWithIndex: 0 references () [0]\n  Dead Value ImmutableArray.+forEachWithIndexU: 0 references () [0]\n  Dead Value ImmutableArray.+keepMap: 0 references () [0]\n  Dead Value ImmutableArray.+keepMapU: 0 references () [0]\n  Dead Value ImmutableArray.+keepWithIndex: 0 references () [0]\n  Dead Value ImmutableArray.+keepWithIndexU: 0 references () [0]\n  Dead Value ImmutableArray.+map: 0 references () [0]\n  Dead Value ImmutableArray.+mapU: 0 references () [0]\n  Dead Value ImmutableArray.+forEach: 0 references () [0]\n  Dead Value ImmutableArray.+forEachU: 0 references () [0]\n  Dead Value ImmutableArray.+copy: 0 references () [0]\n  Dead Value ImmutableArray.+sliceToEnd: 0 references () [0]\n  Dead Value ImmutableArray.+slice: 0 references () [0]\n  Dead Value ImmutableArray.+concatMany: 0 references () [0]\n  Dead Value ImmutableArray.+concat: 0 references () [0]\n  Dead Value ImmutableArray.+unzip: 0 references () [0]\n  Dead Value ImmutableArray.+zipBy: 0 references () [0]\n  Dead Value ImmutableArray.+zipByU: 0 references () [0]\n  Dead Value ImmutableArray.+zip: 0 references () [0]\n  Dead Value ImmutableArray.+makeByAndShuffle: 0 references () [0]\n  Dead Value ImmutableArray.+makeByAndShuffleU: 0 references () [0]\n  Dead Value ImmutableArray.+makeBy: 0 references () [0]\n  Dead Value ImmutableArray.+makeByU: 0 references () [0]\n  Dead Value ImmutableArray.+rangeBy: 0 references () [0]\n  Dead Value ImmutableArray.+range: 0 references () [0]\n  Dead Value ImmutableArray.+make: 0 references () [0]\n  Dead Value ImmutableArray.+makeUninitializedUnsafe: 0 references () [0]\n  Dead Value ImmutableArray.+makeUninitialized: 0 references 
() [0]\n  Dead Value ImmutableArray.+reverse: 0 references () [0]\n  Dead Value ImmutableArray.+shuffle: 0 references () [0]\n  Dead Value ImmutableArray.+getUndefined: 0 references () [0]\n  Dead Value ImmutableArray.+getUnsafe: 0 references () [0]\n  Dead Value ImmutableArray.+getExn: 0 references () [0]\n  Dead Value ImmutableArray.+get: 0 references () [0]\n  Dead Value ImmutableArray.+size: 0 references () [0]\n  Dead Value ImmutableArray.+length: 0 references () [0]\n  Dead Value ImmutableArray.+toArray: 0 references () [0]\n  Live Value ImmutableArray.+fromArray: 1 references (DeadTest.res:1:15) [0]\n  Live Value ImmutableArray.Array.+get: 1 references (TestImmutableArray.res:2:4) [0]\n  Live Value +ImportHookDefault.+make2: 0 references () [0]\n  Live Value +ImportHookDefault.+make: 1 references (Hooks.res:17:5) [0]\n  Dead RecordLabel +ImportHookDefault.person.age: 0 references () [0]\n  Dead RecordLabel +ImportHookDefault.person.name: 0 references () [0]\n  Live Value +ImportHooks.+foo: 0 references () [0]\n  Live Value +ImportHooks.+make: 1 references (Hooks.res:14:5) [0]\n  Dead RecordLabel +ImportHooks.person.age: 0 references () [0]\n  Dead RecordLabel +ImportHooks.person.name: 0 references () [0]\n  Live Value +ImportJsValue.+default: 0 references () [0]\n  Live Value +ImportJsValue.+polymorphic: 0 references () [0]\n  Live Value +ImportJsValue.+convertVariant: 0 references () [0]\n  Dead VariantCase +ImportJsValue.variant.S: 0 references () [0]\n  Dead VariantCase +ImportJsValue.variant.I: 0 references () [0]\n  Live Value +ImportJsValue.+returnedFromHigherOrder: 0 references () [0]\n  Live Value +ImportJsValue.+higherOrder: 1 references (ImportJsValue.res:64:4) [0]\n  Live Value +ImportJsValue.+useColor: 0 references () [0]\n  Live Value +ImportJsValue.+useGetAbs: 0 references () [0]\n  Live Value +ImportJsValue.+useGetProp: 0 references () [0]\n  Live Value +ImportJsValue.AbsoluteValue.+getAbs: 1 references (ImportJsValue.res:50:4) [1]\n  Live 
Value +ImportJsValue.AbsoluteValue.+getAbs: 1 references (ImportJsValue.res:40:6) [0]\n  Live Value +ImportJsValue.+areaValue: 0 references () [0]\n  Live Value +ImportJsValue.+roundedNumber: 0 references () [0]\n  Live Value +ImportJsValue.+returnMixedArray: 0 references () [0]\n  Live Value +ImportJsValue.+area: 1 references (ImportJsValue.res:30:4) [0]\n  Dead RecordLabel +ImportJsValue.point.y: 0 references () [0]\n  Dead RecordLabel +ImportJsValue.point.x: 0 references () [0]\n  Live Value +ImportJsValue.+round: 1 references (ImportJsValue.res:27:4) [0]\n  Live Value +NestedModulesInSignature.Universe.+theAnswer: 1 references (NestedModulesInSignature.resi:2:2) [0]\n  Live Value OptArg.+bar: 1 references (TestOptArg.res:1:7) [0]\n  Dead Value OptArg.+foo: 0 references () [0]\n  Dead Value +DeadValueTest.+tail: 0 references () [0]\n  Dead Value +DeadValueTest.+subList: 0 references () [0]\n  Dead Value +DeadValueTest.+valueOnlyInImplementation: 0 references () [0]\n  Dead Value +DeadValueTest.+valueDead: 0 references () [0]\n  Live Value +DeadValueTest.+valueAlive: 1 references (DeadValueTest.resi:1:0) [0]\n  Dead Value +ErrorHandler.+x: 0 references () [0]\n  Live Value +ErrorHandler.Make.+notify: 1 references (ErrorHandler.resi:7:2) [0]\n  Dead Value +ImmutableArray.+eq: 0 references () [0]\n  Dead Value +ImmutableArray.+eqU: 0 references () [0]\n  Dead Value +ImmutableArray.+cmp: 0 references () [0]\n  Dead Value +ImmutableArray.+cmpU: 0 references () [0]\n  Dead Value +ImmutableArray.+some2: 0 references () [0]\n  Dead Value +ImmutableArray.+some2U: 0 references () [0]\n  Dead Value +ImmutableArray.+every2: 0 references () [0]\n  Dead Value +ImmutableArray.+every2U: 0 references () [0]\n  Dead Value +ImmutableArray.+every: 0 references () [0]\n  Dead Value +ImmutableArray.+everyU: 0 references () [0]\n  Dead Value +ImmutableArray.+some: 0 references () [0]\n  Dead Value +ImmutableArray.+someU: 0 references () [0]\n  Dead Value 
+ImmutableArray.+reduceReverse2: 0 references () [0]\n  Dead Value +ImmutableArray.+reduceReverse2U: 0 references () [0]\n  Dead Value +ImmutableArray.+reduceReverse: 0 references () [0]\n  Dead Value +ImmutableArray.+reduceReverseU: 0 references () [0]\n  Dead Value +ImmutableArray.+reduce: 0 references () [0]\n  Dead Value +ImmutableArray.+reduceU: 0 references () [0]\n  Dead Value +ImmutableArray.+partition: 0 references () [0]\n  Dead Value +ImmutableArray.+partitionU: 0 references () [0]\n  Dead Value +ImmutableArray.+mapWithIndex: 0 references () [0]\n  Dead Value +ImmutableArray.+mapWithIndexU: 0 references () [0]\n  Dead Value +ImmutableArray.+forEachWithIndex: 0 references () [0]\n  Dead Value +ImmutableArray.+forEachWithIndexU: 0 references () [0]\n  Dead Value +ImmutableArray.+keepMap: 0 references () [0]\n  Dead Value +ImmutableArray.+keepMapU: 0 references () [0]\n  Dead Value +ImmutableArray.+keepWithIndex: 0 references () [0]\n  Dead Value +ImmutableArray.+keepWithIndexU: 0 references () [0]\n  Dead Value +ImmutableArray.+map: 0 references () [0]\n  Dead Value +ImmutableArray.+mapU: 0 references () [0]\n  Dead Value +ImmutableArray.+forEach: 0 references () [0]\n  Dead Value +ImmutableArray.+forEachU: 0 references () [0]\n  Dead Value +ImmutableArray.+copy: 0 references () [0]\n  Dead Value +ImmutableArray.+sliceToEnd: 0 references () [0]\n  Dead Value +ImmutableArray.+slice: 0 references () [0]\n  Dead Value +ImmutableArray.+concatMany: 0 references () [0]\n  Dead Value +ImmutableArray.+concat: 0 references () [0]\n  Dead Value +ImmutableArray.+unzip: 0 references () [0]\n  Dead Value +ImmutableArray.+zipBy: 0 references () [0]\n  Dead Value +ImmutableArray.+zipByU: 0 references () [0]\n  Dead Value +ImmutableArray.+zip: 0 references () [0]\n  Dead Value +ImmutableArray.+makeByAndShuffle: 0 references () [0]\n  Dead Value +ImmutableArray.+makeByAndShuffleU: 0 references () [0]\n  Dead Value +ImmutableArray.+makeBy: 0 references () [0]\n  Dead Value 
+ImmutableArray.+makeByU: 0 references () [0]\n  Dead Value +ImmutableArray.+rangeBy: 0 references () [0]\n  Dead Value +ImmutableArray.+range: 0 references () [0]\n  Dead Value +ImmutableArray.+make: 0 references () [0]\n  Dead Value +ImmutableArray.+makeUninitializedUnsafe: 0 references () [0]\n  Dead Value +ImmutableArray.+makeUninitialized: 0 references () [0]\n  Dead Value +ImmutableArray.+reverse: 0 references () [0]\n  Dead Value +ImmutableArray.+shuffle: 0 references () [0]\n  Dead Value +ImmutableArray.+getUndefined: 0 references () [0]\n  Dead Value +ImmutableArray.+getUnsafe: 0 references () [0]\n  Dead Value +ImmutableArray.+getExn: 0 references () [0]\n  Live Value +ImmutableArray.+get: 1 references (ImmutableArray.resi:6:2) [0]\n  Dead Value +ImmutableArray.+size: 0 references () [0]\n  Dead Value +ImmutableArray.+length: 0 references () [0]\n  Dead Value +ImmutableArray.+toArray: 0 references () [0]\n  Live Value +ImmutableArray.+fromArray: 1 references (ImmutableArray.resi:9:0) [0]\n  Live Value +OptArg.+wrapfourArgs: 2 references (OptArg.res:28:7, OptArg.res:29:7) [0]\n  Live Value +OptArg.+fourArgs: 1 references (OptArg.res:26:4) [0]\n  Live Value +OptArg.+wrapOneArg: 1 references (OptArg.res:22:7) [0]\n  Live Value +OptArg.+oneArg: 1 references (OptArg.res:20:4) [0]\n  Live Value +OptArg.+twoArgs: 1 references (OptArg.res:16:12) [0]\n  Live Value +OptArg.+threeArgs: 2 references (OptArg.res:11:7, OptArg.res:12:7) [0]\n  Live Value +OptArg.+bar: 2 references (OptArg.res:7:7, OptArg.resi:2:0) [0]\n  Live Value +OptArg.+foo: 1 references (OptArg.res:5:7) [0]\n\n  Incorrect Dead Annotation\n  DeadTest.res:178:1-22\n  deadIncorrect  is annotated @dead but is live\n\n  Warning Unused Argument\n  TestOptArg.res:9:1-65\n  optional argument x of function notSuppressesOptArgs is never used\n\n  Warning Unused Argument\n  TestOptArg.res:9:1-65\n  optional argument y of function notSuppressesOptArgs is never used\n\n  Warning Unused Argument\n  
TestOptArg.res:9:1-65\n  optional argument z of function notSuppressesOptArgs is never used\n\n  Warning Redundant Optional Argument\n  TestOptArg.res:3:1-28\n  optional argument x of function foo is always supplied (1 calls)\n\n  Warning Redundant Optional Argument\n  Unison.res:17:1-60\n  optional argument break of function group is always supplied (2 calls)\n\n  Warning Unused Argument\n  OptArg.resi:2:1-50\n  optional argument x of function bar is never used\n\n  Warning Redundant Optional Argument\n  OptArg.res:26:1-70\n  optional argument c of function wrapfourArgs is always supplied (2 calls)\n\n  Warning Unused Argument\n  OptArg.res:24:1-63\n  optional argument d of function fourArgs is never used\n\n  Warning Redundant Optional Argument\n  OptArg.res:20:1-51\n  optional argument a of function wrapOneArg is always supplied (1 calls)\n\n  Warning Unused Argument\n  OptArg.res:14:1-42\n  optional argument a of function twoArgs is never used\n\n  Warning Unused Argument\n  OptArg.res:14:1-42\n  optional argument b of function twoArgs is never used\n\n  Warning Unused Argument\n  OptArg.res:9:1-54\n  optional argument b of function threeArgs is never used\n\n  Warning Redundant Optional Argument\n  OptArg.res:9:1-54\n  optional argument a of function threeArgs is always supplied (2 calls)\n\n  Warning Unused Argument\n  OptArg.res:3:1-38\n  optional argument x of function bar is never used\n\n  Warning Unused Argument\n  OptArg.res:1:1-48\n  optional argument y of function foo is never used\n\n  Warning Unused Argument\n  OptArg.res:1:1-48\n  optional argument z of function foo is never used\n\n  Warning Dead Module\n  AutoAnnotate.res:0:1\n  AutoAnnotate is a dead module as all its items are dead.\n\n  Warning Dead Type\n  AutoAnnotate.res:1:16-21\n  variant.R is a variant case which is never constructed\n  <-- line 1\n  type variant = | @dead(\"variant.R\") R(int)\n\n  Warning Dead Type\n  AutoAnnotate.res:4:16-31\n  record.variant is a record label never 
used to read a value\n  <-- line 4\n  type record = {@dead(\"record.variant\") variant: variant}\n\n  Warning Dead Type\n  AutoAnnotate.res:6:12-18\n  r2.r2 is a record label never used to read a value\n  <-- line 6\n  type r2 = {@dead(\"r2.r2\") r2: int}\n\n  Warning Dead Type\n  AutoAnnotate.res:8:12-18\n  r3.r3 is a record label never used to read a value\n  <-- line 8\n  type r3 = {@dead(\"r3.r3\") r3: int}\n\n  Warning Dead Type\n  AutoAnnotate.res:10:12-18\n  r4.r4 is a record label never used to read a value\n  <-- line 10\n  type r4 = {@dead(\"r4.r4\") r4: int}\n\n  Warning Dead Type\n  AutoAnnotate.res:14:3-14\n  annotatedVariant.R2 is a variant case which is never constructed\n  <-- line 14\n    | @dead(\"annotatedVariant.R2\") R2(r2, r3)\n\n  Warning Dead Type\n  AutoAnnotate.res:15:5-10\n  annotatedVariant.R4 is a variant case which is never constructed\n  <-- line 15\n    | @dead(\"annotatedVariant.R4\") R4(r4)\n\n  Warning Dead Module\n  BucklescriptAnnotations.res:0:1\n  BucklescriptAnnotations is a dead module as all its items are dead.\n\n  Warning Dead Value\n  BucklescriptAnnotations.res:25:1-70\n  bar is never used\n  <-- line 25\n  @dead(\"bar\") let bar = (x: someMethods) => {\n\n  Warning Dead Exception\n  DeadExn.res:7:1-15\n  DeadE is never raised or passed as value\n  <-- line 7\n  @dead(\"DeadE\") exception DeadE\n\n  Warning Dead Value\n  DeadExn.res:8:1-25\n  eToplevel is never used\n  <-- line 8\n  @dead(\"eToplevel\") let eToplevel = Etoplevel\n\n  Warning Dead Value\n  DeadRT.res:5:1-116\n  emitModuleAccessPath is never used\n  <-- line 5\n  @dead(\"emitModuleAccessPath\") let rec emitModuleAccessPath = moduleAccessPath =>\n\n  Warning Dead Value\n  DeadTest.res:2:1-17\n  fortytwo is never used\n  <-- line 2\n  @dead(\"fortytwo\") let fortytwo = 42\n\n  Warning Dead Module\n  DeadTest.res:27:8-97\n  DeadTest.M is a dead module as all its items are dead.\n\n  Warning Dead Value\n  DeadTest.res:31:3-34\n  M.thisSignatureItemIsDead is 
never used\n  <-- line 31\n    @dead(\"M.thisSignatureItemIsDead\") let thisSignatureItemIsDead = 34\n\n  Warning Dead Value\n  DeadTest.res:61:3-12\n  MM.y is never used\n  <-- line 61\n    @dead(\"MM.y\") let y: int\n\n  Warning Dead Value\n  DeadTest.res:65:3-35\n  MM.valueOnlyInImplementation is never used\n  <-- line 65\n    @dead(\"MM.valueOnlyInImplementation\") let valueOnlyInImplementation = 7\n\n  Warning Dead Value\n  DeadTest.res:75:1-37\n  unusedRec is never used\n  <-- line 75\n  @dead(\"unusedRec\") let rec unusedRec = () => unusedRec()\n\n  Warning Dead Value\n  DeadTest.res:77:1-60\n  split_map is never used\n  <-- line 77\n  @dead(\"split_map\") let rec split_map = l => {\n\n  Warning Dead Value\n  DeadTest.res:82:1-27\n  rec1 is never used\n  <-- line 82\n  @dead(\"rec1\") let rec rec1 = () => rec2()\n\n  Warning Dead Value\n  DeadTest.res:83:1-23\n  rec2 is never used\n  <-- line 83\n  @dead(\"rec2\") and rec2 = () => rec1()\n\n  Warning Dead Value\n  DeadTest.res:85:1-77\n  recWithCallback is never used\n  <-- line 85\n  @dead(\"recWithCallback\") let rec recWithCallback = () => {\n\n  Warning Dead Value\n  DeadTest.res:90:1-53\n  foo is never used\n  <-- line 90\n  @dead(\"foo\") let rec foo = () => {\n\n  Warning Dead Value\n  DeadTest.res:94:1-21\n  bar is never used\n  <-- line 94\n  @dead(\"bar\") and bar = () => foo()\n\n  Warning Dead Value\n  DeadTest.res:96:1-71\n  withDefaultValue is never used\n  <-- line 96\n  @dead(\"withDefaultValue\") let withDefaultValue = (~paramWithDefault=3, y) => paramWithDefault + y\n\n  Warning Dead Module\n  DeadTest.res:110:8-413\n  DeadTest.LazyDynamicallyLoadedComponent2 is a dead module as all its items are dead.\n\n  Warning Dead Value With Side Effects\n  DeadTest.res:111:3-142\n  LazyDynamicallyLoadedComponent2.reasonResource is never used and could have side effects\n\n  Warning Dead Value\n  DeadTest.res:114:3-54\n  LazyDynamicallyLoadedComponent2.makeProps is never used\n  <-- line 114\n    
@dead(\"LazyDynamicallyLoadedComponent2.makeProps\") let makeProps = DynamicallyLoadedComponent.makeProps\n\n  Warning Dead Value\n  DeadTest.res:115:3-170\n  LazyDynamicallyLoadedComponent2.make is never used\n  <-- line 115\n    @dead(\"LazyDynamicallyLoadedComponent2.make\") let make = props =>\n\n  Warning Dead Value\n  DeadTest.res:127:1-52\n  zzz is never used\n  <-- line 127\n  @dead(\"zzz\") let zzz = {\n\n  Warning Dead Value\n  DeadTest.res:135:1-15\n  second is never used\n  <-- line 135\n  @dead(\"second\") let second = 1L\n\n  Warning Dead Value\n  DeadTest.res:136:1-35\n  minute is never used\n  <-- line 136\n  @dead(\"minute\") let minute = Int64.mul(60L, second)\n\n  Warning Dead Value\n  DeadTest.res:138:1-21\n  deadRef is never used\n  <-- line 138\n  @dead(\"deadRef\") let deadRef = ref(12)\n\n  Warning Dead Value With Side Effects\n  DeadTest.res:145:1-40\n  theSideEffectIsLogging is never used and could have side effects\n\n  Warning Dead Value\n  DeadTest.res:147:1-54\n  stringLengthNoSideEffects is never used\n  <-- line 147\n  @dead(\"stringLengthNoSideEffects\") let stringLengthNoSideEffects = String.length(\"sdkdl\")\n\n  Warning Dead Type\n  DeadTest.res:175:12-17\n  rc.a is a record label never used to read a value\n  <-- line 175\n  type rc = {@dead(\"rc.a\") a: int}\n\n  Warning Dead Type\n  DeadTest.res:182:25-30\n  inlineRecord.IR.a is a record label never used to read a value\n  <-- line 182\n  type inlineRecord = IR({@dead(\"inlineRecord.IR.a\") a: int, b: int, c: string, @dead d: int, @live e: int})\n\n  Warning Dead Module\n  DeadTestBlacklist.res:0:1\n  DeadTestBlacklist is a dead module as all its items are dead.\n\n  Warning Dead Value\n  DeadTestBlacklist.res:1:1-10\n  x is never used\n  <-- line 1\n  @dead(\"x\") let x = 34\n\n  Warning Dead Module\n  DeadTestWithInterface.res:1:8-54\n  DeadTestWithInterface.Ext_buffer is a dead module as all its items are dead.\n\n  Warning Dead Value\n  DeadTestWithInterface.res:2:3-12\n  
Ext_buffer.x is never used\n  <-- line 2\n    @dead(\"Ext_buffer.x\") let x: int\n\n  Warning Dead Value\n  DeadTestWithInterface.res:4:3-12\n  Ext_buffer.x is never used\n  <-- line 4\n    @dead(\"Ext_buffer.x\") let x = 42\n\n  Warning Dead Type\n  DeadTypeTest.res:3:5\n  t.B is a variant case which is never constructed\n  <-- line 3\n    | @dead(\"t.B\") B\n\n  Warning Dead Value\n  DeadTypeTest.res:4:1-9\n  a is never used\n  <-- line 4\n  @dead(\"a\") let a = A\n\n  Warning Dead Type\n  DeadTypeTest.res:10:5-13\n  deadType.InNeither is a variant case which is never constructed\n  <-- line 10\n    | @dead(\"deadType.InNeither\") InNeither\n\n  Warning Dead Type\n  DeadTypeTest.resi:3:5\n  t.B is a variant case which is never constructed\n  <-- line 3\n    | @dead(\"t.B\") B\n\n  Warning Dead Value\n  DeadTypeTest.resi:4:1-8\n  a is never used\n  <-- line 4\n  @dead(\"a\") let a: t\n\n  Warning Dead Type\n  DeadTypeTest.resi:10:5-13\n  deadType.InNeither is a variant case which is never constructed\n  <-- line 10\n    | @dead(\"deadType.InNeither\") InNeither\n\n  Warning Dead Value\n  DeadValueTest.res:2:1-17\n  valueDead is never used\n  <-- line 2\n  @dead(\"valueDead\") let valueDead = 2\n\n  Warning Dead Value\n  DeadValueTest.res:4:1-33\n  valueOnlyInImplementation is never used\n  <-- line 4\n  @dead(\"valueOnlyInImplementation\") let valueOnlyInImplementation = 3\n\n  Warning Dead Value\n  DeadValueTest.res:6:1-260\n  subList is never used\n  <-- line 6\n  @dead(\"subList\") let rec subList = (b, e, l) =>\n\n  Warning Dead Value\n  DeadValueTest.resi:2:1-18\n  valueDead is never used\n  <-- line 2\n  @dead(\"valueDead\") let valueDead: int\n\n  Warning Dead Type\n  Docstrings.res:61:5\n  t.B is a variant case which is never constructed\n  <-- line 61\n    | @dead(\"t.B\") B\n\n  Warning Dead Module\n  ErrorHandler.res:0:1\n  ErrorHandler is a dead module as all its items are dead.\n\n  Warning Dead Value\n  ErrorHandler.res:12:1-10\n  x is never used\n  
<-- line 12\n  @dead(\"x\") let x = 42\n\n  Warning Dead Module\n  ErrorHandler.resi:0:1\n  ErrorHandler is a dead module as all its items are dead.\n\n  Warning Dead Value\n  ErrorHandler.resi:10:1-10\n  x is never used\n  <-- line 10\n  @dead(\"x\") let x: int\n\n  Warning Dead Module\n  EverythingLiveHere.res:0:1\n  EverythingLiveHere is a dead module as all its items are dead.\n\n  Warning Dead Value\n  EverythingLiveHere.res:1:1-9\n  x is never used\n  <-- line 1\n  @dead(\"x\") let x = 1\n\n  Warning Dead Value\n  EverythingLiveHere.res:3:1-9\n  y is never used\n  <-- line 3\n  @dead(\"y\") let y = 3\n\n  Warning Dead Value\n  EverythingLiveHere.res:5:1-9\n  z is never used\n  <-- line 5\n  @dead(\"z\") let z = 4\n\n  Warning Dead Module\n  FirstClassModulesInterface.res:0:1\n  FirstClassModulesInterface is a dead module as all its items are dead.\n\n  Warning Dead Type\n  FirstClassModulesInterface.res:2:3-8\n  record.x is a record label never used to read a value\n  <-- line 2\n    @dead(\"record.x\") x: int,\n\n  Warning Dead Type\n  FirstClassModulesInterface.res:3:3-11\n  record.y is a record label never used to read a value\n  <-- line 3\n    @dead(\"record.y\") y: string,\n\n  Warning Dead Value\n  FirstClassModulesInterface.res:6:1-26\n  r is never used\n  <-- line 6\n  @dead(\"r\") let r = {x: 3, y: \"hello\"}\n\n  Warning Dead Module\n  FirstClassModulesInterface.resi:0:1\n  FirstClassModulesInterface is a dead module as all its items are dead.\n\n  Warning Dead Type\n  FirstClassModulesInterface.resi:3:3-8\n  record.x is a record label never used to read a value\n  <-- line 3\n    @dead(\"record.x\") x: int,\n\n  Warning Dead Type\n  FirstClassModulesInterface.resi:4:3-11\n  record.y is a record label never used to read a value\n  <-- line 4\n    @dead(\"record.y\") y: string,\n\n  Warning Dead Value\n  FirstClassModulesInterface.resi:7:1-13\n  r is never used\n  <-- line 7\n  @dead(\"r\") let r: record\n\n  Warning Dead Value\n  
ImmutableArray.res:16:3-41\n  toArray is never used\n  <-- line 16\n    @dead(\"toArray\") let toArray = a => Array.copy(a->fromT)\n\n  Warning Dead Value\n  ImmutableArray.res:20:3-42\n  length is never used\n  <-- line 20\n    @dead(\"length\") let length = a => Array.length(a->fromT)\n\n  Warning Dead Value\n  ImmutableArray.res:22:3-38\n  size is never used\n  <-- line 22\n    @dead(\"size\") let size = a => Array.size(a->fromT)\n\n  Warning Dead Value\n  ImmutableArray.res:26:3-50\n  getExn is never used\n  <-- line 26\n    @dead(\"getExn\") let getExn = (a, x) => Array.getExn(a->fromT, x)\n\n  Warning Dead Value\n  ImmutableArray.res:28:3-56\n  getUnsafe is never used\n  <-- line 28\n    @dead(\"getUnsafe\") let getUnsafe = (a, x) => Array.getUnsafe(a->fromT, x)\n\n  Warning Dead Value\n  ImmutableArray.res:30:3-62\n  getUndefined is never used\n  <-- line 30\n    @dead(\"getUndefined\") let getUndefined = (a, x) => Array.getUndefined(a->fromT, x)\n\n  Warning Dead Value\n  ImmutableArray.res:32:3-49\n  shuffle is never used\n  <-- line 32\n    @dead(\"shuffle\") let shuffle = x => Array.shuffle(x->fromT)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:34:3-49\n  reverse is never used\n  <-- line 34\n    @dead(\"reverse\") let reverse = x => Array.reverse(x->fromT)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:36:3-62\n  makeUninitialized is never used\n  <-- line 36\n    @dead(\"makeUninitialized\") let makeUninitialized = x => Array.makeUninitialized(x)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:38:3-74\n  makeUninitializedUnsafe is never used\n  <-- line 38\n    @dead(\"makeUninitializedUnsafe\") let makeUninitializedUnsafe = x => Array.makeUninitializedUnsafe(x)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:40:3-44\n  make is never used\n  <-- line 40\n    @dead(\"make\") let make = (x, y) => Array.make(x, y)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:42:3-46\n  range is never used\n  <-- line 42\n    @dead(\"range\") let 
range = (x, y) => Array.range(x, y)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:44:3-64\n  rangeBy is never used\n  <-- line 44\n    @dead(\"rangeBy\") let rangeBy = (x, y, ~step) => Array.rangeBy(x, y, ~step)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:46:3-50\n  makeByU is never used\n  <-- line 46\n    @dead(\"makeByU\") let makeByU = (c, f) => Array.makeByU(c, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:47:3-48\n  makeBy is never used\n  <-- line 47\n    @dead(\"makeBy\") let makeBy = (c, f) => Array.makeBy(c, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:49:3-70\n  makeByAndShuffleU is never used\n  <-- line 49\n    @dead(\"makeByAndShuffleU\") let makeByAndShuffleU = (c, f) => Array.makeByAndShuffleU(c, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:50:3-68\n  makeByAndShuffle is never used\n  <-- line 50\n    @dead(\"makeByAndShuffle\") let makeByAndShuffle = (c, f) => Array.makeByAndShuffle(c, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:52:3-61\n  zip is never used\n  <-- line 52\n    @dead(\"zip\") let zip = (a1, a2) => Array.zip(fromT(a1), fromT(a2))->toTp\n\n  Warning Dead Value\n  ImmutableArray.res:54:3-72\n  zipByU is never used\n  <-- line 54\n    @dead(\"zipByU\") let zipByU = (a1, a2, f) => Array.zipByU(fromT(a1), fromT(a2), f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:55:3-70\n  zipBy is never used\n  <-- line 55\n    @dead(\"zipBy\") let zipBy = (a1, a2, f) => Array.zipBy(fromT(a1), fromT(a2), f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:57:3-47\n  unzip is never used\n  <-- line 57\n    @dead(\"unzip\") let unzip = a => Array.unzip(a->fromTp)->toT2\n\n  Warning Dead Value\n  ImmutableArray.res:59:3-66\n  concat is never used\n  <-- line 59\n    @dead(\"concat\") let concat = (a1, a2) => Array.concat(a1->fromT, a2->fromT)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:61:3-67\n  concatMany is never used\n  <-- line 61\n    @dead(\"concatMany\") let concatMany = (a: t<t<_>>) => 
Array.concatMany(a->fromTT)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:63:3-77\n  slice is never used\n  <-- line 63\n    @dead(\"slice\") let slice = (a, ~offset, ~len) => Array.slice(a->fromT, ~offset, ~len)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:65:3-63\n  sliceToEnd is never used\n  <-- line 65\n    @dead(\"sliceToEnd\") let sliceToEnd = (a, b) => Array.sliceToEnd(a->fromT, b)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:67:3-43\n  copy is never used\n  <-- line 67\n    @dead(\"copy\") let copy = a => Array.copy(a->fromT)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:69:3-54\n  forEachU is never used\n  <-- line 69\n    @dead(\"forEachU\") let forEachU = (a, f) => Array.forEachU(a->fromT, f)\n\n  Warning Dead Value\n  ImmutableArray.res:70:3-52\n  forEach is never used\n  <-- line 70\n    @dead(\"forEach\") let forEach = (a, f) => Array.forEach(a->fromT, f)\n\n  Warning Dead Value\n  ImmutableArray.res:72:3-51\n  mapU is never used\n  <-- line 72\n    @dead(\"mapU\") let mapU = (a, f) => Array.mapU(a->fromT, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:73:3-49\n  map is never used\n  <-- line 73\n    @dead(\"map\") let map = (a, f) => Array.map(a->fromT, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:75:3-71\n  keepWithIndexU is never used\n  <-- line 75\n    @dead(\"keepWithIndexU\") let keepWithIndexU = (a, f) => Array.keepWithIndexU(a->fromT, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:76:3-69\n  keepWithIndex is never used\n  <-- line 76\n    @dead(\"keepWithIndex\") let keepWithIndex = (a, f) => Array.keepWithIndex(a->fromT, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:78:3-59\n  keepMapU is never used\n  <-- line 78\n    @dead(\"keepMapU\") let keepMapU = (a, f) => Array.keepMapU(a->fromT, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:79:3-57\n  keepMap is never used\n  <-- line 79\n    @dead(\"keepMap\") let keepMap = (a, f) => Array.keepMap(a->fromT, f)->toT\n\n  Warning Dead 
Value\n  ImmutableArray.res:81:3-72\n  forEachWithIndexU is never used\n  <-- line 81\n    @dead(\"forEachWithIndexU\") let forEachWithIndexU = (a, f) => Array.forEachWithIndexU(a->fromT, f)\n\n  Warning Dead Value\n  ImmutableArray.res:82:3-70\n  forEachWithIndex is never used\n  <-- line 82\n    @dead(\"forEachWithIndex\") let forEachWithIndex = (a, f) => Array.forEachWithIndex(a->fromT, f)\n\n  Warning Dead Value\n  ImmutableArray.res:84:3-69\n  mapWithIndexU is never used\n  <-- line 84\n    @dead(\"mapWithIndexU\") let mapWithIndexU = (a, f) => Array.mapWithIndexU(a->fromT, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:85:3-67\n  mapWithIndex is never used\n  <-- line 85\n    @dead(\"mapWithIndex\") let mapWithIndex = (a, f) => Array.mapWithIndex(a->fromT, f)->toT\n\n  Warning Dead Value\n  ImmutableArray.res:87:3-64\n  partitionU is never used\n  <-- line 87\n    @dead(\"partitionU\") let partitionU = (a, f) => Array.partitionU(a->fromT, f)->toT2\n\n  Warning Dead Value\n  ImmutableArray.res:88:3-62\n  partition is never used\n  <-- line 88\n    @dead(\"partition\") let partition = (a, f) => Array.partition(a->fromT, f)->toT2\n\n  Warning Dead Value\n  ImmutableArray.res:90:3-58\n  reduceU is never used\n  <-- line 90\n    @dead(\"reduceU\") let reduceU = (a, b, f) => Array.reduceU(a->fromT, b, f)\n\n  Warning Dead Value\n  ImmutableArray.res:91:3-56\n  reduce is never used\n  <-- line 91\n    @dead(\"reduce\") let reduce = (a, b, f) => Array.reduce(a->fromT, b, f)\n\n  Warning Dead Value\n  ImmutableArray.res:93:3-72\n  reduceReverseU is never used\n  <-- line 93\n    @dead(\"reduceReverseU\") let reduceReverseU = (a, b, f) => Array.reduceReverseU(a->fromT, b, f)\n\n  Warning Dead Value\n  ImmutableArray.res:94:3-70\n  reduceReverse is never used\n  <-- line 94\n    @dead(\"reduceReverse\") let reduceReverse = (a, b, f) => Array.reduceReverse(a->fromT, b, f)\n\n  Warning Dead Value\n  ImmutableArray.res:96:3-91\n  reduceReverse2U is never used\n  <-- 
line 96\n    @dead(\"reduceReverse2U\") let reduceReverse2U = (a1, a2, c, f) => Array.reduceReverse2U(fromT(a1), fromT(a2), c, f)\n\n  Warning Dead Value\n  ImmutableArray.res:97:3-89\n  reduceReverse2 is never used\n  <-- line 97\n    @dead(\"reduceReverse2\") let reduceReverse2 = (a1, a2, c, f) => Array.reduceReverse2(fromT(a1), fromT(a2), c, f)\n\n  Warning Dead Value\n  ImmutableArray.res:99:3-48\n  someU is never used\n  <-- line 99\n    @dead(\"someU\") let someU = (a, f) => Array.someU(a->fromT, f)\n\n  Warning Dead Value\n  ImmutableArray.res:100:3-46\n  some is never used\n  <-- line 100\n    @dead(\"some\") let some = (a, f) => Array.some(a->fromT, f)\n\n  Warning Dead Value\n  ImmutableArray.res:102:3-50\n  everyU is never used\n  <-- line 102\n    @dead(\"everyU\") let everyU = (a, f) => Array.everyU(a->fromT, f)\n\n  Warning Dead Value\n  ImmutableArray.res:103:3-48\n  every is never used\n  <-- line 103\n    @dead(\"every\") let every = (a, f) => Array.every(a->fromT, f)\n\n  Warning Dead Value\n  ImmutableArray.res:105:3-69\n  every2U is never used\n  <-- line 105\n    @dead(\"every2U\") let every2U = (a1, a2, f) => Array.every2U(fromT(a1), fromT(a2), f)\n\n  Warning Dead Value\n  ImmutableArray.res:106:3-67\n  every2 is never used\n  <-- line 106\n    @dead(\"every2\") let every2 = (a1, a2, f) => Array.every2(fromT(a1), fromT(a2), f)\n\n  Warning Dead Value\n  ImmutableArray.res:108:3-67\n  some2U is never used\n  <-- line 108\n    @dead(\"some2U\") let some2U = (a1, a2, f) => Array.some2U(fromT(a1), fromT(a2), f)\n\n  Warning Dead Value\n  ImmutableArray.res:109:3-65\n  some2 is never used\n  <-- line 109\n    @dead(\"some2\") let some2 = (a1, a2, f) => Array.some2(fromT(a1), fromT(a2), f)\n\n  Warning Dead Value\n  ImmutableArray.res:111:3-63\n  cmpU is never used\n  <-- line 111\n    @dead(\"cmpU\") let cmpU = (a1, a2, f) => Array.cmpU(fromT(a1), fromT(a2), f)\n\n  Warning Dead Value\n  ImmutableArray.res:112:3-61\n  cmp is never used\n  <-- line 
112\n    @dead(\"cmp\") let cmp = (a1, a2, f) => Array.cmp(fromT(a1), fromT(a2), f)\n\n  Warning Dead Value\n  ImmutableArray.res:114:3-61\n  eqU is never used\n  <-- line 114\n    @dead(\"eqU\") let eqU = (a1, a2, f) => Array.eqU(fromT(a1), fromT(a2), f)\n\n  Warning Dead Value\n  ImmutableArray.res:115:3-59\n  eq is never used\n  <-- line 115\n    @dead(\"eq\") let eq = (a1, a2, f) => Array.eq(fromT(a1), fromT(a2), f)\n\n  Warning Dead Value\n  ImmutableArray.resi:12:1-31\n  toArray is never used\n  <-- line 12\n  @dead(\"toArray\") let toArray: t<'a> => array<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:14:1-107\n  length is never used\n  <-- line 14\n  @dead(\"length\") @ocaml.doc(\" Subset of the Belt.Array oprerations that do not mutate the array. \")\n\n  Warning Dead Value\n  ImmutableArray.resi:17:1-22\n  size is never used\n  <-- line 17\n  @dead(\"size\") let size: t<'a> => int\n\n  Warning Dead Value\n  ImmutableArray.resi:19:1-35\n  get is never used\n  <-- line 19\n  @dead(\"get\") let get: (t<'a>, int) => option<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:21:1-30\n  getExn is never used\n  <-- line 21\n  @dead(\"getExn\") let getExn: (t<'a>, int) => 'a\n\n  Warning Dead Value\n  ImmutableArray.resi:23:1-33\n  getUnsafe is never used\n  <-- line 23\n  @dead(\"getUnsafe\") let getUnsafe: (t<'a>, int) => 'a\n\n  Warning Dead Value\n  ImmutableArray.resi:25:1-50\n  getUndefined is never used\n  <-- line 25\n  @dead(\"getUndefined\") let getUndefined: (t<'a>, int) => Js.undefined<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:27:1-27\n  shuffle is never used\n  <-- line 27\n  @dead(\"shuffle\") let shuffle: t<'a> => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:29:1-27\n  reverse is never used\n  <-- line 29\n  @dead(\"reverse\") let reverse: t<'a> => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:31:1-49\n  makeUninitialized is never used\n  <-- line 31\n  @dead(\"makeUninitialized\") let makeUninitialized: int => 
t<Js.undefined<'a>>\n\n  Warning Dead Value\n  ImmutableArray.resi:33:1-41\n  makeUninitializedUnsafe is never used\n  <-- line 33\n  @dead(\"makeUninitializedUnsafe\") let makeUninitializedUnsafe: int => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:35:1-28\n  make is never used\n  <-- line 35\n  @dead(\"make\") let make: (int, 'a) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:37:1-31\n  range is never used\n  <-- line 37\n  @dead(\"range\") let range: (int, int) => t<int>\n\n  Warning Dead Value\n  ImmutableArray.resi:39:1-45\n  rangeBy is never used\n  <-- line 39\n  @dead(\"rangeBy\") let rangeBy: (int, int, ~step: int) => t<int>\n\n  Warning Dead Value\n  ImmutableArray.resi:41:1-42\n  makeByU is never used\n  <-- line 41\n  @dead(\"makeByU\") let makeByU: (int, (. int) => 'a) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:42:1-37\n  makeBy is never used\n  <-- line 42\n  @dead(\"makeBy\") let makeBy: (int, int => 'a) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:44:1-52\n  makeByAndShuffleU is never used\n  <-- line 44\n  @dead(\"makeByAndShuffleU\") let makeByAndShuffleU: (int, (. int) => 'a) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:45:1-47\n  makeByAndShuffle is never used\n  <-- line 45\n  @dead(\"makeByAndShuffle\") let makeByAndShuffle: (int, int => 'a) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:47:1-38\n  zip is never used\n  <-- line 47\n  @dead(\"zip\") let zip: (t<'a>, t<'b>) => t<('a, 'b)>\n\n  Warning Dead Value\n  ImmutableArray.resi:49:1-53\n  zipByU is never used\n  <-- line 49\n  @dead(\"zipByU\") let zipByU: (t<'a>, t<'b>, (. 
'a, 'b) => 'c) => t<'c>\n\n  Warning Dead Value\n  ImmutableArray.resi:50:1-50\n  zipBy is never used\n  <-- line 50\n  @dead(\"zipBy\") let zipBy: (t<'a>, t<'b>, ('a, 'b) => 'c) => t<'c>\n\n  Warning Dead Value\n  ImmutableArray.resi:52:1-40\n  unzip is never used\n  <-- line 52\n  @dead(\"unzip\") let unzip: t<('a, 'a)> => (t<'a>, t<'a>)\n\n  Warning Dead Value\n  ImmutableArray.resi:54:1-35\n  concat is never used\n  <-- line 54\n  @dead(\"concat\") let concat: (t<'a>, t<'a>) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:56:1-33\n  concatMany is never used\n  <-- line 56\n  @dead(\"concatMany\") let concatMany: t<t<'a>> => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:58:1-52\n  slice is never used\n  <-- line 58\n  @dead(\"slice\") let slice: (t<'a>, ~offset: int, ~len: int) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:60:1-37\n  sliceToEnd is never used\n  <-- line 60\n  @dead(\"sliceToEnd\") let sliceToEnd: (t<'a>, int) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:62:1-24\n  copy is never used\n  <-- line 62\n  @dead(\"copy\") let copy: t<'a> => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:64:1-45\n  forEachU is never used\n  <-- line 64\n  @dead(\"forEachU\") let forEachU: (t<'a>, (. 'a) => unit) => unit\n\n  Warning Dead Value\n  ImmutableArray.resi:65:1-40\n  forEach is never used\n  <-- line 65\n  @dead(\"forEach\") let forEach: (t<'a>, 'a => unit) => unit\n\n  Warning Dead Value\n  ImmutableArray.resi:67:1-40\n  mapU is never used\n  <-- line 67\n  @dead(\"mapU\") let mapU: (t<'a>, (. 'a) => 'b) => t<'b>\n\n  Warning Dead Value\n  ImmutableArray.resi:68:1-35\n  map is never used\n  <-- line 68\n  @dead(\"map\") let map: (t<'a>, 'a => 'b) => t<'b>\n\n  Warning Dead Value\n  ImmutableArray.resi:70:1-57\n  keepWithIndexU is never used\n  <-- line 70\n  @dead(\"keepWithIndexU\") let keepWithIndexU: (t<'a>, (. 
'a, int) => bool) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:71:1-54\n  keepWithIndex is never used\n  <-- line 71\n  @dead(\"keepWithIndex\") let keepWithIndex: (t<'a>, ('a, int) => bool) => t<'a>\n\n  Warning Dead Value\n  ImmutableArray.resi:73:1-52\n  keepMapU is never used\n  <-- line 73\n  @dead(\"keepMapU\") let keepMapU: (t<'a>, (. 'a) => option<'b>) => t<'b>\n\n  Warning Dead Value\n  ImmutableArray.resi:74:1-47\n  keepMap is never used\n  <-- line 74\n  @dead(\"keepMap\") let keepMap: (t<'a>, 'a => option<'b>) => t<'b>\n\n  Warning Dead Value\n  ImmutableArray.resi:76:1-59\n  forEachWithIndexU is never used\n  <-- line 76\n  @dead(\"forEachWithIndexU\") let forEachWithIndexU: (t<'a>, (. int, 'a) => unit) => unit\n\n  Warning Dead Value\n  ImmutableArray.resi:77:1-56\n  forEachWithIndex is never used\n  <-- line 77\n  @dead(\"forEachWithIndex\") let forEachWithIndex: (t<'a>, (int, 'a) => unit) => unit\n\n  Warning Dead Value\n  ImmutableArray.resi:79:1-54\n  mapWithIndexU is never used\n  <-- line 79\n  @dead(\"mapWithIndexU\") let mapWithIndexU: (t<'a>, (. int, 'a) => 'b) => t<'b>\n\n  Warning Dead Value\n  ImmutableArray.resi:80:1-51\n  mapWithIndex is never used\n  <-- line 80\n  @dead(\"mapWithIndex\") let mapWithIndex: (t<'a>, (int, 'a) => 'b) => t<'b>\n\n  Warning Dead Value\n  ImmutableArray.resi:82:1-57\n  partitionU is never used\n  <-- line 82\n  @dead(\"partitionU\") let partitionU: (t<'a>, (. 'a) => bool) => (t<'a>, t<'a>)\n\n  Warning Dead Value\n  ImmutableArray.resi:83:1-52\n  partition is never used\n  <-- line 83\n  @dead(\"partition\") let partition: (t<'a>, 'a => bool) => (t<'a>, t<'a>)\n\n  Warning Dead Value\n  ImmutableArray.resi:85:1-48\n  reduceU is never used\n  <-- line 85\n  @dead(\"reduceU\") let reduceU: (t<'a>, 'b, (. 
'b, 'a) => 'b) => 'b\n\n  Warning Dead Value\n  ImmutableArray.resi:86:1-45\n  reduce is never used\n  <-- line 86\n  @dead(\"reduce\") let reduce: (t<'a>, 'b, ('b, 'a) => 'b) => 'b\n\n  Warning Dead Value\n  ImmutableArray.resi:88:1-55\n  reduceReverseU is never used\n  <-- line 88\n  @dead(\"reduceReverseU\") let reduceReverseU: (t<'a>, 'b, (. 'b, 'a) => 'b) => 'b\n\n  Warning Dead Value\n  ImmutableArray.resi:89:1-52\n  reduceReverse is never used\n  <-- line 89\n  @dead(\"reduceReverse\") let reduceReverse: (t<'a>, 'b, ('b, 'a) => 'b) => 'b\n\n  Warning Dead Value\n  ImmutableArray.resi:91:1-67\n  reduceReverse2U is never used\n  <-- line 91\n  @dead(\"reduceReverse2U\") let reduceReverse2U: (t<'a>, t<'b>, 'c, (. 'c, 'a, 'b) => 'c) => 'c\n\n  Warning Dead Value\n  ImmutableArray.resi:92:1-64\n  reduceReverse2 is never used\n  <-- line 92\n  @dead(\"reduceReverse2\") let reduceReverse2: (t<'a>, t<'b>, 'c, ('c, 'a, 'b) => 'c) => 'c\n\n  Warning Dead Value\n  ImmutableArray.resi:94:1-42\n  someU is never used\n  <-- line 94\n  @dead(\"someU\") let someU: (t<'a>, (. 'a) => bool) => bool\n\n  Warning Dead Value\n  ImmutableArray.resi:95:1-37\n  some is never used\n  <-- line 95\n  @dead(\"some\") let some: (t<'a>, 'a => bool) => bool\n\n  Warning Dead Value\n  ImmutableArray.resi:97:1-43\n  everyU is never used\n  <-- line 97\n  @dead(\"everyU\") let everyU: (t<'a>, (. 'a) => bool) => bool\n\n  Warning Dead Value\n  ImmutableArray.resi:98:1-38\n  every is never used\n  <-- line 98\n  @dead(\"every\") let every: (t<'a>, 'a => bool) => bool\n\n  Warning Dead Value\n  ImmutableArray.resi:100:1-55\n  every2U is never used\n  <-- line 100\n  @dead(\"every2U\") let every2U: (t<'a>, t<'b>, (. 
'a, 'b) => bool) => bool\n\n  Warning Dead Value\n  ImmutableArray.resi:101:1-52\n  every2 is never used\n  <-- line 101\n  @dead(\"every2\") let every2: (t<'a>, t<'b>, ('a, 'b) => bool) => bool\n\n  Warning Dead Value\n  ImmutableArray.resi:103:1-54\n  some2U is never used\n  <-- line 103\n  @dead(\"some2U\") let some2U: (t<'a>, t<'b>, (. 'a, 'b) => bool) => bool\n\n  Warning Dead Value\n  ImmutableArray.resi:104:1-51\n  some2 is never used\n  <-- line 104\n  @dead(\"some2\") let some2: (t<'a>, t<'b>, ('a, 'b) => bool) => bool\n\n  Warning Dead Value\n  ImmutableArray.resi:106:1-50\n  cmpU is never used\n  <-- line 106\n  @dead(\"cmpU\") let cmpU: (t<'a>, t<'a>, (. 'a, 'a) => int) => int\n\n  Warning Dead Value\n  ImmutableArray.resi:107:1-47\n  cmp is never used\n  <-- line 107\n  @dead(\"cmp\") let cmp: (t<'a>, t<'a>, ('a, 'a) => int) => int\n\n  Warning Dead Value\n  ImmutableArray.resi:109:1-51\n  eqU is never used\n  <-- line 109\n  @dead(\"eqU\") let eqU: (t<'a>, t<'a>, (. 'a, 'a) => bool) => bool\n\n  Warning Dead Value\n  ImmutableArray.resi:110:1-48\n  eq is never used\n  <-- line 110\n  @dead(\"eq\") let eq: (t<'a>, t<'a>, ('a, 'a) => bool) => bool\n\n  Warning Dead Type\n  ImportHookDefault.res:2:3-14\n  person.name is a record label never used to read a value\n  <-- line 2\n    @dead(\"person.name\") name: string,\n\n  Warning Dead Type\n  ImportHookDefault.res:3:3-10\n  person.age is a record label never used to read a value\n  <-- line 3\n    @dead(\"person.age\") age: int,\n\n  Warning Dead Type\n  ImportHooks.res:3:3-14\n  person.name is a record label never used to read a value\n  <-- line 3\n    @dead(\"person.name\") name: string,\n\n  Warning Dead Type\n  ImportHooks.res:4:3-10\n  person.age is a record label never used to read a value\n  <-- line 4\n    @dead(\"person.age\") age: int,\n\n  Warning Dead Type\n  ImportJsValue.res:11:3-8\n  point.x is a record label never used to read a value\n  <-- line 11\n    @dead(\"point.x\") x: int,\n\n  
Warning Dead Type\n  ImportJsValue.res:12:3-16\n  point.y is a record label never used to read a value\n  <-- line 12\n    @dead(\"point.y\") y: option<int>,\n\n  Warning Dead Type\n  ImportJsValue.res:67:3-10\n  variant.I is a variant case which is never constructed\n  <-- line 67\n    | @dead(\"variant.I\") I(int)\n\n  Warning Dead Type\n  ImportJsValue.res:68:5-13\n  variant.S is a variant case which is never constructed\n  <-- line 68\n    | @dead(\"variant.S\") S(string)\n\n  Warning Dead Type\n  ImportMyBanner.res:5:17-28\n  message.text is a record label never used to read a value\n  <-- line 5\n  type message = {@dead(\"message.text\") text: string}\n\n  Warning Dead Value\n  ImportMyBanner.res:12:1-15\n  make is never used\n  <-- line 12\n  @dead(\"make\") let make = make\n\n  Warning Dead Module\n  ModuleAliases.res:2:10-56\n  ModuleAliases.Outer.Inner is a dead module as all its items are dead.\n\n  Warning Dead Type\n  ModuleAliases.res:3:20-32\n  Outer.Inner.innerT.inner is a record label never used to read a value\n  <-- line 3\n      type innerT = {@dead(\"Outer.Inner.innerT.inner\") inner: string}\n\n  Warning Dead Module\n  ModuleAliases.res:10:12-61\n  ModuleAliases.Outer2.Inner2.InnerNested is a dead module as all its items are dead.\n\n  Warning Dead Type\n  ModuleAliases.res:11:17-27\n  Outer2.Inner2.InnerNested.t.nested is a record label never used to read a value\n  <-- line 11\n        type t = {@dead(\"Outer2.Inner2.InnerNested.t.nested\") nested: int}\n\n  Warning Dead Module\n  ModuleAliases2.res:0:1\n  ModuleAliases2 is a dead module as all its items are dead.\n\n  Warning Dead Type\n  ModuleAliases2.res:3:3-8\n  record.x is a record label never used to read a value\n  <-- line 3\n    @dead(\"record.x\") x: int,\n\n  Warning Dead Type\n  ModuleAliases2.res:4:3-11\n  record.y is a record label never used to read a value\n  <-- line 4\n    @dead(\"record.y\") y: string,\n\n  Warning Dead Module\n  ModuleAliases2.res:7:8-130\n  
ModuleAliases2.Outer is a dead module as all its items are dead.\n\n  Warning Dead Type\n  ModuleAliases2.res:9:17-29\n  Outer.outer.outer is a record label never used to read a value\n  <-- line 9\n    type outer = {@dead(\"Outer.outer.outer\") outer: string}\n\n  Warning Dead Module\n  ModuleAliases2.res:11:10-68\n  ModuleAliases2.Outer.Inner is a dead module as all its items are dead.\n\n  Warning Dead Type\n  ModuleAliases2.res:13:19-31\n  Outer.Inner.inner.inner is a record label never used to read a value\n  <-- line 13\n      type inner = {@dead(\"Outer.Inner.inner.inner\") inner: string}\n\n  Warning Dead Value\n  ModuleAliases2.res:21:1-10\n  q is never used\n  <-- line 21\n  @dead(\"q\") let q = 42\n\n  Warning Dead Module\n  ModuleExceptionBug.res:1:8-52\n  ModuleExceptionBug.Dep is a dead module as all its items are dead.\n\n  Warning Dead Value\n  ModuleExceptionBug.res:2:3-35\n  Dep.customDouble is never used\n  <-- line 2\n    @dead(\"Dep.customDouble\") let customDouble = foo => foo * 2\n\n  Warning Dead Exception\n  ModuleExceptionBug.res:5:1-26\n  MyOtherException is never raised or passed as value\n  <-- line 5\n  @dead(\"MyOtherException\") exception MyOtherException\n\n  Warning Dead Value\n  NestedModules.res:8:3-22\n  Universe.notExported is never used\n  <-- line 8\n    @dead(\"Universe.notExported\") let notExported = 33\n\n  Warning Dead Value\n  NestedModules.res:14:5-13\n  Universe.Nested2.x is never used\n  <-- line 14\n      @dead(\"Universe.Nested2.x\") let x = 0\n\n  Warning Dead Value\n  NestedModules.res:19:5-13\n  Universe.Nested2.y is never used\n  <-- line 19\n      @dead(\"Universe.Nested2.y\") let y = 2\n\n  Warning Dead Value\n  NestedModules.res:25:7-15\n  Universe.Nested2.Nested3.x is never used\n  <-- line 25\n        @dead(\"Universe.Nested2.Nested3.x\") let x = 0\n\n  Warning Dead Value\n  NestedModules.res:26:7-15\n  Universe.Nested2.Nested3.y is never used\n  <-- line 26\n        @dead(\"Universe.Nested2.Nested3.y\") 
let y = 1\n\n  Warning Dead Value\n  NestedModules.res:27:7-15\n  Universe.Nested2.Nested3.z is never used\n  <-- line 27\n        @dead(\"Universe.Nested2.Nested3.z\") let z = 2\n\n  Warning Dead Value\n  NestedModules.res:28:7-15\n  Universe.Nested2.Nested3.w is never used\n  <-- line 28\n        @dead(\"Universe.Nested2.Nested3.w\") let w = 3\n\n  Warning Dead Type\n  NestedModules.res:46:5-7\n  Universe.variant.A is a variant case which is never constructed\n  <-- line 46\n      | @dead(\"Universe.variant.A\") A\n\n  Warning Dead Type\n  NestedModules.res:47:7-15\n  Universe.variant.B is a variant case which is never constructed\n  <-- line 47\n      | @dead(\"Universe.variant.B\") B(string)\n\n  Warning Dead Module\n  Newsyntax.res:0:1\n  Newsyntax is a dead module as all its items are dead.\n\n  Warning Dead Value\n  Newsyntax.res:1:1-10\n  x is never used\n  <-- line 1\n  @dead(\"x\") let x = 34\n\n  Warning Dead Value\n  Newsyntax.res:3:1-10\n  y is never used\n  <-- line 3\n  @dead(\"y\") let y = 11\n\n  Warning Dead Type\n  Newsyntax.res:6:3-10\n  record.xxx is a record label never used to read a value\n  <-- line 6\n    @dead(\"record.xxx\") xxx: int,\n\n  Warning Dead Type\n  Newsyntax.res:7:3-10\n  record.yyy is a record label never used to read a value\n  <-- line 7\n    @dead(\"record.yyy\") yyy: int,\n\n  Warning Dead Type\n  Newsyntax.res:10:16\n  variant.A is a variant case which is never constructed\n  <-- line 10\n  type variant = | @dead(\"variant.A\") A | @dead(\"variant.B\") B(int)|@dead(\"variant.C\") C\n\n  Warning Dead Type\n  Newsyntax.res:10:20-25\n  variant.B is a variant case which is never constructed\n  <-- line 10\n  type variant = | @dead(\"variant.A\") A | @dead(\"variant.B\") B(int)|@dead(\"variant.C\") C\n\n  Warning Dead Type\n  Newsyntax.res:10:26-27\n  variant.C is a variant case which is never constructed\n  <-- line 10\n  type variant = | @dead(\"variant.A\") A | @dead(\"variant.B\") B(int)|@dead(\"variant.C\") C\n\n  
Warning Dead Type\n  Newsyntax.res:12:17-22\n  record2.xx is a record label never used to read a value\n  <-- line 12\n  type record2 = {@dead(\"record2.xx\") xx:int,@dead(\"record2.yy\") yy:int}\n\n  Warning Dead Type\n  Newsyntax.res:12:24-29\n  record2.yy is a record label never used to read a value\n  <-- line 12\n  type record2 = {@dead(\"record2.xx\") xx:int,@dead(\"record2.yy\") yy:int}\n\n  Warning Dead Type\n  Opaque.res:2:26-41\n  opaqueFromRecords.A is a variant case which is never constructed\n  <-- line 2\n  type opaqueFromRecords = | @dead(\"opaqueFromRecords.A\") A(Records.coord)\n\n  Warning Dead Value\n  OptArg.resi:1:1-54\n  foo is never used\n  <-- line 1\n  @dead(\"foo\") let foo: (~x: int=?, ~y: int=?, ~z: int=?, int) => int\n\n  Warning Dead Type\n  Records.res:24:3-14\n  person.name is a record label never used to read a value\n  <-- line 24\n    @dead(\"person.name\") name: string,\n\n  Warning Dead Type\n  Records.res:25:3-10\n  person.age is a record label never used to read a value\n  <-- line 25\n    @dead(\"person.age\") age: int,\n\n  Warning Dead Type\n  Records.res:31:3-14\n  business.name is a record label never used to read a value\n  <-- line 31\n    @dead(\"business.name\") name: string,\n\n  Warning Dead Type\n  Records.res:60:3-10\n  payload.num is a record label never used to read a value\n  <-- line 60\n    @dead(\"payload.num\") num: int,\n\n  Warning Dead Type\n  Records.res:70:3-8\n  record.w is a record label never used to read a value\n  <-- line 70\n    @dead(\"record.w\") w: int,\n\n  Warning Dead Type\n  Records.res:90:3-14\n  business2.name is a record label never used to read a value\n  <-- line 90\n    @dead(\"business2.name\") name: string,\n\n  Warning Dead Type\n  Records.res:91:3-30\n  business2.owner is a record label never used to read a value\n  <-- line 91\n    @dead(\"business2.owner\") owner: Js.Nullable.t<person>,\n\n  Warning Dead Type\n  References.res:39:28-33\n  requiresConversion.x is a record label 
never used to read a value\n  <-- line 39\n  type requiresConversion = {@dead(\"requiresConversion.x\") x: int}\n\n  Warning Dead Type\n  RepeatedLabel.res:2:3-9\n  userData.a is a record label never used to read a value\n  <-- line 2\n    @dead(\"userData.a\") a: bool,\n\n  Warning Dead Type\n  RepeatedLabel.res:3:3-8\n  userData.b is a record label never used to read a value\n  <-- line 3\n    @dead(\"userData.b\") b: int,\n\n  Warning Dead Type\n  RepeatedLabel.res:9:3-11\n  tabState.f is a record label never used to read a value\n  <-- line 9\n    @dead(\"tabState.f\") f: string,\n\n  Warning Dead Value\n  Shadow.res:11:3-22\n  M.test is never used\n  <-- line 11\n    @dead(\"M.test\") let test = () => \"a\"\n\n  Warning Dead Value\n  TestImmutableArray.res:12:1-54\n  testBeltArrayGet is never used\n  <-- line 12\n  @dead(\"testBeltArrayGet\") let testBeltArrayGet = arr => {\n\n  Warning Dead Value\n  TestImmutableArray.res:17:1-58\n  testBeltArraySet is never used\n  <-- line 17\n  @dead(\"testBeltArraySet\") let testBeltArraySet = arr => {\n\n  Warning Dead Value\n  TestImport.res:13:1-43\n  innerStuffContents is never used\n  <-- line 13\n  @dead(\"innerStuffContents\") let innerStuffContents = innerStuffContents\n\n  Warning Dead Type\n  TestImport.res:22:17-28\n  message.text is a record label never used to read a value\n  <-- line 22\n  type message = {@dead(\"message.text\") text: string}\n\n  Warning Dead Value\n  TestImport.res:27:1-15\n  make is never used\n  <-- line 27\n  @dead(\"make\") let make = make\n\n  Warning Dead Type\n  TestPromise.res:6:3-8\n  fromPayload.x is a record label never used to read a value\n  <-- line 6\n    @dead(\"fromPayload.x\") x: int,\n\n  Warning Dead Type\n  TestPromise.res:11:19-32\n  toPayload.result is a record label never used to read a value\n  <-- line 11\n  type toPayload = {@dead(\"toPayload.result\") result: string}\n\n  Warning Dead Module\n  TransitiveType2.res:0:1\n  TransitiveType2 is a dead module as all 
its items are dead.\n\n  Warning Dead Value\n  TransitiveType2.res:7:1-28\n  convertT2 is never used\n  <-- line 7\n  @dead(\"convertT2\") let convertT2 = (x: t2) => x\n\n  Warning Dead Type\n  TransitiveType3.res:3:3-8\n  t3.i is a record label never used to read a value\n  <-- line 3\n    @dead(\"t3.i\") i: int,\n\n  Warning Dead Type\n  TransitiveType3.res:4:3-11\n  t3.s is a record label never used to read a value\n  <-- line 4\n    @dead(\"t3.s\") s: string,\n\n  Warning Dead Module\n  TypeParams1.res:0:1\n  TypeParams1 is a dead module as all its items are dead.\n\n  Warning Dead Value\n  TypeParams1.res:4:1-24\n  exportSomething is never used\n  <-- line 4\n  @dead(\"exportSomething\") let exportSomething = 10\n\n  Warning Dead Module\n  TypeParams2.res:0:1\n  TypeParams2 is a dead module as all its items are dead.\n\n  Warning Dead Type\n  TypeParams2.res:2:14-20\n  item.id is a record label never used to read a value\n  <-- line 2\n  type item = {@dead(\"item.id\") id: int}\n\n  Warning Dead Value\n  TypeParams2.res:10:1-24\n  exportSomething is never used\n  <-- line 10\n  @dead(\"exportSomething\") let exportSomething = 10\n\n  Warning Dead Type\n  Types.res:12:3-13\n  typeWithVars.A is a variant case which is never constructed\n  <-- line 12\n    | @dead(\"typeWithVars.A\") A('x, 'y)\n\n  Warning Dead Type\n  Types.res:13:5-9\n  typeWithVars.B is a variant case which is never constructed\n  <-- line 13\n    | @dead(\"typeWithVars.B\") B('z)\n\n  Warning Dead Type\n  Types.res:35:27-47\n  mutuallyRecursiveB.a is a record label never used to read a value\n  <-- line 35\n  and mutuallyRecursiveB = {@dead(\"mutuallyRecursiveB.a\") a: mutuallyRecursiveA}\n\n  Warning Dead Type\n  Types.res:56:3-5\n  opaqueVariant.A is a variant case which is never constructed\n  <-- line 56\n    | @dead(\"opaqueVariant.A\") A\n\n  Warning Dead Type\n  Types.res:57:5\n  opaqueVariant.B is a variant case which is never constructed\n  <-- line 57\n    | 
@dead(\"opaqueVariant.B\") B\n\n  Warning Dead Type\n  Types.res:87:3-8\n  record.i is a record label never used to read a value\n  <-- line 87\n    @dead(\"record.i\") i: int,\n\n  Warning Dead Type\n  Types.res:88:3-11\n  record.s is a record label never used to read a value\n  <-- line 88\n    @dead(\"record.s\") s: string,\n\n  Warning Dead Type\n  Types.res:133:20-26\n  someRecord.id is a record label never used to read a value\n  <-- line 133\n  type someRecord = {@dead(\"someRecord.id\") id: int}\n\n  Warning Dead Module\n  Types.res:161:8-79\n  Types.ObjectId is a dead module as all its items are dead.\n\n  Warning Dead Value\n  Types.res:166:3-11\n  ObjectId.x is never used\n  <-- line 166\n    @dead(\"ObjectId.x\") let x = 1\n\n  Warning Dead Type\n  Unboxed.res:2:11-16\n  v1.A is a variant case which is never constructed\n  <-- line 2\n  type v1 = | @dead(\"v1.A\") A(int)\n\n  Warning Dead Type\n  Unboxed.res:5:11-16\n  v2.A is a variant case which is never constructed\n  <-- line 5\n  type v2 = | @dead(\"v2.A\") A(int)\n\n  Warning Dead Type\n  Unboxed.res:11:12-17\n  r1.x is a record label never used to read a value\n  <-- line 11\n  type r1 = {@dead(\"r1.x\") x: int}\n\n  Warning Dead Type\n  Unboxed.res:14:11-24\n  r2.B is a variant case which is never constructed\n  <-- line 14\n  type r2 = | @dead(\"r2.B\") B({@dead(\"r2.B.g\") g: string})\n\n  Warning Dead Type\n  Unboxed.res:14:14-22\n  r2.B.g is a record label never used to read a value\n  <-- line 14\n  type r2 = | @dead(\"r2.B\") B({@dead(\"r2.B.g\") g: string})\n\n  Warning Dead Type\n  Variants.res:95:14-39\n  type_.Type is a variant case which is never constructed\n  <-- line 95\n  type type_ = | @dead(\"type_.Type\") @genType.as(\"type\") Type\n\n  Warning Dead Type\n  Variants.res:102:3-10\n  result1.Ok is a variant case which is never constructed\n  <-- line 102\n    | @dead(\"result1.Ok\") Ok('a)\n\n  Warning Dead Type\n  Variants.res:103:5-13\n  result1.Error is a variant case which is 
never constructed\n  <-- line 103\n    | @dead(\"result1.Error\") Error('b)\n\n  Warning Dead Type\n  VariantsWithPayload.res:49:3-5\n  simpleVariant.A is a variant case which is never constructed\n  <-- line 49\n    | @dead(\"simpleVariant.A\") A\n\n  Warning Dead Type\n  VariantsWithPayload.res:50:5\n  simpleVariant.B is a variant case which is never constructed\n  <-- line 50\n    | @dead(\"simpleVariant.B\") B\n\n  Warning Dead Type\n  VariantsWithPayload.res:51:5\n  simpleVariant.C is a variant case which is never constructed\n  <-- line 51\n    | @dead(\"simpleVariant.C\") C\n\n  Warning Dead Type\n  VariantsWithPayload.res:58:3-29\n  variantWithPayloads.A is a variant case which is never constructed\n  <-- line 58\n    | @dead(\"variantWithPayloads.A\") @genType.as(\"ARenamed\") A\n\n  Warning Dead Type\n  VariantsWithPayload.res:59:5-10\n  variantWithPayloads.B is a variant case which is never constructed\n  <-- line 59\n    | @dead(\"variantWithPayloads.B\") B(int)\n\n  Warning Dead Type\n  VariantsWithPayload.res:60:5-15\n  variantWithPayloads.C is a variant case which is never constructed\n  <-- line 60\n    | @dead(\"variantWithPayloads.C\") C(int, int)\n\n  Warning Dead Type\n  VariantsWithPayload.res:61:5-17\n  variantWithPayloads.D is a variant case which is never constructed\n  <-- line 61\n    | @dead(\"variantWithPayloads.D\") D((int, int))\n\n  Warning Dead Type\n  VariantsWithPayload.res:62:5-23\n  variantWithPayloads.E is a variant case which is never constructed\n  <-- line 62\n    | @dead(\"variantWithPayloads.E\") E(int, string, int)\n\n  Warning Dead Type\n  VariantsWithPayload.res:90:20-25\n  variant1Int.R is a variant case which is never constructed\n  <-- line 90\n  type variant1Int = | @dead(\"variant1Int.R\") R(int)\n\n  Warning Dead Type\n  VariantsWithPayload.res:96:23-32\n  variant1Object.R is a variant case which is never constructed\n  <-- line 96\n  type variant1Object = | @dead(\"variant1Object.R\") R(payload)\n  \n  Analysis 
reported 306 issues (Incorrect Dead Annotation:1, Warning Dead Exception:2, Warning Dead Module:22, Warning Dead Type:86, Warning Dead Value:177, Warning Dead Value With Side Effects:2, Warning Redundant Optional Argument:5, Warning Unused Argument:11)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/expected/exception.txt",
    "content": "\n\n  Exception Analysis\n  Exn.res:1:5-10\n  raises might raise Not_found (Exn.res:1:19) and is not annotated with @raises(Not_found)\n\n  Exception Analysis\n  Exn.res:19:5-28\n  callsRaiseWithAnnotation might raise Not_found (Exn.res:19:31) and is not annotated with @raises(Not_found)\n\n  Exception Analysis\n  Exn.res:22:5-42\n  callsRaiseWithAnnotationAndIsAnnotated might raise Not_found (Exn.res:22:45) and is not annotated with @raises(Not_found)\n\n  Exception Analysis\n  Exn.res:22:5-42\n  callsRaiseWithAnnotationAndIsAnnotated might raise Not_found (Exn.res:22:45) and is annotated with redundant @raises(A)\n\n  Exception Analysis\n  Exn.res:24:5\n  z might raise Failure (Exn.res:24:8) and is not annotated with @raises(Failure)\n\n  Exception Analysis\n  Exn.res:26:5-19\n  incompleteMatch might raise Match_failure (Exn.res:27:2) and is not annotated with @raises(Match_failure)\n\n  Exception Analysis\n  Exn.res:34:5-13\n  twoRaises might raise [A (Exn.res:36:4), B (Exn.res:39:4)] and is not annotated with @raises([A, B])\n\n  Exception Analysis\n  Exn.res:43:5-14\n  sequencing might raise A (Exn.res:44:2) and is not annotated with @raises(A)\n\n  Exception Analysis\n  Exn.res:50:5-14\n  wrongCatch might raise B (Exn.res:51:6) and is not annotated with @raises(B)\n\n  Exception Analysis\n  Exn.res:56:5-15\n  wrongCatch2 might raise [C (Exn.res:57:24), Match_failure (Exn.res:57:2)] and is not annotated with @raises([C, Match_failure])\n\n  Exception Analysis\n  Exn.res:64:5-19\n  raise2Annotate3 might raise [A (Exn.res:66:4), B (Exn.res:69:4)] and is annotated with redundant @raises(C)\n\n  Exception Analysis\n  Exn.res:75:5-24\n  parse_json_from_file might raise Error (Exn.res:78:4) and is not annotated with @raises(Error)\n\n  Exception Analysis\n  Exn.res:84:5-11\n  reRaise might raise B (Exn.res:86:19) and is not annotated with @raises(B)\n\n  Exception Analysis\n  Exn.res:95:5-22\n  raiseInInternalLet might raise A (Exn.res:96:14) and is 
not annotated with @raises(A)\n\n  Exception Analysis\n  Exn.res:100:5-16\n  indirectCall might raise Not_found (Exn.res:100:31) and is not annotated with @raises(Not_found)\n\n  Exception Analysis\n  Exn.res:148:5-16\n  severalCases might raise Failure (Exn.res:150:13 Exn.res:151:13 Exn.res:152:15) and is not annotated with @raises(Failure)\n\n  Exception Analysis\n  Exn.res:159:32-56\n  String.uncapitalize_ascii does not raise and is annotated with redundant @doesNotRaise\n\n  Exception Analysis\n  Exn.res:161:32-63\n  String.uncapitalize_ascii does not raise and is annotated with redundant @doesNotRaise\n\n  Exception Analysis\n  Exn.res:163:47-71\n  String.uncapitalize_ascii does not raise and is annotated with redundant @doesNotRaise\n\n  Exception Analysis\n  Exn.res:163:47-79\n  expression does not raise and is annotated with redundant @doesNotRaise\n\n  Exception Analysis\n  Exn.res:169:51-55\n  expression does not raise and is annotated with redundant @doesNotRaise\n\n  Exception Analysis\n  Exn.res:167:25-56\n  String.uncapitalize_ascii does not raise and is annotated with redundant @doesNotRaise\n\n  Exception Analysis\n  Exn.res:176:5-23\n  redundantAnnotation raises nothing and is annotated with redundant @raises(Invalid_argument)\n\n  Exception Analysis\n  Exn.res:178:5-6\n  _x might raise A (Exn.res:178:9) and is not annotated with @raises(A)\n\n  Exception Analysis\n  Exn.res:180:5\n  _ might raise A (Exn.res:180:8) and is not annotated with @raises(A)\n\n  Exception Analysis\n  Exn.res:182:5-6\n  () might raise A (Exn.res:182:9) and is not annotated with @raises(A)\n\n  Exception Analysis\n  Exn.res:184:1-16\n  Toplevel expression might raise Not_found (Exn.res:184:0) and is not annotated with @raises(Not_found)\n\n  Exception Analysis\n  Exn.res:186:1-19\n  Toplevel expression might raise exit (Exn.res:186:7) and is not annotated with @raises(exit)\n\n  Exception Analysis\n  Exn.res:196:45-46\n  expression does not raise and is annotated with 
redundant @doesNotRaise\n\n  Exception Analysis\n  Exn.res:196:5-21\n  onResultPipeWrong might raise Assert_failure (Exn.res:196:48) and is not annotated with @raises(Assert_failure)\n\n  Exception Analysis\n  ExnA.res:1:5-7\n  bar might raise Not_found (ExnA.res:1:16) and is not annotated with @raises(Not_found)\n  \n  Analysis reported 31 issues (Exception Analysis:31)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/package.json",
    "content": "{\n  \"name\": \"deadcode\",\n  \"version\": \"0.1.0\",\n  \"private\": true,\n  \"devDependencies\": {\n    \"react\": \"^16.13.1\",\n    \"react-dom\": \"^16.8.6\",\n    \"rescript\": \"^10.1.2\"\n  },\n  \"dependencies\": {\n    \"@glennsl/bs-json\": \"^5.0.4\",\n    \"@rescript/react\": \"^0.10.3\"\n  }\n}\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/AutoAnnotate.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/AutoAnnotate.res",
    "content": "type variant = R(int)\n\n@genType\ntype record = {variant: variant}\n\ntype r2 = {r2: int}\n\ntype r3 = {r3: int}\n\ntype r4 = {r4: int}\n\n@genType\ntype annotatedVariant =\n  | R2(r2, r3)\n  | R4(r4)\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/BootloaderResource.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/BootloaderResource.res",
    "content": "/* NOTE: This is a spooky interface that provides no type safety. It should be\n * improved. Use with caution. */\n@module(\"BootloaderResource\")\nexternal read: JSResource.t<'a> => 'a = \"read\"\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/BucklescriptAnnotations.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction bar(x) {\n  var f = x.twoArgs;\n  return f(3, \"a\");\n}\n\nexport {\n  bar ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/BucklescriptAnnotations.res",
    "content": "@genType\ntype someMutableFields = {\n  @set\n  \"mutable0\": string,\n  \"immutable\": int,\n  @set\n  \"mutable1\": string,\n  @set\n  \"mutable2\": string,\n}\n\n@genType\ntype someMethods = {\n  @meth\n  \"send\": string => unit,\n  @meth\n  \"on\": (string, (. int) => unit) => unit,\n  @meth\n  \"threeargs\": (int, string, int) => string,\n  \"twoArgs\": (. int, string) => int,\n}\n\n// let foo = (x: someMethods) => x[\"threeargs\"](3, \"a\", 4)\n\nlet bar = (x: someMethods) => {\n  let f = x[\"twoArgs\"]\n  f(. 3, \"a\")\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ComponentAsProp.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as React from \"react\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\n\nfunction ComponentAsProp(Props) {\n  var title = Props.title;\n  var description = Props.description;\n  var button = Props.button;\n  return React.createElement(\"div\", undefined, React.createElement(\"div\", undefined, title, description, button !== undefined ? Caml_option.valFromOption(button) : null));\n}\n\nvar make = ComponentAsProp;\n\nexport {\n  make ,\n}\n/* react Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ComponentAsProp.res",
    "content": "@ocaml.doc(\n  \" This is like declaring a normal ReasonReact component's `make` function, except the body is a the interop hook wrapJsForReason \"\n)\n@genType\n@react.component\nlet make = (~title, ~description, ~button=?) => {\n  <div>\n    <div>\n      title\n      description\n      {switch button {\n      | Some(button) => button\n      | None => React.null\n      }}\n    </div>\n  </div>\n}\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/CreateErrorHandler1.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as ErrorHandler from \"./ErrorHandler.bs.js\";\n\nfunction notification(s) {\n  return [\n          s,\n          s\n        ];\n}\n\nvar Error1 = {\n  notification: notification\n};\n\nvar MyErrorHandler = ErrorHandler.Make(Error1);\n\nCurry._1(MyErrorHandler.notify, \"abc\");\n\nexport {\n  Error1 ,\n  MyErrorHandler ,\n}\n/* MyErrorHandler Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/CreateErrorHandler1.res",
    "content": "module Error1 = {\n  type t = string\n  let notification = s => (s, s)\n}\n\nmodule MyErrorHandler = ErrorHandler.Make(Error1)\n\nMyErrorHandler.notify(\"abc\")\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/CreateErrorHandler2.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as ErrorHandler from \"./ErrorHandler.bs.js\";\n\nfunction notification(n) {\n  return [\n          String(n),\n          \"\"\n        ];\n}\n\nvar Error2 = {\n  notification: notification\n};\n\nvar MyErrorHandler = ErrorHandler.Make(Error2);\n\nexport {\n  Error2 ,\n  MyErrorHandler ,\n}\n/* MyErrorHandler Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/CreateErrorHandler2.res",
    "content": "module Error2 = {\n  type t = int\n  let notification = n => (string_of_int(n), \"\")\n}\n\nmodule MyErrorHandler = ErrorHandler.Make(Error2) /* MyErrorHandler.notify(42) */\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadCodeImplementation.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar M = {\n  x: 42\n};\n\nexport {\n  M ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadCodeImplementation.res",
    "content": "module M: DeadCodeInterface.T = {\n  let x = 42\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadCodeInterface.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadCodeInterface.res",
    "content": "module type T = {\n  let x: int\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadExn.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nvar Etoplevel = /* @__PURE__ */Caml_exceptions.create(\"DeadExn.Etoplevel\");\n\nvar Einside = /* @__PURE__ */Caml_exceptions.create(\"DeadExn.Inside.Einside\");\n\nvar eInside = {\n  RE_EXN_ID: Einside\n};\n\nconsole.log(eInside);\n\nexport {\n  Etoplevel ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadExn.res",
    "content": "exception Etoplevel\n\nmodule Inside = {\n  exception Einside\n}\n\nexception DeadE\nlet eToplevel = Etoplevel\n\nlet eInside = Inside.Einside\n\nJs.log(eInside)\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadExn.resi",
    "content": "// empty\nexception Etoplevel\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadRT.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nconsole.log(/* Kaboom */0);\n\nexport {\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadRT.res",
    "content": "type moduleAccessPath =\n  | Root(string)\n  | Kaboom\n\nlet rec emitModuleAccessPath = moduleAccessPath =>\n  switch moduleAccessPath {\n  | Root(s) => s\n  | Kaboom => \"\"\n  }\n\nlet () = Js.log(Kaboom)\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadRT.resi",
    "content": "type moduleAccessPath =\n  | Root(string)\n  | Kaboom\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTest.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as React from \"react\";\nimport * as Caml_int64 from \"rescript/lib/es6/caml_int64.js\";\nimport * as JSResource from \"JSResource\";\nimport * as DeadValueTest from \"./DeadValueTest.bs.js\";\nimport * as ImmutableArray from \"./ImmutableArray.bs.js\";\nimport * as BootloaderResource from \"BootloaderResource\";\nimport * as DynamicallyLoadedComponent from \"./DynamicallyLoadedComponent.bs.js\";\n\nconsole.log(ImmutableArray.fromArray);\n\nvar Inner = {\n  thisIsAlsoMarkedDead: 99\n};\n\nvar M = {\n  thisSignatureItemIsDead: 34\n};\n\nvar VariantUsedOnlyInImplementation = {\n  a: /* A */0\n};\n\nvar UnderscoreInside = {};\n\nvar MM = {\n  x: 55,\n  y: 55\n};\n\nconsole.log(55);\n\nconsole.log(DeadValueTest.valueAlive);\n\nfunction unusedRec(_param) {\n  while(true) {\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction split_map(l) {\n  split_map(l);\n  return /* [] */0;\n}\n\nfunction rec1(_param) {\n  while(true) {\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction rec2(_param) {\n  while(true) {\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction recWithCallback(_param) {\n  while(true) {\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction foo(_param) {\n  while(true) {\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction bar(param) {\n  return foo(undefined);\n}\n\nfunction withDefaultValue(paramWithDefaultOpt, y) {\n  var paramWithDefault = paramWithDefaultOpt !== undefined ? 
paramWithDefaultOpt : 3;\n  return paramWithDefault + y | 0;\n}\n\nvar Ext_buffer = {};\n\nconsole.log(/* Root */{\n      _0: \"xzz\"\n    });\n\nvar reasonResource = JSResource(\"DynamicallyLoadedComponent.bs\");\n\nfunction makeProps(prim0, prim1, prim2) {\n  var tmp = {\n    s: prim0\n  };\n  if (prim1 !== undefined) {\n    tmp.key = prim1;\n  }\n  return tmp;\n}\n\nfunction make(props) {\n  return React.createElement(BootloaderResource.read(reasonResource).make, props);\n}\n\nvar LazyDynamicallyLoadedComponent2 = {\n  reasonResource: reasonResource,\n  makeProps: makeProps,\n  make: make\n};\n\nvar Chat = {};\n\nconsole.log(React.createElement(DynamicallyLoadedComponent.make, {\n          s: \"\"\n        }));\n\nvar second = Caml_int64.one;\n\nvar minute = Caml_int64.mul([\n      0,\n      60\n    ], second);\n\nvar deadRef = {\n  contents: 12\n};\n\nfunction DeadTest(Props) {\n  return Props.s;\n}\n\nconsole.log(DeadTest);\n\nconsole.log(123);\n\nvar stringLengthNoSideEffects = \"sdkdl\".length;\n\nvar GloobLive = {\n  globallyLive1: 1,\n  globallyLive2: 2,\n  globallyLive3: 3\n};\n\nvar WithInclude = {};\n\nconsole.log(/* A */0);\n\nfunction funWithInnerVars(param) {\n  return 70;\n}\n\nvar fortytwo = 42;\n\nvar fortyTwoButExported = 42;\n\nvar thisIsUsedOnce = 34;\n\nvar thisIsUsedTwice = 34;\n\nvar thisIsMarkedDead = 99;\n\nvar thisIsKeptAlive = 42;\n\nvar thisIsMarkedLive = 42;\n\nvar zzz;\n\nvar make$1 = DeadTest;\n\nvar theSideEffectIsLogging;\n\nvar deadIncorrect = 34;\n\nvar ira = 10;\n\nexport {\n  fortytwo ,\n  fortyTwoButExported ,\n  thisIsUsedOnce ,\n  thisIsUsedTwice ,\n  thisIsMarkedDead ,\n  thisIsKeptAlive ,\n  thisIsMarkedLive ,\n  Inner ,\n  M ,\n  VariantUsedOnlyInImplementation ,\n  UnderscoreInside ,\n  MM ,\n  unusedRec ,\n  split_map ,\n  rec1 ,\n  rec2 ,\n  recWithCallback ,\n  foo ,\n  bar ,\n  withDefaultValue ,\n  Ext_buffer ,\n  LazyDynamicallyLoadedComponent2 ,\n  Chat ,\n  zzz ,\n  second ,\n  minute ,\n  deadRef ,\n  make$1 as 
make,\n  theSideEffectIsLogging ,\n  stringLengthNoSideEffects ,\n  GloobLive ,\n  WithInclude ,\n  funWithInnerVars ,\n  deadIncorrect ,\n  ira ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTest.res",
    "content": "let _ = Js.log(ImmutableArray.fromArray)\nlet fortytwo = 42\n\n@genType\nlet fortyTwoButExported = 42\n\nlet thisIsUsedOnce = 34\nignore(thisIsUsedOnce)\n\nlet thisIsUsedTwice = 34\nignore(thisIsUsedTwice)\nignore(thisIsUsedTwice)\n\n@dead\nlet thisIsMarkedDead = 99\n\nlet thisIsKeptAlive = 42\n\n@live\nlet thisIsMarkedLive = thisIsKeptAlive\n\nmodule Inner = {\n  @dead\n  let thisIsAlsoMarkedDead = 99\n}\n\nmodule M: {\n  @dead\n  let thisSignatureItemIsDead: int\n} = {\n  let thisSignatureItemIsDead = 34\n}\n\nmodule VariantUsedOnlyInImplementation: {\n  type t = A // TODO: discovered this automatically\n  let a: t\n} = {\n  type t = A\n  let a = A\n}\n\nlet _ = (x => x)(VariantUsedOnlyInImplementation.a)\n\nlet _ = DeadTypeTest.OnlyInInterface\nlet _ = DeadTypeTest.InBoth\n\ntype record = {\n  xxx: int,\n  yyy: int,\n}\n\nlet _ = r => r.xxx\nlet _ = ({yyy}) => yyy\n\nmodule UnderscoreInside = {\n  let _ = 13\n}\n\nmodule MM: {\n  let x: int\n  let y: int\n} = {\n  let y = 55\n  let x = y\n  let valueOnlyInImplementation = 7\n}\n\nlet _ = {\n  Js.log(MM.x)\n  44\n}\n\nlet () = Js.log(DeadValueTest.valueAlive)\n\nlet rec unusedRec = () => unusedRec()\n\nlet rec split_map = l => {\n  let _ = split_map(l)\n  list{}\n}\n\nlet rec rec1 = () => rec2()\nand rec2 = () => rec1()\n\nlet rec recWithCallback = () => {\n  let cb = () => recWithCallback()\n  cb()\n}\n\nlet rec foo = () => {\n  let cb = () => bar()\n  cb()\n}\nand bar = () => foo()\n\nlet withDefaultValue = (~paramWithDefault=3, y) => paramWithDefault + y\n\nexternal unsafe_string1: (bytes, int, int) => Digest.t = \"caml_md5_string\"\n\nmodule Ext_buffer: {\n  external unsafe_string2: (bytes, int, int) => Digest.t = \"caml_md5_string\"\n} = {\n  external unsafe_string2: (bytes, int, int) => Digest.t = \"caml_md5_string\"\n}\n\nlet () = Js.log(DeadRT.Root(\"xzz\"))\n\nmodule type LocalDynamicallyLoadedComponent2 = module type of DynamicallyLoadedComponent\n\nmodule LazyDynamicallyLoadedComponent2 
= {\n  let reasonResource: JSResource.t<\n    module(LocalDynamicallyLoadedComponent2),\n  > = JSResource.jSResource(\"DynamicallyLoadedComponent.bs\")\n  let makeProps = DynamicallyLoadedComponent.makeProps\n  let make = props =>\n    React.createElement(\n      {\n        module Comp = unpack(BootloaderResource.read(reasonResource))\n        Comp.make\n      },\n      props,\n    )\n}\n\nmodule Chat = {}\n\nlet zzz = {\n  let a1 = 1\n  let a2 = 2\n  let a3 = 3\n}\n\nlet () = Js.log(<DynamicallyLoadedComponent s=\"\" />)\n\nlet second = 1L\nlet minute = Int64.mul(60L, second)\n\nlet deadRef = ref(12)\n\n@react.component\nlet make = (~s) => React.string(s)\n\nlet () = Js.log(make)\n\nlet theSideEffectIsLogging = Js.log(123)\n\nlet stringLengthNoSideEffects = String.length(\"sdkdl\")\n\n// Trace.infok(\"\", \"\", ({pf}) => pf(\"%s\", \"\"))\n\nmodule GloobLive = {\n  let globallyLive1 = 1\n  let globallyLive2 = 2\n  let globallyLive3 = 3\n}\n\nmodule WithInclude: {\n  type t = A\n} = {\n  module T = {\n    type t = A\n  }\n  include T\n}\n\nJs.log(WithInclude.A)\n\n@dead\nlet funWithInnerVars = () => {\n  let x = 34\n  let y = 36\n  x + y\n}\n\ntype rc = {a: int}\n\n@dead\nlet deadIncorrect = 34\n\nlet _ = deadIncorrect\n\ntype inlineRecord = IR({a: int, b: int, c: string, @dead d: int, @live e: int})\n\nlet ira = 10\nlet _ = ir =>\n  switch ir {\n  | IR({c} as r) => IR({a: ira, b: r.b, c, d: 0, e: 0})\n  }\n\n@dead\ntype inlineRecord2 = IR2({a: int, b: int})\n\ntype inlineRecord3 = | @dead IR3({a: int, b: int})\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTestBlacklist.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar x = 34;\n\nexport {\n  x ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTestBlacklist.res",
    "content": "let x = 34\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTestWithInterface.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTestWithInterface.res",
    "content": "module Ext_buffer: {\n  let x: int\n} = {\n  let x = 42\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTestWithInterface.resi",
    "content": "\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTypeTest.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar a = /* A */0;\n\nexport {\n  a ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTypeTest.res",
    "content": "type t =\n  | A\n  | B\nlet a = A\n\ntype deadType =\n  | OnlyInImplementation\n  | OnlyInInterface\n  | InBoth\n  | InNeither\n\nlet _ = OnlyInImplementation\nlet _ = InBoth\n\n@live\ntype record = {x: int, y: string, z: float}\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadTypeTest.resi",
    "content": "type t =\n  | A\n  | B\nlet a: t\n\ntype deadType =\n  | OnlyInImplementation\n  | OnlyInInterface\n  | InBoth\n  | InNeither\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadValueTest.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar valueAlive = 1;\n\nvar valueDead = 2;\n\nexport {\n  valueAlive ,\n  valueDead ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadValueTest.res",
    "content": "let valueAlive = 1\nlet valueDead = 2\n\nlet valueOnlyInImplementation = 3\n\nlet rec subList = (b, e, l) =>\n  switch l {\n  | list{} => failwith(\"subList\")\n  | list{h, ...t} =>\n    let tail = if e == 0 {\n      list{}\n    } else {\n      subList(b - 1, e - 1, t)\n    }\n    if b > 0 {\n      tail\n    } else {\n      list{h, ...tail}\n    }\n  }\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DeadValueTest.resi",
    "content": "let valueAlive: int\nlet valueDead: int\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Docstrings.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction signMessage(message, key) {\n  return message + String(key);\n}\n\nfunction one(a) {\n  return a + 0 | 0;\n}\n\nfunction two(a, b) {\n  return (a + b | 0) + 0 | 0;\n}\n\nfunction tree(a, b, c) {\n  return ((a + b | 0) + c | 0) + 0 | 0;\n}\n\nfunction oneU(a) {\n  return a + 0 | 0;\n}\n\nfunction twoU(a, b) {\n  return (a + b | 0) + 0 | 0;\n}\n\nfunction treeU(a, b, c) {\n  return ((a + b | 0) + c | 0) + 0 | 0;\n}\n\nfunction useParam(param) {\n  return param + 34 | 0;\n}\n\nfunction useParamU(param) {\n  return param + 34 | 0;\n}\n\nfunction unnamed1(param) {\n  return 34;\n}\n\nfunction unnamed1U(param) {\n  return 34;\n}\n\nfunction unnamed2(param, param$1) {\n  return 34;\n}\n\nfunction unnamed2U(param, param$1) {\n  return 34;\n}\n\nfunction grouped(x, y, a, b, c, z) {\n  return ((((x + y | 0) + a | 0) + b | 0) + c | 0) + z | 0;\n}\n\nfunction unitArgWithoutConversion(param) {\n  return \"abc\";\n}\n\nfunction unitArgWithoutConversionU() {\n  return \"abc\";\n}\n\nfunction unitArgWithConversion(param) {\n  return /* A */0;\n}\n\nfunction unitArgWithConversionU() {\n  return /* A */0;\n}\n\nvar flat = 34;\n\nexport {\n  flat ,\n  signMessage ,\n  one ,\n  two ,\n  tree ,\n  oneU ,\n  twoU ,\n  treeU ,\n  useParam ,\n  useParamU ,\n  unnamed1 ,\n  unnamed1U ,\n  unnamed2 ,\n  unnamed2U ,\n  grouped ,\n  unitArgWithoutConversion ,\n  unitArgWithoutConversionU ,\n  unitArgWithConversion ,\n  unitArgWithConversionU ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Docstrings.res",
    "content": "@ocaml.doc(\" hello \") @genType\nlet flat = 34\n\n@ocaml.doc(\"\n  * Sign a message with a key.\n  *\n  * @param message - A message to be signed\n  * @param key - The key with which to sign the message\n  * @returns A signed message\n \")\n@genType\nlet signMessage = (. message, key) => message ++ string_of_int(key)\n\n@genType\nlet one = a => a + 0\n\n@genType\nlet two = (a, b) => a + b + 0\n\n@genType\nlet tree = (a, b, c) => a + b + c + 0\n\n@genType\nlet oneU = (. a) => a + 0\n\n@genType\nlet twoU = (. a, b) => a + b + 0\n\n@genType\nlet treeU = (. a, b, c) => a + b + c + 0\n\n@genType\nlet useParam = param => param + 34\n\n@genType\nlet useParamU = (. param) => param + 34\n\n@genType\nlet unnamed1 = (_: int) => 34\n\n@genType\nlet unnamed1U = (. _: int) => 34\n\n@genType\nlet unnamed2 = (_: int, _: int) => 34\n\n@genType\nlet unnamed2U = (. _: int, _: int) => 34\n\n@genType\nlet grouped = (~x, ~y, a, b, c, ~z) => x + y + a + b + c + z\n\n@genType\nlet unitArgWithoutConversion = () => \"abc\"\n\n@genType\nlet unitArgWithoutConversionU = (. ()) => \"abc\"\n\ntype t =\n  | A\n  | B\n\n@genType\nlet unitArgWithConversion = () => A\n\n@genType\nlet unitArgWithConversionU = (. ()) => A\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DynamicallyLoadedComponent.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction DynamicallyLoadedComponent(Props) {\n  return Props.s;\n}\n\nvar make = DynamicallyLoadedComponent;\n\nexport {\n  make ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/DynamicallyLoadedComponent.res",
    "content": "@react.component\nlet make = (~s) => React.string(s)\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/EmptyArray.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as React from \"react\";\n\nfunction EmptyArray$Z(Props) {\n  return React.createElement(\"br\", undefined);\n}\n\nvar Z = {\n  make: EmptyArray$Z\n};\n\nReact.createElement(EmptyArray$Z, {});\n\nexport {\n  Z ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/EmptyArray.res",
    "content": "// @@config({flags : [\"-dsource\"]});\n\nmodule Z = {\n  @react.component\n  let make = () => {\n    <br />\n  }\n}\n\nlet _ = <Z />\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ErrorHandler.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\n\nfunction Make($$Error) {\n  var notify = function (x) {\n    return Curry._1($$Error.notification, x);\n  };\n  return {\n          notify: notify\n        };\n}\n\nvar x = 42;\n\nexport {\n  Make ,\n  x ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ErrorHandler.res",
    "content": "module type Error = {\n  type t\n  let notification: t => (string, string)\n}\n\nmodule Make = (Error: Error) => {\n  let notify = x => Error.notification(x)\n}\n\n// This is ignored as there's an interface file\n@genType\nlet x = 42\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ErrorHandler.resi",
    "content": "module type Error = {\n  type t\n  let notification: t => (string, string)\n}\nmodule Make: (Error: Error) =>\n{\n  let notify: Error.t => (string, string)\n}\n\nlet x: int\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/EverythingLiveHere.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar x = 1;\n\nvar y = 3;\n\nvar z = 4;\n\nexport {\n  x ,\n  y ,\n  z ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/EverythingLiveHere.res",
    "content": "let x = 1\n\nlet y = 3\n\nlet z = 4\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/FC.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction foo(impl) {\n  return impl.make;\n}\n\nconsole.log(foo);\n\nexport {\n  foo ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/FC.res",
    "content": "module type ReplacebleComponent = {\n  @react.component\n  let make: unit => React.element\n}\n\nlet foo = (~impl: module(ReplacebleComponent)) => {\n  let module(X) = impl\n  X.make\n}\n\nJs.log(foo)\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/FirstClassModules.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar y = \"abc\";\n\nvar EmptyInnerModule = {};\n\nvar InnerModule2 = {\n  k: 4242\n};\n\nfunction k3(x) {\n  return x + 1 | 0;\n}\n\nvar InnerModule3 = {\n  k3: k3\n};\n\nvar Z = {\n  u: [\n    0,\n    0\n  ]\n};\n\nvar M = {\n  y: y,\n  EmptyInnerModule: EmptyInnerModule,\n  InnerModule2: InnerModule2,\n  InnerModule3: InnerModule3,\n  Z: Z,\n  x: 42\n};\n\nvar firstClassModule = {\n  x: 42,\n  EmptyInnerModule: EmptyInnerModule,\n  InnerModule2: InnerModule2,\n  InnerModule3: InnerModule3,\n  Z: Z,\n  y: y\n};\n\nfunction testConvert(m) {\n  return m;\n}\n\nfunction SomeFunctor(X) {\n  return {\n          ww: X.y\n        };\n}\n\nfunction someFunctorAsFunction(x) {\n  return {\n          ww: x.y\n        };\n}\n\nexport {\n  M ,\n  firstClassModule ,\n  testConvert ,\n  SomeFunctor ,\n  someFunctorAsFunction ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/FirstClassModules.res",
    "content": "module type MT = {\n  let x: int\n  type t = int\n  @module(\"foo\") external f: int => int = \"f\"\n  module type MT2 = {\n    type tt = string\n  }\n  module EmptyInnerModule: {}\n  module InnerModule2: {\n    let k: t\n  }\n  module InnerModule3: {\n    type inner = int\n    let k3: inner => inner\n  }\n  module type TT = {\n    let u: (int, int)\n  }\n  module Z: TT\n  let y: string\n}\nmodule M = {\n  let y = \"abc\"\n  module type MT2 = {\n    type tt = string\n  }\n  module EmptyInnerModule = {}\n  module InnerModule2 = {\n    let k = 4242\n  }\n  module InnerModule3 = {\n    type inner = int\n    let k3 = x => x + 1\n  }\n\n  module type TT = {\n    let u: (int, int)\n  }\n  module Z = {\n    let u = (0, 0)\n  }\n  type t = int\n  @module(\"foo\") external f: int => int = \"f\"\n  let x = 42\n}\n\n@genType\ntype firstClassModule = module(MT)\n\n@genType\nlet firstClassModule: firstClassModule = module(M)\n\n@genType\nlet testConvert = (m: module(MT)) => m\n\nmodule type ResT = {\n  let ww: string\n}\n\nmodule SomeFunctor = (X: MT): ResT => {\n  let ww = X.y\n}\n\n@genType\nlet someFunctorAsFunction = (x: module(MT)): module(ResT) => module(SomeFunctor(unpack(x)))\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/FirstClassModulesInterface.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar r = {\n  x: 3,\n  y: \"hello\"\n};\n\nexport {\n  r ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/FirstClassModulesInterface.res",
    "content": "type record = {\n  x: int,\n  y: string,\n}\n\nlet r = {x: 3, y: \"hello\"}\n\nmodule type MT = {\n  let x: int\n}\n\ntype firstClassModule = module(MT)\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/FirstClassModulesInterface.resi",
    "content": "@genType\ntype record = {\n  x: int,\n  y: string,\n}\n\nlet r: record\n\n@genType\nmodule type MT = {\n  let x: int\n}\n\n@genType\ntype firstClassModule = module(MT)\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Hooks.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as React from \"react\";\nimport * as ImportHooks from \"./ImportHooks.bs.js\";\nimport * as ImportHookDefault from \"./ImportHookDefault.bs.js\";\n\nfunction Hooks(Props) {\n  var vehicle = Props.vehicle;\n  var match = React.useState(function () {\n        return 0;\n      });\n  var setCount = match[1];\n  var count = match[0];\n  return React.createElement(\"div\", undefined, React.createElement(\"p\", undefined, \"Hooks example \" + (vehicle.name + (\" clicked \" + (String(count) + \" times\")))), React.createElement(\"button\", {\n                  onClick: (function (param) {\n                      Curry._1(setCount, (function (param) {\n                              return count + 1 | 0;\n                            }));\n                    })\n                }, \"Click me\"), React.createElement(ImportHooks.make, {\n                  person: {\n                    name: \"Mary\",\n                    age: 71\n                  },\n                  children: null,\n                  renderMe: (function (x) {\n                      return x.randomString;\n                    })\n                }, \"child1\", \"child2\"), React.createElement(ImportHookDefault.make, {\n                  person: {\n                    name: \"DefaultImport\",\n                    age: 42\n                  },\n                  children: null,\n                  renderMe: (function (x) {\n                      return x.randomString;\n                    })\n                }, \"child1\", \"child2\"));\n}\n\nfunction Hooks$anotherComponent(Props) {\n  var vehicle = Props.vehicle;\n  var callback = Props.callback;\n  Curry._1(callback, undefined);\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name);\n}\n\nfunction Hooks$Inner(Props) {\n  var vehicle = Props.vehicle;\n  return React.createElement(\"div\", undefined, 
\"Another Hook \" + vehicle.name);\n}\n\nfunction Hooks$Inner$anotherComponent(Props) {\n  var vehicle = Props.vehicle;\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name);\n}\n\nfunction Hooks$Inner$Inner2(Props) {\n  var vehicle = Props.vehicle;\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name);\n}\n\nfunction Hooks$Inner$Inner2$anotherComponent(Props) {\n  var vehicle = Props.vehicle;\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name);\n}\n\nvar Inner2 = {\n  make: Hooks$Inner$Inner2,\n  anotherComponent: Hooks$Inner$Inner2$anotherComponent\n};\n\nvar Inner = {\n  make: Hooks$Inner,\n  anotherComponent: Hooks$Inner$anotherComponent,\n  Inner2: Inner2\n};\n\nfunction Hooks$NoProps(Props) {\n  return React.createElement(\"div\", undefined, null);\n}\n\nvar NoProps = {\n  make: Hooks$NoProps\n};\n\nfunction functionWithRenamedArgs(_to, _Type, cb) {\n  Curry._1(cb, _to);\n  return _to.name + _Type.name;\n}\n\nfunction Hooks$componentWithRenamedArgs(Props) {\n  var _to = Props.to;\n  var _Type = Props.Type;\n  var cb = Props.cb;\n  Curry._1(cb, _to);\n  return _to.name + _Type.name;\n}\n\nfunction Hooks$makeWithRef(Props) {\n  var vehicle = Props.vehicle;\n  return function (ref) {\n    if (ref == null) {\n      return null;\n    } else {\n      return React.createElement(\"button\", {\n                  ref: ref\n                }, vehicle.name);\n    }\n  };\n}\n\nvar testForwardRef = React.forwardRef(function (param, param$1) {\n      return Hooks$makeWithRef(param)(param$1);\n    });\n\nvar input = React.forwardRef(function (Props, param) {\n      var partial_arg = Props.r;\n      return React.createElement(\"div\", {\n                  ref: param\n                }, partial_arg.x);\n    });\n\nfunction Hooks$polymorphicComponent(Props) {\n  var param = Props.p;\n  return param[0].name;\n}\n\nfunction Hooks$functionReturningReactElement(Props) {\n  return 
Props.name;\n}\n\nfunction Hooks$RenderPropRequiresConversion(Props) {\n  var renderVehicle = Props.renderVehicle;\n  return Curry._1(renderVehicle, {\n              vehicle: {\n                name: \"Car\"\n              },\n              number: 42\n            });\n}\n\nvar RenderPropRequiresConversion = {\n  make: Hooks$RenderPropRequiresConversion\n};\n\nfunction Hooks$aComponentWithChildren(Props) {\n  var vehicle = Props.vehicle;\n  var children = Props.children;\n  return React.createElement(\"div\", undefined, \"Another Hook \" + vehicle.name, React.createElement(\"div\", undefined, children));\n}\n\nvar make = Hooks;\n\nvar $$default = Hooks;\n\nvar anotherComponent = Hooks$anotherComponent;\n\nvar componentWithRenamedArgs = Hooks$componentWithRenamedArgs;\n\nvar makeWithRef = Hooks$makeWithRef;\n\nvar polymorphicComponent = Hooks$polymorphicComponent;\n\nvar functionReturningReactElement = Hooks$functionReturningReactElement;\n\nvar aComponentWithChildren = Hooks$aComponentWithChildren;\n\nexport {\n  make ,\n  $$default ,\n  $$default as default,\n  anotherComponent ,\n  Inner ,\n  NoProps ,\n  functionWithRenamedArgs ,\n  componentWithRenamedArgs ,\n  makeWithRef ,\n  testForwardRef ,\n  input ,\n  polymorphicComponent ,\n  functionReturningReactElement ,\n  RenderPropRequiresConversion ,\n  aComponentWithChildren ,\n}\n/* testForwardRef Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Hooks.res",
    "content": "type vehicle = {name: string}\n\n@react.component\nlet make = (~vehicle) => {\n  let (count, setCount) = React.useState(() => 0)\n\n  <div>\n    <p>\n      {React.string(\n        \"Hooks example \" ++ (vehicle.name ++ (\" clicked \" ++ (string_of_int(count) ++ \" times\"))),\n      )}\n    </p>\n    <button onClick={_ => setCount(_ => count + 1)}> {React.string(\"Click me\")} </button>\n    <ImportHooks person={name: \"Mary\", age: 71} renderMe={x => React.string(x[\"randomString\"])}>\n      {React.string(\"child1\")} {React.string(\"child2\")}\n    </ImportHooks>\n    <ImportHookDefault\n      person={name: \"DefaultImport\", age: 42} renderMe={x => React.string(x[\"randomString\"])}>\n      {React.string(\"child1\")} {React.string(\"child2\")}\n    </ImportHookDefault>\n  </div>\n}\n\n@genType\nlet default = make\n\n@genType @react.component\nlet anotherComponent = (~vehicle, ~callback: unit => unit) => {\n  callback()\n  <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n}\n\nmodule Inner = {\n  @genType @react.component\n  let make = (~vehicle) => <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n\n  @genType @react.component\n  let anotherComponent = (~vehicle) => <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n\n  module Inner2 = {\n    @genType @react.component\n    let make = (~vehicle) => <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n\n    @genType @react.component\n    let anotherComponent = (~vehicle) =>\n      <div> {React.string(\"Another Hook \" ++ vehicle.name)} </div>\n  }\n}\n\nmodule NoProps = {\n  @genType @react.component\n  let make = () => <div> React.null </div>\n}\n\ntype cb = (~_to: vehicle) => unit\n\n@genType\nlet functionWithRenamedArgs = (~_to, ~_Type, ~cb: cb) => {\n  cb(~_to)\n  _to.name ++ _Type.name\n}\n\n@genType @react.component\nlet componentWithRenamedArgs = (~_to, ~_Type, ~cb: cb) => {\n  cb(~_to)\n  React.string(_to.name ++ 
_Type.name)\n}\n\n@genType @react.component\nlet makeWithRef = (~vehicle) => {\n  let _ = 34\n  ref =>\n    switch ref->Js.Nullable.toOption {\n    | Some(ref) => <button ref={ReactDOM.Ref.domRef(ref)}> {React.string(vehicle.name)} </button>\n    | None => React.null\n    }\n}\n\n@genType\nlet testForwardRef = React.forwardRef(makeWithRef)\n\ntype r = {x: string}\n\n@genType @react.component\nlet input = React.forwardRef((~r, (), ref) => <div ref={Obj.magic(ref)}> {React.string(r.x)} </div>)\n\n@genType\ntype callback<'input, 'output> = React.callback<'input, 'output>\n\n@genType\ntype testReactContext = React.Context.t<int>\n\n@genType\ntype testReactRef = React.Ref.t<int>\n\n@genType\ntype testDomRef = ReactDOM.domRef\n\n@genType @react.component\nlet polymorphicComponent = (~p as (x, _)) => React.string(x.name)\n\n@genType @react.component\nlet functionReturningReactElement = (~name) => React.string(name)\n\nmodule RenderPropRequiresConversion = {\n  @genType @react.component\n  let make = (~renderVehicle: {\"vehicle\": vehicle, \"number\": int} => React.element) => {\n    let car = {name: \"Car\"}\n    renderVehicle({\"vehicle\": car, \"number\": 42})\n  }\n}\n\n@genType @react.component\nlet aComponentWithChildren = (~vehicle, ~children) =>\n  <div> {React.string(\"Another Hook \" ++ vehicle.name)} <div> children </div> </div>\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/IgnoreInterface.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/IgnoreInterface.res",
    "content": "@gentype\ntype t = int\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/IgnoreInterface.resi",
    "content": "// Use the annotations, and definitions, from the .re file\n@@genType.ignoreInterface\n\n@genType\ntype t\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImmutableArray.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_Array from \"rescript/lib/es6/belt_Array.js\";\n\nfunction fromArray(a) {\n  return a.slice(0);\n}\n\nfunction toArray(a) {\n  return a.slice(0);\n}\n\nfunction length(a) {\n  return a.length;\n}\n\nfunction size(a) {\n  return a.length;\n}\n\nvar get = Belt_Array.get;\n\nvar getExn = Belt_Array.getExn;\n\nfunction getUnsafe(a, x) {\n  return a[x];\n}\n\nfunction getUndefined(a, x) {\n  return a[x];\n}\n\nvar shuffle = Belt_Array.shuffle;\n\nvar reverse = Belt_Array.reverse;\n\nfunction makeUninitialized(x) {\n  return new Array(x);\n}\n\nfunction makeUninitializedUnsafe(x) {\n  return new Array(x);\n}\n\nvar make = Belt_Array.make;\n\nvar range = Belt_Array.range;\n\nvar rangeBy = Belt_Array.rangeBy;\n\nvar makeByU = Belt_Array.makeByU;\n\nvar makeBy = Belt_Array.makeBy;\n\nvar makeByAndShuffleU = Belt_Array.makeByAndShuffleU;\n\nvar makeByAndShuffle = Belt_Array.makeByAndShuffle;\n\nvar zip = Belt_Array.zip;\n\nvar zipByU = Belt_Array.zipByU;\n\nvar zipBy = Belt_Array.zipBy;\n\nvar unzip = Belt_Array.unzip;\n\nvar concat = Belt_Array.concat;\n\nvar concatMany = Belt_Array.concatMany;\n\nvar slice = Belt_Array.slice;\n\nvar sliceToEnd = Belt_Array.sliceToEnd;\n\nfunction copy(a) {\n  return a.slice(0);\n}\n\nvar forEachU = Belt_Array.forEachU;\n\nvar forEach = Belt_Array.forEach;\n\nvar mapU = Belt_Array.mapU;\n\nvar map = Belt_Array.map;\n\nvar keepWithIndexU = Belt_Array.keepWithIndexU;\n\nvar keepWithIndex = Belt_Array.keepWithIndex;\n\nvar keepMapU = Belt_Array.keepMapU;\n\nvar keepMap = Belt_Array.keepMap;\n\nvar forEachWithIndexU = Belt_Array.forEachWithIndexU;\n\nvar forEachWithIndex = Belt_Array.forEachWithIndex;\n\nvar mapWithIndexU = Belt_Array.mapWithIndexU;\n\nvar mapWithIndex = Belt_Array.mapWithIndex;\n\nvar partitionU = Belt_Array.partitionU;\n\nvar partition = Belt_Array.partition;\n\nvar reduceU = Belt_Array.reduceU;\n\nvar reduce = Belt_Array.reduce;\n\nvar 
reduceReverseU = Belt_Array.reduceReverseU;\n\nvar reduceReverse = Belt_Array.reduceReverse;\n\nvar reduceReverse2U = Belt_Array.reduceReverse2U;\n\nvar reduceReverse2 = Belt_Array.reduceReverse2;\n\nvar someU = Belt_Array.someU;\n\nvar some = Belt_Array.some;\n\nvar everyU = Belt_Array.everyU;\n\nvar every = Belt_Array.every;\n\nvar every2U = Belt_Array.every2U;\n\nvar every2 = Belt_Array.every2;\n\nvar some2U = Belt_Array.some2U;\n\nvar some2 = Belt_Array.some2;\n\nvar cmpU = Belt_Array.cmpU;\n\nvar cmp = Belt_Array.cmp;\n\nvar eqU = Belt_Array.eqU;\n\nvar eq = Belt_Array.eq;\n\nvar $$Array$1 = {\n  get: get\n};\n\nexport {\n  $$Array$1 as $$Array,\n  fromArray ,\n  toArray ,\n  length ,\n  size ,\n  get ,\n  getExn ,\n  getUnsafe ,\n  getUndefined ,\n  shuffle ,\n  reverse ,\n  makeUninitialized ,\n  makeUninitializedUnsafe ,\n  make ,\n  range ,\n  rangeBy ,\n  makeByU ,\n  makeBy ,\n  makeByAndShuffleU ,\n  makeByAndShuffle ,\n  zip ,\n  zipByU ,\n  zipBy ,\n  unzip ,\n  concat ,\n  concatMany ,\n  slice ,\n  sliceToEnd ,\n  copy ,\n  forEachU ,\n  forEach ,\n  mapU ,\n  map ,\n  keepWithIndexU ,\n  keepWithIndex ,\n  keepMapU ,\n  keepMap ,\n  forEachWithIndexU ,\n  forEachWithIndex ,\n  mapWithIndexU ,\n  mapWithIndex ,\n  partitionU ,\n  partition ,\n  reduceU ,\n  reduce ,\n  reduceReverseU ,\n  reduceReverse ,\n  reduceReverse2U ,\n  reduceReverse2 ,\n  someU ,\n  some ,\n  everyU ,\n  every ,\n  every2U ,\n  every2 ,\n  some2U ,\n  some2 ,\n  cmpU ,\n  cmp ,\n  eqU ,\n  eq ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImmutableArray.res",
    "content": "type t<+'a>\nmodule Array = {\n  open Belt\n  type array2<'a> = (array<'a>, array<'a>)\n  external fromT: t<'a> => array<'a> = \"%identity\"\n  external fromTp: t<('a, 'b)> => array<('a, 'b)> = \"%identity\"\n  external fromTT: t<t<'a>> => array<array<'a>> = \"%identity\"\n  external toT: array<'a> => t<'a> = \"%identity\"\n  external toTp: array<('a, 'b)> => t<('a, 'b)> = \"%identity\"\n  external toT2: array2<'a> => (t<'a>, t<'a>) = \"%identity\"\n\n  /* Conversions involve a copy */\n\n  let fromArray = a => Array.copy(a)->toT\n\n  let toArray = a => Array.copy(a->fromT)\n\n  /* Type-cast immutable functions from Belt.Array. */\n\n  let length = a => Array.length(a->fromT)\n\n  let size = a => Array.size(a->fromT)\n\n  let get = (a, x) => (a->fromT)[x]\n\n  let getExn = (a, x) => Array.getExn(a->fromT, x)\n\n  let getUnsafe = (a, x) => Array.getUnsafe(a->fromT, x)\n\n  let getUndefined = (a, x) => Array.getUndefined(a->fromT, x)\n\n  let shuffle = x => Array.shuffle(x->fromT)->toT\n\n  let reverse = x => Array.reverse(x->fromT)->toT\n\n  let makeUninitialized = x => Array.makeUninitialized(x)->toT\n\n  let makeUninitializedUnsafe = x => Array.makeUninitializedUnsafe(x)->toT\n\n  let make = (x, y) => Array.make(x, y)->toT\n\n  let range = (x, y) => Array.range(x, y)->toT\n\n  let rangeBy = (x, y, ~step) => Array.rangeBy(x, y, ~step)->toT\n\n  let makeByU = (c, f) => Array.makeByU(c, f)->toT\n  let makeBy = (c, f) => Array.makeBy(c, f)->toT\n\n  let makeByAndShuffleU = (c, f) => Array.makeByAndShuffleU(c, f)->toT\n  let makeByAndShuffle = (c, f) => Array.makeByAndShuffle(c, f)->toT\n\n  let zip = (a1, a2) => Array.zip(fromT(a1), fromT(a2))->toTp\n\n  let zipByU = (a1, a2, f) => Array.zipByU(fromT(a1), fromT(a2), f)->toT\n  let zipBy = (a1, a2, f) => Array.zipBy(fromT(a1), fromT(a2), f)->toT\n\n  let unzip = a => Array.unzip(a->fromTp)->toT2\n\n  let concat = (a1, a2) => Array.concat(a1->fromT, a2->fromT)->toT\n\n  let concatMany = (a: t<t<_>>) => 
Array.concatMany(a->fromTT)->toT\n\n  let slice = (a, ~offset, ~len) => Array.slice(a->fromT, ~offset, ~len)->toT\n\n  let sliceToEnd = (a, b) => Array.sliceToEnd(a->fromT, b)->toT\n\n  let copy = a => Array.copy(a->fromT)->toT\n\n  let forEachU = (a, f) => Array.forEachU(a->fromT, f)\n  let forEach = (a, f) => Array.forEach(a->fromT, f)\n\n  let mapU = (a, f) => Array.mapU(a->fromT, f)->toT\n  let map = (a, f) => Array.map(a->fromT, f)->toT\n\n  let keepWithIndexU = (a, f) => Array.keepWithIndexU(a->fromT, f)->toT\n  let keepWithIndex = (a, f) => Array.keepWithIndex(a->fromT, f)->toT\n\n  let keepMapU = (a, f) => Array.keepMapU(a->fromT, f)->toT\n  let keepMap = (a, f) => Array.keepMap(a->fromT, f)->toT\n\n  let forEachWithIndexU = (a, f) => Array.forEachWithIndexU(a->fromT, f)\n  let forEachWithIndex = (a, f) => Array.forEachWithIndex(a->fromT, f)\n\n  let mapWithIndexU = (a, f) => Array.mapWithIndexU(a->fromT, f)->toT\n  let mapWithIndex = (a, f) => Array.mapWithIndex(a->fromT, f)->toT\n\n  let partitionU = (a, f) => Array.partitionU(a->fromT, f)->toT2\n  let partition = (a, f) => Array.partition(a->fromT, f)->toT2\n\n  let reduceU = (a, b, f) => Array.reduceU(a->fromT, b, f)\n  let reduce = (a, b, f) => Array.reduce(a->fromT, b, f)\n\n  let reduceReverseU = (a, b, f) => Array.reduceReverseU(a->fromT, b, f)\n  let reduceReverse = (a, b, f) => Array.reduceReverse(a->fromT, b, f)\n\n  let reduceReverse2U = (a1, a2, c, f) => Array.reduceReverse2U(fromT(a1), fromT(a2), c, f)\n  let reduceReverse2 = (a1, a2, c, f) => Array.reduceReverse2(fromT(a1), fromT(a2), c, f)\n\n  let someU = (a, f) => Array.someU(a->fromT, f)\n  let some = (a, f) => Array.some(a->fromT, f)\n\n  let everyU = (a, f) => Array.everyU(a->fromT, f)\n  let every = (a, f) => Array.every(a->fromT, f)\n\n  let every2U = (a1, a2, f) => Array.every2U(fromT(a1), fromT(a2), f)\n  let every2 = (a1, a2, f) => Array.every2(fromT(a1), fromT(a2), f)\n\n  let some2U = (a1, a2, f) => Array.some2U(fromT(a1), 
fromT(a2), f)\n  let some2 = (a1, a2, f) => Array.some2(fromT(a1), fromT(a2), f)\n\n  let cmpU = (a1, a2, f) => Array.cmpU(fromT(a1), fromT(a2), f)\n  let cmp = (a1, a2, f) => Array.cmp(fromT(a1), fromT(a2), f)\n\n  let eqU = (a1, a2, f) => Array.eqU(fromT(a1), fromT(a2), f)\n  let eq = (a1, a2, f) => Array.eq(fromT(a1), fromT(a2), f)\n}\n\ninclude Array\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImmutableArray.resi",
    "content": "@ocaml.doc(\" Immutable arrays are covariant. \")\ntype t<+'a>\n\n@ocaml.doc(\" Redefine the [_] syntax, and disable the assignment [_] = _. \")\nmodule Array: {\n  let get: (t<'a>, int) => option<'a>\n}\n\n@ocaml.doc(\" Converting from/to normal arrays involves making a copy. \")\nlet fromArray: array<'a> => t<'a>\n\nlet toArray: t<'a> => array<'a>\n\n@ocaml.doc(\" Subset of the Belt.Array oprerations that do not mutate the array. \")\nlet length: t<'a> => int\n\nlet size: t<'a> => int\n\nlet get: (t<'a>, int) => option<'a>\n\nlet getExn: (t<'a>, int) => 'a\n\nlet getUnsafe: (t<'a>, int) => 'a\n\nlet getUndefined: (t<'a>, int) => Js.undefined<'a>\n\nlet shuffle: t<'a> => t<'a>\n\nlet reverse: t<'a> => t<'a>\n\nlet makeUninitialized: int => t<Js.undefined<'a>>\n\nlet makeUninitializedUnsafe: int => t<'a>\n\nlet make: (int, 'a) => t<'a>\n\nlet range: (int, int) => t<int>\n\nlet rangeBy: (int, int, ~step: int) => t<int>\n\nlet makeByU: (int, (. int) => 'a) => t<'a>\nlet makeBy: (int, int => 'a) => t<'a>\n\nlet makeByAndShuffleU: (int, (. int) => 'a) => t<'a>\nlet makeByAndShuffle: (int, int => 'a) => t<'a>\n\nlet zip: (t<'a>, t<'b>) => t<('a, 'b)>\n\nlet zipByU: (t<'a>, t<'b>, (. 'a, 'b) => 'c) => t<'c>\nlet zipBy: (t<'a>, t<'b>, ('a, 'b) => 'c) => t<'c>\n\nlet unzip: t<('a, 'a)> => (t<'a>, t<'a>)\n\nlet concat: (t<'a>, t<'a>) => t<'a>\n\nlet concatMany: t<t<'a>> => t<'a>\n\nlet slice: (t<'a>, ~offset: int, ~len: int) => t<'a>\n\nlet sliceToEnd: (t<'a>, int) => t<'a>\n\nlet copy: t<'a> => t<'a>\n\nlet forEachU: (t<'a>, (. 'a) => unit) => unit\nlet forEach: (t<'a>, 'a => unit) => unit\n\nlet mapU: (t<'a>, (. 'a) => 'b) => t<'b>\nlet map: (t<'a>, 'a => 'b) => t<'b>\n\nlet keepWithIndexU: (t<'a>, (. 'a, int) => bool) => t<'a>\nlet keepWithIndex: (t<'a>, ('a, int) => bool) => t<'a>\n\nlet keepMapU: (t<'a>, (. 'a) => option<'b>) => t<'b>\nlet keepMap: (t<'a>, 'a => option<'b>) => t<'b>\n\nlet forEachWithIndexU: (t<'a>, (. 
int, 'a) => unit) => unit\nlet forEachWithIndex: (t<'a>, (int, 'a) => unit) => unit\n\nlet mapWithIndexU: (t<'a>, (. int, 'a) => 'b) => t<'b>\nlet mapWithIndex: (t<'a>, (int, 'a) => 'b) => t<'b>\n\nlet partitionU: (t<'a>, (. 'a) => bool) => (t<'a>, t<'a>)\nlet partition: (t<'a>, 'a => bool) => (t<'a>, t<'a>)\n\nlet reduceU: (t<'a>, 'b, (. 'b, 'a) => 'b) => 'b\nlet reduce: (t<'a>, 'b, ('b, 'a) => 'b) => 'b\n\nlet reduceReverseU: (t<'a>, 'b, (. 'b, 'a) => 'b) => 'b\nlet reduceReverse: (t<'a>, 'b, ('b, 'a) => 'b) => 'b\n\nlet reduceReverse2U: (t<'a>, t<'b>, 'c, (. 'c, 'a, 'b) => 'c) => 'c\nlet reduceReverse2: (t<'a>, t<'b>, 'c, ('c, 'a, 'b) => 'c) => 'c\n\nlet someU: (t<'a>, (. 'a) => bool) => bool\nlet some: (t<'a>, 'a => bool) => bool\n\nlet everyU: (t<'a>, (. 'a) => bool) => bool\nlet every: (t<'a>, 'a => bool) => bool\n\nlet every2U: (t<'a>, t<'b>, (. 'a, 'b) => bool) => bool\nlet every2: (t<'a>, t<'b>, ('a, 'b) => bool) => bool\n\nlet some2U: (t<'a>, t<'b>, (. 'a, 'b) => bool) => bool\nlet some2: (t<'a>, t<'b>, ('a, 'b) => bool) => bool\n\nlet cmpU: (t<'a>, t<'a>, (. 'a, 'a) => int) => int\nlet cmp: (t<'a>, t<'a>, ('a, 'a) => int) => int\n\nlet eqU: (t<'a>, t<'a>, (. 'a, 'a) => bool) => bool\nlet eq: (t<'a>, t<'a>, ('a, 'a) => bool) => bool\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportHookDefault.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport ImportHookDefaultGen from \"./ImportHookDefault.gen\";\nimport * as ImportHookDefaultGen$1 from \"./ImportHookDefault.gen\";\n\nvar make = ImportHookDefaultGen$1.make;\n\nvar make2 = ImportHookDefaultGen;\n\nexport {\n  make ,\n  make2 ,\n}\n/* make Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportHookDefault.res",
    "content": "type person = {\n  name: string,\n  age: int,\n}\n\n@genType.import((\"./hookExample\", \"default\")) @react.component\nexternal make: (\n  ~person: person,\n  ~children: React.element,\n  ~renderMe: ImportHooks.renderMe<string>,\n) => React.element = \"make\"\n\n@genType.import(\"./hookExample\") @react.component\nexternal make2: (\n  ~person: person,\n  ~children: React.element,\n  ~renderMe: ImportHooks.renderMe<string>,\n) => React.element = \"default\"\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportHooks.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as ImportHooksGen from \"./ImportHooks.gen\";\n\nvar make = ImportHooksGen.makeRenamed;\n\nfunction foo(prim) {\n  return ImportHooksGen.foo(prim);\n}\n\nexport {\n  make ,\n  foo ,\n}\n/* make Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportHooks.res",
    "content": "@genType\ntype person = {\n  name: string,\n  age: int,\n}\n\n@genType\ntype renderMe<'a> = React.component<{\n  \"randomString\": string,\n  \"poly\": 'a,\n}>\n\n@genType.import(\"./hookExample\") @react.component\nexternal make: (\n  ~person: person,\n  ~children: React.element,\n  ~renderMe: renderMe<'a>,\n) => React.element = \"makeRenamed\"\n\n@genType.import(\"./hookExample\")\nexternal foo: (~person: person) => string = \"foo\"\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportIndex.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport ImportIndexGen from \"./ImportIndex.gen\";\n\nvar make = ImportIndexGen;\n\nexport {\n  make ,\n}\n/* make Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportIndex.res",
    "content": "// TODO: rename metodd back once remmt bug is fixed\n@genType.import(\"./\") @react.component\nexternal make: (~method: @string [#push | #replace]=?) => React.element = \"default\"\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportJsValue.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport ImportJsValueGen from \"./ImportJsValue.gen\";\nimport * as ImportJsValueGen$1 from \"./ImportJsValue.gen\";\n\nfunction round(prim) {\n  return ImportJsValueGen$1.round(prim);\n}\n\nfunction area(prim) {\n  return ImportJsValueGen$1.area(prim);\n}\n\nfunction returnMixedArray(prim) {\n  return ImportJsValueGen$1.returnMixedArray();\n}\n\nvar roundedNumber = ImportJsValueGen$1.round(1.8);\n\nvar areaValue = ImportJsValueGen$1.area({\n      x: 3,\n      y: undefined\n    });\n\nfunction getAbs(x) {\n  return x.getAbs();\n}\n\nvar AbsoluteValue = {\n  getAbs: getAbs\n};\n\nfunction useGetProp(x) {\n  return x.getProp() + 1 | 0;\n}\n\nfunction useGetAbs(x) {\n  return x.getAbs() + 1 | 0;\n}\n\nfunction useColor(prim) {\n  return ImportJsValueGen$1.useColor(prim);\n}\n\nfunction higherOrder(prim) {\n  return ImportJsValueGen$1.higherOrder(prim);\n}\n\nvar returnedFromHigherOrder = ImportJsValueGen$1.higherOrder(function (prim0, prim1) {\n      return prim0 + prim1 | 0;\n    });\n\nfunction convertVariant(prim) {\n  return ImportJsValueGen$1.convertVariant(prim);\n}\n\nfunction polymorphic(prim) {\n  return ImportJsValueGen$1.polymorphic(prim);\n}\n\nvar $$default = ImportJsValueGen;\n\nexport {\n  round ,\n  area ,\n  returnMixedArray ,\n  roundedNumber ,\n  areaValue ,\n  AbsoluteValue ,\n  useGetProp ,\n  useGetAbs ,\n  useColor ,\n  higherOrder ,\n  returnedFromHigherOrder ,\n  convertVariant ,\n  polymorphic ,\n  $$default ,\n  $$default as default,\n}\n/* roundedNumber Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportJsValue.res",
    "content": "@ocaml.doc(\"\n  * Wrap JS values to be used from Reason\n  \")\n@genType.import(\"./MyMath\")\nexternal /* This is the module to import from. */\n/* Name and type of the JS value to bind to. */\nround: float => float = \"round\"\n\n@genType\ntype point = {\n  x: int,\n  y: option<int>,\n}\n\n@genType.import(\"./MyMath\")\nexternal /* This is the module to import from. */\n/* Name and type of the JS value to bind to. */\narea: point => int = \"area\"\n\n@genType.import(\"./MyMath\")\ntype numberOrString\n\n@genType.import(\"./MyMath\")\nexternal returnMixedArray: unit => array<numberOrString> = \"returnMixedArray\"\n\n@genType\nlet roundedNumber = round(1.8)\n\n@genType\nlet areaValue = area({x: 3, y: None})\n\nmodule AbsoluteValue = {\n  @genType.import((\"./MyMath\", \"AbsoluteValue\"))\n  type t = {\"getAbs\": (. unit) => int}\n\n  /* This is untyped */\n  @send external getProp: t => int = \"getProp\"\n\n  /* This is also untyped, as we \"trust\" the type declaration in absoluteVaue */\n  let getAbs = (x: t) => {\n    let getAbs = x[\"getAbs\"]\n    getAbs(.)\n  }\n}\n\n@genType\nlet useGetProp = (x: AbsoluteValue.t) => x->AbsoluteValue.getProp + 1\n\n@genType\nlet useGetAbs = (x: AbsoluteValue.t) => x->AbsoluteValue.getAbs + 1\n\n@genType.import(\"./MyMath\")\ntype stringFunction\n\n@genType\ntype color = [#tomato | #gray]\n\n@genType.import(\"./MyMath\") external useColor: color => int = \"useColor\"\n\n@genType.import(\"./MyMath\")\nexternal higherOrder: ((int, int) => int) => int = \"higherOrder\"\n\n@genType\nlet returnedFromHigherOrder = higherOrder(\\\"+\")\n\ntype variant =\n  | I(int)\n  | S(string)\n\n@genType.import(\"./MyMath\")\nexternal convertVariant: variant => variant = \"convertVariant\"\n\n@genType.import(\"./MyMath\") external polymorphic: 'a => 'a = \"polymorphic\"\n\n@genType.import(\"./MyMath\") external default: int = \"default\"\n\n@genType.import((\"./MyMath\", \"num\"))\ntype num\n\n@genType.import((\"./MyMath\", 
\"num\"))\ntype myNum\n\n@genType.import(\"./MyMath\")\ntype polyType<'a>\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportMyBanner.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as ImportMyBannerGen from \"./ImportMyBanner.gen\";\n\nfunction make(prim0, prim1, prim2) {\n  return ImportMyBannerGen.make(prim0, prim1 !== undefined ? Caml_option.valFromOption(prim1) : undefined, prim2);\n}\n\nexport {\n  make ,\n}\n/* ./ImportMyBanner.gen Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ImportMyBanner.res",
    "content": "@ocaml.doc(\"\n  * Wrap component MyBanner to be used from Reason.\n  \")\n@genType\ntype message = {text: string}\n\n@genType.import(\"./MyBanner\")\nexternal /* Module with the JS component to be wrapped. */\n/* The make function will be automatically generated from the types below. */\nmake: (~show: bool, ~message: option<message>=?, 'a) => React.element = \"make\"\n\nlet make = make\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/InnerModuleTypes.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar I = {};\n\nexport {\n  I ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/InnerModuleTypes.res",
    "content": "module I = {\n  type t = Foo\n}\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/InnerModuleTypes.resi",
    "content": "module I: {\n  type t = Foo\n}\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/JSResource.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/JSResource.res",
    "content": "type t<'a>\n\n@module external jSResource: string => t<'a> = \"JSResource\"\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/JsxV4.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as React from \"react\";\n\nfunction JsxV4$C(props) {\n  return null;\n}\n\nvar C = {\n  make: JsxV4$C\n};\n\nReact.createElement(JsxV4$C, {});\n\nexport {\n  C ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/JsxV4.res",
    "content": "@@jsxConfig({version: 4})\n\nmodule C = {\n  @react.component let make = () => React.null\n}\n\nlet _ = <C />\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/LetPrivate.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar y = 34;\n\nexport {\n  y ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/LetPrivate.res",
    "content": "%%private(\n  @genType\n  let x = 34\n)\n\n@genType\nlet y = x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ModuleAliases.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar Inner = {};\n\nvar Outer = {\n  Inner: Inner\n};\n\nvar InnerNested = {};\n\nvar Inner2 = {\n  InnerNested: InnerNested,\n  OuterInnerAlias2: undefined\n};\n\nvar Outer2 = {\n  OuterInnerAlias: undefined,\n  Inner2: Inner2\n};\n\nfunction testNested(x) {\n  return x;\n}\n\nfunction testInner(x) {\n  return x;\n}\n\nfunction testInner2(x) {\n  return x;\n}\n\nvar Outer2Alias;\n\nvar InnerNestedAlias;\n\nexport {\n  Outer ,\n  Outer2 ,\n  Outer2Alias ,\n  InnerNestedAlias ,\n  testNested ,\n  testInner ,\n  testInner2 ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ModuleAliases.res",
    "content": "module Outer = {\n  module Inner = {\n    type innerT = {inner: string}\n  }\n}\n\nmodule Outer2 = {\n  module OuterInnerAlias = Outer.Inner\n  module Inner2 = {\n    module InnerNested = {\n      type t = {nested: int}\n    }\n    module OuterInnerAlias2 = OuterInnerAlias\n  }\n}\n\nmodule Outer2Alias = Outer2\n\nmodule InnerNestedAlias = Outer2.Inner2.InnerNested\n\n@genType\nlet testNested = (x: InnerNestedAlias.t) => x\n\n@genType\nlet testInner = (x: Outer2Alias.OuterInnerAlias.innerT) => x\n\n@genType\nlet testInner2 = (x: Outer2Alias.Inner2.OuterInnerAlias2.innerT) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ModuleAliases2.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar Inner = {};\n\nvar Outer = {\n  Inner: Inner\n};\n\nvar OuterAlias;\n\nvar InnerAlias;\n\nvar q = 42;\n\nexport {\n  Outer ,\n  OuterAlias ,\n  InnerAlias ,\n  q ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ModuleAliases2.res",
    "content": "@genType\ntype record = {\n  x: int,\n  y: string,\n}\n\nmodule Outer = {\n  @genType\n  type outer = {outer: string}\n\n  module Inner = {\n    @genType\n    type inner = {inner: string}\n  }\n}\n\nmodule OuterAlias = Outer\n\nmodule InnerAlias = OuterAlias.Inner\n\nlet q = 42\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ModuleExceptionBug.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nfunction customDouble(foo) {\n  return (foo << 1);\n}\n\nvar Dep = {\n  customDouble: customDouble\n};\n\nvar MyOtherException = /* @__PURE__ */Caml_exceptions.create(\"ModuleExceptionBug.MyOtherException\");\n\nconsole.log(34);\n\nvar ddjdj = 34;\n\nexport {\n  Dep ,\n  MyOtherException ,\n  ddjdj ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ModuleExceptionBug.res",
    "content": "module Dep = {\n  let customDouble = foo => foo * 2\n}\n\nexception MyOtherException\n\nlet ddjdj = 34\nJs.log(ddjdj)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/NestedModules.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction nested3Function(x) {\n  return x;\n}\n\nvar Nested3 = {\n  x: 0,\n  y: 1,\n  z: 2,\n  w: 3,\n  nested3Value: \"nested3Value\",\n  nested3Function: nested3Function\n};\n\nfunction nested2Function(x) {\n  return x;\n}\n\nvar Nested2 = {\n  x: 0,\n  nested2Value: 1,\n  y: 2,\n  Nested3: Nested3,\n  nested2Function: nested2Function\n};\n\nvar Universe = {\n  theAnswer: 42,\n  notExported: 33,\n  Nested2: Nested2,\n  someString: \"some exported string\"\n};\n\nvar notNested = 1;\n\nexport {\n  notNested ,\n  Universe ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/NestedModules.res",
    "content": "@genType\nlet notNested = 1\n\nmodule Universe = {\n  @genType\n  let theAnswer = 42\n\n  let notExported = 33\n\n  @genType\n  type nestedType = array<string>\n\n  module Nested2 = {\n    let x = 0\n\n    @genType\n    let nested2Value = 1\n\n    let y = 2\n\n    @genType\n    type nested2Type = array<array<string>>\n\n    module Nested3 = {\n      let x = 0\n      let y = 1\n      let z = 2\n      let w = 3\n\n      @genType\n      type nested3Type = array<array<array<string>>>\n\n      @genType\n      let nested3Value = \"nested3Value\"\n\n      @genType\n      let nested3Function = (x: nested2Type) => x\n    }\n\n    @genType\n    let nested2Function = (x: Nested3.nested3Type) => x\n  }\n\n  @genType\n  type variant =\n    | A\n    | B(string)\n\n  @genType\n  let someString = \"some exported string\"\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/NestedModulesInSignature.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar Universe = {\n  theAnswer: 42\n};\n\nexport {\n  Universe ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/NestedModulesInSignature.res",
    "content": "module Universe = {\n  let theAnswer = 42\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/NestedModulesInSignature.resi",
    "content": "module Universe: {\n  @genType\n  let theAnswer: int\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Newsyntax.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar x = 34;\n\nvar y = 11;\n\nexport {\n  x ,\n  y ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Newsyntax.res",
    "content": "let x = 34\n\nlet y = 11\n\ntype record = {\n  xxx: int,\n  yyy: int,\n}\n\ntype variant = A | B(int)|C\n\ntype record2 = {xx:int,yy:int}\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Newton.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\n\nfunction $neg(prim0, prim1) {\n  return prim0 - prim1;\n}\n\nfunction $plus(prim0, prim1) {\n  return prim0 + prim1;\n}\n\nfunction $star(prim0, prim1) {\n  return prim0 * prim1;\n}\n\nfunction $slash(prim0, prim1) {\n  return prim0 / prim1;\n}\n\nfunction newton(f, fPrimed, initial, threshold) {\n  var current = {\n    contents: initial\n  };\n  var iterateMore = function (previous, next) {\n    var delta = next >= previous ? next - previous : previous - next;\n    current.contents = next;\n    return delta >= threshold;\n  };\n  var _param;\n  while(true) {\n    var previous = current.contents;\n    var next = previous - Curry._1(f, previous) / Curry._1(fPrimed, previous);\n    if (!iterateMore(previous, next)) {\n      return current.contents;\n    }\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction f(x) {\n  return x * x * x - 2.0 * x * x - 11.0 * x + 12.0;\n}\n\nfunction fPrimed(x) {\n  return 3.0 * x * x - 4.0 * x - 11.0;\n}\n\nvar result = newton(f, fPrimed, 5.0, 0.0003);\n\nconsole.log(result, f(result));\n\nexport {\n  $neg ,\n  $plus ,\n  $star ,\n  $slash ,\n  newton ,\n  f ,\n  fPrimed ,\n  result ,\n}\n/* result Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Newton.res",
    "content": "let \\\"-\" = \\\"-.\"\nlet \\\"+\" = \\\"+.\"\nlet \\\"*\" = \\\"*.\"\nlet \\\"/\" = \\\"/.\"\n\nlet newton = (~f, ~fPrimed, ~initial, ~threshold) => {\n  let current = ref(initial)\n  let iterateMore = (previous, next) => {\n    let delta = next >= previous ? next - previous : previous - next\n    current := next\n    !(delta < threshold)\n  }\n  @progress(iterateMore)\n  let rec loop = () => {\n    let previous = current.contents\n    let next = previous - f(previous) / fPrimed(previous)\n    if iterateMore(previous, next) {\n      loop()\n    } else {\n      current.contents\n    }\n  }\n  loop()\n}\nlet f = x => x * x * x - 2.0 * x * x - 11.0 * x + 12.0\n\nlet fPrimed = x => 3.0 * x * x - 4.0 * x - 11.0\n\nlet result = newton(~f, ~fPrimed, ~initial=5.0, ~threshold=0.0003)\n\nJs.log2(result, f(result))\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Opaque.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction noConversion(x) {\n  return x;\n}\n\nfunction testConvertNestedRecordFromOtherFile(x) {\n  return x;\n}\n\nexport {\n  noConversion ,\n  testConvertNestedRecordFromOtherFile ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Opaque.res",
    "content": "@genType.opaque\ntype opaqueFromRecords = A(Records.coord)\n\n@genType\nlet noConversion = (x: opaqueFromRecords) => x\n\n@genType\ntype pair = (opaqueFromRecords, opaqueFromRecords)\n\n@genType\nlet testConvertNestedRecordFromOtherFile = (x: Records.business) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/OptArg.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction foo(xOpt, yOpt, zOpt, w) {\n  var x = xOpt !== undefined ? xOpt : 1;\n  var y = yOpt !== undefined ? yOpt : 2;\n  var z = zOpt !== undefined ? zOpt : 3;\n  return ((x + y | 0) + z | 0) + w | 0;\n}\n\nfunction bar(x, y, z, w) {\n  return y + w | 0;\n}\n\nconsole.log(foo(3, undefined, undefined, 4));\n\nconsole.log(7);\n\nfunction threeArgs(aOpt, bOpt, cOpt, d) {\n  var a = aOpt !== undefined ? aOpt : 1;\n  var b = bOpt !== undefined ? bOpt : 2;\n  var c = cOpt !== undefined ? cOpt : 3;\n  return ((a + b | 0) + c | 0) + d | 0;\n}\n\nconsole.log(threeArgs(4, undefined, 7, 1));\n\nconsole.log(threeArgs(4, undefined, undefined, 1));\n\nfunction twoArgs(aOpt, bOpt, c) {\n  var a = aOpt !== undefined ? aOpt : 1;\n  var b = bOpt !== undefined ? bOpt : 2;\n  return (a + b | 0) + c | 0;\n}\n\nconsole.log(twoArgs(undefined, undefined, 1));\n\nvar a = 3;\n\nconsole.log(a + 44 | 0);\n\nfunction wrapfourArgs(a, b, c, n) {\n  var dOpt;\n  var a$1 = a !== undefined ? a : 1;\n  var b$1 = b !== undefined ? b : 2;\n  var c$1 = c !== undefined ? c : 3;\n  var d = dOpt !== undefined ? dOpt : 4;\n  return (((a$1 + b$1 | 0) + c$1 | 0) + d | 0) + n | 0;\n}\n\nconsole.log(wrapfourArgs(3, undefined, 44, 44));\n\nconsole.log(wrapfourArgs(undefined, 4, 44, 44));\n\nexport {\n  foo ,\n  bar ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/OptArg.res",
    "content": "let foo = (~x=1, ~y=2, ~z=3, w) => x + y + z + w\n\nlet bar = (~x=?, ~y, ~z=?, w) => y + w\n\nJs.log(foo(~x=3, 4))\n\nJs.log(bar(~y=3, 4))\n\nlet threeArgs = (~a=1, ~b=2, ~c=3, d) => a + b + c + d\n\nJs.log(threeArgs(~a=4, ~c=7, 1))\nJs.log(threeArgs(~a=4, 1))\n\nlet twoArgs = (~a=1, ~b=2, c) => a + b + c\n\nJs.log(1 |> twoArgs)\n\nlet oneArg = (~a=1, ~z, b) => a + b\n\nlet wrapOneArg = (~a=?, n) => oneArg(~a?, ~z=33, n)\n\nJs.log(wrapOneArg(~a=3, 44))\n\nlet fourArgs = (~a=1, ~b=2, ~c=3, ~d=4, n) => a + b + c + d + n\n\nlet wrapfourArgs = (~a=?, ~b=?, ~c=?, n) => fourArgs(~a?, ~b?, ~c?, n)\n\nJs.log(wrapfourArgs(~a=3, ~c=44, 44))\nJs.log(wrapfourArgs(~b=4, ~c=44, 44))\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/OptArg.resi",
    "content": "let foo: (~x: int=?, ~y: int=?, ~z: int=?, int) => int\nlet bar: (~x: 'a=?, ~y: int, ~z: 'b=?, int) => int\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Records.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_List from \"rescript/lib/es6/belt_List.js\";\nimport * as Belt_Array from \"rescript/lib/es6/belt_Array.js\";\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\nimport * as Belt_Option from \"rescript/lib/es6/belt_Option.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\n\nfunction computeArea(param) {\n  return Math.imul(Math.imul(param.x, param.y), Belt_Option.mapWithDefault(param.z, 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction coord2d(x, y) {\n  return {\n          x: x,\n          y: y,\n          z: undefined\n        };\n}\n\nvar getOpt = Belt_Option.mapWithDefault;\n\nfunction findAddress(business) {\n  return Belt_Option.mapWithDefault(business.address, /* [] */0, (function (a) {\n                return {\n                        hd: a,\n                        tl: /* [] */0\n                      };\n              }));\n}\n\nfunction findAllAddresses(businesses) {\n  return Belt_List.toArray(Belt_List.flatten(Belt_List.fromArray(Belt_Array.map(businesses, (function (business) {\n                            return Pervasives.$at(Belt_Option.mapWithDefault(business.address, /* [] */0, (function (a) {\n                                              return {\n                                                      hd: a,\n                                                      tl: /* [] */0\n                                                    };\n                                            })), Belt_Option.mapWithDefault(business.owner, /* [] */0, (function (p) {\n                                              return Belt_Option.mapWithDefault(p.address, /* [] */0, (function (a) {\n                                                            return {\n                                                                    hd: a,\n                                                                    tl: /* [] 
*/0\n                                                                  };\n                                                          }));\n                                            })));\n                          })))));\n}\n\nfunction getPayload(param) {\n  return param.payload;\n}\n\nfunction getPayloadRecord(param) {\n  return param.payload;\n}\n\nvar recordValue = {\n  v: 1,\n  w: 1\n};\n\nvar payloadValue = {\n  num: 1,\n  payload: recordValue\n};\n\nfunction getPayloadRecordPlusOne(param) {\n  var payload = param.payload;\n  return {\n          v: payload.v + 1 | 0,\n          w: payload.w\n        };\n}\n\nfunction findAddress2(business) {\n  return Belt_Option.mapWithDefault(Caml_option.nullable_to_opt(business.address2), /* [] */0, (function (a) {\n                return {\n                        hd: a,\n                        tl: /* [] */0\n                      };\n              }));\n}\n\nvar someBusiness2_owner = null;\n\nvar someBusiness2_address2 = null;\n\nvar someBusiness2 = {\n  name: \"SomeBusiness\",\n  owner: someBusiness2_owner,\n  address2: someBusiness2_address2\n};\n\nfunction computeArea3(o) {\n  return Math.imul(Math.imul(o.x, o.y), Belt_Option.mapWithDefault(Caml_option.nullable_to_opt(o.z), 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction computeArea4(o) {\n  return Math.imul(Math.imul(o.x, o.y), Belt_Option.mapWithDefault(o.z, 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction testMyRec(x) {\n  return x.type_;\n}\n\nfunction testMyRec2(x) {\n  return x;\n}\n\nfunction testMyObj(x) {\n  return x.type_;\n}\n\nfunction testMyObj2(x) {\n  return x;\n}\n\nfunction testMyRecBsAs(x) {\n  return x.type;\n}\n\nfunction testMyRecBsAs2(x) {\n  return x;\n}\n\nvar origin = {\n  x: 0,\n  y: 0,\n  z: 0\n};\n\nvar someBusiness = {\n  name: \"SomeBusiness\",\n  owner: undefined,\n  address: undefined\n};\n\nexport {\n  origin ,\n  computeArea ,\n  coord2d ,\n  getOpt 
,\n  findAddress ,\n  someBusiness ,\n  findAllAddresses ,\n  getPayload ,\n  getPayloadRecord ,\n  recordValue ,\n  payloadValue ,\n  getPayloadRecordPlusOne ,\n  findAddress2 ,\n  someBusiness2 ,\n  computeArea3 ,\n  computeArea4 ,\n  testMyRec ,\n  testMyRec2 ,\n  testMyObj ,\n  testMyObj2 ,\n  testMyRecBsAs ,\n  testMyRecBsAs2 ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Records.res",
    "content": "open Belt\n\n@genType\ntype coord = {\n  x: int,\n  y: int,\n  z: option<int>,\n}\n\n@genType\nlet origin = {x: 0, y: 0, z: Some(0)}\n\n@genType\nlet computeArea = ({x, y, z}) => {\n  open Option\n  x * y * z->mapWithDefault(1, n => n)\n}\n\n@genType\nlet coord2d = (x, y) => {x: x, y: y, z: None}\n\n@genType\ntype person = {\n  name: string,\n  age: int,\n  address: option<string>,\n}\n\n@genType\ntype business = {\n  name: string,\n  owner: option<person>,\n  address: option<string>,\n}\n\nlet getOpt = (opt, default, foo) => opt->Option.mapWithDefault(default, foo)\n\n@genType\nlet findAddress = (business: business): list<string> =>\n  business.address->getOpt(list{}, a => list{a})\n\n@genType\nlet someBusiness = {name: \"SomeBusiness\", owner: None, address: None}\n\n@genType\nlet findAllAddresses = (businesses: array<business>): array<string> =>\n  businesses\n  ->Array.map(business =>\n    \\\"@\"(\n      business.address->getOpt(list{}, a => list{a}),\n      business.owner->getOpt(list{}, p => p.address->getOpt(list{}, a => list{a})),\n    )\n  )\n  ->List.fromArray\n  ->List.flatten\n  ->List.toArray\n\n@genType\ntype payload<'a> = {\n  num: int,\n  payload: 'a,\n}\n\n@genType\nlet getPayload = ({payload}) => payload\n\n@genType\ntype record = {\n  v: int,\n  w: int,\n}\n\n@genType\nlet getPayloadRecord = ({payload}): record => payload\n\n@genType\nlet recordValue = {v: 1, w: 1}\n\n@genType\nlet payloadValue = {num: 1, payload: recordValue}\n\n@genType\nlet getPayloadRecordPlusOne = ({payload}): record => {\n  ...payload,\n  v: payload.v + 1,\n}\n\n@genType\ntype business2 = {\n  name: string,\n  owner: Js.Nullable.t<person>,\n  address2: Js.Nullable.t<string>,\n}\n\n@genType\nlet findAddress2 = (business: business2): list<string> =>\n  business.address2->Js.Nullable.toOption->getOpt(list{}, a => list{a})\n\n@genType\nlet someBusiness2 = {\n  name: \"SomeBusiness\",\n  owner: Js.Nullable.null,\n  address2: Js.Nullable.null,\n}\n\n@genType\nlet 
computeArea3 = (o: {\"x\": int, \"y\": int, \"z\": Js.Nullable.t<int>}) =>\n  o[\"x\"] * o[\"y\"] * o[\"z\"]->Js.Nullable.toOption->Option.mapWithDefault(1, n => n)\n\n@genType\nlet computeArea4 = (o: {\"x\": int, \"y\": int, \"z\": option<int>}) =>\n  o[\"x\"] * o[\"y\"] * o[\"z\"]->Option.mapWithDefault(1, n => n)\n\n@genType\ntype mix = {\"a\": int, \"b\": int, \"c\": option<{\"name\": string, \"surname\": string}>}\n\n@genType\ntype myRec = {\n  @genType.as(\"type\")\n  type_: string,\n}\n\n@genType\ntype myObj = {\"type_\": string}\n\n@genType\nlet testMyRec = (x: myRec) => x.type_\n\n@genType\nlet testMyRec2 = (x: myRec) => x\n\n@genType\nlet testMyObj = (x: myObj) => x[\"type_\"]\n\n@genType\nlet testMyObj2 = (x: myObj) => x\n\n@genType\ntype myRecBsAs = {\n  @as(\"type\")\n  type_: string,\n}\n\n@genType\nlet testMyRecBsAs = (x: myRecBsAs) => x.type_\n\n@genType\nlet testMyRecBsAs2 = (x: myRecBsAs) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/References.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction create(x) {\n  return {\n          contents: x\n        };\n}\n\nfunction access(r) {\n  return r.contents + 1 | 0;\n}\n\nfunction update(r) {\n  r.contents = r.contents + 1 | 0;\n}\n\nfunction get(r) {\n  return r.contents;\n}\n\nfunction make(prim) {\n  return {\n          contents: prim\n        };\n}\n\nfunction set(r, v) {\n  r.contents = v;\n}\n\nvar R = {\n  get: get,\n  make: make,\n  set: set\n};\n\nfunction destroysRefIdentity(x) {\n  return x;\n}\n\nfunction preserveRefIdentity(x) {\n  return x;\n}\n\nexport {\n  create ,\n  access ,\n  update ,\n  R ,\n  get ,\n  make ,\n  set ,\n  destroysRefIdentity ,\n  preserveRefIdentity ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/References.res",
    "content": "// Test pervasive references\n\n@genType\nlet create = (x: int) => ref(x)\n\n@genType\nlet access = r => r.contents + 1\n\n@genType\nlet update = r => r.contents = r.contents + 1\n\n// Abstract version of references: works when conversion is required.\n\nmodule R: {\n  @genType\n  type t<'a>\n  let get: t<'a> => 'a\n  let make: 'a => t<'a>\n  let set: (t<'a>, 'a) => unit\n} = {\n  type t<'a> = ref<'a>\n  let get = r => r.contents\n  let make = ref\n  let set = (r, v) => r.contents = v\n}\n\n@genType\ntype t<'a> = R.t<'a>\n\n@genType\nlet get = R.get\n\n@gentype\nlet make = R.make\n\n@genType\nlet set = R.set\n\ntype requiresConversion = {x: int}\n\n// Careful: conversion makes a copy and destroys the reference identity.\n@genType\nlet destroysRefIdentity = (x: ref<requiresConversion>) => x\n\n// Using abstract references preserves the identity.\n@genType\nlet preserveRefIdentity = (x: R.t<requiresConversion>) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/RepeatedLabel.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction userData(param) {\n  return {\n          a: param.a,\n          b: param.b\n        };\n}\n\nconsole.log(userData);\n\nexport {\n  userData ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/RepeatedLabel.res",
    "content": "type userData = {\n  a: bool,\n  b: int,\n}\n\ntype tabState = {\n  a: bool,\n  b: int,\n  f: string,\n}\n\nlet userData = ({a, b}): userData => {a: a, b: b}\n\nJs.log(userData)\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/RequireCond.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/RequireCond.res",
    "content": "@module\n@deprecated(\n  \"Please use this syntax to guarantee safe usage: [%requireCond(`gk, \\\"gk_name\\\", ConditionalModule)]\"\n)\nexternal make: (\n  @string [@as(\"qe.bool\") #qeBool | @as(\"gk\") #gk],\n  string,\n  string,\n) => Js.Nullable.t<'a> = \"requireCond\"\n\n@module\n@deprecated(\n  \"Please use this syntax to guarantee safe usage: [%requireCond(`gk, \\\"gk_name\\\", {\\\"true\\\": ModuleA, \\\"false\\\": ModuleB})]\"\n)\nexternal either: (\n  @string [@as(\"qe.bool\") #qeBool | @as(\"gk\") #gk],\n  string,\n  {\"true\": string, \"false\": string},\n) => 'b = \"requireCond\"\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Shadow.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction test(param) {\n  return \"a\";\n}\n\nfunction test$1(param) {\n  return \"a\";\n}\n\nvar M = {\n  test: test$1\n};\n\nexport {\n  test ,\n  M ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Shadow.res",
    "content": "@genType\nlet test = () => 3\n\n@genType\nlet test = () => \"a\"\n\nmodule M = {\n  @genType\n  let test = () => 3\n\n  let test = () => \"a\"\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestDeadExn.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as DeadExn from \"./DeadExn.bs.js\";\n\nconsole.log({\n      RE_EXN_ID: DeadExn.Etoplevel\n    });\n\nexport {\n  \n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestDeadExn.res",
    "content": "Js.log(DeadExn.Etoplevel)\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestEmitInnerModules.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar Inner = {\n  x: 34,\n  y: \"hello\"\n};\n\nvar Inner$1 = {\n  y: 44\n};\n\nvar Medium = {\n  Inner: Inner$1\n};\n\nvar Outer = {\n  Medium: Medium\n};\n\nexport {\n  Inner ,\n  Outer ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestEmitInnerModules.res",
    "content": "module Inner = {\n  @genType\n  let x = 34\n  @genType\n  let y = \"hello\"\n}\n\nmodule Outer = {\n  module Medium = {\n    module Inner = {\n      @genType\n      let y = 44\n    }\n  }\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestFirstClassModules.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction convert(x) {\n  return x;\n}\n\nfunction convertInterface(x) {\n  return x;\n}\n\nfunction convertRecord(x) {\n  return x;\n}\n\nfunction convertFirstClassModuleWithTypeEquations(x) {\n  return x;\n}\n\nexport {\n  convert ,\n  convertInterface ,\n  convertRecord ,\n  convertFirstClassModuleWithTypeEquations ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestFirstClassModules.res",
    "content": "@genType\nlet convert = (x: FirstClassModules.firstClassModule) => x\n\n@genType\nlet convertInterface = (x: FirstClassModulesInterface.firstClassModule) => x\n\n@genType\nlet convertRecord = (x: FirstClassModulesInterface.record) => x\n\nmodule type MT = {\n  type outer\n  let out: outer => outer\n\n  module Inner: {\n    type inner\n    let inn: inner => inner\n  }\n}\n\n@genType\ntype firstClassModuleWithTypeEquations<'i, 'o> = module(MT with\n  type Inner.inner = 'i\n  and type outer = 'o\n)\n\n@genType\nlet convertFirstClassModuleWithTypeEquations = (\n  type o i,\n  x: module(MT with type Inner.inner = i and type outer = o),\n) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestImmutableArray.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_Array from \"rescript/lib/es6/belt_Array.js\";\nimport * as ImmutableArray from \"./ImmutableArray.bs.js\";\n\nfunction testImmutableArrayGet(arr) {\n  return ImmutableArray.$$Array.get(arr, 3);\n}\n\nfunction testBeltArrayGet(arr) {\n  return Belt_Array.get(arr, 3);\n}\n\nfunction testBeltArraySet(arr) {\n  return Belt_Array.set(arr, 3, 4);\n}\n\nexport {\n  testImmutableArrayGet ,\n  testBeltArrayGet ,\n  testBeltArraySet ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestImmutableArray.res",
    "content": "@genType\nlet testImmutableArrayGet = arr => {\n  open ImmutableArray\n  arr[3]\n}\n\n/*\n   type error\n   let testImmutableArraySet = arr => ImmutableArray.(arr[3] = 4);\n */\n\nlet testBeltArrayGet = arr => {\n  open Belt\n  arr[3]\n}\n\nlet testBeltArraySet = arr => {\n  open Belt\n  arr[3] = 4\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestImport.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as TestImportGen from \"./TestImport.gen\";\n\nvar innerStuffContents = TestImportGen.innerStuffContents;\n\nvar innerStuffContentsAsEmptyObject = TestImportGen.innerStuffContentsAsEmptyObject;\n\nvar valueStartingWithUpperCaseLetter = TestImportGen.valueStartingWithUpperCaseLetter;\n\nvar defaultValue = TestImportGen.defaultValue;\n\nfunction make(prim0, prim1, prim2) {\n  return TestImportGen.make(prim0, prim1 !== undefined ? Caml_option.valFromOption(prim1) : undefined, prim2);\n}\n\nvar defaultValue2 = TestImportGen.defaultValue2;\n\nexport {\n  innerStuffContentsAsEmptyObject ,\n  innerStuffContents ,\n  valueStartingWithUpperCaseLetter ,\n  defaultValue ,\n  make ,\n  defaultValue2 ,\n}\n/* innerStuffContents Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestImport.res",
    "content": "@genType.import((\n  \"./exportNestedValues\",\n  \"TopLevelClass.MiddleLevelElements.stuff.InnerStuff.innerStuffContents\",\n))\nexternal innerStuffContents: {\"x\": int} = \"innerStuffContents\"\n\n@genType.import((\n  \"./exportNestedValues\",\n  \"TopLevelClass.MiddleLevelElements.stuff.InnerStuff.innerStuffContents\",\n))\nexternal innerStuffContentsAsEmptyObject: {.} = \"innerStuffContentsAsEmptyObject\"\n\nlet innerStuffContents = innerStuffContents\n\n@genType.import((\"./exportNestedValues\", \"ValueStartingWithUpperCaseLetter\"))\nexternal valueStartingWithUpperCaseLetter: string = \"valueStartingWithUpperCaseLetter\"\n\n@genType.import((\"./exportNestedValues\", \"default\"))\nexternal defaultValue: int = \"defaultValue\"\n\n@genType\ntype message = {text: string}\n\n@genType.import((\"./MyBanner\", \"TopLevelClass.MiddleLevelElements.MyBannerInternal\"))\nexternal make: (~show: bool, ~message: option<message>=?, 'a) => React.element = \"make\"\n\nlet make = make\n\n@genType.import((\"./exportNestedValues\", \"default\"))\nexternal defaultValue2: int = \"defaultValue2\"\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestInnedModuleTypes.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n/* This output is empty. Its source's type definitions, externals and/or unused code got optimized away. */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestInnedModuleTypes.res",
    "content": "let _ = InnerModuleTypes.I.Foo\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestModuleAliases.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction testInner1(x) {\n  return x;\n}\n\nfunction testInner1Expanded(x) {\n  return x;\n}\n\nfunction testInner2(x) {\n  return x;\n}\n\nfunction testInner2Expanded(x) {\n  return x;\n}\n\nvar OtherFile;\n\nvar OtherFileAlias;\n\nvar OuterAlias;\n\nvar OtherFile1;\n\nvar Outer2;\n\nvar Inner2;\n\nexport {\n  OtherFile ,\n  OtherFileAlias ,\n  OuterAlias ,\n  OtherFile1 ,\n  Outer2 ,\n  Inner2 ,\n  testInner1 ,\n  testInner1Expanded ,\n  testInner2 ,\n  testInner2Expanded ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestModuleAliases.res",
    "content": "module OtherFile = ModuleAliases2\nmodule OtherFileAlias = OtherFile\n\n@genType\ntype record = OtherFile.record\n\n@genType\ntype record2 = OtherFileAlias.record\n\nmodule OuterAlias = OtherFile.Outer\n\n@genType\ntype outer = OtherFileAlias.Outer.outer\n\n@genType\ntype outer2 = OuterAlias.outer\n\nmodule OtherFile1 = OtherFile\nmodule Outer2 = OtherFile1.Outer\nmodule Inner2 = Outer2.Inner\n\n@genType\ntype my2 = Inner2.inner\n\n@genType\ntype inner1 = OtherFile.InnerAlias.inner\n\n@genType\ntype inner2 = OtherFile.Outer.Inner.inner\n\n@genType\nlet testInner1 = (x: inner1) => x\n\n@genType\nlet testInner1Expanded = (x: OtherFile.InnerAlias.inner) => x\n\n@genType\nlet testInner2 = (x: inner2) => x\n\n@genType\nlet testInner2Expanded = (x: OtherFile.Outer.Inner.inner) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestOptArg.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as OptArg from \"./OptArg.bs.js\";\n\nconsole.log(OptArg.bar(undefined, 3, 3, 4));\n\nfunction foo(xOpt, y) {\n  var x = xOpt !== undefined ? xOpt : 3;\n  return x + y | 0;\n}\n\nfunction bar(param) {\n  var x = 12;\n  return x + 3 | 0;\n}\n\nconsole.log(bar);\n\nfunction notSuppressesOptArgs(xOpt, yOpt, zOpt, w) {\n  var x = xOpt !== undefined ? xOpt : 1;\n  var y = yOpt !== undefined ? yOpt : 2;\n  var z = zOpt !== undefined ? zOpt : 3;\n  return ((x + y | 0) + z | 0) + w | 0;\n}\n\nnotSuppressesOptArgs(undefined, undefined, undefined, 3);\n\nfunction liveSuppressesOptArgs(xOpt, yOpt, zOpt, w) {\n  var x = xOpt !== undefined ? xOpt : 1;\n  var y = yOpt !== undefined ? yOpt : 2;\n  var z = zOpt !== undefined ? zOpt : 3;\n  return ((x + y | 0) + z | 0) + w | 0;\n}\n\nliveSuppressesOptArgs(3, undefined, undefined, 3);\n\nexport {\n  foo ,\n  bar ,\n  notSuppressesOptArgs ,\n  liveSuppressesOptArgs ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestOptArg.res",
    "content": "Js.log(OptArg.bar(~z=3, ~y=3, 4))\n\nlet foo = (~x=3, y) => x + y\n\nlet bar = () => foo(~x=12, 3)\n\nJs.log(bar)\n\nlet notSuppressesOptArgs = (~x=1, ~y=2, ~z=3, w) => x + y + z + w\n\nlet _ = notSuppressesOptArgs(3)\n\n@live\nlet liveSuppressesOptArgs = (~x=1, ~y=2, ~z=3, w) => x + y + z + w\n\nlet _ = liveSuppressesOptArgs(~x=3, 3)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestPromise.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Js_promise from \"rescript/lib/es6/js_promise.js\";\n\nfunction convert(param) {\n  return Js_promise.then_((function (param) {\n                return Promise.resolve({\n                            result: param.s\n                          });\n              }), param);\n}\n\nexport {\n  convert ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TestPromise.res",
    "content": "@genType\ntype promise<'a> = Js.Promise.t<'a>\n\n@genType\ntype fromPayload = {\n  x: int,\n  s: string,\n}\n\n@genType\ntype toPayload = {result: string}\n\n@genType\nlet convert = Js.Promise.then_(({s}) => Js.Promise.resolve({result: s}))\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ToSuppress.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar toSuppress = 0;\n\nexport {\n  toSuppress ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/ToSuppress.res",
    "content": "let toSuppress = 0\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TransitiveType1.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction convert(x) {\n  return x;\n}\n\nfunction convertAlias(x) {\n  return x;\n}\n\nexport {\n  convert ,\n  convertAlias ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TransitiveType1.res",
    "content": "@genType\nlet convert = (x: TransitiveType2.t2) => x\n\n@genType\nlet convertAlias = (x: TransitiveType2.t2Alias) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TransitiveType2.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction convertT2(x) {\n  return x;\n}\n\nexport {\n  convertT2 ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TransitiveType2.res",
    "content": "@genType\ntype t2 = option<TransitiveType3.t3>\n\n@genType\ntype t2Alias = t2\n\nlet convertT2 = (x: t2) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TransitiveType3.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction convertT3(x) {\n  return x;\n}\n\nexport {\n  convertT3 ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TransitiveType3.res",
    "content": "@genType\ntype t3 = {\n  i: int,\n  s: string,\n}\n\n@genType\nlet convertT3 = (x: t3) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Tuples.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_Option from \"rescript/lib/es6/belt_Option.js\";\n\nfunction testTuple(param) {\n  return param[0] + param[1] | 0;\n}\n\nfunction computeArea(param) {\n  return Math.imul(Math.imul(param[0], param[1]), Belt_Option.mapWithDefault(param[2], 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction computeAreaWithIdent(param) {\n  return Math.imul(Math.imul(param[0], param[1]), Belt_Option.mapWithDefault(param[2], 1, (function (n) {\n                    return n;\n                  })));\n}\n\nfunction computeAreaNoConverters(param) {\n  return Math.imul(param[0], param[1]);\n}\n\nfunction coord2d(x, y) {\n  return [\n          x,\n          y,\n          undefined\n        ];\n}\n\nfunction getFirstName(param) {\n  return param[0].name;\n}\n\nfunction marry(first, second) {\n  return [\n          first,\n          second\n        ];\n}\n\nfunction changeSecondAge(param) {\n  var second = param[1];\n  return [\n          param[0],\n          {\n            name: second.name,\n            age: second.age + 1 | 0\n          }\n        ];\n}\n\nvar origin = [\n  0,\n  0,\n  0\n];\n\nexport {\n  testTuple ,\n  origin ,\n  computeArea ,\n  computeAreaWithIdent ,\n  computeAreaNoConverters ,\n  coord2d ,\n  getFirstName ,\n  marry ,\n  changeSecondAge ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Tuples.res",
    "content": "open Belt\n\n@genType\nlet testTuple = ((a, b)) => a + b\n\n@genType\ntype coord = (int, int, option<int>)\n\n@genType\nlet origin = (0, 0, Some(0))\n\n@genType\nlet computeArea = ((x, y, z)) => {\n  open Option\n  x * y * z->mapWithDefault(1, n => n)\n}\n\n@genType\nlet computeAreaWithIdent = ((x, y, z): coord) => {\n  open Option\n  x * y * z->mapWithDefault(1, n => n)\n}\n\n@genType\nlet computeAreaNoConverters = ((x: int, y: int)) => x * y\n\n@genType\nlet coord2d = (x, y) => (x, y, None)\n\n@genType\ntype coord2 = (int, int, Js.Nullable.t<int>)\n\n@genType\ntype person = {\n  name: string,\n  age: int,\n}\n\n@genType\ntype couple = (person, person)\n\n@genType\nlet getFirstName = ((first, _second): couple) => first.name\n\n@genType\nlet marry = (first, second): couple => (first, second)\n\n@genType\nlet changeSecondAge = ((first, second): couple): couple => (first, {...second, age: second.age + 1})\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TypeParams1.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar exportSomething = 10;\n\nexport {\n  exportSomething ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TypeParams1.res",
    "content": "@gentype\ntype ocaml_array<'a> = array<'a>\n\nlet exportSomething = 10\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TypeParams2.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nvar exportSomething = 10;\n\nexport {\n  exportSomething ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TypeParams2.res",
    "content": "@genType\ntype item = {id: int}\n\n@genType\ntype items = TypeParams1.ocaml_array<item>\n\n@genType\ntype items2 = array<item>\n\nlet exportSomething = 10\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TypeParams3.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction test(x) {\n  return x;\n}\n\nfunction test2(x) {\n  return x;\n}\n\nexport {\n  test ,\n  test2 ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/TypeParams3.res",
    "content": "@genType\nlet test = (x: TypeParams2.items) => x\n\n@genType\nlet test2 = (x: TypeParams2.items2) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Types.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Belt_Option from \"rescript/lib/es6/belt_Option.js\";\n\nfunction swap(tree) {\n  return {\n          label: tree.label,\n          left: Belt_Option.map(tree.right, swap),\n          right: Belt_Option.map(tree.left, swap)\n        };\n}\n\nfunction selfRecursiveConverter(param) {\n  return param.self;\n}\n\nfunction mutuallyRecursiveConverter(param) {\n  return param.b;\n}\n\nfunction testFunctionOnOptionsAsArgument(a, foo) {\n  return Curry._1(foo, a);\n}\n\nfunction jsonStringify(prim) {\n  return JSON.stringify(prim);\n}\n\nfunction testConvertNull(x) {\n  return x;\n}\n\nvar testMarshalFields = {\n  rec: \"rec\",\n  _switch: \"_switch\",\n  switch: \"switch\",\n  __: \"__\",\n  _: \"_\",\n  foo: \"foo\",\n  _foo: \"_foo\",\n  Uppercase: \"Uppercase\",\n  _Uppercase: \"_Uppercase\"\n};\n\nfunction setMatch(x) {\n  x.match = 34;\n}\n\nfunction testInstantiateTypeParameter(x) {\n  return x;\n}\n\nvar currentTime = new Date();\n\nvar optFunction = (function (param) {\n    return 3;\n  });\n\nvar ObjectId = {};\n\nvar someIntList = {\n  hd: 1,\n  tl: {\n    hd: 2,\n    tl: {\n      hd: 3,\n      tl: /* [] */0\n    }\n  }\n};\n\nvar map = List.map;\n\nvar stringT = \"a\";\n\nvar jsStringT = \"a\";\n\nvar jsString2T = \"a\";\n\nvar i64Const = [\n  0,\n  34\n];\n\nexport {\n  someIntList ,\n  map ,\n  swap ,\n  selfRecursiveConverter ,\n  mutuallyRecursiveConverter ,\n  testFunctionOnOptionsAsArgument ,\n  stringT ,\n  jsStringT ,\n  jsString2T ,\n  jsonStringify ,\n  testConvertNull ,\n  testMarshalFields ,\n  setMatch ,\n  testInstantiateTypeParameter ,\n  currentTime ,\n  i64Const ,\n  optFunction ,\n  ObjectId ,\n}\n/* currentTime Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Types.res",
    "content": "@genType\ntype t = int\n\n@genType\nlet someIntList = list{1, 2, 3}\n\n@genType\nlet map = List.map\n\n@genType\ntype typeWithVars<'x, 'y, 'z> =\n  | A('x, 'y)\n  | B('z)\n\n@genType\ntype rec tree = {\"label\": string, \"left\": option<tree>, \"right\": option<tree>}\n\n/*\n * A tree is a recursive type which does not require any conversion (JS object).\n * All is well.\n */\n@genType\nlet rec swap = (tree: tree): tree =>\n  {\n    \"label\": tree[\"label\"],\n    \"left\": tree[\"right\"]->Belt.Option.map(swap),\n    \"right\": tree[\"left\"]->Belt.Option.map(swap),\n  }\n\n@genType\ntype rec selfRecursive = {self: selfRecursive}\n\n@genType\ntype rec mutuallyRecursiveA = {b: mutuallyRecursiveB}\nand mutuallyRecursiveB = {a: mutuallyRecursiveA}\n\n/*\n * This is a recursive type which requires conversion (a record).\n * Only a shallow conversion of the top-level element is performed.\n */\n@genType\nlet selfRecursiveConverter = ({self}) => self\n\n/*\n * This is a mutually recursive type which requires conversion (a record).\n * Only a shallow conversion of the two top-level elements is performed.\n */\n@genType\nlet mutuallyRecursiveConverter = ({b}) => b\n\n@genType\nlet testFunctionOnOptionsAsArgument = (a: option<'a>, foo) => foo(a)\n\n@genType.opaque\ntype opaqueVariant =\n  | A\n  | B\n\n@genType\nlet stringT: String.t = \"a\"\n\n@genType\nlet jsStringT: Js.String.t = \"a\"\n\n@genType\nlet jsString2T: Js.String2.t = \"a\"\n\n@genType\ntype twice<'a> = ('a, 'a)\n\n@gentype\ntype genTypeMispelled = int\n\n@genType\ntype dictString = Js.Dict.t<string>\n\n@genType\nlet jsonStringify = Js.Json.stringify\n\n@genType\ntype nullOrString = Js.Null.t<string>\n\n@genType\ntype nullOrString2 = Js.null<string>\n\ntype record = {\n  i: int,\n  s: string,\n}\n\n@genType\nlet testConvertNull = (x: Js.Null.t<record>) => x\n\n@genType\ntype decorator<'a, 'b> = 'a => 'b constraint 'a = int constraint 'b = _ => _\n\n/* Bucklescript's marshaling rules. 
*/\n@genType\ntype marshalFields = {\n  \"_rec\": string,\n  \"_switch\": string,\n  \"switch\": string,\n  \"__\": string,\n  \"___\": string,\n  \"foo__\": string,\n  \"_foo__\": string,\n  \"_Uppercase\": string,\n  \"_Uppercase__\": string,\n}\n\n@genType\nlet testMarshalFields: marshalFields = {\n  \"_rec\": \"rec\",\n  \"_switch\" /* reason keywords are not recognized */: \"_switch\",\n  \"switch\": \"switch\",\n  \"__\": \"__\",\n  \"___\": \"_\",\n  \"foo__\": \"foo\",\n  \"_foo__\": \"_foo\",\n  \"_Uppercase\": \"Uppercase\",\n  \"_Uppercase__\": \"_Uppercase\",\n}\n\n@genType\ntype marshalMutableField = {@set \"_match\": int}\n\n@genType\nlet setMatch = (x: marshalMutableField) => x[\"_match\"] = 34\n\ntype ocaml_array<'a> = array<'a>\n\n// This should be considered annotated automatically.\ntype someRecord = {id: int}\n\ntype instantiateTypeParameter = ocaml_array<someRecord>\n\n@genType\nlet testInstantiateTypeParameter = (x: instantiateTypeParameter) => x\n\n@genType @genType.as(\"Vector\")\ntype vector<'a> = ('a, 'a)\n\n@genType\ntype date = Js.Date.t\n\n@genType\nlet currentTime = Js.Date.make()\n\n@genType\ntype i64A = Int64.t\n\n@genType\ntype i64B = int64\n\n@genType\nlet i64Const: i64B = 34L\n\n@genType\nlet optFunction = Some(() => 3)\n\nmodule ObjectId: {\n  @genType\n  type t = int\n} = {\n  type t = int\n  let x = 1\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Unboxed.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction testV1(x) {\n  return x;\n}\n\nfunction r2Test(x) {\n  return x;\n}\n\nexport {\n  testV1 ,\n  r2Test ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Unboxed.res",
    "content": "@genType @ocaml.unboxed\ntype v1 = A(int)\n\n@genType @unboxed\ntype v2 = A(int)\n\n@genType\nlet testV1 = (x: v1) => x\n\n@genType @unboxed\ntype r1 = {x: int}\n\n@genType @ocaml.unboxed\ntype r2 = B({g: string})\n\n@genType\nlet r2Test = (x: r2) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Uncurried.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\n\nfunction uncurried0() {\n  return \"\";\n}\n\nfunction uncurried1(x) {\n  return String(x);\n}\n\nfunction uncurried2(x, y) {\n  return String(x) + y;\n}\n\nfunction uncurried3(x, y, z) {\n  return String(x) + (y + String(z));\n}\n\nfunction curried3(x, y, z) {\n  return String(x) + (y + String(z));\n}\n\nfunction callback(cb) {\n  return String(Curry._1(cb, undefined));\n}\n\nfunction callback2(auth) {\n  return Curry._1(auth.login, undefined);\n}\n\nfunction callback2U(auth) {\n  return auth.loginU();\n}\n\nfunction sumU(n, m) {\n  console.log(\"sumU 2nd arg\", m, \"result\", n + m | 0);\n}\n\nfunction sumU2(n) {\n  return function (m) {\n    console.log(\"sumU2 2nd arg\", m, \"result\", n + m | 0);\n  };\n}\n\nfunction sumCurried(n) {\n  console.log(\"sumCurried 1st arg\", n);\n  return function (m) {\n    console.log(\"sumCurried 2nd arg\", m, \"result\", n + m | 0);\n  };\n}\n\nfunction sumLblCurried(s, n) {\n  console.log(s, \"sumLblCurried 1st arg\", n);\n  return function (m) {\n    console.log(\"sumLblCurried 2nd arg\", m, \"result\", n + m | 0);\n  };\n}\n\nexport {\n  uncurried0 ,\n  uncurried1 ,\n  uncurried2 ,\n  uncurried3 ,\n  curried3 ,\n  callback ,\n  callback2 ,\n  callback2U ,\n  sumU ,\n  sumU2 ,\n  sumCurried ,\n  sumLblCurried ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Uncurried.res",
    "content": "@genType\ntype u0 = (. unit) => string\n\n@genType\ntype u1 = (. int) => string\n\n@genType\ntype u2 = (. int, string) => string\n\n@genType\ntype u3 = (. int, string, int) => string\n\n@genType\nlet uncurried0 = (. ()) => \"\"\n\n@genType\nlet uncurried1 = (. x) => x |> string_of_int\n\n@genType\nlet uncurried2 = (. x, y) => (x |> string_of_int) ++ y\n\n@genType\nlet uncurried3 = (. x, y, z) => (x |> string_of_int) ++ (y ++ (z |> string_of_int))\n\n@genType\nlet curried3 = (x, y, z) => (x |> string_of_int) ++ (y ++ (z |> string_of_int))\n\n@genType\nlet callback = cb => cb() |> string_of_int\n\ntype auth = {login: unit => string}\ntype authU = {loginU: (. unit) => string}\n\n@genType\nlet callback2 = auth => auth.login()\n\n@genType\nlet callback2U = auth => auth.loginU(.)\n\n@genType\nlet sumU = (. n, m) => Js.log4(\"sumU 2nd arg\", m, \"result\", n + m)\n\n@genType\nlet sumU2 = (. n, . m) => Js.log4(\"sumU2 2nd arg\", m, \"result\", n + m)\n\n@genType\nlet sumCurried = n => {\n  Js.log2(\"sumCurried 1st arg\", n)\n  m => Js.log4(\"sumCurried 2nd arg\", m, \"result\", n + m)\n}\n\n@genType\nlet sumLblCurried = (s: string, ~n) => {\n  Js.log3(s, \"sumLblCurried 1st arg\", n)\n  (~m) => Js.log4(\"sumLblCurried 2nd arg\", m, \"result\", n + m)\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Unison.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction group(breakOpt, doc) {\n  var $$break = breakOpt !== undefined ? breakOpt : /* IfNeed */0;\n  return {\n          break: $$break,\n          doc: doc\n        };\n}\n\nfunction fits(_w, _stack) {\n  while(true) {\n    var stack = _stack;\n    var w = _w;\n    if (w < 0) {\n      return false;\n    }\n    if (!stack) {\n      return true;\n    }\n    _stack = stack._1;\n    _w = w - stack._0.doc.length | 0;\n    continue ;\n  };\n}\n\nfunction toString(width, stack) {\n  if (!stack) {\n    return \"\";\n  }\n  var stack$1 = stack._1;\n  var match = stack._0;\n  var doc = match.doc;\n  switch (match.break) {\n    case /* IfNeed */0 :\n        return (\n                fits(width, stack$1) ? \"fits \" : \"no \"\n              ) + toString(width - 1 | 0, stack$1);\n    case /* Never */1 :\n        return \"never \" + (doc + toString(width - 1 | 0, stack$1));\n    case /* Always */2 :\n        return \"always \" + (doc + toString(width - 1 | 0, stack$1));\n    \n  }\n}\n\ntoString(80, /* Empty */0);\n\nvar $$break = /* Never */1;\n\ntoString(80, /* Cons */{\n      _0: {\n        break: $$break,\n        doc: \"abc\"\n      },\n      _1: /* Empty */0\n    });\n\nvar $$break$1 = /* Always */2;\n\ntoString(80, /* Cons */{\n      _0: {\n        break: $$break$1,\n        doc: \"d\"\n      },\n      _1: /* Empty */0\n    });\n\nexport {\n  group ,\n  fits ,\n  toString ,\n}\n/*  Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Unison.res",
    "content": "// Exmple of several DCE checks operating in unison\n\ntype break =\n  | IfNeed\n  | Never\n  | Always\n\ntype t = {\n  break: break,\n  doc: string,\n}\n\ntype rec stack =\n  | Empty\n  | Cons(t, stack)\n\nlet group = (~break=IfNeed, doc) => {break: break, doc: doc}\n\nlet rec fits = (w, stack) =>\n  switch stack {\n  | _ when w < 0 => false\n  | Empty => true\n  | Cons({doc}, stack) => fits(w - String.length(doc), stack)\n  }\n\nlet rec toString = (~width, stack) =>\n  switch stack {\n  | Cons({break, doc}, stack) =>\n    switch break {\n    | IfNeed => (fits(width, stack) ? \"fits \" : \"no \") ++ (stack |> toString(~width=width - 1))\n    | Never => \"never \" ++ (doc ++ (stack |> toString(~width=width - 1)))\n    | Always => \"always \" ++ (doc ++ (stack |> toString(~width=width - 1)))\n    }\n  | Empty => \"\"\n  }\n\ntoString(~width=80, Empty)\ntoString(~width=80, Cons(group(~break=Never, \"abc\"), Empty))\ntoString(~width=80, Cons(group(~break=Always, \"d\"), Empty))\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/UseImportJsValue.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction useGetProp(x) {\n  return x.getProp() + 1 | 0;\n}\n\nfunction useTypeImportedInOtherModule(x) {\n  return x;\n}\n\nexport {\n  useGetProp ,\n  useTypeImportedInOtherModule ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/UseImportJsValue.res",
    "content": "@genType\nlet useGetProp = (x: ImportJsValue.AbsoluteValue.t) => x->ImportJsValue.AbsoluteValue.getProp + 1\n\n@genType\nlet useTypeImportedInOtherModule = (x: ImportJsValue.stringFunction) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Variants.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction isWeekend(x) {\n  if (x === \"sunday\") {\n    return true;\n  } else {\n    return x === \"saturday\";\n  }\n}\n\nfunction onlySunday(param) {\n  \n}\n\nfunction swap(x) {\n  if (x === \"sunday\") {\n    return \"saturday\";\n  } else {\n    return \"sunday\";\n  }\n}\n\nfunction testConvert(x) {\n  return x;\n}\n\nfunction testConvert2(x) {\n  return x;\n}\n\nfunction testConvert3(x) {\n  return x;\n}\n\nfunction testConvert2to3(x) {\n  return x;\n}\n\nfunction id1(x) {\n  return x;\n}\n\nfunction id2(x) {\n  return x;\n}\n\nfunction polyWithOpt(foo) {\n  if (foo === \"bar\") {\n    return ;\n  } else if (foo !== \"baz\") {\n    return {\n            NAME: \"One\",\n            VAL: foo\n          };\n  } else {\n    return {\n            NAME: \"Two\",\n            VAL: 1\n          };\n  }\n}\n\nfunction restResult1(x) {\n  return x;\n}\n\nfunction restResult2(x) {\n  return x;\n}\n\nfunction restResult3(x) {\n  return x;\n}\n\nvar monday = \"monday\";\n\nvar saturday = \"saturday\";\n\nvar sunday = \"sunday\";\n\nvar fortytwoOK = \"fortytwo\";\n\nvar fortytwoBAD = \"fortytwo\";\n\nexport {\n  isWeekend ,\n  monday ,\n  saturday ,\n  sunday ,\n  onlySunday ,\n  swap ,\n  testConvert ,\n  fortytwoOK ,\n  fortytwoBAD ,\n  testConvert2 ,\n  testConvert3 ,\n  testConvert2to3 ,\n  id1 ,\n  id2 ,\n  polyWithOpt ,\n  restResult1 ,\n  restResult2 ,\n  restResult3 ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/Variants.res",
    "content": "@genType\ntype weekday = [\n  | #monday\n  | #tuesday\n  | #wednesday\n  | #thursday\n  | #friday\n  | #saturday\n  | #sunday\n]\n\n@genType\nlet isWeekend = (x: weekday) =>\n  switch x {\n  | #saturday\n  | #sunday => true\n  | _ => false\n  }\n\n@genType\nlet monday = #monday\n@genType\nlet saturday = #saturday\n@genType\nlet sunday = #sunday\n\n@genType\nlet onlySunday = (_: [#sunday]) => ()\n\n@genType\nlet swap = x =>\n  switch x {\n  | #sunday => #saturday\n  | #saturday => #sunday\n  }\n\n@genType\ntype testGenTypeAs = [\n  | @genType.as(\"type\") #type_\n  | @genType.as(\"module\") #module_\n  | @genType.as(\"42\") #fortytwo\n]\n\n@genType\nlet testConvert = (x: testGenTypeAs) => x\n\n@genType\nlet fortytwoOK: testGenTypeAs = #fortytwo\n\n/* Exporting this is BAD: type inference means it's not mapped to \"42\" */\n@genType\nlet fortytwoBAD = #fortytwo\n\n@genType\ntype testGenTypeAs2 = [\n  | @genType.as(\"type\") #type_\n  | @genType.as(\"module\") #module_\n  | @genType.as(\"42\") #fortytwo\n]\n\n/* Since testGenTypeAs2 is the same type as testGenTypeAs1,\n share the conversion map. */\n@genType\nlet testConvert2 = (x: testGenTypeAs2) => x\n\n@genType\ntype testGenTypeAs3 = [\n  | @genType.as(\"type\") #type_\n  | @genType.as(\"module\") #module_\n  | @genType.as(\"THIS IS DIFFERENT\") #fortytwo\n]\n\n/* Since testGenTypeAs3 has a different representation:\n use a new conversion map. */\n@genType\nlet testConvert3 = (x: testGenTypeAs3) => x\n\n/* This converts between testGenTypeAs2 and testGenTypeAs3 */\n@genType\nlet testConvert2to3 = (x: testGenTypeAs2): testGenTypeAs3 => x\n\n@genType\ntype x1 = [#x | @genType.as(\"same\") #x1]\n\n@genType\ntype x2 = [#x | @genType.as(\"same\") #x2]\n\n@genType\nlet id1 = (x: x1) => x\n\n@genType\nlet id2 = (x: x2) => x\n\n@genType @genType.as(\"type\")\ntype type_ = | @genType.as(\"type\") Type\n\n@genType\nlet polyWithOpt = foo => foo === \"bar\" ? None : foo !== \"baz\" ? 
Some(#One(foo)) : Some(#Two(1))\n\n@genType\ntype result1<'a, 'b> =\n  | Ok('a)\n  | Error('b)\n\n@genType\ntype result2<'a, 'b> = result<'a, 'b>\n\n@genType\ntype result3<'a, 'b> = Belt.Result.t<'a, 'b>\n\n@genType\nlet restResult1 = (x: result1<int, string>) => x\n\n@genType\nlet restResult2 = (x: result2<int, string>) => x\n\n@genType\nlet restResult3 = (x: result3<int, string>) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/VariantsWithPayload.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction testWithPayload(x) {\n  return x;\n}\n\nfunction printVariantWithPayload(x) {\n  if (typeof x !== \"object\") {\n    if (x === \"a\") {\n      console.log(\"printVariantWithPayload: a\");\n    } else if (x === \"b\") {\n      console.log(\"printVariantWithPayload: b\");\n    } else if (x === \"Half\") {\n      console.log(\"printVariantWithPayload: Half\");\n    } else if (x === \"True\") {\n      console.log(\"printVariantWithPayload: True\");\n    } else {\n      console.log(\"printVariantWithPayload: Twenty\");\n    }\n    return ;\n  }\n  var payload = x.VAL;\n  console.log(\"printVariantWithPayload x:\", payload.x, \"y:\", payload.y);\n}\n\nfunction testManyPayloads(x) {\n  return x;\n}\n\nfunction printManyPayloads(x) {\n  var variant = x.NAME;\n  if (variant === \"two\") {\n    var match = x.VAL;\n    console.log(\"printManyPayloads two:\", match[0], match[1]);\n    return ;\n  }\n  if (variant === \"three\") {\n    var payload = x.VAL;\n    console.log(\"printManyPayloads x:\", payload.x, \"y:\", payload.y);\n    return ;\n  }\n  console.log(\"printManyPayloads one:\", x.VAL);\n}\n\nfunction testSimpleVariant(x) {\n  return x;\n}\n\nfunction testVariantWithPayloads(x) {\n  return x;\n}\n\nfunction printVariantWithPayloads(x) {\n  if (typeof x === \"number\") {\n    console.log(\"printVariantWithPayloads\", \"A\");\n    return ;\n  }\n  switch (x.TAG | 0) {\n    case /* B */0 :\n        console.log(\"printVariantWithPayloads\", \"B(\" + (String(x._0) + \")\"));\n        return ;\n    case /* C */1 :\n        console.log(\"printVariantWithPayloads\", \"C(\" + (String(x._0) + (\", \" + (String(x._1) + \")\"))));\n        return ;\n    case /* D */2 :\n        var match = x._0;\n        console.log(\"printVariantWithPayloads\", \"D((\" + (String(match[0]) + (\", \" + (String(match[1]) + \"))\"))));\n        return ;\n    case /* E */3 :\n        
console.log(\"printVariantWithPayloads\", \"E(\" + (String(x._0) + (\", \" + (x._1 + (\", \" + (String(x._2) + \")\"))))));\n        return ;\n    \n  }\n}\n\nfunction testVariant1Int(x) {\n  return x;\n}\n\nfunction testVariant1Object(x) {\n  return x;\n}\n\nexport {\n  testWithPayload ,\n  printVariantWithPayload ,\n  testManyPayloads ,\n  printManyPayloads ,\n  testSimpleVariant ,\n  testVariantWithPayloads ,\n  printVariantWithPayloads ,\n  testVariant1Int ,\n  testVariant1Object ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/VariantsWithPayload.res",
    "content": "type payload = {\n  x: int,\n  y: option<string>,\n}\n\ntype withPayload = [\n  | #a\n  | @genType.as(\"bRenamed\") #b\n  | @genType.as(true) #True\n  | @genType.as(20) #Twenty\n  | @genType.as(0.5) #Half\n  | #c(payload)\n]\n\n@genType\nlet testWithPayload = (x: withPayload) => x\n\n@genType\nlet printVariantWithPayload = (x: withPayload) =>\n  switch x {\n  | #a => Js.log(\"printVariantWithPayload: a\")\n  | #b => Js.log(\"printVariantWithPayload: b\")\n  | #True => Js.log(\"printVariantWithPayload: True\")\n  | #Twenty => Js.log(\"printVariantWithPayload: Twenty\")\n  | #Half => Js.log(\"printVariantWithPayload: Half\")\n  | #c(payload) => Js.log4(\"printVariantWithPayload x:\", payload.x, \"y:\", payload.y)\n  }\n\n@genType\ntype manyPayloads = [\n  | @genType.as(\"oneRenamed\") #one(int)\n  | @genType.as(2) #two(string, string)\n  | #three(payload)\n]\n\n@genType\nlet testManyPayloads = (x: manyPayloads) => x\n\n@genType\nlet printManyPayloads = (x: manyPayloads) =>\n  switch x {\n  | #one(n) => Js.log2(\"printManyPayloads one:\", n)\n  | #two(s1, s2) => Js.log3(\"printManyPayloads two:\", s1, s2)\n  | #three(payload) => Js.log4(\"printManyPayloads x:\", payload.x, \"y:\", payload.y)\n  }\n\n@genType\ntype simpleVariant =\n  | A\n  | B\n  | C\n\n@genType\nlet testSimpleVariant = (x: simpleVariant) => x\n\n@genType\ntype variantWithPayloads =\n  | @genType.as(\"ARenamed\") A\n  | B(int)\n  | C(int, int)\n  | D((int, int))\n  | E(int, string, int)\n\n@genType\nlet testVariantWithPayloads = (x: variantWithPayloads) => x\n\n@genType\nlet printVariantWithPayloads = x =>\n  switch x {\n  | A => Js.log2(\"printVariantWithPayloads\", \"A\")\n  | B(x) => Js.log2(\"printVariantWithPayloads\", \"B(\" ++ (string_of_int(x) ++ \")\"))\n  | C(x, y) =>\n    Js.log2(\n      \"printVariantWithPayloads\",\n      \"C(\" ++ (string_of_int(x) ++ (\", \" ++ (string_of_int(y) ++ \")\"))),\n    )\n  | D((x, y)) =>\n    Js.log2(\n      \"printVariantWithPayloads\",\n    
  \"D((\" ++ (string_of_int(x) ++ (\", \" ++ (string_of_int(y) ++ \"))\"))),\n    )\n  | E(x, s, y) =>\n    Js.log2(\n      \"printVariantWithPayloads\",\n      \"E(\" ++ (string_of_int(x) ++ (\", \" ++ (s ++ (\", \" ++ (string_of_int(y) ++ \")\"))))),\n    )\n  }\n\n@genType\ntype variant1Int = R(int)\n\n@genType\nlet testVariant1Int = (x: variant1Int) => x\n\n@genType\ntype variant1Object = R(payload)\n\n@genType\nlet testVariant1Object = (x: variant1Object) => x\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/Arr.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_Array from \"rescript/lib/es6/belt_Array.js\";\n\nfunction ff(a) {\n  Belt_Array.get(a, 3);\n  return 11;\n}\n\nvar MM = {\n  ff: ff\n};\n\nvar B;\n\nvar $$Array;\n\nexport {\n  B ,\n  $$Array ,\n  MM ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/Arr.res",
    "content": "module B = Belt\nmodule Array = B.Array\n\nmodule MM = {\n  let ff = a =>\n    switch a[3] {\n    | _ => 11\n    }\n}\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/BeltTest.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Belt_Map from \"rescript/lib/es6/belt_Map.js\";\nimport * as Belt_List from \"rescript/lib/es6/belt_List.js\";\nimport * as Belt_MapInt from \"rescript/lib/es6/belt_MapInt.js\";\nimport * as Belt_MapString from \"rescript/lib/es6/belt_MapString.js\";\n\nvar lstHead1 = Belt_List.headExn;\n\nvar lstHead2 = Belt_List.headExn;\n\nvar mapGetExn1 = Belt_MapInt.getExn;\n\nvar mapGetExn2 = Belt_MapInt.getExn;\n\nvar mapGetExn3 = Belt_MapInt.getExn;\n\nvar mapGetExn4 = Belt_MapString.getExn;\n\nvar mapGetExn5 = Belt_MapString.getExn;\n\nvar mapGetExn6 = Belt_MapString.getExn;\n\nvar mapGetExn7 = Belt_Map.getExn;\n\nvar mapGetExn8 = Belt_Map.getExn;\n\nexport {\n  lstHead1 ,\n  lstHead2 ,\n  mapGetExn1 ,\n  mapGetExn2 ,\n  mapGetExn3 ,\n  mapGetExn4 ,\n  mapGetExn5 ,\n  mapGetExn6 ,\n  mapGetExn7 ,\n  mapGetExn8 ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/BeltTest.res",
    "content": "open Belt.List\n\n@raises(Not_found)\nlet lstHead1 = l => l->Belt.List.headExn\n\n@raises(Not_found)\nlet lstHead2 = l => l->Belt_List.headExn\n\n@raises(Not_found)\nlet mapGetExn1 = (s, k) => s->Belt.Map.Int.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn2 = (s, k) => s->Belt_Map.Int.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn3 = (s, k) => s->Belt_MapInt.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn4 = (s, k) => s->Belt.Map.String.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn5 = (s, k) => s->Belt_Map.String.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn6 = (s, k) => s->Belt_MapString.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn7 = (s, k) => s->Belt.Map.getExn(k)\n\n@raises(Not_found)\nlet mapGetExn8 = (s, k) => s->Belt_Map.getExn(k)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/BsJson.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Json_decode from \"@glennsl/bs-json/src/Json_decode.bs.js\";\n\nvar testBsJson = Json_decode.string;\n\nvar testBsJson2 = Json_decode.string;\n\nexport {\n  testBsJson ,\n  testBsJson2 ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/BsJson.res",
    "content": "@raise(DecodeError)\nlet testBsJson = x => Json_decode.string(x)\n\n@raise(DecodeError)\nlet testBsJson2 = x => Json.Decode.string(x)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/Exn.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nfunction raises(param) {\n  throw {\n        RE_EXN_ID: \"Not_found\",\n        Error: new Error()\n      };\n}\n\nvar catches1;\n\ntry {\n  catches1 = undefined;\n}\ncatch (raw_exn){\n  var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n  if (exn.RE_EXN_ID === \"Not_found\") {\n    catches1 = undefined;\n  } else {\n    throw exn;\n  }\n}\n\nvar catches2;\n\ntry {\n  catches2 = undefined;\n}\ncatch (raw_exn$1){\n  var exn$1 = Caml_js_exceptions.internalToOCamlException(raw_exn$1);\n  if (exn$1.RE_EXN_ID === \"Not_found\") {\n    catches2 = undefined;\n  } else {\n    throw exn$1;\n  }\n}\n\nvar raiseAndCatch;\n\ntry {\n  throw {\n        RE_EXN_ID: \"Not_found\",\n        Error: new Error()\n      };\n}\ncatch (exn$2){\n  raiseAndCatch = undefined;\n}\n\nfunction raisesWithAnnotaion(param) {\n  throw {\n        RE_EXN_ID: \"Not_found\",\n        Error: new Error()\n      };\n}\n\nthrow {\n      RE_EXN_ID: \"Not_found\",\n      Error: new Error()\n    };\n\nexport {\n  raises ,\n  catches1 ,\n  catches2 ,\n  raiseAndCatch ,\n  raisesWithAnnotaion ,\n  callsRaiseWithAnnotation ,\n  callsRaiseWithAnnotationAndIsAnnotated ,\n  z ,\n  incompleteMatch ,\n  A ,\n  B ,\n  twoRaises ,\n  sequencing ,\n  wrongCatch ,\n  C ,\n  wrongCatch2 ,\n  raise2Annotate3 ,\n  $$Error ,\n  parse_json_from_file ,\n  reRaise ,\n  switchWithCatchAll ,\n  raiseInInternalLet ,\n  indirectCall ,\n  array ,\n  id ,\n  tryChar ,\n  StringHash ,\n  specializedHash ,\n  genericHash ,\n  raiseAtAt ,\n  raisePipe ,\n  raiseArrow ,\n  bar ,\n  foo ,\n  stringMake1 ,\n  stringMake2 ,\n  stringMake3 ,\n  severalCases ,\n  genericRaiseIsNotSupported ,\n  redundant ,\n  redundant2 ,\n  redundant3 ,\n  redundant4 ,\n  exits ,\n  redundantAnnotation ,\n  _x ,\n  onFunction ,\n  onResult ,\n  onFunctionPipe ,\n  onResultPipeWrong ,\n}\n/* 
catches1 Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/Exn.res",
    "content": "let raises = () => raise(Not_found)\n\nlet catches1 = try () catch {\n| Not_found => ()\n}\n\nlet catches2 = switch () {\n| _ => ()\n| exception Not_found => ()\n}\n\nlet raiseAndCatch = try raise(Not_found) catch {\n| _ => ()\n}\n\n@raises(Not_found)\nlet raisesWithAnnotaion = () => raise(Not_found)\n\nlet callsRaiseWithAnnotation = raisesWithAnnotaion()\n\n@raises(A)\nlet callsRaiseWithAnnotationAndIsAnnotated = raisesWithAnnotaion()\n\nlet z = List.hd(list{})\n\nlet incompleteMatch = l =>\n  switch l {\n  | list{} => ()\n  }\n\nexception A\nexception B\n\nlet twoRaises = (x, y) => {\n  if x {\n    raise(A)\n  }\n  if y {\n    raise(B)\n  }\n}\n\nlet sequencing = () => {\n  raise(A)\n  try raise(B) catch {\n  | _ => ()\n  }\n}\n\nlet wrongCatch = () =>\n  try raise(B) catch {\n  | A => ()\n  }\n\nexception C\nlet wrongCatch2 = b =>\n  switch b ? raise(B) : raise(C) {\n  | exception A => ()\n  | exception B => ()\n  | list{} => ()\n  }\n\n@raises([A, B, C])\nlet raise2Annotate3 = (x, y) => {\n  if x {\n    raise(A)\n  }\n  if y {\n    raise(B)\n  }\n}\n\nexception Error(string, string, int)\n\nlet parse_json_from_file = s => {\n  switch 34 {\n  | exception Error(p1, p2, e) =>\n    raise(Error(p1, p2, e))\n  | v =>\n    v\n  }\n}\n\nlet reRaise = () =>\n  switch raise(A) {\n  | exception A => raise(B)\n  | _ => 11\n  }\n\nlet switchWithCatchAll = switch raise(A) {\n| exception _ => 1\n| _ => 2\n}\n\nlet raiseInInternalLet = b => {\n  let a = b ? 
raise(A) : 22\n  a + 34\n}\n\nlet indirectCall = () => () |> raisesWithAnnotaion\n\n@raises(Invalid_argument)\nlet array = a => a[2]\n\nlet id = x => x\n\nlet tryChar = v => {\n  try id(Char.chr(v)) |> ignore catch {\n  | _ => ()\n  }\n  42\n}\n\nmodule StringHash = Hashtbl.Make({\n  include String\n  let hash = Hashtbl.hash\n})\n\nlet specializedHash = tbl => StringHash.find(tbl, \"abc\")\n\n@raises(Not_found)\nlet genericHash = tbl => Hashtbl.find(tbl, \"abc\")\n\n@raises(Not_found)\nlet raiseAtAt = () => \\\"@@\"(raise, Not_found)\n\n@raises(Not_found)\nlet raisePipe = Not_found |> raise\n\n@raises(Not_found)\nlet raiseArrow = Not_found->raise\n\n@raises(Js.Exn.Error)\nlet bar = () => Js.Json.parseExn(\"!!!\")\n\nlet foo = () =>\n  try Js.Json.parseExn(\"!!!\") catch {\n  | Js.Exn.Error(_) => Js.Json.null\n  }\n\n@raises(Invalid_argument)\nlet stringMake1 = String.make(12, ' ')\n\nlet stringMake2 = (@doesNotRaise String.make)(12, ' ')\n\nlet stringMake3 = @doesNotRaise String.make(12, ' ')\n\nlet severalCases = cases =>\n  switch cases {\n  | \"one\" => failwith(\"one\")\n  | \"two\" => failwith(\"two\")\n  | \"three\" => failwith(\"three\")\n  | _ => ()\n  }\n\n@raises(genericException)\nlet genericRaiseIsNotSupported = exn => raise(exn)\n\nlet redundant = (@doesNotRaise String.uncapitalize_ascii)(\"abc\")\n\nlet redundant2 = @doesNotRaise String.uncapitalize_ascii(\"abc\")\n\nlet redundant3 = @doesNotRaise (@doesNotRaise String.uncapitalize_ascii)(\"abc\")\n\nlet redundant4 = () => {\n  let _ = String.uncapitalize_ascii(\"abc\")\n  let _ = @doesNotRaise String.uncapitalize_ascii(\"abc\")\n  let _ = String.uncapitalize_ascii(\"abc\")\n  let _ = String.uncapitalize_ascii(@doesNotRaise \"abc\")\n}\n\n@raises(exit)\nlet exits = () => exit(1)\n\n@raises(Invalid_argument)\nlet redundantAnnotation = () => ()\n\nlet _x = raise(A)\n\nlet _ = raise(A)\n\nlet () = raise(A)\n\nraise(Not_found)\n\ntrue ? 
exits() : ()\n\n// Examples with pipe\n\nlet onFunction = () => (@doesNotRaise Belt.Array.getExn)([], 0)\n\nlet onResult = () => @doesNotRaise Belt.Array.getExn([], 0)\n\nlet onFunctionPipe = () => []->(@doesNotRaise Belt.Array.getExn)(0)\n\nlet onResultPipeWrong = () => @doesNotRaise []->Belt.Array.getExn(0)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/ExnA.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as ExnB from \"./ExnB.bs.js\";\n\nfunction bar(param) {\n  return ExnB.foo(undefined);\n}\n\nexport {\n  bar ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/ExnA.res",
    "content": "let bar = () => ExnB.foo()\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/ExnB.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction foo(param) {\n  throw {\n        RE_EXN_ID: \"Not_found\",\n        Error: new Error()\n      };\n}\n\nexport {\n  foo ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/ExnB.res",
    "content": "@raises(Not_found)\nlet foo = () => raise(Not_found)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/ExportWithRename.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\n\nfunction ExportWithRename(Props) {\n  return Props.s;\n}\n\nvar make = ExportWithRename;\n\nexport {\n  make ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/ExportWithRename.res",
    "content": "@genType(\"ExportWithRename\") @react.component\nlet make = (~s) => React.string(s)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/InnerModules.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Pervasives from \"rescript/lib/es6/pervasives.js\";\n\nvar wrapExitTop = Pervasives.exit;\n\nvar wrapExitM1 = Pervasives.exit;\n\nvar callLocally = Pervasives.exit;\n\nvar callTop = Pervasives.exit;\n\nvar wrapExitM2 = Pervasives.exit;\n\nvar callM1 = Pervasives.exit;\n\nvar callTop$1 = Pervasives.exit;\n\nvar M2 = {\n  wrapExitM2: wrapExitM2,\n  callM1: callM1,\n  callTop: callTop$1\n};\n\nvar M1 = {\n  wrapExitM1: wrapExitM1,\n  callLocally: callLocally,\n  callTop: callTop,\n  M2: M2\n};\n\nvar callM1$1 = Pervasives.exit;\n\nexport {\n  wrapExitTop ,\n  M1 ,\n  callM1$1 as callM1,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/InnerModules.res",
    "content": "@raises(exit)\nlet wrapExitTop = x => exit(x)\n\nmodule M1 = {\n  @raises(exit)\n  let wrapExitM1 = x => exit(x)\n\n  @raises(exit)\n  let callLocally = x => wrapExitM1(x)\n\n  @raises(exit)\n  let callTop = x => wrapExitTop(x)\n\n  module M2 = {\n    @raises(exit)\n    let wrapExitM2 = x => exit(x)\n\n    @raises(exit)\n    let callM1 = x => wrapExitM1(x)\n\n    @raises(exit)\n    let callTop = x => wrapExitTop(x)\n  }\n}\n\n@raises(exit)\nlet callM1 = x => M1.wrapExitM1(x)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/TestInnerModules.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as InnerModules from \"./InnerModules.bs.js\";\n\nvar testTop = InnerModules.wrapExitTop;\n\nfunction testM1(x) {\n  return InnerModules.M1.wrapExitM1(x);\n}\n\nfunction testM2(x) {\n  return Curry._1(InnerModules.M1.M2.wrapExitM2, x);\n}\n\nexport {\n  testTop ,\n  testM1 ,\n  testM2 ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/TestInnerModules.res",
    "content": "@raises(exit)\nlet testTop = x => InnerModules.wrapExitTop(x)\n\n@raises(exit)\nlet testM1 = x => InnerModules.M1.wrapExitM1(x)\n\n@raises(exit)\nlet testM2 = x => InnerModules.M1.M2.wrapExitM2(x)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/TestYojson.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Yojson from \"./Yojson.bs.js\";\nimport * as Caml_obj from \"rescript/lib/es6/caml_obj.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nfunction foo(x) {\n  return Yojson.Basic.from_string(x);\n}\n\nfunction bar(str, json) {\n  try {\n    return Curry._2(Yojson.Basic.Util.member, str, json);\n  }\n  catch (raw_exn){\n    var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n    if (exn.RE_EXN_ID === Yojson.Basic.Util.Type_error) {\n      if (exn._1 === \"a\") {\n        if (Caml_obj.equal(exn._2, json)) {\n          return json;\n        }\n        throw exn;\n      }\n      throw exn;\n    }\n    throw exn;\n  }\n}\n\nfunction toString(x) {\n  return Curry._1(Yojson.Basic.Util.to_string, x);\n}\n\nfunction toInt(x) {\n  return Curry._1(Yojson.Basic.Util.to_int, x);\n}\n\nexport {\n  foo ,\n  bar ,\n  toString ,\n  toInt ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/TestYojson.res",
    "content": "@raises(Yojson.Json_error)\nlet foo = x => Yojson.Basic.from_string(x)\n\nlet bar = (str, json) =>\n  switch {\n    open Yojson.Basic.Util\n    json |> member(str)\n  } {\n  | j => j\n  | exception Yojson.Basic.Util.Type_error(\"a\", d) when d == json => json\n  }\n\n@raises(Yojson.Basic.Util.Type_error)\nlet toString = x => Yojson.Basic.Util.to_string(x)\n\n@raises(Yojson.Basic.Util.Type_error)\nlet toInt = x => Yojson.Basic.Util.to_int(x)\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/Yojson.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Caml_exceptions from \"rescript/lib/es6/caml_exceptions.js\";\n\nvar Json_error = /* @__PURE__ */Caml_exceptions.create(\"Yojson.Json_error\");\n\nfunction from_string(param) {\n  throw {\n        RE_EXN_ID: Json_error,\n        _1: \"Basic.from_string\",\n        Error: new Error()\n      };\n}\n\nvar Type_error = /* @__PURE__ */Caml_exceptions.create(\"Yojson.Basic.Util.Type_error\");\n\nfunction member(_s, j) {\n  throw {\n        RE_EXN_ID: Type_error,\n        _1: \"Basic.Util.member\",\n        _2: j,\n        Error: new Error()\n      };\n}\n\nfunction to_int(param) {\n  return 34;\n}\n\nfunction to_string(param) {\n  return \"\";\n}\n\nvar Util = {\n  Type_error: Type_error,\n  member: member,\n  to_int: to_int,\n  to_string: to_string\n};\n\nvar Basic = {\n  from_string: from_string,\n  Util: Util\n};\n\nexport {\n  Json_error ,\n  Basic ,\n}\n/* No side effect */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exception/Yojson.res",
    "content": "exception Json_error(string)\n\nmodule Basic = {\n  type t\n\n  @raises(Json_error)\n  let from_string: string => t = _ => raise(Json_error(\"Basic.from_string\"))\n\n  module Util = {\n    exception Type_error(string, t)\n\n    @raises(Type_error)\n    let member: (string, t) => t = (_s, j) => raise(Type_error(\"Basic.Util.member\", j))\n\n    let to_int: t => int = _ => 34\n\n    let to_string: t => string = _ => \"\"\n  }\n}\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/exportNestedValues.js",
    "content": "/* @flow strict */\n\nclass InnerClass {\n  static InnerStuff = {\n    innerStuffContents: { x: 34 }\n  };\n}\n\nexport class TopLevelClass {\n  static MiddleLevelElements = {\n    stuff: InnerClass\n  };\n}\n\nexport const ValueStartingWithUpperCaseLetter = \"ValueStartingWithUpperCaseLetter\";\n\nexport default 42;"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/src/trace.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Format from \"rescript/lib/es6/format.js\";\n\nfunction infok(_mod_name, _fun_name, k) {\n  return Curry._1(k, {\n              pf: Format.eprintf\n            });\n}\n\nexport {\n  infok ,\n  \n}\n/* Format Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/deadcode/test.sh",
    "content": "output=\"expected/deadcode.txt\"\nif [ \"$RUNNER_OS\" == \"Windows\" ]; then\n  exclude_dirs=\"src\\exception\"\n  suppress=\"src\\ToSuppress.res\"\nelse\n  exclude_dirs=\"src/exception\"\n  suppress=\"src/ToSuppress.res\"\nfi\ndune exec rescript-editor-analysis -- reanalyze -config -debug -ci -exclude-paths $exclude_dirs -live-names globallyLive1 -live-names globallyLive2,globallyLive3 -suppress $suppress > $output\n# CI. We use LF, and the CI OCaml fork prints CRLF. Convert.\nif [ \"$RUNNER_OS\" == \"Windows\" ]; then\n  perl -pi -e 's/\\r\\n/\\n/g' -- $output\nfi\n\noutput=\"expected/exception.txt\"\nif [ \"$RUNNER_OS\" == \"Windows\" ]; then\n  unsuppress_dirs=\"src\\exception\"\nelse\n  unsuppress_dirs=\"src/exception\"\nfi\ndune exec rescript-editor-analysis -- reanalyze -exception -ci -suppress src -unsuppress $unsuppress_dirs > $output\n# CI. We use LF, and the CI OCaml fork prints CRLF. Convert.\nif [ \"$RUNNER_OS\" == \"Windows\" ]; then\n  perl -pi -e 's/\\r\\n/\\n/g' -- $output\nfi\n\n\nwarningYellow='\\033[0;33m'\nsuccessGreen='\\033[0;32m'\nreset='\\033[0m'\n\ndiff=$(git ls-files --modified expected)\nif [[ $diff = \"\" ]]; then\n  printf \"${successGreen}✅ No unstaged tests difference.${reset}\\n\"\nelse\n  printf \"${warningYellow}⚠️ There are unstaged differences in tests/! Did you break a test?\\n${diff}\\n${reset}\"\n  git --no-pager diff expected\n  exit 1\nfi\n"
  },
  {
    "path": "analysis/reanalyze/examples/termination/.gitignore",
    "content": "/node_modules\n/lib"
  },
  {
    "path": "analysis/reanalyze/examples/termination/.watchmanconfig",
    "content": ""
  },
  {
    "path": "analysis/reanalyze/examples/termination/Makefile",
    "content": "SHELL = /bin/bash\n\nnode_modules/.bin/rescript:\n\tnpm install\n\nbuild: node_modules/.bin/rescript\n\tnode_modules/.bin/rescript\n\ntest: build node_modules/.bin/rescript\n\t./test.sh\n\nclean:\n\trm -r node_modules lib\n\n.DEFAULT_GOAL := build\n\n.PHONY: build clean test\n"
  },
  {
    "path": "analysis/reanalyze/examples/termination/README.md",
    "content": "# Termination Analysis\n\n```\nConcrete Programs\nx y l f                   Variables\narg := e | ~l=e           Function Argument\nargs := args, ..., argn   Function Arguments\ne := x | e(args)          Expressions\nk := <l1:k1, ..., ln:kn>  Kind\nP := f1 = e1 ... fn = en  Program\nE := x1:k1, ..., xn:kn    Environment\n\nThe variables xi in E must be distinct from labels in kj and in args.\n  E |- e  Well Formedness Relation\n  For fi = ei in P, and fi:ki in E, check E,ki |- ei.\n\n  E |- x if x:k not in E  No Escape\n\n  E |- x   E |- e\n  ---------------\n     E |- ~x=e\n\n  E |- x   E |- args\n  ------------------\n     E |- e(args)\n\n           E(x) = <l1:k1, ... ln:kn>\n        E(l1) = E(x1) ... E(ln) = E(xn)\n            E |- arg1 ... E |- argk\n  -------------------------------------------\n  E |- x(~l1:x1, ... ~ln:xn, arg1, ..., argk)\n\nAbstract Programs\narg := l:x                Function Argument\nargs := arg1, ...., argn  Function Arguments\nC ::=                     Commmand\n      x<args>             Call\n      Some | None         Optional Value\n      switch x<args> {    Switch on Optionals\n        | Some -> C1\n        | None -> C2 }\n      C1 ; ... ; Cn       Sequential Composition\n      C1 | ... | Cn       Nondeterministic Choice\n      { C1, ..., Cn }     No Evaluation Order\nFT := f1<args1> = C1      Function Table\n     ...\n     fn<argsn> = Cn\nStack := f1<args1> ... fn<argsn>\nprogress := Progress | NoProgress\nvalues := {some, none} -> ?progress\nState := (progress, ?values)\n\nEval.run: (FT, Stack, f<args>, C, State) ---> State\n```\n\n\n# Termination Types\n\n```\np ::= 0 | 1\nr ::= p | {Some:p, None:p}\nt ::= * | t1=>r[s]t2\n\n\n-----------------\nG, x:t |-p x:0[]t\n\n\n    G, x:t1 |-0 e:p[s]t2\n-----------------------------\nG|-p0 (x)=>e: p0[](t1=>p[s]t2)\n\n\nG|-p0 e1:p1[s1]t1  G|-p1 e2:p2[s2](t1=>p[s]t2)\n    p3=p2+p  s3=(p2=1 ? 
s1+s2 : s1+s2+s)\n----------------------------------------------\n         G|-p0 e2(e1): p3[s3]t2\n\n\nG, fi: ti=>pi[si,fi]ti', x: ti |-0 ei: pi[si]ti'\n     G, fi: ti=>pi[si]ti' |-p0 e': p[s]t\n              fi not in si,ti'\n------------------------------------------------\n    G |-p0 let rec fi = (xi)=>ei; e : p[s]t\n\n```\n\n# Type Inference\n\n```\ns ::=\n      S        Set variable.\n      Loop     May loop.\n      f        May call f before making progress.\n      p.s      If p==1 then empty else s.\n      s1+s2    Union.\n\np ::=\n      P        Progress variable.\n      0        Does not make progress.\n      1        Makes.\n      p1+p2    Makes progress if either does.\n      p1|p2    Makes progress if both do.\n\nt ::=\n      T           Type variable.\n      *           Base type.\n      t1=>p[s]t2  Function that calls s before making progress.\n\n\n-----------------\nG, x:t |-p x:0[]t\n\n\nG, x:T1 |-0 e:p[s]t2  T1 fresh\n------------------------------\nG|-p0 (x)=>e: p0[](T1=>p[s]t2)\n\n\nG|-p0 e1:p1[s1]t1  G|-p1 e2:p2[s2]t  P,S,T2 fresh\n-------------------------------------------------\nG|-p0 e2(e1): (p2+P)[s1+s2+p2.S]T2  t=t1=>P[S]T2\n\n\nG, fi: Ti=>Pi[Si+fi]Ti', x: Ti |-0 ei: pi[si]ti'\n             Ti,Ti',Pi,Si fresh\n     G, fi: ti=>pi[si]ti' |-p0 e': p[s]t          \n------------------------------------------------\n    G |-p0 let rec fi = (xi)=>ei; e : p[s]t \n    pi=Pi si=Si ti'=Ti'  fi not in si,ti'\n\n```\n\nConstraint equations:\n```\n  0+p=p  1+p=1  p1+p2=p2+p1  p1+(p2+p3)=(p1+p2)+p3\n```\n\n```\n  0.s=s  1.s=[]\n```\n\n```\n  f-f=Loop  s+Loop=Loop\n  p.Loop ~~~> add  p=1\n```\n\n\n# Example Inference\n\n\n```reason\nlet rec iter = (f, x) => { f(x); iter(f,x); };\n```\n\n```\niter:(*=>P[S]*)=>P1[S1+iter](*=>P2[S2]*), f:*=>P3[S3]*, x:* |-0 f(x) : ???\n\n     |-0 f(x) : (0+P3)[0.S3]*\n     |-0 f(x) : P3[S3]*\n     \n     |-P3 iter(f) : (P3+P1)[P3.(S1+iter)](*=>P2[S2]*)\n       P3=P  S3=S\n     |-P iter(f) : (P+P1)[P.(S1+iter)](*=>P2[S2]*)\n  
   \n     |-(P+P1) iter(f,x) : (P+P1+P2)[P.(S1+iter)+(P+P1).S2]*\n\n\n     |-0 (x) => { f(x); iter(f,x); } : 0[](*=>(P+P1+P2)[S+P.(S1+iter)+(P+P1).S2]*)\n       P1=0  S1=[]  P2=P+P1+P2  S2=S+P.(S1+iter)+(P+P1).S2\n       iter not in S+P.(S1+iter)+(P+P1).S2  \n\n       P2=P+P2  S2=S+P.iter+P.S2\n       iter not in S+P.iter+P.S2  \n```\n\n\nResolving \"not in\":\n\n```\nS+P.iter+P.S2 - iter =\nS+P.Loop+P.S2 =  ---> add P=1\nS\n\nS2=S\nP2=1\n```\n\nAfter Applying substitutions:\n\n```\niter:(*=>1[S]*)=>0[](*=>1[S]*)\n```\n\nIn words: `iter` expects as first parameter a function that: makes progress when called, and let `S` bet the set of functions it calls before making progress. When supplied the first argument, `iter` does not make progress. When supplied the second argument, it makes progress, and calls functions in set `S` before making progress.\n\n\n\n"
  },
  {
    "path": "analysis/reanalyze/examples/termination/bsconfig.json",
    "content": "{\n  \"reanalyze\": {\n    \"analysis\": [\"termination\"],\n    \"suppress\": [],\n    \"unsuppress\": []\n  },\n  \"name\": \"arnold\",\n  \"bsc-flags\": [\"-bs-super-errors\"],\n  \"jsx\": { \"version\": 3 },\n  \"bs-dependencies\": [],\n  \"sources\": [\n    {\n      \"dir\": \"src\",\n      \"subdirs\": true\n    }\n  ],\n  \"package-specs\": {\n    \"module\": \"es6\",\n    \"in-source\": true\n  },\n  \"suffix\": \".bs.js\"\n}\n"
  },
  {
    "path": "analysis/reanalyze/examples/termination/expected/termination.txt",
    "content": "\n  Scanning TestCyberTruck.cmt Source:TestCyberTruck.res\n\n  Function Table\n  1 justReturn: _\n\n  Termination Analysis for justReturn\n\n  Function Table\n  1 alwaysLoop: alwaysLoop\n\n  Termination Analysis for alwaysLoop\n\n  Function Table\n  1 alwaysProgress: +progress; alwaysProgress\n\n  Termination Analysis for alwaysProgress\n\n  Function Table\n  1 alwaysProgressWrongOrder: alwaysProgressWrongOrder; +progress\n\n  Termination Analysis for alwaysProgressWrongOrder\n\n  Function Table\n  1 doNotAlias: _\n\n  Termination Analysis for doNotAlias\n\n  Function Table\n  1 progressOnBothBranches: [+progress || +progress2]; progressOnBothBranches\n\n  Termination Analysis for progressOnBothBranches\n\n  Function Table\n  1 progressOnOneBranch: [+progress || _]; progressOnOneBranch\n\n  Termination Analysis for progressOnOneBranch\n\n  Function Table\n  1 callParseFunction<parseFunction>: parseFunction\n  2 testParametricFunction: [+progress || _]; testParametricFunction2\n  3 testParametricFunction2: callParseFunction<parseFunction:testParametricFunction>\n\n  Termination Analysis for testParametricFunction\n\n  Function Table\n  1 doNothing: _\n  2 testCacheHit: [doNothing; doNothing; +Progress.Nested.f || _]; testCacheHit\n\n  Termination Analysis for testCacheHit\n\n  Function Table\n  1 evalOrderIsNotLeftToRight: {+progress, evalOrderIsNotLeftToRight}; _\n\n  Termination Analysis for evalOrderIsNotLeftToRight\n\n  Function Table\n  1 evalOrderIsNotRightToLeft: {evalOrderIsNotRightToLeft, +progress}; _\n\n  Termination Analysis for evalOrderIsNotRightToLeft\n\n  Function Table\n  1 butFirstArgumentIsAlwaysEvaluated: +progress; butFirstArgumentIsAlwaysEvaluated\n\n  Termination Analysis for butFirstArgumentIsAlwaysEvaluated\n\n  Function Table\n  1 butSecondArgumentIsAlwaysEvaluated: +progress; butSecondArgumentIsAlwaysEvaluated\n\n  Termination Analysis for butSecondArgumentIsAlwaysEvaluated\n\n  Function Table\n  1 parseExpression: [_ || 
_]; [+Parser.next; parseExpression; parseExpression; _ || parseInt]\n  2 parseInt: [_ || _]; +Parser.next; _\n  3 parseList<f>: parseList$loop<f:f>\n  4 parseList$loop<f>: [_ || f; parseList$loop<f:f>; _]\n  5 parseListExpression: parseList<f:parseExpression>\n  6 parseListExpression2: parseExpression; parseList<f:parseExpression>\n  7 parseListInt: parseList<f:parseInt>\n  8 parseListIntTailRecursive: parseListIntTailRecursive$loop\n  9 parseListIntTailRecursive$loop: [_ || parseInt; parseListIntTailRecursive$loop]\n  10 parseListListInt: parseList<f:parseListInt>\n\n  Termination Analysis for parseListInt\n\n  Termination Analysis for parseListListInt\n\n  Termination Analysis for parseExpression\n\n  Termination Analysis for parseListExpression\n\n  Termination Analysis for parseListExpression2\n\n  Termination Analysis for parseListIntTailRecursive\n\n  Function Table\n  1 loopAfterProgress: loopAfterProgress\n  2 testLoopAfterProgress: +progress; loopAfterProgress\n\n  Termination Analysis for testLoopAfterProgress\n\n  Function Table\n  1 counterCompiled: +initState; [_ || counterCompiled; _]; _\n  2 onClick1: [_ || counterCompiled]\n\n  Termination Analysis for counterCompiled\n\n  Function Table\n  1 countRendersCompiled: [_ || countRendersCompiled; _]; _\n\n  Termination Analysis for countRendersCompiled\n\n  Function Table\n  1 alwaysReturnNone: [+Parser.next; alwaysReturnNone || None]\n  2 parseIntO: [+Parser.next; Some || None]\n  3 parseIntOWrapper: parseIntO\n  4 parseListIntO: parseListO<f:parseIntO>\n  5 parseListO<f>: parseListO$loop<f:f>\n  6 parseListO$loop<f>: [+Parser.next; _ || switch f {some: parseListO$loop<f:f>, none: _}]\n  7 testAlwaysReturnNone: alwaysReturnNone\n  8 thisMakesNoProgress: None; [_ || +Parser.next; Some]\n\n  Termination Analysis for parseListIntO\n\n  Termination Analysis for testAlwaysReturnNone\n\n  Termination Analysis for parseIntOWrapper\n\n  Termination Analysis for thisMakesNoProgress\n\n  Function Table\n  1 f: 
[g; _ || _ || +Parser.next; f]\n  2 g: +Parser.next; gParam<g:g>\n  3 gParam<g>: [g; _ || f]\n\n  Termination Analysis for f\n\n  Function Table\n  1 concat<f, g>: switch f {some: switch g {some: Some, none: None}, none: None}\n  2 kleene<f>: switch f {some: kleene<f:f>, none: _}\n  3 one: [+Parser.next; Some || None]\n  4 oneTwo: concat<f:one,g:two>\n  5 oneTwoStar: kleene<f:oneTwo>\n  6 two: [+Parser.next; Some || None]\n\n  Termination Analysis for oneTwoStar\n\n  Function Table\n  1 testTry: [+progress; testTry || +progress; testTry]\n\n  Termination Analysis for testTry\n\n  Termination Analysis Stats\n  Files:1\n  Recursive Blocks:21\n  Functions:49\n  Infinite Loops:10\n  Hygiene Errors:1\n  Cache Hits:8/31\n  \n\n  Error Termination\n  TestCyberTruck.res:29:28-39\n  Possible infinite loop when calling alwaysLoop\n  CallStack:\n    1 alwaysLoop (TestCyberTruck.res 29)\n\n  Error Termination\n  TestCyberTruck.res:40:3-28\n  Possible infinite loop when calling alwaysProgressWrongOrder\n  CallStack:\n    1 alwaysProgressWrongOrder (TestCyberTruck.res 39)\n\n  Error Hygiene\n  TestCyberTruck.res:47:15-24\n  doNotAlias can only be called directly, or passed as labeled argument\n\n  Error Termination\n  TestCyberTruck.res:68:3-24\n  Possible infinite loop when calling progressOnOneBranch\n  CallStack:\n    1 progressOnOneBranch (TestCyberTruck.res 64)\n\n  Error Termination\n  TestCyberTruck.res:80:48-63\n  Possible infinite loop when calling parseFunction which is testParametricFunction\n  CallStack:\n    3 callParseFunction<parseFunction:testParametricFunction> (TestCyberTruck.res 79)\n    2 testParametricFunction2 (TestCyberTruck.res 77)\n    1 testParametricFunction (TestCyberTruck.res 73)\n\n  Error Termination\n  TestCyberTruck.res:89:3-17\n  Possible infinite loop when calling testCacheHit\n  CallStack:\n    1 testCacheHit (TestCyberTruck.res 83)\n\n  Error Termination\n  TestCyberTruck.res:97:31-58\n  Possible infinite loop when calling 
evalOrderIsNotLeftToRight\n  CallStack:\n    1 evalOrderIsNotLeftToRight (TestCyberTruck.res 95)\n\n  Error Termination\n  TestCyberTruck.res:104:19-46\n  Possible infinite loop when calling evalOrderIsNotRightToLeft\n  CallStack:\n    1 evalOrderIsNotRightToLeft (TestCyberTruck.res 102)\n\n  Error Termination\n  TestCyberTruck.res:180:15-21\n  Possible infinite loop when calling parseList$loop<f:f> which is parseList$loop<f:parseListInt>\n  CallStack:\n    3 parseList$loop<f:parseListInt> (TestCyberTruck.res 183)\n    2 parseList<f:parseListInt> (TestCyberTruck.res 201)\n    1 parseListListInt (TestCyberTruck.res 201)\n\n  Error Termination\n  TestCyberTruck.res:238:31-49\n  Possible infinite loop when calling loopAfterProgress\n  CallStack:\n    1 loopAfterProgress (TestCyberTruck.res 236)\n\n  Error Termination\n  TestCyberTruck.res:286:32-61\n  Possible infinite loop when calling countRendersCompiled\n  CallStack:\n    1 countRendersCompiled (TestCyberTruck.res 283)\n  \n  Analysis reported 11 issues (Error Hygiene:1, Error Termination:10)\n"
  },
  {
    "path": "analysis/reanalyze/examples/termination/package.json",
    "content": "{\n  \"name\": \"termination\",\n  \"version\": \"0.1.0\",\n  \"private\": true,\n  \"devDependencies\": {\n    \"rescript\": \"^10.1.2\"\n  }\n}\n"
  },
  {
    "path": "analysis/reanalyze/examples/termination/src/TestCyberTruck.bs.js",
    "content": "// Generated by ReScript, PLEASE EDIT WITH CARE\n\nimport * as List from \"rescript/lib/es6/list.js\";\nimport * as Curry from \"rescript/lib/es6/curry.js\";\nimport * as Random from \"rescript/lib/es6/random.js\";\nimport * as Caml_obj from \"rescript/lib/es6/caml_obj.js\";\nimport * as Caml_option from \"rescript/lib/es6/caml_option.js\";\nimport * as Caml_js_exceptions from \"rescript/lib/es6/caml_js_exceptions.js\";\n\nvar counter = {\n  contents: Random.$$int(100)\n};\n\nfunction progress(param) {\n  if (counter.contents < 0) {\n    throw {\n          RE_EXN_ID: \"Assert_failure\",\n          _1: [\n            \"TestCyberTruck.res\",\n            8,\n            6\n          ],\n          Error: new Error()\n        };\n  }\n  counter.contents = counter.contents - 1 | 0;\n}\n\nvar Nested = {\n  f: progress\n};\n\nvar Progress = {\n  Nested: Nested\n};\n\nfunction justReturn(param) {\n  \n}\n\nfunction alwaysLoop(_param) {\n  while(true) {\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction alwaysProgress(_param) {\n  while(true) {\n    progress(undefined);\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction alwaysProgressWrongOrder(param) {\n  alwaysProgressWrongOrder(undefined);\n  progress(undefined);\n}\n\nfunction doNotAlias(_param) {\n  while(true) {\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction progressOnBothBranches(x) {\n  while(true) {\n    progress(undefined);\n    continue ;\n  };\n}\n\nfunction progressOnOneBranch(x) {\n  while(true) {\n    if (x > 3) {\n      progress(undefined);\n    }\n    continue ;\n  };\n}\n\nfunction testParametricFunction(x) {\n  while(true) {\n    if (x > 3) {\n      progress(undefined);\n    }\n    continue ;\n  };\n}\n\nvar testParametricFunction2 = testParametricFunction;\n\nfunction callParseFunction(x, parseFunction) {\n  return Curry._1(parseFunction, x);\n}\n\nfunction testCacheHit(x) {\n  while(true) {\n    if (x > 0) {\n      progress(undefined);\n    }\n    
continue ;\n  };\n}\n\nfunction doNothing(param) {\n  \n}\n\nfunction evalOrderIsNotLeftToRight(x) {\n  evalOrderIsNotLeftToRight(x);\n  progress(undefined);\n}\n\nfunction evalOrderIsNotRightToLeft(x) {\n  progress(undefined);\n  evalOrderIsNotRightToLeft(x);\n}\n\nfunction butFirstArgumentIsAlwaysEvaluated(x) {\n  while(true) {\n    progress(undefined);\n    continue ;\n  };\n}\n\nfunction butSecondArgumentIsAlwaysEvaluated(x) {\n  while(true) {\n    progress(undefined);\n    continue ;\n  };\n}\n\nfunction tokenToString(token) {\n  if (typeof token !== \"number\") {\n    return String(token._0);\n  }\n  switch (token) {\n    case /* Asterisk */0 :\n        return \"*\";\n    case /* Eof */1 :\n        return \"Eof\";\n    case /* Lparen */2 :\n        return \"(\";\n    case /* Plus */3 :\n        return \"+\";\n    case /* Rparen */4 :\n        return \")\";\n    \n  }\n}\n\nfunction next(p) {\n  p.token = Random.bool(undefined) ? /* Eof */1 : /* Int */({\n        _0: Random.$$int(1000)\n      });\n  p.position = {\n    lnum: Random.$$int(1000),\n    cnum: Random.$$int(80)\n  };\n}\n\nfunction err(p, s) {\n  p.errors = {\n    hd: s,\n    tl: p.errors\n  };\n}\n\nfunction expect(p, token) {\n  if (Caml_obj.equal(p.token, token)) {\n    return next(p);\n  } else {\n    return err(p, \"expected token \" + tokenToString(p.token));\n  }\n}\n\nvar Parser = {\n  tokenToString: tokenToString,\n  next: next,\n  err: err,\n  expect: expect\n};\n\nvar Expr = {};\n\nfunction parseList(p, f) {\n  var loop = function (p) {\n    if (p.token === /* Asterisk */0) {\n      return /* [] */0;\n    }\n    var item = Curry._1(f, p);\n    var l = loop(p);\n    return {\n            hd: item,\n            tl: l\n          };\n  };\n  return loop(p);\n}\n\nfunction $$parseInt(p) {\n  var n = p.token;\n  var res = typeof n === \"number\" ? 
(err(p, \"integer expected\"), -1) : n._0;\n  next(p);\n  return res;\n}\n\nfunction parseExpression(xOpt, p) {\n  var match = p.token;\n  if (match !== 2) {\n    return {\n            TAG: /* Int */0,\n            _0: $$parseInt(p)\n          };\n  }\n  next(p);\n  var e1 = parseExpression(undefined, p);\n  expect(p, /* Plus */3);\n  var e2 = parseExpression(undefined, p);\n  expect(p, /* Lparen */2);\n  return {\n          TAG: /* Plus */1,\n          _0: e1,\n          _1: e2\n        };\n}\n\nfunction parseListInt(p) {\n  return parseList(p, $$parseInt);\n}\n\nfunction parseListListInt(p) {\n  return parseList(p, parseListInt);\n}\n\nfunction parseListExpression(p) {\n  return parseList(p, (function (eta) {\n                return parseExpression(undefined, eta);\n              }));\n}\n\nfunction parseListExpression2(p) {\n  var partial_arg = 7;\n  return parseList(p, (function (param) {\n                return parseExpression(partial_arg, param);\n              }));\n}\n\nfunction parseListIntTailRecursive(p) {\n  var _l = /* [] */0;\n  while(true) {\n    var l = _l;\n    if (p.token === /* Asterisk */0) {\n      return List.rev(l);\n    }\n    _l = {\n      hd: $$parseInt(p),\n      tl: l\n    };\n    continue ;\n  };\n}\n\nfunction loopAfterProgress(_param) {\n  while(true) {\n    _param = undefined;\n    continue ;\n  };\n}\n\nfunction testLoopAfterProgress(param) {\n  progress(undefined);\n  return loopAfterProgress(undefined);\n}\n\nfunction nothing(param) {\n  \n}\n\nfunction div(text, onClick) {\n  throw {\n        RE_EXN_ID: \"Assert_failure\",\n        _1: [\n          \"TestCyberTruck.res\",\n          250,\n          38\n        ],\n        Error: new Error()\n      };\n}\n\nfunction initState(n) {\n  if (n === 0) {\n    return 42;\n  }\n  \n}\n\nfunction increment(n) {\n  return n + 1 | 0;\n}\n\nfunction incrementOnClick(setState, param) {\n  Curry._1(setState, increment);\n}\n\nfunction counter$1(state, setState) {\n  Curry._1(setState, 
initState);\n  return div(String(state), (function (param) {\n                Curry._1(setState, increment);\n              }));\n}\n\nfunction counterCompiled(state) {\n  var newState = initState(state);\n  if (newState !== undefined) {\n    counterCompiled(newState);\n  }\n  String(state);\n}\n\nfunction onClick1(state) {\n  counterCompiled(state + 1 | 0);\n}\n\nfunction countRenders(state, setState) {\n  Curry._1(setState, increment);\n  return div(\"I have been rendered \" + (String(state) + \" times\"), nothing);\n}\n\nfunction countRendersCompiled(state) {\n  var newState = state + 1 | 0;\n  countRendersCompiled(newState);\n  \"I have been rendered \" + (String(state) + \" times\");\n}\n\nvar UITermination = {\n  nothing: nothing,\n  div: div,\n  initState: initState,\n  increment: increment,\n  incrementOnClick: incrementOnClick,\n  counter: counter$1,\n  counterCompiled: counterCompiled,\n  onClick1: onClick1,\n  countRenders: countRenders,\n  countRendersCompiled: countRendersCompiled\n};\n\nfunction parseListO(p, f) {\n  var _nodes = /* [] */0;\n  while(true) {\n    var nodes = _nodes;\n    if (p.token === /* Asterisk */0) {\n      next(p);\n      return /* [] */0;\n    }\n    var item = Curry._1(f, p);\n    if (item === undefined) {\n      return List.rev(nodes);\n    }\n    _nodes = {\n      hd: Caml_option.valFromOption(item),\n      tl: nodes\n    };\n    continue ;\n  };\n}\n\nfunction parseIntO(p) {\n  var n = p.token;\n  if (typeof n === \"number\") {\n    err(p, \"integer expected\");\n    return ;\n  } else {\n    next(p);\n    return n._0;\n  }\n}\n\nfunction alwaysReturnNone(p) {\n  while(true) {\n    var match = p.token;\n    if (typeof match === \"number\") {\n      return ;\n    }\n    next(p);\n    continue ;\n  };\n}\n\nfunction parseListIntO(p) {\n  return parseListO(p, parseIntO);\n}\n\nvar testAlwaysReturnNone = alwaysReturnNone;\n\nvar parseIntOWrapper = parseIntO;\n\nfunction thisMakesNoProgress(p, y) {\n  if (y !== undefined) {\n    
return ;\n  } else {\n    next(p);\n    return 10;\n  }\n}\n\nvar ParserWihtOptionals = {\n  parseListO: parseListO,\n  parseIntO: parseIntO,\n  parseListIntO: parseListIntO,\n  alwaysReturnNone: alwaysReturnNone,\n  testAlwaysReturnNone: testAlwaysReturnNone,\n  parseIntOWrapper: parseIntOWrapper,\n  thisMakesNoProgress: thisMakesNoProgress\n};\n\nfunction f(p) {\n  while(true) {\n    var i = p.token;\n    if (typeof i !== \"number\") {\n      return g(p) + i._0 | 0;\n    }\n    if (i === 1) {\n      return 0;\n    }\n    next(p);\n    continue ;\n  };\n}\n\nfunction gParam(p, g) {\n  var i = p.token;\n  if (typeof i === \"number\") {\n    return f(p);\n  } else {\n    return Curry._1(g, p) + i._0 | 0;\n  }\n}\n\nfunction g(p) {\n  next(p);\n  return gParam(p, g);\n}\n\nvar Riddle = {\n  f: f,\n  gParam: gParam,\n  g: g\n};\n\nfunction f$1(p) {\n  while(true) {\n    next(p);\n    next(p);\n    continue ;\n  };\n}\n\nfunction g$1(p) {\n  while(true) {\n    next(p);\n    next(p);\n    continue ;\n  };\n}\n\nfunction kleene0(f, p) {\n  while(true) {\n    Curry._1(f, p);\n    continue ;\n  };\n}\n\nfunction union(f, g, p) {\n  var x = Curry._1(f, p);\n  if (x !== undefined) {\n    return Caml_option.valFromOption(x);\n  } else {\n    return Curry._1(g, p);\n  }\n}\n\nfunction concat(f, g, p) {\n  var x = Curry._1(f, p);\n  if (x === undefined) {\n    return ;\n  }\n  var y = Curry._1(g, p);\n  if (y !== undefined) {\n    return x + y;\n  }\n  \n}\n\nfunction kleene(f, p) {\n  var x = Curry._1(f, p);\n  if (x !== undefined) {\n    return {\n            hd: x,\n            tl: kleene(f, p)\n          };\n  } else {\n    return /* [] */0;\n  }\n}\n\nfunction two(p) {\n  var match = p.token;\n  if (typeof match === \"number\" || match._0 !== 2) {\n    return ;\n  } else {\n    next(p);\n    return \"2\";\n  }\n}\n\nfunction one(p) {\n  var match = p.token;\n  if (typeof match === \"number\" || match._0 !== 1) {\n    return ;\n  } else {\n    next(p);\n    return \"1\";\n  
}\n}\n\nfunction oneTwo(p) {\n  return concat(one, two, p);\n}\n\nfunction oneTwoStar(p) {\n  return kleene(oneTwo, p);\n}\n\nvar TerminationTypes = {\n  f: f$1,\n  g: g$1,\n  kleene0: kleene0,\n  union: union,\n  concat: concat,\n  kleene: kleene,\n  one: one,\n  two: two,\n  oneTwo: oneTwo,\n  oneTwoStar: oneTwoStar\n};\n\nfunction testTry(_param) {\n  while(true) {\n    try {\n      throw {\n            RE_EXN_ID: \"Not_found\",\n            Error: new Error()\n          };\n    }\n    catch (raw_exn){\n      var exn = Caml_js_exceptions.internalToOCamlException(raw_exn);\n      if (exn.RE_EXN_ID === \"Not_found\") {\n        progress(undefined);\n        _param = undefined;\n        continue ;\n      }\n      [\n        undefined,\n        progress(undefined),\n        undefined\n      ];\n      _param = undefined;\n      continue ;\n    }\n  };\n}\n\nvar progress2 = progress;\n\nexport {\n  progress ,\n  progress2 ,\n  Progress ,\n  justReturn ,\n  alwaysLoop ,\n  alwaysProgress ,\n  alwaysProgressWrongOrder ,\n  doNotAlias ,\n  progressOnBothBranches ,\n  progressOnOneBranch ,\n  testParametricFunction ,\n  testParametricFunction2 ,\n  callParseFunction ,\n  testCacheHit ,\n  doNothing ,\n  evalOrderIsNotLeftToRight ,\n  evalOrderIsNotRightToLeft ,\n  butFirstArgumentIsAlwaysEvaluated ,\n  butSecondArgumentIsAlwaysEvaluated ,\n  Parser ,\n  Expr ,\n  parseList ,\n  $$parseInt ,\n  parseListInt ,\n  parseListListInt ,\n  parseExpression ,\n  parseListExpression ,\n  parseListExpression2 ,\n  parseListIntTailRecursive ,\n  testLoopAfterProgress ,\n  loopAfterProgress ,\n  UITermination ,\n  ParserWihtOptionals ,\n  Riddle ,\n  TerminationTypes ,\n  testTry ,\n}\n/* counter Not a pure module */\n"
  },
  {
    "path": "analysis/reanalyze/examples/termination/src/TestCyberTruck.res",
    "content": "@@warning(\"-39-48-27\")\n\n// A progress function will eventually terminate\nlet progress = {\n  let counter = ref(Random.int(100))\n  () => {\n    if counter.contents < 0 {\n      assert false\n    }\n    counter := counter.contents - 1\n  }\n}\n\n// Another progress function\nlet progress2 = progress\n\n// A progress function can be taken from a module\nmodule Progress = {\n  module Nested = {\n    let f = progress\n  }\n}\n\n// Need to declare at least one progress function and one recursive definition\n@progress(progress)\nlet rec justReturn = () => ()\n\n@progress(progress)\nlet rec alwaysLoop = () => alwaysLoop() // infinite loop\n\n@progress(progress)\nlet rec alwaysProgress = () => {\n  // Terminates\n  progress()\n  alwaysProgress()\n}\n\n@progress(progress)\nlet rec alwaysProgressWrongOrder = () => {\n  alwaysProgressWrongOrder()\n  progress() // Oops: this is too late\n}\n\n@progress(progress)\nlet rec doNotAlias = () => {\n  // Must not alias recursive functions\n  let alias = doNotAlias\n  alias()\n}\n\n@progress((progress, progress2))\nlet rec // Terminates as each branch makes progress\nprogressOnBothBranches = x => {\n  if x > 3 {\n    progress()\n  } else {\n    progress2()\n  }\n  progressOnBothBranches(x)\n}\n\n@progress(progress)\nlet rec // Loops as progress is only on one branch\nprogressOnOneBranch = x => {\n  if x > 3 {\n    progress()\n  }\n  progressOnOneBranch(x)\n}\n\n@progress(progress)\nlet rec // callParseFunction is parametric: it takes a parse function and calls it\ntestParametricFunction = x => {\n  if x > 3 {\n    progress()\n  }\n  testParametricFunction2(x)\n}\nand testParametricFunction2 = x => callParseFunction(x, ~parseFunction=testParametricFunction)\nand callParseFunction = (x, ~parseFunction) => parseFunction(x) // loops\n\n@progress(Progress.Nested.f)\nlet rec testCacheHit = x => {\n  if x > 0 {\n    doNothing(x)\n    doNothing(x) // this should hit the analysis cache\n    Progress.Nested.f()\n  }\n  
testCacheHit(x)\n}\nand doNothing = _ => ()\n\n@progress(progress)\nlet rec // Loops as can't rely on a specific evaluation order\nevalOrderIsNotLeftToRight = x => {\n  let combineTwoUnits = ((), ()) => ()\n  combineTwoUnits(progress(), evalOrderIsNotLeftToRight(x))\n}\n\n@progress(progress)\nlet rec // Loops as can't rely on a specific evaluation order\nevalOrderIsNotRightToLeft = x => {\n  let combineTwoUnits = ((), ()) => ()\n  combineTwoUnits(evalOrderIsNotRightToLeft(x), progress())\n}\n\n@progress(progress)\nlet rec // Terminates: all arguments are evaluated in some order\nbutFirstArgumentIsAlwaysEvaluated = x => {\n  let combineTwoUnits = ((), ()) => ()\n  combineTwoUnits(progress(), ())\n  butFirstArgumentIsAlwaysEvaluated(x)\n}\n\n@progress(progress)\nlet rec // Terminates: all arguments are evaluated in some order\nbutSecondArgumentIsAlwaysEvaluated = x => {\n  let combineTwoUnits = ((), ()) => ()\n  combineTwoUnits((), progress())\n  butSecondArgumentIsAlwaysEvaluated(x)\n}\n\nmodule Parser = {\n  type token =\n    | Asterisk\n    | Eof\n    | Lparen\n    | Int(int)\n    | Plus\n    | Rparen\n\n  type position = {\n    lnum: int,\n    cnum: int,\n  }\n\n  type t = {\n    mutable position: position,\n    mutable errors: list<string>,\n    mutable token: token,\n  }\n\n  let tokenToString = token =>\n    switch token {\n    | Asterisk => \"*\"\n    | Eof => \"Eof\"\n    | Lparen => \"(\"\n    | Int(n) => string_of_int(n)\n    | Plus => \"+\"\n    | Rparen => \")\"\n    }\n\n  let next = p => {\n    p.token = Random.bool() ? 
Eof : Int(Random.int(1000))\n    p.position = {lnum: Random.int(1000), cnum: Random.int(80)}\n  }\n\n  let err = (p, s) => p.errors = list{s, ...p.errors}\n\n  let expect = (p, token) =>\n    if p.token == token {\n      next(p)\n    } else {\n      err(p, \"expected token \" ++ tokenToString(p.token))\n    }\n}\n\nmodule Expr = {\n  type rec t =\n    | Int(int)\n    | Plus(t, t)\n}\n\nlet parseList = (p: Parser.t, ~f) => {\n  let rec loop = (p: Parser.t) =>\n    if p.token == Asterisk {\n      list{}\n    } else {\n      let item = f(p)\n      let l = loop(p)\n      list{item, ...l}\n    }\n  loop(p)\n}\n\nlet parseInt = (p: Parser.t) => {\n  let res = switch p.token {\n  | Int(n) => n\n  | _ =>\n    Parser.err(p, \"integer expected\")\n    -1\n  }\n  Parser.next(p)\n  res\n}\n\n@progress(Parser.next)\nlet rec parseListInt = p => parseList(p, ~f=parseInt)\n\n@progress\nand parseListListInt = p => parseList(p, ~f=parseListInt)\n\n@progress\nand parseExpression = (~x=4, p: Parser.t) =>\n  switch p.token {\n  | Lparen =>\n    Parser.next(p)\n    let e1 = parseExpression(p)\n    Parser.expect(p, Plus)\n    let e2 = parseExpression(p)\n    Parser.expect(p, Lparen)\n    Expr.Plus(e1, e2)\n  | _ => Expr.Int(parseInt(p))\n  }\n\n@progress\nand parseListExpression = p => parseList(p, ~f=parseExpression)\n\n@progress\nand parseListExpression2 = p => parseList(p, ~f=parseExpression(~x=7))\n\n@progress\nand parseListIntTailRecursive = p => {\n  let rec loop = (p: Parser.t, l) =>\n    if p.token == Asterisk {\n      List.rev(l)\n    } else {\n      loop(p, list{parseInt(p), ...l})\n    }\n  loop(p, list{})\n}\n\n@progress(progress)\nlet rec testLoopAfterProgress = () => {\n  progress()\n  loopAfterProgress()\n}\nand loopAfterProgress = () => loopAfterProgress()\n\nmodule UITermination = {\n  type state = int\n  type setState = (~f: state => option<state>) => unit\n\n  type onClick = unit => unit\n  type dom\n\n  let nothing: onClick = () => ()\n\n  type div = (~text: string, 
~onClick: onClick) => dom\n  let div: div = (~text, ~onClick) => assert false\n\n  let initState = n => n == 0 ? Some(42) : None\n  let increment = n => Some(n + 1)\n\n  let incrementOnClick = (~setState: setState): onClick => () => setState(~f=increment)\n\n  let counter = (state: state, ~setState: setState) => {\n    setState(~f=initState)\n    div(~text=string_of_int(state), ~onClick=() => setState(~f=increment))\n  }\n\n  @progress(initState)\n  let rec counterCompiled = (state: state) => {\n    switch initState(state) {\n    | None => ()\n    | Some(newState) => ignore(counterCompiled(newState))\n    }\n    ignore(string_of_int(state))\n  }\n\n  and onClick1 = state =>\n    switch increment(state) {\n    | None => ()\n    | Some(newState) => counterCompiled(newState)\n    }\n\n  let countRenders = (state: state, ~setState: setState) => {\n    setState(~f=increment)\n    div(~text=\"I have been rendered \" ++ (string_of_int(state) ++ \" times\"), ~onClick=nothing)\n  }\n\n  @progress(initState)\n  let rec countRendersCompiled = (state: state) => {\n    switch increment(state) {\n    | None => ()\n    | Some(newState) => ignore(countRendersCompiled(newState))\n    }\n    ignore(\"I have been rendered \" ++ (string_of_int(state) ++ \" times\"))\n  }\n}\n\nmodule ParserWihtOptionals = {\n  let parseListO = (p: Parser.t, ~f) => {\n    let rec loop = nodes =>\n      if p.token == Asterisk {\n        Parser.next(p)\n        list{}\n      } else {\n        switch f(p) {\n        | None => List.rev(nodes)\n        | Some(item) => loop(list{item, ...nodes})\n        }\n      }\n    loop(list{})\n  }\n\n  let parseIntO = (p: Parser.t) =>\n    switch p.token {\n    | Int(n) =>\n      Parser.next(p)\n      Some(n)\n    | _ =>\n      Parser.err(p, \"integer expected\")\n      None\n    }\n\n  @progress((Parser.next, Parser.next))\n  let rec parseListIntO = p => parseListO(p, ~f=parseIntO)\n\n  and alwaysReturnNone = (p: Parser.t) =>\n    switch p.token {\n    | Int(_) =>\n  
    Parser.next(p)\n      alwaysReturnNone(p)\n    | _ => None\n    }\n\n  @progress\n  and testAlwaysReturnNone = p => alwaysReturnNone(p)\n\n  @progress\n  and parseIntOWrapper = p => parseIntO(p)\n\n  @progress\n  and thisMakesNoProgress = (p: Parser.t, y) => {\n    let x = None\n    switch y {\n    | Some(_) => x\n    | _ =>\n      Parser.next(p)\n      Some(10)\n    }\n  }\n}\n\nmodule Riddle = {\n  @progress(Parser.next)\n  let rec f = (p: Parser.t) =>\n    switch p.token {\n    | Int(i) => g(p) + i\n    | Eof => 0\n    | _ =>\n      Parser.next(p)\n      f(p)\n    }\n\n  and gParam = (p: Parser.t, ~g) =>\n    switch p.token {\n    | Int(i) => g(p) + i\n    | _ => f(p)\n    }\n\n  and g = p => {\n    Parser.next(p)\n    gParam(p, ~g)\n  }\n}\n\nmodule TerminationTypes = {\n  // p ::= P | N   (P progress, or N no progress)\n  // r ::= p | {Some:p, None:p}    (result, in case of optional specify progress separately)\n  // t ::= _ | t1=>r[f1,... fn]t2  (when called, the function makes progress or not\n  // and calls f1,...,fn without making progeess first)\n  // Abbreviations: omit empty [], and rhs _\n\n  let rec f /* _=>P[g] */ = p => g(p)\n  and g /* _=>P */ = p => {\n    Parser.next(p)\n    f(p)\n  }\n\n  let rec kleene0 /* (~f:_=>P, _) => P */ = (~f, p) => {\n    f(p)\n    kleene0(~f, p)\n  }\n\n  let union /* (~f:_=>{Some:P, None:N}, ~g:_=>{Some:P, None:N}, _) => {Some:P, None:N} */ = (\n    ~f,\n    ~g,\n    p,\n  ) =>\n    switch f(p) {\n    | None => g(p)\n    | Some(x) => x\n    }\n\n  let concat /* (~f:_=>{Some:P, None:N}, ~g:_=>{Some:P, None:N}, _) => {Some:P, None:N} */ = (\n    ~f,\n    ~g,\n    p,\n  ) =>\n    switch f(p) {\n    | None => None\n    | Some(x) =>\n      switch g(p) {\n      | None => None\n      | Some(y) => Some(x ++ y)\n      }\n    }\n\n  let rec kleene /* (~f:_=>{Some:P, None:N}, _) => N */ = (~f, p) =>\n    switch f(p) {\n    | None => list{}\n    | Some(x) => list{x, ...kleene(~f, p)}\n    }\n\n  and one /* _=>{Some:P, None:N} 
*/ = (p: Parser.t) =>\n    switch p.token {\n    | Int(1) =>\n      Parser.next(p)\n      Some(\"1\")\n    | _ => None\n    }\n\n  and two /* _=>{Some:P, None:N} */ = (p: Parser.t) =>\n    switch p.token {\n    | Int(2) =>\n      Parser.next(p)\n      Some(\"2\")\n    | _ => None\n    }\n\n  and oneTwo /* _=>{Some:P, None:N} */ = p => concat(~f=one, ~g=two, p)\n\n  @progress(Parser.next)\n  and oneTwoStar /* _=>N */ = p => kleene(~f=oneTwo, p)\n}\n\n@progress(progress)\nlet rec testTry = () => {\n  try raise(Not_found) catch {\n  | Not_found =>\n    let _ = #abc(progress())\n    testTry()\n  | _ =>\n    let _ = [(), progress(), ()]\n    testTry()\n  }\n}\n"
  },
  {
    "path": "analysis/reanalyze/examples/termination/test.sh",
    "content": "output=\"expected/termination.txt\"\ndune exec rescript-editor-analysis -- reanalyze -config -ci -debug > $output\n# CI. We use LF, and the CI OCaml fork prints CRLF. Convert.\nif [ \"$RUNNER_OS\" == \"Windows\" ]; then\n  perl -pi -e 's/\\r\\n/\\n/g' -- $output\nfi\n\nwarningYellow='\\033[0;33m'\nsuccessGreen='\\033[0;32m'\nreset='\\033[0m'\n\ndiff=$(git ls-files --modified expected)\nif [[ $diff = \"\" ]]; then\n  printf \"${successGreen}✅ No unstaged tests difference.${reset}\\n\"\nelse\n  printf \"${warningYellow}⚠️ There are unstaged differences in tests/! Did you break a test?\\n${diff}\\n${reset}\"\n  git --no-pager diff expected\n  exit 1\nfi\n"
  },
  {
    "path": "analysis/reanalyze/src/Annotation.ml",
    "content": "type attributePayload =\n  | BoolPayload of bool\n  | ConstructPayload of string\n  | FloatPayload of string\n  | IdentPayload of Longident.t\n  | IntPayload of string\n  | StringPayload of string\n  | TuplePayload of attributePayload list\n  | UnrecognizedPayload\n\nlet tagIsGenType s = s = \"genType\" || s = \"gentype\"\nlet tagIsGenTypeImport s = s = \"genType.import\" || s = \"gentype.import\"\nlet tagIsGenTypeOpaque s = s = \"genType.opaque\" || s = \"gentype.opaque\"\n\nlet tagIsOneOfTheGenTypeAnnotations s =\n  tagIsGenType s || tagIsGenTypeImport s || tagIsGenTypeOpaque s\n\nlet rec getAttributePayload checkText (attributes : Typedtree.attributes) =\n  let rec fromExpr (expr : Parsetree.expression) =\n    match expr with\n    | {pexp_desc = Pexp_constant (Pconst_string (s, _))} ->\n      Some (StringPayload s)\n    | {pexp_desc = Pexp_constant (Pconst_integer (n, _))} -> Some (IntPayload n)\n    | {pexp_desc = Pexp_constant (Pconst_float (s, _))} -> Some (FloatPayload s)\n    | {\n     pexp_desc = Pexp_construct ({txt = Lident ((\"true\" | \"false\") as s)}, _);\n     _;\n    } ->\n      Some (BoolPayload (s = \"true\"))\n    | {pexp_desc = Pexp_construct ({txt = Longident.Lident \"[]\"}, None)} -> None\n    | {pexp_desc = Pexp_construct ({txt = Longident.Lident \"::\"}, Some e)} ->\n      fromExpr e\n    | {pexp_desc = Pexp_construct ({txt}, _); _} ->\n      Some (ConstructPayload (txt |> Longident.flatten |> String.concat \".\"))\n    | {pexp_desc = Pexp_tuple exprs | Pexp_array exprs} ->\n      let payloads =\n        exprs |> List.rev\n        |> List.fold_left\n             (fun payloads expr ->\n               match expr |> fromExpr with\n               | Some payload -> payload :: payloads\n               | None -> payloads)\n             []\n      in\n      Some (TuplePayload payloads)\n    | {pexp_desc = Pexp_ident {txt}} -> Some (IdentPayload txt)\n    | _ -> None\n  in\n  match attributes with\n  | [] -> None\n  | ({Asttypes.txt}, 
payload) :: tl ->\n    if checkText txt then\n      match payload with\n      | PStr [] -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_eval (expr, _)} :: _) -> expr |> fromExpr\n      | PStr ({pstr_desc = Pstr_extension _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_value _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_primitive _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_type _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_typext _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_exception _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_module _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_recmodule _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_modtype _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_open _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_class _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_class_type _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_include _} :: _) -> Some UnrecognizedPayload\n      | PStr ({pstr_desc = Pstr_attribute _} :: _) -> Some UnrecognizedPayload\n      | PPat _ -> Some UnrecognizedPayload\n      | PSig _ -> Some UnrecognizedPayload\n      | PTyp _ -> Some UnrecognizedPayload\n    else getAttributePayload checkText tl\n\nlet hasAttribute checkText (attributes : Typedtree.attributes) =\n  getAttributePayload checkText attributes <> None\n\nlet isOcamlSuppressDeadWarning attributes =\n  match\n    attributes\n    |> getAttributePayload (fun x -> x = \"ocaml.warning\" || x = \"warning\")\n  with\n  | Some (StringPayload s) ->\n    let numeric =\n      match Str.search_forward (Str.regexp (Str.quote \"-32\")) s 0 with\n      | _ -> true\n      | exception Not_found -> false\n    in\n    let textual =\n      match\n        Str.search_forward\n          
(Str.regexp (Str.quote \"-unused-value-declaration\"))\n          s 0\n      with\n      | _ -> true\n      | exception Not_found -> false\n    in\n    numeric || textual\n  | _ -> false\n"
  },
  {
    "path": "analysis/reanalyze/src/Arnold.ml",
    "content": "let printPos ppf (pos : Lexing.position) =\n  let file = pos.Lexing.pos_fname in\n  let line = pos.Lexing.pos_lnum in\n  Format.fprintf ppf \"@{<filename>%s@} @{<dim>%i@}\"\n    (file |> Filename.basename)\n    line\n\nmodule StringSet = Set.Make (String)\n\n(** Type Definitions *)\nmodule FunctionName = struct\n  type t = string\nend\n\nmodule FunctionArgs = struct\n  type arg = {label: string; functionName: FunctionName.t}\n  type t = arg list\n\n  let empty = []\n  let argToString {label; functionName} = label ^ \":\" ^ functionName\n\n  let toString functionArgs =\n    match functionArgs = [] with\n    | true -> \"\"\n    | false ->\n      \"<\" ^ (functionArgs |> List.map argToString |> String.concat \",\") ^ \">\"\n\n  let find (t : t) ~label =\n    match t |> List.find_opt (fun arg -> arg.label = label) with\n    | Some {functionName} -> Some functionName\n    | None -> None\n\n  let compareArg a1 a2 =\n    let n = compare a1.label a2.label in\n    if n <> 0 then n else compare a1.functionName a2.functionName\n\n  let rec compare l1 l2 =\n    match (l1, l2) with\n    | [], [] -> 0\n    | [], _ :: _ -> -1\n    | _ :: _, [] -> 1\n    | x1 :: l1, x2 :: l2 ->\n      let n = compareArg x1 x2 in\n      if n <> 0 then n else compare l1 l2\nend\n\nmodule FunctionCall = struct\n  type t = {functionName: FunctionName.t; functionArgs: FunctionArgs.t}\n\n  let substituteName ~sub name =\n    match sub |> FunctionArgs.find ~label:name with\n    | Some functionName -> functionName\n    | None -> name\n\n  let applySubstitution ~(sub : FunctionArgs.t) (t : t) =\n    if sub = [] then t\n    else\n      {\n        functionName = t.functionName |> substituteName ~sub;\n        functionArgs =\n          t.functionArgs\n          |> List.map (fun (arg : FunctionArgs.arg) ->\n                 {\n                   arg with\n                   functionName = arg.functionName |> substituteName ~sub;\n                 });\n      }\n\n  let noArgs functionName = 
{functionName; functionArgs = []}\n\n  let toString {functionName; functionArgs} =\n    functionName ^ FunctionArgs.toString functionArgs\n\n  let compare (x1 : t) x2 =\n    let n = compare x1.functionName x2.functionName in\n    if n <> 0 then n else FunctionArgs.compare x1.functionArgs x2.functionArgs\nend\n\nmodule FunctionCallSet = Set.Make (FunctionCall)\n\nmodule Stats = struct\n  let nCacheChecks = ref 0\n  let nCacheHits = ref 0\n  let nFiles = ref 0\n  let nFunctions = ref 0\n  let nHygieneErrors = ref 0\n  let nInfiniteLoops = ref 0\n  let nRecursiveBlocks = ref 0\n\n  let print ppf () =\n    Format.fprintf ppf \"@[<v 2>@,@{<warning>Termination Analysis Stats@}@,\";\n    Format.fprintf ppf \"Files:@{<dim>%d@}@,\" !nFiles;\n    Format.fprintf ppf \"Recursive Blocks:@{<dim>%d@}@,\" !nRecursiveBlocks;\n    Format.fprintf ppf \"Functions:@{<dim>%d@}@,\" !nFunctions;\n    Format.fprintf ppf \"Infinite Loops:@{<dim>%d@}@,\" !nInfiniteLoops;\n    Format.fprintf ppf \"Hygiene Errors:@{<dim>%d@}@,\" !nHygieneErrors;\n    Format.fprintf ppf \"Cache Hits:@{<dim>%d@}/@{<dim>%d@}@,\" !nCacheHits\n      !nCacheChecks;\n    Format.fprintf ppf \"@]\"\n\n  let dump ~ppf = Format.fprintf ppf \"%a@.\" print ()\n  let newFile () = incr nFiles\n\n  let newRecursiveFunctions ~numFunctions =\n    incr nRecursiveBlocks;\n    nFunctions := !nFunctions + numFunctions\n\n  let logLoop () = incr nInfiniteLoops\n\n  let logCache ~functionCall ~hit ~loc =\n    incr nCacheChecks;\n    if hit then incr nCacheHits;\n    if !Common.Cli.debug then\n      Log_.warning ~forStats:false ~loc\n        (Termination\n           {\n             termination = TerminationAnalysisInternal;\n             message =\n               Format.asprintf \"Cache %s for @{<info>%s@}\"\n                 (match hit with\n                 | true -> \"hit\"\n                 | false -> \"miss\")\n                 (FunctionCall.toString functionCall);\n           })\n\n  let logResult ~functionCall ~loc ~resString 
=\n    if !Common.Cli.debug then\n      Log_.warning ~forStats:false ~loc\n        (Termination\n           {\n             termination = TerminationAnalysisInternal;\n             message =\n               Format.asprintf \"@{<info>%s@} returns %s\"\n                 (FunctionCall.toString functionCall)\n                 resString;\n           })\n\n  let logHygieneParametric ~functionName ~loc =\n    incr nHygieneErrors;\n    Log_.error ~loc\n      (Termination\n         {\n           termination = ErrorHygiene;\n           message =\n             Format.asprintf\n               \"@{<error>%s@} cannot be analyzed directly as it is parametric\"\n               functionName;\n         })\n\n  let logHygieneOnlyCallDirectly ~path ~loc =\n    incr nHygieneErrors;\n    Log_.error ~loc\n      (Termination\n         {\n           termination = ErrorHygiene;\n           message =\n             Format.asprintf\n               \"@{<error>%s@} can only be called directly, or passed as \\\n                labeled argument\"\n               (Path.name path);\n         })\n\n  let logHygieneMustHaveNamedArgument ~label ~loc =\n    incr nHygieneErrors;\n    Log_.error ~loc\n      (Termination\n         {\n           termination = ErrorHygiene;\n           message =\n             Format.asprintf \"Call must have named argument @{<error>%s@}\" label;\n         })\n\n  let logHygieneNamedArgValue ~label ~loc =\n    incr nHygieneErrors;\n    Log_.error ~loc\n      (Termination\n         {\n           termination = ErrorHygiene;\n           message =\n             Format.asprintf\n               \"Named argument @{<error>%s@} must be passed a recursive \\\n                function\"\n               label;\n         })\n\n  let logHygieneNoNestedLetRec ~loc =\n    incr nHygieneErrors;\n    Log_.error ~loc\n      (Termination\n         {\n           termination = ErrorHygiene;\n           message = Format.asprintf \"Nested multiple let rec not supported yet\";\n         
})\nend\n\nmodule Progress = struct\n  type t = Progress | NoProgress\n\n  let toString progress =\n    match progress = Progress with\n    | true -> \"Progress\"\n    | false -> \"NoProgress\"\nend\n\nmodule Call = struct\n  type progressFunction = Path.t\n\n  type t =\n    | FunctionCall of FunctionCall.t\n    | ProgressFunction of progressFunction\n\n  let toString call =\n    match call with\n    | ProgressFunction progressFunction -> \"+\" ^ Path.name progressFunction\n    | FunctionCall functionCall -> FunctionCall.toString functionCall\nend\n\nmodule Trace = struct\n  type retOption = Rsome | Rnone\n\n  type t =\n    | Tcall of Call.t * Progress.t\n    | Tnondet of t list\n    | Toption of retOption\n    | Tseq of t list\n\n  let empty = Tseq []\n\n  let nd (t1 : t) (t2 : t) : t =\n    match (t1, t2) with\n    | Tnondet l1, Tnondet l2 -> Tnondet (l1 @ l2)\n    | _, Tnondet l2 -> Tnondet (t1 :: l2)\n    | Tnondet l1, _ -> Tnondet (l1 @ [t2])\n    | _ -> Tnondet [t1; t2]\n\n  let seq (t1 : t) (t2 : t) : t =\n    match (t1, t2) with\n    | Tseq l1, Tseq l2 -> Tseq (l1 @ l2)\n    | _, Tseq l2 -> Tseq (t1 :: l2)\n    | Tseq l1, _ -> Tseq (l1 @ [t2])\n    | _ -> Tseq [t1; t2]\n\n  let some = Toption Rsome\n  let none = Toption Rnone\n\n  let retOptionToString r =\n    match r = Rsome with\n    | true -> \"Some\"\n    | false -> \"None\"\n\n  let rec toString trace =\n    match trace with\n    | Tcall (ProgressFunction progressFunction, progress) ->\n      Path.name progressFunction ^ \":\" ^ Progress.toString progress\n    | Tcall (FunctionCall functionCall, progress) ->\n      FunctionCall.toString functionCall ^ \":\" ^ Progress.toString progress\n    | Tnondet traces ->\n      \"[\" ^ (traces |> List.map toString |> String.concat \" || \") ^ \"]\"\n    | Toption retOption -> retOption |> retOptionToString\n    | Tseq traces -> (\n      let tracesNotEmpty = traces |> List.filter (( <> ) empty) in\n      match tracesNotEmpty with\n      | [] -> \"_\"\n      | [t] 
-> t |> toString\n      | _ :: _ -> tracesNotEmpty |> List.map toString |> String.concat \"; \")\nend\n\nmodule Values : sig\n  type t\n\n  val getNone : t -> Progress.t option\n  val getSome : t -> Progress.t option\n  val nd : t -> t -> t\n  val none : progress:Progress.t -> t\n  val some : progress:Progress.t -> t\n  val toString : t -> string\nend = struct\n  type t = {none: Progress.t option; some: Progress.t option}\n\n  let getNone {none} = none\n  let getSome {some} = some\n\n  let toString x =\n    ((match x.some with\n     | None -> []\n     | Some p -> [\"some: \" ^ Progress.toString p])\n    @\n    match x.none with\n    | None -> []\n    | Some p -> [\"none: \" ^ Progress.toString p])\n    |> String.concat \", \"\n\n  let none ~progress = {none = Some progress; some = None}\n  let some ~progress = {none = None; some = Some progress}\n\n  let nd (v1 : t) (v2 : t) : t =\n    let combine x y =\n      match (x, y) with\n      | Some progress1, Some progress2 ->\n        Some\n          (match progress1 = Progress.Progress && progress2 = Progress with\n          | true -> Progress.Progress\n          | false -> NoProgress)\n      | None, progressOpt | progressOpt, None -> progressOpt\n    in\n    let none = combine v1.none v2.none in\n    let some = combine v1.some v2.some in\n    {none; some}\nend\n\nmodule State = struct\n  type t = {progress: Progress.t; trace: Trace.t; valuesOpt: Values.t option}\n\n  let toString {progress; trace; valuesOpt} =\n    let progressStr =\n      match valuesOpt with\n      | None -> progress |> Progress.toString\n      | Some values -> \"{\" ^ (values |> Values.toString) ^ \"}\"\n    in\n    progressStr ^ \" with trace \" ^ Trace.toString trace\n\n  let init ?(progress = Progress.NoProgress) ?(trace = Trace.empty)\n      ?(valuesOpt = None) () =\n    {progress; trace; valuesOpt}\n\n  let seq s1 s2 =\n    let progress =\n      match s1.progress = Progress || s2.progress = Progress with\n      | true -> Progress.Progress\n     
 | false -> NoProgress\n    in\n    let trace = Trace.seq s1.trace s2.trace in\n    let valuesOpt = s2.valuesOpt in\n    {progress; trace; valuesOpt}\n\n  let sequence states =\n    match states with\n    | [] -> assert false\n    | s :: nextStates -> List.fold_left seq s nextStates\n\n  let nd s1 s2 =\n    let progress =\n      match s1.progress = Progress && s2.progress = Progress with\n      | true -> Progress.Progress\n      | false -> NoProgress\n    in\n    let trace = Trace.nd s1.trace s2.trace in\n    let valuesOpt =\n      match (s1.valuesOpt, s2.valuesOpt) with\n      | None, valuesOpt -> (\n        match s1.progress = Progress with\n        | true -> valuesOpt\n        | false -> None)\n      | valuesOpt, None -> (\n        match s2.progress = Progress with\n        | true -> valuesOpt\n        | false -> None)\n      | Some values1, Some values2 -> Some (Values.nd values1 values2)\n    in\n    {progress; trace; valuesOpt}\n\n  let nondet states =\n    match states with\n    | [] -> assert false\n    | s :: nextStates -> List.fold_left nd s nextStates\n\n  let unorderedSequence states = {(states |> sequence) with valuesOpt = None}\n\n  let none ~progress =\n    init ~progress ~trace:Trace.none\n      ~valuesOpt:(Some (Values.none ~progress))\n      ()\n\n  let some ~progress =\n    init ~progress ~trace:Trace.some\n      ~valuesOpt:(Some (Values.some ~progress))\n      ()\nend\n\nmodule Command = struct\n  type progress = Progress.t\n  type retOption = Trace.retOption\n\n  type t =\n    | Call of Call.t * Location.t\n    | ConstrOption of retOption\n    | Nondet of t list\n    | Nothing\n    | Sequence of t list\n    | SwitchOption of {\n        functionCall: FunctionCall.t;\n        loc: Location.t;\n        some: t;\n        none: t;\n      }\n    | UnorderedSequence of t list\n\n  let rec toString command =\n    match command with\n    | Call (call, _pos) -> call |> Call.toString\n    | ConstrOption r -> r |> Trace.retOptionToString\n    | Nondet 
commands ->\n      \"[\" ^ (commands |> List.map toString |> String.concat \" || \") ^ \"]\"\n    | Nothing -> \"_\"\n    | Sequence commands -> commands |> List.map toString |> String.concat \"; \"\n    | SwitchOption {functionCall; some = cSome; none = cNone} ->\n      \"switch \"\n      ^ FunctionCall.toString functionCall\n      ^ \" {some: \" ^ toString cSome ^ \", none: \" ^ toString cNone ^ \"}\"\n    | UnorderedSequence commands ->\n      \"{\" ^ (commands |> List.map toString |> String.concat \", \") ^ \"}\"\n\n  let nothing = Nothing\n\n  let nondet commands =\n    let rec loop commands =\n      match commands with\n      | [] -> nothing\n      | Nondet commands :: rest -> loop (commands @ rest)\n      | [command] -> command\n      | _ -> Nondet commands\n    in\n    loop commands\n\n  let sequence commands =\n    let rec loop acc commands =\n      match commands with\n      | [] -> List.rev acc\n      | Nothing :: cs when cs <> [] -> loop acc cs\n      | Sequence cs1 :: cs2 -> loop acc (cs1 @ cs2)\n      | c :: cs -> loop (c :: acc) cs\n    in\n    match loop [] commands with\n    | [c] -> c\n    | cs -> Sequence cs\n\n  let ( +++ ) c1 c2 = sequence [c1; c2]\n\n  let unorderedSequence commands =\n    let relevantCommands = commands |> List.filter (fun x -> x <> nothing) in\n    match relevantCommands with\n    | [] -> nothing\n    | [c] -> c\n    | _ :: _ :: _ -> UnorderedSequence relevantCommands\nend\n\nmodule Kind = struct\n  type t = entry list\n  and entry = {label: string; k: t}\n\n  let empty = ([] : t)\n\n  let hasLabel ~label (k : t) =\n    k |> List.exists (fun entry -> entry.label = label)\n\n  let rec entryToString {label; k} =\n    match k = [] with\n    | true -> label\n    | false -> label ^ \":\" ^ (k |> toString)\n\n  and toString (kind : t) =\n    match kind = [] with\n    | true -> \"\"\n    | false ->\n      \"<\" ^ (kind |> List.map entryToString |> String.concat \", \") ^ \">\"\n\n  let addLabelWithEmptyKind ~label kind =\n    if 
not (kind |> hasLabel ~label) then\n      {label; k = empty} :: kind |> List.sort compare\n    else kind\nend\n\nmodule FunctionTable = struct\n  type functionDefinition = {\n    mutable body: Command.t option;\n    mutable kind: Kind.t;\n  }\n\n  type t = (FunctionName.t, functionDefinition) Hashtbl.t\n\n  let create () : t = Hashtbl.create 1\n\n  let print ppf (tbl : t) =\n    Format.fprintf ppf \"@[<v 2>@,@{<warning>Function Table@}\";\n    let definitions =\n      Hashtbl.fold\n        (fun functionName {kind; body} definitions ->\n          (functionName, kind, body) :: definitions)\n        tbl []\n      |> List.sort (fun (fn1, _, _) (fn2, _, _) -> String.compare fn1 fn2)\n    in\n    definitions\n    |> List.iteri (fun i (functionName, kind, body) ->\n           Format.fprintf ppf \"@,@{<dim>%d@} @{<info>%s%s@}: %s\" (i + 1)\n             functionName (Kind.toString kind)\n             (match body with\n             | Some command -> Command.toString command\n             | None -> \"None\"));\n    Format.fprintf ppf \"@]\"\n\n  let dump tbl = Format.fprintf Format.std_formatter \"%a@.\" print tbl\n  let initialFunctionDefinition () = {kind = Kind.empty; body = None}\n\n  let getFunctionDefinition ~functionName (tbl : t) =\n    try Hashtbl.find tbl functionName with Not_found -> assert false\n\n  let isInFunctionInTable ~functionTable path =\n    Hashtbl.mem functionTable (Path.name path)\n\n  let addFunction ~functionName (tbl : t) =\n    if Hashtbl.mem tbl functionName then assert false;\n    Hashtbl.replace tbl functionName (initialFunctionDefinition ())\n\n  let addLabelToKind ~functionName ~label (tbl : t) =\n    let functionDefinition = tbl |> getFunctionDefinition ~functionName in\n    functionDefinition.kind <-\n      functionDefinition.kind |> Kind.addLabelWithEmptyKind ~label\n\n  let addBody ~body ~functionName (tbl : t) =\n    let functionDefinition = tbl |> getFunctionDefinition ~functionName in\n    functionDefinition.body <- body\n\n  let 
functionGetKindOfLabel ~functionName ~label (tbl : t) =\n    match Hashtbl.find tbl functionName with\n    | {kind} -> (\n      match kind |> Kind.hasLabel ~label with\n      | true -> Some Kind.empty\n      | false -> None)\n    | exception Not_found -> None\nend\n\nmodule FindFunctionsCalled = struct\n  let traverseExpr ~callees =\n    let super = Tast_mapper.default in\n    let expr (self : Tast_mapper.mapper) (e : Typedtree.expression) =\n      (match e.exp_desc with\n      | Texp_apply ({exp_desc = Texp_ident (callee, _, _)}, _args) ->\n        let functionName = Path.name callee in\n        callees := !callees |> StringSet.add functionName\n      | _ -> ());\n      super.expr self e\n    in\n    {super with Tast_mapper.expr}\n\n  let findCallees (expression : Typedtree.expression) =\n    let isFunction =\n      match expression.exp_desc with\n      | Texp_function _ -> true\n      | _ -> false\n    in\n    let callees = ref StringSet.empty in\n    let traverseExpr = traverseExpr ~callees in\n    if isFunction then expression |> traverseExpr.expr traverseExpr |> ignore;\n    !callees\nend\n\nmodule ExtendFunctionTable = struct\n  (* Add functions passed a recursive function via a labeled argument,\n     and functions calling progress functions, to the function table. 
*)\n  let extractLabelledArgument ?(kindOpt = None)\n      (argOpt : Typedtree.expression option) =\n    match argOpt with\n    | Some {exp_desc = Texp_ident (path, {loc}, _)} -> Some (path, loc)\n    | Some\n        {\n          exp_desc =\n            Texp_let\n              ( Nonrecursive,\n                [\n                  {\n                    vb_pat = {pat_desc = Tpat_var (_, _)};\n                    vb_expr = {exp_desc = Texp_ident (path, {loc}, _)};\n                    vb_loc = {loc_ghost = true};\n                  };\n                ],\n                _ );\n        } ->\n      Some (path, loc)\n    | Some\n        {exp_desc = Texp_apply ({exp_desc = Texp_ident (path, {loc}, _)}, args)}\n      when kindOpt <> None ->\n      let checkArg ((argLabel : Asttypes.arg_label), _argOpt) =\n        match (argLabel, kindOpt) with\n        | (Labelled l | Optional l), Some kind ->\n          kind |> List.for_all (fun {Kind.label} -> label <> l)\n        | _ -> true\n      in\n      if args |> List.for_all checkArg then Some (path, loc) else None\n    | _ -> None\n\n  let traverseExpr ~functionTable ~progressFunctions ~valueBindingsTable =\n    let super = Tast_mapper.default in\n    let expr (self : Tast_mapper.mapper) (e : Typedtree.expression) =\n      (match e.exp_desc with\n      | Texp_ident (callee, _, _) -> (\n        let loc = e.exp_loc in\n        match Hashtbl.find_opt valueBindingsTable (Path.name callee) with\n        | None -> ()\n        | Some (id_pos, _, callees) ->\n          if\n            not\n              (StringSet.is_empty\n                 (StringSet.inter (Lazy.force callees) progressFunctions))\n          then\n            let functionName = Path.name callee in\n            if not (callee |> FunctionTable.isInFunctionInTable ~functionTable)\n            then (\n              functionTable |> FunctionTable.addFunction ~functionName;\n              if !Common.Cli.debug then\n                Log_.warning ~forStats:false ~loc\n          
        (Termination\n                     {\n                       termination = TerminationAnalysisInternal;\n                       message =\n                         Format.asprintf\n                           \"Extend Function Table with @{<info>%s@} (%a) as it \\\n                            calls a progress function\"\n                           functionName printPos id_pos;\n                     })))\n      | Texp_apply ({exp_desc = Texp_ident (callee, _, _)}, args)\n        when callee |> FunctionTable.isInFunctionInTable ~functionTable ->\n        let functionName = Path.name callee in\n        args\n        |> List.iter (fun ((argLabel : Asttypes.arg_label), argOpt) ->\n               match (argLabel, argOpt |> extractLabelledArgument) with\n               | Labelled label, Some (path, loc)\n                 when path |> FunctionTable.isInFunctionInTable ~functionTable\n                 ->\n                 functionTable\n                 |> FunctionTable.addLabelToKind ~functionName ~label;\n                 if !Common.Cli.debug then\n                   Log_.warning ~forStats:false ~loc\n                     (Termination\n                        {\n                          termination = TerminationAnalysisInternal;\n                          message =\n                            Format.asprintf\n                              \"@{<info>%s@} is parametric \\\n                               ~@{<info>%s@}=@{<info>%s@}\"\n                              functionName label (Path.name path);\n                        })\n               | _ -> ())\n      | _ -> ());\n      super.expr self e\n    in\n    {super with Tast_mapper.expr}\n\n  let run ~functionTable ~progressFunctions ~valueBindingsTable\n      (expression : Typedtree.expression) =\n    let traverseExpr =\n      traverseExpr ~functionTable ~progressFunctions ~valueBindingsTable\n    in\n    expression |> traverseExpr.expr traverseExpr |> ignore\nend\n\nmodule CheckExpressionWellFormed = struct\n  
let traverseExpr ~functionTable ~valueBindingsTable =\n    let super = Tast_mapper.default in\n    let checkIdent ~path ~loc =\n      if path |> FunctionTable.isInFunctionInTable ~functionTable then\n        Stats.logHygieneOnlyCallDirectly ~path ~loc\n    in\n    let expr (self : Tast_mapper.mapper) (e : Typedtree.expression) =\n      match e.exp_desc with\n      | Texp_ident (path, {loc}, _) ->\n        checkIdent ~path ~loc;\n        e\n      | Texp_apply ({exp_desc = Texp_ident (functionPath, _, _)}, args) ->\n        let functionName = Path.name functionPath in\n        args\n        |> List.iter (fun ((argLabel : Asttypes.arg_label), argOpt) ->\n               match argOpt |> ExtendFunctionTable.extractLabelledArgument with\n               | Some (path, loc) -> (\n                 match argLabel with\n                 | Labelled label -> (\n                   if\n                     functionTable\n                     |> FunctionTable.functionGetKindOfLabel ~functionName\n                          ~label\n                     <> None\n                   then ()\n                   else\n                     match Hashtbl.find_opt valueBindingsTable functionName with\n                     | Some (_pos, (body : Typedtree.expression), _)\n                       when path\n                            |> FunctionTable.isInFunctionInTable ~functionTable\n                       ->\n                       let inTable =\n                         functionPath\n                         |> FunctionTable.isInFunctionInTable ~functionTable\n                       in\n                       if not inTable then\n                         functionTable\n                         |> FunctionTable.addFunction ~functionName;\n                       functionTable\n                       |> FunctionTable.addLabelToKind ~functionName ~label;\n                       if !Common.Cli.debug then\n                         Log_.warning ~forStats:false ~loc:body.exp_loc\n                    
       (Termination\n                              {\n                                termination = TerminationAnalysisInternal;\n                                message =\n                                  Format.asprintf\n                                    \"Extend Function Table with @{<info>%s@} \\\n                                     as parametric ~@{<info>%s@}=@{<info>%s@}\"\n                                    functionName label (Path.name path);\n                              })\n                     | _ -> checkIdent ~path ~loc)\n                 | Optional _ | Nolabel -> checkIdent ~path ~loc)\n               | _ -> ());\n        e\n      | _ -> super.expr self e\n    in\n    {super with Tast_mapper.expr}\n\n  let run ~functionTable ~valueBindingsTable (expression : Typedtree.expression)\n      =\n    let traverseExpr = traverseExpr ~functionTable ~valueBindingsTable in\n    expression |> traverseExpr.expr traverseExpr |> ignore\nend\n\nmodule Compile = struct\n  type ctx = {\n    currentFunctionName: FunctionName.t;\n    functionTable: FunctionTable.t;\n    innerRecursiveFunctions: (FunctionName.t, FunctionName.t) Hashtbl.t;\n    isProgressFunction: Path.t -> bool;\n  }\n\n  let rec expression ~ctx (expr : Typedtree.expression) =\n    let {currentFunctionName; functionTable; isProgressFunction} = ctx in\n    let loc = expr.exp_loc in\n    let notImplemented case =\n      Log_.error ~loc\n        (Termination\n           {termination = ErrorNotImplemented; message = Format.asprintf case})\n    in\n\n    match expr.exp_desc with\n    | Texp_ident _ -> Command.nothing\n    | Texp_apply\n        (({exp_desc = Texp_ident (calleeToRename, l, vd)} as expr), argsToExtend)\n      -> (\n      let callee, args =\n        match\n          Hashtbl.find_opt ctx.innerRecursiveFunctions\n            (Path.name calleeToRename)\n        with\n        | Some innerFunctionName ->\n          let innerFunctionDefinition =\n            functionTable\n            |> 
FunctionTable.getFunctionDefinition\n                 ~functionName:innerFunctionName\n          in\n          let argsFromKind =\n            innerFunctionDefinition.kind\n            |> List.map (fun (entry : Kind.entry) ->\n                   ( Asttypes.Labelled entry.label,\n                     Some\n                       {\n                         expr with\n                         exp_desc =\n                           Texp_ident\n                             (Path.Pident (Ident.create entry.label), l, vd);\n                       } ))\n          in\n          ( Path.Pident (Ident.create innerFunctionName),\n            argsFromKind @ argsToExtend )\n        | None -> (calleeToRename, argsToExtend)\n      in\n      if callee |> FunctionTable.isInFunctionInTable ~functionTable then\n        let functionName = Path.name callee in\n        let functionDefinition =\n          functionTable |> FunctionTable.getFunctionDefinition ~functionName\n        in\n        let exception ArgError in\n        let getFunctionArg {Kind.label} =\n          let argOpt =\n            args\n            |> List.find_opt (fun arg ->\n                   match arg with\n                   | Asttypes.Labelled s, Some _ -> s = label\n                   | _ -> false)\n          in\n          let argOpt =\n            match argOpt with\n            | Some (_, Some e) -> Some e\n            | _ -> None\n          in\n          let functionArg () =\n            match\n              argOpt\n              |> ExtendFunctionTable.extractLabelledArgument\n                   ~kindOpt:(Some functionDefinition.kind)\n            with\n            | None ->\n              Stats.logHygieneMustHaveNamedArgument ~label ~loc;\n              raise ArgError\n            | Some (path, _pos)\n              when path |> FunctionTable.isInFunctionInTable ~functionTable ->\n              let functionName = Path.name path in\n              {FunctionArgs.label; functionName}\n            | Some (path, _pos)\n 
             when functionTable\n                   |> FunctionTable.functionGetKindOfLabel\n                        ~functionName:currentFunctionName\n                        ~label:(Path.name path)\n                   = Some []\n                   (* TODO: when kinds are inferred, support and check non-empty kinds *)\n              ->\n              let functionName = Path.name path in\n              {FunctionArgs.label; functionName}\n            | _ ->\n              Stats.logHygieneNamedArgValue ~label ~loc;\n              raise ArgError\n              [@@raises ArgError]\n          in\n          functionArg ()\n            [@@raises ArgError]\n        in\n        let functionArgsOpt =\n          try Some (functionDefinition.kind |> List.map getFunctionArg)\n          with ArgError -> None\n        in\n        match functionArgsOpt with\n        | None -> Command.nothing\n        | Some functionArgs ->\n          Command.Call (FunctionCall {functionName; functionArgs}, loc)\n          |> evalArgs ~args ~ctx\n      else if callee |> isProgressFunction then\n        Command.Call (ProgressFunction callee, loc) |> evalArgs ~args ~ctx\n      else\n        match\n          functionTable\n          |> FunctionTable.functionGetKindOfLabel\n               ~functionName:currentFunctionName ~label:(Path.name callee)\n        with\n        | Some kind when kind = Kind.empty ->\n          Command.Call\n            (FunctionCall (Path.name callee |> FunctionCall.noArgs), loc)\n          |> evalArgs ~args ~ctx\n        | Some _kind ->\n          (* TODO when kinds are extended in future: check that args matches with kind\n             and create a function call with the appropriate arguments *)\n          assert false\n        | None -> expr |> expression ~ctx |> evalArgs ~args ~ctx)\n    | Texp_apply (expr, args) -> expr |> expression ~ctx |> evalArgs ~args ~ctx\n    | Texp_let\n        ( Recursive,\n          [{vb_pat = {pat_desc = Tpat_var (id, _); pat_loc}; vb_expr}],\n  
        inExpr ) ->\n      let oldFunctionName = Ident.name id in\n      let newFunctionName = currentFunctionName ^ \"$\" ^ oldFunctionName in\n      functionTable |> FunctionTable.addFunction ~functionName:newFunctionName;\n      let newFunctionDefinition =\n        functionTable\n        |> FunctionTable.getFunctionDefinition ~functionName:newFunctionName\n      in\n      let currentFunctionDefinition =\n        functionTable\n        |> FunctionTable.getFunctionDefinition ~functionName:currentFunctionName\n      in\n      newFunctionDefinition.kind <- currentFunctionDefinition.kind;\n      let newCtx = {ctx with currentFunctionName = newFunctionName} in\n      Hashtbl.replace ctx.innerRecursiveFunctions oldFunctionName\n        newFunctionName;\n      newFunctionDefinition.body <- Some (vb_expr |> expression ~ctx:newCtx);\n      if !Common.Cli.debug then\n        Log_.warning ~forStats:false ~loc:pat_loc\n          (Termination\n             {\n               termination = TerminationAnalysisInternal;\n               message =\n                 Format.asprintf \"Adding recursive definition @{<info>%s@}\"\n                   newFunctionName;\n             });\n      inExpr |> expression ~ctx\n    | Texp_let (recFlag, valueBindings, inExpr) ->\n      if recFlag = Recursive then Stats.logHygieneNoNestedLetRec ~loc;\n      let commands =\n        (valueBindings\n        |> List.map (fun (vb : Typedtree.value_binding) ->\n               vb.vb_expr |> expression ~ctx))\n        @ [inExpr |> expression ~ctx]\n      in\n      Command.sequence commands\n    | Texp_sequence (e1, e2) ->\n      let open Command in\n      expression ~ctx e1 +++ expression ~ctx e2\n    | Texp_ifthenelse (e1, e2, eOpt) ->\n      let c1 = e1 |> expression ~ctx in\n      let c2 = e2 |> expression ~ctx in\n      let c3 = eOpt |> expressionOpt ~ctx in\n      let open Command in\n      c1 +++ nondet [c2; c3]\n    | Texp_constant _ -> Command.nothing\n    | Texp_construct ({loc = {loc_ghost}}, 
{cstr_name}, expressions) -> (\n      let c =\n        expressions\n        |> List.map (fun e -> e |> expression ~ctx)\n        |> Command.unorderedSequence\n      in\n      match cstr_name with\n      | \"Some\" when loc_ghost = false ->\n        let open Command in\n        c +++ ConstrOption Rsome\n      | \"None\" when loc_ghost = false ->\n        let open Command in\n        c +++ ConstrOption Rnone\n      | _ -> c)\n    | Texp_function {cases} -> cases |> List.map (case ~ctx) |> Command.nondet\n    | Texp_match (e, casesOk, casesExn, _partial)\n      when not\n             (casesExn\n             |> List.map (fun (case : Typedtree.case) -> case.c_lhs.pat_desc)\n             != []) -> (\n      (* No exceptions *)\n      let cases = casesOk @ casesExn in\n      let cE = e |> expression ~ctx in\n      let cCases = cases |> List.map (case ~ctx) in\n      let fail () =\n        let open Command in\n        cE +++ nondet cCases\n      in\n      match (cE, cases) with\n      | ( Call (FunctionCall functionCall, loc),\n          [{c_lhs = pattern1}; {c_lhs = pattern2}] ) -> (\n        match (pattern1.pat_desc, pattern2.pat_desc) with\n        | ( Tpat_construct (_, {cstr_name = (\"Some\" | \"None\") as name1}, _),\n            Tpat_construct (_, {cstr_name = \"Some\" | \"None\"}, _) ) ->\n          let casesArr = Array.of_list cCases in\n          let some, none =\n            try\n              match name1 = \"Some\" with\n              | true -> (casesArr.(0), casesArr.(1))\n              | false -> (casesArr.(1), casesArr.(0))\n            with Invalid_argument _ -> (Nothing, Nothing)\n          in\n          Command.SwitchOption {functionCall; loc; some; none}\n        | _ -> fail ())\n      | _ -> fail ())\n    | Texp_match _ -> assert false (* exceptions *)\n    | Texp_field (e, _lid, _desc) -> e |> expression ~ctx\n    | Texp_record {fields; extended_expression} ->\n      extended_expression\n      :: (fields |> Array.to_list\n         |> List.map\n          
    (fun\n                ( _desc,\n                  (recordLabelDefinition : Typedtree.record_label_definition) )\n              ->\n                match recordLabelDefinition with\n                | Kept _typeExpr -> None\n                | Overridden (_loc, e) -> Some e))\n      |> List.map (expressionOpt ~ctx)\n      |> Command.unorderedSequence\n    | Texp_setfield (e1, _loc, _desc, e2) ->\n      [e1; e2] |> List.map (expression ~ctx) |> Command.unorderedSequence\n    | Texp_tuple expressions | Texp_array expressions ->\n      expressions |> List.map (expression ~ctx) |> Command.unorderedSequence\n    | Texp_assert _ -> Command.nothing\n    | Texp_try (e, cases) ->\n      let cE = e |> expression ~ctx in\n      let cCases = cases |> List.map (case ~ctx) |> Command.nondet in\n      let open Command in\n      cE +++ cCases\n    | Texp_variant (_label, eOpt) -> eOpt |> expressionOpt ~ctx\n    | Texp_while _ ->\n      notImplemented \"Texp_while\";\n      assert false\n    | Texp_for _ ->\n      notImplemented \"Texp_for\";\n      assert false\n    | Texp_send _ ->\n      notImplemented \"Texp_send\";\n      assert false\n    | Texp_new _ ->\n      notImplemented \"Texp_new\";\n      assert false\n    | Texp_instvar _ ->\n      notImplemented \"Texp_instvar\";\n      assert false\n    | Texp_setinstvar _ ->\n      notImplemented \"Texp_setinstvar\";\n      assert false\n    | Texp_override _ ->\n      notImplemented \"Texp_override\";\n      assert false\n    | Texp_letmodule _ ->\n      notImplemented \"Texp_letmodule\";\n      assert false\n    | Texp_letexception _ ->\n      notImplemented \"Texp_letexception\";\n      assert false\n    | Texp_lazy _ ->\n      notImplemented \"Texp_lazy\";\n      assert false\n    | Texp_object _ ->\n      notImplemented \"Texp_letmodule\";\n      assert false\n    | Texp_pack _ ->\n      notImplemented \"Texp_pack\";\n      assert false\n    | Texp_unreachable ->\n      notImplemented \"Texp_unreachable\";\n      assert 
false\n    | Texp_extension_constructor _ when true ->\n      notImplemented \"Texp_extension_constructor\";\n      assert false\n    | _ ->\n      (* ocaml 4.08: Texp_letop(_) | Texp_open(_) *)\n      notImplemented \"Texp_letop(_) | Texp_open(_)\";\n      assert false\n\n  and expressionOpt ~ctx eOpt =\n    match eOpt with\n    | None -> Command.nothing\n    | Some e -> e |> expression ~ctx\n\n  and evalArgs ~args ~ctx command =\n    (* Don't assume any evaluation order on the arguments *)\n    let commands =\n      args |> List.map (fun (_, eOpt) -> eOpt |> expressionOpt ~ctx)\n    in\n    let open Command in\n    unorderedSequence commands +++ command\n\n  and case : ctx:ctx -> Typedtree.case -> _ =\n   fun ~ctx {c_guard; c_rhs} ->\n    match c_guard with\n    | None -> c_rhs |> expression ~ctx\n    | Some e ->\n      let open Command in\n      expression ~ctx e +++ expression ~ctx c_rhs\nend\n\nmodule CallStack = struct\n  type frame = {frameNumber: int; pos: Lexing.position}\n  type t = {tbl: (FunctionCall.t, frame) Hashtbl.t; mutable size: int}\n\n  let create () = {tbl = Hashtbl.create 1; size = 0}\n\n  let toSet {tbl} =\n    Hashtbl.fold\n      (fun frame _i set -> FunctionCallSet.add frame set)\n      tbl FunctionCallSet.empty\n\n  let hasFunctionCall ~functionCall (t : t) = Hashtbl.mem t.tbl functionCall\n\n  let addFunctionCall ~functionCall ~pos (t : t) =\n    t.size <- t.size + 1;\n    Hashtbl.replace t.tbl functionCall {frameNumber = t.size; pos}\n\n  let removeFunctionCall ~functionCall (t : t) =\n    t.size <- t.size - 1;\n    Hashtbl.remove t.tbl functionCall\n\n  let print ppf (t : t) =\n    Format.fprintf ppf \"  CallStack:\";\n    let frames =\n      Hashtbl.fold\n        (fun functionCall {frameNumber; pos} frames ->\n          (functionCall, frameNumber, pos) :: frames)\n        t.tbl []\n      |> List.sort (fun (_, i1, _) (_, i2, _) -> i2 - i1)\n    in\n    frames\n    |> List.iter (fun ((functionCall : FunctionCall.t), i, pos) ->\n          
 Format.fprintf ppf \"\\n    @{<dim>%d@} %s (%a)\" i\n             (FunctionCall.toString functionCall)\n             printPos pos)\nend\n\nmodule Eval = struct\n  type progress = Progress.t\n  type cache = (FunctionCall.t, State.t) Hashtbl.t\n\n  let createCache () : cache = Hashtbl.create 1\n\n  let lookupCache ~functionCall (cache : cache) =\n    Hashtbl.find_opt cache functionCall\n\n  let updateCache ~functionCall ~loc ~state (cache : cache) =\n    Stats.logResult ~functionCall ~resString:(state |> State.toString) ~loc;\n    if not (Hashtbl.mem cache functionCall) then\n      Hashtbl.replace cache functionCall state\n\n  let hasInfiniteLoop ~callStack ~functionCallToInstantiate ~functionCall ~loc\n      ~state =\n    if callStack |> CallStack.hasFunctionCall ~functionCall then (\n      if state.State.progress = NoProgress then (\n        Log_.error ~loc\n          (Termination\n             {\n               termination = ErrorTermination;\n               message =\n                 Format.asprintf \"%a\"\n                   (fun ppf () ->\n                     Format.fprintf ppf \"Possible infinite loop when calling \";\n                     (match functionCallToInstantiate = functionCall with\n                     | true ->\n                       Format.fprintf ppf \"@{<error>%s@}\"\n                         (functionCallToInstantiate |> FunctionCall.toString)\n                     | false ->\n                       Format.fprintf ppf \"@{<error>%s@} which is @{<error>%s@}\"\n                         (functionCallToInstantiate |> FunctionCall.toString)\n                         (functionCall |> FunctionCall.toString));\n                     Format.fprintf ppf \"@,%a\" CallStack.print callStack)\n                   ();\n             });\n        Stats.logLoop ());\n      true)\n    else false\n\n  let rec runFunctionCall ~cache ~callStack ~functionArgs ~functionTable\n      ~madeProgressOn ~loc ~state functionCallToInstantiate : State.t =\n    let pos = 
loc.Location.loc_start in\n    let functionCall =\n      functionCallToInstantiate\n      |> FunctionCall.applySubstitution ~sub:functionArgs\n    in\n    let functionName = functionCall.functionName in\n    let call = Call.FunctionCall functionCall in\n    let stateAfterCall =\n      match cache |> lookupCache ~functionCall with\n      | Some stateAfterCall ->\n        Stats.logCache ~functionCall ~hit:true ~loc;\n        {\n          stateAfterCall with\n          trace = Trace.Tcall (call, stateAfterCall.progress);\n        }\n      | None ->\n        if FunctionCallSet.mem functionCall madeProgressOn then\n          State.init ~progress:Progress ~trace:(Trace.Tcall (call, Progress)) ()\n        else if\n          hasInfiniteLoop ~callStack ~functionCallToInstantiate ~functionCall\n            ~loc ~state\n        then {state with trace = Trace.Tcall (call, state.progress)}\n        else (\n          Stats.logCache ~functionCall ~hit:false ~loc;\n          let functionDefinition =\n            functionTable |> FunctionTable.getFunctionDefinition ~functionName\n          in\n          callStack |> CallStack.addFunctionCall ~functionCall ~pos;\n          let body =\n            match functionDefinition.body with\n            | Some body -> body\n            | None -> assert false\n          in\n          let stateAfterCall =\n            body\n            |> run ~cache ~callStack ~functionArgs:functionCall.functionArgs\n                 ~functionTable ~madeProgressOn ~state:(State.init ())\n          in\n          cache |> updateCache ~functionCall ~loc ~state:stateAfterCall;\n          (* Invariant: run should restore the callStack *)\n          callStack |> CallStack.removeFunctionCall ~functionCall;\n          let trace = Trace.Tcall (call, stateAfterCall.progress) in\n          {stateAfterCall with trace})\n    in\n    State.seq state stateAfterCall\n\n  and run ~(cache : cache) ~callStack ~functionArgs ~functionTable\n      ~madeProgressOn ~state (command : 
Command.t) : State.t =\n    match command with\n    | Call (FunctionCall functionCall, loc) ->\n      functionCall\n      |> runFunctionCall ~cache ~callStack ~functionArgs ~functionTable\n           ~madeProgressOn ~loc ~state\n    | Call ((ProgressFunction _ as call), _pos) ->\n      let state1 =\n        State.init ~progress:Progress ~trace:(Tcall (call, Progress)) ()\n      in\n      State.seq state state1\n    | ConstrOption r ->\n      let state1 =\n        match r = Rsome with\n        | true -> State.some ~progress:state.progress\n        | false -> State.none ~progress:state.progress\n      in\n      State.seq state state1\n    | Nothing ->\n      let state1 = State.init () in\n      State.seq state state1\n    | Sequence commands ->\n      (* if one command makes progress, then the sequence makes progress *)\n      let rec findFirstProgress ~callStack ~commands ~madeProgressOn ~state =\n        match commands with\n        | [] -> state\n        | c :: nextCommands ->\n          let state1 =\n            c\n            |> run ~cache ~callStack ~functionArgs ~functionTable\n                 ~madeProgressOn ~state\n          in\n          let madeProgressOn, callStack =\n            match state1.progress with\n            | Progress ->\n              (* look for infinite loops in the rest of the sequence, remembering what has made progress *)\n              ( FunctionCallSet.union madeProgressOn\n                  (callStack |> CallStack.toSet),\n                CallStack.create () )\n            | NoProgress -> (madeProgressOn, callStack)\n          in\n          findFirstProgress ~callStack ~commands:nextCommands ~madeProgressOn\n            ~state:state1\n      in\n      findFirstProgress ~callStack ~commands ~madeProgressOn ~state\n    | UnorderedSequence commands ->\n      let stateNoTrace = {state with trace = Trace.empty} in\n      (* the commands could be executed in any order: progess if any one does *)\n      let states =\n        commands\n       
 |> List.map (fun c ->\n               c\n               |> run ~cache ~callStack ~functionArgs ~functionTable\n                    ~madeProgressOn ~state:stateNoTrace)\n      in\n      State.seq state (states |> State.unorderedSequence)\n    | Nondet commands ->\n      let stateNoTrace = {state with trace = Trace.empty} in\n      (* the commands could be executed in any order: progess if any one does *)\n      let states =\n        commands\n        |> List.map (fun c ->\n               c\n               |> run ~cache ~callStack ~functionArgs ~functionTable\n                    ~madeProgressOn ~state:stateNoTrace)\n      in\n      State.seq state (states |> State.nondet)\n    | SwitchOption {functionCall; loc; some; none} -> (\n      let stateAfterCall =\n        functionCall\n        |> runFunctionCall ~cache ~callStack ~functionArgs ~functionTable\n             ~madeProgressOn ~loc ~state\n      in\n      match stateAfterCall.valuesOpt with\n      | None ->\n        Command.nondet [some; none]\n        |> run ~cache ~callStack ~functionArgs ~functionTable ~madeProgressOn\n             ~state:stateAfterCall\n      | Some values ->\n        let runOpt c progressOpt =\n          match progressOpt with\n          | None -> State.init ~progress:Progress ()\n          | Some progress ->\n            c\n            |> run ~cache ~callStack ~functionArgs ~functionTable\n                 ~madeProgressOn ~state:(State.init ~progress ())\n        in\n        let stateNone = values |> Values.getNone |> runOpt none in\n        let stateSome = values |> Values.getSome |> runOpt some in\n        State.seq stateAfterCall (State.nondet [stateSome; stateNone]))\n\n  let analyzeFunction ~cache ~functionTable ~loc functionName =\n    if !Common.Cli.debug then\n      Log_.log \"@[<v 2>@,@{<warning>Termination Analysis@} for @{<info>%s@}@]@.\"\n        functionName;\n    let pos = loc.Location.loc_start in\n    let callStack = CallStack.create () in\n    let functionArgs = 
FunctionArgs.empty in\n    let functionCall = FunctionCall.noArgs functionName in\n    callStack |> CallStack.addFunctionCall ~functionCall ~pos;\n    let functionDefinition =\n      functionTable |> FunctionTable.getFunctionDefinition ~functionName\n    in\n    if functionDefinition.kind <> Kind.empty then\n      Stats.logHygieneParametric ~functionName ~loc\n    else\n      let body =\n        match functionDefinition.body with\n        | Some body -> body\n        | None -> assert false\n      in\n      let state =\n        body\n        |> run ~cache ~callStack ~functionArgs ~functionTable\n             ~madeProgressOn:FunctionCallSet.empty ~state:(State.init ())\n      in\n      cache |> updateCache ~functionCall ~loc ~state\nend\n\nlet progressFunctionsFromAttributes attributes =\n  let lidToString lid = lid |> Longident.flatten |> String.concat \".\" in\n  let isProgress = ( = ) \"progress\" in\n  if attributes |> Annotation.hasAttribute isProgress then\n    Some\n      (match attributes |> Annotation.getAttributePayload isProgress with\n      | None -> []\n      | Some (IdentPayload lid) -> [lidToString lid]\n      | Some (TuplePayload l) ->\n        l\n        |> List.filter_map (function\n             | Annotation.IdentPayload lid -> Some (lidToString lid)\n             | _ -> None)\n      | _ -> [])\n  else None\n\nlet traverseAst ~valueBindingsTable =\n  let super = Tast_mapper.default in\n  let value_bindings (self : Tast_mapper.mapper) (recFlag, valueBindings) =\n    (* Update the table of value bindings for variables *)\n    valueBindings\n    |> List.iter (fun (vb : Typedtree.value_binding) ->\n           match vb.vb_pat.pat_desc with\n           | Tpat_var (id, {loc = {loc_start = pos}}) ->\n             let callees = lazy (FindFunctionsCalled.findCallees vb.vb_expr) in\n             Hashtbl.replace valueBindingsTable (Ident.name id)\n               (pos, vb.vb_expr, callees)\n           | _ -> ());\n    let progressFunctions, functionsToAnalyze 
=\n      if recFlag = Asttypes.Nonrecursive then (StringSet.empty, [])\n      else\n        let progressFunctions0, functionsToAnalyze0 =\n          valueBindings\n          |> List.fold_left\n               (fun (progressFunctions, functionsToAnalyze)\n                    (valueBinding : Typedtree.value_binding) ->\n                 match\n                   progressFunctionsFromAttributes valueBinding.vb_attributes\n                 with\n                 | None -> (progressFunctions, functionsToAnalyze)\n                 | Some newProgressFunctions ->\n                   ( StringSet.union\n                       (StringSet.of_list newProgressFunctions)\n                       progressFunctions,\n                     match valueBinding.vb_pat.pat_desc with\n                     | Tpat_var (id, _) ->\n                       (Ident.name id, valueBinding.vb_expr.exp_loc)\n                       :: functionsToAnalyze\n                     | _ -> functionsToAnalyze ))\n               (StringSet.empty, [])\n        in\n        (progressFunctions0, functionsToAnalyze0 |> List.rev)\n    in\n    if functionsToAnalyze <> [] then (\n      let functionTable = FunctionTable.create () in\n      let isProgressFunction path =\n        StringSet.mem (Path.name path) progressFunctions\n      in\n      let recursiveFunctions =\n        List.fold_left\n          (fun defs (valueBinding : Typedtree.value_binding) ->\n            match valueBinding.vb_pat.pat_desc with\n            | Tpat_var (id, _) -> Ident.name id :: defs\n            | _ -> defs)\n          [] valueBindings\n        |> List.rev\n      in\n      let recursiveDefinitions =\n        recursiveFunctions\n        |> List.fold_left\n             (fun acc functionName ->\n               match Hashtbl.find_opt valueBindingsTable functionName with\n               | Some (_pos, e, _set) -> (functionName, e) :: acc\n               | None -> acc)\n             []\n        |> List.rev\n      in\n      recursiveDefinitions\n     
 |> List.iter (fun (functionName, _body) ->\n             functionTable |> FunctionTable.addFunction ~functionName);\n      recursiveDefinitions\n      |> List.iter (fun (_, body) ->\n             body\n             |> ExtendFunctionTable.run ~functionTable ~progressFunctions\n                  ~valueBindingsTable);\n      recursiveDefinitions\n      |> List.iter (fun (_, body) ->\n             body\n             |> CheckExpressionWellFormed.run ~functionTable ~valueBindingsTable);\n      functionTable\n      |> Hashtbl.iter\n           (fun\n             functionName\n             (functionDefinition : FunctionTable.functionDefinition)\n           ->\n             if functionDefinition.body = None then\n               match Hashtbl.find_opt valueBindingsTable functionName with\n               | None -> ()\n               | Some (_pos, body, _) ->\n                 functionTable\n                 |> FunctionTable.addBody\n                      ~body:\n                        (Some\n                           (body\n                           |> Compile.expression\n                                ~ctx:\n                                  {\n                                    currentFunctionName = functionName;\n                                    functionTable;\n                                    innerRecursiveFunctions = Hashtbl.create 1;\n                                    isProgressFunction;\n                                  }))\n                      ~functionName);\n      if !Common.Cli.debug then FunctionTable.dump functionTable;\n      let cache = Eval.createCache () in\n      functionsToAnalyze\n      |> List.iter (fun (functionName, loc) ->\n             functionName |> Eval.analyzeFunction ~cache ~functionTable ~loc);\n      Stats.newRecursiveFunctions ~numFunctions:(Hashtbl.length functionTable));\n    valueBindings\n    |> List.iter (fun valueBinding ->\n           super.value_binding self valueBinding |> ignore);\n    (recFlag, valueBindings)\n  in\n 
 {super with Tast_mapper.value_bindings}\n\nlet processStructure (structure : Typedtree.structure) =\n  Stats.newFile ();\n  let valueBindingsTable = Hashtbl.create 1 in\n  let traverseAst = traverseAst ~valueBindingsTable in\n  structure |> traverseAst.structure traverseAst |> ignore\n\nlet processCmt (cmt_infos : Cmt_format.cmt_infos) =\n  match cmt_infos.cmt_annots with\n  | Interface _ -> ()\n  | Implementation structure -> processStructure structure\n  | _ -> ()\n\nlet reportStats () = Stats.dump ~ppf:Format.std_formatter\n"
  },
  {
    "path": "analysis/reanalyze/src/Common.ml",
    "content": "let currentSrc = ref \"\"\nlet currentModule = ref \"\"\nlet currentModuleName = ref (\"\" |> Name.create)\nlet runConfig = RunConfig.runConfig\n\n(* Location printer: `filename:line: ' *)\nlet posToString (pos : Lexing.position) =\n  let file = pos.Lexing.pos_fname in\n  let line = pos.Lexing.pos_lnum in\n  let col = pos.Lexing.pos_cnum - pos.Lexing.pos_bol in\n  (file |> Filename.basename)\n  ^ \":\" ^ string_of_int line ^ \":\" ^ string_of_int col\n\nmodule Cli = struct\n  let debug = ref false\n  let ci = ref false\n\n  (** The command was a -cmt variant (e.g. -exception-cmt) *)\n  let cmtCommand = ref false\n\n  let experimental = ref false\n  let json = ref false\n  let write = ref false\n\n  (* names to be considered live values *)\n  let liveNames = ref ([] : string list)\n\n  (* paths of files where all values are considered live *)\n\n  let livePaths = ref ([] : string list)\n\n  (* paths of files to exclude from analysis *)\n  let excludePaths = ref ([] : string list)\nend\n\nmodule StringSet = Set.Make (String)\n\nmodule LocSet = Set.Make (struct\n  include Location\n\n  let compare = compare\nend)\n\nmodule FileSet = Set.Make (String)\n\nmodule FileHash = struct\n  include Hashtbl.Make (struct\n    type t = string\n\n    let hash (x : t) = Hashtbl.hash x\n    let equal (x : t) y = x = y\n  end)\nend\n\nmodule FileReferences = struct\n  (* references across files *)\n  let table = (FileHash.create 256 : FileSet.t FileHash.t)\n\n  let findSet table key =\n    try FileHash.find table key with Not_found -> FileSet.empty\n\n  let add (locFrom : Location.t) (locTo : Location.t) =\n    let key = locFrom.loc_start.pos_fname in\n    let set = findSet table key in\n    FileHash.replace table key (FileSet.add locTo.loc_start.pos_fname set)\n\n  let addFile fileName =\n    let set = findSet table fileName in\n    FileHash.replace table fileName set\n\n  let exists fileName = FileHash.mem table fileName\n\n  let find fileName =\n    match 
FileHash.find_opt table fileName with\n    | Some set -> set\n    | None -> FileSet.empty\n\n  let iter f = FileHash.iter f table\nend\n\nmodule Path = struct\n  type t = Name.t list\n\n  let toName (path : t) =\n    path |> List.rev_map Name.toString |> String.concat \".\" |> Name.create\n\n  let toString path = path |> toName |> Name.toString\n\n  let withoutHead path =\n    match\n      path |> List.rev_map (fun n -> n |> Name.toInterface |> Name.toString)\n    with\n    | _ :: tl -> tl |> String.concat \".\"\n    | [] -> \"\"\n\n  let onOkPath ~whenContainsApply ~f path =\n    match path |> Path.flatten with\n    | `Ok (id, mods) -> f (Ident.name id :: mods |> String.concat \".\")\n    | `Contains_apply -> whenContainsApply\n\n  let fromPathT path =\n    match path |> Path.flatten with\n    | `Ok (id, mods) -> Ident.name id :: mods |> List.rev_map Name.create\n    | `Contains_apply -> []\n\n  let moduleToImplementation path =\n    match path |> List.rev with\n    | moduleName :: rest ->\n      (moduleName |> Name.toImplementation) :: rest |> List.rev\n    | [] -> path\n\n  let moduleToInterface path =\n    match path |> List.rev with\n    | moduleName :: rest -> (moduleName |> Name.toInterface) :: rest |> List.rev\n    | [] -> path\n\n  let toModuleName ~isType path =\n    match path with\n    | _ :: tl when not isType -> tl |> toName\n    | _ :: _ :: tl when isType -> tl |> toName\n    | _ -> \"\" |> Name.create\n\n  let typeToInterface path =\n    match path with\n    | typeName :: rest -> (typeName |> Name.toInterface) :: rest\n    | [] -> path\nend\n\nmodule OptionalArgs = struct\n  type t = {\n    mutable count: int;\n    mutable unused: StringSet.t;\n    mutable alwaysUsed: StringSet.t;\n  }\n\n  let empty =\n    {unused = StringSet.empty; alwaysUsed = StringSet.empty; count = 0}\n\n  let fromList l =\n    {unused = StringSet.of_list l; alwaysUsed = StringSet.empty; count = 0}\n\n  let isEmpty x = StringSet.is_empty x.unused\n\n  let call ~argNames 
~argNamesMaybe x =\n    let nameSet = argNames |> StringSet.of_list in\n    let nameSetMaybe = argNamesMaybe |> StringSet.of_list in\n    let nameSetAlways = StringSet.diff nameSet nameSetMaybe in\n    if x.count = 0 then x.alwaysUsed <- nameSetAlways\n    else x.alwaysUsed <- StringSet.inter nameSetAlways x.alwaysUsed;\n    argNames\n    |> List.iter (fun name -> x.unused <- StringSet.remove name x.unused);\n    x.count <- x.count + 1\n\n  let combine x y =\n    let unused = StringSet.inter x.unused y.unused in\n    x.unused <- unused;\n    y.unused <- unused;\n    let alwaysUsed = StringSet.inter x.alwaysUsed y.alwaysUsed in\n    x.alwaysUsed <- alwaysUsed;\n    y.alwaysUsed <- alwaysUsed\n\n  let iterUnused f x = StringSet.iter f x.unused\n  let iterAlwaysUsed f x = StringSet.iter (fun s -> f s x.count) x.alwaysUsed\nend\n\nmodule DeclKind = struct\n  type t =\n    | Exception\n    | RecordLabel\n    | VariantCase\n    | Value of {\n        isToplevel: bool;\n        mutable optionalArgs: OptionalArgs.t;\n        sideEffects: bool;\n      }\n\n  let isType dk =\n    match dk with\n    | RecordLabel | VariantCase -> true\n    | Exception | Value _ -> false\n\n  let toString dk =\n    match dk with\n    | Exception -> \"Exception\"\n    | RecordLabel -> \"RecordLabel\"\n    | VariantCase -> \"VariantCase\"\n    | Value _ -> \"Value\"\nend\n\ntype posAdjustment = FirstVariant | OtherVariant | Nothing\n\ntype decl = {\n  declKind: DeclKind.t;\n  moduleLoc: Location.t;\n  posAdjustment: posAdjustment;\n  path: Path.t;\n  pos: Lexing.position;\n  posEnd: Lexing.position;\n  posStart: Lexing.position;\n  mutable resolvedDead: bool option;\n  mutable report: bool;\n}\n\ntype line = {mutable declarations: decl list; original: string}\n\nmodule ExnSet = Set.Make (Exn)\n\ntype missingRaiseInfo = {\n  exnName: string;\n  exnTable: (Exn.t, LocSet.t) Hashtbl.t;\n  locFull: Location.t;\n  missingAnnotations: ExnSet.t;\n  raiseSet: ExnSet.t;\n}\n\ntype severity = Warning | 
Error\ntype deadOptional = WarningUnusedArgument | WarningRedundantOptionalArgument\n\ntype termination =\n  | ErrorHygiene\n  | ErrorNotImplemented\n  | ErrorTermination\n  | TerminationAnalysisInternal\n\ntype deadWarning =\n  | WarningDeadException\n  | WarningDeadType\n  | WarningDeadValue\n  | WarningDeadValueWithSideEffects\n  | IncorrectDeadAnnotation\n\ntype lineAnnotation = (decl * line) option\n\ntype description =\n  | Circular of {message: string}\n  | ExceptionAnalysis of {message: string}\n  | ExceptionAnalysisMissing of missingRaiseInfo\n  | DeadModule of {message: string}\n  | DeadOptional of {deadOptional: deadOptional; message: string}\n  | DeadWarning of {\n      deadWarning: deadWarning;\n      path: string;\n      message: string;\n      shouldWriteLineAnnotation: bool;\n      lineAnnotation: lineAnnotation;\n    }\n  | Termination of {termination: termination; message: string}\n\ntype issue = {\n  name: string;\n  severity: severity;\n  loc: Location.t;\n  description: description;\n}\n"
  },
  {
    "path": "analysis/reanalyze/src/DeadCode.ml",
    "content": "open DeadCommon\n\nlet processSignature ~doValues ~doTypes (signature : Types.signature) =\n  signature\n  |> List.iter (fun sig_item ->\n         DeadValue.processSignatureItem ~doValues ~doTypes\n           ~moduleLoc:Location.none\n           ~path:[!Common.currentModuleName]\n           sig_item)\n\nlet processCmt ~cmtFilePath (cmt_infos : Cmt_format.cmt_infos) =\n  (match cmt_infos.cmt_annots with\n  | Interface signature ->\n    ProcessDeadAnnotations.signature signature;\n    processSignature ~doValues:true ~doTypes:true signature.sig_type\n  | Implementation structure ->\n    let cmtiExists =\n      Sys.file_exists ((cmtFilePath |> Filename.remove_extension) ^ \".cmti\")\n    in\n    ProcessDeadAnnotations.structure ~doGenType:(not cmtiExists) structure;\n    processSignature ~doValues:true ~doTypes:false structure.str_type;\n    let doExternals =\n      (* This is already handled at the interface level, avoid issues in inconsistent locations\n         https://github.com/BuckleScript/syntax/pull/54\n         Ideally, the handling should be less location-based, just like other language aspects. *)\n      false\n    in\n    DeadValue.processStructure ~doTypes:true ~doExternals\n      ~cmt_value_dependencies:cmt_infos.cmt_value_dependencies structure\n  | _ -> ());\n  DeadType.TypeDependencies.forceDelayedItems ();\n  DeadType.TypeDependencies.clear ()\n"
  },
  {
    "path": "analysis/reanalyze/src/DeadCommon.ml",
    "content": "(* Adapted from https://github.com/LexiFi/dead_code_analyzer *)\n\nopen Common\n\nmodule PosSet = Set.Make (struct\n  type t = Lexing.position\n\n  let compare = compare\nend)\n\nmodule Config = struct\n  (* Turn on type analysis *)\n  let analyzeTypes = ref true\n  let analyzeExternals = ref false\n  let reportUnderscore = false\n  let reportTypesDeadOnlyInInterface = false\n  let recursiveDebug = false\n  let warnOnCircularDependencies = false\nend\n\nmodule Current = struct\n  let bindings = ref PosSet.empty\n  let lastBinding = ref Location.none\n\n  (** max end position of a value reported dead *)\n  let maxValuePosEnd = ref Lexing.dummy_pos\nend\n\nlet rec checkSub s1 s2 n =\n  n <= 0\n  || (try s1.[n] = s2.[n] with Invalid_argument _ -> false)\n     && checkSub s1 s2 (n - 1)\n\nlet fileIsImplementationOf s1 s2 =\n  let n1 = String.length s1 and n2 = String.length s2 in\n  n2 = n1 + 1 && checkSub s1 s2 (n1 - 1)\n\nlet liveAnnotation = \"live\"\n\nmodule PosHash = struct\n  include Hashtbl.Make (struct\n    type t = Lexing.position\n\n    let hash x =\n      let s = Filename.basename x.Lexing.pos_fname in\n      Hashtbl.hash (x.Lexing.pos_cnum, s)\n\n    let equal (x : t) y = x = y\n  end)\n\n  let findSet h k = try find h k with Not_found -> PosSet.empty\n\n  let addSet h k v =\n    let set = findSet h k in\n    replace h k (PosSet.add v set)\nend\n\ntype decls = decl PosHash.t\n(** all exported declarations *)\n\nlet decls = (PosHash.create 256 : decls)\n\nmodule ValueReferences = struct\n  (** all value references *)\n  let table = (PosHash.create 256 : PosSet.t PosHash.t)\n\n  let add posTo posFrom = PosHash.addSet table posTo posFrom\n  let find pos = PosHash.findSet table pos\nend\n\nmodule TypeReferences = struct\n  (** all type references *)\n  let table = (PosHash.create 256 : PosSet.t PosHash.t)\n\n  let add posTo posFrom = PosHash.addSet table posTo posFrom\n  let find pos = PosHash.findSet table pos\nend\n\nlet declGetLoc decl =\n  
let loc_start =\n    let offset =\n      WriteDeadAnnotations.offsetOfPosAdjustment decl.posAdjustment\n    in\n    let cnumWithOffset = decl.posStart.pos_cnum + offset in\n    if cnumWithOffset < decl.posEnd.pos_cnum then\n      {decl.posStart with pos_cnum = cnumWithOffset}\n    else decl.posStart\n  in\n  {Location.loc_start; loc_end = decl.posEnd; loc_ghost = false}\n\nlet addValueReference ~addFileReference ~(locFrom : Location.t)\n    ~(locTo : Location.t) =\n  let lastBinding = !Current.lastBinding in\n  let locFrom =\n    match lastBinding = Location.none with\n    | true -> locFrom\n    | false -> lastBinding\n  in\n  if not locFrom.loc_ghost then (\n    if !Cli.debug then\n      Log_.item \"addValueReference %s --> %s@.\"\n        (locFrom.loc_start |> posToString)\n        (locTo.loc_start |> posToString);\n    ValueReferences.add locTo.loc_start locFrom.loc_start;\n    if\n      addFileReference && (not locTo.loc_ghost) && (not locFrom.loc_ghost)\n      && locFrom.loc_start.pos_fname <> locTo.loc_start.pos_fname\n    then FileReferences.add locFrom locTo)\n\nlet iterFilesFromRootsToLeaves iterFun =\n  (* For each file, the number of incoming references *)\n  let inverseReferences = (Hashtbl.create 1 : (string, int) Hashtbl.t) in\n  (* For each number of incoming references, the files *)\n  let referencesByNumber = (Hashtbl.create 1 : (int, FileSet.t) Hashtbl.t) in\n  let getNum fileName =\n    try Hashtbl.find inverseReferences fileName with Not_found -> 0\n  in\n  let getSet num =\n    try Hashtbl.find referencesByNumber num with Not_found -> FileSet.empty\n  in\n  let addIncomingEdge fileName =\n    let oldNum = getNum fileName in\n    let newNum = oldNum + 1 in\n    let oldSetAtNum = getSet oldNum in\n    let newSetAtNum = FileSet.remove fileName oldSetAtNum in\n    let oldSetAtNewNum = getSet newNum in\n    let newSetAtNewNum = FileSet.add fileName oldSetAtNewNum in\n    Hashtbl.replace inverseReferences fileName newNum;\n    Hashtbl.replace 
referencesByNumber oldNum newSetAtNum;\n    Hashtbl.replace referencesByNumber newNum newSetAtNewNum\n  in\n  let removeIncomingEdge fileName =\n    let oldNum = getNum fileName in\n    let newNum = oldNum - 1 in\n    let oldSetAtNum = getSet oldNum in\n    let newSetAtNum = FileSet.remove fileName oldSetAtNum in\n    let oldSetAtNewNum = getSet newNum in\n    let newSetAtNewNum = FileSet.add fileName oldSetAtNewNum in\n    Hashtbl.replace inverseReferences fileName newNum;\n    Hashtbl.replace referencesByNumber oldNum newSetAtNum;\n    Hashtbl.replace referencesByNumber newNum newSetAtNewNum\n  in\n  let addEdge fromFile toFile =\n    if FileReferences.exists fromFile then addIncomingEdge toFile\n  in\n  let removeEdge fromFile toFile =\n    if FileReferences.exists fromFile then removeIncomingEdge toFile\n  in\n  FileReferences.iter (fun fromFile set ->\n      if getNum fromFile = 0 then\n        Hashtbl.replace referencesByNumber 0 (FileSet.add fromFile (getSet 0));\n      set |> FileSet.iter (fun toFile -> addEdge fromFile toFile));\n  while getSet 0 <> FileSet.empty do\n    let filesWithNoIncomingReferences = getSet 0 in\n    Hashtbl.remove referencesByNumber 0;\n    filesWithNoIncomingReferences\n    |> FileSet.iter (fun fileName ->\n           iterFun fileName;\n           let references = FileReferences.find fileName in\n           references |> FileSet.iter (fun toFile -> removeEdge fileName toFile))\n  done;\n  (* Process any remaining items in case of circular references *)\n  referencesByNumber\n  |> Hashtbl.iter (fun _num set ->\n         if FileSet.is_empty set then ()\n         else\n           set\n           |> FileSet.iter (fun fileName ->\n                  let pos = {Lexing.dummy_pos with pos_fname = fileName} in\n                  let loc =\n                    {Location.none with loc_start = pos; loc_end = pos}\n                  in\n                  if Config.warnOnCircularDependencies then\n                    Log_.warning ~loc\n           
           (Circular\n                         {\n                           message =\n                             Format.asprintf\n                               \"Results for %s could be inaccurate because of \\\n                                circular references\"\n                               fileName;\n                         });\n                  iterFun fileName))\n\n(** Keep track of the location of values annotated @genType or @dead *)\nmodule ProcessDeadAnnotations = struct\n  type annotatedAs = GenType | Dead | Live\n\n  let positionsAnnotated = PosHash.create 1\n  let isAnnotatedDead pos = PosHash.find_opt positionsAnnotated pos = Some Dead\n\n  let isAnnotatedGenTypeOrLive pos =\n    match PosHash.find_opt positionsAnnotated pos with\n    | Some (Live | GenType) -> true\n    | Some Dead | None -> false\n\n  let isAnnotatedGenTypeOrDead pos =\n    match PosHash.find_opt positionsAnnotated pos with\n    | Some (Dead | GenType) -> true\n    | Some Live | None -> false\n\n  let annotateGenType (pos : Lexing.position) =\n    PosHash.replace positionsAnnotated pos GenType\n\n  let annotateDead (pos : Lexing.position) =\n    PosHash.replace positionsAnnotated pos Dead\n\n  let annotateLive (pos : Lexing.position) =\n    PosHash.replace positionsAnnotated pos Live\n\n  let processAttributes ~doGenType ~name ~pos attributes =\n    let getPayloadFun f = attributes |> Annotation.getAttributePayload f in\n    let getPayload (x : string) =\n      attributes |> Annotation.getAttributePayload (( = ) x)\n    in\n    if\n      doGenType\n      && getPayloadFun Annotation.tagIsOneOfTheGenTypeAnnotations <> None\n    then pos |> annotateGenType;\n    if getPayload WriteDeadAnnotations.deadAnnotation <> None then\n      pos |> annotateDead;\n    let nameIsInLiveNamesOrPaths () =\n      !Cli.liveNames |> List.mem name\n      ||\n      let fname =\n        match Filename.is_relative pos.pos_fname with\n        | true -> pos.pos_fname\n        | false -> 
Filename.concat (Sys.getcwd ()) pos.pos_fname\n      in\n      let fnameLen = String.length fname in\n      !Cli.livePaths\n      |> List.exists (fun prefix ->\n             String.length prefix <= fnameLen\n             &&\n             try String.sub fname 0 (String.length prefix) = prefix\n             with Invalid_argument _ -> false)\n    in\n    if getPayload liveAnnotation <> None || nameIsInLiveNamesOrPaths () then\n      pos |> annotateLive;\n    if attributes |> Annotation.isOcamlSuppressDeadWarning then\n      pos |> annotateLive\n\n  let collectExportLocations ~doGenType =\n    let super = Tast_mapper.default in\n    let currentlyDisableWarnings = ref false in\n    let value_binding self\n        ({vb_attributes; vb_pat} as value_binding : Typedtree.value_binding) =\n      (match vb_pat.pat_desc with\n      | Tpat_var (id, {loc = {loc_start = pos}})\n      | Tpat_alias ({pat_desc = Tpat_any}, id, {loc = {loc_start = pos}}) ->\n        if !currentlyDisableWarnings then pos |> annotateLive;\n        vb_attributes\n        |> processAttributes ~doGenType ~name:(id |> Ident.name) ~pos\n      | _ -> ());\n      super.value_binding self value_binding\n    in\n    let type_kind toplevelAttrs self (typeKind : Typedtree.type_kind) =\n      (match typeKind with\n      | Ttype_record labelDeclarations ->\n        labelDeclarations\n        |> List.iter\n             (fun ({ld_attributes; ld_loc} : Typedtree.label_declaration) ->\n               toplevelAttrs @ ld_attributes\n               |> processAttributes ~doGenType:false ~name:\"\"\n                    ~pos:ld_loc.loc_start)\n      | Ttype_variant constructorDeclarations ->\n        constructorDeclarations\n        |> List.iter\n             (fun\n               ({cd_attributes; cd_loc; cd_args} :\n                 Typedtree.constructor_declaration)\n             ->\n               let _process_inline_records =\n                 match cd_args with\n                 | Cstr_record flds ->\n                   
List.iter\n                     (fun ({ld_attributes; ld_loc} :\n                            Typedtree.label_declaration) ->\n                       toplevelAttrs @ cd_attributes @ ld_attributes\n                       |> processAttributes ~doGenType:false ~name:\"\"\n                            ~pos:ld_loc.loc_start)\n                     flds\n                 | Cstr_tuple _ -> ()\n               in\n               toplevelAttrs @ cd_attributes\n               |> processAttributes ~doGenType:false ~name:\"\"\n                    ~pos:cd_loc.loc_start)\n      | _ -> ());\n      super.type_kind self typeKind\n    in\n    let type_declaration self (typeDeclaration : Typedtree.type_declaration) =\n      let attributes = typeDeclaration.typ_attributes in\n      let _ = type_kind attributes self typeDeclaration.typ_kind in\n      typeDeclaration\n    in\n    let value_description self\n        ({val_attributes; val_id; val_val = {val_loc = {loc_start = pos}}} as\n         value_description :\n          Typedtree.value_description) =\n      if !currentlyDisableWarnings then pos |> annotateLive;\n      val_attributes\n      |> processAttributes ~doGenType ~name:(val_id |> Ident.name) ~pos;\n      super.value_description self value_description\n    in\n    let structure_item self (item : Typedtree.structure_item) =\n      (match item.str_desc with\n      | Tstr_attribute attribute\n        when [attribute] |> Annotation.isOcamlSuppressDeadWarning ->\n        currentlyDisableWarnings := true\n      | _ -> ());\n      super.structure_item self item\n    in\n    let structure self (structure : Typedtree.structure) =\n      let oldDisableWarnings = !currentlyDisableWarnings in\n      super.structure self structure |> ignore;\n      currentlyDisableWarnings := oldDisableWarnings;\n      structure\n    in\n    let signature_item self (item : Typedtree.signature_item) =\n      (match item.sig_desc with\n      | Tsig_attribute attribute\n        when [attribute] |> 
Annotation.isOcamlSuppressDeadWarning ->\n        currentlyDisableWarnings := true\n      | _ -> ());\n      super.signature_item self item\n    in\n    let signature self (signature : Typedtree.signature) =\n      let oldDisableWarnings = !currentlyDisableWarnings in\n      super.signature self signature |> ignore;\n      currentlyDisableWarnings := oldDisableWarnings;\n      signature\n    in\n    {\n      super with\n      signature;\n      signature_item;\n      structure;\n      structure_item;\n      type_declaration;\n      value_binding;\n      value_description;\n    }\n\n  let structure ~doGenType structure =\n    let collectExportLocations = collectExportLocations ~doGenType in\n    structure\n    |> collectExportLocations.structure collectExportLocations\n    |> ignore\n\n  let signature signature =\n    let collectExportLocations = collectExportLocations ~doGenType:true in\n    signature\n    |> collectExportLocations.signature collectExportLocations\n    |> ignore\nend\n\nlet addDeclaration_ ?posEnd ?posStart ~declKind ~path ~(loc : Location.t)\n    ?(posAdjustment = Nothing) ~moduleLoc (name : Name.t) =\n  let pos = loc.loc_start in\n  let posStart =\n    match posStart with\n    | Some posStart -> posStart\n    | None -> pos\n  in\n  let posEnd =\n    match posEnd with\n    | Some posEnd -> posEnd\n    | None -> loc.loc_end\n  in\n  (* a .cmi file can contain locations from other files.\n     For instance:\n         module M : Set.S with type elt = int\n     will create value definitions whose location is in set.mli\n  *)\n  if\n    (not loc.loc_ghost)\n    && (!currentSrc = pos.pos_fname || !currentModule == \"*include*\")\n  then (\n    if !Cli.debug then\n      Log_.item \"add%sDeclaration %s %s path:%s@.\"\n        (declKind |> DeclKind.toString)\n        (name |> Name.toString) (pos |> posToString) (path |> Path.toString);\n    let decl =\n      {\n        declKind;\n        moduleLoc;\n        posAdjustment;\n        path = name :: path;\n     
   pos;\n        posEnd;\n        posStart;\n        resolvedDead = None;\n        report = true;\n      }\n    in\n    PosHash.replace decls pos decl)\n\nlet addValueDeclaration ?(isToplevel = true) ~(loc : Location.t) ~moduleLoc\n    ?(optionalArgs = OptionalArgs.empty) ~path ~sideEffects name =\n  name\n  |> addDeclaration_\n       ~declKind:(Value {isToplevel; optionalArgs; sideEffects})\n       ~loc ~moduleLoc ~path\n\nlet emitWarning ~decl ~message deadWarning =\n  let loc = decl |> declGetLoc in\n  let isToplevelValueWithSideEffects decl =\n    match decl.declKind with\n    | Value {isToplevel; sideEffects} -> isToplevel && sideEffects\n    | _ -> false\n  in\n  let shouldWriteLineAnnotation =\n    (not (isToplevelValueWithSideEffects decl))\n    && Suppress.filter decl.pos\n    && deadWarning <> IncorrectDeadAnnotation\n  in\n  let lineAnnotation =\n    if shouldWriteLineAnnotation then\n      WriteDeadAnnotations.addLineAnnotation ~decl\n    else None\n  in\n  decl.path\n  |> Path.toModuleName ~isType:(decl.declKind |> DeclKind.isType)\n  |> DeadModules.checkModuleDead ~fileName:decl.pos.pos_fname;\n  Log_.warning ~loc\n    (DeadWarning\n       {\n         deadWarning;\n         path = Path.withoutHead decl.path;\n         message;\n         lineAnnotation;\n         shouldWriteLineAnnotation;\n       })\n\nmodule Decl = struct\n  let isValue decl =\n    match decl.declKind with\n    | Value _ (* | Exception *) -> true\n    | _ -> false\n\n  let isToplevelValueWithSideEffects decl =\n    match decl.declKind with\n    | Value {isToplevel; sideEffects} -> isToplevel && sideEffects\n    | _ -> false\n\n  let compareUsingDependencies ~orderedFiles\n      {\n        declKind = kind1;\n        path = _path1;\n        pos =\n          {\n            pos_fname = fname1;\n            pos_lnum = lnum1;\n            pos_bol = bol1;\n            pos_cnum = cnum1;\n          };\n      }\n      {\n        declKind = kind2;\n        path = _path2;\n        pos =\n        
  {\n            pos_fname = fname2;\n            pos_lnum = lnum2;\n            pos_bol = bol2;\n            pos_cnum = cnum2;\n          };\n      } =\n    let findPosition fn = Hashtbl.find orderedFiles fn [@@raises Not_found] in\n    (* From the root of the file dependency DAG to the leaves.\n       From the bottom of the file to the top. *)\n    let position1, position2 =\n      try (fname1 |> findPosition, fname2 |> findPosition)\n      with Not_found -> (0, 0)\n    in\n    compare\n      (position1, lnum2, bol2, cnum2, kind1)\n      (position2, lnum1, bol1, cnum1, kind2)\n\n  let compareForReporting\n      {\n        declKind = kind1;\n        pos =\n          {\n            pos_fname = fname1;\n            pos_lnum = lnum1;\n            pos_bol = bol1;\n            pos_cnum = cnum1;\n          };\n      }\n      {\n        declKind = kind2;\n        pos =\n          {\n            pos_fname = fname2;\n            pos_lnum = lnum2;\n            pos_bol = bol2;\n            pos_cnum = cnum2;\n          };\n      } =\n    compare\n      (fname1, lnum1, bol1, cnum1, kind1)\n      (fname2, lnum2, bol2, cnum2, kind2)\n\n  let isInsideReportedValue decl =\n    let fileHasChanged =\n      !Current.maxValuePosEnd.pos_fname <> decl.pos.pos_fname\n    in\n    let insideReportedValue =\n      decl |> isValue && (not fileHasChanged)\n      && !Current.maxValuePosEnd.pos_cnum > decl.pos.pos_cnum\n    in\n    if not insideReportedValue then\n      if decl |> isValue then\n        if\n          fileHasChanged\n          || decl.posEnd.pos_cnum > !Current.maxValuePosEnd.pos_cnum\n        then Current.maxValuePosEnd := decl.posEnd;\n    insideReportedValue\n\n  let report decl =\n    let insideReportedValue = decl |> isInsideReportedValue in\n    if decl.report then\n      let name, message =\n        match decl.declKind with\n        | Exception ->\n          (WarningDeadException, \"is never raised or passed as value\")\n        | Value {sideEffects} -> (\n          let 
noSideEffectsOrUnderscore =\n            (not sideEffects)\n            ||\n            match decl.path with\n            | hd :: _ -> hd |> Name.startsWithUnderscore\n            | [] -> false\n          in\n          ( (match not noSideEffectsOrUnderscore with\n            | true -> WarningDeadValueWithSideEffects\n            | false -> WarningDeadValue),\n            match decl.path with\n            | name :: _ when name |> Name.isUnderscore ->\n              \"has no side effects and can be removed\"\n            | _ -> (\n              \"is never used\"\n              ^\n              match not noSideEffectsOrUnderscore with\n              | true -> \" and could have side effects\"\n              | false -> \"\") ))\n        | RecordLabel ->\n          (WarningDeadType, \"is a record label never used to read a value\")\n        | VariantCase ->\n          (WarningDeadType, \"is a variant case which is never constructed\")\n      in\n      let hasRefBelow () =\n        let refs = ValueReferences.find decl.pos in\n        let refIsBelow (pos : Lexing.position) =\n          decl.pos.pos_fname <> pos.pos_fname\n          || decl.pos.pos_cnum < pos.pos_cnum\n             &&\n             (* not a function defined inside a function, e.g. 
not a callback *)\n             decl.posEnd.pos_cnum < pos.pos_cnum\n        in\n        refs |> PosSet.exists refIsBelow\n      in\n      let shouldEmitWarning =\n        (not insideReportedValue)\n        && (match decl.path with\n           | name :: _ when name |> Name.isUnderscore -> Config.reportUnderscore\n           | _ -> true)\n        && (runConfig.transitive || not (hasRefBelow ()))\n      in\n      if shouldEmitWarning then (\n        decl.path\n        |> Path.toModuleName ~isType:(decl.declKind |> DeclKind.isType)\n        |> DeadModules.checkModuleDead ~fileName:decl.pos.pos_fname;\n        emitWarning ~decl ~message name)\nend\n\nlet declIsDead ~refs decl =\n  let liveRefs =\n    refs\n    |> PosSet.filter (fun p -> not (ProcessDeadAnnotations.isAnnotatedDead p))\n  in\n  liveRefs |> PosSet.cardinal = 0\n  && not (ProcessDeadAnnotations.isAnnotatedGenTypeOrLive decl.pos)\n\nlet doReportDead pos = not (ProcessDeadAnnotations.isAnnotatedGenTypeOrDead pos)\n\nlet rec resolveRecursiveRefs ~checkOptionalArg ~deadDeclarations ~level\n    ~orderedFiles ~refs ~refsBeingResolved decl : bool =\n  match decl.pos with\n  | _ when decl.resolvedDead <> None ->\n    if Config.recursiveDebug then\n      Log_.item \"recursiveDebug %s [%d] already resolved@.\"\n        (decl.path |> Path.toString)\n        level;\n    decl.pos |> ProcessDeadAnnotations.isAnnotatedDead\n  | _ when PosSet.mem decl.pos !refsBeingResolved ->\n    if Config.recursiveDebug then\n      Log_.item \"recursiveDebug %s [%d] is being resolved: assume dead@.\"\n        (decl.path |> Path.toString)\n        level;\n    true\n  | _ ->\n    if Config.recursiveDebug then\n      Log_.item \"recursiveDebug resolving %s [%d]@.\"\n        (decl.path |> Path.toString)\n        level;\n    refsBeingResolved := PosSet.add decl.pos !refsBeingResolved;\n    let allDepsResolved = ref true in\n    let newRefs =\n      refs\n      |> PosSet.filter (fun pos ->\n             if pos = decl.pos then (\n             
  if Config.recursiveDebug then\n                 Log_.item \"recursiveDebug %s ignoring reference to self@.\"\n                   (decl.path |> Path.toString);\n               false)\n             else\n               match PosHash.find_opt decls pos with\n               | None ->\n                 if Config.recursiveDebug then\n                   Log_.item \"recursiveDebug can't find decl for %s@.\"\n                     (pos |> posToString);\n                 true\n               | Some xDecl ->\n                 let xRefs =\n                   match xDecl.declKind |> DeclKind.isType with\n                   | true -> TypeReferences.find pos\n                   | false -> ValueReferences.find pos\n                 in\n                 let xDeclIsDead =\n                   xDecl\n                   |> resolveRecursiveRefs ~checkOptionalArg ~deadDeclarations\n                        ~level:(level + 1) ~orderedFiles ~refs:xRefs\n                        ~refsBeingResolved\n                 in\n                 if xDecl.resolvedDead = None then allDepsResolved := false;\n                 not xDeclIsDead)\n    in\n    let isDead = decl |> declIsDead ~refs:newRefs in\n    let isResolved = (not isDead) || !allDepsResolved || level = 0 in\n    if isResolved then (\n      decl.resolvedDead <- Some isDead;\n      if isDead then (\n        decl.path\n        |> DeadModules.markDead\n             ~isType:(decl.declKind |> DeclKind.isType)\n             ~loc:decl.moduleLoc;\n        if not (decl.pos |> doReportDead) then decl.report <- false;\n        deadDeclarations := decl :: !deadDeclarations;\n        if not (Decl.isToplevelValueWithSideEffects decl) then\n          decl.pos |> ProcessDeadAnnotations.annotateDead)\n      else (\n        checkOptionalArg decl;\n        decl.path\n        |> DeadModules.markLive\n             ~isType:(decl.declKind |> DeclKind.isType)\n             ~loc:decl.moduleLoc;\n        if decl.pos |> ProcessDeadAnnotations.isAnnotatedDead then\n   
       emitWarning ~decl ~message:\" is annotated @dead but is live\"\n            IncorrectDeadAnnotation);\n      if !Cli.debug then\n        let refsString =\n          newRefs |> PosSet.elements |> List.map posToString\n          |> String.concat \", \"\n        in\n        Log_.item \"%s %s %s: %d references (%s) [%d]@.\"\n          (match isDead with\n          | true -> \"Dead\"\n          | false -> \"Live\")\n          (decl.declKind |> DeclKind.toString)\n          (decl.path |> Path.toString)\n          (newRefs |> PosSet.cardinal)\n          refsString level);\n    isDead\n\nlet reportDead ~checkOptionalArg =\n  let iterDeclInOrder ~deadDeclarations ~orderedFiles decl =\n    let refs =\n      match decl |> Decl.isValue with\n      | true -> ValueReferences.find decl.pos\n      | false -> TypeReferences.find decl.pos\n    in\n    resolveRecursiveRefs ~checkOptionalArg ~deadDeclarations ~level:0\n      ~orderedFiles ~refsBeingResolved:(ref PosSet.empty) ~refs decl\n    |> ignore\n  in\n  if !Cli.debug then (\n    Log_.item \"@.File References@.@.\";\n    let fileList = ref [] in\n    FileReferences.iter (fun file files ->\n        fileList := (file, files) :: !fileList);\n    !fileList\n    |> List.sort (fun (f1, _) (f2, _) -> String.compare f1 f2)\n    |> List.iter (fun (file, files) ->\n           Log_.item \"%s -->> %s@.\"\n             (file |> Filename.basename)\n             (files |> FileSet.elements |> List.map Filename.basename\n            |> String.concat \", \")));\n  let declarations =\n    PosHash.fold (fun _pos decl declarations -> decl :: declarations) decls []\n  in\n  let orderedFiles = Hashtbl.create 256 in\n  iterFilesFromRootsToLeaves\n    (let current = ref 0 in\n     fun fileName ->\n       incr current;\n       Hashtbl.add orderedFiles fileName !current);\n  let orderedDeclarations =\n    (* analyze in reverse order *)\n    declarations |> List.fast_sort (Decl.compareUsingDependencies ~orderedFiles)\n  in\n  let deadDeclarations = 
ref [] in\n  orderedDeclarations\n  |> List.iter (iterDeclInOrder ~orderedFiles ~deadDeclarations);\n  let sortedDeadDeclarations =\n    !deadDeclarations |> List.fast_sort Decl.compareForReporting\n  in\n  (* XXX *)\n  sortedDeadDeclarations |> List.iter Decl.report\n"
  },
  {
    "path": "analysis/reanalyze/src/DeadException.ml",
    "content": "open DeadCommon\nopen Common\n\ntype item = {exceptionPath: Path.t; locFrom: Location.t}\n\nlet delayedItems = ref []\nlet declarations = Hashtbl.create 1\n\nlet add ~path ~loc ~(strLoc : Location.t) name =\n  let exceptionPath = name :: path in\n  Hashtbl.add declarations exceptionPath loc;\n  name\n  |> addDeclaration_ ~posEnd:strLoc.loc_end ~posStart:strLoc.loc_start\n       ~declKind:Exception ~moduleLoc:(ModulePath.getCurrent ()).loc ~path ~loc\n\nlet forceDelayedItems () =\n  let items = !delayedItems |> List.rev in\n  delayedItems := [];\n  items\n  |> List.iter (fun {exceptionPath; locFrom} ->\n         match Hashtbl.find_opt declarations exceptionPath with\n         | None -> ()\n         | Some locTo ->\n           addValueReference ~addFileReference:true ~locFrom ~locTo)\n\nlet markAsUsed ~(locFrom : Location.t) ~(locTo : Location.t) path_ =\n  if locTo.loc_ghost then\n    (* Probably defined in another file, delay processing and check at the end *)\n    let exceptionPath =\n      path_ |> Path.fromPathT |> Path.moduleToImplementation\n    in\n    delayedItems := {exceptionPath; locFrom} :: !delayedItems\n  else addValueReference ~addFileReference:true ~locFrom ~locTo\n"
  },
  {
    "path": "analysis/reanalyze/src/DeadModules.ml",
    "content": "let active () =\n  (* When transitive reporting is off, the only dead modules would be empty modules *)\n  RunConfig.runConfig.transitive\n\nlet table = Hashtbl.create 1\n\nlet markDead ~isType ~loc path =\n  if active () then\n    let moduleName = path |> Common.Path.toModuleName ~isType in\n    match Hashtbl.find_opt table moduleName with\n    | Some _ -> ()\n    | _ -> Hashtbl.replace table moduleName (false, loc)\n\nlet markLive ~isType ~(loc : Location.t) path =\n  if active () then\n    let moduleName = path |> Common.Path.toModuleName ~isType in\n    match Hashtbl.find_opt table moduleName with\n    | None -> Hashtbl.replace table moduleName (true, loc)\n    | Some (false, loc) -> Hashtbl.replace table moduleName (true, loc)\n    | Some (true, _) -> ()\n\nlet checkModuleDead ~fileName:pos_fname moduleName =\n  if active () then\n    match Hashtbl.find_opt table moduleName with\n    | Some (false, loc) ->\n      Hashtbl.remove table moduleName;\n      (* only report once *)\n      let loc =\n        if loc.loc_ghost then\n          let pos =\n            {Lexing.pos_fname; pos_lnum = 0; pos_bol = 0; pos_cnum = 0}\n          in\n          {Location.loc_start = pos; loc_end = pos; loc_ghost = false}\n        else loc\n      in\n      Log_.warning ~loc\n        (Common.DeadModule\n           {\n             message =\n               Format.asprintf \"@{<info>%s@} %s\"\n                 (moduleName |> Name.toInterface |> Name.toString)\n                 \"is a dead module as all its items are dead.\";\n           })\n    | _ -> ()\n"
  },
  {
    "path": "analysis/reanalyze/src/DeadOptionalArgs.ml",
    "content": "open DeadCommon\nopen Common\n\nlet active () = true\n\ntype item = {\n  posTo: Lexing.position;\n  argNames: string list;\n  argNamesMaybe: string list;\n}\n\nlet delayedItems = (ref [] : item list ref)\nlet functionReferences = (ref [] : (Lexing.position * Lexing.position) list ref)\n\nlet addFunctionReference ~(locFrom : Location.t) ~(locTo : Location.t) =\n  if active () then\n    let posTo = locTo.loc_start in\n    let posFrom = locFrom.loc_start in\n    let shouldAdd =\n      match PosHash.find_opt decls posTo with\n      | Some {declKind = Value {optionalArgs}} ->\n        not (OptionalArgs.isEmpty optionalArgs)\n      | _ -> false\n    in\n    if shouldAdd then (\n      if !Common.Cli.debug then\n        Log_.item \"OptionalArgs.addFunctionReference %s %s@.\"\n          (posFrom |> posToString) (posTo |> posToString);\n      functionReferences := (posFrom, posTo) :: !functionReferences)\n\nlet rec hasOptionalArgs (texpr : Types.type_expr) =\n  match texpr.desc with\n  | _ when not (active ()) -> false\n  | Tarrow (Optional _, _tFrom, _tTo, _) -> true\n  | Tarrow (_, _tFrom, tTo, _) -> hasOptionalArgs tTo\n  | Tlink t -> hasOptionalArgs t\n  | Tsubst t -> hasOptionalArgs t\n  | _ -> false\n\nlet rec fromTypeExpr (texpr : Types.type_expr) =\n  match texpr.desc with\n  | _ when not (active ()) -> []\n  | Tarrow (Optional s, _tFrom, tTo, _) -> s :: fromTypeExpr tTo\n  | Tarrow (_, _tFrom, tTo, _) -> fromTypeExpr tTo\n  | Tlink t -> fromTypeExpr t\n  | Tsubst t -> fromTypeExpr t\n  | _ -> []\n\nlet addReferences ~(locFrom : Location.t) ~(locTo : Location.t) ~path\n    (argNames, argNamesMaybe) =\n  if active () then (\n    let posTo = locTo.loc_start in\n    let posFrom = locFrom.loc_start in\n    delayedItems := {posTo; argNames; argNamesMaybe} :: !delayedItems;\n    if !Common.Cli.debug then\n      Log_.item\n        \"DeadOptionalArgs.addReferences %s called with optional argNames:%s \\\n         argNamesMaybe:%s %s@.\"\n        (path |> 
Path.fromPathT |> Path.toString)\n        (argNames |> String.concat \", \")\n        (argNamesMaybe |> String.concat \", \")\n        (posFrom |> posToString))\n\nlet forceDelayedItems () =\n  let items = !delayedItems |> List.rev in\n  delayedItems := [];\n  items\n  |> List.iter (fun {posTo; argNames; argNamesMaybe} ->\n         match PosHash.find_opt decls posTo with\n         | Some {declKind = Value r} ->\n           r.optionalArgs |> OptionalArgs.call ~argNames ~argNamesMaybe\n         | _ -> ());\n  let fRefs = !functionReferences |> List.rev in\n  functionReferences := [];\n  fRefs\n  |> List.iter (fun (posFrom, posTo) ->\n         match\n           (PosHash.find_opt decls posFrom, PosHash.find_opt decls posTo)\n         with\n         | Some {declKind = Value rFrom}, Some {declKind = Value rTo} ->\n           OptionalArgs.combine rFrom.optionalArgs rTo.optionalArgs\n         | _ -> ())\n\nlet check decl =\n  match decl with\n  | {declKind = Value {optionalArgs}}\n    when active ()\n         && not (ProcessDeadAnnotations.isAnnotatedGenTypeOrLive decl.pos) ->\n    optionalArgs\n    |> OptionalArgs.iterUnused (fun s ->\n           Log_.warning ~loc:(decl |> declGetLoc)\n             (DeadOptional\n                {\n                  deadOptional = WarningUnusedArgument;\n                  message =\n                    Format.asprintf\n                      \"optional argument @{<info>%s@} of function @{<info>%s@} \\\n                       is never used\"\n                      s\n                      (decl.path |> Path.withoutHead);\n                }));\n    optionalArgs\n    |> OptionalArgs.iterAlwaysUsed (fun s nCalls ->\n           Log_.warning ~loc:(decl |> declGetLoc)\n             (DeadOptional\n                {\n                  deadOptional = WarningRedundantOptionalArgument;\n                  message =\n                    Format.asprintf\n                      \"optional argument @{<info>%s@} of function @{<info>%s@} \\\n                  
     is always supplied (%d calls)\"\n                      s\n                      (decl.path |> Path.withoutHead)\n                      nCalls;\n                }))\n  | _ -> ()\n"
  },
  {
    "path": "analysis/reanalyze/src/DeadType.ml",
    "content": "(* Adapted from https://github.com/LexiFi/dead_code_analyzer *)\n\nopen Common\nopen DeadCommon\n\nmodule TypeLabels = struct\n  (* map from type path (for record/variant label) to its location *)\n\n  let table = (Hashtbl.create 256 : (Path.t, Location.t) Hashtbl.t)\n  let add path loc = Hashtbl.replace table path loc\n  let find path = Hashtbl.find_opt table path\nend\n\nlet addTypeReference ~posFrom ~posTo =\n  if !Common.Cli.debug then\n    Log_.item \"addTypeReference %s --> %s@.\" (posFrom |> posToString)\n      (posTo |> posToString);\n  TypeReferences.add posTo posFrom\n\nmodule TypeDependencies = struct\n  let delayedItems = ref []\n  let add loc1 loc2 = delayedItems := (loc1, loc2) :: !delayedItems\n  let clear () = delayedItems := []\n\n  let processTypeDependency\n      ( ({loc_start = posTo; loc_ghost = ghost1} : Location.t),\n        ({loc_start = posFrom; loc_ghost = ghost2} : Location.t) ) =\n    if (not ghost1) && (not ghost2) && posTo <> posFrom then\n      addTypeReference ~posTo ~posFrom\n\n  let forceDelayedItems () = List.iter processTypeDependency !delayedItems\nend\n\nlet extendTypeDependencies (loc1 : Location.t) (loc2 : Location.t) =\n  if loc1.loc_start <> loc2.loc_start then (\n    if !Common.Cli.debug then\n      Log_.item \"extendTypeDependencies %s --> %s@.\"\n        (loc1.loc_start |> posToString)\n        (loc2.loc_start |> posToString);\n    TypeDependencies.add loc1 loc2)\n\n(* Type dependencies between Foo.re and Foo.rei *)\nlet addTypeDependenciesAcrossFiles ~pathToType ~loc ~typeLabelName =\n  let isInterface = Filename.check_suffix !Common.currentSrc \"i\" in\n  if not isInterface then (\n    let path_1 = pathToType |> Path.moduleToInterface in\n    let path_2 = path_1 |> Path.typeToInterface in\n    let path1 = typeLabelName :: path_1 in\n    let path2 = typeLabelName :: path_2 in\n    match TypeLabels.find path1 with\n    | None -> (\n      match TypeLabels.find path2 with\n      | None -> ()\n      | Some 
loc2 ->\n        extendTypeDependencies loc loc2;\n        if not Config.reportTypesDeadOnlyInInterface then\n          extendTypeDependencies loc2 loc)\n    | Some loc1 ->\n      extendTypeDependencies loc loc1;\n      if not Config.reportTypesDeadOnlyInInterface then\n        extendTypeDependencies loc1 loc)\n  else\n    let path_1 = pathToType |> Path.moduleToImplementation in\n    let path1 = typeLabelName :: path_1 in\n    match TypeLabels.find path1 with\n    | None -> ()\n    | Some loc1 ->\n      extendTypeDependencies loc1 loc;\n      if not Config.reportTypesDeadOnlyInInterface then\n        extendTypeDependencies loc loc1\n\n(* Add type dependencies between implementation and interface in inner module *)\nlet addTypeDependenciesInnerModule ~pathToType ~loc ~typeLabelName =\n  let path = typeLabelName :: pathToType in\n  match TypeLabels.find path with\n  | Some loc2 ->\n    extendTypeDependencies loc loc2;\n    if not Config.reportTypesDeadOnlyInInterface then\n      extendTypeDependencies loc2 loc\n  | None -> TypeLabels.add path loc\n\nlet addDeclaration ~(typeId : Ident.t) ~(typeKind : Types.type_kind) =\n  let currentModulePath = ModulePath.getCurrent () in\n  let pathToType =\n    (typeId |> Ident.name |> Name.create)\n    :: (currentModulePath.path @ [!Common.currentModuleName])\n  in\n  let processTypeLabel ?(posAdjustment = Nothing) typeLabelName ~declKind\n      ~(loc : Location.t) =\n    addDeclaration_ ~declKind ~path:pathToType ~loc\n      ~moduleLoc:currentModulePath.loc ~posAdjustment typeLabelName;\n    addTypeDependenciesAcrossFiles ~pathToType ~loc ~typeLabelName;\n    addTypeDependenciesInnerModule ~pathToType ~loc ~typeLabelName;\n    TypeLabels.add (typeLabelName :: pathToType) loc\n  in\n  match typeKind with\n  | Type_record (l, _) ->\n    List.iter\n      (fun {Types.ld_id; ld_loc} ->\n        Ident.name ld_id |> Name.create\n        |> processTypeLabel ~declKind:RecordLabel ~loc:ld_loc)\n      l\n  | Type_variant decls ->\n    
List.iteri\n      (fun i {Types.cd_id; cd_loc; cd_args} ->\n        let _handle_inline_records =\n          match cd_args with\n          | Cstr_record lbls ->\n            List.iter\n              (fun {Types.ld_id; ld_loc} ->\n                Ident.name cd_id ^ \".\" ^ Ident.name ld_id\n                |> Name.create\n                |> processTypeLabel ~declKind:RecordLabel ~loc:ld_loc)\n              lbls\n          | Cstr_tuple _ -> ()\n        in\n        let posAdjustment =\n          (* In Res the variant loc can include the | and spaces after it *)\n          if WriteDeadAnnotations.posLanguage cd_loc.loc_start = Res then\n            if i = 0 then FirstVariant else OtherVariant\n          else Nothing\n        in\n        Ident.name cd_id |> Name.create\n        |> processTypeLabel ~declKind:VariantCase ~loc:cd_loc ~posAdjustment)\n      decls\n  | _ -> ()\n"
  },
  {
    "path": "analysis/reanalyze/src/DeadValue.ml",
    "content": "(* Adapted from https://github.com/LexiFi/dead_code_analyzer *)\n\nopen DeadCommon\n\nlet checkAnyValueBindingWithNoSideEffects\n    ({vb_pat = {pat_desc}; vb_expr = expr; vb_loc = loc} :\n      Typedtree.value_binding) =\n  match pat_desc with\n  | Tpat_any when (not (SideEffects.checkExpr expr)) && not loc.loc_ghost ->\n    let name = \"_\" |> Name.create ~isInterface:false in\n    let currentModulePath = ModulePath.getCurrent () in\n    let path = currentModulePath.path @ [!Common.currentModuleName] in\n    name\n    |> addValueDeclaration ~path ~loc ~moduleLoc:currentModulePath.loc\n         ~sideEffects:false\n  | _ -> ()\n\nlet collectValueBinding super self (vb : Typedtree.value_binding) =\n  let oldCurrentBindings = !Current.bindings in\n  let oldLastBinding = !Current.lastBinding in\n  checkAnyValueBindingWithNoSideEffects vb;\n  let loc =\n    match vb.vb_pat.pat_desc with\n    | Tpat_var (id, {loc = {loc_start; loc_ghost} as loc})\n    | Tpat_alias\n        ({pat_desc = Tpat_any}, id, {loc = {loc_start; loc_ghost} as loc})\n      when (not loc_ghost) && not vb.vb_loc.loc_ghost ->\n      let name = Ident.name id |> Name.create ~isInterface:false in\n      let optionalArgs =\n        vb.vb_expr.exp_type |> DeadOptionalArgs.fromTypeExpr\n        |> Common.OptionalArgs.fromList\n      in\n      let exists =\n        match PosHash.find_opt decls loc_start with\n        | Some {declKind = Value r} ->\n          r.optionalArgs <- optionalArgs;\n          true\n        | _ -> false\n      in\n      let currentModulePath = ModulePath.getCurrent () in\n      let path = currentModulePath.path @ [!Common.currentModuleName] in\n      let isFirstClassModule =\n        match vb.vb_expr.exp_type.desc with\n        | Tpackage _ -> true\n        | _ -> false\n      in\n      (if (not exists) && not isFirstClassModule then\n         (* This is never toplevel currently *)\n         let isToplevel = oldLastBinding = Location.none in\n         let sideEffects 
= SideEffects.checkExpr vb.vb_expr in\n         name\n         |> addValueDeclaration ~isToplevel ~loc\n              ~moduleLoc:currentModulePath.loc ~optionalArgs ~path ~sideEffects);\n      (match PosHash.find_opt decls loc_start with\n      | None -> ()\n      | Some decl ->\n        (* Value bindings contain the correct location for the entire declaration: update final position.\n           The previous value was taken from the signature, which only has positions for the id. *)\n        let declKind =\n          match decl.declKind with\n          | Value vk ->\n            Common.DeclKind.Value\n              {vk with sideEffects = SideEffects.checkExpr vb.vb_expr}\n          | dk -> dk\n        in\n        PosHash.replace decls loc_start\n          {\n            decl with\n            declKind;\n            posEnd = vb.vb_loc.loc_end;\n            posStart = vb.vb_loc.loc_start;\n          });\n      loc\n    | _ -> !Current.lastBinding\n  in\n  Current.bindings := PosSet.add loc.loc_start !Current.bindings;\n  Current.lastBinding := loc;\n  let r = super.Tast_mapper.value_binding self vb in\n  Current.bindings := oldCurrentBindings;\n  Current.lastBinding := oldLastBinding;\n  r\n\nlet processOptionalArgs ~expType ~(locFrom : Location.t) ~locTo ~path args =\n  if expType |> DeadOptionalArgs.hasOptionalArgs then (\n    let supplied = ref [] in\n    let suppliedMaybe = ref [] in\n    args\n    |> List.iter (fun (lbl, arg) ->\n           let argIsSupplied =\n             match arg with\n             | Some\n                 {\n                   Typedtree.exp_desc =\n                     Texp_construct (_, {cstr_name = \"Some\"}, _);\n                 } ->\n               Some true\n             | Some\n                 {\n                   Typedtree.exp_desc =\n                     Texp_construct (_, {cstr_name = \"None\"}, _);\n                 } ->\n               Some false\n             | Some _ -> None\n             | None -> Some false\n           
in\n           match lbl with\n           | Asttypes.Optional s when not locFrom.loc_ghost ->\n             if argIsSupplied <> Some false then supplied := s :: !supplied;\n             if argIsSupplied = None then suppliedMaybe := s :: !suppliedMaybe\n           | _ -> ());\n    (!supplied, !suppliedMaybe)\n    |> DeadOptionalArgs.addReferences ~locFrom ~locTo ~path)\n\nlet rec collectExpr super self (e : Typedtree.expression) =\n  let locFrom = e.exp_loc in\n  (match e.exp_desc with\n  | Texp_ident (_path, _, {Types.val_loc = {loc_ghost = false; _} as locTo}) ->\n    (* if Path.name _path = \"rc\" then assert false; *)\n    if locFrom = locTo && _path |> Path.name = \"emptyArray\" then (\n      (* Work around lowercase jsx with no children producing an artifact `emptyArray`\n         which is called from its own location as many things are generated on the same location. *)\n      if !Common.Cli.debug then\n        Log_.item \"addDummyReference %s --> %s@.\"\n          (Location.none.loc_start |> Common.posToString)\n          (locTo.loc_start |> Common.posToString);\n      ValueReferences.add locTo.loc_start Location.none.loc_start)\n    else addValueReference ~addFileReference:true ~locFrom ~locTo\n  | Texp_apply\n      ( {\n          exp_desc =\n            Texp_ident\n              (path, _, {Types.val_loc = {loc_ghost = false; _} as locTo});\n          exp_type;\n        },\n        args ) ->\n    args\n    |> processOptionalArgs ~expType:exp_type\n         ~locFrom:(locFrom : Location.t)\n         ~locTo ~path\n  | Texp_let\n      ( (* generated for functions with optional args *)\n        Nonrecursive,\n        [\n          {\n            vb_pat = {pat_desc = Tpat_var (idArg, _)};\n            vb_expr =\n              {\n                exp_desc =\n                  Texp_ident\n                    (path, _, {Types.val_loc = {loc_ghost = false; _} as locTo});\n                exp_type;\n              };\n          };\n        ],\n        {\n          
exp_desc =\n            Texp_function\n              {\n                cases =\n                  [\n                    {\n                      c_lhs = {pat_desc = Tpat_var (etaArg, _)};\n                      c_rhs =\n                        {\n                          exp_desc =\n                            Texp_apply\n                              ({exp_desc = Texp_ident (idArg2, _, _)}, args);\n                        };\n                    };\n                  ];\n              };\n        } )\n    when Ident.name idArg = \"arg\"\n         && Ident.name etaArg = \"eta\"\n         && Path.name idArg2 = \"arg\" ->\n    args\n    |> processOptionalArgs ~expType:exp_type\n         ~locFrom:(locFrom : Location.t)\n         ~locTo ~path\n  | Texp_field\n      (_, _, {lbl_loc = {Location.loc_start = posTo; loc_ghost = false}; _}) ->\n    if !Config.analyzeTypes then\n      DeadType.addTypeReference ~posTo ~posFrom:locFrom.loc_start\n  | Texp_construct\n      ( _,\n        {cstr_loc = {Location.loc_start = posTo; loc_ghost} as locTo; cstr_tag},\n        _ ) ->\n    (match cstr_tag with\n    | Cstr_extension (path, _) ->\n      path |> DeadException.markAsUsed ~locFrom ~locTo\n    | _ -> ());\n    if !Config.analyzeTypes && not loc_ghost then\n      DeadType.addTypeReference ~posTo ~posFrom:locFrom.loc_start\n  | Texp_record {fields} ->\n    fields\n    |> Array.iter (fun (_, record_label_definition) ->\n           match record_label_definition with\n           | Typedtree.Overridden (_, ({exp_loc} as e)) when exp_loc.loc_ghost\n             ->\n             (* Punned field in OCaml projects has ghost location in expression *)\n             let e = {e with exp_loc = {exp_loc with loc_ghost = false}} in\n             collectExpr super self e |> ignore\n           | _ -> ())\n  | _ -> ());\n  super.Tast_mapper.expr self e\n\n(*\n  type k. 
is a locally abstract type\n  https://caml.inria.fr/pub/docs/manual-ocaml/locallyabstract.html\n  it is required because in ocaml >= 4.11 Typedtree.pattern and ADT is converted\n  in a GADT\n  https://github.com/ocaml/ocaml/commit/312253ce822c32740349e572498575cf2a82ee96\n  in short: all branches of pattern matches aren't the same type.\n  With this annotation we declare a new type for each branch to allow the\n  function to be typed.\n  *)\nlet collectPattern : _ -> _ -> Typedtree.pattern -> Typedtree.pattern =\n fun super self pat ->\n  let posFrom = pat.Typedtree.pat_loc.loc_start in\n  (match pat.pat_desc with\n  | Typedtree.Tpat_record (cases, _clodsedFlag) ->\n    cases\n    |> List.iter (fun (_loc, {Types.lbl_loc = {loc_start = posTo}}, _pat) ->\n           if !Config.analyzeTypes then\n             DeadType.addTypeReference ~posFrom ~posTo)\n  | _ -> ());\n  super.Tast_mapper.pat self pat\n\nlet rec getSignature (moduleType : Types.module_type) =\n  match moduleType with\n  | Mty_signature signature -> signature\n  | Mty_functor (_, _mtParam, mt) -> getSignature mt\n  | _ -> []\n\nlet rec processSignatureItem ~doTypes ~doValues ~moduleLoc ~path\n    (si : Types.signature_item) =\n  let oldModulePath = ModulePath.getCurrent () in\n  (match si with\n  | Sig_type (id, t, _) when doTypes ->\n    if !Config.analyzeTypes then\n      DeadType.addDeclaration ~typeId:id ~typeKind:t.type_kind\n  | Sig_value (id, {Types.val_loc = loc; val_kind = kind; val_type})\n    when doValues ->\n    if not loc.Location.loc_ghost then\n      let isPrimitive =\n        match kind with\n        | Val_prim _ -> true\n        | _ -> false\n      in\n      if (not isPrimitive) || !Config.analyzeExternals then\n        let optionalArgs =\n          val_type |> DeadOptionalArgs.fromTypeExpr\n          |> Common.OptionalArgs.fromList\n        in\n\n        (* if Ident.name id = \"someValue\" then\n           Printf.printf \"XXX %s\\n\" (Ident.name id); *)\n        Ident.name id\n        
|> Name.create ~isInterface:false\n        |> addValueDeclaration ~loc ~moduleLoc ~optionalArgs ~path\n             ~sideEffects:false\n  | Sig_module (id, {Types.md_type = moduleType; md_loc = moduleLoc}, _)\n  | Sig_modtype (id, {Types.mtd_type = Some moduleType; mtd_loc = moduleLoc}) ->\n    ModulePath.setCurrent\n      {\n        oldModulePath with\n        loc = moduleLoc;\n        path = (id |> Ident.name |> Name.create) :: oldModulePath.path;\n      };\n    let collect =\n      match si with\n      | Sig_modtype _ -> false\n      | _ -> true\n    in\n    if collect then\n      getSignature moduleType\n      |> List.iter\n           (processSignatureItem ~doTypes ~doValues ~moduleLoc\n              ~path:((id |> Ident.name |> Name.create) :: path))\n  | _ -> ());\n  ModulePath.setCurrent oldModulePath\n\n(* Traverse the AST *)\nlet traverseStructure ~doTypes ~doExternals =\n  let super = Tast_mapper.default in\n  let expr self e = e |> collectExpr super self in\n  let pat self p = p |> collectPattern super self in\n  let value_binding self vb = vb |> collectValueBinding super self in\n  let structure_item self (structureItem : Typedtree.structure_item) =\n    let oldModulePath = ModulePath.getCurrent () in\n    (match structureItem.str_desc with\n    | Tstr_module {mb_expr; mb_id; mb_loc} -> (\n      let hasInterface =\n        match mb_expr.mod_desc with\n        | Tmod_constraint _ -> true\n        | _ -> false\n      in\n      ModulePath.setCurrent\n        {\n          oldModulePath with\n          loc = mb_loc;\n          path = (mb_id |> Ident.name |> Name.create) :: oldModulePath.path;\n        };\n      if hasInterface then\n        match mb_expr.mod_type with\n        | Mty_signature signature ->\n          signature\n          |> List.iter\n               (processSignatureItem ~doTypes ~doValues:false\n                  ~moduleLoc:mb_expr.mod_loc\n                  ~path:\n                    ((ModulePath.getCurrent ()).path\n                    @ 
[!Common.currentModuleName]))\n        | _ -> ())\n    | Tstr_primitive vd when doExternals && !Config.analyzeExternals ->\n      let currentModulePath = ModulePath.getCurrent () in\n      let path = currentModulePath.path @ [!Common.currentModuleName] in\n      let exists =\n        match PosHash.find_opt decls vd.val_loc.loc_start with\n        | Some {declKind = Value _} -> true\n        | _ -> false\n      in\n      let id = vd.val_id |> Ident.name in\n      Printf.printf \"Primitive %s\\n\" id;\n      if\n        (not exists) && id <> \"unsafe_expr\"\n        (* see https://github.com/BuckleScript/bucklescript/issues/4532 *)\n      then\n        id\n        |> Name.create ~isInterface:false\n        |> addValueDeclaration ~path ~loc:vd.val_loc\n             ~moduleLoc:currentModulePath.loc ~sideEffects:false\n    | Tstr_type (_recFlag, typeDeclarations) when doTypes ->\n      if !Config.analyzeTypes then\n        typeDeclarations\n        |> List.iter (fun (typeDeclaration : Typedtree.type_declaration) ->\n               DeadType.addDeclaration ~typeId:typeDeclaration.typ_id\n                 ~typeKind:typeDeclaration.typ_type.type_kind)\n    | Tstr_include {incl_mod; incl_type} -> (\n      match incl_mod.mod_desc with\n      | Tmod_ident (_path, _lid) ->\n        let currentPath =\n          (ModulePath.getCurrent ()).path @ [!Common.currentModuleName]\n        in\n        incl_type\n        |> List.iter\n             (processSignatureItem ~doTypes\n                ~doValues:false (* TODO: also values? 
*)\n                ~moduleLoc:incl_mod.mod_loc ~path:currentPath)\n      | _ -> ())\n    | Tstr_exception {ext_id = id; ext_loc = loc} ->\n      let path =\n        (ModulePath.getCurrent ()).path @ [!Common.currentModuleName]\n      in\n      let name = id |> Ident.name |> Name.create in\n      name |> DeadException.add ~path ~loc ~strLoc:structureItem.str_loc\n    | _ -> ());\n    let result = super.structure_item self structureItem in\n    ModulePath.setCurrent oldModulePath;\n    result\n  in\n  {super with expr; pat; structure_item; value_binding}\n\n(* Merge a location's references to another one's *)\nlet processValueDependency\n    ( ({\n         val_loc =\n           {loc_start = {pos_fname = fnTo} as posTo; loc_ghost = ghost1} as\n           locTo;\n       } :\n        Types.value_description),\n      ({\n         val_loc =\n           {loc_start = {pos_fname = fnFrom} as posFrom; loc_ghost = ghost2} as\n           locFrom;\n       } :\n        Types.value_description) ) =\n  if (not ghost1) && (not ghost2) && posTo <> posFrom then (\n    let addFileReference = fileIsImplementationOf fnTo fnFrom in\n    addValueReference ~addFileReference ~locFrom ~locTo;\n    DeadOptionalArgs.addFunctionReference ~locFrom ~locTo)\n\nlet processStructure ~cmt_value_dependencies ~doTypes ~doExternals\n    (structure : Typedtree.structure) =\n  let traverseStructure = traverseStructure ~doTypes ~doExternals in\n  structure |> traverseStructure.structure traverseStructure |> ignore;\n  let valueDependencies = cmt_value_dependencies |> List.rev in\n  valueDependencies |> List.iter processValueDependency\n"
  },
  {
    "path": "analysis/reanalyze/src/EmitJson.ml",
    "content": "let items = ref 0\nlet start () = Printf.printf \"[\"\nlet finish () = Printf.printf \"\\n]\\n\"\nlet emitClose () = \"\\n}\"\n\nlet emitItem ~ppf ~name ~kind ~file ~range ~message =\n  let open Format in\n  items := !items + 1;\n  let startLine, startCharacter, endLine, endCharacter = range in\n  fprintf ppf \"%s{\\n\" (if !items = 1 then \"\\n\" else \",\\n\");\n  fprintf ppf \"  \\\"name\\\": \\\"%s\\\",\\n\" name;\n  fprintf ppf \"  \\\"kind\\\": \\\"%s\\\",\\n\" kind;\n  fprintf ppf \"  \\\"file\\\": \\\"%s\\\",\\n\" file;\n  fprintf ppf \"  \\\"range\\\": [%d,%d,%d,%d],\\n\" startLine startCharacter endLine\n    endCharacter;\n  fprintf ppf \"  \\\"message\\\": \\\"%s\\\"\" message\n\nlet locToPos (loc : Location.t) =\n  (loc.loc_start.pos_lnum - 1, loc.loc_start.pos_cnum - loc.loc_start.pos_bol)\n\nlet emitAnnotate ~pos ~text ~action =\n  let line, character = pos in\n  Format.asprintf\n    \",\\n\\\n    \\  \\\"annotate\\\": { \\\"line\\\": %d, \\\"character\\\": %d, \\\"text\\\": \\\"%s\\\", \\\n     \\\"action\\\": \\\"%s\\\"}\"\n    line character text action\n"
  },
  {
    "path": "analysis/reanalyze/src/Exception.ml",
    "content": "let posToString = Common.posToString\n\nmodule LocSet = Common.LocSet\n\nmodule Values = struct\n  let valueBindingsTable =\n    (Hashtbl.create 15 : (string, (Name.t, Exceptions.t) Hashtbl.t) Hashtbl.t)\n\n  let currentFileTable = ref (Hashtbl.create 1)\n\n  let add ~name exceptions =\n    let path = (name |> Name.create) :: (ModulePath.getCurrent ()).path in\n    Hashtbl.replace !currentFileTable (path |> Common.Path.toName) exceptions\n\n  let getFromModule ~moduleName ~modulePath (path_ : Common.Path.t) =\n    let name = path_ @ modulePath |> Common.Path.toName in\n    match\n      Hashtbl.find_opt valueBindingsTable (String.capitalize_ascii moduleName)\n    with\n    | Some tbl -> Hashtbl.find_opt tbl name\n    | None -> (\n      match\n        Hashtbl.find_opt valueBindingsTable\n          (String.uncapitalize_ascii moduleName)\n      with\n      | Some tbl -> Hashtbl.find_opt tbl name\n      | None -> None)\n\n  let rec findLocal ~moduleName ~modulePath path =\n    match path |> getFromModule ~moduleName ~modulePath with\n    | Some exceptions -> Some exceptions\n    | None -> (\n      match modulePath with\n      | [] -> None\n      | _ :: restModulePath ->\n        path |> findLocal ~moduleName ~modulePath:restModulePath)\n\n  let findPath ~moduleName ~modulePath path =\n    let findExternal ~externalModuleName ~pathRev =\n      pathRev |> List.rev\n      |> getFromModule\n           ~moduleName:(externalModuleName |> Name.toString)\n           ~modulePath:[]\n    in\n    match path |> findLocal ~moduleName ~modulePath with\n    | None -> (\n      (* Search in another file *)\n      match path |> List.rev with\n      | externalModuleName :: pathRev -> (\n        match (findExternal ~externalModuleName ~pathRev, pathRev) with\n        | (Some _ as found), _ -> found\n        | None, externalModuleName2 :: pathRev2\n          when !Common.Cli.cmtCommand && pathRev2 <> [] ->\n          (* Simplistic namespace resolution for dune namespace: 
skip the root of the path *)\n          findExternal ~externalModuleName:externalModuleName2 ~pathRev:pathRev2\n        | None, _ -> None)\n      | [] -> None)\n    | Some exceptions -> Some exceptions\n\n  let newCmt () =\n    currentFileTable := Hashtbl.create 15;\n    Hashtbl.replace valueBindingsTable !Common.currentModule !currentFileTable\nend\n\nmodule Event = struct\n  type kind =\n    | Catches of t list (* with | E => ... *)\n    | Call of {callee: Common.Path.t; modulePath: Common.Path.t} (* foo() *)\n    | DoesNotRaise of\n        t list (* DoesNotRaise(events) where events come from an expression *)\n    | Raises  (** raise E *)\n\n  and t = {exceptions: Exceptions.t; kind: kind; loc: Location.t}\n\n  let rec print ppf event =\n    match event with\n    | {kind = Call {callee; modulePath}; exceptions; loc} ->\n      Format.fprintf ppf \"%s Call(%s, modulePath:%s) %a@.\"\n        (loc.loc_start |> posToString)\n        (callee |> Common.Path.toString)\n        (modulePath |> Common.Path.toString)\n        (Exceptions.pp ~exnTable:None)\n        exceptions\n    | {kind = DoesNotRaise nestedEvents; loc} ->\n      Format.fprintf ppf \"%s DoesNotRaise(%a)@.\"\n        (loc.loc_start |> posToString)\n        (fun ppf () ->\n          nestedEvents |> List.iter (fun e -> Format.fprintf ppf \"%a \" print e))\n        ()\n    | {kind = Raises; exceptions; loc} ->\n      Format.fprintf ppf \"%s raises %a@.\"\n        (loc.loc_start |> posToString)\n        (Exceptions.pp ~exnTable:None)\n        exceptions\n    | {kind = Catches nestedEvents; exceptions; loc} ->\n      Format.fprintf ppf \"%s Catches exceptions:%a nestedEvents:%a@.\"\n        (loc.loc_start |> posToString)\n        (Exceptions.pp ~exnTable:None)\n        exceptions\n        (fun ppf () ->\n          nestedEvents |> List.iter (fun e -> Format.fprintf ppf \"%a \" print e))\n        ()\n\n  let combine ~moduleName events =\n    if !Common.Cli.debug then (\n      Log_.item \"@.\";\n      Log_.item 
\"Events combine: #events %d@.\" (events |> List.length));\n    let exnTable = Hashtbl.create 1 in\n    let extendExnTable exn loc =\n      match Hashtbl.find_opt exnTable exn with\n      | Some locSet -> Hashtbl.replace exnTable exn (LocSet.add loc locSet)\n      | None -> Hashtbl.replace exnTable exn (LocSet.add loc LocSet.empty)\n    in\n    let shrinkExnTable exn loc =\n      match Hashtbl.find_opt exnTable exn with\n      | Some locSet -> Hashtbl.replace exnTable exn (LocSet.remove loc locSet)\n      | None -> ()\n    in\n    let rec loop exnSet events =\n      match events with\n      | ({kind = Raises; exceptions; loc} as ev) :: rest ->\n        if !Common.Cli.debug then Log_.item \"%a@.\" print ev;\n        exceptions |> Exceptions.iter (fun exn -> extendExnTable exn loc);\n        loop (Exceptions.union exnSet exceptions) rest\n      | ({kind = Call {callee; modulePath}; loc} as ev) :: rest ->\n        if !Common.Cli.debug then Log_.item \"%a@.\" print ev;\n        let exceptions =\n          match callee |> Values.findPath ~moduleName ~modulePath with\n          | Some exceptions -> exceptions\n          | _ -> (\n            match ExnLib.find callee with\n            | Some exceptions -> exceptions\n            | None -> Exceptions.empty)\n        in\n        exceptions |> Exceptions.iter (fun exn -> extendExnTable exn loc);\n        loop (Exceptions.union exnSet exceptions) rest\n      | ({kind = DoesNotRaise nestedEvents; loc} as ev) :: rest ->\n        if !Common.Cli.debug then Log_.item \"%a@.\" print ev;\n        let nestedExceptions = loop Exceptions.empty nestedEvents in\n        (if Exceptions.isEmpty nestedExceptions (* catch-all *) then\n           let name =\n             match nestedEvents with\n             | {kind = Call {callee}} :: _ -> callee |> Common.Path.toName\n             | _ -> \"expression\" |> Name.create\n           in\n           Log_.warning ~loc\n             (Common.ExceptionAnalysis\n                {\n                  
message =\n                    Format.asprintf\n                      \"@{<info>%s@} does not raise and is annotated with \\\n                       redundant @doesNotRaise\"\n                      (name |> Name.toString);\n                }));\n        loop exnSet rest\n      | ({kind = Catches nestedEvents; exceptions} as ev) :: rest ->\n        if !Common.Cli.debug then Log_.item \"%a@.\" print ev;\n        if Exceptions.isEmpty exceptions then loop exnSet rest\n        else\n          let nestedExceptions = loop Exceptions.empty nestedEvents in\n          let newRaises = Exceptions.diff nestedExceptions exceptions in\n          exceptions\n          |> Exceptions.iter (fun exn ->\n                 nestedEvents\n                 |> List.iter (fun event -> shrinkExnTable exn event.loc));\n          loop (Exceptions.union exnSet newRaises) rest\n      | [] -> exnSet\n    in\n    let exnSet = loop Exceptions.empty events in\n    (exnSet, exnTable)\nend\n\nmodule Checks = struct\n  type check = {\n    events: Event.t list;\n    loc: Location.t;\n    locFull: Location.t;\n    moduleName: string;\n    exnName: string;\n    exceptions: Exceptions.t;\n  }\n\n  type t = check list\n\n  let checks = (ref [] : t ref)\n\n  let add ~events ~exceptions ~loc ?(locFull = loc) ~moduleName exnName =\n    checks := {events; exceptions; loc; locFull; moduleName; exnName} :: !checks\n\n  let doCheck {events; exceptions; loc; locFull; moduleName; exnName} =\n    let raiseSet, exnTable = events |> Event.combine ~moduleName in\n    let missingAnnotations = Exceptions.diff raiseSet exceptions in\n    let redundantAnnotations = Exceptions.diff exceptions raiseSet in\n    (if not (Exceptions.isEmpty missingAnnotations) then\n       let description =\n         Common.ExceptionAnalysisMissing\n           {exnName; exnTable; raiseSet; missingAnnotations; locFull}\n       in\n       Log_.warning ~loc description);\n    if not (Exceptions.isEmpty redundantAnnotations) then\n      Log_.warning 
~loc\n        (Common.ExceptionAnalysis\n           {\n             message =\n               (let raisesDescription ppf () =\n                  if raiseSet |> Exceptions.isEmpty then\n                    Format.fprintf ppf \"raises nothing\"\n                  else\n                    Format.fprintf ppf \"might raise %a\"\n                      (Exceptions.pp ~exnTable:(Some exnTable))\n                      raiseSet\n                in\n                Format.asprintf\n                  \"@{<info>%s@} %a and is annotated with redundant @raises(%a)\"\n                  exnName raisesDescription ()\n                  (Exceptions.pp ~exnTable:None)\n                  redundantAnnotations);\n           })\n\n  let doChecks () = !checks |> List.rev |> List.iter doCheck\nend\n\nlet traverseAst () =\n  ModulePath.init ();\n  let super = Tast_mapper.default in\n  let currentId = ref \"\" in\n  let currentEvents = ref [] in\n  let exceptionsOfPatterns patterns =\n    patterns\n    |> List.fold_left\n         (fun acc desc ->\n           match desc with\n           | Typedtree.Tpat_construct ({txt}, _, _) ->\n             Exceptions.add (Exn.fromLid txt) acc\n           | _ -> acc)\n         Exceptions.empty\n  in\n  let iterExpr self e = self.Tast_mapper.expr self e |> ignore in\n  let iterExprOpt self eo =\n    match eo with\n    | None -> ()\n    | Some e -> e |> iterExpr self\n  in\n  let iterPat self p = self.Tast_mapper.pat self p |> ignore in\n  let iterCases self cases =\n    cases\n    |> List.iter (fun case ->\n           case.Typedtree.c_lhs |> iterPat self;\n           case.c_guard |> iterExprOpt self;\n           case.c_rhs |> iterExpr self)\n  in\n  let isRaise s = s = \"Pervasives.raise\" || s = \"Pervasives.raise_notrace\" in\n  let raiseArgs args =\n    match args with\n    | [(_, Some {Typedtree.exp_desc = Texp_construct ({txt}, _, _)})] ->\n      [Exn.fromLid txt] |> Exceptions.fromList\n    | [(_, Some {Typedtree.exp_desc = Texp_ident _})] ->\n      
[Exn.fromString \"genericException\"] |> Exceptions.fromList\n    | _ -> [Exn.fromString \"TODO_from_raise1\"] |> Exceptions.fromList\n  in\n  let doesNotRaise attributes =\n    attributes\n    |> Annotation.getAttributePayload (fun s ->\n           s = \"doesNotRaise\" || s = \"doesnotraise\" || s = \"DoesNoRaise\"\n           || s = \"doesNotraise\" || s = \"doNotRaise\" || s = \"donotraise\"\n           || s = \"DoNoRaise\" || s = \"doNotraise\")\n    <> None\n  in\n  let expr (self : Tast_mapper.mapper) (expr : Typedtree.expression) =\n    let loc = expr.exp_loc in\n    let isDoesNoRaise = expr.exp_attributes |> doesNotRaise in\n    let oldEvents = !currentEvents in\n    if isDoesNoRaise then currentEvents := [];\n    (match expr.exp_desc with\n    | Texp_ident (callee_, _, _) ->\n      let callee =\n        callee_ |> Common.Path.fromPathT |> ModulePath.resolveAlias\n      in\n      let calleeName = callee |> Common.Path.toName in\n      if calleeName |> Name.toString |> isRaise then\n        Log_.warning ~loc\n          (Common.ExceptionAnalysis\n             {\n               message =\n                 Format.asprintf\n                   \"@{<info>%s@} can be analyzed only if called directly\"\n                   (calleeName |> Name.toString);\n             });\n      currentEvents :=\n        {\n          Event.exceptions = Exceptions.empty;\n          loc;\n          kind = Call {callee; modulePath = (ModulePath.getCurrent ()).path};\n        }\n        :: !currentEvents\n    | Texp_apply\n        ( {exp_desc = Texp_ident (atat, _, _)},\n          [(_lbl1, Some {exp_desc = Texp_ident (callee, _, _)}); arg] )\n      when (* raise @@ Exn(...) 
*)\n           atat |> Path.name = \"Pervasives.@@\" && callee |> Path.name |> isRaise\n      ->\n      let exceptions = [arg] |> raiseArgs in\n      currentEvents := {Event.exceptions; loc; kind = Raises} :: !currentEvents;\n      arg |> snd |> iterExprOpt self\n    | Texp_apply\n        ( {exp_desc = Texp_ident (atat, _, _)},\n          [arg; (_lbl1, Some {exp_desc = Texp_ident (callee, _, _)})] )\n      when (*  Exn(...) |> raise *)\n           atat |> Path.name = \"Pervasives.|>\" && callee |> Path.name |> isRaise\n      ->\n      let exceptions = [arg] |> raiseArgs in\n      currentEvents := {Event.exceptions; loc; kind = Raises} :: !currentEvents;\n      arg |> snd |> iterExprOpt self\n    | Texp_apply (({exp_desc = Texp_ident (callee, _, _)} as e), args) ->\n      let calleeName = Path.name callee in\n      if calleeName |> isRaise then\n        let exceptions = args |> raiseArgs in\n        currentEvents :=\n          {Event.exceptions; loc; kind = Raises} :: !currentEvents\n      else e |> iterExpr self;\n      args |> List.iter (fun (_, eOpt) -> eOpt |> iterExprOpt self)\n    | Texp_match (e, casesOk, casesExn, partial) ->\n      let cases = casesOk @ casesExn in\n      let exceptionPatterns =\n        casesExn\n        |> List.map (fun (case : Typedtree.case) -> case.c_lhs.pat_desc)\n      in\n      let exceptions = exceptionPatterns |> exceptionsOfPatterns in\n      if exceptionPatterns <> [] then (\n        let oldEvents = !currentEvents in\n        currentEvents := [];\n        e |> iterExpr self;\n        currentEvents :=\n          {Event.exceptions; loc; kind = Catches !currentEvents} :: oldEvents)\n      else e |> iterExpr self;\n      cases |> iterCases self;\n      if partial = Partial then\n        currentEvents :=\n          {\n            Event.exceptions = [Exn.matchFailure] |> Exceptions.fromList;\n            loc;\n            kind = Raises;\n          }\n          :: !currentEvents\n    | Texp_try (e, cases) ->\n      let exceptions =\n   
     cases\n        |> List.map (fun case -> case.Typedtree.c_lhs.pat_desc)\n        |> exceptionsOfPatterns\n      in\n      let oldEvents = !currentEvents in\n      currentEvents := [];\n      e |> iterExpr self;\n      currentEvents :=\n        {Event.exceptions; loc; kind = Catches !currentEvents} :: oldEvents;\n      cases |> iterCases self\n    | _ -> super.expr self expr |> ignore);\n    (if isDoesNoRaise then\n       let nestedEvents = !currentEvents in\n       currentEvents :=\n         {\n           Event.exceptions = Exceptions.empty;\n           loc;\n           kind = DoesNotRaise nestedEvents;\n         }\n         :: oldEvents);\n    expr\n  in\n  let getExceptionsFromAnnotations attributes =\n    let raisesAnnotationPayload =\n      attributes\n      |> Annotation.getAttributePayload (fun s -> s = \"raises\" || s = \"raise\")\n    in\n    let rec getExceptions payload =\n      match payload with\n      | Annotation.StringPayload s -> [Exn.fromString s] |> Exceptions.fromList\n      | Annotation.ConstructPayload s when s <> \"::\" ->\n        [Exn.fromString s] |> Exceptions.fromList\n      | Annotation.IdentPayload s ->\n        [Exn.fromString (s |> Longident.flatten |> String.concat \".\")]\n        |> Exceptions.fromList\n      | Annotation.TuplePayload tuple ->\n        tuple\n        |> List.map (fun payload ->\n               payload |> getExceptions |> Exceptions.toList)\n        |> List.concat |> Exceptions.fromList\n      | _ -> Exceptions.empty\n    in\n    match raisesAnnotationPayload with\n    | None -> Exceptions.empty\n    | Some payload -> payload |> getExceptions\n  in\n  let toplevelEval (self : Tast_mapper.mapper) (expr : Typedtree.expression)\n      attributes =\n    let oldId = !currentId in\n    let oldEvents = !currentEvents in\n    let name = \"Toplevel expression\" in\n    currentId := name;\n    currentEvents := [];\n    let moduleName = !Common.currentModule in\n    self.expr self expr |> ignore;\n    Checks.add 
~events:!currentEvents\n      ~exceptions:(getExceptionsFromAnnotations attributes)\n      ~loc:expr.exp_loc ~moduleName name;\n    currentId := oldId;\n    currentEvents := oldEvents\n  in\n  let structure_item (self : Tast_mapper.mapper)\n      (structureItem : Typedtree.structure_item) =\n    let oldModulePath = ModulePath.getCurrent () in\n    (match structureItem.str_desc with\n    | Tstr_eval (expr, attributes) -> toplevelEval self expr attributes\n    | Tstr_module {mb_id; mb_loc} ->\n      ModulePath.setCurrent\n        {\n          oldModulePath with\n          loc = mb_loc;\n          path = (mb_id |> Ident.name |> Name.create) :: oldModulePath.path;\n        }\n    | _ -> ());\n    let result = super.structure_item self structureItem in\n    ModulePath.setCurrent oldModulePath;\n    (match structureItem.str_desc with\n    | Tstr_module {mb_id; mb_expr = {mod_desc = Tmod_ident (path_, _lid)}} ->\n      ModulePath.addAlias\n        ~name:(mb_id |> Ident.name |> Name.create)\n        ~path:(path_ |> Common.Path.fromPathT)\n    | _ -> ());\n    result\n  in\n  let value_binding (self : Tast_mapper.mapper) (vb : Typedtree.value_binding) =\n    let oldId = !currentId in\n    let oldEvents = !currentEvents in\n    let isFunction =\n      match vb.vb_expr.exp_desc with\n      | Texp_function _ -> true\n      | _ -> false\n    in\n    let isToplevel = !currentId = \"\" in\n    let processBinding name =\n      currentId := name;\n      currentEvents := [];\n      let exceptionsFromAnnotations =\n        getExceptionsFromAnnotations vb.vb_attributes\n      in\n      exceptionsFromAnnotations |> Values.add ~name;\n      let res = super.value_binding self vb in\n      let moduleName = !Common.currentModule in\n      let path = [name |> Name.create] in\n      let exceptions =\n        match\n          path\n          |> Values.findPath ~moduleName\n               ~modulePath:(ModulePath.getCurrent ()).path\n        with\n        | Some exceptions -> exceptions\n       
 | _ -> Exceptions.empty\n      in\n      Checks.add ~events:!currentEvents ~exceptions ~loc:vb.vb_pat.pat_loc\n        ~locFull:vb.vb_loc ~moduleName name;\n      currentId := oldId;\n      currentEvents := oldEvents;\n      res\n    in\n    match vb.vb_pat.pat_desc with\n    | Tpat_any when isToplevel && not vb.vb_loc.loc_ghost -> processBinding \"_\"\n    | Tpat_construct ({txt}, _, _)\n      when isToplevel && (not vb.vb_loc.loc_ghost)\n           && txt = Longident.Lident \"()\" ->\n      processBinding \"()\"\n    | Tpat_var (id, {loc = {loc_ghost}})\n      when (isFunction || isToplevel) && (not loc_ghost)\n           && not vb.vb_loc.loc_ghost ->\n      processBinding (id |> Ident.name)\n    | _ -> super.value_binding self vb\n  in\n  let open Tast_mapper in\n  {super with expr; value_binding; structure_item}\n\nlet processStructure (structure : Typedtree.structure) =\n  let traverseAst = traverseAst () in\n  structure |> traverseAst.structure traverseAst |> ignore\n\nlet processCmt (cmt_infos : Cmt_format.cmt_infos) =\n  match cmt_infos.cmt_annots with\n  | Interface _ -> ()\n  | Implementation structure ->\n    Values.newCmt ();\n    structure |> processStructure\n  | _ -> ()\n"
  },
  {
    "path": "analysis/reanalyze/src/Exceptions.ml",
    "content": "open Common\n\ntype t = ExnSet.t\n\nlet add = ExnSet.add\nlet diff = ExnSet.diff\nlet empty = ExnSet.empty\nlet fromList = ExnSet.of_list\nlet toList = ExnSet.elements\nlet isEmpty = ExnSet.is_empty\nlet iter = ExnSet.iter\nlet union = ExnSet.union\n\nlet pp ~exnTable ppf exceptions =\n  let isFirst = ref true in\n  let ppExn exn =\n    let separator = if !isFirst then \"\" else \", \" in\n    isFirst := false;\n    let name = Exn.toString exn in\n    match exnTable with\n    | Some exnTable -> (\n      match Hashtbl.find_opt exnTable exn with\n      | Some locSet ->\n        let positions =\n          locSet |> Common.LocSet.elements\n          |> List.map (fun loc -> loc.Location.loc_start)\n        in\n        Format.fprintf ppf \"%s@{<info>%s@} (@{<filename>%s@})\" separator name\n          (positions |> List.map posToString |> String.concat \" \")\n      | None -> Format.fprintf ppf \"%s@{<info>%s@}\" separator name)\n    | None -> Format.fprintf ppf \"%s@{<info>%s@}\" separator name\n  in\n  let isList = exceptions |> ExnSet.cardinal > 1 in\n  if isList then Format.fprintf ppf \"[\";\n  exceptions |> ExnSet.iter ppExn;\n  if isList then Format.fprintf ppf \"]\"\n"
  },
  {
    "path": "analysis/reanalyze/src/Exn.ml",
    "content": "type t = string\n\nlet compare = String.compare\nlet decodeError = \"DecodeError\"\nlet assertFailure = \"Assert_failure\"\nlet divisionByZero = \"Division_by_zero\"\nlet endOfFile = \"End_of_file\"\nlet exit = \"exit\"\nlet failure = \"Failure\"\nlet invalidArgument = \"Invalid_argument\"\nlet jsExnError = \"Js.Exn.Error\"\nlet matchFailure = \"Match_failure\"\nlet notFound = \"Not_found\"\nlet sysError = \"Sys_error\"\nlet fromLid lid = lid |> Longident.flatten |> String.concat \".\"\nlet fromString s = s\nlet toString s = s\nlet yojsonJsonError = \"Yojson.Json_error\"\nlet yojsonTypeError = \"Yojson.Basic.Util.Type_error\"\n"
  },
  {
    "path": "analysis/reanalyze/src/Exn.mli",
    "content": "type t\n\nval compare : t -> t -> int\nval assertFailure : t\nval decodeError : t\nval divisionByZero : t\nval endOfFile : t\nval exit : t\nval failure : t\nval fromLid : Longident.t -> t\nval fromString : string -> t\nval invalidArgument : t\nval jsExnError : t\nval matchFailure : t\nval notFound : t\nval sysError : t\nval toString : t -> string\nval yojsonJsonError : t\nval yojsonTypeError : t\n"
  },
  {
    "path": "analysis/reanalyze/src/ExnLib.ml",
    "content": "let raisesLibTable : (Name.t, Exceptions.t) Hashtbl.t =\n  let table = Hashtbl.create 15 in\n  let open Exn in\n  let array =\n    [\n      (\"get\", [invalidArgument]);\n      (\"set\", [invalidArgument]);\n      (\"make\", [invalidArgument]);\n      (\"init\", [invalidArgument]);\n      (\"make_matrix\", [invalidArgument]);\n      (\"fill\", [invalidArgument]);\n      (\"blit\", [invalidArgument]);\n      (\"iter2\", [invalidArgument]);\n      (\"map2\", [invalidArgument]);\n    ]\n  in\n  let beltArray = [(\"getExn\", [assertFailure]); (\"setExn\", [assertFailure])] in\n  let beltList =\n    [(\"getExn\", [notFound]); (\"headExn\", [notFound]); (\"tailExn\", [notFound])]\n  in\n  let beltMap = [(\"getExn\", [notFound])] in\n  let beltMutableMap = beltMap in\n  let beltMutableQueue = [(\"peekExn\", [notFound]); (\"popExn\", [notFound])] in\n  let beltMutableSet = [(\"getExn\", [notFound])] in\n  let beltOption = [(\"getExn\", [notFound])] in\n  let beltResult = [(\"getExn\", [notFound])] in\n  let beltSet = [(\"getExn\", [notFound])] in\n  let bsJson =\n    (* bs-json *)\n    [\n      (\"bool\", [decodeError]);\n      (\"float\", [decodeError]);\n      (\"int\", [decodeError]);\n      (\"string\", [decodeError]);\n      (\"char\", [decodeError]);\n      (\"date\", [decodeError]);\n      (\"nullable\", [decodeError]);\n      (\"nullAs\", [decodeError]);\n      (\"array\", [decodeError]);\n      (\"list\", [decodeError]);\n      (\"pair\", [decodeError]);\n      (\"tuple2\", [decodeError]);\n      (\"tuple3\", [decodeError]);\n      (\"tuple4\", [decodeError]);\n      (\"dict\", [decodeError]);\n      (\"field\", [decodeError]);\n      (\"at\", [decodeError; invalidArgument]);\n      (\"oneOf\", [decodeError]);\n      (\"either\", [decodeError]);\n    ]\n  in\n  let buffer =\n    [\n      (\"sub\", [invalidArgument]);\n      (\"blit\", [invalidArgument]);\n      (\"nth\", [invalidArgument]);\n      (\"add_substitute\", [notFound]);\n      
(\"add_channel\", [endOfFile]);\n      (\"truncate\", [invalidArgument]);\n    ]\n  in\n  let bytes =\n    [\n      (\"get\", [invalidArgument]);\n      (\"set\", [invalidArgument]);\n      (\"create\", [invalidArgument]);\n      (\"make\", [invalidArgument]);\n      (\"init\", [invalidArgument]);\n      (\"sub\", [invalidArgument]);\n      (\"sub_string\", [invalidArgument]);\n      (\"extend\", [invalidArgument]);\n      (\"fill\", [invalidArgument]);\n      (\"blit\", [invalidArgument]);\n      (\"blit_string\", [invalidArgument]);\n      (* (\"concat\", [invalidArgument]), if longer than {!Sys.max_string_length}\n         (\"cat\", [invalidArgument]), if longer than {!Sys.max_string_length}\n         (\"escaped\", [invalidArgument]), if longer than {!Sys.max_string_length} *)\n      (\"index\", [notFound]);\n      (\"rindex\", [notFound]);\n      (\"index_from\", [invalidArgument; notFound]);\n      (\"index_from_opt\", [invalidArgument]);\n      (\"rindex_from\", [invalidArgument; notFound]);\n      (\"rindex_from_opt\", [invalidArgument]);\n      (\"contains_from\", [invalidArgument]);\n      (\"rcontains_from\", [invalidArgument]);\n    ]\n  in\n  let filename =\n    [\n      (\"chop_extension\", [invalidArgument]);\n      (\"temp_file\", [sysError]);\n      (\"open_temp_file\", [sysError]);\n    ]\n  in\n  let hashtbl = [(\"find\", [notFound])] in\n  let list =\n    [\n      (\"hd\", [failure]);\n      (\"tl\", [failure]);\n      (\"nth\", [failure; invalidArgument]);\n      (\"nth_opt\", [invalidArgument]);\n      (\"init\", [invalidArgument]);\n      (\"iter2\", [invalidArgument]);\n      (\"map2\", [invalidArgument]);\n      (\"fold_left2\", [invalidArgument]);\n      (\"fold_right2\", [invalidArgument]);\n      (\"for_all2\", [invalidArgument]);\n      (\"exists2\", [invalidArgument]);\n      (\"find\", [notFound]);\n      (\"assoc\", [notFound]);\n      (\"combine\", [invalidArgument]);\n    ]\n  in\n  let string =\n    [\n      (\"get\", 
[invalidArgument]);\n      (\"set\", [invalidArgument]);\n      (\"create\", [invalidArgument]);\n      (\"make\", [invalidArgument]);\n      (\"init\", [invalidArgument]);\n      (\"sub\", [invalidArgument]);\n      (\"fill\", [invalidArgument]);\n      (* (\"concat\", [invalidArgument]), if longer than {!Sys.max_string_length}\n         (\"escaped\", [invalidArgument]), if longer than {!Sys.max_string_length} *)\n      (\"index\", [notFound]);\n      (\"rindex\", [notFound]);\n      (\"index_from\", [invalidArgument; notFound]);\n      (\"index_from_opt\", [invalidArgument]);\n      (\"rindex_from\", [invalidArgument; notFound]);\n      (\"rindex_from_opt\", [invalidArgument]);\n      (\"contains_from\", [invalidArgument]);\n      (\"rcontains_from\", [invalidArgument]);\n    ]\n  in\n  let stdlib =\n    [\n      (\"invalid_arg\", [invalidArgument]);\n      (\"failwith\", [failure]);\n      (\"/\", [divisionByZero]);\n      (\"mod\", [divisionByZero]);\n      (\"char_of_int\", [invalidArgument]);\n      (\"bool_of_string\", [invalidArgument]);\n      (\"int_of_string\", [failure]);\n      (\"float_of_string\", [failure]);\n      (\"read_int\", [failure]);\n      (\"output\", [invalidArgument]);\n      (\"close_out\", [sysError]);\n      (\"input_char\", [endOfFile]);\n      (\"input_line\", [endOfFile]);\n      (\"input\", [invalidArgument]);\n      (\"really_input\", [endOfFile; invalidArgument]);\n      (\"really_input_string\", [endOfFile]);\n      (\"input_byte\", [endOfFile]);\n      (\"input_binary_int\", [endOfFile]);\n      (\"close_in\", [sysError]);\n      (\"exit\", [exit]);\n    ]\n  in\n  let str =\n    [\n      (\"search_forward\", [notFound]);\n      (\"search_backward\", [notFound]);\n      (\"matched_group\", [notFound]);\n      (\"group_beginning\", [notFound; invalidArgument]);\n      (\"group_end\", [notFound; invalidArgument]);\n    ]\n  in\n  let yojsonBasic = [(\"from_string\", [yojsonJsonError])] in\n  let yojsonBasicUtil =\n    [\n      
(\"member\", [yojsonTypeError]);\n      (\"to_assoc\", [yojsonTypeError]);\n      (\"to_bool\", [yojsonTypeError]);\n      (\"to_bool_option\", [yojsonTypeError]);\n      (\"to_float\", [yojsonTypeError]);\n      (\"to_float_option\", [yojsonTypeError]);\n      (\"to_int\", [yojsonTypeError]);\n      (\"to_list\", [yojsonTypeError]);\n      (\"to_number\", [yojsonTypeError]);\n      (\"to_number_option\", [yojsonTypeError]);\n      (\"to_string\", [yojsonTypeError]);\n      (\"to_string_option\", [yojsonTypeError]);\n    ]\n  in\n  [\n    (\"Array\", array);\n    (\"Belt.Array\", beltArray);\n    (\"Belt_Array\", beltArray);\n    (\"Belt.List\", beltList);\n    (\"Belt_List\", beltList);\n    (\"Belt.Map\", beltMap);\n    (\"Belt.Map.Int\", beltMap);\n    (\"Belt.Map.String\", beltMap);\n    (\"Belt_Map\", beltMap);\n    (\"Belt_Map.Int\", beltMap);\n    (\"Belt_Map.String\", beltMap);\n    (\"Belt_MapInt\", beltMap);\n    (\"Belt_MapString\", beltMap);\n    (\"Belt.MutableMap\", beltMutableMap);\n    (\"Belt.MutableMap.Int\", beltMutableMap);\n    (\"Belt.MutableMap.String\", beltMutableMap);\n    (\"Belt_MutableMap\", beltMutableMap);\n    (\"Belt_MutableMap.Int\", beltMutableMap);\n    (\"Belt_MutableMap.String\", beltMutableMap);\n    (\"Belt_MutableMapInt\", beltMutableMap);\n    (\"Belt_MutableMapString\", beltMutableMap);\n    (\"Belt.MutableQueue\", beltMutableQueue);\n    (\"Belt_MutableQueue\", beltMutableQueue);\n    (\"Belt.Option\", beltOption);\n    (\"Belt_Option\", beltOption);\n    (\"Belt.Result\", beltResult);\n    (\"Belt_Result\", beltResult);\n    (\"Belt.Set\", beltSet);\n    (\"Belt.Set.Int\", beltSet);\n    (\"Belt.Set.String\", beltSet);\n    (\"Belt_Set\", beltSet);\n    (\"Belt_Set.Int\", beltSet);\n    (\"Belt_Set.String\", beltSet);\n    (\"Belt_SetInt\", beltSet);\n    (\"Belt_SetString\", beltSet);\n    (\"Belt.MutableSet\", beltMutableSet);\n    (\"Belt.MutableSet.Int\", beltMutableSet);\n    (\"Belt.MutableSet.String\", 
beltMutableSet);\n    (\"MutableSet\", beltMutableSet);\n    (\"MutableSet.Int\", beltMutableSet);\n    (\"MutableSet.String\", beltMutableSet);\n    (\"Belt_MutableSetInt\", beltMutableSet);\n    (\"Belt_MutableSetString\", beltMutableSet);\n    (\"Buffer\", buffer);\n    (\"Bytes\", bytes);\n    (\"Char\", [(\"chr\", [invalidArgument])]);\n    (\"Filename\", filename);\n    (\"Hashtbl\", hashtbl);\n    (\"Js.Json\", [(\"parseExn\", [jsExnError])]);\n    (\"Json_decode\", bsJson);\n    (\"Json.Decode\", bsJson);\n    (\"List\", list);\n    (\"Pervasives\", stdlib);\n    (\"Stdlib\", stdlib);\n    (\"Stdlib.Array\", array);\n    (\"Stdlib.Buffer\", buffer);\n    (\"Stdlib.Bytes\", bytes);\n    (\"Stdlib.Filename\", filename);\n    (\"Stdlib.Hashtbl\", hashtbl);\n    (\"Stdlib.List\", list);\n    (\"Stdlib.Str\", str);\n    (\"Stdlib.String\", string);\n    (\"Str\", str);\n    (\"String\", string);\n    (\"Yojson.Basic\", yojsonBasic);\n    (\"Yojson.Basic.Util\", yojsonBasicUtil);\n  ]\n  |> List.iter (fun (name, group) ->\n         group\n         |> List.iter (fun (s, e) ->\n                Hashtbl.add table\n                  (name ^ \".\" ^ s |> Name.create)\n                  (e |> Exceptions.fromList)));\n  table\n\nlet find (path : Common.Path.t) =\n  Hashtbl.find_opt raisesLibTable (path |> Common.Path.toName)\n"
  },
  {
    "path": "analysis/reanalyze/src/FindSourceFile.ml",
    "content": "let rec interface items =\n  match items with\n  | {Typedtree.sig_loc} :: rest -> (\n    match not (Sys.file_exists sig_loc.loc_start.pos_fname) with\n    | true -> interface rest\n    | false -> Some sig_loc.loc_start.pos_fname)\n  | [] -> None\n\nlet rec implementation items =\n  match items with\n  | {Typedtree.str_loc} :: rest -> (\n    match not (Sys.file_exists str_loc.loc_start.pos_fname) with\n    | true -> implementation rest\n    | false -> Some str_loc.loc_start.pos_fname)\n  | [] -> None\n\nlet cmt cmt_annots =\n  match cmt_annots with\n  | Cmt_format.Interface signature ->\n    if !Common.Cli.debug && signature.sig_items = [] then\n      Log_.item \"Interface %d@.\" (signature.sig_items |> List.length);\n    interface signature.sig_items\n  | Implementation structure ->\n    if !Common.Cli.debug && structure.str_items = [] then\n      Log_.item \"Implementation %d@.\" (structure.str_items |> List.length);\n    implementation structure.str_items\n  | _ -> None\n"
  },
  {
    "path": "analysis/reanalyze/src/Issues.ml",
    "content": "let errorHygiene = \"Error Hygiene\"\nlet errorNotImplemented = \"Error Not Implemented\"\nlet errorTermination = \"Error Termination\"\nlet exceptionAnalysis = \"Exception Analysis\"\nlet incorrectDeadAnnotation = \"Incorrect Dead Annotation\"\nlet terminationAnalysisInternal = \"Termination Analysis Internal\"\nlet warningDeadAnalysisCycle = \"Warning Dead Analysis Cycle\"\nlet warningDeadException = \"Warning Dead Exception\"\nlet warningDeadModule = \"Warning Dead Module\"\nlet warningDeadType = \"Warning Dead Type\"\nlet warningDeadValue = \"Warning Dead Value\"\nlet warningDeadValueWithSideEffects = \"Warning Dead Value With Side Effects\"\nlet warningRedundantOptionalArgument = \"Warning Redundant Optional Argument\"\nlet warningUnusedArgument = \"Warning Unused Argument\"\n"
  },
  {
    "path": "analysis/reanalyze/src/Log_.ml",
    "content": "open Common\n\nmodule Color = struct\n  let color_enabled = lazy (Unix.isatty Unix.stdout)\n  let forceColor = ref false\n  let get_color_enabled () = !forceColor || Lazy.force color_enabled\n\n  type color = Red | Yellow | Magenta | Cyan\n  type style = FG of color | Bold | Dim\n\n  let code_of_style = function\n    | FG Red -> \"31\"\n    | FG Yellow -> \"33\"\n    | FG Magenta -> \"35\"\n    | FG Cyan -> \"36\"\n    | Bold -> \"1\"\n    | Dim -> \"2\"\n\n  let getStringTag s =\n    match s with\n    | Format.String_tag s -> s\n    | _ -> \"\"\n\n  let style_of_tag s =\n    match s |> getStringTag with\n    | \"error\" -> [Bold; FG Red]\n    | \"warning\" -> [Bold; FG Magenta]\n    | \"info\" -> [Bold; FG Yellow]\n    | \"dim\" -> [Dim]\n    | \"filename\" -> [FG Cyan]\n    | _ -> []\n\n  let ansi_of_tag s =\n    let l = style_of_tag s in\n    let s = String.concat \";\" (List.map code_of_style l) in\n    \"\\027[\" ^ s ^ \"m\"\n\n  let reset_lit = \"\\027[0m\"\n\n  let setOpenCloseTag openTag closeTag =\n    {\n      Format.mark_open_stag = openTag;\n      mark_close_stag = closeTag;\n      print_open_stag = (fun _ -> ());\n      print_close_stag = (fun _ -> ());\n    }\n\n  let color_functions =\n    setOpenCloseTag\n      (fun s -> if get_color_enabled () then ansi_of_tag s else \"\")\n      (fun _ -> if get_color_enabled () then reset_lit else \"\")\n\n  let setup () =\n    Format.pp_set_mark_tags Format.std_formatter true;\n    Format.pp_set_formatter_stag_functions Format.std_formatter color_functions;\n    if not (get_color_enabled ()) then Misc.Color.setup (Some Never);\n    (* Print a dummy thing once in the beginning, as otherwise flushing does not work. 
*)\n    Location.print_loc Format.str_formatter Location.none\n\n  let error ppf s = Format.fprintf ppf \"@{<error>%s@}\" s\n  let info ppf s = Format.fprintf ppf \"@{<info>%s@}\" s\nend\n\nmodule Loc = struct\n  let print_loc ppf (loc : Location.t) =\n    (* Change the range so it's on a single line.\n       In this way, the line number is clickable in vscode. *)\n    let startChar = loc.loc_start.pos_cnum - loc.loc_start.pos_bol in\n    let endChar = startChar + loc.loc_end.pos_cnum - loc.loc_start.pos_cnum in\n    let line = loc.loc_start.pos_lnum in\n    let processPos char (pos : Lexing.position) : Lexing.position =\n      {\n        pos_lnum = line;\n        pos_bol = 0;\n        pos_cnum = char;\n        pos_fname =\n          (let open Filename in\n           match is_implicit pos.pos_fname with\n           | _ when !Cli.ci -> basename pos.pos_fname\n           | true -> concat (Sys.getcwd ()) pos.pos_fname\n           | false -> pos.pos_fname);\n      }\n    in\n    Location.print_loc ppf\n      {\n        loc with\n        loc_start = loc.loc_start |> processPos startChar;\n        loc_end = loc.loc_end |> processPos endChar;\n      }\n\n  let print ppf (loc : Location.t) = Format.fprintf ppf \"@[%a@]\" print_loc loc\nend\n\nlet log x = Format.fprintf Format.std_formatter x\n\nlet item x =\n  Format.fprintf Format.std_formatter \"  \";\n  Format.fprintf Format.std_formatter x\n\nlet missingRaiseInfoToText {missingAnnotations; locFull} =\n  let missingTxt =\n    Format.asprintf \"%a\" (Exceptions.pp ~exnTable:None) missingAnnotations\n  in\n  if !Cli.json then\n    EmitJson.emitAnnotate ~action:\"Add @raises annotation\"\n      ~pos:(EmitJson.locToPos locFull)\n      ~text:(Format.asprintf \"@raises(%s)\\\\n\" missingTxt)\n  else \"\"\n\nlet logAdditionalInfo ~(description : description) =\n  match description with\n  | DeadWarning {lineAnnotation; shouldWriteLineAnnotation} ->\n    if shouldWriteLineAnnotation then\n      
WriteDeadAnnotations.lineAnnotationToString lineAnnotation\n    else \"\"\n  | ExceptionAnalysisMissing missingRaiseInfo ->\n    missingRaiseInfoToText missingRaiseInfo\n  | _ -> \"\"\n\nlet missingRaiseInfoToMessage {exnTable; exnName; missingAnnotations; raiseSet}\n    =\n  let raisesTxt =\n    Format.asprintf \"%a\" (Exceptions.pp ~exnTable:(Some exnTable)) raiseSet\n  in\n  let missingTxt =\n    Format.asprintf \"%a\" (Exceptions.pp ~exnTable:None) missingAnnotations\n  in\n  Format.asprintf\n    \"@{<info>%s@} might raise %s and is not annotated with @raises(%s)\" exnName\n    raisesTxt missingTxt\n\nlet descriptionToMessage (description : description) =\n  match description with\n  | Circular {message} -> message\n  | DeadModule {message} -> message\n  | DeadOptional {message} -> message\n  | DeadWarning {path; message} ->\n    Format.asprintf \"@{<info>%s@} %s\" path message\n  | ExceptionAnalysis {message} -> message\n  | ExceptionAnalysisMissing missingRaiseInfo ->\n    missingRaiseInfoToMessage missingRaiseInfo\n  | Termination {message} -> message\n\nlet descriptionToName (description : description) =\n  match description with\n  | Circular _ -> Issues.warningDeadAnalysisCycle\n  | DeadModule _ -> Issues.warningDeadModule\n  | DeadOptional {deadOptional = WarningUnusedArgument} ->\n    Issues.warningUnusedArgument\n  | DeadOptional {deadOptional = WarningRedundantOptionalArgument} ->\n    Issues.warningRedundantOptionalArgument\n  | DeadWarning {deadWarning = WarningDeadException} ->\n    Issues.warningDeadException\n  | DeadWarning {deadWarning = WarningDeadType} -> Issues.warningDeadType\n  | DeadWarning {deadWarning = WarningDeadValue} -> Issues.warningDeadValue\n  | DeadWarning {deadWarning = WarningDeadValueWithSideEffects} ->\n    Issues.warningDeadValueWithSideEffects\n  | DeadWarning {deadWarning = IncorrectDeadAnnotation} ->\n    Issues.incorrectDeadAnnotation\n  | ExceptionAnalysis _ -> Issues.exceptionAnalysis\n  | ExceptionAnalysisMissing _ 
-> Issues.exceptionAnalysis\n  | Termination {termination = ErrorHygiene} -> Issues.errorHygiene\n  | Termination {termination = ErrorNotImplemented} ->\n    Issues.errorNotImplemented\n  | Termination {termination = ErrorTermination} -> Issues.errorTermination\n  | Termination {termination = TerminationAnalysisInternal} ->\n    Issues.terminationAnalysisInternal\n\nlet logIssue ~(issue : issue) =\n  let open Format in\n  let loc = issue.loc in\n  if !Cli.json then\n    let file = Json.escape loc.loc_start.pos_fname in\n    let startLine = loc.loc_start.pos_lnum - 1 in\n    let startCharacter = loc.loc_start.pos_cnum - loc.loc_start.pos_bol in\n    let endLine = loc.loc_end.pos_lnum - 1 in\n    let endCharacter = loc.loc_end.pos_cnum - loc.loc_start.pos_bol in\n    let message = Json.escape (descriptionToMessage issue.description) in\n    Format.asprintf \"%a%s%s\"\n      (fun ppf () ->\n        EmitJson.emitItem ~ppf ~name:issue.name\n          ~kind:\n            (match issue.severity with\n            | Warning -> \"warning\"\n            | Error -> \"error\")\n          ~file\n          ~range:(startLine, startCharacter, endLine, endCharacter)\n          ~message)\n      ()\n      (logAdditionalInfo ~description:issue.description)\n      (if !Cli.json then EmitJson.emitClose () else \"\")\n  else\n    let color =\n      match issue.severity with\n      | Warning -> Color.info\n      | Error -> Color.error\n    in\n    asprintf \"@.  %a@.  %a@.  
%s%s@.\" color issue.name Loc.print issue.loc\n      (descriptionToMessage issue.description)\n      (logAdditionalInfo ~description:issue.description)\n\nmodule Stats = struct\n  let issues = ref []\n  let addIssue (issue : issue) = issues := issue :: !issues\n  let clear () = issues := []\n\n  let getSortedIssues () =\n    let counters2 = Hashtbl.create 1 in\n    !issues\n    |> List.iter (fun (issue : issue) ->\n           let counter =\n             match Hashtbl.find_opt counters2 issue.name with\n             | Some counter -> counter\n             | None ->\n               let counter = ref 0 in\n               Hashtbl.add counters2 issue.name counter;\n               counter\n           in\n           incr counter);\n    let issues, nIssues =\n      Hashtbl.fold\n        (fun name cnt (issues, nIssues) ->\n          ((name, cnt) :: issues, nIssues + !cnt))\n        counters2 ([], 0)\n    in\n    (issues |> List.sort (fun (n1, _) (n2, _) -> String.compare n1 n2), nIssues)\n\n  let report () =\n    !issues |> List.rev\n    |> List.iter (fun issue -> logIssue ~issue |> print_string);\n    let sortedIssues, nIssues = getSortedIssues () in\n    if not !Cli.json then (\n      if sortedIssues <> [] then item \"@.\";\n      item \"Analysis reported %d issues%s@.\" nIssues\n        (match sortedIssues with\n        | [] -> \"\"\n        | _ :: _ ->\n          \" (\"\n          ^ (sortedIssues\n            |> List.map (fun (name, cnt) -> name ^ \":\" ^ string_of_int !cnt)\n            |> String.concat \", \")\n          ^ \")\"))\nend\n\nlet logIssue ~forStats ~severity ~(loc : Location.t) description =\n  let name = descriptionToName description in\n  if Suppress.filter loc.loc_start then\n    if forStats then Stats.addIssue {name; severity; loc; description}\n\nlet warning ?(forStats = true) ~loc description =\n  description |> logIssue ~severity:Warning ~forStats ~loc\n\nlet error ~loc description =\n  description |> logIssue ~severity:Error ~forStats:true ~loc\n"
  },
  {
    "path": "analysis/reanalyze/src/ModulePath.ml",
    "content": "open Common\nmodule NameMap = Map.Make (Name)\n\n(* Keep track of the module path while traversing with Tast_mapper *)\ntype t = {aliases: Path.t NameMap.t; loc: Location.t; path: Path.t}\n\nlet initial = ({aliases = NameMap.empty; loc = Location.none; path = []} : t)\nlet current = (ref initial : t ref)\nlet init () = current := initial\n\nlet normalizePath ~aliases path =\n  match path |> List.rev with\n  | name :: restRev when restRev <> [] -> (\n    match aliases |> NameMap.find_opt name with\n    | None -> path\n    | Some path1 ->\n      let newPath = List.rev (path1 @ restRev) in\n      if !Common.Cli.debug then\n        Log_.item \"Resolve Alias: %s to %s@.\"\n          (path |> Common.Path.toString)\n          (newPath |> Common.Path.toString);\n      newPath)\n  | _ -> path\n\nlet addAlias ~name ~path =\n  let aliases = !current.aliases in\n  let pathNormalized = path |> normalizePath ~aliases in\n  if !Common.Cli.debug then\n    Log_.item \"Module Alias: %s = %s@.\" (name |> Name.toString)\n      (Path.toString pathNormalized);\n  current := {!current with aliases = NameMap.add name pathNormalized aliases}\n\nlet resolveAlias path = path |> normalizePath ~aliases:!current.aliases\nlet getCurrent () = !current\nlet setCurrent p = current := p\n"
  },
  {
    "path": "analysis/reanalyze/src/Name.ml",
    "content": "type t = string\n\nlet compare = String.compare\n\nlet create ?(isInterface = true) s =\n  match isInterface with\n  | true -> s\n  | false -> \"+\" ^ s\n\nlet isInterface s = try s.[0] <> '+' with Invalid_argument _ -> false\nlet isUnderscore s = s = \"_\" || s = \"+_\"\n\nlet startsWithUnderscore s =\n  s |> String.length >= 2\n  &&\n  try s.[0] = '_' || (s.[0] = '+' && s.[1] = '_')\n  with Invalid_argument _ -> false\n\nlet toInterface s =\n  match isInterface s with\n  | true -> s\n  | false -> (\n    try String.sub s 1 (String.length s - 1) with Invalid_argument _ -> s)\n\nlet toImplementation s =\n  match isInterface s with\n  | true -> \"+\" ^ s\n  | false -> s\nlet toString (s : t) = s\n"
  },
  {
    "path": "analysis/reanalyze/src/Name.mli",
    "content": "type t\n\nval compare : t -> t -> int\nval create : ?isInterface:bool -> string -> t\nval isUnderscore : t -> bool\nval startsWithUnderscore : t -> bool\nval toImplementation : t -> t\nval toInterface : t -> t\nval toString : t -> string\n"
  },
  {
    "path": "analysis/reanalyze/src/Paths.ml",
    "content": "open Common\nmodule StringMap = Map_string\n\nlet bsconfig = \"bsconfig.json\"\nlet rescriptJson = \"rescript.json\"\n\nlet readFile filename =\n  try\n    (* windows can't use open_in *)\n    let chan = open_in_bin filename in\n    let content = really_input_string chan (in_channel_length chan) in\n    close_in_noerr chan;\n    Some content\n  with _ -> None\n\nlet rec findProjectRoot ~dir =\n  let rescriptJsonFile = Filename.concat dir rescriptJson in\n  let bsconfigFile = Filename.concat dir bsconfig in\n  if Sys.file_exists rescriptJsonFile || Sys.file_exists bsconfigFile then dir\n  else\n    let parent = dir |> Filename.dirname in\n    if parent = dir then (\n      prerr_endline\n        (\"Error: cannot find project root containing \" ^ rescriptJson ^ \".\");\n      assert false)\n    else findProjectRoot ~dir:parent\n\nlet setReScriptProjectRoot =\n  lazy\n    (runConfig.projectRoot <- findProjectRoot ~dir:(Sys.getcwd ());\n     runConfig.bsbProjectRoot <-\n       (match Sys.getenv_opt \"BSB_PROJECT_ROOT\" with\n       | None -> runConfig.projectRoot\n       | Some s -> s))\n\nmodule Config = struct\n  let readSuppress conf =\n    match Json.get \"suppress\" conf with\n    | Some (Array elements) ->\n      let names =\n        elements\n        |> List.filter_map (fun (x : Json.t) ->\n               match x with\n               | String s -> Some s\n               | _ -> None)\n      in\n      runConfig.suppress <- names @ runConfig.suppress\n    | _ -> ()\n\n  let readUnsuppress conf =\n    match Json.get \"unsuppress\" conf with\n    | Some (Array elements) ->\n      let names =\n        elements\n        |> List.filter_map (fun (x : Json.t) ->\n               match x with\n               | String s -> Some s\n               | _ -> None)\n      in\n      runConfig.unsuppress <- names @ runConfig.unsuppress\n    | _ -> ()\n\n  let readAnalysis conf =\n    match Json.get \"analysis\" conf with\n    | Some (Array elements) ->\n      
elements\n      |> List.iter (fun (x : Json.t) ->\n             match x with\n             | String \"all\" -> RunConfig.all ()\n             | String \"dce\" -> RunConfig.dce ()\n             | String \"exception\" -> RunConfig.exception_ ()\n             | String \"termination\" -> RunConfig.termination ()\n             | _ -> ())\n    | _ ->\n      (* if no \"analysis\" specified, default to dce *)\n      RunConfig.dce ()\n\n  let readTransitive conf =\n    match Json.get \"transitive\" conf with\n    | Some True -> RunConfig.transitive true\n    | Some False -> RunConfig.transitive false\n    | _ -> ()\n\n  (* Read the config from rescript.json/bsconfig.json and apply it to runConfig and suppress and unsuppress *)\n  let processBsconfig () =\n    Lazy.force setReScriptProjectRoot;\n    let rescriptFile = Filename.concat runConfig.projectRoot rescriptJson in\n    let bsconfigFile = Filename.concat runConfig.projectRoot bsconfig in\n\n    let processText text =\n      match Json.parse text with\n      | None -> ()\n      | Some json -> (\n        match Json.get \"reanalyze\" json with\n        | Some conf ->\n          readSuppress conf;\n          readUnsuppress conf;\n          readAnalysis conf;\n          readTransitive conf\n        | None ->\n          (* if no \"analysis\" specified, default to dce *)\n          RunConfig.dce ())\n    in\n\n    match readFile rescriptFile with\n    | Some text -> processText text\n    | None -> (\n      match readFile bsconfigFile with\n      | Some text -> processText text\n      | None -> ())\nend\n\n(** * Handle namespaces in cmt files. * E.g. 
src/Module-Project.cmt becomes\n    src/Module *)\nlet handleNamespace cmt =\n  let cutAfterDash s =\n    match String.index s '-' with\n    | n -> ( try String.sub s 0 n with Invalid_argument _ -> s)\n    | exception Not_found -> s\n  in\n  let noDir = Filename.basename cmt = cmt in\n  if noDir then cmt |> Filename.remove_extension |> cutAfterDash\n  else\n    let dir = cmt |> Filename.dirname in\n    let base =\n      cmt |> Filename.basename |> Filename.remove_extension |> cutAfterDash\n    in\n    Filename.concat dir base\n\nlet getModuleName cmt = cmt |> handleNamespace |> Filename.basename\n\nlet readDirsFromConfig ~configSources =\n  let dirs = ref [] in\n  let root = runConfig.projectRoot in\n  let rec processDir ~subdirs dir =\n    let absDir =\n      match dir = \"\" with\n      | true -> root\n      | false -> Filename.concat root dir\n    in\n    if Sys.file_exists absDir && Sys.is_directory absDir then (\n      dirs := dir :: !dirs;\n      if subdirs then\n        absDir |> Sys.readdir\n        |> Array.iter (fun d -> processDir ~subdirs (Filename.concat dir d)))\n  in\n  let rec processSourceItem (sourceItem : Ext_json_types.t) =\n    match sourceItem with\n    | Str {str} -> str |> processDir ~subdirs:false\n    | Obj {map} -> (\n      match StringMap.find_opt map \"dir\" with\n      | Some (Str {str}) ->\n        let subdirs =\n          match StringMap.find_opt map \"subdirs\" with\n          | Some (True _) -> true\n          | Some (False _) -> false\n          | _ -> false\n        in\n        str |> processDir ~subdirs\n      | _ -> ())\n    | Arr {content = arr} -> arr |> Array.iter processSourceItem\n    | _ -> ()\n  in\n  (match configSources with\n  | Some sourceItem -> processSourceItem sourceItem\n  | None -> ());\n  !dirs\n\nlet readSourceDirs ~configSources =\n  let sourceDirs =\n    [\"lib\"; \"bs\"; \".sourcedirs.json\"]\n    |> List.fold_left Filename.concat runConfig.bsbProjectRoot\n  in\n  let dirs = ref [] in\n  let readDirs json 
=\n    match json with\n    | Ext_json_types.Obj {map} -> (\n      match StringMap.find_opt map \"dirs\" with\n      | Some (Arr {content = arr}) ->\n        arr\n        |> Array.iter (fun x ->\n               match x with\n               | Ext_json_types.Str {str} -> dirs := str :: !dirs\n               | _ -> ());\n        ()\n      | _ -> ())\n    | _ -> ()\n  in\n  if sourceDirs |> Sys.file_exists then\n    let jsonOpt = sourceDirs |> Ext_json_parse.parse_json_from_file in\n    match jsonOpt with\n    | exception _ -> ()\n    | json ->\n      if runConfig.bsbProjectRoot <> runConfig.projectRoot then (\n        readDirs json;\n        dirs := readDirsFromConfig ~configSources)\n      else readDirs json\n  else (\n    if !Cli.debug then (\n      Log_.item \"Warning: can't find source dirs: %s\\n\" sourceDirs;\n      Log_.item \"Types for cross-references will not be found.\\n\");\n    dirs := readDirsFromConfig ~configSources);\n  !dirs\n"
  },
  {
    "path": "analysis/reanalyze/src/Reanalyze.ml",
    "content": "open Common\n\nlet loadCmtFile cmtFilePath =\n  let cmt_infos = Cmt_format.read_cmt cmtFilePath in\n  let excludePath sourceFile =\n    !Cli.excludePaths\n    |> List.exists (fun prefix_ ->\n           let prefix =\n             match Filename.is_relative sourceFile with\n             | true -> prefix_\n             | false -> Filename.concat (Sys.getcwd ()) prefix_\n           in\n           String.length prefix <= String.length sourceFile\n           &&\n           try String.sub sourceFile 0 (String.length prefix) = prefix\n           with Invalid_argument _ -> false)\n  in\n  match cmt_infos.cmt_annots |> FindSourceFile.cmt with\n  | Some sourceFile when not (excludePath sourceFile) ->\n    if !Cli.debug then\n      Log_.item \"Scanning %s Source:%s@.\"\n        (match !Cli.ci && not (Filename.is_relative cmtFilePath) with\n        | true -> Filename.basename cmtFilePath\n        | false -> cmtFilePath)\n        (match !Cli.ci && not (Filename.is_relative sourceFile) with\n        | true -> sourceFile |> Filename.basename\n        | false -> sourceFile);\n    FileReferences.addFile sourceFile;\n    currentSrc := sourceFile;\n    currentModule := Paths.getModuleName sourceFile;\n    currentModuleName :=\n      !currentModule\n      |> Name.create ~isInterface:(Filename.check_suffix !currentSrc \"i\");\n    if runConfig.dce then cmt_infos |> DeadCode.processCmt ~cmtFilePath;\n    if runConfig.exception_ then cmt_infos |> Exception.processCmt;\n    if runConfig.termination then cmt_infos |> Arnold.processCmt\n  | _ -> ()\n\nlet processCmtFiles ~cmtRoot =\n  let ( +++ ) = Filename.concat in\n  match cmtRoot with\n  | Some root ->\n    Cli.cmtCommand := true;\n    let rec walkSubDirs dir =\n      let absDir =\n        match dir = \"\" with\n        | true -> root\n        | false -> root +++ dir\n      in\n      let skipDir =\n        let base = Filename.basename dir in\n        base = \"node_modules\" || base = \"_esy\"\n      in\n      if (not 
skipDir) && Sys.file_exists absDir then\n        if Sys.is_directory absDir then\n          absDir |> Sys.readdir |> Array.iter (fun d -> walkSubDirs (dir +++ d))\n        else if\n          Filename.check_suffix absDir \".cmt\"\n          || Filename.check_suffix absDir \".cmti\"\n        then absDir |> loadCmtFile\n    in\n    walkSubDirs \"\"\n  | None ->\n    Lazy.force Paths.setReScriptProjectRoot;\n    let lib_bs = runConfig.projectRoot +++ (\"lib\" +++ \"bs\") in\n    let sourceDirs =\n      Paths.readSourceDirs ~configSources:None |> List.sort String.compare\n    in\n    sourceDirs\n    |> List.iter (fun sourceDir ->\n           let libBsSourceDir = Filename.concat lib_bs sourceDir in\n           let files =\n             match Sys.readdir libBsSourceDir |> Array.to_list with\n             | files -> files\n             | exception Sys_error _ -> []\n           in\n           let cmtFiles =\n             files\n             |> List.filter (fun x ->\n                    Filename.check_suffix x \".cmt\"\n                    || Filename.check_suffix x \".cmti\")\n           in\n           cmtFiles |> List.sort String.compare\n           |> List.iter (fun cmtFile ->\n                  let cmtFilePath = Filename.concat libBsSourceDir cmtFile in\n                  cmtFilePath |> loadCmtFile))\n\nlet runAnalysis ~cmtRoot =\n  processCmtFiles ~cmtRoot;\n  if runConfig.dce then (\n    DeadException.forceDelayedItems ();\n    DeadOptionalArgs.forceDelayedItems ();\n    DeadCommon.reportDead ~checkOptionalArg:DeadOptionalArgs.check;\n    WriteDeadAnnotations.write ());\n  if runConfig.exception_ then Exception.Checks.doChecks ();\n  if runConfig.termination && !Common.Cli.debug then Arnold.reportStats ()\n\nlet runAnalysisAndReport ~cmtRoot =\n  Log_.Color.setup ();\n  if !Common.Cli.json then EmitJson.start ();\n  runAnalysis ~cmtRoot;\n  Log_.Stats.report ();\n  Log_.Stats.clear ();\n  if !Common.Cli.json then EmitJson.finish ()\n\nlet cli () =\n  let 
analysisKindSet = ref false in\n  let cmtRootRef = ref None in\n  let usage = \"reanalyze version \" ^ Version.version in\n  let versionAndExit () =\n    print_endline usage;\n    exit 0\n      [@@raises exit]\n  in\n  let rec setAll cmtRoot =\n    RunConfig.all ();\n    cmtRootRef := cmtRoot;\n    analysisKindSet := true\n  and setConfig () =\n    Paths.Config.processBsconfig ();\n    analysisKindSet := true\n  and setDCE cmtRoot =\n    RunConfig.dce ();\n    cmtRootRef := cmtRoot;\n    analysisKindSet := true\n  and setException cmtRoot =\n    RunConfig.exception_ ();\n    cmtRootRef := cmtRoot;\n    analysisKindSet := true\n  and setTermination cmtRoot =\n    RunConfig.termination ();\n    cmtRootRef := cmtRoot;\n    analysisKindSet := true\n  and speclist =\n    [\n      (\"-all\", Arg.Unit (fun () -> setAll None), \"Run all the analyses.\");\n      ( \"-all-cmt\",\n        String (fun s -> setAll (Some s)),\n        \"root_path Run all the analyses for all the .cmt files under the root \\\n         path\" );\n      (\"-ci\", Unit (fun () -> Cli.ci := true), \"Internal flag for use in CI\");\n      ( \"-config\",\n        Unit setConfig,\n        \"Read the analysis mode from rescript.json/bsconfig.json\" );\n      (\"-dce\", Unit (fun () -> setDCE None), \"Eperimental DCE\");\n      (\"-debug\", Unit (fun () -> Cli.debug := true), \"Print debug information\");\n      ( \"-dce-cmt\",\n        String (fun s -> setDCE (Some s)),\n        \"root_path Experimental DCE for all the .cmt files under the root path\"\n      );\n      ( \"-exception\",\n        Unit (fun () -> setException None),\n        \"Experimental exception analysis\" );\n      ( \"-exception-cmt\",\n        String (fun s -> setException (Some s)),\n        \"root_path Experimental exception analysis for all the .cmt files \\\n         under the root path\" );\n      ( \"-exclude-paths\",\n        String\n          (fun s ->\n            let paths = s |> String.split_on_char ',' in\n            
Common.Cli.excludePaths := paths @ Common.Cli.excludePaths.contents),\n        \"comma-separated-path-prefixes Exclude from analysis files whose path \\\n         has a prefix in the list\" );\n      ( \"-experimental\",\n        Set Common.Cli.experimental,\n        \"Turn on experimental analyses (this option is currently unused)\" );\n      ( \"-externals\",\n        Set DeadCommon.Config.analyzeExternals,\n        \"Report on externals in dead code analysis\" );\n      (\"-json\", Set Common.Cli.json, \"Print reports in json format\");\n      ( \"-live-names\",\n        String\n          (fun s ->\n            let names = s |> String.split_on_char ',' in\n            Common.Cli.liveNames := names @ Common.Cli.liveNames.contents),\n        \"comma-separated-names Consider all values with the given names as live\"\n      );\n      ( \"-live-paths\",\n        String\n          (fun s ->\n            let paths = s |> String.split_on_char ',' in\n            Common.Cli.livePaths := paths @ Common.Cli.livePaths.contents),\n        \"comma-separated-path-prefixes Consider all values whose path has a \\\n         prefix in the list as live\" );\n      ( \"-suppress\",\n        String\n          (fun s ->\n            let names = s |> String.split_on_char ',' in\n            runConfig.suppress <- names @ runConfig.suppress),\n        \"comma-separated-path-prefixes Don't report on files whose path has a \\\n         prefix in the list\" );\n      ( \"-termination\",\n        Unit (fun () -> setTermination None),\n        \"Experimental termination analysis\" );\n      ( \"-termination-cmt\",\n        String (fun s -> setTermination (Some s)),\n        \"root_path Experimental termination analysis for all the .cmt files \\\n         under the root path\" );\n      ( \"-unsuppress\",\n        String\n          (fun s ->\n            let names = s |> String.split_on_char ',' in\n            runConfig.unsuppress <- names @ runConfig.unsuppress),\n        
\"comma-separated-path-prefixes Report on files whose path has a prefix \\\n         in the list, overriding -suppress (no-op if -suppress is not \\\n         specified)\" );\n      (\"-version\", Unit versionAndExit, \"Show version information and exit\");\n      (\"--version\", Unit versionAndExit, \"Show version information and exit\");\n      ( \"-write\",\n        Set Common.Cli.write,\n        \"Write @dead annotations directly in the source files\" );\n    ]\n  in\n  Arg.parse speclist print_endline usage;\n  if !analysisKindSet = false then setConfig ();\n  let cmtRoot = !cmtRootRef in\n  runAnalysisAndReport ~cmtRoot\n[@@raises exit]\n\nmodule RunConfig = RunConfig\nmodule Log_ = Log_\n"
  },
  {
    "path": "analysis/reanalyze/src/RunConfig.ml",
    "content": "type t = {\n  mutable bsbProjectRoot: string;\n  mutable dce: bool;\n  mutable exception_: bool;\n  mutable projectRoot: string;\n  mutable suppress: string list;\n  mutable termination: bool;\n  mutable transitive: bool;\n  mutable unsuppress: string list;\n}\n\nlet runConfig =\n  {\n    bsbProjectRoot = \"\";\n    dce = false;\n    exception_ = false;\n    projectRoot = \"\";\n    suppress = [];\n    termination = false;\n    transitive = false;\n    unsuppress = [];\n  }\n\nlet all () =\n  runConfig.dce <- true;\n  runConfig.exception_ <- true;\n  runConfig.termination <- true\n\nlet dce () = runConfig.dce <- true\nlet exception_ () = runConfig.exception_ <- true\nlet termination () = runConfig.termination <- true\n\nlet transitive b = runConfig.transitive <- b\n"
  },
  {
    "path": "analysis/reanalyze/src/SideEffects.ml",
    "content": "let whiteListSideEffects =\n  [\n    \"Pervasives./.\";\n    \"Pervasives.ref\";\n    \"Int64.mul\";\n    \"Int64.neg\";\n    \"Int64.sub\";\n    \"Int64.shift_left\";\n    \"Int64.one\";\n    \"String.length\";\n  ]\n\nlet whiteTableSideEffects =\n  lazy\n    (let tbl = Hashtbl.create 11 in\n     whiteListSideEffects |> List.iter (fun s -> Hashtbl.add tbl s ());\n     tbl)\n\nlet pathIsWhitelistedForSideEffects path =\n  path\n  |> Common.Path.onOkPath ~whenContainsApply:false ~f:(fun s ->\n         Hashtbl.mem (Lazy.force whiteTableSideEffects) s)\n\nlet rec exprNoSideEffects (expr : Typedtree.expression) =\n  match expr.exp_desc with\n  | Texp_ident _ | Texp_constant _ -> true\n  | Texp_construct (_, _, el) -> el |> List.for_all exprNoSideEffects\n  | Texp_function _ -> true\n  | Texp_apply ({exp_desc = Texp_ident (path, _, _)}, args)\n    when path |> pathIsWhitelistedForSideEffects ->\n    args |> List.for_all (fun (_, eo) -> eo |> exprOptNoSideEffects)\n  | Texp_apply _ -> false\n  | Texp_sequence (e1, e2) -> e1 |> exprNoSideEffects && e2 |> exprNoSideEffects\n  | Texp_let (_, vbs, e) ->\n    vbs\n    |> List.for_all (fun (vb : Typedtree.value_binding) ->\n           vb.vb_expr |> exprNoSideEffects)\n    && e |> exprNoSideEffects\n  | Texp_record {fields; extended_expression} ->\n    fields |> Array.for_all fieldNoSideEffects\n    && extended_expression |> exprOptNoSideEffects\n  | Texp_assert _ -> false\n  | Texp_match (e, casesOk, casesExn, partial) ->\n    let cases = casesOk @ casesExn in\n    partial = Total && e |> exprNoSideEffects\n    && cases |> List.for_all caseNoSideEffects\n  | Texp_letmodule _ -> false\n  | Texp_lazy e -> e |> exprNoSideEffects\n  | Texp_try (e, cases) ->\n    e |> exprNoSideEffects && cases |> List.for_all caseNoSideEffects\n  | Texp_tuple el -> el |> List.for_all exprNoSideEffects\n  | Texp_variant (_lbl, eo) -> eo |> exprOptNoSideEffects\n  | Texp_field (e, _lid, _ld) -> e |> exprNoSideEffects\n  | 
Texp_setfield _ -> false\n  | Texp_array el -> el |> List.for_all exprNoSideEffects\n  | Texp_ifthenelse (e1, e2, eo) ->\n    e1 |> exprNoSideEffects && e2 |> exprNoSideEffects\n    && eo |> exprOptNoSideEffects\n  | Texp_while (e1, e2) -> e1 |> exprNoSideEffects && e2 |> exprNoSideEffects\n  | Texp_for (_id, _pat, e1, e2, _dir, e3) ->\n    e1 |> exprNoSideEffects && e2 |> exprNoSideEffects\n    && e3 |> exprNoSideEffects\n  | Texp_send _ -> false\n  | Texp_new _ -> true\n  | Texp_instvar _ -> true\n  | Texp_setinstvar _ -> false\n  | Texp_override _ -> false\n  | Texp_letexception (_ec, e) -> e |> exprNoSideEffects\n  | Texp_object _ -> true\n  | Texp_pack _ -> false\n  | Texp_unreachable -> false\n  | Texp_extension_constructor _ when true -> true\n  | _ -> (* on ocaml 4.08: Texp_letop | Texp_open *) true\n\nand exprOptNoSideEffects eo =\n  match eo with\n  | None -> true\n  | Some e -> e |> exprNoSideEffects\n\nand fieldNoSideEffects ((_ld, rld) : _ * Typedtree.record_label_definition) =\n  match rld with\n  | Kept _typeExpr -> true\n  | Overridden (_lid, e) -> e |> exprNoSideEffects\n\nand caseNoSideEffects : Typedtree.case -> _ =\n fun {c_guard; c_rhs} ->\n  c_guard |> exprOptNoSideEffects && c_rhs |> exprNoSideEffects\n\nlet checkExpr e = not (exprNoSideEffects e)\n"
  },
  {
    "path": "analysis/reanalyze/src/Suppress.ml",
    "content": "open Common\n\nlet checkPrefix prefix_ =\n  let prefix =\n    match runConfig.projectRoot = \"\" with\n    | true -> prefix_\n    | false -> Filename.concat runConfig.projectRoot prefix_\n  in\n  let prefixLen = prefix |> String.length in\n  fun sourceDir ->\n    try String.sub sourceDir 0 prefixLen = prefix\n    with Invalid_argument _ -> false\n\nlet suppressSourceDir =\n  lazy\n    (fun sourceDir ->\n      runConfig.suppress\n      |> List.exists (fun prefix -> checkPrefix prefix sourceDir))\n\nlet unsuppressSourceDir =\n  lazy\n    (fun sourceDir ->\n      runConfig.unsuppress\n      |> List.exists (fun prefix -> checkPrefix prefix sourceDir))\n\nlet posInSuppress (pos : Lexing.position) =\n  pos.pos_fname |> Lazy.force suppressSourceDir\n\nlet posInUnsuppress (pos : Lexing.position) =\n  pos.pos_fname |> Lazy.force unsuppressSourceDir\n\n(** First suppress list, then override with unsuppress list *)\nlet filter pos = (not (posInSuppress pos)) || posInUnsuppress pos\n"
  },
  {
    "path": "analysis/reanalyze/src/Version.ml",
    "content": "(* CREATED BY reanalyze/scripts/bump_version_module.js *)\n(* DO NOT MODIFY BY HAND, WILL BE AUTOMATICALLY UPDATED BY npm version *)\n\nlet version = \"2.22.0\"\n"
  },
  {
    "path": "analysis/reanalyze/src/WriteDeadAnnotations.ml",
    "content": "open Common\n\ntype language = Ml | Res\n\nlet posLanguage (pos : Lexing.position) =\n  if\n    Filename.check_suffix pos.pos_fname \".res\"\n    || Filename.check_suffix pos.pos_fname \".resi\"\n  then Res\n  else Ml\n\nlet deadAnnotation = \"dead\"\nlet annotateAtEnd ~pos =\n  match posLanguage pos with\n  | Res -> false\n  | Ml -> true\n\nlet getPosAnnotation decl =\n  match annotateAtEnd ~pos:decl.pos with\n  | true -> decl.posEnd\n  | false -> decl.posStart\n\nlet rec lineToString_ {original; declarations} =\n  match declarations with\n  | [] -> original\n  | ({declKind; path; pos} as decl) :: nextDeclarations ->\n    let language = posLanguage pos in\n    let annotationStr =\n      match language with\n      | Res ->\n        \"@\" ^ deadAnnotation ^ \"(\\\"\" ^ (path |> Path.withoutHead) ^ \"\\\") \"\n      | Ml ->\n        \" \" ^ \"[\"\n        ^ (match declKind |> DeclKind.isType with\n          | true -> \"@\"\n          | false -> \"@@\")\n        ^ deadAnnotation ^ \" \\\"\" ^ (path |> Path.withoutHead) ^ \"\\\"] \"\n    in\n    let posAnnotation = decl |> getPosAnnotation in\n    let col = posAnnotation.pos_cnum - posAnnotation.pos_bol in\n    let originalLen = String.length original in\n    {\n      original =\n        (if String.length original >= col && col > 0 then\n           let original1, original2 =\n             try\n               ( String.sub original 0 col,\n                 String.sub original col (originalLen - col) )\n             with Invalid_argument _ -> (original, \"\")\n           in\n           if language = Res && declKind = VariantCase then\n             if\n               String.length original2 >= 2\n               && (String.sub [@doesNotRaise]) original2 0 2 = \"| \"\n             then\n               original1 ^ \"| \" ^ annotationStr\n               ^ (String.sub [@doesNotRaise]) original2 2\n                   (String.length original2 - 2)\n             else if\n               String.length original2 >= 
1\n               && (String.sub [@doesNotRaise]) original2 0 1 = \"|\"\n             then\n               original1 ^ \"|\" ^ annotationStr\n               ^ (String.sub [@doesNotRaise]) original2 1\n                   (String.length original2 - 1)\n             else original1 ^ \"| \" ^ annotationStr ^ original2\n           else original1 ^ annotationStr ^ original2\n         else\n           match language = Ml with\n           | true -> original ^ annotationStr\n           | false -> annotationStr ^ original);\n      declarations = nextDeclarations;\n    }\n    |> lineToString_\n\nlet lineToString {original; declarations} =\n  let declarations =\n    declarations\n    |> List.sort (fun decl1 decl2 ->\n           (getPosAnnotation decl2).pos_cnum - (getPosAnnotation decl1).pos_cnum)\n  in\n  lineToString_ {original; declarations}\n\nlet currentFile = ref \"\"\nlet currentFileLines = (ref [||] : line array ref)\n\nlet readFile fileName =\n  let channel = open_in fileName in\n  let lines = ref [] in\n  let rec loop () =\n    let line = {original = input_line channel; declarations = []} in\n    lines := line :: !lines;\n    loop ()\n      [@@raises End_of_file]\n  in\n  try loop ()\n  with End_of_file ->\n    close_in_noerr channel;\n    !lines |> List.rev |> Array.of_list\n\nlet writeFile fileName lines =\n  if fileName <> \"\" && !Cli.write then (\n    let channel = open_out fileName in\n    let lastLine = Array.length lines in\n    lines\n    |> Array.iteri (fun n line ->\n           output_string channel (line |> lineToString);\n           if n < lastLine - 1 then output_char channel '\\n');\n    close_out_noerr channel)\n\nlet offsetOfPosAdjustment = function\n  | FirstVariant | Nothing -> 0\n  | OtherVariant -> 2\n\nlet getLineAnnotation ~decl ~line =\n  if !Cli.json then\n    let posAnnotation = decl |> getPosAnnotation in\n    let offset = decl.posAdjustment |> offsetOfPosAdjustment in\n    EmitJson.emitAnnotate\n      ~pos:\n        ( 
posAnnotation.pos_lnum - 1,\n          posAnnotation.pos_cnum - posAnnotation.pos_bol + offset )\n      ~text:\n        (if decl.posAdjustment = FirstVariant then\n           (* avoid syntax error *)\n           \"| @dead \"\n         else \"@dead \")\n      ~action:\"Suppress dead code warning\"\n  else\n    Format.asprintf \"@.  <-- line %d@.  %s\" decl.pos.pos_lnum\n      (line |> lineToString)\n\nlet cantFindLine () = if !Cli.json then \"\" else \"\\n  <-- Can't find line\"\n\nlet lineAnnotationToString = function\n  | None -> cantFindLine ()\n  | Some (decl, line) -> getLineAnnotation ~decl ~line\n\nlet addLineAnnotation ~decl : lineAnnotation =\n  let fileName = decl.pos.pos_fname in\n  if Sys.file_exists fileName then (\n    if fileName <> !currentFile then (\n      writeFile !currentFile !currentFileLines;\n      currentFile := fileName;\n      currentFileLines := readFile fileName);\n    let indexInLines = (decl |> getPosAnnotation).pos_lnum - 1 in\n    match !currentFileLines.(indexInLines) with\n    | line ->\n      line.declarations <- decl :: line.declarations;\n      Some (decl, line)\n    | exception Invalid_argument _ -> None)\n  else None\n\nlet write () = writeFile !currentFile !currentFileLines\n"
  },
  {
    "path": "analysis/reanalyze/src/dune",
    "content": "(library\n (name reanalyze)\n (flags\n  (-w \"+6+26+27+32+33+39\"))\n (libraries jsonlib ext ml str unix))\n"
  },
  {
    "path": "analysis/src/BuildSystem.ml",
    "content": "let namespacedName namespace name =\n  match namespace with\n  | None -> name\n  | Some namespace -> name ^ \"-\" ^ namespace\n\nlet ( /+ ) = Filename.concat\n\nlet getBsPlatformDir rootPath =\n  match !Cfg.isDocGenFromCompiler with\n  | false -> (\n    let result =\n      ModuleResolution.resolveNodeModulePath ~startPath:rootPath \"rescript\"\n    in\n    match result with\n    | Some path -> Some path\n    | None ->\n      let message = \"rescript could not be found\" in\n      Log.log message;\n      None)\n  | true -> Some rootPath\n\nlet getLibBs root = Files.ifExists (root /+ \"lib\" /+ \"bs\")\n\nlet getStdlib base =\n  match getBsPlatformDir base with\n  | None -> None\n  | Some bsPlatformDir -> Some (bsPlatformDir /+ \"lib\" /+ \"ocaml\")\n"
  },
  {
    "path": "analysis/src/Cache.ml",
    "content": "open SharedTypes\n\ntype cached = {\n  projectFiles: FileSet.t;\n  dependenciesFiles: FileSet.t;\n  pathsForModule: (file, paths) Hashtbl.t;\n}\n\nlet writeCache filename (data : cached) =\n  let oc = open_out_bin filename in\n  Marshal.to_channel oc data [];\n  close_out oc\n\nlet readCache filename =\n  if !Cfg.readProjectConfigCache && Sys.file_exists filename then\n    try\n      let ic = open_in_bin filename in\n      let data : cached = Marshal.from_channel ic in\n      close_in ic;\n      Some data\n    with _ -> None\n  else None\n\nlet deleteCache filename = try Sys.remove filename with _ -> ()\n\nlet targetFileFromLibBs libBs = Filename.concat libBs \".project-files-cache\"\n\nlet cacheProject (package : package) =\n  let cached =\n    {\n      projectFiles = package.projectFiles;\n      dependenciesFiles = package.dependenciesFiles;\n      pathsForModule = package.pathsForModule;\n    }\n  in\n  match BuildSystem.getLibBs package.rootPath with\n  | None -> print_endline \"\\\"ERR\\\"\"\n  | Some libBs ->\n    let targetFile = targetFileFromLibBs libBs in\n    writeCache targetFile cached;\n    print_endline \"\\\"OK\\\"\"\n"
  },
  {
    "path": "analysis/src/Cfg.ml",
    "content": "let debugFollowCtxPath = ref false\n\nlet isDocGenFromCompiler = ref false\n\nlet inIncrementalTypecheckingMode =\n  ref\n    (try\n       match Sys.getenv \"RESCRIPT_INCREMENTAL_TYPECHECKING\" with\n       | \"true\" -> true\n       | _ -> false\n     with _ -> false)\n\nlet readProjectConfigCache =\n  ref\n    (try\n       match Sys.getenv \"RESCRIPT_PROJECT_CONFIG_CACHE\" with\n       | \"true\" -> true\n       | _ -> false\n     with _ -> false)\n"
  },
  {
    "path": "analysis/src/Cmt.ml",
    "content": "open SharedTypes\n\nlet fullForCmt ~moduleName ~package ~uri cmt =\n  match Shared.tryReadCmt cmt with\n  | None -> None\n  | Some infos ->\n    let file = ProcessCmt.fileForCmtInfos ~moduleName ~uri infos in\n    let extra = ProcessExtra.getExtra ~file ~infos in\n    Some {file; extra; package}\n\nlet ( /+ ) = Filename.concat\n\nlet fullFromUri ~uri =\n  let path = Uri.toPath uri in\n  match Packages.getPackage ~uri with\n  | None -> None\n  | Some package -> (\n    let moduleName =\n      BuildSystem.namespacedName package.namespace (FindFiles.getName path)\n    in\n    let incremental =\n      if !Cfg.inIncrementalTypecheckingMode then\n        let incrementalCmtPath =\n          (package.rootPath /+ \"lib\" /+ \"bs\" /+ \"___incremental\" /+ moduleName)\n          ^\n          match Files.classifySourceFile path with\n          | Resi -> \".cmti\"\n          | _ -> \".cmt\"\n        in\n        fullForCmt ~moduleName ~package ~uri incrementalCmtPath\n      else None\n    in\n    match incremental with\n    | Some cmtInfo ->\n      if Debug.verbose () then Printf.printf \"[cmt] Found incremental cmt\\n\";\n      Some cmtInfo\n    | None -> (\n      match Hashtbl.find_opt package.pathsForModule moduleName with\n      | Some paths ->\n        let cmt = getCmtPath ~uri paths in\n        fullForCmt ~moduleName ~package ~uri cmt\n      | None ->\n        prerr_endline (\"can't find module \" ^ moduleName);\n        None))\n\nlet fullsFromModule ~package ~moduleName =\n  if Hashtbl.mem package.pathsForModule moduleName then\n    let paths = Hashtbl.find package.pathsForModule moduleName in\n    let uris = getUris paths in\n    uris |> List.filter_map (fun uri -> fullFromUri ~uri)\n  else []\n\nlet loadFullCmtFromPath ~path =\n  let uri = Uri.fromPath path in\n  let full = fullFromUri ~uri in\n  match full with\n  | None -> None\n  | Some full ->\n    (* Turn on uncurried for the outcome printer *)\n    if full.package.uncurried then Config.uncurried := 
Uncurried;\n    Some full\n"
  },
  {
    "path": "analysis/src/CodeActions.ml",
    "content": "(* This is the return that's expected when resolving code actions *)\ntype result = Protocol.codeAction list\n\nlet stringifyCodeActions codeActions =\n  Printf.sprintf {|%s|}\n    (codeActions |> List.map Protocol.stringifyCodeAction |> Protocol.array)\n\nlet make ~title ~kind ~uri ~newText ~range =\n  let uri = uri |> Uri.fromPath |> Uri.toString in\n  {\n    Protocol.title;\n    codeActionKind = kind;\n    edit =\n      {\n        documentChanges =\n          [\n            TextDocumentEdit\n              {\n                Protocol.textDocument = {version = None; uri};\n                edits = [{newText; range}];\n              };\n          ];\n      };\n  }\n\nlet makeWithDocumentChanges ~title ~kind ~documentChanges =\n  {Protocol.title; codeActionKind = kind; edit = {documentChanges}}\n"
  },
  {
    "path": "analysis/src/Codemod.ml",
    "content": "type transformType = AddMissingCases\n\nlet rec collectPatterns p =\n  match p.Parsetree.ppat_desc with\n  | Ppat_or (p1, p2) -> collectPatterns p1 @ [p2]\n  | _ -> [p]\n\nlet transform ~path ~pos ~debug ~typ ~hint =\n  let structure, printExpr, _, _ = Xform.parseImplementation ~filename:path in\n  match typ with\n  | AddMissingCases -> (\n    let source = \"let \" ^ hint ^ \" = ()\" in\n    let {Res_driver.parsetree = hintStructure} =\n      Res_driver.parse_implementation_from_source ~for_printer:false\n        ~display_filename:\"<none>\" ~source\n    in\n    match hintStructure with\n    | [{pstr_desc = Pstr_value (_, [{pvb_pat = pattern}])}] -> (\n      let cases =\n        collectPatterns pattern\n        |> List.map (fun (p : Parsetree.pattern) ->\n               Ast_helper.Exp.case p (TypeUtils.Codegen.mkFailWithExp ()))\n      in\n      let result = ref None in\n      let mkIterator ~pos ~result =\n        let expr (iterator : Ast_iterator.iterator) (exp : Parsetree.expression)\n            =\n          match exp.pexp_desc with\n          | Pexp_match (e, existingCases)\n            when Pos.ofLexing exp.pexp_loc.loc_start = pos ->\n            result :=\n              Some {exp with pexp_desc = Pexp_match (e, existingCases @ cases)}\n          | _ -> Ast_iterator.default_iterator.expr iterator exp\n        in\n        {Ast_iterator.default_iterator with expr}\n      in\n      let iterator = mkIterator ~pos ~result in\n      iterator.structure iterator structure;\n      match !result with\n      | None ->\n        if debug then print_endline \"Found no result\";\n        exit 1\n      | Some switchExpr ->\n        printExpr ~range:(Loc.rangeOfLoc switchExpr.pexp_loc) switchExpr)\n    | _ ->\n      if debug then print_endline \"Mismatch in expected structure\";\n      exit 1)\n"
  },
  {
    "path": "analysis/src/Commands.ml",
    "content": "let completion ~debug ~path ~pos ~currentFile =\n  let completions =\n    match\n      Completions.getCompletions ~debug ~path ~pos ~currentFile ~forHover:false\n    with\n    | None -> []\n    | Some (completions, full, _) ->\n      completions\n      |> List.map (CompletionBackEnd.completionToItem ~full)\n      |> List.map Protocol.stringifyCompletionItem\n  in\n  completions |> Protocol.array |> print_endline\n\nlet completionResolve ~path ~modulePath =\n  (* We ignore the internal module path as of now because there's currently\n     no use case for it. But, if we wanted to move resolving documentation\n     for regular modules and not just file modules to the completionResolve\n     hook as well, it'd be easy to implement here. *)\n  let moduleName, _innerModulePath =\n    match modulePath |> String.split_on_char '.' with\n    | [moduleName] -> (moduleName, [])\n    | moduleName :: rest -> (moduleName, rest)\n    | [] -> raise (Failure \"Invalid module path.\")\n  in\n  let docstring =\n    match Cmt.loadFullCmtFromPath ~path with\n    | None ->\n      if Debug.verbose () then\n        Printf.printf \"[completion_resolve] Could not load cmt\\n\";\n      Protocol.null\n    | Some full -> (\n      match ProcessCmt.fileForModule ~package:full.package moduleName with\n      | None ->\n        if Debug.verbose () then\n          Printf.printf \"[completion_resolve] Did not find file for module %s\\n\"\n            moduleName;\n        Protocol.null\n      | Some file ->\n        file.structure.docstring |> String.concat \"\\n\\n\"\n        |> Protocol.wrapInQuotes)\n  in\n  print_endline docstring\n\nlet inlayhint ~path ~pos ~maxLength ~debug =\n  let result =\n    match Hint.inlay ~path ~pos ~maxLength ~debug with\n    | Some hints -> hints |> Protocol.array\n    | None -> Protocol.null\n  in\n  print_endline result\n\nlet codeLens ~path ~debug =\n  let result =\n    match Hint.codeLens ~path ~debug with\n    | Some lens -> lens |> Protocol.array\n 
   | None -> Protocol.null\n  in\n  print_endline result\n\nlet hover ~path ~pos ~currentFile ~debug ~supportsMarkdownLinks =\n  let result =\n    match Cmt.loadFullCmtFromPath ~path with\n    | None -> Protocol.null\n    | Some full -> (\n      match References.getLocItem ~full ~pos ~debug with\n      | None -> (\n        if debug then\n          Printf.printf\n            \"Nothing at that position. Now trying to use completion.\\n\";\n        match\n          Hover.getHoverViaCompletions ~debug ~path ~pos ~currentFile\n            ~forHover:true ~supportsMarkdownLinks\n        with\n        | None -> Protocol.null\n        | Some hover -> hover)\n      | Some locItem -> (\n        let isModule =\n          match locItem.locType with\n          | LModule _ | TopLevelModule _ -> true\n          | TypeDefinition _ | Typed _ | Constant _ -> false\n        in\n        let uriLocOpt = References.definitionForLocItem ~full locItem in\n        let skipZero =\n          match uriLocOpt with\n          | None -> false\n          | Some (_, loc) ->\n            let isInterface = full.file.uri |> Uri.isInterface in\n            let posIsZero {Lexing.pos_lnum; pos_bol; pos_cnum} =\n              (not isInterface) && pos_lnum = 1 && pos_cnum - pos_bol = 0\n            in\n            (* Skip if range is all zero, unless it's a module *)\n            (not isModule) && posIsZero loc.loc_start && posIsZero loc.loc_end\n        in\n        if skipZero then Protocol.null\n        else\n          let hoverText = Hover.newHover ~supportsMarkdownLinks ~full locItem in\n          match hoverText with\n          | None -> Protocol.null\n          | Some s -> Protocol.stringifyHover s))\n  in\n  print_endline result\n\nlet signatureHelp ~path ~pos ~currentFile ~debug ~allowForConstructorPayloads =\n  let result =\n    match\n      SignatureHelp.signatureHelp ~path ~pos ~currentFile ~debug\n        ~allowForConstructorPayloads\n    with\n    | None ->\n      {Protocol.signatures = []; 
activeSignature = None; activeParameter = None}\n    | Some res -> res\n  in\n  print_endline (Protocol.stringifySignatureHelp result)\n\nlet codeAction ~path ~startPos ~endPos ~currentFile ~debug =\n  Xform.extractCodeActions ~path ~startPos ~endPos ~currentFile ~debug\n  |> CodeActions.stringifyCodeActions |> print_endline\n\nlet definition ~path ~pos ~debug =\n  let locationOpt =\n    match Cmt.loadFullCmtFromPath ~path with\n    | None -> None\n    | Some full -> (\n      match References.getLocItem ~full ~pos ~debug with\n      | None -> None\n      | Some locItem -> (\n        match References.definitionForLocItem ~full locItem with\n        | None -> None\n        | Some (uri, loc) when not loc.loc_ghost ->\n          let isInterface = full.file.uri |> Uri.isInterface in\n          let posIsZero {Lexing.pos_lnum; pos_bol; pos_cnum} =\n            (* range is zero *)\n            pos_lnum = 1 && pos_cnum - pos_bol = 0\n          in\n          let isModule =\n            match locItem.locType with\n            | LModule _ | TopLevelModule _ -> true\n            | TypeDefinition _ | Typed _ | Constant _ -> false\n          in\n          let skipLoc =\n            (not isModule) && (not isInterface) && posIsZero loc.loc_start\n            && posIsZero loc.loc_end\n          in\n          if skipLoc then None\n          else\n            Some\n              {\n                Protocol.uri = Files.canonicalizeUri uri;\n                range = Utils.cmtLocToRange loc;\n              }\n        | Some _ -> None))\n  in\n  print_endline\n    (match locationOpt with\n    | None -> Protocol.null\n    | Some location -> location |> Protocol.stringifyLocation)\n\nlet typeDefinition ~path ~pos ~debug =\n  let maybeLocation =\n    match Cmt.loadFullCmtFromPath ~path with\n    | None -> None\n    | Some full -> (\n      match References.getLocItem ~full ~pos ~debug with\n      | None -> None\n      | Some locItem -> (\n        match References.typeDefinitionForLocItem ~full 
locItem with\n        | None -> None\n        | Some (uri, loc) ->\n          Some\n            {\n              Protocol.uri = Files.canonicalizeUri uri;\n              range = Utils.cmtLocToRange loc;\n            }))\n  in\n  print_endline\n    (match maybeLocation with\n    | None -> Protocol.null\n    | Some location -> location |> Protocol.stringifyLocation)\n\nlet references ~path ~pos ~debug =\n  let allLocs =\n    match Cmt.loadFullCmtFromPath ~path with\n    | None -> []\n    | Some full -> (\n      match References.getLocItem ~full ~pos ~debug with\n      | None -> []\n      | Some locItem ->\n        let allReferences = References.allReferencesForLocItem ~full locItem in\n        allReferences\n        |> List.fold_left\n             (fun acc {References.uri = uri2; locOpt} ->\n               let loc =\n                 match locOpt with\n                 | Some loc -> loc\n                 | None -> Uri.toTopLevelLoc uri2\n               in\n               Protocol.stringifyLocation\n                 {uri = Uri.toString uri2; range = Utils.cmtLocToRange loc}\n               :: acc)\n             [])\n  in\n  print_endline\n    (if allLocs = [] then Protocol.null\n     else \"[\\n\" ^ (allLocs |> String.concat \",\\n\") ^ \"\\n]\")\n\nlet rename ~path ~pos ~newName ~debug =\n  let result =\n    match Cmt.loadFullCmtFromPath ~path with\n    | None -> Protocol.null\n    | Some full -> (\n      match References.getLocItem ~full ~pos ~debug with\n      | None -> Protocol.null\n      | Some locItem ->\n        let allReferences = References.allReferencesForLocItem ~full locItem in\n        let referencesToToplevelModules =\n          allReferences\n          |> Utils.filterMap (fun {References.uri = uri2; locOpt} ->\n                 if locOpt = None then Some uri2 else None)\n        in\n        let referencesToItems =\n          allReferences\n          |> Utils.filterMap (function\n               | {References.uri = uri2; locOpt = Some loc} -> Some (uri2, 
loc)\n               | {locOpt = None} -> None)\n        in\n        let fileRenames =\n          referencesToToplevelModules\n          |> List.map (fun uri ->\n                 let path = Uri.toPath uri in\n                 let dir = Filename.dirname path in\n                 let newPath =\n                   Filename.concat dir (newName ^ Filename.extension path)\n                 in\n                 let newUri = Uri.fromPath newPath in\n                 Protocol.\n                   {\n                     oldUri = uri |> Uri.toString;\n                     newUri = newUri |> Uri.toString;\n                   })\n        in\n        let textDocumentEdits =\n          let module StringMap = Misc.StringMap in\n          let textEditsByUri =\n            referencesToItems\n            |> List.map (fun (uri, loc) -> (Uri.toString uri, loc))\n            |> List.fold_left\n                 (fun acc (uri, loc) ->\n                   let textEdit =\n                     Protocol.\n                       {range = Utils.cmtLocToRange loc; newText = newName}\n                   in\n                   match StringMap.find_opt uri acc with\n                   | None -> StringMap.add uri [textEdit] acc\n                   | Some prevEdits ->\n                     StringMap.add uri (textEdit :: prevEdits) acc)\n                 StringMap.empty\n          in\n          StringMap.fold\n            (fun uri edits acc ->\n              let textDocumentEdit =\n                Protocol.{textDocument = {uri; version = None}; edits}\n              in\n              textDocumentEdit :: acc)\n            textEditsByUri []\n        in\n        let fileRenamesString =\n          fileRenames |> List.map Protocol.stringifyRenameFile\n        in\n        let textDocumentEditsString =\n          textDocumentEdits |> List.map Protocol.stringifyTextDocumentEdit\n        in\n        \"[\\n\"\n        ^ (fileRenamesString @ textDocumentEditsString |> String.concat \",\\n\")\n        ^ 
\"\\n]\")\n  in\n  print_endline result\n\nlet format ~path =\n  if Filename.check_suffix path \".res\" then\n    let {Res_driver.parsetree = structure; comments; diagnostics} =\n      Res_driver.parsing_engine.parse_implementation ~for_printer:true\n        ~filename:path\n    in\n    if List.length diagnostics > 0 then \"\"\n    else\n      Res_printer.print_implementation ~width:!Res_cli.ResClflags.width\n        ~comments structure\n  else if Filename.check_suffix path \".resi\" then\n    let {Res_driver.parsetree = signature; comments; diagnostics} =\n      Res_driver.parsing_engine.parse_interface ~for_printer:true ~filename:path\n    in\n    if List.length diagnostics > 0 then \"\"\n    else\n      Res_printer.print_interface ~width:!Res_cli.ResClflags.width ~comments\n        signature\n  else \"\"\n\nlet diagnosticSyntax ~path =\n  print_endline (Diagnostics.document_syntax ~path |> Protocol.array)\n\nlet test ~path =\n  Uri.stripPath := true;\n  match Files.readFile path with\n  | None -> assert false\n  | Some text ->\n    let lines = text |> String.split_on_char '\\n' in\n    let processLine i line =\n      let createCurrentFile () =\n        let currentFile, cout =\n          Filename.open_temp_file \"def\" (\"txt.\" ^ Filename.extension path)\n        in\n        let removeLineComment l =\n          let len = String.length l in\n          let rec loop i =\n            if i + 2 <= len && l.[i] = '/' && l.[i + 1] = '/' then Some (i + 2)\n            else if i + 2 < len && l.[i] = ' ' then loop (i + 1)\n            else None\n          in\n          match loop 0 with\n          | None -> l\n          | Some indexAfterComment ->\n            String.make indexAfterComment ' '\n            ^ String.sub l indexAfterComment (len - indexAfterComment)\n        in\n        lines\n        |> List.iteri (fun j l ->\n               let lineToOutput =\n                 if j == i - 1 then removeLineComment l else l\n               in\n               Printf.fprintf 
cout \"%s\\n\" lineToOutput);\n        close_out cout;\n        currentFile\n      in\n      if Str.string_match (Str.regexp \"^ *//[ ]*\\\\^\") line 0 then\n        let matched = Str.matched_string line in\n        let len = line |> String.length in\n        let mlen = String.length matched in\n        let rest = String.sub line mlen (len - mlen) in\n        let line = i - 1 in\n        let col = mlen - 1 in\n        if mlen >= 3 then (\n          (match String.sub rest 0 3 with\n          | \"db+\" -> Log.verbose := true\n          | \"db-\" -> Log.verbose := false\n          | \"dv+\" -> Debug.debugLevel := Verbose\n          | \"dv-\" -> Debug.debugLevel := Off\n          | \"in+\" -> Cfg.inIncrementalTypecheckingMode := true\n          | \"in-\" -> Cfg.inIncrementalTypecheckingMode := false\n          | \"ve+\" -> (\n            let version = String.sub rest 3 (String.length rest - 3) in\n            let version = String.trim version in\n            if Debug.verbose () then\n              Printf.printf \"Setting version: %s\\n\" version;\n            match String.split_on_char '.' 
version with\n            | [majorRaw; minorRaw] ->\n              let version = (int_of_string majorRaw, int_of_string minorRaw) in\n              Packages.overrideRescriptVersion := Some version\n            | _ -> ())\n          | \"ve-\" -> Packages.overrideRescriptVersion := None\n          | \"def\" ->\n            print_endline\n              (\"Definition \" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n             ^ string_of_int col);\n            definition ~path ~pos:(line, col) ~debug:true\n          | \"com\" ->\n            print_endline\n              (\"Complete \" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n             ^ string_of_int col);\n            let currentFile = createCurrentFile () in\n            completion ~debug:true ~path ~pos:(line, col) ~currentFile;\n            Sys.remove currentFile\n          | \"cre\" ->\n            let modulePath = String.sub rest 3 (String.length rest - 3) in\n            let modulePath = String.trim modulePath in\n            print_endline (\"Completion resolve: \" ^ modulePath);\n            completionResolve ~path ~modulePath\n          | \"dce\" ->\n            print_endline (\"DCE \" ^ path);\n            Reanalyze.RunConfig.runConfig.suppress <- [\"src\"];\n            Reanalyze.RunConfig.runConfig.unsuppress <-\n              [Filename.concat \"src\" \"dce\"];\n            DceCommand.command ()\n          | \"doc\" ->\n            print_endline (\"DocumentSymbol \" ^ path);\n            DocumentSymbol.command ~path\n          | \"hig\" ->\n            print_endline (\"Highlight \" ^ path);\n            SemanticTokens.command ~debug:true\n              ~emitter:(SemanticTokens.Token.createEmitter ())\n              ~path\n          | \"hov\" ->\n            print_endline\n              (\"Hover \" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n             ^ string_of_int col);\n            let currentFile = createCurrentFile () in\n            hover ~supportsMarkdownLinks:true ~path ~pos:(line, 
col)\n              ~currentFile ~debug:true;\n            Sys.remove currentFile\n          | \"she\" ->\n            print_endline\n              (\"Signature help \" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n             ^ string_of_int col);\n            let currentFile = createCurrentFile () in\n            signatureHelp ~path ~pos:(line, col) ~currentFile ~debug:true\n              ~allowForConstructorPayloads:true;\n            Sys.remove currentFile\n          | \"int\" ->\n            print_endline (\"Create Interface \" ^ path);\n            let cmiFile =\n              let open Filename in\n              let ( ++ ) = concat in\n              let name = chop_extension (basename path) ^ \".cmi\" in\n              let dir = dirname path in\n              dir ++ parent_dir_name ++ \"lib\" ++ \"bs\" ++ \"src\" ++ name\n            in\n            Printf.printf \"%s\" (CreateInterface.command ~path ~cmiFile)\n          | \"ref\" ->\n            print_endline\n              (\"References \" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n             ^ string_of_int col);\n            references ~path ~pos:(line, col) ~debug:true\n          | \"ren\" ->\n            let newName = String.sub rest 4 (len - mlen - 4) in\n            let () =\n              print_endline\n                (\"Rename \" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n               ^ string_of_int col ^ \" \" ^ newName)\n            in\n            rename ~path ~pos:(line, col) ~newName ~debug:true\n          | \"typ\" ->\n            print_endline\n              (\"TypeDefinition \" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n             ^ string_of_int col);\n            typeDefinition ~path ~pos:(line, col) ~debug:true\n          | \"xfm\" ->\n            let currentFile = createCurrentFile () in\n            (* +2 is to ensure that the character ^ points to is what's considered the end of the selection. 
*)\n            let endCol = col + try String.index rest '^' + 2 with _ -> 0 in\n            let endPos = (line, endCol) in\n            let startPos = (line, col) in\n            if startPos = endPos then\n              print_endline\n                (\"Xform \" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n               ^ string_of_int col)\n            else\n              print_endline\n                (\"Xform \" ^ path ^ \" start: \" ^ Pos.toString startPos\n               ^ \", end: \" ^ Pos.toString endPos);\n            let codeActions =\n              Xform.extractCodeActions ~path ~startPos ~endPos ~currentFile\n                ~debug:true\n            in\n            Sys.remove currentFile;\n            codeActions\n            |> List.iter (fun {Protocol.title; edit = {documentChanges}} ->\n                   Printf.printf \"Hit: %s\\n\" title;\n                   documentChanges\n                   |> List.iter (fun dc ->\n                          match dc with\n                          | Protocol.TextDocumentEdit tde ->\n                            Printf.printf \"\\nTextDocumentEdit: %s\\n\"\n                              tde.textDocument.uri;\n\n                            tde.edits\n                            |> List.iter (fun {Protocol.range; newText} ->\n                                   let indent =\n                                     String.make range.start.character ' '\n                                   in\n                                   Printf.printf\n                                     \"%s\\nnewText:\\n%s<--here\\n%s%s\\n\"\n                                     (Protocol.stringifyRange range)\n                                     indent indent newText)\n                          | CreateFile cf ->\n                            Printf.printf \"\\nCreateFile: %s\\n\" cf.uri))\n          | \"c-a\" ->\n            let hint = String.sub rest 3 (String.length rest - 3) in\n            print_endline\n              (\"Codemod 
AddMissingCases\" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n             ^ string_of_int col);\n            Codemod.transform ~path ~pos:(line, col) ~debug:true\n              ~typ:AddMissingCases ~hint\n            |> print_endline\n          | \"dia\" -> diagnosticSyntax ~path\n          | \"hin\" ->\n            (* Get all inlay Hint between line 1 and n.\n               Don't get the first line = 0.\n            *)\n            let line_start = 1 in\n            let line_end = 34 in\n            print_endline\n              (\"Inlay Hint \" ^ path ^ \" \" ^ string_of_int line_start ^ \":\"\n             ^ string_of_int line_end);\n            inlayhint ~path ~pos:(line_start, line_end) ~maxLength:\"25\"\n              ~debug:false\n          | \"cle\" ->\n            print_endline (\"Code Lens \" ^ path);\n            codeLens ~path ~debug:false\n          | \"ast\" ->\n            print_endline\n              (\"Dump AST \" ^ path ^ \" \" ^ string_of_int line ^ \":\"\n             ^ string_of_int col);\n            let currentFile = createCurrentFile () in\n            DumpAst.dump ~pos:(line, col) ~currentFile;\n            Sys.remove currentFile\n          | _ -> ());\n          print_newline ())\n    in\n    lines |> List.iteri processLine\n"
  },
  {
    "path": "analysis/src/CompletionBackEnd.ml",
    "content": "open SharedTypes\n\nlet showConstructor {Constructor.cname = {txt}; args; res} =\n  txt\n  ^ (match args with\n    | Args [] -> \"\"\n    | InlineRecord fields ->\n      \"({\"\n      ^ (fields\n        |> List.map (fun (field : field) ->\n               Printf.sprintf \"%s%s: %s\" field.fname.txt\n                 (if field.optional then \"?\" else \"\")\n                 (Shared.typeToString\n                    (if field.optional then Utils.unwrapIfOption field.typ\n                     else field.typ)))\n        |> String.concat \", \")\n      ^ \"})\"\n    | Args args ->\n      \"(\"\n      ^ (args\n        |> List.map (fun (typ, _) -> typ |> Shared.typeToString)\n        |> String.concat \", \")\n      ^ \")\")\n  ^\n  match res with\n  | None -> \"\"\n  | Some typ -> \"\\n\" ^ (typ |> Shared.typeToString)\n\n(* TODO: local opens *)\nlet resolveOpens ~env opens ~package =\n  List.fold_left\n    (fun previous path ->\n      (* Finding an open, first trying to find it in previoulsly resolved opens *)\n      let rec loop prev =\n        match prev with\n        | [] -> (\n          match path with\n          | [] | [_] -> previous\n          | name :: path -> (\n            match ProcessCmt.fileForModule ~package name with\n            | None ->\n              Log.log (\"Could not get module \" ^ name);\n              previous (* TODO: warn? 
*)\n            | Some file -> (\n              match\n                ResolvePath.resolvePath ~env:(QueryEnv.fromFile file) ~package\n                  ~path\n              with\n              | None ->\n                Log.log (\"Could not resolve in \" ^ name);\n                previous\n              | Some (env, _placeholder) -> previous @ [env])))\n        | env :: rest -> (\n          match ResolvePath.resolvePath ~env ~package ~path with\n          | None -> loop rest\n          | Some (env, _placeholder) -> previous @ [env])\n      in\n      Log.log (\"resolving open \" ^ pathToString path);\n      match ResolvePath.resolvePath ~env ~package ~path with\n      | None ->\n        Log.log \"Not local\";\n        loop previous\n      | Some (env, _) ->\n        Log.log \"Was local\";\n        previous @ [env])\n    (* loop(previous) *)\n    [] opens\n\nlet completionForExporteds iterExported getDeclared ~prefix ~exact ~env\n    ~namesUsed transformContents =\n  let res = ref [] in\n  iterExported (fun name stamp ->\n      (* Log.log(\"checking exported: \" ++ name); *)\n      if Utils.checkName name ~prefix ~exact then\n        match getDeclared stamp with\n        | Some (declared : _ Declared.t)\n          when not (Hashtbl.mem namesUsed declared.name.txt) ->\n          Hashtbl.add namesUsed declared.name.txt ();\n          res :=\n            {\n              (Completion.create declared.name.txt ~env\n                 ~kind:(transformContents declared))\n              with\n              deprecated = declared.deprecated;\n              docstring = declared.docstring;\n            }\n            :: !res\n        | _ -> ());\n  !res\n\nlet completionForExportedModules ~env ~prefix ~exact ~namesUsed =\n  completionForExporteds (Exported.iter env.QueryEnv.exported Exported.Module)\n    (Stamps.findModule env.file.stamps) ~prefix ~exact ~env ~namesUsed\n    (fun declared ->\n      Completion.Module\n        {docstring = declared.docstring; module_ = 
declared.item})\n\nlet completionForExportedValues ~env ~prefix ~exact ~namesUsed =\n  completionForExporteds (Exported.iter env.QueryEnv.exported Exported.Value)\n    (Stamps.findValue env.file.stamps) ~prefix ~exact ~env ~namesUsed\n    (fun declared -> Completion.Value declared.item)\n\nlet completionForExportedTypes ~env ~prefix ~exact ~namesUsed =\n  completionForExporteds (Exported.iter env.QueryEnv.exported Exported.Type)\n    (Stamps.findType env.file.stamps) ~prefix ~exact ~env ~namesUsed\n    (fun declared -> Completion.Type declared.item)\n\nlet completionsForExportedConstructors ~(env : QueryEnv.t) ~prefix ~exact\n    ~namesUsed =\n  let res = ref [] in\n  Exported.iter env.exported Exported.Type (fun _name stamp ->\n      match Stamps.findType env.file.stamps stamp with\n      | Some ({item = {kind = Type.Variant constructors}} as t) ->\n        res :=\n          (constructors\n          |> List.filter (fun c ->\n                 Utils.checkName c.Constructor.cname.txt ~prefix ~exact)\n          |> Utils.filterMap (fun c ->\n                 let name = c.Constructor.cname.txt in\n                 if not (Hashtbl.mem namesUsed name) then\n                   let () = Hashtbl.add namesUsed name () in\n                   Some\n                     (Completion.create name ~env ~docstring:c.docstring\n                        ?deprecated:c.deprecated\n                        ~kind:\n                          (Completion.Constructor\n                             (c, t.item.decl |> Shared.declToString t.name.txt)))\n                 else None))\n          @ !res\n      | _ -> ());\n  !res\n\nlet completionForExportedFields ~(env : QueryEnv.t) ~prefix ~exact ~namesUsed =\n  let res = ref [] in\n  Exported.iter env.exported Exported.Type (fun _name stamp ->\n      match Stamps.findType env.file.stamps stamp with\n      | Some ({item = {kind = Record fields}} as t) ->\n        res :=\n          (fields\n          |> List.filter (fun f -> Utils.checkName 
f.fname.txt ~prefix ~exact)\n          |> Utils.filterMap (fun f ->\n                 let name = f.fname.txt in\n                 if not (Hashtbl.mem namesUsed name) then\n                   let () = Hashtbl.add namesUsed name () in\n                   Some\n                     (Completion.create name ~env ~docstring:f.docstring\n                        ?deprecated:f.deprecated\n                        ~kind:\n                          (Completion.Field\n                             (f, t.item.decl |> Shared.declToString t.name.txt)))\n                 else None))\n          @ !res\n      | _ -> ());\n  !res\n\nlet findModuleInScope ~env ~moduleName ~scope =\n  let modulesTable = Hashtbl.create 10 in\n  env.QueryEnv.file.stamps\n  |> Stamps.iterModules (fun _ declared ->\n         Hashtbl.replace modulesTable\n           (declared.name.txt, declared.extentLoc |> Loc.start)\n           declared);\n  let result = ref None in\n  let processModule name loc =\n    if name = moduleName && !result = None then\n      match Hashtbl.find_opt modulesTable (name, Loc.start loc) with\n      | Some declared -> result := Some declared\n      | None ->\n        Log.log\n          (Printf.sprintf \"Module Not Found %s loc:%s\\n\" name (Loc.toString loc))\n  in\n  scope |> Scope.iterModulesBeforeFirstOpen processModule;\n  scope |> Scope.iterModulesAfterFirstOpen processModule;\n  !result\n\nlet resolvePathFromStamps ~(env : QueryEnv.t) ~package ~scope ~moduleName ~path\n    =\n  (* Log.log(\"Finding from stamps \" ++ name); *)\n  match findModuleInScope ~env ~moduleName ~scope with\n  | None -> None\n  | Some declared -> (\n    (* Log.log(\"found it\"); *)\n    match ResolvePath.findInModule ~env declared.item path with\n    | None -> None\n    | Some res -> (\n      match res with\n      | `Local (env, name) -> Some (env, name)\n      | `Global (moduleName, fullPath) -> (\n        match ProcessCmt.fileForModule ~package moduleName with\n        | None -> None\n        | Some file 
->\n          ResolvePath.resolvePath ~env:(QueryEnv.fromFile file) ~path:fullPath\n            ~package)))\n\nlet resolveModuleWithOpens ~opens ~package ~moduleName =\n  let rec loop opens =\n    match opens with\n    | (env : QueryEnv.t) :: rest -> (\n      Log.log (\"Looking for env in \" ^ Uri.toString env.file.uri);\n      match ResolvePath.resolvePath ~env ~package ~path:[moduleName; \"\"] with\n      | Some (env, _) -> Some env\n      | None -> loop rest)\n    | [] -> None\n  in\n  loop opens\n\nlet resolveFileModule ~moduleName ~package =\n  Log.log (\"Getting module \" ^ moduleName);\n  match ProcessCmt.fileForModule ~package moduleName with\n  | None -> None\n  | Some file ->\n    Log.log \"got it\";\n    let env = QueryEnv.fromFile file in\n    Some env\n\nlet getEnvWithOpens ~scope ~(env : QueryEnv.t) ~package\n    ~(opens : QueryEnv.t list) ~moduleName (path : string list) =\n  (* TODO: handle interleaving of opens and local modules correctly *)\n  match resolvePathFromStamps ~env ~scope ~moduleName ~path ~package with\n  | Some x -> Some x\n  | None -> (\n    match resolveModuleWithOpens ~opens ~package ~moduleName with\n    | Some env -> ResolvePath.resolvePath ~env ~package ~path\n    | None -> (\n      match resolveFileModule ~moduleName ~package with\n      | None -> None\n      | Some env -> ResolvePath.resolvePath ~env ~package ~path))\n\nlet rec expandTypeExpr ~env ~package typeExpr =\n  match typeExpr |> Shared.digConstructor with\n  | Some path -> (\n    match References.digConstructor ~env ~package path with\n    | None -> None\n    | Some (env, {item = {decl = {type_manifest = Some t}}}) ->\n      expandTypeExpr ~env ~package t\n    | Some (_, {docstring; item}) -> Some (docstring, item))\n  | None -> None\n\nlet kindToDocumentation ~env ~full ~currentDocstring name\n    (kind : Completion.kind) =\n  let docsFromKind =\n    match kind with\n    | ObjLabel _ | Label _ | FileModule _ | Snippet _ | FollowContextPath _ ->\n      []\n    | 
Module {docstring} -> docstring\n    | Type {decl; name} ->\n      [decl |> Shared.declToString name |> Markdown.codeBlock]\n    | Value typ -> (\n      match expandTypeExpr ~env ~package:full.package typ with\n      | None -> []\n      | Some (docstrings, {decl; name; kind}) ->\n        docstrings\n        @ [\n            (match kind with\n            | Record _ | Tuple _ | Variant _ ->\n              Markdown.codeBlock (Shared.declToString name decl)\n            | _ -> \"\");\n          ])\n    | Field ({typ; optional; docstring}, s) ->\n      (* Handle optional fields. Checking for \"?\" is because sometimes optional\n         fields are prefixed with \"?\" when completing, and at that point we don't\n         need to _also_ add a \"?\" after the field name, as that looks weird. *)\n      docstring\n      @ [\n          Markdown.codeBlock\n            (if optional && Utils.startsWith name \"?\" = false then\n               name ^ \"?: \"\n               ^ (typ |> Utils.unwrapIfOption |> Shared.typeToString)\n             else name ^ \": \" ^ (typ |> Shared.typeToString));\n          Markdown.codeBlock s;\n        ]\n    | Constructor (c, s) ->\n      [Markdown.codeBlock (showConstructor c); Markdown.codeBlock s]\n    | PolyvariantConstructor ({displayName; args}, s) ->\n      [\n        Markdown.codeBlock\n          (\"#\" ^ displayName\n          ^\n          match args with\n          | [] -> \"\"\n          | typeExprs ->\n            \"(\"\n            ^ (typeExprs\n              |> List.map (fun typeExpr -> typeExpr |> Shared.typeToString)\n              |> String.concat \", \")\n            ^ \")\");\n        Markdown.codeBlock s;\n      ]\n    | ExtractedType (extractedType, _) ->\n      [Markdown.codeBlock (TypeUtils.extractedTypeToString extractedType)]\n  in\n  currentDocstring @ docsFromKind\n  |> List.filter (fun s -> s <> \"\")\n  |> String.concat \"\\n\\n\"\n\nlet kindToDetail name (kind : Completion.kind) =\n  match kind with\n  | Type {name} -> 
\"type \" ^ name\n  | Value typ -> typ |> Shared.typeToString\n  | ObjLabel typ -> typ |> Shared.typeToString\n  | Label typString -> typString\n  | Module _ -> \"module \" ^ name\n  | FileModule f -> \"module \" ^ f\n  | Field ({typ; optional}, _) ->\n    (* Handle optional fields. Checking for \"?\" is because sometimes optional\n       fields are prefixed with \"?\" when completing, and at that point we don't\n       need to _also_ add a \"?\" after the field name, as that looks weird. *)\n    if optional && Utils.startsWith name \"?\" = false then\n      typ |> Utils.unwrapIfOption |> Shared.typeToString\n    else typ |> Shared.typeToString\n  | Constructor (c, _) -> showConstructor c\n  | PolyvariantConstructor ({displayName; args}, _) -> (\n    \"#\" ^ displayName\n    ^\n    match args with\n    | [] -> \"\"\n    | typeExprs ->\n      \"(\"\n      ^ (typeExprs\n        |> List.map (fun typeExpr -> typeExpr |> Shared.typeToString)\n        |> String.concat \", \")\n      ^ \")\")\n  | Snippet s -> s\n  | FollowContextPath _ -> \"\"\n  | ExtractedType (extractedType, _) ->\n    TypeUtils.extractedTypeToString ~nameOnly:true extractedType\n\nlet kindToData filePath (kind : Completion.kind) =\n  match kind with\n  | FileModule f -> Some [(\"modulePath\", f); (\"filePath\", filePath)]\n  | _ -> None\n\nlet findAllCompletions ~(env : QueryEnv.t) ~prefix ~exact ~namesUsed\n    ~(completionContext : Completable.completionContext) =\n  Log.log (\"findAllCompletions uri:\" ^ Uri.toString env.file.uri);\n  match completionContext with\n  | Value ->\n    completionForExportedValues ~env ~prefix ~exact ~namesUsed\n    @ completionsForExportedConstructors ~env ~prefix ~exact ~namesUsed\n    @ completionForExportedModules ~env ~prefix ~exact ~namesUsed\n  | Type ->\n    completionForExportedTypes ~env ~prefix ~exact ~namesUsed\n    @ completionForExportedModules ~env ~prefix ~exact ~namesUsed\n  | Module -> completionForExportedModules ~env ~prefix ~exact ~namesUsed\n  | 
Field ->\n    completionForExportedFields ~env ~prefix ~exact ~namesUsed\n    @ completionForExportedModules ~env ~prefix ~exact ~namesUsed\n  | ValueOrField ->\n    completionForExportedValues ~env ~prefix ~exact ~namesUsed\n    @ completionForExportedFields ~env ~prefix ~exact ~namesUsed\n    @ completionForExportedModules ~env ~prefix ~exact ~namesUsed\n\nlet processLocalValue name loc contextPath scope ~prefix ~exact ~env\n    ~(localTables : LocalTables.t) =\n  if Utils.checkName name ~prefix ~exact then\n    match Hashtbl.find_opt localTables.valueTable (name, Loc.start loc) with\n    | Some declared ->\n      if not (Hashtbl.mem localTables.namesUsed name) then (\n        Hashtbl.add localTables.namesUsed name ();\n        localTables.resultRev <-\n          {\n            (Completion.create declared.name.txt ~env ~kind:(Value declared.item))\n            with\n            deprecated = declared.deprecated;\n            docstring = declared.docstring;\n          }\n          :: localTables.resultRev)\n    | None ->\n      if !Cfg.debugFollowCtxPath then\n        Printf.printf \"Completion Value Not Found %s loc:%s\\n\" name\n          (Loc.toString loc);\n      localTables.resultRev <-\n        Completion.create name ~env\n          ~kind:\n            (match contextPath with\n            | Some contextPath -> FollowContextPath (contextPath, scope)\n            | None ->\n              Value\n                (Ctype.newconstr\n                   (Path.Pident (Ident.create \"Type Not Known\"))\n                   []))\n        :: localTables.resultRev\n\nlet processLocalConstructor name loc ~prefix ~exact ~env\n    ~(localTables : LocalTables.t) =\n  if Utils.checkName name ~prefix ~exact then\n    match\n      Hashtbl.find_opt localTables.constructorTable (name, Loc.start loc)\n    with\n    | Some declared ->\n      if not (Hashtbl.mem localTables.namesUsed name) then (\n        Hashtbl.add localTables.namesUsed name ();\n        localTables.resultRev <-\n    
      {\n            (Completion.create declared.name.txt ~env\n               ~kind:\n                 (Constructor\n                    ( declared.item,\n                      snd declared.item.typeDecl\n                      |> Shared.declToString (fst declared.item.typeDecl) )))\n            with\n            deprecated = declared.deprecated;\n            docstring = declared.docstring;\n          }\n          :: localTables.resultRev)\n    | None ->\n      Log.log\n        (Printf.sprintf \"Completion Constructor Not Found %s loc:%s\\n\" name\n           (Loc.toString loc))\n\nlet processLocalType name loc ~prefix ~exact ~env ~(localTables : LocalTables.t)\n    =\n  if Utils.checkName name ~prefix ~exact then\n    match Hashtbl.find_opt localTables.typesTable (name, Loc.start loc) with\n    | Some declared ->\n      if not (Hashtbl.mem localTables.namesUsed name) then (\n        Hashtbl.add localTables.namesUsed name ();\n        localTables.resultRev <-\n          {\n            (Completion.create declared.name.txt ~env ~kind:(Type declared.item))\n            with\n            deprecated = declared.deprecated;\n            docstring = declared.docstring;\n          }\n          :: localTables.resultRev)\n    | None ->\n      Log.log\n        (Printf.sprintf \"Completion Type Not Found %s loc:%s\\n\" name\n           (Loc.toString loc))\n\nlet processLocalModule name loc ~prefix ~exact ~env\n    ~(localTables : LocalTables.t) =\n  if Utils.checkName name ~prefix ~exact then\n    match Hashtbl.find_opt localTables.modulesTable (name, Loc.start loc) with\n    | Some declared ->\n      if not (Hashtbl.mem localTables.namesUsed name) then (\n        Hashtbl.add localTables.namesUsed name ();\n        localTables.resultRev <-\n          {\n            (Completion.create declared.name.txt ~env\n               ~kind:\n                 (Module\n                    {docstring = declared.docstring; module_ = declared.item}))\n            with\n            deprecated = 
declared.deprecated;\n            docstring = declared.docstring;\n          }\n          :: localTables.resultRev)\n    | None ->\n      Log.log\n        (Printf.sprintf \"Completion Module Not Found %s loc:%s\\n\" name\n           (Loc.toString loc))\n\nlet getItemsFromOpens ~opens ~localTables ~prefix ~exact ~completionContext =\n  opens\n  |> List.fold_left\n       (fun results env ->\n         let completionsFromThisOpen =\n           findAllCompletions ~env ~prefix ~exact\n             ~namesUsed:localTables.LocalTables.namesUsed ~completionContext\n         in\n         completionsFromThisOpen @ results)\n       []\n\nlet findLocalCompletionsForValuesAndConstructors ~(localTables : LocalTables.t)\n    ~env ~prefix ~exact ~opens ~scope =\n  localTables |> LocalTables.populateValues ~env;\n  localTables |> LocalTables.populateConstructors ~env;\n  localTables |> LocalTables.populateModules ~env;\n  scope\n  |> Scope.iterValuesBeforeFirstOpen\n       (processLocalValue ~prefix ~exact ~env ~localTables);\n  scope\n  |> Scope.iterConstructorsBeforeFirstOpen\n       (processLocalConstructor ~prefix ~exact ~env ~localTables);\n  scope\n  |> Scope.iterModulesBeforeFirstOpen\n       (processLocalModule ~prefix ~exact ~env ~localTables);\n\n  let valuesFromOpens =\n    getItemsFromOpens ~opens ~localTables ~prefix ~exact\n      ~completionContext:Value\n  in\n\n  scope\n  |> Scope.iterValuesAfterFirstOpen\n       (processLocalValue ~prefix ~exact ~env ~localTables);\n  scope\n  |> Scope.iterConstructorsAfterFirstOpen\n       (processLocalConstructor ~prefix ~exact ~env ~localTables);\n  scope\n  |> Scope.iterModulesAfterFirstOpen\n       (processLocalModule ~prefix ~exact ~env ~localTables);\n  List.rev_append localTables.resultRev valuesFromOpens\n\nlet findLocalCompletionsForValues ~(localTables : LocalTables.t) ~env ~prefix\n    ~exact ~opens ~scope =\n  localTables |> LocalTables.populateValues ~env;\n  localTables |> LocalTables.populateModules ~env;\n  scope\n  
|> Scope.iterValuesBeforeFirstOpen\n       (processLocalValue ~prefix ~exact ~env ~localTables);\n  scope\n  |> Scope.iterModulesBeforeFirstOpen\n       (processLocalModule ~prefix ~exact ~env ~localTables);\n\n  let valuesFromOpens =\n    getItemsFromOpens ~opens ~localTables ~prefix ~exact\n      ~completionContext:Value\n  in\n\n  scope\n  |> Scope.iterValuesAfterFirstOpen\n       (processLocalValue ~prefix ~exact ~env ~localTables);\n  scope\n  |> Scope.iterModulesAfterFirstOpen\n       (processLocalModule ~prefix ~exact ~env ~localTables);\n  List.rev_append localTables.resultRev valuesFromOpens\n\nlet findLocalCompletionsForTypes ~(localTables : LocalTables.t) ~env ~prefix\n    ~exact ~opens ~scope =\n  localTables |> LocalTables.populateTypes ~env;\n  localTables |> LocalTables.populateModules ~env;\n  scope\n  |> Scope.iterTypesBeforeFirstOpen\n       (processLocalType ~prefix ~exact ~env ~localTables);\n  scope\n  |> Scope.iterModulesBeforeFirstOpen\n       (processLocalModule ~prefix ~exact ~env ~localTables);\n\n  let valuesFromOpens =\n    getItemsFromOpens ~opens ~localTables ~prefix ~exact ~completionContext:Type\n  in\n\n  scope\n  |> Scope.iterTypesAfterFirstOpen\n       (processLocalType ~prefix ~exact ~env ~localTables);\n  scope\n  |> Scope.iterModulesAfterFirstOpen\n       (processLocalModule ~prefix ~exact ~env ~localTables);\n  List.rev_append localTables.resultRev valuesFromOpens\n\nlet findLocalCompletionsForModules ~(localTables : LocalTables.t) ~env ~prefix\n    ~exact ~opens ~scope =\n  localTables |> LocalTables.populateModules ~env;\n  scope\n  |> Scope.iterModulesBeforeFirstOpen\n       (processLocalModule ~prefix ~exact ~env ~localTables);\n\n  let valuesFromOpens =\n    getItemsFromOpens ~opens ~localTables ~prefix ~exact\n      ~completionContext:Module\n  in\n\n  scope\n  |> Scope.iterModulesAfterFirstOpen\n       (processLocalModule ~prefix ~exact ~env ~localTables);\n  List.rev_append localTables.resultRev valuesFromOpens\n\nlet 
findLocalCompletionsWithOpens ~pos ~(env : QueryEnv.t) ~prefix ~exact ~opens\n    ~scope ~(completionContext : Completable.completionContext) =\n  (* TODO: handle arbitrary interleaving of opens and local bindings correctly *)\n  Log.log\n    (\"findLocalCompletionsWithOpens uri:\" ^ Uri.toString env.file.uri ^ \" pos:\"\n   ^ Pos.toString pos);\n  let localTables = LocalTables.create () in\n  match completionContext with\n  | Value | ValueOrField ->\n    findLocalCompletionsForValuesAndConstructors ~localTables ~env ~prefix\n      ~exact ~opens ~scope\n  | Type ->\n    findLocalCompletionsForTypes ~localTables ~env ~prefix ~exact ~opens ~scope\n  | Module ->\n    findLocalCompletionsForModules ~localTables ~env ~prefix ~exact ~opens\n      ~scope\n  | Field ->\n    (* There's no local completion for fields *)\n    []\n\nlet getComplementaryCompletionsForTypedValue ~opens ~allFiles ~scope ~env prefix\n    =\n  let exact = false in\n  let localCompletionsWithOpens =\n    let localTables = LocalTables.create () in\n    findLocalCompletionsForValues ~localTables ~env ~prefix ~exact ~opens ~scope\n  in\n  let fileModules =\n    allFiles |> FileSet.elements\n    |> Utils.filterMap (fun name ->\n           if\n             Utils.checkName name ~prefix ~exact\n             && not\n                  (* TODO complete the namespaced name too *)\n                  (Utils.fileNameHasUnallowedChars name)\n           then\n             Some\n               (Completion.create name ~synthetic:true ~env\n                  ~kind:(Completion.FileModule name))\n           else None)\n  in\n  localCompletionsWithOpens @ fileModules\n\nlet getCompletionsForPath ~debug ~opens ~full ~pos ~exact ~scope\n    ~completionContext ~env path =\n  if debug then Printf.printf \"Path %s\\n\" (path |> String.concat \".\");\n  let allFiles = allFilesInPackage full.package in\n  match path with\n  | [] -> []\n  | [prefix] ->\n    let localCompletionsWithOpens =\n      findLocalCompletionsWithOpens 
~pos ~env ~prefix ~exact ~opens ~scope\n        ~completionContext\n    in\n    let fileModules =\n      allFiles |> FileSet.elements\n      |> Utils.filterMap (fun name ->\n             if\n               Utils.checkName name ~prefix ~exact\n               && not\n                    (* TODO complete the namespaced name too *)\n                    (Utils.fileNameHasUnallowedChars name)\n             then\n               Some\n                 (Completion.create name ~env ~kind:(Completion.FileModule name))\n             else None)\n    in\n    localCompletionsWithOpens @ fileModules\n  | moduleName :: path -> (\n    Log.log (\"Path \" ^ pathToString path);\n    match\n      getEnvWithOpens ~scope ~env ~package:full.package ~opens ~moduleName path\n    with\n    | Some (env, prefix) ->\n      Log.log \"Got the env\";\n      let namesUsed = Hashtbl.create 10 in\n      findAllCompletions ~env ~prefix ~exact ~namesUsed ~completionContext\n    | None -> [])\n\n(** Completions intended for piping, from a completion path. 
*)\nlet completionsForPipeFromCompletionPath ~envCompletionIsMadeFrom ~opens ~pos\n    ~scope ~debug ~prefix ~env ~rawOpens ~full completionPath =\n  let completionPathWithoutCurrentModule =\n    TypeUtils.removeCurrentModuleIfNeeded ~envCompletionIsMadeFrom\n      completionPath\n  in\n  let completionPathMinusOpens =\n    TypeUtils.removeOpensFromCompletionPath ~rawOpens ~package:full.package\n      completionPathWithoutCurrentModule\n    |> String.concat \".\"\n  in\n  let completionName name =\n    if completionPathMinusOpens = \"\" then name\n    else completionPathMinusOpens ^ \".\" ^ name\n  in\n  let completions =\n    completionPath @ [prefix]\n    |> getCompletionsForPath ~debug ~completionContext:Value ~exact:false ~opens\n         ~full ~pos ~env ~scope\n  in\n  let completions =\n    completions\n    |> List.map (fun (completion : Completion.t) ->\n           {completion with name = completionName completion.name})\n  in\n  completions\n\nlet rec digToRecordFieldsForCompletion ~debug ~package ~opens ~full ~pos ~env\n    ~scope path =\n  match\n    path\n    |> getCompletionsForPath ~debug ~completionContext:Type ~exact:true ~opens\n         ~full ~pos ~env ~scope\n  with\n  | {kind = Type {kind = Abstract (Some (p, _))}} :: _ ->\n    (* This case happens when what we're looking for is a type alias.\n       This is the case in newer rescript-react versions where\n       ReactDOM.domProps is an alias for JsxEvent.t. 
*)\n    let pathRev = p |> Utils.expandPath in\n    pathRev |> List.rev\n    |> digToRecordFieldsForCompletion ~debug ~package ~opens ~full ~pos ~env\n         ~scope\n  | {kind = Type {kind = Record fields}} :: _ -> Some fields\n  | _ -> None\n\nlet mkItem ?data ?additionalTextEdits name ~kind ~detail ~deprecated ~docstring\n    =\n  let docContent =\n    (match deprecated with\n    | None -> \"\"\n    | Some s -> \"Deprecated: \" ^ s ^ \"\\n\\n\")\n    ^\n    match docstring with\n    | [] -> \"\"\n    | _ :: _ -> docstring |> String.concat \"\\n\"\n  in\n  let tags =\n    match deprecated with\n    | None -> []\n    | Some _ -> [1 (* deprecated *)]\n  in\n  Protocol.\n    {\n      label = name;\n      kind;\n      tags;\n      detail;\n      documentation =\n        (if docContent = \"\" then None\n         else Some {kind = \"markdown\"; value = docContent});\n      sortText = None;\n      insertText = None;\n      insertTextFormat = None;\n      filterText = None;\n      data;\n      additionalTextEdits;\n    }\n\nlet completionToItem\n    {\n      Completion.name;\n      deprecated;\n      docstring;\n      kind;\n      sortText;\n      insertText;\n      insertTextFormat;\n      filterText;\n      detail;\n      env;\n      additionalTextEdits;\n    } ~full =\n  let item =\n    mkItem name ?additionalTextEdits\n      ?data:(kindToData (full.file.uri |> Uri.toPath) kind)\n      ~kind:(Completion.kindToInt kind)\n      ~deprecated\n      ~detail:\n        (match detail with\n        | None -> kindToDetail name kind\n        | Some detail -> detail)\n      ~docstring:\n        (match\n           kindToDocumentation ~currentDocstring:docstring ~full ~env name kind\n         with\n        | \"\" -> []\n        | docstring -> [docstring])\n  in\n  {item with sortText; insertText; insertTextFormat; filterText}\n\nlet completionsGetTypeEnv = function\n  | {Completion.kind = Value typ; env} :: _ -> Some (typ, env)\n  | {Completion.kind = ObjLabel typ; env} :: _ -> 
Some (typ, env)\n  | {Completion.kind = Field ({typ}, _); env} :: _ -> Some (typ, env)\n  | _ -> None\n\ntype getCompletionsForContextPathMode = Regular | Pipe\n\nlet completionsGetCompletionType ~full completions =\n  let firstNonSyntheticCompletion =\n    List.find_opt (fun c -> not c.Completion.synthetic) completions\n  in\n  match firstNonSyntheticCompletion with\n  | Some {Completion.kind = Value typ; env}\n  | Some {Completion.kind = ObjLabel typ; env}\n  | Some {Completion.kind = Field ({typ}, _); env} ->\n    typ\n    |> TypeUtils.extractType ~env ~package:full.package\n    |> Option.map (fun (typ, _) -> (typ, env))\n  | Some {Completion.kind = Type typ; env} -> (\n    match TypeUtils.extractTypeFromResolvedType typ ~env ~full with\n    | None -> None\n    | Some extractedType -> Some (extractedType, env))\n  | Some {Completion.kind = ExtractedType (typ, _); env} -> Some (typ, env)\n  | _ -> None\n\nlet rec completionsGetCompletionType2 ~debug ~full ~opens ~rawOpens ~pos\n    completions =\n  let firstNonSyntheticCompletion =\n    List.find_opt (fun c -> not c.Completion.synthetic) completions\n  in\n  match firstNonSyntheticCompletion with\n  | Some\n      ( {Completion.kind = Value typ; env}\n      | {Completion.kind = ObjLabel typ; env}\n      | {Completion.kind = Field ({typ}, _); env} ) ->\n    Some (TypeExpr typ, env)\n  | Some {Completion.kind = FollowContextPath (ctxPath, scope); env} ->\n    ctxPath\n    |> getCompletionsForContextPath ~debug ~full ~env ~exact:true ~opens\n         ~rawOpens ~pos ~scope\n    |> completionsGetCompletionType2 ~debug ~full ~opens ~rawOpens ~pos\n  | Some {Completion.kind = Type typ; env} -> (\n    match TypeUtils.extractTypeFromResolvedType typ ~env ~full with\n    | None -> None\n    | Some extractedType -> Some (ExtractedType extractedType, env))\n  | Some {Completion.kind = ExtractedType (typ, _); env} ->\n    Some (ExtractedType typ, env)\n  | _ -> None\n\nand completionsGetTypeEnv2 ~debug (completions : 
Completion.t list) ~full ~opens\n    ~rawOpens ~pos =\n  let firstNonSyntheticCompletion =\n    List.find_opt (fun c -> not c.Completion.synthetic) completions\n  in\n  match firstNonSyntheticCompletion with\n  | Some {Completion.kind = Value typ; env} -> Some (typ, env)\n  | Some {Completion.kind = ObjLabel typ; env} -> Some (typ, env)\n  | Some {Completion.kind = Field ({typ}, _); env} -> Some (typ, env)\n  | Some {Completion.kind = FollowContextPath (ctxPath, scope); env} ->\n    ctxPath\n    |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n         ~exact:true ~scope\n    |> completionsGetTypeEnv2 ~debug ~full ~opens ~rawOpens ~pos\n  | _ -> None\n\nand getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env ~exact\n    ~scope ?(mode = Regular) contextPath =\n  let envCompletionIsMadeFrom = env in\n  if debug then\n    Printf.printf \"ContextPath %s\\n\"\n      (Completable.contextPathToString contextPath);\n  let package = full.package in\n  match contextPath with\n  | CPString ->\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPString\";\n    [\n      Completion.create \"dummy\" ~env\n        ~kind:(Completion.Value (Ctype.newconstr Predef.path_string []));\n    ]\n  | CPBool ->\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPBool\";\n    [\n      Completion.create \"dummy\" ~env\n        ~kind:(Completion.Value (Ctype.newconstr Predef.path_bool []));\n    ]\n  | CPInt ->\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPInt\";\n    [\n      Completion.create \"dummy\" ~env\n        ~kind:(Completion.Value (Ctype.newconstr Predef.path_int []));\n    ]\n  | CPFloat ->\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPFloat\";\n    [\n      Completion.create \"dummy\" ~env\n        ~kind:(Completion.Value (Ctype.newconstr Predef.path_float []));\n    ]\n  | CPArray None ->\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPArray (no payload)\";\n    [\n      
Completion.create \"dummy\" ~env\n        ~kind:(Completion.Value (Ctype.newconstr Predef.path_array []));\n    ]\n  | CPArray (Some cp) -> (\n    if Debug.verbose () then\n      print_endline \"[ctx_path]--> CPArray (with payload)\";\n    match mode with\n    | Regular -> (\n      match\n        cp\n        |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n             ~exact:true ~scope\n        |> completionsGetCompletionType ~full\n      with\n      | None -> []\n      | Some (typ, env) ->\n        [\n          Completion.create \"dummy\" ~env\n            ~kind:\n              (Completion.ExtractedType (Tarray (env, ExtractedType typ), `Type));\n        ])\n    | Pipe ->\n      (* Pipe completion with array just needs to know that it's an array, not\n         what inner type it has. *)\n      [\n        Completion.create \"dummy\" ~env\n          ~kind:(Completion.Value (Ctype.newconstr Predef.path_array []));\n      ])\n  | CPOption cp -> (\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPOption\";\n    match\n      cp\n      |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:true ~scope\n      |> completionsGetCompletionType ~full\n    with\n    | None -> []\n    | Some (typ, env) ->\n      [\n        Completion.create \"dummy\" ~env\n          ~kind:\n            (Completion.ExtractedType (Toption (env, ExtractedType typ), `Type));\n      ])\n  | CPAwait cp -> (\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPAwait\";\n    match\n      cp\n      |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:true ~scope\n      |> completionsGetCompletionType ~full\n    with\n    | Some (Tpromise (env, typ), _env) ->\n      [Completion.create \"dummy\" ~env ~kind:(Completion.Value typ)]\n    | _ -> [])\n  | CPId {path; completionContext; loc} ->\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPId\";\n    (* Looks up the type of an 
identifier.\n\n       Because of reasons we sometimes don't get enough type\n       information when looking up identifiers where the type\n       has type parameters. This in turn means less completions.\n\n       There's a heuristic below that tries to look up the type\n       of the ID in the usual way first. But if the type found\n       still has uninstantiated type parameters, we check the\n       location for the identifier from the compiler type artifacts.\n       That type usually has the type params instantiated, if they are.\n       This leads to better completion.\n\n       However, we only do it in incremental type checking mode,\n       because more type information is always available in that mode. *)\n    let useTvarLookup = !Cfg.inIncrementalTypecheckingMode in\n    let byPath =\n      path\n      |> getCompletionsForPath ~debug ~opens ~full ~pos ~exact\n           ~completionContext ~env ~scope\n    in\n    let hasTvars =\n      if useTvarLookup then\n        match byPath with\n        | [{kind = Value typ}] when TypeUtils.hasTvar typ -> true\n        | _ -> false\n      else false\n    in\n    let result =\n      if hasTvars then\n        let byLoc = TypeUtils.findTypeViaLoc loc ~full ~debug in\n        match (byLoc, byPath) with\n        | Some t, [({kind = Value _} as item)] -> [{item with kind = Value t}]\n        | _ -> byPath\n      else byPath\n    in\n    result\n  | CPApply (cp, labels) -> (\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPApply\";\n    match\n      cp\n      |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:true ~scope\n      |> completionsGetCompletionType2 ~debug ~full ~opens ~rawOpens ~pos\n    with\n    | Some ((TypeExpr typ | ExtractedType (Tfunction {typ})), env) -> (\n      let rec reconstructFunctionType args tRet =\n        match args with\n        | [] -> tRet\n        | (label, tArg) :: rest ->\n          let restType = reconstructFunctionType rest tRet 
in\n          {typ with desc = Tarrow (label, tArg, restType, Cok)}\n      in\n      let rec processApply args labels =\n        match (args, labels) with\n        | _, [] -> args\n        | _, label :: (_ :: _ as nextLabels) ->\n          (* compute the application of the first label, then the next ones *)\n          let args = processApply args [label] in\n          processApply args nextLabels\n        | (Asttypes.Nolabel, _) :: nextArgs, [Asttypes.Nolabel] -> nextArgs\n        | ((Labelled _, _) as arg) :: nextArgs, [Nolabel] ->\n          arg :: processApply nextArgs labels\n        | (Optional _, _) :: nextArgs, [Nolabel] -> processApply nextArgs labels\n        | ( (((Labelled s1 | Optional s1), _) as arg) :: nextArgs,\n            [(Labelled s2 | Optional s2)] ) ->\n          if s1 = s2 then nextArgs else arg :: processApply nextArgs labels\n        | ((Nolabel, _) as arg) :: nextArgs, [(Labelled _ | Optional _)] ->\n          arg :: processApply nextArgs labels\n        | [], [(Nolabel | Labelled _ | Optional _)] ->\n          (* should not happen, but just ignore extra arguments *) []\n      in\n      match TypeUtils.extractFunctionType ~env ~package typ with\n      | args, tRet when args <> [] ->\n        let args = processApply args labels in\n        let retType = reconstructFunctionType args tRet in\n        [Completion.create \"dummy\" ~env ~kind:(Completion.Value retType)]\n      | _ -> [])\n    | _ -> [])\n  | CPField {contextPath = CPId {path; completionContext = Module}; fieldName}\n    ->\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPField: M.field\";\n    (* M.field *)\n    path @ [fieldName]\n    |> getCompletionsForPath ~debug ~opens ~full ~pos ~exact\n         ~completionContext:Field ~env ~scope\n  | CPField {contextPath = cp; fieldName; posOfDot; exprLoc; inJsx} -> (\n    if Debug.verbose () then print_endline \"[dot_completion]--> Triggered\";\n    let completionsFromCtxPath =\n      cp\n      |> 
getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:true ~scope\n    in\n    let mainTypeCompletionEnv =\n      completionsFromCtxPath\n      |> completionsGetTypeEnv2 ~debug ~full ~opens ~rawOpens ~pos\n    in\n    match mainTypeCompletionEnv with\n    | None ->\n      if Debug.verbose () then\n        Printf.printf\n          \"[dot_completion] Could not extract main type completion env.\\n\";\n      []\n    | Some (typ, env) ->\n      let fieldCompletions =\n        DotCompletionUtils.fieldCompletionsForDotCompletion typ ~env ~package\n          ~prefix:fieldName ?posOfDot ~exact\n      in\n      (* Get additional completions acting as if this field completion was actually a pipe completion. *)\n      let cpAsPipeCompletion =\n        Completable.CPPipe\n          {\n            synthetic = true;\n            contextPath =\n              (match cp with\n              | CPApply (c, args) -> CPApply (c, args @ [Asttypes.Nolabel])\n              | CPId _ when TypeUtils.isFunctionType ~env ~package typ ->\n                CPApply (cp, [Asttypes.Nolabel])\n              | _ -> cp);\n            id = fieldName;\n            inJsx;\n            lhsLoc = exprLoc;\n          }\n      in\n      let pipeCompletions =\n        cpAsPipeCompletion\n        |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos\n             ~env:envCompletionIsMadeFrom ~exact ~scope\n        |> List.filter_map (fun c ->\n               TypeUtils.transformCompletionToPipeCompletion ~synthetic:true\n                 ~env ?posOfDot c)\n      in\n      fieldCompletions @ pipeCompletions)\n  | CPObj (cp, label) -> (\n    (* TODO: Also needs to support ExtractedType *)\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPObj\";\n    match\n      cp\n      |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:true ~scope\n      |> completionsGetTypeEnv2 ~debug ~full ~opens ~rawOpens ~pos\n    with\n    | 
Some (typ, env) -> (\n      match typ |> TypeUtils.extractObjectType ~env ~package with\n      | Some (env, tObj) ->\n        tObj |> TypeUtils.getObjFields\n        |> Utils.filterMap (fun (field, typ) ->\n               if Utils.checkName field ~prefix:label ~exact then\n                 Some\n                   (Completion.create field ~env ~kind:(Completion.ObjLabel typ))\n               else None)\n      | None -> [])\n    | None -> [])\n  | CPPipe {contextPath = cp; id = prefix; lhsLoc; inJsx; synthetic} -> (\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPPipe\";\n    match\n      cp\n      |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:true ~scope ~mode:Pipe\n      |> completionsGetTypeEnv2 ~debug ~full ~opens ~rawOpens ~pos\n    with\n    | None ->\n      if Debug.verbose () then\n        print_endline \"[CPPipe]--> Could not resolve type env\";\n      []\n    | Some (typ, env) -> (\n      let env, typ =\n        typ\n        |> TypeUtils.resolveTypeForPipeCompletion ~env ~package:full.package\n             ~full ~lhsLoc\n      in\n      let mainTypeId = TypeUtils.findRootTypeId ~full ~env typ in\n      let typePath = TypeUtils.pathFromTypeExpr typ in\n      match mainTypeId with\n      | None ->\n        if Debug.verbose () then\n          Printf.printf\n            \"[pipe_completion] Could not find mainTypeId. Aborting pipe \\\n             completions.\\n\";\n        []\n      | Some mainTypeId ->\n        if Debug.verbose () then\n          Printf.printf \"[pipe_completion] mainTypeId: %s\\n\" mainTypeId;\n        let pipeCompletions =\n          (* We now need a completion path from where to look up the module for our dot completion type.\n              This is from where we pull all of the functions we want to complete for the pipe.\n\n              A completion path here could be one of two things:\n              1. A module path to the main module for the type we've found\n              2. 
A module path to a builtin module, like `Int` for `int`, or `Array` for `array`\n\n             The below code will deliberately _not_ dig into type aliases for the main type when we're looking\n             for what _module_ to complete from. This is because you should be able to control where completions\n             come from even if your type is an alias.\n          *)\n          let completeAsBuiltin =\n            match typePath with\n            | Some t ->\n              TypeUtils.completionPathFromMaybeBuiltin t ~package:full.package\n            | None -> None\n          in\n          let completionPath =\n            match (completeAsBuiltin, typePath) with\n            | Some completionPathForBuiltin, _ ->\n              Some (false, completionPathForBuiltin)\n            | _, Some p -> (\n              (* If this isn't a builtin, but we have a path, we try to resolve the\n                 module path relative to the env we're completing from. This ensures that\n                 what we get here is a module path we can find completions for regardless of\n                 of the current scope for the position we're at.*)\n              match\n                TypeUtils.getModulePathRelativeToEnv ~debug\n                  ~env:envCompletionIsMadeFrom ~envFromItem:env\n                  (Utils.expandPath p)\n              with\n              | None -> Some (true, [env.file.moduleName])\n              | Some p -> Some (false, p))\n            | _ -> None\n          in\n          match completionPath with\n          | None -> []\n          | Some (isFromCurrentModule, completionPath) ->\n            completionsForPipeFromCompletionPath ~envCompletionIsMadeFrom ~opens\n              ~pos ~scope ~debug ~prefix ~env ~rawOpens ~full completionPath\n            |> TypeUtils.filterPipeableFunctions ~env ~full ~synthetic\n                 ~targetTypeId:mainTypeId\n            |> List.filter (fun (c : Completion.t) ->\n                   (* If we're completing from 
the current module then we need to care about scope.\n                      This is automatically taken care of in other cases. *)\n                   if isFromCurrentModule then\n                     match c.kind with\n                     | Value _ ->\n                       scope\n                       |> List.find_opt (fun (item : ScopeTypes.item) ->\n                              match item with\n                              | Value (scopeItemName, _, _, _) ->\n                                scopeItemName = c.name\n                              | _ -> false)\n                       |> Option.is_some\n                     | _ -> false\n                   else true)\n        in\n        (* Extra completions can be drawn from the @editor.completeFrom attribute. Here we\n           find and add those completions as well. *)\n        let extraCompletions =\n          TypeUtils.getExtraModulesToCompleteFromForType ~env ~full typ\n          |> List.map (fun completionPath ->\n                 completionsForPipeFromCompletionPath ~envCompletionIsMadeFrom\n                   ~opens ~pos ~scope ~debug ~prefix ~env ~rawOpens ~full\n                   completionPath)\n          |> List.flatten\n          |> TypeUtils.filterPipeableFunctions ~synthetic:true ~env ~full\n               ~targetTypeId:mainTypeId\n        in\n        (* Add JSX completion items if we're in a JSX context. *)\n        let jsxCompletions =\n          if inJsx then\n            PipeCompletionUtils.addJsxCompletionItems ~env ~mainTypeId ~prefix\n              ~full ~rawOpens typ\n          else []\n        in\n        jsxCompletions @ pipeCompletions @ extraCompletions))\n  | CTuple ctxPaths ->\n    if Debug.verbose () then print_endline \"[ctx_path]--> CTuple\";\n    (* Turn a list of context paths into a list of type expressions. 
*)\n    let typeExrps =\n      ctxPaths\n      |> List.map (fun contextPath ->\n             contextPath\n             |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos\n                  ~env ~exact:true ~scope)\n      |> List.filter_map (fun completionItems ->\n             match completionItems with\n             | {Completion.kind = Value typ} :: _ -> Some typ\n             | _ -> None)\n    in\n    if List.length ctxPaths = List.length typeExrps then\n      [\n        Completion.create \"dummy\" ~env\n          ~kind:(Completion.Value (Ctype.newty (Ttuple typeExrps)));\n      ]\n    else []\n  | CJsxPropValue {pathToComponent; propName; emptyJsxPropNameHint} -> (\n    if Debug.verbose () then print_endline \"[ctx_path]--> CJsxPropValue\";\n    let findTypeOfValue path =\n      path\n      |> getCompletionsForPath ~debug ~completionContext:Value ~exact:true\n           ~opens ~full ~pos ~env ~scope\n      |> completionsGetTypeEnv2 ~debug ~full ~opens ~rawOpens ~pos\n    in\n    let lowercaseComponent =\n      match pathToComponent with\n      | [elName] when Char.lowercase_ascii elName.[0] = elName.[0] -> true\n      | _ -> false\n    in\n    (* TODO(env-stuff) Does this need to potentially be instantiated with type args too? *)\n    let labels =\n      if lowercaseComponent then\n        let rec digToTypeForCompletion path =\n          match\n            path\n            |> getCompletionsForPath ~debug ~completionContext:Type ~exact:true\n                 ~opens ~full ~pos ~env ~scope\n          with\n          | {kind = Type {kind = Abstract (Some (p, _))}} :: _ ->\n            (* This case happens when what we're looking for is a type alias.\n               This is the case in newer rescript-react versions where\n               ReactDOM.domProps is an alias for JsxEvent.t. 
*)\n            let pathRev = p |> Utils.expandPath in\n            pathRev |> List.rev |> digToTypeForCompletion\n          | {kind = Type {kind = Record fields}} :: _ ->\n            fields |> List.map (fun f -> (f.fname.txt, f.typ, env))\n          | _ -> []\n        in\n        TypeUtils.pathToElementProps package |> digToTypeForCompletion\n      else\n        CompletionJsx.getJsxLabels ~componentPath:pathToComponent\n          ~findTypeOfValue ~package\n    in\n    (* We have a heuristic that kicks in when completing empty prop expressions in the middle of a JSX element,\n       like <SomeComp firstProp=test second=<com> third=123 />.\n       The parser turns that broken JSX into: <SomeComp firstProp=test second=<com>third />, 123.\n\n       So, we use a heuristic that covers this scenario by picking up on the cursor being between\n       the prop name and the prop expression, and the prop expression being an ident that's a\n       _valid prop name_ for that JSX element.\n\n       This works because the ident itself will always be the next prop name (since that's what the\n       parser eats). So, we do a simple lookup of that hint here if it exists, to make sure the hint\n       is indeed a valid label for this JSX element. 
*)\n    let emptyJsxPropNameHintIsCorrect =\n      match emptyJsxPropNameHint with\n      | Some identName when identName != propName ->\n        labels\n        |> List.find_opt (fun (f, _, _) -> f = identName)\n        |> Option.is_some\n      | Some _ -> false\n      | None -> true\n    in\n    let targetLabel =\n      if emptyJsxPropNameHintIsCorrect then\n        labels |> List.find_opt (fun (f, _, _) -> f = propName)\n      else None\n    in\n    match targetLabel with\n    | None -> []\n    | Some (_, typ, env) ->\n      [\n        Completion.create \"dummy\" ~env\n          ~kind:(Completion.Value (Utils.unwrapIfOption typ));\n      ])\n  | CArgument {functionContextPath; argumentLabel} -> (\n    if Debug.verbose () then print_endline \"[ctx_path]--> CArgument\";\n    if Debug.verbose () then\n      Printf.printf \"--> function argument: %s\\n\"\n        (match argumentLabel with\n        | Labelled n | Optional n -> n\n        | Unlabelled {argumentPosition} -> \"$\" ^ string_of_int argumentPosition);\n\n    let labels, env =\n      match\n        functionContextPath\n        |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n             ~exact:true ~scope\n        |> completionsGetCompletionType2 ~debug ~full ~opens ~rawOpens ~pos\n      with\n      | Some ((TypeExpr typ | ExtractedType (Tfunction {typ})), env) ->\n        if Debug.verbose () then print_endline \"--> found function type\";\n        (typ |> TypeUtils.getArgs ~full ~env, env)\n      | _ ->\n        if Debug.verbose () then\n          print_endline \"--> could not find function type\";\n        ([], env)\n    in\n    let targetLabel =\n      labels\n      |> List.find_opt (fun (label, _) ->\n             match (argumentLabel, label) with\n             | ( Unlabelled {argumentPosition = pos1},\n                 Completable.Unlabelled {argumentPosition = pos2} ) ->\n               pos1 = pos2\n             | ( (Labelled name1 | Optional name1),\n                 
(Labelled name2 | Optional name2) ) ->\n               name1 = name2\n             | _ -> false)\n    in\n    let expandOption =\n      match targetLabel with\n      | None | Some ((Unlabelled _ | Labelled _), _) -> false\n      | Some (Optional _, _) -> true\n    in\n    match targetLabel with\n    | None ->\n      if Debug.verbose () then\n        print_endline \"--> could not look up function argument\";\n      []\n    | Some (_, typ) ->\n      if Debug.verbose () then print_endline \"--> found function argument!\";\n      [\n        Completion.create \"dummy\" ~env\n          ~kind:\n            (Completion.Value\n               (if expandOption then Utils.unwrapIfOption typ else typ));\n      ])\n  | CPatternPath {rootCtxPath; nested} -> (\n    if Debug.verbose () then print_endline \"[ctx_path]--> CPatternPath\";\n    (* TODO(env-stuff) Get rid of innerType etc *)\n    match\n      rootCtxPath\n      |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:true ~scope\n      |> completionsGetCompletionType2 ~debug ~full ~opens ~rawOpens ~pos\n    with\n    | Some (typ, env) -> (\n      match typ |> TypeUtils.resolveNestedPatternPath ~env ~full ~nested with\n      | Some (typ, env) ->\n        [Completion.create \"dummy\" ~env ~kind:(kindFromInnerType typ)]\n      | None -> [])\n    | None -> [])\n  | CTypeAtPos loc -> (\n    if Debug.verbose () then print_endline \"[ctx_path]--> CTypeAtPos\";\n    match TypeUtils.findTypeViaLoc loc ~full ~debug with\n    | None -> []\n    | Some typExpr -> [Completion.create \"dummy\" ~env ~kind:(Value typExpr)])\n\nlet getOpens ~debug ~rawOpens ~package ~env =\n  if debug && rawOpens <> [] then\n    Printf.printf \"%s\\n\"\n      (\"Raw opens: \"\n      ^ string_of_int (List.length rawOpens)\n      ^ \" \"\n      ^ String.concat \" ... 
\" (rawOpens |> List.map pathToString));\n  let packageOpens = package.opens in\n  if debug && packageOpens <> [] then\n    Printf.printf \"%s\\n\"\n      (\"Package opens \"\n      ^ String.concat \" \"\n          (packageOpens\n          |> List.map (fun p ->\n                 p\n                 |> List.map (fun name ->\n                        (* Unify formatting between curried and uncurried *)\n                        if name = \"PervasivesU\" then \"Pervasives\" else name)\n                 |> pathToString)));\n  let resolvedOpens =\n    resolveOpens ~env (List.rev (rawOpens @ packageOpens)) ~package\n  in\n  if debug && resolvedOpens <> [] then\n    Printf.printf \"%s\\n\"\n      (\"Resolved opens \"\n      ^ string_of_int (List.length resolvedOpens)\n      ^ \" \"\n      ^ String.concat \" \"\n          (resolvedOpens\n          |> List.map (fun (e : QueryEnv.t) ->\n                 let name = Uri.toString e.file.uri in\n\n                 (* Unify formatting between curried and uncurried *)\n                 if\n                   name = \"pervasives.res\" || name = \"pervasives.resi\"\n                   || name = \"pervasivesU.res\" || name = \"pervasivesU.resi\"\n                 then \"pervasives\"\n                 else name)));\n  (* Last open takes priority *)\n  List.rev resolvedOpens\n\nlet filterItems items ~prefix =\n  if prefix = \"\" then items\n  else\n    items\n    |> List.filter (fun (item : Completion.t) ->\n           Utils.startsWith item.name prefix)\n\ntype completionMode = Pattern of Completable.patternMode | Expression\n\nlet emptyCase ~mode num =\n  match mode with\n  | Expression -> \"$\" ^ string_of_int (num - 1)\n  | Pattern _ -> \"${\" ^ string_of_int num ^ \":_}\"\n\nlet printConstructorArgs ~mode ~asSnippet argsLen =\n  let args = ref [] in\n  for argNum = 1 to argsLen do\n    args :=\n      !args\n      @ [\n          (match (asSnippet, argsLen) with\n          | true, l when l > 1 -> Printf.sprintf \"${%i:_}\" argNum\n     
     | true, l when l > 0 -> emptyCase ~mode argNum\n          | _ -> \"_\");\n        ]\n  done;\n  if List.length !args > 0 then \"(\" ^ (!args |> String.concat \", \") ^ \")\"\n  else \"\"\n\nlet rec completeTypedValue ?(typeArgContext : typeArgContext option) ~rawOpens\n    ~full ~prefix ~completionContext ~mode (t : SharedTypes.completionType) =\n  let emptyCase = emptyCase ~mode in\n  let printConstructorArgs = printConstructorArgs ~mode in\n  let create = Completion.create ?typeArgContext in\n  match t with\n  | TtypeT {env; path} when mode = Expression ->\n    if Debug.verbose () then\n      print_endline \"[complete_typed_value]--> TtypeT (Expression)\";\n    (* Find all values in the module with type t *)\n    let valueWithTypeT t =\n      match t.Types.desc with\n      | Tconstr (Pident {name = \"t\"}, [], _) -> true\n      | _ -> false\n    in\n    (* Find all functions in the module that returns type t *)\n    let rec fnReturnsTypeT t =\n      match t.Types.desc with\n      | Tlink t1\n      | Tsubst t1\n      | Tpoly (t1, [])\n      | Tconstr (Pident {name = \"function$\"}, [t1; _], _) ->\n        fnReturnsTypeT t1\n      | Tarrow _ -> (\n        match TypeUtils.extractFunctionType ~env ~package:full.package t with\n        | ( (Nolabel, {desc = Tconstr (Path.Pident {name = \"t\"}, _, _)}) :: _,\n            {desc = Tconstr (Path.Pident {name = \"t\"}, _, _)} ) ->\n          (* Filter out functions that take type t first. These are often\n             @send style functions that we don't want to have here because\n             they usually aren't meant to create a type t from scratch. 
*)\n          false\n        | _args, {desc = Tconstr (Path.Pident {name = \"t\"}, _, _)} -> true\n        | _ -> false)\n      | _ -> false\n    in\n    let getCompletionName exportedValueName =\n      let fnNname =\n        TypeUtils.getModulePathRelativeToEnv ~debug:false\n          ~env:(QueryEnv.fromFile full.file)\n          ~envFromItem:env (Utils.expandPath path)\n      in\n      match fnNname with\n      | None -> None\n      | Some base ->\n        let base =\n          TypeUtils.removeOpensFromCompletionPath ~rawOpens\n            ~package:full.package base\n        in\n        Some ((base |> String.concat \".\") ^ \".\" ^ exportedValueName)\n    in\n    let getExportedValueCompletion name (declared : Types.type_expr Declared.t)\n        =\n      let typeExpr = declared.item in\n      if valueWithTypeT typeExpr then\n        getCompletionName name\n        |> Option.map (fun name ->\n               create name ~includesSnippets:true ~insertText:name\n                 ~kind:(Value typeExpr) ~env)\n      else if fnReturnsTypeT typeExpr then\n        getCompletionName name\n        |> Option.map (fun name ->\n               create\n                 (Printf.sprintf \"%s()\" name)\n                 ~includesSnippets:true ~insertText:(name ^ \"($0)\")\n                 ~kind:(Value typeExpr) ~env)\n      else None\n    in\n    let completionItems =\n      Hashtbl.fold\n        (fun name stamp all ->\n          match Stamps.findValue env.file.stamps stamp with\n          | None -> all\n          | Some declaredTypeExpr -> (\n            match getExportedValueCompletion name declaredTypeExpr with\n            | None -> all\n            | Some completion -> completion :: all))\n        env.exported.values_ []\n    in\n\n    (* Special casing for things where we want extra things in the completions *)\n    let completionItems =\n      match path with\n      | Pdot (Pdot (Pident m, \"Re\", _), \"t\", _) when Ident.name m = \"Js\" ->\n        (* regexps *)\n        
create \"%re()\" ~insertText:\"%re(\\\"/$0/g\\\")\" ~includesSnippets:true\n          ~kind:(Label \"Regular expression\") ~env\n        :: completionItems\n      | _ -> completionItems\n    in\n    completionItems\n  | Tbool env ->\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Tbool\";\n    [\n      create \"true\" ~kind:(Label \"bool\") ~env;\n      create \"false\" ~kind:(Label \"bool\") ~env;\n    ]\n    |> filterItems ~prefix\n  | TtypeT {env; path} ->\n    if Debug.verbose () then\n      print_endline \"[complete_typed_value]--> TtypeT (Pattern)\";\n    (* This is in patterns. Emit an alias/binding with the module name as a value name. *)\n    if prefix <> \"\" then []\n    else\n      let moduleName =\n        match path |> Utils.expandPath with\n        | _t :: moduleName :: _rest -> String.uncapitalize_ascii moduleName\n        | _ -> \"value\"\n      in\n      [\n        create moduleName ~kind:(Label moduleName) ~env\n          ~insertText:(\"${0:\" ^ moduleName ^ \"}\")\n          ~includesSnippets:true;\n      ]\n  | Tvariant {env; constructors; variantDecl; variantName} ->\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Tvariant\";\n    constructors\n    |> List.map (fun (constructor : Constructor.t) ->\n           let numArgs =\n             match constructor.args with\n             | InlineRecord _ -> 1\n             | Args args -> List.length args\n           in\n           create ?deprecated:constructor.deprecated ~includesSnippets:true\n             (constructor.cname.txt\n             ^ printConstructorArgs numArgs ~asSnippet:false)\n             ~insertText:\n               (constructor.cname.txt\n               ^ printConstructorArgs numArgs ~asSnippet:true)\n             ~kind:\n               (Constructor\n                  (constructor, variantDecl |> Shared.declToString variantName))\n             ~env)\n    |> filterItems ~prefix\n  | Tpolyvariant {env; constructors; typeExpr} ->\n    if 
Debug.verbose () then\n      print_endline \"[complete_typed_value]--> Tpolyvariant\";\n    constructors\n    |> List.map (fun (constructor : polyVariantConstructor) ->\n           create\n             (\"#\" ^ constructor.displayName\n             ^ printConstructorArgs\n                 (List.length constructor.args)\n                 ~asSnippet:false)\n             ~includesSnippets:true\n             ~insertText:\n               ((if Utils.startsWith prefix \"#\" then \"\" else \"#\")\n               ^ constructor.displayName\n               ^ printConstructorArgs\n                   (List.length constructor.args)\n                   ~asSnippet:true)\n             ~kind:\n               (PolyvariantConstructor\n                  (constructor, typeExpr |> Shared.typeToString))\n             ~env)\n    |> filterItems\n         ~prefix:(if Utils.startsWith prefix \"#\" then prefix else \"#\" ^ prefix)\n  | Toption (env, t) ->\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Toption\";\n    let innerType =\n      match t with\n      | ExtractedType t -> Some (t, None)\n      | TypeExpr t -> t |> TypeUtils.extractType ~env ~package:full.package\n    in\n    let expandedCompletions =\n      match innerType with\n      | None -> []\n      | Some (innerType, _typeArgsContext) ->\n        innerType\n        |> completeTypedValue ~rawOpens ~full ~prefix ~completionContext ~mode\n        |> List.map (fun (c : Completion.t) ->\n               {\n                 c with\n                 name = \"Some(\" ^ c.name ^ \")\";\n                 sortText = None;\n                 insertText =\n                   (match c.insertText with\n                   | None -> None\n                   | Some insertText -> Some (\"Some(\" ^ insertText ^ \")\"));\n               })\n    in\n    let noneCase = Completion.create \"None\" ~kind:(kindFromInnerType t) ~env in\n    let someAnyCase =\n      create \"Some(_)\" ~includesSnippets:true ~kind:(kindFromInnerType t) 
~env\n        ~insertText:(Printf.sprintf \"Some(%s)\" (emptyCase 1))\n    in\n    let completions =\n      match completionContext with\n      | Some (Completable.CameFromRecordField fieldName) ->\n        [\n          create\n            (\"Some(\" ^ fieldName ^ \")\")\n            ~includesSnippets:true ~kind:(kindFromInnerType t) ~env\n            ~insertText:(\"Some(\" ^ fieldName ^ \")$0\");\n          someAnyCase;\n          noneCase;\n        ]\n      | _ -> [noneCase; someAnyCase]\n    in\n    completions @ expandedCompletions |> filterItems ~prefix\n  | Tresult {env; okType; errorType} ->\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Tresult\";\n    let okInnerType =\n      okType |> TypeUtils.extractType ~env ~package:full.package\n    in\n    let errorInnerType =\n      errorType |> TypeUtils.extractType ~env ~package:full.package\n    in\n    let expandedOkCompletions =\n      match okInnerType with\n      | None -> []\n      | Some (innerType, _) ->\n        innerType\n        |> completeTypedValue ~rawOpens ~full ~prefix ~completionContext ~mode\n        |> List.map (fun (c : Completion.t) ->\n               {\n                 c with\n                 name = \"Ok(\" ^ c.name ^ \")\";\n                 sortText = None;\n                 insertText =\n                   (match c.insertText with\n                   | None -> None\n                   | Some insertText -> Some (\"Ok(\" ^ insertText ^ \")\"));\n               })\n    in\n    let expandedErrorCompletions =\n      match errorInnerType with\n      | None -> []\n      | Some (innerType, _) ->\n        innerType\n        |> completeTypedValue ~rawOpens ~full ~prefix ~completionContext ~mode\n        |> List.map (fun (c : Completion.t) ->\n               {\n                 c with\n                 name = \"Error(\" ^ c.name ^ \")\";\n                 sortText = None;\n                 insertText =\n                   (match c.insertText with\n                   | None 
-> None\n                   | Some insertText -> Some (\"Error(\" ^ insertText ^ \")\"));\n               })\n    in\n    let okAnyCase =\n      create \"Ok(_)\" ~includesSnippets:true ~kind:(Value okType) ~env\n        ~insertText:(Printf.sprintf \"Ok(%s)\" (emptyCase 1))\n    in\n    let errorAnyCase =\n      create \"Error(_)\" ~includesSnippets:true ~kind:(Value errorType) ~env\n        ~insertText:(Printf.sprintf \"Error(%s)\" (emptyCase 1))\n    in\n    let completions =\n      match completionContext with\n      | Some (Completable.CameFromRecordField fieldName) ->\n        [\n          create\n            (\"Ok(\" ^ fieldName ^ \")\")\n            ~includesSnippets:true ~kind:(Value okType) ~env\n            ~insertText:(\"Ok(\" ^ fieldName ^ \")$0\");\n          okAnyCase;\n          errorAnyCase;\n        ]\n      | _ -> [okAnyCase; errorAnyCase]\n    in\n    completions @ expandedOkCompletions @ expandedErrorCompletions\n    |> filterItems ~prefix\n  | Tuple (env, exprs, typ) ->\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Tuple\";\n    let numExprs = List.length exprs in\n    [\n      create\n        (printConstructorArgs numExprs ~asSnippet:false)\n        ~includesSnippets:true\n        ~insertText:(printConstructorArgs numExprs ~asSnippet:true)\n        ~kind:(Value typ) ~env;\n    ]\n  | Trecord {env; fields} as extractedType -> (\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Trecord\";\n    (* As we're completing for a record, we'll need a hint (completionContext)\n       here to figure out whether we should complete for a record field, or\n       the record body itself. 
*)\n    match completionContext with\n    | Some (Completable.RecordField {seenFields}) ->\n      fields\n      |> List.filter (fun (field : field) ->\n             List.mem field.fname.txt seenFields = false)\n      |> List.map (fun (field : field) ->\n             match (field.optional, mode) with\n             | true, Pattern Destructuring ->\n               create (\"?\" ^ field.fname.txt) ?deprecated:field.deprecated\n                 ~docstring:\n                   [\n                     field.fname.txt\n                     ^ \" is an optional field, and needs to be destructured \\\n                        using '?'.\";\n                   ]\n                 ~kind:\n                   (Field (field, TypeUtils.extractedTypeToString extractedType))\n                 ~env\n             | _ ->\n               create field.fname.txt ?deprecated:field.deprecated\n                 ~kind:\n                   (Field (field, TypeUtils.extractedTypeToString extractedType))\n                 ~env)\n      |> filterItems ~prefix\n    | _ ->\n      if prefix = \"\" then\n        [\n          create \"{}\" ~includesSnippets:true ~insertText:\"{$0}\" ~sortText:\"A\"\n            ~kind:\n              (ExtractedType\n                 ( extractedType,\n                   match mode with\n                   | Pattern _ -> `Type\n                   | Expression -> `Value ))\n            ~env;\n        ]\n      else [])\n  | TinlineRecord {env; fields} -> (\n    if Debug.verbose () then\n      print_endline \"[complete_typed_value]--> TinlineRecord\";\n    match completionContext with\n    | Some (Completable.RecordField {seenFields}) ->\n      fields\n      |> List.filter (fun (field : field) ->\n             List.mem field.fname.txt seenFields = false)\n      |> List.map (fun (field : field) ->\n             create field.fname.txt ~kind:(Label \"Inline record\")\n               ?deprecated:field.deprecated ~env)\n      |> filterItems ~prefix\n    | _ ->\n      if prefix = 
\"\" then\n        [\n          create \"{}\" ~includesSnippets:true ~insertText:\"{$0}\" ~sortText:\"A\"\n            ~kind:(Label \"Inline record\") ~env;\n        ]\n      else [])\n  | Tarray (env, typ) ->\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Tarray\";\n    if prefix = \"\" then\n      [\n        create \"[]\" ~includesSnippets:true ~insertText:\"[$0]\" ~sortText:\"A\"\n          ~kind:\n            (match typ with\n            | ExtractedType typ ->\n              ExtractedType\n                ( typ,\n                  match mode with\n                  | Pattern _ -> `Type\n                  | Expression -> `Value )\n            | TypeExpr typ -> Value typ)\n          ~env;\n      ]\n    else []\n  | Tstring env ->\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Tstring\";\n    if prefix = \"\" then\n      [\n        create \"\\\"\\\"\" ~includesSnippets:true ~insertText:\"\\\"$0\\\"\" ~sortText:\"A\"\n          ~kind:(Value (Ctype.newconstr Predef.path_string []))\n          ~env;\n      ]\n    else []\n  | Tfunction {env; typ; args; uncurried; returnType}\n    when prefix = \"\" && mode = Expression ->\n    if Debug.verbose () then\n      print_endline \"[complete_typed_value]--> Tfunction #1\";\n    let shouldPrintAsUncurried = uncurried && !Config.uncurried <> Uncurried in\n    let mkFnArgs ~asSnippet =\n      match args with\n      | [(Nolabel, argTyp)] when TypeUtils.typeIsUnit argTyp ->\n        if shouldPrintAsUncurried then \"(. )\" else \"()\"\n      | [(Nolabel, argTyp)] ->\n        let varName =\n          CompletionExpressions.prettyPrintFnTemplateArgName ~env ~full argTyp\n        in\n        let argsText = if asSnippet then \"${1:\" ^ varName ^ \"}\" else varName in\n        if shouldPrintAsUncurried then \"(. 
\" ^ argsText ^ \")\" else argsText\n      | _ ->\n        let currentUnlabelledIndex = ref 0 in\n        let argsText =\n          args\n          |> List.map (fun ((label, typ) : typedFnArg) ->\n                 match label with\n                 | Optional name -> \"~\" ^ name ^ \"=?\"\n                 | Labelled name -> \"~\" ^ name\n                 | Nolabel ->\n                   if TypeUtils.typeIsUnit typ then \"()\"\n                   else (\n                     currentUnlabelledIndex := !currentUnlabelledIndex + 1;\n                     let num = !currentUnlabelledIndex in\n                     let varName =\n                       CompletionExpressions.prettyPrintFnTemplateArgName\n                         ~currentIndex:num ~env ~full typ\n                     in\n                     if asSnippet then\n                       \"${\" ^ string_of_int num ^ \":\" ^ varName ^ \"}\"\n                     else varName))\n          |> String.concat \", \"\n        in\n        \"(\" ^ if shouldPrintAsUncurried then \". 
\" else \"\" ^ argsText ^ \")\"\n    in\n    let isAsync =\n      match TypeUtils.extractType ~env ~package:full.package returnType with\n      | Some (Tpromise _, _) -> true\n      | _ -> false\n    in\n    let asyncPrefix = if isAsync then \"async \" else \"\" in\n    let functionBody, functionBodyInsertText =\n      match args with\n      | [(Nolabel, argTyp)] ->\n        let varName =\n          CompletionExpressions.prettyPrintFnTemplateArgName ~env ~full argTyp\n        in\n        ( (\" => \" ^ if varName = \"()\" then \"{}\" else varName),\n          \" => ${0:\" ^ varName ^ \"}\" )\n      | _ -> (\" => {}\", \" => {${0:()}}\")\n    in\n    [\n      create\n        (asyncPrefix ^ mkFnArgs ~asSnippet:false ^ functionBody)\n        ~includesSnippets:true\n        ~insertText:\n          (asyncPrefix ^ mkFnArgs ~asSnippet:true ^ functionBodyInsertText)\n        ~sortText:\"A\" ~kind:(Value typ) ~env;\n    ]\n  | Tfunction _ ->\n    if Debug.verbose () then\n      print_endline \"[complete_typed_value]--> Tfunction #other\";\n    []\n  | Texn env ->\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Texn\";\n    [\n      create\n        (full.package.builtInCompletionModules.exnModulePath @ [\"Error(error)\"]\n        |> ident)\n        ~kind:(Label \"Catches errors from JavaScript errors.\")\n        ~docstring:\n          [\n            \"Matches on a JavaScript error. 
Read more in the [documentation on \\\n             catching JS \\\n             exceptions](https://rescript-lang.org/docs/manual/latest/exception#catching-js-exceptions).\";\n          ]\n        ~env;\n    ]\n  | Tpromise _ ->\n    if Debug.verbose () then print_endline \"[complete_typed_value]--> Tpromise\";\n    []\n\nmodule StringSet = Set.Make (String)\n\nlet rec processCompletable ~debug ~full ~scope ~env ~pos ~forHover completable =\n  if debug then\n    Printf.printf \"Completable: %s\\n\" (Completable.toString completable);\n  let package = full.package in\n  let rawOpens = Scope.getRawOpens scope in\n  let opens = getOpens ~debug ~rawOpens ~package ~env in\n  let allFiles = allFilesInPackage package in\n  let findTypeOfValue path =\n    path\n    |> getCompletionsForPath ~debug ~completionContext:Value ~exact:true ~opens\n         ~full ~pos ~env ~scope\n    |> completionsGetTypeEnv2 ~debug ~full ~opens ~rawOpens ~pos\n  in\n  match completable with\n  | Cnone -> []\n  | Cpath contextPath ->\n    contextPath\n    |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n         ~exact:forHover ~scope\n  | Cjsx ([id], prefix, identsSeen) when String.uncapitalize_ascii id = id -> (\n    (* Lowercase JSX tag means builtin *)\n    let mkLabel (name, typString) =\n      Completion.create name ~kind:(Label typString) ~env\n    in\n    let keyLabels =\n      if Utils.startsWith \"key\" prefix then [mkLabel (\"key\", \"string\")] else []\n    in\n    (* We always try to look up completion from the actual domProps type first.\n       This works in JSXv4. For JSXv3, we have a backup hardcoded list of dom\n       labels we can use for completion. 
*)\n    let pathToElementProps = TypeUtils.pathToElementProps package in\n    if Debug.verbose () then\n      Printf.printf\n        \"[completing-lowercase-jsx] Attempting to complete from type at %s\\n\"\n        (pathToElementProps |> String.concat \".\");\n    let fromElementProps =\n      match\n        pathToElementProps\n        |> digToRecordFieldsForCompletion ~debug ~package ~opens ~full ~pos ~env\n             ~scope\n      with\n      | None -> None\n      | Some fields ->\n        Some\n          (fields\n          |> List.filter_map (fun (f : field) ->\n                 if\n                   Utils.startsWith f.fname.txt prefix\n                   && (forHover || not (List.mem f.fname.txt identsSeen))\n                 then\n                   Some\n                     ( f.fname.txt,\n                       Shared.typeToString (Utils.unwrapIfOption f.typ) )\n                 else None)\n          |> List.map mkLabel)\n    in\n    match fromElementProps with\n    | Some elementProps -> elementProps\n    | None ->\n      if debug then\n        Printf.printf\n          \"[completing-lowercase-jsx] could not find element props to complete \\\n           from.\\n\";\n      (CompletionJsx.domLabels\n      |> List.filter (fun (name, _t) ->\n             Utils.startsWith name prefix\n             && (forHover || not (List.mem name identsSeen)))\n      |> List.map mkLabel)\n      @ keyLabels)\n  | Cjsx (componentPath, prefix, identsSeen) ->\n    let labels =\n      CompletionJsx.getJsxLabels ~componentPath ~findTypeOfValue ~package\n    in\n    let mkLabel_ name typString =\n      Completion.create name ~kind:(Label typString) ~env\n    in\n    let mkLabel (name, typ, _env) =\n      mkLabel_ name (typ |> Shared.typeToString)\n    in\n    let keyLabels =\n      if Utils.startsWith \"key\" prefix then [mkLabel_ \"key\" \"string\"] else []\n    in\n    if labels = [] then []\n    else\n      (labels\n      |> List.filter (fun (name, _t, _env) ->\n             
Utils.startsWith name prefix\n             && name <> \"key\"\n             && (forHover || not (List.mem name identsSeen)))\n      |> List.map mkLabel)\n      @ keyLabels\n  | CdecoratorPayload (JsxConfig {prefix; nested}) -> (\n    let mkField ~name ~primitive =\n      {\n        stamp = -1;\n        fname = {loc = Location.none; txt = name};\n        optional = true;\n        typ = Ctype.newconstr primitive [];\n        docstring = [];\n        deprecated = None;\n      }\n    in\n    let typ : completionType =\n      Trecord\n        {\n          env;\n          definition = `NameOnly \"jsxConfig\";\n          fields =\n            [\n              mkField ~name:\"version\" ~primitive:Predef.path_int;\n              mkField ~name:\"module_\" ~primitive:Predef.path_string;\n              mkField ~name:\"mode\" ~primitive:Predef.path_string;\n            ];\n        }\n    in\n    match typ |> TypeUtils.resolveNested ~env ~full ~nested with\n    | None -> []\n    | Some (typ, _env, completionContext, typeArgContext) ->\n      typ\n      |> completeTypedValue ?typeArgContext ~rawOpens ~mode:Expression ~full\n           ~prefix ~completionContext)\n  | CdecoratorPayload (ModuleWithImportAttributes {prefix; nested}) -> (\n    let mkField ~name ~primitive =\n      {\n        stamp = -1;\n        fname = {loc = Location.none; txt = name};\n        optional = true;\n        typ = Ctype.newconstr primitive [];\n        docstring = [];\n        deprecated = None;\n      }\n    in\n    let importAttributesConfig : completionType =\n      Trecord\n        {\n          env;\n          definition = `NameOnly \"importAttributesConfig\";\n          fields = [mkField ~name:\"type_\" ~primitive:Predef.path_string];\n        }\n    in\n    let rootConfig : completionType =\n      Trecord\n        {\n          env;\n          definition = `NameOnly \"moduleConfig\";\n          fields =\n            [\n              mkField ~name:\"from\" ~primitive:Predef.path_string;\n            
  mkField ~name:\"with\" ~primitive:Predef.path_string;\n            ];\n        }\n    in\n    let nested, typ =\n      match nested with\n      | NFollowRecordField {fieldName = \"with\"} :: rest ->\n        (rest, importAttributesConfig)\n      | _ -> (nested, rootConfig)\n    in\n    match typ |> TypeUtils.resolveNested ~env ~full ~nested with\n    | None -> []\n    | Some (typ, _env, completionContext, typeArgContext) ->\n      typ\n      |> completeTypedValue ?typeArgContext ~rawOpens ~mode:Expression ~full\n           ~prefix ~completionContext)\n  | CdecoratorPayload (Module prefix) ->\n    let packageJsonPath =\n      Utils.findPackageJson (full.package.rootPath |> Uri.fromPath)\n    in\n    let itemsFromPackageJson =\n      match packageJsonPath with\n      | None ->\n        if debug then\n          Printf.printf\n            \"Did not find package.json, started looking (going upwards) from: %s\\n\"\n            full.package.rootPath;\n        []\n      | Some path -> (\n        match Files.readFile path with\n        | None ->\n          if debug then print_endline \"Could not read package.json\";\n          []\n        | Some s -> (\n          match Json.parse s with\n          | Some (Object items) ->\n            items\n            |> List.filter_map (fun (key, t) ->\n                   match (key, t) with\n                   | (\"dependencies\" | \"devDependencies\"), Json.Object o ->\n                     Some\n                       (o\n                       |> List.filter_map (fun (pkgName, _) ->\n                              match pkgName with\n                              | \"rescript\" -> None\n                              | pkgName -> Some pkgName))\n                   | _ -> None)\n            |> List.flatten\n          | _ ->\n            if debug then print_endline \"Could not parse package.json\";\n            []))\n    in\n    (* TODO: Resolve relatives? 
*)\n    let localItems =\n      try\n        let files =\n          Sys.readdir (Filename.dirname (env.file.uri |> Uri.toPath))\n          |> Array.to_list\n        in\n        (* Try to filter out compiled in source files *)\n        let resFiles =\n          StringSet.of_list\n            (files\n            |> List.filter_map (fun f ->\n                   if Filename.extension f = \".res\" then\n                     Some (try Filename.chop_extension f with _ -> f)\n                   else None))\n        in\n        files\n        |> List.filter_map (fun fileName ->\n               let withoutExtension =\n                 try Filename.chop_extension fileName with _ -> fileName\n               in\n               if\n                 String.ends_with fileName ~suffix:package.suffix\n                 && resFiles |> StringSet.mem withoutExtension\n               then None\n               else\n                 match Filename.extension fileName with\n                 | \".res\" | \".resi\" | \"\" -> None\n                 | _ -> Some (\"./\" ^ fileName))\n        |> List.sort String.compare\n      with _ ->\n        if debug then print_endline \"Could not read relative directory\";\n        []\n    in\n    let items = itemsFromPackageJson @ localItems in\n    items\n    |> List.filter (fun name -> Utils.startsWith name prefix)\n    |> List.map (fun name ->\n           let isLocal = Utils.startsWith name \"./\" in\n           Completion.create name\n             ~kind:(Label (if isLocal then \"Local file\" else \"Package\"))\n             ~env)\n  | Cdecorator prefix ->\n    let mkDecorator (name, docstring, maybeInsertText) =\n      {\n        (Completion.create name ~synthetic:true ~includesSnippets:true\n           ~kind:(Label \"\") ~env ?insertText:maybeInsertText)\n        with\n        docstring;\n      }\n    in\n    let isTopLevel = String.starts_with ~prefix:\"@\" prefix in\n    let prefix =\n      if isTopLevel then String.sub prefix 1 (String.length prefix 
- 1)\n      else prefix\n    in\n    let decorators =\n      if isTopLevel then CompletionDecorators.toplevel\n      else CompletionDecorators.local\n    in\n    decorators\n    |> List.filter (fun (decorator, _, _) -> Utils.startsWith decorator prefix)\n    |> List.map (fun (decorator, maybeInsertText, doc) ->\n           let parts = String.split_on_char '.' prefix in\n           let len = String.length prefix in\n           let dec2 =\n             if List.length parts > 1 then\n               String.sub decorator len (String.length decorator - len)\n             else decorator\n           in\n           (dec2, doc, maybeInsertText))\n    |> List.map mkDecorator\n  | CnamedArg (cp, prefix, identsSeen) ->\n    let labels =\n      match\n        cp\n        |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n             ~exact:true ~scope\n        |> completionsGetTypeEnv2 ~debug ~full ~opens ~rawOpens ~pos\n      with\n      | Some (typ, _env) ->\n        if debug then\n          Printf.printf \"Found type for function %s\\n\"\n            (typ |> Shared.typeToString);\n\n        typ\n        |> TypeUtils.getArgs ~full ~env\n        |> List.filter_map (fun arg ->\n               match arg with\n               | SharedTypes.Completable.Labelled name, a -> Some (name, a)\n               | Optional name, a -> Some (name, a)\n               | _ -> None)\n      | None -> []\n    in\n    let mkLabel (name, typ) =\n      Completion.create name ~kind:(Label (typ |> Shared.typeToString)) ~env\n    in\n    labels\n    |> List.filter (fun (name, _t) ->\n           Utils.startsWith name prefix\n           && (forHover || not (List.mem name identsSeen)))\n    |> List.map mkLabel\n  | Cpattern {contextPath; prefix; nested; fallback; patternMode} -> (\n    let fallbackOrEmpty ?items () =\n      match (fallback, items) with\n      | Some fallback, (None | Some []) ->\n        fallback |> processCompletable ~debug ~full ~scope ~env ~pos ~forHover\n      | _, 
Some items -> items\n      | None, None -> []\n    in\n    match\n      contextPath\n      |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:true ~scope\n      |> completionsGetTypeEnv2 ~debug ~full ~opens ~rawOpens ~pos\n    with\n    | Some (typ, env) -> (\n      match\n        typ\n        |> TypeUtils.extractType ~env ~package:full.package\n        |> Utils.Option.flatMap (fun (typ, typeArgContext) ->\n               typ |> TypeUtils.resolveNested ?typeArgContext ~env ~full ~nested)\n      with\n      | None -> fallbackOrEmpty ()\n      | Some (typ, _env, completionContext, typeArgContext) ->\n        let items =\n          typ\n          |> completeTypedValue ?typeArgContext ~rawOpens\n               ~mode:(Pattern patternMode) ~full ~prefix ~completionContext\n        in\n        fallbackOrEmpty ~items ())\n    | None -> fallbackOrEmpty ())\n  | Cexpression {contextPath; prefix; nested} -> (\n    let isAmbigiousRecordBodyOrJsxWrap =\n      match (contextPath, nested) with\n      | CJsxPropValue _, [NRecordBody _] -> true\n      | _ -> false\n    in\n    if Debug.verbose () then\n      (* This happens in this scenario: `<SomeComponent someProp={<com>}`\n           Here, we don't know whether `{}` is just wraps for the type of\n           `someProp`, or if it's a record body where we want to complete\n            for the fields in the record. We need to look up what the type is\n           first before deciding what completions to show. So we do that here.*)\n      if isAmbigiousRecordBodyOrJsxWrap then\n        print_endline\n          \"[process_completable]--> Cexpression special case: JSX prop value \\\n           that might be record body or JSX wrap\"\n      else print_endline \"[process_completable]--> Cexpression\";\n    (* Completions for local things like variables in scope, modules in the\n       project, etc. 
We only add completions when there's a prefix of some sort\n       we can filter on, since we know we're in some sort of context, and\n       therefore don't want to overwhelm the user with completion items. *)\n    let regularCompletions =\n      if prefix = \"\" then []\n      else\n        prefix\n        |> getComplementaryCompletionsForTypedValue ~opens ~allFiles ~env ~scope\n    in\n    match\n      contextPath\n      |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:true ~scope\n      |> completionsGetCompletionType ~full\n    with\n    | None ->\n      if Debug.verbose () then\n        print_endline\n          \"[process_completable]--> could not get completions for context path\";\n      regularCompletions\n    | Some (typ, env) -> (\n      match typ |> TypeUtils.resolveNested ~env ~full ~nested with\n      | None ->\n        if Debug.verbose () then\n          print_endline\n            \"[process_completable]--> could not resolve nested expression path\";\n        if isAmbigiousRecordBodyOrJsxWrap then (\n          if Debug.verbose () then\n            print_endline\n              \"[process_completable]--> case is ambigious Jsx prop vs record \\\n               body case, complete also for the JSX prop value directly\";\n          let itemsForRawJsxPropValue =\n            typ\n            |> completeTypedValue ~rawOpens ~mode:Expression ~full ~prefix\n                 ~completionContext:None\n          in\n          itemsForRawJsxPropValue @ regularCompletions)\n        else regularCompletions\n      | Some (typ, _env, completionContext, typeArgContext) -> (\n        if Debug.verbose () then\n          print_endline\n            \"[process_completable]--> found type in nested expression \\\n             completion\";\n        (* Wrap the insert text in braces when we're completing the root of a\n           JSX prop value. 
*)\n        let wrapInsertTextInBraces =\n          if List.length nested > 0 then false\n          else\n            match contextPath with\n            | CJsxPropValue _ -> true\n            | _ -> false\n        in\n        let items =\n          typ\n          |> completeTypedValue ?typeArgContext ~rawOpens ~mode:Expression ~full\n               ~prefix ~completionContext\n          |> List.map (fun (c : Completion.t) ->\n                 if wrapInsertTextInBraces then\n                   {\n                     c with\n                     insertText =\n                       (match c.insertText with\n                       | None -> None\n                       | Some text -> Some (\"{\" ^ text ^ \"}\"));\n                   }\n                 else c)\n        in\n        match (prefix, completionContext) with\n        | \"\", _ -> items\n        | _, None ->\n          let items =\n            if List.length regularCompletions > 0 then\n              (* The client will occasionally sort the list of completions alphabetically, disregarding the order\n                 in which we send it. This fixes that by providing a sort text making the typed completions\n                 guaranteed to end up on top. 
*)\n              items\n              |> List.map (fun (c : Completion.t) ->\n                     {c with sortText = Some (\"A\" ^ \" \" ^ c.name)})\n            else items\n          in\n          items @ regularCompletions\n        | _ -> items)))\n  | CexhaustiveSwitch {contextPath; exprLoc} ->\n    let range = Utils.rangeOfLoc exprLoc in\n    let rescriptMajor, rescriptMinor = Packages.getReScriptVersion () in\n    let printFailwithStr num =\n      if (rescriptMajor = 11 && rescriptMinor >= 1) || rescriptMajor >= 12 then\n        \"${\" ^ string_of_int num ^ \":%todo}\"\n      else \"${\" ^ string_of_int num ^ \":failwith(\\\"todo\\\")}\"\n    in\n    let withExhaustiveItem ~cases ?(startIndex = 0) (c : Completion.t) =\n      (* We don't need to write out `switch` here since we know that's what the\n         user has already written. Just complete for the rest. *)\n      let newText =\n        c.name ^ \" {\\n\"\n        ^ (cases\n          |> List.mapi (fun index caseText ->\n                 \"| \" ^ caseText ^ \" => \"\n                 ^ printFailwithStr (startIndex + index + 1))\n          |> String.concat \"\\n\")\n        ^ \"\\n}\"\n        |> Utils.indent range.start.character\n      in\n      [\n        c;\n        {\n          c with\n          name = c.name ^ \" (exhaustive switch)\";\n          filterText = Some c.name;\n          insertTextFormat = Some Snippet;\n          insertText = Some newText;\n          kind = Snippet \"insert exhaustive switch for value\";\n        };\n      ]\n    in\n    let completionsForContextPath =\n      contextPath\n      |> getCompletionsForContextPath ~debug ~full ~opens ~rawOpens ~pos ~env\n           ~exact:forHover ~scope\n    in\n    completionsForContextPath\n    |> List.map (fun (c : Completion.t) ->\n           match c.kind with\n           | Value typExpr -> (\n             match typExpr |> TypeUtils.extractType ~env:c.env ~package with\n             | Some (Tvariant v, _) ->\n               
withExhaustiveItem c\n                 ~cases:\n                   (v.constructors\n                   |> List.map (fun (constructor : Constructor.t) ->\n                          constructor.cname.txt\n                          ^\n                          match constructor.args with\n                          | Args [] -> \"\"\n                          | _ -> \"(_)\"))\n             | Some (Tpolyvariant v, _) ->\n               withExhaustiveItem c\n                 ~cases:\n                   (v.constructors\n                   |> List.map (fun (constructor : polyVariantConstructor) ->\n                          \"#\" ^ constructor.displayName\n                          ^\n                          match constructor.args with\n                          | [] -> \"\"\n                          | _ -> \"(_)\"))\n             | Some (Toption (_env, _typ), _) ->\n               withExhaustiveItem c ~cases:[\"Some($1)\"; \"None\"] ~startIndex:1\n             | Some (Tresult _, _) ->\n               withExhaustiveItem c ~cases:[\"Ok($1)\"; \"Error($1)\"] ~startIndex:1\n             | Some (Tbool _, _) ->\n               withExhaustiveItem c ~cases:[\"true\"; \"false\"]\n             | _ -> [c])\n           | _ -> [c])\n    |> List.flatten\n  | ChtmlElement {prefix} ->\n    CompletionJsx.htmlElements\n    |> List.filter_map (fun (elementName, description, deprecated) ->\n           if Utils.startsWith elementName prefix then\n             let name = \"<\" ^ elementName ^ \">\" in\n             Some\n               (Completion.create name ~synthetic:true ~kind:(Label name)\n                  ~detail:description ~env ~docstring:[description]\n                  ~insertText:elementName\n                  ?deprecated:\n                    (match deprecated with\n                    | true -> Some \"true\"\n                    | false -> None))\n           else None)\n  | CextensionNode prefix ->\n    if Utils.startsWith \"todo\" prefix then\n      let detail =\n        
\"`%todo` is used to tell the compiler that some code still needs to be \\\n         implemented.\"\n      in\n      [\n        Completion.create \"todo\" ~synthetic:true ~kind:(Label \"todo\") ~detail\n          ~env ~insertText:\"todo\";\n        Completion.create \"todo (with payload)\" ~synthetic:true\n          ~includesSnippets:true ~kind:(Label \"todo\")\n          ~detail:(detail ^ \" With a payload.\")\n          ~env ~insertText:\"todo(\\\"${0:TODO}\\\")\";\n      ]\n    else []\n"
  },
  {
    "path": "analysis/src/CompletionDecorators.ml",
    "content": "let local =\n  [\n    ( \"as\",\n      Some \"as(\\\"$0\\\")\",\n      [\n        {|The `@as` decorator is commonly used on record types to alias record field names to a different JavaScript attribute name.\n\nThis is useful to map to JavaScript attribute names that cannot be expressed in ReScript (such as keywords).\n\nIt is also possible to map a ReScript record to a JavaScript array by passing indices to the `@as` decorator.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#as-decorator).|};\n      ] );\n    ( \"dead\",\n      None,\n      [\n        {|The `@dead` decorator is for reanalyze, a static analysis tool for ReScript that can do dead code analysis.\n\n`@dead` suppresses reporting on the value/type, but can also be used to force the analysis to consider a value as dead. Typically used to acknowledge cases of dead code you are not planning to address right now, but can be searched easily later.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#dead-decorator).\n\n> Hint: Did you know you can run an interactive code analysis in your project by running the command `> ReScript: Start Code Analyzer`? Try it!|};\n      ] );\n    ( \"deriving\",\n      Some \"deriving($0)\",\n      [\n        {|When the `@deriving` decorator is applied to a record type, it expands the type into a factory function plus a set of getter/setter functions for its fields.\n  \n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#deriving-decorator).|};\n      ] );\n    ( \"deprecated\",\n      Some \"deprecated(\\\"$0\\\")\",\n      [\n        {|The `@deprecated` decorator is used to add deprecation notes to types, values and submodules. 
The compiler and editor tooling will yield a warning whenever a deprecated entity is being used.\n\nAlternatively, use the `@@deprecated` decorator to add a deprecation warning to the file level.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#expression-deprecated-decorator).|};\n      ] );\n    ( \"doesNotRaise\",\n      None,\n      [\n        {|The `@doesNotRaise` decorator is for reanalyze, a static analysis tool for ReScript that can perform exception analysis.\n\n`@doesNotRaise` is uses to override the analysis and state that an expression does not raise any exceptions,\neven though the analysis reports otherwise. This can happen for example in the case of array access where\nthe analysis does not perform range checks but takes a conservative stance that any access\ncould potentially raise.\n[Read more and see examples in the documentation](https://github.com/rescript-association/reanalyze/blob/master/EXCEPTION.md).\n> Hint: Did you know you can run an interactive code analysis in your project by running the command `> ReScript: Start Code Analyzer`? Try it!|};\n      ] );\n    ( \"genType\",\n      None,\n      [\n        {|The @genType decorator may be used to export ReScript values and types to JavaScript, and import JavaScript values and types into ReScript. It allows seamless integration of compiled ReScript modules in existing TypeScript, Flow, or plain JavaScript codebases, without loosing type information across different type systems.\n  \n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#gentype-decorator).|};\n      ] );\n    ( \"genType.as\",\n      Some \"genType.as(\\\"$0\\\")\",\n      [\n        {|The @genType decorator may be used to export ReScript values and types to JavaScript, and import JavaScript values and types into ReScript. 
It allows seamless integration of compiled ReScript modules in existing TypeScript, Flow, or plain JavaScript codebases, without loosing type information across different type systems.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/docs/gentype/latest/usage).|};\n      ] );\n    ( \"genType.import\",\n      None,\n      [\n        {|The @genType decorator may be used to export ReScript values and types to JavaScript, and import JavaScript values and types into ReScript. It allows seamless integration of compiled ReScript modules in existing TypeScript, Flow, or plain JavaScript codebases, without loosing type information across different type systems.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/docs/gentype/latest/usage).|};\n      ] );\n    ( \"genType.opaque\",\n      None,\n      [\n        {|The @genType decorator may be used to export ReScript values and types to JavaScript, and import JavaScript values and types into ReScript. 
It allows seamless integration of compiled ReScript modules in existing TypeScript, Flow, or plain JavaScript codebases, without loosing type information across different type systems.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/docs/gentype/latest/usage).|};\n      ] );\n    ( \"get\",\n      None,\n      [\n        {|The `@get` decorator is used to bind to a property of an object.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#get-decorator).|};\n      ] );\n    ( \"get_index\",\n      None,\n      [\n        {|The `@get_index` decorator is used to access a dynamic property on an object, or an index of an array.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#get-index-decorator).|};\n      ] );\n    ( \"inline\",\n      None,\n      [\n        {|The `@inline` decorator tells the compiler to inline its value in every place the binding is being used, rather than use a variable.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#inline-decorator).|};\n      ] );\n    ( \"int\",\n      None,\n      [\n        {|The `@int` decorator can be used with polymorphic variants and the @as decorator on externals to modify the compiled JavaScript to use integers for the values instead of strings.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#int-decorator).|};\n      ] );\n    ( \"live\",\n      None,\n      [\n        {|The `@live` decorator is for reanalyze, a static analysis tool for ReScript that can do dead code analysis.\n\n`@live` tells the dead code analysis that the value should be considered live, even though it might appear to be dead. This is typically used in case of FFI where there are indirect ways to access values. 
It can be added to everything that could otherwise be considered unused by the dead code analysis - values, functions, arguments, records, individual record fields, and so on.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#live-decorator).\n\nHint: Did you know you can run an interactive code analysis in your project by running the command `> ReScript: Start Code Analyzer`? Try it!|};\n      ] );\n    ( \"meth\",\n      None,\n      [\n        {|The `@meth` decorator is used to call a function on a JavaScript object, and avoid issues with currying.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#meth-decorator).|};\n      ] );\n    ( \"module\",\n      Some \"module(\\\"$0\\\")\",\n      [\n        {|The `@module` decorator is used to bind to a JavaScript module.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#module-decorator).|};\n      ] );\n    ( \"new\",\n      None,\n      [\n        {|\nThe `@new` decorator is used whenever you need to bind to a JavaScript class constructor that requires the new keword for instantiation.|\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#new-decorator).|};\n      ] );\n    ( \"obj\",\n      None,\n      [\n        {|The `@obj` decorator is used to create functions that return JavaScript objects with properties that match the function's parameter labels.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#obj-decorator).|};\n      ] );\n    ( \"raises\",\n      Some \"raises(\\\"$0\\\")\",\n      [\n        {|The `@raises` decorator is for reanalyze, a static analysis tool for ReScript that can perform exception analysis.\n\n`@raises` acknowledges that a function can raise exceptions that are not caught, and suppresses\na warning in that case. 
Callers of the functions are then subjected to the same rule.\nExample `@raises(Exn)` or `@raises([E1, E2, E3])` for multiple exceptions.\n[Read more and see examples in the documentation](https://github.com/rescript-association/reanalyze/blob/master/EXCEPTION.md).\n> Hint: Did you know you can run an interactive code analysis in your project by running the command `> ReScript: Start Code Analyzer`? Try it!|};\n      ] );\n    ( \"react.component\",\n      None,\n      [\n        {|The `@react.component` decorator is used to annotate functions that are RescriptReact components.\n\nYou will need this decorator whenever you want to use a ReScript / React component in ReScript JSX expressions.\n\nNote: The `@react.component` decorator requires the `jsx` config to be set in your `rescript.json`/`bsconfig.json` to enable the required React transformations.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#react-component-decorator).|};\n      ] );\n    ( \"jsx.component\",\n      None,\n      [\n        {|The `@jsx.component` decorator is used to annotate functions that are JSX components used with ReScript's [generic JSX transform](https://rescript-lang.org/docs/manual/latest/jsx#generic-jsx-transform-jsx-beyond-react-experimental).\n\nYou will need this decorator whenever you want to use a JSX component in ReScript JSX expressions.|};\n      ] );\n    ( \"return\",\n      Some \"return(${1:nullable})\",\n      [\n        {|The `@return` decorator is used to control how `null` and `undefined` values are converted to option types in ReScript.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#return-decorator).|};\n      ] );\n    ( \"scope\",\n      Some \"scope(\\\"$0\\\")\",\n      [\n        {|The `@scope` decorator is used with other decorators such as `@val` and `@module` to declare a parent scope for the binding.\n\n[Read more and see examples in the 
documentation](https://rescript-lang.org/syntax-lookup#scope-decorator).|};\n      ] );\n    ( \"send\",\n      None,\n      [\n        {|The `@send` decorator is used to bind to a method on an object or array.\n  \n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#send-decorator).|};\n      ] );\n    ( \"set\",\n      None,\n      [\n        {|The `@set` decorator is used to set a property of an object.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#set-decorator).|};\n      ] );\n    ( \"set_index\",\n      None,\n      [\n        {|The `@set_index` decorator is used to set a dynamic property on an object, or an index of an array.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#set-index-decorator).|};\n      ] );\n    ( \"string\",\n      None,\n      [\n        {|The `@string` decorator can be used with polymorphic variants and the `@as` decorator on externals to modify the string values used for the variants in the compiled JavaScript.\n  \n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#string-decorator).|};\n      ] );\n    ( \"this\",\n      None,\n      [\n        {|The `@this` decorator may be used to bind to an external callback function that require access to a this context.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#this-decorator).|};\n      ] );\n    ( \"unboxed\",\n      None,\n      [\n        {|The `@unboxed` decorator provides a way to unwrap variant constructors that have a single argument, or record objects that have a single field.\n  \n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#unboxed-decorator).|};\n      ] );\n    ( \"uncurry\",\n      None,\n      [\n        {|The `@uncurry` decorator can be used to mark any callback argument within an external function as an uncurried 
function without the need for any explicit uncurried function syntax (`(.) => { ... }`).\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#uncurry-decorator).|};\n      ] );\n    ( \"unwrap\",\n      None,\n      [\n        {|The `@unwrap` decorator may be used when binding to external functions that accept multiple types for an argument.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#unwrap-decorator).|};\n      ] );\n    ( \"val\",\n      None,\n      [\n        {|The `@val` decorator allows you to bind to JavaScript values that are on the global scope.\n  \n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#val-decorator).|};\n      ] );\n    ( \"variadic\",\n      None,\n      [\n        {|The `@variadic` decorator is used to model JavaScript functions that take a variable number of arguments, where all arguments are of the same type.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#variadic-decorator).|};\n      ] );\n    ( \"editor.completeFrom\",\n      None,\n      [\n        {|The `@editor.completeFrom` decorator instructs the editor where it can draw additional completions from for this type.|};\n      ] );\n  ]\n\nlet toplevel =\n  [\n    ( \"deprecated\",\n      Some \"deprecated(\\\"$0\\\")\",\n      [\n        {|The `@@deprecated` decorator is used to add a deprecation note to the file-level of a module. 
The compiler and editor tooling will yield a warning whenever a deprecated file module is being used.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#module-deprecated-decorator).|};\n      ] );\n    ( \"directive\",\n      Some \"directive(\\\"$0\\\")\",\n      [\n        {|The `@@directive` decorator will output that string verbatim at the very top of the generated JavaScript file, before any imports.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#directive-decorator).|};\n      ] );\n    ( \"warning\",\n      Some \"warning(\\\"$0\\\")\",\n      [\n        {|The `@@warning` decorator is used to modify the enabled compiler warnings for the current module. See here for all available warning numbers.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#module-warning-decorator).\n         |};\n      ] );\n    ( \"jsxConfig\",\n      Some \"jsxConfig({$0})\",\n      [\n        {|The `@@jsxConfig` decorator is used to change the config for JSX on the fly.\n\n[Read more and see examples in the documentation](https://rescript-lang.org/docs/manual/latest/jsx#file-level-configuration).|};\n      ] );\n  ]\n"
  },
  {
    "path": "analysis/src/CompletionExpressions.ml",
    "content": "open SharedTypes\n\nlet isExprHole exp =\n  match exp.Parsetree.pexp_desc with\n  | Pexp_extension ({txt = \"rescript.exprhole\"}, _) -> true\n  | _ -> false\n\nlet isExprTuple expr =\n  match expr.Parsetree.pexp_desc with\n  | Pexp_tuple _ -> true\n  | _ -> false\n\nlet rec traverseExpr (exp : Parsetree.expression) ~exprPath ~pos\n    ~firstCharBeforeCursorNoWhite =\n  let locHasCursor loc = loc |> CursorPosition.locHasCursor ~pos in\n  let someIfHasCursor v = if locHasCursor exp.pexp_loc then Some v else None in\n  match exp.pexp_desc with\n  | Pexp_ident {txt = Lident txt} when Utils.hasBraces exp.pexp_attributes ->\n    (* An ident with braces attribute corresponds to for example `{n}`.\n       Looks like a record but is parsed as an ident with braces. *)\n    someIfHasCursor (txt, [Completable.NRecordBody {seenFields = []}] @ exprPath)\n  | Pexp_ident {txt = Lident txt} -> someIfHasCursor (txt, exprPath)\n  | Pexp_construct ({txt = Lident \"()\"}, _) -> someIfHasCursor (\"\", exprPath)\n  | Pexp_construct ({txt = Lident txt}, None) -> someIfHasCursor (txt, exprPath)\n  | Pexp_variant (label, None) -> someIfHasCursor (\"#\" ^ label, exprPath)\n  | Pexp_array arrayPatterns -> (\n    let nextExprPath = [Completable.NArray] @ exprPath in\n    (* No fields but still has cursor = empty completion *)\n    if List.length arrayPatterns = 0 && locHasCursor exp.pexp_loc then\n      Some (\"\", nextExprPath)\n    else\n      let arrayItemWithCursor =\n        arrayPatterns\n        |> List.find_map (fun e ->\n               e\n               |> traverseExpr ~exprPath:nextExprPath\n                    ~firstCharBeforeCursorNoWhite ~pos)\n      in\n\n      match (arrayItemWithCursor, locHasCursor exp.pexp_loc) with\n      | Some arrayItemWithCursor, _ -> Some arrayItemWithCursor\n      | None, true when firstCharBeforeCursorNoWhite = Some ',' ->\n        (* No item had the cursor, but the entire expr still has the cursor (so\n           the cursor is in the 
array somewhere), and the first char before the\n           cursor is a comma = interpret as compleing for a new value (example:\n           `[None, <com>, None]`) *)\n        Some (\"\", nextExprPath)\n      | _ -> None)\n  | Pexp_tuple tupleItems when locHasCursor exp.pexp_loc ->\n    tupleItems\n    |> traverseExprTupleItems ~firstCharBeforeCursorNoWhite ~pos\n         ~nextExprPath:(fun itemNum ->\n           [Completable.NTupleItem {itemNum}] @ exprPath)\n         ~resultFromFoundItemNum:(fun itemNum ->\n           [Completable.NTupleItem {itemNum = itemNum + 1}] @ exprPath)\n  | Pexp_record ([], _) ->\n    (* Empty fields means we're in a record body `{}`. Complete for the fields. *)\n    someIfHasCursor (\"\", [Completable.NRecordBody {seenFields = []}] @ exprPath)\n  | Pexp_record (fields, _) -> (\n    let fieldWithCursor = ref None in\n    let fieldWithExprHole = ref None in\n    fields\n    |> List.iter (fun (fname, exp) ->\n           match\n             ( fname.Location.txt,\n               exp.Parsetree.pexp_loc |> CursorPosition.classifyLoc ~pos )\n           with\n           | Longident.Lident fname, HasCursor ->\n             fieldWithCursor := Some (fname, exp)\n           | Lident fname, _ when isExprHole exp ->\n             fieldWithExprHole := Some (fname, exp)\n           | _ -> ());\n    let seenFields =\n      fields\n      |> List.filter_map (fun (fieldName, _f) ->\n             match fieldName with\n             | {Location.txt = Longident.Lident fieldName} -> Some fieldName\n             | _ -> None)\n    in\n    match (!fieldWithCursor, !fieldWithExprHole) with\n    | Some (fname, f), _ | None, Some (fname, f) -> (\n      match f.pexp_desc with\n      | Pexp_extension ({txt = \"rescript.exprhole\"}, _) ->\n        (* An expression hole means for example `{someField: <com>}`. We want to complete for the type of `someField`.  
*)\n        someIfHasCursor\n          (\"\", [Completable.NFollowRecordField {fieldName = fname}] @ exprPath)\n      | Pexp_ident {txt = Lident txt} when fname = txt ->\n        (* This is a heuristic for catching writing field names. ReScript has punning for record fields, but the AST doesn't,\n           so punning is represented as the record field name and identifier being the same: {someField}. *)\n        someIfHasCursor (txt, [Completable.NRecordBody {seenFields}] @ exprPath)\n      | Pexp_ident {txt = Lident txt} ->\n        (* A var means `{someField: s}` or similar. Complete for identifiers or values. *)\n        someIfHasCursor (txt, exprPath)\n      | _ ->\n        f\n        |> traverseExpr ~firstCharBeforeCursorNoWhite ~pos\n             ~exprPath:\n               ([Completable.NFollowRecordField {fieldName = fname}] @ exprPath)\n      )\n    | None, None -> (\n      if Debug.verbose () then (\n        Printf.printf \"[traverse_expr] No field with cursor and no expr hole.\\n\";\n\n        match firstCharBeforeCursorNoWhite with\n        | None -> ()\n        | Some c ->\n          Printf.printf \"[traverse_expr] firstCharBeforeCursorNoWhite: %c.\\n\" c);\n\n      (* Figure out if we're completing for a new field.\n         If the cursor is inside of the record body, but no field has the cursor,\n         and there's no pattern hole. Check the first char to the left of the cursor,\n         ignoring white space. If that's a comma or {, we assume you're completing for a new field,\n         since you're either between 2 fields (comma to the left) or at the start of the record ({). 
*)\n      match firstCharBeforeCursorNoWhite with\n      | Some (',' | '{') ->\n        someIfHasCursor (\"\", [Completable.NRecordBody {seenFields}] @ exprPath)\n      | _ -> None))\n  | Pexp_construct\n      ( {txt},\n        Some {pexp_loc; pexp_desc = Pexp_construct ({txt = Lident \"()\"}, _)} )\n    when locHasCursor pexp_loc ->\n    (* Empty payload with cursor, like: Test(<com>) *)\n    Some\n      ( \"\",\n        [\n          Completable.NVariantPayload\n            {constructorName = Utils.getUnqualifiedName txt; itemNum = 0};\n        ]\n        @ exprPath )\n  | Pexp_construct ({txt}, Some e)\n    when pos >= (e.pexp_loc |> Loc.end_)\n         && firstCharBeforeCursorNoWhite = Some ','\n         && isExprTuple e = false ->\n    (* Empty payload with trailing ',', like: Test(true, <com>) *)\n    Some\n      ( \"\",\n        [\n          Completable.NVariantPayload\n            {constructorName = Utils.getUnqualifiedName txt; itemNum = 1};\n        ]\n        @ exprPath )\n  | Pexp_construct ({txt}, Some {pexp_loc; pexp_desc = Pexp_tuple tupleItems})\n    when locHasCursor pexp_loc ->\n    tupleItems\n    |> traverseExprTupleItems ~firstCharBeforeCursorNoWhite ~pos\n         ~nextExprPath:(fun itemNum ->\n           [\n             Completable.NVariantPayload\n               {constructorName = Utils.getUnqualifiedName txt; itemNum};\n           ]\n           @ exprPath)\n         ~resultFromFoundItemNum:(fun itemNum ->\n           [\n             Completable.NVariantPayload\n               {\n                 constructorName = Utils.getUnqualifiedName txt;\n                 itemNum = itemNum + 1;\n               };\n           ]\n           @ exprPath)\n  | Pexp_construct ({txt}, Some p) when locHasCursor exp.pexp_loc ->\n    p\n    |> traverseExpr ~firstCharBeforeCursorNoWhite ~pos\n         ~exprPath:\n           ([\n              Completable.NVariantPayload\n                {constructorName = Utils.getUnqualifiedName txt; itemNum = 0};\n            ]\n 
          @ exprPath)\n  | Pexp_variant\n      (txt, Some {pexp_loc; pexp_desc = Pexp_construct ({txt = Lident \"()\"}, _)})\n    when locHasCursor pexp_loc ->\n    (* Empty payload with cursor, like: #test(<com>) *)\n    Some\n      ( \"\",\n        [Completable.NPolyvariantPayload {constructorName = txt; itemNum = 0}]\n        @ exprPath )\n  | Pexp_variant (txt, Some e)\n    when pos >= (e.pexp_loc |> Loc.end_)\n         && firstCharBeforeCursorNoWhite = Some ','\n         && isExprTuple e = false ->\n    (* Empty payload with trailing ',', like: #test(true, <com>) *)\n    Some\n      ( \"\",\n        [Completable.NPolyvariantPayload {constructorName = txt; itemNum = 1}]\n        @ exprPath )\n  | Pexp_variant (txt, Some {pexp_loc; pexp_desc = Pexp_tuple tupleItems})\n    when locHasCursor pexp_loc ->\n    tupleItems\n    |> traverseExprTupleItems ~firstCharBeforeCursorNoWhite ~pos\n         ~nextExprPath:(fun itemNum ->\n           [Completable.NPolyvariantPayload {constructorName = txt; itemNum}]\n           @ exprPath)\n         ~resultFromFoundItemNum:(fun itemNum ->\n           [\n             Completable.NPolyvariantPayload\n               {constructorName = txt; itemNum = itemNum + 1};\n           ]\n           @ exprPath)\n  | Pexp_variant (txt, Some p) when locHasCursor exp.pexp_loc ->\n    p\n    |> traverseExpr ~firstCharBeforeCursorNoWhite ~pos\n         ~exprPath:\n           ([\n              Completable.NPolyvariantPayload\n                {constructorName = txt; itemNum = 0};\n            ]\n           @ exprPath)\n  | _ -> None\n\nand traverseExprTupleItems tupleItems ~nextExprPath ~resultFromFoundItemNum ~pos\n    ~firstCharBeforeCursorNoWhite =\n  let itemNum = ref (-1) in\n  let itemWithCursor =\n    tupleItems\n    |> List.find_map (fun e ->\n           itemNum := !itemNum + 1;\n           e\n           |> traverseExpr ~exprPath:(nextExprPath !itemNum)\n                ~firstCharBeforeCursorNoWhite ~pos)\n  in\n  match (itemWithCursor, 
firstCharBeforeCursorNoWhite) with\n  | None, Some ',' ->\n    (* No tuple item has the cursor, but there's a comma before the cursor.\n       Figure out what arg we're trying to complete. Example: (true, <com>, None) *)\n    let posNum = ref (-1) in\n    tupleItems\n    |> List.iteri (fun index e ->\n           if pos >= Loc.start e.Parsetree.pexp_loc then posNum := index);\n    if !posNum > -1 then Some (\"\", resultFromFoundItemNum !posNum) else None\n  | v, _ -> v\n\nlet prettyPrintFnTemplateArgName ?currentIndex ~env ~full\n    (argTyp : Types.type_expr) =\n  let indexText =\n    match currentIndex with\n    | None -> \"\"\n    | Some i -> string_of_int i\n  in\n  let defaultVarName = \"v\" ^ indexText in\n  let argTyp, suffix, _env =\n    TypeUtils.digToRelevantTemplateNameType ~env ~package:full.package argTyp\n  in\n  match argTyp |> TypeUtils.pathFromTypeExpr with\n  | None -> defaultVarName\n  | Some p -> (\n    let trailingElementsOfPath =\n      p |> Utils.expandPath |> List.rev |> Utils.lastElements\n    in\n    match trailingElementsOfPath with\n    | [] | [\"t\"] -> defaultVarName\n    | [\"unit\"] -> \"()\"\n    (* Special treatment for JsxEvent, since that's a common enough thing\n       used in event handlers. *)\n    | [\"JsxEvent\"; \"synthetic\"] -> \"event\"\n    | [\"synthetic\"] -> \"event\"\n    (* Ignore `t` types, and go for its module name instead. *)\n    | [someName; \"t\"] | [_; someName] | [someName] -> (\n      match someName with\n      | \"string\" | \"int\" | \"float\" | \"array\" | \"option\" | \"bool\" ->\n        defaultVarName\n      | someName when String.length someName < 30 ->\n        if someName = \"synthetic\" then\n          Printf.printf \"synthetic! %s\\n\"\n            (trailingElementsOfPath |> SharedTypes.ident);\n        (* We cap how long the name can be, so we don't end up with super\n           long type names. 
*)\n        (someName |> Utils.lowercaseFirstChar) ^ suffix\n      | _ -> defaultVarName)\n    | _ -> defaultVarName)\n\nlet completeConstructorPayload ~posBeforeCursor ~firstCharBeforeCursorNoWhite\n    (constructorLid : Longident.t Location.loc) expr =\n  match\n    traverseExpr expr ~exprPath:[] ~pos:posBeforeCursor\n      ~firstCharBeforeCursorNoWhite\n  with\n  | None -> None\n  | Some (prefix, nested) ->\n    (* The nested path must start with the constructor name found, plus\n       the target argument number for the constructor. We translate to\n       that here, because we need to account for multi arg constructors\n       being represented as tuples. *)\n    let nested =\n      match List.rev nested with\n      | Completable.NTupleItem {itemNum} :: rest ->\n        [\n          Completable.NVariantPayload\n            {constructorName = Longident.last constructorLid.txt; itemNum};\n        ]\n        @ rest\n      | nested ->\n        [\n          Completable.NVariantPayload\n            {constructorName = Longident.last constructorLid.txt; itemNum = 0};\n        ]\n        @ nested\n    in\n    let variantCtxPath =\n      Completable.CTypeAtPos\n        {constructorLid.loc with loc_start = constructorLid.loc.loc_end}\n    in\n    Some\n      (Completable.Cexpression {contextPath = variantCtxPath; prefix; nested})\n"
  },
  {
    "path": "analysis/src/CompletionFrontEnd.ml",
    "content": "open SharedTypes\n\nlet findArgCompletables ~(args : arg list) ~endPos ~posBeforeCursor\n    ~(contextPath : Completable.contextPath) ~posAfterFunExpr\n    ~firstCharBeforeCursorNoWhite ~charBeforeCursor ~isPipedExpr =\n  let fnHasCursor =\n    posAfterFunExpr <= posBeforeCursor && posBeforeCursor < endPos\n  in\n  let allNames =\n    List.fold_right\n      (fun arg allLabels ->\n        match arg with\n        | {label = Some labelled} -> labelled.name :: allLabels\n        | {label = None} -> allLabels)\n      args []\n  in\n  let unlabelledCount = ref (if isPipedExpr then 1 else 0) in\n  let someArgHadEmptyExprLoc = ref false in\n  let rec loop args =\n    match args with\n    | {label = Some labelled; exp} :: rest ->\n      if\n        labelled.posStart <= posBeforeCursor\n        && posBeforeCursor < labelled.posEnd\n      then (\n        if Debug.verbose () then\n          print_endline \"[findArgCompletables] Completing named arg #2\";\n        Some (Completable.CnamedArg (contextPath, labelled.name, allNames)))\n      else if exp.pexp_loc |> Loc.hasPos ~pos:posBeforeCursor then (\n        if Debug.verbose () then\n          print_endline\n            \"[findArgCompletables] Completing in the assignment of labelled \\\n             argument\";\n        match\n          CompletionExpressions.traverseExpr exp ~exprPath:[]\n            ~pos:posBeforeCursor ~firstCharBeforeCursorNoWhite\n        with\n        | None -> None\n        | Some (prefix, nested) ->\n          if Debug.verbose () then\n            print_endline\n              \"[findArgCompletables] Completing for labelled argument value\";\n          Some\n            (Cexpression\n               {\n                 contextPath =\n                   CArgument\n                     {\n                       functionContextPath = contextPath;\n                       argumentLabel = Labelled labelled.name;\n                     };\n                 prefix;\n                 nested = 
List.rev nested;\n               }))\n      else if CompletionExpressions.isExprHole exp then (\n        if Debug.verbose () then\n          print_endline \"[findArgCompletables] found exprhole\";\n        Some\n          (Cexpression\n             {\n               contextPath =\n                 CArgument\n                   {\n                     functionContextPath = contextPath;\n                     argumentLabel = Labelled labelled.name;\n                   };\n               prefix = \"\";\n               nested = [];\n             }))\n      else loop rest\n    | {label = None; exp} :: rest ->\n      if Debug.verbose () then\n        Printf.printf \"[findArgCompletable] unlabelled arg expr is: %s \\n\"\n          (DumpAst.printExprItem ~pos:posBeforeCursor ~indentation:0 exp);\n\n      (* Track whether there was an arg with an empty loc (indicates parser error)*)\n      if CursorPosition.locIsEmpty exp.pexp_loc ~pos:posBeforeCursor then\n        someArgHadEmptyExprLoc := true;\n\n      if Res_parsetree_viewer.is_template_literal exp then None\n      else if exp.pexp_loc |> Loc.hasPos ~pos:posBeforeCursor then (\n        if Debug.verbose () then\n          print_endline\n            \"[findArgCompletables] Completing in an unlabelled argument\";\n        match\n          CompletionExpressions.traverseExpr exp ~pos:posBeforeCursor\n            ~firstCharBeforeCursorNoWhite ~exprPath:[]\n        with\n        | None ->\n          if Debug.verbose () then\n            print_endline\n              \"[findArgCompletables] found nothing when traversing expr\";\n          None\n        | Some (prefix, nested) ->\n          if Debug.verbose () then\n            print_endline\n              \"[findArgCompletables] completing for unlabelled argument #2\";\n          Some\n            (Cexpression\n               {\n                 contextPath =\n                   CArgument\n                     {\n                       functionContextPath = contextPath;\n         
              argumentLabel =\n                         Unlabelled {argumentPosition = !unlabelledCount};\n                     };\n                 prefix;\n                 nested = List.rev nested;\n               }))\n      else if CompletionExpressions.isExprHole exp then (\n        if Debug.verbose () then\n          print_endline \"[findArgCompletables] found an exprhole #2\";\n        Some\n          (Cexpression\n             {\n               contextPath =\n                 CArgument\n                   {\n                     functionContextPath = contextPath;\n                     argumentLabel =\n                       Unlabelled {argumentPosition = !unlabelledCount};\n                   };\n               prefix = \"\";\n               nested = [];\n             }))\n      else (\n        unlabelledCount := !unlabelledCount + 1;\n        loop rest)\n    | [] ->\n      let hadEmptyExpLoc = !someArgHadEmptyExprLoc in\n      if fnHasCursor then (\n        if Debug.verbose () then\n          print_endline \"[findArgCompletables] Function has cursor\";\n        match charBeforeCursor with\n        | Some '~' ->\n          if Debug.verbose () then\n            print_endline \"[findArgCompletables] '~' is before cursor\";\n          Some (Completable.CnamedArg (contextPath, \"\", allNames))\n        | _ when hadEmptyExpLoc ->\n          (* Special case: `Console.log(arr->)`, completing on the pipe.\n             This match branch happens when the fn call has the cursor and:\n             - there's no argument label or expr that has the cursor\n             - there's an argument expression with an empty loc (indicates parser error)\n\n             In that case, it's safer to not complete for the unlabelled function\n             argument (which we do otherwise), and instead not complete and let the\n             completion engine move into the arguments one by one instead to check\n             for completions.\n\n             This can be handled in a more 
robust way in a future refactor of the\n             completion engine logic. *)\n          if Debug.verbose () then\n            print_endline\n              \"[findArgCompletables] skipping completion in fn call because \\\n               arg had empty loc\";\n          None\n        | _\n          when firstCharBeforeCursorNoWhite = Some '('\n               || firstCharBeforeCursorNoWhite = Some ',' ->\n          (* Checks to ensure that completing for empty unlabelled arg makes\n             sense by checking what's left of the cursor. *)\n          if Debug.verbose () then\n            Printf.printf\n              \"[findArgCompletables] Completing for unlabelled argument value \\\n               because nothing matched and is not labelled argument name \\\n               completion. isPipedExpr: %b\\n\"\n              isPipedExpr;\n          Some\n            (Cexpression\n               {\n                 contextPath =\n                   CArgument\n                     {\n                       functionContextPath = contextPath;\n                       argumentLabel =\n                         Unlabelled {argumentPosition = !unlabelledCount};\n                     };\n                 prefix = \"\";\n                 nested = [];\n               })\n        | _ -> None)\n      else None\n  in\n  match args with\n  (* Special handling for empty fn calls, e.g. 
`let _ = someFn(<com>)` *)\n  | [\n   {label = None; exp = {pexp_desc = Pexp_construct ({txt = Lident \"()\"}, _)}};\n  ]\n    when fnHasCursor ->\n    if Debug.verbose () then\n      print_endline \"[findArgCompletables] Completing for unit argument\";\n    Some\n      (Completable.Cexpression\n         {\n           contextPath =\n             CArgument\n               {\n                 functionContextPath = contextPath;\n                 argumentLabel =\n                   Unlabelled\n                     {argumentPosition = (if isPipedExpr then 1 else 0)};\n               };\n           prefix = \"\";\n           nested = [];\n         })\n  | _ -> loop args\n\nlet rec exprToContextPathInner ~(inJsxContext : bool) (e : Parsetree.expression)\n    =\n  match e.pexp_desc with\n  | Pexp_constant (Pconst_string _) -> Some Completable.CPString\n  | Pexp_constant (Pconst_integer _) -> Some CPInt\n  | Pexp_constant (Pconst_float _) -> Some CPFloat\n  | Pexp_construct ({txt = Lident (\"true\" | \"false\")}, None) -> Some CPBool\n  | Pexp_array exprs ->\n    Some\n      (CPArray\n         (match exprs with\n         | [] -> None\n         | exp :: _ -> exprToContextPath ~inJsxContext exp))\n  | Pexp_ident {txt = Lident (\"|.\" | \"|.u\")} -> None\n  | Pexp_ident {txt; loc} ->\n    Some\n      (CPId {path = Utils.flattenLongIdent txt; completionContext = Value; loc})\n  | Pexp_field (e1, {txt = Lident name}) -> (\n    match exprToContextPath ~inJsxContext e1 with\n    | Some contextPath ->\n      Some\n        (CPField\n           {\n             contextPath;\n             fieldName = name;\n             posOfDot = None;\n             exprLoc = e1.pexp_loc;\n             inJsx = inJsxContext;\n           })\n    | _ -> None)\n  | Pexp_field (e1, {loc; txt = Ldot (lid, name)}) ->\n    (* Case x.M.field ignore the x part *)\n    Some\n      (CPField\n         {\n           contextPath =\n             CPId\n               {\n                 path = Utils.flattenLongIdent 
lid;\n                 completionContext = Module;\n                 loc;\n               };\n           fieldName = name;\n           posOfDot = None;\n           exprLoc = e1.pexp_loc;\n           inJsx = inJsxContext;\n         })\n  | Pexp_send (e1, {txt}) -> (\n    match exprToContextPath ~inJsxContext e1 with\n    | None -> None\n    | Some contexPath -> Some (CPObj (contexPath, txt)))\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\")}},\n        [\n          (_, lhs);\n          (_, {pexp_desc = Pexp_apply (d, args); pexp_loc; pexp_attributes});\n        ] ) ->\n    (* Transform away pipe with apply call *)\n    exprToContextPath ~inJsxContext\n      {\n        pexp_desc = Pexp_apply (d, (Nolabel, lhs) :: args);\n        pexp_loc;\n        pexp_attributes;\n      }\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\")}},\n        [(_, lhs); (_, {pexp_desc = Pexp_ident id; pexp_loc; pexp_attributes})]\n      ) ->\n    (* Transform away pipe with identifier *)\n    exprToContextPath ~inJsxContext\n      {\n        pexp_desc =\n          Pexp_apply\n            ( {pexp_desc = Pexp_ident id; pexp_loc; pexp_attributes},\n              [(Nolabel, lhs)] );\n        pexp_loc;\n        pexp_attributes;\n      }\n  | Pexp_apply (e1, args) -> (\n    match exprToContextPath ~inJsxContext e1 with\n    | None -> None\n    | Some contexPath -> Some (CPApply (contexPath, args |> List.map fst)))\n  | Pexp_tuple exprs ->\n    let exprsAsContextPaths =\n      exprs |> List.filter_map (exprToContextPath ~inJsxContext)\n    in\n    if List.length exprs = List.length exprsAsContextPaths then\n      Some (CTuple exprsAsContextPaths)\n    else None\n  | _ -> None\n\nand exprToContextPath ~(inJsxContext : bool) (e : Parsetree.expression) =\n  match\n    ( Res_parsetree_viewer.has_await_attribute e.pexp_attributes,\n      exprToContextPathInner ~inJsxContext e )\n  with\n  | true, Some ctxPath -> Some (CPAwait ctxPath)\n  | 
false, Some ctxPath -> Some ctxPath\n  | _, None -> None\n\nlet completePipeChain ~(inJsxContext : bool) (exp : Parsetree.expression) =\n  (* Complete the end of pipe chains by reconstructing the pipe chain as a single pipe,\n     so it can be completed.\n     Example:\n      someArray->Js.Array2.filter(v => v > 10)->Js.Array2.map(v => v + 2)->\n        will complete as:\n      Js.Array2.map(someArray->Js.Array2.filter(v => v > 10), v => v + 2)->\n  *)\n  match exp.pexp_desc with\n  (* When the left side of the pipe we're completing is a function application.\n     Example: someArray->Js.Array2.map(v => v + 2)-> *)\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\")}},\n        [_; (_, {pexp_desc = Pexp_apply (d, _)})] ) ->\n    exprToContextPath ~inJsxContext exp\n    |> Option.map (fun ctxPath -> (ctxPath, d.pexp_loc))\n    (* When the left side of the pipe we're completing is an identifier application.\n       Example: someArray->filterAllTheGoodStuff-> *)\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\")}},\n        [_; (_, {pexp_desc = Pexp_ident _; pexp_loc})] ) ->\n    exprToContextPath ~inJsxContext exp\n    |> Option.map (fun ctxPath -> (ctxPath, pexp_loc))\n  | _ -> None\n\nlet completionWithParser1 ~currentFile ~debug ~offset ~path ~posCursor\n    ?findThisExprLoc text =\n  let offsetNoWhite = Utils.skipWhite text (offset - 1) in\n  let posNoWhite =\n    let line, col = posCursor in\n    (line, max 0 col - offset + offsetNoWhite)\n  in\n  (* Identifies the first character before the cursor that's not white space.\n     Should be used very sparingly, but can be used to drive completion triggering\n     in scenarios where the parser eats things we'd need to complete.\n     Example: let {whatever,     <cursor>}, char is ','. 
*)\n  let firstCharBeforeCursorNoWhite =\n    if offsetNoWhite < String.length text && offsetNoWhite >= 0 then\n      Some text.[offsetNoWhite]\n    else None\n  in\n  let posOfDot = Pos.posOfDot text ~pos:posCursor ~offset in\n  let charAtCursor =\n    if offset < String.length text then text.[offset] else '\\n'\n  in\n  let posBeforeCursor = Pos.posBeforeCursor posCursor in\n  let charBeforeCursor, blankAfterCursor =\n    match Pos.positionToOffset text posCursor with\n    | Some offset when offset > 0 -> (\n      let charBeforeCursor = text.[offset - 1] in\n      match charAtCursor with\n      | ' ' | '\\t' | '\\r' | '\\n' ->\n        (Some charBeforeCursor, Some charBeforeCursor)\n      | _ -> (Some charBeforeCursor, None))\n    | _ -> (None, None)\n  in\n  let flattenLidCheckDot ?(jsx = true) (lid : Longident.t Location.loc) =\n    (* Flatten an identifier keeping track of whether the current cursor\n       is after a \".\" in the id followed by a blank character.\n       In that case, cut the path after \".\". *)\n    let cutAtOffset =\n      let idStart = Loc.start lid.loc in\n      match blankAfterCursor with\n      | Some '.' 
->\n        if fst posBeforeCursor = fst idStart then\n          Some (snd posBeforeCursor - snd idStart)\n        else None\n      | _ -> None\n    in\n    Utils.flattenLongIdent ~cutAtOffset ~jsx lid.txt\n  in\n\n  let currentCtxPath = ref None in\n  let processingFun = ref None in\n  let setCurrentCtxPath ctxPath =\n    if !Cfg.debugFollowCtxPath then\n      Printf.printf \"setting current ctxPath: %s\\n\"\n        (Completable.contextPathToString ctxPath);\n    currentCtxPath := Some ctxPath\n  in\n  let resetCurrentCtxPath ctxPath =\n    (match (!currentCtxPath, ctxPath) with\n    | None, None -> ()\n    | _ ->\n      if !Cfg.debugFollowCtxPath then\n        Printf.printf \"resetting current ctxPath to: %s\\n\"\n          (match ctxPath with\n          | None -> \"None\"\n          | Some ctxPath -> Completable.contextPathToString ctxPath));\n    currentCtxPath := ctxPath\n  in\n\n  let found = ref false in\n  let result = ref None in\n  let scope = ref (Scope.create ()) in\n  let setResultOpt x =\n    if !result = None then\n      match x with\n      | None ->\n        if Debug.verbose () then\n          print_endline\n            \"[set_result] did not set new result because result already was set\";\n        ()\n      | Some x ->\n        if Debug.verbose () then\n          Printf.printf \"[set_result] set new result to %s\\n\"\n            (Completable.toString x);\n        result := Some (x, !scope)\n  in\n  let inJsxContext = ref false in\n  let setResult x = setResultOpt (Some x) in\n  let scopeValueDescription (vd : Parsetree.value_description) =\n    scope :=\n      !scope |> Scope.addValue ~name:vd.pval_name.txt ~loc:vd.pval_name.loc\n  in\n  let rec scopePattern ?contextPath\n      ?(patternPath : Completable.nestedPath list = [])\n      (pat : Parsetree.pattern) =\n    let contextPathToSave =\n      match (contextPath, patternPath) with\n      | maybeContextPath, [] -> maybeContextPath\n      | Some contextPath, patternPath ->\n        Some\n       
   (Completable.CPatternPath\n             {rootCtxPath = contextPath; nested = List.rev patternPath})\n      | _ -> None\n    in\n    match pat.ppat_desc with\n    | Ppat_any -> ()\n    | Ppat_var {txt; loc} ->\n      scope :=\n        !scope |> Scope.addValue ~name:txt ~loc ?contextPath:contextPathToSave\n    | Ppat_alias (p, asA) ->\n      scopePattern p ~patternPath ?contextPath;\n      let ctxPath =\n        if contextPathToSave = None then\n          match p with\n          | {ppat_desc = Ppat_var {txt; loc}} ->\n            Some\n              (Completable.CPId {path = [txt]; completionContext = Value; loc})\n          | _ -> None\n        else None\n      in\n      scope :=\n        !scope |> Scope.addValue ~name:asA.txt ~loc:asA.loc ?contextPath:ctxPath\n    | Ppat_constant _ | Ppat_interval _ -> ()\n    | Ppat_tuple pl ->\n      pl\n      |> List.iteri (fun index p ->\n             scopePattern p\n               ~patternPath:(NTupleItem {itemNum = index} :: patternPath)\n               ?contextPath)\n    | Ppat_construct (_, None) -> ()\n    | Ppat_construct ({txt}, Some {ppat_desc = Ppat_tuple pl}) ->\n      pl\n      |> List.iteri (fun index p ->\n             scopePattern p\n               ~patternPath:\n                 (NVariantPayload\n                    {\n                      itemNum = index;\n                      constructorName = Utils.getUnqualifiedName txt;\n                    }\n                 :: patternPath)\n               ?contextPath)\n    | Ppat_construct ({txt}, Some p) ->\n      scopePattern\n        ~patternPath:\n          (NVariantPayload\n             {itemNum = 0; constructorName = Utils.getUnqualifiedName txt}\n          :: patternPath)\n        ?contextPath p\n    | Ppat_variant (_, None) -> ()\n    | Ppat_variant (txt, Some {ppat_desc = Ppat_tuple pl}) ->\n      pl\n      |> List.iteri (fun index p ->\n             scopePattern p\n               ~patternPath:\n                 (NPolyvariantPayload {itemNum = index; 
constructorName = txt}\n                 :: patternPath)\n               ?contextPath)\n    | Ppat_variant (txt, Some p) ->\n      scopePattern\n        ~patternPath:\n          (NPolyvariantPayload {itemNum = 0; constructorName = txt}\n          :: patternPath)\n        ?contextPath p\n    | Ppat_record (fields, _) ->\n      fields\n      |> List.iter (fun (fname, p) ->\n             match fname with\n             | {Location.txt = Longident.Lident fname} ->\n               scopePattern\n                 ~patternPath:\n                   (Completable.NFollowRecordField {fieldName = fname}\n                   :: patternPath)\n                 ?contextPath p\n             | _ -> ())\n    | Ppat_array pl ->\n      pl\n      |> List.iter\n           (scopePattern ~patternPath:(NArray :: patternPath) ?contextPath)\n    | Ppat_or (p1, _) -> scopePattern ~patternPath ?contextPath p1\n    | Ppat_constraint (p, coreType) ->\n      scopePattern ~patternPath\n        ?contextPath:(TypeUtils.contextPathFromCoreType coreType)\n        p\n    | Ppat_type _ -> ()\n    | Ppat_lazy p -> scopePattern ~patternPath ?contextPath p\n    | Ppat_unpack {txt; loc} ->\n      scope :=\n        !scope |> Scope.addValue ~name:txt ~loc ?contextPath:contextPathToSave\n    | Ppat_exception p -> scopePattern ~patternPath ?contextPath p\n    | Ppat_extension _ -> ()\n    | Ppat_open (_, p) -> scopePattern ~patternPath ?contextPath p\n  in\n  let locHasCursor = CursorPosition.locHasCursor ~pos:posBeforeCursor in\n  let locIsEmpty = CursorPosition.locIsEmpty ~pos:posBeforeCursor in\n  let completePattern ?contextPath (pat : Parsetree.pattern) =\n    match\n      ( pat\n        |> CompletionPatterns.traversePattern ~patternPath:[] ~locHasCursor\n             ~firstCharBeforeCursorNoWhite ~posBeforeCursor,\n        contextPath )\n    with\n    | Some (prefix, nestedPattern), Some ctxPath ->\n      if Debug.verbose () then\n        Printf.printf \"[completePattern] found pattern that can be 
completed\\n\";\n      setResult\n        (Completable.Cpattern\n           {\n             contextPath = ctxPath;\n             prefix;\n             nested = List.rev nestedPattern;\n             fallback = None;\n             patternMode = Default;\n           })\n    | _ -> ()\n  in\n  let scopeValueBinding (vb : Parsetree.value_binding) =\n    let contextPath =\n      (* Pipe chains get special treatment here, because when assigning values\n         we want the return of the entire pipe chain as a function call, rather\n         than as a pipe completion call. *)\n      match completePipeChain ~inJsxContext:!inJsxContext vb.pvb_expr with\n      | Some (ctxPath, _) -> Some ctxPath\n      | None -> exprToContextPath ~inJsxContext:!inJsxContext vb.pvb_expr\n    in\n    scopePattern ?contextPath vb.pvb_pat\n  in\n  let scopeTypeKind (tk : Parsetree.type_kind) =\n    match tk with\n    | Ptype_variant constrDecls ->\n      constrDecls\n      |> List.iter (fun (cd : Parsetree.constructor_declaration) ->\n             scope :=\n               !scope\n               |> Scope.addConstructor ~name:cd.pcd_name.txt ~loc:cd.pcd_loc)\n    | Ptype_record labelDecls ->\n      labelDecls\n      |> List.iter (fun (ld : Parsetree.label_declaration) ->\n             scope :=\n               !scope |> Scope.addField ~name:ld.pld_name.txt ~loc:ld.pld_loc)\n    | _ -> ()\n  in\n  let scopeTypeDeclaration (td : Parsetree.type_declaration) =\n    scope :=\n      !scope |> Scope.addType ~name:td.ptype_name.txt ~loc:td.ptype_name.loc;\n    scopeTypeKind td.ptype_kind\n  in\n  let scopeModuleBinding (mb : Parsetree.module_binding) =\n    scope :=\n      !scope |> Scope.addModule ~name:mb.pmb_name.txt ~loc:mb.pmb_name.loc\n  in\n  let scopeModuleDeclaration (md : Parsetree.module_declaration) =\n    scope :=\n      !scope |> Scope.addModule ~name:md.pmd_name.txt ~loc:md.pmd_name.loc\n  in\n\n  (* Identifies expressions where we can do typed pattern or expr completion. 
*)\n  let typedCompletionExpr (exp : Parsetree.expression) =\n    let debugTypedCompletionExpr = false in\n    if exp.pexp_loc |> CursorPosition.locHasCursor ~pos:posBeforeCursor then (\n      if Debug.verbose () && debugTypedCompletionExpr then\n        print_endline \"[typedCompletionExpr] Has cursor\";\n      match exp.pexp_desc with\n      (* No cases means there's no `|` yet in the switch *)\n      | Pexp_match (({pexp_desc = Pexp_ident _} as expr), []) ->\n        if Debug.verbose () && debugTypedCompletionExpr then\n          print_endline \"[typedCompletionExpr] No cases, with ident\";\n        if locHasCursor expr.pexp_loc then (\n          if Debug.verbose () && debugTypedCompletionExpr then\n            print_endline \"[typedCompletionExpr] No cases - has cursor\";\n          (* We can do exhaustive switch completion if this is an ident we can\n             complete from. *)\n          match exprToContextPath ~inJsxContext:!inJsxContext expr with\n          | None -> ()\n          | Some contextPath ->\n            setResult (CexhaustiveSwitch {contextPath; exprLoc = exp.pexp_loc}))\n      | Pexp_match (_expr, []) ->\n        (* switch x { } *)\n        if Debug.verbose () && debugTypedCompletionExpr then\n          print_endline \"[typedCompletionExpr] No cases, rest\";\n        ()\n      | Pexp_match (expr, [{pc_lhs; pc_rhs}])\n        when locHasCursor expr.pexp_loc\n             && CompletionExpressions.isExprHole pc_rhs\n             && CompletionPatterns.isPatternHole pc_lhs ->\n        (* switch x { | } when we're in the switch expr itself. 
*)\n        if Debug.verbose () && debugTypedCompletionExpr then\n          print_endline\n            \"[typedCompletionExpr] No cases (expr and pat holes), rest\";\n        ()\n      | Pexp_match\n          ( exp,\n            [\n              {\n                pc_lhs =\n                  {\n                    ppat_desc =\n                      Ppat_extension ({txt = \"rescript.patternhole\"}, _);\n                  };\n              };\n            ] ) -> (\n        (* A single case that's a pattern hole typically means `switch x { | }`. Complete as the pattern itself with nothing nested. *)\n        match exprToContextPath ~inJsxContext:!inJsxContext exp with\n        | None -> ()\n        | Some ctxPath ->\n          setResult\n            (Completable.Cpattern\n               {\n                 contextPath = ctxPath;\n                 nested = [];\n                 prefix = \"\";\n                 fallback = None;\n                 patternMode = Default;\n               }))\n      | Pexp_match (exp, cases) -> (\n        if Debug.verbose () && debugTypedCompletionExpr then\n          print_endline \"[typedCompletionExpr] Has cases\";\n        (* If there's more than one case, or the case isn't a pattern hole, figure out if we're completing another\n           broken parser case (`switch x { | true => () | <com> }` for example). 
*)\n        match exp |> exprToContextPath ~inJsxContext:!inJsxContext with\n        | None ->\n          if Debug.verbose () && debugTypedCompletionExpr then\n            print_endline \"[typedCompletionExpr] Has cases - no ctx path\"\n        | Some ctxPath -> (\n          if Debug.verbose () && debugTypedCompletionExpr then\n            print_endline \"[typedCompletionExpr] Has cases - has ctx path\";\n          let hasCaseWithCursor =\n            cases\n            |> List.find_opt (fun case ->\n                   locHasCursor case.Parsetree.pc_lhs.ppat_loc)\n            |> Option.is_some\n          in\n          let hasCaseWithEmptyLoc =\n            cases\n            |> List.find_opt (fun case ->\n                   locIsEmpty case.Parsetree.pc_lhs.ppat_loc)\n            |> Option.is_some\n          in\n          if Debug.verbose () && debugTypedCompletionExpr then\n            Printf.printf\n              \"[typedCompletionExpr] Has cases - has ctx path - \\\n               hasCaseWithEmptyLoc: %b, hasCaseWithCursor: %b\\n\"\n              hasCaseWithEmptyLoc hasCaseWithCursor;\n          match (hasCaseWithEmptyLoc, hasCaseWithCursor) with\n          | _, true ->\n            (* Always continue if there's a case with the cursor *)\n            ()\n          | true, false ->\n            (* If there's no case with the cursor, but a broken parser case, complete for the top level. 
*)\n            setResult\n              (Completable.Cpattern\n                 {\n                   contextPath = ctxPath;\n                   nested = [];\n                   prefix = \"\";\n                   fallback = None;\n                   patternMode = Default;\n                 })\n          | false, false -> ()))\n      | _ -> ())\n  in\n  let structure (iterator : Ast_iterator.iterator)\n      (structure : Parsetree.structure) =\n    let oldScope = !scope in\n    Ast_iterator.default_iterator.structure iterator structure;\n    scope := oldScope\n  in\n  let structure_item (iterator : Ast_iterator.iterator)\n      (item : Parsetree.structure_item) =\n    let processed = ref false in\n    (match item.pstr_desc with\n    | Pstr_open {popen_lid} ->\n      scope := !scope |> Scope.addOpen ~lid:popen_lid.txt\n    | Pstr_primitive vd -> scopeValueDescription vd\n    | Pstr_value (recFlag, bindings) ->\n      if recFlag = Recursive then bindings |> List.iter scopeValueBinding;\n      bindings |> List.iter (fun vb -> iterator.value_binding iterator vb);\n      if recFlag = Nonrecursive then bindings |> List.iter scopeValueBinding;\n      processed := true\n    | Pstr_type (recFlag, decls) ->\n      if recFlag = Recursive then decls |> List.iter scopeTypeDeclaration;\n      decls |> List.iter (fun td -> iterator.type_declaration iterator td);\n      if recFlag = Nonrecursive then decls |> List.iter scopeTypeDeclaration;\n      processed := true\n    | Pstr_module mb ->\n      iterator.module_binding iterator mb;\n      scopeModuleBinding mb;\n      processed := true\n    | Pstr_recmodule mbs ->\n      mbs |> List.iter scopeModuleBinding;\n      mbs |> List.iter (fun b -> iterator.module_binding iterator b);\n      processed := true\n    | _ -> ());\n    if not !processed then\n      Ast_iterator.default_iterator.structure_item iterator item\n  in\n  let value_binding (iterator : Ast_iterator.iterator)\n      (value_binding : Parsetree.value_binding) =\n    let 
oldInJsxContext = !inJsxContext in\n    if Utils.isJsxComponent value_binding then inJsxContext := true;\n    (match value_binding with\n    | {pvb_pat = {ppat_desc = Ppat_constraint (_pat, coreType)}; pvb_expr}\n      when locHasCursor pvb_expr.pexp_loc -> (\n      (* Expression with derivable type annotation.\n         E.g: let x: someRecord = {<com>} *)\n      match\n        ( TypeUtils.contextPathFromCoreType coreType,\n          pvb_expr\n          |> CompletionExpressions.traverseExpr ~exprPath:[]\n               ~pos:posBeforeCursor ~firstCharBeforeCursorNoWhite )\n      with\n      | Some ctxPath, Some (prefix, nested) ->\n        setResult\n          (Completable.Cexpression\n             {contextPath = ctxPath; prefix; nested = List.rev nested})\n      | _ -> ())\n    | {pvb_pat = {ppat_desc = Ppat_var {loc}}; pvb_expr}\n      when locHasCursor pvb_expr.pexp_loc -> (\n      (* Expression without a type annotation. We can complete this if this\n         has compiled previously and there's a type available for the identifier itself.\n         This is nice because the type is assigned even if the assignment isn't complete.\n\n         E.g: let x = {name: \"name\", <com>}, when `x` has compiled. *)\n      match\n        pvb_expr\n        |> CompletionExpressions.traverseExpr ~exprPath:[] ~pos:posBeforeCursor\n             ~firstCharBeforeCursorNoWhite\n      with\n      | Some (prefix, nested) ->\n        (* This completion should be low prio, so let any deeper completion\n           hit first, and only set this TypeAtPos completion if nothing else\n           here hit. 
*)\n        Ast_iterator.default_iterator.value_binding iterator value_binding;\n        setResult\n          (Completable.Cexpression\n             {contextPath = CTypeAtPos loc; prefix; nested = List.rev nested})\n      | _ -> ())\n    | {\n     pvb_pat = {ppat_desc = Ppat_constraint (_pat, coreType); ppat_loc};\n     pvb_expr;\n    }\n      when locHasCursor value_binding.pvb_loc\n           && locHasCursor ppat_loc = false\n           && locHasCursor pvb_expr.pexp_loc = false\n           && CompletionExpressions.isExprHole pvb_expr -> (\n      (* Expression with derivable type annotation, when the expression is empty (expr hole).\n         E.g: let x: someRecord = <com> *)\n      match TypeUtils.contextPathFromCoreType coreType with\n      | Some ctxPath ->\n        setResult\n          (Completable.Cexpression\n             {contextPath = ctxPath; prefix = \"\"; nested = []})\n      | _ -> ())\n    | {pvb_pat; pvb_expr} when locHasCursor pvb_pat.ppat_loc -> (\n      (* Completing a destructuring.\n         E.g: let {<com>} = someVar *)\n      match\n        ( pvb_pat\n          |> CompletionPatterns.traversePattern ~patternPath:[] ~locHasCursor\n               ~firstCharBeforeCursorNoWhite ~posBeforeCursor,\n          exprToContextPath ~inJsxContext:!inJsxContext pvb_expr )\n      with\n      | Some (prefix, nested), Some ctxPath ->\n        setResult\n          (Completable.Cpattern\n             {\n               contextPath = ctxPath;\n               prefix;\n               nested = List.rev nested;\n               fallback = None;\n               patternMode = Destructuring;\n             })\n      | _ -> ())\n    | _ -> ());\n    Ast_iterator.default_iterator.value_binding iterator value_binding;\n    inJsxContext := oldInJsxContext\n  in\n  let signature (iterator : Ast_iterator.iterator)\n      (signature : Parsetree.signature) =\n    let oldScope = !scope in\n    Ast_iterator.default_iterator.signature iterator signature;\n    scope := oldScope\n  in\n 
 let signature_item (iterator : Ast_iterator.iterator)\n      (item : Parsetree.signature_item) =\n    let processed = ref false in\n    (match item.psig_desc with\n    | Psig_open {popen_lid} ->\n      scope := !scope |> Scope.addOpen ~lid:popen_lid.txt\n    | Psig_value vd -> scopeValueDescription vd\n    | Psig_type (recFlag, decls) ->\n      if recFlag = Recursive then decls |> List.iter scopeTypeDeclaration;\n      decls |> List.iter (fun td -> iterator.type_declaration iterator td);\n      if recFlag = Nonrecursive then decls |> List.iter scopeTypeDeclaration;\n      processed := true\n    | Psig_module md ->\n      iterator.module_declaration iterator md;\n      scopeModuleDeclaration md;\n      processed := true\n    | Psig_recmodule mds ->\n      mds |> List.iter scopeModuleDeclaration;\n      mds |> List.iter (fun d -> iterator.module_declaration iterator d);\n      processed := true\n    | _ -> ());\n    if not !processed then\n      Ast_iterator.default_iterator.signature_item iterator item\n  in\n  let attribute (iterator : Ast_iterator.iterator)\n      ((id, payload) : Parsetree.attribute) =\n    (if String.length id.txt >= 4 && String.sub id.txt 0 4 = \"res.\" then\n       (* skip: internal parser attribute *) ()\n     else if id.loc.loc_ghost then ()\n     else if id.loc |> Loc.hasPos ~pos:posBeforeCursor then\n       let posStart, posEnd = Loc.range id.loc in\n       match\n         (Pos.positionToOffset text posStart, Pos.positionToOffset text posEnd)\n       with\n       | Some offsetStart, Some offsetEnd ->\n         (* Can't trust the parser's location\n            E.g. @foo. let x... 
gives as label @foo.let *)\n         let label =\n           let rawLabel =\n             String.sub text offsetStart (offsetEnd - offsetStart)\n           in\n           let ( ++ ) x y =\n             match (x, y) with\n             | Some i1, Some i2 -> Some (min i1 i2)\n             | Some _, None -> x\n             | None, _ -> y\n           in\n           let label =\n             match\n               String.index_opt rawLabel ' '\n               ++ String.index_opt rawLabel '\\t'\n               ++ String.index_opt rawLabel '\\r'\n               ++ String.index_opt rawLabel '\\n'\n             with\n             | None -> rawLabel\n             | Some i -> String.sub rawLabel 0 i\n           in\n           if label <> \"\" && label.[0] = '@' then\n             String.sub label 1 (String.length label - 1)\n           else label\n         in\n         found := true;\n         if debug then\n           Printf.printf \"Attribute id:%s:%s label:%s\\n\" id.txt\n             (Loc.toString id.loc) label;\n         setResult (Completable.Cdecorator label)\n       | _ -> ()\n     else if id.txt = \"module\" then\n       match payload with\n       | PStr\n           [\n             {\n               pstr_desc =\n                 Pstr_eval\n                   ( {pexp_loc; pexp_desc = Pexp_constant (Pconst_string (s, _))},\n                     _ );\n             };\n           ]\n         when locHasCursor pexp_loc ->\n         if Debug.verbose () then\n           print_endline \"[decoratorCompletion] Found @module\";\n         setResult (Completable.CdecoratorPayload (Module s))\n       | PStr\n           [\n             {\n               pstr_desc =\n                 Pstr_eval\n                   ( {\n                       pexp_desc =\n                         Pexp_record (({txt = Lident \"from\"}, fromExpr) :: _, _);\n                     },\n                     _ );\n             };\n           ]\n         when locHasCursor fromExpr.pexp_loc\n              || 
locIsEmpty fromExpr.pexp_loc\n                 && CompletionExpressions.isExprHole fromExpr -> (\n         if Debug.verbose () then\n           print_endline\n             \"[decoratorCompletion] Found @module with import attributes and \\\n              cursor on \\\"from\\\"\";\n         match\n           ( locHasCursor fromExpr.pexp_loc,\n             locIsEmpty fromExpr.pexp_loc,\n             CompletionExpressions.isExprHole fromExpr,\n             fromExpr )\n         with\n         | true, _, _, {pexp_desc = Pexp_constant (Pconst_string (s, _))} ->\n           if Debug.verbose () then\n             print_endline\n               \"[decoratorCompletion] @module `from` payload was string\";\n           setResult (Completable.CdecoratorPayload (Module s))\n         | false, true, true, _ ->\n           if Debug.verbose () then\n             print_endline\n               \"[decoratorCompletion] @module `from` payload was expr hole\";\n           setResult (Completable.CdecoratorPayload (Module \"\"))\n         | _ -> ())\n       | PStr [{pstr_desc = Pstr_eval (expr, _)}] -> (\n         if Debug.verbose () then\n           print_endline\n             \"[decoratorCompletion] Found @module with non-string payload\";\n         match\n           CompletionExpressions.traverseExpr expr ~exprPath:[]\n             ~pos:posBeforeCursor ~firstCharBeforeCursorNoWhite\n         with\n         | None -> ()\n         | Some (prefix, nested) ->\n           if Debug.verbose () then\n             print_endline \"[decoratorCompletion] Found @module record path\";\n           setResult\n             (Completable.CdecoratorPayload\n                (ModuleWithImportAttributes {nested = List.rev nested; prefix}))\n         )\n       | _ -> ()\n     else if id.txt = \"jsxConfig\" then\n       match payload with\n       | PStr [{pstr_desc = Pstr_eval (expr, _)}] -> (\n         if Debug.verbose () then\n           print_endline \"[decoratorCompletion] Found @jsxConfig\";\n         
match\n           CompletionExpressions.traverseExpr expr ~exprPath:[]\n             ~pos:posBeforeCursor ~firstCharBeforeCursorNoWhite\n         with\n         | None -> ()\n         | Some (prefix, nested) ->\n           if Debug.verbose () then\n             print_endline \"[decoratorCompletion] Found @jsxConfig path!\";\n           setResult\n             (Completable.CdecoratorPayload\n                (JsxConfig {nested = List.rev nested; prefix})))\n       | _ -> ()\n     else if id.txt = \"editor.completeFrom\" then\n       match payload with\n       | PStr\n           [\n             {\n               pstr_desc =\n                 Pstr_eval\n                   ( {\n                       pexp_loc;\n                       pexp_desc = Pexp_construct ({txt = path; loc}, None);\n                     },\n                     _ );\n             };\n           ]\n         when locHasCursor pexp_loc ->\n         if Debug.verbose () then\n           print_endline \"[decoratorCompletion] Found @editor.completeFrom\";\n         setResult\n           (Completable.Cpath\n              (CPId\n                 {\n                   path = Utils.flattenLongIdent path;\n                   completionContext = Module;\n                   loc;\n                 }))\n       | _ -> ());\n    Ast_iterator.default_iterator.attribute iterator (id, payload)\n  in\n  let rec iterateFnArguments ~args ~iterator ~isPipe\n      (argCompletable : Completable.t option) =\n    match argCompletable with\n    | None -> (\n      match !currentCtxPath with\n      | None -> ()\n      | Some functionContextPath ->\n        let currentUnlabelledCount = ref (if isPipe then 1 else 0) in\n        args\n        |> List.iter (fun (arg : arg) ->\n               let previousCtxPath = !currentCtxPath in\n               setCurrentCtxPath\n                 (CArgument\n                    {\n                      functionContextPath;\n                      argumentLabel =\n                        (match arg 
with\n                        | {label = None} ->\n                          let current = !currentUnlabelledCount in\n                          currentUnlabelledCount := current + 1;\n                          Unlabelled {argumentPosition = current}\n                        | {label = Some {name; opt = true}} -> Optional name\n                        | {label = Some {name; opt = false}} -> Labelled name);\n                    });\n               expr iterator arg.exp;\n               resetCurrentCtxPath previousCtxPath))\n    | Some argCompletable -> setResult argCompletable\n  and iterateJsxProps ~iterator (props : CompletionJsx.jsxProps) =\n    props.props\n    |> List.iter (fun (prop : CompletionJsx.prop) ->\n           let previousCtxPath = !currentCtxPath in\n           setCurrentCtxPath\n             (CJsxPropValue\n                {\n                  pathToComponent =\n                    Utils.flattenLongIdent ~jsx:true props.compName.txt;\n                  propName = prop.name;\n                  emptyJsxPropNameHint = None;\n                });\n           expr iterator prop.exp;\n           resetCurrentCtxPath previousCtxPath)\n  and expr (iterator : Ast_iterator.iterator) (expr : Parsetree.expression) =\n    let oldInJsxContext = !inJsxContext in\n    let processed = ref false in\n    let setFound () =\n      found := true;\n      if debug then\n        Printf.printf \"posCursor:[%s] posNoWhite:[%s] Found expr:%s\\n\"\n          (Pos.toString posCursor) (Pos.toString posNoWhite)\n          (Loc.toString expr.pexp_loc)\n    in\n    (match findThisExprLoc with\n    | Some loc when expr.pexp_loc = loc -> (\n      match exprToContextPath ~inJsxContext:!inJsxContext expr with\n      | None -> ()\n      | Some ctxPath -> setResult (Cpath ctxPath))\n    | _ -> ());\n    let setPipeResult ~(lhs : Parsetree.expression) ~id =\n      match completePipeChain ~inJsxContext:!inJsxContext lhs with\n      | None -> (\n        match exprToContextPath 
~inJsxContext:!inJsxContext lhs with\n        | Some pipe ->\n          setResult\n            (Cpath\n               (CPPipe\n                  {\n                    synthetic = false;\n                    contextPath = pipe;\n                    id;\n                    lhsLoc = lhs.pexp_loc;\n                    inJsx = !inJsxContext;\n                  }));\n          true\n        | None -> false)\n      | Some (pipe, lhsLoc) ->\n        setResult\n          (Cpath\n             (CPPipe\n                {\n                  synthetic = false;\n                  contextPath = pipe;\n                  id;\n                  lhsLoc;\n                  inJsx = !inJsxContext;\n                }));\n        true\n    in\n    typedCompletionExpr expr;\n    match expr.pexp_desc with\n    | Pexp_match (expr, cases)\n      when cases <> []\n           && locHasCursor expr.pexp_loc = false\n           && Option.is_none findThisExprLoc ->\n      if Debug.verbose () then\n        print_endline \"[completionFrontend] Checking each case\";\n      let ctxPath = exprToContextPath ~inJsxContext:!inJsxContext expr in\n      let oldCtxPath = !currentCtxPath in\n      cases\n      |> List.iter (fun (case : Parsetree.case) ->\n             let oldScope = !scope in\n             if\n               locHasCursor case.pc_rhs.pexp_loc = false\n               && locHasCursor case.pc_lhs.ppat_loc\n             then completePattern ?contextPath:ctxPath case.pc_lhs;\n             scopePattern ?contextPath:ctxPath case.pc_lhs;\n             Ast_iterator.default_iterator.case iterator case;\n             scope := oldScope);\n      resetCurrentCtxPath oldCtxPath\n    | Pexp_apply\n        ( {pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\"); loc = opLoc}},\n          [\n            (_, lhs);\n            (_, {pexp_desc = Pexp_extension _; pexp_loc = {loc_ghost = true}});\n          ] )\n      when opLoc |> Loc.hasPos ~pos:posBeforeCursor ->\n      (* Case foo-> when the parser adds a 
ghost expression to the rhs\n         so the apply expression does not include the cursor *)\n      if setPipeResult ~lhs ~id:\"\" then setFound ()\n    | _ -> (\n      if expr.pexp_loc |> Loc.hasPos ~pos:posNoWhite && !result = None then (\n        setFound ();\n        match expr.pexp_desc with\n        | Pexp_extension ({txt = \"obj\"}, PStr [str_item]) ->\n          Ast_iterator.default_iterator.structure_item iterator str_item\n        | Pexp_extension ({txt}, _) -> setResult (CextensionNode txt)\n        | Pexp_constant _ -> setResult Cnone\n        | Pexp_ident lid ->\n          let lidPath = flattenLidCheckDot lid in\n          if debug then\n            Printf.printf \"Pexp_ident %s:%s\\n\"\n              (lidPath |> String.concat \".\")\n              (Loc.toString lid.loc);\n          if lid.loc |> Loc.hasPos ~pos:posBeforeCursor then\n            let isLikelyModulePath =\n              match lidPath with\n              | head :: _\n                when String.length head > 0\n                     && head.[0] == Char.uppercase_ascii head.[0] ->\n                true\n              | _ -> false\n            in\n            setResult\n              (Cpath\n                 (CPId\n                    {\n                      loc = lid.loc;\n                      path = lidPath;\n                      completionContext =\n                        (if\n                           isLikelyModulePath\n                           && expr |> Res_parsetree_viewer.is_braced_expr\n                         then ValueOrField\n                         else Value);\n                    }))\n        | Pexp_construct ({txt = Lident (\"::\" | \"()\")}, _) ->\n          (* Ignore list expressions, used in JSX, unit, and more *) ()\n        | Pexp_construct (lid, eOpt) -> (\n          let lidPath = flattenLidCheckDot lid in\n          if debug && lid.txt <> Lident \"Function$\" then\n            Printf.printf \"Pexp_construct %s:%s %s\\n\"\n              (lidPath |> 
String.concat \"\\n\")\n              (Loc.toString lid.loc)\n              (match eOpt with\n              | None -> \"None\"\n              | Some e -> Loc.toString e.pexp_loc);\n          if\n            eOpt = None && (not lid.loc.loc_ghost)\n            && lid.loc |> Loc.hasPos ~pos:posBeforeCursor\n          then\n            setResult\n              (Cpath\n                 (CPId\n                    {loc = lid.loc; path = lidPath; completionContext = Value}))\n          else\n            match eOpt with\n            | Some e when locHasCursor e.pexp_loc -> (\n              match\n                CompletionExpressions.completeConstructorPayload\n                  ~posBeforeCursor ~firstCharBeforeCursorNoWhite lid e\n              with\n              | Some result ->\n                (* Check if anything else more important completes before setting this completion. *)\n                Ast_iterator.default_iterator.expr iterator e;\n                setResult result\n              | None -> ())\n            | _ -> ())\n        | Pexp_field (e, fieldName) -> (\n          if debug then\n            Printf.printf \"Pexp_field %s %s:%s\\n\" (Loc.toString e.pexp_loc)\n              (Utils.flattenLongIdent fieldName.txt |> String.concat \".\")\n              (Loc.toString fieldName.loc);\n          if fieldName.loc |> Loc.hasPos ~pos:posBeforeCursor then\n            match fieldName.txt with\n            | Lident name -> (\n              match exprToContextPath ~inJsxContext:!inJsxContext e with\n              | Some contextPath ->\n                let contextPath =\n                  Completable.CPField\n                    {\n                      contextPath;\n                      fieldName = name;\n                      posOfDot;\n                      exprLoc = e.pexp_loc;\n                      inJsx = !inJsxContext;\n                    }\n                in\n                setResult (Cpath contextPath)\n              | None -> ())\n            | Ldot (id, 
name) ->\n              (* Case x.M.field ignore the x part *)\n              let contextPath =\n                Completable.CPField\n                  {\n                    contextPath =\n                      CPId\n                        {\n                          loc = fieldName.loc;\n                          path = Utils.flattenLongIdent id;\n                          completionContext = Module;\n                        };\n                    fieldName =\n                      (if blankAfterCursor = Some '.' then\n                         (* x.M. field  --->  M. *) \"\"\n                       else if name = \"_\" then \"\"\n                       else name);\n                    posOfDot;\n                    exprLoc = e.pexp_loc;\n                    inJsx = !inJsxContext;\n                  }\n              in\n              setResult (Cpath contextPath)\n            | Lapply _ -> ()\n          else if Loc.end_ e.pexp_loc = posBeforeCursor then\n            match exprToContextPath ~inJsxContext:!inJsxContext e with\n            | Some contextPath ->\n              setResult\n                (Cpath\n                   (CPField\n                      {\n                        contextPath;\n                        fieldName = \"\";\n                        posOfDot;\n                        exprLoc = e.pexp_loc;\n                        inJsx = !inJsxContext;\n                      }))\n            | None -> ())\n        | Pexp_apply ({pexp_desc = Pexp_ident compName}, args)\n          when Res_parsetree_viewer.is_jsx_expression expr ->\n          inJsxContext := true;\n          let jsxProps = CompletionJsx.extractJsxProps ~compName ~args in\n          let compNamePath = flattenLidCheckDot ~jsx:true compName in\n          if debug then\n            Printf.printf \"JSX <%s:%s %s> _children:%s\\n\"\n              (compNamePath |> String.concat \".\")\n              (Loc.toString compName.loc)\n              (jsxProps.props\n              |> List.map\n     
              (fun ({name; posStart; posEnd; exp} : CompletionJsx.prop) ->\n                     Printf.sprintf \"%s[%s->%s]=...%s\" name\n                       (Pos.toString posStart) (Pos.toString posEnd)\n                       (Loc.toString exp.pexp_loc))\n              |> String.concat \" \")\n              (match jsxProps.childrenStart with\n              | None -> \"None\"\n              | Some childrenPosStart -> Pos.toString childrenPosStart);\n          let jsxCompletable =\n            CompletionJsx.findJsxPropsCompletable ~jsxProps\n              ~endPos:(Loc.end_ expr.pexp_loc) ~posBeforeCursor\n              ~posAfterCompName:(Loc.end_ compName.loc)\n              ~firstCharBeforeCursorNoWhite ~charAtCursor\n          in\n          if jsxCompletable <> None then setResultOpt jsxCompletable\n          else if compName.loc |> Loc.hasPos ~pos:posBeforeCursor then\n            setResult\n              (match compNamePath with\n              | [prefix] when Char.lowercase_ascii prefix.[0] = prefix.[0] ->\n                ChtmlElement {prefix}\n              | _ ->\n                Cpath\n                  (CPId\n                     {\n                       loc = compName.loc;\n                       path = compNamePath;\n                       completionContext = Module;\n                     }))\n          else iterateJsxProps ~iterator jsxProps\n        | Pexp_apply\n            ( {pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\")}},\n              [\n                (_, lhs);\n                (_, {pexp_desc = Pexp_ident {txt = Longident.Lident id; loc}});\n              ] )\n          when loc |> Loc.hasPos ~pos:posBeforeCursor ->\n          if Debug.verbose () then print_endline \"[expr_iter] Case foo->id\";\n          setPipeResult ~lhs ~id |> ignore\n        | Pexp_apply\n            ( {pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\"); loc = opLoc}},\n              [(_, lhs); _] )\n          when Loc.end_ opLoc = posCursor ->\n        
  if Debug.verbose () then print_endline \"[expr_iter] Case foo->\";\n          setPipeResult ~lhs ~id:\"\" |> ignore\n        | Pexp_apply\n            ( {pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\")}},\n              [_; (_, {pexp_desc = Pexp_apply (funExpr, args)})] )\n          when (* Normally named arg completion fires when the cursor is right after the expression.\n                  E.g in foo(~<---there\n                  But it should not fire in foo(~a)<---there *)\n               not\n                 (Loc.end_ expr.pexp_loc = posCursor\n                 && charBeforeCursor = Some ')') -> (\n          (* Complete fn argument values and named args when the fn call is piped. E.g. someVar->someFn(<com>). *)\n          if Debug.verbose () then\n            print_endline \"[expr_iter] Complete fn arguments (piped)\";\n          let args = extractExpApplyArgs ~args in\n          let funCtxPath =\n            exprToContextPath ~inJsxContext:!inJsxContext funExpr\n          in\n          let argCompletable =\n            match funCtxPath with\n            | Some contextPath ->\n              findArgCompletables ~contextPath ~args\n                ~endPos:(Loc.end_ expr.pexp_loc) ~posBeforeCursor\n                ~posAfterFunExpr:(Loc.end_ funExpr.pexp_loc)\n                ~charBeforeCursor ~isPipedExpr:true\n                ~firstCharBeforeCursorNoWhite\n            | None -> None\n          in\n          match argCompletable with\n          | None -> (\n            match funCtxPath with\n            | None -> ()\n            | Some funCtxPath ->\n              let oldCtxPath = !currentCtxPath in\n              setCurrentCtxPath funCtxPath;\n              argCompletable |> iterateFnArguments ~isPipe:true ~args ~iterator;\n              resetCurrentCtxPath oldCtxPath)\n          | Some argCompletable -> setResult argCompletable)\n        | Pexp_apply\n            ({pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\")}}, [_; _]) ->\n          (* 
Ignore any other pipe. *)\n          ()\n        | Pexp_apply (funExpr, args)\n          when not\n                 (Loc.end_ expr.pexp_loc = posCursor\n                 && charBeforeCursor = Some ')') -> (\n          (* Complete fn argument values and named args when the fn call is _not_ piped. E.g. someFn(<com>). *)\n          if Debug.verbose () then\n            print_endline \"[expr_iter] Complete fn arguments (not piped)\";\n          let args = extractExpApplyArgs ~args in\n          if debug then\n            Printf.printf \"Pexp_apply ...%s (%s)\\n\"\n              (Loc.toString funExpr.pexp_loc)\n              (args\n              |> List.map (fun {label; exp} ->\n                     Printf.sprintf \"%s...%s\"\n                       (match label with\n                       | None -> \"\"\n                       | Some {name; opt; posStart; posEnd} ->\n                         \"~\" ^ name ^ Pos.toString posStart ^ \"->\"\n                         ^ Pos.toString posEnd ^ \"=\"\n                         ^ if opt then \"?\" else \"\")\n                       (Loc.toString exp.pexp_loc))\n              |> String.concat \", \");\n\n          let funCtxPath =\n            exprToContextPath ~inJsxContext:!inJsxContext funExpr\n          in\n          let argCompletable =\n            match funCtxPath with\n            | Some contextPath ->\n              findArgCompletables ~contextPath ~args\n                ~endPos:(Loc.end_ expr.pexp_loc) ~posBeforeCursor\n                ~posAfterFunExpr:(Loc.end_ funExpr.pexp_loc)\n                ~charBeforeCursor ~isPipedExpr:false\n                ~firstCharBeforeCursorNoWhite\n            | None -> None\n          in\n          match argCompletable with\n          | None -> (\n            match funCtxPath with\n            | None -> ()\n            | Some funCtxPath ->\n              let oldCtxPath = !currentCtxPath in\n              setCurrentCtxPath funCtxPath;\n              argCompletable |> iterateFnArguments 
~isPipe:false ~args ~iterator;\n              resetCurrentCtxPath oldCtxPath)\n          | Some argCompletable -> setResult argCompletable)\n        | Pexp_send (lhs, {txt; loc}) -> (\n          (* e[\"txt\"]\n             If the string for txt is not closed, it could go over several lines.\n             Only take the first like to represent the label *)\n          let txtLines = txt |> String.split_on_char '\\n' in\n          let label = List.hd txtLines in\n          let label =\n            if label <> \"\" && label.[String.length label - 1] = '\\r' then\n              String.sub label 0 (String.length label - 1)\n            else label\n          in\n          let labelRange =\n            let l, c = Loc.start loc in\n            ((l, c + 1), (l, c + 1 + String.length label))\n          in\n          if debug then\n            Printf.printf \"Pexp_send %s%s e:%s\\n\" label\n              (Range.toString labelRange)\n              (Loc.toString lhs.pexp_loc);\n          if\n            labelRange |> Range.hasPos ~pos:posBeforeCursor\n            || (label = \"\" && posCursor = fst labelRange)\n          then\n            match exprToContextPath ~inJsxContext:!inJsxContext lhs with\n            | Some contextPath -> setResult (Cpath (CPObj (contextPath, label)))\n            | None -> ())\n        | Pexp_fun (lbl, defaultExpOpt, pat, e) ->\n          let oldScope = !scope in\n          (match (!processingFun, !currentCtxPath) with\n          | None, Some ctxPath -> processingFun := Some (ctxPath, 0)\n          | _ -> ());\n          let argContextPath =\n            match !processingFun with\n            | None -> None\n            | Some (ctxPath, currentUnlabelledCount) ->\n              (processingFun :=\n                 match lbl with\n                 | Nolabel -> Some (ctxPath, currentUnlabelledCount + 1)\n                 | _ -> Some (ctxPath, currentUnlabelledCount));\n              if Debug.verbose () then\n                print_endline \"[expr_iter] 
Completing for argument value\";\n              Some\n                (Completable.CArgument\n                   {\n                     functionContextPath = ctxPath;\n                     argumentLabel =\n                       (match lbl with\n                       | Nolabel ->\n                         Unlabelled {argumentPosition = currentUnlabelledCount}\n                       | Optional name -> Optional name\n                       | Labelled name -> Labelled name);\n                   })\n          in\n          (match defaultExpOpt with\n          | None -> ()\n          | Some defaultExp -> iterator.expr iterator defaultExp);\n          if locHasCursor e.pexp_loc = false then\n            completePattern ?contextPath:argContextPath pat;\n          scopePattern ?contextPath:argContextPath pat;\n          iterator.pat iterator pat;\n          iterator.expr iterator e;\n          scope := oldScope;\n          processed := true\n        | Pexp_let (recFlag, bindings, e) ->\n          let oldScope = !scope in\n          if recFlag = Recursive then bindings |> List.iter scopeValueBinding;\n          bindings |> List.iter (fun vb -> iterator.value_binding iterator vb);\n          if recFlag = Nonrecursive then bindings |> List.iter scopeValueBinding;\n          iterator.expr iterator e;\n          scope := oldScope;\n          processed := true\n        | Pexp_letmodule (name, modExpr, modBody) ->\n          let oldScope = !scope in\n          iterator.location iterator name.loc;\n          iterator.module_expr iterator modExpr;\n          scope := !scope |> Scope.addModule ~name:name.txt ~loc:name.loc;\n          iterator.expr iterator modBody;\n          scope := oldScope;\n          processed := true\n        | Pexp_open (_, lid, e) ->\n          let oldScope = !scope in\n          iterator.location iterator lid.loc;\n          scope := !scope |> Scope.addOpen ~lid:lid.txt;\n          iterator.expr iterator e;\n          scope := oldScope;\n          
processed := true\n        | _ -> ());\n      if not !processed then Ast_iterator.default_iterator.expr iterator expr;\n      inJsxContext := oldInJsxContext;\n      match expr.pexp_desc with\n      | Pexp_fun _ -> ()\n      | _ -> processingFun := None)\n  in\n  let typ (iterator : Ast_iterator.iterator) (core_type : Parsetree.core_type) =\n    if core_type.ptyp_loc |> Loc.hasPos ~pos:posNoWhite then (\n      found := true;\n      if debug then\n        Printf.printf \"posCursor:[%s] posNoWhite:[%s] Found type:%s\\n\"\n          (Pos.toString posCursor) (Pos.toString posNoWhite)\n          (Loc.toString core_type.ptyp_loc);\n      match core_type.ptyp_desc with\n      | Ptyp_constr (lid, _args) ->\n        let lidPath = flattenLidCheckDot lid in\n        if debug then\n          Printf.printf \"Ptyp_constr %s:%s\\n\"\n            (lidPath |> String.concat \".\")\n            (Loc.toString lid.loc);\n        if lid.loc |> Loc.hasPos ~pos:posBeforeCursor then\n          setResult\n            (Cpath\n               (CPId {loc = lid.loc; path = lidPath; completionContext = Type}))\n      | _ -> ());\n    Ast_iterator.default_iterator.typ iterator core_type\n  in\n  let pat (iterator : Ast_iterator.iterator) (pat : Parsetree.pattern) =\n    if pat.ppat_loc |> Loc.hasPos ~pos:posNoWhite then (\n      found := true;\n      if debug then\n        Printf.printf \"posCursor:[%s] posNoWhite:[%s] Found pattern:%s\\n\"\n          (Pos.toString posCursor) (Pos.toString posNoWhite)\n          (Loc.toString pat.ppat_loc);\n      (match pat.ppat_desc with\n      | Ppat_construct (lid, _) -> (\n        let lidPath = flattenLidCheckDot lid in\n        if debug then\n          Printf.printf \"Ppat_construct %s:%s\\n\"\n            (lidPath |> String.concat \".\")\n            (Loc.toString lid.loc);\n        let completion =\n          Completable.Cpath\n            (CPId {loc = lid.loc; path = lidPath; completionContext = Value})\n        in\n        match !result with\n        | 
Some (Completable.Cpattern p, scope) ->\n          result := Some (Cpattern {p with fallback = Some completion}, scope)\n        | _ -> setResult completion)\n      | _ -> ());\n      Ast_iterator.default_iterator.pat iterator pat)\n  in\n  let module_expr (iterator : Ast_iterator.iterator)\n      (me : Parsetree.module_expr) =\n    (match me.pmod_desc with\n    | Pmod_ident lid when lid.loc |> Loc.hasPos ~pos:posBeforeCursor ->\n      let lidPath = flattenLidCheckDot lid in\n      if debug then\n        Printf.printf \"Pmod_ident %s:%s\\n\"\n          (lidPath |> String.concat \".\")\n          (Loc.toString lid.loc);\n      found := true;\n      setResult\n        (Cpath\n           (CPId {loc = lid.loc; path = lidPath; completionContext = Module}))\n    | _ -> ());\n    Ast_iterator.default_iterator.module_expr iterator me\n  in\n  let module_type (iterator : Ast_iterator.iterator)\n      (mt : Parsetree.module_type) =\n    (match mt.pmty_desc with\n    | Pmty_ident lid when lid.loc |> Loc.hasPos ~pos:posBeforeCursor ->\n      let lidPath = flattenLidCheckDot lid in\n      if debug then\n        Printf.printf \"Pmty_ident %s:%s\\n\"\n          (lidPath |> String.concat \".\")\n          (Loc.toString lid.loc);\n      found := true;\n      setResult\n        (Cpath\n           (CPId {loc = lid.loc; path = lidPath; completionContext = Module}))\n    | _ -> ());\n    Ast_iterator.default_iterator.module_type iterator mt\n  in\n  let type_kind (iterator : Ast_iterator.iterator)\n      (type_kind : Parsetree.type_kind) =\n    (match type_kind with\n    | Ptype_variant [decl]\n      when decl.pcd_name.loc |> Loc.hasPos ~pos:posNoWhite\n           && decl.pcd_args = Pcstr_tuple [] ->\n      (* \"type t = Pre\" could signal the intent to complete variant \"Prelude\",\n         or the beginning of \"Prefix.t\" *)\n      if debug then\n        Printf.printf \"Ptype_variant unary %s:%s\\n\" decl.pcd_name.txt\n          (Loc.toString decl.pcd_name.loc);\n      found := 
true;\n      setResult\n        (Cpath\n           (CPId\n              {\n                loc = decl.pcd_name.loc;\n                path = [decl.pcd_name.txt];\n                completionContext = Value;\n              }))\n    | _ -> ());\n    Ast_iterator.default_iterator.type_kind iterator type_kind\n  in\n\n  let lastScopeBeforeCursor = ref (Scope.create ()) in\n  let location (_iterator : Ast_iterator.iterator) (loc : Location.t) =\n    if Loc.end_ loc <= posCursor then lastScopeBeforeCursor := !scope\n  in\n\n  let iterator =\n    {\n      Ast_iterator.default_iterator with\n      attribute;\n      expr;\n      location;\n      module_expr;\n      module_type;\n      pat;\n      signature;\n      signature_item;\n      structure;\n      structure_item;\n      typ;\n      type_kind;\n      value_binding;\n    }\n  in\n\n  if Filename.check_suffix path \".res\" then (\n    let parser =\n      Res_driver.parsing_engine.parse_implementation ~for_printer:false\n    in\n    let {Res_driver.parsetree = str} = parser ~filename:currentFile in\n    iterator.structure iterator str |> ignore;\n    if blankAfterCursor = Some ' ' || blankAfterCursor = Some '\\n' then (\n      scope := !lastScopeBeforeCursor;\n      setResult\n        (Cpath\n           (CPId {loc = Location.none; path = [\"\"]; completionContext = Value})));\n    if !found = false then if debug then Printf.printf \"XXX Not found!\\n\";\n    !result)\n  else if Filename.check_suffix path \".resi\" then (\n    let parser = Res_driver.parsing_engine.parse_interface ~for_printer:false in\n    let {Res_driver.parsetree = signature} = parser ~filename:currentFile in\n    iterator.signature iterator signature |> ignore;\n    if blankAfterCursor = Some ' ' || blankAfterCursor = Some '\\n' then (\n      scope := !lastScopeBeforeCursor;\n      setResult\n        (Cpath\n           (CPId {loc = Location.none; path = [\"\"]; completionContext = Type})));\n    if !found = false then if debug then Printf.printf \"XXX 
Not found!\\n\";\n    !result)\n  else None\n\nlet completionWithParser ~debug ~path ~posCursor ~currentFile ~text =\n  match Pos.positionToOffset text posCursor with\n  | Some offset ->\n    completionWithParser1 ~currentFile ~debug ~offset ~path ~posCursor text\n  | None -> None\n\nlet findTypeOfExpressionAtLoc ~debug ~path ~posCursor ~currentFile loc =\n  let textOpt = Files.readFile currentFile in\n  match textOpt with\n  | None | Some \"\" -> None\n  | Some text -> (\n    match Pos.positionToOffset text posCursor with\n    | Some offset ->\n      completionWithParser1 ~findThisExprLoc:loc ~currentFile ~debug ~offset\n        ~path ~posCursor text\n    | None -> None)\n"
  },
  {
    "path": "analysis/src/CompletionJsx.ml",
    "content": "open SharedTypes\n\nlet domLabels =\n  let bool = \"bool\" in\n  let float = \"float\" in\n  let int = \"int\" in\n  let string = \"string\" in\n  [\n    (\"ariaDetails\", string);\n    (\"ariaDisabled\", bool);\n    (\"ariaHidden\", bool);\n    (\"ariaKeyshortcuts\", string);\n    (\"ariaLabel\", string);\n    (\"ariaRoledescription\", string);\n    (\"ariaExpanded\", bool);\n    (\"ariaLevel\", int);\n    (\"ariaModal\", bool);\n    (\"ariaMultiline\", bool);\n    (\"ariaMultiselectable\", bool);\n    (\"ariaPlaceholder\", string);\n    (\"ariaReadonly\", bool);\n    (\"ariaRequired\", bool);\n    (\"ariaSelected\", bool);\n    (\"ariaSort\", string);\n    (\"ariaValuemax\", float);\n    (\"ariaValuemin\", float);\n    (\"ariaValuenow\", float);\n    (\"ariaValuetext\", string);\n    (\"ariaAtomic\", bool);\n    (\"ariaBusy\", bool);\n    (\"ariaRelevant\", string);\n    (\"ariaGrabbed\", bool);\n    (\"ariaActivedescendant\", string);\n    (\"ariaColcount\", int);\n    (\"ariaColindex\", int);\n    (\"ariaColspan\", int);\n    (\"ariaControls\", string);\n    (\"ariaDescribedby\", string);\n    (\"ariaErrormessage\", string);\n    (\"ariaFlowto\", string);\n    (\"ariaLabelledby\", string);\n    (\"ariaOwns\", string);\n    (\"ariaPosinset\", int);\n    (\"ariaRowcount\", int);\n    (\"ariaRowindex\", int);\n    (\"ariaRowspan\", int);\n    (\"ariaSetsize\", int);\n    (\"defaultChecked\", bool);\n    (\"defaultValue\", string);\n    (\"accessKey\", string);\n    (\"className\", string);\n    (\"contentEditable\", bool);\n    (\"contextMenu\", string);\n    (\"dir\", string);\n    (\"draggable\", bool);\n    (\"hidden\", bool);\n    (\"id\", string);\n    (\"lang\", string);\n    (\"style\", \"style\");\n    (\"spellCheck\", bool);\n    (\"tabIndex\", int);\n    (\"title\", string);\n    (\"itemID\", string);\n    (\"itemProp\", string);\n    (\"itemRef\", string);\n    (\"itemScope\", bool);\n    (\"itemType\", string);\n    (\"accept\", 
string);\n    (\"acceptCharset\", string);\n    (\"action\", string);\n    (\"allowFullScreen\", bool);\n    (\"alt\", string);\n    (\"async\", bool);\n    (\"autoComplete\", string);\n    (\"autoCapitalize\", string);\n    (\"autoFocus\", bool);\n    (\"autoPlay\", bool);\n    (\"challenge\", string);\n    (\"charSet\", string);\n    (\"checked\", bool);\n    (\"cite\", string);\n    (\"crossOrigin\", string);\n    (\"cols\", int);\n    (\"colSpan\", int);\n    (\"content\", string);\n    (\"controls\", bool);\n    (\"coords\", string);\n    (\"data\", string);\n    (\"dateTime\", string);\n    (\"default\", bool);\n    (\"defer\", bool);\n    (\"disabled\", bool);\n    (\"download\", string);\n    (\"encType\", string);\n    (\"form\", string);\n    (\"formAction\", string);\n    (\"formTarget\", string);\n    (\"formMethod\", string);\n    (\"headers\", string);\n    (\"height\", string);\n    (\"high\", int);\n    (\"href\", string);\n    (\"hrefLang\", string);\n    (\"htmlFor\", string);\n    (\"httpEquiv\", string);\n    (\"icon\", string);\n    (\"inputMode\", string);\n    (\"integrity\", string);\n    (\"keyType\", string);\n    (\"label\", string);\n    (\"list\", string);\n    (\"loop\", bool);\n    (\"low\", int);\n    (\"manifest\", string);\n    (\"max\", string);\n    (\"maxLength\", int);\n    (\"media\", string);\n    (\"mediaGroup\", string);\n    (\"method\", string);\n    (\"min\", string);\n    (\"minLength\", int);\n    (\"multiple\", bool);\n    (\"muted\", bool);\n    (\"name\", string);\n    (\"nonce\", string);\n    (\"noValidate\", bool);\n    (\"open_\", bool);\n    (\"optimum\", int);\n    (\"pattern\", string);\n    (\"placeholder\", string);\n    (\"playsInline\", bool);\n    (\"poster\", string);\n    (\"preload\", string);\n    (\"radioGroup\", string);\n    (\"readOnly\", bool);\n    (\"rel\", string);\n    (\"required\", bool);\n    (\"reversed\", bool);\n    (\"rows\", int);\n    (\"rowSpan\", int);\n    (\"sandbox\", 
string);\n    (\"scope\", string);\n    (\"scoped\", bool);\n    (\"scrolling\", string);\n    (\"selected\", bool);\n    (\"shape\", string);\n    (\"size\", int);\n    (\"sizes\", string);\n    (\"span\", int);\n    (\"src\", string);\n    (\"srcDoc\", string);\n    (\"srcLang\", string);\n    (\"srcSet\", string);\n    (\"start\", int);\n    (\"step\", float);\n    (\"summary\", string);\n    (\"target\", string);\n    (\"type_\", string);\n    (\"useMap\", string);\n    (\"value\", string);\n    (\"width\", string);\n    (\"wrap\", string);\n    (\"onCopy\", \"ReactEvent.Clipboard.t => unit\");\n    (\"onCut\", \"ReactEvent.Clipboard.t => unit\");\n    (\"onPaste\", \"ReactEvent.Clipboard.t => unit\");\n    (\"onCompositionEnd\", \"ReactEvent.Composition.t => unit\");\n    (\"onCompositionStart\", \"ReactEvent.Composition.t => unit\");\n    (\"onCompositionUpdate\", \"ReactEvent.Composition.t => unit\");\n    (\"onKeyDown\", \"ReactEvent.Keyboard.t => unit\");\n    (\"onKeyPress\", \"ReactEvent.Keyboard.t => unit\");\n    (\"onKeyUp\", \"ReactEvent.Keyboard.t => unit\");\n    (\"onFocus\", \"ReactEvent.Focus.t => unit\");\n    (\"onBlur\", \"ReactEvent.Focus.t => unit\");\n    (\"onChange\", \"ReactEvent.Form.t => unit\");\n    (\"onInput\", \"ReactEvent.Form.t => unit\");\n    (\"onSubmit\", \"ReactEvent.Form.t => unit\");\n    (\"onInvalid\", \"ReactEvent.Form.t => unit\");\n    (\"onClick\", \"ReactEvent.Mouse.t => unit\");\n    (\"onContextMenu\", \"ReactEvent.Mouse.t => unit\");\n    (\"onDoubleClick\", \"ReactEvent.Mouse.t => unit\");\n    (\"onDrag\", \"ReactEvent.Mouse.t => unit\");\n    (\"onDragEnd\", \"ReactEvent.Mouse.t => unit\");\n    (\"onDragEnter\", \"ReactEvent.Mouse.t => unit\");\n    (\"onDragExit\", \"ReactEvent.Mouse.t => unit\");\n    (\"onDragLeave\", \"ReactEvent.Mouse.t => unit\");\n    (\"onDragOver\", \"ReactEvent.Mouse.t => unit\");\n    (\"onDragStart\", \"ReactEvent.Mouse.t => unit\");\n    (\"onDrop\", \"ReactEvent.Mouse.t => 
unit\");\n    (\"onMouseDown\", \"ReactEvent.Mouse.t => unit\");\n    (\"onMouseEnter\", \"ReactEvent.Mouse.t => unit\");\n    (\"onMouseLeave\", \"ReactEvent.Mouse.t => unit\");\n    (\"onMouseMove\", \"ReactEvent.Mouse.t => unit\");\n    (\"onMouseOut\", \"ReactEvent.Mouse.t => unit\");\n    (\"onMouseOver\", \"ReactEvent.Mouse.t => unit\");\n    (\"onMouseUp\", \"ReactEvent.Mouse.t => unit\");\n    (\"onSelect\", \"ReactEvent.Selection.t => unit\");\n    (\"onTouchCancel\", \"ReactEvent.Touch.t => unit\");\n    (\"onTouchEnd\", \"ReactEvent.Touch.t => unit\");\n    (\"onTouchMove\", \"ReactEvent.Touch.t => unit\");\n    (\"onTouchStart\", \"ReactEvent.Touch.t => unit\");\n    (\"onPointerOver\", \"ReactEvent.Pointer.t => unit\");\n    (\"onPointerEnter\", \"ReactEvent.Pointer.t => unit\");\n    (\"onPointerDown\", \"ReactEvent.Pointer.t => unit\");\n    (\"onPointerMove\", \"ReactEvent.Pointer.t => unit\");\n    (\"onPointerUp\", \"ReactEvent.Pointer.t => unit\");\n    (\"onPointerCancel\", \"ReactEvent.Pointer.t => unit\");\n    (\"onPointerOut\", \"ReactEvent.Pointer.t => unit\");\n    (\"onPointerLeave\", \"ReactEvent.Pointer.t => unit\");\n    (\"onGotPointerCapture\", \"ReactEvent.Pointer.t => unit\");\n    (\"onLostPointerCapture\", \"ReactEvent.Pointer.t => unit\");\n    (\"onScroll\", \"ReactEvent.UI.t => unit\");\n    (\"onWheel\", \"ReactEvent.Wheel.t => unit\");\n    (\"onAbort\", \"ReactEvent.Media.t => unit\");\n    (\"onCanPlay\", \"ReactEvent.Media.t => unit\");\n    (\"onCanPlayThrough\", \"ReactEvent.Media.t => unit\");\n    (\"onDurationChange\", \"ReactEvent.Media.t => unit\");\n    (\"onEmptied\", \"ReactEvent.Media.t => unit\");\n    (\"onEncrypetd\", \"ReactEvent.Media.t => unit\");\n    (\"onEnded\", \"ReactEvent.Media.t => unit\");\n    (\"onError\", \"ReactEvent.Media.t => unit\");\n    (\"onLoadedData\", \"ReactEvent.Media.t => unit\");\n    (\"onLoadedMetadata\", \"ReactEvent.Media.t => unit\");\n    (\"onLoadStart\", 
\"ReactEvent.Media.t => unit\");\n    (\"onPause\", \"ReactEvent.Media.t => unit\");\n    (\"onPlay\", \"ReactEvent.Media.t => unit\");\n    (\"onPlaying\", \"ReactEvent.Media.t => unit\");\n    (\"onProgress\", \"ReactEvent.Media.t => unit\");\n    (\"onRateChange\", \"ReactEvent.Media.t => unit\");\n    (\"onSeeked\", \"ReactEvent.Media.t => unit\");\n    (\"onSeeking\", \"ReactEvent.Media.t => unit\");\n    (\"onStalled\", \"ReactEvent.Media.t => unit\");\n    (\"onSuspend\", \"ReactEvent.Media.t => unit\");\n    (\"onTimeUpdate\", \"ReactEvent.Media.t => unit\");\n    (\"onVolumeChange\", \"ReactEvent.Media.t => unit\");\n    (\"onWaiting\", \"ReactEvent.Media.t => unit\");\n    (\"onAnimationStart\", \"ReactEvent.Animation.t => unit\");\n    (\"onAnimationEnd\", \"ReactEvent.Animation.t => unit\");\n    (\"onAnimationIteration\", \"ReactEvent.Animation.t => unit\");\n    (\"onTransitionEnd\", \"ReactEvent.Transition.t => unit\");\n    (\"accentHeight\", string);\n    (\"accumulate\", string);\n    (\"additive\", string);\n    (\"alignmentBaseline\", string);\n    (\"allowReorder\", string);\n    (\"alphabetic\", string);\n    (\"amplitude\", string);\n    (\"arabicForm\", string);\n    (\"ascent\", string);\n    (\"attributeName\", string);\n    (\"attributeType\", string);\n    (\"autoReverse\", string);\n    (\"azimuth\", string);\n    (\"baseFrequency\", string);\n    (\"baseProfile\", string);\n    (\"baselineShift\", string);\n    (\"bbox\", string);\n    (\"bias\", string);\n    (\"by\", string);\n    (\"calcMode\", string);\n    (\"capHeight\", string);\n    (\"clip\", string);\n    (\"clipPath\", string);\n    (\"clipPathUnits\", string);\n    (\"clipRule\", string);\n    (\"colorInterpolation\", string);\n    (\"colorInterpolationFilters\", string);\n    (\"colorProfile\", string);\n    (\"colorRendering\", string);\n    (\"contentScriptType\", string);\n    (\"contentStyleType\", string);\n    (\"cursor\", string);\n    (\"cx\", string);\n    
(\"cy\", string);\n    (\"d\", string);\n    (\"decelerate\", string);\n    (\"descent\", string);\n    (\"diffuseConstant\", string);\n    (\"direction\", string);\n    (\"display\", string);\n    (\"divisor\", string);\n    (\"dominantBaseline\", string);\n    (\"dur\", string);\n    (\"dx\", string);\n    (\"dy\", string);\n    (\"edgeMode\", string);\n    (\"elevation\", string);\n    (\"enableBackground\", string);\n    (\"exponent\", string);\n    (\"externalResourcesRequired\", string);\n    (\"fill\", string);\n    (\"fillOpacity\", string);\n    (\"fillRule\", string);\n    (\"filter\", string);\n    (\"filterRes\", string);\n    (\"filterUnits\", string);\n    (\"floodColor\", string);\n    (\"floodOpacity\", string);\n    (\"focusable\", string);\n    (\"fontFamily\", string);\n    (\"fontSize\", string);\n    (\"fontSizeAdjust\", string);\n    (\"fontStretch\", string);\n    (\"fontStyle\", string);\n    (\"fontVariant\", string);\n    (\"fontWeight\", string);\n    (\"fomat\", string);\n    (\"from\", string);\n    (\"fx\", string);\n    (\"fy\", string);\n    (\"g1\", string);\n    (\"g2\", string);\n    (\"glyphName\", string);\n    (\"glyphOrientationHorizontal\", string);\n    (\"glyphOrientationVertical\", string);\n    (\"glyphRef\", string);\n    (\"gradientTransform\", string);\n    (\"gradientUnits\", string);\n    (\"hanging\", string);\n    (\"horizAdvX\", string);\n    (\"horizOriginX\", string);\n    (\"ideographic\", string);\n    (\"imageRendering\", string);\n    (\"in2\", string);\n    (\"intercept\", string);\n    (\"k\", string);\n    (\"k1\", string);\n    (\"k2\", string);\n    (\"k3\", string);\n    (\"k4\", string);\n    (\"kernelMatrix\", string);\n    (\"kernelUnitLength\", string);\n    (\"kerning\", string);\n    (\"keyPoints\", string);\n    (\"keySplines\", string);\n    (\"keyTimes\", string);\n    (\"lengthAdjust\", string);\n    (\"letterSpacing\", string);\n    (\"lightingColor\", string);\n    (\"limitingConeAngle\", 
string);\n    (\"local\", string);\n    (\"markerEnd\", string);\n    (\"markerHeight\", string);\n    (\"markerMid\", string);\n    (\"markerStart\", string);\n    (\"markerUnits\", string);\n    (\"markerWidth\", string);\n    (\"mask\", string);\n    (\"maskContentUnits\", string);\n    (\"maskUnits\", string);\n    (\"mathematical\", string);\n    (\"mode\", string);\n    (\"numOctaves\", string);\n    (\"offset\", string);\n    (\"opacity\", string);\n    (\"operator\", string);\n    (\"order\", string);\n    (\"orient\", string);\n    (\"orientation\", string);\n    (\"origin\", string);\n    (\"overflow\", string);\n    (\"overflowX\", string);\n    (\"overflowY\", string);\n    (\"overlinePosition\", string);\n    (\"overlineThickness\", string);\n    (\"paintOrder\", string);\n    (\"panose1\", string);\n    (\"pathLength\", string);\n    (\"patternContentUnits\", string);\n    (\"patternTransform\", string);\n    (\"patternUnits\", string);\n    (\"pointerEvents\", string);\n    (\"points\", string);\n    (\"pointsAtX\", string);\n    (\"pointsAtY\", string);\n    (\"pointsAtZ\", string);\n    (\"preserveAlpha\", string);\n    (\"preserveAspectRatio\", string);\n    (\"primitiveUnits\", string);\n    (\"r\", string);\n    (\"radius\", string);\n    (\"refX\", string);\n    (\"refY\", string);\n    (\"renderingIntent\", string);\n    (\"repeatCount\", string);\n    (\"repeatDur\", string);\n    (\"requiredExtensions\", string);\n    (\"requiredFeatures\", string);\n    (\"restart\", string);\n    (\"result\", string);\n    (\"rotate\", string);\n    (\"rx\", string);\n    (\"ry\", string);\n    (\"scale\", string);\n    (\"seed\", string);\n    (\"shapeRendering\", string);\n    (\"slope\", string);\n    (\"spacing\", string);\n    (\"specularConstant\", string);\n    (\"specularExponent\", string);\n    (\"speed\", string);\n    (\"spreadMethod\", string);\n    (\"startOffset\", string);\n    (\"stdDeviation\", string);\n    (\"stemh\", string);\n    
(\"stemv\", string);\n    (\"stitchTiles\", string);\n    (\"stopColor\", string);\n    (\"stopOpacity\", string);\n    (\"strikethroughPosition\", string);\n    (\"strikethroughThickness\", string);\n    (string, string);\n    (\"stroke\", string);\n    (\"strokeDasharray\", string);\n    (\"strokeDashoffset\", string);\n    (\"strokeLinecap\", string);\n    (\"strokeLinejoin\", string);\n    (\"strokeMiterlimit\", string);\n    (\"strokeOpacity\", string);\n    (\"strokeWidth\", string);\n    (\"surfaceScale\", string);\n    (\"systemLanguage\", string);\n    (\"tableValues\", string);\n    (\"targetX\", string);\n    (\"targetY\", string);\n    (\"textAnchor\", string);\n    (\"textDecoration\", string);\n    (\"textLength\", string);\n    (\"textRendering\", string);\n    (\"transform\", string);\n    (\"u1\", string);\n    (\"u2\", string);\n    (\"underlinePosition\", string);\n    (\"underlineThickness\", string);\n    (\"unicode\", string);\n    (\"unicodeBidi\", string);\n    (\"unicodeRange\", string);\n    (\"unitsPerEm\", string);\n    (\"vAlphabetic\", string);\n    (\"vHanging\", string);\n    (\"vIdeographic\", string);\n    (\"vMathematical\", string);\n    (\"values\", string);\n    (\"vectorEffect\", string);\n    (\"version\", string);\n    (\"vertAdvX\", string);\n    (\"vertAdvY\", string);\n    (\"vertOriginX\", string);\n    (\"vertOriginY\", string);\n    (\"viewBox\", string);\n    (\"viewTarget\", string);\n    (\"visibility\", string);\n    (\"widths\", string);\n    (\"wordSpacing\", string);\n    (\"writingMode\", string);\n    (\"x\", string);\n    (\"x1\", string);\n    (\"x2\", string);\n    (\"xChannelSelector\", string);\n    (\"xHeight\", string);\n    (\"xlinkActuate\", string);\n    (\"xlinkArcrole\", string);\n    (\"xlinkHref\", string);\n    (\"xlinkRole\", string);\n    (\"xlinkShow\", string);\n    (\"xlinkTitle\", string);\n    (\"xlinkType\", string);\n    (\"xmlns\", string);\n    (\"xmlnsXlink\", string);\n    
(\"xmlBase\", string);\n    (\"xmlLang\", string);\n    (\"xmlSpace\", string);\n    (\"y\", string);\n    (\"y1\", string);\n    (\"y2\", string);\n    (\"yChannelSelector\", string);\n    (\"z\", string);\n    (\"zoomAndPan\", string);\n    (\"about\", string);\n    (\"datatype\", string);\n    (\"inlist\", string);\n    (\"prefix\", string);\n    (\"property\", string);\n    (\"resource\", string);\n    (\"typeof\", string);\n    (\"vocab\", string);\n    (\"dangerouslySetInnerHTML\", \"{\\\"__html\\\": string}\");\n    (\"suppressContentEditableWarning\", bool);\n  ]\n\n(* List and explanations taken from\n   https://www.tutorialrepublic.com/html-reference/html5-tags.php. *)\nlet htmlElements =\n  [\n    (\"a\", \"Defines a hyperlink.\", false);\n    (\"abbr\", \"Defines an abbreviated form of a longer word or phrase.\", false);\n    (\"acronym\", \"Defines an acronym. Use <abbr> instead.\", true);\n    (\"address\", \"Specifies the author's contact information.\", false);\n    ( \"applet\",\n      \"Embeds a Java applet (mini Java applications) on the page. Use <object> \\\n       instead.\",\n      true );\n    (\"area\", \"Defines a specific area within an image map.\", false);\n    (\"article\", \"Defines an article.\", false);\n    (\"aside\", \"Defines some content loosely related to the page content.\", false);\n    (\"audio\", \"Embeds a sound, or an audio stream in an HTML document.\", false);\n    (\"b\", \"Displays text in a bold style.\", false);\n    (\"base\", \"Defines the base URL for all relative URLs in a document.\", false);\n    (\"basefont\", \"Specifies the base font for a page. Use CSS instead.\", true);\n    ( \"bdi\",\n      \"Represents text that is isolated from its surrounding for the purposes \\\n       of bidirectional text formatting.\",\n      false );\n    (\"bdo\", \"Overrides the current text direction.\", false);\n    (\"big\", \"Displays text in a large size. 
Use CSS instead.\", true);\n    ( \"blockquote\",\n      \"Represents a section that is quoted from another source.\",\n      false );\n    (\"body\", \"Defines the document's body.\", false);\n    (\"br\", \"Produces a single line break.\", false);\n    (\"button\", \"Creates a clickable button.\", false);\n    ( \"canvas\",\n      \"Defines a region in the document, which can be used to draw graphics on \\\n       the fly via scripting (usually JavaScript).\",\n      false );\n    (\"caption\", \"Defines the caption or title of the table.\", false);\n    (\"center\", \"Align contents in the center. Use CSS instead.\", true);\n    (\"cite\", \"Indicates a citation or reference to another source.\", false);\n    (\"code\", \"Specifies text as computer code.\", false);\n    ( \"col\",\n      \"Defines attribute values for one or more columns in a table.\",\n      false );\n    (\"colgroup\", \"Specifies attributes for multiple columns in a table.\", false);\n    ( \"data\",\n      \"Links a piece of content with a machine-readable translation.\",\n      false );\n    ( \"datalist\",\n      \"Represents a set of pre-defined options for an <input> element.\",\n      false );\n    ( \"dd\",\n      \"Specifies a description, or value for the term (<dt>) in a description \\\n       list (<dl>).\",\n      false );\n    (\"del\", \"Represents text that has been deleted from the document.\", false);\n    ( \"details\",\n      \"Represents a widget from which the user can obtain additional \\\n       information or controls on-demand.\",\n      false );\n    (\"dfn\", \"Specifies a definition.\", false);\n    (\"dialog\", \"Defines a dialog box or subwindow.\", false);\n    (\"dir\", \"Defines a directory list. 
Use <ul> instead.\", true);\n    (\"div\", \"Specifies a division or a section in a document.\", false);\n    (\"dl\", \"Defines a description list.\", false);\n    (\"dt\", \"Defines a term (an item) in a description list.\", false);\n    (\"em\", \"Defines emphasized text.\", false);\n    ( \"embed\",\n      \"Embeds external application, typically multimedia content like audio or \\\n       video into an HTML document.\",\n      false );\n    (\"fieldset\", \"Specifies a set of related form fields.\", false);\n    (\"figcaption\", \"Defines a caption or legend for a figure.\", false);\n    (\"figure\", \"Represents a figure illustrated as part of the document.\", false);\n    (\"font\", \"Defines font, color, and size for text. Use CSS instead.\", true);\n    (\"footer\", \"Represents the footer of a document or a section.\", false);\n    (\"form\", \"Defines an HTML form for user input.\", false);\n    (\"frame\", \"Defines a single frame within a frameset.\", true);\n    (\"frameset\", \"Defines a collection of frames or other frameset.\", true);\n    ( \"head\",\n      \"Defines the head portion of the document that contains information \\\n       about the document such as title.\",\n      false );\n    (\"header\", \"Represents the header of a document or a section.\", false);\n    (\"hgroup\", \"Defines a group of headings.\", false);\n    (\"h1\", \"Defines HTML headings.\", false);\n    (\"h2\", \"Defines HTML headings.\", false);\n    (\"h3\", \"Defines HTML headings.\", false);\n    (\"h4\", \"Defines HTML headings.\", false);\n    (\"h5\", \"Defines HTML headings.\", false);\n    (\"h6\", \"Defines HTML headings.\", false);\n    (\"hr\", \"Produce a horizontal line.\", false);\n    (\"html\", \"Defines the root of an HTML document.\", false);\n    (\"i\", \"Displays text in an italic style.\", false);\n    (\"iframe\", \"Displays a URL in an inline frame.\", false);\n    (\"img\", \"Represents an image.\", false);\n    (\"input\", \"Defines an input 
control.\", false);\n    ( \"ins\",\n      \"Defines a block of text that has been inserted into a document.\",\n      false );\n    (\"kbd\", \"Specifies text as keyboard input.\", false);\n    ( \"keygen\",\n      \"Represents a control for generating a public-private key pair.\",\n      false );\n    (\"label\", \"Defines a label for an <input> control.\", false);\n    (\"legend\", \"Defines a caption for a <fieldset> element.\", false);\n    (\"li\", \"Defines a list item.\", false);\n    ( \"link\",\n      \"Defines the relationship between the current document and an external \\\n       resource.\",\n      false );\n    (\"main\", \"Represents the main or dominant content of the document.\", false);\n    (\"map\", \"Defines a client-side image-map.\", false);\n    (\"mark\", \"Represents text highlighted for reference purposes.\", false);\n    (\"menu\", \"Represents a list of commands.\", false);\n    ( \"menuitem\",\n      \"Defines a list (or menuitem) of commands that a user can perform.\",\n      false );\n    (\"meta\", \"Provides structured metadata about the document content.\", false);\n    (\"meter\", \"Represents a scalar measurement within a known range.\", false);\n    (\"nav\", \"Defines a section of navigation links.\", false);\n    ( \"noframes\",\n      \"Defines an alternate content that displays in browsers that do not \\\n       support frames.\",\n      true );\n    ( \"noscript\",\n      \"Defines alternative content to display when the browser doesn't support \\\n       scripting.\",\n      false );\n    (\"object\", \"Defines an embedded object.\", false);\n    (\"ol\", \"Defines an ordered list.\", false);\n    ( \"optgroup\",\n      \"Defines a group of related options in a selection list.\",\n      false );\n    (\"option\", \"Defines an option in a selection list.\", false);\n    (\"output\", \"Represents the result of a calculation.\", false);\n    (\"p\", \"Defines a paragraph.\", false);\n    (\"param\", \"Defines a parameter 
for an object or applet element.\", false);\n    (\"picture\", \"Defines a container for multiple image sources.\", false);\n    (\"pre\", \"Defines a block of preformatted text.\", false);\n    (\"progress\", \"Represents the completion progress of a task.\", false);\n    (\"q\", \"Defines a short inline quotation.\", false);\n    ( \"rp\",\n      \"Provides fall-back parenthesis for browsers that that don't support \\\n       ruby annotations.\",\n      false );\n    ( \"rt\",\n      \"Defines the pronunciation of character presented in a ruby annotations.\",\n      false );\n    (\"ruby\", \"Represents a ruby annotation.\", false);\n    ( \"s\",\n      \"Represents contents that are no longer accurate or no longer relevant.\",\n      false );\n    (\"samp\", \"Specifies text as sample output from a computer program.\", false);\n    ( \"script\",\n      \"Places script in the document for client-side processing.\",\n      false );\n    ( \"section\",\n      \"Defines a section of a document, such as header, footer etc.\",\n      false );\n    (\"select\", \"Defines a selection list within a form.\", false);\n    (\"small\", \"Displays text in a smaller size.\", false);\n    ( \"source\",\n      \"Defines alternative media resources for the media elements like <audio> \\\n       or <video>.\",\n      false );\n    (\"span\", \"Defines an inline styleless section in a document.\", false);\n    (\"strike\", \"Displays text in strikethrough style.\", true);\n    (\"strong\", \"Indicate strongly emphasized text.\", false);\n    ( \"style\",\n      \"Inserts style information (commonly CSS) into the head of a document.\",\n      false );\n    (\"sub\", \"Defines subscripted text.\", false);\n    (\"summary\", \"Defines a summary for the <details> element.\", false);\n    (\"sup\", \"Defines superscripted text.\", false);\n    ( \"svg\",\n      \"Embed SVG (Scalable Vector Graphics) content in an HTML document.\",\n      false );\n    (\"table\", \"Defines a data 
table.\", false);\n    ( \"tbody\",\n      \"Groups a set of rows defining the main body of the table data.\",\n      false );\n    (\"td\", \"Defines a cell in a table.\", false);\n    ( \"template\",\n      \"Defines the fragments of HTML that should be hidden when the page is \\\n       loaded, but can be cloned and inserted in the document by JavaScript.\",\n      false );\n    (\"textarea\", \"Defines a multi-line text input control (text area).\", false);\n    ( \"tfoot\",\n      \"Groups a set of rows summarizing the columns of the table.\",\n      false );\n    (\"th\", \"Defines a header cell in a table.\", false);\n    ( \"thead\",\n      \"Groups a set of rows that describes the column labels of a table.\",\n      false );\n    (\"time\", \"Represents a time and/or date.\", false);\n    (\"title\", \"Defines a title for the document.\", false);\n    (\"tr\", \"Defines a row of cells in a table.\", false);\n    ( \"track\",\n      \"Defines text tracks for the media elements like <audio> or <video>.\",\n      false );\n    (\"tt\", \"Displays text in a teletype style.\", true);\n    (\"u\", \"Displays text with an underline.\", false);\n    (\"ul\", \"Defines an unordered list.\", false);\n    (\"var\", \"Defines a variable.\", false);\n    (\"video\", \"Embeds video content in an HTML document.\", false);\n    (\"wbr\", \"Represents a line break opportunity.\", false);\n  ]\n\nlet getJsxLabels ~componentPath ~findTypeOfValue ~package =\n  match componentPath @ [\"make\"] |> findTypeOfValue with\n  | Some (typ, make_env) ->\n    let rec getFieldsV3 (texp : Types.type_expr) =\n      match texp.desc with\n      | Tfield (name, _, t1, t2) ->\n        let fields = t2 |> getFieldsV3 in\n        if name = \"children\" then fields else (name, t1, make_env) :: fields\n      | Tlink te | Tsubst te | Tpoly (te, []) -> te |> getFieldsV3\n      | Tvar None -> []\n      | _ -> []\n    in\n    let getFieldsV4 ~path ~typeArgs =\n      match References.digConstructor 
~env:make_env ~package path with\n      | Some\n          ( env,\n            {\n              item =\n                {\n                  decl =\n                    {\n                      type_kind = Type_record (labelDecls, _repr);\n                      type_params = typeParams;\n                    };\n                };\n            } ) ->\n        labelDecls\n        |> List.map (fun (ld : Types.label_declaration) ->\n               let name = Ident.name ld.ld_id in\n               let t =\n                 ld.ld_type |> TypeUtils.instantiateType ~typeParams ~typeArgs\n               in\n               (name, t, env))\n      | _ -> []\n    in\n    let rec getLabels (t : Types.type_expr) =\n      match t.desc with\n      | Tlink t1\n      | Tsubst t1\n      | Tpoly (t1, [])\n      | Tconstr (Pident {name = \"function$\"}, [t1; _], _) ->\n        getLabels t1\n      | Tarrow\n          ( Nolabel,\n            {\n              desc =\n                ( Tconstr (* Js.t *) (_, [{desc = Tobject (tObj, _)}], _)\n                | Tobject (tObj, _) );\n            },\n            _,\n            _ ) ->\n        (* JSX V3 *)\n        getFieldsV3 tObj\n      | Tconstr (p, [propsType], _) when Path.name p = \"React.component\" -> (\n        let rec getPropsType (t : Types.type_expr) =\n          match t.desc with\n          | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> getPropsType t1\n          | Tconstr (path, typeArgs, _) when Path.last path = \"props\" ->\n            Some (path, typeArgs)\n          | _ -> None\n        in\n        match propsType |> getPropsType with\n        | Some (path, typeArgs) -> getFieldsV4 ~path ~typeArgs\n        | None -> [])\n      | Tarrow (Nolabel, {desc = Tconstr (path, typeArgs, _)}, _, _)\n        when Path.last path = \"props\" ->\n        (* JSX V4 *)\n        getFieldsV4 ~path ~typeArgs\n      | Tconstr\n          ( clPath,\n            [\n              {\n                desc =\n                  ( Tconstr (* Js.t *) (_, 
[{desc = Tobject (tObj, _)}], _)\n                  | Tobject (tObj, _) );\n              };\n              _;\n            ],\n            _ )\n        when Path.name clPath = \"React.componentLike\" ->\n        (* JSX V3 external or interface *)\n        getFieldsV3 tObj\n      | Tconstr (clPath, [{desc = Tconstr (path, typeArgs, _)}; _], _)\n        when Path.name clPath = \"React.componentLike\"\n             && Path.last path = \"props\" ->\n        (* JSX V4 external or interface *)\n        getFieldsV4 ~path ~typeArgs\n      | Tarrow (Nolabel, typ, _, _) -> (\n        (* Component without the JSX PPX, like a make fn taking a hand-written\n           type props. *)\n        let rec digToConstr typ =\n          match typ.Types.desc with\n          | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> digToConstr t1\n          | Tconstr (path, typeArgs, _) when Path.last path = \"props\" ->\n            Some (path, typeArgs)\n          | _ -> None\n        in\n        match digToConstr typ with\n        | None -> []\n        | Some (path, typeArgs) -> getFieldsV4 ~path ~typeArgs)\n      | _ -> []\n    in\n    typ |> getLabels\n  | None -> []\n\ntype prop = {\n  name: string;\n  posStart: int * int;\n  posEnd: int * int;\n  exp: Parsetree.expression;\n}\n\ntype jsxProps = {\n  compName: Longident.t Location.loc;\n  props: prop list;\n  childrenStart: (int * int) option;\n}\n\nlet findJsxPropsCompletable ~jsxProps ~endPos ~posBeforeCursor\n    ~firstCharBeforeCursorNoWhite ~charAtCursor ~posAfterCompName =\n  let allLabels =\n    List.fold_right\n      (fun prop allLabels -> prop.name :: allLabels)\n      jsxProps.props []\n  in\n  let beforeChildrenStart =\n    match jsxProps.childrenStart with\n    | Some childrenPos -> posBeforeCursor < childrenPos\n    | None -> posBeforeCursor <= endPos\n  in\n  let rec loop props =\n    match props with\n    | prop :: rest ->\n      if prop.posStart <= posBeforeCursor && posBeforeCursor < prop.posEnd then (\n        if Debug.verbose 
() then\n          print_endline \"[jsx_props_completable]--> Cursor on the prop name\";\n\n        Some\n          (Completable.Cjsx\n             ( Utils.flattenLongIdent ~jsx:true jsxProps.compName.txt,\n               prop.name,\n               allLabels )))\n      else if\n        prop.posEnd <= posBeforeCursor\n        && posBeforeCursor < Loc.start prop.exp.pexp_loc\n      then (\n        if Debug.verbose () then\n          print_endline\n            \"[jsx_props_completable]--> Cursor between the prop name and expr \\\n             assigned\";\n        match (firstCharBeforeCursorNoWhite, prop.exp) with\n        | Some '=', {pexp_desc = Pexp_ident {txt = Lident txt}} ->\n          if Debug.verbose () then\n            Printf.printf\n              \"[jsx_props_completable]--> Heuristic for empty JSX prop expr \\\n               completion.\\n\";\n          Some\n            (Cexpression\n               {\n                 contextPath =\n                   CJsxPropValue\n                     {\n                       pathToComponent =\n                         Utils.flattenLongIdent ~jsx:true jsxProps.compName.txt;\n                       propName = prop.name;\n                       emptyJsxPropNameHint = Some txt;\n                     };\n                 nested = [];\n                 prefix = \"\";\n               })\n        | _ -> None)\n      else if prop.exp.pexp_loc |> Loc.hasPos ~pos:posBeforeCursor then (\n        if Debug.verbose () then\n          print_endline \"[jsx_props_completable]--> Cursor on expr assigned\";\n        match\n          CompletionExpressions.traverseExpr prop.exp ~exprPath:[]\n            ~pos:posBeforeCursor ~firstCharBeforeCursorNoWhite\n        with\n        | Some (prefix, nested) ->\n          Some\n            (Cexpression\n               {\n                 contextPath =\n                   CJsxPropValue\n                     {\n                       pathToComponent =\n                         Utils.flattenLongIdent 
~jsx:true jsxProps.compName.txt;\n                       propName = prop.name;\n                       emptyJsxPropNameHint = None;\n                     };\n                 nested = List.rev nested;\n                 prefix;\n               })\n        | _ -> None)\n      else if prop.exp.pexp_loc |> Loc.end_ = (Location.none |> Loc.end_) then (\n        if Debug.verbose () then\n          print_endline \"[jsx_props_completable]--> Loc is broken\";\n        if CompletionExpressions.isExprHole prop.exp then (\n          if Debug.verbose () then\n            print_endline \"[jsx_props_completable]--> Expr was expr hole\";\n          Some\n            (Cexpression\n               {\n                 contextPath =\n                   CJsxPropValue\n                     {\n                       pathToComponent =\n                         Utils.flattenLongIdent ~jsx:true jsxProps.compName.txt;\n                       propName = prop.name;\n                       emptyJsxPropNameHint = None;\n                     };\n                 prefix = \"\";\n                 nested = [];\n               }))\n        else None)\n      else if\n        rest = [] && beforeChildrenStart && charAtCursor = '>'\n        && firstCharBeforeCursorNoWhite = Some '='\n      then (\n        (* This is a special case for: <SomeComponent someProp=> (completing directly after the '=').\n           The completion comes at the end of the component, after the equals sign, but before any\n           children starts, and '>' marks that it's at the end of the component JSX.\n           This little heuristic makes sure we pick up this special case. 
*)\n        if Debug.verbose () then\n          print_endline\n            \"[jsx_props_completable]--> Special case: last prop, '>' after \\\n             cursor\";\n        Some\n          (Cexpression\n             {\n               contextPath =\n                 CJsxPropValue\n                   {\n                     pathToComponent =\n                       Utils.flattenLongIdent ~jsx:true jsxProps.compName.txt;\n                     propName = prop.name;\n                     emptyJsxPropNameHint = None;\n                   };\n               prefix = \"\";\n               nested = [];\n             }))\n      else loop rest\n    | [] ->\n      let afterCompName = posBeforeCursor >= posAfterCompName in\n      if afterCompName && beforeChildrenStart then (\n        if Debug.verbose () then\n          print_endline \"[jsx_props_completable]--> Complete for JSX prop name\";\n        Some\n          (Cjsx\n             ( Utils.flattenLongIdent ~jsx:true jsxProps.compName.txt,\n               \"\",\n               allLabels )))\n      else None\n  in\n  loop jsxProps.props\n\nlet extractJsxProps ~(compName : Longident.t Location.loc) ~args =\n  let thisCaseShouldNotHappen =\n    {\n      compName = Location.mknoloc (Longident.Lident \"\");\n      props = [];\n      childrenStart = None;\n    }\n  in\n  let rec processProps ~acc args =\n    match args with\n    | (Asttypes.Labelled \"children\", {Parsetree.pexp_loc}) :: _ ->\n      {\n        compName;\n        props = List.rev acc;\n        childrenStart =\n          (if pexp_loc.loc_ghost then None else Some (Loc.start pexp_loc));\n      }\n    | ((Labelled s | Optional s), (eProp : Parsetree.expression)) :: rest -> (\n      let namedArgLoc =\n        eProp.pexp_attributes\n        |> List.find_opt (fun ({Asttypes.txt}, _) -> txt = \"res.namedArgLoc\")\n      in\n      match namedArgLoc with\n      | Some ({loc}, _) ->\n        processProps\n          ~acc:\n            ({\n               name = s;\n           
    posStart = Loc.start loc;\n               posEnd = Loc.end_ loc;\n               exp = eProp;\n             }\n            :: acc)\n          rest\n      | None -> processProps ~acc rest)\n    | _ -> thisCaseShouldNotHappen\n  in\n  args |> processProps ~acc:[]\n"
  },
  {
    "path": "analysis/src/CompletionPatterns.ml",
    "content": "open SharedTypes\n\nlet isPatternHole pat =\n  match pat.Parsetree.ppat_desc with\n  | Ppat_extension ({txt = \"rescript.patternhole\"}, _) -> true\n  | _ -> false\n\nlet isPatternTuple pat =\n  match pat.Parsetree.ppat_desc with\n  | Ppat_tuple _ -> true\n  | _ -> false\n\nlet rec traverseTupleItems tupleItems ~nextPatternPath ~resultFromFoundItemNum\n    ~locHasCursor ~firstCharBeforeCursorNoWhite ~posBeforeCursor =\n  let itemNum = ref (-1) in\n  let itemWithCursor =\n    tupleItems\n    |> List.find_map (fun pat ->\n           itemNum := !itemNum + 1;\n           pat\n           |> traversePattern ~patternPath:(nextPatternPath !itemNum)\n                ~locHasCursor ~firstCharBeforeCursorNoWhite ~posBeforeCursor)\n  in\n  match (itemWithCursor, firstCharBeforeCursorNoWhite) with\n  | None, Some ',' ->\n    (* No tuple item has the cursor, but there's a comma before the cursor.\n       Figure out what arg we're trying to complete. Example: (true, <com>, None) *)\n    let posNum = ref (-1) in\n    tupleItems\n    |> List.iteri (fun index pat ->\n           if posBeforeCursor >= Loc.start pat.Parsetree.ppat_loc then\n             posNum := index);\n    if !posNum > -1 then Some (\"\", resultFromFoundItemNum !posNum) else None\n  | v, _ -> v\n\nand traversePattern (pat : Parsetree.pattern) ~patternPath ~locHasCursor\n    ~firstCharBeforeCursorNoWhite ~posBeforeCursor =\n  let someIfHasCursor v debugId =\n    if locHasCursor pat.Parsetree.ppat_loc then (\n      if Debug.verbose () then\n        Printf.printf\n          \"[traversePattern:someIfHasCursor] '%s' has cursor, returning \\n\"\n          debugId;\n      Some v)\n    else None\n  in\n  match pat.ppat_desc with\n  | Ppat_constant _ | Ppat_interval _ -> None\n  | Ppat_lazy p\n  | Ppat_constraint (p, _)\n  | Ppat_alias (p, _)\n  | Ppat_exception p\n  | Ppat_open (_, p) ->\n    p\n    |> traversePattern ~patternPath ~locHasCursor ~firstCharBeforeCursorNoWhite\n         ~posBeforeCursor\n  | 
Ppat_or (p1, p2) -> (\n    let orPatWithItem =\n      [p1; p2]\n      |> List.find_map (fun p ->\n             p\n             |> traversePattern ~patternPath ~locHasCursor\n                  ~firstCharBeforeCursorNoWhite ~posBeforeCursor)\n    in\n    match orPatWithItem with\n    | None when isPatternHole p1 || isPatternHole p2 ->\n      if Debug.verbose () then\n        Printf.printf\n          \"[traversePattern] found or-pattern that was pattern hole\\n\";\n      Some (\"\", patternPath)\n    | v -> v)\n  | Ppat_any ->\n    (* We treat any `_` as an empty completion. This is mainly because we're\n       inserting `_` in snippets and automatically put the cursor there. So\n       letting it trigger an empty completion improves the ergonomics by a\n       lot. *)\n    someIfHasCursor (\"\", patternPath) \"Ppat_any\"\n  | Ppat_var {txt} -> someIfHasCursor (txt, patternPath) \"Ppat_var\"\n  | Ppat_construct ({txt = Lident \"()\"}, None) ->\n    (* switch s { | (<com>) }*)\n    someIfHasCursor\n      (\"\", patternPath @ [Completable.NTupleItem {itemNum = 0}])\n      \"Ppat_construct()\"\n  | Ppat_construct ({txt = Lident prefix}, None) ->\n    someIfHasCursor (prefix, patternPath) \"Ppat_construct(Lident)\"\n  | Ppat_variant (prefix, None) ->\n    someIfHasCursor (\"#\" ^ prefix, patternPath) \"Ppat_variant\"\n  | Ppat_array arrayPatterns ->\n    let nextPatternPath = [Completable.NArray] @ patternPath in\n    if List.length arrayPatterns = 0 && locHasCursor pat.ppat_loc then\n      Some (\"\", nextPatternPath)\n    else\n      arrayPatterns\n      |> List.find_map (fun pat ->\n             pat\n             |> traversePattern ~patternPath:nextPatternPath ~locHasCursor\n                  ~firstCharBeforeCursorNoWhite ~posBeforeCursor)\n  | Ppat_tuple tupleItems when locHasCursor pat.ppat_loc ->\n    tupleItems\n    |> traverseTupleItems ~firstCharBeforeCursorNoWhite ~posBeforeCursor\n         ~locHasCursor\n         ~nextPatternPath:(fun itemNum ->\n           
[Completable.NTupleItem {itemNum}] @ patternPath)\n         ~resultFromFoundItemNum:(fun itemNum ->\n           [Completable.NTupleItem {itemNum = itemNum + 1}] @ patternPath)\n  | Ppat_record ([], _) ->\n    (* Empty fields means we're in a record body `{}`. Complete for the fields. *)\n    someIfHasCursor\n      (\"\", [Completable.NRecordBody {seenFields = []}] @ patternPath)\n      \"Ppat_record(empty)\"\n  | Ppat_record (fields, _) -> (\n    let fieldWithCursor = ref None in\n    let fieldWithPatHole = ref None in\n    fields\n    |> List.iter (fun (fname, f) ->\n           match\n             ( fname.Location.txt,\n               f.Parsetree.ppat_loc\n               |> CursorPosition.classifyLoc ~pos:posBeforeCursor )\n           with\n           | Longident.Lident fname, HasCursor ->\n             fieldWithCursor := Some (fname, f)\n           | Lident fname, _ when isPatternHole f ->\n             fieldWithPatHole := Some (fname, f)\n           | _ -> ());\n    let seenFields =\n      fields\n      |> List.filter_map (fun (fieldName, _f) ->\n             match fieldName with\n             | {Location.txt = Longident.Lident fieldName} -> Some fieldName\n             | _ -> None)\n    in\n    match (!fieldWithCursor, !fieldWithPatHole) with\n    | Some (fname, f), _ | None, Some (fname, f) -> (\n      match f.ppat_desc with\n      | Ppat_extension ({txt = \"rescript.patternhole\"}, _) ->\n        (* A pattern hole means for example `{someField: <com>}`. We want to complete for the type of `someField`.  *)\n        someIfHasCursor\n          ( \"\",\n            [Completable.NFollowRecordField {fieldName = fname}] @ patternPath\n          )\n          \"patternhole\"\n      | Ppat_var {txt} ->\n        (* A var means `{s}` or similar. Complete for fields. 
*)\n        someIfHasCursor\n          (txt, [Completable.NRecordBody {seenFields}] @ patternPath)\n          \"Ppat_var #2\"\n      | _ ->\n        f\n        |> traversePattern\n             ~patternPath:\n               ([Completable.NFollowRecordField {fieldName = fname}]\n               @ patternPath)\n             ~locHasCursor ~firstCharBeforeCursorNoWhite ~posBeforeCursor)\n    | None, None -> (\n      (* Figure out if we're completing for a new field.\n         If the cursor is inside of the record body, but no field has the cursor,\n         and there's no pattern hole. Check the first char to the left of the cursor,\n         ignoring white space. If that's a comma, we assume you're completing for a new field. *)\n      match firstCharBeforeCursorNoWhite with\n      | Some ',' ->\n        someIfHasCursor\n          (\"\", [Completable.NRecordBody {seenFields}] @ patternPath)\n          \"firstCharBeforeCursorNoWhite:,\"\n      | _ -> None))\n  | Ppat_construct\n      ( {txt},\n        Some {ppat_loc; ppat_desc = Ppat_construct ({txt = Lident \"()\"}, _)} )\n    when locHasCursor ppat_loc ->\n    (* Empty payload with cursor, like: Test(<com>) *)\n    Some\n      ( \"\",\n        [\n          Completable.NVariantPayload\n            {constructorName = Utils.getUnqualifiedName txt; itemNum = 0};\n        ]\n        @ patternPath )\n  | Ppat_construct ({txt}, Some pat)\n    when posBeforeCursor >= (pat.ppat_loc |> Loc.end_)\n         && firstCharBeforeCursorNoWhite = Some ','\n         && isPatternTuple pat = false ->\n    (* Empty payload with trailing ',', like: Test(true, <com>) *)\n    Some\n      ( \"\",\n        [\n          Completable.NVariantPayload\n            {constructorName = Utils.getUnqualifiedName txt; itemNum = 1};\n        ]\n        @ patternPath )\n  | Ppat_construct ({txt}, Some {ppat_loc; ppat_desc = Ppat_tuple tupleItems})\n    when locHasCursor ppat_loc ->\n    tupleItems\n    |> traverseTupleItems ~locHasCursor 
~firstCharBeforeCursorNoWhite\n         ~posBeforeCursor\n         ~nextPatternPath:(fun itemNum ->\n           [\n             Completable.NVariantPayload\n               {constructorName = Utils.getUnqualifiedName txt; itemNum};\n           ]\n           @ patternPath)\n         ~resultFromFoundItemNum:(fun itemNum ->\n           [\n             Completable.NVariantPayload\n               {\n                 constructorName = Utils.getUnqualifiedName txt;\n                 itemNum = itemNum + 1;\n               };\n           ]\n           @ patternPath)\n  | Ppat_construct ({txt}, Some p) when locHasCursor pat.ppat_loc ->\n    p\n    |> traversePattern ~locHasCursor ~firstCharBeforeCursorNoWhite\n         ~posBeforeCursor\n         ~patternPath:\n           ([\n              Completable.NVariantPayload\n                {constructorName = Utils.getUnqualifiedName txt; itemNum = 0};\n            ]\n           @ patternPath)\n  | Ppat_variant\n      (txt, Some {ppat_loc; ppat_desc = Ppat_construct ({txt = Lident \"()\"}, _)})\n    when locHasCursor ppat_loc ->\n    (* Empty payload with cursor, like: #test(<com>) *)\n    Some\n      ( \"\",\n        [Completable.NPolyvariantPayload {constructorName = txt; itemNum = 0}]\n        @ patternPath )\n  | Ppat_variant (txt, Some pat)\n    when posBeforeCursor >= (pat.ppat_loc |> Loc.end_)\n         && firstCharBeforeCursorNoWhite = Some ','\n         && isPatternTuple pat = false ->\n    (* Empty payload with trailing ',', like: #test(true, <com>) *)\n    Some\n      ( \"\",\n        [Completable.NPolyvariantPayload {constructorName = txt; itemNum = 1}]\n        @ patternPath )\n  | Ppat_variant (txt, Some {ppat_loc; ppat_desc = Ppat_tuple tupleItems})\n    when locHasCursor ppat_loc ->\n    tupleItems\n    |> traverseTupleItems ~locHasCursor ~firstCharBeforeCursorNoWhite\n         ~posBeforeCursor\n         ~nextPatternPath:(fun itemNum ->\n           [Completable.NPolyvariantPayload {constructorName = txt; itemNum}]\n   
        @ patternPath)\n         ~resultFromFoundItemNum:(fun itemNum ->\n           [\n             Completable.NPolyvariantPayload\n               {constructorName = txt; itemNum = itemNum + 1};\n           ]\n           @ patternPath)\n  | Ppat_variant (txt, Some p) when locHasCursor pat.ppat_loc ->\n    p\n    |> traversePattern ~locHasCursor ~firstCharBeforeCursorNoWhite\n         ~posBeforeCursor\n         ~patternPath:\n           ([\n              Completable.NPolyvariantPayload\n                {constructorName = txt; itemNum = 0};\n            ]\n           @ patternPath)\n  | _ -> None\n"
  },
  {
    "path": "analysis/src/Completions.ml",
    "content": "let getCompletions ~debug ~path ~pos ~currentFile ~forHover =\n  let textOpt = Files.readFile currentFile in\n  match textOpt with\n  | None | Some \"\" -> None\n  | Some text -> (\n    match\n      CompletionFrontEnd.completionWithParser ~debug ~path ~posCursor:pos\n        ~currentFile ~text\n    with\n    | None -> None\n    | Some (completable, scope) -> (\n      (* Only perform expensive ast operations if there are completables *)\n      match Cmt.loadFullCmtFromPath ~path with\n      | None -> None\n      | Some full ->\n        let env = SharedTypes.QueryEnv.fromFile full.file in\n        let completables =\n          completable\n          |> CompletionBackEnd.processCompletable ~debug ~full ~pos ~scope ~env\n               ~forHover\n        in\n        Some (completables, full, scope)))\n"
  },
  {
    "path": "analysis/src/CreateInterface.ml",
    "content": "module SourceFileExtractor = struct\n  let create ~path =\n    match Files.readFile path with\n    | None -> [||]\n    | Some text -> text |> String.split_on_char '\\n' |> Array.of_list\n\n  let extract lines ~posStart ~posEnd =\n    let lineStart, colStart = posStart in\n    let lineEnd, colEnd = posEnd in\n    let res = ref [] in\n    if lineStart < 0 || lineStart > lineEnd || lineEnd >= Array.length lines\n    then []\n    else (\n      for n = lineEnd downto lineStart do\n        let line = lines.(n) in\n        let len = String.length line in\n        if n = lineStart && n = lineEnd then (\n          if colStart >= 0 && colStart < colEnd && colEnd <= len then\n            let indent = String.make colStart ' ' in\n            res :=\n              (indent ^ String.sub line colStart (colEnd - colStart)) :: !res)\n        else if n = lineStart then (\n          if colStart >= 0 && colStart < len then\n            let indent = String.make colStart ' ' in\n            res := (indent ^ String.sub line colStart (len - colStart)) :: !res)\n        else if n = lineEnd then (\n          if colEnd > 0 && colEnd <= len then\n            res := String.sub line 0 colEnd :: !res)\n        else res := line :: !res\n      done;\n      !res)\nend\n\nmodule AttributesUtils : sig\n  type t\n\n  val make : string list -> t\n\n  val contains : string -> t -> bool\n\n  val toString : t -> string\nend = struct\n  type attribute = {line: int; offset: int; name: string}\n  type t = attribute list\n  type parseState = Search | Collect of int\n\n  let make lines =\n    let makeAttr lineIdx attrOffsetStart attrOffsetEnd line =\n      {\n        line = lineIdx;\n        offset = attrOffsetStart;\n        name = String.sub line attrOffsetStart (attrOffsetEnd - attrOffsetStart);\n      }\n    in\n    let res = ref [] in\n    lines\n    |> List.iteri (fun lineIdx line ->\n           let state = ref Search in\n           for i = 0 to String.length line - 1 do\n             let 
ch = line.[i] in\n             match (!state, ch) with\n             | Search, '@' -> state := Collect i\n             | Collect attrOffset, ' ' ->\n               res := makeAttr lineIdx attrOffset i line :: !res;\n               state := Search\n             | Search, _ | Collect _, _ -> ()\n           done;\n\n           match !state with\n           | Collect attrOffset ->\n             res :=\n               makeAttr lineIdx attrOffset (String.length line) line :: !res\n           | _ -> ());\n    !res |> List.rev\n\n  let contains attributeForSearch t =\n    t |> List.exists (fun {name} -> name = attributeForSearch)\n\n  let toString t =\n    match t with\n    | [] -> \"\"\n    | {line} :: _ ->\n      let prevLine = ref line in\n      let buffer = ref \"\" in\n      let res = ref [] in\n      t\n      |> List.iter (fun attr ->\n             let {line; offset; name} = attr in\n\n             if line <> !prevLine then (\n               res := !buffer :: !res;\n               buffer := \"\";\n               prevLine := line);\n\n             let indent = String.make (offset - String.length !buffer) ' ' in\n             buffer := !buffer ^ indent ^ name);\n      res := !buffer :: !res;\n      !res |> List.rev |> String.concat \"\\n\"\nend\n\nlet printSignature ~extractor ~signature =\n  let objectPropsToFun objTyp ~rhs ~makePropsType =\n    let propsTbl = Hashtbl.create 1 in\n    (* Process the object type of the make function, and map field names to types. 
*)\n    let rec processObjType typ =\n      match typ.Types.desc with\n      | Tfield (name, kind, {desc = Tlink t | Tsubst t | Tpoly (t, [])}, obj) ->\n        processObjType {typ with desc = Tfield (name, kind, t, obj)}\n      | Tfield (name, _kind, t, obj) ->\n        Hashtbl.add propsTbl name t;\n        processObjType obj\n      | Tnil -> ()\n      | _ -> (* should not happen *) assert false\n    in\n\n    processObjType objTyp;\n\n    (* Traverse the type of the makeProps function, and fill the prop types\n       by using the corresponding field in the object type of the make function *)\n    let rec fillPropsTypes makePropsType ~rhs =\n      match makePropsType.Types.desc with\n      | Tarrow (((Labelled lbl | Optional lbl) as argLbl), _, retT, c) -> (\n        match Hashtbl.find_opt propsTbl lbl with\n        | Some propT ->\n          {\n            makePropsType with\n            desc = Tarrow (argLbl, propT, fillPropsTypes retT ~rhs, c);\n          }\n        | None -> fillPropsTypes retT ~rhs)\n      | _ -> rhs\n    in\n\n    match objTyp.Types.desc with\n    | Tnil ->\n      (* component with zero props *)\n      {\n        objTyp with\n        desc =\n          Tarrow\n            ( Nolabel,\n              Ctype.newconstr (Path.Pident (Ident.create \"unit\")) [],\n              rhs,\n              Cok );\n      }\n    | _ -> fillPropsTypes makePropsType ~rhs\n  in\n\n  Printtyp.reset_names ();\n  let sigItemToString (item : Outcometree.out_sig_item) =\n    item |> Res_outcome_printer.print_out_sig_item_doc\n    |> Res_doc.to_string ~width:!Res_cli.ResClflags.width\n  in\n\n  let genSigStrForInlineAttr lines attributes id vd =\n    let divider = if List.length lines > 1 then \"\\n\" else \" \" in\n\n    let sigStr =\n      sigItemToString\n        (Printtyp.tree_of_value_description id {vd with val_kind = Val_reg})\n    in\n\n    (attributes |> AttributesUtils.toString) ^ divider ^ sigStr ^ \"\\n\"\n  in\n\n  let buf = Buffer.create 10 in\n\n  let rec 
getComponentTypeV3 (typ : Types.type_expr) =\n    let reactElement =\n      Ctype.newconstr (Pdot (Pident (Ident.create \"React\"), \"element\", 0)) []\n    in\n    match typ.desc with\n    | Tconstr (Pident {name = \"function$\"}, [typ; _], _) ->\n      getComponentTypeV3 typ\n    | Tarrow (_, {desc = Tobject (tObj, _)}, retType, _) -> Some (tObj, retType)\n    | Tconstr\n        ( Pdot (Pident {name = \"React\"}, \"component\", _),\n          [{desc = Tobject (tObj, _)}],\n          _ ) ->\n      Some (tObj, reactElement)\n    | Tconstr\n        ( Pdot (Pident {name = \"React\"}, \"componentLike\", _),\n          [{desc = Tobject (tObj, _)}; retType],\n          _ ) ->\n      Some (tObj, retType)\n    | _ -> None\n  in\n\n  let rec getComponentTypeV4 (typ : Types.type_expr) =\n    let reactElement =\n      Ctype.newconstr (Pdot (Pident (Ident.create \"React\"), \"element\", 0)) []\n    in\n    match typ.desc with\n    | Tconstr (Pident {name = \"function$\"}, [typ; _], _) ->\n      getComponentTypeV4 typ\n    | Tarrow (_, {desc = Tconstr (Path.Pident propsId, typeArgs, _)}, retType, _)\n      when Ident.name propsId = \"props\" ->\n      Some (typeArgs, retType)\n    | Tconstr\n        ( Pdot (Pident {name = \"React\"}, \"component\", _),\n          [{desc = Tconstr (Path.Pident propsId, typeArgs, _)}],\n          _ )\n      when Ident.name propsId = \"props\" ->\n      Some (typeArgs, reactElement)\n    | Tconstr\n        ( Pdot (Pident {name = \"React\"}, \"componentLike\", _),\n          [{desc = Tconstr (Path.Pident propsId, typeArgs, _)}; retType],\n          _ )\n      when Ident.name propsId = \"props\" ->\n      Some (typeArgs, retType)\n    | _ -> None\n  in\n\n  let rec processSignature ~indent (signature : Types.signature) : unit =\n    match signature with\n    | Sig_value\n        ( makePropsId (* makeProps *),\n          {val_loc = makePropsLoc; val_type = makePropsType} )\n      :: Sig_value (makeId (* make *), makeValueDesc)\n      :: rest\n      
when Ident.name makePropsId = Ident.name makeId ^ \"Props\"\n           && ((* from implementation *) makePropsLoc.loc_ghost\n              ||\n              (* from interface *)\n              makePropsLoc = makeValueDesc.val_loc)\n           && getComponentTypeV3 makeValueDesc.val_type <> None ->\n      (*\n        {\"name\": string} => retType  ~~>  (~name:string) => retType\n        React.component<{\"name\": string}>  ~~>  (~name:string) => React.element\n        React.componentLike<{\"name\": string}, retType>  ~~>  (~name:string) => retType\n      *)\n      let tObj, retType =\n        match getComponentTypeV3 makeValueDesc.val_type with\n        | None -> assert false\n        | Some (tObj, retType) -> (tObj, retType)\n      in\n      let funType = tObj |> objectPropsToFun ~rhs:retType ~makePropsType in\n      let newItemStr =\n        sigItemToString\n          (Printtyp.tree_of_value_description makeId\n             {makeValueDesc with val_type = funType})\n      in\n      Buffer.add_string buf (indent ^ \"@react.component\\n\");\n      Buffer.add_string buf (indent ^ newItemStr ^ \"\\n\");\n      processSignature ~indent rest\n    | Sig_type\n        ( propsId,\n          {\n            type_params;\n            type_kind = Type_record (labelDecls, recordRepresentation);\n          },\n          _ )\n      :: Sig_value (makeId (* make *), makeValueDesc)\n      :: rest\n      when Ident.name propsId = \"props\"\n           && getComponentTypeV4 makeValueDesc.val_type <> None ->\n      (* PPX V4 component declaration:\n         type props = {...}\n         let v = ...\n      *)\n      let newItemStr =\n        let typeArgs, retType =\n          match getComponentTypeV4 makeValueDesc.val_type with\n          | Some x -> x\n          | None -> assert false\n        in\n        let rec mkFunType (labelDecls : Types.label_declaration list) =\n          match labelDecls with\n          | [] -> retType\n          | labelDecl :: rest ->\n            let propType 
=\n              TypeUtils.instantiateType ~typeParams:type_params ~typeArgs\n                labelDecl.ld_type\n            in\n            let lblName = labelDecl.ld_id |> Ident.name in\n            let lbl =\n              let optLbls =\n                match recordRepresentation with\n                | Record_optional_labels optLbls -> optLbls\n                | _ -> []\n              in\n              if List.mem lblName optLbls then Asttypes.Optional lblName\n              else Labelled lblName\n            in\n            {retType with desc = Tarrow (lbl, propType, mkFunType rest, Cok)}\n        in\n        let funType =\n          if List.length labelDecls = 0 (* No props *) then\n            let tUnit =\n              Ctype.newconstr (Path.Pident (Ident.create \"unit\")) []\n            in\n            {retType with desc = Tarrow (Nolabel, tUnit, retType, Cok)}\n          else mkFunType labelDecls\n        in\n        sigItemToString\n          (Printtyp.tree_of_value_description makeId\n             {makeValueDesc with val_type = funType})\n      in\n      Buffer.add_string buf (indent ^ \"@react.component\\n\");\n      Buffer.add_string buf (indent ^ newItemStr ^ \"\\n\");\n      processSignature ~indent rest\n    | Sig_module (id, modDecl, recStatus) :: rest ->\n      let colonOrEquals =\n        match modDecl.md_type with\n        | Mty_alias _ -> \" = \"\n        | _ -> \": \"\n      in\n      Buffer.add_string buf\n        (indent\n        ^ (match recStatus with\n          | Trec_not -> \"module \"\n          | Trec_first -> \"module rec \"\n          | Trec_next -> \"and \")\n        ^ Ident.name id ^ colonOrEquals);\n      processModuleType ~indent modDecl.md_type;\n      Buffer.add_string buf \"\\n\";\n      processSignature ~indent rest\n    | Sig_modtype (id, mtd) :: rest ->\n      let () =\n        match mtd.mtd_type with\n        | None ->\n          Buffer.add_string buf (indent ^ \"module type \" ^ Ident.name id ^ \"\\n\")\n        | Some 
mt ->\n          Buffer.add_string buf (indent ^ \"module type \" ^ Ident.name id ^ \" = \");\n          processModuleType ~indent mt;\n          Buffer.add_string buf \"\\n\"\n      in\n      processSignature ~indent rest\n    | Sig_value (id, ({val_kind = Val_prim prim; val_loc} as vd)) :: items\n      when prim.prim_native_name <> \"\" && prim.prim_native_name.[0] = '\\132' ->\n      (* Rescript primitive name, e.g. @val external ... *)\n      let lines =\n        let posStart, posEnd = Loc.range val_loc in\n        extractor |> SourceFileExtractor.extract ~posStart ~posEnd\n      in\n      let attributes = AttributesUtils.make lines in\n\n      if AttributesUtils.contains \"@inline\" attributes then\n        (* Generate type signature for @inline declaration *)\n        Buffer.add_string buf (genSigStrForInlineAttr lines attributes id vd)\n      else\n        (* Copy the external declaration verbatim from the implementation file *)\n        Buffer.add_string buf ((lines |> String.concat \"\\n\") ^ \"\\n\");\n\n      processSignature ~indent items\n    | Sig_value (id, vd) :: items ->\n      let newItemStr =\n        sigItemToString (Printtyp.tree_of_value_description id vd)\n      in\n      Buffer.add_string buf (indent ^ newItemStr ^ \"\\n\");\n      processSignature ~indent items\n    | Sig_type (id, typeDecl, resStatus) :: items ->\n      let newItemStr =\n        sigItemToString\n          (Printtyp.tree_of_type_declaration id typeDecl resStatus)\n      in\n      Buffer.add_string buf (indent ^ newItemStr ^ \"\\n\");\n      processSignature ~indent items\n    | Sig_typext (id, extConstr, extStatus) :: items ->\n      let newItemStr =\n        sigItemToString\n          (Printtyp.tree_of_extension_constructor id extConstr extStatus)\n      in\n      Buffer.add_string buf (indent ^ newItemStr ^ \"\\n\");\n      processSignature ~indent items\n    | Sig_class _ :: items ->\n      (* not needed *)\n      processSignature ~indent items\n    | Sig_class_type _ :: 
items ->\n      (* not needed *)\n      processSignature ~indent items\n    | [] -> ()\n  and processModuleType ~indent (mt : Types.module_type) =\n    match mt with\n    | Mty_signature signature ->\n      Buffer.add_string buf \"{\\n\";\n      processSignature ~indent:(indent ^ \"  \") signature;\n      Buffer.add_string buf (indent ^ \"}\")\n    | Mty_functor _ ->\n      let rec collectFunctorArgs ~args (mt : Types.module_type) =\n        match mt with\n        | Mty_functor (id, None, mt) when Ident.name id = \"*\" ->\n          (* AST encoding of functor with no arguments *)\n          collectFunctorArgs ~args mt\n        | Mty_functor (id, mto, mt) ->\n          collectFunctorArgs ~args:((id, mto) :: args) mt\n        | mt -> (List.rev args, mt)\n      in\n      let args, retMt = collectFunctorArgs ~args:[] mt in\n      Buffer.add_string buf \"(\";\n      args\n      |> List.iter (fun (id, mto) ->\n             Buffer.add_string buf (\"\\n\" ^ indent ^ \"  \");\n             (match mto with\n             | None -> Buffer.add_string buf (Ident.name id)\n             | Some mt ->\n               Buffer.add_string buf (Ident.name id ^ \": \");\n               processModuleType ~indent:(indent ^ \"  \") mt);\n             Buffer.add_string buf \",\");\n      if args <> [] then Buffer.add_string buf (\"\\n\" ^ indent);\n      Buffer.add_string buf (\") =>\\n\" ^ indent);\n      processModuleType ~indent retMt\n    | Mty_ident path | Mty_alias (_, path) ->\n      let rec outIdentToString (ident : Outcometree.out_ident) =\n        match ident with\n        | Oide_ident s -> s\n        | Oide_dot (ident, s) -> outIdentToString ident ^ \".\" ^ s\n        | Oide_apply (call, arg) ->\n          outIdentToString call ^ \"(\" ^ outIdentToString arg ^ \")\"\n      in\n      Buffer.add_string buf (outIdentToString (Printtyp.tree_of_path path))\n  in\n\n  processSignature ~indent:\"\" signature;\n  Buffer.contents buf\n\nlet command ~path ~cmiFile =\n  match 
Shared.tryReadCmi cmiFile with\n  | Some cmi_info ->\n    (* For reading the config *)\n    let _ = Cmt.loadFullCmtFromPath ~path in\n    let extractor = SourceFileExtractor.create ~path in\n    printSignature ~extractor ~signature:cmi_info.cmi_sign\n  | None -> \"\"\n"
  },
  {
    "path": "analysis/src/DceCommand.ml",
    "content": "let command () =\n  Reanalyze.RunConfig.dce ();\n  Reanalyze.runAnalysis ~cmtRoot:None;\n  let issues = !Reanalyze.Log_.Stats.issues in\n  Printf.printf \"issues:%d\\n\" (List.length issues)\n"
  },
  {
    "path": "analysis/src/Debug.ml",
    "content": "type debugLevel = Off | Regular | Verbose\n\nlet debugLevel = ref Off\n\nlet log s =\n  match !debugLevel with\n  | Regular | Verbose -> print_endline s\n  | Off -> ()\n\nlet debugPrintEnv (env : SharedTypes.QueryEnv.t) =\n  env.pathRev @ [env.file.moduleName] |> List.rev |> String.concat \".\"\n\nlet verbose () = !debugLevel = Verbose\n"
  },
  {
    "path": "analysis/src/Diagnostics.ml",
    "content": "let document_syntax ~path =\n  let get_diagnostics diagnostics =\n    diagnostics\n    |> List.map (fun diagnostic ->\n           let _, startline, startcol =\n             Location.get_pos_info (Res_diagnostics.get_start_pos diagnostic)\n           in\n           let _, endline, endcol =\n             Location.get_pos_info (Res_diagnostics.get_end_pos diagnostic)\n           in\n           Protocol.stringifyDiagnostic\n             {\n               range =\n                 {\n                   start = {line = startline - 1; character = startcol};\n                   end_ = {line = endline - 1; character = endcol};\n                 };\n               message = Res_diagnostics.explain diagnostic;\n               severity = 1;\n             })\n  in\n  if FindFiles.isImplementation path then\n    let parseImplementation =\n      Res_driver.parsing_engine.parse_implementation ~for_printer:false\n        ~filename:path\n    in\n    get_diagnostics parseImplementation.diagnostics\n  else if FindFiles.isInterface path then\n    let parseInterface =\n      Res_driver.parsing_engine.parse_interface ~for_printer:false\n        ~filename:path\n    in\n    get_diagnostics parseInterface.diagnostics\n  else []\n"
  },
  {
    "path": "analysis/src/DocumentSymbol.ml",
    "content": "(* https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_documentSymbol *)\n\ntype kind =\n  | Module\n  | Property\n  | Constructor\n  | Function\n  | Variable\n  | Constant\n  | String\n  | Number\n  | EnumMember\n  | TypeParameter\n\nlet kindNumber = function\n  | Module -> 2\n  | Property -> 7\n  | Constructor -> 9\n  | Function -> 12\n  | Variable -> 13\n  | Constant -> 14\n  | String -> 15\n  | Number -> 16\n  | EnumMember -> 22\n  | TypeParameter -> 26\n\nlet command ~path =\n  let symbols = ref [] in\n  let addSymbol name loc kind =\n    if\n      (not loc.Location.loc_ghost)\n      && loc.loc_start.pos_cnum >= 0\n      && loc.loc_end.pos_cnum >= 0\n    then\n      let range = Utils.cmtLocToRange loc in\n      let symbol : Protocol.documentSymbolItem =\n        {name; range; kind = kindNumber kind; children = []}\n      in\n      symbols := symbol :: !symbols\n  in\n  let rec exprKind (exp : Parsetree.expression) =\n    match exp.pexp_desc with\n    | Pexp_fun _ -> Function\n    | Pexp_function _ -> Function\n    | Pexp_constraint (e, _) -> exprKind e\n    | Pexp_constant (Pconst_string _) -> String\n    | Pexp_constant (Pconst_float _ | Pconst_integer _) -> Number\n    | Pexp_constant _ -> Constant\n    | _ -> Variable\n  in\n  let processTypeKind (tk : Parsetree.type_kind) =\n    match tk with\n    | Ptype_variant constrDecls ->\n      constrDecls\n      |> List.iter (fun (cd : Parsetree.constructor_declaration) ->\n             addSymbol cd.pcd_name.txt cd.pcd_loc EnumMember)\n    | Ptype_record labelDecls ->\n      labelDecls\n      |> List.iter (fun (ld : Parsetree.label_declaration) ->\n             addSymbol ld.pld_name.txt ld.pld_loc Property)\n    | _ -> ()\n  in\n  let processTypeDeclaration (td : Parsetree.type_declaration) =\n    addSymbol td.ptype_name.txt td.ptype_loc TypeParameter;\n    processTypeKind td.ptype_kind\n  in\n  let processValueDescription (vd : 
Parsetree.value_description) =\n    addSymbol vd.pval_name.txt vd.pval_loc Variable\n  in\n  let processModuleBinding (mb : Parsetree.module_binding) =\n    addSymbol mb.pmb_name.txt mb.pmb_loc Module\n  in\n  let processModuleDeclaration (md : Parsetree.module_declaration) =\n    addSymbol md.pmd_name.txt md.pmd_loc Module\n  in\n  let processExtensionConstructor (et : Parsetree.extension_constructor) =\n    addSymbol et.pext_name.txt et.pext_loc Constructor\n  in\n  let value_binding (iterator : Ast_iterator.iterator)\n      (vb : Parsetree.value_binding) =\n    (match vb.pvb_pat.ppat_desc with\n    | Ppat_var {txt} | Ppat_constraint ({ppat_desc = Ppat_var {txt}}, _) ->\n      addSymbol txt vb.pvb_loc (exprKind vb.pvb_expr)\n    | _ -> ());\n    Ast_iterator.default_iterator.value_binding iterator vb\n  in\n  let expr (iterator : Ast_iterator.iterator) (e : Parsetree.expression) =\n    (match e.pexp_desc with\n    | Pexp_letmodule ({txt}, modExpr, _) ->\n      addSymbol txt {e.pexp_loc with loc_end = modExpr.pmod_loc.loc_end} Module\n    | Pexp_letexception (ec, _) -> processExtensionConstructor ec\n    | _ -> ());\n    Ast_iterator.default_iterator.expr iterator e\n  in\n  let structure_item (iterator : Ast_iterator.iterator)\n      (item : Parsetree.structure_item) =\n    (match item.pstr_desc with\n    | Pstr_value _ -> ()\n    | Pstr_primitive vd -> processValueDescription vd\n    | Pstr_type (_, typDecls) -> typDecls |> List.iter processTypeDeclaration\n    | Pstr_module mb -> processModuleBinding mb\n    | Pstr_recmodule mbs -> mbs |> List.iter processModuleBinding\n    | Pstr_exception ec -> processExtensionConstructor ec\n    | _ -> ());\n    Ast_iterator.default_iterator.structure_item iterator item\n  in\n  let signature_item (iterator : Ast_iterator.iterator)\n      (item : Parsetree.signature_item) =\n    (match item.psig_desc with\n    | Psig_value vd -> processValueDescription vd\n    | Psig_type (_, typDecls) -> typDecls |> List.iter 
processTypeDeclaration\n    | Psig_module md -> processModuleDeclaration md\n    | Psig_recmodule mds -> mds |> List.iter processModuleDeclaration\n    | Psig_exception ec -> processExtensionConstructor ec\n    | _ -> ());\n    Ast_iterator.default_iterator.signature_item iterator item\n  in\n  let module_expr (iterator : Ast_iterator.iterator)\n      (me : Parsetree.module_expr) =\n    match me.pmod_desc with\n    | Pmod_constraint (modExpr, _modTyp) ->\n      (* Don't double-list items in implementation and interface *)\n      Ast_iterator.default_iterator.module_expr iterator modExpr\n    | _ -> Ast_iterator.default_iterator.module_expr iterator me\n  in\n  let iterator =\n    {\n      Ast_iterator.default_iterator with\n      expr;\n      module_expr;\n      signature_item;\n      structure_item;\n      value_binding;\n    }\n  in\n\n  (if Filename.check_suffix path \".res\" then\n     let parser =\n       Res_driver.parsing_engine.parse_implementation ~for_printer:false\n     in\n     let {Res_driver.parsetree = structure} = parser ~filename:path in\n     iterator.structure iterator structure |> ignore\n   else\n     let parser =\n       Res_driver.parsing_engine.parse_interface ~for_printer:false\n     in\n     let {Res_driver.parsetree = signature} = parser ~filename:path in\n     iterator.signature iterator signature |> ignore);\n  let isInside\n      ({\n         range =\n           {\n             start = {line = sl1; character = sc1};\n             end_ = {line = el1; character = ec1};\n           };\n       } :\n        Protocol.documentSymbolItem)\n      ({\n         range =\n           {\n             start = {line = sl2; character = sc2};\n             end_ = {line = el2; character = ec2};\n           };\n       } :\n        Protocol.documentSymbolItem) =\n    (sl1 > sl2 || (sl1 = sl2 && sc1 >= sc2))\n    && (el1 < el2 || (el1 = el2 && ec1 <= ec2))\n  in\n  let compareSymbol (s1 : Protocol.documentSymbolItem)\n      (s2 : Protocol.documentSymbolItem) 
=\n    let n = compare s1.range.start.line s2.range.start.line in\n    if n <> 0 then n\n    else\n      let n = compare s1.range.start.character s2.range.start.character in\n      if n <> 0 then n\n      else\n        let n = compare s1.range.end_.line s2.range.end_.line in\n        if n <> 0 then n\n        else compare s1.range.end_.character s2.range.end_.character\n  in\n  let rec addSymbolToChildren ~symbol children =\n    match children with\n    | [] -> [symbol]\n    | last :: rest ->\n      if isInside symbol last then\n        let newLast =\n          {last with children = last.children |> addSymbolToChildren ~symbol}\n        in\n        newLast :: rest\n      else symbol :: children\n  in\n  let rec addSortedSymbolsToChildren ~sortedSymbols children =\n    match sortedSymbols with\n    | [] -> children\n    | firstSymbol :: rest ->\n      children\n      |> addSymbolToChildren ~symbol:firstSymbol\n      |> addSortedSymbolsToChildren ~sortedSymbols:rest\n  in\n  let sortedSymbols = !symbols |> List.sort compareSymbol in\n  let symbolsWithChildren = [] |> addSortedSymbolsToChildren ~sortedSymbols in\n  print_endline (Protocol.stringifyDocumentSymbolItems symbolsWithChildren)\n"
  },
  {
    "path": "analysis/src/DotCompletionUtils.ml",
    "content": "let filterRecordFields ~env ~recordAsString ~prefix ~exact fields =\n  fields\n  |> Utils.filterMap (fun (field : SharedTypes.field) ->\n         if Utils.checkName field.fname.txt ~prefix ~exact then\n           Some\n             (SharedTypes.Completion.create field.fname.txt ~env\n                ?deprecated:field.deprecated ~docstring:field.docstring\n                ~kind:(SharedTypes.Completion.Field (field, recordAsString)))\n         else None)\n\nlet fieldCompletionsForDotCompletion ?posOfDot typ ~env ~package ~prefix ~exact\n    =\n  let asObject = typ |> TypeUtils.extractObjectType ~env ~package in\n  match asObject with\n  | Some (objEnv, obj) ->\n    (* Handle obj completion via dot *)\n    if Debug.verbose () then\n      Printf.printf \"[dot_completion]--> Obj type found:\\n\";\n    obj |> TypeUtils.getObjFields\n    |> Utils.filterMap (fun (field, _typ) ->\n           if Utils.checkName field ~prefix ~exact then\n             let fullObjFieldName = Printf.sprintf \"[\\\"%s\\\"]\" field in\n             Some\n               (SharedTypes.Completion.create fullObjFieldName ~synthetic:true\n                  ~insertText:fullObjFieldName ~env:objEnv\n                  ~kind:(SharedTypes.Completion.ObjLabel typ)\n                  ?additionalTextEdits:\n                    (match posOfDot with\n                    | None -> None\n                    | Some posOfDot ->\n                      Some\n                        (TypeUtils.makeAdditionalTextEditsForRemovingDot\n                           posOfDot)))\n           else None)\n  | None -> (\n    match typ |> TypeUtils.extractRecordType ~env ~package with\n    | Some (env, fields, typDecl) ->\n      fields\n      |> filterRecordFields ~env ~prefix ~exact\n           ~recordAsString:\n             (typDecl.item.decl |> Shared.declToString typDecl.name.txt)\n    | None -> [])\n"
  },
  {
    "path": "analysis/src/DumpAst.ml",
    "content": "open SharedTypes\n(* This is intended to be a debug tool. It's by no means complete. Rather, you're encouraged to extend this with printing whatever types you need printing. *)\n\nlet emptyLocDenom = \"<x>\"\nlet hasCursorDenom = \"<*>\"\nlet noCursorDenom = \"\"\n\nlet printLocDenominator loc ~pos =\n  match loc |> CursorPosition.classifyLoc ~pos with\n  | EmptyLoc -> emptyLocDenom\n  | HasCursor -> hasCursorDenom\n  | NoCursor -> noCursorDenom\n\nlet printLocDenominatorLoc loc ~pos =\n  match loc |> CursorPosition.classifyLocationLoc ~pos with\n  | CursorPosition.EmptyLoc -> emptyLocDenom\n  | HasCursor -> hasCursorDenom\n  | NoCursor -> noCursorDenom\n\nlet printLocDenominatorPos pos ~posStart ~posEnd =\n  match CursorPosition.classifyPositions pos ~posStart ~posEnd with\n  | CursorPosition.EmptyLoc -> emptyLocDenom\n  | HasCursor -> hasCursorDenom\n  | NoCursor -> noCursorDenom\n\nlet addIndentation indentation =\n  let rec indent str indentation =\n    if indentation < 1 then str else indent (str ^ \"  \") (indentation - 1)\n  in\n  indent \"\" indentation\n\nlet printAttributes attributes =\n  match List.length attributes with\n  | 0 -> \"\"\n  | _ ->\n    \"[\"\n    ^ (attributes\n      |> List.map (fun ({Location.txt}, _payload) -> \"@\" ^ txt)\n      |> String.concat \",\")\n    ^ \"]\"\n\nlet printConstant const =\n  match const with\n  | Parsetree.Pconst_integer (s, _) -> \"Pconst_integer(\" ^ s ^ \")\"\n  | Pconst_char c -> \"Pconst_char(\" ^ String.make 1 (Char.chr c) ^ \")\"\n  | Pconst_string (s, delim) ->\n    let delim =\n      match delim with\n      | None -> \"\"\n      | Some delim -> delim ^ \" \"\n    in\n    \"Pconst_string(\" ^ delim ^ s ^ delim ^ \")\"\n  | Pconst_float (s, _) -> \"Pconst_float(\" ^ s ^ \")\"\n\nlet printCoreType typ ~pos =\n  printAttributes typ.Parsetree.ptyp_attributes\n  ^ (typ.ptyp_loc |> printLocDenominator ~pos)\n  ^\n  match typ.ptyp_desc with\n  | Ptyp_any -> \"Ptyp_any\"\n  | Ptyp_var name -> 
\"Ptyp_var(\" ^ str name ^ \")\"\n  | Ptyp_constr (lid, _types) ->\n    \"Ptyp_constr(\"\n    ^ (lid |> printLocDenominatorLoc ~pos)\n    ^ (Utils.flattenLongIdent lid.txt |> ident |> str)\n    ^ \")\"\n  | Ptyp_variant _ -> \"Ptyp_variant(<unimplemented>)\"\n  | _ -> \"<unimplemented_ptyp_desc>\"\n\nlet rec printPattern pattern ~pos ~indentation =\n  printAttributes pattern.Parsetree.ppat_attributes\n  ^ (pattern.ppat_loc |> printLocDenominator ~pos)\n  ^\n  match pattern.Parsetree.ppat_desc with\n  | Ppat_or (pat1, pat2) ->\n    \"Ppat_or(\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ printPattern pat1 ~pos ~indentation:(indentation + 2)\n    ^ \",\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ printPattern pat2 ~pos ~indentation:(indentation + 2)\n    ^ \"\\n\" ^ addIndentation indentation ^ \")\"\n  | Ppat_extension (({txt} as loc), _) ->\n    \"Ppat_extension(%\" ^ (loc |> printLocDenominatorLoc ~pos) ^ txt ^ \")\"\n  | Ppat_var ({txt} as loc) ->\n    \"Ppat_var(\" ^ (loc |> printLocDenominatorLoc ~pos) ^ txt ^ \")\"\n  | Ppat_constant const -> \"Ppat_constant(\" ^ printConstant const ^ \")\"\n  | Ppat_construct (({txt} as loc), maybePat) ->\n    \"Ppat_construct(\"\n    ^ (loc |> printLocDenominatorLoc ~pos)\n    ^ (Utils.flattenLongIdent txt |> ident |> str)\n    ^ (match maybePat with\n      | None -> \"\"\n      | Some pat -> \",\" ^ printPattern pat ~pos ~indentation)\n    ^ \")\"\n  | Ppat_variant (label, maybePat) ->\n    \"Ppat_variant(\" ^ str label\n    ^ (match maybePat with\n      | None -> \"\"\n      | Some pat -> \",\" ^ printPattern pat ~pos ~indentation)\n    ^ \")\"\n  | Ppat_record (fields, _) ->\n    \"Ppat_record(\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ \"fields:\\n\"\n    ^ (fields\n      |> List.map (fun ((Location.{txt} as loc), pat) ->\n             addIndentation (indentation + 2)\n             ^ (loc |> printLocDenominatorLoc ~pos)\n             ^ (Utils.flattenLongIdent txt |> ident |> str)\n             ^ \": 
\"\n             ^ printPattern pat ~pos ~indentation:(indentation + 2))\n      |> String.concat \"\\n\")\n    ^ \"\\n\" ^ addIndentation indentation ^ \")\"\n  | Ppat_tuple patterns ->\n    \"Ppat_tuple(\\n\"\n    ^ (patterns\n      |> List.map (fun pattern ->\n             addIndentation (indentation + 2)\n             ^ (pattern |> printPattern ~pos ~indentation:(indentation + 2)))\n      |> String.concat \",\\n\")\n    ^ \"\\n\" ^ addIndentation indentation ^ \")\"\n  | Ppat_any -> \"Ppat_any\"\n  | Ppat_constraint (pattern, typ) ->\n    \"Ppat_constraint(\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ printCoreType typ ~pos ^ \",\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ (pattern |> printPattern ~pos ~indentation:(indentation + 1))\n    ^ \"\\n\" ^ addIndentation indentation ^ \")\"\n  | v -> Printf.sprintf \"<unimplemented_ppat_desc: %s>\" (Utils.identifyPpat v)\n\nand printCase case ~pos ~indentation ~caseNum =\n  addIndentation indentation\n  ^ Printf.sprintf \"case %i:\\n\" caseNum\n  ^ addIndentation (indentation + 1)\n  ^ \"pattern\"\n  ^ (case.Parsetree.pc_lhs.ppat_loc |> printLocDenominator ~pos)\n  ^ \":\\n\"\n  ^ addIndentation (indentation + 2)\n  ^ printPattern case.Parsetree.pc_lhs ~pos ~indentation\n  ^ \"\\n\"\n  ^ addIndentation (indentation + 1)\n  ^ \"expr\"\n  ^ (case.Parsetree.pc_rhs.pexp_loc |> printLocDenominator ~pos)\n  ^ \":\\n\"\n  ^ addIndentation (indentation + 2)\n  ^ printExprItem case.pc_rhs ~pos ~indentation:(indentation + 2)\n\nand printExprItem expr ~pos ~indentation =\n  printAttributes expr.Parsetree.pexp_attributes\n  ^ (expr.pexp_loc |> printLocDenominator ~pos)\n  ^\n  match expr.Parsetree.pexp_desc with\n  | Pexp_array exprs ->\n    \"Pexp_array(\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ (exprs\n      |> List.map (fun expr ->\n             expr |> printExprItem ~pos ~indentation:(indentation + 1))\n      |> String.concat (\"\\n\" ^ addIndentation (indentation + 1)))\n    ^ \"\\n\" ^ 
addIndentation indentation ^ \")\"\n  | Pexp_match (matchExpr, cases) ->\n    \"Pexp_match(\"\n    ^ printExprItem matchExpr ~pos ~indentation:0\n    ^ \")\\n\"\n    ^ (cases\n      |> List.mapi (fun caseNum case ->\n             printCase case ~pos ~caseNum:(caseNum + 1)\n               ~indentation:(indentation + 1))\n      |> String.concat \"\\n\")\n  | Pexp_ident {txt} ->\n    \"Pexp_ident:\" ^ (Utils.flattenLongIdent txt |> SharedTypes.ident)\n  | Pexp_apply (expr, args) ->\n    let printLabel labelled ~pos =\n      match labelled with\n      | None -> \"<unlabelled>\"\n      | Some labelled ->\n        printLocDenominatorPos pos ~posStart:labelled.posStart\n          ~posEnd:labelled.posEnd\n        ^ \"~\"\n        ^ if labelled.opt then \"?\" else \"\" ^ labelled.name\n    in\n    let args = extractExpApplyArgs ~args in\n    \"Pexp_apply(\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ \"expr:\\n\"\n    ^ addIndentation (indentation + 2)\n    ^ printExprItem expr ~pos ~indentation:(indentation + 2)\n    ^ \"\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ \"args:\\n\"\n    ^ (args\n      |> List.map (fun arg ->\n             addIndentation (indentation + 2)\n             ^ printLabel arg.label ~pos ^ \"=\\n\"\n             ^ addIndentation (indentation + 3)\n             ^ printExprItem arg.exp ~pos ~indentation:(indentation + 3))\n      |> String.concat \",\\n\")\n    ^ \"\\n\" ^ addIndentation indentation ^ \")\"\n  | Pexp_constant constant -> \"Pexp_constant(\" ^ printConstant constant ^ \")\"\n  | Pexp_construct (({txt} as loc), maybeExpr) ->\n    \"Pexp_construct(\"\n    ^ (loc |> printLocDenominatorLoc ~pos)\n    ^ (Utils.flattenLongIdent txt |> ident |> str)\n    ^ (match maybeExpr with\n      | None -> \"\"\n      | Some expr -> \", \" ^ printExprItem expr ~pos ~indentation)\n    ^ \")\"\n  | Pexp_variant (label, maybeExpr) ->\n    \"Pexp_variant(\" ^ str label\n    ^ (match maybeExpr with\n      | None -> \"\"\n      | Some expr -> \",\" ^ 
printExprItem expr ~pos ~indentation)\n    ^ \")\"\n  | Pexp_fun (arg, _maybeDefaultArgExpr, pattern, nextExpr) ->\n    \"Pexp_fun(\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ \"arg: \"\n    ^ (match arg with\n      | Nolabel -> \"Nolabel\"\n      | Labelled name -> \"Labelled(\" ^ name ^ \")\"\n      | Optional name -> \"Optional(\" ^ name ^ \")\")\n    ^ \",\\n\"\n    ^ addIndentation (indentation + 2)\n    ^ \"pattern: \"\n    ^ printPattern pattern ~pos ~indentation:(indentation + 2)\n    ^ \",\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ \"next expr:\\n\"\n    ^ addIndentation (indentation + 2)\n    ^ printExprItem nextExpr ~pos ~indentation:(indentation + 2)\n    ^ \"\\n\" ^ addIndentation indentation ^ \")\"\n  | Pexp_extension (({txt} as loc), _) ->\n    \"Pexp_extension(%\" ^ (loc |> printLocDenominatorLoc ~pos) ^ txt ^ \")\"\n  | Pexp_assert expr ->\n    \"Pexp_assert(\" ^ printExprItem expr ~pos ~indentation ^ \")\"\n  | Pexp_field (exp, loc) ->\n    \"Pexp_field(\"\n    ^ (loc |> printLocDenominatorLoc ~pos)\n    ^ printExprItem exp ~pos ~indentation\n    ^ \")\"\n  | Pexp_record (fields, _) ->\n    \"Pexp_record(\\n\"\n    ^ addIndentation (indentation + 1)\n    ^ \"fields:\\n\"\n    ^ (fields\n      |> List.map (fun ((Location.{txt} as loc), expr) ->\n             addIndentation (indentation + 2)\n             ^ (loc |> printLocDenominatorLoc ~pos)\n             ^ (Utils.flattenLongIdent txt |> ident |> str)\n             ^ \": \"\n             ^ printExprItem expr ~pos ~indentation:(indentation + 2))\n      |> String.concat \"\\n\")\n    ^ \"\\n\" ^ addIndentation indentation ^ \")\"\n  | Pexp_tuple exprs ->\n    \"Pexp_tuple(\\n\"\n    ^ (exprs\n      |> List.map (fun expr ->\n             addIndentation (indentation + 2)\n             ^ (expr |> printExprItem ~pos ~indentation:(indentation + 2)))\n      |> String.concat \",\\n\")\n    ^ \"\\n\" ^ addIndentation indentation ^ \")\"\n  | v -> Printf.sprintf \"<unimplemented_pexp_desc: 
%s>\" (Utils.identifyPexp v)\n\nlet printValueBinding value ~pos ~indentation =\n  printAttributes value.Parsetree.pvb_attributes\n  ^ \"value\" ^ \":\\n\"\n  ^ addIndentation (indentation + 1)\n  ^ (value.pvb_pat |> printPattern ~pos ~indentation:(indentation + 1))\n  ^ \"\\n\" ^ addIndentation indentation ^ \"expr:\\n\"\n  ^ addIndentation (indentation + 1)\n  ^ printExprItem value.pvb_expr ~pos ~indentation:(indentation + 1)\n\nlet printStructItem structItem ~pos ~source =\n  match structItem.Parsetree.pstr_loc |> CursorPosition.classifyLoc ~pos with\n  | HasCursor -> (\n    let startOffset =\n      match Pos.positionToOffset source (structItem.pstr_loc |> Loc.start) with\n      | None -> 0\n      | Some offset -> offset\n    in\n    let endOffset =\n      (* Include the next line of the source since that will hold the ast comment pointing to the position.\n         Caveat: this only works for single line sources with a comment on the next line. Will need to be\n         adapted if that's not the only use case.*)\n      let line, _col = structItem.pstr_loc |> Loc.end_ in\n      match Pos.positionToOffset source (line + 2, 0) with\n      | None -> 0\n      | Some offset -> offset\n    in\n\n    (\"\\nSource:\\n// \"\n    ^ String.sub source startOffset (endOffset - startOffset)\n    ^ \"\\n\")\n    ^ printLocDenominator structItem.pstr_loc ~pos\n    ^\n    match structItem.pstr_desc with\n    | Pstr_eval (expr, _attributes) ->\n      \"Pstr_eval(\\n\" ^ printExprItem expr ~pos ~indentation:1 ^ \"\\n)\"\n    | Pstr_value (recFlag, values) ->\n      \"Pstr_value(\\n\"\n      ^ (match recFlag with\n        | Recursive -> \"  rec,\\n\"\n        | Nonrecursive -> \"\")\n      ^ (values\n        |> List.map (fun value ->\n               addIndentation 1 ^ printValueBinding value ~pos ~indentation:1)\n        |> String.concat \",\\n\")\n      ^ \"\\n)\"\n    | _ -> \"<structure_item_not_implemented>\")\n  | _ -> \"\"\n\nlet dump ~currentFile ~pos =\n  let 
{Res_driver.parsetree = structure; source} =\n    Res_driver.parsing_engine.parse_implementation ~for_printer:true\n      ~filename:currentFile\n  in\n\n  print_endline\n    (structure\n    |> List.map (fun structItem -> printStructItem structItem ~pos ~source)\n    |> String.concat \"\")\n"
  },
  {
    "path": "analysis/src/Files.ml",
    "content": "let split str string = Str.split (Str.regexp_string str) string\n\nlet removeExtraDots path =\n  Str.global_replace (Str.regexp_string \"/./\") \"/\" path\n  |> Str.global_replace (Str.regexp {|^\\./\\.\\./|}) \"../\"\n\n(* Win32 & MacOS are case-insensitive *)\nlet pathEq =\n  if Sys.os_type = \"Linux\" then fun a b -> a = b\n  else fun a b -> String.lowercase_ascii a = String.lowercase_ascii b\n\nlet pathStartsWith text prefix =\n  String.length prefix <= String.length text\n  && pathEq (String.sub text 0 (String.length prefix)) prefix\n\nlet sliceToEnd str pos = String.sub str pos (String.length str - pos)\n\nlet relpath base path =\n  if pathStartsWith path base then\n    let baselen = String.length base in\n    let rest = String.sub path baselen (String.length path - baselen) in\n    (if rest <> \"\" && rest.[0] = Filename.dir_sep.[0] then sliceToEnd rest 1\n     else rest)\n    |> removeExtraDots\n  else\n    let rec loop bp pp =\n      match (bp, pp) with\n      | \".\" :: ra, _ -> loop ra pp\n      | _, \".\" :: rb -> loop bp rb\n      | a :: ra, b :: rb when pathEq a b -> loop ra rb\n      | _ -> (bp, pp)\n    in\n    let base, path =\n      loop (split Filename.dir_sep base) (split Filename.dir_sep path)\n    in\n    String.concat Filename.dir_sep\n      ((match base with\n       | [] -> [\".\"]\n       | _ -> List.map (fun _ -> \"..\") base)\n      @ path)\n    |> removeExtraDots\n\nlet maybeStat path =\n  try Some (Unix.stat path) with Unix.Unix_error (Unix.ENOENT, _, _) -> None\n\nlet readFile filename =\n  try\n    (* windows can't use open_in *)\n    let chan = open_in_bin filename in\n    let content = really_input_string chan (in_channel_length chan) in\n    close_in_noerr chan;\n    Some content\n  with _ -> None\n\nlet exists path =\n  match maybeStat path with\n  | None -> false\n  | Some _ -> true\nlet ifExists path = if exists path then Some path else None\n\nlet readDirectory dir =\n  match Unix.opendir dir with\n  | exception 
Unix.Unix_error (Unix.ENOENT, \"opendir\", _dir) -> []\n  | handle ->\n    let rec loop handle =\n      try\n        let name = Unix.readdir handle in\n        if name = Filename.current_dir_name || name = Filename.parent_dir_name\n        then loop handle\n        else name :: loop handle\n      with End_of_file ->\n        Unix.closedir handle;\n        []\n    in\n    loop handle\n\nlet rec collectDirs path =\n  match maybeStat path with\n  | None -> []\n  | Some {Unix.st_kind = Unix.S_DIR} ->\n    path\n    :: (readDirectory path\n       |> List.map (fun name -> collectDirs (Filename.concat path name))\n       |> List.concat)\n  | _ -> []\n\nlet rec collect ?(checkDir = fun _ -> true) path test =\n  match maybeStat path with\n  | None -> []\n  | Some {Unix.st_kind = Unix.S_DIR} ->\n    if checkDir path then\n      readDirectory path\n      |> List.map (fun name ->\n             collect ~checkDir (Filename.concat path name) test)\n      |> List.concat\n    else []\n  | _ -> if test path then [path] else []\n\ntype classifiedFile = Res | Resi | Other\n\nlet classifySourceFile path =\n  if Filename.check_suffix path \".res\" && exists path then Res\n  else if Filename.check_suffix path \".resi\" && exists path then Resi\n  else Other\n\nlet canonicalizeUri uri =\n  let path = Uri.toPath uri in\n  path |> Unix.realpath |> Uri.fromPath |> Uri.toString\n"
  },
  {
    "path": "analysis/src/FindFiles.ml",
    "content": "let ifDebug debug name fn v = if debug then Log.log (name ^ \": \" ^ fn v)\nlet ( /+ ) = Filename.concat\nlet bind f x = Option.bind x f\n\n(* Returns a list of paths, relative to the provided `base` *)\nlet getSourceDirectories ~includeDev ~baseDir config =\n  let rec handleItem current item =\n    match item with\n    | Json.Array contents ->\n      List.map (handleItem current) contents |> List.concat\n    | Json.String text -> [current /+ text]\n    | Json.Object _ -> (\n      let dir =\n        item |> Json.get \"dir\" |> bind Json.string\n        |> Option.value ~default:\"Must specify directory\"\n      in\n      let typ =\n        if includeDev then \"lib\"\n        else\n          item |> Json.get \"type\" |> bind Json.string\n          |> Option.value ~default:\"lib\"\n      in\n\n      if typ = \"dev\" then []\n      else\n        match item |> Json.get \"subdirs\" with\n        | None | Some Json.False -> [current /+ dir]\n        | Some Json.True ->\n          Files.collectDirs (baseDir /+ current /+ dir)\n          |> List.filter (fun name -> name <> Filename.current_dir_name)\n          |> List.map (Files.relpath baseDir)\n        | Some item -> (current /+ dir) :: handleItem (current /+ dir) item)\n    | _ -> failwith \"Invalid subdirs entry\"\n  in\n  match config |> Json.get \"sources\" with\n  | None -> []\n  | Some item -> handleItem \"\" item\n\nlet isCompiledFile name =\n  Filename.check_suffix name \".cmt\" || Filename.check_suffix name \".cmti\"\n\nlet isImplementation name =\n  Filename.check_suffix name \".re\"\n  || Filename.check_suffix name \".res\"\n  || Filename.check_suffix name \".ml\"\n\nlet isInterface name =\n  Filename.check_suffix name \".rei\"\n  || Filename.check_suffix name \".resi\"\n  || Filename.check_suffix name \".mli\"\n\nlet isSourceFile name = isImplementation name || isInterface name\n\nlet compiledNameSpace name =\n  String.split_on_char '-' name\n  |> List.map String.capitalize_ascii\n  |> 
String.concat \"\"\n  (* Remove underscores??? Whyyy bucklescript, whyyyy *)\n  |> String.split_on_char '_'\n  |> String.concat \"\"\n\nlet compiledBaseName ~namespace name =\n  Filename.chop_extension name\n  ^\n  match namespace with\n  | None -> \"\"\n  | Some n -> \"-\" ^ compiledNameSpace n\n\nlet getName x =\n  Filename.basename x |> Filename.chop_extension |> String.capitalize_ascii\n\nlet filterDuplicates cmts =\n  (* Remove .cmt's that have .cmti's *)\n  let intfs = Hashtbl.create 100 in\n  cmts\n  |> List.iter (fun path ->\n         if\n           Filename.check_suffix path \".rei\"\n           || Filename.check_suffix path \".mli\"\n           || Filename.check_suffix path \".cmti\"\n         then Hashtbl.add intfs (getName path) true);\n  cmts\n  |> List.filter (fun path ->\n         not\n           ((Filename.check_suffix path \".re\"\n            || Filename.check_suffix path \".ml\"\n            || Filename.check_suffix path \".cmt\")\n           && Hashtbl.mem intfs (getName path)))\n\nlet nameSpaceToName n =\n  n\n  |> Str.split (Str.regexp \"[-/@]\")\n  |> List.map String.capitalize_ascii\n  |> String.concat \"\"\n\nlet getNamespace config =\n  let ns = config |> Json.get \"namespace\" in\n  let namespaceEntry = config |> Json.get \"namespace-entry\" in\n  let fromString = ns |> bind Json.string in\n  let isNamespaced =\n    ns |> bind Json.bool |> Option.value ~default:(fromString |> Option.is_some)\n  in\n  let either x y = if x = None then y else x in\n  if isNamespaced then\n    let fromName = config |> Json.get \"name\" |> bind Json.string in\n    let name = either fromString fromName |> Option.map nameSpaceToName in\n    match (namespaceEntry, name) with\n    | Some _, Some name -> Some (\"@\" ^ name)\n    | _ -> name\n  else None\n\nmodule StringSet = Set.Make (String)\n\nlet getPublic config =\n  let public = config |> Json.get \"public\" in\n  match public with\n  | None -> None\n  | Some public -> (\n    match public |> Json.array with\n 
   | None -> None\n    | Some public ->\n      Some (public |> List.filter_map Json.string |> StringSet.of_list))\n\nlet collectFiles directory =\n  let allFiles = Files.readDirectory directory in\n  let compileds = allFiles |> List.filter isCompiledFile |> filterDuplicates in\n  let sources = allFiles |> List.filter isSourceFile |> filterDuplicates in\n  compileds\n  |> Utils.filterMap (fun path ->\n         let modName = getName path in\n         let cmt = directory /+ path in\n         let resOpt =\n           Utils.find\n             (fun name ->\n               if getName name = modName then Some (directory /+ name) else None)\n             sources\n         in\n         match resOpt with\n         | None -> None\n         | Some res -> Some (modName, SharedTypes.Impl {cmt; res}))\n\n(* returns a list of (absolute path to cmt(i), relative path from base to source file) *)\nlet findProjectFiles ~public ~namespace ~path ~sourceDirectories ~libBs =\n  let dirs =\n    sourceDirectories |> List.map (Filename.concat path) |> StringSet.of_list\n  in\n  let files =\n    dirs |> StringSet.elements\n    |> List.map (fun name -> Files.collect name isSourceFile)\n    |> List.concat |> StringSet.of_list\n  in\n  dirs\n  |> ifDebug true \"Source directories\" (fun s ->\n         s |> StringSet.elements |> List.map Utils.dumpPath |> String.concat \" \");\n  files\n  |> ifDebug true \"Source files\" (fun s ->\n         s |> StringSet.elements |> List.map Utils.dumpPath |> String.concat \" \");\n\n  let interfaces = Hashtbl.create 100 in\n  files\n  |> StringSet.iter (fun path ->\n         if isInterface path then Hashtbl.replace interfaces (getName path) path);\n\n  let normals =\n    files |> StringSet.elements\n    |> Utils.filterMap (fun file ->\n           if isImplementation file then (\n             let moduleName = getName file in\n             let resi = Hashtbl.find_opt interfaces moduleName in\n             Hashtbl.remove interfaces moduleName;\n             let 
base = compiledBaseName ~namespace (Files.relpath path file) in\n             match resi with\n             | Some resi ->\n               let cmti = (libBs /+ base) ^ \".cmti\" in\n               let cmt = (libBs /+ base) ^ \".cmt\" in\n               if Files.exists cmti then\n                 if Files.exists cmt then\n                   (* Log.log(\"Intf and impl \" ++ cmti ++ \" \" ++ cmt) *)\n                   Some\n                     ( moduleName,\n                       SharedTypes.IntfAndImpl {cmti; resi; cmt; res = file} )\n                 else None\n               else (\n                 (* Log.log(\"Just intf \" ++ cmti) *)\n                 Log.log (\"Bad source file (no cmt/cmti/cmi) \" ^ (libBs /+ base));\n                 None)\n             | None ->\n               let cmt = (libBs /+ base) ^ \".cmt\" in\n               if Files.exists cmt then Some (moduleName, Impl {cmt; res = file})\n               else (\n                 Log.log (\"Bad source file (no cmt/cmi) \" ^ (libBs /+ base));\n                 None))\n           else None)\n  in\n  let result =\n    normals\n    |> List.filter_map (fun (name, paths) ->\n           let originalName = name in\n           let name =\n             match namespace with\n             | None -> name\n             | Some namespace -> name ^ \"-\" ^ namespace\n           in\n           match public with\n           | Some public ->\n             if public |> StringSet.mem originalName then Some (name, paths)\n             else None\n           | None -> Some (name, paths))\n  in\n  match namespace with\n  | None -> result\n  | Some namespace ->\n    let moduleName = nameSpaceToName namespace in\n    let cmt = (libBs /+ namespace) ^ \".cmt\" in\n    Log.log (\"adding namespace \" ^ namespace ^ \" : \" ^ moduleName ^ \" : \" ^ cmt);\n    (moduleName, Namespace {cmt}) :: result\n\nlet findDependencyFiles base config =\n  let deps =\n    config |> Json.get \"bs-dependencies\" |> bind Json.array\n    |> 
Option.value ~default:[]\n    |> List.filter_map Json.string\n  in\n  let devDeps =\n    config\n    |> Json.get \"bs-dev-dependencies\"\n    |> bind Json.array\n    |> Option.map (List.filter_map Json.string)\n    |> Option.value ~default:[]\n  in\n  let deps = deps @ devDeps in\n  Log.log (\"Dependencies: \" ^ String.concat \" \" deps);\n  let depFiles =\n    deps\n    |> List.map (fun name ->\n           let result =\n             Json.bind\n               (ModuleResolution.resolveNodeModulePath ~startPath:base name)\n               (fun path ->\n                 let rescriptJsonPath = path /+ \"rescript.json\" in\n                 let bsconfigJsonPath = path /+ \"bsconfig.json\" in\n\n                 let parseText text =\n                   match Json.parse text with\n                   | Some inner -> (\n                     let namespace = getNamespace inner in\n                     let sourceDirectories =\n                       getSourceDirectories ~includeDev:false ~baseDir:path\n                         inner\n                     in\n                     match BuildSystem.getLibBs path with\n                     | None -> None\n                     | Some libBs ->\n                       let compiledDirectories =\n                         sourceDirectories |> List.map (Filename.concat libBs)\n                       in\n                       let compiledDirectories =\n                         match namespace with\n                         | None -> compiledDirectories\n                         | Some _ -> libBs :: compiledDirectories\n                       in\n                       let projectFiles =\n                         findProjectFiles ~public:(getPublic inner) ~namespace\n                           ~path ~sourceDirectories ~libBs\n                       in\n                       Some (compiledDirectories, projectFiles))\n                   | None -> None\n                 in\n\n                 match Files.readFile rescriptJsonPath with\n     
            | Some text -> parseText text\n                 | None -> (\n                   match Files.readFile bsconfigJsonPath with\n                   | Some text -> parseText text\n                   | None -> None))\n           in\n\n           match result with\n           | Some (files, directories) -> (files, directories)\n           | None ->\n             Log.log (\"Skipping nonexistent dependency: \" ^ name);\n             ([], []))\n  in\n  match BuildSystem.getStdlib base with\n  | None -> None\n  | Some stdlibDirectory ->\n    let compiledDirectories, projectFiles =\n      let files, directories = List.split depFiles in\n      (List.concat files, List.concat directories)\n    in\n    let allFiles = projectFiles @ collectFiles stdlibDirectory in\n    Some (compiledDirectories, allFiles)\n"
  },
  {
    "path": "analysis/src/Hint.ml",
    "content": "open SharedTypes\n\ntype inlayHintKind = Type\nlet inlayKindToNumber = function\n  | Type -> 1\n\nlet locItemToTypeHint ~full:{file; package} locItem =\n  match locItem.locType with\n  | Constant t ->\n    Some\n      (match t with\n      | Const_int _ -> \"int\"\n      | Const_char _ -> \"char\"\n      | Const_string _ -> \"string\"\n      | Const_float _ -> \"float\"\n      | Const_int32 _ -> \"int32\"\n      | Const_int64 _ -> \"int64\"\n      | Const_bigint _ -> \"bigint\")\n  | Typed (_, t, locKind) ->\n    let fromType typ =\n      typ |> Shared.typeToString\n      |> Str.global_replace (Str.regexp \"[\\r\\n\\t]\") \"\"\n    in\n    Some\n      (match References.definedForLoc ~file ~package locKind with\n      | None -> fromType t\n      | Some (_, res) -> (\n        match res with\n        | `Declared -> fromType t\n        | `Constructor _ -> fromType t\n        | `Field -> fromType t))\n  | _ -> None\n\nlet inlay ~path ~pos ~maxLength ~debug =\n  let maxlen = try Some (int_of_string maxLength) with Failure _ -> None in\n  let hints = ref [] in\n  let start_line, end_line = pos in\n  let push loc kind =\n    let range = Utils.cmtLocToRange loc in\n    if start_line <= range.end_.line && end_line >= range.start.line then\n      hints := (range, kind) :: !hints\n  in\n  let rec processPattern (pat : Parsetree.pattern) =\n    match pat.ppat_desc with\n    | Ppat_tuple pl -> pl |> List.iter processPattern\n    | Ppat_record (fields, _) ->\n      fields |> List.iter (fun (_, p) -> processPattern p)\n    | Ppat_array fields -> fields |> List.iter processPattern\n    | Ppat_var {loc} -> push loc Type\n    | _ -> ()\n  in\n  let value_binding (iterator : Ast_iterator.iterator)\n      (vb : Parsetree.value_binding) =\n    (match vb with\n    | {\n     pvb_pat = {ppat_desc = Ppat_var _};\n     pvb_expr =\n       {\n         pexp_desc =\n           ( Pexp_constant _ | Pexp_tuple _ | Pexp_record _ | Pexp_variant _\n           | Pexp_apply _ | Pexp_match 
_ | Pexp_construct _ | Pexp_ifthenelse _\n           | Pexp_array _ | Pexp_ident _ | Pexp_try _ | Pexp_lazy _\n           | Pexp_send _ | Pexp_field _ | Pexp_open _ );\n       };\n    } ->\n      push vb.pvb_pat.ppat_loc Type\n    | {pvb_pat = {ppat_desc = Ppat_tuple _}} -> processPattern vb.pvb_pat\n    | {pvb_pat = {ppat_desc = Ppat_record _}} -> processPattern vb.pvb_pat\n    | _ -> ());\n    Ast_iterator.default_iterator.value_binding iterator vb\n  in\n  let iterator = {Ast_iterator.default_iterator with value_binding} in\n  (if Files.classifySourceFile path = Res then\n     let parser =\n       Res_driver.parsing_engine.parse_implementation ~for_printer:false\n     in\n     let {Res_driver.parsetree = structure} = parser ~filename:path in\n     iterator.structure iterator structure |> ignore);\n  match Cmt.loadFullCmtFromPath ~path with\n  | None -> None\n  | Some full ->\n    let result =\n      !hints\n      |> List.filter_map (fun ((range : Protocol.range), hintKind) ->\n             match\n               References.getLocItem ~full\n                 ~pos:(range.start.line, range.start.character + 1)\n                 ~debug\n             with\n             | None -> None\n             | Some locItem -> (\n               let position : Protocol.position =\n                 {line = range.start.line; character = range.end_.character}\n               in\n               match locItemToTypeHint locItem ~full with\n               | Some label -> (\n                 let result =\n                   Protocol.stringifyHint\n                     {\n                       kind = inlayKindToNumber hintKind;\n                       position;\n                       paddingLeft = true;\n                       paddingRight = false;\n                       label = \": \" ^ label;\n                     }\n                 in\n                 match maxlen with\n                 | Some value ->\n                   if String.length label > value then None else Some result\n  
               | None -> Some result)\n               | None -> None))\n    in\n    Some result\n\nlet codeLens ~path ~debug =\n  let lenses = ref [] in\n  let push loc =\n    let range = Utils.cmtLocToRange loc in\n    lenses := range :: !lenses\n  in\n  (* Code lenses are only emitted for functions right now. So look for value bindings that are functions,\n     and use the loc of the value binding itself so we can look up the full function type for our code lens. *)\n  let value_binding (iterator : Ast_iterator.iterator)\n      (vb : Parsetree.value_binding) =\n    (match vb with\n    | {\n     pvb_pat = {ppat_desc = Ppat_var _; ppat_loc};\n     pvb_expr = {pexp_desc = Pexp_fun _};\n    } ->\n      push ppat_loc\n    | _ -> ());\n    Ast_iterator.default_iterator.value_binding iterator vb\n  in\n  let iterator = {Ast_iterator.default_iterator with value_binding} in\n  (* We only print code lenses in implementation files. This is because they'd be redundant in interface files,\n     where the definition itself will be the same thing as what would've been printed in the code lens. 
*)\n  (if Files.classifySourceFile path = Res then\n     let parser =\n       Res_driver.parsing_engine.parse_implementation ~for_printer:false\n     in\n     let {Res_driver.parsetree = structure} = parser ~filename:path in\n     iterator.structure iterator structure |> ignore);\n  match Cmt.loadFullCmtFromPath ~path with\n  | None -> None\n  | Some full ->\n    let result =\n      !lenses\n      |> List.filter_map (fun (range : Protocol.range) ->\n             match\n               References.getLocItem ~full\n                 ~pos:(range.start.line, range.start.character + 1)\n                 ~debug\n             with\n             | Some {locType = Typed (_, typeExpr, _)} ->\n               Some\n                 (Protocol.stringifyCodeLens\n                    {\n                      range;\n                      command =\n                        Some\n                          {\n                            (* Code lenses can run commands. An empty command string means we just want the editor\n                               to print the text, not link to running a command. *)\n                            command = \"\";\n                            (* Print the type with a huge line width, because the code lens always prints on a\n                               single line in the editor. *)\n                            title =\n                              typeExpr |> Shared.typeToString ~lineWidth:400;\n                          };\n                    })\n             | _ -> None)\n    in\n    Some result\n"
  },
  {
    "path": "analysis/src/Hover.ml",
    "content": "open SharedTypes\n\nlet showModuleTopLevel ~docstring ~isType ~name (topLevel : Module.item list) =\n  let contents =\n    topLevel\n    |> List.map (fun item ->\n           match item.Module.kind with\n           (* TODO pretty print module contents *)\n           | Type ({decl}, recStatus) ->\n             \"  \" ^ (decl |> Shared.declToString ~recStatus item.name)\n           | Module _ -> \"  module \" ^ item.name\n           | Value typ ->\n             \"  let \" ^ item.name ^ \": \" ^ (typ |> Shared.typeToString))\n    (* TODO indent *)\n    |> String.concat \"\\n\"\n  in\n  let name = Utils.cutAfterDash name in\n  let full =\n    Markdown.codeBlock\n      (\"module \"\n      ^ (if isType then \"type \" ^ name ^ \" = \" else name ^ \": \")\n      ^ \"{\" ^ \"\\n\" ^ contents ^ \"\\n}\")\n  in\n  let doc =\n    match docstring with\n    | [] -> \"\"\n    | _ :: _ -> \"\\n\" ^ (docstring |> String.concat \"\\n\") ^ \"\\n\"\n  in\n  Some (doc ^ full)\n\nlet rec showModule ~docstring ~(file : File.t) ~package ~name\n    (declared : Module.t Declared.t option) =\n  match declared with\n  | None ->\n    showModuleTopLevel ~docstring ~isType:false ~name file.structure.items\n  | Some {item = Structure {items}; modulePath} ->\n    let isType =\n      match modulePath with\n      | ExportedModule {isType} -> isType\n      | _ -> false\n    in\n    showModuleTopLevel ~docstring ~isType ~name items\n  | Some ({item = Constraint (_moduleItem, moduleTypeItem)} as declared) ->\n    (* show the interface *)\n    showModule ~docstring ~file ~name ~package\n      (Some {declared with item = moduleTypeItem})\n  | Some ({item = Ident path} as declared) -> (\n    match References.resolveModuleReference ~file ~package declared with\n    | None -> Some (\"Unable to resolve module reference \" ^ Path.name path)\n    | Some (_, declared) -> showModule ~docstring ~file ~name ~package declared)\n\ntype extractedType = {\n  name: string;\n  path: Path.t;\n  decl: 
Types.type_declaration;\n  env: SharedTypes.QueryEnv.t;\n  loc: Warnings.loc;\n}\n\nlet findRelevantTypesFromType ~file ~package typ =\n  (* Expand definitions of types mentioned in typ.\n     If typ itself is a record or variant, search its body *)\n  let env = QueryEnv.fromFile file in\n  let envToSearch, typesToSearch =\n    match typ |> Shared.digConstructor with\n    | Some path -> (\n      let labelDeclarationsTypes lds =\n        lds |> List.map (fun (ld : Types.label_declaration) -> ld.ld_type)\n      in\n      match References.digConstructor ~env ~package path with\n      | None -> (env, [typ])\n      | Some (env1, {item = {decl}}) -> (\n        match decl.type_kind with\n        | Type_record (lds, _) -> (env1, typ :: (lds |> labelDeclarationsTypes))\n        | Type_variant cds ->\n          ( env1,\n            cds\n            |> List.map (fun (cd : Types.constructor_declaration) ->\n                   let fromArgs =\n                     match cd.cd_args with\n                     | Cstr_tuple ts -> ts\n                     | Cstr_record lds -> lds |> labelDeclarationsTypes\n                   in\n                   typ\n                   ::\n                   (match cd.cd_res with\n                   | None -> fromArgs\n                   | Some t -> t :: fromArgs))\n            |> List.flatten )\n        | _ -> (env, [typ])))\n    | None -> (env, [typ])\n  in\n  let fromConstructorPath ~env path =\n    match References.digConstructor ~env ~package path with\n    | None -> None\n    | Some (env, {name = {txt}; extentLoc; item = {decl}}) ->\n      if Utils.isUncurriedInternal path then None\n      else Some {name = txt; env; loc = extentLoc; decl; path}\n  in\n  let constructors = Shared.findTypeConstructors typesToSearch in\n  constructors |> List.filter_map (fromConstructorPath ~env:envToSearch)\n\nlet expandTypes ~file ~package ~supportsMarkdownLinks typ =\n  findRelevantTypesFromType typ ~file ~package\n  |> List.map (fun {decl; env; loc; path} 
->\n         let linkToTypeDefinitionStr =\n           if supportsMarkdownLinks then\n             Markdown.goToDefinitionText ~env ~pos:loc.Warnings.loc_start\n           else \"\"\n         in\n         Markdown.divider\n         ^ (if supportsMarkdownLinks then Markdown.spacing else \"\")\n         ^ Markdown.codeBlock\n             (decl\n             |> Shared.declToString ~printNameAsIs:true\n                  (SharedTypes.pathIdentToString path))\n         ^ linkToTypeDefinitionStr ^ \"\\n\")\n\n(* Produces a hover with relevant types expanded in the main type being hovered. *)\nlet hoverWithExpandedTypes ~file ~package ~supportsMarkdownLinks typ =\n  let typeString = Markdown.codeBlock (typ |> Shared.typeToString) in\n  typeString :: expandTypes ~file ~package ~supportsMarkdownLinks typ\n  |> String.concat \"\\n\"\n\n(* Leverages autocomplete functionality to produce a hover for a position. This\n   makes it (most often) work with unsaved content. *)\nlet getHoverViaCompletions ~debug ~path ~pos ~currentFile ~forHover\n    ~supportsMarkdownLinks =\n  match Completions.getCompletions ~debug ~path ~pos ~currentFile ~forHover with\n  | None -> None\n  | Some (completions, ({file; package} as full), scope) -> (\n    let rawOpens = Scope.getRawOpens scope in\n    match completions with\n    | {kind = Label typString; docstring} :: _ ->\n      let parts =\n        docstring\n        @ if typString = \"\" then [] else [Markdown.codeBlock typString]\n      in\n\n      Some (Protocol.stringifyHover (String.concat \"\\n\\n\" parts))\n    | {kind = Field _; env; docstring} :: _ -> (\n      let opens = CompletionBackEnd.getOpens ~debug ~rawOpens ~package ~env in\n      match\n        CompletionBackEnd.completionsGetTypeEnv2 ~debug ~full ~rawOpens ~opens\n          ~pos completions\n      with\n      | Some (typ, _env) ->\n        let typeString =\n          hoverWithExpandedTypes ~file ~package ~supportsMarkdownLinks typ\n        in\n        let parts = docstring @ 
[typeString] in\n        Some (Protocol.stringifyHover (String.concat \"\\n\\n\" parts))\n      | None -> None)\n    | {env} :: _ -> (\n      let opens = CompletionBackEnd.getOpens ~debug ~rawOpens ~package ~env in\n      match\n        CompletionBackEnd.completionsGetTypeEnv2 ~debug ~full ~rawOpens ~opens\n          ~pos completions\n      with\n      | Some (typ, _env) ->\n        let typeString =\n          hoverWithExpandedTypes ~file ~package ~supportsMarkdownLinks typ\n        in\n        Some (Protocol.stringifyHover typeString)\n      | None -> None)\n    | _ -> None)\n\nlet newHover ~full:{file; package} ~supportsMarkdownLinks locItem =\n  match locItem.locType with\n  | TypeDefinition (name, decl, _stamp) -> (\n    let typeDef = Markdown.codeBlock (Shared.declToString name decl) in\n    match decl.type_manifest with\n    | None -> Some typeDef\n    | Some typ ->\n      Some\n        (typeDef :: expandTypes ~file ~package ~supportsMarkdownLinks typ\n        |> String.concat \"\\n\"))\n  | LModule (Definition (stamp, _tip)) | LModule (LocalReference (stamp, _tip))\n    -> (\n    match Stamps.findModule file.stamps stamp with\n    | None -> None\n    | Some md -> (\n      match References.resolveModuleReference ~file ~package md with\n      | None -> None\n      | Some (file, declared) ->\n        let name, docstring =\n          match declared with\n          | Some d -> (d.name.txt, d.docstring)\n          | None -> (file.moduleName, file.structure.docstring)\n        in\n        showModule ~docstring ~name ~file declared ~package))\n  | LModule (GlobalReference (moduleName, path, tip)) -> (\n    match ProcessCmt.fileForModule ~package moduleName with\n    | None -> None\n    | Some file -> (\n      let env = QueryEnv.fromFile file in\n      match References.exportedForTip ~env ~path ~package ~tip with\n      | None -> None\n      | Some (_env, _name, stamp) -> (\n        match Stamps.findModule file.stamps stamp with\n        | None -> None\n        | 
Some md -> (\n          match References.resolveModuleReference ~file ~package md with\n          | None -> None\n          | Some (file, declared) ->\n            let name, docstring =\n              match declared with\n              | Some d -> (d.name.txt, d.docstring)\n              | None -> (file.moduleName, file.structure.docstring)\n            in\n            showModule ~docstring ~name ~file ~package declared))))\n  | LModule NotFound -> None\n  | TopLevelModule name -> (\n    match ProcessCmt.fileForModule ~package name with\n    | None -> None\n    | Some file ->\n      showModule ~docstring:file.structure.docstring ~name:file.moduleName ~file\n        ~package None)\n  | Typed (_, _, Definition (_, (Field _ | Constructor _))) -> None\n  | Constant t ->\n    Some\n      (Markdown.codeBlock\n         (match t with\n         | Const_int _ -> \"int\"\n         | Const_char _ -> \"char\"\n         | Const_string _ -> \"string\"\n         | Const_float _ -> \"float\"\n         | Const_int32 _ -> \"int32\"\n         | Const_int64 _ -> \"int64\"\n         | Const_bigint _ -> \"bigint\"))\n  | Typed (_, t, locKind) ->\n    let fromType ~docstring typ =\n      ( hoverWithExpandedTypes ~file ~package ~supportsMarkdownLinks typ,\n        docstring )\n    in\n    let parts =\n      match References.definedForLoc ~file ~package locKind with\n      | None ->\n        let typeString, docstring = t |> fromType ~docstring:[] in\n        typeString :: docstring\n      | Some (docstring, res) -> (\n        match res with\n        | `Declared ->\n          let typeString, docstring = t |> fromType ~docstring in\n          typeString :: docstring\n        | `Constructor {cname = {txt}; args; docstring} ->\n          let typeString, docstring = t |> fromType ~docstring in\n          let argsString =\n            match args with\n            | InlineRecord _ | Args [] -> \"\"\n            | Args args ->\n              args\n              |> List.map (fun (t, _) -> 
Shared.typeToString t)\n              |> String.concat \", \" |> Printf.sprintf \"(%s)\"\n          in\n          typeString :: Markdown.codeBlock (txt ^ argsString) :: docstring\n        | `Field ->\n          let typeString, docstring = t |> fromType ~docstring in\n          typeString :: docstring)\n    in\n    Some (String.concat Markdown.divider parts)\n"
  },
  {
    "path": "analysis/src/JsxHacks.ml",
    "content": "let pathIsFragment path = Path.name path = \"ReasonReact.fragment\"\n\nlet primitiveIsFragment (vd : Typedtree.value_description) =\n  vd.val_name.txt = \"fragment\"\n  && vd.val_loc.loc_start.pos_fname |> Filename.basename = \"ReasonReact.res\"\n"
  },
  {
    "path": "analysis/src/Loc.ml",
    "content": "type t = Location.t\n\nlet start (loc : t) = Pos.ofLexing loc.loc_start\nlet end_ (loc : t) = Pos.ofLexing loc.loc_end\nlet range loc : Range.t = (start loc, end_ loc)\n\nlet toString (loc : t) =\n  (if loc.loc_ghost then \"__ghost__\" else \"\") ^ (loc |> range |> Range.toString)\n\nlet hasPos ~pos loc = start loc <= pos && pos < end_ loc\n\n(** Allows the character after the end to be included. Ie when the cursor is at\n    the end of the word, like `someIdentifier<cursor>`. Useful in some\n    scenarios. *)\nlet hasPosInclusiveEnd ~pos loc = start loc <= pos && pos <= end_ loc\n\nlet mkPosition (pos : Pos.t) =\n  let line, character = pos in\n  {Protocol.line; character}\n\nlet rangeOfLoc (loc : t) =\n  let start = loc |> start |> mkPosition in\n  let end_ = loc |> end_ |> mkPosition in\n  {Protocol.start; end_}\n"
  },
  {
    "path": "analysis/src/LocalTables.ml",
    "content": "open SharedTypes\n\ntype 'a table = (string * (int * int), 'a Declared.t) Hashtbl.t\ntype namesUsed = (string, unit) Hashtbl.t\n\ntype t = {\n  namesUsed: namesUsed;\n  mutable resultRev: Completion.t list;\n  constructorTable: Constructor.t table;\n  modulesTable: Module.t table;\n  typesTable: Type.t table;\n  valueTable: Types.type_expr table;\n}\n\nlet create () =\n  {\n    namesUsed = Hashtbl.create 1;\n    resultRev = [];\n    constructorTable = Hashtbl.create 1;\n    modulesTable = Hashtbl.create 1;\n    typesTable = Hashtbl.create 1;\n    valueTable = Hashtbl.create 1;\n  }\n\nlet populateValues ~env localTables =\n  env.QueryEnv.file.stamps\n  |> Stamps.iterValues (fun _ declared ->\n         Hashtbl.replace localTables.valueTable\n           (declared.name.txt, declared.name.loc |> Loc.start)\n           declared)\n\nlet populateConstructors ~env localTables =\n  env.QueryEnv.file.stamps\n  |> Stamps.iterConstructors (fun _ declared ->\n         Hashtbl.replace localTables.constructorTable\n           (declared.name.txt, declared.extentLoc |> Loc.start)\n           declared)\n\nlet populateTypes ~env localTables =\n  env.QueryEnv.file.stamps\n  |> Stamps.iterTypes (fun _ declared ->\n         Hashtbl.replace localTables.typesTable\n           (declared.name.txt, declared.name.loc |> Loc.start)\n           declared)\n\nlet populateModules ~env localTables =\n  env.QueryEnv.file.stamps\n  |> Stamps.iterModules (fun _ declared ->\n         Hashtbl.replace localTables.modulesTable\n           (declared.name.txt, declared.extentLoc |> Loc.start)\n           declared)\n"
  },
  {
    "path": "analysis/src/Log.ml",
    "content": "let verbose = ref false\nlet log msg = if !verbose then print_endline msg\n"
  },
  {
    "path": "analysis/src/Markdown.ml",
    "content": "let spacing = \"\\n```\\n \\n```\\n\"\nlet codeBlock code = Printf.sprintf \"```rescript\\n%s\\n```\" code\nlet divider = \"\\n---\\n\"\n\ntype link = {startPos: Protocol.position; file: string; label: string}\n\nlet linkToCommandArgs link =\n  Printf.sprintf \"[\\\"%s\\\",%i,%i]\" link.file link.startPos.line\n    link.startPos.character\n\nlet makeGotoCommand link =\n  Printf.sprintf \"[%s](command:rescript-vscode.go_to_location?%s)\" link.label\n    (Uri.encodeURIComponent (linkToCommandArgs link))\n\nlet goToDefinitionText ~env ~pos =\n  let startLine, startCol = Pos.ofLexing pos in\n  \"\\nGo to: \"\n  ^ makeGotoCommand\n      {\n        label = \"Type definition\";\n        file = Uri.toString env.SharedTypes.QueryEnv.file.uri;\n        startPos = {line = startLine; character = startCol};\n      }\n"
  },
  {
    "path": "analysis/src/ModuleResolution.ml",
    "content": "let ( /+ ) = Filename.concat\n\nlet rec resolveNodeModulePath ~startPath name =\n  let path = startPath /+ \"node_modules\" /+ name in\n  if Files.exists path then Some path\n  else if Filename.dirname startPath = startPath then None\n  else resolveNodeModulePath ~startPath:(Filename.dirname startPath) name\n"
  },
  {
    "path": "analysis/src/Packages.ml",
    "content": "open SharedTypes\n\n(* Creates the `pathsForModule` hashtbl, which maps a `moduleName` to it's `paths` (the ml/re, mli/rei, cmt, and cmti files) *)\nlet makePathsForModule ~projectFilesAndPaths ~dependenciesFilesAndPaths =\n  let pathsForModule = Hashtbl.create 30 in\n  dependenciesFilesAndPaths\n  |> List.iter (fun (modName, paths) ->\n         Hashtbl.replace pathsForModule modName paths);\n  projectFilesAndPaths\n  |> List.iter (fun (modName, paths) ->\n         Hashtbl.replace pathsForModule modName paths);\n  pathsForModule\n\nlet overrideRescriptVersion = ref None\n\nlet getReScriptVersion () =\n  match !overrideRescriptVersion with\n  | Some overrideRescriptVersion -> overrideRescriptVersion\n  | None -> (\n    (* TODO: Include patch stuff when needed *)\n    let defaultVersion = (11, 0) in\n    try\n      let value = Sys.getenv \"RESCRIPT_VERSION\" in\n      let version =\n        match value |> String.split_on_char '.' with\n        | major :: minor :: _rest -> (\n          match (int_of_string_opt major, int_of_string_opt minor) with\n          | Some major, Some minor -> (major, minor)\n          | _ -> defaultVersion)\n        | _ -> defaultVersion\n      in\n      version\n    with Not_found -> defaultVersion)\n\nlet newBsPackage ~rootPath =\n  let rescriptJson = Filename.concat rootPath \"rescript.json\" in\n  let bsconfigJson = Filename.concat rootPath \"bsconfig.json\" in\n\n  let parseRaw raw =\n    let libBs =\n      match !Cfg.isDocGenFromCompiler with\n      | true -> BuildSystem.getStdlib rootPath\n      | false -> BuildSystem.getLibBs rootPath\n    in\n    match Json.parse raw with\n    | Some config -> (\n      let namespace = FindFiles.getNamespace config in\n      let rescriptVersion = getReScriptVersion () in\n      let suffix =\n        match config |> Json.get \"suffix\" with\n        | Some (String suffix) -> suffix\n        | _ -> \".js\"\n      in\n      let uncurried =\n        let ns = config |> Json.get 
\"uncurried\" in\n        match (rescriptVersion, ns) with\n        | (major, _), None when major >= 11 -> Some true\n        | _, ns -> Option.bind ns Json.bool\n      in\n      let genericJsxModule =\n        let jsxConfig = config |> Json.get \"jsx\" in\n        match jsxConfig with\n        | Some jsxConfig -> (\n          match jsxConfig |> Json.get \"module\" with\n          | Some (String m) when String.lowercase_ascii m <> \"react\" -> Some m\n          | _ -> None)\n        | None -> None\n      in\n      let uncurried = uncurried = Some true in\n      match libBs with\n      | None -> None\n      | Some libBs ->\n        let cached = Cache.readCache (Cache.targetFileFromLibBs libBs) in\n        let projectFiles, dependenciesFiles, pathsForModule =\n          match cached with\n          | Some cached ->\n            ( cached.projectFiles,\n              cached.dependenciesFiles,\n              cached.pathsForModule )\n          | None ->\n            let dependenciesFilesAndPaths =\n              match FindFiles.findDependencyFiles rootPath config with\n              | None -> []\n              | Some (_dependencyDirectories, dependenciesFilesAndPaths) ->\n                dependenciesFilesAndPaths\n            in\n            let sourceDirectories =\n              FindFiles.getSourceDirectories ~includeDev:true ~baseDir:rootPath\n                config\n            in\n            let projectFilesAndPaths =\n              FindFiles.findProjectFiles\n                ~public:(FindFiles.getPublic config)\n                ~namespace ~path:rootPath ~sourceDirectories ~libBs\n            in\n            let pathsForModule =\n              makePathsForModule ~projectFilesAndPaths\n                ~dependenciesFilesAndPaths\n            in\n            let projectFiles =\n              projectFilesAndPaths |> List.map fst |> FileSet.of_list\n            in\n            let dependenciesFiles =\n              dependenciesFilesAndPaths |> List.map fst |> 
FileSet.of_list\n            in\n            (projectFiles, dependenciesFiles, pathsForModule)\n        in\n        Some\n          (let opens_from_namespace =\n             match namespace with\n             | None -> []\n             | Some namespace ->\n               let cmt = Filename.concat libBs namespace ^ \".cmt\" in\n               Hashtbl.replace pathsForModule namespace (Namespace {cmt});\n               let path = [FindFiles.nameSpaceToName namespace] in\n               [path]\n           in\n           let opens_from_bsc_flags =\n             let bind f x = Option.bind x f in\n             match Json.get \"bsc-flags\" config |> bind Json.array with\n             | Some l ->\n               List.fold_left\n                 (fun opens item ->\n                   match item |> Json.string with\n                   | None -> opens\n                   | Some s -> (\n                     let parts = String.split_on_char ' ' s in\n                     match parts with\n                     | \"-open\" :: name :: _ ->\n                       let path = name |> String.split_on_char '.' 
in\n                       path :: opens\n                     | _ -> opens))\n                 [] l\n             | None -> []\n           in\n           let opens =\n             [(if uncurried then \"PervasivesU\" else \"Pervasives\"); \"JsxModules\"]\n             :: opens_from_namespace\n             |> List.rev_append opens_from_bsc_flags\n             |> List.map (fun path -> path @ [\"place holder\"])\n           in\n           {\n             genericJsxModule;\n             suffix;\n             rescriptVersion;\n             rootPath;\n             projectFiles;\n             dependenciesFiles;\n             pathsForModule;\n             opens;\n             namespace;\n             builtInCompletionModules =\n               (if\n                  opens_from_bsc_flags\n                  |> List.find_opt (fun opn ->\n                         match opn with\n                         | [\"RescriptCore\"] -> true\n                         | _ -> false)\n                  |> Option.is_some\n                then\n                  {\n                    arrayModulePath = [\"Array\"];\n                    optionModulePath = [\"Option\"];\n                    stringModulePath = [\"String\"];\n                    intModulePath = [\"Int\"];\n                    floatModulePath = [\"Float\"];\n                    promiseModulePath = [\"Promise\"];\n                    listModulePath = [\"List\"];\n                    resultModulePath = [\"Result\"];\n                    exnModulePath = [\"Exn\"];\n                    regexpModulePath = [\"RegExp\"];\n                  }\n                else if\n                  opens_from_bsc_flags\n                  |> List.find_opt (fun opn ->\n                         match opn with\n                         | [\"Belt\"] -> true\n                         | _ -> false)\n                  |> Option.is_some\n                then\n                  {\n                    arrayModulePath = [\"Array\"];\n                    
optionModulePath = [\"Option\"];\n                    stringModulePath = [\"Js\"; \"String2\"];\n                    intModulePath = [\"Int\"];\n                    floatModulePath = [\"Float\"];\n                    promiseModulePath = [\"Js\"; \"Promise\"];\n                    listModulePath = [\"List\"];\n                    resultModulePath = [\"Result\"];\n                    exnModulePath = [\"Js\"; \"Exn\"];\n                    regexpModulePath = [\"Js\"; \"Re\"];\n                  }\n                else\n                  {\n                    arrayModulePath = [\"Js\"; \"Array2\"];\n                    optionModulePath = [\"Belt\"; \"Option\"];\n                    stringModulePath = [\"Js\"; \"String2\"];\n                    intModulePath = [\"Belt\"; \"Int\"];\n                    floatModulePath = [\"Belt\"; \"Float\"];\n                    promiseModulePath = [\"Js\"; \"Promise\"];\n                    listModulePath = [\"Belt\"; \"List\"];\n                    resultModulePath = [\"Belt\"; \"Result\"];\n                    exnModulePath = [\"Js\"; \"Exn\"];\n                    regexpModulePath = [\"Js\"; \"Re\"];\n                  });\n             uncurried;\n           }))\n    | None -> None\n  in\n\n  match Files.readFile rescriptJson with\n  | Some raw -> parseRaw raw\n  | None -> (\n    Log.log (\"Unable to read \" ^ rescriptJson);\n    match Files.readFile bsconfigJson with\n    | Some raw -> parseRaw raw\n    | None ->\n      Log.log (\"Unable to read \" ^ bsconfigJson);\n      None)\n\nlet findRoot ~uri packagesByRoot =\n  let path = Uri.toPath uri in\n  let rec loop path =\n    if path = \"/\" then None\n    else if Hashtbl.mem packagesByRoot path then Some (`Root path)\n    else if\n      Files.exists (Filename.concat path \"rescript.json\")\n      || Files.exists (Filename.concat path \"bsconfig.json\")\n    then Some (`Bs path)\n    else\n      let parent = Filename.dirname path in\n      if parent = path then (* reached root *) 
None else loop parent\n  in\n  loop (if Sys.is_directory path then path else Filename.dirname path)\n\nlet getPackage ~uri =\n  let open SharedTypes in\n  if Hashtbl.mem state.rootForUri uri then\n    Some (Hashtbl.find state.packagesByRoot (Hashtbl.find state.rootForUri uri))\n  else\n    match findRoot ~uri state.packagesByRoot with\n    | None ->\n      Log.log \"No root directory found\";\n      None\n    | Some (`Root rootPath) ->\n      Hashtbl.replace state.rootForUri uri rootPath;\n      Some\n        (Hashtbl.find state.packagesByRoot (Hashtbl.find state.rootForUri uri))\n    | Some (`Bs rootPath) -> (\n      match newBsPackage ~rootPath with\n      | None -> None\n      | Some package ->\n        Hashtbl.replace state.rootForUri uri package.rootPath;\n        Hashtbl.replace state.packagesByRoot package.rootPath package;\n        Some package)\n"
  },
  {
    "path": "analysis/src/PipeCompletionUtils.ml",
    "content": "let addJsxCompletionItems ~mainTypeId ~env ~prefix ~(full : SharedTypes.full)\n    ~rawOpens typ =\n  match mainTypeId with\n  | (\"array\" | \"float\" | \"string\" | \"int\") as builtinNameToComplete ->\n    if Utils.checkName builtinNameToComplete ~prefix ~exact:false then\n      let name =\n        match full.package.genericJsxModule with\n        | None -> \"React.\" ^ builtinNameToComplete\n        | Some g ->\n          g ^ \".\" ^ builtinNameToComplete\n          |> String.split_on_char '.'\n          |> TypeUtils.removeOpensFromCompletionPath ~rawOpens\n               ~package:full.package\n          |> String.concat \".\"\n      in\n      [\n        SharedTypes.Completion.create name ~synthetic:true\n          ~includesSnippets:true ~kind:(Value typ) ~env ~sortText:\"A\"\n          ~docstring:\n            [\n              \"Turns `\" ^ builtinNameToComplete\n              ^ \"` into a JSX element so it can be used inside of JSX.\";\n            ];\n      ]\n    else []\n  | _ -> []\n"
  },
  {
    "path": "analysis/src/Pos.ml",
    "content": "type t = int * int\n\nlet ofLexing {Lexing.pos_lnum; pos_cnum; pos_bol} =\n  (pos_lnum - 1, pos_cnum - pos_bol)\n\nlet toString (loc, col) = Printf.sprintf \"%d:%d\" loc col\n\nlet offsetOfLine text line =\n  let ln = String.length text in\n  let rec loop i lno =\n    if i >= ln then None\n    else\n      match text.[i] with\n      | '\\n' -> if lno = line - 1 then Some (i + 1) else loop (i + 1) (lno + 1)\n      | _ -> loop (i + 1) lno\n  in\n  match line with\n  | 0 -> Some 0\n  | _ -> loop 0 0\n\nlet positionToOffset text (line, character) =\n  match offsetOfLine text line with\n  | None -> None\n  | Some bol ->\n    if bol + character <= String.length text then Some (bol + character)\n    else None\n\nlet posBeforeCursor pos = (fst pos, max 0 (snd pos - 1))\n\nlet posOfDot text ~(pos : int * int) ~offset =\n  let rec loop i =\n    if i < 0 then None\n    else\n      match text.[i] with\n      | '.' -> Some (i + 1)\n      | '\\n' -> None\n      | _ -> loop (i - 1)\n  in\n  match loop (offset - 1) with\n  | None -> None\n  | Some offsetBeforeDot ->\n    let line, col = pos in\n    let newCol = max 0 (col - (offset - offsetBeforeDot)) in\n    Some (line, newCol)\n"
  },
  {
    "path": "analysis/src/PrintType.ml",
    "content": "let printExpr ?(lineWidth = 60) typ =\n  Printtyp.reset_names ();\n  Printtyp.reset_and_mark_loops typ;\n  Res_doc.to_string ~width:lineWidth\n    (Res_outcome_printer.print_out_type_doc (Printtyp.tree_of_typexp false typ))\n\nlet printDecl ?printNameAsIs ~recStatus name decl =\n  Printtyp.reset_names ();\n  Res_doc.to_string ~width:60\n    (Res_outcome_printer.print_out_sig_item_doc ?print_name_as_is:printNameAsIs\n       (Printtyp.tree_of_type_declaration (Ident.create name) decl recStatus))\n"
  },
  {
    "path": "analysis/src/ProcessAttributes.ml",
    "content": "open SharedTypes\n\n(* TODO should I hang on to location? *)\nlet rec findDocAttribute attributes =\n  let open Parsetree in\n  match attributes with\n  | [] -> None\n  | ( {Asttypes.txt = \"ocaml.doc\" | \"ocaml.text\" | \"ns.doc\" | \"res.doc\"},\n      PStr\n        [\n          {\n            pstr_desc =\n              Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (doc, _))}, _);\n          };\n        ] )\n    :: _ ->\n    Some doc\n  | _ :: rest -> findDocAttribute rest\n\nlet rec findDeprecatedAttribute attributes =\n  let open Parsetree in\n  match attributes with\n  | [] -> None\n  | ( {Asttypes.txt = \"deprecated\"},\n      PStr\n        [\n          {\n            pstr_desc =\n              Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (msg, _))}, _);\n          };\n        ] )\n    :: _ ->\n    Some msg\n  | ({Asttypes.txt = \"deprecated\"}, _) :: _ -> Some \"\"\n  | _ :: rest -> findDeprecatedAttribute rest\n\nlet newDeclared ~item ~extent ~name ~stamp ~modulePath isExported attributes =\n  {\n    Declared.name;\n    stamp;\n    extentLoc = extent;\n    isExported;\n    modulePath;\n    deprecated = findDeprecatedAttribute attributes;\n    docstring =\n      (match findDocAttribute attributes with\n      | None -> []\n      | Some d -> [d]);\n    item;\n  }\n\nlet rec findEditorCompleteFromAttribute ?(modulePaths = []) attributes =\n  let open Parsetree in\n  match attributes with\n  | [] -> modulePaths\n  | ( {Asttypes.txt = \"editor.completeFrom\"},\n      PStr [{pstr_desc = Pstr_eval (payloadExpr, _)}] )\n    :: rest ->\n    let items =\n      match payloadExpr with\n      | {pexp_desc = Pexp_array items} -> items\n      | p -> [p]\n    in\n    let modulePathsFromArray =\n      items\n      |> List.filter_map (fun item ->\n             match item.Parsetree.pexp_desc with\n             | Pexp_construct ({txt = path}, None) ->\n               Some (Utils.flattenLongIdent path)\n             | _ -> None)\n    in\n    
findEditorCompleteFromAttribute\n      ~modulePaths:(modulePathsFromArray @ modulePaths)\n      rest\n  | _ :: rest -> findEditorCompleteFromAttribute ~modulePaths rest\n"
  },
  {
    "path": "analysis/src/ProcessCmt.ml",
    "content": "open SharedTypes\n\nlet isModuleType (declared : Module.t Declared.t) =\n  match declared.modulePath with\n  | ExportedModule {isType} -> isType\n  | _ -> false\n\nlet addDeclared ~(name : string Location.loc) ~extent ~stamp ~(env : Env.t)\n    ~item attributes addExported addStamp =\n  let isExported = addExported name.txt stamp in\n  let declared =\n    ProcessAttributes.newDeclared ~item ~extent ~name ~stamp\n      ~modulePath:env.modulePath isExported attributes\n  in\n  addStamp env.stamps stamp declared;\n  declared\n\nlet attrsToDocstring attrs =\n  match ProcessAttributes.findDocAttribute attrs with\n  | None -> []\n  | Some docstring -> [docstring]\n\nlet mapRecordField {Types.ld_id; ld_type; ld_attributes} =\n  let astamp = Ident.binding_time ld_id in\n  let name = Ident.name ld_id in\n  {\n    stamp = astamp;\n    fname = Location.mknoloc name;\n    typ = ld_type;\n    optional = Res_parsetree_viewer.has_optional_attribute ld_attributes;\n    docstring =\n      (match ProcessAttributes.findDocAttribute ld_attributes with\n      | None -> []\n      | Some docstring -> [docstring]);\n    deprecated = ProcessAttributes.findDeprecatedAttribute ld_attributes;\n  }\n\nlet rec forTypeSignatureItem ~(env : SharedTypes.Env.t) ~(exported : Exported.t)\n    (item : Types.signature_item) =\n  match item with\n  | Sig_value (ident, {val_type; val_attributes; val_loc = loc}) ->\n    let item = val_type in\n    let stamp = Ident.binding_time ident in\n    let oldDeclared = Stamps.findValue env.stamps stamp in\n    let declared =\n      addDeclared\n        ~name:(Location.mkloc (Ident.name ident) loc)\n        ~extent:loc ~stamp ~env ~item val_attributes\n        (Exported.add exported Exported.Value)\n        Stamps.addValue\n    in\n    let declared =\n      (* When an id is shadowed, a module constraint without the doc comment is created.\n         Here the existing doc comment is restored. 
See https://github.com/rescript-lang/rescript-vscode/issues/621 *)\n      match oldDeclared with\n      | Some oldDeclared when declared.docstring = [] ->\n        let newDeclared = {declared with docstring = oldDeclared.docstring} in\n        Stamps.addValue env.stamps stamp newDeclared;\n        newDeclared\n      | _ -> declared\n    in\n    [\n      {\n        Module.kind = Module.Value declared.item;\n        name = declared.name.txt;\n        docstring = declared.docstring;\n        deprecated = declared.deprecated;\n        loc = declared.extentLoc;\n      };\n    ]\n  | Sig_type\n      ( ident,\n        ({type_loc; type_kind; type_manifest; type_attributes} as decl),\n        recStatus ) ->\n    let declared =\n      let name = Location.mknoloc (Ident.name ident) in\n      addDeclared ~extent:type_loc\n        ~item:\n          {\n            Type.decl;\n            attributes = type_attributes;\n            name = name.txt;\n            kind =\n              (match type_kind with\n              | Type_abstract -> (\n                match type_manifest with\n                | Some {desc = Tconstr (path, args, _)} ->\n                  Abstract (Some (path, args))\n                | Some {desc = Ttuple items} -> Tuple items\n                (* TODO dig *)\n                | _ -> Abstract None)\n              | Type_open -> Open\n              | Type_variant constructors ->\n                Variant\n                  (constructors\n                  |> List.map\n                       (fun\n                         {Types.cd_loc; cd_id; cd_args; cd_res; cd_attributes}\n                       ->\n                         let name = Ident.name cd_id in\n                         let stamp = Ident.binding_time cd_id in\n                         let item =\n                           {\n                             Constructor.stamp;\n                             cname = Location.mknoloc name;\n                             args =\n                               
(match cd_args with\n                               | Cstr_tuple args ->\n                                 Args\n                                   (args\n                                   |> List.map (fun t -> (t, Location.none)))\n                               | Cstr_record fields ->\n                                 InlineRecord (fields |> List.map mapRecordField));\n                             res = cd_res;\n                             typeDecl = (name, decl);\n                             docstring = attrsToDocstring cd_attributes;\n                             deprecated =\n                               ProcessAttributes.findDeprecatedAttribute\n                                 cd_attributes;\n                           }\n                         in\n                         let declared =\n                           ProcessAttributes.newDeclared ~item ~extent:cd_loc\n                             ~name:(Location.mknoloc name)\n                             ~stamp (* TODO maybe this needs another child *)\n                             ~modulePath:env.modulePath true cd_attributes\n                         in\n                         Stamps.addConstructor env.stamps stamp declared;\n                         item))\n              | Type_record (fields, _) ->\n                Record (fields |> List.map mapRecordField));\n          }\n        ~name ~stamp:(Ident.binding_time ident) ~env type_attributes\n        (Exported.add exported Exported.Type)\n        Stamps.addType\n    in\n    [\n      {\n        Module.kind = Type (declared.item, recStatus);\n        name = declared.name.txt;\n        docstring = declared.docstring;\n        deprecated = declared.deprecated;\n        loc = declared.extentLoc;\n      };\n    ]\n  | Sig_module (ident, {md_type; md_attributes; md_loc}, _) ->\n    let name = Ident.name ident in\n    let declared =\n      addDeclared ~extent:md_loc\n        ~item:(forTypeModule ~name ~env md_type)\n        ~name:(Location.mkloc name 
md_loc)\n        ~stamp:(Ident.binding_time ident) ~env md_attributes\n        (Exported.add exported Exported.Module)\n        Stamps.addModule\n    in\n    [\n      {\n        Module.kind =\n          Module {type_ = declared.item; isModuleType = isModuleType declared};\n        name = declared.name.txt;\n        docstring = declared.docstring;\n        deprecated = declared.deprecated;\n        loc = declared.extentLoc;\n      };\n    ]\n  | _ -> []\n\nand forTypeSignature ~name ~env signature =\n  let exported = Exported.init () in\n  let items =\n    List.fold_right\n      (fun item items -> forTypeSignatureItem ~env ~exported item @ items)\n      signature []\n  in\n  {Module.name; docstring = []; exported; items; deprecated = None}\n\nand forTypeModule ~name ~env moduleType =\n  match moduleType with\n  | Types.Mty_ident path -> Ident path\n  | Mty_alias (_ (* 402 *), path) -> Ident path\n  | Mty_signature signature -> Structure (forTypeSignature ~name ~env signature)\n  | Mty_functor (_argIdent, _argType, resultType) ->\n    forTypeModule ~name ~env resultType\n\nlet getModuleTypePath mod_desc =\n  match mod_desc with\n  | Typedtree.Tmty_ident (path, _) | Tmty_alias (path, _) -> Some path\n  | Tmty_signature _ | Tmty_functor _ | Tmty_with _ | Tmty_typeof _ -> None\n\nlet forTypeDeclaration ~env ~(exported : Exported.t)\n    {\n      Typedtree.typ_id;\n      typ_loc;\n      typ_name = name;\n      typ_attributes;\n      typ_type;\n      typ_kind;\n      typ_manifest;\n    } ~recStatus =\n  let stamp = Ident.binding_time typ_id in\n  let declared =\n    addDeclared ~extent:typ_loc\n      ~item:\n        {\n          Type.decl = typ_type;\n          attributes = typ_attributes;\n          name = name.txt;\n          kind =\n            (match typ_kind with\n            | Ttype_abstract -> (\n              match typ_manifest with\n              | Some {ctyp_desc = Ttyp_constr (path, _lident, args)} ->\n                Abstract\n                  (Some (path, 
args |> List.map (fun t -> t.Typedtree.ctyp_type)))\n              | Some {ctyp_desc = Ttyp_tuple items} ->\n                Tuple (items |> List.map (fun t -> t.Typedtree.ctyp_type))\n              (* TODO dig *)\n              | _ -> Abstract None)\n            | Ttype_open -> Open\n            | Ttype_variant constructors ->\n              Variant\n                (constructors\n                |> List.map\n                     (fun\n                       {\n                         Typedtree.cd_id;\n                         cd_name = cname;\n                         cd_args;\n                         cd_res;\n                         cd_attributes;\n                         cd_loc;\n                       }\n                     ->\n                       let stamp = Ident.binding_time cd_id in\n                       let item =\n                         {\n                           Constructor.stamp;\n                           cname;\n                           deprecated =\n                             ProcessAttributes.findDeprecatedAttribute\n                               cd_attributes;\n                           args =\n                             (match cd_args with\n                             | Cstr_tuple args ->\n                               Args\n                                 (args\n                                 |> List.map (fun t ->\n                                        (t.Typedtree.ctyp_type, t.ctyp_loc)))\n                             | Cstr_record fields ->\n                               InlineRecord\n                                 (fields\n                                 |> List.map\n                                      (fun (f : Typedtree.label_declaration) ->\n                                        let astamp =\n                                          Ident.binding_time f.ld_id\n                                        in\n                                        let name = Ident.name f.ld_id in\n                         
               {\n                                          stamp = astamp;\n                                          fname = Location.mknoloc name;\n                                          typ = f.ld_type.ctyp_type;\n                                          optional =\n                                            Res_parsetree_viewer\n                                            .has_optional_attribute\n                                              f.ld_attributes;\n                                          docstring =\n                                            (match\n                                               ProcessAttributes\n                                               .findDocAttribute f.ld_attributes\n                                             with\n                                            | None -> []\n                                            | Some docstring -> [docstring]);\n                                          deprecated =\n                                            ProcessAttributes\n                                            .findDeprecatedAttribute\n                                              f.ld_attributes;\n                                        })));\n                           res =\n                             (match cd_res with\n                             | None -> None\n                             | Some t -> Some t.ctyp_type);\n                           typeDecl = (name.txt, typ_type);\n                           docstring = attrsToDocstring cd_attributes;\n                         }\n                       in\n                       let declared =\n                         ProcessAttributes.newDeclared ~item ~extent:cd_loc\n                           ~name:cname ~stamp ~modulePath:env.modulePath true\n                           cd_attributes\n                       in\n                       Stamps.addConstructor env.stamps stamp declared;\n                       item))\n            | Ttype_record fields 
->\n              Record\n                (fields\n                |> List.map\n                     (fun\n                       {\n                         Typedtree.ld_id;\n                         ld_name = fname;\n                         ld_type = {ctyp_type};\n                         ld_attributes;\n                       }\n                     ->\n                       let fstamp = Ident.binding_time ld_id in\n                       {\n                         stamp = fstamp;\n                         fname;\n                         typ = ctyp_type;\n                         optional =\n                           Res_parsetree_viewer.has_optional_attribute\n                             ld_attributes;\n                         docstring = attrsToDocstring ld_attributes;\n                         deprecated =\n                           ProcessAttributes.findDeprecatedAttribute\n                             ld_attributes;\n                       })));\n        }\n      ~name ~stamp ~env typ_attributes\n      (Exported.add exported Exported.Type)\n      Stamps.addType\n  in\n  {\n    Module.kind = Module.Type (declared.item, recStatus);\n    name = declared.name.txt;\n    docstring = declared.docstring;\n    deprecated = declared.deprecated;\n    loc = declared.extentLoc;\n  }\n\nlet rec forSignatureItem ~env ~(exported : Exported.t)\n    (item : Typedtree.signature_item) =\n  match item.sig_desc with\n  | Tsig_value {val_id; val_loc; val_name = name; val_desc; val_attributes} ->\n    let declared =\n      addDeclared ~name\n        ~stamp:(Ident.binding_time val_id)\n        ~extent:val_loc ~item:val_desc.ctyp_type ~env val_attributes\n        (Exported.add exported Exported.Value)\n        Stamps.addValue\n    in\n    [\n      {\n        Module.kind = Module.Value declared.item;\n        name = declared.name.txt;\n        docstring = declared.docstring;\n        deprecated = declared.deprecated;\n        loc = declared.extentLoc;\n      };\n    ]\n  | 
Tsig_type (recFlag, decls) ->\n    decls\n    |> List.mapi (fun i decl ->\n           let recStatus =\n             match recFlag with\n             | Recursive when i = 0 -> Types.Trec_first\n             | Nonrecursive when i = 0 -> Types.Trec_not\n             | _ -> Types.Trec_next\n           in\n           decl |> forTypeDeclaration ~env ~exported ~recStatus)\n  | Tsig_module\n      {md_id; md_attributes; md_loc; md_name = name; md_type = {mty_type}} ->\n    let item =\n      forTypeModule ~name:name.txt\n        ~env:(env |> Env.addModule ~name:name.txt)\n        mty_type\n    in\n    let declared =\n      addDeclared ~item ~name ~extent:md_loc ~stamp:(Ident.binding_time md_id)\n        ~env md_attributes\n        (Exported.add exported Exported.Module)\n        Stamps.addModule\n    in\n    [\n      {\n        Module.kind =\n          Module {type_ = declared.item; isModuleType = isModuleType declared};\n        name = declared.name.txt;\n        docstring = declared.docstring;\n        deprecated = declared.deprecated;\n        loc = declared.extentLoc;\n      };\n    ]\n  | Tsig_recmodule modDecls ->\n    modDecls\n    |> List.map (fun modDecl ->\n           forSignatureItem ~env ~exported\n             {item with sig_desc = Tsig_module modDecl})\n    |> List.flatten\n  | Tsig_include {incl_mod; incl_type} ->\n    let env =\n      match getModuleTypePath incl_mod.mty_desc with\n      | None -> env\n      | Some path ->\n        {env with modulePath = IncludedModule (path, env.modulePath)}\n    in\n    let topLevel =\n      List.fold_right\n        (fun item items -> forTypeSignatureItem ~env ~exported item @ items)\n        incl_type []\n    in\n    topLevel\n  (* TODO: process other things here *)\n  | _ -> []\n\nlet forSignature ~name ~env sigItems =\n  let exported = Exported.init () in\n  let items =\n    sigItems |> List.map (forSignatureItem ~env ~exported) |> List.flatten\n  in\n  let attributes =\n    match sigItems with\n    | {sig_desc = 
Tsig_attribute attribute} :: _ -> [attribute]\n    | _ -> []\n  in\n  let docstring = attrsToDocstring attributes in\n  let deprecated = ProcessAttributes.findDeprecatedAttribute attributes in\n  {Module.name; docstring; exported; items; deprecated}\n\nlet forTreeModuleType ~name ~env {Typedtree.mty_desc} =\n  match mty_desc with\n  | Tmty_ident _ -> None\n  | Tmty_signature {sig_items} ->\n    let contents = forSignature ~name ~env sig_items in\n    Some (Module.Structure contents)\n  | _ -> None\n\nlet rec getModulePath mod_desc =\n  match mod_desc with\n  | Typedtree.Tmod_ident (path, _lident) -> Some path\n  | Tmod_structure _ -> None\n  | Tmod_functor (_ident, _argName, _maybeType, _resultExpr) -> None\n  | Tmod_apply (functor_, _arg, _coercion) -> getModulePath functor_.mod_desc\n  | Tmod_unpack (_expr, _moduleType) -> None\n  | Tmod_constraint (expr, _typ, _constraint, _coercion) ->\n    getModulePath expr.mod_desc\n\nlet rec forStructureItem ~env ~(exported : Exported.t) item =\n  match item.Typedtree.str_desc with\n  | Tstr_value (_isRec, bindings) ->\n    let items = ref [] in\n    let rec handlePattern attributes pat =\n      match pat.Typedtree.pat_desc with\n      | Tpat_var (ident, name)\n      | Tpat_alias (_, ident, name) (* let x : t = ... 
*) ->\n        let item = pat.pat_type in\n        let declared =\n          addDeclared ~name ~stamp:(Ident.binding_time ident) ~env\n            ~extent:pat.pat_loc ~item attributes\n            (Exported.add exported Exported.Value)\n            Stamps.addValue\n        in\n        items :=\n          {\n            Module.kind = Module.Value declared.item;\n            name = declared.name.txt;\n            docstring = declared.docstring;\n            deprecated = declared.deprecated;\n            loc = declared.extentLoc;\n          }\n          :: !items\n      | Tpat_tuple pats | Tpat_array pats | Tpat_construct (_, _, pats) ->\n        pats |> List.iter (fun p -> handlePattern [] p)\n      | Tpat_or (p, _, _) -> handlePattern [] p\n      | Tpat_record (items, _) ->\n        items |> List.iter (fun (_, _, p) -> handlePattern [] p)\n      | Tpat_lazy p -> handlePattern [] p\n      | Tpat_variant (_, Some p, _) -> handlePattern [] p\n      | Tpat_variant (_, None, _) | Tpat_any | Tpat_constant _ -> ()\n    in\n    List.iter\n      (fun {Typedtree.vb_pat; vb_attributes} ->\n        handlePattern vb_attributes vb_pat)\n      bindings;\n    !items\n  | Tstr_module\n      {mb_id; mb_attributes; mb_loc; mb_name = name; mb_expr = {mod_desc}}\n    when not\n           (String.length name.txt >= 6\n           && (String.sub name.txt 0 6 = \"local_\") [@doesNotRaise])\n         (* %%private generates a dummy module called local_... 
*) ->\n    let item = forModule ~env mod_desc name.txt in\n    let declared =\n      addDeclared ~item ~name ~extent:mb_loc ~stamp:(Ident.binding_time mb_id)\n        ~env mb_attributes\n        (Exported.add exported Exported.Module)\n        Stamps.addModule\n    in\n    [\n      {\n        Module.kind =\n          Module {type_ = declared.item; isModuleType = isModuleType declared};\n        name = declared.name.txt;\n        docstring = declared.docstring;\n        deprecated = declared.deprecated;\n        loc = declared.extentLoc;\n      };\n    ]\n  | Tstr_recmodule modDecls ->\n    modDecls\n    |> List.map (fun modDecl ->\n           forStructureItem ~env ~exported\n             {item with str_desc = Tstr_module modDecl})\n    |> List.flatten\n  | Tstr_modtype\n      {\n        mtd_name = name;\n        mtd_id;\n        mtd_attributes;\n        mtd_type = Some {mty_type = modType};\n        mtd_loc;\n      } ->\n    let env = env |> Env.addModuleType ~name:name.txt in\n    let modTypeItem = forTypeModule ~name:name.txt ~env modType in\n    let declared =\n      addDeclared ~item:modTypeItem ~name ~extent:mtd_loc\n        ~stamp:(Ident.binding_time mtd_id)\n        ~env mtd_attributes\n        (Exported.add exported Exported.Module)\n        Stamps.addModule\n    in\n    [\n      {\n        Module.kind =\n          Module {type_ = declared.item; isModuleType = isModuleType declared};\n        name = declared.name.txt;\n        docstring = declared.docstring;\n        deprecated = declared.deprecated;\n        loc = declared.extentLoc;\n      };\n    ]\n  | Tstr_include {incl_mod; incl_type} ->\n    let env =\n      match getModulePath incl_mod.mod_desc with\n      | None -> env\n      | Some path ->\n        {env with modulePath = IncludedModule (path, env.modulePath)}\n    in\n    let topLevel =\n      List.fold_right\n        (fun item items -> forTypeSignatureItem ~env ~exported item @ items)\n        incl_type []\n    in\n    topLevel\n  | 
Tstr_primitive vd when JsxHacks.primitiveIsFragment vd = false ->\n    let declared =\n      addDeclared ~extent:vd.val_loc ~item:vd.val_val.val_type ~name:vd.val_name\n        ~stamp:(Ident.binding_time vd.val_id)\n        ~env vd.val_attributes\n        (Exported.add exported Exported.Value)\n        Stamps.addValue\n    in\n    [\n      {\n        Module.kind = Value declared.item;\n        name = declared.name.txt;\n        docstring = declared.docstring;\n        deprecated = declared.deprecated;\n        loc = declared.extentLoc;\n      };\n    ]\n  | Tstr_type (recFlag, decls) ->\n    decls\n    |> List.mapi (fun i decl ->\n           let recStatus =\n             match recFlag with\n             | Recursive when i = 0 -> Types.Trec_first\n             | Nonrecursive when i = 0 -> Types.Trec_not\n             | _ -> Types.Trec_next\n           in\n           decl |> forTypeDeclaration ~env ~exported ~recStatus)\n  | _ -> []\n\nand forModule ~env mod_desc moduleName =\n  match mod_desc with\n  | Tmod_ident (path, _lident) -> Ident path\n  | Tmod_structure structure ->\n    let env = env |> Env.addModule ~name:moduleName in\n    let contents = forStructure ~name:moduleName ~env structure.str_items in\n    Structure contents\n  | Tmod_functor (ident, argName, maybeType, resultExpr) ->\n    (match maybeType with\n    | None -> ()\n    | Some t -> (\n      match forTreeModuleType ~name:argName.txt ~env t with\n      | None -> ()\n      | Some kind ->\n        let stamp = Ident.binding_time ident in\n        let declared =\n          ProcessAttributes.newDeclared ~item:kind ~name:argName\n            ~extent:t.Typedtree.mty_loc ~stamp ~modulePath:NotVisible false []\n        in\n        Stamps.addModule env.stamps stamp declared));\n    forModule ~env resultExpr.mod_desc moduleName\n  | Tmod_apply (functor_, _arg, _coercion) ->\n    forModule ~env functor_.mod_desc moduleName\n  | Tmod_unpack (_expr, moduleType) ->\n    let env = env |> Env.addModule 
~name:moduleName in\n    forTypeModule ~name:moduleName ~env moduleType\n  | Tmod_constraint (expr, typ, _constraint, _coercion) ->\n    (* TODO do this better I think *)\n    let modKind = forModule ~env expr.mod_desc moduleName in\n    let env = env |> Env.addModule ~name:moduleName in\n    let modTypeKind = forTypeModule ~name:moduleName ~env typ in\n    Constraint (modKind, modTypeKind)\n\nand forStructure ~name ~env strItems =\n  let exported = Exported.init () in\n  let items =\n    List.fold_right\n      (fun item results -> forStructureItem ~env ~exported item @ results)\n      strItems []\n  in\n  let attributes =\n    strItems\n    |> List.filter_map (fun (struc : Typedtree.structure_item) ->\n           match struc with\n           | {str_desc = Tstr_attribute attr} -> Some attr\n           | _ -> None)\n  in\n  let docstring = attrsToDocstring attributes in\n  let deprecated = ProcessAttributes.findDeprecatedAttribute attributes in\n  {Module.name; docstring; exported; items; deprecated}\n\nlet fileForCmtInfos ~moduleName ~uri\n    ({cmt_modname; cmt_annots} : Cmt_format.cmt_infos) =\n  let env =\n    {Env.stamps = Stamps.init (); modulePath = File (uri, moduleName)}\n  in\n  match cmt_annots with\n  | Partial_implementation parts ->\n    let items =\n      parts |> Array.to_list\n      |> Utils.filterMap (fun p ->\n             match (p : Cmt_format.binary_part) with\n             | Partial_structure str -> Some str.str_items\n             | Partial_structure_item str -> Some [str]\n             | _ -> None)\n      |> List.concat\n    in\n    let structure = forStructure ~name:moduleName ~env items in\n    {File.uri; moduleName = cmt_modname; stamps = env.stamps; structure}\n  | Partial_interface parts ->\n    let items =\n      parts |> Array.to_list\n      |> Utils.filterMap (fun (p : Cmt_format.binary_part) ->\n             match p with\n             | Partial_signature str -> Some str.sig_items\n             | Partial_signature_item str -> Some 
[str]\n             | _ -> None)\n      |> List.concat\n    in\n    let structure = forSignature ~name:moduleName ~env items in\n    {uri; moduleName = cmt_modname; stamps = env.stamps; structure}\n  | Implementation structure ->\n    let structure = forStructure ~name:moduleName ~env structure.str_items in\n    {uri; moduleName = cmt_modname; stamps = env.stamps; structure}\n  | Interface signature ->\n    let structure = forSignature ~name:moduleName ~env signature.sig_items in\n    {uri; moduleName = cmt_modname; stamps = env.stamps; structure}\n  | _ -> File.create moduleName uri\n\nlet fileForCmt ~moduleName ~cmt ~uri =\n  match Hashtbl.find_opt state.cmtCache cmt with\n  | Some file -> Some file\n  | None -> (\n    match Shared.tryReadCmt cmt with\n    | None -> None\n    | Some infos ->\n      let file = fileForCmtInfos ~moduleName ~uri infos in\n      Hashtbl.replace state.cmtCache cmt file;\n      Some file)\n\nlet fileForModule moduleName ~package =\n  match Hashtbl.find_opt package.pathsForModule moduleName with\n  | Some paths ->\n    let uri = getUri paths in\n    let cmt = getCmtPath ~uri paths in\n    Log.log (\"fileForModule \" ^ showPaths paths);\n    fileForCmt ~cmt ~moduleName ~uri\n  | None ->\n    Log.log (\"No path for module \" ^ moduleName);\n    None\n"
  },
  {
    "path": "analysis/src/ProcessExtra.ml",
    "content": "open SharedTypes\n\nlet addLocItem extra loc locType =\n  if not loc.Warnings.loc_ghost then\n    extra.locItems <- {loc; locType} :: extra.locItems\n\nlet addReference ~extra stamp loc =\n  Hashtbl.replace extra.internalReferences stamp\n    (loc\n    ::\n    (if Hashtbl.mem extra.internalReferences stamp then\n       Hashtbl.find extra.internalReferences stamp\n     else []))\n\nlet extraForFile ~(file : File.t) =\n  let extra = initExtra () in\n  file.stamps\n  |> Stamps.iterModules (fun stamp (d : Module.t Declared.t) ->\n         addLocItem extra d.name.loc (LModule (Definition (stamp, Module)));\n         addReference ~extra stamp d.name.loc);\n  file.stamps\n  |> Stamps.iterValues (fun stamp (d : Types.type_expr Declared.t) ->\n         addLocItem extra d.name.loc\n           (Typed (d.name.txt, d.item, Definition (stamp, Value)));\n         addReference ~extra stamp d.name.loc);\n  file.stamps\n  |> Stamps.iterTypes (fun stamp (d : Type.t Declared.t) ->\n         addLocItem extra d.name.loc\n           (TypeDefinition (d.name.txt, d.item.Type.decl, stamp));\n         addReference ~extra stamp d.name.loc;\n         match d.item.Type.kind with\n         | Record labels ->\n           labels\n           |> List.iter (fun {stamp; fname; typ} ->\n                  addReference ~extra stamp fname.loc;\n                  addLocItem extra fname.loc\n                    (Typed\n                       (d.name.txt, typ, Definition (d.stamp, Field fname.txt))))\n         | Variant constructors ->\n           constructors\n           |> List.iter (fun {Constructor.stamp; cname} ->\n                  addReference ~extra stamp cname.loc;\n                  let t =\n                    {\n                      Types.id = 0;\n                      level = 0;\n                      desc =\n                        Tconstr\n                          ( Path.Pident\n                              {Ident.stamp; name = d.name.txt; flags = 0},\n                       
     [],\n                            ref Types.Mnil );\n                    }\n                  in\n                  addLocItem extra cname.loc\n                    (Typed\n                       ( d.name.txt,\n                         t,\n                         Definition (d.stamp, Constructor cname.txt) )))\n         | _ -> ());\n  extra\n\nlet addExternalReference ~extra moduleName path tip loc =\n  (* TODO need to follow the path, and be able to load the files to follow module references... *)\n  Hashtbl.replace extra.externalReferences moduleName\n    ((path, tip, loc)\n    ::\n    (if Hashtbl.mem extra.externalReferences moduleName then\n       Hashtbl.find extra.externalReferences moduleName\n     else []))\n\nlet addFileReference ~extra moduleName loc =\n  let newLocs =\n    match Hashtbl.find_opt extra.fileReferences moduleName with\n    | Some oldLocs -> LocationSet.add loc oldLocs\n    | None -> LocationSet.singleton loc\n  in\n  Hashtbl.replace extra.fileReferences moduleName newLocs\n\nlet handleConstructor txt =\n  match txt with\n  | Longident.Lident name -> name\n  | Ldot (_left, name) -> name\n  | Lapply (_, _) -> assert false\n\nlet rec lidIsComplex (lid : Longident.t) =\n  match lid with\n  | Lapply _ -> true\n  | Ldot (lid, _) -> lidIsComplex lid\n  | _ -> false\n\nlet extraForStructureItems ~(iterator : Tast_iterator.iterator)\n    (items : Typedtree.structure_item list) =\n  items |> List.iter (iterator.structure_item iterator)\n\nlet extraForSignatureItems ~(iterator : Tast_iterator.iterator)\n    (items : Typedtree.signature_item list) =\n  items |> List.iter (iterator.signature_item iterator)\n\nlet extraForCmt ~(iterator : Tast_iterator.iterator)\n    ({cmt_annots} : Cmt_format.cmt_infos) =\n  let extraForParts parts =\n    parts\n    |> Array.iter (fun part ->\n           match part with\n           | Cmt_format.Partial_signature str -> iterator.signature iterator str\n           | Partial_signature_item str -> 
iterator.signature_item iterator str\n           | Partial_expression expression -> iterator.expr iterator expression\n           | Partial_pattern pattern -> iterator.pat iterator pattern\n           | Partial_class_expr _ -> ()\n           | Partial_module_type module_type ->\n             iterator.module_type iterator module_type\n           | Partial_structure _ | Partial_structure_item _ -> ())\n  in\n  match cmt_annots with\n  | Implementation structure ->\n    extraForStructureItems ~iterator structure.str_items\n  | Partial_implementation parts ->\n    let items =\n      parts |> Array.to_list\n      |> Utils.filterMap (fun (p : Cmt_format.binary_part) ->\n             match p with\n             | Partial_structure str -> Some str.str_items\n             | Partial_structure_item str -> Some [str]\n             (* | Partial_expression(exp) => Some([ str]) *)\n             | _ -> None)\n      |> List.concat\n    in\n    extraForStructureItems ~iterator items;\n    extraForParts parts\n  | Interface signature -> extraForSignatureItems ~iterator signature.sig_items\n  | Partial_interface parts ->\n    let items =\n      parts |> Array.to_list\n      |> Utils.filterMap (fun (p : Cmt_format.binary_part) ->\n             match p with\n             | Partial_signature s -> Some s.sig_items\n             | Partial_signature_item str -> Some [str]\n             | _ -> None)\n      |> List.concat\n    in\n    extraForSignatureItems ~iterator items;\n    extraForParts parts\n  | _ -> extraForStructureItems ~iterator []\n\nlet addForPath ~env ~extra path lident loc typ tip =\n  let identName = Longident.last lident in\n  let identLoc = Utils.endOfLocation loc (String.length identName) in\n  let locType =\n    match ResolvePath.fromCompilerPath ~env path with\n    | Stamp stamp ->\n      addReference ~extra stamp identLoc;\n      LocalReference (stamp, tip)\n    | NotFound -> NotFound\n    | Global (moduleName, path) ->\n      addExternalReference ~extra moduleName path 
tip identLoc;\n      GlobalReference (moduleName, path, tip)\n    | Exported (env, name) -> (\n      match\n        match tip with\n        | Type -> Exported.find env.exported Exported.Type name\n        | _ -> Exported.find env.exported Exported.Value name\n      with\n      | Some stamp ->\n        addReference ~extra stamp identLoc;\n        LocalReference (stamp, tip)\n      | None -> NotFound)\n    | GlobalMod _ -> NotFound\n  in\n  addLocItem extra loc (Typed (identName, typ, locType))\n\nlet addForPathParent ~env ~extra path loc =\n  let locType =\n    match ResolvePath.fromCompilerPath ~env path with\n    | GlobalMod moduleName ->\n      addFileReference ~extra moduleName loc;\n      TopLevelModule moduleName\n    | Stamp stamp ->\n      addReference ~extra stamp loc;\n      LModule (LocalReference (stamp, Module))\n    | NotFound -> LModule NotFound\n    | Global (moduleName, path) ->\n      addExternalReference ~extra moduleName path Module loc;\n      LModule (GlobalReference (moduleName, path, Module))\n    | Exported (env, name) -> (\n      match Exported.find env.exported Exported.Module name with\n      | Some stamp ->\n        addReference ~extra stamp loc;\n        LModule (LocalReference (stamp, Module))\n      | None -> LModule NotFound)\n  in\n  addLocItem extra loc locType\n\nlet getTypeAtPath ~env path =\n  match ResolvePath.fromCompilerPath ~env path with\n  | GlobalMod _ -> `Not_found\n  | Global (moduleName, path) -> `Global (moduleName, path)\n  | NotFound -> `Not_found\n  | Exported (env, name) -> (\n    match Exported.find env.exported Exported.Type name with\n    | None -> `Not_found\n    | Some stamp -> (\n      let declaredType = Stamps.findType env.file.stamps stamp in\n      match declaredType with\n      | Some declaredType -> `Local declaredType\n      | None -> `Not_found))\n  | Stamp stamp -> (\n    let declaredType = Stamps.findType env.file.stamps stamp in\n    match declaredType with\n    | Some declaredType -> `Local 
declaredType\n    | None -> `Not_found)\n\nlet addForField ~env ~extra ~recordType ~fieldType {Asttypes.txt; loc} =\n  match (Shared.dig recordType).desc with\n  | Tconstr (path, _args, _memo) ->\n    let t = getTypeAtPath ~env path in\n    let name = handleConstructor txt in\n    let nameLoc = Utils.endOfLocation loc (String.length name) in\n    let locType =\n      match t with\n      | `Local {stamp; item = {kind = Record fields}} -> (\n        match fields |> List.find_opt (fun f -> f.fname.txt = name) with\n        | Some {stamp = astamp} ->\n          addReference ~extra astamp nameLoc;\n          LocalReference (stamp, Field name)\n        | None -> NotFound)\n      | `Global (moduleName, path) ->\n        addExternalReference ~extra moduleName path (Field name) nameLoc;\n        GlobalReference (moduleName, path, Field name)\n      | _ -> NotFound\n    in\n    addLocItem extra nameLoc (Typed (name, fieldType, locType))\n  | _ -> ()\n\nlet addForRecord ~env ~extra ~recordType items =\n  match (Shared.dig recordType).desc with\n  | Tconstr (path, _args, _memo) ->\n    let t = getTypeAtPath ~env path in\n    items\n    |> List.iter (fun ({Asttypes.txt; loc}, _, _) ->\n           (* let name = Longident.last(txt); *)\n           let name = handleConstructor txt in\n           let nameLoc = Utils.endOfLocation loc (String.length name) in\n           let locType =\n             match t with\n             | `Local {stamp; item = {kind = Record fields}} -> (\n               match fields |> List.find_opt (fun f -> f.fname.txt = name) with\n               | Some {stamp = astamp} ->\n                 addReference ~extra astamp nameLoc;\n                 LocalReference (stamp, Field name)\n               | None -> NotFound)\n             | `Global (moduleName, path) ->\n               addExternalReference ~extra moduleName path (Field name) nameLoc;\n               GlobalReference (moduleName, path, Field name)\n             | _ -> NotFound\n           in\n           
addLocItem extra nameLoc (Typed (name, recordType, locType)))\n  | _ -> ()\n\nlet addForConstructor ~env ~extra constructorType {Asttypes.txt; loc}\n    {Types.cstr_name} =\n  match (Shared.dig constructorType).desc with\n  | Tconstr (path, _args, _memo) ->\n    let name = handleConstructor txt in\n    let nameLoc = Utils.endOfLocation loc (String.length name) in\n    let t = getTypeAtPath ~env path in\n    let locType =\n      match t with\n      | `Local {stamp; item = {kind = Variant constructors}} -> (\n        match\n          constructors\n          |> List.find_opt (fun c -> c.Constructor.cname.txt = cstr_name)\n        with\n        | Some {stamp = cstamp} ->\n          addReference ~extra cstamp nameLoc;\n          LocalReference (stamp, Constructor name)\n        | None -> NotFound)\n      | `Global (moduleName, path) ->\n        addExternalReference ~extra moduleName path (Constructor name) nameLoc;\n        GlobalReference (moduleName, path, Constructor name)\n      | _ -> NotFound\n    in\n    addLocItem extra nameLoc (Typed (name, constructorType, locType))\n  | _ -> ()\n\nlet rec addForLongident ~env ~extra top (path : Path.t) (txt : Longident.t) loc\n    =\n  if (not loc.Location.loc_ghost) && not (lidIsComplex txt) then (\n    let idLength = String.length (String.concat \".\" (Longident.flatten txt)) in\n    let reportedLength = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum in\n    let isPpx = idLength <> reportedLength in\n    if isPpx then\n      match top with\n      | Some (t, tip) -> addForPath ~env ~extra path txt loc t tip\n      | None -> addForPathParent ~env ~extra path loc\n    else\n      let l = Utils.endOfLocation loc (String.length (Longident.last txt)) in\n      (match top with\n      | Some (t, tip) -> addForPath ~env ~extra path txt l t tip\n      | None -> addForPathParent ~env ~extra path l);\n      match (path, txt) with\n      | Pdot (pinner, _pname, _), Ldot (inner, name) ->\n        addForLongident ~env ~extra None pinner 
inner\n          (Utils.chopLocationEnd loc (String.length name + 1))\n      | Pident _, Lident _ -> ()\n      | _ -> ())\n\nlet rec handle_module_expr ~env ~extra expr =\n  match expr with\n  | Typedtree.Tmod_constraint (expr, _, _, _) ->\n    handle_module_expr ~env ~extra expr.mod_desc\n  | Tmod_ident (path, {txt; loc}) ->\n    if not (lidIsComplex txt) then\n      Log.log (\"Ident!! \" ^ String.concat \".\" (Longident.flatten txt));\n    addForLongident ~env ~extra None path txt loc\n  | Tmod_functor (_ident, _argName, _maybeType, resultExpr) ->\n    handle_module_expr ~env ~extra resultExpr.mod_desc\n  | Tmod_apply (obj, arg, _) ->\n    handle_module_expr ~env ~extra obj.mod_desc;\n    handle_module_expr ~env ~extra arg.mod_desc\n  | _ -> ()\n\nlet structure_item ~env ~extra (iter : Tast_iterator.iterator) item =\n  (match item.Typedtree.str_desc with\n  | Tstr_include {incl_mod = expr} ->\n    handle_module_expr ~env ~extra expr.mod_desc\n  | Tstr_module {mb_expr} -> handle_module_expr ~env ~extra mb_expr.mod_desc\n  | Tstr_open {open_path; open_txt = {txt; loc}} ->\n    (* Log.log(\"Have an open here\"); *)\n    addForLongident ~env ~extra None open_path txt loc\n  | _ -> ());\n  Tast_iterator.default_iterator.structure_item iter item\n\nlet signature_item ~(file : File.t) ~extra (iter : Tast_iterator.iterator) item\n    =\n  (match item.Typedtree.sig_desc with\n  | Tsig_value {val_id; val_loc; val_name = name; val_desc; val_attributes} ->\n    let stamp = Ident.binding_time val_id in\n    if Stamps.findValue file.stamps stamp = None then (\n      let declared =\n        ProcessAttributes.newDeclared ~name ~stamp ~extent:val_loc\n          ~modulePath:NotVisible ~item:val_desc.ctyp_type false val_attributes\n      in\n      Stamps.addValue file.stamps stamp declared;\n      addReference ~extra stamp name.loc;\n      addLocItem extra name.loc\n        (Typed (name.txt, val_desc.ctyp_type, Definition (stamp, Value))))\n  | _ -> ());\n  
Tast_iterator.default_iterator.signature_item iter item\n\nlet typ ~env ~extra (iter : Tast_iterator.iterator) (item : Typedtree.core_type)\n    =\n  (match item.ctyp_desc with\n  | Ttyp_constr (path, {txt; loc}, _args) ->\n    addForLongident ~env ~extra (Some (item.ctyp_type, Type)) path txt loc\n  | _ -> ());\n  Tast_iterator.default_iterator.typ iter item\n\nlet pat ~(file : File.t) ~env ~extra (iter : Tast_iterator.iterator)\n    (pattern : Typedtree.pattern) =\n  let addForPattern stamp name =\n    if Stamps.findValue file.stamps stamp = None then (\n      let declared =\n        ProcessAttributes.newDeclared ~name ~stamp ~modulePath:NotVisible\n          ~extent:pattern.pat_loc ~item:pattern.pat_type false\n          pattern.pat_attributes\n      in\n      Stamps.addValue file.stamps stamp declared;\n      addReference ~extra stamp name.loc;\n      addLocItem extra name.loc\n        (Typed (name.txt, pattern.pat_type, Definition (stamp, Value))))\n  in\n  (* Log.log(\"Entering pattern \" ++ Utils.showLocation(pat_loc)); *)\n  (match pattern.pat_desc with\n  | Tpat_record (items, _) ->\n    addForRecord ~env ~extra ~recordType:pattern.pat_type items\n  | Tpat_construct (lident, constructor, _) ->\n    addForConstructor ~env ~extra pattern.pat_type lident constructor\n  | Tpat_alias (_inner, ident, name) ->\n    let stamp = Ident.binding_time ident in\n    addForPattern stamp name\n  | Tpat_var (ident, name) ->\n    (* Log.log(\"Pattern \" ++ name.txt); *)\n    let stamp = Ident.binding_time ident in\n    addForPattern stamp name\n  | _ -> ());\n  Tast_iterator.default_iterator.pat iter pattern\n\nlet expr ~env ~(extra : extra) (iter : Tast_iterator.iterator)\n    (expression : Typedtree.expression) =\n  (match expression.exp_desc with\n  | Texp_ident (path, {txt; loc}, _) when not (JsxHacks.pathIsFragment path) ->\n    addForLongident ~env ~extra (Some (expression.exp_type, Value)) path txt loc\n  | Texp_record {fields} ->\n    addForRecord ~env ~extra 
~recordType:expression.exp_type\n      (fields |> Array.to_list\n      |> Utils.filterMap (fun (desc, item) ->\n             match item with\n             | Typedtree.Overridden (loc, _) -> Some (loc, desc, ())\n             | _ -> None))\n  | Texp_constant constant ->\n    addLocItem extra expression.exp_loc (Constant constant)\n  (* Skip unit and list literals *)\n  | Texp_construct ({txt = Lident (\"()\" | \"::\"); loc}, _, _args)\n    when loc.loc_end.pos_cnum - loc.loc_start.pos_cnum <> 2 ->\n    ()\n  | Texp_construct (lident, constructor, _args) ->\n    addForConstructor ~env ~extra expression.exp_type lident constructor\n  | Texp_field (inner, lident, _label_description) ->\n    addForField ~env ~extra ~recordType:inner.exp_type\n      ~fieldType:expression.exp_type lident\n  | _ -> ());\n  Tast_iterator.default_iterator.expr iter expression\n\nlet getExtra ~file ~infos =\n  let extra = extraForFile ~file in\n  let env = QueryEnv.fromFile file in\n  let iterator =\n    {\n      Tast_iterator.default_iterator with\n      expr = expr ~env ~extra;\n      pat = pat ~env ~extra ~file;\n      signature_item = signature_item ~file ~extra;\n      structure_item = structure_item ~env ~extra;\n      typ = typ ~env ~extra;\n    }\n  in\n  extraForCmt ~iterator infos;\n  extra\n"
  },
  {
    "path": "analysis/src/Protocol.ml",
    "content": "type position = {line: int; character: int}\ntype range = {start: position; end_: position}\ntype markupContent = {kind: string; value: string}\n\n(* https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#command *)\ntype command = {title: string; command: string}\n\n(* https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#codeLens *)\ntype codeLens = {range: range; command: command option}\n\ntype inlayHint = {\n  position: position;\n  label: string;\n  kind: int;\n  paddingLeft: bool;\n  paddingRight: bool;\n}\n\n(* https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#parameterInformation *)\ntype parameterInformation = {label: int * int; documentation: markupContent}\n\n(* https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#signatureInformation *)\ntype signatureInformation = {\n  label: string;\n  parameters: parameterInformation list;\n  documentation: markupContent option;\n}\n\n(* https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#signatureHelp *)\ntype signatureHelp = {\n  signatures: signatureInformation list;\n  activeSignature: int option;\n  activeParameter: int option;\n}\n\n(* https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#insertTextFormat *)\ntype insertTextFormat = Snippet\n\nlet insertTextFormatToInt f =\n  match f with\n  | Snippet -> 2\n\ntype textEdit = {range: range; newText: string}\n\ntype completionItem = {\n  label: string;\n  kind: int;\n  tags: int list;\n  detail: string;\n  sortText: string option;\n  filterText: string option;\n  insertTextFormat: insertTextFormat option;\n  insertText: string option;\n  documentation: markupContent option;\n  data: (string * string) list option;\n  additionalTextEdits: textEdit list option;\n}\n\ntype location = {uri: string; range: range}\ntype 
documentSymbolItem = {\n  name: string;\n  kind: int;\n  range: range;\n  children: documentSymbolItem list;\n}\ntype renameFile = {oldUri: string; newUri: string}\ntype diagnostic = {range: range; message: string; severity: int}\n\ntype optionalVersionedTextDocumentIdentifier = {\n  version: int option;\n  uri: string;\n}\n\ntype textDocumentEdit = {\n  textDocument: optionalVersionedTextDocumentIdentifier;\n  edits: textEdit list;\n}\n\ntype createFileOptions = {overwrite: bool option; ignoreIfExists: bool option}\ntype createFile = {uri: string; options: createFileOptions option}\n\ntype documentChange =\n  | TextDocumentEdit of textDocumentEdit\n  | CreateFile of createFile\n\ntype codeActionEdit = {documentChanges: documentChange list}\ntype codeActionKind = RefactorRewrite\n\ntype codeAction = {\n  title: string;\n  codeActionKind: codeActionKind;\n  edit: codeActionEdit;\n}\n\nlet wrapInQuotes s = \"\\\"\" ^ Json.escape s ^ \"\\\"\"\n\nlet null = \"null\"\nlet array l = \"[\" ^ String.concat \", \" l ^ \"]\"\n\nlet stringifyPosition p =\n  Printf.sprintf {|{\"line\": %i, \"character\": %i}|} p.line p.character\n\nlet stringifyRange r =\n  Printf.sprintf {|{\"start\": %s, \"end\": %s}|}\n    (stringifyPosition r.start)\n    (stringifyPosition r.end_)\n\nlet stringifyTextEdit (te : textEdit) =\n  Printf.sprintf\n    {|{\n      \"range\": %s,\n      \"newText\": %s\n      }|}\n    (stringifyRange te.range) (wrapInQuotes te.newText)\n\nlet stringifyMarkupContent (m : markupContent) =\n  Printf.sprintf {|{\"kind\": %s, \"value\": %s}|} (wrapInQuotes m.kind)\n    (wrapInQuotes m.value)\n\n(** None values are not emitted in the output. 
*)\nlet stringifyObject ?(startOnNewline = false) ?(indentation = 1) properties =\n  let indentationStr = String.make (indentation * 2) ' ' in\n  (if startOnNewline then \"\\n\" ^ indentationStr else \"\")\n  ^ {|{\n|}\n  ^ (properties\n    |> List.filter_map (fun (key, value) ->\n           match value with\n           | None -> None\n           | Some v ->\n             Some (Printf.sprintf {|%s  \"%s\": %s|} indentationStr key v))\n    |> String.concat \",\\n\")\n  ^ \"\\n\" ^ indentationStr ^ \"}\"\n\nlet optWrapInQuotes s =\n  match s with\n  | None -> None\n  | Some s -> Some (wrapInQuotes s)\n\nlet stringifyCompletionItem c =\n  stringifyObject\n    [\n      (\"label\", Some (wrapInQuotes c.label));\n      (\"kind\", Some (string_of_int c.kind));\n      (\"tags\", Some (c.tags |> List.map string_of_int |> array));\n      (\"detail\", Some (wrapInQuotes c.detail));\n      ( \"documentation\",\n        Some\n          (match c.documentation with\n          | None -> null\n          | Some doc -> stringifyMarkupContent doc) );\n      (\"sortText\", optWrapInQuotes c.sortText);\n      (\"filterText\", optWrapInQuotes c.filterText);\n      (\"insertText\", optWrapInQuotes c.insertText);\n      ( \"insertTextFormat\",\n        match c.insertTextFormat with\n        | None -> None\n        | Some insertTextFormat ->\n          Some (Printf.sprintf \"%i\" (insertTextFormatToInt insertTextFormat)) );\n      ( \"data\",\n        match c.data with\n        | None -> None\n        | Some fields ->\n          Some\n            (fields\n            |> List.map (fun (key, value) -> (key, Some (wrapInQuotes value)))\n            |> stringifyObject ~indentation:2) );\n      ( \"additionalTextEdits\",\n        match c.additionalTextEdits with\n        | Some additionalTextEdits ->\n          Some (additionalTextEdits |> List.map stringifyTextEdit |> array)\n        | None -> None );\n    ]\n\nlet stringifyHover value =\n  Printf.sprintf {|{\"contents\": %s}|}\n    
(stringifyMarkupContent {kind = \"markdown\"; value})\n\nlet stringifyLocation (h : location) =\n  Printf.sprintf {|{\"uri\": %s, \"range\": %s}|} (wrapInQuotes h.uri)\n    (stringifyRange h.range)\n\nlet stringifyDocumentSymbolItems items =\n  let buf = Buffer.create 10 in\n  let stringifyName name = Printf.sprintf \"\\\"%s\\\"\" (Json.escape name) in\n  let stringifyKind kind = string_of_int kind in\n  let emitStr = Buffer.add_string buf in\n  let emitSep () = emitStr \",\\n\" in\n  let rec emitItem ~indent item =\n    let openBrace = Printf.sprintf \"%s{\\n\" indent in\n    let closeBrace = Printf.sprintf \"\\n%s}\" indent in\n    let indent = indent ^ \"  \" in\n    let emitField name s =\n      emitStr (Printf.sprintf \"%s\\\"%s\\\": %s\" indent name s)\n    in\n    emitStr openBrace;\n    emitField \"name\" (stringifyName item.name);\n    emitSep ();\n    emitField \"kind\" (stringifyKind item.kind);\n    emitSep ();\n    emitField \"range\" (stringifyRange item.range);\n    emitSep ();\n    emitField \"selectionRange\" (stringifyRange item.range);\n    if item.children <> [] then (\n      emitSep ();\n      emitField \"children\" \"[\\n\";\n      emitBody ~indent (List.rev item.children);\n      emitStr \"]\");\n    emitStr closeBrace\n  and emitBody ~indent items =\n    match items with\n    | [] -> ()\n    | item :: rest ->\n      emitItem ~indent item;\n      if rest <> [] then emitSep ();\n      emitBody ~indent rest\n  in\n  let indent = \"\" in\n  emitStr \"[\\n\";\n  emitBody ~indent (List.rev items);\n  emitStr \"\\n]\";\n  Buffer.contents buf\n\nlet stringifyRenameFile {oldUri; newUri} =\n  Printf.sprintf {|{\n  \"kind\": \"rename\",\n  \"oldUri\": %s,\n  \"newUri\": %s\n}|}\n    (wrapInQuotes oldUri) (wrapInQuotes newUri)\n\nlet stringifyoptionalVersionedTextDocumentIdentifier td =\n  Printf.sprintf {|{\n  \"version\": %s,\n  \"uri\": %s\n  }|}\n    (match td.version with\n    | None -> null\n    | Some v -> string_of_int v)\n    (wrapInQuotes 
td.uri)\n\nlet stringifyTextDocumentEdit tde =\n  Printf.sprintf {|{\n  \"textDocument\": %s,\n  \"edits\": %s\n  }|}\n    (stringifyoptionalVersionedTextDocumentIdentifier tde.textDocument)\n    (tde.edits |> List.map stringifyTextEdit |> array)\n\nlet stringifyCreateFile cf =\n  stringifyObject\n    [\n      (\"kind\", Some (wrapInQuotes \"create\"));\n      (\"uri\", Some (wrapInQuotes cf.uri));\n      ( \"options\",\n        match cf.options with\n        | None -> None\n        | Some options ->\n          Some\n            (stringifyObject\n               [\n                 ( \"overwrite\",\n                   match options.overwrite with\n                   | None -> None\n                   | Some ov -> Some (string_of_bool ov) );\n                 ( \"ignoreIfExists\",\n                   match options.ignoreIfExists with\n                   | None -> None\n                   | Some i -> Some (string_of_bool i) );\n               ]) );\n    ]\n\nlet stringifyDocumentChange dc =\n  match dc with\n  | TextDocumentEdit tde -> stringifyTextDocumentEdit tde\n  | CreateFile cf -> stringifyCreateFile cf\n\nlet codeActionKindToString kind =\n  match kind with\n  | RefactorRewrite -> \"refactor.rewrite\"\n\nlet stringifyCodeActionEdit cae =\n  Printf.sprintf {|{\"documentChanges\": %s}|}\n    (cae.documentChanges |> List.map stringifyDocumentChange |> array)\n\nlet stringifyCodeAction ca =\n  Printf.sprintf {|{\"title\": %s, \"kind\": %s, \"edit\": %s}|}\n    (wrapInQuotes ca.title)\n    (wrapInQuotes (codeActionKindToString ca.codeActionKind))\n    (ca.edit |> stringifyCodeActionEdit)\n\nlet stringifyHint (hint : inlayHint) =\n  Printf.sprintf\n    {|{\n    \"position\": %s,\n    \"label\": %s,\n    \"kind\": %i,\n    \"paddingLeft\": %b,\n    \"paddingRight\": %b\n}|}\n    (stringifyPosition hint.position)\n    (wrapInQuotes hint.label) hint.kind hint.paddingLeft hint.paddingRight\n\nlet stringifyCommand (command : command) =\n  Printf.sprintf {|{\"title\": %s, 
\"command\": %s}|}\n    (wrapInQuotes command.title)\n    (wrapInQuotes command.command)\n\nlet stringifyCodeLens (codeLens : codeLens) =\n  Printf.sprintf\n    {|{\n        \"range\": %s,\n        \"command\": %s\n    }|}\n    (stringifyRange codeLens.range)\n    (match codeLens.command with\n    | None -> \"\"\n    | Some command -> stringifyCommand command)\n\nlet stringifyParameterInformation (parameterInformation : parameterInformation)\n    =\n  Printf.sprintf {|{\"label\": %s, \"documentation\": %s}|}\n    (let line, chr = parameterInformation.label in\n     \"[\" ^ string_of_int line ^ \", \" ^ string_of_int chr ^ \"]\")\n    (stringifyMarkupContent parameterInformation.documentation)\n\nlet stringifySignatureInformation (signatureInformation : signatureInformation)\n    =\n  Printf.sprintf\n    {|{\n    \"label\": %s,\n    \"parameters\": %s%s\n  }|}\n    (wrapInQuotes signatureInformation.label)\n    (signatureInformation.parameters\n    |> List.map stringifyParameterInformation\n    |> array)\n    (match signatureInformation.documentation with\n    | None -> \"\"\n    | Some docs ->\n      Printf.sprintf \",\\n    \\\"documentation\\\": %s\"\n        (stringifyMarkupContent docs))\n\nlet stringifySignatureHelp (signatureHelp : signatureHelp) =\n  Printf.sprintf\n    {|{\n  \"signatures\": %s,\n  \"activeSignature\": %s,\n  \"activeParameter\": %s\n}|}\n    (signatureHelp.signatures |> List.map stringifySignatureInformation |> array)\n    (match signatureHelp.activeSignature with\n    | None -> null\n    | Some activeSignature -> activeSignature |> string_of_int)\n    (match signatureHelp.activeParameter with\n    | None -> null\n    | Some activeParameter -> activeParameter |> string_of_int)\n\n(* https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#diagnostic *)\nlet stringifyDiagnostic d =\n  Printf.sprintf\n    {|{\n  \"range\": %s,\n  \"message\": %s,\n  \"severity\": %d,\n  \"source\": \"ReScript\"\n}|}\n    
(stringifyRange d.range) (wrapInQuotes d.message) d.severity\n"
  },
  {
    "path": "analysis/src/Range.ml",
    "content": "type t = Pos.t * Pos.t\n\nlet toString ((posStart, posEnd) : t) =\n  Printf.sprintf \"[%s->%s]\" (Pos.toString posStart) (Pos.toString posEnd)\n\nlet hasPos ~pos ((posStart, posEnd) : t) = posStart <= pos && pos < posEnd\n"
  },
  {
    "path": "analysis/src/References.ml",
    "content": "open SharedTypes\n\nlet debugReferences = ref true\nlet maybeLog m = if !debugReferences then Log.log (\"[ref] \" ^ m)\n\nlet checkPos (line, char)\n    {Location.loc_start = {pos_lnum; pos_bol; pos_cnum}; loc_end} =\n  if line < pos_lnum || (line = pos_lnum && char < pos_cnum - pos_bol) then\n    false\n  else if\n    line > loc_end.pos_lnum\n    || (line = loc_end.pos_lnum && char > loc_end.pos_cnum - loc_end.pos_bol)\n  then false\n  else true\n\nlet locItemsForPos ~extra pos =\n  extra.locItems |> List.filter (fun {loc; locType = _} -> checkPos pos loc)\n\nlet lineColToCmtLoc ~pos:(line, col) = (line + 1, col)\n\nlet getLocItem ~full ~pos ~debug =\n  let log n msg = if debug then Printf.printf \"getLocItem #%d: %s\\n\" n msg in\n  let pos = lineColToCmtLoc ~pos in\n  let locItems = locItemsForPos ~extra:full.extra pos in\n  if !Log.verbose then\n    print_endline\n      (\"locItems:\\n  \"\n      ^ (locItems |> List.map locItemToString |> String.concat \"\\n  \"));\n  let nameOf li =\n    match li.locType with\n    | Typed (n, _, _) -> n\n    | _ -> \"NotFound\"\n  in\n  match locItems with\n  | li1 :: li2 :: li3 :: ({locType = Typed (\"makeProps\", _, _)} as li4) :: _\n    when full.file.uri |> Uri.isInterface ->\n    log 1 \"heuristic for makeProps in interface files\";\n    if debug then\n      Printf.printf \"n1:%s n2:%s n3:%s\\n\" (nameOf li1) (nameOf li2) (nameOf li3);\n    Some li4\n  | [\n   {locType = Constant _};\n   ({locType = Typed (\"createDOMElementVariadic\", _, _)} as li2);\n  ] ->\n    log 3 \"heuristic for <div>\";\n    Some li2\n  | {locType = Typed (\"makeProps\", _, _)}\n    :: ({locType = Typed (\"make\", _, _)} as li2)\n    :: _ ->\n    log 4\n      \"heuristic for </Comp> within fragments: take make as makeProps does not \\\n       work\\n\\\n       the type is not great but jump to definition works\";\n    Some li2\n  | [\n   ({locType = Typed (_, _, LocalReference _)} as li1);\n   ({locType = Typed (_, _, _)} as 
li2);\n  ]\n    when li1.loc = li2.loc ->\n    log 5\n      \"heuristic for JSX and compiler combined:\\n\\\n       ~x becomes Props#x\\n\\\n       heuristic for: [Props, x], give loc of `x`\";\n    if debug then Printf.printf \"n1:%s n2:%s\\n\" (nameOf li1) (nameOf li2);\n    Some li2\n  | [\n   ({locType = Typed (_, _, LocalReference _)} as li1);\n   ({locType = Typed (_, _, GlobalReference (\"Js_OO\", [\"unsafe_downgrade\"], _))}\n    as li2);\n   li3;\n  ]\n  (* For older compiler 9.0 or earlier *)\n    when li1.loc = li2.loc && li2.loc = li3.loc ->\n    (* Not currently testable on 9.1.4 *)\n    log 6\n      \"heuristic for JSX and compiler combined:\\n\\\n       ~x becomes Js_OO.unsafe_downgrade(Props)#x\\n\\\n       heuristic for: [Props, unsafe_downgrade, x], give loc of `x`\";\n    Some li3\n  | [\n   ({locType = Typed (_, _, LocalReference (_, Value))} as li1);\n   ({locType = Typed (_, _, Definition (_, Value))} as li2);\n  ] ->\n    log 7\n      \"heuristic for JSX on type-annotated labeled (~arg:t):\\n\\\n       (~arg:t) becomes Props#arg\\n\\\n       Props has the location range of arg:t\\n\\\n       arg has the location range of arg\\n\\\n       heuristic for: [Props, arg], give loc of `arg`\";\n    if debug then Printf.printf \"n1:%s n2:%s\\n\" (nameOf li1) (nameOf li2);\n    Some li2\n  | [li1; li2; li3] when li1.loc = li2.loc && li2.loc = li3.loc ->\n    (* Not currently testable on 9.1.4 *)\n    log 8\n      \"heuristic for JSX with at most one child\\n\\\n       heuristic for: [makeProps, make, createElement], give the loc of `make` \";\n    Some li2\n  | [li1; li2; li3; li4]\n    when li1.loc = li2.loc && li2.loc = li3.loc && li3.loc = li4.loc ->\n    log 9\n      \"heuristic for JSX variadic, e.g. 
<C> {x} {y} </C>\\n\\\n       heuristic for: [React.null, makeProps, make, createElementVariadic], \\\n       give the loc of `make`\";\n    if debug then\n      Printf.printf \"n1:%s n2:%s n3:%s n4:%s\\n\" (nameOf li1) (nameOf li2)\n        (nameOf li3) (nameOf li4);\n    Some li3\n  | {locType = Typed (_, {desc = Tconstr (path, _, _)}, _)} :: li :: _\n    when Utils.isUncurriedInternal path ->\n    Some li\n  | li :: _ -> Some li\n  | _ -> None\n\nlet declaredForTip ~(stamps : Stamps.t) stamp (tip : Tip.t) =\n  match tip with\n  | Value ->\n    Stamps.findValue stamps stamp\n    |> Option.map (fun x -> {x with Declared.item = ()})\n  | Field _ | Constructor _ | Type ->\n    Stamps.findType stamps stamp\n    |> Option.map (fun x -> {x with Declared.item = ()})\n  | Module ->\n    Stamps.findModule stamps stamp\n    |> Option.map (fun x -> {x with Declared.item = ()})\n\nlet getField (file : File.t) stamp name =\n  match Stamps.findType file.stamps stamp with\n  | None -> None\n  | Some {item = {kind}} -> (\n    match kind with\n    | Record fields -> fields |> List.find_opt (fun f -> f.fname.txt = name)\n    | _ -> None)\n\nlet getConstructor (file : File.t) stamp name =\n  match Stamps.findType file.stamps stamp with\n  | None -> None\n  | Some {item = {kind}} -> (\n    match kind with\n    | Variant constructors -> (\n      match\n        constructors\n        |> List.find_opt (fun const -> const.Constructor.cname.txt = name)\n      with\n      | None -> None\n      | Some const -> Some const)\n    | _ -> None)\n\nlet exportedForTip ~env ~path ~package ~(tip : Tip.t) =\n  match ResolvePath.resolvePath ~env ~path ~package with\n  | None ->\n    Log.log (\"Cannot resolve path \" ^ pathToString path);\n    None\n  | Some (env, name) -> (\n    let kind =\n      match tip with\n      | Value -> Exported.Value\n      | Field _ | Constructor _ | Type -> Exported.Type\n      | Module -> Exported.Module\n    in\n    match Exported.find env.exported kind name with\n    | 
None ->\n      Log.log (\"Exported not found for tip \" ^ name ^ \" > \" ^ Tip.toString tip);\n      None\n    | Some stamp -> Some (env, name, stamp))\n\nlet definedForLoc ~file ~package locKind =\n  let inner ~file stamp (tip : Tip.t) =\n    match tip with\n    | Constructor name -> (\n      match getConstructor file stamp name with\n      | None -> None\n      | Some constructor ->\n        Some (constructor.docstring, `Constructor constructor))\n    | Field name ->\n      Some\n        ( (match getField file stamp name with\n          | None -> []\n          | Some field -> field.docstring),\n          `Field )\n    | _ -> (\n      maybeLog\n        (\"Trying for declared \" ^ Tip.toString tip ^ \" \" ^ string_of_int stamp\n       ^ \" in file \" ^ Uri.toString file.uri);\n      match declaredForTip ~stamps:file.stamps stamp tip with\n      | None -> None\n      | Some declared -> Some (declared.docstring, `Declared))\n  in\n  match locKind with\n  | NotFound -> None\n  | LocalReference (stamp, tip) | Definition (stamp, tip) ->\n    inner ~file stamp tip\n  | GlobalReference (moduleName, path, tip) -> (\n    maybeLog (\"Getting global \" ^ moduleName);\n    match ProcessCmt.fileForModule ~package moduleName with\n    | None ->\n      Log.log (\"Cannot get module \" ^ moduleName);\n      None\n    | Some file -> (\n      let env = QueryEnv.fromFile file in\n      match exportedForTip ~env ~path ~package ~tip with\n      | None -> None\n      | Some (env, name, stamp) -> (\n        maybeLog (\"Getting for \" ^ string_of_int stamp ^ \" in \" ^ name);\n        match inner ~file:env.file stamp tip with\n        | None ->\n          Log.log \"could not get defined\";\n          None\n        | Some res ->\n          maybeLog \"Yes!! 
got it\";\n          Some res)))\n\n(** Find alternative declaration: from res in case of interface, or from resi in\n    case of implementation *)\nlet alternateDeclared ~(file : File.t) ~package (declared : _ Declared.t) tip =\n  match Hashtbl.find_opt package.pathsForModule file.moduleName with\n  | None -> None\n  | Some paths -> (\n    match paths with\n    | IntfAndImpl {resi; res} -> (\n      maybeLog\n        (\"alternateDeclared for \" ^ file.moduleName ^ \" has both resi and res\");\n      let alternateUri = if Uri.isInterface file.uri then res else resi in\n      match Cmt.fullFromUri ~uri:(Uri.fromPath alternateUri) with\n      | None -> None\n      | Some {file; extra} -> (\n        let env = QueryEnv.fromFile file in\n        let path = ModulePath.toPath declared.modulePath declared.name.txt in\n        maybeLog (\"find declared for path \" ^ pathToString path);\n        let declaredOpt =\n          match exportedForTip ~env ~path ~package ~tip with\n          | None -> None\n          | Some (_env, _name, stamp) ->\n            declaredForTip ~stamps:file.stamps stamp tip\n        in\n        match declaredOpt with\n        | None -> None\n        | Some declared -> Some (file, extra, declared)))\n    | _ ->\n      maybeLog (\"alternateDeclared for \" ^ file.moduleName ^ \" not found\");\n\n      None)\n\nlet rec resolveModuleReference ?(pathsSeen = []) ~file ~package\n    (declared : Module.t Declared.t) =\n  match declared.item with\n  | Structure _ -> Some (file, Some declared)\n  | Constraint (_moduleItem, moduleTypeItem) ->\n    resolveModuleReference ~pathsSeen ~file ~package\n      {declared with item = moduleTypeItem}\n  | Ident path -> (\n    let env = QueryEnv.fromFile file in\n    match ResolvePath.fromCompilerPath ~env path with\n    | NotFound -> None\n    | Exported (env, name) -> (\n      match Exported.find env.exported Exported.Module name with\n      | None -> None\n      | Some stamp -> (\n        match Stamps.findModule 
env.file.stamps stamp with\n        | None -> None\n        | Some md -> Some (env.file, Some md)))\n    | Global (moduleName, path) -> (\n      match ProcessCmt.fileForModule ~package moduleName with\n      | None -> None\n      | Some file -> (\n        let env = QueryEnv.fromFile file in\n        match ResolvePath.resolvePath ~env ~package ~path with\n        | None -> None\n        | Some (env, name) -> (\n          match Exported.find env.exported Exported.Module name with\n          | None -> None\n          | Some stamp -> (\n            match Stamps.findModule env.file.stamps stamp with\n            | None -> None\n            | Some md -> Some (env.file, Some md)))))\n    | Stamp stamp -> (\n      match Stamps.findModule file.stamps stamp with\n      | None -> None\n      | Some ({item = Ident path} as md) when not (List.mem path pathsSeen) ->\n        (* avoid possible infinite loops *)\n        resolveModuleReference ~file ~package ~pathsSeen:(path :: pathsSeen) md\n      | Some md -> Some (file, Some md))\n    | GlobalMod name -> (\n      match ProcessCmt.fileForModule ~package name with\n      | None -> None\n      | Some file -> Some (file, None)))\n\nlet validateLoc (loc : Location.t) (backup : Location.t) =\n  if loc.loc_start.pos_cnum = -1 then\n    if backup.loc_start.pos_cnum = -1 then\n      {\n        Location.loc_ghost = true;\n        loc_start = {pos_cnum = 0; pos_lnum = 1; pos_bol = 0; pos_fname = \"\"};\n        loc_end = {pos_cnum = 0; pos_lnum = 1; pos_bol = 0; pos_fname = \"\"};\n      }\n    else backup\n  else loc\n\nlet resolveModuleDefinition ~(file : File.t) ~package stamp =\n  match Stamps.findModule file.stamps stamp with\n  | None -> None\n  | Some md -> (\n    match resolveModuleReference ~file ~package md with\n    | None -> None\n    | Some (file, declared) ->\n      let loc =\n        match declared with\n        | None -> Uri.toTopLevelLoc file.uri\n        | Some declared -> validateLoc declared.name.loc 
declared.extentLoc\n      in\n      Some (file.uri, loc))\n\nlet definition ~file ~package stamp (tip : Tip.t) =\n  match tip with\n  | Constructor name -> (\n    match getConstructor file stamp name with\n    | None -> None\n    | Some constructor -> Some (file.uri, constructor.cname.loc))\n  | Field name -> (\n    match getField file stamp name with\n    | None -> None\n    | Some field -> Some (file.uri, field.fname.loc))\n  | Module -> resolveModuleDefinition ~file ~package stamp\n  | _ -> (\n    match declaredForTip ~stamps:file.stamps stamp tip with\n    | None -> None\n    | Some declared ->\n      let fileImpl, declaredImpl =\n        match alternateDeclared ~package ~file declared tip with\n        | Some (fileImpl, _extra, declaredImpl) when Uri.isInterface file.uri ->\n          (fileImpl, declaredImpl)\n        | _ -> (file, declared)\n      in\n      let loc = validateLoc declaredImpl.name.loc declaredImpl.extentLoc in\n      let env = QueryEnv.fromFile fileImpl in\n      let uri =\n        ResolvePath.getSourceUri ~env ~package declaredImpl.modulePath\n      in\n      maybeLog (\"Inner uri \" ^ Uri.toString uri);\n      Some (uri, loc))\n\nlet definitionForLocItem ~full:{file; package} locItem =\n  match locItem.locType with\n  | Typed (_, _, Definition (stamp, tip)) -> (\n    maybeLog\n      (\"Typed Definition stamp:\" ^ string_of_int stamp ^ \" tip:\"\n     ^ Tip.toString tip);\n    match declaredForTip ~stamps:file.stamps stamp tip with\n    | None -> None\n    | Some declared ->\n      maybeLog (\"Declared \" ^ declared.name.txt);\n      if declared.isExported then (\n        maybeLog (\"exported, looking for alternate \" ^ file.moduleName);\n        match alternateDeclared ~package ~file declared tip with\n        | None -> None\n        | Some (file, _extra, declared) ->\n          let loc = validateLoc declared.name.loc declared.extentLoc in\n          Some (file.uri, loc))\n      else None)\n  | Typed (_, _, NotFound)\n  | LModule (NotFound | 
Definition (_, _))\n  | TypeDefinition (_, _, _)\n  | Constant _ ->\n    None\n  | TopLevelModule name -> (\n    maybeLog (\"Toplevel \" ^ name);\n    match Hashtbl.find_opt package.pathsForModule name with\n    | None -> None\n    | Some paths ->\n      let uri = getUri paths in\n      Some (uri, Uri.toTopLevelLoc uri))\n  | LModule (LocalReference (stamp, tip))\n  | Typed (_, _, LocalReference (stamp, tip)) ->\n    maybeLog (\"Local defn \" ^ Tip.toString tip);\n    definition ~file ~package stamp tip\n  | LModule (GlobalReference (moduleName, path, tip))\n  | Typed (_, _, GlobalReference (moduleName, path, tip)) -> (\n    maybeLog\n      (\"Typed GlobalReference moduleName:\" ^ moduleName ^ \" path:\"\n     ^ pathToString path ^ \" tip:\" ^ Tip.toString tip);\n    match ProcessCmt.fileForModule ~package moduleName with\n    | None -> None\n    | Some file -> (\n      let env = QueryEnv.fromFile file in\n      match exportedForTip ~env ~path ~package ~tip with\n      | None -> None\n      | Some (env, _name, stamp) ->\n        (* oooh wht do I do if the stamp is inside a pseudo-file? 
*)\n        maybeLog (\"Got stamp \" ^ string_of_int stamp);\n        definition ~file:env.file ~package stamp tip))\n\nlet digConstructor ~env ~package path =\n  match ResolvePath.resolveFromCompilerPath ~env ~package path with\n  | NotFound -> None\n  | Stamp stamp -> (\n    match Stamps.findType env.file.stamps stamp with\n    | None -> None\n    | Some t -> Some (env, t))\n  | Exported (env, name) -> (\n    match Exported.find env.exported Exported.Type name with\n    | None -> None\n    | Some stamp -> (\n      match Stamps.findType env.file.stamps stamp with\n      | None -> None\n      | Some t -> Some (env, t)))\n  | _ -> None\n\nlet typeDefinitionForLocItem ~full:{file; package} locItem =\n  match locItem.locType with\n  | Constant _ | TopLevelModule _ | LModule _ -> None\n  | TypeDefinition _ -> Some (file.uri, locItem.loc)\n  | Typed (_, typ, _) -> (\n    let env = QueryEnv.fromFile file in\n    match Shared.digConstructor typ with\n    | None -> None\n    | Some path -> (\n      match digConstructor ~env ~package path with\n      | Some (env, declared) -> Some (env.file.uri, declared.item.decl.type_loc)\n      | None -> None))\n\nlet isVisible (declared : _ Declared.t) =\n  declared.isExported\n  &&\n  let rec loop (v : ModulePath.t) =\n    match v with\n    | File _ -> true\n    | NotVisible -> false\n    | IncludedModule (_, inner) -> loop inner\n    | ExportedModule {modulePath = inner} -> loop inner\n  in\n  loop declared.modulePath\n\ntype references = {\n  uri: Uri.t;\n  locOpt: Location.t option; (* None: reference to a toplevel module *)\n}\n\nlet forLocalStamp ~full:{file; extra; package} stamp (tip : Tip.t) =\n  let env = QueryEnv.fromFile file in\n  match\n    match tip with\n    | Constructor name ->\n      getConstructor file stamp name\n      |> Option.map (fun x -> x.Constructor.stamp)\n    | Field name -> getField file stamp name |> Option.map (fun x -> x.stamp)\n    | _ -> Some stamp\n  with\n  | None -> []\n  | Some localStamp -> (\n   
 match Hashtbl.find_opt extra.internalReferences localStamp with\n    | None -> []\n    | Some locs ->\n      maybeLog (\"Checking externals: \" ^ string_of_int stamp);\n      let externals =\n        match declaredForTip ~stamps:env.file.stamps stamp tip with\n        | None -> []\n        | Some declared ->\n          if isVisible declared then (\n            let alternativeReferences =\n              match alternateDeclared ~package ~file declared tip with\n              | None -> []\n              | Some (file, extra, {stamp}) -> (\n                match\n                  match tip with\n                  | Constructor name ->\n                    getConstructor file stamp name\n                    |> Option.map (fun x -> x.Constructor.stamp)\n                  | Field name ->\n                    getField file stamp name |> Option.map (fun x -> x.stamp)\n                  | _ -> Some stamp\n                with\n                | None -> []\n                | Some localStamp -> (\n                  match\n                    Hashtbl.find_opt extra.internalReferences localStamp\n                  with\n                  | None -> []\n                  | Some locs ->\n                    locs\n                    |> List.map (fun loc -> {uri = file.uri; locOpt = Some loc})\n                  ))\n              (* if this file has a corresponding interface or implementation file\n                 also find the references in that file *)\n            in\n            let path =\n              ModulePath.toPath declared.modulePath declared.name.txt\n            in\n            maybeLog (\"Now checking path \" ^ pathToString path);\n            let thisModuleName = file.moduleName in\n            let externals =\n              package.projectFiles |> FileSet.elements\n              |> List.filter (fun name -> name <> file.moduleName)\n              |> List.map (fun moduleName ->\n                     Cmt.fullsFromModule ~package ~moduleName\n                     |> 
List.map (fun {file; extra} ->\n                            match\n                              Hashtbl.find_opt extra.externalReferences\n                                thisModuleName\n                            with\n                            | None -> []\n                            | Some refs ->\n                              let locs =\n                                refs\n                                |> Utils.filterMap (fun (p, t, locs) ->\n                                       if p = path && t = tip then Some locs\n                                       else None)\n                              in\n                              locs\n                              |> List.map (fun loc ->\n                                     {uri = file.uri; locOpt = Some loc})))\n              |> List.concat |> List.concat\n            in\n            alternativeReferences @ externals)\n          else (\n            maybeLog \"Not visible\";\n            [])\n      in\n      List.append\n        (locs |> List.map (fun loc -> {uri = file.uri; locOpt = Some loc}))\n        externals)\n\nlet allReferencesForLocItem ~full:({file; package} as full) locItem =\n  match locItem.locType with\n  | TopLevelModule moduleName ->\n    let otherModulesReferences =\n      package.projectFiles |> FileSet.elements\n      |> Utils.filterMap (fun name ->\n             match ProcessCmt.fileForModule ~package name with\n             | None -> None\n             | Some file -> Cmt.fullFromUri ~uri:file.uri)\n      |> List.map (fun full ->\n             match Hashtbl.find_opt full.extra.fileReferences moduleName with\n             | None -> []\n             | Some locs ->\n               locs |> LocationSet.elements\n               |> List.map (fun loc ->\n                      {\n                        uri = Uri.fromPath loc.Location.loc_start.pos_fname;\n                        locOpt = Some loc;\n                      }))\n      |> List.flatten\n    in\n    let targetModuleReferences 
=\n      match Hashtbl.find_opt package.pathsForModule moduleName with\n      | None -> []\n      | Some paths ->\n        let moduleSrcToRef src = {uri = Uri.fromPath src; locOpt = None} in\n        getSrc paths |> List.map moduleSrcToRef\n    in\n    List.append targetModuleReferences otherModulesReferences\n  | Typed (_, _, NotFound) | LModule NotFound | Constant _ -> []\n  | TypeDefinition (_, _, stamp) -> forLocalStamp ~full stamp Type\n  | Typed (_, _, (LocalReference (stamp, tip) | Definition (stamp, tip)))\n  | LModule (LocalReference (stamp, tip) | Definition (stamp, tip)) ->\n    maybeLog\n      (\"Finding references for \" ^ Uri.toString file.uri ^ \" and stamp \"\n     ^ string_of_int stamp ^ \" and tip \" ^ Tip.toString tip);\n    forLocalStamp ~full stamp tip\n  | LModule (GlobalReference (moduleName, path, tip))\n  | Typed (_, _, GlobalReference (moduleName, path, tip)) -> (\n    match ProcessCmt.fileForModule ~package moduleName with\n    | None -> []\n    | Some file -> (\n      let env = QueryEnv.fromFile file in\n      match exportedForTip ~env ~path ~package ~tip with\n      | None -> []\n      | Some (env, _name, stamp) -> (\n        match Cmt.fullFromUri ~uri:env.file.uri with\n        | None -> []\n        | Some full ->\n          maybeLog\n            (\"Finding references for (global) \" ^ Uri.toString env.file.uri\n           ^ \" and stamp \" ^ string_of_int stamp ^ \" and tip \"\n           ^ Tip.toString tip);\n          forLocalStamp ~full stamp tip)))\n"
  },
  {
    "path": "analysis/src/ResolvePath.ml",
    "content": "open SharedTypes\n\ntype resolution =\n  | Exported of QueryEnv.t * filePath\n  | Global of filePath * filePath list\n  | GlobalMod of filePath\n  | NotFound\n  | Stamp of int\n\nlet rec joinPaths modulePath path =\n  match modulePath with\n  | Path.Pident ident -> (ident.stamp, ident.name, path)\n  | Papply (fnPath, _argPath) -> joinPaths fnPath path\n  | Pdot (inner, name, _) -> joinPaths inner (name :: path)\n\nlet rec makePath ~(env : QueryEnv.t) modulePath =\n  match modulePath with\n  | Path.Pident ident when ident.stamp == 0 -> GlobalMod ident.name\n  | Pident ident -> Stamp ident.stamp\n  | Papply (fnPath, _argPath) -> makePath ~env fnPath\n  | Pdot (inner, name, _) -> (\n    match joinPaths inner [name] with\n    | 0, moduleName, path -> Global (moduleName, path)\n    | stamp, _moduleName, path -> (\n      let res =\n        match Stamps.findModule env.file.stamps stamp with\n        | None -> None\n        | Some {item = kind} -> findInModule ~env kind path\n      in\n      match res with\n      | None -> NotFound\n      | Some (`Local (env, name)) -> Exported (env, name)\n      | Some (`Global (moduleName, fullPath)) -> Global (moduleName, fullPath)))\n\nand resolvePathInner ~(env : QueryEnv.t) ~path =\n  match path with\n  | [] -> None\n  | [name] -> Some (`Local (env, name))\n  | subName :: subPath -> (\n    match Exported.find env.exported Exported.Module subName with\n    | None -> None\n    | Some stamp -> (\n      match Stamps.findModule env.file.stamps stamp with\n      | None -> None\n      | Some {item} -> findInModule ~env item subPath))\n\nand findInModule ~(env : QueryEnv.t) module_ path =\n  match module_ with\n  | Structure structure ->\n    resolvePathInner ~env:(QueryEnv.enterStructure env structure) ~path\n  | Constraint (_, module1) -> findInModule ~env module1 path\n  | Ident modulePath -> (\n    let stamp, moduleName, fullPath = joinPaths modulePath path in\n    if stamp = 0 then Some (`Global (moduleName, fullPath))\n 
   else\n      match Stamps.findModule env.file.stamps stamp with\n      | None -> None\n      | Some {item} -> findInModule ~env item fullPath)\n\nlet rec resolvePath ~env ~path ~package =\n  Log.log (\"resolvePath path:\" ^ pathToString path);\n  match resolvePathInner ~env ~path with\n  | None -> None\n  | Some result -> (\n    match result with\n    | `Local (env, name) -> Some (env, name)\n    | `Global (moduleName, fullPath) -> (\n      Log.log\n        (\"resolvePath Global path:\" ^ pathToString fullPath ^ \" module:\"\n       ^ moduleName);\n      match ProcessCmt.fileForModule ~package moduleName with\n      | None -> None\n      | Some file ->\n        resolvePath ~env:(QueryEnv.fromFile file) ~path:fullPath ~package))\n\nlet fromCompilerPath ~(env : QueryEnv.t) path : resolution =\n  match makePath ~env path with\n  | Stamp stamp -> Stamp stamp\n  | GlobalMod name -> GlobalMod name\n  | NotFound -> NotFound\n  | Exported (env, name) -> Exported (env, name)\n  | Global (moduleName, fullPath) -> Global (moduleName, fullPath)\n\nlet resolveModuleFromCompilerPath ~env ~package path =\n  match fromCompilerPath ~env path with\n  | Global (moduleName, path) -> (\n    match ProcessCmt.fileForModule ~package moduleName with\n    | None -> None\n    | Some file -> (\n      let env = QueryEnv.fromFile file in\n      match resolvePath ~env ~package ~path with\n      | None -> None\n      | Some (env, name) -> (\n        match Exported.find env.exported Exported.Module name with\n        | None -> None\n        | Some stamp -> (\n          match Stamps.findModule env.file.stamps stamp with\n          | None -> None\n          | Some declared -> Some (env, Some declared)))))\n  | Stamp stamp -> (\n    match Stamps.findModule env.file.stamps stamp with\n    | None -> None\n    | Some declared -> Some (env, Some declared))\n  | GlobalMod moduleName -> (\n    match ProcessCmt.fileForModule ~package moduleName with\n    | None -> None\n    | Some file ->\n      let env = 
QueryEnv.fromFile file in\n      Some (env, None))\n  | NotFound -> None\n  | Exported (env, name) -> (\n    match Exported.find env.exported Exported.Module name with\n    | None -> None\n    | Some stamp -> (\n      match Stamps.findModule env.file.stamps stamp with\n      | None -> None\n      | Some declared -> Some (env, Some declared)))\n\nlet resolveFromCompilerPath ~env ~package path =\n  match fromCompilerPath ~env path with\n  | Global (moduleName, path) -> (\n    let res =\n      match ProcessCmt.fileForModule ~package moduleName with\n      | None -> None\n      | Some file ->\n        let env = QueryEnv.fromFile file in\n        resolvePath ~env ~package ~path\n    in\n    match res with\n    | None -> NotFound\n    | Some (env, name) -> Exported (env, name))\n  | Stamp stamp -> Stamp stamp\n  | GlobalMod _ -> NotFound\n  | NotFound -> NotFound\n  | Exported (env, name) -> Exported (env, name)\n\nlet rec getSourceUri ~(env : QueryEnv.t) ~package (path : ModulePath.t) =\n  match path with\n  | File (uri, _moduleName) -> uri\n  | NotVisible -> env.file.uri\n  | IncludedModule (path, inner) -> (\n    Log.log \"INCLUDED MODULE\";\n    match resolveModuleFromCompilerPath ~env ~package path with\n    | None ->\n      Log.log \"NOT FOUND\";\n      getSourceUri ~env ~package inner\n    | Some (env, _declared) -> env.file.uri)\n  | ExportedModule {modulePath = inner} -> getSourceUri ~env ~package inner\n"
  },
  {
    "path": "analysis/src/Scope.ml",
    "content": "type item = SharedTypes.ScopeTypes.item\n\ntype t = item list\n\nopen SharedTypes.ScopeTypes\n\nlet itemToString item =\n  let str s = if s = \"\" then \"\\\"\\\"\" else s in\n  let list l = \"[\" ^ (l |> List.map str |> String.concat \", \") ^ \"]\" in\n  match item with\n  | Constructor (s, loc) -> \"Constructor \" ^ s ^ \" \" ^ Loc.toString loc\n  | Field (s, loc) -> \"Field \" ^ s ^ \" \" ^ Loc.toString loc\n  | Open sl -> \"Open \" ^ list sl\n  | Module (s, loc) -> \"Module \" ^ s ^ \" \" ^ Loc.toString loc\n  | Value (s, loc, _, _) -> \"Value \" ^ s ^ \" \" ^ Loc.toString loc\n  | Type (s, loc) -> \"Type \" ^ s ^ \" \" ^ Loc.toString loc\n[@@live]\n\nlet create () : t = []\nlet addConstructor ~name ~loc x = Constructor (name, loc) :: x\nlet addField ~name ~loc x = Field (name, loc) :: x\nlet addModule ~name ~loc x = Module (name, loc) :: x\nlet addOpen ~lid x = Open (Utils.flattenLongIdent lid @ [\"place holder\"]) :: x\nlet addValue ~name ~loc ?contextPath x =\n  let showDebug = !Cfg.debugFollowCtxPath in\n  (if showDebug then\n     match contextPath with\n     | None -> Printf.printf \"adding value '%s', no ctxPath\\n\" name\n     | Some contextPath ->\n       if showDebug then\n         Printf.printf \"adding value '%s' with ctxPath: %s\\n\" name\n           (SharedTypes.Completable.contextPathToString contextPath));\n  Value (name, loc, contextPath, x) :: x\nlet addType ~name ~loc x = Type (name, loc) :: x\n\nlet iterValuesBeforeFirstOpen f x =\n  let rec loop items =\n    match items with\n    | Value (s, loc, contextPath, scope) :: rest ->\n      f s loc contextPath scope;\n      loop rest\n    | Open _ :: _ -> ()\n    | _ :: rest -> loop rest\n    | [] -> ()\n  in\n  loop x\n\nlet iterValuesAfterFirstOpen f x =\n  let rec loop foundOpen items =\n    match items with\n    | Value (s, loc, contextPath, scope) :: rest ->\n      if foundOpen then f s loc contextPath scope;\n      loop foundOpen rest\n    | Open _ :: rest -> loop true rest\n 
   | _ :: rest -> loop foundOpen rest\n    | [] -> ()\n  in\n  loop false x\n\nlet iterConstructorsBeforeFirstOpen f x =\n  let rec loop items =\n    match items with\n    | Constructor (s, loc) :: rest ->\n      f s loc;\n      loop rest\n    | Open _ :: _ -> ()\n    | _ :: rest -> loop rest\n    | [] -> ()\n  in\n  loop x\n\nlet iterConstructorsAfterFirstOpen f x =\n  let rec loop foundOpen items =\n    match items with\n    | Constructor (s, loc) :: rest ->\n      if foundOpen then f s loc;\n      loop foundOpen rest\n    | Open _ :: rest -> loop true rest\n    | _ :: rest -> loop foundOpen rest\n    | [] -> ()\n  in\n  loop false x\n\nlet iterTypesBeforeFirstOpen f x =\n  let rec loop items =\n    match items with\n    | Type (s, loc) :: rest ->\n      f s loc;\n      loop rest\n    | Open _ :: _ -> ()\n    | _ :: rest -> loop rest\n    | [] -> ()\n  in\n  loop x\n\nlet iterTypesAfterFirstOpen f x =\n  let rec loop foundOpen items =\n    match items with\n    | Type (s, loc) :: rest ->\n      if foundOpen then f s loc;\n      loop foundOpen rest\n    | Open _ :: rest -> loop true rest\n    | _ :: rest -> loop foundOpen rest\n    | [] -> ()\n  in\n  loop false x\n\nlet iterModulesBeforeFirstOpen f x =\n  let rec loop items =\n    match items with\n    | Module (s, loc) :: rest ->\n      f s loc;\n      loop rest\n    | Open _ :: _ -> ()\n    | _ :: rest -> loop rest\n    | [] -> ()\n  in\n  loop x\n\nlet iterModulesAfterFirstOpen f x =\n  let rec loop foundOpen items =\n    match items with\n    | Module (s, loc) :: rest ->\n      if foundOpen then f s loc;\n      loop foundOpen rest\n    | Open _ :: rest -> loop true rest\n    | _ :: rest -> loop foundOpen rest\n    | [] -> ()\n  in\n  loop false x\n\nlet getRawOpens x =\n  x\n  |> Utils.filterMap (function\n       | Open path -> Some path\n       | _ -> None)\n"
  },
  {
    "path": "analysis/src/SemanticTokens.ml",
    "content": "(*\n   Generally speaking, semantic highlighting here takes care of categorizing identifiers,\n   since the kind of an identifier is highly context-specific and hard to catch with a grammar.\n\n   The big exception is labels, whose location is not represented in the AST\n   E.g. function definition such as (~foo as _) =>, application (~foo=3) and prop <div foo=3>.\n   Labels are handled in the grammar, not here.\n   Punned labels such as (~foo) => are both labels and identifiers. They are overridden here.\n\n   There are 2 cases where the grammar and semantic highlighting work jointly.\n   The styles emitted in the grammar and here need to be kept in sync.\n   1) For jsx angled brackets, the grammar handles basic cases such as />\n      whose location is not in the AST.\n      Instead < and > are handled here. Those would be difficult to disambiguate in a grammar.\n   2) Most operators are handled in the grammar. Except < and > are handled here.\n      The reason is again that < and > would be difficult do disambiguate in a grammar.\n*)\n\nmodule Token = struct\n  (* This needs to stay synced with the same legend in `server.ts` *)\n  (* See https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_semanticTokens *)\n  type tokenType =\n    | Operator  (** < and > *)\n    | Variable  (** let x = *)\n    | Type  (** type t = *)\n    | JsxTag  (** the < and > in <div> *)\n    | Namespace  (** module M = *)\n    | EnumMember  (** variant A or poly variant #A *)\n    | Property  (** {x:...} *)\n    | JsxLowercase  (** div in <div> *)\n\n  let tokenTypeToString = function\n    | Operator -> \"0\"\n    | Variable -> \"1\"\n    | Type -> \"2\"\n    | JsxTag -> \"3\"\n    | Namespace -> \"4\"\n    | EnumMember -> \"5\"\n    | Property -> \"6\"\n    | JsxLowercase -> \"7\"\n\n  let tokenTypeDebug = function\n    | Operator -> \"Operator\"\n    | Variable -> \"Variable\"\n    | Type -> \"Type\"\n    | JsxTag -> 
\"JsxTag\"\n    | Namespace -> \"Namespace\"\n    | EnumMember -> \"EnumMember\"\n    | Property -> \"Property\"\n    | JsxLowercase -> \"JsxLowercase\"\n\n  let tokenModifiersString = \"0\" (* None at the moment *)\n\n  type token = int * int * int * tokenType\n\n  type emitter = {\n    mutable tokens: token list;\n    mutable lastLine: int;\n    mutable lastChar: int;\n  }\n\n  let createEmitter () = {tokens = []; lastLine = 0; lastChar = 0}\n\n  let add ~line ~char ~length ~type_ e =\n    e.tokens <- (line, char, length, type_) :: e.tokens\n\n  let emitToken buf (line, char, length, type_) e =\n    let deltaLine = line - e.lastLine in\n    let deltaChar = if deltaLine = 0 then char - e.lastChar else char in\n    e.lastLine <- line;\n    e.lastChar <- char;\n    if Buffer.length buf > 0 then Buffer.add_char buf ',';\n    if\n      deltaLine >= 0 && deltaChar >= 0 && length >= 0\n      (* Defensive programming *)\n    then\n      Buffer.add_string buf\n        (string_of_int deltaLine ^ \",\" ^ string_of_int deltaChar ^ \",\"\n       ^ string_of_int length ^ \",\" ^ tokenTypeToString type_ ^ \",\"\n       ^ tokenModifiersString)\n\n  let remove_trailing_comma buffer =\n    let len = Buffer.length buffer in\n    if len > 0 && Buffer.nth buffer (len - 1) = ',' then\n      Buffer.truncate buffer (len - 1)\n\n  let emit e =\n    let sortedTokens =\n      e.tokens\n      |> List.sort (fun (l1, c1, _, _) (l2, c2, _, _) ->\n             if l1 = l2 then compare c1 c2 else compare l1 l2)\n    in\n    let buf = Buffer.create 1 in\n    sortedTokens |> List.iter (fun t -> e |> emitToken buf t);\n\n    (* Valid JSON arrays cannot have trailing commas *)\n    remove_trailing_comma buf;\n\n    Buffer.contents buf\nend\n\nlet isLowercaseId id =\n  id <> \"\"\n  &&\n  let c = id.[0] in\n  c == '_' || (c >= 'a' && c <= 'z')\n\nlet isUppercaseId id =\n  id <> \"\"\n  &&\n  let c = id.[0] in\n  c >= 'A' && c <= 'Z'\n\nlet emitFromRange (posStart, posEnd) ~type_ emitter =\n  let 
length =\n    if fst posStart = fst posEnd then snd posEnd - snd posStart else 0\n  in\n  if length > 0 then\n    emitter\n    |> Token.add ~line:(fst posStart) ~char:(snd posStart) ~length ~type_\n\nlet emitFromLoc ~loc ~type_ emitter =\n  emitter |> emitFromRange (Loc.range loc) ~type_\n\nlet emitLongident ?(backwards = false) ?(jsx = false)\n    ?(lowerCaseToken = if jsx then Token.JsxLowercase else Token.Variable)\n    ?(upperCaseToken = Token.Namespace) ?(lastToken = None) ?(posEnd = None)\n    ~pos ~lid ~debug emitter =\n  let rec flatten acc lid =\n    match lid with\n    | Longident.Lident txt -> txt :: acc\n    | Ldot (lid, txt) ->\n      let acc = if jsx && txt = \"createElement\" then acc else txt :: acc in\n      flatten acc lid\n    | _ -> acc\n  in\n  let rec loop pos segments =\n    match segments with\n    | [id] when isUppercaseId id || isLowercaseId id ->\n      let type_ =\n        match lastToken with\n        | Some type_ -> type_\n        | None -> if isUppercaseId id then upperCaseToken else lowerCaseToken\n      in\n      let posAfter = (fst pos, snd pos + String.length id) in\n      let posEnd, lenMismatch =\n        (* There could be a length mismatch when ids are quoted\n           e.g. 
variable /\"true\" or object field {\"x\":...} *)\n        match posEnd with\n        | Some posEnd -> (posEnd, posEnd <> posAfter)\n        | None -> (posAfter, false)\n      in\n      if debug then\n        Printf.printf \"Lident: %s %s%s %s\\n\" id (Pos.toString pos)\n          (if lenMismatch then \"->\" ^ Pos.toString posEnd else \"\")\n          (Token.tokenTypeDebug type_);\n      emitter |> emitFromRange (pos, posEnd) ~type_\n    | id :: segments when isUppercaseId id || isLowercaseId id ->\n      let type_ = if isUppercaseId id then upperCaseToken else lowerCaseToken in\n      if debug then\n        Printf.printf \"Ldot: %s %s %s\\n\" id (Pos.toString pos)\n          (Token.tokenTypeDebug type_);\n      let length = String.length id in\n      emitter |> emitFromRange (pos, (fst pos, snd pos + length)) ~type_;\n      loop (fst pos, snd pos + length + 1) segments\n    | _ -> ()\n  in\n  let segments = flatten [] lid in\n  if backwards then (\n    let totalLength = segments |> String.concat \".\" |> String.length in\n    if snd pos >= totalLength then\n      loop (fst pos, snd pos - totalLength) segments)\n  else loop pos segments\n\nlet emitVariable ~id ~debug ~loc emitter =\n  if debug then Printf.printf \"Variable: %s %s\\n\" id (Loc.toString loc);\n  emitter |> emitFromLoc ~loc ~type_:Variable\n\nlet emitJsxOpen ~lid ~debug ~(loc : Location.t) emitter =\n  if not loc.loc_ghost then\n    emitter |> emitLongident ~pos:(Loc.start loc) ~lid ~jsx:true ~debug\n\nlet emitJsxClose ~lid ~debug ~pos emitter =\n  emitter |> emitLongident ~backwards:true ~pos ~lid ~jsx:true ~debug\n\nlet emitJsxTag ~debug ~name ~pos emitter =\n  if debug then Printf.printf \"JsxTag %s: %s\\n\" name (Pos.toString pos);\n  emitter |> emitFromRange (pos, (fst pos, snd pos + 1)) ~type_:Token.JsxTag\n\nlet emitType ~lid ~debug ~(loc : Location.t) emitter =\n  if not loc.loc_ghost then\n    emitter\n    |> emitLongident ~lowerCaseToken:Token.Type ~pos:(Loc.start loc) ~lid ~debug\n\nlet 
emitRecordLabel ~(label : Longident.t Location.loc) ~debug emitter =\n  if not label.loc.loc_ghost then\n    emitter\n    |> emitLongident ~lowerCaseToken:Token.Property ~pos:(Loc.start label.loc)\n         ~posEnd:(Some (Loc.end_ label.loc))\n         ~lid:label.txt ~debug\n\nlet emitVariant ~(name : Longident.t Location.loc) ~debug emitter =\n  if not name.loc.loc_ghost then\n    emitter\n    |> emitLongident ~lastToken:(Some Token.EnumMember)\n         ~pos:(Loc.start name.loc) ~lid:name.txt ~debug\n\nlet command ~debug ~emitter ~path =\n  let processTypeArg (coreType : Parsetree.core_type) =\n    if debug then Printf.printf \"TypeArg: %s\\n\" (Loc.toString coreType.ptyp_loc)\n  in\n  let typ (iterator : Ast_iterator.iterator) (coreType : Parsetree.core_type) =\n    match coreType.ptyp_desc with\n    | Ptyp_constr ({txt = lid; loc}, args) ->\n      emitter |> emitType ~lid ~debug ~loc;\n      args |> List.iter processTypeArg;\n      Ast_iterator.default_iterator.typ iterator coreType\n    | _ -> Ast_iterator.default_iterator.typ iterator coreType\n  in\n  let type_declaration (iterator : Ast_iterator.iterator)\n      (tydecl : Parsetree.type_declaration) =\n    emitter\n    |> emitType ~lid:(Lident tydecl.ptype_name.txt) ~debug\n         ~loc:tydecl.ptype_name.loc;\n    Ast_iterator.default_iterator.type_declaration iterator tydecl\n  in\n  let pat (iterator : Ast_iterator.iterator) (p : Parsetree.pattern) =\n    match p.ppat_desc with\n    | Ppat_var {txt = id} ->\n      if isLowercaseId id then\n        emitter |> emitVariable ~id ~debug ~loc:p.ppat_loc;\n      Ast_iterator.default_iterator.pat iterator p\n    | Ppat_construct ({txt = Lident (\"true\" | \"false\")}, _) ->\n      (* Don't emit true or false *)\n      Ast_iterator.default_iterator.pat iterator p\n    | Ppat_record (cases, _) ->\n      cases\n      |> List.iter (fun (label, _) -> emitter |> emitRecordLabel ~label ~debug);\n      Ast_iterator.default_iterator.pat iterator p\n    | Ppat_construct 
(name, _) ->\n      emitter |> emitVariant ~name ~debug;\n      Ast_iterator.default_iterator.pat iterator p\n    | Ppat_type {txt = lid; loc} ->\n      emitter |> emitType ~lid ~debug ~loc;\n      Ast_iterator.default_iterator.pat iterator p\n    | _ -> Ast_iterator.default_iterator.pat iterator p\n  in\n  let expr (iterator : Ast_iterator.iterator) (e : Parsetree.expression) =\n    match e.pexp_desc with\n    | Pexp_ident {txt = lid; loc} ->\n      if lid <> Lident \"not\" then\n        if not loc.loc_ghost then\n          emitter\n          |> emitLongident ~pos:(Loc.start loc)\n               ~posEnd:(Some (Loc.end_ loc))\n               ~lid ~debug;\n      Ast_iterator.default_iterator.expr iterator e\n    | Pexp_apply ({pexp_desc = Pexp_ident lident; pexp_loc}, args)\n      when Res_parsetree_viewer.is_jsx_expression e ->\n      (*\n         Angled brackets:\n          - These are handled in the grammar:  <>  </>  </  />\n          - Here we handle `<` and `>`\n\n         Component names:\n          - handled like other Longitent.t, except lowercase id is marked Token.JsxLowercase\n      *)\n      emitter (* --> <div... 
*)\n      |> emitJsxTag ~debug ~name:\"<\"\n           ~pos:\n             (let pos = Loc.start e.pexp_loc in\n              (fst pos, snd pos - 1 (* the AST skips the loc of < somehow *)));\n      emitter |> emitJsxOpen ~lid:lident.txt ~debug ~loc:pexp_loc;\n\n      let posOfGreatherthanAfterProps =\n        let rec loop = function\n          | (Asttypes.Labelled \"children\", {Parsetree.pexp_loc}) :: _ ->\n            Loc.start pexp_loc\n          | _ :: args -> loop args\n          | [] -> (* should not happen *) (-1, -1)\n        in\n\n        loop args\n      in\n      let posOfFinalGreatherthan =\n        let pos = Loc.end_ e.pexp_loc in\n        (fst pos, snd pos - 1)\n      in\n      let selfClosing =\n        fst posOfGreatherthanAfterProps == fst posOfFinalGreatherthan\n        && snd posOfGreatherthanAfterProps + 1 == snd posOfFinalGreatherthan\n        (* there's an off-by one somehow in the AST *)\n      in\n      (if not selfClosing then\n         let lineStart, colStart = Loc.start pexp_loc in\n         let lineEnd, colEnd = Loc.end_ pexp_loc in\n         let length = if lineStart = lineEnd then colEnd - colStart else 0 in\n         let lineEndWhole, colEndWhole = Loc.end_ e.pexp_loc in\n         if length > 0 && colEndWhole > length then (\n           emitter\n           |> emitJsxClose ~debug ~lid:lident.txt\n                ~pos:(lineEndWhole, colEndWhole - 1);\n           emitter (* <foo ...props > <-- *)\n           |> emitJsxTag ~debug ~name:\">\" ~pos:posOfGreatherthanAfterProps;\n           emitter (* <foo> ... 
</foo> <-- *)\n           |> emitJsxTag ~debug ~name:\">\" ~pos:posOfFinalGreatherthan));\n\n      args |> List.iter (fun (_lbl, arg) -> iterator.expr iterator arg)\n    | Pexp_apply\n        ( {\n            pexp_desc =\n              Pexp_ident {txt = Longident.Lident ((\"<\" | \">\") as op); loc};\n          },\n          [_; _] ) ->\n      if debug then\n        Printf.printf \"Binary operator %s %s\\n\" op (Loc.toString loc);\n      emitter |> emitFromLoc ~loc ~type_:Operator;\n      Ast_iterator.default_iterator.expr iterator e\n    | Pexp_record (cases, _) ->\n      cases\n      |> List.filter_map (fun ((label : Longident.t Location.loc), _) ->\n             match label.txt with\n             | Longident.Lident s when not (Utils.isFirstCharUppercase s) ->\n               Some label\n             | _ -> None)\n      |> List.iter (fun label -> emitter |> emitRecordLabel ~label ~debug);\n      Ast_iterator.default_iterator.expr iterator e\n    | Pexp_field (_, label) | Pexp_setfield (_, label, _) ->\n      emitter |> emitRecordLabel ~label ~debug;\n      Ast_iterator.default_iterator.expr iterator e\n    | Pexp_construct ({txt = Lident (\"true\" | \"false\")}, _) ->\n      (* Don't emit true or false *)\n      Ast_iterator.default_iterator.expr iterator e\n    | Pexp_construct (name, _) ->\n      emitter |> emitVariant ~name ~debug;\n      Ast_iterator.default_iterator.expr iterator e\n    | _ -> Ast_iterator.default_iterator.expr iterator e\n  in\n  let module_expr (iterator : Ast_iterator.iterator)\n      (me : Parsetree.module_expr) =\n    match me.pmod_desc with\n    | Pmod_ident {txt = lid; loc} ->\n      if not loc.loc_ghost then\n        emitter |> emitLongident ~pos:(Loc.start loc) ~lid ~debug;\n      Ast_iterator.default_iterator.module_expr iterator me\n    | _ -> Ast_iterator.default_iterator.module_expr iterator me\n  in\n  let module_binding (iterator : Ast_iterator.iterator)\n      (mb : Parsetree.module_binding) =\n    if not 
mb.pmb_name.loc.loc_ghost then\n      emitter\n      |> emitLongident\n           ~pos:(Loc.start mb.pmb_name.loc)\n           ~lid:(Longident.Lident mb.pmb_name.txt) ~debug;\n    Ast_iterator.default_iterator.module_binding iterator mb\n  in\n  let module_declaration (iterator : Ast_iterator.iterator)\n      (md : Parsetree.module_declaration) =\n    if not md.pmd_name.loc.loc_ghost then\n      emitter\n      |> emitLongident\n           ~pos:(Loc.start md.pmd_name.loc)\n           ~lid:(Longident.Lident md.pmd_name.txt) ~debug;\n    Ast_iterator.default_iterator.module_declaration iterator md\n  in\n  let module_type (iterator : Ast_iterator.iterator)\n      (mt : Parsetree.module_type) =\n    match mt.pmty_desc with\n    | Pmty_ident {txt = lid; loc} ->\n      if not loc.loc_ghost then\n        emitter\n        |> emitLongident ~upperCaseToken:Token.Type ~pos:(Loc.start loc) ~lid\n             ~debug;\n      Ast_iterator.default_iterator.module_type iterator mt\n    | _ -> Ast_iterator.default_iterator.module_type iterator mt\n  in\n  let module_type_declaration (iterator : Ast_iterator.iterator)\n      (mtd : Parsetree.module_type_declaration) =\n    if not mtd.pmtd_name.loc.loc_ghost then\n      emitter\n      |> emitLongident ~upperCaseToken:Token.Type\n           ~pos:(Loc.start mtd.pmtd_name.loc)\n           ~lid:(Longident.Lident mtd.pmtd_name.txt) ~debug;\n    Ast_iterator.default_iterator.module_type_declaration iterator mtd\n  in\n  let open_description (iterator : Ast_iterator.iterator)\n      (od : Parsetree.open_description) =\n    if not od.popen_lid.loc.loc_ghost then\n      emitter\n      |> emitLongident\n           ~pos:(Loc.start od.popen_lid.loc)\n           ~lid:od.popen_lid.txt ~debug;\n    Ast_iterator.default_iterator.open_description iterator od\n  in\n  let label_declaration (iterator : Ast_iterator.iterator)\n      (ld : Parsetree.label_declaration) =\n    emitter\n    |> emitRecordLabel\n         ~label:{loc = ld.pld_name.loc; txt = 
Longident.Lident ld.pld_name.txt}\n         ~debug;\n    Ast_iterator.default_iterator.label_declaration iterator ld\n  in\n  let constructor_declaration (iterator : Ast_iterator.iterator)\n      (cd : Parsetree.constructor_declaration) =\n    emitter\n    |> emitVariant\n         ~name:{loc = cd.pcd_name.loc; txt = Longident.Lident cd.pcd_name.txt}\n         ~debug;\n    Ast_iterator.default_iterator.constructor_declaration iterator cd\n  in\n\n  let structure_item (iterator : Ast_iterator.iterator)\n      (item : Parsetree.structure_item) =\n    (match item.pstr_desc with\n    | Pstr_primitive {pval_name = {txt = id; loc}} ->\n      emitter |> emitVariable ~id ~debug ~loc\n    | _ -> ());\n    Ast_iterator.default_iterator.structure_item iterator item\n  in\n\n  let signature_item (iterator : Ast_iterator.iterator)\n      (item : Parsetree.signature_item) =\n    (match item.psig_desc with\n    | Psig_value {pval_name = {txt = id; loc}} ->\n      emitter |> emitVariable ~id ~debug ~loc\n    | _ -> ());\n    Ast_iterator.default_iterator.signature_item iterator item\n  in\n\n  let iterator =\n    {\n      Ast_iterator.default_iterator with\n      constructor_declaration;\n      expr;\n      label_declaration;\n      module_declaration;\n      module_binding;\n      module_expr;\n      module_type;\n      module_type_declaration;\n      open_description;\n      pat;\n      typ;\n      type_declaration;\n      structure_item;\n      signature_item;\n    }\n  in\n\n  if Files.classifySourceFile path = Res then (\n    let parser =\n      Res_driver.parsing_engine.parse_implementation ~for_printer:false\n    in\n    let {Res_driver.parsetree = structure; diagnostics} =\n      parser ~filename:path\n    in\n    if debug then\n      Printf.printf \"structure items:%d diagnostics:%d \\n\"\n        (List.length structure) (List.length diagnostics);\n    iterator.structure iterator structure |> ignore)\n  else\n    let parser = Res_driver.parsing_engine.parse_interface 
~for_printer:false in\n    let {Res_driver.parsetree = signature; diagnostics} =\n      parser ~filename:path\n    in\n    if debug then\n      Printf.printf \"signature items:%d diagnostics:%d \\n\"\n        (List.length signature) (List.length diagnostics);\n    iterator.signature iterator signature |> ignore\n\nlet semanticTokens ~currentFile =\n  let emitter = Token.createEmitter () in\n  command ~emitter ~debug:false ~path:currentFile;\n  Printf.printf \"{\\\"data\\\":[%s]}\" (Token.emit emitter)\n"
  },
  {
    "path": "analysis/src/Shared.ml",
    "content": "let tryReadCmt cmt =\n  if not (Files.exists cmt) then (\n    Log.log (\"Cmt file does not exist \" ^ cmt);\n    None)\n  else\n    match Cmt_format.read_cmt cmt with\n    | exception Cmi_format.Error err ->\n      Log.log\n        (\"Failed to load \" ^ cmt ^ \" as a cmt w/ ocaml version \" ^ \"406\"\n       ^ \", error: \"\n        ^\n        (Cmi_format.report_error Format.str_formatter err;\n         Format.flush_str_formatter ()));\n      None\n    | exception err ->\n      Log.log\n        (\"Invalid cmt format \" ^ cmt\n       ^ \" - probably wrong ocaml version, expected \" ^ Config.version ^ \" : \"\n       ^ Printexc.to_string err);\n      None\n    | x -> Some x\n\nlet tryReadCmi cmi =\n  if not (Files.exists cmi) then None\n  else\n    match Cmt_format.read_cmi cmi with\n    | exception _ ->\n      Log.log (\"Failed to load \" ^ cmi);\n      None\n    | x -> Some x\n\nlet rec dig (te : Types.type_expr) =\n  match te.desc with\n  | Tlink inner -> dig inner\n  | Tsubst inner -> dig inner\n  | Tpoly (inner, _) -> dig inner\n  | _ -> te\n\nlet digConstructor te =\n  match (dig te).desc with\n  | Tconstr (path, _args, _memo) -> Some path\n  | _ -> None\n\nlet findTypeConstructors (tel : Types.type_expr list) =\n  let paths = ref [] in\n  let addPath path =\n    if not (List.exists (Path.same path) !paths) then paths := path :: !paths\n  in\n  let rec loop (te : Types.type_expr) =\n    match te.desc with\n    | Tlink te1 | Tsubst te1 | Tpoly (te1, _) -> loop te1\n    | Tconstr (path, args, _) ->\n      addPath path;\n      args |> List.iter loop\n    | Tarrow (_, te1, te2, _) ->\n      loop te1;\n      loop te2\n    | Ttuple tel -> tel |> List.iter loop\n    | Tnil | Tvar _ | Tobject _ | Tfield _ | Tvariant _ | Tunivar _ | Tpackage _\n      ->\n      ()\n  in\n  tel |> List.iter loop;\n  !paths |> List.rev\n\nlet declToString ?printNameAsIs ?(recStatus = Types.Trec_not) name t =\n  PrintType.printDecl ?printNameAsIs ~recStatus name t\n\nlet 
cacheTypeToString = ref false\nlet typeTbl = Hashtbl.create 1\n\nlet typeToString ?lineWidth (t : Types.type_expr) =\n  match\n    if !cacheTypeToString then Hashtbl.find_opt typeTbl (t.id, t) else None\n  with\n  | None ->\n    let s = PrintType.printExpr ?lineWidth t in\n    Hashtbl.replace typeTbl (t.id, t) s;\n    s\n  | Some s -> s\n"
  },
  {
    "path": "analysis/src/SharedTypes.ml",
    "content": "let str s = if s = \"\" then \"\\\"\\\"\" else s\nlet list l = \"[\" ^ (l |> List.map str |> String.concat \", \") ^ \"]\"\nlet ident l = l |> List.map str |> String.concat \".\"\n\ntype path = string list\n\ntype typedFnArg = Asttypes.arg_label * Types.type_expr\n\nlet pathToString (path : path) = path |> String.concat \".\"\n\nmodule ModulePath = struct\n  type t =\n    | File of Uri.t * string\n    | NotVisible\n    | IncludedModule of Path.t * t\n    | ExportedModule of {name: string; modulePath: t; isType: bool}\n\n  let toPath modulePath tipName : path =\n    let rec loop modulePath current =\n      match modulePath with\n      | File _ -> current\n      | IncludedModule (_, inner) -> loop inner current\n      | ExportedModule {name; modulePath = inner} -> loop inner (name :: current)\n      | NotVisible -> current\n    in\n    loop modulePath [tipName]\nend\n\ntype field = {\n  stamp: int;\n  fname: string Location.loc;\n  typ: Types.type_expr;\n  optional: bool;\n  docstring: string list;\n  deprecated: string option;\n}\n\ntype constructorArgs =\n  | InlineRecord of field list\n  | Args of (Types.type_expr * Location.t) list\n\nmodule Constructor = struct\n  type t = {\n    stamp: int;\n    cname: string Location.loc;\n    args: constructorArgs;\n    res: Types.type_expr option;\n    typeDecl: string * Types.type_declaration;\n    docstring: string list;\n    deprecated: string option;\n  }\nend\n\nmodule Type = struct\n  type kind =\n    | Abstract of (Path.t * Types.type_expr list) option\n    | Open\n    | Tuple of Types.type_expr list\n    | Record of field list\n    | Variant of Constructor.t list\n\n  type t = {\n    kind: kind;\n    decl: Types.type_declaration;\n    name: string;\n    attributes: Parsetree.attributes;\n  }\nend\n\nmodule Exported = struct\n  type namedStampMap = (string, int) Hashtbl.t\n\n  type t = {\n    types_: namedStampMap;\n    values_: namedStampMap;\n    modules_: namedStampMap;\n  }\n\n  type kind = Type | 
Value | Module\n\n  let init () =\n    {\n      types_ = Hashtbl.create 10;\n      values_ = Hashtbl.create 10;\n      modules_ = Hashtbl.create 10;\n    }\n\n  let add t kind name x =\n    let tbl =\n      match kind with\n      | Type -> t.types_\n      | Value -> t.values_\n      | Module -> t.modules_\n    in\n    if Hashtbl.mem tbl name then false\n    else\n      let () = Hashtbl.add tbl name x in\n      true\n\n  let find t kind name =\n    let tbl =\n      match kind with\n      | Type -> t.types_\n      | Value -> t.values_\n      | Module -> t.modules_\n    in\n    Hashtbl.find_opt tbl name\n\n  let iter t kind f =\n    let tbl =\n      match kind with\n      | Type -> t.types_\n      | Value -> t.values_\n      | Module -> t.modules_\n    in\n    Hashtbl.iter f tbl\nend\n\nmodule Module = struct\n  type kind =\n    | Value of Types.type_expr\n    | Type of Type.t * Types.rec_status\n    | Module of {type_: t; isModuleType: bool}\n\n  and item = {\n    kind: kind;\n    name: string;\n    loc: Location.t;\n    docstring: string list;\n    deprecated: string option;\n  }\n\n  and structure = {\n    name: string;\n    docstring: string list;\n    exported: Exported.t;\n    items: item list;\n    deprecated: string option;\n  }\n\n  and t = Ident of Path.t | Structure of structure | Constraint of t * t\nend\n\nmodule Declared = struct\n  type 'item t = {\n    name: string Location.loc;\n    extentLoc: Location.t;\n    stamp: int;\n    modulePath: ModulePath.t;\n    isExported: bool;\n    deprecated: string option;\n    docstring: string list;\n    item: 'item;\n  }\nend\n\nmodule Stamps : sig\n  type t\n\n  val addConstructor : t -> int -> Constructor.t Declared.t -> unit\n  val addModule : t -> int -> Module.t Declared.t -> unit\n  val addType : t -> int -> Type.t Declared.t -> unit\n  val addValue : t -> int -> Types.type_expr Declared.t -> unit\n  val findModule : t -> int -> Module.t Declared.t option\n  val findType : t -> int -> Type.t Declared.t 
option\n  val findValue : t -> int -> Types.type_expr Declared.t option\n  val init : unit -> t\n  val iterConstructors : (int -> Constructor.t Declared.t -> unit) -> t -> unit\n  val iterModules : (int -> Module.t Declared.t -> unit) -> t -> unit\n  val iterTypes : (int -> Type.t Declared.t -> unit) -> t -> unit\n  val iterValues : (int -> Types.type_expr Declared.t -> unit) -> t -> unit\nend = struct\n  type 't stampMap = (int, 't Declared.t) Hashtbl.t\n\n  type kind =\n    | KType of Type.t Declared.t\n    | KValue of Types.type_expr Declared.t\n    | KModule of Module.t Declared.t\n    | KConstructor of Constructor.t Declared.t\n\n  type t = (int, kind) Hashtbl.t\n\n  let init () = Hashtbl.create 10\n\n  let addConstructor (stamps : t) stamp declared =\n    Hashtbl.add stamps stamp (KConstructor declared)\n\n  let addModule stamps stamp declared =\n    Hashtbl.add stamps stamp (KModule declared)\n\n  let addType stamps stamp declared = Hashtbl.add stamps stamp (KType declared)\n\n  let addValue stamps stamp declared =\n    Hashtbl.add stamps stamp (KValue declared)\n\n  let findModule stamps stamp =\n    match Hashtbl.find_opt stamps stamp with\n    | Some (KModule declared) -> Some declared\n    | _ -> None\n\n  let findType stamps stamp =\n    match Hashtbl.find_opt stamps stamp with\n    | Some (KType declared) -> Some declared\n    | _ -> None\n\n  let findValue stamps stamp =\n    match Hashtbl.find_opt stamps stamp with\n    | Some (KValue declared) -> Some declared\n    | _ -> None\n\n  let iterModules f stamps =\n    Hashtbl.iter\n      (fun stamp d ->\n        match d with\n        | KModule d -> f stamp d\n        | _ -> ())\n      stamps\n\n  let iterTypes f stamps =\n    Hashtbl.iter\n      (fun stamp d ->\n        match d with\n        | KType d -> f stamp d\n        | _ -> ())\n      stamps\n\n  let iterValues f stamps =\n    Hashtbl.iter\n      (fun stamp d ->\n        match d with\n        | KValue d -> f stamp d\n        | _ -> ())\n      
stamps\n\n  let iterConstructors f stamps =\n    Hashtbl.iter\n      (fun stamp d ->\n        match d with\n        | KConstructor d -> f stamp d\n        | _ -> ())\n      stamps\nend\n\nmodule File = struct\n  type t = {\n    uri: Uri.t;\n    stamps: Stamps.t;\n    moduleName: string;\n    structure: Module.structure;\n  }\n\n  let create moduleName uri =\n    {\n      uri;\n      stamps = Stamps.init ();\n      moduleName;\n      structure =\n        {\n          name = moduleName;\n          docstring = [];\n          exported = Exported.init ();\n          items = [];\n          deprecated = None;\n        };\n    }\nend\n\nmodule QueryEnv : sig\n  type t = private {\n    file: File.t;\n    exported: Exported.t;\n    pathRev: path;\n    parent: t option;\n  }\n  val fromFile : File.t -> t\n  val enterStructure : t -> Module.structure -> t\n\n  (* Express a path starting from the module represented by the env.\n     E.g. the env is at A.B.C and the path is D.\n     The result is A.B.C.D if D is inside C.\n     Or A.B.D or A.D or D if it's in one of its parents. 
*)\n  val pathFromEnv : t -> path -> bool * path\n\n  val toString : t -> string\nend = struct\n  type t = {file: File.t; exported: Exported.t; pathRev: path; parent: t option}\n\n  let toString {file; pathRev} =\n    file.moduleName :: List.rev pathRev |> String.concat \".\"\n\n  let fromFile (file : File.t) =\n    {file; exported = file.structure.exported; pathRev = []; parent = None}\n\n  (* Prune a path and find a parent environment that contains the module name *)\n  let rec prunePath pathRev env name =\n    if Exported.find env.exported Module name <> None then (true, pathRev)\n    else\n      match (pathRev, env.parent) with\n      | _ :: rest, Some env -> prunePath rest env name\n      | _ -> (false, [])\n\n  let pathFromEnv env path =\n    match path with\n    | [] -> (true, env.pathRev |> List.rev)\n    | name :: _ ->\n      let found, prunedPathRev = prunePath env.pathRev env name in\n      (found, List.rev_append prunedPathRev path)\n\n  let enterStructure env (structure : Module.structure) =\n    let name = structure.name in\n    let pathRev = name :: snd (prunePath env.pathRev env name) in\n    {env with exported = structure.exported; pathRev; parent = Some env}\nend\n\ntype typeArgContext = {\n  env: QueryEnv.t;\n  typeArgs: Types.type_expr list;\n  typeParams: Types.type_expr list;\n}\n\ntype polyVariantConstructor = {\n  name: string;\n  displayName: string;\n  args: Types.type_expr list;\n}\n\n(* TODO(env-stuff) All envs for bool string etc can be removed. 
*)\ntype innerType = TypeExpr of Types.type_expr | ExtractedType of completionType\nand completionType =\n  | Tuple of QueryEnv.t * Types.type_expr list * Types.type_expr\n  | Texn of QueryEnv.t\n  | Tpromise of QueryEnv.t * Types.type_expr\n  | Toption of QueryEnv.t * innerType\n  | Tresult of {\n      env: QueryEnv.t;\n      okType: Types.type_expr;\n      errorType: Types.type_expr;\n    }\n  | Tbool of QueryEnv.t\n  | Tarray of QueryEnv.t * innerType\n  | Tstring of QueryEnv.t\n  | TtypeT of {env: QueryEnv.t; path: Path.t}\n  | Tvariant of {\n      env: QueryEnv.t;\n      constructors: Constructor.t list;\n      variantDecl: Types.type_declaration;\n      variantName: string;\n    }\n  | Tpolyvariant of {\n      env: QueryEnv.t;\n      constructors: polyVariantConstructor list;\n      typeExpr: Types.type_expr;\n    }\n  | Trecord of {\n      env: QueryEnv.t;\n      fields: field list;\n      definition:\n        [ `NameOnly of string\n          (** When we only have the name, like when pulling the record from a\n              declared type. *)\n        | `TypeExpr of Types.type_expr\n          (** When we have the full type expr from the compiler. 
*) ];\n    }\n  | TinlineRecord of {env: QueryEnv.t; fields: field list}\n  | Tfunction of {\n      env: QueryEnv.t;\n      args: typedFnArg list;\n      typ: Types.type_expr;\n      uncurried: bool;\n      returnType: Types.type_expr;\n    }\n\nmodule Env = struct\n  type t = {stamps: Stamps.t; modulePath: ModulePath.t}\n  let addExportedModule ~name ~isType env =\n    {\n      env with\n      modulePath = ExportedModule {name; modulePath = env.modulePath; isType};\n    }\n  let addModule ~name env = env |> addExportedModule ~name ~isType:false\n  let addModuleType ~name env = env |> addExportedModule ~name ~isType:true\nend\n\ntype filePath = string\n\ntype paths =\n  | Impl of {cmt: filePath; res: filePath}\n  | Namespace of {cmt: filePath}\n  | IntfAndImpl of {\n      cmti: filePath;\n      resi: filePath;\n      cmt: filePath;\n      res: filePath;\n    }\n\nlet showPaths paths =\n  match paths with\n  | Impl {cmt; res} ->\n    Printf.sprintf \"Impl cmt:%s res:%s\" (Utils.dumpPath cmt)\n      (Utils.dumpPath res)\n  | Namespace {cmt} -> Printf.sprintf \"Namespace cmt:%s\" (Utils.dumpPath cmt)\n  | IntfAndImpl {cmti; resi; cmt; res} ->\n    Printf.sprintf \"IntfAndImpl cmti:%s resi:%s cmt:%s res:%s\"\n      (Utils.dumpPath cmti) (Utils.dumpPath resi) (Utils.dumpPath cmt)\n      (Utils.dumpPath res)\n\nlet getSrc p =\n  match p with\n  | Impl {res} -> [res]\n  | Namespace _ -> []\n  | IntfAndImpl {resi; res} -> [resi; res]\n\nlet getUri p =\n  match p with\n  | Impl {res} -> Uri.fromPath res\n  | Namespace {cmt} -> Uri.fromPath cmt\n  | IntfAndImpl {resi} -> Uri.fromPath resi\n\nlet getUris p =\n  match p with\n  | Impl {res} -> [Uri.fromPath res]\n  | Namespace {cmt} -> [Uri.fromPath cmt]\n  | IntfAndImpl {res; resi} -> [Uri.fromPath res; Uri.fromPath resi]\n\nlet getCmtPath ~uri p =\n  match p with\n  | Impl {cmt} -> cmt\n  | Namespace {cmt} -> cmt\n  | IntfAndImpl {cmti; cmt} ->\n    let interface = Utils.endsWith (Uri.toPath uri) \"i\" in\n    if interface 
then cmti else cmt\n\nmodule Tip = struct\n  type t = Value | Type | Field of string | Constructor of string | Module\n\n  let toString tip =\n    match tip with\n    | Value -> \"Value\"\n    | Type -> \"Type\"\n    | Field f -> \"Field(\" ^ f ^ \")\"\n    | Constructor a -> \"Constructor(\" ^ a ^ \")\"\n    | Module -> \"Module\"\nend\n\nlet rec pathIdentToString (p : Path.t) =\n  match p with\n  | Pident {name} -> name\n  | Pdot (nextPath, id, _) ->\n    Printf.sprintf \"%s.%s\" (pathIdentToString nextPath) id\n  | Papply _ -> \"\"\n\ntype locKind =\n  | LocalReference of int * Tip.t\n  | GlobalReference of string * string list * Tip.t\n  | NotFound\n  | Definition of int * Tip.t\n\ntype locType =\n  | Typed of string * Types.type_expr * locKind\n  | Constant of Asttypes.constant\n  | LModule of locKind\n  | TopLevelModule of string\n  | TypeDefinition of string * Types.type_declaration * int\n\ntype locItem = {loc: Location.t; locType: locType}\n\nmodule LocationSet = Set.Make (struct\n  include Location\n\n  let compare loc1 loc2 = compare loc2 loc1\n\n  (* polymorphic compare should be OK *)\nend)\n\ntype extra = {\n  internalReferences: (int, Location.t list) Hashtbl.t;\n  externalReferences:\n    (string, (string list * Tip.t * Location.t) list) Hashtbl.t;\n  fileReferences: (string, LocationSet.t) Hashtbl.t;\n  mutable locItems: locItem list;\n}\n\ntype file = string\n\nmodule FileSet = Set.Make (String)\n\ntype builtInCompletionModules = {\n  arrayModulePath: string list;\n  optionModulePath: string list;\n  stringModulePath: string list;\n  intModulePath: string list;\n  floatModulePath: string list;\n  promiseModulePath: string list;\n  listModulePath: string list;\n  resultModulePath: string list;\n  exnModulePath: string list;\n  regexpModulePath: string list;\n}\n\ntype package = {\n  genericJsxModule: string option;\n  suffix: string;\n  rootPath: filePath;\n  projectFiles: FileSet.t;\n  dependenciesFiles: FileSet.t;\n  pathsForModule: (file, paths) 
Hashtbl.t;\n  namespace: string option;\n  builtInCompletionModules: builtInCompletionModules;\n  opens: path list;\n  uncurried: bool;\n  rescriptVersion: int * int;\n}\n\nlet allFilesInPackage package =\n  FileSet.union package.projectFiles package.dependenciesFiles\n\ntype full = {extra: extra; file: File.t; package: package}\n\nlet initExtra () =\n  {\n    internalReferences = Hashtbl.create 10;\n    externalReferences = Hashtbl.create 10;\n    fileReferences = Hashtbl.create 10;\n    locItems = [];\n  }\n\ntype state = {\n  packagesByRoot: (string, package) Hashtbl.t;\n  rootForUri: (Uri.t, string) Hashtbl.t;\n  cmtCache: (filePath, File.t) Hashtbl.t;\n}\n\n(* There's only one state, so it can as well be global *)\nlet state =\n  {\n    packagesByRoot = Hashtbl.create 1;\n    rootForUri = Hashtbl.create 30;\n    cmtCache = Hashtbl.create 30;\n  }\n\nlet locKindToString = function\n  | LocalReference (_, tip) -> \"(LocalReference \" ^ Tip.toString tip ^ \")\"\n  | GlobalReference _ -> \"GlobalReference\"\n  | NotFound -> \"NotFound\"\n  | Definition (_, tip) -> \"(Definition \" ^ Tip.toString tip ^ \")\"\n\nlet locTypeToString = function\n  | Typed (name, e, locKind) ->\n    \"Typed \" ^ name ^ \" \" ^ Shared.typeToString e ^ \" \"\n    ^ locKindToString locKind\n  | Constant _ -> \"Constant\"\n  | LModule locKind -> \"LModule \" ^ locKindToString locKind\n  | TopLevelModule _ -> \"TopLevelModule\"\n  | TypeDefinition _ -> \"TypeDefinition\"\n\nlet locItemToString {loc = {Location.loc_start; loc_end}; locType} =\n  let pos1 = Utils.cmtPosToPosition loc_start in\n  let pos2 = Utils.cmtPosToPosition loc_end in\n  Printf.sprintf \"%d:%d-%d:%d %s\" pos1.line pos1.character pos2.line\n    pos2.character (locTypeToString locType)\n\n(* needed for debugging *)\nlet _ = locItemToString\n\nmodule Completable = struct\n  (* Completion context *)\n  type completionContext = Type | Value | Module | Field | ValueOrField\n\n  type argumentLabel =\n    | Unlabelled of 
{argumentPosition: int}\n    | Labelled of string\n    | Optional of string\n\n  (** Additional context for nested completion where needed. *)\n  type nestedContext =\n    | RecordField of {seenFields: string list}\n        (** Completing for a record field, and we already saw the following\n            fields... *)\n    | CameFromRecordField of string\n        (** We just came from this field (we leverage use this for better\n            completion names etc) *)\n\n  type nestedPath =\n    | NTupleItem of {itemNum: int}\n    | NFollowRecordField of {fieldName: string}\n    | NRecordBody of {seenFields: string list}\n    | NVariantPayload of {constructorName: string; itemNum: int}\n    | NPolyvariantPayload of {constructorName: string; itemNum: int}\n    | NArray\n\n  let nestedPathToString p =\n    match p with\n    | NTupleItem {itemNum} -> \"tuple($\" ^ string_of_int itemNum ^ \")\"\n    | NFollowRecordField {fieldName} -> \"recordField(\" ^ fieldName ^ \")\"\n    | NRecordBody _ -> \"recordBody\"\n    | NVariantPayload {constructorName; itemNum} ->\n      \"variantPayload::\" ^ constructorName ^ \"($\" ^ string_of_int itemNum ^ \")\"\n    | NPolyvariantPayload {constructorName; itemNum} ->\n      \"polyvariantPayload::\" ^ constructorName ^ \"($\" ^ string_of_int itemNum\n      ^ \")\"\n    | NArray -> \"array\"\n\n  type contextPath =\n    | CPString\n    | CPArray of contextPath option\n    | CPInt\n    | CPFloat\n    | CPBool\n    | CPOption of contextPath\n    | CPApply of contextPath * Asttypes.arg_label list\n    | CPId of {\n        path: string list;\n        completionContext: completionContext;\n        loc: Location.t;\n      }\n    | CPField of {\n        contextPath: contextPath;\n        fieldName: string;\n        posOfDot: (int * int) option;\n        exprLoc: Location.t;\n        inJsx: bool;\n            (** Whether this field access was found in a JSX context. 
*)\n      }\n    | CPObj of contextPath * string\n    | CPAwait of contextPath\n    | CPPipe of {\n        synthetic: bool;  (** Whether this pipe completion is synthetic. *)\n        contextPath: contextPath;\n        id: string;\n        inJsx: bool;  (** Whether this pipe was found in a JSX context. *)\n        lhsLoc: Location.t;\n            (** The loc item for the left hand side of the pipe. *)\n      }\n    | CTuple of contextPath list\n    | CArgument of {\n        functionContextPath: contextPath;\n        argumentLabel: argumentLabel;\n      }\n    | CJsxPropValue of {\n        pathToComponent: string list;\n        propName: string;\n        emptyJsxPropNameHint: string option;\n            (* This helps handle a special case in JSX prop completion. More info where this is used. *)\n      }\n    | CPatternPath of {rootCtxPath: contextPath; nested: nestedPath list}\n    | CTypeAtPos of Location.t\n        (** A position holding something that might have a *compiled* type. *)\n\n  type patternMode = Default | Destructuring\n\n  type decoratorPayload =\n    | Module of string\n    | ModuleWithImportAttributes of {nested: nestedPath list; prefix: string}\n    | JsxConfig of {nested: nestedPath list; prefix: string}\n\n  type t =\n    | Cdecorator of string  (** e.g. @module *)\n    | CdecoratorPayload of decoratorPayload\n    | CextensionNode of string  (** e.g. %todo *)\n    | CnamedArg of contextPath * string * string list\n        (** e.g. (..., \"label\", [\"l1\", \"l2\"]) for ...(...~l1...~l2...~label...)\n        *)\n    | Cnone  (** e.g. don't complete inside strings *)\n    | Cpath of contextPath\n    | Cjsx of string list * string * string list\n        (** E.g. ([\"M\", \"Comp\"], \"id\", [\"id1\", \"id2\"]) for <M.Comp id1=...\n            id2=... ... 
id *)\n    | Cexpression of {\n        contextPath: contextPath;\n        nested: nestedPath list;\n        prefix: string;\n      }\n    | Cpattern of {\n        contextPath: contextPath;\n        nested: nestedPath list;\n        prefix: string;\n        patternMode: patternMode;\n        fallback: t option;\n      }\n    | CexhaustiveSwitch of {contextPath: contextPath; exprLoc: Location.t}\n    | ChtmlElement of {prefix: string}\n\n  let completionContextToString = function\n    | Value -> \"Value\"\n    | Type -> \"Type\"\n    | Module -> \"Module\"\n    | Field -> \"Field\"\n    | ValueOrField -> \"ValueOrField\"\n\n  let rec contextPathToString = function\n    | CPString -> \"string\"\n    | CPInt -> \"int\"\n    | CPFloat -> \"float\"\n    | CPBool -> \"bool\"\n    | CPAwait ctxPath -> \"await \" ^ contextPathToString ctxPath\n    | CPOption ctxPath -> \"option<\" ^ contextPathToString ctxPath ^ \">\"\n    | CPApply (cp, labels) ->\n      contextPathToString cp ^ \"(\"\n      ^ (labels\n        |> List.map (function\n             | Asttypes.Nolabel -> \"Nolabel\"\n             | Labelled s -> \"~\" ^ s\n             | Optional s -> \"?\" ^ s)\n        |> String.concat \", \")\n      ^ \")\"\n    | CPArray (Some ctxPath) -> \"array<\" ^ contextPathToString ctxPath ^ \">\"\n    | CPArray None -> \"array\"\n    | CPId {path; completionContext} ->\n      completionContextToString completionContext ^ list path\n    | CPField {contextPath = cp; fieldName = s} ->\n      contextPathToString cp ^ \".\" ^ str s\n    | CPObj (cp, s) -> contextPathToString cp ^ \"[\\\"\" ^ s ^ \"\\\"]\"\n    | CPPipe {contextPath; id; inJsx} ->\n      contextPathToString contextPath\n      ^ \"->\" ^ id\n      ^ if inJsx then \" <<jsx>>\" else \"\"\n    | CTuple ctxPaths ->\n      \"CTuple(\"\n      ^ (ctxPaths |> List.map contextPathToString |> String.concat \", \")\n      ^ \")\"\n    | CArgument {functionContextPath; argumentLabel} ->\n      \"CArgument \"\n      ^ 
contextPathToString functionContextPath\n      ^ \"(\"\n      ^ (match argumentLabel with\n        | Unlabelled {argumentPosition} -> \"$\" ^ string_of_int argumentPosition\n        | Labelled name -> \"~\" ^ name\n        | Optional name -> \"~\" ^ name ^ \"=?\")\n      ^ \")\"\n    | CJsxPropValue {pathToComponent; propName} ->\n      \"CJsxPropValue \" ^ (pathToComponent |> list) ^ \" \" ^ propName\n    | CPatternPath {rootCtxPath; nested} ->\n      \"CPatternPath(\"\n      ^ contextPathToString rootCtxPath\n      ^ \")\" ^ \"->\"\n      ^ (nested\n        |> List.map (fun nestedPath -> nestedPathToString nestedPath)\n        |> String.concat \"->\")\n    | CTypeAtPos _loc -> \"CTypeAtPos()\"\n\n  let toString = function\n    | Cpath cp -> \"Cpath \" ^ contextPathToString cp\n    | Cdecorator s -> \"Cdecorator(\" ^ str s ^ \")\"\n    | CextensionNode s -> \"CextensionNode(\" ^ str s ^ \")\"\n    | CdecoratorPayload (Module s) -> \"CdecoratorPayload(module=\" ^ s ^ \")\"\n    | CdecoratorPayload (ModuleWithImportAttributes _) ->\n      \"CdecoratorPayload(moduleWithImportAttributes)\"\n    | CdecoratorPayload (JsxConfig _) -> \"JsxConfig\"\n    | CnamedArg (cp, s, sl2) ->\n      \"CnamedArg(\"\n      ^ (cp |> contextPathToString)\n      ^ \", \" ^ str s ^ \", \" ^ (sl2 |> list) ^ \")\"\n    | Cnone -> \"Cnone\"\n    | Cjsx (sl1, s, sl2) ->\n      \"Cjsx(\" ^ (sl1 |> list) ^ \", \" ^ str s ^ \", \" ^ (sl2 |> list) ^ \")\"\n    | Cpattern {contextPath; nested; prefix} -> (\n      \"Cpattern \"\n      ^ contextPathToString contextPath\n      ^ (if prefix = \"\" then \"\" else \"=\" ^ prefix)\n      ^\n      match nested with\n      | [] -> \"\"\n      | nestedPaths ->\n        \"->\"\n        ^ (nestedPaths\n          |> List.map (fun nestedPath -> nestedPathToString nestedPath)\n          |> String.concat \", \"))\n    | Cexpression {contextPath; nested; prefix} -> (\n      \"Cexpression \"\n      ^ contextPathToString contextPath\n      ^ (if prefix = \"\" then 
\"\" else \"=\" ^ prefix)\n      ^\n      match nested with\n      | [] -> \"\"\n      | nestedPaths ->\n        \"->\"\n        ^ (nestedPaths\n          |> List.map (fun nestedPath -> nestedPathToString nestedPath)\n          |> String.concat \", \"))\n    | CexhaustiveSwitch {contextPath} ->\n      \"CexhaustiveSwitch \" ^ contextPathToString contextPath\n    | ChtmlElement {prefix} -> \"ChtmlElement <\" ^ prefix\nend\n\nmodule ScopeTypes = struct\n  type item =\n    | Constructor of string * Location.t\n    | Field of string * Location.t\n    | Module of string * Location.t\n    | Open of string list\n    | Type of string * Location.t\n    | Value of string * Location.t * Completable.contextPath option * item list\nend\n\nmodule Completion = struct\n  type kind =\n    | Module of {docstring: string list; module_: Module.t}\n    | Value of Types.type_expr\n    | ObjLabel of Types.type_expr\n    | Label of string\n    | Type of Type.t\n    | Constructor of Constructor.t * string\n    | PolyvariantConstructor of polyVariantConstructor * string\n    | Field of field * string\n    | FileModule of string\n    | Snippet of string\n    | ExtractedType of completionType * [`Value | `Type]\n    | FollowContextPath of Completable.contextPath * ScopeTypes.item list\n\n  type t = {\n    name: string;\n    sortText: string option;\n    insertText: string option;\n    filterText: string option;\n    insertTextFormat: Protocol.insertTextFormat option;\n    env: QueryEnv.t;\n    deprecated: string option;\n    docstring: string list;\n    kind: kind;\n    detail: string option;\n    typeArgContext: typeArgContext option;\n    data: (string * string) list option;\n    additionalTextEdits: Protocol.textEdit list option;\n    synthetic: bool;\n        (** Whether this item is an made up, synthetic item or not. 
*)\n  }\n\n  let create ?(synthetic = false) ?additionalTextEdits ?data ?typeArgContext\n      ?(includesSnippets = false) ?insertText ~kind ~env ?sortText ?deprecated\n      ?filterText ?detail ?(docstring = []) name =\n    {\n      name;\n      env;\n      deprecated;\n      docstring;\n      kind;\n      sortText;\n      insertText;\n      insertTextFormat =\n        (if includesSnippets then Some Protocol.Snippet else None);\n      filterText;\n      detail;\n      typeArgContext;\n      data;\n      additionalTextEdits;\n      synthetic;\n    }\n\n  (* https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_completion *)\n  (* https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#completionItemKind *)\n  let kindToInt kind =\n    match kind with\n    | Module _ -> 9\n    | FileModule _ -> 9\n    | Constructor (_, _) | PolyvariantConstructor (_, _) -> 4\n    | ObjLabel _ -> 4\n    | Label _ -> 4\n    | Field (_, _) -> 5\n    | Type _ | ExtractedType (_, `Type) -> 22\n    | Value _ | ExtractedType (_, `Value) -> 12\n    | Snippet _ | FollowContextPath _ -> 15\nend\n\nlet kindFromInnerType (t : innerType) =\n  match t with\n  | ExtractedType extractedType ->\n    Completion.ExtractedType (extractedType, `Value)\n  | TypeExpr typ -> Value typ\n\nmodule CursorPosition = struct\n  type t = NoCursor | HasCursor | EmptyLoc\n\n  let classifyLoc loc ~pos =\n    if loc |> Loc.hasPos ~pos then HasCursor\n    else if loc |> Loc.end_ = (Location.none |> Loc.end_) then EmptyLoc\n    else NoCursor\n\n  let classifyLocationLoc (loc : 'a Location.loc) ~pos =\n    if Loc.start loc.Location.loc <= pos && pos <= Loc.end_ loc.loc then\n      HasCursor\n    else if loc.loc |> Loc.end_ = (Location.none |> Loc.end_) then EmptyLoc\n    else NoCursor\n\n  let classifyPositions pos ~posStart ~posEnd =\n    if posStart <= pos && pos <= posEnd then HasCursor\n    else if posEnd = (Location.none |> 
Loc.end_) then EmptyLoc\n    else NoCursor\n\n  let locHasCursor loc ~pos = loc |> classifyLoc ~pos = HasCursor\n\n  let locIsEmpty loc ~pos = loc |> classifyLoc ~pos = EmptyLoc\nend\n\ntype labelled = {\n  name: string;\n  opt: bool;\n  posStart: int * int;\n  posEnd: int * int;\n}\n\ntype label = labelled option\ntype arg = {label: label; exp: Parsetree.expression}\n\nlet extractExpApplyArgs ~args =\n  let rec processArgs ~acc args =\n    match args with\n    | (((Asttypes.Labelled s | Optional s) as label), (e : Parsetree.expression))\n      :: rest -> (\n      let namedArgLoc =\n        e.pexp_attributes\n        |> List.find_opt (fun ({Asttypes.txt}, _) -> txt = \"res.namedArgLoc\")\n      in\n      match namedArgLoc with\n      | Some ({loc}, _) ->\n        let labelled =\n          {\n            name = s;\n            opt =\n              (match label with\n              | Optional _ -> true\n              | _ -> false);\n            posStart = Loc.start loc;\n            posEnd = Loc.end_ loc;\n          }\n        in\n        processArgs ~acc:({label = Some labelled; exp = e} :: acc) rest\n      | None -> processArgs ~acc rest)\n    | (Asttypes.Nolabel, (e : Parsetree.expression)) :: rest ->\n      if e.pexp_loc.loc_ghost then processArgs ~acc rest\n      else processArgs ~acc:({label = None; exp = e} :: acc) rest\n    | [] -> List.rev acc\n  in\n  args |> processArgs ~acc:[]\n"
  },
  {
    "path": "analysis/src/SignatureHelp.ml",
    "content": "open SharedTypes\ntype cursorAtArg = Unlabelled of int | Labelled of string\n\n(* Produces the doc string shown below the signature help for each parameter. *)\nlet docsForLabel typeExpr ~file ~package ~supportsMarkdownLinks =\n  let types = Hover.findRelevantTypesFromType ~file ~package typeExpr in\n  let typeNames = types |> List.map (fun {Hover.name} -> name) in\n  let typeDefinitions =\n    types\n    |> List.map (fun {Hover.decl; name; env; loc; path} ->\n           let linkToTypeDefinitionStr =\n             if supportsMarkdownLinks then\n               Markdown.goToDefinitionText ~env ~pos:loc.Warnings.loc_start\n             else \"\"\n           in\n           (* Since printing the whole name via its path can get quite long, and\n              we're short on space for the signature help, we'll only print the\n              fully \"qualified\" type name if we must (ie if several types we're\n              displaying have the same name). *)\n           let multipleTypesHaveThisName =\n             typeNames\n             |> List.filter (fun typeName -> typeName = name)\n             |> List.length > 1\n           in\n           let typeName =\n             if multipleTypesHaveThisName then\n               path |> SharedTypes.pathIdentToString\n             else name\n           in\n           Markdown.codeBlock\n             (Shared.declToString ~printNameAsIs:true typeName decl)\n           ^ linkToTypeDefinitionStr)\n  in\n  typeDefinitions |> String.concat \"\\n\"\n\nlet findFunctionType ~currentFile ~debug ~path ~pos =\n  (* Start by looking at the typed info at the loc of the fn *)\n  match Cmt.loadFullCmtFromPath ~path with\n  | None -> None\n  | Some full -> (\n    let {file; package} = full in\n    let env = QueryEnv.fromFile file in\n    let fnFromLocItem =\n      match References.getLocItem ~full ~pos ~debug:false with\n      | Some {locType = Typed (_, typeExpr, locKind)} -> (\n        let docstring =\n          match 
References.definedForLoc ~file ~package locKind with\n          | None -> []\n          | Some (docstring, _) -> docstring\n        in\n        if Debug.verbose () then\n          Printf.printf \"[sig_help_fn] Found loc item: %s.\\n\"\n            (Shared.typeToString typeExpr);\n        match\n          TypeUtils.extractFunctionType2 ~env ~package:full.package typeExpr\n        with\n        | args, _tRet, _ when args <> [] ->\n          Some (args, docstring, typeExpr, package, env, file)\n        | _ -> None)\n      | None ->\n        if Debug.verbose () then\n          Printf.printf \"[sig_help_fn] Found no loc item.\\n\";\n        None\n      | Some _ ->\n        if Debug.verbose () then\n          Printf.printf\n            \"[sig_help_fn] Found loc item, but not what was expected.\\n\";\n        None\n    in\n    match fnFromLocItem with\n    | Some fnFromLocItem -> Some fnFromLocItem\n    | None -> (\n      (* If nothing was found there, try using the unsaved completion engine *)\n      let completables =\n        let textOpt = Files.readFile currentFile in\n        match textOpt with\n        | None | Some \"\" -> None\n        | Some text -> (\n          (* Leverage the completion functionality to pull out the type of the identifier doing the function application.\n             This lets us leverage all of the smart work done in completions to find the correct type in many cases even\n             for files not saved yet. 
*)\n          match\n            CompletionFrontEnd.completionWithParser ~debug ~path ~posCursor:pos\n              ~currentFile ~text\n          with\n          | None -> None\n          | Some (completable, scope) ->\n            Some\n              ( completable\n                |> CompletionBackEnd.processCompletable ~debug ~full ~pos ~scope\n                     ~env ~forHover:true,\n                env,\n                package,\n                file ))\n      in\n      match completables with\n      | Some ({kind = Value type_expr; docstring} :: _, env, package, file) ->\n        let args, _, _ =\n          TypeUtils.extractFunctionType2 type_expr ~env ~package\n        in\n        Some (args, docstring, type_expr, package, env, file)\n      | _ -> None))\n\n(* Extracts all parameters from a parsed function signature *)\nlet extractParameters ~signature ~typeStrForParser ~labelPrefixLen =\n  match signature with\n  | [\n   ( {\n       Parsetree.psig_desc =\n         Psig_value {pval_type = {ptyp_desc = Ptyp_arrow _} as expr};\n     }\n   | {\n       psig_desc =\n         Psig_value\n           {\n             pval_type =\n               {\n                 ptyp_desc =\n                   Ptyp_constr\n                     ( {txt = Lident \"function$\"},\n                       [({ptyp_desc = Ptyp_arrow _} as expr); _] );\n               };\n           };\n     } );\n  ] ->\n    let rec extractParams expr params =\n      match expr with\n      | {\n       (* Gotcha: functions with multiple arugments are modelled as a series of single argument functions. 
*)\n       Parsetree.ptyp_desc =\n         Ptyp_arrow (argumentLabel, argumentTypeExpr, nextFunctionExpr);\n       ptyp_loc;\n      } ->\n        let startOffset =\n          ptyp_loc |> Loc.start\n          |> Pos.positionToOffset typeStrForParser\n          |> Option.get\n        in\n        let endOffset =\n          argumentTypeExpr.ptyp_loc |> Loc.end_\n          |> Pos.positionToOffset typeStrForParser\n          |> Option.get\n        in\n        (* The AST locations does not account for \"=?\" of optional arguments, so add that to the offset here if needed. *)\n        let endOffset =\n          match argumentLabel with\n          | Asttypes.Optional _ -> endOffset + 2\n          | _ -> endOffset\n        in\n        extractParams nextFunctionExpr\n          (params\n          @ [\n              ( argumentLabel,\n                (* Remove the label prefix offset here, since we're not showing\n                   that to the end user. *)\n                startOffset - labelPrefixLen,\n                endOffset - labelPrefixLen );\n            ])\n      | _ -> params\n    in\n    extractParams expr []\n  | _ -> []\n\n(* Finds what parameter is active, if any *)\nlet findActiveParameter ~argAtCursor ~args =\n  match argAtCursor with\n  | None -> (\n    (* If a function only has one, unlabelled argument, we can safely assume that's active whenever we're in the signature help for that function,\n       even if we technically didn't find anything at the cursor (which we don't for empty expressions). 
*)\n    match args with\n    | [(Asttypes.Nolabel, _)] -> Some 0\n    | _ -> None)\n  | Some (Unlabelled unlabelledArgumentIndex) ->\n    let index = ref 0 in\n    args\n    |> List.find_map (fun (label, _) ->\n           match label with\n           | Asttypes.Nolabel when !index = unlabelledArgumentIndex ->\n             Some !index\n           | _ ->\n             index := !index + 1;\n             None)\n  | Some (Labelled name) ->\n    let index = ref 0 in\n    args\n    |> List.find_map (fun (label, _) ->\n           match label with\n           | (Asttypes.Labelled labelName | Optional labelName)\n             when labelName = name ->\n             Some !index\n           | _ ->\n             index := !index + 1;\n             None)\n\ntype constructorInfo = {\n  docstring: string list;\n  name: string;\n  args: constructorArgs;\n}\n\nlet findConstructorArgs ~full ~env ~constructorName loc =\n  match\n    References.getLocItem ~debug:false ~full\n      ~pos:(Pos.ofLexing loc.Location.loc_end)\n  with\n  | None -> None\n  | Some {locType = Typed (_, typExpr, _)} -> (\n    match TypeUtils.extractType ~env ~package:full.package typExpr with\n    | Some ((Toption (_, TypeExpr t) as extractedType), _) -> (\n      match constructorName with\n      | \"Some\" ->\n        Some\n          {\n            name = \"Some\";\n            docstring =\n              [\n                Markdown.codeBlock\n                  (TypeUtils.extractedTypeToString extractedType);\n              ];\n            args = Args [(t, Location.none)];\n          }\n      | _ -> None)\n    | Some ((Tresult {okType; errorType} as extractedType), _) -> (\n      match constructorName with\n      | \"Ok\" ->\n        Some\n          {\n            name = \"Ok\";\n            docstring =\n              [\n                Markdown.codeBlock\n                  (TypeUtils.extractedTypeToString extractedType);\n              ];\n            args = Args [(okType, Location.none)];\n          }\n      | 
\"Error\" ->\n        Some\n          {\n            name = \"Error\";\n            docstring =\n              [\n                Markdown.codeBlock\n                  (TypeUtils.extractedTypeToString extractedType);\n              ];\n            args = Args [(errorType, Location.none)];\n          }\n      | _ -> None)\n    | Some (Tvariant {constructors}, _) ->\n      constructors\n      |> List.find_opt (fun (c : Constructor.t) ->\n             c.cname.txt = constructorName)\n      |> Option.map (fun (c : Constructor.t) ->\n             {docstring = c.docstring; name = c.cname.txt; args = c.args})\n    | _ -> None)\n  | _ -> None\n\nlet signatureHelp ~path ~pos ~currentFile ~debug ~allowForConstructorPayloads =\n  let textOpt = Files.readFile currentFile in\n  match textOpt with\n  | None | Some \"\" -> None\n  | Some text -> (\n    match Pos.positionToOffset text pos with\n    | None -> None\n    | Some offset -> (\n      let posBeforeCursor = Pos.posBeforeCursor pos in\n      let offsetNoWhite = Utils.skipWhite text (offset - 1) in\n      let firstCharBeforeCursorNoWhite =\n        if offsetNoWhite < String.length text && offsetNoWhite >= 0 then\n          Some text.[offsetNoWhite]\n        else None\n      in\n      let locHasCursor loc =\n        loc |> CursorPosition.locHasCursor ~pos:posBeforeCursor\n      in\n      let supportsMarkdownLinks = true in\n      let result = ref None in\n      let printThing thg =\n        match thg with\n        | `ConstructorExpr _ -> \"Constructor(expr)\"\n        | `ConstructorPat _ -> \"Constructor(pat)\"\n        | `FunctionCall _ -> \"FunctionCall\"\n      in\n      let setResult (loc, thing) =\n        match (thing, allowForConstructorPayloads) with\n        | (`ConstructorExpr _ | `ConstructorPat _), false -> ()\n        | _ -> (\n          match !result with\n          | None ->\n            if Debug.verbose () then\n              Printf.printf \"[sig_help_result] Setting because had none\\n\";\n            result 
:= Some (loc, thing)\n          | Some (currentLoc, currentThing)\n            when Pos.ofLexing loc.Location.loc_start\n                 > Pos.ofLexing currentLoc.Location.loc_start ->\n            result := Some (loc, thing);\n\n            if Debug.verbose () then\n              Printf.printf\n                \"[sig_help_result] Setting because loc of %s > then existing \\\n                 of %s\\n\"\n                (printThing thing) (printThing currentThing)\n          | Some (_, currentThing) ->\n            if Debug.verbose () then\n              Printf.printf\n                \"[sig_help_result] Doing nothing because loc of %s < then \\\n                 existing of %s\\n\"\n                (printThing thing) (printThing currentThing))\n      in\n      let searchForArgWithCursor ~isPipeExpr ~args =\n        let extractedArgs = extractExpApplyArgs ~args in\n        let argAtCursor =\n          let firstArgIndex = if isPipeExpr then 1 else 0 in\n          let unlabelledArgCount = ref firstArgIndex in\n          let lastUnlabelledArgBeforeCursor = ref firstArgIndex in\n          let argAtCursor_ =\n            extractedArgs\n            |> List.find_map (fun arg ->\n                   match arg.label with\n                   | None ->\n                     let currentUnlabelledArgCount = !unlabelledArgCount in\n                     unlabelledArgCount := currentUnlabelledArgCount + 1;\n                     (* An argument without a label is just the expression, so we can use that. 
*)\n                     if locHasCursor arg.exp.pexp_loc then\n                       Some (Unlabelled currentUnlabelledArgCount)\n                     else (\n                       (* If this unlabelled arg doesn't have the cursor, record\n                          it as the last seen unlabelled arg before the\n                          cursor.*)\n                       if posBeforeCursor >= (arg.exp.pexp_loc |> Loc.start)\n                       then\n                         lastUnlabelledArgBeforeCursor :=\n                           currentUnlabelledArgCount;\n                       None)\n                   | Some {name; posStart; posEnd} -> (\n                     (* Check for the label identifier itself having the cursor *)\n                     match\n                       pos |> CursorPosition.classifyPositions ~posStart ~posEnd\n                     with\n                     | HasCursor -> Some (Labelled name)\n                     | NoCursor | EmptyLoc -> (\n                       (* If we're not in the label, check the exp. Either the exp\n                          exists and has the cursor. Or the exp is a parser recovery\n                          node, in which case we assume that the parser recovery\n                          indicates that the cursor was here. 
*)\n                       match\n                         ( arg.exp.pexp_desc,\n                           arg.exp.pexp_loc\n                           |> CursorPosition.classifyLoc ~pos:posBeforeCursor )\n                       with\n                       | Pexp_extension ({txt = \"rescript.exprhole\"}, _), _\n                       | _, HasCursor ->\n                         Some (Labelled name)\n                       | _ -> None)))\n          in\n\n          match argAtCursor_ with\n          | None ->\n            Some\n              (Unlabelled\n                 (!lastUnlabelledArgBeforeCursor\n                 +\n                 if firstCharBeforeCursorNoWhite = Some ',' then 1\n                   (* If we found no argument with the cursor, we might still be\n                      able to complete for an unlabelled argument, if the char\n                      before the cursor is ',', like: `someFn(123, <com>)`\n                      complete for argument 2, or: `someFn(123, <com>, true)`\n                      complete for argument 2 as well. Adding 1 here accounts\n                      for the comma telling us that the users intent is to fill\n                      in the next argument. *)\n                 else 0))\n          | v -> v\n        in\n        (argAtCursor, extractedArgs)\n      in\n      let expr (iterator : Ast_iterator.iterator) (expr : Parsetree.expression)\n          =\n        (match expr with\n        (* Handle pipes, like someVar->someFunc(... 
*)\n        | {\n         pexp_desc =\n           Pexp_apply\n             ( {pexp_desc = Pexp_ident {txt = Lident (\"|.\" | \"|.u\")}},\n               [\n                 _;\n                 ( _,\n                   {\n                     pexp_desc =\n                       Pexp_apply (({pexp_desc = Pexp_ident _} as exp), args);\n                     pexp_loc;\n                   } );\n               ] );\n        }\n          when locHasCursor pexp_loc ->\n          let argAtCursor, extractedArgs =\n            searchForArgWithCursor ~isPipeExpr:true ~args\n          in\n          setResult\n            (exp.pexp_loc, `FunctionCall (argAtCursor, exp, extractedArgs))\n        (* Look for applying idents, like someIdent(...) *)\n        | {\n         pexp_desc = Pexp_apply (({pexp_desc = Pexp_ident _} as exp), args);\n         pexp_loc;\n        }\n          when locHasCursor pexp_loc ->\n          let argAtCursor, extractedArgs =\n            searchForArgWithCursor ~isPipeExpr:false ~args\n          in\n          setResult\n            (exp.pexp_loc, `FunctionCall (argAtCursor, exp, extractedArgs))\n        | {pexp_desc = Pexp_construct (lid, Some payloadExp); pexp_loc}\n          when locHasCursor payloadExp.pexp_loc\n               || CompletionExpressions.isExprHole payloadExp\n                  && locHasCursor pexp_loc ->\n          (* Constructor payloads *)\n          setResult (lid.loc, `ConstructorExpr (lid, payloadExp))\n        | _ -> ());\n        Ast_iterator.default_iterator.expr iterator expr\n      in\n      let pat (iterator : Ast_iterator.iterator) (pat : Parsetree.pattern) =\n        (match pat with\n        | {ppat_desc = Ppat_construct (lid, Some payloadPat)}\n          when locHasCursor payloadPat.ppat_loc ->\n          (* Constructor payloads *)\n          setResult (lid.loc, `ConstructorPat (lid, payloadPat))\n        | _ -> ());\n        Ast_iterator.default_iterator.pat iterator pat\n      in\n      let iterator = 
{Ast_iterator.default_iterator with expr; pat} in\n      let parser =\n        Res_driver.parsing_engine.parse_implementation ~for_printer:false\n      in\n      let {Res_driver.parsetree = structure} = parser ~filename:currentFile in\n      iterator.structure iterator structure |> ignore;\n      (* Handle function application, if found *)\n      match !result with\n      | Some (_, `FunctionCall (argAtCursor, exp, _extractedArgs)) -> (\n        (* Not looking for the cursor position after this, but rather the target function expression's loc. *)\n        let pos = exp.pexp_loc |> Loc.end_ in\n        match findFunctionType ~currentFile ~debug ~path ~pos with\n        | Some (args, docstring, type_expr, package, _env, file) ->\n          if debug then\n            Printf.printf \"argAtCursor: %s\\n\"\n              (match argAtCursor with\n              | None -> \"none\"\n              | Some (Labelled name) -> \"~\" ^ name\n              | Some (Unlabelled index) ->\n                \"unlabelled<\" ^ string_of_int index ^ \">\");\n\n          (* The LS protocol wants us to send both the full type signature (label) that the end user sees as the signature help, and all parameters in that label\n             in the form of a list of start/end character offsets. We leverage the parser to figure the offsets out by parsing the label, and extract the\n             offsets from the parser. *)\n\n          (* A full let binding with the type text is needed for the parser to be able to parse it.  
*)\n          let labelPrefix = \"let fn: \" in\n          let labelPrefixLen = String.length labelPrefix in\n          let fnTypeStr = Shared.typeToString type_expr in\n          let typeStrForParser = labelPrefix ^ fnTypeStr in\n          let {Res_driver.parsetree = signature} =\n            Res_driver.parse_interface_from_source ~for_printer:false\n              ~display_filename:\"<missing-file>\" ~source:typeStrForParser\n          in\n\n          let parameters =\n            extractParameters ~signature ~typeStrForParser ~labelPrefixLen\n          in\n          if debug then\n            Printf.printf \"extracted params: \\n%s\\n\"\n              (parameters\n              |> List.map (fun (_, start, end_) ->\n                     String.sub fnTypeStr start (end_ - start))\n              |> list);\n\n          (* Figure out the active parameter *)\n          let activeParameter = findActiveParameter ~argAtCursor ~args in\n\n          let paramUnlabelledArgCount = ref 0 in\n          Some\n            {\n              Protocol.signatures =\n                [\n                  {\n                    label = fnTypeStr;\n                    parameters =\n                      parameters\n                      |> List.map (fun (argLabel, start, end_) ->\n                             let paramArgCount = !paramUnlabelledArgCount in\n                             paramUnlabelledArgCount := paramArgCount + 1;\n                             let unlabelledArgCount = ref 0 in\n                             {\n                               Protocol.label = (start, end_);\n                               documentation =\n                                 (match\n                                    args\n                                    |> List.find_opt (fun (lbl, _) ->\n                                           let argCount = !unlabelledArgCount in\n                                           unlabelledArgCount := argCount + 1;\n                                           
match (lbl, argLabel) with\n                                           | ( Asttypes.Optional l1,\n                                               Asttypes.Optional l2 )\n                                             when l1 = l2 ->\n                                             true\n                                           | Labelled l1, Labelled l2\n                                             when l1 = l2 ->\n                                             true\n                                           | Nolabel, Nolabel\n                                             when paramArgCount = argCount ->\n                                             true\n                                           | _ -> false)\n                                  with\n                                 | None ->\n                                   {Protocol.kind = \"markdown\"; value = \"\"}\n                                 | Some (_, labelTypExpr) ->\n                                   {\n                                     Protocol.kind = \"markdown\";\n                                     value =\n                                       docsForLabel ~supportsMarkdownLinks ~file\n                                         ~package labelTypExpr;\n                                   });\n                             });\n                    documentation =\n                      (match List.nth_opt docstring 0 with\n                      | None -> None\n                      | Some docs ->\n                        Some {Protocol.kind = \"markdown\"; value = docs});\n                  };\n                ];\n              activeSignature = Some 0;\n              activeParameter =\n                (match activeParameter with\n                | None -> Some (-1)\n                | activeParameter -> activeParameter);\n            }\n        | _ -> None)\n      | Some (_, ((`ConstructorExpr (lid, _) | `ConstructorPat (lid, _)) as cs))\n        -> (\n        if Debug.verbose () then\n          
Printf.printf \"[signature_help] Found constructor!\\n\";\n        match Cmt.loadFullCmtFromPath ~path with\n        | None ->\n          if Debug.verbose () then\n            Printf.printf \"[signature_help] Could not load cmt\\n\";\n          None\n        | Some full -> (\n          let {file} = full in\n          let env = QueryEnv.fromFile file in\n          let constructorName = Longident.last lid.txt in\n          match\n            findConstructorArgs ~full ~env ~constructorName\n              {lid.loc with loc_start = lid.loc.loc_end}\n          with\n          | None ->\n            if Debug.verbose () then\n              Printf.printf \"[signature_help] Did not find constructor '%s'\\n\"\n                constructorName;\n            None\n          | Some constructor ->\n            let argParts =\n              match constructor.args with\n              | Args [] -> None\n              | InlineRecord fields ->\n                let offset = ref 0 in\n                Some\n                  (`InlineRecord\n                     (fields\n                     |> List.map (fun (field : field) ->\n                            let startOffset = !offset in\n                            let argText =\n                              Printf.sprintf \"%s%s: %s\" field.fname.txt\n                                (if field.optional then \"?\" else \"\")\n                                (Shared.typeToString\n                                   (if field.optional then\n                                      Utils.unwrapIfOption field.typ\n                                    else field.typ))\n                            in\n                            let endOffset =\n                              startOffset + String.length argText\n                            in\n                            offset := endOffset + String.length \", \";\n                            (argText, field, (startOffset, endOffset)))))\n              | Args [(typ, _)] ->\n                Some\n         
         (`SingleArg\n                     ( typ |> Shared.typeToString,\n                       docsForLabel ~file:full.file ~package:full.package\n                         ~supportsMarkdownLinks typ ))\n              | Args args ->\n                let offset = ref 0 in\n                Some\n                  (`TupleArg\n                     (args\n                     |> List.map (fun (typ, _) ->\n                            let startOffset = !offset in\n                            let argText = typ |> Shared.typeToString in\n                            let endOffset =\n                              startOffset + String.length argText\n                            in\n                            offset := endOffset + String.length \", \";\n                            ( argText,\n                              docsForLabel ~file:full.file ~package:full.package\n                                ~supportsMarkdownLinks typ,\n                              (startOffset, endOffset) ))))\n            in\n            let label =\n              constructor.name ^ \"(\"\n              ^ (match argParts with\n                | None -> \"\"\n                | Some (`InlineRecord fields) ->\n                  \"{\"\n                  ^ (fields\n                    |> List.map (fun (argText, _, _) -> argText)\n                    |> String.concat \", \")\n                  ^ \"}\"\n                | Some (`SingleArg (arg, _)) -> arg\n                | Some (`TupleArg items) ->\n                  items\n                  |> List.map (fun (argText, _, _) -> argText)\n                  |> String.concat \", \")\n              ^ \")\"\n            in\n            let activeParameter =\n              match cs with\n              | `ConstructorExpr (_, {pexp_desc = Pexp_tuple items}) -> (\n                let idx = ref 0 in\n                let tupleItemWithCursor =\n                  items\n                  |> List.find_map (fun (item : Parsetree.expression) ->\n                      
   let currentIndex = !idx in\n                         idx := currentIndex + 1;\n                         if locHasCursor item.pexp_loc then Some currentIndex\n                         else None)\n                in\n                match tupleItemWithCursor with\n                | None -> -1\n                | Some i -> i)\n              | `ConstructorExpr (_, {pexp_desc = Pexp_record (fields, _)}) -> (\n                let fieldNameWithCursor =\n                  fields\n                  |> List.find_map\n                       (fun\n                         (({loc; txt}, expr) :\n                           Longident.t Location.loc * Parsetree.expression)\n                       ->\n                         if\n                           posBeforeCursor >= Pos.ofLexing loc.loc_start\n                           && posBeforeCursor\n                              <= Pos.ofLexing expr.pexp_loc.loc_end\n                         then Some (Longident.last txt)\n                         else None)\n                in\n                match (fieldNameWithCursor, argParts) with\n                | Some fieldName, Some (`InlineRecord fields) ->\n                  let idx = ref 0 in\n                  let fieldIndex = ref (-1) in\n                  fields\n                  |> List.iter (fun (_, field, _) ->\n                         idx := !idx + 1;\n                         let currentIndex = !idx in\n                         if fieldName = field.fname.txt then\n                           fieldIndex := currentIndex\n                         else ());\n                  !fieldIndex\n                | _ -> -1)\n              | `ConstructorExpr (_, expr) when locHasCursor expr.pexp_loc -> 0\n              | `ConstructorPat (_, {ppat_desc = Ppat_tuple items}) -> (\n                let idx = ref 0 in\n                let tupleItemWithCursor =\n                  items\n                  |> List.find_map (fun (item : Parsetree.pattern) ->\n                         let 
currentIndex = !idx in\n                         idx := currentIndex + 1;\n                         if locHasCursor item.ppat_loc then Some currentIndex\n                         else None)\n                in\n                match tupleItemWithCursor with\n                | None -> -1\n                | Some i -> i)\n              | `ConstructorPat (_, {ppat_desc = Ppat_record (fields, _)}) -> (\n                let fieldNameWithCursor =\n                  fields\n                  |> List.find_map\n                       (fun\n                         (({loc; txt}, pat) :\n                           Longident.t Location.loc * Parsetree.pattern)\n                       ->\n                         if\n                           posBeforeCursor >= Pos.ofLexing loc.loc_start\n                           && posBeforeCursor\n                              <= Pos.ofLexing pat.ppat_loc.loc_end\n                         then Some (Longident.last txt)\n                         else None)\n                in\n                match (fieldNameWithCursor, argParts) with\n                | Some fieldName, Some (`InlineRecord fields) ->\n                  let idx = ref 0 in\n                  let fieldIndex = ref (-1) in\n                  fields\n                  |> List.iter (fun (_, field, _) ->\n                         idx := !idx + 1;\n                         let currentIndex = !idx in\n                         if fieldName = field.fname.txt then\n                           fieldIndex := currentIndex\n                         else ());\n                  !fieldIndex\n                | _ -> -1)\n              | `ConstructorPat (_, pat) when locHasCursor pat.ppat_loc -> 0\n              | _ -> -1\n            in\n\n            let constructorNameLength = String.length constructor.name in\n            let params =\n              match argParts with\n              | None -> []\n              | Some (`SingleArg (_, docstring)) ->\n                [\n                  {\n      
              Protocol.label =\n                      (constructorNameLength + 1, String.length label - 1);\n                    documentation =\n                      {Protocol.kind = \"markdown\"; value = docstring};\n                  };\n                ]\n              | Some (`InlineRecord fields) ->\n                (* Account for leading '({' *)\n                let baseOffset = constructorNameLength + 2 in\n                {\n                  Protocol.label = (0, 0);\n                  documentation = {Protocol.kind = \"markdown\"; value = \"\"};\n                }\n                :: (fields\n                   |> List.map (fun (_, (field : field), (start, end_)) ->\n                          {\n                            Protocol.label =\n                              (baseOffset + start, baseOffset + end_);\n                            documentation =\n                              {\n                                Protocol.kind = \"markdown\";\n                                value = field.docstring |> String.concat \"\\n\";\n                              };\n                          }))\n              | Some (`TupleArg items) ->\n                (* Account for leading '(' *)\n                let baseOffset = constructorNameLength + 1 in\n                items\n                |> List.map (fun (_, docstring, (start, end_)) ->\n                       {\n                         Protocol.label = (baseOffset + start, baseOffset + end_);\n                         documentation =\n                           {Protocol.kind = \"markdown\"; value = docstring};\n                       })\n            in\n            Some\n              {\n                Protocol.signatures =\n                  [\n                    {\n                      label;\n                      parameters = params;\n                      documentation =\n                        (match List.nth_opt constructor.docstring 0 with\n                        | None -> None\n               
         | Some docs ->\n                          Some {Protocol.kind = \"markdown\"; value = docs});\n                    };\n                  ];\n                activeSignature = Some 0;\n                activeParameter = Some activeParameter;\n              }))\n      | _ -> None))\n"
  },
  {
    "path": "analysis/src/TypeUtils.ml",
    "content": "open SharedTypes\n\nlet modulePathFromEnv env = env.QueryEnv.file.moduleName :: List.rev env.pathRev\n\nlet fullTypeIdFromDecl ~env ~name ~modulePath =\n  env.QueryEnv.file.moduleName :: ModulePath.toPath modulePath name\n  |> String.concat \".\"\n\nlet debugLogTypeArgContext {env; typeArgs; typeParams} =\n  Printf.sprintf \"Type arg context. env: %s, typeArgs: %s, typeParams: %s\\n\"\n    (Debug.debugPrintEnv env)\n    (typeArgs |> List.map Shared.typeToString |> String.concat \", \")\n    (typeParams |> List.map Shared.typeToString |> String.concat \", \")\n\n(** Checks whether this type has any uninstantiated type parameters. *)\nlet rec hasTvar (ty : Types.type_expr) : bool =\n  match ty.desc with\n  | Tvar _ -> true\n  | Tarrow (_, ty1, ty2, _) -> hasTvar ty1 || hasTvar ty2\n  | Ttuple tyl -> List.exists hasTvar tyl\n  | Tconstr (_, tyl, _) -> List.exists hasTvar tyl\n  | Tobject (ty, _) -> hasTvar ty\n  | Tfield (_, _, ty1, ty2) -> hasTvar ty1 || hasTvar ty2\n  | Tnil -> false\n  | Tlink ty -> hasTvar ty\n  | Tsubst ty -> hasTvar ty\n  | Tvariant {row_fields; _} ->\n    List.exists\n      (function\n        | _, Types.Rpresent (Some ty) -> hasTvar ty\n        | _, Reither (_, tyl, _, _) -> List.exists hasTvar tyl\n        | _ -> false)\n      row_fields\n  | Tunivar _ -> true\n  | Tpoly (ty, tyl) -> hasTvar ty || List.exists hasTvar tyl\n  | Tpackage (_, _, tyl) -> List.exists hasTvar tyl\n\nlet findTypeViaLoc ~full ~debug (loc : Location.t) =\n  match References.getLocItem ~full ~pos:(Pos.ofLexing loc.loc_end) ~debug with\n  | Some {locType = Typed (_, typExpr, _)} -> Some typExpr\n  | _ -> None\n\nlet rec pathFromTypeExpr (t : Types.type_expr) =\n  match t.desc with\n  | Tconstr (Pident {name = \"function$\"}, [t; _], _) -> pathFromTypeExpr t\n  | Tconstr (path, _typeArgs, _)\n  | Tlink {desc = Tconstr (path, _typeArgs, _)}\n  | Tsubst {desc = Tconstr (path, _typeArgs, _)}\n  | Tpoly ({desc = Tconstr (path, _typeArgs, _)}, []) ->\n    Some 
path\n  | _ -> None\n\nlet printRecordFromFields ?name (fields : field list) =\n  (match name with\n  | None -> \"\"\n  | Some name -> \"type \" ^ name ^ \" = \")\n  ^ \"{\"\n  ^ (fields\n    |> List.map (fun f -> f.fname.txt ^ \": \" ^ Shared.typeToString f.typ)\n    |> String.concat \", \")\n  ^ \"}\"\n\nlet rec extractedTypeToString ?(nameOnly = false) ?(inner = false) = function\n  | Tuple (_, _, typ) | Tpolyvariant {typeExpr = typ} | Tfunction {typ} ->\n    if inner then\n      try typ |> pathFromTypeExpr |> Option.get |> SharedTypes.pathIdentToString\n      with _ -> \"\"\n    else Shared.typeToString typ\n  | Trecord {definition; fields} ->\n    let name =\n      match definition with\n      | `TypeExpr typ -> (\n        try\n          typ |> pathFromTypeExpr |> Option.get |> SharedTypes.pathIdentToString\n        with _ -> \"\")\n      | `NameOnly name -> name\n    in\n    if inner || nameOnly then name else printRecordFromFields ~name fields\n  | Tbool _ -> \"bool\"\n  | Tstring _ -> \"string\"\n  | TtypeT _ -> \"type t\"\n  | Tarray (_, TypeExpr innerTyp) ->\n    \"array<\" ^ Shared.typeToString innerTyp ^ \">\"\n  | Tarray (_, ExtractedType innerTyp) ->\n    \"array<\" ^ extractedTypeToString ~inner:true innerTyp ^ \">\"\n  | Toption (_, TypeExpr innerTyp) ->\n    \"option<\" ^ Shared.typeToString innerTyp ^ \">\"\n  | Tresult {okType; errorType} ->\n    \"result<\" ^ Shared.typeToString okType ^ \", \"\n    ^ Shared.typeToString errorType\n    ^ \">\"\n  | Toption (_, ExtractedType innerTyp) ->\n    \"option<\" ^ extractedTypeToString ~inner:true innerTyp ^ \">\"\n  | Tpromise (_, innerTyp) -> \"promise<\" ^ Shared.typeToString innerTyp ^ \">\"\n  | Tvariant {variantDecl; variantName} ->\n    if inner || nameOnly then variantName\n    else Shared.declToString variantName variantDecl\n  | TinlineRecord {fields} -> printRecordFromFields fields\n  | Texn _ -> \"exn\"\n\nlet getExtractedType maybeRes =\n  match maybeRes with\n  | None -> None\n  | Some 
(extractedType, _) -> Some extractedType\n\nlet instantiateType ~typeParams ~typeArgs (t : Types.type_expr) =\n  if typeParams = [] || typeArgs = [] then t\n  else\n    let rec applySub tp ta t =\n      match (tp, ta) with\n      | t1 :: tRest1, t2 :: tRest2 ->\n        if t1 = t then t2 else applySub tRest1 tRest2 t\n      | [], _ | _, [] -> t\n    in\n    let rec loop (t : Types.type_expr) =\n      match t.desc with\n      | Tlink t -> loop t\n      | Tvar _ -> applySub typeParams typeArgs t\n      | Tunivar _ -> t\n      | Tconstr (path, args, memo) ->\n        {t with desc = Tconstr (path, args |> List.map loop, memo)}\n      | Tsubst t -> loop t\n      | Tvariant rd -> {t with desc = Tvariant (rowDesc rd)}\n      | Tnil -> t\n      | Tarrow (lbl, t1, t2, c) ->\n        {t with desc = Tarrow (lbl, loop t1, loop t2, c)}\n      | Ttuple tl -> {t with desc = Ttuple (tl |> List.map loop)}\n      | Tobject (t, r) -> {t with desc = Tobject (loop t, r)}\n      | Tfield (n, k, t1, t2) -> {t with desc = Tfield (n, k, loop t1, loop t2)}\n      | Tpoly (t, []) -> loop t\n      | Tpoly (t, tl) -> {t with desc = Tpoly (loop t, tl |> List.map loop)}\n      | Tpackage (p, l, tl) ->\n        {t with desc = Tpackage (p, l, tl |> List.map loop)}\n    and rowDesc (rd : Types.row_desc) =\n      let row_fields =\n        rd.row_fields |> List.map (fun (l, rf) -> (l, rowField rf))\n      in\n      let row_more = loop rd.row_more in\n      let row_name =\n        match rd.row_name with\n        | None -> None\n        | Some (p, tl) -> Some (p, tl |> List.map loop)\n      in\n      {rd with row_fields; row_more; row_name}\n    and rowField (rf : Types.row_field) =\n      match rf with\n      | Rpresent None -> rf\n      | Rpresent (Some t) -> Rpresent (Some (loop t))\n      | Reither (b1, tl, b2, r) -> Reither (b1, tl |> List.map loop, b2, r)\n      | Rabsent -> Rabsent\n    in\n    loop t\n\nlet instantiateType2 ?(typeArgContext : typeArgContext option)\n    (t : Types.type_expr) 
=\n  match typeArgContext with\n  | None | Some {typeArgs = []} | Some {typeParams = []} -> t\n  | Some {typeArgs; typeParams} ->\n    let rec applySub tp ta name =\n      match (tp, ta) with\n      | {Types.desc = Tvar (Some varName)} :: tRest1, t2 :: tRest2 ->\n        if varName = name then t2 else applySub tRest1 tRest2 name\n      | _ :: tRest1, _ :: tRest2 -> applySub tRest1 tRest2 name\n      | [], _ | _, [] -> t\n    in\n\n    let rec loop (t : Types.type_expr) =\n      match t.desc with\n      | Tlink t -> loop t\n      | Tvar (Some name) -> applySub typeParams typeArgs name\n      | Tvar _ -> t\n      | Tunivar _ -> t\n      | Tconstr (path, args, memo) ->\n        {t with desc = Tconstr (path, args |> List.map loop, memo)}\n      | Tsubst t -> loop t\n      | Tvariant rd -> {t with desc = Tvariant (rowDesc rd)}\n      | Tnil -> t\n      | Tarrow (lbl, t1, t2, c) ->\n        {t with desc = Tarrow (lbl, loop t1, loop t2, c)}\n      | Ttuple tl -> {t with desc = Ttuple (tl |> List.map loop)}\n      | Tobject (t, r) -> {t with desc = Tobject (loop t, r)}\n      | Tfield (n, k, t1, t2) -> {t with desc = Tfield (n, k, loop t1, loop t2)}\n      | Tpoly (t, []) -> loop t\n      | Tpoly (t, tl) -> {t with desc = Tpoly (loop t, tl |> List.map loop)}\n      | Tpackage (p, l, tl) ->\n        {t with desc = Tpackage (p, l, tl |> List.map loop)}\n    and rowDesc (rd : Types.row_desc) =\n      let row_fields =\n        rd.row_fields |> List.map (fun (l, rf) -> (l, rowField rf))\n      in\n      let row_more = loop rd.row_more in\n      let row_name =\n        match rd.row_name with\n        | None -> None\n        | Some (p, tl) -> Some (p, tl |> List.map loop)\n      in\n      {rd with row_fields; row_more; row_name}\n    and rowField (rf : Types.row_field) =\n      match rf with\n      | Rpresent None -> rf\n      | Rpresent (Some t) -> Rpresent (Some (loop t))\n      | Reither (b1, tl, b2, r) -> Reither (b1, tl |> List.map loop, b2, r)\n      | Rabsent -> Rabsent\n  
  in\n    loop t\n\nlet rec extractRecordType ~env ~package (t : Types.type_expr) =\n  match t.desc with\n  | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> extractRecordType ~env ~package t1\n  | Tconstr (path, typeArgs, _) -> (\n    match References.digConstructor ~env ~package path with\n    | Some (env, ({item = {kind = Record fields}} as typ)) ->\n      let typeParams = typ.item.decl.type_params in\n      let fields =\n        fields\n        |> List.map (fun field ->\n               let fieldTyp =\n                 field.typ |> instantiateType ~typeParams ~typeArgs\n               in\n               {field with typ = fieldTyp})\n      in\n      Some (env, fields, typ)\n    | Some\n        ( env,\n          {item = {decl = {type_manifest = Some t1; type_params = typeParams}}}\n        ) ->\n      let t1 = t1 |> instantiateType ~typeParams ~typeArgs in\n      extractRecordType ~env ~package t1\n    | _ -> None)\n  | _ -> None\n\nlet rec extractObjectType ~env ~package (t : Types.type_expr) =\n  match t.desc with\n  | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> extractObjectType ~env ~package t1\n  | Tobject (tObj, _) -> Some (env, tObj)\n  | Tconstr (path, typeArgs, _) -> (\n    match References.digConstructor ~env ~package path with\n    | Some\n        ( env,\n          {item = {decl = {type_manifest = Some t1; type_params = typeParams}}}\n        ) ->\n      let t1 = t1 |> instantiateType ~typeParams ~typeArgs in\n      extractObjectType ~env ~package t1\n    | _ -> None)\n  | _ -> None\n\nlet rec extractFunctionType ~env ~package typ =\n  let rec loop ~env acc (t : Types.type_expr) =\n    match t.desc with\n    | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> loop ~env acc t1\n    | Tarrow (label, tArg, tRet, _) -> loop ~env ((label, tArg) :: acc) tRet\n    | Tconstr (Pident {name = \"function$\"}, [t; _], _) ->\n      extractFunctionType ~env ~package t\n    | Tconstr (path, typeArgs, _) -> (\n      match References.digConstructor ~env ~package path with\n      | Some\n  
        ( env,\n            {\n              item = {decl = {type_manifest = Some t1; type_params = typeParams}};\n            } ) ->\n        let t1 = t1 |> instantiateType ~typeParams ~typeArgs in\n        loop ~env acc t1\n      | _ -> (List.rev acc, t))\n    | _ -> (List.rev acc, t)\n  in\n  loop ~env [] typ\n\nlet rec extractFunctionTypeWithEnv ~env ~package typ =\n  let rec loop ~env acc (t : Types.type_expr) =\n    match t.desc with\n    | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> loop ~env acc t1\n    | Tarrow (label, tArg, tRet, _) -> loop ~env ((label, tArg) :: acc) tRet\n    | Tconstr (Pident {name = \"function$\"}, [t; _], _) ->\n      extractFunctionTypeWithEnv ~env ~package t\n    | Tconstr (path, typeArgs, _) -> (\n      match References.digConstructor ~env ~package path with\n      | Some\n          ( _env,\n            {\n              item = {decl = {type_manifest = Some t1; type_params = typeParams}};\n            } ) ->\n        let t1 = t1 |> instantiateType ~typeParams ~typeArgs in\n        loop ~env acc t1\n      | Some _ -> (List.rev acc, t, env)\n      | _ -> (List.rev acc, t, env))\n    | _ -> (List.rev acc, t, env)\n  in\n  loop ~env [] typ\n\nlet maybeSetTypeArgCtx ?typeArgContextFromTypeManifest ~typeParams ~typeArgs env\n    =\n  match typeArgContextFromTypeManifest with\n  | Some typeArgContextFromTypeManifest -> Some typeArgContextFromTypeManifest\n  | None ->\n    let typeArgContext =\n      if List.length typeParams > 0 then Some {env; typeParams; typeArgs}\n      else None\n    in\n    (match typeArgContext with\n    | None -> ()\n    | Some typeArgContext ->\n      if Debug.verbose () then\n        Printf.printf \"[#type_arg_ctx]--> setting new type arg ctx: %s\"\n          (debugLogTypeArgContext typeArgContext));\n    typeArgContext\n\n(* TODO(env-stuff) Maybe this could be removed entirely if we can guarantee that we don't have to look up functions from in here. 
*)\nlet rec extractFunctionType2 ?typeArgContext ~env ~package typ =\n  let rec loop ?typeArgContext ~env acc (t : Types.type_expr) =\n    match t.desc with\n    | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> loop ?typeArgContext ~env acc t1\n    | Tarrow (label, tArg, tRet, _) ->\n      loop ?typeArgContext ~env ((label, tArg) :: acc) tRet\n    | Tconstr (Pident {name = \"function$\"}, [t; _], _) ->\n      extractFunctionType2 ?typeArgContext ~env ~package t\n    | Tconstr (path, typeArgs, _) -> (\n      match References.digConstructor ~env ~package path with\n      | Some\n          ( env,\n            {\n              item = {decl = {type_manifest = Some t1; type_params = typeParams}};\n            } ) ->\n        let typeArgContext = maybeSetTypeArgCtx ~typeParams ~typeArgs env in\n        loop ?typeArgContext ~env acc t1\n      | _ -> (List.rev acc, t, typeArgContext))\n    | _ -> (List.rev acc, t, typeArgContext)\n  in\n  loop ?typeArgContext ~env [] typ\n\nlet rec extractType ?(printOpeningDebug = true)\n    ?(typeArgContext : typeArgContext option)\n    ?(typeArgContextFromTypeManifest : typeArgContext option) ~env ~package\n    (t : Types.type_expr) =\n  let maybeSetTypeArgCtx = maybeSetTypeArgCtx ?typeArgContextFromTypeManifest in\n  if Debug.verbose () && printOpeningDebug then\n    Printf.printf\n      \"[extract_type]--> starting extraction of type: %s, in env: %s. 
Has type \\\n       arg ctx: %b\\n\"\n      (Shared.typeToString t) (Debug.debugPrintEnv env)\n      (Option.is_some typeArgContext);\n  (match typeArgContext with\n  | None -> ()\n  | Some typeArgContext ->\n    if Debug.verbose () && printOpeningDebug then\n      Printf.printf \"[extract_type]--> %s\"\n        (debugLogTypeArgContext typeArgContext));\n  let instantiateType = instantiateType2 in\n  match t.desc with\n  | Tlink t1 | Tsubst t1 | Tpoly (t1, []) ->\n    extractType ?typeArgContext ~printOpeningDebug:false ~env ~package t1\n  | Tconstr (Path.Pident {name = \"option\"}, [payloadTypeExpr], _) ->\n    Some (Toption (env, TypeExpr payloadTypeExpr), typeArgContext)\n  | Tconstr (Path.Pident {name = \"promise\"}, [payloadTypeExpr], _) ->\n    Some (Tpromise (env, payloadTypeExpr), typeArgContext)\n  | Tconstr (Path.Pident {name = \"array\"}, [payloadTypeExpr], _) ->\n    Some (Tarray (env, TypeExpr payloadTypeExpr), typeArgContext)\n  | Tconstr (Path.Pident {name = \"result\"}, [okType; errorType], _) ->\n    Some (Tresult {env; okType; errorType}, typeArgContext)\n  | Tconstr (Path.Pident {name = \"bool\"}, [], _) ->\n    Some (Tbool env, typeArgContext)\n  | Tconstr (Path.Pident {name = \"string\"}, [], _) ->\n    Some (Tstring env, typeArgContext)\n  | Tconstr (Path.Pident {name = \"exn\"}, [], _) ->\n    Some (Texn env, typeArgContext)\n  | Tconstr (Pident {name = \"function$\"}, [t; _], _) -> (\n    match extractFunctionType2 ?typeArgContext t ~env ~package with\n    | args, tRet, typeArgContext when args <> [] ->\n      Some\n        ( Tfunction {env; args; typ = t; uncurried = true; returnType = tRet},\n          typeArgContext )\n    | _args, _tRet, _typeArgContext -> None)\n  | Tarrow _ -> (\n    match extractFunctionType2 ?typeArgContext t ~env ~package with\n    | args, tRet, typeArgContext when args <> [] ->\n      Some\n        ( Tfunction {env; args; typ = t; uncurried = false; returnType = tRet},\n          typeArgContext )\n    | _args, 
_tRet, _typeArgContext -> None)\n  | Tconstr (path, typeArgs, _) -> (\n    if Debug.verbose () then\n      Printf.printf \"[extract_type]--> digging for type %s in %s\\n\"\n        (Path.name path) (Debug.debugPrintEnv env);\n    match References.digConstructor ~env ~package path with\n    | Some\n        ( envFromDeclaration,\n          {item = {decl = {type_manifest = Some t1; type_params}}} ) ->\n      if Debug.verbose () then\n        print_endline \"[extract_type]--> found type manifest\";\n\n      (* Type manifests inherit the last type args ctx that wasn't for a type manifest.\n         This is because the manifest itself doesn't have type args and an env that can\n         be used to instantiate. *)\n      let typeArgContext =\n        maybeSetTypeArgCtx ~typeParams:type_params ~typeArgs env\n      in\n      t1\n      |> extractType ?typeArgContextFromTypeManifest:typeArgContext\n           ~env:envFromDeclaration ~package\n    | Some (envFromItem, {name; item = {decl; kind = Type.Variant constructors}})\n      ->\n      if Debug.verbose () then print_endline \"[extract_type]--> found variant\";\n      let typeArgContext =\n        maybeSetTypeArgCtx ~typeParams:decl.type_params ~typeArgs env\n      in\n      Some\n        ( Tvariant\n            {\n              env = envFromItem;\n              constructors;\n              variantName = name.txt;\n              variantDecl = decl;\n            },\n          typeArgContext )\n    | Some (envFromDeclaration, {item = {kind = Record fields; decl}}) ->\n      if Debug.verbose () then print_endline \"[extract_type]--> found record\";\n      (* Need to create a new type arg context here because we're sending along a type expr that might have type vars. 
*)\n      let typeArgContext =\n        maybeSetTypeArgCtx ~typeParams:decl.type_params ~typeArgs env\n      in\n      Some\n        ( Trecord {env = envFromDeclaration; fields; definition = `TypeExpr t},\n          typeArgContext )\n    | Some (envFromDeclaration, {item = {name = \"t\"; decl = {type_params}}}) ->\n      let typeArgContext =\n        maybeSetTypeArgCtx ~typeParams:type_params ~typeArgs env\n      in\n      Some (TtypeT {env = envFromDeclaration; path}, typeArgContext)\n    | None ->\n      if Debug.verbose () then\n        print_endline \"[extract_type]--> found nothing when digging\";\n      None\n    | _ ->\n      if Debug.verbose () then\n        print_endline \"[extract_type]--> found something else when digging\";\n      None)\n  | Ttuple expressions -> Some (Tuple (env, expressions, t), typeArgContext)\n  | Tvariant {row_fields} ->\n    let constructors =\n      row_fields\n      |> List.map (fun (label, field) ->\n             {\n               name = label;\n               displayName = Utils.printMaybeExoticIdent ~allowUident:true label;\n               args =\n                 (* Multiple arguments are represented as a Ttuple, while a single argument is just the type expression itself. *)\n                 (match field with\n                 | Types.Rpresent (Some typeExpr) -> (\n                   match typeExpr.desc with\n                   | Ttuple args -> args\n                   | _ -> [typeExpr])\n                 | _ -> []);\n             })\n    in\n    Some (Tpolyvariant {env; constructors; typeExpr = t}, typeArgContext)\n  | Tvar (Some varName) -> (\n    if Debug.verbose () then\n      Printf.printf\n        \"[extract_type]--> found type variable: '%s. 
Trying to instantiate %s\"\n        varName\n        (match typeArgContext with\n        | None -> \"with no type args ctx\\n\"\n        | Some typeArgContext ->\n          Printf.sprintf \"with %s\" (debugLogTypeArgContext typeArgContext));\n\n    let instantiated = t |> instantiateType ?typeArgContext in\n    let rec extractInstantiated t =\n      match t.Types.desc with\n      | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> extractInstantiated t1\n      | _ -> t\n    in\n    match extractInstantiated instantiated with\n    | {desc = Tvar _} ->\n      if Debug.verbose () then\n        Printf.printf \"[extract_type]--> could not instantiate '%s. Skipping.\\n\"\n          varName;\n      None\n    | _ ->\n      if Debug.verbose () then\n        Printf.printf\n          \"[extract_type]--> SUCCEEDED instantiation, new type is: %s\\n\"\n          (Shared.typeToString instantiated);\n\n      (* Use the env from instantiation if we managed to instantiate the type param *)\n      let nextEnv =\n        match typeArgContext with\n        | Some {env} -> env\n        | None -> env\n      in\n      instantiated |> extractType ?typeArgContext ~env:nextEnv ~package)\n  | _ ->\n    if Debug.verbose () then print_endline \"[extract_type]--> miss\";\n    None\n\nlet isFunctionType ~env ~package t =\n  match extractType ~env ~package t with\n  | Some (Tfunction _, _) -> true\n  | _ -> false\n\nlet findReturnTypeOfFunctionAtLoc loc ~(env : QueryEnv.t) ~full ~debug =\n  match References.getLocItem ~full ~pos:(loc |> Loc.end_) ~debug with\n  | Some {locType = Typed (_, typExpr, _)} -> (\n    match extractFunctionType ~env ~package:full.package typExpr with\n    | args, tRet when args <> [] -> Some tRet\n    | _ -> None)\n  | _ -> None\n\nlet rec digToRelevantTemplateNameType ~env ~package ?(suffix = \"\")\n    (t : Types.type_expr) =\n  match t.desc with\n  | Tlink t1 | Tsubst t1 | Tpoly (t1, []) ->\n    digToRelevantTemplateNameType ~suffix ~env ~package t1\n  | Tconstr (Path.Pident 
{name = \"option\"}, [t1], _) ->\n    digToRelevantTemplateNameType ~suffix ~env ~package t1\n  | Tconstr (Path.Pident {name = \"array\"}, [t1], _) ->\n    digToRelevantTemplateNameType ~suffix:\"s\" ~env ~package t1\n  | Tconstr (path, _, _) -> (\n    match References.digConstructor ~env ~package path with\n    | Some (env, {item = {decl = {type_manifest = Some typ}}}) ->\n      digToRelevantTemplateNameType ~suffix ~env ~package typ\n    | _ -> (t, suffix, env))\n  | _ -> (t, suffix, env)\n\nlet rec resolveTypeForPipeCompletion ~env ~package ~lhsLoc ~full\n    (t : Types.type_expr) =\n  (* If the type we're completing on is a type parameter, we won't be able to\n     do completion unless we know what that type parameter is compiled as.\n     This attempts to look up the compiled type for that type parameter by\n     looking for compiled information at the loc of that expression. *)\n  let typFromLoc =\n    match t with\n    | {Types.desc = Tvar _} ->\n      findReturnTypeOfFunctionAtLoc lhsLoc ~env ~full ~debug:false\n    | _ -> None\n  in\n  match typFromLoc with\n  | Some ({desc = Tvar _} as t) -> (env, t)\n  | Some typFromLoc ->\n    typFromLoc |> resolveTypeForPipeCompletion ~lhsLoc ~env ~package ~full\n  | None ->\n    let rec digToRelevantType ~env ~package (t : Types.type_expr) =\n      match t.desc with\n      | Tlink t1 | Tsubst t1 | Tpoly (t1, []) ->\n        digToRelevantType ~env ~package t1\n      (* Don't descend into types named \"t\". Type t is a convention in the ReScript ecosystem. 
*)\n      | Tconstr (path, _, _) when path |> Path.last = \"t\" -> (env, t)\n      | Tconstr (path, _, _) -> (\n        match References.digConstructor ~env ~package path with\n        | Some (env, {item = {decl = {type_manifest = Some typ}}}) ->\n          digToRelevantType ~env ~package typ\n        | _ -> (env, t))\n      | _ -> (env, t)\n    in\n    digToRelevantType ~env ~package t\n\nlet extractTypeFromResolvedType (typ : Type.t) ~env ~full =\n  match typ.kind with\n  | Tuple items -> Some (Tuple (env, items, Ctype.newty (Ttuple items)))\n  | Record fields ->\n    Some (Trecord {env; fields; definition = `NameOnly typ.name})\n  | Variant constructors ->\n    Some\n      (Tvariant\n         {env; constructors; variantName = typ.name; variantDecl = typ.decl})\n  | Abstract _ | Open -> (\n    match typ.decl.type_manifest with\n    | None -> None\n    | Some t -> t |> extractType ~env ~package:full.package |> getExtractedType)\n\n(** The context we just came from as we resolve the nested structure. *)\ntype ctx = Rfield of string  (** A record field of name *)\n\nlet rec resolveNested ?typeArgContext ~env ~full ~nested ?ctx\n    (typ : completionType) =\n  let extractType = extractType ?typeArgContext in\n  if Debug.verbose () then\n    Printf.printf\n      \"[nested]--> running nested in env: %s. 
Has type arg ctx: %b\\n\"\n      (Debug.debugPrintEnv env)\n      (Option.is_some typeArgContext);\n  (match typeArgContext with\n  | None -> ()\n  | Some typeArgContext ->\n    if Debug.verbose () then\n      Printf.printf \"[nested]--> %s\" (debugLogTypeArgContext typeArgContext));\n  match nested with\n  | [] ->\n    if Debug.verbose () then\n      print_endline \"[nested]--> reached end of pattern, returning type\";\n    Some\n      ( typ,\n        env,\n        (match ctx with\n        | None -> None\n        | Some (Rfield fieldName) ->\n          Some (Completable.CameFromRecordField fieldName)),\n        typeArgContext )\n  | patternPath :: nested -> (\n    match (patternPath, typ) with\n    | Completable.NTupleItem {itemNum}, Tuple (env, tupleItems, _) -> (\n      if Debug.verbose () then\n        print_endline \"[nested]--> trying to move into tuple\";\n      match List.nth_opt tupleItems itemNum with\n      | None ->\n        if Debug.verbose () then\n          print_endline \"[nested]--> tuple element not found\";\n        None\n      | Some typ ->\n        typ\n        |> extractType ~env ~package:full.package\n        |> Utils.Option.flatMap (fun (typ, typeArgContext) ->\n               typ |> resolveNested ?typeArgContext ~env ~full ~nested))\n    | ( NFollowRecordField {fieldName},\n        (TinlineRecord {env; fields} | Trecord {env; fields}) ) -> (\n      if Debug.verbose () then\n        print_endline \"[nested]--> trying to move into record field\";\n      match\n        fields\n        |> List.find_opt (fun (field : field) -> field.fname.txt = fieldName)\n      with\n      | None ->\n        if Debug.verbose () then\n          print_endline \"[nested]--> did not find record field\";\n        None\n      | Some {typ; optional} ->\n        if Debug.verbose () then\n          print_endline \"[nested]--> found record field type\";\n        let typ = if optional then Utils.unwrapIfOption typ else typ in\n\n        if Debug.verbose () then\n          
Printf.printf \"[nested]--> extracting from type %s in env %s\\n\"\n            (Shared.typeToString typ) (Debug.debugPrintEnv env);\n        typ\n        |> extractType ~env ~package:full.package\n        |> Utils.Option.flatMap (fun (typ, typeArgContext) ->\n               typ\n               |> resolveNested ?typeArgContext ~ctx:(Rfield fieldName) ~env\n                    ~full ~nested))\n    | NRecordBody {seenFields}, Trecord {env; definition = `TypeExpr typeExpr}\n      ->\n      typeExpr\n      |> extractType ~env ~package:full.package\n      |> Option.map (fun (typ, typeArgContext) ->\n             ( typ,\n               env,\n               Some (Completable.RecordField {seenFields}),\n               typeArgContext ))\n    | ( NRecordBody {seenFields},\n        (Trecord {env; definition = `NameOnly _} as extractedType) ) ->\n      Some\n        ( extractedType,\n          env,\n          Some (Completable.RecordField {seenFields}),\n          typeArgContext )\n    | NRecordBody {seenFields}, TinlineRecord {env; fields} ->\n      Some\n        ( TinlineRecord {fields; env},\n          env,\n          Some (Completable.RecordField {seenFields}),\n          typeArgContext )\n    | ( NVariantPayload {constructorName = \"Some\"; itemNum = 0},\n        Toption (env, ExtractedType typ) ) ->\n      if Debug.verbose () then\n        print_endline \"[nested]--> moving into option Some\";\n      typ |> resolveNested ?typeArgContext ~env ~full ~nested\n    | ( NVariantPayload {constructorName = \"Some\"; itemNum = 0},\n        Toption (env, TypeExpr typ) ) ->\n      if Debug.verbose () then\n        print_endline \"[nested]--> moving into option Some\";\n      typ\n      |> extractType ~env ~package:full.package\n      |> Utils.Option.flatMap (fun (t, typeArgContext) ->\n             t |> resolveNested ?typeArgContext ~env ~full ~nested)\n    | NVariantPayload {constructorName = \"Ok\"; itemNum = 0}, Tresult {okType} ->\n      if Debug.verbose () then print_endline 
\"[nested]--> moving into result Ok\";\n      okType\n      |> extractType ~env ~package:full.package\n      |> Utils.Option.flatMap (fun (t, typeArgContext) ->\n             t |> resolveNested ?typeArgContext ~env ~full ~nested)\n    | ( NVariantPayload {constructorName = \"Error\"; itemNum = 0},\n        Tresult {errorType} ) ->\n      if Debug.verbose () then\n        print_endline \"[nested]--> moving into result Error\";\n      errorType\n      |> extractType ~env ~package:full.package\n      |> Utils.Option.flatMap (fun (t, typeArgContext) ->\n             t |> resolveNested ?typeArgContext ~env ~full ~nested)\n    | NVariantPayload {constructorName; itemNum}, Tvariant {env; constructors}\n      -> (\n      if Debug.verbose () then\n        Printf.printf\n          \"[nested]--> trying to move into variant payload $%i of constructor \\\n           '%s'\\n\"\n          itemNum constructorName;\n      match\n        constructors\n        |> List.find_opt (fun (c : Constructor.t) ->\n               c.cname.txt = constructorName)\n      with\n      | Some {args = Args args} -> (\n        if Debug.verbose () then\n          print_endline \"[nested]--> found constructor (Args type)\";\n        match List.nth_opt args itemNum with\n        | None ->\n          if Debug.verbose () then\n            print_endline \"[nested]--> did not find relevant args num\";\n          None\n        | Some (typ, _) ->\n          if Debug.verbose () then\n            Printf.printf \"[nested]--> found arg of type: %s\\n\"\n              (Shared.typeToString typ);\n\n          typ\n          |> extractType ~env ~package:full.package\n          |> Utils.Option.flatMap (fun (typ, typeArgContext) ->\n                 if Debug.verbose () then\n                   Printf.printf\n                     \"[nested]--> extracted %s, continuing descent of %i items\\n\"\n                     (extractedTypeToString typ)\n                     (List.length nested);\n                 typ |> 
resolveNested ?typeArgContext ~env ~full ~nested))\n      | Some {args = InlineRecord fields} when itemNum = 0 ->\n        if Debug.verbose () then\n          print_endline \"[nested]--> found constructor (inline record)\";\n        TinlineRecord {env; fields}\n        |> resolveNested ?typeArgContext ~env ~full ~nested\n      | _ -> None)\n    | ( NPolyvariantPayload {constructorName; itemNum},\n        Tpolyvariant {env; constructors} ) -> (\n      match\n        constructors\n        |> List.find_opt (fun (c : polyVariantConstructor) ->\n               c.name = constructorName)\n      with\n      | None -> None\n      | Some constructor -> (\n        match List.nth_opt constructor.args itemNum with\n        | None -> None\n        | Some typ ->\n          typ\n          |> extractType ~env ~package:full.package\n          |> Utils.Option.flatMap (fun (typ, typeArgContext) ->\n                 typ |> resolveNested ?typeArgContext ~env ~full ~nested)))\n    | NArray, Tarray (env, ExtractedType typ) ->\n      typ |> resolveNested ?typeArgContext ~env ~full ~nested\n    | NArray, Tarray (env, TypeExpr typ) ->\n      typ\n      |> extractType ~env ~package:full.package\n      |> Utils.Option.flatMap (fun (typ, typeArgContext) ->\n             typ |> resolveNested ?typeArgContext ~env ~full ~nested)\n    | _ -> None)\n\nlet findTypeOfRecordField fields ~fieldName =\n  match\n    fields |> List.find_opt (fun (field : field) -> field.fname.txt = fieldName)\n  with\n  | None -> None\n  | Some {typ; optional} ->\n    let typ = if optional then Utils.unwrapIfOption typ else typ in\n    Some typ\n\nlet findTypeOfConstructorArg constructors ~constructorName ~payloadNum ~env =\n  match\n    constructors\n    |> List.find_opt (fun (c : Constructor.t) -> c.cname.txt = constructorName)\n  with\n  | Some {args = Args args} -> (\n    match List.nth_opt args payloadNum with\n    | None -> None\n    | Some (typ, _) -> Some (TypeExpr typ))\n  | Some {args = InlineRecord fields} when 
payloadNum = 0 ->\n    Some (ExtractedType (TinlineRecord {env; fields}))\n  | _ -> None\n\nlet findTypeOfPolyvariantArg constructors ~constructorName ~payloadNum =\n  match\n    constructors\n    |> List.find_opt (fun (c : polyVariantConstructor) ->\n           c.name = constructorName)\n  with\n  | Some {args} -> (\n    match List.nth_opt args payloadNum with\n    | None -> None\n    | Some typ -> Some typ)\n  | None -> None\n\nlet rec resolveNestedPatternPath (typ : innerType) ~env ~full ~nested =\n  if Debug.verbose () then print_endline \"[nested_pattern_path]\";\n  let t =\n    match typ with\n    | TypeExpr t ->\n      t |> extractType ~env ~package:full.package |> getExtractedType\n    | ExtractedType t -> Some t\n  in\n  match nested with\n  | [] -> None\n  | [finalPatternPath] -> (\n    match t with\n    | None -> None\n    | Some completionType -> (\n      match (finalPatternPath, completionType) with\n      | ( Completable.NFollowRecordField {fieldName},\n          (TinlineRecord {fields} | Trecord {fields}) ) -> (\n        match fields |> findTypeOfRecordField ~fieldName with\n        | None -> None\n        | Some typ -> Some (TypeExpr typ, env))\n      | NTupleItem {itemNum}, Tuple (env, tupleItems, _) -> (\n        match List.nth_opt tupleItems itemNum with\n        | None -> None\n        | Some typ -> Some (TypeExpr typ, env))\n      | NVariantPayload {constructorName; itemNum}, Tvariant {env; constructors}\n        -> (\n        match\n          constructors\n          |> findTypeOfConstructorArg ~constructorName ~payloadNum:itemNum ~env\n        with\n        | Some typ -> Some (typ, env)\n        | None -> None)\n      | ( NPolyvariantPayload {constructorName; itemNum},\n          Tpolyvariant {env; constructors} ) -> (\n        match\n          constructors\n          |> findTypeOfPolyvariantArg ~constructorName ~payloadNum:itemNum\n        with\n        | Some typ -> Some (TypeExpr typ, env)\n        | None -> None)\n      | ( NVariantPayload 
{constructorName = \"Some\"; itemNum = 0},\n          Toption (env, typ) ) ->\n        Some (typ, env)\n      | ( NVariantPayload {constructorName = \"Ok\"; itemNum = 0},\n          Tresult {env; okType} ) ->\n        Some (TypeExpr okType, env)\n      | ( NVariantPayload {constructorName = \"Error\"; itemNum = 0},\n          Tresult {env; errorType} ) ->\n        Some (TypeExpr errorType, env)\n      | NArray, Tarray (env, typ) -> Some (typ, env)\n      | _ -> None))\n  | patternPath :: nested -> (\n    match t with\n    | None -> None\n    | Some completionType -> (\n      match (patternPath, completionType) with\n      | ( Completable.NFollowRecordField {fieldName},\n          (TinlineRecord {env; fields} | Trecord {env; fields}) ) -> (\n        match fields |> findTypeOfRecordField ~fieldName with\n        | None -> None\n        | Some typ ->\n          typ\n          |> extractType ~env ~package:full.package\n          |> getExtractedType\n          |> Utils.Option.flatMap (fun typ ->\n                 ExtractedType typ\n                 |> resolveNestedPatternPath ~env ~full ~nested))\n      | NTupleItem {itemNum}, Tuple (env, tupleItems, _) -> (\n        match List.nth_opt tupleItems itemNum with\n        | None -> None\n        | Some typ ->\n          typ\n          |> extractType ~env ~package:full.package\n          |> getExtractedType\n          |> Utils.Option.flatMap (fun typ ->\n                 ExtractedType typ\n                 |> resolveNestedPatternPath ~env ~full ~nested))\n      | NVariantPayload {constructorName; itemNum}, Tvariant {env; constructors}\n        -> (\n        match\n          constructors\n          |> findTypeOfConstructorArg ~constructorName ~payloadNum:itemNum ~env\n        with\n        | Some typ -> typ |> resolveNestedPatternPath ~env ~full ~nested\n        | None -> None)\n      | ( NPolyvariantPayload {constructorName; itemNum},\n          Tpolyvariant {env; constructors} ) -> (\n        match\n          constructors\n 
         |> findTypeOfPolyvariantArg ~constructorName ~payloadNum:itemNum\n        with\n        | Some typ ->\n          TypeExpr typ |> resolveNestedPatternPath ~env ~full ~nested\n        | None -> None)\n      | ( NVariantPayload {constructorName = \"Some\"; itemNum = 0},\n          Toption (env, typ) ) ->\n        typ |> resolveNestedPatternPath ~env ~full ~nested\n      | ( NVariantPayload {constructorName = \"Ok\"; itemNum = 0},\n          Tresult {env; okType} ) ->\n        TypeExpr okType |> resolveNestedPatternPath ~env ~full ~nested\n      | ( NVariantPayload {constructorName = \"Error\"; itemNum = 0},\n          Tresult {env; errorType} ) ->\n        TypeExpr errorType |> resolveNestedPatternPath ~env ~full ~nested\n      | NArray, Tarray (env, typ) ->\n        typ |> resolveNestedPatternPath ~env ~full ~nested\n      | _ -> None))\n\nlet getArgs ~env (t : Types.type_expr) ~full =\n  let rec getArgsLoop ~env (t : Types.type_expr) ~full ~currentArgumentPosition\n      =\n    match t.desc with\n    | Tlink t1\n    | Tsubst t1\n    | Tpoly (t1, [])\n    | Tconstr (Pident {name = \"function$\"}, [t1; _], _) ->\n      getArgsLoop ~full ~env ~currentArgumentPosition t1\n    | Tarrow (Labelled l, tArg, tRet, _) ->\n      (SharedTypes.Completable.Labelled l, tArg)\n      :: getArgsLoop ~full ~env ~currentArgumentPosition tRet\n    | Tarrow (Optional l, tArg, tRet, _) ->\n      (Optional l, tArg) :: getArgsLoop ~full ~env ~currentArgumentPosition tRet\n    | Tarrow (Nolabel, tArg, tRet, _) ->\n      (Unlabelled {argumentPosition = currentArgumentPosition}, tArg)\n      :: getArgsLoop ~full ~env\n           ~currentArgumentPosition:(currentArgumentPosition + 1)\n           tRet\n    | Tconstr (path, typeArgs, _) -> (\n      match References.digConstructor ~env ~package:full.package path with\n      | Some\n          ( env,\n            {\n              item = {decl = {type_manifest = Some t1; type_params = typeParams}};\n            } ) ->\n        let t1 = t1 |> 
instantiateType ~typeParams ~typeArgs in\n        getArgsLoop ~full ~env ~currentArgumentPosition t1\n      | _ -> [])\n    | _ -> []\n  in\n  t |> getArgsLoop ~env ~full ~currentArgumentPosition:0\n\nlet typeIsUnit (typ : Types.type_expr) =\n  match typ.desc with\n  | Tconstr (Pident id, _typeArgs, _)\n  | Tlink {desc = Tconstr (Pident id, _typeArgs, _)}\n  | Tsubst {desc = Tconstr (Pident id, _typeArgs, _)}\n  | Tpoly ({desc = Tconstr (Pident id, _typeArgs, _)}, [])\n    when Ident.name id = \"unit\" ->\n    true\n  | _ -> false\n\nlet rec contextPathFromCoreType (coreType : Parsetree.core_type) =\n  match coreType.ptyp_desc with\n  | Ptyp_constr ({txt = Lident \"option\"}, [innerTyp]) ->\n    innerTyp |> contextPathFromCoreType\n    |> Option.map (fun innerTyp -> Completable.CPOption innerTyp)\n  | Ptyp_constr ({txt = Lident \"array\"}, [innerTyp]) ->\n    Some (Completable.CPArray (innerTyp |> contextPathFromCoreType))\n  | Ptyp_constr (lid, _) ->\n    Some\n      (CPId\n         {\n           path = lid.txt |> Utils.flattenLongIdent;\n           completionContext = Type;\n           loc = lid.loc;\n         })\n  | _ -> None\n\nlet unwrapCompletionTypeIfOption (t : SharedTypes.completionType) =\n  match t with\n  | Toption (_, ExtractedType unwrapped) -> unwrapped\n  | _ -> t\n\nmodule Codegen = struct\n  let mkFailWithExp () =\n    Ast_helper.Exp.apply\n      (Ast_helper.Exp.ident {txt = Lident \"failwith\"; loc = Location.none})\n      [(Nolabel, Ast_helper.Exp.constant (Pconst_string (\"TODO\", None)))]\n\n  let mkConstructPat ?payload name =\n    Ast_helper.Pat.construct\n      {Asttypes.txt = Longident.Lident name; loc = Location.none}\n      payload\n\n  let mkTagPat ?payload name = Ast_helper.Pat.variant name payload\n\n  let any () = Ast_helper.Pat.any ()\n\n  let rec extractedTypeToExhaustivePatterns ~env ~full extractedType =\n    match extractedType with\n    | Tvariant v ->\n      Some\n        (v.constructors\n        |> List.map (fun (c : 
SharedTypes.Constructor.t) ->\n               mkConstructPat\n                 ?payload:\n                   (match c.args with\n                   | Args [] -> None\n                   | _ -> Some (any ()))\n                 c.cname.txt))\n    | Tpolyvariant v ->\n      Some\n        (v.constructors\n        |> List.map (fun (c : SharedTypes.polyVariantConstructor) ->\n               mkTagPat\n                 ?payload:\n                   (match c.args with\n                   | [] -> None\n                   | _ -> Some (any ()))\n                 c.displayName))\n    | Toption (_, innerType) ->\n      let extractedType =\n        match innerType with\n        | ExtractedType t -> Some t\n        | TypeExpr t ->\n          extractType t ~env ~package:full.package |> getExtractedType\n      in\n      let expandedBranches =\n        match extractedType with\n        | None -> []\n        | Some extractedType -> (\n          match extractedTypeToExhaustivePatterns ~env ~full extractedType with\n          | None -> []\n          | Some patterns -> patterns)\n      in\n      Some\n        ([\n           mkConstructPat \"None\";\n           mkConstructPat ~payload:(Ast_helper.Pat.any ()) \"Some\";\n         ]\n        @ (expandedBranches\n          |> List.map (fun (pat : Parsetree.pattern) ->\n                 mkConstructPat ~payload:pat \"Some\")))\n    | Tresult {okType; errorType} ->\n      let extractedOkType =\n        okType |> extractType ~env ~package:full.package |> getExtractedType\n      in\n      let extractedErrorType =\n        errorType |> extractType ~env ~package:full.package |> getExtractedType\n      in\n      let expandedOkBranches =\n        match extractedOkType with\n        | None -> []\n        | Some extractedType -> (\n          match extractedTypeToExhaustivePatterns ~env ~full extractedType with\n          | None -> []\n          | Some patterns -> patterns)\n      in\n      let expandedErrorBranches =\n        match extractedErrorType 
with\n        | None -> []\n        | Some extractedType -> (\n          match extractedTypeToExhaustivePatterns ~env ~full extractedType with\n          | None -> []\n          | Some patterns -> patterns)\n      in\n      Some\n        ((expandedOkBranches\n         |> List.map (fun (pat : Parsetree.pattern) ->\n                mkConstructPat ~payload:pat \"Ok\"))\n        @ (expandedErrorBranches\n          |> List.map (fun (pat : Parsetree.pattern) ->\n                 mkConstructPat ~payload:pat \"Error\")))\n    | Tbool _ -> Some [mkConstructPat \"true\"; mkConstructPat \"false\"]\n    | _ -> None\n\n  let extractedTypeToExhaustiveCases ~env ~full extractedType =\n    let patterns = extractedTypeToExhaustivePatterns ~env ~full extractedType in\n\n    match patterns with\n    | None -> None\n    | Some patterns ->\n      Some\n        (patterns\n        |> List.map (fun (pat : Parsetree.pattern) ->\n               Ast_helper.Exp.case pat (mkFailWithExp ())))\nend\n\nlet getModulePathRelativeToEnv ~debug ~(env : QueryEnv.t) ~envFromItem path =\n  match path with\n  | _ :: pathRev ->\n    (* type path is relative to the completion environment\n       express it from the root of the file *)\n    let found, pathFromEnv =\n      QueryEnv.pathFromEnv envFromItem (List.rev pathRev)\n    in\n    if debug then\n      Printf.printf \"CPPipe pathFromEnv:%s found:%b\\n\"\n        (pathFromEnv |> String.concat \".\")\n        found;\n    if pathFromEnv = [] then None\n    else if\n      env.file.moduleName <> envFromItem.file.moduleName && found\n      (* If the module names are different, then one needs to qualify the path.\n         But only if the path belongs to the env from completion *)\n    then Some (envFromItem.file.moduleName :: pathFromEnv)\n    else Some pathFromEnv\n  | _ -> None\n\nlet removeOpensFromCompletionPath ~rawOpens ~package completionPath =\n  let rec removeRawOpen rawOpen modulePath =\n    match (rawOpen, modulePath) with\n    | [_], _ -> Some 
modulePath\n    | s :: inner, first :: restPath when s = first ->\n      removeRawOpen inner restPath\n    | _ -> None\n  in\n  let rec removeRawOpens rawOpens modulePath =\n    match rawOpens with\n    | rawOpen :: restOpens -> (\n      let newModulePath = removeRawOpens restOpens modulePath in\n      match removeRawOpen rawOpen newModulePath with\n      | None -> newModulePath\n      | Some mp -> mp)\n    | [] -> modulePath\n  in\n  let completionPathMinusOpens =\n    completionPath |> Utils.flattenAnyNamespaceInPath\n    |> removeRawOpens package.opens\n    |> removeRawOpens rawOpens\n  in\n  completionPathMinusOpens\n\nlet pathToElementProps package =\n  match package.genericJsxModule with\n  | None -> [\"ReactDOM\"; \"domProps\"]\n  | Some g -> (g |> String.split_on_char '.') @ [\"Elements\"; \"props\"]\n\nmodule StringSet = Set.Make (String)\n\nlet getExtraModulesToCompleteFromForType ~env ~full (t : Types.type_expr) =\n  let foundModulePaths = ref StringSet.empty in\n  let addToModulePaths attributes =\n    ProcessAttributes.findEditorCompleteFromAttribute attributes\n    |> List.iter (fun e ->\n           foundModulePaths :=\n             StringSet.add (e |> String.concat \".\") !foundModulePaths)\n  in\n  let rec inner ~env ~full (t : Types.type_expr) =\n    match t |> Shared.digConstructor with\n    | Some path -> (\n      match References.digConstructor ~env ~package:full.package path with\n      | None -> ()\n      | Some (env, {item = {decl = {type_manifest = Some t}; attributes}}) ->\n        addToModulePaths attributes;\n        inner ~env ~full t\n      | Some (_, {item = {attributes}}) -> addToModulePaths attributes)\n    | None -> ()\n  in\n  inner ~env ~full t;\n  !foundModulePaths |> StringSet.elements\n  |> List.map (fun l -> String.split_on_char '.' 
l)\n\nlet getFirstFnUnlabelledArgType ~env ~full t =\n  let labels, _, env =\n    extractFunctionTypeWithEnv ~env ~package:full.package t\n  in\n  let rec findFirstUnlabelledArgType labels =\n    match labels with\n    | (Asttypes.Nolabel, t) :: _ -> Some t\n    | _ :: rest -> findFirstUnlabelledArgType rest\n    | [] -> None\n  in\n  match findFirstUnlabelledArgType labels with\n  | Some t -> Some (t, env)\n  | _ -> None\n\nlet makeAdditionalTextEditsForRemovingDot posOfDot =\n  [\n    {\n      Protocol.range =\n        {\n          start = {line = fst posOfDot; character = snd posOfDot - 1};\n          end_ = {line = fst posOfDot; character = snd posOfDot};\n        };\n      newText = \"\";\n    };\n  ]\n\n(** Turns a completion into a pipe completion. *)\nlet transformCompletionToPipeCompletion ?(synthetic = false) ~env ?posOfDot\n    (completion : Completion.t) =\n  let name = completion.name in\n  let nameWithPipe = \"->\" ^ name in\n  Some\n    {\n      completion with\n      name = nameWithPipe;\n      sortText =\n        (match completion.sortText with\n        | Some _ -> completion.sortText\n        | None -> Some (name |> String.split_on_char '.' |> List.rev |> List.hd));\n      insertText = Some nameWithPipe;\n      env;\n      synthetic;\n      additionalTextEdits =\n        (match posOfDot with\n        | None -> None\n        | Some posOfDot -> Some (makeAdditionalTextEditsForRemovingDot posOfDot));\n    }\n\n(** This takes a type expr and the env that type expr was found in, and produces\n    a globally unique id for that specific type. The globally unique id is the\n    full path to the type as seen from the root of the project. 
Example: type x\n    in module SomeModule in file SomeFile would get the globally unique id\n    `SomeFile.SomeModule.x`.*)\nlet rec findRootTypeId ~full ~env (t : Types.type_expr) =\n  let debug = false in\n  match t.desc with\n  | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> findRootTypeId ~full ~env t1\n  | Tconstr (Pident {name = \"function$\"}, [t; _], _) ->\n    findRootTypeId ~full ~env t\n  | Tconstr (path, _, _) -> (\n    (* We have a path. Try to dig to its declaration *)\n    if debug then\n      Printf.printf \"[findRootTypeId] path %s, dig\\n\" (Path.name path);\n    match References.digConstructor ~env ~package:full.package path with\n    | Some (env, {item = {decl = {type_manifest = Some t1}}}) ->\n      if debug then\n        Printf.printf \"[findRootTypeId] dug up type alias at module path %s \\n\"\n          (modulePathFromEnv env |> String.concat \".\");\n      findRootTypeId ~full ~env t1\n    | Some (env, {item = {name}; modulePath}) ->\n      (* if it's a named type, then we know its name will be its module path from the env + its name.*)\n      if debug then\n        Printf.printf\n          \"[findRootTypeId] dug up named type at module path %s, from item: %s \\n\"\n          (modulePathFromEnv env |> String.concat \".\")\n          (ModulePath.toPath modulePath name |> String.concat \".\");\n      Some (fullTypeIdFromDecl ~env ~name ~modulePath)\n    | None ->\n      (* If we didn't find anything, then it might be a builtin type. Check it.*)\n      if debug then Printf.printf \"[findRootTypeId] dug up non-type alias\\n\";\n      if\n        Predef.builtin_idents\n        |> List.find_opt (fun (_, i) -> Ident.same i (Path.head path))\n        |> Option.is_some\n      then\n        Some\n          (if debug then Printf.printf \"[findRootTypeId] returning builtin\\n\";\n           Path.name path)\n      else None)\n  | _ -> None\n\n(** Filters out completions that are not pipeable from a list of completions. 
*)\nlet filterPipeableFunctions ~env ~full ?synthetic ?targetTypeId ?posOfDot\n    completions =\n  match targetTypeId with\n  | None -> completions\n  | Some targetTypeId ->\n    completions\n    |> List.filter_map (fun (completion : Completion.t) ->\n           let thisCompletionItemTypeId =\n             match completion.kind with\n             | Value t -> (\n               match\n                 getFirstFnUnlabelledArgType ~full ~env:completion.env t\n               with\n               | None -> None\n               | Some (t, envFromLabelledArg) ->\n                 findRootTypeId ~full ~env:envFromLabelledArg t)\n             | _ -> None\n           in\n           match thisCompletionItemTypeId with\n           | Some mainTypeId when mainTypeId = targetTypeId -> (\n             match posOfDot with\n             | None -> Some completion\n             | Some posOfDot ->\n               transformCompletionToPipeCompletion ?synthetic ~env ~posOfDot\n                 completion)\n           | _ -> None)\n\nlet removeCurrentModuleIfNeeded ~envCompletionIsMadeFrom completionPath =\n  if\n    List.length completionPath > 0\n    && List.hd completionPath = envCompletionIsMadeFrom.QueryEnv.file.moduleName\n  then List.tl completionPath\n  else completionPath\n\nlet rec getObjFields (texp : Types.type_expr) =\n  match texp.desc with\n  | Tfield (name, _, t1, t2) ->\n    let fields = t2 |> getObjFields in\n    (name, t1) :: fields\n  | Tlink te | Tsubst te | Tpoly (te, []) -> te |> getObjFields\n  | Tvar None -> []\n  | _ -> []\n\nlet pathToBuiltin path =\n  Predef.builtin_idents\n  |> List.find_opt (fun (_, i) -> Ident.same i (Path.head path))\n\nlet completionPathFromMaybeBuiltin path ~package =\n  match pathToBuiltin path with\n  | Some (\"array\", _) -> Some package.builtInCompletionModules.arrayModulePath\n  | Some (\"option\", _) -> Some package.builtInCompletionModules.optionModulePath\n  | Some (\"string\", _) -> Some 
package.builtInCompletionModules.stringModulePath\n  | Some (\"int\", _) -> Some package.builtInCompletionModules.intModulePath\n  | Some (\"float\", _) -> Some package.builtInCompletionModules.floatModulePath\n  | Some (\"promise\", _) ->\n    Some package.builtInCompletionModules.promiseModulePath\n  | Some (\"list\", _) -> Some package.builtInCompletionModules.listModulePath\n  | Some (\"result\", _) -> Some package.builtInCompletionModules.resultModulePath\n  | Some (\"dict\", _) -> Some [\"Dict\"]\n  | Some (\"char\", _) -> Some [\"Char\"]\n  | _ -> None\n"
  },
  {
    "path": "analysis/src/Uri.ml",
    "content": "type t = {path: string; uri: string}\n\nlet stripPath = ref false (* for use in tests *)\n\nlet pathToUri path =\n  if Sys.os_type = \"Unix\" then \"file://\" ^ path\n  else\n    \"file://\"\n    ^ (Str.global_replace (Str.regexp_string \"\\\\\") \"/\" path\n      |> Str.substitute_first (Str.regexp \"^\\\\([a-zA-Z]\\\\):\") (fun text ->\n             let name = Str.matched_group 1 text in\n             \"/\" ^ String.lowercase_ascii name ^ \"%3A\"))\n\nlet fromPath path = {path; uri = pathToUri path}\nlet isInterface {path} = Filename.check_suffix path \"i\"\nlet toPath {path} = path\n\nlet toTopLevelLoc {path} =\n  let topPos =\n    {Lexing.pos_fname = path; pos_lnum = 1; pos_bol = 0; pos_cnum = 0}\n  in\n  {Location.loc_start = topPos; Location.loc_end = topPos; loc_ghost = false}\n\nlet toString {uri} = if !stripPath then Filename.basename uri else uri\n\n(* Light weight, hopefully-enough-for-the-purpose fn to encode URI components.\n   Built to handle the reserved characters listed in\n   https://en.wikipedia.org/wiki/Percent-encoding. Note that this function is not\n   general purpose, rather it's currently only for URL encoding the argument list\n   passed to command links in markdown. 
*)\nlet encodeURIComponent text =\n  let ln = String.length text in\n  let buf = Buffer.create ln in\n  let rec loop i =\n    if i < ln then (\n      (match text.[i] with\n      | '\"' -> Buffer.add_string buf \"%22\"\n      | '\\'' -> Buffer.add_string buf \"%22\"\n      | ':' -> Buffer.add_string buf \"%3A\"\n      | ';' -> Buffer.add_string buf \"%3B\"\n      | '/' -> Buffer.add_string buf \"%2F\"\n      | '\\\\' -> Buffer.add_string buf \"%5C\"\n      | ',' -> Buffer.add_string buf \"%2C\"\n      | '&' -> Buffer.add_string buf \"%26\"\n      | '[' -> Buffer.add_string buf \"%5B\"\n      | ']' -> Buffer.add_string buf \"%5D\"\n      | '#' -> Buffer.add_string buf \"%23\"\n      | '$' -> Buffer.add_string buf \"%24\"\n      | '+' -> Buffer.add_string buf \"%2B\"\n      | '=' -> Buffer.add_string buf \"%3D\"\n      | '?' -> Buffer.add_string buf \"%3F\"\n      | '@' -> Buffer.add_string buf \"%40\"\n      | '%' -> Buffer.add_string buf \"%25\"\n      | c -> Buffer.add_char buf c);\n      loop (i + 1))\n  in\n  loop 0;\n  Buffer.contents buf\n"
  },
  {
    "path": "analysis/src/Uri.mli",
    "content": "type t\n\nval fromPath : string -> t\nval isInterface : t -> bool\nval stripPath : bool ref\nval toPath : t -> string\nval toString : t -> string\nval toTopLevelLoc : t -> Location.t\nval encodeURIComponent : string -> string\n"
  },
  {
    "path": "analysis/src/Utils.ml",
    "content": "(** * `startsWith(string, prefix)` * true if the string starts with the prefix\n*)\nlet startsWith s prefix =\n  if prefix = \"\" then true\n  else\n    let p = String.length prefix in\n    p <= String.length s && String.sub s 0 p = prefix\n\nlet endsWith s suffix =\n  if suffix = \"\" then true\n  else\n    let p = String.length suffix in\n    let l = String.length s in\n    p <= String.length s && String.sub s (l - p) p = suffix\n\nlet isFirstCharUppercase s =\n  String.length s > 0 && Char.equal s.[0] (Char.uppercase_ascii s.[0])\n\nlet cmtPosToPosition {Lexing.pos_lnum; pos_cnum; pos_bol} =\n  Protocol.{line = pos_lnum - 1; character = pos_cnum - pos_bol}\n\nlet cmtLocToRange {Location.loc_start; loc_end} =\n  Protocol.{start = cmtPosToPosition loc_start; end_ = cmtPosToPosition loc_end}\n\nlet endOfLocation loc length =\n  let open Location in\n  {\n    loc with\n    loc_start = {loc.loc_end with pos_cnum = loc.loc_end.pos_cnum - length};\n  }\n\nlet chopLocationEnd loc length =\n  let open Location in\n  {\n    loc with\n    loc_end = {loc.loc_end with pos_cnum = loc.loc_end.pos_cnum - length};\n  }\n\n(** An optional List.find *)\nlet rec find fn items =\n  match items with\n  | [] -> None\n  | one :: rest -> (\n    match fn one with\n    | None -> find fn rest\n    | Some x -> Some x)\n\nlet filterMap f =\n  let rec aux accu = function\n    | [] -> List.rev accu\n    | x :: l -> (\n      match f x with\n      | None -> aux accu l\n      | Some v -> aux (v :: accu) l)\n  in\n  aux []\n\nlet dumpPath path = Str.global_replace (Str.regexp_string \"\\\\\") \"/\" path\nlet isUncurriedInternal path = startsWith (Path.name path) \"Js.Fn.arity\"\n\nlet flattenLongIdent ?(jsx = false) ?(cutAtOffset = None) lid =\n  let extendPath s path =\n    match path with\n    | \"\" :: _ -> path\n    | _ -> s :: path\n  in\n  let rec loop lid =\n    match lid with\n    | Longident.Lident txt -> ([txt], String.length txt)\n    | Ldot (lid, txt) ->\n      let 
path, offset = loop lid in\n      if Some offset = cutAtOffset then (extendPath \"\" path, offset + 1)\n      else if jsx && txt = \"createElement\" then (path, offset)\n      else if txt = \"_\" then (extendPath \"\" path, offset + 1)\n      else (extendPath txt path, offset + 1 + String.length txt)\n    | Lapply _ -> ([], 0)\n  in\n  let path, _ = loop lid in\n  List.rev path\n\nlet identifyPexp pexp =\n  match pexp with\n  | Parsetree.Pexp_ident _ -> \"Pexp_ident\"\n  | Pexp_constant _ -> \"Pexp_constant\"\n  | Pexp_let _ -> \"Pexp_let\"\n  | Pexp_function _ -> \"Pexp_function\"\n  | Pexp_fun _ -> \"Pexp_fun\"\n  | Pexp_apply _ -> \"Pexp_apply\"\n  | Pexp_match _ -> \"Pexp_match\"\n  | Pexp_try _ -> \"Pexp_try\"\n  | Pexp_tuple _ -> \"Pexp_tuple\"\n  | Pexp_construct _ -> \"Pexp_construct\"\n  | Pexp_variant _ -> \"Pexp_variant\"\n  | Pexp_record _ -> \"Pexp_record\"\n  | Pexp_field _ -> \"Pexp_field\"\n  | Pexp_setfield _ -> \"Pexp_setfield\"\n  | Pexp_array _ -> \"Pexp_array\"\n  | Pexp_ifthenelse _ -> \"Pexp_ifthenelse\"\n  | Pexp_sequence _ -> \"Pexp_sequence\"\n  | Pexp_while _ -> \"Pexp_while\"\n  | Pexp_for _ -> \"Pexp_for\"\n  | Pexp_constraint _ -> \"Pexp_constraint\"\n  | Pexp_coerce _ -> \"Pexp_coerce\"\n  | Pexp_send _ -> \"Pexp_send\"\n  | Pexp_new _ -> \"Pexp_new\"\n  | Pexp_setinstvar _ -> \"Pexp_setinstvar\"\n  | Pexp_override _ -> \"Pexp_override\"\n  | Pexp_letmodule _ -> \"Pexp_letmodule\"\n  | Pexp_letexception _ -> \"Pexp_letexception\"\n  | Pexp_assert _ -> \"Pexp_assert\"\n  | Pexp_lazy _ -> \"Pexp_lazy\"\n  | Pexp_poly _ -> \"Pexp_poly\"\n  | Pexp_object _ -> \"Pexp_object\"\n  | Pexp_newtype _ -> \"Pexp_newtype\"\n  | Pexp_pack _ -> \"Pexp_pack\"\n  | Pexp_extension _ -> \"Pexp_extension\"\n  | Pexp_open _ -> \"Pexp_open\"\n  | Pexp_unreachable -> \"Pexp_unreachable\"\n\nlet identifyPpat pat =\n  match pat with\n  | Parsetree.Ppat_any -> \"Ppat_any\"\n  | Ppat_var _ -> \"Ppat_var\"\n  | Ppat_alias _ -> \"Ppat_alias\"\n  | Ppat_constant _ 
-> \"Ppat_constant\"\n  | Ppat_interval _ -> \"Ppat_interval\"\n  | Ppat_tuple _ -> \"Ppat_tuple\"\n  | Ppat_construct _ -> \"Ppat_construct\"\n  | Ppat_variant _ -> \"Ppat_variant\"\n  | Ppat_record _ -> \"Ppat_record\"\n  | Ppat_array _ -> \"Ppat_array\"\n  | Ppat_or _ -> \"Ppat_or\"\n  | Ppat_constraint _ -> \"Ppat_constraint\"\n  | Ppat_type _ -> \"Ppat_type\"\n  | Ppat_lazy _ -> \"Ppat_lazy\"\n  | Ppat_unpack _ -> \"Ppat_unpack\"\n  | Ppat_exception _ -> \"Ppat_exception\"\n  | Ppat_extension _ -> \"Ppat_extension\"\n  | Ppat_open _ -> \"Ppat_open\"\n\nlet rec skipWhite text i =\n  if i < 0 then 0\n  else\n    match text.[i] with\n    | ' ' | '\\n' | '\\r' | '\\t' -> skipWhite text (i - 1)\n    | _ -> i\n\nlet hasBraces attributes =\n  attributes |> List.exists (fun (loc, _) -> loc.Location.txt = \"res.braces\")\n\nlet rec unwrapIfOption (t : Types.type_expr) =\n  match t.desc with\n  | Tlink t1 | Tsubst t1 | Tpoly (t1, []) -> unwrapIfOption t1\n  | Tconstr (Path.Pident {name = \"option\"}, [unwrappedType], _) -> unwrappedType\n  | _ -> t\n\nlet isJsxComponent (vb : Parsetree.value_binding) =\n  vb.pvb_attributes\n  |> List.exists (function\n       | {Location.txt = \"react.component\" | \"jsx.component\"}, _payload -> true\n       | _ -> false)\n\nlet checkName name ~prefix ~exact =\n  if exact then name = prefix else startsWith name prefix\n\nlet rec getUnqualifiedName txt =\n  match txt with\n  | Longident.Lident fieldName -> fieldName\n  | Ldot (t, _) -> getUnqualifiedName t\n  | _ -> \"\"\n\nlet indent n text =\n  let spaces = String.make n ' ' in\n  let len = String.length text in\n  let text =\n    if len != 0 && text.[len - 1] = '\\n' then String.sub text 0 (len - 1)\n    else text\n  in\n  let lines = String.split_on_char '\\n' text in\n  match lines with\n  | [] -> \"\"\n  | [line] -> line\n  | line :: lines ->\n    line ^ \"\\n\"\n    ^ (lines |> List.map (fun line -> spaces ^ line) |> String.concat \"\\n\")\n\nlet mkPosition (pos : Pos.t) =\n  let 
line, character = pos in\n  {Protocol.line; character}\n\nlet rangeOfLoc (loc : Location.t) =\n  let start = loc |> Loc.start |> mkPosition in\n  let end_ = loc |> Loc.end_ |> mkPosition in\n  {Protocol.start; end_}\n\nlet rec expandPath (path : Path.t) =\n  match path with\n  | Pident id -> [Ident.name id]\n  | Pdot (p, s, _) -> s :: expandPath p\n  | Papply _ -> []\n\nmodule Option = struct\n  let flatMap f o =\n    match o with\n    | None -> None\n    | Some v -> f v\nend\n\nlet rec lastElements list =\n  match list with\n  | ([_; _] | [_] | []) as res -> res\n  | _ :: tl -> lastElements tl\n\nlet lowercaseFirstChar s =\n  if String.length s = 0 then s\n  else String.mapi (fun i c -> if i = 0 then Char.lowercase_ascii c else c) s\n\nlet cutAfterDash s =\n  match String.index s '-' with\n  | n -> ( try String.sub s 0 n with Invalid_argument _ -> s)\n  | exception Not_found -> s\n\nlet fileNameHasUnallowedChars s =\n  let regexp = Str.regexp \"[^A-Za-z0-9_]\" in\n  try\n    ignore (Str.search_forward regexp s 0);\n    true\n  with Not_found -> false\n\n(* Flattens any namespace in the provided path.\n   Example:\n    Globals-RescriptBun.URL.t (which is an illegal path because of the namespace) becomes:\n    RescriptBun.Globals.URL.t\n*)\nlet rec flattenAnyNamespaceInPath path =\n  match path with\n  | [] -> []\n  | head :: tail ->\n    if String.contains head '-' then\n      let parts = String.split_on_char '-' head in\n      (* Namespaces are in reverse order, so \"URL-RescriptBun\" where RescriptBun is the namespace. *)\n      (parts |> List.rev) @ flattenAnyNamespaceInPath tail\n    else head :: flattenAnyNamespaceInPath tail\n\nlet printMaybeExoticIdent ?(allowUident = false) txt =\n  let len = String.length txt in\n\n  let rec loop i =\n    if i == len then txt\n    else if i == 0 then\n      match String.unsafe_get txt i with\n      | 'A' .. 'Z' when allowUident -> loop (i + 1)\n      | 'a' .. 
'z' | '_' -> loop (i + 1)\n      | _ -> \"\\\"\" ^ txt ^ \"\\\"\"\n    else\n      match String.unsafe_get txt i with\n      | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '\\'' | '_' -> loop (i + 1)\n      | _ -> \"\\\"\" ^ txt ^ \"\\\"\"\n  in\n  if Res_token.is_keyword_txt txt then \"\\\"\" ^ txt ^ \"\\\"\" else loop 0\n\nlet findPackageJson root =\n  let path = Uri.toPath root in\n\n  let rec loop path =\n    if path = \"/\" then None\n    else if Files.exists (Filename.concat path \"package.json\") then\n      Some (Filename.concat path \"package.json\")\n    else\n      let parent = Filename.dirname path in\n      if parent = path then (* reached root *) None else loop parent\n  in\n  loop path\n"
  },
  {
    "path": "analysis/src/Xform.ml",
    "content": "(** Code transformations using the parser/printer and ast operations *)\n\nlet isBracedExpr = Res_parsetree_viewer.is_braced_expr\n\nlet extractTypeFromExpr expr ~debug ~path ~currentFile ~full ~pos =\n  match\n    expr.Parsetree.pexp_loc\n    |> CompletionFrontEnd.findTypeOfExpressionAtLoc ~debug ~path ~currentFile\n         ~posCursor:(Pos.ofLexing expr.Parsetree.pexp_loc.loc_start)\n  with\n  | Some (completable, scope) -> (\n    let env = SharedTypes.QueryEnv.fromFile full.SharedTypes.file in\n    let completions =\n      completable\n      |> CompletionBackEnd.processCompletable ~debug ~full ~pos ~scope ~env\n           ~forHover:true\n    in\n    let rawOpens = Scope.getRawOpens scope in\n    match completions with\n    | {env} :: _ -> (\n      let opens =\n        CompletionBackEnd.getOpens ~debug ~rawOpens ~package:full.package ~env\n      in\n      match\n        CompletionBackEnd.completionsGetCompletionType2 ~debug ~full ~rawOpens\n          ~opens ~pos completions\n      with\n      | Some (typ, _env) ->\n        let extractedType =\n          match typ with\n          | ExtractedType t -> Some t\n          | TypeExpr t ->\n            TypeUtils.extractType t ~env ~package:full.package\n            |> TypeUtils.getExtractedType\n        in\n        extractedType\n      | None -> None)\n    | _ -> None)\n  | _ -> None\n\nmodule IfThenElse = struct\n  (* Convert if-then-else to switch *)\n\n  let rec listToPat ~itemToPat = function\n    | [] -> Some []\n    | x :: xList -> (\n      match (itemToPat x, listToPat ~itemToPat xList) with\n      | Some p, Some pList -> Some (p :: pList)\n      | _ -> None)\n\n  let rec expToPat (exp : Parsetree.expression) =\n    let mkPat ppat_desc =\n      Ast_helper.Pat.mk ~loc:exp.pexp_loc ~attrs:exp.pexp_attributes ppat_desc\n    in\n    match exp.pexp_desc with\n    | Pexp_construct (lid, None) -> Some (mkPat (Ppat_construct (lid, None)))\n    | Pexp_construct (lid, Some e1) -> (\n      match expToPat e1 
with\n      | None -> None\n      | Some p1 -> Some (mkPat (Ppat_construct (lid, Some p1))))\n    | Pexp_variant (label, None) -> Some (mkPat (Ppat_variant (label, None)))\n    | Pexp_variant (label, Some e1) -> (\n      match expToPat e1 with\n      | None -> None\n      | Some p1 -> Some (mkPat (Ppat_variant (label, Some p1))))\n    | Pexp_constant c -> Some (mkPat (Ppat_constant c))\n    | Pexp_tuple eList -> (\n      match listToPat ~itemToPat:expToPat eList with\n      | None -> None\n      | Some patList -> Some (mkPat (Ppat_tuple patList)))\n    | Pexp_record (items, None) -> (\n      let itemToPat (x, e) =\n        match expToPat e with\n        | None -> None\n        | Some p -> Some (x, p)\n      in\n      match listToPat ~itemToPat items with\n      | None -> None\n      | Some patItems -> Some (mkPat (Ppat_record (patItems, Closed))))\n    | Pexp_record (_, Some _) -> None\n    | _ -> None\n\n  let mkIterator ~pos ~changed =\n    let expr (iterator : Ast_iterator.iterator) (e : Parsetree.expression) =\n      let newExp =\n        match e.pexp_desc with\n        | Pexp_ifthenelse\n            ( {\n                pexp_desc =\n                  Pexp_apply\n                    ( {\n                        pexp_desc =\n                          Pexp_ident {txt = Lident ((\"=\" | \"<>\") as op)};\n                      },\n                      [(Nolabel, arg1); (Nolabel, arg2)] );\n              },\n              e1,\n              Some e2 )\n          when Loc.hasPos ~pos e.pexp_loc -> (\n          let e1, e2 = if op = \"=\" then (e1, e2) else (e2, e1) in\n          let mkMatch ~arg ~pat =\n            let cases =\n              [\n                Ast_helper.Exp.case pat e1;\n                Ast_helper.Exp.case (Ast_helper.Pat.any ()) e2;\n              ]\n            in\n            Ast_helper.Exp.match_ ~loc:e.pexp_loc ~attrs:e.pexp_attributes arg\n              cases\n          in\n\n          match expToPat arg2 with\n          | None -> (\n           
 match expToPat arg1 with\n            | None -> None\n            | Some pat1 ->\n              let newExp = mkMatch ~arg:arg2 ~pat:pat1 in\n              Some newExp)\n          | Some pat2 ->\n            let newExp = mkMatch ~arg:arg1 ~pat:pat2 in\n            Some newExp)\n        | _ -> None\n      in\n      match newExp with\n      | Some newExp -> changed := Some newExp\n      | None -> Ast_iterator.default_iterator.expr iterator e\n    in\n\n    {Ast_iterator.default_iterator with expr}\n\n  let xform ~pos ~codeActions ~printExpr ~path structure =\n    let changed = ref None in\n    let iterator = mkIterator ~pos ~changed in\n    iterator.structure iterator structure;\n    match !changed with\n    | None -> ()\n    | Some newExpr ->\n      let range = Loc.rangeOfLoc newExpr.pexp_loc in\n      let newText = printExpr ~range newExpr in\n      let codeAction =\n        CodeActions.make ~title:\"Replace with switch\" ~kind:RefactorRewrite\n          ~uri:path ~newText ~range\n      in\n      codeActions := codeAction :: !codeActions\nend\n\nmodule ModuleToFile = struct\n  let mkIterator ~pos ~changed ~path ~printStandaloneStructure =\n    let structure_item (iterator : Ast_iterator.iterator)\n        (structure_item : Parsetree.structure_item) =\n      (match structure_item.pstr_desc with\n      | Pstr_module\n          {pmb_loc; pmb_name; pmb_expr = {pmod_desc = Pmod_structure structure}}\n        when structure_item.pstr_loc |> Loc.hasPos ~pos ->\n        let range = Loc.rangeOfLoc structure_item.pstr_loc in\n        let newTextInCurrentFile = \"\" in\n        let textForExtractedFile =\n          printStandaloneStructure ~loc:pmb_loc structure\n        in\n        let moduleName = pmb_name.txt in\n        let newFilePath =\n          Uri.fromPath\n            (Filename.concat (Filename.dirname path) moduleName ^ \".res\")\n        in\n        changed :=\n          Some\n            (CodeActions.makeWithDocumentChanges\n               ~title:\n               
  (Printf.sprintf \"Extract local module \\\"%s\\\" to file \\\"%s\\\"\"\n                    moduleName (moduleName ^ \".res\"))\n               ~kind:RefactorRewrite\n               ~documentChanges:\n                 [\n                   Protocol.CreateFile\n                     {\n                       uri = newFilePath |> Uri.toString;\n                       options =\n                         Some\n                           {overwrite = Some false; ignoreIfExists = Some true};\n                     };\n                   TextDocumentEdit\n                     {\n                       textDocument =\n                         {uri = newFilePath |> Uri.toString; version = None};\n                       edits =\n                         [\n                           {\n                             newText = textForExtractedFile;\n                             range =\n                               {\n                                 start = {line = 0; character = 0};\n                                 end_ = {line = 0; character = 0};\n                               };\n                           };\n                         ];\n                     };\n                   TextDocumentEdit\n                     {\n                       textDocument = {uri = path; version = None};\n                       edits = [{newText = newTextInCurrentFile; range}];\n                     };\n                 ]);\n        ()\n      | _ -> ());\n      Ast_iterator.default_iterator.structure_item iterator structure_item\n    in\n\n    {Ast_iterator.default_iterator with structure_item}\n\n  let xform ~pos ~codeActions ~path ~printStandaloneStructure structure =\n    let changed = ref None in\n    let iterator = mkIterator ~pos ~path ~changed ~printStandaloneStructure in\n    iterator.structure iterator structure;\n    match !changed with\n    | None -> ()\n    | Some codeAction -> codeActions := codeAction :: !codeActions\nend\n\nmodule AddBracesToFn = struct\n  (* Add 
braces to fn without braces *)\n\n  let mkIterator ~pos ~changed =\n    (* While iterating the AST, keep info on which structure item we are in.\n       Printing from the structure item, rather than the body of the function,\n       gives better local pretty printing *)\n    let currentStructureItem = ref None in\n\n    let structure_item (iterator : Ast_iterator.iterator)\n        (item : Parsetree.structure_item) =\n      let saved = !currentStructureItem in\n      currentStructureItem := Some item;\n      Ast_iterator.default_iterator.structure_item iterator item;\n      currentStructureItem := saved\n    in\n    let expr (iterator : Ast_iterator.iterator) (e : Parsetree.expression) =\n      let bracesAttribute =\n        let loc =\n          {\n            Location.none with\n            loc_start = Lexing.dummy_pos;\n            loc_end =\n              {\n                Lexing.dummy_pos with\n                pos_lnum = Lexing.dummy_pos.pos_lnum + 1 (* force line break *);\n              };\n          }\n        in\n        (Location.mkloc \"res.braces\" loc, Parsetree.PStr [])\n      in\n      let isFunction = function\n        | {Parsetree.pexp_desc = Pexp_fun _} -> true\n        | _ -> false\n      in\n      (match e.pexp_desc with\n      | Pexp_fun (_, _, _, bodyExpr)\n        when Loc.hasPos ~pos bodyExpr.pexp_loc\n             && isBracedExpr bodyExpr = false\n             && isFunction bodyExpr = false ->\n        bodyExpr.pexp_attributes <- bracesAttribute :: bodyExpr.pexp_attributes;\n        changed := !currentStructureItem\n      | _ -> ());\n      Ast_iterator.default_iterator.expr iterator e\n    in\n\n    {Ast_iterator.default_iterator with expr; structure_item}\n\n  let xform ~pos ~codeActions ~path ~printStructureItem structure =\n    let changed = ref None in\n    let iterator = mkIterator ~pos ~changed in\n    iterator.structure iterator structure;\n    match !changed with\n    | None -> ()\n    | Some newStructureItem ->\n      let range = 
Loc.rangeOfLoc newStructureItem.pstr_loc in\n      let newText = printStructureItem ~range newStructureItem in\n      let codeAction =\n        CodeActions.make ~title:\"Add braces to function\" ~kind:RefactorRewrite\n          ~uri:path ~newText ~range\n      in\n      codeActions := codeAction :: !codeActions\nend\n\nmodule AddTypeAnnotation = struct\n  (* Add type annotation to value declaration *)\n\n  type annotation = Plain | WithParens\n\n  let mkIterator ~pos ~result =\n    let processPattern ?(isUnlabeledOnlyArg = false) (pat : Parsetree.pattern) =\n      match pat.ppat_desc with\n      | Ppat_var {loc} when Loc.hasPos ~pos loc ->\n        result := Some (if isUnlabeledOnlyArg then WithParens else Plain)\n      | _ -> ()\n    in\n    let rec processFunction ~argNum (e : Parsetree.expression) =\n      match e.pexp_desc with\n      | Pexp_fun (argLabel, _, pat, e)\n      | Pexp_construct\n          ( {txt = Lident \"Function$\"},\n            Some {pexp_desc = Pexp_fun (argLabel, _, pat, e)} ) ->\n        let isUnlabeledOnlyArg =\n          argNum = 1 && argLabel = Nolabel\n          &&\n          match e.pexp_desc with\n          | Pexp_fun _ -> false\n          | _ -> true\n        in\n        processPattern ~isUnlabeledOnlyArg pat;\n        processFunction ~argNum:(argNum + 1) e\n      | _ -> ()\n    in\n    let structure_item (iterator : Ast_iterator.iterator)\n        (si : Parsetree.structure_item) =\n      match si.pstr_desc with\n      | Pstr_value (_recFlag, bindings) ->\n        let processBinding (vb : Parsetree.value_binding) =\n          (* Can't add a type annotation to a jsx component, or the compiler crashes *)\n          let isJsxComponent = Utils.isJsxComponent vb in\n          if not isJsxComponent then processPattern vb.pvb_pat;\n          processFunction vb.pvb_expr\n        in\n        bindings |> List.iter (processBinding ~argNum:1);\n        Ast_iterator.default_iterator.structure_item iterator si\n      | _ -> 
Ast_iterator.default_iterator.structure_item iterator si\n    in\n    {Ast_iterator.default_iterator with structure_item}\n\n  let xform ~path ~pos ~full ~structure ~codeActions ~debug =\n    let result = ref None in\n    let iterator = mkIterator ~pos ~result in\n    iterator.structure iterator structure;\n    match !result with\n    | None -> ()\n    | Some annotation -> (\n      match References.getLocItem ~full ~pos ~debug with\n      | None -> ()\n      | Some locItem -> (\n        match locItem.locType with\n        | Typed (name, typ, _) ->\n          let range, newText =\n            match annotation with\n            | Plain ->\n              ( Loc.rangeOfLoc {locItem.loc with loc_start = locItem.loc.loc_end},\n                \": \" ^ (typ |> Shared.typeToString) )\n            | WithParens ->\n              ( Loc.rangeOfLoc locItem.loc,\n                \"(\" ^ name ^ \": \" ^ (typ |> Shared.typeToString) ^ \")\" )\n          in\n          let codeAction =\n            CodeActions.make ~title:\"Add type annotation\" ~kind:RefactorRewrite\n              ~uri:path ~newText ~range\n          in\n          codeActions := codeAction :: !codeActions\n        | _ -> ()))\nend\n\nmodule ExpandCatchAllForVariants = struct\n  let mkIterator ~pos ~result =\n    let expr (iterator : Ast_iterator.iterator) (e : Parsetree.expression) =\n      (if e.pexp_loc |> Loc.hasPos ~pos then\n         match e.pexp_desc with\n         | Pexp_match (switchExpr, cases) -> (\n           let catchAllCase =\n             cases\n             |> List.find_opt (fun (c : Parsetree.case) ->\n                    match c with\n                    | {pc_lhs = {ppat_desc = Ppat_any}} -> true\n                    | _ -> false)\n           in\n           match catchAllCase with\n           | None -> ()\n           | Some catchAllCase ->\n             result := Some (switchExpr, catchAllCase, cases))\n         | _ -> ());\n      Ast_iterator.default_iterator.expr iterator e\n    in\n    
{Ast_iterator.default_iterator with expr}\n\n  let xform ~path ~pos ~full ~structure ~currentFile ~codeActions ~debug =\n    let result = ref None in\n    let iterator = mkIterator ~pos ~result in\n    iterator.structure iterator structure;\n    match !result with\n    | None -> ()\n    | Some (switchExpr, catchAllCase, cases) -> (\n      if Debug.verbose () then\n        print_endline\n          \"[codeAction - ExpandCatchAllForVariants] Found target switch\";\n      let rec findAllConstructorNames ?(mode : [`option | `default] = `default)\n          ?(constructorNames = []) (p : Parsetree.pattern) =\n        match p.ppat_desc with\n        | Ppat_construct ({txt = Lident \"Some\"}, Some payload)\n          when mode = `option ->\n          findAllConstructorNames ~mode ~constructorNames payload\n        | Ppat_construct ({txt}, _) -> Longident.last txt :: constructorNames\n        | Ppat_variant (name, _) -> name :: constructorNames\n        | Ppat_or (a, b) ->\n          findAllConstructorNames ~mode ~constructorNames a\n          @ findAllConstructorNames ~mode ~constructorNames b\n          @ constructorNames\n        | _ -> constructorNames\n      in\n      let getCurrentConstructorNames ?mode cases =\n        cases\n        |> List.map (fun (c : Parsetree.case) ->\n               if Option.is_some c.pc_guard then []\n               else findAllConstructorNames ?mode c.pc_lhs)\n        |> List.flatten\n      in\n      let currentConstructorNames = getCurrentConstructorNames cases in\n      match\n        switchExpr\n        |> extractTypeFromExpr ~debug ~path ~currentFile ~full\n             ~pos:(Pos.ofLexing switchExpr.pexp_loc.loc_end)\n      with\n      | Some (Tvariant {constructors}) ->\n        let missingConstructors =\n          constructors\n          |> List.filter (fun (c : SharedTypes.Constructor.t) ->\n                 currentConstructorNames |> List.mem c.cname.txt = false)\n        in\n        if List.length missingConstructors > 0 then\n      
    let newText =\n            missingConstructors\n            |> List.map (fun (c : SharedTypes.Constructor.t) ->\n                   c.cname.txt\n                   ^\n                   match c.args with\n                   | Args [] -> \"\"\n                   | Args _ | InlineRecord _ -> \"(_)\")\n            |> String.concat \" | \"\n          in\n          let range = Loc.rangeOfLoc catchAllCase.pc_lhs.ppat_loc in\n          let codeAction =\n            CodeActions.make ~title:\"Expand catch-all\" ~kind:RefactorRewrite\n              ~uri:path ~newText ~range\n          in\n          codeActions := codeAction :: !codeActions\n        else ()\n      | Some (Tpolyvariant {constructors}) ->\n        let missingConstructors =\n          constructors\n          |> List.filter (fun (c : SharedTypes.polyVariantConstructor) ->\n                 currentConstructorNames |> List.mem c.name = false)\n        in\n        if List.length missingConstructors > 0 then\n          let newText =\n            missingConstructors\n            |> List.map (fun (c : SharedTypes.polyVariantConstructor) ->\n                   Res_printer.polyvar_ident_to_string c.name\n                   ^\n                   match c.args with\n                   | [] -> \"\"\n                   | _ -> \"(_)\")\n            |> String.concat \" | \"\n          in\n          let range = Loc.rangeOfLoc catchAllCase.pc_lhs.ppat_loc in\n          let codeAction =\n            CodeActions.make ~title:\"Expand catch-all\" ~kind:RefactorRewrite\n              ~uri:path ~newText ~range\n          in\n          codeActions := codeAction :: !codeActions\n        else ()\n      | Some (Toption (env, innerType)) -> (\n        if Debug.verbose () then\n          print_endline\n            \"[codeAction - ExpandCatchAllForVariants] Found option type\";\n        let innerType =\n          match innerType with\n          | ExtractedType t -> Some t\n          | TypeExpr t -> (\n            match 
TypeUtils.extractType ~env ~package:full.package t with\n            | None -> None\n            | Some (t, _) -> Some t)\n        in\n        match innerType with\n        | Some ((Tvariant _ | Tpolyvariant _) as variant) ->\n          let currentConstructorNames =\n            getCurrentConstructorNames ~mode:`option cases\n          in\n          let hasNoneCase =\n            cases\n            |> List.exists (fun (c : Parsetree.case) ->\n                   match c.pc_lhs.ppat_desc with\n                   | Ppat_construct ({txt = Lident \"None\"}, _) -> true\n                   | _ -> false)\n          in\n          let missingConstructors =\n            match variant with\n            | Tvariant {constructors} ->\n              constructors\n              |> List.filter_map (fun (c : SharedTypes.Constructor.t) ->\n                     if currentConstructorNames |> List.mem c.cname.txt = false\n                     then\n                       Some\n                         ( c.cname.txt,\n                           match c.args with\n                           | Args [] -> false\n                           | _ -> true )\n                     else None)\n            | Tpolyvariant {constructors} ->\n              constructors\n              |> List.filter_map\n                   (fun (c : SharedTypes.polyVariantConstructor) ->\n                     if currentConstructorNames |> List.mem c.name = false then\n                       Some\n                         ( Res_printer.polyvar_ident_to_string c.name,\n                           match c.args with\n                           | [] -> false\n                           | _ -> true )\n                     else None)\n            | _ -> []\n          in\n          if List.length missingConstructors > 0 || not hasNoneCase then\n            let newText =\n              \"Some(\"\n              ^ (missingConstructors\n                |> List.map (fun (name, hasArgs) ->\n                       name ^ if hasArgs then 
\"(_)\" else \"\")\n                |> String.concat \" | \")\n              ^ \")\"\n            in\n            let newText =\n              if hasNoneCase then newText else newText ^ \" | None\"\n            in\n            let range = Loc.rangeOfLoc catchAllCase.pc_lhs.ppat_loc in\n            let codeAction =\n              CodeActions.make ~title:\"Expand catch-all\" ~kind:RefactorRewrite\n                ~uri:path ~newText ~range\n            in\n            codeActions := codeAction :: !codeActions\n          else ()\n        | _ -> ())\n      | _ -> ())\nend\n\nmodule ExhaustiveSwitch = struct\n  (* Expand expression to be an exhaustive switch of the underlying value *)\n  type posType = Single of Pos.t | Range of Pos.t * Pos.t\n\n  type completionType =\n    | Switch of {\n        pos: Pos.t;\n        switchExpr: Parsetree.expression;\n        completionExpr: Parsetree.expression;\n      }\n    | Selection of {expr: Parsetree.expression}\n\n  let mkIteratorSingle ~pos ~result =\n    let expr (iterator : Ast_iterator.iterator) (exp : Parsetree.expression) =\n      (match exp.pexp_desc with\n      | Pexp_ident _ when Loc.hasPosInclusiveEnd ~pos exp.pexp_loc ->\n        (* Exhaustive switch for having the cursor on an identifier. 
*)\n        result := Some (Selection {expr = exp})\n      | Pexp_match (completionExpr, [])\n        when Loc.hasPosInclusiveEnd ~pos exp.pexp_loc ->\n        (* No cases means there's no `|` yet in the switch, so `switch someExpr` *)\n        result := Some (Switch {pos; switchExpr = exp; completionExpr})\n      | _ -> ());\n      Ast_iterator.default_iterator.expr iterator exp\n    in\n    {Ast_iterator.default_iterator with expr}\n\n  let mkIteratorRange ~startPos ~endPos ~foundSelection =\n    let expr (iterator : Ast_iterator.iterator) (exp : Parsetree.expression) =\n      let expStartPos = Pos.ofLexing exp.pexp_loc.loc_start in\n      let expEndPos = Pos.ofLexing exp.pexp_loc.loc_end in\n\n      (if expStartPos = startPos then\n         match !foundSelection with\n         | None, endExpr -> foundSelection := (Some exp, endExpr)\n         | _ -> ());\n\n      (if expEndPos = endPos then\n         match !foundSelection with\n         | startExp, _ -> foundSelection := (startExp, Some exp));\n\n      Ast_iterator.default_iterator.expr iterator exp\n    in\n    {Ast_iterator.default_iterator with expr}\n\n  let xform ~printExpr ~path ~currentFile ~pos ~full ~structure ~codeActions\n      ~debug =\n    (* TODO: Adapt to '(' as leading/trailing character (skip one col, it's not included in the AST) *)\n    let result = ref None in\n    let foundSelection = ref (None, None) in\n    let iterator =\n      match pos with\n      | Single pos -> mkIteratorSingle ~pos ~result\n      | Range (startPos, endPos) ->\n        mkIteratorRange ~startPos ~endPos ~foundSelection\n    in\n    iterator.structure iterator structure;\n    (match !foundSelection with\n    | Some startExp, Some endExp ->\n      if debug then\n        Printf.printf \"found selection: %s -> %s\\n\"\n          (Loc.toString startExp.pexp_loc)\n          (Loc.toString endExp.pexp_loc);\n      result := Some (Selection {expr = startExp})\n    | _ -> ());\n    match !result with\n    | None -> ()\n    | 
Some (Selection {expr}) -> (\n      match\n        expr\n        |> extractTypeFromExpr ~debug ~path ~currentFile ~full\n             ~pos:(Pos.ofLexing expr.pexp_loc.loc_start)\n      with\n      | None -> ()\n      | Some extractedType -> (\n        let open TypeUtils.Codegen in\n        let exhaustiveSwitch =\n          extractedTypeToExhaustiveCases\n            ~env:(SharedTypes.QueryEnv.fromFile full.file)\n            ~full extractedType\n        in\n        match exhaustiveSwitch with\n        | None -> ()\n        | Some cases ->\n          let range = Loc.rangeOfLoc expr.pexp_loc in\n          let newText =\n            printExpr ~range {expr with pexp_desc = Pexp_match (expr, cases)}\n          in\n          let codeAction =\n            CodeActions.make ~title:\"Exhaustive switch\" ~kind:RefactorRewrite\n              ~uri:path ~newText ~range\n          in\n          codeActions := codeAction :: !codeActions))\n    | Some (Switch {switchExpr; completionExpr; pos}) -> (\n      match\n        completionExpr\n        |> extractTypeFromExpr ~debug ~path ~currentFile ~full ~pos\n      with\n      | None -> ()\n      | Some extractedType -> (\n        let open TypeUtils.Codegen in\n        let exhaustiveSwitch =\n          extractedTypeToExhaustiveCases\n            ~env:(SharedTypes.QueryEnv.fromFile full.file)\n            ~full extractedType\n        in\n        match exhaustiveSwitch with\n        | None -> ()\n        | Some cases ->\n          let range = Loc.rangeOfLoc switchExpr.pexp_loc in\n          let newText =\n            printExpr ~range\n              {switchExpr with pexp_desc = Pexp_match (completionExpr, cases)}\n          in\n          let codeAction =\n            CodeActions.make ~title:\"Exhaustive switch\" ~kind:RefactorRewrite\n              ~uri:path ~newText ~range\n          in\n          codeActions := codeAction :: !codeActions))\nend\n\nmodule AddDocTemplate = struct\n  let createTemplate () =\n    let docContent = [\"\\n\"; 
\"\\n\"] in\n    let expression =\n      Ast_helper.Exp.constant\n        (Parsetree.Pconst_string (String.concat \"\" docContent, None))\n    in\n    let structureItemDesc = Parsetree.Pstr_eval (expression, []) in\n    let structureItem = Ast_helper.Str.mk structureItemDesc in\n    let attrLoc =\n      {\n        Location.none with\n        loc_start = Lexing.dummy_pos;\n        loc_end =\n          {\n            Lexing.dummy_pos with\n            pos_lnum = Lexing.dummy_pos.pos_lnum (* force line break *);\n          };\n      }\n    in\n    (Location.mkloc \"res.doc\" attrLoc, Parsetree.PStr [structureItem])\n\n  module Interface = struct\n    let mkIterator ~pos ~result =\n      let signature_item (iterator : Ast_iterator.iterator)\n          (item : Parsetree.signature_item) =\n        match item.psig_desc with\n        | Psig_value value_description as r\n          when Loc.hasPos ~pos value_description.pval_loc\n               && ProcessAttributes.findDocAttribute\n                    value_description.pval_attributes\n                  = None ->\n          result := Some (r, item.psig_loc)\n        | Psig_type (_, hd :: _) as r\n          when Loc.hasPos ~pos hd.ptype_loc\n               && ProcessAttributes.findDocAttribute hd.ptype_attributes = None\n          ->\n          result := Some (r, item.psig_loc)\n        | Psig_module {pmd_name = {loc}} as r ->\n          if Loc.start loc = pos then result := Some (r, item.psig_loc)\n          else Ast_iterator.default_iterator.signature_item iterator item\n        | _ -> Ast_iterator.default_iterator.signature_item iterator item\n      in\n      {Ast_iterator.default_iterator with signature_item}\n\n    let processSigValue (valueDesc : Parsetree.value_description) loc =\n      let attr = createTemplate () in\n      let newValueBinding =\n        {valueDesc with pval_attributes = attr :: valueDesc.pval_attributes}\n      in\n      let signature_item_desc = Parsetree.Psig_value newValueBinding in\n      
Ast_helper.Sig.mk ~loc signature_item_desc\n\n    let processTypeDecl (typ : Parsetree.type_declaration) =\n      let attr = createTemplate () in\n      let newTypeDeclaration =\n        {typ with ptype_attributes = attr :: typ.ptype_attributes}\n      in\n      newTypeDeclaration\n\n    let processModDecl (modDecl : Parsetree.module_declaration) loc =\n      let attr = createTemplate () in\n      let newModDecl =\n        {modDecl with pmd_attributes = attr :: modDecl.pmd_attributes}\n      in\n      Ast_helper.Sig.mk ~loc (Parsetree.Psig_module newModDecl)\n\n    let xform ~path ~pos ~codeActions ~signature ~printSignatureItem =\n      let result = ref None in\n      let iterator = mkIterator ~pos ~result in\n      iterator.signature iterator signature;\n      match !result with\n      | Some (signatureItem, loc) -> (\n        let newSignatureItem =\n          match signatureItem with\n          | Psig_value value_desc ->\n            Some (processSigValue value_desc value_desc.pval_loc) (* Some loc *)\n          | Psig_type (flag, hd :: tl) ->\n            let newFirstTypeDecl = processTypeDecl hd in\n            Some\n              (Ast_helper.Sig.mk ~loc\n                 (Parsetree.Psig_type (flag, newFirstTypeDecl :: tl)))\n          | Psig_module modDecl -> Some (processModDecl modDecl loc)\n          | _ -> None\n        in\n\n        match newSignatureItem with\n        | Some signatureItem ->\n          let range = Loc.rangeOfLoc signatureItem.psig_loc in\n          let newText = printSignatureItem ~range signatureItem in\n          let codeAction =\n            CodeActions.make ~title:\"Add Documentation template\"\n              ~kind:RefactorRewrite ~uri:path ~newText ~range\n          in\n          codeActions := codeAction :: !codeActions\n        | None -> ())\n      | None -> ()\n  end\n\n  module Implementation = struct\n    let mkIterator ~pos ~result =\n      let structure_item (iterator : Ast_iterator.iterator)\n          (si : 
Parsetree.structure_item) =\n        match si.pstr_desc with\n        | Pstr_value (_, {pvb_pat = {ppat_loc}; pvb_attributes} :: _) as r\n          when Loc.hasPos ~pos ppat_loc\n               && ProcessAttributes.findDocAttribute pvb_attributes = None ->\n          result := Some (r, si.pstr_loc)\n        | Pstr_primitive value_description as r\n          when Loc.hasPos ~pos value_description.pval_loc\n               && ProcessAttributes.findDocAttribute\n                    value_description.pval_attributes\n                  = None ->\n          result := Some (r, si.pstr_loc)\n        | Pstr_module {pmb_name = {loc}} as r ->\n          if Loc.start loc = pos then result := Some (r, si.pstr_loc)\n          else Ast_iterator.default_iterator.structure_item iterator si\n        | Pstr_type (_, hd :: _) as r\n          when Loc.hasPos ~pos hd.ptype_loc\n               && ProcessAttributes.findDocAttribute hd.ptype_attributes = None\n          ->\n          result := Some (r, si.pstr_loc)\n        | _ -> Ast_iterator.default_iterator.structure_item iterator si\n      in\n      {Ast_iterator.default_iterator with structure_item}\n\n    let processValueBinding (valueBinding : Parsetree.value_binding) =\n      let attr = createTemplate () in\n      let newValueBinding =\n        {valueBinding with pvb_attributes = attr :: valueBinding.pvb_attributes}\n      in\n      newValueBinding\n\n    let processPrimitive (valueDesc : Parsetree.value_description) loc =\n      let attr = createTemplate () in\n      let newValueDesc =\n        {valueDesc with pval_attributes = attr :: valueDesc.pval_attributes}\n      in\n      Ast_helper.Str.primitive ~loc newValueDesc\n\n    let processModuleBinding (modBind : Parsetree.module_binding) loc =\n      let attr = createTemplate () in\n      let newModBinding =\n        {modBind with pmb_attributes = attr :: modBind.pmb_attributes}\n      in\n      Ast_helper.Str.module_ ~loc newModBinding\n\n    let xform ~pos ~codeActions ~path 
~printStructureItem ~structure =\n      let result = ref None in\n      let iterator = mkIterator ~pos ~result in\n      iterator.structure iterator structure;\n      match !result with\n      | None -> ()\n      | Some (structureItem, loc) -> (\n        let newStructureItem =\n          match structureItem with\n          | Pstr_value (flag, hd :: tl) ->\n            let newValueBinding = processValueBinding hd in\n            Some\n              (Ast_helper.Str.mk ~loc\n                 (Parsetree.Pstr_value (flag, newValueBinding :: tl)))\n          | Pstr_primitive valueDesc -> Some (processPrimitive valueDesc loc)\n          | Pstr_module modBind -> Some (processModuleBinding modBind loc)\n          | Pstr_type (flag, hd :: tl) ->\n            let newFirstTypeDecl = Interface.processTypeDecl hd in\n            Some\n              (Ast_helper.Str.mk ~loc\n                 (Parsetree.Pstr_type (flag, newFirstTypeDecl :: tl)))\n          | _ -> None\n        in\n\n        match newStructureItem with\n        | Some structureItem ->\n          let range = Loc.rangeOfLoc structureItem.pstr_loc in\n          let newText = printStructureItem ~range structureItem in\n          let codeAction =\n            CodeActions.make ~title:\"Add Documentation template\"\n              ~kind:RefactorRewrite ~uri:path ~newText ~range\n          in\n          codeActions := codeAction :: !codeActions\n        | None -> ())\n  end\nend\n\nlet parseImplementation ~filename =\n  let {Res_driver.parsetree = structure; comments} =\n    Res_driver.parsing_engine.parse_implementation ~for_printer:false ~filename\n  in\n  let filterComments ~loc comments =\n    (* Relevant comments in the range of the expression *)\n    let filter comment =\n      Loc.hasPos ~pos:(Loc.start (Res_comment.loc comment)) loc\n    in\n    comments |> List.filter filter\n  in\n  let printExpr ~(range : Protocol.range) (expr : Parsetree.expression) =\n    let structure = [Ast_helper.Str.eval ~loc:expr.pexp_loc 
expr] in\n    structure\n    |> Res_printer.print_implementation ~width:!Res_cli.ResClflags.width\n         ~comments:(comments |> filterComments ~loc:expr.pexp_loc)\n    |> Utils.indent range.start.character\n  in\n  let printStructureItem ~(range : Protocol.range)\n      (item : Parsetree.structure_item) =\n    let structure = [item] in\n    structure\n    |> Res_printer.print_implementation ~width:!Res_cli.ResClflags.width\n         ~comments:(comments |> filterComments ~loc:item.pstr_loc)\n    |> Utils.indent range.start.character\n  in\n  let printStandaloneStructure ~(loc : Location.t) structure =\n    structure\n    |> Res_printer.print_implementation ~width:!Res_cli.ResClflags.width\n         ~comments:(comments |> filterComments ~loc)\n  in\n  (structure, printExpr, printStructureItem, printStandaloneStructure)\n\nlet parseInterface ~filename =\n  let {Res_driver.parsetree = structure; comments} =\n    Res_driver.parsing_engine.parse_interface ~for_printer:false ~filename\n  in\n  let filterComments ~loc comments =\n    (* Relevant comments in the range of the expression *)\n    let filter comment =\n      Loc.hasPos ~pos:(Loc.start (Res_comment.loc comment)) loc\n    in\n    comments |> List.filter filter\n  in\n  let printSignatureItem ~(range : Protocol.range)\n      (item : Parsetree.signature_item) =\n    let signature_item = [item] in\n    signature_item\n    |> Res_printer.print_interface ~width:!Res_cli.ResClflags.width\n         ~comments:(comments |> filterComments ~loc:item.psig_loc)\n    |> Utils.indent range.start.character\n  in\n  (structure, printSignatureItem)\n\nlet extractCodeActions ~path ~startPos ~endPos ~currentFile ~debug =\n  let pos = startPos in\n  let codeActions = ref [] in\n  match Files.classifySourceFile currentFile with\n  | Res ->\n    let structure, printExpr, printStructureItem, printStandaloneStructure =\n      parseImplementation ~filename:currentFile\n    in\n    IfThenElse.xform ~pos ~codeActions ~printExpr ~path 
structure;\n    ModuleToFile.xform ~pos ~codeActions ~path ~printStandaloneStructure\n      structure;\n    AddBracesToFn.xform ~pos ~codeActions ~path ~printStructureItem structure;\n    AddDocTemplate.Implementation.xform ~pos ~codeActions ~path\n      ~printStructureItem ~structure;\n\n    (* This Code Action needs type info *)\n    let () =\n      match Cmt.loadFullCmtFromPath ~path with\n      | Some full ->\n        AddTypeAnnotation.xform ~path ~pos ~full ~structure ~codeActions ~debug;\n        ExpandCatchAllForVariants.xform ~path ~pos ~full ~structure ~codeActions\n          ~currentFile ~debug;\n        ExhaustiveSwitch.xform ~printExpr ~path\n          ~pos:\n            (if startPos = endPos then Single startPos\n             else Range (startPos, endPos))\n          ~full ~structure ~codeActions ~debug ~currentFile\n      | None -> ()\n    in\n\n    !codeActions\n  | Resi ->\n    let signature, printSignatureItem = parseInterface ~filename:currentFile in\n    AddDocTemplate.Interface.xform ~pos ~codeActions ~path ~signature\n      ~printSignatureItem;\n    !codeActions\n  | Other -> []\n"
  },
  {
    "path": "analysis/src/dune",
    "content": "(library\n (name analysis)\n (flags\n  (-w \"+6+26+27+32+33+39\"))\n (libraries unix str ext ml jsonlib syntax reanalyze))\n"
  },
  {
    "path": "analysis/tests/Makefile",
    "content": "SHELL = /bin/bash\n\nnode_modules/.bin/rescript:\n\tnpm install\n\nbuild: node_modules/.bin/rescript\n\tnode_modules/.bin/rescript\n\ntest: build\n\t./test.sh\n\nclean:\n\trm -r node_modules lib\n\n.DEFAULT_GOAL := test\n\n.PHONY: clean test\n"
  },
  {
    "path": "analysis/tests/bsconfig.json",
    "content": "{\n  \"name\": \"test\",\n  \"reanalyze\": {\n    \"analysis\": [\"dce\"]\n  },\n  \"sources\": [\n    {\n      \"dir\": \"src\",\n      \"subdirs\": true\n    }\n  ],\n  \"bsc-flags\": [\"-w -33-44-8\"],\n  \"bs-dependencies\": [\"@rescript/react\"],\n  \"jsx\": { \"version\": 3 }\n}\n"
  },
  {
    "path": "analysis/tests/not_compiled/Diagnostics.res",
    "content": "let = 1 + 1.0\nlet add = =2\nlett a = 2\n\n//^dia"
  },
  {
    "path": "analysis/tests/not_compiled/DocTemplate.res",
    "content": "type a = {a: int}\n//\t^xfm\n\ntype rec t = A | B\n// ^xfm\nand e = C\n@unboxed type name = Name(string)\n//             ^xfm\nlet a = 1\n//  ^xfm\nlet inc = x => x + 1\n//  ^xfm\nmodule T = {\n  //   ^xfm\n  let b = 1\n  //  ^xfm\n}\n@module(\"path\")\nexternal dirname: string => string = \"dirname\"\n//^xfm\n"
  },
  {
    "path": "analysis/tests/not_compiled/DocTemplate.resi",
    "content": "type a = {a: int}\n//\t^xfm\n\ntype rec t = A | B\n// ^xfm\nand e = C\n@unboxed type name = Name(string)\n//             ^xfm\nlet a: int\n//  ^xfm\nlet inc: int => int\n//  ^xfm\nmodule T: {\n  //   ^xfm\n  let b: int\n  //  ^xfm\n}\n@module(\"path\")\nexternal dirname: string => string = \"dirname\"\n//^xfm\n"
  },
  {
    "path": "analysis/tests/not_compiled/expected/Diagnostics.res.txt",
    "content": "[{\n  \"range\": {\"start\": {\"line\": 2, \"character\": 4}, \"end\": {\"line\": 2, \"character\": 6}},\n  \"message\": \"consecutive statements on a line must be separated by ';' or a newline\",\n  \"severity\": 1,\n  \"source\": \"ReScript\"\n}, {\n  \"range\": {\"start\": {\"line\": 1, \"character\": 9}, \"end\": {\"line\": 1, \"character\": 11}},\n  \"message\": \"This let-binding misses an expression\",\n  \"severity\": 1,\n  \"source\": \"ReScript\"\n}, {\n  \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}},\n  \"message\": \"I was expecting a name for this let-binding. Example: `let message = \\\"hello\\\"`\",\n  \"severity\": 1,\n  \"source\": \"ReScript\"\n}]\n\n"
  },
  {
    "path": "analysis/tests/not_compiled/expected/DocTemplate.res.txt",
    "content": "Xform not_compiled/DocTemplate.res 3:3\ncan't find module DocTemplate\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.res\n{\"start\": {\"line\": 3, \"character\": 0}, \"end\": {\"line\": 5, \"character\": 9}}\nnewText:\n<--here\n/**\n\n*/\ntype rec t = A | B\n// ^xfm\nand e = C\n\nXform not_compiled/DocTemplate.res 6:15\ncan't find module DocTemplate\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.res\n{\"start\": {\"line\": 6, \"character\": 0}, \"end\": {\"line\": 6, \"character\": 33}}\nnewText:\n<--here\n/**\n\n*/\n@unboxed\ntype name = Name(string)\n\nXform not_compiled/DocTemplate.res 8:4\ncan't find module DocTemplate\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.res\n{\"start\": {\"line\": 8, \"character\": 0}, \"end\": {\"line\": 8, \"character\": 9}}\nnewText:\n<--here\n/**\n\n*/\nlet a = 1\n\nXform not_compiled/DocTemplate.res 10:4\ncan't find module DocTemplate\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.res\n{\"start\": {\"line\": 10, \"character\": 0}, \"end\": {\"line\": 10, \"character\": 20}}\nnewText:\n<--here\n/**\n\n*/\nlet inc = x => x + 1\n\nXform not_compiled/DocTemplate.res 12:7\ncan't find module DocTemplate\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.res\n{\"start\": {\"line\": 12, \"character\": 0}, \"end\": {\"line\": 16, \"character\": 1}}\nnewText:\n<--here\n/**\n\n*/\nmodule T = {\n  //   ^xfm\n  let b = 1\n  //  ^xfm\n}\nHit: Extract local module \"T\" to file \"T.res\"\n\nCreateFile: T.res\n\nTextDocumentEdit: T.res\n{\"start\": {\"line\": 0, \"character\": 0}, \"end\": {\"line\": 0, \"character\": 0}}\nnewText:\n<--here\n//   ^xfm\nlet b = 1\n//  ^xfm\n\n\nTextDocumentEdit: not_compiled/DocTemplate.res\n{\"start\": {\"line\": 12, \"character\": 0}, \"end\": {\"line\": 16, \"character\": 1}}\nnewText:\n<--here\n\n\nXform not_compiled/DocTemplate.res 14:6\ncan't find module DocTemplate\nHit: Add Documentation 
template\n\nTextDocumentEdit: DocTemplate.res\n{\"start\": {\"line\": 14, \"character\": 2}, \"end\": {\"line\": 14, \"character\": 11}}\nnewText:\n  <--here\n  /**\n  \n  */\n  let b = 1\nHit: Extract local module \"T\" to file \"T.res\"\n\nCreateFile: T.res\n\nTextDocumentEdit: T.res\n{\"start\": {\"line\": 0, \"character\": 0}, \"end\": {\"line\": 0, \"character\": 0}}\nnewText:\n<--here\n//   ^xfm\nlet b = 1\n//  ^xfm\n\n\nTextDocumentEdit: not_compiled/DocTemplate.res\n{\"start\": {\"line\": 12, \"character\": 0}, \"end\": {\"line\": 16, \"character\": 1}}\nnewText:\n<--here\n\n\nXform not_compiled/DocTemplate.res 18:2\ncan't find module DocTemplate\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.res\n{\"start\": {\"line\": 17, \"character\": 0}, \"end\": {\"line\": 18, \"character\": 46}}\nnewText:\n<--here\n/**\n\n*/\n@module(\"path\")\nexternal dirname: string => string = \"dirname\"\n\n"
  },
  {
    "path": "analysis/tests/not_compiled/expected/DocTemplate.resi.txt",
    "content": "Xform not_compiled/DocTemplate.resi 3:3\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.resi\n{\"start\": {\"line\": 3, \"character\": 0}, \"end\": {\"line\": 5, \"character\": 9}}\nnewText:\n<--here\n/**\n\n*/\ntype rec t = A | B\n// ^xfm\nand e = C\n\nXform not_compiled/DocTemplate.resi 6:15\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.resi\n{\"start\": {\"line\": 6, \"character\": 0}, \"end\": {\"line\": 6, \"character\": 33}}\nnewText:\n<--here\n/**\n\n*/\n@unboxed\ntype name = Name(string)\n\nXform not_compiled/DocTemplate.resi 8:4\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.resi\n{\"start\": {\"line\": 8, \"character\": 0}, \"end\": {\"line\": 8, \"character\": 10}}\nnewText:\n<--here\n/**\n\n*/\nlet a: int\n\nXform not_compiled/DocTemplate.resi 10:4\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.resi\n{\"start\": {\"line\": 10, \"character\": 0}, \"end\": {\"line\": 10, \"character\": 19}}\nnewText:\n<--here\n/**\n\n*/\nlet inc: int => int\n\nXform not_compiled/DocTemplate.resi 12:7\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.resi\n{\"start\": {\"line\": 12, \"character\": 0}, \"end\": {\"line\": 16, \"character\": 1}}\nnewText:\n<--here\n/**\n\n*/\nmodule T: {\n  //   ^xfm\n  let b: int\n  //  ^xfm\n}\n\nXform not_compiled/DocTemplate.resi 14:6\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.resi\n{\"start\": {\"line\": 14, \"character\": 2}, \"end\": {\"line\": 14, \"character\": 12}}\nnewText:\n  <--here\n  /**\n  \n  */\n  let b: int\n\nXform not_compiled/DocTemplate.resi 18:2\nHit: Add Documentation template\n\nTextDocumentEdit: DocTemplate.resi\n{\"start\": {\"line\": 17, \"character\": 0}, \"end\": {\"line\": 18, \"character\": 46}}\nnewText:\n<--here\n/**\n\n*/\n@module(\"path\")\nexternal dirname: string => string = \"dirname\"\n\n"
  },
  {
    "path": "analysis/tests/package.json",
    "content": "{\n  \"scripts\": {\n    \"build\": \"rescript\",\n    \"clean\": \"rescript clean -with-deps\"\n  },\n  \"private\": true,\n  \"devDependencies\": {\n    \"@rescript/react\": \"0.12.0\"\n  },\n  \"dependencies\": {\n    \"rescript\": \"11.1.4\"\n  }\n}\n"
  },
  {
    "path": "analysis/tests/src/Auto.res",
    "content": "open! ShadowedBelt\n\nlet m = List.map\n//           ^hov\n"
  },
  {
    "path": "analysis/tests/src/BrokenParserCases.res",
    "content": "// --- BROKEN PARSER CASES ---\n// This below demonstrates an issue when what you're completing is the _last_ labelled argument, and there's a unit application after it. The parser wrongly merges the unit argument as the expression of the labelled argument assignment, where is should really let the trailing unit argument be, and set a %rescript.exprhole as the expression of the assignment, just like it normally does.\n// let _ = someFn(~isOff=, ())\n//                      ^com\n\n// This should parse as a single item tuple when in a pattern?\n// switch s { | (t) }\n//               ^com\n\n// Here the parser eats the arrow and considers the None in the expression part of the pattern.\n// let _ = switch x { | None |  => None }\n//                           ^com\n\n"
  },
  {
    "path": "analysis/tests/src/CodeLens.res",
    "content": "let add = (x, y) => x + y\n\nlet foo = (~age, ~name) => name ++ string_of_int(age)\n\nlet ff = (~opt1=0, ~a, ~b, (), ~opt2=0, (), ~c) => a + b + c + opt1 + opt2\n\nlet compFF = Completion.ff\n\n@react.component\nlet make = (~name) => React.string(name)\n//^cle\n"
  },
  {
    "path": "analysis/tests/src/Codemod.res",
    "content": "type someTyp = [#valid | #invalid]\n\nlet ff = (v1: someTyp, v2: someTyp) => {\n  let x = switch (v1, v2) {\n  //      ^c-a (#valid, #valid) | (#invalid, _)\n  | (#valid, #invalid) => ()\n  }\n  x\n}\n"
  },
  {
    "path": "analysis/tests/src/CompletableComponent.res",
    "content": "type status = On | Off\n\n@@jsxConfig({version: 4, mode: \"automatic\"})\n\n@react.component\nlet make = (~status: status, ~name: string) => {\n  ignore(status)\n  ignore(name)\n  React.null\n}\n"
  },
  {
    "path": "analysis/tests/src/CompletePrioritize1.res",
    "content": "module Test = {\n  type t = {name: int}\n  let add = (a: float) => a +. 1.0\n  let name = t => t.name\n}\nlet a: Test.t = {name: 4}\n// a->\n//    ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletePrioritize2.res",
    "content": "let ax = 4\nlet _ = ax\nlet ax = \"\"\nlet _ = ax\nmodule Test = {\n  type t = {name: int}\n  let add = (ax: t) => ax.name + 1\n}\nlet ax: Test.t = {name: 4}\n// ax->\n//     ^com\n\n// ax\n//   ^com\n"
  },
  {
    "path": "analysis/tests/src/Completion.res",
    "content": "module MyList = Belt.List\n// MyList.m\n//         ^com\n// Array.\n//       ^com\n// Array.m\n//        ^com\n\nmodule Dep: {\n  @ocaml.doc(\"Some doc comment\") @deprecated(\"Use customDouble instead\")\n  let customDouble: int => int\n} = {\n  let customDouble = foo => foo * 2\n}\n\n// let cc = Dep.c\n//               ^com\n\nmodule Lib = {\n  let foo = (~age, ~name) => name ++ string_of_int(age)\n  let next = (~number=0, ~year) => number + year\n}\n\n// let x = Lib.foo(~\n//                  ^com\n\n// [1,2,3]->m\n//           ^com\n\n// \"abc\"->toU\n//           ^com\n\nlet op = Some(3)\n\n// op->e\n//      ^com\n\nmodule ForAuto = {\n  type t = int\n  let abc = (x: t, _y: int) => x\n  let abd = (x: t, _y: int) => x\n}\n\nlet fa: ForAuto.t = 34\n// fa->\n//     ^com\n\n// \"hello\"->Js.Dict.u\n//                   ^com\n\nmodule O = {\n  module Comp = {\n    @react.component\n    let make = (~first=\"\", ~zoo=3, ~second) => React.string(first ++ second ++ string_of_int(zoo))\n  }\n}\n\nlet zzz = 11\n\n// let comp = <O.Comp second=z\n//                            ^com\n\n// let comp = <O.Comp z\n//                     ^com\n\n// @reac\n//      ^com\n\n// @react.\n//        ^com\n\n// let x = Lib.foo(~name, ~\n//                         ^com\n\n// let x = Lib.foo(~age, ~\n//                        ^com\n\n// let x = Lib.foo(~age={3+4}, ~\n//                              ^com\n\nlet _ = Lib.foo(\n  //~age,\n  //~\n  // ^com\n  ~age=3,\n  ~name=\"\",\n)\n\nlet someObj = {\"name\": \"a\", \"age\": 32}\n\n// someObj[\"a\n//           ^com\n\nlet nestedObj = {\"x\": {\"y\": {\"name\": \"a\", \"age\": 32}}}\n\n// nestedObj[\"x\"][\"y\"][\"\n//                      ^com\n\nlet o: Objects.objT = assert false\n// o[\"a\n//     ^com\n\ntype nestedObjT = {\"x\": Objects.nestedObjT}\nlet no: nestedObjT = assert false\n// no[\"x\"][\"y\"][\"\n//               ^com\n\ntype r = {x: int, y: string}\ntype rAlias = r\nlet r: rAlias = assert false\n// r.\n//   
^com\n\n// Objects.Rec.recordVal.\n//                       ^com\n\nlet myAmazingFunction = (x, y) => x + y\n\n@react.component\nlet make = () => {\n  // my\n  //   ^com\n  <> </>\n}\n\n// Objects.object[\"\n//                 ^com\n\nlet foo = {\n  let x = {\n    3\n  }\n  let y = 4\n  let add = (a, b) =>\n    switch a {\n    | 3 => a + b\n    | _ => 42\n    }\n  let z = assert false\n  let _ = z\n  module Inner = {\n    type z = int\n    let v = 44\n  }\n  exception MyException(int, string, float, array<Js.Json.t>)\n  let _ = raise(MyException(2, \"\", 1.0, []))\n  add((x: Inner.z), Inner.v + y)\n}\n\nexception MyOtherException\n\n// <O.\n//    ^com\n\ntype aa = {x: int, name: string}\ntype bb = {aa: aa, w: int}\nlet q: bb = assert false\n// q.aa.\n//      ^com\n// q.aa.n\n//       ^com\n\n// Lis\n//    ^com\n\nmodule WithChildren = {\n  @react.component\n  let make = (~children, ~name as _: string) => <jsx> children </jsx>\n}\n// <WithChildren\n//              ^com\n\n// type t = Js.n\n//              ^com\n// type t = ForAuto.\n//                  ^com\n\ntype z = Allo | Asterix | Baba\n\n// let q = As\n//           ^com\n\n// module M = For\n//               ^com\n\nmodule Private = {\n  %%private(let awr = 3)\n  let b = awr\n}\n\n// Private.\n//         ^com\n\nmodule Shadow = {\n  module A = {\n    let shadowed = 3\n  }\n  module B = {\n    let shadowed = \"\"\n  }\n}\n\n// sha\n//    ^com\nopen Shadow.A\n// sha\n//    ^com\nopen Shadow.B\n// sha\n//    ^com\nlet _ = shadowed\n\nmodule FAR = {\n  type forAutoRecord = {forAuto: ForAuto.t, something: option<int>}\n  let forAutoRecord: forAutoRecord = assert false\n}\n\nmodule FAO = {\n  let forAutoObject = {\"forAutoLabel\": FAR.forAutoRecord, \"age\": 32}\n}\n\n// FAO.forAutoObject[\"\n//                    ^com\n\n// FAO.forAutoObject[\"forAutoLabel\"].\n//                                   ^com\n\n// FAO.forAutoObject[\"forAutoLabel\"].forAuto->\n//                                            ^com\n\n// 
FAO.forAutoObject[\"forAutoLabel\"].forAuto->ForAuto.a\n//                                                     ^com\n\nlet name = \"abc\"\n// let template = `My name is ${na}`\n//                                ^com\n\nlet notHere = \"      \"\n//               ^com\n\nlet someR = Some(r)\nlet _ = switch someR {\n| Some(_z) => 1\n// + _z.\n//      ^com\n| _ => 3\n}\n\nmodule SomeLocalModule = {\n  let aa = 10\n  let bb = 20\n  type zz = int\n}\n\n// let _ = SomeLo\n//               ^com\n// type zz = SomeLocalModule.\n//                           ^com\n\ntype record = {\n  someProp: string,\n  //  otherProp: SomeLocalModule.\n  //                             ^com\n  thirdProp: string,\n}\n\ntype someLocalVariant = SomeLocalVariantItem\n\n// type t = SomeLocal\n//                   ^com\n\n// let _ : SomeLocal\n//                  ^com\n\nlet _foo = _world => {\n  // let _ = _w\n  //           ^com\n  3\n}\n\ntype someType = {hello: string}\n// type t = SomeType(s)\n//                    ^com\n\ntype funRecord = {\n  someFun: (~name: string) => unit,\n  stuff: string,\n}\n\nlet funRecord: funRecord = assert false\n\n// let _ = funRecord.someFun(~ )\n//                            ^com\n\nlet retAA = () => {x: 3, name: \"\"}\n\n// retAA().\n//         ^com\n\nlet ff = (~opt1=0, ~a, ~b, (), ~opt2=0, (), ~c) => a + b + c + opt1 + opt2\n\n// ff(~c=1)(~\n//           ^com\n\n// ff(~c=1)()(~\n//             ^com\n\n// ff(~c=1, ())(~\n//               ^com\n\n// ff(~c=1, (), ())(~\n//                   ^com\n\n// ff(~c=1, (), ~b=1)(~\n//                     ^com\n\n// ff(~opt2=1)(~\n//              ^com\n\ntype callback = (~a: int) => int\n\nlet withCallback: (~b: int) => callback = (~b) => { (); (~a) => a + b }\n\n// withCallback(~\n//               ^com\n\n// withCallback(~a)(~\n//                   ^com\n\n// withCallback(~b)(~\n//                   ^com\n\nlet _ =\n  <div\n    onClick={_ => {\n      ()\n      //        let _: Res\n      //                  ^com\n    
}}\n    name=\"abc\">\n    {React.string(name)}\n  </div>\n\n//let _ = switch Some(3) { | Some(thisIsNotSaved) -> this\n//                                                       ^com\n\nlet _ = <div name=\"\" />\n//            ^hov\n\n// let _ = FAO.forAutoObject[\"age\"]\n//               ^hov\n\n// let _ = ff(~opt1=3)\n//               ^hov\n\n// (let _ = ff(~opt1=3))\n//                     ^com\n\ntype v = This | That\n\nlet _ = x =>\n  switch x {\n  // | T\n  //    ^com\n  | _ => 4\n  }\n\nmodule AndThatOther = {\n  type v = And | ThatOther\n}\n\nlet _ = x =>\n  switch x {\n  // | AndThatOther.T\n  //                 ^com\n  | _ => 4\n  }\n\n// let _  = ` ${ForAuto.}`\n//                      ^com\n\n// let _  = `abc ${FAO.forAutoObject[\"\"}`\n//                                    ^com\n\n// let _ = `${funRecord.}`\n//                      ^com\n\nlet _ = _ => {\n  open Js\n  //  []->ma\n  //        ^com\n  ()\n}\n\nlet red = \"#ff0000\"\n\nlet header1 = `\n    color: ${red}; `\n//            ^com\n\nlet header2 = `\n    color: ${red};\n    background-color: ${red}; `\n//                       ^com\n\n// let _ = `color: ${r\n//                    ^com\n\nlet onClick = evt => {\n  // SomeLocalModule.\n  //                 ^com\n  evt->ReactEvent.Synthetic.preventDefault\n  // SomeLocalModule.\n  //                 ^com\n  Js.log(\"Hello\")\n}\n\n// let _ = 123->t\n//               ^com\n\n// let _ = 123.0->t\n//                 ^com\n\nlet ok = Ok(true)\n\n// ok->g\n//      ^com\n\ntype someRecordWithDeprecatedField = {\n  name: string,\n  @deprecated\n  someInt: int,\n  @deprecated(\"Use 'someInt'.\")\n  someFloat: float,\n}\n\nlet rWithDepr: someRecordWithDeprecatedField = {\n  name: \"hej\",\n  someInt: 12,\n  someFloat: 12.,\n}\n\n// Should show deprecated status\n// rWithDepr.so\n//             ^com\n\ntype someVariantWithDeprecated =\n  | @deprecated DoNotUseMe | UseMeInstead | @deprecated(\"Use 'UseMeInstead'\") AndNotMe\n\n// Should show deprecated 
status\n// let v: someVariantWithDeprecated =\n//                                   ^com\n\nlet uncurried = (. num) => num + 2\n\n// let _ = uncurried(. 1)->toS\n//                            ^com\n\ntype withUncurried = {\n  fn: (. int) => unit\n}\n\n// let f: withUncurried = {fn: }\n//                            ^com\n\n// let someRecord = { FAR. }\n//                        ^com"
  },
  {
    "path": "analysis/tests/src/CompletionAttributes.res",
    "content": "// @modu\n//      ^com\n\n// @module(\"\") external doStuff: t = \"test\"\n//          ^com\n\n// @@js\n//     ^com\n\n// @@jsxConfig({})\n//              ^com\n\n// @@jsxConfig({m})\n//               ^com\n\n// @@jsxConfig({module_: })\n//                       ^com\n\n// @@jsxConfig({module_: \"\", })\n//                           ^com\n\n// @module({}) external doStuff: t = \"default\"\n//          ^com\n\n// @module({with: }) external doStuff: t = \"default\"\n//               ^com\n\n// @module({with: {}}) external doStuff: t = \"default\"\n//                 ^com\n\n// @module({from: \"\" }) external doStuff: t = \"default\"\n//                 ^com\n\n// @module({from: }) external doStuff: t = \"default\"\n//               ^com\n\n// let dd = %t\n//            ^com\n\n"
  },
  {
    "path": "analysis/tests/src/CompletionDicts.res",
    "content": "// let dict = Js.Dict.fromArray([])\n//                               ^com\n\n// let dict = Js.Dict.fromArray([()])\n//                                ^com\n\n// let dict = Js.Dict.fromArray([(\"key\", )])\n//                                      ^com\n\n// ^in+\nlet dict = Js.Dict.fromArray([\n  (\"key\", true),\n  //  (\"key2\", )\n  //          ^com\n])\n// ^in-\n"
  },
  {
    "path": "analysis/tests/src/CompletionExpressions.res",
    "content": "let s = true\nlet f = Some([false])\n\n// switch (s, f) { | }\n//                  ^com\n\ntype otherRecord = {\n  someField: int,\n  otherField: string,\n}\n\ntype rec someRecord = {\n  age: int,\n  offline: bool,\n  online: option<bool>,\n  variant: someVariant,\n  polyvariant: somePolyVariant,\n  nested: option<otherRecord>,\n}\nand someVariant = One | Two | Three(int, string)\nand somePolyVariant = [#one | #two(bool) | #three(someRecord, bool)]\n\nlet fnTakingRecord = (r: someRecord) => {\n  ignore(r)\n}\n\n// let _ = fnTakingRecord({})\n//                         ^com\n\n// let _ = fnTakingRecord({n})\n//                          ^com\n\n// let _ = fnTakingRecord({offline: })\n//                                 ^com\n\n// let _ = fnTakingRecord({age: 123, })\n//                                  ^com\n\n// let _ = fnTakingRecord({age: 123,  offline: true})\n//                                   ^com\n\n// let _ = fnTakingRecord({age: 123, nested: })\n//                                          ^com\n\n// let _ = fnTakingRecord({age: 123, nested: {}})\n//                                            ^com\n\n// let _ = fnTakingRecord({age: 123, nested: Some({})})\n//                                                 ^com\n\n// let _ = fnTakingRecord({age: 123, variant: })\n//                                           ^com\n\n// let _ = fnTakingRecord({age: 123, variant: O })\n//                                             ^com\n\n// let _ = fnTakingRecord({age: 123, polyvariant: #three() })\n//                                                       ^com\n\n// let _ = fnTakingRecord({age: 123, polyvariant: #three({}, ) })\n//                                                          ^com\n\n// let _ = fnTakingRecord({age: 123, polyvariant: #three({}, t) })\n//                                                            ^com\n\nlet fnTakingArray = (arr: array<option<bool>>) => {\n  ignore(arr)\n}\n\n// let _ = fnTakingArray()\n//                       
^com\n\n// let _ = fnTakingArray([])\n//                        ^com\n\n// let _ = fnTakingArray(s)\n//                        ^com\n\n// let _ = fnTakingArray([Some()])\n//                             ^com\n\n// let _ = fnTakingArray([None, ])\n//                             ^com\n\n// let _ = fnTakingArray([None, , None])\n//                             ^com\n\nlet someBoolVar = true\n\n// let _ = fnTakingRecord({offline: so })\n//                                    ^com\n\nlet fnTakingOtherRecord = (r: otherRecord) => {\n  ignore(r)\n}\n\n// let _ = fnTakingOtherRecord({otherField: })\n//                                         ^com\n\ntype recordWithOptionalField = {\n  someField: int,\n  someOptField?: bool,\n}\n\nlet fnTakingRecordWithOptionalField = (r: recordWithOptionalField) => {\n  ignore(r)\n}\n\n// let _ = fnTakingRecordWithOptionalField({someOptField: })\n//                                                       ^com\ntype recordWithOptVariant = {someVariant: option<someVariant>}\n\nlet fnTakingRecordWithOptVariant = (r: recordWithOptVariant) => {\n  ignore(r)\n}\n\n// let _ = fnTakingRecordWithOptVariant({someVariant: })\n//                                                   ^com\n\ntype variantWithInlineRecord =\n  WithInlineRecord({someBoolField: bool, otherField: option<bool>, nestedRecord: otherRecord})\n\nlet fnTakingInlineRecord = (r: variantWithInlineRecord) => {\n  ignore(r)\n}\n\n// let _ = fnTakingInlineRecord(WithInlineRecord())\n//                                               ^com\n\n// let _ = fnTakingInlineRecord(WithInlineRecord({}))\n//                                                ^com\n\n// let _ = fnTakingInlineRecord(WithInlineRecord({s}))\n//                                                 ^com\n\n// let _ = fnTakingInlineRecord(WithInlineRecord({nestedRecord: }))\n//                                                             ^com\n\n// let _ = fnTakingInlineRecord(WithInlineRecord({nestedRecord: {} }))\n//                       
                                        ^com\n\ntype variant = First | Second(bool)\n\nlet fnTakingCallback = (\n  cb: unit => unit,\n  cb2: bool => unit,\n  cb3: ReactEvent.Mouse.t => unit,\n  cb4: (~on: bool, ~off: bool=?, variant) => int,\n  cb5: (bool, option<bool>, bool) => unit,\n  cb6: (~on: bool=?, ~off: bool=?, unit) => int,\n) => {\n  let _ = cb\n  let _ = cb2\n  let _ = cb3\n  let _ = cb4\n  let _ = cb5\n  let _ = cb6\n}\n\n// fnTakingCallback()\n//                  ^com\n\n// fnTakingCallback(a)\n//                   ^com\n\n// fnTakingCallback(a, )\n//                    ^com\n\n// fnTakingCallback(a, b, )\n//                       ^com\n\n// fnTakingCallback(a, b, c, )\n//                           ^com\n\n// fnTakingCallback(a, b, c, d, )\n//                              ^com\n\n// fnTakingCallback(a, b, c, d, e, )\n//                                ^com\n\nlet something = {\n  let second = true\n  let second2 = 1\n  ignore(second)\n  ignore(second2)\n  Js.log(s)\n  //      ^com\n}\n\nlet fff: recordWithOptionalField = {\n  someField: 123,\n  someOptField: true,\n}\n\nignore(fff)\n\n// fff.someOpt\n//            ^com\n\ntype someTyp = {test: bool}\n\nlet takesCb = cb => {\n  cb({test: true})\n}\n\n// takesCb()\n//         ^com\n\nmodule Environment = {\n  type t = {hello: bool}\n}\n\nlet takesCb2 = cb => {\n  cb({Environment.hello: true})\n}\n\n// takesCb2()\n//          ^com\n\ntype apiCallResult = {hi: bool}\n\nlet takesCb3 = cb => {\n  cb({hi: true})\n}\n\n// takesCb3()\n//          ^com\n\nlet takesCb4 = cb => {\n  cb(Some({hi: true}))\n}\n\n// takesCb4()\n//          ^com\n\nlet takesCb5 = cb => {\n  cb([Some({hi: true})])\n}\n\n// takesCb5()\n//          ^com\n\nmodule RecordSourceSelectorProxy = {\n  type t\n}\n\n@val\nexternal commitLocalUpdate: (~updater: RecordSourceSelectorProxy.t => unit) => unit =\n  \"commitLocalUpdate\"\n\n// commitLocalUpdate(~updater=)\n//                            ^com\n\nlet fnTakingAsyncCallback = (cb: unit => 
promise<unit>) => {\n  let _ = cb\n}\n\n// fnTakingAsyncCallback()\n//                       ^com\n\nlet arr = [\"hello\"]\n\n// arr->Belt.Array.map()\n//                     ^com\n\ntype exoticPolyvariant = [#\"some exotic\"]\n\nlet takesExotic = (e: exoticPolyvariant) => {\n  ignore(e)\n}\n\n// takesExotic()\n//             ^com\n\nlet fnTakingPolyVariant = (a: somePolyVariant) => {\n  ignore(a)\n}\n\n// fnTakingPolyVariant()\n//                     ^com\n\n// fnTakingPolyVariant(#)\n//                      ^com\n\n// fnTakingPolyVariant(#o)\n//                       ^com\n\n// fnTakingPolyVariant(o)\n//                      ^com\n\nmodule SuperInt: {\n  type t\n  let increment: (t, int) => t\n  let decrement: (t, int => int) => t\n  let make: int => t\n  let toInt: t => int\n} = {\n  type t = int\n  let increment = (t, num) => t + num\n  let decrement = (t, decrementer) => decrementer(t)\n  let make = t => t\n  let toInt = t => t\n}\n\ntype withIntLocal = {superInt: SuperInt.t}\n\n// let withInt: withIntLocal = {superInt: }\n//                                       ^com\n\n// CompletionSupport.makeTestHidden()\n//                                  ^com\n\nopen CompletionSupport\n// CompletionSupport.makeTestHidden()\n//                                  ^com\n\nlet mkStuff = (r: Js.Re.t) => {\n  ignore(r)\n  \"hello\"\n}\n\n// mkStuff()\n//         ^com\n\nmodule Money: {\n  type t\n\n  let zero: t\n\n  let nonTType: string\n\n  let make: unit => t\n\n  let fromInt: int => t\n\n  let plus: (t, t) => t\n} = {\n  type t = string\n\n  let zero: t = \"0\"\n\n  let nonTType = \"0\"\n\n  let make = (): t => zero\n\n  let fromInt = (int): t => int->Js.Int.toString\n\n  let plus = (m1, _) => m1\n}\n\nlet tArgCompletionTestFn = (_tVal: Money.t) => ()\n\n// tArgCompletionTestFn()\n//                      ^com\n\nlet labeledTArgCompletionTestFn = (~tVal as _: Money.t) => ()\n\n// labeledTArgCompletionTestFn(~tVal=)\n//                                   ^com\n\nlet someTyp: 
someTyp = {test: true}\n\n// switch someTyp. { | _ => () }\n//                ^com\n\ntype config = {\n  includeName: bool,\n  operator?: [#\"and\" | #or],\n  showMore: bool,\n}\n\ntype hookReturn = {name: string}\n\nlet hook = (config: config) => {\n  ignore(config)\n  {\n    name: \"tester\",\n  }\n}\n\nlet {name} = hook({\n  //                  ^com\n  // ope\n  //    ^com\n  includeName: true,\n  showMore: true,\n})\n\n// switch someTyp. { | }\n//                ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletionFromModule.res",
    "content": "module SomeModule = {\n  type t = {name: string}\n\n  @get external getName: t => string = \"name\"\n\n  let thisShouldNotBeCompletedFor = () => \"hi\"\n}\n\nlet n = {SomeModule.name: \"hello\"}\n\n// n.\n//   ^com\n\n@editor.completeFrom(CompletionFromModule.SomeOtherModule)\ntype typeOutsideModule = {nname: string}\n\nmodule SomeOtherModule = {\n  type t = typeOutsideModule\n\n  type irrelevantType = string\n\n  @get external getNName: t => string = \"nname\"\n  @get external getNName2: typeOutsideModule => string = \"nname\"\n  @get external getNName3: irrelevantType => string = \"nname\"\n\n  let thisShouldNotBeCompletedFor = () => \"hi\"\n}\n\nlet nn: SomeOtherModule.t = {nname: \"hello\"}\n\n// nn.\n//    ^com\n\n// @editor.completeFrom(SomeOthe) type typeOutsideModule = {nname: string}\n//                              ^com\n\nlet nnn: typeOutsideModule = {nname: \"hello\"}\n\n// nnn->\n//      ^com\n\nopen SomeOtherModule\n// nnn->\n//      ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletionFromModule2.res",
    "content": "// Used to check completions across files\n\n// CompletionFromModule.n.\n//                        ^com\n\n// CompletionFromModule.nn.\n//                         ^com\n\n// CompletionFromModule.nnn->\n//                           ^com\n\nopen CompletionFromModule.SomeOtherModule\n// CompletionFromModule.nnn->\n//                           ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletionFunctionArguments.res",
    "content": "let someFn = (~isOn, ~isOff=false, ()) => {\n  if isOn && !isOff {\n    \"on\"\n  } else {\n    \"off\"\n  }\n}\n\nlet tLocalVar = false\n\n// let _ = someFn(~isOn=)\n//                      ^com\n\n// let _ = someFn(~isOn=t)\n//                       ^com\n\n// let _ = someFn(~isOff=)\n//                       ^com\n\nlet _ = @res.partial someFn(\n  ~isOn={\n    // switch someFn(~isOn=)\n    //                     ^com\n    true\n  },\n)\n\nlet someOtherFn = (includeName, age, includeAge) => {\n  \"Hello\" ++\n  (includeName ? \" Some Name\" : \"\") ++\n  \", you are age \" ++\n  Belt.Int.toString(includeAge ? age : 0)\n}\n\n// let _ = someOtherFn(f)\n//                      ^com\n\nmodule OIncludeMeInCompletions = {}\n\ntype someVariant = One | Two | Three(int, string)\n\nlet someFnTakingVariant = (\n  configOpt: option<someVariant>,\n  ~configOpt2=One,\n  ~config: someVariant,\n) => {\n  ignore(config)\n  ignore(configOpt)\n  ignore(configOpt2)\n}\n\n// let _ = someFnTakingVariant(~config=)\n//                                     ^com\n\n// let _ = someFnTakingVariant(~config=O)\n//                                      ^com\n\n// let _ = someFnTakingVariant(So)\n//                               ^com\n\n// let _ = someFnTakingVariant(~configOpt2=O)\n//                                          ^com\n\n// let _ = someOtherFn()\n//                     ^com\n\n// let _ = someOtherFn(1, 2, )\n//                          ^com\n\n// let _ = 1->someOtherFn(1, t)\n//                            ^com\n\nlet fnTakingTuple = (arg: (int, int, float)) => {\n  ignore(arg)\n}\n\n// let _ = fnTakingTuple()\n//                       ^com\n\ntype someRecord = {\n  age: int,\n  offline: bool,\n  online: option<bool>,\n}\n\nlet fnTakingRecord = (r: someRecord) => {\n  ignore(r)\n}\n\n// let _ = fnTakingRecord({})\n//                         ^com\n\nmodule FineModule = {\n  type t = {\n    online: bool,\n    somethingElse: string,\n  }\n\n  let setToFalse = (t: t) => 
{\n    ...t,\n    online: false,\n  }\n}\n\nlet _ =\n  <div\n    onMouseDown={thisGetsBrokenLoc => {\n      let reassignedWorks = thisGetsBrokenLoc\n      ignore(reassignedWorks)\n      // thisGetsBrokenLoc->a\n      //                     ^com\n      // reassignedWorks->a\n      //                   ^com\n    }}\n  />\n\nlet fineModuleVal = {\n  FineModule.online: true,\n  somethingElse: \"\",\n}\n\n// makeItem(~changefreq=Monthly, ~lastmod=fineModuleVal->, ~priority=Low)\n//                                                       ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletionInferValues.res",
    "content": "let getBool = () => true\nlet getInt = () => 123\n\ntype someRecord = {name: string, age: int}\n\nlet someFnWithCallback = (cb: (~num: int, ~someRecord: someRecord, ~isOn: bool) => unit) => {\n  let _ = cb\n}\n\nlet reactEventFn = (cb: ReactEvent.Mouse.t => unit) => {\n  let _ = cb\n}\n\n@val external getSomeRecord: unit => someRecord = \"getSomeRecord\"\n\n// let x = 123; let aliased = x; aliased->t\n//                                         ^com\n\n// let x = getSomeRecord(); x.\n//                            ^com\n\n// let x = getSomeRecord(); let aliased = x; aliased.\n//                                                   ^com\n\n// someFnWithCallback((~someRecord, ~num, ~isOn) => someRecord.)\n//                                                             ^com\n\n// let aliasedFn = someFnWithCallback; aliasedFn((~num, ~someRecord, ~isOn) => someRecord.)\n//                                                                                        ^com\n\n// reactEventFn(event => { event->pr });\n//                                  ^com\n\nmodule Div = {\n  @react.component\n  let make = (~onMouseEnter: option<JsxEvent.Mouse.t => unit>=?) 
=> {\n    let _ = onMouseEnter\n    React.null\n  }\n}\n\n// let _ = <div onMouseEnter={event => { event->pr }} />\n//                                                ^com\n\n// let _ = <Div onMouseEnter={event => { event->pr }} />\n//                                                ^com\n\n// let _ = <div onMouseEnter={event => { let btn = event->JsxEvent.Mouse.button; btn->t }} />\n//                                                                                     ^com\n\n// let _ = <div onMouseEnter={event => { let btn = event->JsxEvent.Mouse.button->Belt.Int.toString; btn->spl }} />\n//                                                                                                          ^com\n\n// let _ = <div onMouseEnter={event => { let btn = event->JsxEvent.Mouse.button->Belt.Int.toString->Js.String2.split(\"/\"); btn->ma }} />\n//                                                                                                                                ^com\n\ntype someVariant = One | Two | Three(int, string)\ntype somePolyVariant = [#one | #two | #three(int, string)]\ntype someNestedRecord = {someRecord: someRecord}\n\ntype someRecordWithNestedStuff = {\n  things: string,\n  someInt: int,\n  srecord: someRecord,\n  nested: someNestedRecord,\n  someStuff: bool,\n}\n\ntype otherNestedRecord = {\n  someRecord: someRecord,\n  someTuple: (someVariant, int, somePolyVariant),\n  optRecord: option<someRecord>,\n}\n\n// Destructure record\n// let x: someRecordWithNestedStuff = Obj.magic(); let {srecord} = x; srecord.\n//                                                                            ^com\n\n// Follow aliased\n// let x: someRecordWithNestedStuff = Obj.magic(); let {nested: aliased} = x; aliased.\n//                                                                                    ^com\n\n// Follow nested record\n// let x: someRecordWithNestedStuff = Obj.magic(); let {srecord, nested: {someRecord}} = x; someRecord.\n//                              
                                                                       ^com\n\n// Destructure string\n// let x: someRecordWithNestedStuff = Obj.magic(); let {things} = x; things->slic\n//                                                                               ^com\n\n// Destructure int\n// let x: someRecordWithNestedStuff = Obj.magic(); let {someInt} = x; someInt->toS\n//                                                                                ^com\n\n// Follow tuples\n// let x: otherNestedRecord = Obj.magic(); let {someTuple} = x; let (_, someInt, _) = someTuple; someInt->toS\n//                                                                                                           ^com\n\n// Same as above, but follow in switch case\n// let x: otherNestedRecord; switch x { | {someTuple} => let (_, someInt, _) = someTuple; someInt->toS }\n//                                                                                                    ^com\n\n// Follow variant payloads\n// let x: otherNestedRecord; switch x { | {someTuple:(Three(_, str), _, _)} => str->slic }\n//                                                                                      ^com\n\n// Follow polyvariant payloads\n// let x: otherNestedRecord; switch x { | {someTuple:(_, _, #three(_, str))} => str->slic }\n//                                                                                       ^com\n\n// Follow options\n// let x: otherNestedRecord; switch x { | {optRecord:Some({name})} => name->slic }\n//                                                                              ^com\n\n// Infer top level return\n// let x = 123; switch x { | 123 => () | v => v->toSt }\n//                                                   ^com\n\nlet fnWithRecordCallback = (cb: someRecord => unit) => {\n  let _ = cb\n}\n\n// Complete pattern of function parameter\n// fnWithRecordCallback(({}) => {()})\n//                        ^com\n\nlet fn2 = (~cb: CompletionSupport.Nested.config => unit) 
=> {\n  let _ = cb\n}\n\n// fn2(~cb=({root}) => {root-> })\n//                            ^com\n\ntype sameFileRecord = {root: CompletionSupport.Test.t, test: int}\n\nlet fn3 = (~cb: sameFileRecord => unit) => {\n  let _ = cb\n}\n\n// fn3(~cb=({root}) => {root-> })\n//                            ^com\n\n// Handles pipe chains as input for switch\n// let x = 123; switch x->Belt.Int.toString { | }\n//                                             ^com\n\n// Handles pipe chains as input for switch\n// let x = 123; switch x->Belt.Int.toString->Js.String2.split(\"/\") { | }\n//                                                                    ^com\n\n// Regular completion works\n// let renderer = CompletionSupport2.makeRenderer(~prepare=() => \"hello\",~render=({support}) => {support.},())\n//                                                                                                       ^com\n\n// But pipe completion gets the wrong completion path. Should be `ReactDOM.Client.Root.t`, but ends up being `CompletionSupport2.ReactDOM.Client.Root.t`.\n// let renderer = CompletionSupport2.makeRenderer(~prepare=() => \"hello\",~render=({support:{root}}) => {root->},())\n//                                                                                                            ^com\n\n// Handles reusing the same name already in scope for bindings\nlet res = 1\n// switch res { | res => res }\n//                         ^hov\n"
  },
  {
    "path": "analysis/tests/src/CompletionJsx.res",
    "content": "let someString = \"hello\"\nignore(someString)\n\n// someString->st\n//               ^com\n\nmodule SomeComponent = {\n  @react.component\n  let make = (~someProp) => {\n    let someInt = 12\n    let someArr = [React.null]\n    ignore(someInt)\n    ignore(someArr)\n    // someString->st\n    //               ^com\n    <div>\n      {React.string(someProp)}\n      <div> {React.null} </div>\n      // {someString->st}\n      //                ^com\n      // {\"Some string\"->st}\n      //                   ^com\n      // {\"Some string\"->Js.String2.trim->st}\n      //                                    ^com\n      // {someInt->}\n      //           ^com\n      // {12->}\n      //      ^com\n      // {someArr->a}\n      //            ^com\n      // <di\n      //    ^com\n    </div>\n  }\n}\n\nmodule CompWithoutJsxPpx = {\n  type props = {name: string}\n\n  let make = ({name}) => {\n    ignore(name)\n    React.null\n  }\n}\n\n// <CompWithoutJsxPpx n\n//                     ^com\n\n// <SomeComponent someProp=>\n//                         ^com\n\n// <h1 hidd\n//         ^com\n\nmodule IntrinsicElementLowercase = {\n  type props = {name?: string, age?: int}\n\n  @module(\"react\")\n  external make: (@as(\"mesh\") _, props) => Jsx.element = \"createElement\"\n}\n\n// <IntrinsicElementLowercase\n//                            ^com\n\nmodule MultiPropComp = {\n  type time = Now | Later\n  @react.component\n  let make = (~name, ~age, ~time: time) => {\n    ignore(time)\n    name ++ age\n  }\n}\n\n// <MultiPropComp name=\"Hello\" time= age=\"35\"\n//                                  ^com\n\n// <MultiPropComp name=\"Hello\" time= age\n//                                  ^com\n\n// <MultiPropComp name time= age\n//                          ^com\n\nmodule Info = {\n  @react.component\n  let make = (~_type: [#warning | #info]) => {\n    React.string((_type :> string))\n  }\n}\n\n// <Info _type={#warning} >\n//                        ^com\n\n// let _ = 
<p>{\"\".s}</p>\n//                 ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletionJsxProps.res",
    "content": "// let _ = <CompletionSupport.TestComponent on=\n//                                             ^com\n\n// let _ = <CompletionSupport.TestComponent on=t\n//                                              ^com\n\n// let _ = <CompletionSupport.TestComponent test=T\n//                                                ^com\n\n// let _ = <CompletionSupport.TestComponent polyArg=\n//                                                  ^com\n\n// let _ = <CompletionSupport.TestComponent polyArg=#t\n//                                                    ^com\n\n// let _ = <div muted= />\n//                    ^com\n\n// let _ = <div onMouseEnter= />\n//                           ^com\n\n// Should wrap in {}\n// let _ = <CompletionSupport.TestComponent testArr=\n//                                                  ^com\n\n// Should not wrap in {}\n// let _ = <CompletionSupport.TestComponent testArr={[]}\n//                                                    ^com\n\nlet tsomeVar = #two\n\n// let _ = <CompletionSupport.TestComponent polyArg={}\n//                                                   ^com\n\n// let _ = <CompletionSupport.TestComponent on={t}\n//                                               ^com\n\n@@jsxConfig({version: 4, mode: \"automatic\"})\n\nmodule CompletableComponentLazy = {\n  let loadComponent = () => Js.import(CompletableComponent.make)\n  let make = React.lazy_(loadComponent)\n}\n\n// let _ = <CompletableComponentLazy status=\n//                                          ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletionMultipleEditorCompleteFrom.res",
    "content": "@@warning(\"-26\")\n@@warning(\"-27\")\n@@warning(\"-110\")\n\nmodule A = {\n  @editor.completeFrom(B) @editor.completeFrom(C)\n  type a\n}\n\nmodule B = {\n  let b = (a: A.a) => 1\n}\n\nmodule C = {\n  open A\n  let c = (a: a) => {'c'}\n}\n\nlet a : A.a = %todo\n// a.\n//   ^com\n// B.b and C.c should be completed"
  },
  {
    "path": "analysis/tests/src/CompletionObjects.res",
    "content": "let x = Some(true)\n\nlet _ff = {\n  \"one\": switch x {\n  | Some(true) => \"hello\"\n  // |\n  //   ^com\n  | _ => \"\"\n  },\n}\n"
  },
  {
    "path": "analysis/tests/src/CompletionPattern.res",
    "content": "let v = (true, Some(false), (true, true))\n\nlet _ = switch v {\n| (true, _, _) => 1\n| _ => 2\n}\n\n// switch v {\n//           ^com\n\n// switch v { | }\n//             ^com\n\n// switch v { | (t, _) }\n//                ^com\n\n// switch v { | (_, _, (f, _)) }\n//                       ^com\n\nlet x = true\n\n// switch x { |\n//             ^com\n\n// switch x { | t\n//               ^com\n\ntype nestedRecord = {nested: bool}\n\ntype rec someRecord = {\n  first: int,\n  second: (bool, option<someRecord>),\n  optThird: option<[#first | #second(someRecord)]>,\n  nest: nestedRecord,\n}\n\nlet f: someRecord = {\n  first: 123,\n  second: (true, None),\n  optThird: None,\n  nest: {nested: true},\n}\n\nlet z = (f, true)\nignore(z)\n\n// switch f { | }\n//             ^com\n\n// switch f { | {}}\n//               ^com\n\n// switch f { | {first,  , second }}\n//                      ^com\n\n// switch f { | {fi}}\n//                 ^com\n\n// switch z { | ({o}, _)}\n//                 ^com\n\n// switch f { | {nest: }}\n//                    ^com\n\n// switch f { | {nest: {}}}\n//                      ^com\n\nlet _ = switch f {\n| {first: 123, nest} =>\n  ()\n  // switch nest { | {}}\n  //                  ^com\n  nest.nested\n| _ => false\n}\n\n// let {} = f\n//      ^com\n\n// let {nest: {n}}} = f\n//              ^com\n\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\n\nlet z = Two(true)\nignore(z)\n\n// switch z { | Two()}\n//                  ^com\n\n// switch z { | Two(t)}\n//                   ^com\n\n// switch z { | Three({})}\n//                     ^com\n\n// switch z { | Three({}, t)}\n//                         ^com\n\ntype somePolyVariant = [#one | #two(bool) | #three(someRecord, bool)]\nlet b: somePolyVariant = #two(true)\nignore(b)\n\n// switch b { | #two()}\n//                   ^com\n\n// switch b { | #two(t)}\n//                    ^com\n\n// switch b { | #three({})}\n//                      ^com\n\n// switch b { | 
#three({}, t)}\n//                          ^com\n\nlet c: array<bool> = []\nignore(c)\n\n// switch c { | }\n//             ^com\n\n// switch c { | [] }\n//               ^com\n\nlet o = Some(true)\nignore(o)\n\n// switch o { | Some() }\n//                   ^com\n\ntype multiPayloadVariant = Test(int, bool, option<bool>, array<bool>)\n\nlet p = Test(1, true, Some(false), [])\n\n// switch p { | Test(1, )}\n//                     ^com\n\n// switch p { | Test(1, true, )}\n//                           ^com\n\n// switch p { | Test(1, , None)}\n//                     ^com\n\n// switch p { | Test(1, true, None, )}\n//                                 ^com\n\ntype multiPayloadPolyVariant = [#test(int, bool, option<bool>, array<bool>)]\n\nlet v: multiPayloadPolyVariant = #test(1, true, Some(false), [])\n\n// switch v { | #test(1, )}\n//                      ^com\n\n// switch v { | #test(1, true, )}\n//                            ^com\n\n// switch v { | #test(1, , None)}\n//                      ^com\n\n// switch v { | #test(1, true, None, )}\n//                                  ^com\n\nlet s = (true, Some(true), [false])\n\n// switch s { | () }\n//               ^com\n\n// switch s { | (true, ) }\n//                     ^com\n\n// switch s { | (true, , []) }\n//                    ^com\n\n// switch s { | (true, []) => () |  }\n//                                 ^com\n\n// switch s { | (true, []) => () | (true, , [])  }\n//                                       ^com\n\n// switch z { | One |  }\n//                   ^com\n\n// switch z { | One | Two(true | )  }\n//                              ^com\n\n// switch z { | One | Three({test: true}, true | )  }\n//                                              ^com\n\n// switch b { | #one | #two(true | )  }\n//                                ^com\n\n// switch b { | #one | #three({test: true}, true | )  }\n//                                                ^com\n\n// switch s { | (true, _, []) }\n//                      ^com\n\ntype 
recordWithFn = {someFn: unit => unit}\n\nlet ff: recordWithFn = {someFn: () => ()}\n\n// switch ff { | {someFn: }}\n//                       ^com\n\nlet xn: exn = Obj.magic()\n\n// switch xn { | }\n//              ^com\n\nlet getThing = async () => One\n\n// switch await getThing() { | }\n//                            ^com\n\nlet res: result<someVariant, somePolyVariant> = Ok(One)\n\n// switch res { | Ok() }\n//                   ^com\n\n// switch res { | Error() }\n//                      ^com\n\n@react.component\nlet make = (~thing: result<someVariant, unit>) => {\n  switch thing {\n  | Ok(Three(r, _)) =>\n    let _x = r\n  // switch r { | {first, }}\n  //                     ^com\n  | _ => ()\n  }\n}\n\ntype results = {\n  query: string,\n  nbHits: int,\n}\n\ntype hitsUse = {results: results, hits: array<string>}\n\nlet hitsUse = (): hitsUse => Obj.magic()\n\n// let {results: {query, nbHits}, } = hitsUse()\n//                               ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletionPipeChain.res",
    "content": "module Integer: {\n  type t\n  let increment: (t, int) => t\n  let decrement: (t, int => int) => t\n  let make: int => t\n  let toInt: t => int\n} = {\n  type t = int\n  let increment = (t, num) => t + num\n  let decrement = (t, decrementer) => decrementer(t)\n  let make = t => t\n  let toInt = t => t\n}\n\nmodule SuperFloat: {\n  type t\n  let fromInteger: Integer.t => t\n  let toInteger: t => Integer.t\n} = {\n  type t = float\n  let fromInteger = t => t->Integer.toInt->Belt.Float.fromInt\n  let toInteger = t => t->Belt.Float.toInt->Integer.make\n}\n\nlet toFlt = i => i->SuperFloat.fromInteger\nlet int = Integer.make(1)\nlet f = int->Integer.increment(2)\n// let _ = int->\n//              ^com\n\n// let _ = int->toFlt->\n//                     ^com\n\n// let _ = int->Integer.increment(2)->\n//                                    ^com\n\n// let _ = Integer.increment(int, 2)->\n//                                    ^com\n\n// let _ = int->Integer.decrement(t => t - 1)->\n//                                             ^com\n\n// let _ = int->Integer.increment(2)->Integer.decrement(t => t - 1)->\n//                                                                   ^com\n\n// let _ = int->Integer.increment(2)->SuperFloat.fromInteger->\n//                                                            ^com\n\n// let _ = int->Integer.increment(2)->SuperFloat.fromInteger->t\n//                                                             ^com\n\n// let _ = int->Integer.increment(2)->Integer.toInt->CompletionSupport.Test.make->\n//                                                                                ^com\n\n// let _ = CompletionSupport.Test.make(1)->CompletionSupport.Test.addSelf(2)->\n//                                                                            ^com\n\nlet _ = [123]->Js.Array2.forEach(v => Js.log(v))\n// ->\n//   ^com\n\nlet _ = [123]->Belt.Array.reduce(0, (acc, curr) => acc + curr)\n// ->t\n//    ^com\n\ntype aliasedType = 
CompletionSupport.Test.t\n\nlet aliased: aliasedType = {name: 123}\nlet notAliased: CompletionSupport.Test.t = {name: 123}\n\n// aliased->\n//          ^com\n\n// notAliased->\n//             ^com\n\nlet renderer = CompletionSupport2.makeRenderer(\n  ~prepare=() => \"hello\",\n  ~render=props => {\n    ignore(props)\n\n    // Doesn't work when tried through this chain. Presumably because it now goes through multiple different files.\n    // props.support.root->ren\n    //                        ^com\n    let root = props.support.root\n    ignore(root)\n\n    // Works here though when it's lifted out. Probably because it only goes through one file...?\n    // root->ren\n    //          ^com\n    React.null\n  },\n  (),\n)\n\n// Console.log(int->)\n//                  ^com\n\n// Console.log(int->t)\n//                   ^com\n\nlet r = %re(\"/t/g\")\n\n// r->la\n//      ^com\n\nmodule Xyz = {\n  type xx = One\n  let do = (_: xx) => \"\"\n}\n\nlet xx = Xyz.One\n// xx->\n//     ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletionPipeProperty.res",
    "content": "module ObservablePoint = {\n  type op = {\n    mutable x: int,\n    mutable y: int,\n  }\n\n  @send\n  external setBoth: (op, float) => unit = \"set\"\n\n  @send\n  external set: (op, float, float) => unit = \"set\"\n}\n\nmodule Sprite = {\n  type s = {\n    anchor: ObservablePoint.op,\n  }\n}\n\nlet sprite : Sprite.s = %todo\n\n// sprite.anchor.\n//               ^com"
  },
  {
    "path": "analysis/tests/src/CompletionPipeSubmodules.res",
    "content": "module A = {\n  module B1 = {\n    type b1 = B1\n    let xx = B1\n    let d = (_: b1) => \"\"\n  }\n  module B2 = {\n    let yy = 20\n  }\n  type t2 = {v: B1.b1}\n  let x = {v: B1.B1}\n}\n\n// let _ = A.B1.xx->\n//                  ^com\n// b1 seen from B1 is A.B1.b1\n\n// let _ = A.x.v->\n//                ^com\n// B1.b1 seen from A  is A.B1.b1\n\nmodule C = {\n  type t = C\n  let do = (_: t) => \"\"\n}\n\nmodule D = {\n  module C2 = {\n    type t2 = C2\n    let do = (_: t2) => \"\"\n  }\n\n  type d = {v: C.t, v2: C2.t2}\n  let d = {v: C.C, v2: C2.C2}\n}\n\nmodule E = {\n  type e = {v: D.d}\n  let e = {v: D.d}\n}\n\n// let _ = E.e.v.v->\n//                  ^com\n// C.t seen from D is C.t\n\n// let _ = E.e.v.v2->\n//                   ^com\n// C2.t2 seen from D is D.C2.t2\n"
  },
  {
    "path": "analysis/tests/src/CompletionResolve.res",
    "content": "// ^cre Belt_Array\n\n// ^cre ModuleStuff\n\n"
  },
  {
    "path": "analysis/tests/src/CompletionSupport.res",
    "content": "module Test = {\n  type t = {name: int}\n  let add = (ax: t) => ax.name + 1\n  let addSelf = (ax: t) => {name: ax.name + 1}\n  let make = (name: int): t => {name: name}\n}\n\nmodule TestHidden: {\n  type t\n  let make: int => t\n  let self: t => t\n} = {\n  type t = {name: int}\n  let make = (name: int): t => {name: name}\n  let self = t => t\n}\n\ntype testVariant = One | Two | Three(int)\n\nmodule TestComponent = {\n  @react.component\n  let make = (\n    ~on: bool,\n    ~test: testVariant,\n    ~testArr: array<testVariant>,\n    ~polyArg: option<[#one | #two | #two2 | #three(int, bool)]>=?,\n  ) => {\n    ignore(on)\n    ignore(test)\n    ignore(testArr)\n    ignore(polyArg)\n    React.null\n  }\n}\n\nmodule Nested = {\n  type config = {root: ReactDOM.Client.Root.t}\n}\n\ntype options = {test: TestHidden.t}\n\nlet makeTestHidden = t => TestHidden.self(t)\n"
  },
  {
    "path": "analysis/tests/src/CompletionSupport2.res",
    "content": "module Internal = {\n  type prepareProps<'prepared> = {\n    someName: string,\n    support: CompletionSupport.Nested.config,\n    prepared: 'prepared,\n  }\n}\n\nlet makeRenderer = (\n  ~prepare: unit => 'prepared,\n  ~render: Internal.prepareProps<'prepared> => React.element,\n  (),\n) => {\n  let _ = prepare\n  let _ = render\n  \"123\"\n}\n"
  },
  {
    "path": "analysis/tests/src/CompletionTypeAnnotation.res",
    "content": "type someRecord = {\n  age: int,\n  name: string,\n}\n\ntype someVariant = One | Two(bool)\n\ntype somePolyVariant = [#one | #two(bool)]\n\n// let x: someRecord =\n//                    ^com\n\n// let x: someRecord = {}\n//                      ^com\n\n// let x: someVariant =\n//                     ^com\n\n// let x: someVariant = O\n//                       ^com\n\n// let x: somePolyVariant =\n//                         ^com\n\n// let x: somePolyVariant = #o\n//                            ^com\n\ntype someFunc = (int, string) => bool\n\n// let x: someFunc =\n//                  ^com\n\ntype someTuple = (bool, option<bool>)\n\n// let x: someTuple =\n//                   ^com\n\n// let x: someTuple = (true, )\n//                          ^com\n\n// let x: option<someVariant> =\n//                             ^com\n\n// let x: option<someVariant> = Some()\n//                                   ^com\n\n// let x: array<someVariant> =\n//                            ^com\n\n// let x: array<someVariant> = []\n//                              ^com\n\n// let x: array<option<someVariant>> =\n//                                    ^com\n\n// let x: option<array<someVariant>> = Some([])\n//                                           ^com\n"
  },
  {
    "path": "analysis/tests/src/CompletionTypeT.res",
    "content": "let date = Some(Js.Date.make())\n\ntype withDate = {date: Js.Date.t}\n\n// let x = switch date { | }\n//                        ^com\n\n// let x: withDate = {date: }\n//                         ^com\n"
  },
  {
    "path": "analysis/tests/src/Component.res",
    "content": "@react.component\nlet make = () => React.null\n"
  },
  {
    "path": "analysis/tests/src/Component.resi",
    "content": "@react.component\nlet make: unit => React.element\n"
  },
  {
    "path": "analysis/tests/src/CreateInterface.res",
    "content": "// ^int\n\ntype r = {name: string, age: int}\n\nlet add = (~x, ~y) => x + y\n\n@react.component\nlet make = (~name) => React.string(name)\n\nmodule Other = {\n  @react.component\n  let otherComponentName = (~name) => React.string(name)\n}\n\nmodule Mod = {\n  @react.component\n  let make = (~name) => React.string(name)\n}\n\nmodule type ModTyp = {\n  @react.component\n  let make: (~name: string) => React.element\n}\n\n@module(\"path\") external dirname: string => string = \"dirname\"\n\n@module(\"path\") @variadic\nexternal join: array<string> => string = \"join\"\n\n@val\nexternal padLeft: (\n  string,\n  @unwrap\n  [\n    | #Str(string)\n    | #Int(int)\n  ],\n) => string = \"padLeft\"\n\n@inline\nlet f1 = 10\n\n@inline let f2 = \"some string\"\n\n@genType @inline\nlet f3 = 10\n\n@genType @inline\nlet f4 = \"some string\"\n\n@genType @inline let f5 = 5.5\n\nmodule RFS = {\n  @module(\"fs\")\n  external readFileSync: (\n    ~name: string,\n    @string\n    [\n      | #utf8\n      | @as(\"ascii\") #useAscii\n    ],\n  ) => string = \"readFileSync\"\n}\n\nmodule Functor = () => {\n  @react.component\n  let make = () => React.null\n}\n\nmodule type FT = {\n  module Functor: (\n    X: {\n      let a: int\n      @react.component\n      let make: (~name: string) => React.element\n      let b: int\n    },\n    Y: ModTyp,\n  ) =>\n  {\n    @react.component\n    let make: (~name: string) => React.element\n  }\n}\n\nmodule NormaList = List\nopen Belt\nmodule BeltList = List\n\nmodule type MT2 = ModTyp\n\nmodule rec RM: ModTyp = D\nand D: ModTyp = Mod\n\nmodule type OptT = {\n  @react.component\n  let withOpt1: (~x: int=?, ~y: int) => int\n\n  module type Opt2 = {\n    @react.component\n    let withOpt2: (~x: int=?, ~y: int) => int\n  }\n\n  module type Opt3 = {\n    @react.component\n    let withOpt3: (~x: option<int>, ~y: int) => int\n  }\n}\n\nmodule Opt = {\n  @react.component\n  let withOpt1 = (~x=3, ~y) => x + y\n\n  module Opt2 = {\n    
@react.component\n    let withOpt2 = (~x: option<int>=?, ~y: int) =>\n      switch x {\n      | None => 0\n      | Some(x) => x\n      } +\n      y\n  }\n  module type Opt2 = module type of Opt2\n\n  module Opt3 = {\n    @react.component\n    let withOpt3 = (~x: option<int>, ~y: int) =>\n      switch x {\n      | None => 0\n      | Some(x) => x\n      } +\n      y\n  }\n  module type Opt3 = module type of Opt3\n}\n\nmodule Opt2: OptT = Opt\nmodule Opt3 = Opt\n\nmodule Memo = {\n  @react.component\n  let make = (~name) => React.string(name)\n\n  let make = React.memo(make)\n}\n"
  },
  {
    "path": "analysis/tests/src/Cross.res",
    "content": "let crossRef = References.x\n//               ^ref\n\nlet crossRef2 = References.x\n\nmodule Ref = References\n\nlet crossRef3 = References.x\n\nlet crossRefWithInterface = ReferencesWithInterface.x\n//                             ^ref\n\nlet crossRefWithInterface2 = ReferencesWithInterface.x\n\nmodule RefWithInterface = ReferencesWithInterface\n\nlet crossRefWithInterface3 = ReferencesWithInterface.x\n\nlet _ = RenameWithInterface.x\n//           ^ren RenameWithInterfacePrime\n\nlet _ = RenameWithInterface.x\n//                          ^ren xPrime\n\nlet typeDef = {TypeDefinition.item: \"foobar\"}\n//   ^typ\n\nlet _ = DefinitionWithInterface.y\n//                              ^def\n\ntype defT = DefinitionWithInterface.t\n//                                  ^def\n\ntype defT2 = DefinitionWithInterface.t\n//                                   ^typ\n\n// DefinitionWithInterface.a\n//                          ^com\n\nlet yy = DefinitionWithInterface.Inner.y\n//                                     ^def"
  },
  {
    "path": "analysis/tests/src/Dce.res",
    "content": "// Note: in test mode this only reports on src/dce\n\n// ^dce\n\n"
  },
  {
    "path": "analysis/tests/src/Debug.res",
    "content": "// turn on by adding this comment // ^db+\n\nlet _ = ShadowedBelt.List.map\n//                         ^def\n\nopen Js\nmodule Before = {\n  open Belt\n  let _ = Id.getCmpInternal\n}\nmodule Inner = {\n  // eqN\n  //    ^com\n  open List\n  let _ = map\n}\n// ^db-\n"
  },
  {
    "path": "analysis/tests/src/Definition.res",
    "content": "let xx = 10\n\nlet y = xx\n//      ^def\n\nmodule Inner = {\n  type tInner = int\n  let vInner = 34\n}\n\ntype typeInner = Inner.tInner\n//                     ^def\n\n// open Belt\nlet m1 = List.map\n//            ^hov\n\nopen ShadowedBelt\nlet m2 = List.map\n//            ^hov\n\nlet uncurried = (. x, y) => x + y\n\nuncurried(. 3, 12)->ignore\n// ^hov\n\nuncurried(. 3, 12)->ignore\n// ^def"
  },
  {
    "path": "analysis/tests/src/DefinitionWithInterface.res",
    "content": "let y = 4\n//  ^def\n\ntype t = int\n\nlet aabbcc = 3\nlet _ = aabbcc\n\nmodule Inner = {\n  let y = 100\n  //  ^def\n}\n"
  },
  {
    "path": "analysis/tests/src/DefinitionWithInterface.resi",
    "content": "let y: int\n//  ^def\n\ntype t\n\nmodule Inner: {\n  let y: int\n  //  ^def\n}\n"
  },
  {
    "path": "analysis/tests/src/Destructuring.res",
    "content": "type x = {name: string, age: int}\n\nlet x = {name: \"123\", age: 12}\n\nlet {name, } = x\n//         ^com\n\n// let {} = x\n//      ^com\n\nlet f = (x: x) => {\n  let {name, } = x\n  //         ^com\n  name\n}\n\nlet f2 = (x: x) => {\n  // let {} = x\n  //      ^com\n  ignore(x)\n}\n\ntype recordWithOptField = {\n  someField: int,\n  someOptField?: bool\n}\n\nlet x: recordWithOptField = {\n  someField: 123\n}\n\n// let {} = x\n//      ^com"
  },
  {
    "path": "analysis/tests/src/Div.res",
    "content": "let q = <div />\n//        ^hov\n\n// <div dangerous\n//               ^com\n"
  },
  {
    "path": "analysis/tests/src/DocComments.res",
    "content": "@ns.doc(\"  Doc comment with a triple-backquote example\n  \n  ```res example\n    let a = 10\n    /*\n     * stuff\n     */\n  ```\n\")\nlet docComment1 = 12\n//       ^hov\n\n/**\n  Doc comment with a triple-backquote example\n  \n  ```res example\n    let a = 10\n    /*\n     * stuff\n     */\n  ```\n*/\nlet docComment2 = 12\n//    ^hov\n\n\n@ns.doc(\"  Doc comment with a triple-backquote example\n  \n  ```res example\n    let a = 10\n    let b = 20\n  ```\n\")\nlet docCommentNoNested1 = 12\n//       ^hov\n\n/**\n  Doc comment with a triple-backquote example\n  \n  ```res example\n    let a = 10\n    let b = 20\n  ```\n*/\nlet docCommentNoNested2 = 12\n//    ^hov\n\n@res.doc(\"New doc comment format\")\nlet newDoc = 10\n//   ^hov"
  },
  {
    "path": "analysis/tests/src/DocumentSymbol.res",
    "content": "module MyList = Belt.List\n\nmodule Dep: {\n  @ocaml.doc(\"Some doc comment\") @deprecated(\"Use customDouble instead\")\n  let customDouble: int => int\n} = {\n  let customDouble = foo => foo * 2\n}\n\nmodule Lib = {\n  let foo = (~age, ~name) => name ++ string_of_int(age)\n  let next = (~number=0, ~year) => number + year\n}\n\nlet op = Some(3)\n\nmodule ForAuto = {\n  type t = int\n  let abc = (x: t, _y: int) => x\n  let abd = (x: t, _y: int) => x\n}\n\nlet fa: ForAuto.t = 34\n\nmodule O = {\n  module Comp = {\n    @react.component\n    let make = (~first=\"\", ~zoo=3, ~second) => React.string(first ++ second ++ string_of_int(zoo))\n  }\n}\n\nlet zzz = 11\n\n//^doc\n"
  },
  {
    "path": "analysis/tests/src/DotPipeCompletionSpec.res",
    "content": "//\nmodule SomeModule = {\n  type t = {name: string}\n\n  @get external getName: t => string = \"name\"\n  @send\n  external withUnlabelledArgumentNotFirst: (~name: string=?, t) => unit =\n    \"withUnlabelledArgumentNotFirst\"\n\n  let thisShouldNotBeCompletedFor = () => \"hi\"\n}\n\nlet n = {SomeModule.name: \"hello\"}\n\n// Type from inside of a module\n// n.\n//   ^com\n\n@editor.completeFrom(DotPipeCompletionSpec.SomeOtherModule)\ntype typeOutsideModule = {nname: string}\n\nlet doWithTypeOutsideModule = (_: typeOutsideModule) => \"\"\n\nmodule CompleteFromThisToo = {\n  external a: typeOutsideModule => string = \"a\"\n  external b: unit => typeOutsideModule = \"b\"\n}\n\nmodule SomeOtherModule = {\n  @editor.completeFrom(DotPipeCompletionSpec.CompleteFromThisToo)\n  type t = typeOutsideModule\n\n  type irrelevantType = string\n\n  @get external getNName: t => string = \"nname\"\n  @get external getNName2: typeOutsideModule => string = \"nname\"\n  @get external getNName3: irrelevantType => string = \"nname\"\n\n  let thisShouldNotBeCompletedFor = () => \"hi\"\n}\n\nlet nn: SomeOtherModule.t = {nname: \"hello\"}\n\n// Type from module but that's an alias\n// nn.\n//    ^com\n\nmodule A = {\n  @editor.completeFrom(B)\n  type a\n\n  external withA: a => unit = \"withA\"\n  external make: unit => a = \"makeA\"\n}\n\nmodule B = {\n  let b = (_a: A.a) => 1\n}\n\nexternal a: A.a = \"a\"\n\n// Main type in other module\n// a.\n//   ^com\n\nlet xx: CompletionFromModule.SomeModule.t = {name: \"hello\"}\n// Type from other file\n// xx.\n//    ^com\n\ntype builtinType = array<string>\n\nlet ffff: builtinType = []\n\n// A built in type\n// ffff.u\n//       ^com\n\n// Type outside of module with complete from pointing to other module\nlet nnn: typeOutsideModule = {nname: \"hello\"}\n// nnn.\n//     ^com\n\n// Continuous completion\nlet xxxx = [1, 2]\n\n// xxxx->Js.Array2.filter(v => v > 10).filt\n//                                         ^com\n\n// 
xxxx->Js.Array2.filter(v => v > 10)->Js.Array2.joinWith(\",\").includ\n//                                                                    ^com\n\nlet str = \"hello\"\n\n// str->Js.String2.toLowerCase.toUpperCa\n//                                      ^com\n\n// str->Js.String2.toLowerCase->Js.String2.toUpperCase.toLowerC\n//                                                             ^com\n\nlet cc = (t: typeOutsideModule) => {\n  // t.\n  //   ^com\n  t\n}\n\nlet outOfScope = (t: typeOutsideModule) => t\n\n// @editor.completeFrom(Dot) type t\n//                         ^com\n\n// @editor.completeFrom([CompletionPipe]) type t\n//                                     ^com\n\n// @editor.completeFrom([CompletionPipe, Dot]) type t\n//                                          ^com\n\nlet someObj = {\n  \"name\": \"hello\",\n  \"age\": 123,\n}\n\n// someObj.\n//         ^com\n\n// someObj.na\n//           ^com\n\nmodule DOMAPI = {\n  type htmlElement = {prefix: string}\n\n  @editor.completeFrom(DotPipeCompletionSpec.HTMLButtonElement)\n  type rec htmlButtonElement = {mutable disabled: bool}\n}\n\nmodule HTMLButtonElement = {\n  open DOMAPI\n\n  @send\n  external checkValidity: htmlButtonElement => bool = \"checkValidity\"\n}\n\nexternal button: DOMAPI.htmlButtonElement = \"button\"\n\n// button.\n//        ^com\n"
  },
  {
    "path": "analysis/tests/src/EnvCompletion.res",
    "content": "type things = One | Two\ntype things2 = Four | Five\n\nlet res: EnvCompletionOtherFile.someResult<things, string> = Okay(One)\n\nlet use = (): EnvCompletionOtherFile.response => {\n  stuff: First,\n  res: Failure(\"\"),\n}\n\n// switch res { | }\n//               ^com\n\n// switch res { | Okay() }\n//                     ^com\n\n// switch res { | Failure() }\n//                        ^com\n\n// switch use() { | }\n//                 ^com\n\n// switch use() { | {} }\n//                   ^com\n\n// switch use() { | {stuff: } }\n//                         ^com\n\n// switch use() { | {stuff: Second() } }\n//                                 ^com\n\n// switch use() { | {stuff: Second({}) } }\n//                                  ^com\n\n// switch use() { | {res: } }\n//                       ^com\n\n// switch use() { | {res: Okay() } }\n//                             ^com\n\n// switch use() { | {res: Okay(Second()) } }\n//                                    ^com\n\n// switch use() { | {res: Okay(Second({})) } }\n//                                     ^com\n\nlet res2: EnvCompletionOtherFile.someRecord<things2> = {\n  name: \"string\",\n  theThing: Four,\n  theVariant: First,\n}\n\n// switch res2 { | }\n//                ^com\n\n// switch res2 { | {} }\n//                  ^com\n\n// switch res2 { | {theThing: } }\n//                           ^com\n\n// switch res2 { | {theVariant: } }\n//                             ^com\n"
  },
  {
    "path": "analysis/tests/src/EnvCompletionOtherFile.res",
    "content": "type someResult<'a, 'b> = Okay('a) | Failure('b)\n\ntype r1 = {age: int}\n\ntype theVariant = First | Second(r1)\n\ntype someRecord<'thing> = {\n  name: string,\n  theThing: 'thing,\n  theVariant: theVariant,\n}\n\ntype response = {stuff: theVariant, res: someResult<theVariant, string>}\n"
  },
  {
    "path": "analysis/tests/src/ExhaustiveSwitch.res",
    "content": "type someVariant = One | Two | Three(option<bool>)\ntype somePolyVariant = [#one | #two | #three(option<bool>) | #\"exotic ident\" | #\"switch\"]\n\nlet withSomeVariant = One\nlet withSomePoly: somePolyVariant = #one\nlet someBool = true\nlet someOpt = Some(true)\n\n// switch withSomeVarian\n//                      ^com\n\n// switch withSomePol\n//                   ^com\n\n// switch someBoo\n//               ^com\n\n// switch someOp\n//              ^com\n\ntype rcrd = {someVariant: someVariant}\n\nlet getV = r => r.someVariant\n\nlet x: rcrd = {\n  someVariant: One,\n}\n\nlet vvv = Some(x->getV)\n\n// switch x->getV\n//           ^xfm\n\n// x->getV\n// ^xfm  ^\n\n// vvv\n//  ^xfm\n\n// ^ve+ 11.1\n// switch withSomeVarian\n//                      ^com\n// ^ve-\n"
  },
  {
    "path": "analysis/tests/src/Firebase.res",
    "content": "module Firebase = {\n  module Firestore = {\n    type firestore\n\n    type documentReference<'documentdata> = {\n      id: string,\n      path: string,\n    }\n\n    type documentSnapshot<'documentdata> = {\n      id: string,\n      ref: documentReference<'documentdata>,\n    }\n\n    @module(\"firebase/firestore\") @variadic\n    external doc: (firestore, string, array<string>) => documentReference<'documentdata> = \"doc\"\n\n    @module(\"firebase/firestore\")\n    external getDoc: documentReference<'documentdata> => Js.Promise.t<\n      documentSnapshot<'documentdata>,\n    > = \"getDoc\"\n  }\n}\n\nmodule Sample = {\n  open Firebase\n\n  external store: Firestore.firestore = \"store\"\n\n  let ref = store->Firestore.doc(\"some_id\", [])\n  // ref.\n  //     ^com\n}\n"
  },
  {
    "path": "analysis/tests/src/Fragment.res",
    "content": "module SectionHeader = {\n  @react.component\n  let make = (~children) => children\n}\n\n\nlet z1 = <> <SectionHeader> {React.string(\"abc\")} </SectionHeader> </>\n//                 ^hov\n\nlet z2 = <> <SectionHeader> {React.string(\"abc\")} </SectionHeader> </>\n//                                                      ^hov"
  },
  {
    "path": "analysis/tests/src/Highlight.res",
    "content": "module M = {\n  module C = Component\n}\n\nlet _c = <Component />\n\nlet _mc = <M.C />\n\nlet _d = <div />\n\nlet _d2 =\n  <div>\n    {React.string(\"abc\")}\n    <div> {React.string(\"abc\")} </div>\n    {React.string(\"abc\")}\n    {React.string(\"abc\")}\n  </div>\n\ntype pair<'x, 'y> = ('x, 'y)\n\ntype looooooooooooooooooooooooooooooooooooooong_int = int\n\ntype looooooooooooooooooooooooooooooooooooooong_string = string\n\ntype pairIntString = list<\n  pair<\n    looooooooooooooooooooooooooooooooooooooong_int,\n    looooooooooooooooooooooooooooooooooooooong_string,\n  >,\n>\n\nlet _ = !(3 < 4) || 3 > 4\n\nmodule type MT = {\n  module DDF: {\n\n  }\n}\n\nmodule DDF: MT = {\n  module DDF = {\n\n  }\n}\n\nmodule XX = {\n  module YY = {\n    type t = int\n  }\n}\n\nopen XX.YY\n\ntype tt = t\n\n// ^hig\n\nmodule T = {\n  type someRecord<'typeParameter> = {\n    someField: int,\n    someOtherField: string,\n    theParam: 'typeParameter,\n  }\n\n  type someEnum = A | B | C\n}\n\nlet foo = x => x.T.someField\n\nlet add = (~hello as x, ~world) => x + world\n\nlet _ = @res.partial add(~hello=3)\n\nlet _ = <div scale=\"abc\"> <div /> </div>\n\nmodule SomeComponent = {\n  module Nested = {\n    @react.component\n    let make = (~children) => {\n      <> {children} </>\n    }\n  }\n}\n\nlet _ = <SomeComponent.Nested> <div /> </SomeComponent.Nested>\n\n// true/false\nlet _ = true || false\n\n// to/downto as label\nlet toAs = (~to as x) => x\nlet _toEquals = toAs(~to=10)\n\nlet to = 1\nfor _ in to + to to to + to {\n  ()\n}\n\nmodule ToAsProp = {\n  @react.component\n  let make = (~to) => {\n    <> {React.int(to)} </>\n  }\n}\nlet _ = <ToAsProp to=3 />\n\n// quoted identifiers\nlet \\\"true\" = 4\nlet _ = \\\"true\"\n\nlet enumInModule = T.A\n\ntype typeInModule = XX.YY.t\n\nmodule QQ = {\n  type somePolyEnumType = [\n    | #someMember\n    | #AnotherMember\n    | #SomeMemberWithPayload(list<int>)\n    | #\"fourth Member\"\n  ]\n}\n\nlet _ = x =>\n  switch x 
{\n  | #stuff => 3\n  | #...QQ.somePolyEnumType => 4\n  }\n\nlet _ = 3 == 3 || 3 === 3\n\nlet _ = (~_type_ as _) => ()\n\nlet _ = {\"abc\": 34}\n\nlet _ = {\"Key\": 2}\n"
  },
  {
    "path": "analysis/tests/src/Hover.res",
    "content": "let abc = 22 + 34\n//  ^hov\n\ntype t = (int, float)\n//   ^hov\n\nmodule Id = {\n  //   ^hov\n  type x = int\n}\n\n@ocaml.doc(\"This module is commented\")\nmodule Dep: {\n  @ocaml.doc(\"Some doc comment\")\n  let customDouble: int => int\n} = {\n  let customDouble = foo => foo * 2\n}\n\nmodule D = Dep\n//         ^hov\n\nlet cd = D.customDouble\n//         ^hov\n\nmodule HoverInsideModuleWithComponent = {\n  let x = 2 // check that hover on x works\n  //  ^hov\n  @react.component\n  let make = () => React.null\n}\n\n@ocaml.doc(\"Doc comment for functionWithTypeAnnotation\")\nlet functionWithTypeAnnotation: unit => int = () => 1\n//  ^hov\n\n@react.component\nlet make = (~name) => React.string(name)\n//           ^hov\n\nmodule C2 = {\n  @react.component\n  let make2 = (~name: string) => React.string(name)\n  //           ^hov\n}\n\nlet num = 34\n//        ^hov\n\nmodule type Logger = {\n  //         ^hov\n  let log: string => unit\n}\n\nmodule JsLogger: Logger = {\n  //   ^hov\n  let log = (msg: string) => Js.log(msg)\n  let _oneMore = 3\n}\n\nmodule JJ = JsLogger\n//            ^def\n\nmodule IdDefinedTwice = {\n  //     ^hov\n  let _x = 10\n  let y = 20\n  let _x = 10\n}\n\nmodule A = {\n  let x = 13\n}\n\nmodule B = A\n//     ^hov\n\nmodule C = B\n//     ^hov\n\nmodule Comp = {\n  @react.component\n  let make = (~children: React.element) => children\n}\n\nmodule Comp1 = Comp\n\nlet _ =\n  <Comp>\n    <div />\n    <div />\n  </Comp>\n//        ^hov\n\nlet _ =\n  <Comp1>\n    <div />\n    <div />\n  </Comp1>\n//        ^hov\n\ntype r<'a> = {i: 'a, f: float}\n\nlet _get = r => r.f +. r.i\n//                       ^hov\n\nlet withAs = (~xx as yyy) => yyy + 1\n//                   ^hov\n\nmodule AA = {\n  type cond<'a> = [< #str(string)] as 'a\n  let fnnxx = (b: cond<_>) => true ? 
b : b\n}\n\nlet funAlias = AA.fnnxx\n\nlet typeOk = funAlias\n//              ^hov\n\nlet typeDuplicate = AA.fnnxx\n//                       ^hov\n\n@live let dd = 34\n// ^hov\n\nlet arity0a = (. ()) => {\n  //^hov\n  let f = () => 3\n  f\n}\n\nlet arity0b = (. (), . ()) => 3\n//  ^hov\n\nlet arity0c = (. (), ()) => 3\n//  ^hov\n\nlet arity0d = (. ()) => {\n  // ^hov\n  let f = () => 3\n  f\n}\n\n/**doc comment 1*/\nlet docComment1 = 12\n//       ^hov\n\n/** doc comment 2 */\nlet docComment2 = 12\n//    ^hov\n\nmodule ModWithDocComment = {\n  /*** module level doc comment 1 */\n\n  /** doc comment for x */\n  let x = 44\n\n  /*** module level doc comment 2 */\n}\n\nmodule TypeSubstitutionRecords = {\n  type foo<'a> = {content: 'a, zzz: string}\n  type bar = {age: int}\n  type foobar = foo<bar>\n\n  let x1: foo<bar> = {content: {age: 42}, zzz: \"\"}\n  //                   ^hov\n  let x2: foobar = {content: {age: 42}, zzz: \"\"}\n  //                  ^hov\n\n  // x1.content.\n  //            ^com\n\n  // x2.content.\n  //            ^com\n\n  type foo2<'b> = foo<'b>\n  type foobar2 = foo2<bar>\n\n  let y1: foo2<bar> = {content: {age: 42}, zzz: \"\"}\n  let y2: foobar2 = {content: {age: 42}, zzz: \"\"}\n\n  // y1.content.\n  //            ^com\n\n  // y2.content.\n  //            ^com\n}\n\nmodule CompV4 = {\n  type props<'n, 's> = {n?: 'n, s: 's}\n  let make = props => {\n    let _ = props.n == Some(10)\n    React.string(props.s)\n  }\n}\n\nlet mk = CompV4.make\n//  ^hov\n\ntype useR = {x: int, y: list<option<r<float>>>}\n\nlet testUseR = (v: useR) => v\n//              ^hov\n\nlet usr: useR = {\n  x: 123,\n  y: list{},\n}\n\n// let f = usr\n//           ^hov\n\n\nmodule NotShadowed = {\n  /** Stuff */\n  let xx_ = 10\n\n  /** More Stuff */\n  let xx = xx_\n}\n\nmodule Shadowed = {\n  /** Stuff */\n  let xx = 10\n\n  /** More Stuff */\n  let xx = xx\n}\n\nlet _ = NotShadowed.xx\n//                  ^hov\n\nlet _ = Shadowed.xx\n//               ^hov\n\ntype 
recordWithDocstringField = {\n  /** Mighty fine field here. */\n  someField: bool,\n}\n\nlet x: recordWithDocstringField = {\n  someField: true,\n}\n\n// x.someField\n//    ^hov\n\nlet someField = x.someField\n//                 ^hov\n\ntype variant = | /** Cool variant! */ CoolVariant | /** Other cool variant */ OtherCoolVariant\n\nlet coolVariant = CoolVariant\n//                  ^hov\n\n// Hover on unsaved\n// let fff = \"hello\"; fff\n//                     ^hov\n\n// switch x { | {someField} => someField }\n//                               ^hov\n\nmodule Arr = Belt.Array\n//      ^hov\n\ntype aliased = variant\n//    ^hov\n"
  },
  {
    "path": "analysis/tests/src/InlayHint.res",
    "content": "let not_include = \"Not Include\"\nlet string = \"ReScript\"\nlet number = 1\nlet float = 1.1\nlet char = 'c'\n\nlet add = (x, y) => x + y\n\nlet my_sum = 3->add(1)->add(1)->add(1)->add(8)\n\nlet withAs = (~xx as yyy) => yyy + 1\n\n\n@react.component\nlet make = (~name) => React.string(name)\n\nlet tuple = (\"ReScript\", \"lol\")\n\nlet (lang, _) = tuple\n\ntype foo = {\n  name: string,\n  age: int,\n}\n\nlet bar = () => ({name: \"ReScript\", age: 2}, tuple)\nlet ({name:_, age:_}, t) = bar()\n\nlet alice = {\n  name: \"Alice\",\n  age: 42,\n};\n\nlet {name, age} = alice;\n\n//^hin"
  },
  {
    "path": "analysis/tests/src/Jsx2.res",
    "content": "module M = {\n  @react.component\n  let make = (~first, ~fun=\"\", ~second=\"\") => React.string(first ++ fun ++ second)\n}\n\nlet _ = <M first=\"abc\" />\n//       ^def\n\n// <M second=fi\n//             ^com\n\n// <M second=\"abc\" f\n//                  ^com\n\n// let e = <M\n//           ^com\n\n@react.component\nlet make = (~first) => React.string(first)\n\nlet y = 44\n\n// <M prop={A(3)} k\n//                 ^com\n\n// <M prop=A(3) k\n//               ^com\n\n// <M prop=foo(1+2) k\n//                   ^com\n\n// <M prop=list{1,2,3} k\n//                      ^com\n\n// <M prop=<N /> k\n//                ^com\n\n// <M prop=1.5 k\n//              ^com\n\n// <M prop=0X33 k\n//               ^com\n\n// <M prop=12e+3 k\n//                ^com\n\n// <M prop='z' k\n//              ^com\n\n// <M prop=`before${foo}` k\n//                         ^com\n\n// <M prop=module(@foo Three: X_int) k\n//                                    ^com\n\n// <M prop=%bs.raw(\"1\") k\n//                       ^com\n\nlet _ = <Component />\n//         ^def\n\nmodule Ext = {\n  @react.component @module(\"@material-ui/core\")\n  external make: (~align: string=?) => React.element = \"Typography\"\n}\n\nlet _ = Ext.make\n\n// <Ext al\n//        ^com\n\n// <M first\n//         ^com\n\n// <M first=#a k\n//              ^com\n\n// <M first =  ?   
#a k\n//                     ^com\n\n// <M>\n//    ^com\n\nmodule WithChildren = {\n  @react.component\n  let make = (~name as _: string, ~children) => <jsx> children </jsx>\n}\n\nlet _ = <WithChildren name=\"\"> <div /> </WithChildren>\n// <WithChildren\n//              ^com\n// <WithChildren n\n//                ^com\n\n// let c : React.e\n//                ^com\n// let c : ReactDOMR\n//                  ^com\n\nmodule DefineSomeFields = {\n  type r = {thisField: int, thatField: string}\n  let thisValue = 10\n  // let foo x = x.th\n  //                 ^com\n}\n\n// let q = DefineSomeFields.\n//                          ^com\n// let foo x = x.DefineSomeFields.th\n//                                  ^com\n\nlet _ = x => x.DefineSomeFields.thisField + DefineSomeFields.thisValue\n\nmodule Outer = {\n  module Inner = {\n    let hello = 3\n  }\n}\nlet _ = Outer.Inner.hello\n\nlet _ =\n  <div\n  // x=Outer.Inner.h\n  //                ^com\n    name=\"\"\n  />\n\nlet _ =\n  <div\n  // x=Outer.Inner.\n  //               ^com\n    name=\"\"\n  />\n\nlet _ =\n  <div\n  // x=\n  //   ^com\n    name=\"\"\n  />\n\nmodule Nested = {\n  module Comp = {\n    @react.component\n    let make = (~name) => React.string(name)\n  }\n}\n\nlet _ = <Nested.Comp name=\"\" />\n\n// let _ = <Nested.Co name=\"\" />\n//                   ^com\n\n// let _ = <Nested. name=\"\" />\n//                 ^com\n\nmodule Comp = {\n  @react.component\n  let make = (~age) => React.int(age)\n}\n\nlet _ = {\n  <> <Comp age=34 /> </>\n  //        ^hov\n}\n\nlet _ = {\n  <> {<> <Comp age=34 /> </>} </>\n  //            ^hov\n}\n\nmodule type ExtT = module type of Ext\n\nlet _ = module(Ext: ExtT)\n"
  },
  {
    "path": "analysis/tests/src/Jsx2.resi",
    "content": "@react.component\nlet make: (~first: string) => React.element\n//  ^hov\n\nlet y: int\n//  ^hov\n\n// type t = React.e\n//                 ^com\n\n// let x : React.e\n//                ^com\n"
  },
  {
    "path": "analysis/tests/src/JsxV4.res",
    "content": "@@jsxConfig({version: 4})\n\nmodule M4 = {\n  /** Doc Comment For M4 */\n  @react.component\n  let make = (~first, ~fun=\"\", ~second=\"\") => React.string(first ++ fun ++ second)\n}\n\nlet _ = <M4 first=\"abc\" />\n//       ^def\n\n// <M4 first=\"abc\" f\n//                  ^com\n\nlet _ = <M4 first=\"abc\" />\n//       ^hov\n\nmodule MM = {\n  @react.component\n  let make = () => React.null\n}\n\nmodule Other = {\n  @react.component\n  let make = (~name) => React.string(name)\n}\n\n// ^int\n"
  },
  {
    "path": "analysis/tests/src/LongIdentTest.res",
    "content": "module Map = TableclothMap\n\nlet zz = Map.add\n//           ^hov\n// Triggers the processing of `Of(M)._t` and Lident.Apply ends up in the AST\n// even though it's not expressible in ReScript syntax.\n// This simulates ReScript projects with OCaml dependencies containing ident apply.\n"
  },
  {
    "path": "analysis/tests/src/ModuleStuff.res",
    "content": "/*** This is a top level module doc. */\n\nmodule Nested = {\n  /*** Module doc for nested. */\n}\n"
  },
  {
    "path": "analysis/tests/src/Objects.res",
    "content": "type objT = {\"name\": string, \"age\": int}\n\ntype nestedObjT = {\"y\": objT}\n\nmodule Rec = {\n  type recordt = {xx: int, ss: string}\n\n  let recordVal: recordt = assert false\n}\n\nlet object: objT = {\"name\": \"abc\", \"age\": 4}\n"
  },
  {
    "path": "analysis/tests/src/Patterns.res",
    "content": "module A = {\n  let makeX = () => (1, 2)\n\n  let (xxx, yyy) = makeX()\n\n  type t = {name: string, age: int}\n\n  let makeT = () => {name: \"\", age: 0}\n\n  let {name, age} = makeT()\n\n  let (a | a, b) = makeX()\n\n  type rec arr = A(array<arr>)\n\n  let A([v1, _, _]) | _ as v1 = assert false\n\n}\n\nlet y = A.xxx\n//        ^def\n\nlet z = A.yyy\n\nlet n1 = A.name\n//         ^def\n\nlet n2 = A.a\n//         ^def\n\nlet n3 = A.v1\n//         ^def\n"
  },
  {
    "path": "analysis/tests/src/PolyRec.res",
    "content": "let rec sum = x =>\n  switch x {\n  | #Leaf => 0\n  | #Node(value, left, right) => value + left->sum + right->sum\n  }\n\nlet myTree = #Node(\n  1,\n  #Node(2, #Node(4, #Leaf, #Leaf), #Node(6, #Leaf, #Leaf)),\n  #Node(3, #Node(5, #Leaf, #Leaf), #Node(7, #Leaf, #Leaf)),\n)\n\nlet () = myTree->sum->Js.log\n//        ^hov\n"
  },
  {
    "path": "analysis/tests/src/QueryFile.res",
    "content": "module Types = {\n  type byAddress = SchemaAssets.input_ByAddress\n  type location = SchemaAssets.input_Location\n\n  type variables = {location: location}\n}\n"
  },
  {
    "path": "analysis/tests/src/RecModules.res",
    "content": "module rec A: {\n  type t\n\n  @send external child: t => B.t = \"child\"\n} = A\n\nand B: {\n  type t\n\n  @send external parent: t => A.t = \"parent\"\n} = B\n\nmodule C = {\n  type t\n\n  @send external createA: t => A.t = \"createA\"\n}\n\nmodule MC = C\n//          ^hov\nmodule MA = A\n//          ^hov\n"
  },
  {
    "path": "analysis/tests/src/RecordCompletion.res",
    "content": "type t = {n: array<string>}\n\nlet t = {n: []}\n\ntype t2 = {n2: t}\n\nlet t2 = {n2: t}\n\n// t.n->m\n//       ^com\n\n// t2.n2.n->m\n//           ^com\n\nmodule R = {\n  type t = {name: string}\n}\n\nlet n = {R.name: \"\"}\n// n.R.\n//     ^com\n\n// n.R. xx\n//     ^com\n"
  },
  {
    "path": "analysis/tests/src/RecoveryOnProp.res",
    "content": "let name = \"\"\n\nlet _ =\n  <div\n    onClick={_ => {\n      ()\n      //        let _: Res\n      //                  ^com\n    }}\n    name=\"abc\">\n    {React.string(name)}\n  </div>\n"
  },
  {
    "path": "analysis/tests/src/References.res",
    "content": "let x = 12\n//  ^ref\n\nlet a = x\n\nlet b = a\n\nlet c = x\n\nlet foo = (~xx) => xx + 1\n//                 ^ref\n\nmodule M: {\n  let aa: int\n} = {\n  let aa = 10\n}\n\nlet bb = M.aa\nlet cc = bb\nlet dd = M.aa\n//          ^ref\n\nlet _ = <ComponentInner/>\n//             ^ref"
  },
  {
    "path": "analysis/tests/src/ReferencesWithInterface.res",
    "content": "let x = 2\n//  ^ref\n"
  },
  {
    "path": "analysis/tests/src/ReferencesWithInterface.resi",
    "content": "let x: int\n//  ^ref\n"
  },
  {
    "path": "analysis/tests/src/Rename.res",
    "content": "let x = 12\n//  ^ren y\n\nlet a = x\n\nlet b = a\n\nlet c = x\n\nlet foo = (~xx) => xx + 1\n//                 ^ren yy\n"
  },
  {
    "path": "analysis/tests/src/RenameWithInterface.res",
    "content": "let x = 2\n//  ^ren y\n"
  },
  {
    "path": "analysis/tests/src/RenameWithInterface.resi",
    "content": "let x: int\n//  ^ren y\n"
  },
  {
    "path": "analysis/tests/src/Reprod.res",
    "content": "module Query = {\n  let use = (~variables: QueryFile.Types.variables) => {\n    ignore(variables)\n    \"\"\n  }\n}\n\n// let x = Query.use(~variables={location: ByAddress()})\n//                                                   ^com\n\ntype nestedRecord = {nested: bool}\n\ntype rec someRecord = {\n  first: int,\n  second: (bool, option<someRecord>),\n  optThird: option<[#first | #second(someRecord)]>,\n  nest: nestedRecord,\n}\n\ntype somePolyVariant = [#one | #two(bool) | #three(someRecord, bool)]\n\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\n\ntype paramRecord<'a, 'b> = {\n  first: 'a,\n  second: 'b,\n}\n\nlet record: paramRecord<someVariant, QueryFile.Types.byAddress> = {\n  first: One,\n  second: {city: \"city\"},\n}\n\n// switch record { | {first: }}\n//                          ^com\n\n// switch record { | {second: }}\n//                           ^com\n\n// TODO: Functions, aliases/definitions, records, variants, polyvariants, tuples\n\nlet res: result<someVariant, somePolyVariant> = Ok(One)\n\n// switch res { | Ok() }\n//                   ^com\n\n// switch res { | Error() }\n//                      ^com\n\nlet resOpt: result<option<someVariant>, unit> = Ok(None)\n\n// switch resOpt { | Ok() }\n//                      ^com\n\n// switch resOpt { | Ok(Some()) }\n//                           ^com\n"
  },
  {
    "path": "analysis/tests/src/Rxjs.res",
    "content": "// These are bindings used in RxjsCompletion.res\n// We are using a separate file to test complication for modules of external files.\ntype target\n\nmodule Subscriber = {\n  type t<'t> = {next: 't => unit}\n}\n\nmodule Observable = {\n  // Complete items defined inside the parent module.\n  @editor.completeFrom(Rxjs)\n  type t<'t>\n\n  type dispose = unit => unit\n\n  @new @module(\"rxjs\")\n  external make: (Subscriber.t<'t> => dispose) => t<'t> = \"Observable\"\n\n  type subscription\n\n  @send\n  external subscribe: (t<'t>, 't => unit) => subscription = \"subscribe\"\n}\n\n@module(\"rxjs\")\nexternal fromEvent: (target, string) => Observable.t<'t> = \"fromEvent\"\n\ntype operation<'t, 'u>\n\n@send\nexternal pipe: (Observable.t<'t>, operation<'t, 'u>) => Observable.t<'u> = \"pipe\"\n\n@send\nexternal pipe2: (Observable.t<'t>, operation<'t, 'u>, operation<'u, 'i>) => Observable.t<'i> =\n  \"pipe\"\n\n@module(\"rxjs\")\nexternal map: ('t => 'u) => operation<'t, 'u> = \"map\"\n\n@module(\"rxjs\")\nexternal distinctUntilChanged: unit => operation<'t, 't> = \"distinctUntilChanged\"\n\n@module(\"rxjs\")\nexternal merge: (Observable.t<'t>, Observable.t<'t>) => Observable.t<'t> = \"merge\"\n\n@module(\"rxjs\")\nexternal scan: (('acc, 't) => 'acc, 'acc) => operation<'t, 'acc> = \"scan\"\n\n@module(\"rxjs\")\nexternal combineLatest: (Observable.t<'a>, Observable.t<'b>) => Observable.t<('a, 'b)> =\n  \"combineLatest\"\n"
  },
  {
    "path": "analysis/tests/src/RxjsCompletion.res",
    "content": "@@warning(\"-26\")\n@@warning(\"-110\")\n\ntype keyPress =\n  | Up(string)\n  | Down(string)\n\n@val\nexternal window: {..} = \"window\"\n\nlet main = async () => {\n  let keyMapObservable = {\n    open Rxjs\n\n    let keydown =\n      fromEvent(Obj.magic(window), \"keydown\")->pipe2(\n        map(event => Down(event[\"key\"])),\n        distinctUntilChanged(),\n      )\n\n    let keyup =\n      fromEvent(Obj.magic(window), \"keyup\")->pipe2(\n        map(event => Up(event[\"key\"])),\n        distinctUntilChanged(),\n      )\n\n    // merge(keydown, keyup).\n    //                       ^com\n\n    // Rxjs.Observable.subscribe, Rxjs.pipe and Rxjs.pipe2 should be completed\n  }\n\n  let (a,b) : ( Rxjs.Observable.t<string> , Rxjs.Observable.t<string>) = %todo\n\n  // Rxjs.combineLatest(a, b).\n  //                          ^com\n\n  // Rxjs.Observable.subscribe, Rxjs.pipe and Rxjs.pipe2 should be completed\n}\n"
  },
  {
    "path": "analysis/tests/src/SchemaAssets.res",
    "content": "@live\ntype rec input_ByAddress = {city: string}\n@tag(\"__$inputUnion\")\nand input_Location =\n  | @as(\"byAddress\") ByAddress(input_ByAddress)\n  | @as(\"byId\") ById(string)\n"
  },
  {
    "path": "analysis/tests/src/ShadowedBelt.res",
    "content": "module List = {\n  let map = (l, fn) => List.map(fn, l)\n}\n"
  },
  {
    "path": "analysis/tests/src/SignatureHelp.res",
    "content": "type someVariant = One | Two | Three\n\n/** Does stuff. */\nlet someFunc = (one: int, ~two: option<string>=?, ~three: unit => unit, ~four: someVariant, ()) => {\n  ignore(one)\n  ignore(two)\n  ignore(three())\n  ignore(four)\n}\n\nlet otherFunc = (first: string, second: int, third: float) => {\n  ignore(first)\n  ignore(second)\n  ignore(third)\n}\n\n// let _ = someFunc(\n//                  ^she\n\n// let _ = someFunc(1\n//                   ^she\n\n// let _ = someFunc(123, ~two\n//                           ^she\n\n// let _ = someFunc(123, ~two=\"123\"\n//                               ^she\n\n// let _ = someFunc(123, ~two=\"123\", ~four\n//                                    ^she\n\n// let _ = someFunc(123, ~two=\"123\", ~four=O\n//                                        ^she\n\n// let _ = otherFunc(\n//                   ^she\n\n// let _ = otherFunc(\"123\"\n//                      ^she\n\n// let _ = otherFunc(\"123\", 123, 123.0)\n//                                 ^she\n\n// let _ = Completion.Lib.foo(~age\n//                               ^she\n\nlet iAmSoSpecial = (iJustHaveOneArg: string) => {\n  ignore(iJustHaveOneArg)\n}\n\n// let _ = iAmSoSpecial(\n//                      ^she\n\n// let _ = \"hello\"->otherFunc(1\n//                             ^she\n\nlet fn = (age: int, name: string, year: int) => {\n  ignore(age)\n  ignore(name)\n  ignore(year)\n}\n\n// let _ = fn(22, )\n//               ^she\n\n// let _ = fn(22, , 2023)\n//               ^she\n\n// let _ = fn(12, \"hello\", )\n//                        ^she\n\n// let _ = fn({ iAmSoSpecial() })\n//                           ^she\n\n// let _ = fn({ iAmSoSpecial({ someFunc() }) })\n//                                      ^she\n\n/** This is my own special thing. */\ntype mySpecialThing = string\n\ntype t =\n  | /** One is cool. */ One({miss?: bool, hit?: bool, stuff?: string})\n  | /** Two is fun! */ Two(mySpecialThing)\n  | /** Three is... 
three */ Three(mySpecialThing, array<option<string>>)\n\nlet _one = One({})\n//              ^she\n\nlet _one = One({miss: true})\n//                ^she\n\nlet _one = One({hit: true, miss: true})\n//                     ^she\n\nlet two = Two(\"true\")\n//             ^she\n\nlet three = Three(\"\", [])\n//                 ^she\n\nlet three2 = Three(\"\", [])\n//                      ^she\n\nlet _deepestTakesPrecedence = [12]->Js.Array2.map(v =>\n  if v > 0 {\n    One({})\n    //   ^she\n  } else {\n    Two(\"\")\n  }\n)\n\n/** Main docstring here. */\nlet map = (arr, mapper) => {\n  Array.map(mapper, arr)\n}\n\nlet _usesCorrectTypeInfo = [12]->map(v => v)\n//                                        ^she\n\n/** Type x... */\ntype x = {\n  age?: int,\n  name?: string,\n}\n\n/** Type tt! */\ntype tt = One\n\n/** Some stuff */\nlet stuffers = (x: x, y: tt) => {\n  ignore(x)\n  ignore(y)\n  \"hello\"\n}\n\nlet _ = stuffers({}, One)\n//                ^she\n\nlet _ = stuffers({}, One)\n//                    ^she\n\nlet _ = switch _one {\n| One({hit: _hit}) => \"\"\n//      ^she\n| One(_a) => \"\"\n//     ^she\n| Two(_ms) => \"\"\n//     ^she\n| Three(_a, []) => \"\"\n//       ^she\n| Three(_, _b) => \"\"\n//          ^she\n}\n\nlet _bb = Ok(true)\n//            ^she\n\nlet _bbb = Error(\"err\")\n//                 ^she\n\nlet _cc = Some(true)\n//              ^she\n"
  },
  {
    "path": "analysis/tests/src/Support.res",
    "content": "module CatchResult = {\n  @tag(\"ok\")\n  type t<'value> = | @as(true) Ok({value: 'value}) | @as(false) Error({errors: array<string>})\n}\n"
  },
  {
    "path": "analysis/tests/src/TableclothMap.res",
    "content": "let add = 3\n\nmodule Of = (M: {}) => {\n  type _t = int\n}\n\nmodule M = {}\n\nmodule O = Of(M)\nmodule Int = {\n  type _t = O._t\n}\n"
  },
  {
    "path": "analysis/tests/src/TableclothMap.resi",
    "content": "let add: int\n\nmodule Int: {}\n"
  },
  {
    "path": "analysis/tests/src/TypeArgCtx.res",
    "content": "type someTyp = {test: bool}\nlet catchResult = Support.CatchResult.Ok({\n  value: {\n    test: true,\n  },\n})\n\n// switch catchResult { | Ok({value: }) => ()\n//                                  ^com\n"
  },
  {
    "path": "analysis/tests/src/TypeAtPosCompletion.res",
    "content": "type optRecord = {\n  name: string,\n  age?: int,\n  online?: bool,\n}\n\nlet optRecord = {\n  name: \"Hello\",\n  //             ^com\n}\n\ntype someVariant = One(int, optRecord)\n\nlet x = One(\n  1,\n  {\n    name: \"What\",\n    //            ^com\n  },\n)\n\nlet arr = [\n  optRecord,\n  //        ^com\n]\n"
  },
  {
    "path": "analysis/tests/src/TypeDefinition.res",
    "content": "type variant = Foo | Bar\n\ntype record = {item: string}\n//       ^typ\n\nlet x = Foo\n//  ^typ\n\nlet y = {item: \"foo\"}\n//  ^typ\n\ntype obj = {\"foo\": string}\n\nlet obj: obj = {\"foo\": \"bar\"}\n//  ^typ\n\nlet f = r => r.item\n//           ^typ\n\nlet g = v =>\n  switch v {\n  //     ^typ\n  | Foo => \"Foo\"\n  | Bar => \"Bar\"\n  }\n"
  },
  {
    "path": "analysis/tests/src/Xform.res",
    "content": "type kind = First | Second | Third | Fourth(int)\ntype r = {name: string, age: int}\n\nlet ret = _ => assert(false)\nlet kind = assert(false)\n\nif kind == First {\n  // ^xfm\n  ret(\"First\")\n} else {\n  ret(\"Not First\")\n}\n\n#kind(\"First\", {name: \"abc\", age: 3}) != kind ? ret(\"Not First\") : ret(\"First\")\n//             ^xfm\n\nlet name = \"hello\"\n//   ^xfm\n\nlet annotated: int = 34\n//   ^xfm\n\nmodule T = {\n  type r = {a: int, x: string}\n}\n\nlet foo = x =>\n  //      ^xfm\n  switch x {\n  | None => 33\n  | Some(q) => q.T.a + 1\n  //     ^xfm\n  }\n\nlet withAs = (~x as name) => name + 1\n//                   ^xfm\n\n@react.component\nlet make = (~name) => React.string(name)\n//   ^xfm\n\nlet _ = (~x) => x + 1\n//       ^xfm\n\n//\n// Add braces to the body of a function\n//\n\nlet noBraces = () => name\n//                   ^xfm\n\nlet nested = () => {\n  let _noBraces = (_x, _y, _z) => \"someNewFunc\"\n  //                              ^xfm\n}\n\nlet bar = () => {\n  module Inner = {\n    let foo = (_x, y, _z) =>\n      switch y {\n      | #some => 3\n      | #stuff => 4\n      }\n    //^xfm\n  }\n  Inner.foo(1, ...)\n}\n\nmodule ExtractableModule = {\n  /** Doc comment. 
*/\n  type t = int\n  // A comment here\n  let doStuff = a => a + 1\n  // ^xfm\n}\n\nlet variant = First\n\nlet _x = switch variant {\n| First => \"first\"\n| _ => \"other\"\n//  ^xfm\n}\n\nlet _x = switch variant {\n| First | Second => \"first\"\n| _ => \"other\"\n//  ^xfm\n}\n\nlet _x = switch variant {\n| First if 1 > 2 => \"first\"\n| Second => \"second\"\n| _ => \"other\"\n//  ^xfm\n}\n\nlet polyvariant: [#first | #second | #\"illegal identifier\" | #third(int)] = #first\n\nlet _y = switch polyvariant {\n| #first => \"first\"\n| _ => \"other\"\n//  ^xfm\n}\n\nlet _y = switch polyvariant {\n| #first | #second => \"first\"\n| _ => \"other\"\n//  ^xfm\n}\n\nlet variantOpt = Some(variant)\n\nlet _x = switch variantOpt {\n| Some(First) => \"first\"\n| _ => \"other\"\n//  ^xfm\n}\n\nlet _x = switch variantOpt {\n| Some(First) | Some(Second) => \"first\"\n| _ => \"other\"\n//  ^xfm\n}\n\nlet _x = switch variantOpt {\n| Some(First | Second) => \"first\"\n| _ => \"other\"\n//  ^xfm\n}\n\nlet polyvariantOpt = Some(polyvariant)\n\nlet _x = switch polyvariantOpt {\n| Some(#first) => \"first\"\n| None => \"nothing\"\n| _ => \"other\"\n//  ^xfm\n}\n\nlet _x = switch polyvariantOpt {\n| Some(#first | #second) => \"first\"\n| _ => \"other\"\n//  ^xfm\n}\n"
  },
  {
    "path": "analysis/tests/src/dce/DceTest.res",
    "content": "let x = 12\n"
  },
  {
    "path": "analysis/tests/src/expected/Auto.res.txt",
    "content": "Hover src/Auto.res 2:13\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n(list<'a>, 'a => 'b) => list<'b>\\n```\"}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/BrokenParserCases.res.txt",
    "content": "Complete src/BrokenParserCases.res 2:24\nposCursor:[2:24] posNoWhite:[2:23] Found expr:[2:11->2:30]\nPexp_apply ...[2:11->2:17] (~isOff2:19->2:24=...[2:27->2:29])\nCompletable: CnamedArg(Value[someFn], isOff, [isOff])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someFn]\nPath someFn\n[]\n\nComplete src/BrokenParserCases.res 6:17\nposCursor:[6:17] posNoWhite:[6:16] Found pattern:[6:16->6:19]\nCompletable: Cpattern Value[s]=t\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[s]\nPath s\n[]\n\nComplete src/BrokenParserCases.res 10:29\nposCursor:[10:29] posNoWhite:[10:27] Found pattern:[10:24->10:39]\nposCursor:[10:29] posNoWhite:[10:27] Found pattern:[10:24->10:28]\nPpat_construct None:[10:24->10:28]\nCompletable: Cpath Value[None]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[None]\nPath None\n[]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CodeLens.res.txt",
    "content": "Code Lens src/CodeLens.res\n[{\n        \"range\": {\"start\": {\"line\": 9, \"character\": 4}, \"end\": {\"line\": 9, \"character\": 8}},\n        \"command\": {\"title\": \"{\\\"name\\\": string} => React.element\", \"command\": \"\"}\n    }, {\n        \"range\": {\"start\": {\"line\": 4, \"character\": 4}, \"end\": {\"line\": 4, \"character\": 6}},\n        \"command\": {\"title\": \"(~opt1: int=?, ~a: int, ~b: int, unit, ~opt2: int=?, unit, ~c: int) => int\", \"command\": \"\"}\n    }, {\n        \"range\": {\"start\": {\"line\": 2, \"character\": 4}, \"end\": {\"line\": 2, \"character\": 7}},\n        \"command\": {\"title\": \"(~age: int, ~name: string) => string\", \"command\": \"\"}\n    }, {\n        \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 7}},\n        \"command\": {\"title\": \"(int, int) => int\", \"command\": \"\"}\n    }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Codemod.res.txt",
    "content": "Codemod AddMissingCasessrc/Codemod.res 3:10\nswitch (v1, v2) {\n          //      ^c-a (#valid, #valid) | (#invalid, _)\n          | (#valid, #invalid) => ()\n          | (#valid, #valid) => failwith(\"TODO\")\n          | (#invalid, _) => failwith(\"TODO\")\n          }\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletableComponent.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/CompletePrioritize1.res.txt",
    "content": "Complete src/CompletePrioritize1.res 6:6\nposCursor:[6:6] posNoWhite:[6:5] Found expr:[6:3->0:-1]\nCompletable: Cpath Value[a]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[a]->\nContextPath Value[a]\nPath a\nCPPipe pathFromEnv:Test found:true\nPath Test.\n[{\n    \"label\": \"Test.name\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletePrioritize2.res.txt",
    "content": "Complete src/CompletePrioritize2.res 9:7\nposCursor:[9:7] posNoWhite:[9:6] Found expr:[9:3->0:-1]\nCompletable: Cpath Value[ax]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[ax]->\nContextPath Value[ax]\nPath ax\nCPPipe pathFromEnv:Test found:true\nPath Test.\n[{\n    \"label\": \"Test.add\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletePrioritize2.res 12:5\nposCursor:[12:5] posNoWhite:[12:4] Found expr:[12:3->12:5]\nPexp_ident ax:[12:3->12:5]\nCompletable: Cpath Value[ax]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[ax]\nPath ax\n[{\n    \"label\": \"ax\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"Test.t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype t = {name: int}\\n```\"}\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Completion.res.txt",
    "content": "Complete src/Completion.res 1:11\nposCursor:[1:11] posNoWhite:[1:10] Found expr:[1:3->1:11]\nPexp_ident MyList.m:[1:3->1:11]\nCompletable: Cpath Value[MyList, m]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[MyList, m]\nPath MyList.m\n[{\n    \"label\": \"mapReverse\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nEquivalent to:\\n\\n```res\\nmap(someList, f)->reverse\\n```\\n\\n## Examples\\n\\n```rescript\\nlist{3, 4, 5}->Belt.List.mapReverse(x => x * x) /* list{25, 16, 9} */\\n```\\n\"}\n  }, {\n    \"label\": \"makeBy\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int => 'a) => t<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nReturn a list of length `numItems` with element `i` initialized with `f(i)`.\\nReturns an empty list if `numItems` is negative.\\n\\n## Examples\\n\\n```rescript\\nBelt.List.makeBy(5, i => i) // list{0, 1, 2, 3, 4}\\n\\nBelt.List.makeBy(5, i => i * i) // list{0, 1, 4, 9, 16}\\n```\\n\"}\n  }, {\n    \"label\": \"make\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, 'a) => t<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nReturns a list of length `numItems` with each element filled with value `v`. Returns an empty list if `numItems` is negative.\\n\\n## Examples\\n\\n```rescript\\nBelt.List.make(3, 1) // list{1, 1, 1}\\n```\\n\"}\n  }, {\n    \"label\": \"mapReverse2U\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, t<'b>, ('a, 'b) => 'c) => t<'c>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Uncurried version of [mapReverse2](#mapReverse2). 
\"}\n  }, {\n    \"label\": \"map\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nReturns a new list with `f` applied to each element of `someList`.\\n\\n## Examples\\n\\n```rescript\\nlist{1, 2}->Belt.List.map(x => x + 1) // list{3, 4}\\n```\\n\"}\n  }, {\n    \"label\": \"mapWithIndexU\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, (int, 'a) => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Uncurried version of [mapWithIndex](#mapWithIndex). \"}\n  }, {\n    \"label\": \"mapU\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Uncurried version of [map](#map). \"}\n  }, {\n    \"label\": \"makeByU\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int => 'a) => t<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Uncurried version of [makeBy](#makeBy) \"}\n  }, {\n    \"label\": \"mapReverse2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, t<'b>, ('a, 'b) => 'c) => t<'c>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nEquivalent to: `zipBy(xs, ys, f)->reverse`\\n\\n## Examples\\n\\n```rescript\\n\\nBelt.List.mapReverse2(list{1, 2, 3}, list{1, 2}, (a, b) => a + b) // list{4, 2}\\n```\\n\"}\n  }, {\n    \"label\": \"mapWithIndex\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, (int, 'a) => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies `f` to each element of `someList`.\\nFunction `f` takes two arguments: the index starting from 0 and the element from `someList`, in that order.\\n\\n## Examples\\n\\n```rescript\\nlist{1, 2, 3}->Belt.List.mapWithIndex((index, x) => index + x) // list{1, 3, 5}\\n```\\n\"}\n  }, {\n    \"label\": \"mapReverseU\",\n    \"kind\": 12,\n    \"tags\": 
[],\n    \"detail\": \"(t<'a>, 'a => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Uncurried version of [mapReverse](#mapReverse). \"}\n  }]\n\nComplete src/Completion.res 3:9\nposCursor:[3:9] posNoWhite:[3:8] Found expr:[3:3->3:9]\nPexp_ident Array.:[3:3->3:9]\nCompletable: Cpath Value[Array, \"\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Array, \"\"]\nPath Array.\n[{\n    \"label\": \"fold_left\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(('a, 'b) => 'a, 'a, array<'b>) => 'a\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.fold_left f x a] computes\\n   [f (... (f (f x a.(0)) a.(1)) ...) a.(n-1)],\\n   where [n] is the length of the array [a]. \"}\n  }, {\n    \"label\": \"concat\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"list<array<'a>> => array<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Same as {!Array.append}, but concatenates a list of arrays. \"}\n  }, {\n    \"label\": \"mapi\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"((int, 'a) => 'b, array<'a>) => array<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Same as {!Array.map}, but the\\n   function is applied to the index of the element as first argument,\\n   and the element itself as second argument. \"}\n  }, {\n    \"label\": \"exists\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a => bool, array<'a>) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.exists p [|a1; ...; an|]] checks if at least one element of\\n    the array satisfies the predicate [p]. That is, it returns\\n    [(p a1) || (p a2) || ... 
|| (p an)].\\n    @since 4.03.0 \"}\n  }, {\n    \"label\": \"for_all\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a => bool, array<'a>) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.for_all p [|a1; ...; an|]] checks if all elements of the array\\n   satisfy the predicate [p]. That is, it returns\\n   [(p a1) && (p a2) && ... && (p an)].\\n   @since 4.03.0 \"}\n  }, {\n    \"label\": \"copy\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"array<'a> => array<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.copy a] returns a copy of [a], that is, a fresh array\\n   containing the same elements as [a]. \"}\n  }, {\n    \"label\": \"iter2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(('a, 'b) => unit, array<'a>, array<'b>) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.iter2 f a b] applies function [f] to all the elements of [a]\\n   and [b].\\n   Raise [Invalid_argument] if the arrays are not the same size.\\n   @since 4.03.0 \"}\n  }, {\n    \"label\": \"to_list\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"array<'a> => list<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.to_list a] returns the list of all the elements of [a]. \"}\n  }, {\n    \"label\": \"stable_sort\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(('a, 'a) => int, array<'a>) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Same as {!Array.sort}, but the sorting algorithm is stable (i.e.\\n   elements that compare equal are kept in their original order) and\\n   not guaranteed to run in constant heap space.\\n\\n   The current implementation uses Merge Sort. 
It uses [n/2]\\n   words of heap space, where [n] is the length of the array.\\n   It is usually faster than the current implementation of {!Array.sort}.\\n\"}\n  }, {\n    \"label\": \"iteri\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"((int, 'a) => unit, array<'a>) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Same as {!Array.iter}, but the\\n   function is applied with the index of the element as first argument,\\n   and the element itself as second argument. \"}\n  }, {\n    \"label\": \"memq\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a, array<'a>) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Same as {!Array.mem}, but uses physical equality instead of structural\\n   equality to compare array elements.\\n   @since 4.03.0 \"}\n  }, {\n    \"label\": \"map2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(('a, 'b) => 'c, array<'a>, array<'b>) => array<'c>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.map2 f a b] applies function [f] to all the elements of [a]\\n   and [b], and builds an array with the results returned by [f]:\\n   [[| f a.(0) b.(0); ...; f a.(Array.length a - 1) b.(Array.length b - 1)|]].\\n   Raise [Invalid_argument] if the arrays are not the same size.\\n   @since 4.03.0 \"}\n  }, {\n    \"label\": \"set\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, int, 'a) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.set a n x] modifies array [a] in place, replacing\\n   element number [n] with [x].\\n   You can also write [a.(n) <- x] instead of [Array.set a n x].\\n\\n   Raise [Invalid_argument \\\"index out of bounds\\\"]\\n   if [n] is outside the range 0 to [Array.length a - 1]. 
\"}\n  }, {\n    \"label\": \"make\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, 'a) => array<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.make n x] returns a fresh array of length [n],\\n   initialized with [x].\\n   All the elements of this new array are initially\\n   physically equal to [x] (in the sense of the [==] predicate).\\n   Consequently, if [x] is mutable, it is shared among all elements\\n   of the array, and modifying [x] through one of the array entries\\n   will modify all other entries at the same time.\\n\\n   Raise [Invalid_argument] if [n < 0] or [n > Sys.max_array_length].\\n   If the value of [x] is a floating-point number, then the maximum\\n   size is only [Sys.max_array_length / 2].\"}\n  }, {\n    \"label\": \"make_float\",\n    \"kind\": 12,\n    \"tags\": [1],\n    \"detail\": \"int => array<float>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: Use Array.create_float instead.\\n\\n @deprecated [Array.make_float] is an alias for {!Array.create_float}. \"}\n  }, {\n    \"label\": \"fold_right\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(('b, 'a) => 'a, array<'b>, 'a) => 'a\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.fold_right f a x] computes\\n   [f a.(0) (f a.(1) ( ... (f a.(n-1) x) ...))],\\n   where [n] is the length of the array [a]. \"}\n  }, {\n    \"label\": \"sort\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(('a, 'a) => int, array<'a>) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Sort an array in increasing order according to a comparison\\n   function.  The comparison function must return 0 if its arguments\\n   compare as equal, a positive integer if the first is greater,\\n   and a negative integer if the first is smaller (see below for a\\n   complete specification).  
For example, {!Pervasives.compare} is\\n   a suitable comparison function, provided there are no floating-point\\n   NaN values in the data.  After calling [Array.sort], the\\n   array is sorted in place in increasing order.\\n   [Array.sort] is guaranteed to run in constant heap space\\n   and (at most) logarithmic stack space.\\n\\n   The current implementation uses Heap Sort.  It runs in constant\\n   stack space.\\n\\n   Specification of the comparison function:\\n   Let [a] be the array and [cmp] the comparison function.  The following\\n   must be true for all x, y, z in a :\\n-   [cmp x y] > 0 if and only if [cmp y x] < 0\\n-   if [cmp x y] >= 0 and [cmp y z] >= 0 then [cmp x z] >= 0\\n\\n   When [Array.sort] returns, [a] contains the same elements as before,\\n   reordered in such a way that for all i and j valid indices of [a] :\\n-   [cmp a.(i) a.(j)] >= 0 if and only if i >= j\\n\"}\n  }, {\n    \"label\": \"length\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"array<'a> => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Return the length (number of elements) of the given array. \"}\n  }, {\n    \"label\": \"sub\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, int, int) => array<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.sub a start len] returns a fresh array of length [len],\\n   containing the elements number [start] to [start + len - 1]\\n   of array [a].\\n\\n   Raise [Invalid_argument \\\"Array.sub\\\"] if [start] and [len] do not\\n   designate a valid subarray of [a]; that is, if\\n   [start < 0], or [len < 0], or [start + len > Array.length a]. \"}\n  }, {\n    \"label\": \"of_list\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"list<'a> => array<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.of_list l] returns a fresh array containing the elements\\n   of [l]. 
\"}\n  }, {\n    \"label\": \"iter\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a => unit, array<'a>) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.iter f a] applies function [f] in turn to all\\n   the elements of [a].  It is equivalent to\\n   [f a.(0); f a.(1); ...; f a.(Array.length a - 1); ()]. \"}\n  }, {\n    \"label\": \"map\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a => 'b, array<'a>) => array<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.map f a] applies function [f] to all the elements of [a],\\n   and builds an array with the results returned by [f]:\\n   [[| f a.(0); f a.(1); ...; f a.(Array.length a - 1) |]]. \"}\n  }, {\n    \"label\": \"unsafe_get\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, int) => 'a\",\n    \"documentation\": null\n  }, {\n    \"label\": \"make_matrix\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int, 'a) => array<array<'a>>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.make_matrix dimx dimy e] returns a two-dimensional array\\n   (an array of arrays) with first dimension [dimx] and\\n   second dimension [dimy]. All the elements of this new matrix\\n   are initially physically equal to [e].\\n   The element ([x,y]) of a matrix [m] is accessed\\n   with the notation [m.(x).(y)].\\n\\n   Raise [Invalid_argument] if [dimx] or [dimy] is negative or\\n   greater than {!Sys.max_array_length}.\\n   If the value of [e] is a floating-point number, then the maximum\\n   size is only [Sys.max_array_length / 2]. 
\"}\n  }, {\n    \"label\": \"mem\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a, array<'a>) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [mem a l] is true if and only if [a] is equal\\n   to an element of [l].\\n   @since 4.03.0 \"}\n  }, {\n    \"label\": \"get\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, int) => 'a\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.get a n] returns the element number [n] of array [a].\\n   The first element has number 0.\\n   The last element has number [Array.length a - 1].\\n   You can also write [a.(n)] instead of [Array.get a n].\\n\\n   Raise [Invalid_argument \\\"index out of bounds\\\"]\\n   if [n] is outside the range 0 to [(Array.length a - 1)]. \"}\n  }, {\n    \"label\": \"append\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, array<'a>) => array<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.append v1 v2] returns a fresh array containing the\\n   concatenation of the arrays [v1] and [v2]. \"}\n  }, {\n    \"label\": \"unsafe_set\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, int, 'a) => unit\",\n    \"documentation\": null\n  }, {\n    \"label\": \"create_matrix\",\n    \"kind\": 12,\n    \"tags\": [1],\n    \"detail\": \"(int, int, 'a) => array<array<'a>>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: Use Array.make_matrix instead.\\n\\n @deprecated [Array.create_matrix] is an alias for {!Array.make_matrix}. 
\"}\n  }, {\n    \"label\": \"create_float\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => array<float>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.create_float n] returns a fresh float array of length [n],\\n    with uninitialized data.\\n    @since 4.03 \"}\n  }, {\n    \"label\": \"create\",\n    \"kind\": 12,\n    \"tags\": [1],\n    \"detail\": \"(int, 'a) => array<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: Use Array.make instead.\\n\\n @deprecated [Array.create] is an alias for {!Array.make}. \"}\n  }, {\n    \"label\": \"init\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int => 'a) => array<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.init n f] returns a fresh array of length [n],\\n   with element number [i] initialized to the result of [f i].\\n   In other terms, [Array.init n f] tabulates the results of [f]\\n   applied to the integers [0] to [n-1].\\n\\n   Raise [Invalid_argument] if [n < 0] or [n > Sys.max_array_length].\\n   If the return type of [f] is [float], then the maximum\\n   size is only [Sys.max_array_length / 2].\"}\n  }, {\n    \"label\": \"fast_sort\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(('a, 'a) => int, array<'a>) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Same as {!Array.sort} or {!Array.stable_sort}, whichever is faster\\n    on typical input.\\n\"}\n  }, {\n    \"label\": \"fill\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, int, int, 'a) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.fill a ofs len x] modifies the array [a] in place,\\n   storing [x] in elements number [ofs] to [ofs + len - 1].\\n\\n   Raise [Invalid_argument \\\"Array.fill\\\"] if [ofs] and [len] do not\\n   designate a valid subarray of [a]. 
\"}\n  }, {\n    \"label\": \"blit\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, int, array<'a>, int, int) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.blit v1 o1 v2 o2 len] copies [len] elements\\n   from array [v1], starting at element number [o1], to array [v2],\\n   starting at element number [o2]. It works correctly even if\\n   [v1] and [v2] are the same array, and the source and\\n   destination chunks overlap.\\n\\n   Raise [Invalid_argument \\\"Array.blit\\\"] if [o1] and [len] do not\\n   designate a valid subarray of [v1], or if [o2] and [len] do not\\n   designate a valid subarray of [v2]. \"}\n  }, {\n    \"label\": \"Floatarray\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Floatarray\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 5:10\nposCursor:[5:10] posNoWhite:[5:9] Found expr:[5:3->5:10]\nPexp_ident Array.m:[5:3->5:10]\nCompletable: Cpath Value[Array, m]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Array, m]\nPath Array.m\n[{\n    \"label\": \"mapi\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"((int, 'a) => 'b, array<'a>) => array<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Same as {!Array.map}, but the\\n   function is applied to the index of the element as first argument,\\n   and the element itself as second argument. 
\"}\n  }, {\n    \"label\": \"memq\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a, array<'a>) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Same as {!Array.mem}, but uses physical equality instead of structural\\n   equality to compare array elements.\\n   @since 4.03.0 \"}\n  }, {\n    \"label\": \"map2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(('a, 'b) => 'c, array<'a>, array<'b>) => array<'c>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.map2 f a b] applies function [f] to all the elements of [a]\\n   and [b], and builds an array with the results returned by [f]:\\n   [[| f a.(0) b.(0); ...; f a.(Array.length a - 1) b.(Array.length b - 1)|]].\\n   Raise [Invalid_argument] if the arrays are not the same size.\\n   @since 4.03.0 \"}\n  }, {\n    \"label\": \"make\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, 'a) => array<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.make n x] returns a fresh array of length [n],\\n   initialized with [x].\\n   All the elements of this new array are initially\\n   physically equal to [x] (in the sense of the [==] predicate).\\n   Consequently, if [x] is mutable, it is shared among all elements\\n   of the array, and modifying [x] through one of the array entries\\n   will modify all other entries at the same time.\\n\\n   Raise [Invalid_argument] if [n < 0] or [n > Sys.max_array_length].\\n   If the value of [x] is a floating-point number, then the maximum\\n   size is only [Sys.max_array_length / 2].\"}\n  }, {\n    \"label\": \"make_float\",\n    \"kind\": 12,\n    \"tags\": [1],\n    \"detail\": \"int => array<float>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: Use Array.create_float instead.\\n\\n @deprecated [Array.make_float] is an alias for {!Array.create_float}. 
\"}\n  }, {\n    \"label\": \"map\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a => 'b, array<'a>) => array<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.map f a] applies function [f] to all the elements of [a],\\n   and builds an array with the results returned by [f]:\\n   [[| f a.(0); f a.(1); ...; f a.(Array.length a - 1) |]]. \"}\n  }, {\n    \"label\": \"make_matrix\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int, 'a) => array<array<'a>>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [Array.make_matrix dimx dimy e] returns a two-dimensional array\\n   (an array of arrays) with first dimension [dimx] and\\n   second dimension [dimy]. All the elements of this new matrix\\n   are initially physically equal to [e].\\n   The element ([x,y]) of a matrix [m] is accessed\\n   with the notation [m.(x).(y)].\\n\\n   Raise [Invalid_argument] if [dimx] or [dimy] is negative or\\n   greater than {!Sys.max_array_length}.\\n   If the value of [e] is a floating-point number, then the maximum\\n   size is only [Sys.max_array_length / 2]. 
\"}\n  }, {\n    \"label\": \"mem\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a, array<'a>) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" [mem a l] is true if and only if [a] is equal\\n   to an element of [l].\\n   @since 4.03.0 \"}\n  }]\n\nComplete src/Completion.res 15:17\nposCursor:[15:17] posNoWhite:[15:16] Found expr:[15:12->15:17]\nPexp_ident Dep.c:[15:12->15:17]\nCompletable: Cpath Value[Dep, c]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Dep, c]\nPath Dep.c\n[{\n    \"label\": \"customDouble\",\n    \"kind\": 12,\n    \"tags\": [1],\n    \"detail\": \"int => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: Use customDouble instead\\n\\nSome doc comment\"}\n  }]\n\nComplete src/Completion.res 23:20\nposCursor:[23:20] posNoWhite:[23:19] Found expr:[23:11->23:20]\nPexp_apply ...[23:11->23:18] ()\nCompletable: CnamedArg(Value[Lib, foo], \"\", [])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Lib, foo]\nPath Lib.foo\nFound type for function (~age: int, ~name: string) => string\n[{\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 26:13\nposCursor:[26:13] posNoWhite:[26:12] Found expr:[26:3->26:13]\nCompletable: Cpath array<int>->m\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath array<int>->m\nContextPath array<int>\nPath Js.Array2.m\n[{\n    \"label\": \"Js.Array2.mapi\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, ('a, int) => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, 
returning\\na new array. The function acceps two arguments: an item from the array and its\\nindex number. The result array does not have to have elements of the same type\\nas the input array. See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\n// multiply each item in array by its position\\nlet product = (item, index) => item * index\\nJs.Array2.mapi([10, 11, 12], product) == [0, 11, 24]\\n```\\n\"}\n  }, {\n    \"label\": \"Js.Array2.map\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, returning\\na new array. The result array does not have to have elements of the same type\\nas the input array. See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.Array2.map([12, 4, 8], x => x * x) == [144, 16, 64]\\nJs.Array2.map([\\\"animal\\\", \\\"vegetable\\\", \\\"mineral\\\"], Js.String.length) == [6, 9, 7]\\n```\\n\"}\n  }]\n\nComplete src/Completion.res 29:13\nposCursor:[29:13] posNoWhite:[29:12] Found expr:[29:3->29:13]\nCompletable: Cpath string->toU\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath string->toU\nContextPath string\nPath Js.String2.toU\n[{\n    \"label\": \"Js.String2.toUpperCase\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`toUpperCase(str)` converts `str` to upper case using the locale-insensitive\\ncase mappings in the Unicode Character Database. 
Notice that the conversion can\\nexpand the number of letters in the result; for example the German ß\\ncapitalizes to two Ses in a row.\\n\\nSee [`String.toUpperCase`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/toUpperCase)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.toUpperCase(\\\"abc\\\") == \\\"ABC\\\"\\nJs.String2.toUpperCase(`Straße`) == `STRASSE`\\nJs.String2.toUpperCase(`πς`) == `ΠΣ`\\n```\\n\"}\n  }]\n\nComplete src/Completion.res 34:8\nposCursor:[34:8] posNoWhite:[34:7] Found expr:[34:3->34:8]\nCompletable: Cpath Value[op]->e\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[op]->e\nContextPath Value[op]\nPath op\nPath Belt.Option.e\n[{\n    \"label\": \"Belt.Option.eqU\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(option<'a>, option<'b>, ('a, 'b) => bool) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nUncurried version of `eq`\\n\"}\n  }, {\n    \"label\": \"Belt.Option.eq\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(option<'a>, option<'b>, ('a, 'b) => bool) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nEvaluates two optional values for equality with respect to a predicate\\nfunction. 
If both `optValue1` and `optValue2` are `None`, returns `true`.\\nIf one of the arguments is `Some(value)` and the other is `None`, returns\\n`false`.\\n\\nIf arguments are `Some(value1)` and `Some(value2)`, returns the result of\\n`predicate(value1, value2)`; the predicate function must return a bool.\\n\\n## Examples\\n\\n```rescript\\nlet clockEqual = (a, b) => mod(a, 12) == mod(b, 12)\\n\\nopen Belt.Option\\n\\neq(Some(3), Some(15), clockEqual) /* true */\\n\\neq(Some(3), None, clockEqual) /* false */\\n\\neq(None, Some(3), clockEqual) /* false */\\n\\neq(None, None, clockEqual) /* true */\\n```\\n\"}\n  }]\n\nComplete src/Completion.res 44:7\nposCursor:[44:7] posNoWhite:[44:6] Found expr:[44:3->54:3]\nPexp_apply ...[50:9->50:10] (...[44:3->50:8], ...[51:2->54:3])\nposCursor:[44:7] posNoWhite:[44:6] Found expr:[44:3->50:8]\nCompletable: Cpath Value[fa]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[fa]->\nContextPath Value[fa]\nPath fa\nCPPipe pathFromEnv:ForAuto found:true\nPath ForAuto.\n[{\n    \"label\": \"ForAuto.abc\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"ForAuto.abd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 47:21\nposCursor:[47:21] posNoWhite:[47:20] Found expr:[47:3->47:21]\nposCursor:[47:21] posNoWhite:[47:20] Found expr:[47:12->47:21]\nPexp_ident Js.Dict.u:[47:12->47:21]\nCompletable: Cpath Value[Js, Dict, u]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Js, Dict, u]\nPath Js.Dict.u\n[{\n    \"label\": \"unsafeGet\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, key) => 'a\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`Js.Dict.unsafeGet(key)` returns the value if the key exists, otherwise an `undefined` value is 
returned. Use this only when you are sure the key exists (i.e. when having used the `keys()` function to check that the key is valid).\\n\\n## Examples\\n\\n```rescript\\nJs.Dict.unsafeGet(ages, \\\"Fred\\\") == 49\\nJs.Dict.unsafeGet(ages, \\\"Paul\\\") // returns undefined\\n```\\n\"}\n  }, {\n    \"label\": \"unsafeDeleteKey\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<string>, string) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Experimental internal function \"}\n  }]\n\nComplete src/Completion.res 59:30\nposCursor:[59:30] posNoWhite:[59:29] Found expr:[59:15->59:30]\nJSX <O.Comp:[59:15->59:21] second[59:22->59:28]=...[59:29->59:30]> _children:None\nCompletable: Cexpression CJsxPropValue [O, Comp] second=z\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [O, Comp] second\nPath O.Comp.make\n[{\n    \"label\": \"zzz\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 62:23\nposCursor:[62:23] posNoWhite:[62:22] Found expr:[62:15->62:23]\nJSX <O.Comp:[62:15->62:21] z[62:22->62:23]=...[62:22->62:23]> _children:None\nCompletable: Cjsx([O, Comp], z, [z])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath O.Comp.make\n[{\n    \"label\": \"zoo\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 65:8\nAttribute id:reac:[65:3->65:8] label:reac\nCompletable: Cdecorator(reac)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"react.component\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"The `@react.component` decorator is used to annotate functions that are RescriptReact components.\\n\\nYou will need this decorator whenever you want to use a 
ReScript / React component in ReScript JSX expressions.\\n\\nNote: The `@react.component` decorator requires the `jsx` config to be set in your `rescript.json`/`bsconfig.json` to enable the required React transformations.\\n\\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#react-component-decorator).\"},\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Completion.res 68:10\nposCursor:[68:10] posNoWhite:[68:9] Found expr:[0:-1->86:1]\nPexp_apply ...[80:6->80:7] (...[80:8->86:1])\nAttribute id:react.let:[68:3->80:3] label:react.\nCompletable: Cdecorator(react.)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"component\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"The `@react.component` decorator is used to annotate functions that are RescriptReact components.\\n\\nYou will need this decorator whenever you want to use a ReScript / React component in ReScript JSX expressions.\\n\\nNote: The `@react.component` decorator requires the `jsx` config to be set in your `rescript.json`/`bsconfig.json` to enable the required React transformations.\\n\\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#react-component-decorator).\"},\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Completion.res 71:27\nposCursor:[71:27] posNoWhite:[71:26] Found expr:[71:11->71:27]\nPexp_apply ...[71:11->71:18] (~name71:20->71:24=...[71:20->71:24])\nCompletable: CnamedArg(Value[Lib, foo], \"\", [name])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Lib, foo]\nPath Lib.foo\nFound type for function (~age: int, ~name: string) => string\n[{\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 74:26\nposCursor:[74:26] posNoWhite:[74:25] 
Found expr:[74:11->74:26]\nPexp_apply ...[74:11->74:18] (~age74:20->74:23=...[74:20->74:23])\nCompletable: CnamedArg(Value[Lib, foo], \"\", [age])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Lib, foo]\nPath Lib.foo\nFound type for function (~age: int, ~name: string) => string\n[{\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 77:32\nposCursor:[77:32] posNoWhite:[77:31] Found expr:[77:11->77:32]\nPexp_apply ...[77:11->77:18] (~age77:20->77:23=...[77:25->77:28])\nCompletable: CnamedArg(Value[Lib, foo], \"\", [age])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Lib, foo]\nPath Lib.foo\nFound type for function (~age: int, ~name: string) => string\n[{\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 82:5\nposCursor:[82:5] posNoWhite:[82:4] Found expr:[80:8->86:1]\nPexp_apply ...[80:8->80:15] (~age84:3->84:6=...[84:7->84:8], ~name85:3->85:7=...[85:8->85:10])\nCompletable: CnamedArg(Value[Lib, foo], \"\", [age, name])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Lib, foo]\nPath Lib.foo\nFound type for function (~age: int, ~name: string) => string\n[]\n\nComplete src/Completion.res 90:13\nposCursor:[90:13] posNoWhite:[90:12] Found expr:[90:3->93:18]\nPexp_send a[90:12->90:13] e:[90:3->90:10]\nCompletable: Cpath Value[someObj][\"a\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someObj][\"a\"]\nContextPath Value[someObj]\nPath someObj\n[{\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 95:24\nposCursor:[95:24] posNoWhite:[95:23] Found 
expr:[95:3->99:6]\nPexp_send [95:24->95:24] e:[95:3->95:22]\nCompletable: Cpath Value[nestedObj][\"x\"][\"y\"][\"\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[nestedObj][\"x\"][\"y\"][\"\"]\nContextPath Value[nestedObj][\"x\"][\"y\"]\nContextPath Value[nestedObj][\"x\"]\nContextPath Value[nestedObj]\nPath nestedObj\n[{\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 99:7\nposCursor:[99:7] posNoWhite:[99:6] Found expr:[99:3->102:20]\nPexp_send a[99:6->99:7] e:[99:3->99:4]\nCompletable: Cpath Value[o][\"a\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[o][\"a\"]\nContextPath Value[o]\nPath o\n[{\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 104:17\nposCursor:[104:17] posNoWhite:[104:16] Found expr:[104:3->125:19]\nPexp_send [104:17->104:17] e:[104:3->104:15]\nCompletable: Cpath Value[no][\"x\"][\"y\"][\"\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[no][\"x\"][\"y\"][\"\"]\nContextPath Value[no][\"x\"][\"y\"]\nContextPath Value[no][\"x\"]\nContextPath Value[no]\nPath no\n[{\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }, {\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 110:5\nposCursor:[110:5] posNoWhite:[110:4] Found expr:[110:3->110:5]\nPexp_field [110:3->110:4] _:[116:0->110:5]\nCompletable: Cpath Value[r].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 
pervasives\nContextPath Value[r].\"\"\nContextPath Value[r]\nPath r\nContextPath Value[r]->\nContextPath Value[r]\nPath r\nCPPipe pathFromEnv: found:true\nPath Completion.\n[{\n    \"label\": \"x\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nx: int\\n```\\n\\n```rescript\\ntype r = {x: int, y: string}\\n```\"}\n  }, {\n    \"label\": \"y\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ny: string\\n```\\n\\n```rescript\\ntype r = {x: int, y: string}\\n```\"}\n  }]\n\nComplete src/Completion.res 113:25\nposCursor:[113:25] posNoWhite:[113:24] Found expr:[113:3->113:25]\nPexp_field [113:3->113:24] _:[116:0->113:25]\nCompletable: Cpath Value[Objects, Rec, recordVal].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Objects, Rec, recordVal].\"\"\nContextPath Value[Objects, Rec, recordVal]\nPath Objects.Rec.recordVal\nContextPath Value[Objects, Rec, recordVal]->\nContextPath Value[Objects, Rec, recordVal]\nPath Objects.Rec.recordVal\nCPPipe pathFromEnv:Rec found:true\nPath Objects.Rec.\n[{\n    \"label\": \"xx\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nxx: int\\n```\\n\\n```rescript\\ntype recordt = {xx: int, ss: string}\\n```\"}\n  }, {\n    \"label\": \"ss\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nss: string\\n```\\n\\n```rescript\\ntype recordt = {xx: int, ss: string}\\n```\"}\n  }]\n\nComplete src/Completion.res 120:7\nposCursor:[120:7] posNoWhite:[120:6] Found expr:[119:11->123:1]\nposCursor:[120:7] posNoWhite:[120:6] Found expr:[119:11->123:1]\nposCursor:[120:7] posNoWhite:[120:6] Found expr:[120:5->122:5]\nposCursor:[120:7] 
posNoWhite:[120:6] Found expr:[120:5->120:7]\nPexp_ident my:[120:5->120:7]\nCompletable: Cpath Value[my]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[my]\nPath my\n[{\n    \"label\": \"myAmazingFunction\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int) => int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 125:19\nposCursor:[125:19] posNoWhite:[125:18] Found expr:[125:3->145:32]\nPexp_send [125:19->125:19] e:[125:3->125:17]\nCompletable: Cpath Value[Objects, object][\"\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Objects, object][\"\"]\nContextPath Value[Objects, object]\nPath Objects.object\n[{\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }, {\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 151:6\nposCursor:[151:6] posNoWhite:[151:5] Found expr:[151:4->151:6]\nJSX <O.:[151:4->151:6] > _children:None\nCompletable: Cpath Module[O, \"\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[O, \"\"]\nPath O.\n[{\n    \"label\": \"Comp\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Comp\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 157:8\nposCursor:[157:8] posNoWhite:[157:7] Found expr:[157:3->157:8]\nPexp_field [157:3->157:7] _:[165:0->157:8]\nCompletable: Cpath Value[q].aa.\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[q].aa.\"\"\nContextPath Value[q].aa\nContextPath Value[q]\nPath q\nContextPath Value[q]->aa\nContextPath Value[q]\nPath q\nCPPipe pathFromEnv: found:true\nPath Completion.aa\nContextPath Value[q].aa->\nContextPath Value[q].aa\nContextPath Value[q]\nPath q\nContextPath 
Value[q]->aa\nContextPath Value[q]\nPath q\nCPPipe pathFromEnv: found:true\nPath Completion.aa\nCPPipe pathFromEnv: found:true\nPath Completion.\n[{\n    \"label\": \"x\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nx: int\\n```\\n\\n```rescript\\ntype aa = {x: int, name: string}\\n```\"}\n  }, {\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype aa = {x: int, name: string}\\n```\"}\n  }]\n\nComplete src/Completion.res 159:9\nposCursor:[159:9] posNoWhite:[159:8] Found expr:[159:3->159:9]\nPexp_field [159:3->159:7] n:[159:8->159:9]\nCompletable: Cpath Value[q].aa.n\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[q].aa.n\nContextPath Value[q].aa\nContextPath Value[q]\nPath q\nContextPath Value[q]->aa\nContextPath Value[q]\nPath q\nCPPipe pathFromEnv: found:true\nPath Completion.aa\nContextPath Value[q].aa->n\nContextPath Value[q].aa\nContextPath Value[q]\nPath q\nContextPath Value[q]->aa\nContextPath Value[q]\nPath q\nCPPipe pathFromEnv: found:true\nPath Completion.aa\nCPPipe pathFromEnv: found:true\nPath Completion.n\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype aa = {x: int, name: string}\\n```\"}\n  }]\n\nComplete src/Completion.res 162:6\nposCursor:[162:6] posNoWhite:[162:5] Found expr:[162:3->162:6]\nPexp_construct Lis:[162:3->162:6] None\nCompletable: Cpath Value[Lis]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Lis]\nPath Lis\n[{\n    \"label\": \"List\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module List\",\n    
\"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"List\",\n      \"filePath\": \"src/Completion.res\"\n    }\n  }, {\n    \"label\": \"ListLabels\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module ListLabels\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"ListLabels\",\n      \"filePath\": \"src/Completion.res\"\n    }\n  }]\n\nComplete src/Completion.res 169:16\nposCursor:[169:16] posNoWhite:[169:15] Found expr:[169:4->169:16]\nJSX <WithChildren:[169:4->169:16] > _children:None\nCompletable: Cpath Module[WithChildren]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[WithChildren]\nPath WithChildren\n[{\n    \"label\": \"WithChildren\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module WithChildren\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 172:16\nposCursor:[172:16] posNoWhite:[172:15] Found type:[172:12->172:16]\nPtyp_constr Js.n:[172:12->172:16]\nCompletable: Cpath Type[Js, n]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[Js, n]\nPath Js.n\n[{\n    \"label\": \"null_undefined\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type null_undefined\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype null_undefined<'a> = nullable<'a>\\n```\"}\n  }, {\n    \"label\": \"nullable\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type nullable\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype nullable<'a> = Value('a) | Null | Undefined\\n```\"}\n  }, {\n    \"label\": \"null\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type null\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n  Nullable value of this type can be either null or 'a. 
This type is equivalent to Js.Null.t.\\n\\n\\n```rescript\\ntype null<'a> = Value('a) | Null\\n```\"}\n  }]\n\nComplete src/Completion.res 174:20\nposCursor:[174:20] posNoWhite:[174:19] Found type:[174:12->174:20]\nPtyp_constr ForAuto.:[174:12->174:20]\nCompletable: Cpath Type[ForAuto, \"\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[ForAuto, \"\"]\nPath ForAuto.\n[{\n    \"label\": \"t\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype t = int\\n```\"}\n  }]\n\nComplete src/Completion.res 179:13\nposCursor:[179:13] posNoWhite:[179:12] Found expr:[179:11->179:13]\nPexp_construct As:[179:11->179:13] None\nCompletable: Cpath Value[As]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[As]\nPath As\n[{\n    \"label\": \"Asterix\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Asterix\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nAsterix\\n```\\n\\n```rescript\\ntype z = Allo | Asterix | Baba\\n```\"}\n  }]\n\nComplete src/Completion.res 182:17\nPmod_ident For:[182:14->182:17]\nCompletable: Cpath Module[For]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[For]\nPath For\n[{\n    \"label\": \"ForAuto\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module ForAuto\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 190:11\nposCursor:[190:11] posNoWhite:[190:10] Found expr:[190:3->190:11]\nPexp_ident Private.:[190:3->190:11]\nCompletable: Cpath Value[Private, \"\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Private, \"\"]\nPath Private.\n[{\n    \"label\": \"b\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 
202:6\nposCursor:[202:6] posNoWhite:[202:5] Found expr:[202:3->202:6]\nPexp_ident sha:[202:3->202:6]\nCompletable: Cpath Value[sha]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[sha]\nPath sha\n[]\n\nComplete src/Completion.res 205:6\nposCursor:[205:6] posNoWhite:[205:5] Found expr:[205:3->205:6]\nPexp_ident sha:[205:3->205:6]\nCompletable: Cpath Value[sha]\nRaw opens: 1 Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives Completion.res\nContextPath Value[sha]\nPath sha\n[{\n    \"label\": \"shadowed\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 208:6\nposCursor:[208:6] posNoWhite:[208:5] Found expr:[208:3->208:6]\nPexp_ident sha:[208:3->208:6]\nCompletable: Cpath Value[sha]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[sha]\nPath sha\n[{\n    \"label\": \"shadowed\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 221:22\nposCursor:[221:22] posNoWhite:[221:21] Found expr:[221:3->224:22]\nPexp_send [221:22->221:22] e:[221:3->221:20]\nCompletable: Cpath Value[FAO, forAutoObject][\"\"]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[FAO, forAutoObject][\"\"]\nContextPath Value[FAO, forAutoObject]\nPath FAO.forAutoObject\n[{\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"forAutoLabel\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"FAR.forAutoRecord\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 224:37\nposCursor:[224:37] posNoWhite:[224:36] Found expr:[224:3->224:37]\nPexp_field [224:3->224:36] _:[233:0->224:37]\nCompletable: Cpath Value[FAO, forAutoObject][\"forAutoLabel\"].\"\"\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[FAO, forAutoObject][\"forAutoLabel\"].\"\"\nContextPath Value[FAO, forAutoObject][\"forAutoLabel\"]\nContextPath Value[FAO, forAutoObject]\nPath FAO.forAutoObject\nContextPath Value[FAO, forAutoObject][\"forAutoLabel\"]->\nContextPath Value[FAO, forAutoObject][\"forAutoLabel\"]\nContextPath Value[FAO, forAutoObject]\nPath FAO.forAutoObject\nCPPipe pathFromEnv:FAR found:true\nPath FAR.\n[{\n    \"label\": \"forAuto\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"ForAuto.t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nforAuto: ForAuto.t\\n```\\n\\n```rescript\\ntype forAutoRecord = {\\n  forAuto: ForAuto.t,\\n  something: option<int>,\\n}\\n```\"}\n  }, {\n    \"label\": \"something\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsomething: option<int>\\n```\\n\\n```rescript\\ntype forAutoRecord = {\\n  forAuto: ForAuto.t,\\n  something: option<int>,\\n}\\n```\"}\n  }]\n\nComplete src/Completion.res 
227:46\nposCursor:[227:46] posNoWhite:[227:45] Found expr:[227:3->0:-1]\nCompletable: Cpath Value[FAO, forAutoObject][\"forAutoLabel\"].forAuto->\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[FAO, forAutoObject][\"forAutoLabel\"].forAuto->\nContextPath Value[FAO, forAutoObject][\"forAutoLabel\"].forAuto\nContextPath Value[FAO, forAutoObject][\"forAutoLabel\"]\nContextPath Value[FAO, forAutoObject]\nPath FAO.forAutoObject\nContextPath Value[FAO, forAutoObject][\"forAutoLabel\"]->forAuto\nContextPath Value[FAO, forAutoObject][\"forAutoLabel\"]\nContextPath Value[FAO, forAutoObject]\nPath FAO.forAutoObject\nCPPipe pathFromEnv:FAR found:true\nPath FAR.forAuto\nCPPipe pathFromEnv:ForAuto found:false\nPath ForAuto.\n[{\n    \"label\": \"ForAuto.abc\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"ForAuto.abd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 230:55\nposCursor:[230:55] posNoWhite:[230:54] Found expr:[230:3->230:55]\nposCursor:[230:55] posNoWhite:[230:54] Found expr:[230:46->230:55]\nPexp_ident ForAuto.a:[230:46->230:55]\nCompletable: Cpath Value[ForAuto, a]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ForAuto, a]\nPath ForAuto.a\n[{\n    \"label\": \"abc\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"abd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 234:34\nposCursor:[234:34] posNoWhite:[234:33] Found expr:[234:18->234:36]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[234:18->234:34], ...[234:34->234:35])\nposCursor:[234:34] posNoWhite:[234:33] Found expr:[234:18->234:34]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[234:18->234:30], ...[234:32->234:34])\nposCursor:[234:34] posNoWhite:[234:33] Found expr:[234:32->234:34]\nPexp_ident na:[234:32->234:34]\nCompletable: Cpath Value[na]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[na]\nPath na\n[{\n    \"label\": \"name\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 237:17\nposCursor:[237:17] posNoWhite:[237:14] Found expr:[237:14->237:22]\nCompletable: Cnone\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\n[]\n\nComplete src/Completion.res 243:8\nposCursor:[243:8] posNoWhite:[243:7] Found expr:[242:14->243:8]\nPexp_apply ...[243:3->243:4] (...[242:14->242:15], ...[243:5->243:8])\nposCursor:[243:8] posNoWhite:[243:7] Found expr:[243:5->243:8]\nPexp_field [243:5->243:7] _:[245:0->243:8]\nCompletable: Cpath Value[_z].\"\"\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[_z].\"\"\nContextPath Value[_z]\nPath _z\nContextPath Value[_z]->\nContextPath Value[_z]\nPath _z\nCPPipe pathFromEnv: found:true\nPath Completion.\n[{\n    \"label\": \"x\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nx: int\\n```\\n\\n```rescript\\ntype r = {x: int, y: string}\\n```\"}\n  }, {\n    \"label\": \"y\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ny: string\\n```\\n\\n```rescript\\ntype r = {x: int, y: string}\\n```\"}\n  }]\n\nComplete src/Completion.res 254:17\nposCursor:[254:17] posNoWhite:[254:16] Found expr:[254:11->254:17]\nPexp_construct SomeLo:[254:11->254:17] None\nCompletable: Cpath Value[SomeLo]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[SomeLo]\nPath SomeLo\n[{\n    \"label\": \"SomeLocalModule\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module SomeLocalModule\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 256:29\nposCursor:[256:29] posNoWhite:[256:28] Found type:[256:13->256:29]\nPtyp_constr SomeLocalModule.:[256:13->256:29]\nCompletable: Cpath Type[SomeLocalModule, \"\"]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Type[SomeLocalModule, \"\"]\nPath SomeLocalModule.\n[{\n    \"label\": \"zz\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type zz\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype zz = int\\n```\"}\n  }]\n\nComplete src/Completion.res 261:33\nposCursor:[261:33] posNoWhite:[261:32] Found type:[261:17->263:11]\nPtyp_constr SomeLocalModule.:[261:17->263:11]\nCompletable: Cpath Type[SomeLocalModule, \"\"]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Type[SomeLocalModule, \"\"]\nPath SomeLocalModule.\n[{\n    \"label\": \"zz\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type zz\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype zz = int\\n```\"}\n  }]\n\nComplete src/Completion.res 268:21\nPtype_variant unary SomeLocal:[268:12->268:21]\nCompletable: Cpath Value[SomeLocal]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[SomeLocal]\nPath SomeLocal\n[{\n    \"label\": \"SomeLocalVariantItem\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"SomeLocalVariantItem\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nSomeLocalVariantItem\\n```\\n\\n```rescript\\ntype someLocalVariant = SomeLocalVariantItem\\n```\"}\n  }, {\n    \"label\": \"SomeLocalModule\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module SomeLocalModule\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 271:20\nposCursor:[271:20] posNoWhite:[271:19] Found pattern:[271:7->274:3]\nposCursor:[271:20] posNoWhite:[271:19] Found type:[271:11->274:3]\nPtyp_constr SomeLocal:[271:11->274:3]\nCompletable: Cpath Type[SomeLocal]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Type[SomeLocal]\nPath SomeLocal\n[{\n    \"label\": \"SomeLocalModule\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module SomeLocalModule\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 275:15\nposCursor:[275:15] posNoWhite:[275:14] Found expr:[274:11->278:1]\nposCursor:[275:15] posNoWhite:[275:14] Found expr:[274:11->278:1]\nposCursor:[275:15] posNoWhite:[275:14] Found expr:[275:5->277:3]\nposCursor:[275:15] posNoWhite:[275:14] Found expr:[275:13->275:15]\nPexp_ident _w:[275:13->275:15]\nCompletable: Cpath Value[_w]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[_w]\nPath _w\n[{\n    \"label\": \"_world\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"'a\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 281:22\nposCursor:[281:22] posNoWhite:[281:21] Found type:[281:21->281:22]\nPtyp_constr s:[281:21->281:22]\nCompletable: Cpath Type[s]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Type[s]\nPath s\n[{\n    \"label\": \"someType\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type someType\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someType = {hello: string}\\n```\"}\n  }, {\n    \"label\": \"someLocalVariant\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type someLocalVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someLocalVariant = SomeLocalVariantItem\\n```\"}\n  }]\n\nComplete src/Completion.res 291:30\nposCursor:[291:30] posNoWhite:[291:29] Found expr:[291:11->291:32]\nPexp_apply ...[291:11->291:28] ()\nCompletable: CnamedArg(Value[funRecord].someFun, \"\", [])\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[funRecord].someFun\nContextPath Value[funRecord]\nPath funRecord\nContextPath Value[funRecord]->someFun\nContextPath Value[funRecord]\nPath funRecord\nCPPipe pathFromEnv: found:true\nPath Completion.someFun\nFound type for function (~name: string) => unit\n[{\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 296:11\nposCursor:[296:11] posNoWhite:[296:10] Found expr:[296:3->296:11]\nPexp_field [296:3->296:10] _:[299:0->296:11]\nCompletable: Cpath Value[retAA](Nolabel).\"\"\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[retAA](Nolabel).\"\"\nContextPath Value[retAA](Nolabel)\nContextPath Value[retAA]\nPath retAA\nContextPath Value[retAA](Nolabel, Nolabel)->\nContextPath Value[retAA](Nolabel, Nolabel)\nContextPath Value[retAA]\nPath retAA\nCPPipe pathFromEnv: found:true\nPath Completion.\n[{\n    \"label\": \"x\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nx: int\\n```\\n\\n```rescript\\ntype aa = {x: int, name: string}\\n```\"}\n  }, {\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype aa = {x: int, name: string}\\n```\"}\n  }]\n\nComplete src/Completion.res 301:13\nposCursor:[301:13] posNoWhite:[301:12] Found expr:[301:3->301:13]\nPexp_apply ...[301:3->301:11] ()\nCompletable: CnamedArg(Value[ff](~c), \"\", [])\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ff](~c)\nContextPath Value[ff]\nPath ff\nFound type for function (\n  ~opt1: int=?,\n  ~a: int,\n  ~b: int,\n  unit,\n  ~opt2: int=?,\n  unit,\n) => int\n[{\n    \"label\": \"opt1\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": null\n  }, {\n    \"label\": \"a\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"b\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"opt2\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 304:15\nposCursor:[304:15] posNoWhite:[304:14] Found expr:[304:3->304:15]\nPexp_apply ...[304:3->304:13] ()\nCompletable: CnamedArg(Value[ff](~c)(Nolabel), \"\", [])\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ff](~c)(Nolabel)\nContextPath Value[ff](~c)\nContextPath Value[ff]\nPath ff\nFound type for function (~a: int, ~b: int, ~opt2: int=?, unit) => int\n[{\n    \"label\": \"a\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"b\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"opt2\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 307:17\nposCursor:[307:17] posNoWhite:[307:16] Found expr:[307:3->307:17]\nPexp_apply ...[307:3->307:15] ()\nCompletable: CnamedArg(Value[ff](~c, Nolabel), \"\", [])\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ff](~c, Nolabel)\nContextPath Value[ff]\nPath ff\nFound type for function (~a: int, ~b: int, ~opt2: int=?, unit) => int\n[{\n    \"label\": \"a\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"b\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"opt2\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 310:21\nposCursor:[310:21] posNoWhite:[310:20] Found expr:[310:3->310:21]\nPexp_apply ...[310:3->310:19] ()\nCompletable: CnamedArg(Value[ff](~c, Nolabel, Nolabel), \"\", [])\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ff](~c, Nolabel, Nolabel)\nContextPath Value[ff]\nPath ff\nFound type for function (~a: int, ~b: int) => int\n[{\n    \"label\": \"a\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"b\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 313:23\nposCursor:[313:23] posNoWhite:[313:22] Found expr:[313:3->313:23]\nPexp_apply ...[313:3->313:21] ()\nCompletable: CnamedArg(Value[ff](~c, Nolabel, ~b), \"\", [])\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ff](~c, Nolabel, ~b)\nContextPath Value[ff]\nPath ff\nFound type for function (~a: int, ~opt2: int=?, unit) => int\n[{\n    \"label\": \"a\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"opt2\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 316:16\nposCursor:[316:16] posNoWhite:[316:15] Found expr:[316:3->316:16]\nPexp_apply ...[316:3->316:14] ()\nCompletable: CnamedArg(Value[ff](~opt2), \"\", [])\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ff](~opt2)\nContextPath Value[ff]\nPath ff\nFound type for function (~opt1: int=?, ~a: int, ~b: int, unit, unit, ~c: int) => int\n[{\n    \"label\": \"opt1\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": null\n  }, {\n    \"label\": \"a\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"b\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"c\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 323:17\nposCursor:[323:17] posNoWhite:[323:16] Found expr:[323:3->323:17]\nPexp_apply ...[323:3->323:15] ()\nCompletable: CnamedArg(Value[withCallback], \"\", [])\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[withCallback]\nPath withCallback\nFound type for function (~b: int) => callback\n[{\n    \"label\": \"b\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"a\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 326:21\nposCursor:[326:21] posNoWhite:[326:20] Found expr:[326:3->326:21]\nPexp_apply ...[326:3->326:19] ()\nCompletable: CnamedArg(Value[withCallback](~a), \"\", [])\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[withCallback](~a)\nContextPath Value[withCallback]\nPath withCallback\nFound type for function int\n[]\n\nComplete src/Completion.res 329:21\nposCursor:[329:21] posNoWhite:[329:20] Found expr:[329:3->329:21]\nPexp_apply ...[329:3->329:19] ()\nCompletable: CnamedArg(Value[withCallback](~b), \"\", [])\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[withCallback](~b)\nContextPath Value[withCallback]\nPath withCallback\nFound type for function (~a: int) => int\n[{\n    \"label\": \"a\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 336:26\nposCursor:[336:26] posNoWhite:[336:25] Found expr:[333:3->346:23]\nJSX <div:[333:3->333:6] onClick[334:4->334:11]=...[334:13->346:23]> _children:None\nposCursor:[336:26] posNoWhite:[336:25] Found expr:[334:13->346:23]\nposCursor:[336:26] posNoWhite:[336:25] Found expr:[334:13->338:6]\nposCursor:[336:26] posNoWhite:[336:25] Found expr:[334:13->338:6]\nposCursor:[336:26] posNoWhite:[336:25] Found expr:[335:6->338:5]\nposCursor:[336:26] posNoWhite:[336:25] Found expr:[336:16->338:5]\nposCursor:[336:26] posNoWhite:[336:25] Found pattern:[336:20->338:5]\nposCursor:[336:26] posNoWhite:[336:25] Found type:[336:23->338:5]\nPtyp_constr Res:[336:23->338:5]\nposCursor:[336:26] posNoWhite:[336:25] Found pattern:[336:20->338:5]\nposCursor:[336:26] posNoWhite:[336:25] Found type:[336:23->338:5]\nPtyp_constr Res:[336:23->338:5]\nCompletable: Cpath Type[Res]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Type[Res]\nPath Res\n[{\n    \"label\": \"RescriptReactErrorBoundary\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module RescriptReactErrorBoundary\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"RescriptReactErrorBoundary\",\n      \"filePath\": \"src/Completion.res\"\n    }\n  }, {\n    \"label\": \"RescriptReactRouter\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module RescriptReactRouter\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"RescriptReactRouter\",\n      \"filePath\": \"src/Completion.res\"\n    }\n  }]\n\nComplete src/Completion.res 343:57\nposCursor:[343:57] posNoWhite:[343:56] Found expr:[343:53->346:23]\nposCursor:[343:57] posNoWhite:[343:56] Found expr:[343:53->343:57]\nPexp_ident this:[343:53->343:57]\nCompletable: Cpath Value[this]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[this]\nPath this\n[{\n    \"label\": \"thisIsNotSaved\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"\\\\\\\"Type Not Known\\\"\",\n    \"documentation\": null\n  }]\n\nHover src/Completion.res 346:14\nNothing at that position. Now trying to use completion.\nposCursor:[346:14] posNoWhite:[346:13] Found expr:[346:9->346:23]\nJSX <div:[346:9->346:12] name[346:13->346:17]=...[346:18->346:20]> _children:346:21\nCompletable: Cjsx([div], name, [name])\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nPath ReactDOM.domProps\nPath PervasivesU.JsxDOM.domProps\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nstring\\n```\"}}\n\nHover src/Completion.res 349:17\nNothing at that position. Now trying to use completion.\nposCursor:[349:17] posNoWhite:[349:16] Found expr:[349:11->349:35]\nPexp_send age[349:30->349:33] e:[349:11->349:28]\nposCursor:[349:17] posNoWhite:[349:16] Found expr:[349:11->349:28]\nPexp_ident FAO.forAutoObject:[349:11->349:28]\nCompletable: Cpath Value[FAO, forAutoObject]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[FAO, forAutoObject]\nPath FAO.forAutoObject\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n{\\\"age\\\": int, \\\"forAutoLabel\\\": FAR.forAutoRecord}\\n```\"}}\n\nHover src/Completion.res 352:17\nNothing at that position. Now trying to use completion.\nposCursor:[352:17] posNoWhite:[352:16] Found expr:[352:11->352:22]\nPexp_apply ...[352:11->352:13] (~opt1352:15->352:19=...[352:20->352:21])\nCompletable: CnamedArg(Value[ff], opt1, [opt1])\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ff]\nPath ff\nFound type for function (\n  ~opt1: int=?,\n  ~a: int,\n  ~b: int,\n  unit,\n  ~opt2: int=?,\n  unit,\n  ~c: int,\n) => int\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noption<int>\\n```\"}}\n\nComplete src/Completion.res 355:23\nposCursor:[355:23] posNoWhite:[355:22] Found expr:[0:-1->355:23]\nposCursor:[355:23] posNoWhite:[355:22] Found expr:[355:12->355:23]\n[]\n\nComplete src/Completion.res 362:8\nposCursor:[362:8] posNoWhite:[362:7] Found expr:[360:8->365:3]\nposCursor:[362:8] posNoWhite:[362:7] Found expr:[360:8->365:3]\nposCursor:[362:8] posNoWhite:[362:7] Found pattern:[362:7->364:5]\nposCursor:[362:8] posNoWhite:[362:7] Found pattern:[362:7->362:8]\nPpat_construct T:[362:7->362:8]\nCompletable: Cpattern Value[x]=T\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[x]\nPath x\nCompletable: Cpath Value[T]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[T]\nPath T\n[{\n    \"label\": \"That\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"That\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThat\\n```\\n\\n```rescript\\ntype v = This | That\\n```\"}\n  }, {\n    \"label\": \"This\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"This\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThis\\n```\\n\\n```rescript\\ntype v = This | That\\n```\"}\n  }, {\n    \"label\": \"TableclothMap\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module TableclothMap\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"TableclothMap\",\n      \"filePath\": \"src/Completion.res\"\n    }\n  }, {\n    \"label\": \"TypeArgCtx\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module TypeArgCtx\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"TypeArgCtx\",\n      \"filePath\": \"src/Completion.res\"\n    }\n  }, {\n    \"label\": \"TypeAtPosCompletion\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module TypeAtPosCompletion\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"TypeAtPosCompletion\",\n      \"filePath\": \"src/Completion.res\"\n    }\n  }, {\n    \"label\": \"TypeDefinition\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module TypeDefinition\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"TypeDefinition\",\n      \"filePath\": \"src/Completion.res\"\n    }\n  }]\n\nComplete src/Completion.res 373:21\nposCursor:[373:21] posNoWhite:[373:20] Found expr:[371:8->376:3]\nposCursor:[373:21] posNoWhite:[373:20] Found expr:[371:8->376:3]\nposCursor:[373:21] posNoWhite:[373:20] Found pattern:[373:7->375:5]\nposCursor:[373:21] posNoWhite:[373:20] Found 
pattern:[373:7->373:21]\nPpat_construct AndThatOther.T:[373:7->373:21]\nCompletable: Cpath Value[AndThatOther, T]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[AndThatOther, T]\nPath AndThatOther.T\n[{\n    \"label\": \"ThatOther\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"ThatOther\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThatOther\\n```\\n\\n```rescript\\ntype v = And | ThatOther\\n```\"}\n  }]\n\nComplete src/Completion.res 378:24\nposCursor:[378:24] posNoWhite:[378:23] Found expr:[378:12->378:26]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[378:12->378:24], ...[378:24->378:25])\nposCursor:[378:24] posNoWhite:[378:23] Found expr:[378:12->378:24]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[378:12->378:14], ...[378:16->378:24])\nposCursor:[378:24] posNoWhite:[378:23] Found expr:[378:16->378:24]\nPexp_ident ForAuto.:[378:16->378:24]\nCompletable: Cpath Value[ForAuto, \"\"]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ForAuto, \"\"]\nPath ForAuto.\n[{\n    \"label\": \"abc\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"abd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 381:38\nposCursor:[381:38] posNoWhite:[381:37] Found expr:[381:12->381:41]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[381:12->381:39], ...[381:39->381:40])\nposCursor:[381:38] posNoWhite:[381:37] Found expr:[381:12->381:39]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[381:12->381:17], ...[381:19->381:39])\nposCursor:[381:38] posNoWhite:[381:37] Found expr:[381:19->381:39]\nPexp_send [381:38->381:38] e:[381:19->381:36]\nCompletable: Cpath Value[FAO, forAutoObject][\"\"]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[FAO, forAutoObject][\"\"]\nContextPath Value[FAO, forAutoObject]\nPath FAO.forAutoObject\n[{\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"forAutoLabel\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"FAR.forAutoRecord\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 384:24\nposCursor:[384:24] posNoWhite:[384:23] Found expr:[384:11->384:26]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[384:11->384:24], ...[384:24->384:25])\nposCursor:[384:24] posNoWhite:[384:23] Found expr:[384:11->384:24]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[384:11->384:12], ...[384:14->384:24])\nposCursor:[384:24] posNoWhite:[384:23] Found expr:[384:14->384:24]\nPexp_field [384:14->384:23] _:[384:24->384:24]\nCompletable: Cpath 
Value[funRecord].\"\"\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[funRecord].\"\"\nContextPath Value[funRecord]\nPath funRecord\nContextPath Value[funRecord]->\nContextPath Value[funRecord]\nPath funRecord\nCPPipe pathFromEnv: found:true\nPath Completion.\n[{\n    \"label\": \"someFun\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"(~name: string) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsomeFun: (~name: string) => unit\\n```\\n\\n```rescript\\ntype funRecord = {\\n  someFun: (~name: string) => unit,\\n  stuff: string,\\n}\\n```\"}\n  }, {\n    \"label\": \"stuff\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nstuff: string\\n```\\n\\n```rescript\\ntype funRecord = {\\n  someFun: (~name: string) => unit,\\n  stuff: string,\\n}\\n```\"}\n  }]\n\nComplete src/Completion.res 389:12\nposCursor:[389:12] posNoWhite:[389:11] Found expr:[387:8->392:1]\nposCursor:[389:12] posNoWhite:[389:11] Found expr:[387:8->392:1]\nposCursor:[389:12] posNoWhite:[389:11] Found expr:[388:2->391:4]\nposCursor:[389:12] posNoWhite:[389:11] Found expr:[389:6->391:4]\nposCursor:[389:12] posNoWhite:[389:11] Found expr:[389:6->389:12]\nCompletable: Cpath array->ma\nRaw opens: 3 Js.place holder ... Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 4 pervasives Completion.res Completion.res js.ml\nContextPath array->ma\nContextPath array\nPath Js.Array2.ma\n[{\n    \"label\": \"Array2.mapi\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, ('a, int) => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, returning\\na new array. 
The function acceps two arguments: an item from the array and its\\nindex number. The result array does not have to have elements of the same type\\nas the input array. See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\n// multiply each item in array by its position\\nlet product = (item, index) => item * index\\nJs.Array2.mapi([10, 11, 12], product) == [0, 11, 24]\\n```\\n\"}\n  }, {\n    \"label\": \"Array2.map\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, returning\\na new array. The result array does not have to have elements of the same type\\nas the input array. See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.Array2.map([12, 4, 8], x => x * x) == [144, 16, 64]\\nJs.Array2.map([\\\"animal\\\", \\\"vegetable\\\", \\\"mineral\\\"], Js.String.length) == [6, 9, 7]\\n```\\n\"}\n  }]\n\nComplete src/Completion.res 397:14\nposCursor:[397:14] posNoWhite:[397:13] Found expr:[396:14->397:20]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[396:14->397:16], ...[397:16->397:19])\nposCursor:[397:14] posNoWhite:[397:13] Found expr:[396:14->397:16]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[396:14->397:11], ...[397:13->397:16])\nposCursor:[397:14] posNoWhite:[397:13] Found expr:[397:13->397:16]\nPexp_ident red:[397:13->397:16]\nCompletable: Cpath Value[red]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[red]\nPath red\n[{\n    \"label\": \"red\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 402:25\nposCursor:[402:25] posNoWhite:[402:24] Found expr:[400:14->402:31]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[400:14->402:27], ...[402:27->402:30])\nposCursor:[402:25] posNoWhite:[402:24] Found expr:[400:14->402:27]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[400:14->402:22], ...[402:24->402:27])\nposCursor:[402:25] posNoWhite:[402:24] Found expr:[402:24->402:27]\nPexp_ident red:[402:24->402:27]\nCompletable: Cpath Value[red]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[red]\nPath red\n[{\n    \"label\": \"red\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 405:22\nposCursor:[405:22] posNoWhite:[405:21] Found expr:[405:11->467:0]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[405:11->423:17], ...[428:0->467:0])\nposCursor:[405:22] posNoWhite:[405:21] Found expr:[405:11->423:17]\nPexp_apply ...__ghost__[0:-1->0:-1] (...[405:11->405:19], ...[405:21->423:17])\nposCursor:[405:22] posNoWhite:[405:21] Found expr:[405:21->423:17]\nposCursor:[405:22] posNoWhite:[405:21] Found expr:[405:21->405:22]\nPexp_ident r:[405:21->405:22]\nCompletable: Cpath Value[r]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[r]\nPath r\n[{\n    \"label\": \"red\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }, {\n    \"label\": \"retAA\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"unit => aa\",\n    \"documentation\": null\n  }, {\n    \"label\": \"r\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"rAlias\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype r = {x: int, y: string}\\n```\"}\n  }]\n\nComplete src/Completion.res 409:21\nposCursor:[409:21] posNoWhite:[409:20] Found expr:[408:14->415:1]\nposCursor:[409:21] posNoWhite:[409:20] Found expr:[408:14->415:1]\nposCursor:[409:21] posNoWhite:[409:20] Found expr:[409:5->414:17]\nposCursor:[409:21] posNoWhite:[409:20] Found expr:[409:5->411:42]\nposCursor:[409:21] posNoWhite:[409:20] Found expr:[409:5->411:5]\nPexp_ident SomeLocalModule.:[409:5->411:5]\nCompletable: Cpath Value[SomeLocalModule, \"\"]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[SomeLocalModule, \"\"]\nPath SomeLocalModule.\n[{\n    \"label\": \"bb\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"aa\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 412:21\nposCursor:[412:21] posNoWhite:[412:20] Found expr:[408:14->415:1]\nposCursor:[412:21] posNoWhite:[412:20] Found expr:[408:14->415:1]\nposCursor:[412:21] posNoWhite:[412:20] Found expr:[411:2->414:17]\nposCursor:[412:21] posNoWhite:[412:20] Found expr:[412:5->414:17]\nPexp_apply ...[412:5->414:8] (...[414:9->414:16])\nposCursor:[412:21] posNoWhite:[412:20] Found expr:[412:5->414:8]\nPexp_ident SomeLocalModule.:[412:5->414:8]\nCompletable: Cpath Value[SomeLocalModule, \"\"]\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[SomeLocalModule, \"\"]\nPath SomeLocalModule.\n[{\n    \"label\": \"bb\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"aa\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Completion.res 417:17\nposCursor:[417:17] posNoWhite:[417:16] Found expr:[417:11->417:17]\nCompletable: Cpath int->t\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath int->t\nContextPath int\nPath Belt.Int.t\n[{\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.toFloat\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => float\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `float`.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toFloat(1) === 1.0) /* true */\\n```\\n\"}\n  }]\n\nComplete src/Completion.res 420:19\nposCursor:[420:19] posNoWhite:[420:18] Found expr:[420:11->420:19]\nCompletable: Cpath float->t\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath float->t\nContextPath float\nPath Belt.Float.t\n[{\n    \"label\": \"Belt.Float.toInt\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"float => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `float` to an `int`.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Float.toInt(1.0) === 1) /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Float.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"float => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `float` to a `string`. 
Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Float.toString(1.0) === \\\"1.0\\\") /* true */\\n```\\n\"}\n  }]\n\nComplete src/Completion.res 425:8\nposCursor:[425:8] posNoWhite:[425:7] Found expr:[425:3->425:8]\nCompletable: Cpath Value[ok]->g\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[ok]->g\nContextPath Value[ok]\nPath ok\nPath Belt.Result.g\n[{\n    \"label\": \"Belt.Result.getExn\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t<'a, 'b> => 'a\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`getExn(res)`: when `res` is `Ok(n)`, returns `n` when `res` is `Error(m)`, raise an exception\\n\\n## Examples\\n\\n```rescript\\nBelt.Result.getExn(Belt.Result.Ok(42)) == 42\\n\\nBelt.Result.getExn(Belt.Result.Error(\\\"Invalid data\\\")) /* raises exception */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Result.getWithDefault\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a, 'b>, 'a) => 'a\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`getWithDefault(res, defaultValue)`: If `res` is `Ok(n)`, returns `n`,\\notherwise `default`\\n\\n## Examples\\n\\n```rescript\\nBelt.Result.getWithDefault(Ok(42), 0) == 42\\n\\nBelt.Result.getWithDefault(Error(\\\"Invalid Data\\\"), 0) == 0\\n```\\n\"}\n  }]\n\nComplete src/Completion.res 443:15\nposCursor:[443:15] posNoWhite:[443:14] Found expr:[443:3->443:15]\nPexp_field [443:3->443:12] so:[443:13->443:15]\nCompletable: Cpath Value[rWithDepr].so\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[rWithDepr].so\nContextPath Value[rWithDepr]\nPath rWithDepr\nContextPath Value[rWithDepr]->so\nContextPath Value[rWithDepr]\nPath rWithDepr\nCPPipe pathFromEnv: found:true\nPath Completion.so\n[{\n    \"label\": \"someInt\",\n    \"kind\": 5,\n    \"tags\": [1],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: \\n\\n```rescript\\nsomeInt: int\\n```\\n\\n```rescript\\ntype someRecordWithDeprecatedField = {\\n  name: string,\\n  someInt: int,\\n  someFloat: float,\\n}\\n```\"}\n  }, {\n    \"label\": \"someFloat\",\n    \"kind\": 5,\n    \"tags\": [1],\n    \"detail\": \"float\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: Use 'someInt'.\\n\\n```rescript\\nsomeFloat: float\\n```\\n\\n```rescript\\ntype someRecordWithDeprecatedField = {\\n  name: string,\\n  someInt: int,\\n  someFloat: float,\\n}\\n```\"}\n  }]\n\nComplete src/Completion.res 450:37\nXXX Not found!\nCompletable: Cexpression Type[someVariantWithDeprecated]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Type[someVariantWithDeprecated]\nPath someVariantWithDeprecated\n[{\n    \"label\": \"DoNotUseMe\",\n    \"kind\": 4,\n    \"tags\": [1],\n    \"detail\": \"DoNotUseMe\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: \\n\\n```rescript\\nDoNotUseMe\\n```\\n\\n```rescript\\ntype someVariantWithDeprecated =\\n  | DoNotUseMe\\n  | UseMeInstead\\n  | AndNotMe\\n```\"},\n    \"insertText\": \"DoNotUseMe\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"UseMeInstead\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"UseMeInstead\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nUseMeInstead\\n```\\n\\n```rescript\\ntype someVariantWithDeprecated =\\n  | DoNotUseMe\\n  | UseMeInstead\\n  | AndNotMe\\n```\"},\n    \"insertText\": \"UseMeInstead\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"AndNotMe\",\n    \"kind\": 4,\n    \"tags\": [1],\n    \"detail\": \"AndNotMe\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: Use 'UseMeInstead'\\n\\n```rescript\\nAndNotMe\\n```\\n\\n```rescript\\ntype someVariantWithDeprecated =\\n  | DoNotUseMe\\n  | UseMeInstead\\n  | AndNotMe\\n```\"},\n    \"insertText\": \"AndNotMe\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Completion.res 455:30\nposCursor:[455:30] posNoWhite:[455:29] Found expr:[455:11->455:30]\nCompletable: Cpath Value[uncurried](Nolabel)->toS\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Value[uncurried](Nolabel)->toS\nContextPath Value[uncurried](Nolabel)\nContextPath Value[uncurried]\nPath uncurried\nPath Belt.Int.toS\n[{\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }]\n\nComplete src/Completion.res 462:30\nXXX Not found!\nCompletable: Cexpression Type[withUncurried]->recordField(fn)\nRaw opens: 2 Shadow.B.place holder ... Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath Type[withUncurried]\nPath withUncurried\n[{\n    \"label\": \"v => v\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => unit\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:v} => ${0:v}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Completion.res 465:26\nposCursor:[465:26] posNoWhite:[465:25] Found expr:[465:22->465:26]\nPexp_ident FAR.:[465:22->465:26]\nCompletable: Cpath ValueOrField[FAR, \"\"]\nRaw opens: 2 Shadow.B.place holder ... 
Shadow.A.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 3 pervasives Completion.res Completion.res\nContextPath ValueOrField[FAR, \"\"]\nPath FAR.\n[{\n    \"label\": \"forAutoRecord\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"forAutoRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype forAutoRecord = {\\n  forAuto: ForAuto.t,\\n  something: option<int>,\\n}\\n```\"}\n  }, {\n    \"label\": \"forAuto\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"ForAuto.t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nforAuto: ForAuto.t\\n```\\n\\n```rescript\\ntype forAutoRecord = {\\n  forAuto: ForAuto.t,\\n  something: option<int>,\\n}\\n```\"}\n  }, {\n    \"label\": \"something\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsomething: option<int>\\n```\\n\\n```rescript\\ntype forAutoRecord = {\\n  forAuto: ForAuto.t,\\n  something: option<int>,\\n}\\n```\"}\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionAttributes.res.txt",
    "content": "Complete src/CompletionAttributes.res 0:8\nAttribute id:modu:[0:3->0:8] label:modu\nCompletable: Cdecorator(modu)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"module\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"The `@module` decorator is used to bind to a JavaScript module.\\n\\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#module-decorator).\"},\n    \"insertText\": \"module(\\\"$0\\\")\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionAttributes.res 3:12\nXXX Not found!\nCompletable: CdecoratorPayload(module=)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"@rescript/react\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Package\",\n    \"documentation\": null\n  }, {\n    \"label\": \"./test.json\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Local file\",\n    \"documentation\": null\n  }, {\n    \"label\": \"./tst.js\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Local file\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionAttributes.res 6:7\nAttribute id:js:[6:3->6:7] label:@js\nCompletable: Cdecorator(@js)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"jsxConfig\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"The `@@jsxConfig` decorator is used to change the config for JSX on the fly.\\n\\n[Read more and see examples in the documentation](https://rescript-lang.org/docs/manual/latest/jsx#file-level-configuration).\"},\n    \"insertText\": \"jsxConfig({$0})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionAttributes.res 9:16\nXXX Not found!\nCompletable: JsxConfig\nPackage opens Pervasives.JsxModules.place 
holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"version\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nversion?: int\\n```\\n\\n```rescript\\ntype jsxConfig = {version: int, module_: string, mode: string}\\n```\"}\n  }, {\n    \"label\": \"module_\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule_?: string\\n```\\n\\n```rescript\\ntype jsxConfig = {version: int, module_: string, mode: string}\\n```\"}\n  }, {\n    \"label\": \"mode\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmode?: string\\n```\\n\\n```rescript\\ntype jsxConfig = {version: int, module_: string, mode: string}\\n```\"}\n  }]\n\nComplete src/CompletionAttributes.res 12:17\nXXX Not found!\nCompletable: JsxConfig\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"module_\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule_?: string\\n```\\n\\n```rescript\\ntype jsxConfig = {version: int, module_: string, mode: string}\\n```\"}\n  }, {\n    \"label\": \"mode\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmode?: string\\n```\\n\\n```rescript\\ntype jsxConfig = {version: int, module_: string, mode: string}\\n```\"}\n  }]\n\nComplete src/CompletionAttributes.res 15:25\nXXX Not found!\nCompletable: JsxConfig\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"\\\"\\\"\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": 
\"\\\"$0\\\"\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionAttributes.res 18:29\nXXX Not found!\nCompletable: JsxConfig\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"version\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nversion?: int\\n```\\n\\n```rescript\\ntype jsxConfig = {version: int, module_: string, mode: string}\\n```\"}\n  }, {\n    \"label\": \"mode\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmode?: string\\n```\\n\\n```rescript\\ntype jsxConfig = {version: int, module_: string, mode: string}\\n```\"}\n  }]\n\nComplete src/CompletionAttributes.res 21:12\nXXX Not found!\nCompletable: CdecoratorPayload(moduleWithImportAttributes)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"from\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nfrom?: string\\n```\\n\\n```rescript\\ntype moduleConfig = {from: string, with: string}\\n```\"}\n  }, {\n    \"label\": \"with\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nwith?: string\\n```\\n\\n```rescript\\ntype moduleConfig = {from: string, with: string}\\n```\"}\n  }]\n\nComplete src/CompletionAttributes.res 24:17\nXXX Not found!\nCompletable: CdecoratorPayload(moduleWithImportAttributes)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"{}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"importAttributesConfig\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype importAttributesConfig = {type_: string}\\n```\"},\n    
\"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionAttributes.res 27:19\nXXX Not found!\nCompletable: CdecoratorPayload(moduleWithImportAttributes)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"type_\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype_?: string\\n```\\n\\n```rescript\\ntype importAttributesConfig = {type_: string}\\n```\"}\n  }]\n\nComplete src/CompletionAttributes.res 30:19\nXXX Not found!\nCompletable: CdecoratorPayload(module=)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"@rescript/react\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Package\",\n    \"documentation\": null\n  }, {\n    \"label\": \"./test.json\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Local file\",\n    \"documentation\": null\n  }, {\n    \"label\": \"./tst.js\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Local file\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionAttributes.res 33:17\nXXX Not found!\nCompletable: CdecoratorPayload(module=)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"@rescript/react\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Package\",\n    \"documentation\": null\n  }, {\n    \"label\": \"./test.json\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Local file\",\n    \"documentation\": null\n  }, {\n    \"label\": \"./tst.js\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Local file\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionAttributes.res 36:14\nposCursor:[36:14] posNoWhite:[36:13] Found expr:[36:12->36:14]\nCompletable: CextensionNode(t)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    
\"label\": \"todo\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"`%todo` is used to tell the compiler that some code still needs to be implemented.\",\n    \"documentation\": null,\n    \"insertText\": \"todo\"\n  }, {\n    \"label\": \"todo (with payload)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"`%todo` is used to tell the compiler that some code still needs to be implemented. With a payload.\",\n    \"documentation\": null,\n    \"insertText\": \"todo(\\\"${0:TODO}\\\")\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionDicts.res.txt",
    "content": "Complete src/CompletionDicts.res 0:33\nposCursor:[0:33] posNoWhite:[0:32] Found expr:[0:14->0:35]\nPexp_apply ...[0:14->0:31] (...[0:32->0:34])\nCompletable: Cexpression CArgument Value[Js, Dict, fromArray]($0)->array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[Js, Dict, fromArray]($0)\nContextPath Value[Js, Dict, fromArray]\nPath Js.Dict.fromArray\n[{\n    \"label\": \"(_, _)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(key, 'a)\",\n    \"documentation\": null,\n    \"insertText\": \"(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionDicts.res 3:34\nposCursor:[3:34] posNoWhite:[3:33] Found expr:[3:14->3:37]\nPexp_apply ...[3:14->3:31] (...[3:32->3:36])\nCompletable: Cexpression CArgument Value[Js, Dict, fromArray]($0)->array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[Js, Dict, fromArray]($0)\nContextPath Value[Js, Dict, fromArray]\nPath Js.Dict.fromArray\n[{\n    \"label\": \"(_, _)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(key, 'a)\",\n    \"documentation\": null,\n    \"insertText\": \"(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionDicts.res 6:40\nposCursor:[6:40] posNoWhite:[6:39] Found expr:[6:14->6:44]\nPexp_apply ...[6:14->6:31] (...[6:32->6:43])\nCompletable: Cexpression CArgument Value[Js, Dict, fromArray]($0)->array, tuple($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[Js, Dict, fromArray]($0)\nContextPath Value[Js, Dict, fromArray]\nPath Js.Dict.fromArray\n[]\n\n\nComplete src/CompletionDicts.res 12:14\nposCursor:[12:14] posNoWhite:[12:13] Found expr:[10:11->14:2]\nPexp_apply ...[10:11->10:28] (...[10:29->14:1])\nCompletable: Cexpression CArgument Value[Js, Dict, fromArray]($0)->array, tuple($1)\nPackage opens Pervasives.JsxModules.place 
holder\nResolved opens 1 pervasives\nContextPath CArgument Value[Js, Dict, fromArray]($0)\nContextPath Value[Js, Dict, fromArray]\nPath Js.Dict.fromArray\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionExpressions.res.txt",
    "content": "Complete src/CompletionExpressions.res 3:20\nXXX Not found!\nCompletable: Cpattern CTuple(Value[s], Value[f])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CTuple(Value[s], Value[f])\nContextPath Value[s]\nPath s\nContextPath Value[f]\nPath f\n[{\n    \"label\": \"(_, _)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(bool, option<array<bool>>)\",\n    \"documentation\": null,\n    \"insertText\": \"(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 26:27\nposCursor:[26:27] posNoWhite:[26:26] Found expr:[26:11->26:29]\nPexp_apply ...[26:11->26:25] (...[26:26->26:28])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"offline\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noffline: bool\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"online\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<bool>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nonline: option<bool>\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: 
someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"variant\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nvariant: someVariant\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"polyvariant\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"somePolyVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\npolyvariant: somePolyVariant\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"nested\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<otherRecord>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnested: option<otherRecord>\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 29:28\nposCursor:[29:28] posNoWhite:[29:27] Found expr:[29:11->29:30]\nPexp_apply ...[29:11->29:25] (...[29:27->29:28])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)=n->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"nested\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<otherRecord>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnested: option<otherRecord>\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: 
option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 32:35\nposCursor:[32:35] posNoWhite:[32:34] Found expr:[32:11->32:38]\nPexp_apply ...[32:11->32:25] (...[32:26->32:38])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordField(offline)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 35:36\nposCursor:[35:36] posNoWhite:[35:35] Found expr:[35:11->35:39]\nPexp_apply ...[35:11->35:25] (...[35:26->35:38])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"offline\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noffline: bool\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"online\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<bool>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nonline: option<bool>\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": 
\"variant\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nvariant: someVariant\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"polyvariant\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"somePolyVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\npolyvariant: somePolyVariant\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"nested\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<otherRecord>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnested: option<otherRecord>\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 38:37\nposCursor:[38:37] posNoWhite:[38:35] Found expr:[38:11->38:53]\nPexp_apply ...[38:11->38:25] (...[38:26->38:52])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"online\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<bool>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nonline: option<bool>\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": 
\"variant\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nvariant: someVariant\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"polyvariant\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"somePolyVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\npolyvariant: somePolyVariant\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }, {\n    \"label\": \"nested\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<otherRecord>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnested: option<otherRecord>\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 41:44\nposCursor:[41:44] posNoWhite:[41:43] Found expr:[41:11->41:47]\nPexp_apply ...[41:11->41:25] (...[41:26->41:47])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordField(nested)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"Some(nested)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"otherRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype otherRecord = {someField: int, otherField: string}\\n```\"},\n    \"insertText\": \"Some(nested)$0\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n   
 \"detail\": \"otherRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype otherRecord = {someField: int, otherField: string}\\n```\"},\n    \"insertText\": \"Some($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"otherRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype otherRecord = {someField: int, otherField: string}\\n```\"}\n  }, {\n    \"label\": \"Some({})\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"otherRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype otherRecord = {someField: int, otherField: string}\\n```\"},\n    \"insertText\": \"Some({$0})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 44:46\nposCursor:[44:46] posNoWhite:[44:45] Found expr:[44:11->44:49]\nPexp_apply ...[44:11->44:25] (...[44:26->44:48])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordField(nested), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[]\n\nComplete src/CompletionExpressions.res 47:51\nposCursor:[47:51] posNoWhite:[47:50] Found expr:[47:11->47:55]\nPexp_apply ...[47:11->47:25] (...[47:26->47:54])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordField(nested), variantPayload::Some($0), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"someField\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsomeField: int\\n```\\n\\n```rescript\\ntype otherRecord = {someField: int, otherField: string}\\n```\"}\n 
 }, {\n    \"label\": \"otherField\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\notherField: string\\n```\\n\\n```rescript\\ntype otherRecord = {someField: int, otherField: string}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 50:45\nposCursor:[50:45] posNoWhite:[50:44] Found expr:[50:11->50:48]\nPexp_apply ...[50:11->50:25] (...[50:26->50:48])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordField(variant)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"Two\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(int, string)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(int, string)\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"Three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 53:47\nposCursor:[53:47] posNoWhite:[53:46] Found expr:[53:11->53:50]\nPexp_apply ...[53:11->53:25] (...[53:26->53:49])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)=O->recordField(variant)\nPackage 
opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 56:57\nposCursor:[56:57] posNoWhite:[56:56] Found expr:[56:11->56:61]\nPexp_apply ...[56:11->56:25] (...[56:26->56:60])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordField(polyvariant), polyvariantPayload::three($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"{}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>, variant: someVariant, polyvariant: somePolyVariant, nested: option<otherRecord>}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 59:60\nposCursor:[59:60] posNoWhite:[59:59] Found expr:[59:11->59:65]\nPexp_apply ...[59:11->59:25] (...[59:26->59:64])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordField(polyvariant), polyvariantPayload::three($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": 
\"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 62:62\nposCursor:[62:62] posNoWhite:[62:61] Found expr:[62:11->62:66]\nPexp_apply ...[62:11->62:25] (...[62:26->62:65])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)=t->recordField(polyvariant), polyvariantPayload::three($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 69:25\nposCursor:[69:25] posNoWhite:[69:24] Found expr:[69:11->69:26]\nPexp_apply ...[69:11->69:24] (...[69:25->69:26])\nCompletable: Cexpression CArgument Value[fnTakingArray]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingArray]($0)\nContextPath Value[fnTakingArray]\nPath fnTakingArray\n[{\n    \"label\": \"[]\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"option<bool>\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"[$0]\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 72:26\nposCursor:[72:26] posNoWhite:[72:25] Found expr:[72:11->72:28]\nPexp_apply ...[72:11->72:24] (...[72:25->72:27])\nCompletable: Cexpression CArgument Value[fnTakingArray]($0)->array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingArray]($0)\nContextPath Value[fnTakingArray]\nPath fnTakingArray\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": 
null,\n    \"insertText\": \"Some($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 75:26\nposCursor:[75:26] posNoWhite:[75:25] Found expr:[75:11->75:27]\nPexp_apply ...[75:11->75:24] (...[75:25->75:26])\nCompletable: Cexpression CArgument Value[fnTakingArray]($0)=s\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingArray]($0)\nContextPath Value[fnTakingArray]\nPath fnTakingArray\n[{\n    \"label\": \"s\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 78:31\nposCursor:[78:31] posNoWhite:[78:30] Found expr:[78:11->78:34]\nPexp_apply ...[78:11->78:24] (...[78:25->78:33])\nCompletable: Cexpression CArgument Value[fnTakingArray]($0)->array, variantPayload::Some($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingArray]($0)\nContextPath Value[fnTakingArray]\nPath fnTakingArray\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 81:31\nposCursor:[81:31] posNoWhite:[81:30] Found expr:[81:11->81:34]\nPexp_apply ...[81:11->81:24] (...[81:25->81:33])\nCompletable: Cexpression CArgument Value[fnTakingArray]($0)->array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingArray]($0)\nContextPath Value[fnTakingArray]\nPath fnTakingArray\n[{\n    
\"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 84:31\nposCursor:[84:31] posNoWhite:[84:30] Found expr:[84:11->84:40]\nPexp_apply ...[84:11->84:24] (...[84:25->84:39])\nCompletable: Cexpression CArgument Value[fnTakingArray]($0)->array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingArray]($0)\nContextPath Value[fnTakingArray]\nPath fnTakingArray\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 89:38\nposCursor:[89:38] posNoWhite:[89:37] Found expr:[89:11->89:41]\nPexp_apply ...[89:11->89:25] (...[89:26->89:40])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)=so\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath 
fnTakingRecord\n[{\n    \"label\": \"someBoolVar\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 96:43\nposCursor:[96:43] posNoWhite:[96:42] Found expr:[96:11->96:46]\nPexp_apply ...[96:11->96:30] (...[96:31->96:46])\nCompletable: Cexpression CArgument Value[fnTakingOtherRecord]($0)->recordField(otherField)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingOtherRecord]($0)\nContextPath Value[fnTakingOtherRecord]\nPath fnTakingOtherRecord\n[{\n    \"label\": \"\\\"\\\"\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"\\\"$0\\\"\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 108:57\nposCursor:[108:57] posNoWhite:[108:56] Found expr:[108:11->108:60]\nPexp_apply ...[108:11->108:42] (...[108:43->108:60])\nCompletable: Cexpression CArgument Value[fnTakingRecordWithOptionalField]($0)->recordField(someOptField)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecordWithOptionalField]($0)\nContextPath Value[fnTakingRecordWithOptionalField]\nPath fnTakingRecordWithOptionalField\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 116:53\nposCursor:[116:53] posNoWhite:[116:52] Found expr:[116:11->116:56]\nPexp_apply ...[116:11->116:39] (...[116:40->116:56])\nCompletable: Cexpression CArgument Value[fnTakingRecordWithOptVariant]($0)->recordField(someVariant)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument 
Value[fnTakingRecordWithOptVariant]($0)\nContextPath Value[fnTakingRecordWithOptVariant]\nPath fnTakingRecordWithOptVariant\n[{\n    \"label\": \"Some(someVariant)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"Some(someVariant)$0\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"Some($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"}\n  }, {\n    \"label\": \"Some(One)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"Some(One)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(Two)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"Some(Two)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(Three(_, _))\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(int, string)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(int, string)\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": 
\"Some(Three(${1:_}, ${2:_}))\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 126:49\nposCursor:[126:49] posNoWhite:[126:48] Found expr:[126:11->126:51]\nPexp_apply ...[126:11->126:31] (...[126:32->126:50])\nCompletable: Cexpression CArgument Value[fnTakingInlineRecord]($0)->variantPayload::WithInlineRecord($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingInlineRecord]($0)\nContextPath Value[fnTakingInlineRecord]\nPath fnTakingInlineRecord\n[{\n    \"label\": \"{}\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Inline record\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 129:50\nposCursor:[129:50] posNoWhite:[129:49] Found expr:[129:11->129:53]\nPexp_apply ...[129:11->129:31] (...[129:32->129:52])\nCompletable: Cexpression CArgument Value[fnTakingInlineRecord]($0)->variantPayload::WithInlineRecord($0), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingInlineRecord]($0)\nContextPath Value[fnTakingInlineRecord]\nPath fnTakingInlineRecord\n[{\n    \"label\": \"someBoolField\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Inline record\",\n    \"documentation\": null\n  }, {\n    \"label\": \"otherField\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Inline record\",\n    \"documentation\": null\n  }, {\n    \"label\": \"nestedRecord\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Inline record\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 132:51\nposCursor:[132:51] posNoWhite:[132:50] Found expr:[132:11->132:54]\nPexp_apply ...[132:11->132:31] (...[132:32->132:53])\nCompletable: Cexpression CArgument Value[fnTakingInlineRecord]($0)=s->variantPayload::WithInlineRecord($0), recordBody\nPackage opens 
Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingInlineRecord]($0)\nContextPath Value[fnTakingInlineRecord]\nPath fnTakingInlineRecord\n[{\n    \"label\": \"someBoolField\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Inline record\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 135:63\nposCursor:[135:63] posNoWhite:[135:62] Found expr:[135:11->135:67]\nPexp_apply ...[135:11->135:31] (...[135:32->135:66])\nCompletable: Cexpression CArgument Value[fnTakingInlineRecord]($0)->variantPayload::WithInlineRecord($0), recordField(nestedRecord)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingInlineRecord]($0)\nContextPath Value[fnTakingInlineRecord]\nPath fnTakingInlineRecord\n[{\n    \"label\": \"{}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"otherRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype otherRecord = {someField: int, otherField: string}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 138:65\nposCursor:[138:65] posNoWhite:[138:64] Found expr:[138:11->138:70]\nPexp_apply ...[138:11->138:31] (...[138:32->138:69])\nCompletable: Cexpression CArgument Value[fnTakingInlineRecord]($0)->variantPayload::WithInlineRecord($0), recordField(nestedRecord), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingInlineRecord]($0)\nContextPath Value[fnTakingInlineRecord]\nPath fnTakingInlineRecord\n[{\n    \"label\": \"someField\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsomeField: int\\n```\\n\\n```rescript\\ntype otherRecord = {someField: int, otherField: string}\\n```\"}\n  }, {\n    \"label\": 
\"otherField\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\notherField: string\\n```\\n\\n```rescript\\ntype otherRecord = {someField: int, otherField: string}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 159:20\nposCursor:[159:20] posNoWhite:[159:19] Found expr:[159:3->159:21]\nPexp_apply ...[159:3->159:19] (...[159:20->159:21])\nCompletable: Cexpression CArgument Value[fnTakingCallback]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingCallback]($0)\nContextPath Value[fnTakingCallback]\nPath fnTakingCallback\n[{\n    \"label\": \"() => {}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"unit => unit\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"() => ${0:()}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 162:21\nposCursor:[162:21] posNoWhite:[162:20] Found expr:[162:3->162:22]\nPexp_apply ...[162:3->162:19] (...[162:20->162:21])\nCompletable: Cexpression CArgument Value[fnTakingCallback]($0)=a\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingCallback]($0)\nContextPath Value[fnTakingCallback]\nPath fnTakingCallback\n[]\n\nComplete src/CompletionExpressions.res 165:22\nposCursor:[165:22] posNoWhite:[165:21] Found expr:[165:3->165:24]\nPexp_apply ...[165:3->165:19] (...[165:20->165:21])\nCompletable: Cexpression CArgument Value[fnTakingCallback]($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingCallback]($1)\nContextPath Value[fnTakingCallback]\nPath fnTakingCallback\n[{\n    \"label\": \"v => v\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool => unit\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:v} => ${0:v}\",\n    
\"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 168:25\nposCursor:[168:25] posNoWhite:[168:24] Found expr:[168:3->168:27]\nPexp_apply ...[168:3->168:19] (...[168:20->168:21], ...[168:23->168:24])\nCompletable: Cexpression CArgument Value[fnTakingCallback]($2)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingCallback]($2)\nContextPath Value[fnTakingCallback]\nPath fnTakingCallback\n[{\n    \"label\": \"event => event\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"ReactEvent.Mouse.t => unit\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:event} => ${0:event}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 171:29\nposCursor:[171:29] posNoWhite:[171:27] Found expr:[171:3->171:30]\nPexp_apply ...[171:3->171:19] (...[171:20->171:21], ...[171:23->171:24], ...[171:26->171:27])\nCompletable: Cexpression CArgument Value[fnTakingCallback]($3)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingCallback]($3)\nContextPath Value[fnTakingCallback]\nPath fnTakingCallback\n[{\n    \"label\": \"(~on, ~off=?, variant) => {}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(~on: bool, ~off: bool=?, variant) => int\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"(~on, ~off=?, ${1:variant}) => {${0:()}}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 174:32\nposCursor:[174:32] posNoWhite:[174:30] Found expr:[174:3->174:33]\nPexp_apply ...[174:3->174:19] (...[174:20->174:21], ...[174:23->174:24], ...[174:26->174:27], ...[174:29->174:30])\nCompletable: Cexpression CArgument Value[fnTakingCallback]($4)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingCallback]($4)\nContextPath Value[fnTakingCallback]\nPath 
fnTakingCallback\n[{\n    \"label\": \"(v1, v2, v3) => {}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(bool, option<bool>, bool) => unit\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"(${1:v1}, ${2:v2}, ${3:v3}) => {${0:()}}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 177:34\nposCursor:[177:34] posNoWhite:[177:33] Found expr:[177:3->177:36]\nPexp_apply ...[177:3->177:19] (...[177:20->177:21], ...[177:23->177:24], ...[177:26->177:27], ...[177:29->177:30], ...[177:32->177:33])\nCompletable: Cexpression CArgument Value[fnTakingCallback]($5)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingCallback]($5)\nContextPath Value[fnTakingCallback]\nPath fnTakingCallback\n[{\n    \"label\": \"(~on=?, ~off=?, ()) => {}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(~on: bool=?, ~off: bool=?, unit) => int\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"(~on=?, ~off=?, ()) => {${0:()}}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 185:10\nposCursor:[185:10] posNoWhite:[185:9] Found expr:[181:2->185:11]\nposCursor:[185:10] posNoWhite:[185:9] Found expr:[182:2->185:11]\nposCursor:[185:10] posNoWhite:[185:9] Found expr:[183:2->185:11]\nposCursor:[185:10] posNoWhite:[185:9] Found expr:[184:2->185:11]\nposCursor:[185:10] posNoWhite:[185:9] Found expr:[185:2->185:11]\nPexp_apply ...[185:2->185:8] (...[185:9->185:10])\nCompletable: Cexpression CArgument Value[Js, log]($0)=s\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[Js, log]($0)\nContextPath Value[Js, log]\nPath Js.log\n[{\n    \"label\": \"second2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"second\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": 
\"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"someBoolVar\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"s\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionExpressions.res 196:14\nposCursor:[196:14] posNoWhite:[196:13] Found expr:[196:3->196:14]\nPexp_field [196:3->196:6] someOpt:[196:7->196:14]\nCompletable: Cpath Value[fff].someOpt\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[fff].someOpt\nContextPath Value[fff]\nPath fff\nContextPath Value[fff]->someOpt\nContextPath Value[fff]\nPath fff\nCPPipe pathFromEnv: found:true\nPath CompletionExpressions.someOpt\n[{\n    \"label\": \"someOptField\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsomeOptField?: bool\\n```\\n\\n```rescript\\ntype recordWithOptionalField = {\\n  someField: int,\\n  someOptField?: bool,\\n}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 205:11\nposCursor:[205:11] posNoWhite:[205:10] Found expr:[205:3->205:12]\nPexp_apply ...[205:3->205:10] (...[205:11->205:12])\nCompletable: Cexpression CArgument Value[takesCb]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[takesCb]($0)\nContextPath Value[takesCb]\nPath takesCb\n[{\n    \"label\": \"someTyp => someTyp\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someTyp => 'a\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:someTyp} => ${0:someTyp}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 216:12\nposCursor:[216:12] posNoWhite:[216:11] Found expr:[216:3->216:13]\nPexp_apply ...[216:3->216:11] (...[216:12->216:13])\nCompletable: Cexpression CArgument Value[takesCb2]($0)\nPackage 
opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[takesCb2]($0)\nContextPath Value[takesCb2]\nPath takesCb2\n[{\n    \"label\": \"environment => environment\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"Environment.t => 'a\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:environment} => ${0:environment}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 225:12\nposCursor:[225:12] posNoWhite:[225:11] Found expr:[225:3->225:13]\nPexp_apply ...[225:3->225:11] (...[225:12->225:13])\nCompletable: Cexpression CArgument Value[takesCb3]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[takesCb3]($0)\nContextPath Value[takesCb3]\nPath takesCb3\n[{\n    \"label\": \"apiCallResult => apiCallResult\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"apiCallResult => 'a\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:apiCallResult} => ${0:apiCallResult}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 232:12\nposCursor:[232:12] posNoWhite:[232:11] Found expr:[232:3->232:13]\nPexp_apply ...[232:3->232:11] (...[232:12->232:13])\nCompletable: Cexpression CArgument Value[takesCb4]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[takesCb4]($0)\nContextPath Value[takesCb4]\nPath takesCb4\n[{\n    \"label\": \"apiCallResult => apiCallResult\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"option<apiCallResult> => 'a\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:apiCallResult} => ${0:apiCallResult}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 239:12\nposCursor:[239:12] posNoWhite:[239:11] Found expr:[239:3->239:13]\nPexp_apply ...[239:3->239:11] 
(...[239:12->239:13])\nCompletable: Cexpression CArgument Value[takesCb5]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[takesCb5]($0)\nContextPath Value[takesCb5]\nPath takesCb5\n[{\n    \"label\": \"apiCallResults => apiCallResults\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"array<option<apiCallResult>> => 'a\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:apiCallResults} => ${0:apiCallResults}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 250:30\nposCursor:[250:30] posNoWhite:[250:29] Found expr:[250:3->250:31]\nPexp_apply ...[250:3->250:20] (~updater250:22->250:29=...__ghost__[0:-1->0:-1])\nCompletable: Cexpression CArgument Value[commitLocalUpdate](~updater)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[commitLocalUpdate](~updater)\nContextPath Value[commitLocalUpdate]\nPath commitLocalUpdate\n[{\n    \"label\": \"recordSourceSelectorProxy => recordSourceSelectorProxy\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"RecordSourceSelectorProxy.t => unit\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:recordSourceSelectorProxy} => ${0:recordSourceSelectorProxy}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 257:25\nposCursor:[257:25] posNoWhite:[257:24] Found expr:[257:3->257:26]\nPexp_apply ...[257:3->257:24] (...[257:25->257:26])\nCompletable: Cexpression CArgument Value[fnTakingAsyncCallback]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingAsyncCallback]($0)\nContextPath Value[fnTakingAsyncCallback]\nPath fnTakingAsyncCallback\n[{\n    \"label\": \"async () => {}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"unit => promise<unit>\",\n    \"documentation\": null,\n    \"sortText\": 
\"A\",\n    \"insertText\": \"async () => ${0:()}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 262:23\nposCursor:[262:23] posNoWhite:[262:22] Found expr:[262:3->262:24]\nCompletable: Cexpression CArgument Value[Belt, Array, map]($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[Belt, Array, map]($1)\nContextPath Value[Belt, Array, map]\nPath Belt.Array.map\n[{\n    \"label\": \"v => v\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"'a => 'b\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"${1:v} => ${0:v}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 271:15\nposCursor:[271:15] posNoWhite:[271:14] Found expr:[271:3->271:16]\nPexp_apply ...[271:3->271:14] (...[271:15->271:16])\nCompletable: Cexpression CArgument Value[takesExotic]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[takesExotic]($0)\nContextPath Value[takesExotic]\nPath takesExotic\n[{\n    \"label\": \"#\\\"some exotic\\\"\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#\\\"some exotic\\\"\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#\\\"some exotic\\\"\\n```\\n\\n```rescript\\n[#\\\"some exotic\\\"]\\n```\"},\n    \"insertText\": \"#\\\"some exotic\\\"\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 278:23\nposCursor:[278:23] posNoWhite:[278:22] Found expr:[278:3->278:24]\nPexp_apply ...[278:3->278:22] (...[278:23->278:24])\nCompletable: Cexpression CArgument Value[fnTakingPolyVariant]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingPolyVariant]($0)\nContextPath Value[fnTakingPolyVariant]\nPath fnTakingPolyVariant\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    
\"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#one\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#three(someRecord, bool)\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two(bool)\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#two($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 281:24\nposCursor:[281:24] posNoWhite:[281:23] Found expr:[281:3->290:18]\nPexp_apply ...[281:3->281:22] (...[281:23->281:25], ...[290:0->290:16])\nCompletable: Cexpression CArgument Value[fnTakingPolyVariant]($0)=#\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingPolyVariant]($0)\nContextPath Value[fnTakingPolyVariant]\nPath fnTakingPolyVariant\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"one\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#three(someRecord, bool)\\n```\\n\\n```rescript\\n[#one | 
#three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two(bool)\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"two($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 284:25\nposCursor:[284:25] posNoWhite:[284:24] Found expr:[284:3->284:26]\nPexp_apply ...[284:3->284:22] (...[284:23->284:25])\nCompletable: Cexpression CArgument Value[fnTakingPolyVariant]($0)=#o\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingPolyVariant]($0)\nContextPath Value[fnTakingPolyVariant]\nPath fnTakingPolyVariant\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"one\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 287:24\nposCursor:[287:24] posNoWhite:[287:23] Found expr:[287:3->287:25]\nPexp_apply ...[287:3->287:22] (...[287:23->287:24])\nCompletable: Cexpression CArgument Value[fnTakingPolyVariant]($0)=o\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingPolyVariant]($0)\nContextPath Value[fnTakingPolyVariant]\nPath fnTakingPolyVariant\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#one\",\n    \"insertTextFormat\": 2\n  
}]\n\nComplete src/CompletionExpressions.res 306:41\nXXX Not found!\nCompletable: Cexpression Type[withIntLocal]->recordField(superInt)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[withIntLocal]\nPath withIntLocal\n[{\n    \"label\": \"SuperInt.make()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => t\",\n    \"documentation\": null,\n    \"insertText\": \"SuperInt.make($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 309:36\nposCursor:[309:36] posNoWhite:[309:35] Found expr:[309:3->309:37]\nPexp_apply ...[309:3->309:35] (...[309:36->309:37])\nCompletable: Cexpression CArgument Value[CompletionSupport, makeTestHidden]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[CompletionSupport, makeTestHidden]($0)\nContextPath Value[CompletionSupport, makeTestHidden]\nPath CompletionSupport.makeTestHidden\n[{\n    \"label\": \"CompletionSupport.TestHidden.make()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => t\",\n    \"documentation\": null,\n    \"insertText\": \"CompletionSupport.TestHidden.make($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 313:36\nposCursor:[313:36] posNoWhite:[313:35] Found expr:[313:3->313:37]\nPexp_apply ...[313:3->313:35] (...[313:36->313:37])\nCompletable: Cexpression CArgument Value[CompletionSupport, makeTestHidden]($0)\nRaw opens: 1 CompletionSupport.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionSupport.res\nContextPath CArgument Value[CompletionSupport, makeTestHidden]($0)\nContextPath Value[CompletionSupport, makeTestHidden]\nPath CompletionSupport.makeTestHidden\n[{\n    \"label\": \"TestHidden.make()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => t\",\n    \"documentation\": null,\n    \"insertText\": \"TestHidden.make($0)\",\n    
\"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 321:11\nposCursor:[321:11] posNoWhite:[321:10] Found expr:[321:3->321:12]\nPexp_apply ...[321:3->321:10] (...[321:11->321:12])\nCompletable: Cexpression CArgument Value[mkStuff]($0)\nRaw opens: 1 CompletionSupport.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionSupport.res\nContextPath CArgument Value[mkStuff]($0)\nContextPath Value[mkStuff]\nPath mkStuff\n[{\n    \"label\": \"%re()\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Regular expression\",\n    \"documentation\": null,\n    \"insertText\": \"%re(\\\"/$0/g\\\")\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Re.fromString()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Re.fromString($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Re.fromStringWithFlags()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(string, ~flags: string) => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Re.fromStringWithFlags($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 352:24\nposCursor:[352:24] posNoWhite:[352:23] Found expr:[352:3->352:25]\nPexp_apply ...[352:3->352:23] (...[352:24->352:25])\nCompletable: Cexpression CArgument Value[tArgCompletionTestFn]($0)\nRaw opens: 1 CompletionSupport.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionSupport.res\nContextPath CArgument Value[tArgCompletionTestFn]($0)\nContextPath Value[tArgCompletionTestFn]\nPath tArgCompletionTestFn\n[{\n    \"label\": \"Money.fromInt()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => t\",\n    \"documentation\": null,\n    \"insertText\": \"Money.fromInt($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Money.make()\",\n    \"kind\": 12,\n    \"tags\": [],\n    
\"detail\": \"unit => t\",\n    \"documentation\": null,\n    \"insertText\": \"Money.make($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Money.zero\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t\",\n    \"documentation\": null,\n    \"insertText\": \"Money.zero\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 357:37\nposCursor:[357:37] posNoWhite:[357:36] Found expr:[357:3->357:38]\nPexp_apply ...[357:3->357:30] (~tVal357:32->357:36=...__ghost__[0:-1->0:-1])\nCompletable: Cexpression CArgument Value[labeledTArgCompletionTestFn](~tVal)\nRaw opens: 1 CompletionSupport.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionSupport.res\nContextPath CArgument Value[labeledTArgCompletionTestFn](~tVal)\nContextPath Value[labeledTArgCompletionTestFn]\nPath labeledTArgCompletionTestFn\n[{\n    \"label\": \"Money.fromInt()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => t\",\n    \"documentation\": null,\n    \"insertText\": \"Money.fromInt($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Money.make()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"unit => t\",\n    \"documentation\": null,\n    \"insertText\": \"Money.make($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Money.zero\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t\",\n    \"documentation\": null,\n    \"insertText\": \"Money.zero\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionExpressions.res 362:18\nposCursor:[362:18] posNoWhite:[362:17] Found expr:[362:3->362:32]\nposCursor:[362:18] posNoWhite:[362:17] Found expr:[362:10->362:18]\nPexp_field [362:10->362:17] _:[362:19->362:18]\nCompletable: Cpath Value[someTyp].\"\"\nRaw opens: 1 CompletionSupport.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionSupport.res\nContextPath Value[someTyp].\"\"\nContextPath 
Value[someTyp]\nPath someTyp\nContextPath Value[someTyp]->\nContextPath Value[someTyp]\nPath someTyp\nCPPipe pathFromEnv: found:true\nPath CompletionExpressions.\n[{\n    \"label\": \"test\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntest: bool\\n```\\n\\n```rescript\\ntype someTyp = {test: bool}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 380:22\nposCursor:[380:22] posNoWhite:[380:18] Found expr:[380:13->386:2]\nPexp_apply ...[380:13->380:17] (...[380:18->386:1])\nCompletable: Cexpression CArgument Value[hook]($0)->recordBody\nRaw opens: 1 CompletionSupport.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionSupport.res\nContextPath CArgument Value[hook]($0)\nContextPath Value[hook]\nPath hook\n[{\n    \"label\": \"operator\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"[#\\\"and\\\" | #or]\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noperator?: [#\\\"and\\\" | #or]\\n```\\n\\n```rescript\\ntype config = {includeName: bool, operator: option<[#\\\"and\\\" | #or]>, showMore: bool}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 382:8\nposCursor:[382:8] posNoWhite:[382:7] Found expr:[380:13->386:2]\nPexp_apply ...[380:13->380:17] (...[380:18->386:1])\nCompletable: Cexpression CArgument Value[hook]($0)=ope->recordBody\nRaw opens: 1 CompletionSupport.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionSupport.res\nContextPath CArgument Value[hook]($0)\nContextPath Value[hook]\nPath hook\n[{\n    \"label\": \"operator\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"[#\\\"and\\\" | #or]\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noperator?: [#\\\"and\\\" | #or]\\n```\\n\\n```rescript\\ntype config = {includeName: bool, operator: option<[#\\\"and\\\" | #or]>, 
showMore: bool}\\n```\"}\n  }]\n\nComplete src/CompletionExpressions.res 388:18\nposCursor:[388:18] posNoWhite:[388:17] Found expr:[388:3->388:24]\nposCursor:[388:18] posNoWhite:[388:17] Found expr:[388:10->388:18]\nPexp_field [388:10->388:17] _:[388:19->388:18]\nCompletable: Cpath Value[someTyp].\"\"\nRaw opens: 1 CompletionSupport.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionSupport.res\nContextPath Value[someTyp].\"\"\nContextPath Value[someTyp]\nPath someTyp\nContextPath Value[someTyp]->\nContextPath Value[someTyp]\nPath someTyp\nCPPipe pathFromEnv: found:true\nPath CompletionExpressions.\n[{\n    \"label\": \"test\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntest: bool\\n```\\n\\n```rescript\\ntype someTyp = {test: bool}\\n```\"}\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionFromModule.res.txt",
    "content": "Complete src/CompletionFromModule.res 10:5\nposCursor:[10:5] posNoWhite:[10:4] Found expr:[10:3->10:5]\nPexp_field [10:3->10:4] _:[13:0->10:5]\nCompletable: Cpath Value[n].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[n].\"\"\nContextPath Value[n]\nPath n\nContextPath Value[n]->\nContextPath Value[n]\nPath n\nCPPipe pathFromEnv:SomeModule found:true\nPath SomeModule.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype t = {name: string}\\n```\"}\n  }, {\n    \"label\": \"->SomeModule.getName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getName\",\n    \"insertText\": \"->SomeModule.getName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 10, \"character\": 4}, \"end\": {\"line\": 10, \"character\": 5}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/CompletionFromModule.res 30:6\nposCursor:[30:6] posNoWhite:[30:5] Found expr:[30:3->30:6]\nPexp_field [30:3->30:5] _:[36:0->30:6]\nCompletable: Cpath Value[nn].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[nn].\"\"\nContextPath Value[nn]\nPath nn\nContextPath Value[nn]->\nContextPath Value[nn]\nPath nn\nCPPipe pathFromEnv:SomeOtherModule found:true\nPath SomeOtherModule.\nPath CompletionFromModule.SomeOtherModule.\n[{\n    \"label\": \"nname\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnname: string\\n```\\n\\n```rescript\\ntype typeOutsideModule = {nname: string}\\n```\"}\n  }, {\n    \"label\": \"->SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    
\"documentation\": null,\n    \"sortText\": \"getNName\",\n    \"insertText\": \"->SomeOtherModule.getNName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 30, \"character\": 5}, \"end\": {\"line\": 30, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName2\",\n    \"insertText\": \"->SomeOtherModule.getNName2\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 30, \"character\": 5}, \"end\": {\"line\": 30, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName\",\n    \"insertText\": \"->SomeOtherModule.getNName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 30, \"character\": 5}, \"end\": {\"line\": 30, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName2\",\n    \"insertText\": \"->SomeOtherModule.getNName2\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 30, \"character\": 5}, \"end\": {\"line\": 30, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/CompletionFromModule.res 33:32\nXXX Not found!\nCompletable: Cpath Module[SomeOthe]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[SomeOthe]\nPath SomeOthe\n[{\n    \"label\": \"SomeOtherModule\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module SomeOtherModule\",\n    \"documentation\": null\n  }]\n\nComplete 
src/CompletionFromModule.res 38:8\nposCursor:[38:8] posNoWhite:[38:7] Found expr:[38:3->0:-1]\nCompletable: Cpath Value[nnn]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[nnn]->\nContextPath Value[nnn]\nPath nnn\nCPPipe pathFromEnv: found:true\nPath CompletionFromModule.\nPath CompletionFromModule.SomeOtherModule.\n[{\n    \"label\": \"SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null\n  }, {\n    \"label\": \"SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFromModule.res 42:8\nposCursor:[42:8] posNoWhite:[42:7] Found expr:[42:3->0:-1]\nCompletable: Cpath Value[nnn]->\nRaw opens: 1 SomeOtherModule.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionFromModule.res\nContextPath Value[nnn]->\nContextPath Value[nnn]\nPath nnn\nCPPipe pathFromEnv: found:true\nPath CompletionFromModule.\nPath CompletionFromModule.SomeOtherModule.\n[{\n    \"label\": \"getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null\n  }, {\n    \"label\": \"getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionFromModule2.res.txt",
    "content": "Complete src/CompletionFromModule2.res 2:26\nposCursor:[2:26] posNoWhite:[2:25] Found expr:[2:3->2:26]\nPexp_field [2:3->2:25] _:[11:0->2:26]\nCompletable: Cpath Value[CompletionFromModule, n].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[CompletionFromModule, n].\"\"\nContextPath Value[CompletionFromModule, n]\nPath CompletionFromModule.n\nContextPath Value[CompletionFromModule, n]->\nContextPath Value[CompletionFromModule, n]\nPath CompletionFromModule.n\nCPPipe pathFromEnv:SomeModule found:true\nPath CompletionFromModule.SomeModule.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype t = {name: string}\\n```\"}\n  }, {\n    \"label\": \"->CompletionFromModule.SomeModule.getName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getName\",\n    \"insertText\": \"->CompletionFromModule.SomeModule.getName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 2, \"character\": 25}, \"end\": {\"line\": 2, \"character\": 26}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/CompletionFromModule2.res 5:27\nposCursor:[5:27] posNoWhite:[5:26] Found expr:[5:3->5:27]\nPexp_field [5:3->5:26] _:[11:0->5:27]\nCompletable: Cpath Value[CompletionFromModule, nn].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[CompletionFromModule, nn].\"\"\nContextPath Value[CompletionFromModule, nn]\nPath CompletionFromModule.nn\nContextPath Value[CompletionFromModule, nn]->\nContextPath Value[CompletionFromModule, nn]\nPath CompletionFromModule.nn\nCPPipe pathFromEnv:SomeOtherModule found:true\nPath CompletionFromModule.SomeOtherModule.\nPath CompletionFromModule.SomeOtherModule.\n[{\n    \"label\": 
\"nname\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnname: string\\n```\\n\\n```rescript\\ntype typeOutsideModule = {nname: string}\\n```\"}\n  }, {\n    \"label\": \"->CompletionFromModule.SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName\",\n    \"insertText\": \"->CompletionFromModule.SomeOtherModule.getNName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 5, \"character\": 26}, \"end\": {\"line\": 5, \"character\": 27}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->CompletionFromModule.SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName2\",\n    \"insertText\": \"->CompletionFromModule.SomeOtherModule.getNName2\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 5, \"character\": 26}, \"end\": {\"line\": 5, \"character\": 27}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->CompletionFromModule.SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName\",\n    \"insertText\": \"->CompletionFromModule.SomeOtherModule.getNName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 5, \"character\": 26}, \"end\": {\"line\": 5, \"character\": 27}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->CompletionFromModule.SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName2\",\n    \"insertText\": \"->CompletionFromModule.SomeOtherModule.getNName2\",\n    \"additionalTextEdits\": [{\n      
\"range\": {\"start\": {\"line\": 5, \"character\": 26}, \"end\": {\"line\": 5, \"character\": 27}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/CompletionFromModule2.res 8:29\nposCursor:[8:29] posNoWhite:[8:28] Found expr:[8:3->0:-1]\nCompletable: Cpath Value[CompletionFromModule, nnn]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[CompletionFromModule, nnn]->\nContextPath Value[CompletionFromModule, nnn]\nPath CompletionFromModule.nnn\nCPPipe pathFromEnv: found:true\nPath CompletionFromModule.\nPath CompletionFromModule.SomeOtherModule.\n[{\n    \"label\": \"CompletionFromModule.SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null\n  }, {\n    \"label\": \"CompletionFromModule.SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFromModule2.res 12:29\nposCursor:[12:29] posNoWhite:[12:28] Found expr:[12:3->0:-1]\nCompletable: Cpath Value[CompletionFromModule, nnn]->\nRaw opens: 1 CompletionFromModule.SomeOtherModule.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives CompletionFromModule.res\nContextPath Value[CompletionFromModule, nnn]->\nContextPath Value[CompletionFromModule, nnn]\nPath CompletionFromModule.nnn\nCPPipe pathFromEnv: found:true\nPath CompletionFromModule.\nPath CompletionFromModule.SomeOtherModule.\n[{\n    \"label\": \"getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null\n  }, {\n    \"label\": \"getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionFunctionArguments.res.txt",
    "content": "Complete src/CompletionFunctionArguments.res 10:24\nposCursor:[10:24] posNoWhite:[10:23] Found expr:[10:11->10:25]\nPexp_apply ...[10:11->10:17] (~isOn10:19->10:23=...__ghost__[0:-1->0:-1])\nCompletable: Cexpression CArgument Value[someFn](~isOn)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someFn](~isOn)\nContextPath Value[someFn]\nPath someFn\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 13:25\nposCursor:[13:25] posNoWhite:[13:24] Found expr:[13:11->13:26]\nPexp_apply ...[13:11->13:17] (~isOn13:19->13:23=...[13:24->13:25])\nCompletable: Cexpression CArgument Value[someFn](~isOn)=t\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someFn](~isOn)\nContextPath Value[someFn]\nPath someFn\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"sortText\": \"A true\"\n  }, {\n    \"label\": \"tLocalVar\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 16:25\nposCursor:[16:25] posNoWhite:[16:24] Found expr:[16:11->16:26]\nPexp_apply ...[16:11->16:17] (~isOff16:19->16:24=...__ghost__[0:-1->0:-1])\nCompletable: Cexpression CArgument Value[someFn](~isOff)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someFn](~isOff)\nContextPath Value[someFn]\nPath someFn\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    
\"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 21:27\nposCursor:[21:27] posNoWhite:[21:26] Found expr:[19:21->25:1]\nPexp_apply ...[19:21->19:27] (~isOn20:3->20:7=...[21:7->23:8])\nposCursor:[21:27] posNoWhite:[21:26] Found expr:[21:7->23:8]\nposCursor:[21:27] posNoWhite:[21:26] Found expr:[21:7->21:28]\nposCursor:[21:27] posNoWhite:[21:26] Found expr:[21:14->21:28]\nPexp_apply ...[21:14->21:20] (~isOn21:22->21:26=...__ghost__[0:-1->0:-1])\nCompletable: Cexpression CArgument Value[someFn](~isOn)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someFn](~isOn)\nContextPath Value[someFn]\nPath someFn\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 34:24\nposCursor:[34:24] posNoWhite:[34:23] Found expr:[34:11->34:25]\nPexp_apply ...[34:11->34:22] (...[34:23->34:24])\nCompletable: Cexpression CArgument Value[someOtherFn]($0)=f\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someOtherFn]($0)\nContextPath Value[someOtherFn]\nPath someOtherFn\n[{\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 51:39\nposCursor:[51:39] posNoWhite:[51:38] Found expr:[51:11->51:40]\nPexp_apply ...[51:11->51:30] (~config51:32->51:38=...__ghost__[0:-1->0:-1])\nCompletable: Cexpression CArgument Value[someFnTakingVariant](~config)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someFnTakingVariant](~config)\nContextPath Value[someFnTakingVariant]\nPath someFnTakingVariant\n[{\n    
\"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"Two\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(int, string)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(int, string)\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"insertText\": \"Three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionFunctionArguments.res 54:40\nposCursor:[54:40] posNoWhite:[54:39] Found expr:[54:11->54:41]\nPexp_apply ...[54:11->54:30] (~config54:32->54:38=...[54:39->54:40])\nCompletable: Cexpression CArgument Value[someFnTakingVariant](~config)=O\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someFnTakingVariant](~config)\nContextPath Value[someFnTakingVariant]\nPath someFnTakingVariant\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"sortText\": \"A One\",\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"OIncludeMeInCompletions\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module OIncludeMeInCompletions\",\n    \"documentation\": null\n  }, 
{\n    \"label\": \"Obj\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Obj\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"Obj\",\n      \"filePath\": \"src/CompletionFunctionArguments.res\"\n    }\n  }, {\n    \"label\": \"Objects\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Objects\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"Objects\",\n      \"filePath\": \"src/CompletionFunctionArguments.res\"\n    }\n  }]\n\nComplete src/CompletionFunctionArguments.res 57:33\nposCursor:[57:33] posNoWhite:[57:32] Found expr:[57:11->57:34]\nPexp_apply ...[57:11->57:30] (...[57:31->57:33])\nCompletable: Cexpression CArgument Value[someFnTakingVariant]($0)=So\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someFnTakingVariant]($0)\nContextPath Value[someFnTakingVariant]\nPath someFnTakingVariant\n[{\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"sortText\": \"A Some(_)\",\n    \"insertText\": \"Some($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Sort\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Sort\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"Sort\",\n      \"filePath\": \"src/CompletionFunctionArguments.res\"\n    }\n  }]\n\nComplete src/CompletionFunctionArguments.res 60:44\nposCursor:[60:44] posNoWhite:[60:43] Found expr:[60:11->60:45]\nPexp_apply ...[60:11->60:30] (~configOpt260:32->60:42=...[60:43->60:44])\nCompletable: Cexpression CArgument Value[someFnTakingVariant](~configOpt2)=O\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someFnTakingVariant](~configOpt2)\nContextPath 
Value[someFnTakingVariant]\nPath someFnTakingVariant\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two | Three(int, string)\\n```\"},\n    \"sortText\": \"A One\",\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"OIncludeMeInCompletions\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module OIncludeMeInCompletions\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Obj\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Obj\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"Obj\",\n      \"filePath\": \"src/CompletionFunctionArguments.res\"\n    }\n  }, {\n    \"label\": \"Objects\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Objects\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"Objects\",\n      \"filePath\": \"src/CompletionFunctionArguments.res\"\n    }\n  }]\n\nComplete src/CompletionFunctionArguments.res 63:23\nposCursor:[63:23] posNoWhite:[63:22] Found expr:[63:11->63:24]\nPexp_apply ...[63:11->63:22] (...[63:23->63:24])\nCompletable: Cexpression CArgument Value[someOtherFn]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someOtherFn]($0)\nContextPath Value[someOtherFn]\nPath someOtherFn\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 66:28\nposCursor:[66:28] posNoWhite:[66:27] Found expr:[66:11->66:30]\nPexp_apply ...[66:11->66:22] (...[66:23->66:24], ...[66:26->66:27])\nCompletable: Cexpression CArgument 
Value[someOtherFn]($2)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someOtherFn]($2)\nContextPath Value[someOtherFn]\nPath someOtherFn\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 69:30\nposCursor:[69:30] posNoWhite:[69:29] Found expr:[69:11->69:31]\nCompletable: Cexpression CArgument Value[someOtherFn]($2)=t\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[someOtherFn]($2)\nContextPath Value[someOtherFn]\nPath someOtherFn\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"sortText\": \"A true\"\n  }, {\n    \"label\": \"tLocalVar\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 76:25\nposCursor:[76:25] posNoWhite:[76:24] Found expr:[76:11->76:26]\nPexp_apply ...[76:11->76:24] (...[76:25->76:26])\nCompletable: Cexpression CArgument Value[fnTakingTuple]($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingTuple]($0)\nContextPath Value[fnTakingTuple]\nPath fnTakingTuple\n[{\n    \"label\": \"(_, _, _)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int, float)\",\n    \"documentation\": null,\n    \"insertText\": \"(${1:_}, ${2:_}, ${3:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionFunctionArguments.res 89:27\nposCursor:[89:27] posNoWhite:[89:26] Found expr:[89:11->89:29]\nPexp_apply ...[89:11->89:25] (...[89:26->89:28])\nCompletable: Cexpression CArgument Value[fnTakingRecord]($0)->recordBody\nPackage opens 
Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[fnTakingRecord]($0)\nContextPath Value[fnTakingRecord]\nPath fnTakingRecord\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>}\\n```\"}\n  }, {\n    \"label\": \"offline\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noffline: bool\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>}\\n```\"}\n  }, {\n    \"label\": \"online\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<bool>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nonline: option<bool>\\n```\\n\\n```rescript\\ntype someRecord = {age: int, offline: bool, online: option<bool>}\\n```\"}\n  }]\n\nComplete src/CompletionFunctionArguments.res 109:29\nposCursor:[109:29] posNoWhite:[109:28] Found expr:[105:3->114:4]\nJSX <div:[105:3->105:6] onMouseDown[106:4->106:15]=...[106:35->113:5]> _children:114:2\nposCursor:[109:29] posNoWhite:[109:28] Found expr:[106:35->113:5]\nposCursor:[109:29] posNoWhite:[109:28] Found expr:[106:16->113:5]\nposCursor:[109:29] posNoWhite:[109:28] Found expr:[107:6->109:29]\nposCursor:[109:29] posNoWhite:[109:28] Found expr:[108:6->109:29]\nposCursor:[109:29] posNoWhite:[109:28] Found expr:[109:9->109:29]\nCompletable: Cpath Value[thisGetsBrokenLoc]->a <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[thisGetsBrokenLoc]->a <<jsx>>\nContextPath Value[thisGetsBrokenLoc]\nPath thisGetsBrokenLoc\nCPPipe pathFromEnv:ReactEvent.Mouse found:false\nPath ReactEvent.Mouse.a\n[{\n    \"label\": \"ReactEvent.Mouse.altKey\",\n    \"kind\": 12,\n    \"tags\": 
[],\n    \"detail\": \"t => bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 111:27\nposCursor:[111:27] posNoWhite:[111:26] Found expr:[105:3->114:4]\nJSX <div:[105:3->105:6] onMouseDown[106:4->106:15]=...[106:35->113:5]> _children:114:2\nposCursor:[111:27] posNoWhite:[111:26] Found expr:[106:35->113:5]\nposCursor:[111:27] posNoWhite:[111:26] Found expr:[106:16->113:5]\nposCursor:[111:27] posNoWhite:[111:26] Found expr:[107:6->111:27]\nposCursor:[111:27] posNoWhite:[111:26] Found expr:[108:6->111:27]\nposCursor:[111:27] posNoWhite:[111:26] Found expr:[111:9->111:27]\nCompletable: Cpath Value[reassignedWorks]->a <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[reassignedWorks]->a <<jsx>>\nContextPath Value[reassignedWorks]\nPath reassignedWorks\nCPPipe pathFromEnv:ReactEvent.Mouse found:false\nPath ReactEvent.Mouse.a\n[{\n    \"label\": \"ReactEvent.Mouse.altKey\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionFunctionArguments.res 121:57\nposCursor:[121:57] posNoWhite:[121:56] Found expr:[121:3->121:73]\nPexp_apply ...[121:3->121:11] (~changefreq121:13->121:23=...[121:24->121:31], ~lastmod121:34->121:41=...[121:42->0:-1], ~priority121:60->121:68=...[121:69->121:72])\nposCursor:[121:57] posNoWhite:[121:56] Found expr:[121:42->0:-1]\nposCursor:[121:57] posNoWhite:[121:56] Found expr:[121:42->0:-1]\nCompletable: Cpath Value[fineModuleVal]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[fineModuleVal]->\nContextPath Value[fineModuleVal]\nPath fineModuleVal\nCPPipe pathFromEnv:FineModule found:true\nPath FineModule.\n[{\n    \"label\": \"FineModule.setToFalse\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => t\",\n    \"documentation\": null\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionInferValues.res.txt",
    "content": "Complete src/CompletionInferValues.res 15:43\nposCursor:[15:43] posNoWhite:[15:42] Found expr:[15:33->15:43]\nCompletable: Cpath Value[aliased]->t\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[aliased]->t\nContextPath Value[aliased]\nPath aliased\nContextPath Value[x]\nPath x\nContextPath int\nPath Belt.Int.t\n[{\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.toFloat\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => float\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `float`.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toFloat(1) === 1.0) /* true */\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 18:30\nposCursor:[18:30] posNoWhite:[18:29] Found expr:[18:28->18:30]\nPexp_field [18:28->18:29] _:[33:0->18:30]\nCompletable: Cpath Value[x].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x].\"\"\nContextPath Value[x]\nPath x\nContextPath Value[getSomeRecord](Nolabel)\nContextPath Value[getSomeRecord]\nPath getSomeRecord\nContextPath Value[x]->\nContextPath Value[x]\nPath x\nContextPath Value[getSomeRecord](Nolabel)\nContextPath Value[getSomeRecord]\nPath getSomeRecord\nCPPipe pathFromEnv: found:true\nPath CompletionInferValues.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }, {\n    \"label\": 
\"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/CompletionInferValues.res 21:53\nposCursor:[21:53] posNoWhite:[21:52] Found expr:[21:45->21:53]\nPexp_field [21:45->21:52] _:[33:0->21:53]\nCompletable: Cpath Value[aliased].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[aliased].\"\"\nContextPath Value[aliased]\nPath aliased\nContextPath Value[x]\nPath x\nContextPath Value[getSomeRecord](Nolabel)\nContextPath Value[getSomeRecord]\nPath getSomeRecord\nContextPath Value[aliased]->\nContextPath Value[aliased]\nPath aliased\nContextPath Value[x]\nPath x\nContextPath Value[getSomeRecord](Nolabel)\nContextPath Value[getSomeRecord]\nPath getSomeRecord\nCPPipe pathFromEnv: found:true\nPath CompletionInferValues.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }, {\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/CompletionInferValues.res 24:63\nposCursor:[24:63] posNoWhite:[24:62] Found expr:[24:3->24:64]\nPexp_apply ...[24:3->24:21] (...[24:22->24:63])\nposCursor:[24:63] posNoWhite:[24:62] Found expr:[24:22->24:63]\nposCursor:[24:63] posNoWhite:[24:62] Found expr:[24:23->24:63]\nposCursor:[24:63] posNoWhite:[24:62] Found expr:[24:36->24:63]\nposCursor:[24:63] posNoWhite:[24:62] Found expr:[24:42->24:63]\nposCursor:[24:63] posNoWhite:[24:62] Found expr:[24:52->24:63]\nPexp_field 
[24:52->24:62] _:[24:63->24:63]\nCompletable: Cpath Value[someRecord].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someRecord].\"\"\nContextPath Value[someRecord]\nPath someRecord\nContextPath CArgument CArgument Value[someFnWithCallback]($0)(~someRecord)\nContextPath CArgument Value[someFnWithCallback]($0)\nContextPath Value[someFnWithCallback]\nPath someFnWithCallback\nContextPath Value[someRecord]->\nContextPath Value[someRecord]\nPath someRecord\nContextPath CArgument CArgument Value[someFnWithCallback]($0)(~someRecord)\nContextPath CArgument Value[someFnWithCallback]($0)\nContextPath Value[someFnWithCallback]\nPath someFnWithCallback\nCPPipe pathFromEnv: found:true\nPath CompletionInferValues.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }, {\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/CompletionInferValues.res 27:90\nposCursor:[27:90] posNoWhite:[27:89] Found expr:[27:39->27:91]\nPexp_apply ...[27:39->27:48] (...[27:49->27:90])\nposCursor:[27:90] posNoWhite:[27:89] Found expr:[27:49->27:90]\nposCursor:[27:90] posNoWhite:[27:89] Found expr:[27:50->27:90]\nposCursor:[27:90] posNoWhite:[27:89] Found expr:[27:56->27:90]\nposCursor:[27:90] posNoWhite:[27:89] Found expr:[27:69->27:90]\nposCursor:[27:90] posNoWhite:[27:89] Found expr:[27:79->27:90]\nPexp_field [27:79->27:89] _:[27:90->27:90]\nCompletable: Cpath Value[someRecord].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someRecord].\"\"\nContextPath 
Value[someRecord]\nPath someRecord\nContextPath CArgument CArgument Value[aliasedFn]($0)(~someRecord)\nContextPath CArgument Value[aliasedFn]($0)\nContextPath Value[aliasedFn]\nPath aliasedFn\nContextPath Value[someFnWithCallback]\nPath someFnWithCallback\nContextPath Value[someRecord]->\nContextPath Value[someRecord]\nPath someRecord\nContextPath CArgument CArgument Value[aliasedFn]($0)(~someRecord)\nContextPath CArgument Value[aliasedFn]($0)\nContextPath Value[aliasedFn]\nPath aliasedFn\nContextPath Value[someFnWithCallback]\nPath someFnWithCallback\nCPPipe pathFromEnv: found:true\nPath CompletionInferValues.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }, {\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/CompletionInferValues.res 30:36\nposCursor:[30:36] posNoWhite:[30:35] Found expr:[30:3->30:39]\nPexp_apply ...[30:3->30:15] (...[30:16->30:38])\nposCursor:[30:36] posNoWhite:[30:35] Found expr:[30:16->30:38]\nposCursor:[30:36] posNoWhite:[30:35] Found expr:[30:16->30:38]\nposCursor:[30:36] posNoWhite:[30:35] Found expr:[30:27->30:36]\nCompletable: Cpath Value[event]->pr\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[event]->pr\nContextPath Value[event]\nPath event\nContextPath CArgument CArgument Value[reactEventFn]($0)($0)\nContextPath CArgument Value[reactEventFn]($0)\nContextPath Value[reactEventFn]\nPath reactEventFn\nCPPipe pathFromEnv:ReactEvent.Mouse found:false\nPath ReactEvent.Mouse.pr\n[{\n    \"label\": \"ReactEvent.Mouse.preventDefault\",\n    \"kind\": 12,\n    
\"tags\": [],\n    \"detail\": \"t => unit\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionInferValues.res 41:50\nposCursor:[41:50] posNoWhite:[41:49] Found expr:[41:12->41:56]\nJSX <div:[41:12->41:15] onMouseEnter[41:16->41:28]=...[41:36->41:52]> _children:41:54\nposCursor:[41:50] posNoWhite:[41:49] Found expr:[41:36->41:52]\nposCursor:[41:50] posNoWhite:[41:49] Found expr:[41:29->41:52]\nposCursor:[41:50] posNoWhite:[41:49] Found expr:[41:41->41:50]\nCompletable: Cpath Value[event]->pr <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[event]->pr <<jsx>>\nContextPath Value[event]\nPath event\nContextPath CArgument CJsxPropValue [div] onMouseEnter($0)\nContextPath CJsxPropValue [div] onMouseEnter\nPath ReactDOM.domProps\nPath PervasivesU.JsxDOM.domProps\nCPPipe pathFromEnv:JsxEventU.Mouse found:false\nPath JsxEventU.Mouse.pr\n[{\n    \"label\": \"JsxEventU.Mouse.preventDefault\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => unit\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionInferValues.res 44:50\nposCursor:[44:50] posNoWhite:[44:49] Found expr:[44:12->44:56]\nJSX <Div:[44:12->44:15] onMouseEnter[44:16->44:28]=...[44:36->44:52]> _children:44:54\nposCursor:[44:50] posNoWhite:[44:49] Found expr:[44:36->44:52]\nposCursor:[44:50] posNoWhite:[44:49] Found expr:[44:29->44:52]\nposCursor:[44:50] posNoWhite:[44:49] Found expr:[44:41->44:50]\nCompletable: Cpath Value[event]->pr <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[event]->pr <<jsx>>\nContextPath Value[event]\nPath event\nContextPath CArgument CJsxPropValue [Div] onMouseEnter($0)\nContextPath CJsxPropValue [Div] onMouseEnter\nPath Div.make\nCPPipe pathFromEnv:PervasivesU.JsxEvent.Mouse found:false\nPath PervasivesU.JsxEvent.Mouse.pr\n[{\n    \"label\": \"PervasivesU.JsxEvent.Mouse.preventDefault\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": 
\"t => unit\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionInferValues.res 47:87\nposCursor:[47:87] posNoWhite:[47:86] Found expr:[47:12->47:93]\nJSX <div:[47:12->47:15] onMouseEnter[47:16->47:28]=...[47:36->47:89]> _children:47:91\nposCursor:[47:87] posNoWhite:[47:86] Found expr:[47:36->47:89]\nposCursor:[47:87] posNoWhite:[47:86] Found expr:[47:29->47:89]\nposCursor:[47:87] posNoWhite:[47:86] Found expr:[47:41->47:87]\nposCursor:[47:87] posNoWhite:[47:86] Found expr:[47:81->47:87]\nCompletable: Cpath Value[btn]->t <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[btn]->t <<jsx>>\nContextPath Value[btn]\nPath btn\nContextPath Value[JsxEvent, Mouse, button](Nolabel)\nContextPath Value[JsxEvent, Mouse, button]\nPath JsxEvent.Mouse.button\nPath Belt.Int.t\n[{\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. 
Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.toFloat\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => float\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `float`.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toFloat(1) === 1.0) /* true */\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 50:108\nposCursor:[50:108] posNoWhite:[50:107] Found expr:[50:12->50:114]\nJSX <div:[50:12->50:15] onMouseEnter[50:16->50:28]=...[50:36->50:110]> _children:50:112\nposCursor:[50:108] posNoWhite:[50:107] Found expr:[50:36->50:110]\nposCursor:[50:108] posNoWhite:[50:107] Found expr:[50:29->50:110]\nposCursor:[50:108] posNoWhite:[50:107] Found expr:[50:41->50:108]\nposCursor:[50:108] posNoWhite:[50:107] Found expr:[50:100->50:108]\nCompletable: Cpath Value[btn]->spl <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[btn]->spl <<jsx>>\nContextPath Value[btn]\nPath btn\nContextPath Value[Belt, Int, toString](Nolabel)\nContextPath Value[Belt, Int, toString]\nPath Belt.Int.toString\nPath Js.String2.spl\n[{\n    \"label\": \"Js.String2.splitAtMost\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t, ~limit: int) => array<t>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`splitAtMost delimiter ~limit: n str` splits the given `str` at every occurrence of `delimiter` and returns an array of the first `n` resulting substrings. 
If `n` is negative or greater than the number of substrings, the array will contain all the substrings.\\n\\n```\\nsplitAtMost \\\"ant/bee/cat/dog/elk\\\" \\\"/\\\" ~limit: 3 = [|\\\"ant\\\"; \\\"bee\\\"; \\\"cat\\\"|];;\\nsplitAtMost \\\"ant/bee/cat/dog/elk\\\" \\\"/\\\" ~limit: 0 = [| |];;\\nsplitAtMost \\\"ant/bee/cat/dog/elk\\\" \\\"/\\\" ~limit: 9 = [|\\\"ant\\\"; \\\"bee\\\"; \\\"cat\\\"; \\\"dog\\\"; \\\"elk\\\"|];;\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.splitByRe\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, Js_re.t) => array<option<t>>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`splitByRe(str, regex)` splits the given `str` at every occurrence of `regex`\\nand returns an array of the resulting substrings.\\n\\nSee [`String.split`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/split)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.splitByRe(\\\"art; bed , cog ;dad\\\", %re(\\\"/\\\\s*[,;]\\\\s*TODO/\\\")) == [\\n    Some(\\\"art\\\"),\\n    Some(\\\"bed\\\"),\\n    Some(\\\"cog\\\"),\\n    Some(\\\"dad\\\"),\\n  ]\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.split\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t) => array<t>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`split(str, delimiter)` splits the given `str` at every occurrence of\\n`delimiter` and returns an array of the resulting substrings.\\n\\nSee [`String.split`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/split)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.split(\\\"2018-01-02\\\", \\\"-\\\") == [\\\"2018\\\", \\\"01\\\", \\\"02\\\"]\\nJs.String2.split(\\\"a,b,,c\\\", \\\",\\\") == [\\\"a\\\", \\\"b\\\", \\\"\\\", \\\"c\\\"]\\nJs.String2.split(\\\"good::bad as great::awful\\\", \\\"::\\\") == [\\\"good\\\", \\\"bad as great\\\", \\\"awful\\\"]\\nJs.String2.split(\\\"has-no-delimiter\\\", 
\\\";\\\") == [\\\"has-no-delimiter\\\"]\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.splitByReAtMost\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, Js_re.t, ~limit: int) => array<option<t>>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`splitByReAtMost(str, regex, ~limit:n)` splits the given `str` at every\\noccurrence of `regex` and returns an array of the first `n` resulting\\nsubstrings. If `n` is negative or greater than the number of substrings, the\\narray will contain all the substrings.\\n\\nSee [`String.split`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/split)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.splitByReAtMost(\\\"one: two: three: four\\\", %re(\\\"/\\\\s*:\\\\s*TODO/\\\"), ~limit=3) == [\\n    Some(\\\"one\\\"),\\n    Some(\\\"two\\\"),\\n    Some(\\\"three\\\"),\\n  ]\\n\\nJs.String2.splitByReAtMost(\\\"one: two: three: four\\\", %re(\\\"/\\\\s*:\\\\s*TODO/\\\"), ~limit=0) == []\\n\\nJs.String2.splitByReAtMost(\\\"one: two: three: four\\\", %re(\\\"/\\\\s*:\\\\s*TODO/\\\"), ~limit=8) == [\\n    Some(\\\"one\\\"),\\n    Some(\\\"two\\\"),\\n    Some(\\\"three\\\"),\\n    Some(\\\"four\\\"),\\n  ]\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 53:130\nposCursor:[53:130] posNoWhite:[53:129] Found expr:[53:12->53:136]\nJSX <div:[53:12->53:15] onMouseEnter[53:16->53:28]=...[53:36->53:132]> _children:53:134\nposCursor:[53:130] posNoWhite:[53:129] Found expr:[53:36->53:132]\nposCursor:[53:130] posNoWhite:[53:129] Found expr:[53:29->53:132]\nposCursor:[53:130] posNoWhite:[53:129] Found expr:[53:41->53:130]\nposCursor:[53:130] posNoWhite:[53:129] Found expr:[53:123->53:130]\nCompletable: Cpath Value[btn]->ma <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[btn]->ma <<jsx>>\nContextPath Value[btn]\nPath btn\nContextPath Value[Js, String2, split](Nolabel, Nolabel)\nContextPath Value[Js, 
String2, split]\nPath Js.String2.split\nPath Js.Array2.ma\n[{\n    \"label\": \"Js.Array2.mapi\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, ('a, int) => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, returning\\na new array. The function acceps two arguments: an item from the array and its\\nindex number. The result array does not have to have elements of the same type\\nas the input array. See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\n// multiply each item in array by its position\\nlet product = (item, index) => item * index\\nJs.Array2.mapi([10, 11, 12], product) == [0, 11, 24]\\n```\\n\"}\n  }, {\n    \"label\": \"Js.Array2.map\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, returning\\na new array. The result array does not have to have elements of the same type\\nas the input array. 
See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.Array2.map([12, 4, 8], x => x * x) == [144, 16, 64]\\nJs.Array2.map([\\\"animal\\\", \\\"vegetable\\\", \\\"mineral\\\"], Js.String.length) == [6, 9, 7]\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 75:78\nposCursor:[75:78] posNoWhite:[75:77] Found expr:[75:70->75:78]\nPexp_field [75:70->75:77] _:[118:0->75:78]\nCompletable: Cpath Value[srecord].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[srecord].\"\"\nContextPath Value[srecord]\nPath srecord\nContextPath CPatternPath(Value[x])->recordField(srecord)\nContextPath Value[x]\nPath x\nContextPath Type[someRecordWithNestedStuff]\nPath someRecordWithNestedStuff\nContextPath Value[srecord]->\nContextPath Value[srecord]\nPath srecord\nContextPath CPatternPath(Value[x])->recordField(srecord)\nContextPath Value[x]\nPath x\nContextPath Type[someRecordWithNestedStuff]\nPath someRecordWithNestedStuff\nCPPipe pathFromEnv: found:true\nPath CompletionInferValues.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }, {\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/CompletionInferValues.res 79:86\nposCursor:[79:86] posNoWhite:[79:85] Found expr:[79:78->79:86]\nPexp_field [79:78->79:85] _:[118:0->79:86]\nCompletable: Cpath Value[aliased].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[aliased].\"\"\nContextPath 
Value[aliased]\nPath aliased\nContextPath CPatternPath(Value[x])->recordField(nested)\nContextPath Value[x]\nPath x\nContextPath Type[someRecordWithNestedStuff]\nPath someRecordWithNestedStuff\nContextPath Value[aliased]->\nContextPath Value[aliased]\nPath aliased\nContextPath CPatternPath(Value[x])->recordField(nested)\nContextPath Value[x]\nPath x\nContextPath Type[someRecordWithNestedStuff]\nPath someRecordWithNestedStuff\nCPPipe pathFromEnv: found:true\nPath CompletionInferValues.\n[{\n    \"label\": \"someRecord\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"someRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsomeRecord: someRecord\\n```\\n\\n```rescript\\ntype someNestedRecord = {someRecord: someRecord}\\n```\"}\n  }]\n\nComplete src/CompletionInferValues.res 83:103\nposCursor:[83:103] posNoWhite:[83:102] Found expr:[83:92->83:103]\nPexp_field [83:92->83:102] _:[118:0->83:103]\nCompletable: Cpath Value[someRecord].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someRecord].\"\"\nContextPath Value[someRecord]\nPath someRecord\nContextPath CPatternPath(Value[x])->recordField(nested)->recordField(someRecord)\nContextPath Value[x]\nPath x\nContextPath Type[someRecordWithNestedStuff]\nPath someRecordWithNestedStuff\nContextPath Value[someRecord]->\nContextPath Value[someRecord]\nPath someRecord\nContextPath CPatternPath(Value[x])->recordField(nested)->recordField(someRecord)\nContextPath Value[x]\nPath x\nContextPath Type[someRecordWithNestedStuff]\nPath someRecordWithNestedStuff\nCPPipe pathFromEnv: found:true\nPath CompletionInferValues.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }, {\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": 
[],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/CompletionInferValues.res 87:81\nposCursor:[87:81] posNoWhite:[87:80] Found expr:[87:69->87:81]\nCompletable: Cpath Value[things]->slic\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[things]->slic\nContextPath Value[things]\nPath things\nContextPath CPatternPath(Value[x])->recordField(things)\nContextPath Value[x]\nPath x\nContextPath Type[someRecordWithNestedStuff]\nPath someRecordWithNestedStuff\nPath Js.String2.slic\n[{\n    \"label\": \"Js.String2.sliceToEnd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`sliceToEnd(str, from:n)` returns the substring of `str` starting at character\\n`n` to the end of the string.\\n- If `n` is negative, then it is evaluated as `length(str - n)`.\\n- If `n` is greater than the length of `str`, then sliceToEnd returns the empty string.\\n\\nSee [`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=4) == \\\"efg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=-2) == \\\"fg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=7) == \\\"\\\"\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.slice\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int, ~to_: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`slice(str, from:n1, to_:n2)` returns the substring of `str` starting at\\ncharacter `n1` up to but not including `n2`.\\n- If either `n1` or `n2` is negative, then it is evaluated as `length(str - n1)` or `length(str - n2)`.\\n- If `n2` is greater than the length of `str`, then 
it is treated as `length(str)`.\\n- If `n1` is greater than `n2`, slice returns the empty string.\\n\\nSee [`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=5) == \\\"cde\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=9) == \\\"cdefg\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=-4, ~to_=-2) == \\\"de\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=5, ~to_=1) == \\\"\\\"\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 91:82\nposCursor:[91:82] posNoWhite:[91:81] Found expr:[91:70->91:82]\nCompletable: Cpath Value[someInt]->toS\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someInt]->toS\nContextPath Value[someInt]\nPath someInt\nContextPath CPatternPath(Value[x])->recordField(someInt)\nContextPath Value[x]\nPath x\nContextPath Type[someRecordWithNestedStuff]\nPath someRecordWithNestedStuff\nPath Belt.Int.toS\n[{\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. 
Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 95:109\nposCursor:[95:109] posNoWhite:[95:108] Found expr:[95:97->95:109]\nCompletable: Cpath Value[someInt]->toS\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someInt]->toS\nContextPath Value[someInt]\nPath someInt\nContextPath CPatternPath(Value[someTuple])->tuple($1)\nContextPath Value[someTuple]\nPath someTuple\nContextPath CPatternPath(Value[x])->recordField(someTuple)\nContextPath Value[x]\nPath x\nContextPath Type[otherNestedRecord]\nPath otherNestedRecord\nPath Belt.Int.toS\n[{\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 99:102\nposCursor:[99:102] posNoWhite:[99:101] Found expr:[99:57->99:102]\nposCursor:[99:102] posNoWhite:[99:101] Found expr:[99:90->99:102]\nCompletable: Cpath Value[someInt]->toS\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someInt]->toS\nContextPath Value[someInt]\nPath someInt\nContextPath CPatternPath(Value[someTuple])->tuple($1)\nContextPath Value[someTuple]\nPath someTuple\nContextPath CPatternPath(Value[x])->recordField(someTuple)\nContextPath Value[x]\nPath x\nContextPath Type[otherNestedRecord]\nPath otherNestedRecord\nPath Belt.Int.toS\n[{\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. 
Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 103:88\nposCursor:[103:88] posNoWhite:[103:87] Found expr:[103:79->103:88]\nCompletable: Cpath Value[str]->slic\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[str]->slic\nContextPath Value[str]\nPath str\nContextPath CPatternPath(Value[x])->recordField(someTuple)->tuple($0)->variantPayload::Three($1)\nContextPath Value[x]\nPath x\nContextPath Type[otherNestedRecord]\nPath otherNestedRecord\nPath Js.String2.slic\n[{\n    \"label\": \"Js.String2.sliceToEnd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`sliceToEnd(str, from:n)` returns the substring of `str` starting at character\\n`n` to the end of the string.\\n- If `n` is negative, then it is evaluated as `length(str - n)`.\\n- If `n` is greater than the length of `str`, then sliceToEnd returns the empty string.\\n\\nSee [`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=4) == \\\"efg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=-2) == \\\"fg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=7) == \\\"\\\"\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.slice\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int, ~to_: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`slice(str, from:n1, to_:n2)` returns the substring of `str` starting at\\ncharacter `n1` up to but not including `n2`.\\n- If either `n1` or `n2` is negative, then it is evaluated as `length(str - n1)` or `length(str - n2)`.\\n- If `n2` is greater than the length of `str`, then it is treated 
as `length(str)`.\\n- If `n1` is greater than `n2`, slice returns the empty string.\\n\\nSee [`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=5) == \\\"cde\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=9) == \\\"cdefg\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=-4, ~to_=-2) == \\\"de\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=5, ~to_=1) == \\\"\\\"\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 107:89\nposCursor:[107:89] posNoWhite:[107:88] Found expr:[107:80->107:89]\nCompletable: Cpath Value[str]->slic\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[str]->slic\nContextPath Value[str]\nPath str\nContextPath CPatternPath(Value[x])->recordField(someTuple)->tuple($2)->polyvariantPayload::three($1)\nContextPath Value[x]\nPath x\nContextPath Type[otherNestedRecord]\nPath otherNestedRecord\nPath Js.String2.slic\n[{\n    \"label\": \"Js.String2.sliceToEnd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`sliceToEnd(str, from:n)` returns the substring of `str` starting at character\\n`n` to the end of the string.\\n- If `n` is negative, then it is evaluated as `length(str - n)`.\\n- If `n` is greater than the length of `str`, then sliceToEnd returns the empty string.\\n\\nSee [`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=4) == \\\"efg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=-2) == \\\"fg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=7) == \\\"\\\"\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.slice\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int, 
~to_: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`slice(str, from:n1, to_:n2)` returns the substring of `str` starting at\\ncharacter `n1` up to but not including `n2`.\\n- If either `n1` or `n2` is negative, then it is evaluated as `length(str - n1)` or `length(str - n2)`.\\n- If `n2` is greater than the length of `str`, then it is treated as `length(str)`.\\n- If `n1` is greater than `n2`, slice returns the empty string.\\n\\nSee [`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=5) == \\\"cde\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=9) == \\\"cdefg\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=-4, ~to_=-2) == \\\"de\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=5, ~to_=1) == \\\"\\\"\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 111:80\nposCursor:[111:80] posNoWhite:[111:79] Found expr:[111:70->111:80]\nCompletable: Cpath Value[name]->slic\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[name]->slic\nContextPath Value[name]\nPath name\nContextPath CPatternPath(Value[x])->recordField(optRecord)->variantPayload::Some($0)->recordField(name)\nContextPath Value[x]\nPath x\nContextPath Type[otherNestedRecord]\nPath otherNestedRecord\nPath Js.String2.slic\n[{\n    \"label\": \"Js.String2.sliceToEnd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`sliceToEnd(str, from:n)` returns the substring of `str` starting at character\\n`n` to the end of the string.\\n- If `n` is negative, then it is evaluated as `length(str - n)`.\\n- If `n` is greater than the length of `str`, then sliceToEnd returns the empty string.\\n\\nSee 
[`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=4) == \\\"efg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=-2) == \\\"fg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=7) == \\\"\\\"\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.slice\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int, ~to_: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`slice(str, from:n1, to_:n2)` returns the substring of `str` starting at\\ncharacter `n1` up to but not including `n2`.\\n- If either `n1` or `n2` is negative, then it is evaluated as `length(str - n1)` or `length(str - n2)`.\\n- If `n2` is greater than the length of `str`, then it is treated as `length(str)`.\\n- If `n1` is greater than `n2`, slice returns the empty string.\\n\\nSee [`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=5) == \\\"cde\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=9) == \\\"cdefg\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=-4, ~to_=-2) == \\\"de\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=5, ~to_=1) == \\\"\\\"\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 115:53\nposCursor:[115:53] posNoWhite:[115:52] Found expr:[115:46->115:53]\nCompletable: Cpath Value[v]->toSt\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[v]->toSt\nContextPath Value[v]\nPath v\nContextPath Value[x]\nPath x\nContextPath int\nPath Belt.Int.toSt\n[{\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. 
Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }]\n\nComplete src/CompletionInferValues.res 123:26\nposCursor:[123:26] posNoWhite:[123:25] Found expr:[123:3->123:37]\nPexp_apply ...[123:3->123:23] (...[123:24->123:36])\nposCursor:[123:26] posNoWhite:[123:25] Found expr:[123:24->123:36]\nposCursor:[123:26] posNoWhite:[123:25] Found expr:[123:25->123:36]\nposCursor:[123:26] posNoWhite:[123:25] Found pattern:[123:25->123:27]\nposCursor:[123:26] posNoWhite:[123:25] Found pattern:[123:25->123:27]\nCompletable: Cpattern CArgument CArgument Value[fnWithRecordCallback]($0)($0)->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument CArgument Value[fnWithRecordCallback]($0)($0)\nContextPath CArgument Value[fnWithRecordCallback]($0)\nContextPath Value[fnWithRecordCallback]\nPath fnWithRecordCallback\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }, {\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/CompletionInferValues.res 130:30\nposCursor:[130:30] posNoWhite:[130:29] Found expr:[130:3->130:33]\nPexp_apply ...[130:3->130:6] (~cb130:8->130:10=...[130:11->130:32])\nposCursor:[130:30] posNoWhite:[130:29] Found expr:[130:11->130:32]\nposCursor:[130:30] posNoWhite:[130:29] Found expr:[130:12->130:32]\nposCursor:[130:30] posNoWhite:[130:29] Found expr:[130:24->0:-1]\nposCursor:[130:30] posNoWhite:[130:29] Found expr:[130:24->0:-1]\nCompletable: Cpath Value[root]->\nPackage 
opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[root]->\nContextPath Value[root]\nPath root\nContextPath CPatternPath(CArgument CArgument Value[fn2](~cb)($0))->recordField(root)\nContextPath CArgument CArgument Value[fn2](~cb)($0)\nContextPath CArgument Value[fn2](~cb)\nContextPath Value[fn2]\nPath fn2\nCPPipe pathFromEnv:ReactDOM.Client.Root found:false\nPath ReactDOM.Client.Root.\n[{\n    \"label\": \"ReactDOM.Client.Root.unmount\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, unit) => unit\",\n    \"documentation\": null\n  }, {\n    \"label\": \"ReactDOM.Client.Root.render\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, React.element) => unit\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionInferValues.res 139:30\nposCursor:[139:30] posNoWhite:[139:29] Found expr:[139:3->139:33]\nPexp_apply ...[139:3->139:6] (~cb139:8->139:10=...[139:11->139:32])\nposCursor:[139:30] posNoWhite:[139:29] Found expr:[139:11->139:32]\nposCursor:[139:30] posNoWhite:[139:29] Found expr:[139:12->139:32]\nposCursor:[139:30] posNoWhite:[139:29] Found expr:[139:24->0:-1]\nposCursor:[139:30] posNoWhite:[139:29] Found expr:[139:24->0:-1]\nCompletable: Cpath Value[root]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[root]->\nContextPath Value[root]\nPath root\nContextPath CPatternPath(CArgument CArgument Value[fn3](~cb)($0))->recordField(root)\nContextPath CArgument CArgument Value[fn3](~cb)($0)\nContextPath CArgument Value[fn3](~cb)\nContextPath Value[fn3]\nPath fn3\nCPPipe pathFromEnv:CompletionSupport.Test found:false\nPath CompletionSupport.Test.\n[{\n    \"label\": \"CompletionSupport.Test.add\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"CompletionSupport.Test.addSelf\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => t\",\n    \"documentation\": null\n 
 }]\n\nComplete src/CompletionInferValues.res 143:47\nXXX Not found!\nCompletable: Cpattern Value[Belt, Int, toString](Nolabel)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Belt, Int, toString](Nolabel)\nContextPath Value[Belt, Int, toString]\nPath Belt.Int.toString\n[{\n    \"label\": \"\\\"\\\"\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"\\\"$0\\\"\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionInferValues.res 147:70\nXXX Not found!\nCompletable: Cpattern Value[Js, String2, split](Nolabel, Nolabel)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Js, String2, split](Nolabel, Nolabel)\nContextPath Value[Js, String2, split]\nPath Js.String2.split\n[{\n    \"label\": \"[]\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"[$0]\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionInferValues.res 151:105\nposCursor:[151:105] posNoWhite:[151:104] Found expr:[151:18->151:110]\nPexp_apply ...[151:18->151:49] (~prepare151:51->151:58=...[151:59->151:72], ~render151:74->151:80=...[151:81->151:106], ...[151:107->151:109])\nposCursor:[151:105] posNoWhite:[151:104] Found expr:[151:81->151:106]\nposCursor:[151:105] posNoWhite:[151:104] Found expr:[151:82->151:106]\nposCursor:[151:105] posNoWhite:[151:104] Found expr:[151:97->151:105]\nPexp_field [151:97->151:104] _:[151:105->151:105]\nCompletable: Cpath Value[support].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[support].\"\"\nContextPath Value[support]\nPath support\nContextPath CPatternPath(CArgument CArgument Value[CompletionSupport2, makeRenderer](~render)($0))->recordField(support)\nContextPath CArgument CArgument Value[CompletionSupport2, 
makeRenderer](~render)($0)\nContextPath CArgument Value[CompletionSupport2, makeRenderer](~render)\nContextPath Value[CompletionSupport2, makeRenderer]\nPath CompletionSupport2.makeRenderer\nContextPath Value[support]->\nContextPath Value[support]\nPath support\nContextPath CPatternPath(CArgument CArgument Value[CompletionSupport2, makeRenderer](~render)($0))->recordField(support)\nContextPath CArgument CArgument Value[CompletionSupport2, makeRenderer](~render)($0)\nContextPath CArgument Value[CompletionSupport2, makeRenderer](~render)\nContextPath Value[CompletionSupport2, makeRenderer]\nPath CompletionSupport2.makeRenderer\nCPPipe pathFromEnv:CompletionSupport.Nested found:false\nPath CompletionSupport.Nested.\n[{\n    \"label\": \"root\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"ReactDOM.Client.Root.t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nroot: ReactDOM.Client.Root.t\\n```\\n\\n```rescript\\ntype config = {root: ReactDOM.Client.Root.t}\\n```\"}\n  }]\n\nComplete src/CompletionInferValues.res 155:110\nposCursor:[155:110] posNoWhite:[155:109] Found expr:[155:18->155:115]\nPexp_apply ...[155:18->155:49] (~prepare155:51->155:58=...[155:59->155:72], ~render155:74->155:80=...[155:81->155:111], ...[155:112->155:114])\nposCursor:[155:110] posNoWhite:[155:109] Found expr:[155:81->155:111]\nposCursor:[155:110] posNoWhite:[155:109] Found expr:[155:82->155:111]\nposCursor:[155:110] posNoWhite:[155:109] Found expr:[155:104->0:-1]\nposCursor:[155:110] posNoWhite:[155:109] Found expr:[155:104->0:-1]\nCompletable: Cpath Value[root]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[root]->\nContextPath Value[root]\nPath root\nContextPath CPatternPath(CArgument CArgument Value[CompletionSupport2, makeRenderer](~render)($0))->recordField(support)->recordField(root)\nContextPath CArgument CArgument Value[CompletionSupport2, makeRenderer](~render)($0)\nContextPath CArgument 
Value[CompletionSupport2, makeRenderer](~render)\nContextPath Value[CompletionSupport2, makeRenderer]\nPath CompletionSupport2.makeRenderer\nCPPipe pathFromEnv:ReactDOM.Client.Root found:false\nPath ReactDOM.Client.Root.\n[{\n    \"label\": \"ReactDOM.Client.Root.unmount\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, unit) => unit\",\n    \"documentation\": null\n  }, {\n    \"label\": \"ReactDOM.Client.Root.render\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, React.element) => unit\",\n    \"documentation\": null\n  }]\n\nHover src/CompletionInferValues.res 160:27\nNothing at that position. Now trying to use completion.\nposCursor:[160:27] posNoWhite:[160:26] Found expr:[160:25->160:28]\nPexp_ident res:[160:25->160:28]\nCompletable: Cpath Value[res]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res]\nPath res\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res]\nPath res\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\"}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionJsx.res.txt",
    "content": "Complete src/CompletionJsx.res 3:17\nposCursor:[3:17] posNoWhite:[3:16] Found expr:[3:3->3:17]\nCompletable: Cpath Value[someString]->st\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someString]->st\nContextPath Value[someString]\nPath someString\nPath Js.String2.st\n[{\n    \"label\": \"Js.String2.startsWith\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWith(str, substr)` returns `true` if the `str` starts with\\n`substr`, `false` otherwise.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"Re\\\") == true\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"\\\") == true\\nJs.String2.startsWith(\\\"JavaScript\\\", \\\"Re\\\") == false\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.startsWithFrom\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t, int) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWithFrom(str, substr, n)` returns `true` if the `str` starts\\nwith `substr` starting at position `n`, false otherwise. 
If `n` is negative,\\nthe search starts at the beginning of `str`.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"Scri\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"JavaScript\\\", \\\"Scri\\\", 2) == false\\n```\\n\"}\n  }]\n\nComplete src/CompletionJsx.res 13:21\nposCursor:[13:21] posNoWhite:[13:20] Found expr:[8:13->33:3]\nposCursor:[13:21] posNoWhite:[13:20] Found expr:[8:14->33:3]\nposCursor:[13:21] posNoWhite:[13:20] Found expr:[9:4->32:10]\nposCursor:[13:21] posNoWhite:[13:20] Found expr:[10:4->32:10]\nposCursor:[13:21] posNoWhite:[13:20] Found expr:[11:4->32:10]\nposCursor:[13:21] posNoWhite:[13:20] Found expr:[12:4->32:10]\nposCursor:[13:21] posNoWhite:[13:20] Found expr:[13:7->32:10]\nposCursor:[13:21] posNoWhite:[13:20] Found expr:[13:7->13:21]\nCompletable: Cpath Value[someString]->st <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someString]->st <<jsx>>\nContextPath Value[someString]\nPath someString\nPath Js.String2.st\n[{\n    \"label\": \"React.string\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `string` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.String2.startsWith\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWith(str, substr)` returns `true` if the `str` starts with\\n`substr`, `false` otherwise.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## 
Examples\\n\\n```rescript\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"Re\\\") == true\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"\\\") == true\\nJs.String2.startsWith(\\\"JavaScript\\\", \\\"Re\\\") == false\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.startsWithFrom\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t, int) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWithFrom(str, substr, n)` returns `true` if the `str` starts\\nwith `substr` starting at position `n`, false otherwise. If `n` is negative,\\nthe search starts at the beginning of `str`.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"Scri\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"JavaScript\\\", \\\"Scri\\\", 2) == false\\n```\\n\"}\n  }]\n\nComplete src/CompletionJsx.res 18:24\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[8:13->33:3]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[8:14->33:3]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[9:4->32:10]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[10:4->32:10]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[11:4->32:10]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[12:4->32:10]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[15:5->32:10]\nJSX <div:[15:5->15:8] > _children:15:8\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[15:8->32:4]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[16:7->32:4]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[17:7->32:4]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[17:7->32:4]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[18:10->32:4]\nposCursor:[18:24] posNoWhite:[18:23] Found expr:[18:10->32:4]\nposCursor:[18:24] posNoWhite:[18:23] Found 
expr:[18:10->18:24]\nCompletable: Cpath Value[someString]->st <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someString]->st <<jsx>>\nContextPath Value[someString]\nPath someString\nPath Js.String2.st\n[{\n    \"label\": \"React.string\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `string` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.String2.startsWith\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWith(str, substr)` returns `true` if the `str` starts with\\n`substr`, `false` otherwise.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"Re\\\") == true\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"\\\") == true\\nJs.String2.startsWith(\\\"JavaScript\\\", \\\"Re\\\") == false\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.startsWithFrom\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t, int) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWithFrom(str, substr, n)` returns `true` if the `str` starts\\nwith `substr` starting at position `n`, false otherwise. 
If `n` is negative,\\nthe search starts at the beginning of `str`.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"Scri\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"JavaScript\\\", \\\"Scri\\\", 2) == false\\n```\\n\"}\n  }]\n\nComplete src/CompletionJsx.res 20:27\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[8:13->33:3]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[8:14->33:3]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[9:4->32:10]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[10:4->32:10]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[11:4->32:10]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[12:4->32:10]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[15:5->32:10]\nJSX <div:[15:5->15:8] > _children:15:8\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[15:8->32:4]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[16:7->32:4]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[17:7->32:4]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[17:7->32:4]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[20:10->32:4]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[20:10->32:4]\nposCursor:[20:27] posNoWhite:[20:26] Found expr:[20:10->20:27]\nCompletable: Cpath string->st <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath string->st <<jsx>>\nContextPath string\nPath Js.String2.st\n[{\n    \"label\": \"React.string\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `string` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.String2.startsWith\",\n    \"kind\": 12,\n    \"tags\": 
[],\n    \"detail\": \"(t, t) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWith(str, substr)` returns `true` if the `str` starts with\\n`substr`, `false` otherwise.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"Re\\\") == true\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"\\\") == true\\nJs.String2.startsWith(\\\"JavaScript\\\", \\\"Re\\\") == false\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.startsWithFrom\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t, int) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWithFrom(str, substr, n)` returns `true` if the `str` starts\\nwith `substr` starting at position `n`, false otherwise. If `n` is negative,\\nthe search starts at the beginning of `str`.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"Scri\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"JavaScript\\\", \\\"Scri\\\", 2) == false\\n```\\n\"}\n  }]\n\nComplete src/CompletionJsx.res 22:44\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[8:13->33:3]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[8:14->33:3]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[9:4->32:10]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[10:4->32:10]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[11:4->32:10]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[12:4->32:10]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[15:5->32:10]\nJSX <div:[15:5->15:8] > _children:15:8\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[15:8->32:4]\nposCursor:[22:44] 
posNoWhite:[22:43] Found expr:[16:7->32:4]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[17:7->32:4]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[17:7->32:4]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[22:10->32:4]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[22:10->32:4]\nposCursor:[22:44] posNoWhite:[22:43] Found expr:[22:10->22:44]\nCompletable: Cpath Value[Js, String2, trim](Nolabel)->st <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Js, String2, trim](Nolabel)->st <<jsx>>\nContextPath Value[Js, String2, trim](Nolabel)\nContextPath Value[Js, String2, trim]\nPath Js.String2.trim\nPath Js.String2.st\n[{\n    \"label\": \"React.string\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `string` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.String2.startsWith\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWith(str, substr)` returns `true` if the `str` starts with\\n`substr`, `false` otherwise.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"Re\\\") == true\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"\\\") == true\\nJs.String2.startsWith(\\\"JavaScript\\\", \\\"Re\\\") == false\\n```\\n\"}\n  }, {\n    \"label\": \"Js.String2.startsWithFrom\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t, int) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWithFrom(str, substr, n)` returns `true` if the `str` starts\\nwith `substr` starting at position `n`, false otherwise. 
If `n` is negative,\\nthe search starts at the beginning of `str`.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"Scri\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"JavaScript\\\", \\\"Scri\\\", 2) == false\\n```\\n\"}\n  }]\n\nComplete src/CompletionJsx.res 24:19\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[8:13->33:3]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[8:14->33:3]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[9:4->32:10]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[10:4->32:10]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[11:4->32:10]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[12:4->32:10]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[15:5->32:10]\nJSX <div:[15:5->15:8] > _children:15:8\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[15:8->32:4]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[16:7->32:4]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[17:7->32:4]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[17:7->32:4]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[24:10->32:4]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[24:10->32:4]\nposCursor:[24:19] posNoWhite:[24:18] Found expr:[24:10->0:-1]\nCompletable: Cpath Value[someInt]-> <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someInt]-> <<jsx>>\nContextPath Value[someInt]\nPath someInt\nPath Belt.Int.\n[{\n    \"label\": \"React.int\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `int` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Belt.Int.*\",\n    \"kind\": 12,\n    
\"tags\": [],\n    \"detail\": \"(int, int) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nMultiplication of two `int` values. Same as the multiplication from `Pervasives`.\\n\\n## Examples\\n\\n```rescript\\nopen Belt.Int\\nJs.log(2 * 2 === 4) /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int./\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nDivision of two `int` values. Same as the division from `Pervasives`.\\n\\n## Examples\\n\\n```rescript\\nopen Belt.Int\\nJs.log(4 / 2 === 2); /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.toFloat\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => float\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `float`.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toFloat(1) === 1.0) /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.-\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nSubtraction of two `int` values. Same as the subtraction from `Pervasives`.\\n\\n## Examples\\n\\n```rescript\\nopen Belt.Int\\nJs.log(2 - 1 === 1) /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.+\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nAddition of two `int` values. 
Same as the addition from `Pervasives`.\\n\\n## Examples\\n\\n```rescript\\nopen Belt.Int\\nJs.log(2 + 2 === 4) /* true */\\n```\\n\"}\n  }]\n\nComplete src/CompletionJsx.res 26:14\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[8:13->33:3]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[8:14->33:3]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[9:4->32:10]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[10:4->32:10]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[11:4->32:10]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[12:4->32:10]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[15:5->32:10]\nJSX <div:[15:5->15:8] > _children:15:8\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[15:8->32:4]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[16:7->32:4]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[17:7->32:4]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[17:7->32:4]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[26:10->32:4]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[26:10->32:4]\nposCursor:[26:14] posNoWhite:[26:13] Found expr:[26:10->0:-1]\nCompletable: Cpath int-> <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath int-> <<jsx>>\nContextPath int\nPath Belt.Int.\n[{\n    \"label\": \"React.int\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `int` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Belt.Int.*\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nMultiplication of two `int` values. 
Same as the multiplication from `Pervasives`.\\n\\n## Examples\\n\\n```rescript\\nopen Belt.Int\\nJs.log(2 * 2 === 4) /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int./\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nDivision of two `int` values. Same as the division from `Pervasives`.\\n\\n## Examples\\n\\n```rescript\\nopen Belt.Int\\nJs.log(4 / 2 === 2); /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.toFloat\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => float\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `float`.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toFloat(1) === 1.0) /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.-\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nSubtraction of two `int` values. Same as the subtraction from `Pervasives`.\\n\\n## Examples\\n\\n```rescript\\nopen Belt.Int\\nJs.log(2 - 1 === 1) /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.+\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, int) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nAddition of two `int` values. 
Same as the addition from `Pervasives`.\\n\\n## Examples\\n\\n```rescript\\nopen Belt.Int\\nJs.log(2 + 2 === 4) /* true */\\n```\\n\"}\n  }]\n\nComplete src/CompletionJsx.res 28:20\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[8:13->33:3]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[8:14->33:3]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[9:4->32:10]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[10:4->32:10]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[11:4->32:10]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[12:4->32:10]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[15:5->32:10]\nJSX <div:[15:5->15:8] > _children:15:8\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[15:8->32:4]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[16:7->32:4]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[17:7->32:4]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[17:7->32:4]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[28:10->32:4]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[28:10->32:4]\nposCursor:[28:20] posNoWhite:[28:19] Found expr:[28:10->28:20]\nCompletable: Cpath Value[someArr]->a <<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someArr]->a <<jsx>>\nContextPath Value[someArr]\nPath someArr\nPath Js.Array2.a\n[{\n    \"label\": \"React.array\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"array<React.element>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `array` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Array2.append\",\n    \"kind\": 12,\n    \"tags\": [1],\n    \"detail\": \"(t<'a>, 'a) => t<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: `append` is not type-safe. 
Use `concat` instead.\\n\\n\"}\n  }]\n\nComplete src/CompletionJsx.res 30:12\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[8:13->33:3]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[8:14->33:3]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[9:4->32:10]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[10:4->32:10]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[11:4->32:10]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[12:4->32:10]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[15:5->32:10]\nJSX <div:[15:5->15:8] > _children:15:8\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[15:8->33:2]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[16:7->33:2]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[17:7->33:2]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[17:7->33:2]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[30:10->33:2]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[30:10->33:2]\nposCursor:[30:12] posNoWhite:[30:11] Found expr:[30:10->32:10]\nJSX <di:[30:10->30:12] div[32:6->32:9]=...[32:6->32:9]> _children:32:9\nCompletable: ChtmlElement <di\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n[{\n    \"label\": \"<dialog>\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Defines a dialog box or subwindow.\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Defines a dialog box or subwindow.\"},\n    \"insertText\": \"dialog\"\n  }, {\n    \"label\": \"<dir>\",\n    \"kind\": 4,\n    \"tags\": [1],\n    \"detail\": \"Defines a directory list. Use <ul> instead.\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Deprecated: true\\n\\nDefines a directory list. 
Use <ul> instead.\"},\n    \"insertText\": \"dir\"\n  }, {\n    \"label\": \"<div>\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Specifies a division or a section in a document.\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Specifies a division or a section in a document.\"},\n    \"insertText\": \"div\"\n  }]\n\nComplete src/CompletionJsx.res 45:23\nposCursor:[45:23] posNoWhite:[45:22] Found expr:[45:4->45:23]\nJSX <CompWithoutJsxPpx:[45:4->45:21] n[45:22->45:23]=...[45:22->45:23]> _children:None\nCompletable: Cjsx([CompWithoutJsxPpx], n, [n])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath CompWithoutJsxPpx.make\n[{\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionJsx.res 48:27\nposCursor:[48:27] posNoWhite:[48:26] Found expr:[48:4->48:28]\nJSX <SomeComponent:[48:4->48:17] someProp[48:18->48:26]=...[48:18->48:26]> _children:None\nCompletable: Cexpression CJsxPropValue [SomeComponent] someProp\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [SomeComponent] someProp\nPath SomeComponent.make\n[{\n    \"label\": \"\\\"\\\"\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"{\\\"$0\\\"}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsx.res 51:11\nposCursor:[51:11] posNoWhite:[51:10] Found expr:[51:4->51:11]\nJSX <h1:[51:4->51:6] hidd[51:7->51:11]=...[51:7->51:11]> _children:None\nCompletable: Cjsx([h1], hidd, [hidd])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath ReactDOM.domProps\nPath PervasivesU.JsxDOM.domProps\n[{\n    \"label\": \"hidden\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionJsx.res 
61:30\nposCursor:[61:30] posNoWhite:[61:28] Found expr:[61:4->61:29]\nJSX <IntrinsicElementLowercase:[61:4->61:29] > _children:None\nCompletable: Cjsx([IntrinsicElementLowercase], \"\", [])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath IntrinsicElementLowercase.make\n[{\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<string>\",\n    \"documentation\": null\n  }, {\n    \"label\": \"age\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<int>\",\n    \"documentation\": null\n  }, {\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionJsx.res 73:36\nposCursor:[73:36] posNoWhite:[73:35] Found expr:[73:4->73:41]\nJSX <MultiPropComp:[73:4->73:17] name[73:18->73:22]=...[73:23->73:30] time[73:31->73:35]=...[73:37->73:40]> _children:None\nCompletable: Cexpression CJsxPropValue [MultiPropComp] time\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [MultiPropComp] time\nPath MultiPropComp.make\n[{\n    \"label\": \"Now\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Now\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nNow\\n```\\n\\n```rescript\\ntype time = Now | Later\\n```\"},\n    \"insertText\": \"{Now}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Later\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Later\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nLater\\n```\\n\\n```rescript\\ntype time = Now | Later\\n```\"},\n    \"insertText\": \"{Later}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsx.res 76:36\nposCursor:[76:36] posNoWhite:[76:35] Found expr:[76:4->76:40]\nJSX <MultiPropComp:[76:4->76:17] name[76:18->76:22]=...[76:23->76:30] time[76:31->76:35]=...[76:37->76:40]> _children:None\nCompletable: 
Cexpression CJsxPropValue [MultiPropComp] time\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [MultiPropComp] time\nPath MultiPropComp.make\n[{\n    \"label\": \"Now\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Now\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nNow\\n```\\n\\n```rescript\\ntype time = Now | Later\\n```\"},\n    \"insertText\": \"{Now}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Later\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Later\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nLater\\n```\\n\\n```rescript\\ntype time = Now | Later\\n```\"},\n    \"insertText\": \"{Later}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsx.res 79:28\nposCursor:[79:28] posNoWhite:[79:27] Found expr:[79:4->79:32]\nJSX <MultiPropComp:[79:4->79:17] name[79:18->79:22]=...[79:18->79:22] time[79:23->79:27]=...[79:29->79:32]> _children:None\nCompletable: Cexpression CJsxPropValue [MultiPropComp] time\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [MultiPropComp] time\nPath MultiPropComp.make\n[{\n    \"label\": \"Now\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Now\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nNow\\n```\\n\\n```rescript\\ntype time = Now | Later\\n```\"},\n    \"insertText\": \"{Now}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Later\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Later\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nLater\\n```\\n\\n```rescript\\ntype time = Now | Later\\n```\"},\n    \"insertText\": \"{Later}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsx.res 89:26\nposCursor:[89:26] posNoWhite:[89:24] Found expr:[89:4->89:27]\nJSX <Info:[89:4->89:8] 
_type[89:9->89:14]=...[89:16->89:24]> _children:89:26\nCompletable: Cjsx([Info], \"\", [_type])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath Info.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionJsx.res 92:19\nposCursor:[92:19] posNoWhite:[92:18] Found expr:[92:12->92:24]\nJSX <p:[92:12->92:13] > _children:92:13\nposCursor:[92:19] posNoWhite:[92:18] Found expr:[92:13->92:20]\nposCursor:[92:19] posNoWhite:[92:18] Found expr:[92:15->92:20]\nposCursor:[92:19] posNoWhite:[92:18] Found expr:[92:15->92:19]\nPexp_field [92:15->92:17] s:[92:18->92:19]\nCompletable: Cpath string.s\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath string.s\nContextPath string\nContextPath string->s <<jsx>>\nContextPath string\nPath Js.String2.s\n[{\n    \"label\": \"->React.string\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `string` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"->React.string\",\n    \"insertTextFormat\": 2,\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.startsWith\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWith(str, substr)` returns `true` if the `str` starts with\\n`substr`, `false` otherwise.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"Re\\\") == 
true\\nJs.String2.startsWith(\\\"ReScript\\\", \\\"\\\") == true\\nJs.String2.startsWith(\\\"JavaScript\\\", \\\"Re\\\") == false\\n```\\n\"},\n    \"sortText\": \"startsWith\",\n    \"insertText\": \"->Js.String2.startsWith\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.splitAtMost\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t, ~limit: int) => array<t>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`splitAtMost delimiter ~limit: n str` splits the given `str` at every occurrence of `delimiter` and returns an array of the first `n` resulting substrings. If `n` is negative or greater than the number of substrings, the array will contain all the substrings.\\n\\n```\\nsplitAtMost \\\"ant/bee/cat/dog/elk\\\" \\\"/\\\" ~limit: 3 = [|\\\"ant\\\"; \\\"bee\\\"; \\\"cat\\\"|];;\\nsplitAtMost \\\"ant/bee/cat/dog/elk\\\" \\\"/\\\" ~limit: 0 = [| |];;\\nsplitAtMost \\\"ant/bee/cat/dog/elk\\\" \\\"/\\\" ~limit: 9 = [|\\\"ant\\\"; \\\"bee\\\"; \\\"cat\\\"; \\\"dog\\\"; \\\"elk\\\"|];;\\n```\\n\"},\n    \"sortText\": \"splitAtMost\",\n    \"insertText\": \"->Js.String2.splitAtMost\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.substrAtMost\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int, ~length: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`substrAtMost(str, ~from: pos, ~length: n)` returns the substring of `str` of\\nlength `n` starting at position `pos`.\\n- If `pos` is less than zero, the starting position is the length of `str - pos`.\\n- If `pos` is greater than or equal to the length of `str`, returns the empty string.\\n- If `n` is 
less than or equal to zero, returns the empty string.\\n\\nJavaScript’s `String.substr()` is a legacy function. When possible, use\\n`substring()` instead.\\n\\nSee [`String.substr`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.substrAtMost(\\\"abcdefghij\\\", ~from=3, ~length=4) == \\\"defg\\\"\\nJs.String2.substrAtMost(\\\"abcdefghij\\\", ~from=-3, ~length=4) == \\\"hij\\\"\\nJs.String2.substrAtMost(\\\"abcdefghij\\\", ~from=12, ~length=2) == \\\"\\\"\\n```\\n\"},\n    \"sortText\": \"substrAtMost\",\n    \"insertText\": \"->Js.String2.substrAtMost\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.sliceToEnd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`sliceToEnd(str, from:n)` returns the substring of `str` starting at character\\n`n` to the end of the string.\\n- If `n` is negative, then it is evaluated as `length(str - n)`.\\n- If `n` is greater than the length of `str`, then sliceToEnd returns the empty string.\\n\\nSee [`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=4) == \\\"efg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=-2) == \\\"fg\\\"\\nJs.String2.sliceToEnd(\\\"abcdefg\\\", ~from=7) == \\\"\\\"\\n```\\n\"},\n    \"sortText\": \"sliceToEnd\",\n    \"insertText\": \"->Js.String2.sliceToEnd\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.slice\",\n    
\"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int, ~to_: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`slice(str, from:n1, to_:n2)` returns the substring of `str` starting at\\ncharacter `n1` up to but not including `n2`.\\n- If either `n1` or `n2` is negative, then it is evaluated as `length(str - n1)` or `length(str - n2)`.\\n- If `n2` is greater than the length of `str`, then it is treated as `length(str)`.\\n- If `n1` is greater than `n2`, slice returns the empty string.\\n\\nSee [`String.slice`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/slice) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=5) == \\\"cde\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=2, ~to_=9) == \\\"cdefg\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=-4, ~to_=-2) == \\\"de\\\"\\nJs.String2.slice(\\\"abcdefg\\\", ~from=5, ~to_=1) == \\\"\\\"\\n```\\n\"},\n    \"sortText\": \"slice\",\n    \"insertText\": \"->Js.String2.slice\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.splitByRe\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, Js_re.t) => array<option<t>>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`splitByRe(str, regex)` splits the given `str` at every occurrence of `regex`\\nand returns an array of the resulting substrings.\\n\\nSee [`String.split`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/split)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.splitByRe(\\\"art; bed , cog ;dad\\\", %re(\\\"/\\\\s*[,;]\\\\s*TODO/\\\")) == [\\n    Some(\\\"art\\\"),\\n    Some(\\\"bed\\\"),\\n    Some(\\\"cog\\\"),\\n    Some(\\\"dad\\\"),\\n  ]\\n```\\n\"},\n    \"sortText\": \"splitByRe\",\n    
\"insertText\": \"->Js.String2.splitByRe\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.startsWithFrom\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t, int) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `startsWithFrom(str, substr, n)` returns `true` if the `str` starts\\nwith `substr` starting at position `n`, false otherwise. If `n` is negative,\\nthe search starts at the beginning of `str`.\\n\\nSee [`String.startsWith`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"Scri\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"ReScript\\\", \\\"\\\", 2) == true\\nJs.String2.startsWithFrom(\\\"JavaScript\\\", \\\"Scri\\\", 2) == false\\n```\\n\"},\n    \"sortText\": \"startsWithFrom\",\n    \"insertText\": \"->Js.String2.startsWithFrom\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.split\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t) => array<t>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`split(str, delimiter)` splits the given `str` at every occurrence of\\n`delimiter` and returns an array of the resulting substrings.\\n\\nSee [`String.split`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/split)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.split(\\\"2018-01-02\\\", \\\"-\\\") == [\\\"2018\\\", \\\"01\\\", \\\"02\\\"]\\nJs.String2.split(\\\"a,b,,c\\\", \\\",\\\") == [\\\"a\\\", \\\"b\\\", \\\"\\\", 
\\\"c\\\"]\\nJs.String2.split(\\\"good::bad as great::awful\\\", \\\"::\\\") == [\\\"good\\\", \\\"bad as great\\\", \\\"awful\\\"]\\nJs.String2.split(\\\"has-no-delimiter\\\", \\\";\\\") == [\\\"has-no-delimiter\\\"]\\n```\\n\"},\n    \"sortText\": \"split\",\n    \"insertText\": \"->Js.String2.split\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.splitByReAtMost\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, Js_re.t, ~limit: int) => array<option<t>>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`splitByReAtMost(str, regex, ~limit:n)` splits the given `str` at every\\noccurrence of `regex` and returns an array of the first `n` resulting\\nsubstrings. If `n` is negative or greater than the number of substrings, the\\narray will contain all the substrings.\\n\\nSee [`String.split`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/split)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.splitByReAtMost(\\\"one: two: three: four\\\", %re(\\\"/\\\\s*:\\\\s*TODO/\\\"), ~limit=3) == [\\n    Some(\\\"one\\\"),\\n    Some(\\\"two\\\"),\\n    Some(\\\"three\\\"),\\n  ]\\n\\nJs.String2.splitByReAtMost(\\\"one: two: three: four\\\", %re(\\\"/\\\\s*:\\\\s*TODO/\\\"), ~limit=0) == []\\n\\nJs.String2.splitByReAtMost(\\\"one: two: three: four\\\", %re(\\\"/\\\\s*:\\\\s*TODO/\\\"), ~limit=8) == [\\n    Some(\\\"one\\\"),\\n    Some(\\\"two\\\"),\\n    Some(\\\"three\\\"),\\n    Some(\\\"four\\\"),\\n  ]\\n```\\n\"},\n    \"sortText\": \"splitByReAtMost\",\n    \"insertText\": \"->Js.String2.splitByReAtMost\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": 
\"->Js.String2.substring\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int, ~to_: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`substring(str, ~from: start, ~to_: finish)` returns characters `start` up to\\nbut not including finish from `str`.\\n- If `start` is less than zero, it is treated as zero.\\n- If `finish` is zero or negative, the empty string is returned.\\n- If `start` is greater than `finish`, the `start` and `finish` points are swapped.\\n\\nSee [`String.substring`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.substring(\\\"playground\\\", ~from=3, ~to_=6) == \\\"ygr\\\"\\nJs.String2.substring(\\\"playground\\\", ~from=6, ~to_=3) == \\\"ygr\\\"\\nJs.String2.substring(\\\"playground\\\", ~from=4, ~to_=12) == \\\"ground\\\"\\n```\\n\"},\n    \"sortText\": \"substring\",\n    \"insertText\": \"->Js.String2.substring\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.substr\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`substr(str, ~from:n)` returns the substring of `str` from position `n` to the\\nend of the string.\\n- If `n` is less than zero, the starting position is the length of `str - n`.\\n- If `n` is greater than or equal to the length of `str`, returns the empty string.\\n\\nJavaScript’s `String.substr()` is a legacy function. 
When possible, use\\n`substring()` instead.\\n\\nSee [`String.substr`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substr)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.substr(\\\"abcdefghij\\\", ~from=3) == \\\"defghij\\\"\\nJs.String2.substr(\\\"abcdefghij\\\", ~from=-3) == \\\"hij\\\"\\nJs.String2.substr(\\\"abcdefghij\\\", ~from=12) == \\\"\\\"\\n```\\n\"},\n    \"sortText\": \"substr\",\n    \"insertText\": \"->Js.String2.substr\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.substringToEnd\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, ~from: int) => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`substringToEnd(str, ~from: start)` returns the substring of `str` from\\nposition `start` to the end.\\n- If `start` is less than or equal to zero, the entire string is returned.\\n- If `start` is greater than or equal to the length of `str`, the empty string is returned.\\n\\nSee [`String.substring`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/substring) on MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.substringToEnd(\\\"playground\\\", ~from=4) == \\\"ground\\\"\\nJs.String2.substringToEnd(\\\"playground\\\", ~from=-3) == \\\"playground\\\"\\nJs.String2.substringToEnd(\\\"playground\\\", ~from=12) == \\\"\\\"\\n```\\n\"},\n    \"sortText\": \"substringToEnd\",\n    \"insertText\": \"->Js.String2.substringToEnd\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.search\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, Js_re.t) => int\",\n    \"documentation\": {\"kind\": 
\"markdown\", \"value\": \"\\n`search(str, regexp)` returns the starting position of the first match of\\n`regexp` in the given `str`, or -1 if there is no match.\\n\\nSee [`String.search`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/search)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.search(\\\"testing 1 2 3\\\", %re(\\\"/\\\\d+/\\\")) == 8\\nJs.String2.search(\\\"no numbers\\\", %re(\\\"/\\\\d+/\\\")) == -1\\n```\\n\"},\n    \"sortText\": \"search\",\n    \"insertText\": \"->Js.String2.search\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 92, \"character\": 17}, \"end\": {\"line\": 92, \"character\": 18}},\n      \"newText\": \"\"\n      }]\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionJsxProps.res.txt",
    "content": "Complete src/CompletionJsxProps.res 0:47\nposCursor:[0:47] posNoWhite:[0:46] Found expr:[0:12->0:47]\nJSX <CompletionSupport.TestComponent:[0:12->0:43] on[0:44->0:46]=...__ghost__[0:-1->0:-1]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletionSupport, TestComponent] on\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletionSupport, TestComponent] on\nPath CompletionSupport.TestComponent.make\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionJsxProps.res 3:48\nposCursor:[3:48] posNoWhite:[3:47] Found expr:[3:12->3:48]\nJSX <CompletionSupport.TestComponent:[3:12->3:43] on[3:44->3:46]=...[3:47->3:48]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletionSupport, TestComponent] on=t\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletionSupport, TestComponent] on\nPath CompletionSupport.TestComponent.make\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionJsxProps.res 6:50\nposCursor:[6:50] posNoWhite:[6:49] Found expr:[6:12->6:50]\nJSX <CompletionSupport.TestComponent:[6:12->6:43] test[6:44->6:48]=...[6:49->6:50]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletionSupport, TestComponent] test=T\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletionSupport, TestComponent] test\nPath CompletionSupport.TestComponent.make\n[{\n    \"label\": \"Two\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": 
\"```rescript\\nTwo\\n```\\n\\n```rescript\\ntype testVariant = One | Two | Three(int)\\n```\"},\n    \"sortText\": \"A Two\",\n    \"insertText\": \"{Two}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(int)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(int)\\n```\\n\\n```rescript\\ntype testVariant = One | Two | Three(int)\\n```\"},\n    \"sortText\": \"A Three(_)\",\n    \"insertText\": \"{Three($0)}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"TableclothMap\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module TableclothMap\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"TableclothMap\",\n      \"filePath\": \"src/CompletionJsxProps.res\"\n    }\n  }, {\n    \"label\": \"TypeArgCtx\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module TypeArgCtx\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"TypeArgCtx\",\n      \"filePath\": \"src/CompletionJsxProps.res\"\n    }\n  }, {\n    \"label\": \"TypeAtPosCompletion\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module TypeAtPosCompletion\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"TypeAtPosCompletion\",\n      \"filePath\": \"src/CompletionJsxProps.res\"\n    }\n  }, {\n    \"label\": \"TypeDefinition\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module TypeDefinition\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"TypeDefinition\",\n      \"filePath\": \"src/CompletionJsxProps.res\"\n    }\n  }]\n\nComplete src/CompletionJsxProps.res 9:52\nposCursor:[9:52] posNoWhite:[9:51] Found expr:[9:12->9:52]\nJSX <CompletionSupport.TestComponent:[9:12->9:43] polyArg[9:44->9:51]=...__ghost__[0:-1->0:-1]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletionSupport, TestComponent] polyArg\nPackage opens 
Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletionSupport, TestComponent] polyArg\nPath CompletionSupport.TestComponent.make\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"{#one}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#three(int, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#three(int, bool)\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"{#three(${1:_}, ${2:_})}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"{#two}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two2\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two2\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two2\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"{#two2}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsxProps.res 12:54\nposCursor:[12:54] posNoWhite:[12:53] Found expr:[12:12->12:54]\nJSX <CompletionSupport.TestComponent:[12:12->12:43] polyArg[12:44->12:51]=...[12:52->12:54]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletionSupport, TestComponent] polyArg=#t\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletionSupport, TestComponent] polyArg\nPath 
CompletionSupport.TestComponent.make\n[{\n    \"label\": \"#three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#three(int, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#three(int, bool)\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"{three(${1:_}, ${2:_})}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"{two}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two2\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two2\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two2\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"{two2}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsxProps.res 15:22\nposCursor:[15:22] posNoWhite:[15:21] Found expr:[15:12->15:25]\nJSX <div:[15:12->15:15] muted[15:16->15:21]=...__ghost__[0:-1->0:-1]> _children:15:23\nCompletable: Cexpression CJsxPropValue [div] muted\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [div] muted\nPath ReactDOM.domProps\nPath PervasivesU.JsxDOM.domProps\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionJsxProps.res 18:29\nposCursor:[18:29] posNoWhite:[18:28] Found expr:[18:12->18:32]\nJSX <div:[18:12->18:15] onMouseEnter[18:16->18:28]=...__ghost__[0:-1->0:-1]> _children:18:30\nCompletable: Cexpression CJsxPropValue [div] onMouseEnter\nPackage 
opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [div] onMouseEnter\nPath ReactDOM.domProps\nPath PervasivesU.JsxDOM.domProps\n[{\n    \"label\": \"event => event\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"JsxEventU.Mouse.t => unit\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"{${1:event} => ${0:event}}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsxProps.res 22:52\nposCursor:[22:52] posNoWhite:[22:51] Found expr:[22:12->22:52]\nJSX <CompletionSupport.TestComponent:[22:12->22:43] testArr[22:44->22:51]=...__ghost__[0:-1->0:-1]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletionSupport, TestComponent] testArr\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletionSupport, TestComponent] testArr\nPath CompletionSupport.TestComponent.make\n[{\n    \"label\": \"[]\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"testVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype testVariant = One | Two | Three(int)\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{[$0]}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsxProps.res 26:54\nposCursor:[26:54] posNoWhite:[26:53] Found expr:[26:12->26:56]\nJSX <CompletionSupport.TestComponent:[26:12->26:43] testArr[26:44->26:51]=...[26:53->26:55]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletionSupport, TestComponent] testArr->array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletionSupport, TestComponent] testArr\nPath CompletionSupport.TestComponent.make\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype testVariant = One | Two | 
Three(int)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo\\n```\\n\\n```rescript\\ntype testVariant = One | Two | Three(int)\\n```\"},\n    \"insertText\": \"Two\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(int)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(int)\\n```\\n\\n```rescript\\ntype testVariant = One | Two | Three(int)\\n```\"},\n    \"insertText\": \"Three($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsxProps.res 31:53\nposCursor:[31:53] posNoWhite:[31:52] Found expr:[31:12->31:54]\nJSX <CompletionSupport.TestComponent:[31:12->31:43] polyArg[31:44->31:51]=...[31:52->31:54]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletionSupport, TestComponent] polyArg->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletionSupport, TestComponent] polyArg\nPath CompletionSupport.TestComponent.make\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"#one\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#three(int, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#three(int, bool)\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"#three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two\",\n    \"kind\": 4,\n    \"tags\": [],\n    
\"detail\": \"#two\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"#two\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two2\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two2\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two2\\n```\\n\\n```rescript\\n[#one | #three(int, bool) | #two | #two2]\\n```\"},\n    \"insertText\": \"#two2\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionJsxProps.res 34:49\nposCursor:[34:49] posNoWhite:[34:48] Found expr:[34:12->34:50]\nJSX <CompletionSupport.TestComponent:[34:12->34:43] on[34:44->34:46]=...[34:48->34:49]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletionSupport, TestComponent] on=t->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletionSupport, TestComponent] on\nPath CompletionSupport.TestComponent.make\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"tsomeVar\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"[> #two]\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionJsxProps.res 44:44\nposCursor:[44:44] posNoWhite:[44:43] Found expr:[44:12->44:44]\nJSX <CompletableComponentLazy:[44:12->44:36] status[44:37->44:43]=...__ghost__[0:-1->0:-1]> _children:None\nCompletable: Cexpression CJsxPropValue [CompletableComponentLazy] status\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [CompletableComponentLazy] status\nPath CompletableComponentLazy.make\n[{\n    \"label\": \"On\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"On\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOn\\n```\\n\\n```rescript\\ntype status = 
On | Off\\n```\"},\n    \"insertText\": \"{On}\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Off\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Off\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOff\\n```\\n\\n```rescript\\ntype status = On | Off\\n```\"},\n    \"insertText\": \"{Off}\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionMultipleEditorCompleteFrom.res.txt",
    "content": "Complete src/CompletionMultipleEditorCompleteFrom.res 19:5\nposCursor:[19:5] posNoWhite:[19:4] Found expr:[19:3->19:5]\nPexp_field [19:3->19:4] _:[22:0->19:5]\nCompletable: Cpath Value[a].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[a].\"\"\nContextPath Value[a]\nPath a\nContextPath Value[a]->\nContextPath Value[a]\nPath a\nCPPipe pathFromEnv:A found:true\nPath A.\nPath B.\nPath C.\n[{\n    \"label\": \"->B.b\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"A.a => int\",\n    \"documentation\": null,\n    \"sortText\": \"b\",\n    \"insertText\": \"->B.b\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 19, \"character\": 4}, \"end\": {\"line\": 19, \"character\": 5}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->C.c\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"A.a => char\",\n    \"documentation\": null,\n    \"sortText\": \"c\",\n    \"insertText\": \"->C.c\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 19, \"character\": 4}, \"end\": {\"line\": 19, \"character\": 5}},\n      \"newText\": \"\"\n      }]\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionObjects.res.txt",
    "content": "Complete src/CompletionObjects.res 5:7\nposCursor:[5:7] posNoWhite:[5:5] Found expr:[2:10->9:1]\nposCursor:[5:7] posNoWhite:[5:5] Found expr:[2:10->9:1]\nposCursor:[5:7] posNoWhite:[5:5] Found pattern:__ghost__[0:-1->7:5]\nposCursor:[5:7] posNoWhite:[5:5] Found pattern:__ghost__[0:-1->7:5]\nCompletable: Cpattern Value[x]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x]\nPath x\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionPattern.res.txt",
    "content": "Complete src/CompletionPattern.res 7:13\nposCursor:[7:13] posNoWhite:[7:12] Found expr:[7:3->7:13]\n[]\n\nComplete src/CompletionPattern.res 10:15\nXXX Not found!\nCompletable: Cpattern Value[v]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[v]\nPath v\n[{\n    \"label\": \"(_, _, _)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(bool, option<bool>, (bool, bool))\",\n    \"documentation\": null,\n    \"insertText\": \"(${1:_}, ${2:_}, ${3:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 13:18\nposCursor:[13:18] posNoWhite:[13:17] Found pattern:[13:16->13:22]\nposCursor:[13:18] posNoWhite:[13:17] Found pattern:[13:17->13:18]\nCompletable: Cpattern Value[v]=t->tuple($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[v]\nPath v\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 16:25\nposCursor:[16:25] posNoWhite:[16:24] Found pattern:[16:16->16:30]\nposCursor:[16:25] posNoWhite:[16:24] Found pattern:[16:23->16:29]\nposCursor:[16:25] posNoWhite:[16:24] Found pattern:[16:24->16:25]\nCompletable: Cpattern Value[v]=f->tuple($2), tuple($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[v]\nPath v\n[{\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 21:15\nXXX Not found!\nCompletable: Cpattern Value[x]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x]\nPath x\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    
\"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 24:17\nposCursor:[24:17] posNoWhite:[24:16] Found pattern:[24:16->24:17]\nCompletable: Cpattern Value[x]=t\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x]\nPath x\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 46:15\nXXX Not found!\nCompletable: Cpattern Value[f]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[f]\nPath f\n[{\n    \"label\": \"{}\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"someRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 49:17\nposCursor:[49:17] posNoWhite:[49:16] Found pattern:[49:16->49:18]\nCompletable: Cpattern Value[f]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[f]\nPath f\n[{\n    \"label\": \"first\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nfirst: int\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"second\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"(bool, option<someRecord>)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsecond: (bool, option<someRecord>)\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | 
#second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"optThird\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<[#first | #second(someRecord)]>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noptThird: option<[#first | #second(someRecord)]>\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"nest\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"nestedRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnest: nestedRecord\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 52:24\nposCursor:[52:24] posNoWhite:[52:22] Found pattern:[52:16->52:35]\nCompletable: Cpattern Value[f]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[f]\nPath f\n[{\n    \"label\": \"optThird\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<[#first | #second(someRecord)]>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noptThird: option<[#first | #second(someRecord)]>\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"nest\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"nestedRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnest: nestedRecord\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 55:19\nposCursor:[55:19] 
posNoWhite:[55:18] Found pattern:[55:16->55:20]\nposCursor:[55:19] posNoWhite:[55:18] Found pattern:[55:17->55:19]\nCompletable: Cpattern Value[f]=fi->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[f]\nPath f\n[{\n    \"label\": \"first\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nfirst: int\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 58:19\nposCursor:[58:19] posNoWhite:[58:18] Found pattern:[58:16->58:24]\nposCursor:[58:19] posNoWhite:[58:18] Found pattern:[58:17->58:20]\nposCursor:[58:19] posNoWhite:[58:18] Found pattern:[58:18->58:19]\nCompletable: Cpattern Value[z]=o->tuple($0), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[z]\nPath z\n[{\n    \"label\": \"optThird\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<[#first | #second(someRecord)]>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noptThird: option<[#first | #second(someRecord)]>\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 61:22\nposCursor:[61:22] posNoWhite:[61:21] Found pattern:[61:16->61:25]\nCompletable: Cpattern Value[f]->recordField(nest)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[f]\nPath f\n[{\n    \"label\": \"{}\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"nestedRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype nestedRecord = {nested: bool}\\n```\"},\n    \"sortText\": \"A\",\n    
\"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 64:24\nposCursor:[64:24] posNoWhite:[64:23] Found pattern:[64:16->64:26]\nposCursor:[64:24] posNoWhite:[64:23] Found pattern:[64:23->64:25]\nCompletable: Cpattern Value[f]->recordField(nest), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[f]\nPath f\n[{\n    \"label\": \"nested\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnested: bool\\n```\\n\\n```rescript\\ntype nestedRecord = {nested: bool}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 70:22\nposCursor:[70:22] posNoWhite:[70:21] Found expr:[69:2->72:13]\nposCursor:[70:22] posNoWhite:[70:21] Found expr:[70:5->72:13]\nposCursor:[70:22] posNoWhite:[70:21] Found pattern:[70:21->70:23]\nCompletable: Cpattern Value[nest]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[nest]\nPath nest\n[{\n    \"label\": \"nested\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnested: bool\\n```\\n\\n```rescript\\ntype nestedRecord = {nested: bool}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 76:8\nposCursor:[76:8] posNoWhite:[76:7] Found pattern:[76:7->76:9]\nCompletable: Cpattern Value[f]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[f]\nPath f\n[{\n    \"label\": \"first\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nfirst: int\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"second\",\n    \"kind\": 5,\n    
\"tags\": [],\n    \"detail\": \"(bool, option<someRecord>)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsecond: (bool, option<someRecord>)\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"optThird\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<[#first | #second(someRecord)]>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noptThird: option<[#first | #second(someRecord)]>\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"nest\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"nestedRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnest: nestedRecord\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 79:16\nposCursor:[79:16] posNoWhite:[79:15] Found pattern:[79:7->79:18]\nposCursor:[79:16] posNoWhite:[79:15] Found pattern:[79:14->79:17]\nposCursor:[79:16] posNoWhite:[79:15] Found pattern:[79:15->79:16]\nCompletable: Cpattern Value[f]=n->recordField(nest), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[f]\nPath f\n[{\n    \"label\": \"nested\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnested: bool\\n```\\n\\n```rescript\\ntype nestedRecord = {nested: bool}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 87:20\nposCursor:[87:20] posNoWhite:[87:19] Found pattern:[87:16->87:21]\nPpat_construct Two:[87:16->87:19]\nposCursor:[87:20] 
posNoWhite:[87:19] Found pattern:[87:19->87:21]\nPpat_construct ():[87:19->87:21]\nCompletable: Cpattern Value[z]->variantPayload::Two($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[z]\nPath z\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 90:21\nposCursor:[90:21] posNoWhite:[90:20] Found pattern:[90:16->90:22]\nPpat_construct Two:[90:16->90:19]\nposCursor:[90:21] posNoWhite:[90:20] Found pattern:[90:20->90:21]\nCompletable: Cpattern Value[z]=t->variantPayload::Two($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[z]\nPath z\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 93:23\nposCursor:[93:23] posNoWhite:[93:22] Found pattern:[93:16->93:25]\nPpat_construct Three:[93:16->93:21]\nposCursor:[93:23] posNoWhite:[93:22] Found pattern:[93:22->93:24]\nCompletable: Cpattern Value[z]->variantPayload::Three($0), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[z]\nPath z\n[{\n    \"label\": \"first\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nfirst: int\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"second\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"(bool, option<someRecord>)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsecond: (bool, 
option<someRecord>)\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"optThird\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<[#first | #second(someRecord)]>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noptThird: option<[#first | #second(someRecord)]>\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"nest\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"nestedRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnest: nestedRecord\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 96:27\nposCursor:[96:27] posNoWhite:[96:26] Found pattern:[96:16->96:28]\nPpat_construct Three:[96:16->96:21]\nposCursor:[96:27] posNoWhite:[96:26] Found pattern:[96:21->96:29]\nposCursor:[96:27] posNoWhite:[96:26] Found pattern:[96:26->96:27]\nCompletable: Cpattern Value[z]=t->variantPayload::Three($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[z]\nPath z\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 103:21\nposCursor:[103:21] posNoWhite:[103:20] Found pattern:[103:16->103:22]\nposCursor:[103:21] posNoWhite:[103:20] Found pattern:[103:20->103:21]\nPpat_construct ():[103:20->103:21]\nCompletable: Cpattern Value[b]->polyvariantPayload::two($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[b]\nPath b\n[{\n    \"label\": 
\"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 106:22\nposCursor:[106:22] posNoWhite:[106:21] Found pattern:[106:16->106:23]\nposCursor:[106:22] posNoWhite:[106:21] Found pattern:[106:21->106:22]\nCompletable: Cpattern Value[b]=t->polyvariantPayload::two($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[b]\nPath b\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 109:24\nposCursor:[109:24] posNoWhite:[109:23] Found pattern:[109:16->109:26]\nposCursor:[109:24] posNoWhite:[109:23] Found pattern:[109:23->109:25]\nCompletable: Cpattern Value[b]->polyvariantPayload::three($0), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[b]\nPath b\n[{\n    \"label\": \"first\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nfirst: int\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"second\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"(bool, option<someRecord>)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsecond: (bool, option<someRecord>)\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"optThird\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<[#first | #second(someRecord)]>\",\n    
\"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noptThird: option<[#first | #second(someRecord)]>\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"nest\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"nestedRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnest: nestedRecord\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 112:28\nposCursor:[112:28] posNoWhite:[112:27] Found pattern:[112:16->112:29]\nposCursor:[112:28] posNoWhite:[112:27] Found pattern:[112:22->112:29]\nposCursor:[112:28] posNoWhite:[112:27] Found pattern:[112:27->112:28]\nCompletable: Cpattern Value[b]=t->polyvariantPayload::three($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[b]\nPath b\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 118:15\nXXX Not found!\nCompletable: Cpattern Value[c]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[c]\nPath c\n[{\n    \"label\": \"[]\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"[$0]\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 121:17\nposCursor:[121:17] posNoWhite:[121:16] Found pattern:[121:16->121:18]\nCompletable: Cpattern Value[c]->array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[c]\nPath c\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": 
\"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 127:21\nposCursor:[127:21] posNoWhite:[127:20] Found pattern:[127:16->127:22]\nPpat_construct Some:[127:16->127:20]\nposCursor:[127:21] posNoWhite:[127:20] Found pattern:[127:20->127:22]\nPpat_construct ():[127:20->127:22]\nCompletable: Cpattern Value[o]->variantPayload::Some($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[o]\nPath o\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 134:23\nposCursor:[134:23] posNoWhite:[134:22] Found pattern:[134:16->134:25]\nPpat_construct Test:[134:16->134:20]\nCompletable: Cpattern Value[p]->variantPayload::Test($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[p]\nPath p\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 137:29\nposCursor:[137:29] posNoWhite:[137:28] Found pattern:[137:16->137:31]\nPpat_construct Test:[137:16->137:20]\nposCursor:[137:29] posNoWhite:[137:28] Found pattern:[137:20->137:32]\nCompletable: Cpattern Value[p]->variantPayload::Test($2)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[p]\nPath p\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    
\"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 140:23\nposCursor:[140:23] posNoWhite:[140:22] Found pattern:[140:16->140:31]\nPpat_construct Test:[140:16->140:20]\nposCursor:[140:23] posNoWhite:[140:22] Found pattern:[140:20->140:32]\nCompletable: Cpattern Value[p]->variantPayload::Test($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[p]\nPath p\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 143:35\nposCursor:[143:35] posNoWhite:[143:34] Found pattern:[143:16->143:37]\nPpat_construct Test:[143:16->143:20]\nposCursor:[143:35] posNoWhite:[143:34] Found pattern:[143:20->143:38]\nCompletable: Cpattern Value[p]->variantPayload::Test($3)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[p]\nPath p\n[{\n    \"label\": \"[]\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"[$0]\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 150:24\nposCursor:[150:24] posNoWhite:[150:23] Found pattern:[150:16->150:26]\nCompletable: Cpattern Value[v]->polyvariantPayload::test($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[v]\nPath v\n[{\n    \"label\": 
\"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 153:30\nposCursor:[153:30] posNoWhite:[153:29] Found pattern:[153:16->153:32]\nposCursor:[153:30] posNoWhite:[153:29] Found pattern:[153:21->153:32]\nCompletable: Cpattern Value[v]->polyvariantPayload::test($2)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[v]\nPath v\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 156:24\nposCursor:[156:24] posNoWhite:[156:23] Found pattern:[156:16->156:32]\nposCursor:[156:24] posNoWhite:[156:23] Found pattern:[156:21->156:32]\nCompletable: Cpattern Value[v]->polyvariantPayload::test($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[v]\nPath v\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 159:36\nposCursor:[159:36] posNoWhite:[159:35] Found pattern:[159:16->159:38]\nposCursor:[159:36] posNoWhite:[159:35] Found 
pattern:[159:21->159:38]\nCompletable: Cpattern Value[v]->polyvariantPayload::test($3)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[v]\nPath v\n[{\n    \"label\": \"[]\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"[$0]\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 164:17\nposCursor:[164:17] posNoWhite:[164:16] Found pattern:[164:16->164:18]\nPpat_construct ():[164:16->164:18]\nCompletable: Cpattern Value[s]->tuple($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[s]\nPath s\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 167:23\nposCursor:[167:23] posNoWhite:[167:21] Found pattern:[167:16->167:24]\nCompletable: Cpattern Value[s]->tuple($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[s]\nPath s\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 170:22\nposCursor:[170:22] posNoWhite:[170:21] Found pattern:[170:16->170:28]\nCompletable: Cpattern 
Value[s]->tuple($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[s]\nPath s\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 173:35\nXXX Not found!\nCompletable: Cpattern Value[s]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[s]\nPath s\n[{\n    \"label\": \"(_, _, _)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(bool, option<bool>, array<bool>)\",\n    \"documentation\": null,\n    \"insertText\": \"(${1:_}, ${2:_}, ${3:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 176:41\nposCursor:[176:41] posNoWhite:[176:40] Found pattern:[176:35->176:47]\nCompletable: Cpattern Value[s]->tuple($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[s]\nPath s\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": 
\"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 179:21\nXXX Not found!\nCompletable: Cpattern Value[z]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[z]\nPath z\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Two(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(someRecord, bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 182:32\nposCursor:[182:32] posNoWhite:[182:31] Found pattern:[182:16->182:34]\nposCursor:[182:32] posNoWhite:[182:31] Found pattern:[182:22->182:34]\nPpat_construct Two:[182:22->182:25]\nCompletable: Cpattern Value[z]->variantPayload::Two($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[z]\nPath z\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete 
src/CompletionPattern.res 185:48\nposCursor:[185:48] posNoWhite:[185:47] Found pattern:[185:16->185:50]\nposCursor:[185:48] posNoWhite:[185:47] Found pattern:[185:22->185:50]\nPpat_construct Three:[185:22->185:27]\nposCursor:[185:48] posNoWhite:[185:47] Found pattern:[185:27->185:53]\nCompletable: Cpattern Value[z]->variantPayload::Three($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[z]\nPath z\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 188:34\nposCursor:[188:34] posNoWhite:[188:33] Found pattern:[188:16->188:36]\nposCursor:[188:34] posNoWhite:[188:33] Found pattern:[188:23->188:36]\nCompletable: Cpattern Value[b]->polyvariantPayload::two($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[b]\nPath b\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 191:50\nposCursor:[191:50] posNoWhite:[191:49] Found pattern:[191:16->191:52]\nposCursor:[191:50] posNoWhite:[191:49] Found pattern:[191:23->191:52]\nposCursor:[191:50] posNoWhite:[191:49] Found pattern:[191:29->191:52]\nCompletable: Cpattern Value[b]->polyvariantPayload::three($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[b]\nPath b\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": 
null\n  }]\n\nComplete src/CompletionPattern.res 194:24\nposCursor:[194:24] posNoWhite:[194:23] Found pattern:[194:16->194:29]\nposCursor:[194:24] posNoWhite:[194:23] Found pattern:[194:23->194:24]\nCompletable: Cpattern Value[s]->tuple($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[s]\nPath s\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPattern.res 201:25\nposCursor:[201:25] posNoWhite:[201:24] Found pattern:[201:17->201:28]\nCompletable: Cpattern Value[ff]->recordField(someFn)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[ff]\nPath ff\n[]\n\nComplete src/CompletionPattern.res 206:16\nXXX Not found!\nCompletable: Cpattern Value[xn]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[xn]\nPath xn\n[{\n    \"label\": \"Js.Exn.Error(error)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Catches errors from JavaScript errors.\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Matches on a JavaScript error. 
Read more in the [documentation on catching JS exceptions](https://rescript-lang.org/docs/manual/latest/exception#catching-js-exceptions).\"}\n  }]\n\nComplete src/CompletionPattern.res 211:30\nXXX Not found!\nCompletable: Cpattern await Value[getThing](Nolabel)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath await Value[getThing](Nolabel)\nContextPath Value[getThing](Nolabel)\nContextPath Value[getThing]\nPath getThing\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Two(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(someRecord, bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 216:21\nposCursor:[216:21] posNoWhite:[216:20] Found pattern:[216:18->216:22]\nPpat_construct Ok:[216:18->216:20]\nposCursor:[216:21] posNoWhite:[216:20] Found pattern:[216:20->216:22]\nPpat_construct ():[216:20->216:22]\nCompletable: Cpattern Value[res]->variantPayload::Ok($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res]\nPath res\n[{\n    
\"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Two(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(someRecord, bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 219:24\nposCursor:[219:24] posNoWhite:[219:23] Found pattern:[219:18->219:25]\nPpat_construct Error:[219:18->219:23]\nposCursor:[219:24] posNoWhite:[219:23] Found pattern:[219:23->219:25]\nPpat_construct ():[219:23->219:25]\nCompletable: Cpattern Value[res]->variantPayload::Error($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res]\nPath res\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#one\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#three(someRecord, bool)\",\n    \"documentation\": {\"kind\": 
\"markdown\", \"value\": \"```rescript\\n#three(someRecord, bool)\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two(bool)\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#two(${1:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionPattern.res 227:25\nposCursor:[227:25] posNoWhite:[227:24] Found expr:[223:11->231:1]\nposCursor:[227:25] posNoWhite:[227:24] Found expr:[223:12->231:1]\nposCursor:[227:25] posNoWhite:[227:24] Found expr:[226:4->227:28]\nposCursor:[227:25] posNoWhite:[227:24] Found pattern:[227:18->227:27]\nCompletable: Cpattern Value[r]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[r]\nPath r\n[{\n    \"label\": \"second\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"(bool, option<someRecord>)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsecond: (bool, option<someRecord>)\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"optThird\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<[#first | #second(someRecord)]>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noptThird: option<[#first | #second(someRecord)]>\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }, {\n    \"label\": \"nest\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"nestedRecord\",\n    
\"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnest: nestedRecord\\n```\\n\\n```rescript\\ntype someRecord = {first: int, second: (bool, option<someRecord>), optThird: option<[#first | #second(someRecord)]>, nest: nestedRecord}\\n```\"}\n  }]\n\nComplete src/CompletionPattern.res 242:33\nposCursor:[242:33] posNoWhite:[242:32] Found pattern:[242:7->242:35]\nCompletable: Cpattern Value[hitsUse](Nolabel)->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[hitsUse](Nolabel)\nContextPath Value[hitsUse]\nPath hitsUse\n[{\n    \"label\": \"hits\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"array<string>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nhits: array<string>\\n```\\n\\n```rescript\\ntype hitsUse = {results: results, hits: array<string>}\\n```\"}\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionPipeChain.res.txt",
    "content": "Complete src/CompletionPipeChain.res 27:16\nposCursor:[27:16] posNoWhite:[27:15] Found expr:[27:11->0:-1]\nCompletable: Cpath Value[int]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[int]->\nContextPath Value[int]\nPath int\nCPPipe pathFromEnv:Integer found:true\nPath Integer.\n[{\n    \"label\": \"Integer.toInt\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.increment\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.decrement\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int => int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 30:23\nposCursor:[30:23] posNoWhite:[30:22] Found expr:[30:11->0:-1]\nCompletable: Cpath Value[toFlt](Nolabel)->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[toFlt](Nolabel)->\nContextPath Value[toFlt](Nolabel)\nContextPath Value[toFlt]\nPath toFlt\nCPPipe pathFromEnv:SuperFloat found:true\nPath SuperFloat.\n[{\n    \"label\": \"SuperFloat.toInteger\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => Integer.t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 33:38\nposCursor:[33:38] posNoWhite:[33:37] Found expr:[33:11->0:-1]\nCompletable: Cpath Value[Integer, increment](Nolabel, Nolabel)->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Integer, increment](Nolabel, Nolabel)->\nContextPath Value[Integer, increment](Nolabel, Nolabel)\nContextPath Value[Integer, increment]\nPath Integer.increment\nCPPipe pathFromEnv:Integer found:true\nPath Integer.\n[{\n    \"label\": \"Integer.toInt\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    
\"label\": \"Integer.increment\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.decrement\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int => int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 36:38\nposCursor:[36:38] posNoWhite:[36:37] Found expr:[36:11->0:-1]\nCompletable: Cpath Value[Integer, increment](Nolabel, Nolabel)->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Integer, increment](Nolabel, Nolabel)->\nContextPath Value[Integer, increment](Nolabel, Nolabel)\nContextPath Value[Integer, increment]\nPath Integer.increment\nCPPipe pathFromEnv:Integer found:true\nPath Integer.\n[{\n    \"label\": \"Integer.toInt\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.increment\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.decrement\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int => int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 39:47\nposCursor:[39:47] posNoWhite:[39:46] Found expr:[39:11->0:-1]\nCompletable: Cpath Value[Integer, decrement](Nolabel, Nolabel)->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Integer, decrement](Nolabel, Nolabel)->\nContextPath Value[Integer, decrement](Nolabel, Nolabel)\nContextPath Value[Integer, decrement]\nPath Integer.decrement\nCPPipe pathFromEnv:Integer found:true\nPath Integer.\n[{\n    \"label\": \"Integer.toInt\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.increment\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    
\"documentation\": null\n  }, {\n    \"label\": \"Integer.decrement\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int => int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 42:69\nposCursor:[42:69] posNoWhite:[42:68] Found expr:[42:11->0:-1]\nCompletable: Cpath Value[Integer, decrement](Nolabel, Nolabel)->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Integer, decrement](Nolabel, Nolabel)->\nContextPath Value[Integer, decrement](Nolabel, Nolabel)\nContextPath Value[Integer, decrement]\nPath Integer.decrement\nCPPipe pathFromEnv:Integer found:true\nPath Integer.\n[{\n    \"label\": \"Integer.toInt\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.increment\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.decrement\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int => int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 45:62\nposCursor:[45:62] posNoWhite:[45:61] Found expr:[45:11->0:-1]\nCompletable: Cpath Value[SuperFloat, fromInteger](Nolabel)->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[SuperFloat, fromInteger](Nolabel)->\nContextPath Value[SuperFloat, fromInteger](Nolabel)\nContextPath Value[SuperFloat, fromInteger]\nPath SuperFloat.fromInteger\nCPPipe pathFromEnv:SuperFloat found:true\nPath SuperFloat.\n[{\n    \"label\": \"SuperFloat.toInteger\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => Integer.t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 48:63\nposCursor:[48:63] posNoWhite:[48:62] Found expr:[48:11->48:63]\nCompletable: Cpath Value[SuperFloat, fromInteger](Nolabel)->t\nPackage opens Pervasives.JsxModules.place 
holder\nResolved opens 1 pervasives\nContextPath Value[SuperFloat, fromInteger](Nolabel)->t\nContextPath Value[SuperFloat, fromInteger](Nolabel)\nContextPath Value[SuperFloat, fromInteger]\nPath SuperFloat.fromInteger\nCPPipe pathFromEnv:SuperFloat found:true\nPath SuperFloat.t\n[{\n    \"label\": \"SuperFloat.toInteger\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => Integer.t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 51:82\nposCursor:[51:82] posNoWhite:[51:81] Found expr:[51:11->0:-1]\nCompletable: Cpath Value[CompletionSupport, Test, make](Nolabel)->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[CompletionSupport, Test, make](Nolabel)->\nContextPath Value[CompletionSupport, Test, make](Nolabel)\nContextPath Value[CompletionSupport, Test, make]\nPath CompletionSupport.Test.make\nCPPipe pathFromEnv:Test found:true\nPath CompletionSupport.Test.\n[{\n    \"label\": \"CompletionSupport.Test.add\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"CompletionSupport.Test.addSelf\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 54:78\nposCursor:[54:78] posNoWhite:[54:77] Found expr:[54:11->0:-1]\nCompletable: Cpath Value[CompletionSupport, Test, addSelf](Nolabel, Nolabel)->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[CompletionSupport, Test, addSelf](Nolabel, Nolabel)->\nContextPath Value[CompletionSupport, Test, addSelf](Nolabel, Nolabel)\nContextPath Value[CompletionSupport, Test, addSelf]\nPath CompletionSupport.Test.addSelf\nCPPipe pathFromEnv:Test found:true\nPath CompletionSupport.Test.\n[{\n    \"label\": \"CompletionSupport.Test.add\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, 
{\n    \"label\": \"CompletionSupport.Test.addSelf\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 58:5\nposCursor:[58:5] posNoWhite:[58:4] Found expr:[57:8->0:-1]\nCompletable: Cpath Value[Js, Array2, forEach](Nolabel, Nolabel)->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Js, Array2, forEach](Nolabel, Nolabel)->\nContextPath Value[Js, Array2, forEach](Nolabel, Nolabel)\nContextPath Value[Js, Array2, forEach]\nPath Js.Array2.forEach\nCPPipe pathFromEnv: found:true\nPath Js_array2.\n[]\n\nComplete src/CompletionPipeChain.res 62:6\nposCursor:[62:6] posNoWhite:[62:5] Found expr:[61:8->62:6]\nCompletable: Cpath Value[Belt, Array, reduce](Nolabel, Nolabel, Nolabel)->t\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Belt, Array, reduce](Nolabel, Nolabel, Nolabel)->t\nContextPath Value[Belt, Array, reduce](Nolabel, Nolabel, Nolabel)\nContextPath Value[Belt, Array, reduce]\nPath Belt.Array.reduce\nPath Belt.Int.t\n[{\n    \"label\": \"Belt.Int.toString\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `string`. 
Uses the JavaScript `String` constructor under the hood.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toString(1) === \\\"1\\\") /* true */\\n```\\n\"}\n  }, {\n    \"label\": \"Belt.Int.toFloat\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int => float\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nConverts a given `int` to a `float`.\\n\\n## Examples\\n\\n```rescript\\nJs.log(Belt.Int.toFloat(1) === 1.0) /* true */\\n```\\n\"}\n  }]\n\nComplete src/CompletionPipeChain.res 70:12\nposCursor:[70:12] posNoWhite:[70:11] Found expr:[70:3->0:-1]\nCompletable: Cpath Value[aliased]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[aliased]->\nContextPath Value[aliased]\nPath aliased\nCPPipe pathFromEnv:CompletionSupport.Test found:false\nPath CompletionSupport.Test.\n[{\n    \"label\": \"CompletionSupport.Test.add\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"CompletionSupport.Test.addSelf\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 73:15\nposCursor:[73:15] posNoWhite:[73:14] Found expr:[73:3->0:-1]\nCompletable: Cpath Value[notAliased]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[notAliased]->\nContextPath Value[notAliased]\nPath notAliased\nCPPipe pathFromEnv:CompletionSupport.Test found:false\nPath CompletionSupport.Test.\n[{\n    \"label\": \"CompletionSupport.Test.add\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"CompletionSupport.Test.addSelf\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 82:30\nposCursor:[82:30] posNoWhite:[82:29] Found expr:[76:15->93:1]\nPexp_apply 
...[76:15->76:46] (~prepare77:3->77:10=...[77:11->77:24], ~render78:3->78:9=...[78:10->91:3], ...[92:2->92:4])\nposCursor:[82:30] posNoWhite:[82:29] Found expr:[78:10->91:3]\nposCursor:[82:30] posNoWhite:[82:29] Found expr:[78:10->91:3]\nposCursor:[82:30] posNoWhite:[82:29] Found expr:[79:4->90:14]\nposCursor:[82:30] posNoWhite:[82:29] Found expr:[82:7->90:14]\nposCursor:[82:30] posNoWhite:[82:29] Found expr:[82:7->82:30]\nCompletable: Cpath Value[props].support.root->ren\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[props].support.root->ren\nContextPath Value[props].support.root\nContextPath Value[props].support\nContextPath Value[props]\nPath props\nContextPath Value[props]->support\nContextPath Value[props]\nPath props\nCPPipe pathFromEnv:CompletionSupport2.Internal found:false\nPath CompletionSupport2.Internal.support\nContextPath Value[props].support->root\nContextPath Value[props].support\nContextPath Value[props]\nPath props\nContextPath Value[props]->support\nContextPath Value[props]\nPath props\nCPPipe pathFromEnv:CompletionSupport2.Internal found:false\nPath CompletionSupport2.Internal.support\nCPPipe pathFromEnv:CompletionSupport.Nested found:false\nPath CompletionSupport.Nested.root\nCPPipe pathFromEnv:ReactDOM.Client.Root found:false\nPath ReactDOM.Client.Root.ren\n[{\n    \"label\": \"ReactDOM.Client.Root.render\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, React.element) => unit\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 88:16\nposCursor:[88:16] posNoWhite:[88:15] Found expr:[76:15->93:1]\nPexp_apply ...[76:15->76:46] (~prepare77:3->77:10=...[77:11->77:24], ~render78:3->78:9=...[78:10->91:3], ...[92:2->92:4])\nposCursor:[88:16] posNoWhite:[88:15] Found expr:[78:10->91:3]\nposCursor:[88:16] posNoWhite:[88:15] Found expr:[78:10->91:3]\nposCursor:[88:16] posNoWhite:[88:15] Found expr:[79:4->90:14]\nposCursor:[88:16] posNoWhite:[88:15] Found 
expr:[84:4->90:14]\nposCursor:[88:16] posNoWhite:[88:15] Found expr:[85:4->90:14]\nposCursor:[88:16] posNoWhite:[88:15] Found expr:[88:7->90:14]\nposCursor:[88:16] posNoWhite:[88:15] Found expr:[88:7->88:16]\nCompletable: Cpath Value[root]->ren\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[root]->ren\nContextPath Value[root]\nPath root\nCPPipe pathFromEnv:ReactDOM.Client.Root found:false\nPath ReactDOM.Client.Root.ren\n[{\n    \"label\": \"ReactDOM.Client.Root.render\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, React.element) => unit\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 95:20\nposCursor:[95:20] posNoWhite:[95:19] Found expr:[95:3->95:21]\nPexp_apply ...[95:3->95:14] (...[95:15->0:-1])\nposCursor:[95:20] posNoWhite:[95:19] Found expr:[95:15->0:-1]\nposCursor:[95:20] posNoWhite:[95:19] Found expr:[95:15->0:-1]\nCompletable: Cpath Value[int]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[int]->\nContextPath Value[int]\nPath int\nCPPipe pathFromEnv:Integer found:true\nPath Integer.\n[{\n    \"label\": \"Integer.toInt\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.increment\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int) => t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Integer.decrement\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, int => int) => t\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 98:21\nposCursor:[98:21] posNoWhite:[98:20] Found expr:[98:3->98:22]\nPexp_apply ...[98:3->98:14] (...[98:15->98:21])\nposCursor:[98:21] posNoWhite:[98:20] Found expr:[98:15->98:21]\nCompletable: Cpath Value[int]->t\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[int]->t\nContextPath Value[int]\nPath 
int\nCPPipe pathFromEnv:Integer found:true\nPath Integer.t\n[{\n    \"label\": \"Integer.toInt\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeChain.res 103:8\nposCursor:[103:8] posNoWhite:[103:7] Found expr:[103:3->103:8]\nCompletable: Cpath Value[r]->la\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[r]->la\nContextPath Value[r]\nPath r\nCPPipe pathFromEnv:Js.Re found:false\nPath Js.Re.la\n[{\n    \"label\": \"Js.Re.lastIndex\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nReturns the index where the next match will start its search. This property\\nwill be modified when the RegExp object is used, if the global (\\\"g\\\") flag is\\nset.\\n\\n## Examples\\n\\n```rescript\\nlet re = %re(\\\"/ab*TODO/g\\\")\\nlet str = \\\"abbcdefabh\\\"\\n\\nlet break = ref(false)\\nwhile !break.contents {\\n  switch Js.Re.exec_(re, str) {\\n  | Some(result) => Js.Nullable.iter(Js.Re.captures(result)[0], (. match_) => {\\n      let next = Belt.Int.toString(Js.Re.lastIndex(re))\\n      Js.log(\\\"Found \\\" ++ (match_ ++ (\\\". Next match starts at \\\" ++ next)))\\n    })\\n  | None => break := true\\n  }\\n}\\n```\\n\\nSee\\n[`RegExp: lastIndex`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/lastIndex)\\non MDN.\\n\"}\n  }]\n\nComplete src/CompletionPipeChain.res 112:7\nposCursor:[112:7] posNoWhite:[112:6] Found expr:[112:3->0:-1]\nCompletable: Cpath Value[xx]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[xx]->\nContextPath Value[xx]\nPath xx\nCPPipe pathFromEnv:Xyz found:true\nPath Xyz.\n[{\n    \"label\": \"Xyz.do\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"xx => string\",\n    \"documentation\": null\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionPipeProperty.res.txt",
    "content": "Complete src/CompletionPipeProperty.res 21:17\nposCursor:[21:17] posNoWhite:[21:16] Found expr:[21:3->21:17]\nPexp_field [21:3->21:16] _:[23:0->21:17]\nCompletable: Cpath Value[sprite].anchor.\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[sprite].anchor.\"\"\nContextPath Value[sprite].anchor\nContextPath Value[sprite]\nPath sprite\nContextPath Value[sprite]->anchor\nContextPath Value[sprite]\nPath sprite\nCPPipe pathFromEnv:Sprite found:true\nPath Sprite.anchor\nContextPath Value[sprite].anchor->\nContextPath Value[sprite].anchor\nContextPath Value[sprite]\nPath sprite\nContextPath Value[sprite]->anchor\nContextPath Value[sprite]\nPath sprite\nCPPipe pathFromEnv:Sprite found:true\nPath Sprite.anchor\nCPPipe pathFromEnv:ObservablePoint found:true\nPath ObservablePoint.\n[{\n    \"label\": \"x\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nx: int\\n```\\n\\n```rescript\\ntype op = {mutable x: int, mutable y: int}\\n```\"}\n  }, {\n    \"label\": \"y\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ny: int\\n```\\n\\n```rescript\\ntype op = {mutable x: int, mutable y: int}\\n```\"}\n  }, {\n    \"label\": \"->ObservablePoint.setBoth\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(op, float) => unit\",\n    \"documentation\": null,\n    \"sortText\": \"setBoth\",\n    \"insertText\": \"->ObservablePoint.setBoth\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 21, \"character\": 16}, \"end\": {\"line\": 21, \"character\": 17}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->ObservablePoint.set\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(op, float, float) => unit\",\n    \"documentation\": null,\n    \"sortText\": \"set\",\n    \"insertText\": 
\"->ObservablePoint.set\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 21, \"character\": 16}, \"end\": {\"line\": 21, \"character\": 17}},\n      \"newText\": \"\"\n      }]\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionPipeSubmodules.res.txt",
    "content": "Complete src/CompletionPipeSubmodules.res 13:20\nposCursor:[13:20] posNoWhite:[13:19] Found expr:[13:11->21:8]\nCompletable: Cpath Value[A, B1, xx]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[A, B1, xx]->\nContextPath Value[A, B1, xx]\nPath A.B1.xx\nCPPipe pathFromEnv:A.B1 found:true\nPath A.B1.\n[{\n    \"label\": \"A.B1.d\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"b1 => string\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeSubmodules.res 17:18\nposCursor:[17:18] posNoWhite:[17:17] Found expr:[17:11->21:8]\nCompletable: Cpath Value[A, x].v->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[A, x].v->\nContextPath Value[A, x].v\nContextPath Value[A, x]\nPath A.x\nContextPath Value[A, x]->v\nContextPath Value[A, x]\nPath A.x\nCPPipe pathFromEnv:A found:true\nPath A.v\nCPPipe pathFromEnv:A.B1 found:true\nPath A.B1.\n[{\n    \"label\": \"A.B1.d\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"b1 => string\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionPipeSubmodules.res 41:20\nposCursor:[41:20] posNoWhite:[41:19] Found expr:[41:11->0:-1]\nCompletable: Cpath Value[E, e].v.v->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[E, e].v.v->\nContextPath Value[E, e].v.v\nContextPath Value[E, e].v\nContextPath Value[E, e]\nPath E.e\nContextPath Value[E, e]->v\nContextPath Value[E, e]\nPath E.e\nCPPipe pathFromEnv:E found:true\nPath E.v\nContextPath Value[E, e].v->v\nContextPath Value[E, e].v\nContextPath Value[E, e]\nPath E.e\nContextPath Value[E, e]->v\nContextPath Value[E, e]\nPath E.e\nCPPipe pathFromEnv:E found:true\nPath E.v\nCPPipe pathFromEnv:D found:true\nPath D.v\nCPPipe pathFromEnv:C found:false\nPath C.\n[{\n    \"label\": \"C.do\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null\n  
}]\n\nComplete src/CompletionPipeSubmodules.res 45:21\nposCursor:[45:21] posNoWhite:[45:20] Found expr:[45:11->0:-1]\nCompletable: Cpath Value[E, e].v.v2->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[E, e].v.v2->\nContextPath Value[E, e].v.v2\nContextPath Value[E, e].v\nContextPath Value[E, e]\nPath E.e\nContextPath Value[E, e]->v\nContextPath Value[E, e]\nPath E.e\nCPPipe pathFromEnv:E found:true\nPath E.v\nContextPath Value[E, e].v->v2\nContextPath Value[E, e].v\nContextPath Value[E, e]\nPath E.e\nContextPath Value[E, e]->v\nContextPath Value[E, e]\nPath E.e\nCPPipe pathFromEnv:E found:true\nPath E.v\nCPPipe pathFromEnv:D found:true\nPath D.v2\nCPPipe pathFromEnv:D.C2 found:true\nPath D.C2.\n[{\n    \"label\": \"D.C2.do\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t2 => string\",\n    \"documentation\": null\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionResolve.res.txt",
    "content": "Completion resolve: Belt_Array\n\"\\nUtilities for `Array` functions.\\n\\n### Note about index syntax\\n\\nCode like `arr[0]` does *not* compile to JavaScript `arr[0]`. Reason transforms\\nthe `[]` index syntax into a function: `Array.get(arr, 0)`. By default, this\\nuses the default standard library's `Array.get` function, which may raise an\\nexception if the index isn't found. If you `open Belt`, it will use the\\n`Belt.Array.get` function which returns options instead of raising exceptions. \\n[See this for more information](../belt.mdx#array-access-runtime-safety).\\n\"\n\nCompletion resolve: ModuleStuff\n\" This is a top level module doc. \"\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionSupport.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/CompletionSupport2.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/CompletionTypeAnnotation.res.txt",
    "content": "Complete src/CompletionTypeAnnotation.res 9:22\nXXX Not found!\nCompletable: Cexpression Type[someRecord]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[someRecord]\nPath someRecord\n[{\n    \"label\": \"{}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someRecord = {age: int, name: string}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 12:24\nXXX Not found!\nCompletable: Cexpression Type[someRecord]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[someRecord]\nPath someRecord\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype someRecord = {age: int, name: string}\\n```\"}\n  }, {\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype someRecord = {age: int, name: string}\\n```\"}\n  }]\n\nComplete src/CompletionTypeAnnotation.res 15:23\nXXX Not found!\nCompletable: Cexpression Type[someVariant]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[someVariant]\nPath someVariant\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    
\"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"Two($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 18:25\nXXX Not found!\nCompletable: Cexpression Type[someVariant]=O\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[someVariant]\nPath someVariant\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"sortText\": \"A One\",\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Obj\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Obj\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"Obj\",\n      \"filePath\": \"src/CompletionTypeAnnotation.res\"\n    }\n  }, {\n    \"label\": \"Objects\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Objects\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"Objects\",\n      \"filePath\": \"src/CompletionTypeAnnotation.res\"\n    }\n  }]\n\nComplete src/CompletionTypeAnnotation.res 21:27\nXXX Not found!\nCompletable: Cexpression Type[somePolyVariant]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[somePolyVariant]\nPath somePolyVariant\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #two(bool)]\\n```\"},\n    \"insertText\": \"#one\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two(bool)\",\n    \"documentation\": {\"kind\": 
\"markdown\", \"value\": \"```rescript\\n#two(bool)\\n```\\n\\n```rescript\\n[#one | #two(bool)]\\n```\"},\n    \"insertText\": \"#two($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 24:30\nXXX Not found!\nCompletable: Cexpression Type[somePolyVariant]=#o\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[somePolyVariant]\nPath somePolyVariant\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #two(bool)]\\n```\"},\n    \"insertText\": \"one\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 29:20\nXXX Not found!\nCompletable: Cexpression Type[someFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[someFunc]\nPath someFunc\n[{\n    \"label\": \"(v1, v2) => {}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(int, string) => bool\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"(${1:v1}, ${2:v2}) => {${0:()}}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 34:21\nXXX Not found!\nCompletable: Cexpression Type[someTuple]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[someTuple]\nPath someTuple\n[{\n    \"label\": \"(_, _)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(bool, option<bool>)\",\n    \"documentation\": null,\n    \"insertText\": \"(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 37:28\nXXX Not found!\nCompletable: Cexpression Type[someTuple]->tuple($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[someTuple]\nPath someTuple\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    
\"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null,\n    \"insertText\": \"Some($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(true)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(false)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/CompletionTypeAnnotation.res 40:31\nXXX Not found!\nCompletable: Cexpression option<Type[someVariant]>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath option<Type[someVariant]>\nContextPath Type[someVariant]\nPath someVariant\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two(bool)\\n```\"}\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"Some($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(One)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"Some(One)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(Two(_))\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"Some(Two($0))\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete 
src/CompletionTypeAnnotation.res 43:37\nXXX Not found!\nCompletable: Cexpression option<Type[someVariant]>->variantPayload::Some($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath option<Type[someVariant]>\nContextPath Type[someVariant]\nPath someVariant\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"Two($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 46:30\nXXX Not found!\nCompletable: Cexpression array<Type[someVariant]>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath array<Type[someVariant]>\nContextPath Type[someVariant]\nPath someVariant\n[{\n    \"label\": \"[]\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"[$0]\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 49:32\nXXX Not found!\nCompletable: Cexpression array<Type[someVariant]>->array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath array<Type[someVariant]>\nContextPath Type[someVariant]\nPath someVariant\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": 
\"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"Two($0)\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 52:38\nXXX Not found!\nCompletable: Cexpression array<option<Type[someVariant]>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath array<option<Type[someVariant]>>\nContextPath option<Type[someVariant]>\nContextPath Type[someVariant]\nPath someVariant\n[{\n    \"label\": \"[]\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"option<someVariant>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noption<someVariant>\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"[$0]\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeAnnotation.res 55:45\nXXX Not found!\nCompletable: Cexpression option<array<Type[someVariant]>>->variantPayload::Some($0), array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath option<array<Type[someVariant]>>\nContextPath array<Type[someVariant]>\nContextPath Type[someVariant]\nPath someVariant\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": 
\"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool)\\n```\"},\n    \"insertText\": \"Two($0)\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/CompletionTypeT.res.txt",
    "content": "Complete src/CompletionTypeT.res 4:26\nXXX Not found!\nCompletable: Cpattern Value[date]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[date]\nPath date\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"Js.Date.t\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"Js.Date.t\",\n    \"documentation\": null,\n    \"insertText\": \"Some(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(date)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"date\",\n    \"documentation\": null,\n    \"insertText\": \"Some(${0:date})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/CompletionTypeT.res 7:27\nXXX Not found!\nCompletable: Cexpression Type[withDate]->recordField(date)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[withDate]\nPath withDate\n[{\n    \"label\": \"Js.Date.makeWithYMD()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(~year: float, ~month: float, ~date: float, unit) => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Date.makeWithYMD($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Date.makeWithYMDHM()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(\\n  ~year: float,\\n  ~month: float,\\n  ~date: float,\\n  ~hours: float,\\n  ~minutes: float,\\n  unit,\\n) => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Date.makeWithYMDHM($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Date.make()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"unit => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Date.make($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Date.fromString()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string => t\",\n    \"documentation\": null,\n    
\"insertText\": \"Js.Date.fromString($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Date.fromFloat()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"float => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Date.fromFloat($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Date.parse()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Date.parse($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Date.makeWithYM()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(~year: float, ~month: float, unit) => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Date.makeWithYM($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Date.makeWithYMDHMS()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(\\n  ~year: float,\\n  ~month: float,\\n  ~date: float,\\n  ~hours: float,\\n  ~minutes: float,\\n  ~seconds: float,\\n  unit,\\n) => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Date.makeWithYMDHMS($0)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Js.Date.makeWithYMDH()\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(\\n  ~year: float,\\n  ~month: float,\\n  ~date: float,\\n  ~hours: float,\\n  unit,\\n) => t\",\n    \"documentation\": null,\n    \"insertText\": \"Js.Date.makeWithYMDH($0)\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Component.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/Component.resi.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/CreateInterface.res.txt",
    "content": "Create Interface src/CreateInterface.res\ntype r = {name: string, age: int}\nlet add: (~x: int, ~y: int) => int\n@react.component\nlet make: (~name: string) => React.element\nmodule Other: {\n  @react.component\n  let otherComponentName: (~name: string) => React.element\n}\nmodule Mod: {\n  @react.component\n  let make: (~name: string) => React.element\n}\nmodule type ModTyp = {\n  @react.component\n  let make: (~name: string) => React.element\n}\n@module(\"path\") external dirname: string => string = \"dirname\"\n@module(\"path\") @variadic\nexternal join: array<string> => string = \"join\"\n@val\nexternal padLeft: (\n  string,\n  @unwrap\n  [\n    | #Str(string)\n    | #Int(int)\n  ],\n) => string = \"padLeft\"\n@inline\nlet f1: int\n@inline let f2: string\n@genType @inline\nlet f3: int\n@genType @inline\nlet f4: string\n@genType @inline let f5: float\nmodule RFS: {\n  @module(\"fs\")\n  external readFileSync: (\n    ~name: string,\n    @string\n    [\n      | #utf8\n      | @as(\"ascii\") #useAscii\n    ],\n  ) => string = \"readFileSync\"\n}\nmodule Functor: () =>\n{\n  @react.component\n  let make: unit => React.element\n}\nmodule type FT = {\n  module Functor: (\n    X: {\n      let a: int\n      @react.component\n      let make: (~name: string) => React.element\n      let b: int\n    },\n    Y: ModTyp,\n  ) =>\n  {\n    @react.component\n    let make: (~name: string) => React.element\n  }\n}\nmodule NormaList = List\nmodule BeltList = Belt.List\nmodule type MT2 = ModTyp\nmodule rec RM: ModTyp\nand D: ModTyp\nmodule type OptT = {\n  @react.component\n  let withOpt1: (~x: int=?, ~y: int) => int\n  module type Opt2 = {\n    @react.component\n    let withOpt2: (~x: int=?, ~y: int) => int\n  }\n  module type Opt3 = {\n    @react.component\n    let withOpt3: (~x: option<int>, ~y: int) => int\n  }\n}\nmodule Opt: {\n  @react.component\n  let withOpt1: (~x: int=?, ~y: int) => int\n  module Opt2: {\n    @react.component\n    let withOpt2: (~x: int=?, 
~y: int) => int\n  }\n  module type Opt2 = {\n    @react.component\n    let withOpt2: (~x: int=?, ~y: int) => int\n  }\n  module Opt3: {\n    @react.component\n    let withOpt3: (~x: option<int>, ~y: int) => int\n  }\n  module type Opt3 = {\n    @react.component\n    let withOpt3: (~x: option<int>, ~y: int) => int\n  }\n}\nmodule Opt2: OptT\nmodule Opt3 = Opt\nmodule Memo: {\n  @react.component\n  let make: (~name: string) => React.element\n}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Cross.res.txt",
    "content": "References src/Cross.res 0:17\n[\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 15}, \"end\": {\"line\": 0, \"character\": 25}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 3, \"character\": 16}, \"end\": {\"line\": 3, \"character\": 26}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 5, \"character\": 13}, \"end\": {\"line\": 5, \"character\": 23}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 7, \"character\": 16}, \"end\": {\"line\": 7, \"character\": 26}}},\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 0}, \"end\": {\"line\": 0, \"character\": 0}}}\n]\n\nReferences src/Cross.res 9:31\n[\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 9, \"character\": 28}, \"end\": {\"line\": 9, \"character\": 51}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 12, \"character\": 29}, \"end\": {\"line\": 12, \"character\": 52}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 14, \"character\": 26}, \"end\": {\"line\": 14, \"character\": 49}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 16, \"character\": 29}, \"end\": {\"line\": 16, \"character\": 52}}},\n{\"uri\": \"ReferencesWithInterface.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 0}, \"end\": {\"line\": 0, \"character\": 0}}},\n{\"uri\": \"ReferencesWithInterface.resi\", \"range\": {\"start\": {\"line\": 0, \"character\": 0}, \"end\": {\"line\": 0, \"character\": 0}}}\n]\n\nRename src/Cross.res 18:13 RenameWithInterfacePrime\n[\n{\n  \"kind\": \"rename\",\n  \"oldUri\": \"RenameWithInterface.resi\",\n  \"newUri\": \"RenameWithInterfacePrime.resi\"\n},\n{\n  \"kind\": \"rename\",\n  \"oldUri\": \"RenameWithInterface.res\",\n  \"newUri\": \"RenameWithInterfacePrime.res\"\n},\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"Cross.res\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 18, 
\"character\": 8}, \"end\": {\"line\": 18, \"character\": 27}},\n      \"newText\": \"RenameWithInterfacePrime\"\n      }, {\n      \"range\": {\"start\": {\"line\": 21, \"character\": 8}, \"end\": {\"line\": 21, \"character\": 27}},\n      \"newText\": \"RenameWithInterfacePrime\"\n      }]\n  }\n]\n\nRename src/Cross.res 21:28 xPrime\n[\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"RenameWithInterface.resi\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}},\n      \"newText\": \"xPrime\"\n      }]\n  },\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"RenameWithInterface.res\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}},\n      \"newText\": \"xPrime\"\n      }]\n  },\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"Cross.res\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 18, \"character\": 28}, \"end\": {\"line\": 18, \"character\": 29}},\n      \"newText\": \"xPrime\"\n      }, {\n      \"range\": {\"start\": {\"line\": 21, \"character\": 28}, \"end\": {\"line\": 21, \"character\": 29}},\n      \"newText\": \"xPrime\"\n      }]\n  }\n]\n\nTypeDefinition src/Cross.res 24:5\n{\"uri\": \"TypeDefinition.res\", \"range\": {\"start\": {\"line\": 2, \"character\": 0}, \"end\": {\"line\": 2, \"character\": 28}}}\n\nDefinition src/Cross.res 27:32\n{\"uri\": \"DefinitionWithInterface.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}}}\n\nDefinition src/Cross.res 30:36\n{\"uri\": \"DefinitionWithInterface.res\", \"range\": {\"start\": {\"line\": 3, \"character\": 5}, \"end\": {\"line\": 3, \"character\": 6}}}\n\nTypeDefinition src/Cross.res 33:37\n{\"uri\": \"DefinitionWithInterface.resi\", \"range\": {\"start\": {\"line\": 3, \"character\": 0}, \"end\": {\"line\": 3, \"character\": 6}}}\n\nComplete 
src/Cross.res 36:28\nposCursor:[36:28] posNoWhite:[36:27] Found expr:[36:3->36:28]\nPexp_ident DefinitionWithInterface.a:[36:3->36:28]\nCompletable: Cpath Value[DefinitionWithInterface, a]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[DefinitionWithInterface, a]\nPath DefinitionWithInterface.a\n[]\n\nDefinition src/Cross.res 39:39\n{\"uri\": \"DefinitionWithInterface.res\", \"range\": {\"start\": {\"line\": 9, \"character\": 6}, \"end\": {\"line\": 9, \"character\": 7}}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Dce.res.txt",
    "content": "DCE src/Dce.res\nissues:1\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Debug.res.txt",
    "content": "Definition src/Debug.res 2:27\n{\"uri\": \"ShadowedBelt.res\", \"range\": {\"start\": {\"line\": 1, \"character\": 6}, \"end\": {\"line\": 1, \"character\": 9}}}\n\nComplete src/Debug.res 11:8\nposCursor:[11:8] posNoWhite:[11:7] Found expr:[11:5->11:8]\nPexp_ident eqN:[11:5->11:8]\nCompletable: Cpath Value[eqN]\nRaw opens: 1 Js.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives js.ml\nContextPath Value[eqN]\nPath eqN\n[{\n    \"label\": \"eqNullable\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a, nullable<'a>) => bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"eqNull\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"('a, null<'a>) => bool\",\n    \"documentation\": null\n  }]\n\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Definition.res.txt",
    "content": "Definition src/Definition.res 2:8\n{\"uri\": \"Definition.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 6}}}\n\nDefinition src/Definition.res 10:23\n{\"uri\": \"Definition.res\", \"range\": {\"start\": {\"line\": 6, \"character\": 7}, \"end\": {\"line\": 6, \"character\": 13}}}\n\nHover src/Definition.res 14:14\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n('a => 'b, list<'a>) => list<'b>\\n```\\n---\\n [List.map f [a1; ...; an]] applies function [f] to [a1, ..., an],\\n   and builds the list [[f a1; ...; f an]]\\n   with the results returned by [f].  Not tail-recursive. \"}}\n\nHover src/Definition.res 18:14\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n(list<'a>, 'a => 'b) => list<'b>\\n```\"}}\n\nHover src/Definition.res 23:3\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n(int, int) => int\\n```\"}}\n\nDefinition src/Definition.res 26:3\n{\"uri\": \"Definition.res\", \"range\": {\"start\": {\"line\": 21, \"character\": 4}, \"end\": {\"line\": 21, \"character\": 13}}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/DefinitionWithInterface.res.txt",
    "content": "Definition src/DefinitionWithInterface.res 0:4\n{\"uri\": \"DefinitionWithInterface.resi\", \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}}}\n\nDefinition src/DefinitionWithInterface.res 9:6\n{\"uri\": \"DefinitionWithInterface.resi\", \"range\": {\"start\": {\"line\": 6, \"character\": 2}, \"end\": {\"line\": 6, \"character\": 12}}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/DefinitionWithInterface.resi.txt",
    "content": "Definition src/DefinitionWithInterface.resi 0:4\n{\"uri\": \"DefinitionWithInterface.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}}}\n\nDefinition src/DefinitionWithInterface.resi 6:6\n{\"uri\": \"DefinitionWithInterface.res\", \"range\": {\"start\": {\"line\": 9, \"character\": 6}, \"end\": {\"line\": 9, \"character\": 7}}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Destructuring.res.txt",
    "content": "Complete src/Destructuring.res 4:11\nposCursor:[4:11] posNoWhite:[4:9] Found pattern:[4:4->4:12]\nCompletable: Cpattern Value[x]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x]\nPath x\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype x = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/Destructuring.res 7:8\nposCursor:[7:8] posNoWhite:[7:7] Found pattern:[7:7->7:9]\nCompletable: Cpattern Value[x]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x]\nPath x\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype x = {name: string, age: int}\\n```\"}\n  }, {\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype x = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/Destructuring.res 11:13\nposCursor:[11:13] posNoWhite:[11:11] Found expr:[10:8->14:1]\nposCursor:[11:13] posNoWhite:[11:11] Found expr:[10:9->14:1]\nposCursor:[11:13] posNoWhite:[11:11] Found expr:[11:2->13:6]\nposCursor:[11:13] posNoWhite:[11:11] Found pattern:[11:6->11:14]\nCompletable: Cpattern Value[x]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x]\nPath x\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype x = {name: string, age: int}\\n```\"}\n  }]\n\nComplete 
src/Destructuring.res 17:10\nposCursor:[17:10] posNoWhite:[17:9] Found expr:[16:9->20:1]\nposCursor:[17:10] posNoWhite:[17:9] Found expr:[16:10->20:1]\nposCursor:[17:10] posNoWhite:[17:9] Found expr:[17:5->19:11]\nposCursor:[17:10] posNoWhite:[17:9] Found pattern:[17:9->17:11]\nCompletable: Cpattern Value[x]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x]\nPath x\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype x = {name: string, age: int}\\n```\"}\n  }, {\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype x = {name: string, age: int}\\n```\"}\n  }]\n\nComplete src/Destructuring.res 31:8\nposCursor:[31:8] posNoWhite:[31:7] Found pattern:[31:7->31:9]\nCompletable: Cpattern Value[x]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x]\nPath x\n[{\n    \"label\": \"someField\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nsomeField: int\\n```\\n\\n```rescript\\ntype recordWithOptField = {someField: int, someOptField: option<bool>}\\n```\"}\n  }, {\n    \"label\": \"?someOptField\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"option<bool>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"someOptField is an optional field, and needs to be destructured using '?'.\\n\\n```rescript\\n?someOptField: option<bool>\\n```\\n\\n```rescript\\ntype recordWithOptField = {someField: int, someOptField: option<bool>}\\n```\"}\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Div.res.txt",
    "content": "Hover src/Div.res 0:10\ngetLocItem #3: heuristic for <div>\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n(\\n  string,\\n  ~props: ReactDOM_V3.domProps=?,\\n  array<React.element>,\\n) => React.element\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype ReactDOM_V3.domProps = Props.domProps\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22ReactDOM_V3.res%22%2C57%2C2%5D)\\n\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype React.element = Jsx.element\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22React.res%22%2C0%2C0%5D)\\n\"}}\n\nComplete src/Div.res 3:17\nposCursor:[3:17] posNoWhite:[3:16] Found expr:[3:4->3:17]\nJSX <div:[3:4->3:7] dangerous[3:8->3:17]=...[3:8->3:17]> _children:None\nCompletable: Cjsx([div], dangerous, [dangerous])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath ReactDOM.domProps\nPath PervasivesU.JsxDOM.domProps\n[{\n    \"label\": \"dangerouslySetInnerHTML\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"{\\\"__html\\\": string}\",\n    \"documentation\": null\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/DocComments.res.txt",
    "content": "Hover src/DocComments.res 9:9\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\\n---\\n  Doc comment with a triple-backquote example\\\\n  \\\\n  ```res example\\\\n    let a = 10\\\\n    /*\\\\n     * stuff\\\\n     */\\\\n  ```\\\\n\"}}\n\nHover src/DocComments.res 22:6\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\\n---\\n\\n  Doc comment with a triple-backquote example\\n  \\n  ```res example\\n    let a = 10\\n    /*\\n     * stuff\\n     */\\n  ```\\n\"}}\n\nHover src/DocComments.res 33:9\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\\n---\\n  Doc comment with a triple-backquote example\\\\n  \\\\n  ```res example\\\\n    let a = 10\\\\n    let b = 20\\\\n  ```\\\\n\"}}\n\nHover src/DocComments.res 44:6\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\\n---\\n\\n  Doc comment with a triple-backquote example\\n  \\n  ```res example\\n    let a = 10\\n    let b = 20\\n  ```\\n\"}}\n\nHover src/DocComments.res 48:5\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\\n---\\nNew doc comment format\"}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/DocExtraction2.res.txt",
    "content": "Documentation extraction src/DocExtraction2.res\nextracting docs for src/DocExtraction2.res\npreferring found resi file for impl: src/DocExtraction2.resi\n\n{\n  \"name\": \"DocExtraction2\",\n  \"docstrings\": [\"Module level doc here.\"],\n  \"items\": [\n  {\n    \"id\": \"DocExtraction2.t\",\n    \"kind\": \"type\",\n    \"name\": \"t\",\n    \"signature\": \"type t\",\n    \"docstrings\": [\"Type t is pretty cool.\"]\n  }, \n  {\n    \"id\": \"DocExtraction2.make\",\n    \"kind\": \"value\",\n    \"name\": \"make\",\n    \"signature\": \"let make: unit => t\",\n    \"docstrings\": [\"Makerz of stuffz.\"]\n  }, \n  {\n    \"id\": \"DocExtraction2.InnerModule\",\n    \"name\": \"InnerModule\",\n    \"kind\": \"module\",\n    \"docstrings\": [],\n    \"items\": [\n    {\n      \"id\": \"DocExtraction2.InnerModule.t\",\n      \"kind\": \"type\",\n      \"name\": \"t\",\n      \"signature\": \"type t\",\n      \"docstrings\": [\"This type is also t.\"]\n    }, \n    {\n      \"id\": \"DocExtraction2.InnerModule.make\",\n      \"kind\": \"value\",\n      \"name\": \"make\",\n      \"signature\": \"let make: unit => t\",\n      \"docstrings\": [\"Maker of tea.\"]\n    }]\n  }]\n}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/DocExtraction2.resi.txt",
    "content": "Documentation extraction src/DocExtraction2.resi\nextracting docs for src/DocExtraction2.resi\n\n{\n  \"name\": \"DocExtraction2\",\n  \"docstrings\": [\"Module level doc here.\"],\n  \"items\": [\n  {\n    \"id\": \"DocExtraction2.t\",\n    \"kind\": \"type\",\n    \"name\": \"t\",\n    \"signature\": \"type t\",\n    \"docstrings\": [\"Type t is pretty cool.\"]\n  }, \n  {\n    \"id\": \"DocExtraction2.make\",\n    \"kind\": \"value\",\n    \"name\": \"make\",\n    \"signature\": \"let make: unit => t\",\n    \"docstrings\": [\"Makerz of stuffz.\"]\n  }, \n  {\n    \"id\": \"DocExtraction2.InnerModule\",\n    \"name\": \"InnerModule\",\n    \"kind\": \"module\",\n    \"docstrings\": [],\n    \"items\": [\n    {\n      \"id\": \"DocExtraction2.InnerModule.t\",\n      \"kind\": \"type\",\n      \"name\": \"t\",\n      \"signature\": \"type t\",\n      \"docstrings\": [\"This type is also t.\"]\n    }, \n    {\n      \"id\": \"DocExtraction2.InnerModule.make\",\n      \"kind\": \"value\",\n      \"name\": \"make\",\n      \"signature\": \"let make: unit => t\",\n      \"docstrings\": [\"Maker of tea.\"]\n    }]\n  }]\n}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/DocExtractionRes.res.txt",
    "content": "Documentation extraction src/DocExtractionRes.res\nextracting docs for src/DocExtractionRes.res\n\n{\n  \"name\": \"DocExtractionRes\",\n  \"docstrings\": [\"Module level documentation goes here.\"],\n  \"items\": [\n  {\n    \"id\": \"DocExtractionRes.t\",\n    \"kind\": \"type\",\n    \"name\": \"t\",\n    \"signature\": \"type t = {name: string, online: bool}\",\n    \"docstrings\": [\"This type represents stuff.\"],\n    \"detail\": \n    {\n      \"kind\": \"record\",\n      \"items\": [{\n        \"name\": \"name\",\n        \"optional\": false,\n        \"docstrings\": [\"The name of the stuff.\"],\n        \"signature\": \"string\"\n      }, {\n        \"name\": \"online\",\n        \"optional\": false,\n        \"docstrings\": [\"Whether stuff is online.\"],\n        \"signature\": \"bool\"\n      }]\n    }\n  }, \n  {\n    \"id\": \"DocExtractionRes.make\",\n    \"kind\": \"value\",\n    \"name\": \"make\",\n    \"signature\": \"let make: string => t\",\n    \"docstrings\": [\"Create stuff.\\n\\n```rescript example\\nlet stuff = make(\\\"My name\\\")\\n```\"]\n  }, \n  {\n    \"id\": \"DocExtractionRes.asOffline\",\n    \"kind\": \"value\",\n    \"name\": \"asOffline\",\n    \"signature\": \"let asOffline: t => t\",\n    \"docstrings\": [\"Stuff goes offline.\"]\n  }, \n  {\n    \"id\": \"DocExtractionRes.SomeConstant\\\\\",\n    \"kind\": \"value\",\n    \"name\": \"SomeConstant\\\\\",\n    \"signature\": \"let SomeConstant\\\\: int\",\n    \"docstrings\": [\"exotic identifier\"]\n  }, \n  {\n    \"id\": \"DocExtractionRes.SomeInnerModule\",\n    \"name\": \"SomeInnerModule\",\n    \"kind\": \"module\",\n    \"docstrings\": [\"Another module level docstring here.\"],\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.SomeInnerModule.status\",\n      \"kind\": \"type\",\n      \"name\": \"status\",\n      \"signature\": \"type status = Started(t) | Stopped | Idle\",\n      \"docstrings\": [],\n      \"detail\": \n      {\n        
\"kind\": \"variant\",\n        \"items\": [\n        {\n          \"name\": \"Started\",\n          \"docstrings\": [\"If this is started or not\"],\n          \"signature\": \"Started(t)\"\n        }, \n        {\n          \"name\": \"Stopped\",\n          \"docstrings\": [\"Stopped?\"],\n          \"signature\": \"Stopped\"\n        }, \n        {\n          \"name\": \"Idle\",\n          \"docstrings\": [\"Now idle.\"],\n          \"signature\": \"Idle\"\n        }]\n      }\n    }, \n    {\n      \"id\": \"DocExtractionRes.SomeInnerModule.validInputs\",\n      \"kind\": \"type\",\n      \"name\": \"validInputs\",\n      \"signature\": \"type validInputs = [\\n  | #\\\"needs-escaping\\\"\\n  | #something\\n  | #status(status)\\n  | #withPayload(int)\\n]\",\n      \"docstrings\": [\"These are all the valid inputs.\"]\n    }, \n    {\n      \"id\": \"DocExtractionRes.SomeInnerModule.callback\",\n      \"kind\": \"type\",\n      \"name\": \"callback\",\n      \"signature\": \"type callback = (t, ~status: status) => unit\",\n      \"docstrings\": []\n    }]\n  }, \n  {\n    \"id\": \"DocExtractionRes.AnotherModule\",\n    \"name\": \"AnotherModule\",\n    \"kind\": \"module\",\n    \"docstrings\": [\"Mighty fine module here too!\"],\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.LinkedModule\",\n      \"kind\": \"moduleAlias\",\n      \"name\": \"LinkedModule\",\n      \"docstrings\": [\"This links another module. 
Neat.\"],\n      \"items\": []\n    }, \n    {\n      \"id\": \"DocExtractionRes.AnotherModule.callback\",\n      \"kind\": \"type\",\n      \"name\": \"callback\",\n      \"signature\": \"type callback = SomeInnerModule.status => unit\",\n      \"docstrings\": [\"Testing what this looks like.\"]\n    }, \n    {\n      \"id\": \"DocExtractionRes.AnotherModule.isGoodStatus\",\n      \"kind\": \"value\",\n      \"name\": \"isGoodStatus\",\n      \"signature\": \"let isGoodStatus: SomeInnerModule.status => bool\",\n      \"docstrings\": []\n    }, \n    {\n      \"id\": \"DocExtractionRes.AnotherModule.someVariantWithInlineRecords\",\n      \"kind\": \"type\",\n      \"name\": \"someVariantWithInlineRecords\",\n      \"signature\": \"type someVariantWithInlineRecords =\\n  | SomeStuff({offline: bool, online?: bool})\",\n      \"docstrings\": [\"Trying how it looks with an inline record in a variant.\"],\n      \"detail\": \n      {\n        \"kind\": \"variant\",\n        \"items\": [\n        {\n          \"name\": \"SomeStuff\",\n          \"docstrings\": [\"This has inline records...\"],\n          \"signature\": \"SomeStuff({offline: bool, online?: bool})\",\n          \"payload\": {\n            \"kind\": \"inlineRecord\",\n            \"fields\": [{\n              \"name\": \"offline\",\n              \"optional\": false,\n              \"docstrings\": [],\n              \"signature\": \"bool\"\n            }, {\n              \"name\": \"online\",\n              \"optional\": true,\n              \"docstrings\": [\"Is the user online?\"],\n              \"signature\": \"option<bool>\"\n            }]\n          }\n        }]\n      }\n    }, \n    {\n      \"id\": \"DocExtractionRes.AnotherModule.domRoot\",\n      \"kind\": \"type\",\n      \"name\": \"domRoot\",\n      \"signature\": \"type domRoot = unit => ReactDOM.Client.Root.t\",\n      \"docstrings\": [\"Callback to get the DOM root...\"]\n    }]\n  }, \n  {\n    \"id\": 
\"DocExtractionRes.ModuleWithThingsThatShouldNotBeExported\",\n    \"name\": \"ModuleWithThingsThatShouldNotBeExported\",\n    \"kind\": \"module\",\n    \"docstrings\": [],\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.ModuleWithThingsThatShouldNotBeExported.t\",\n      \"kind\": \"type\",\n      \"name\": \"t\",\n      \"signature\": \"type t\",\n      \"docstrings\": [\"The type t is stuff.\"]\n    }, \n    {\n      \"id\": \"DocExtractionRes.ModuleWithThingsThatShouldNotBeExported.make\",\n      \"kind\": \"value\",\n      \"name\": \"make\",\n      \"signature\": \"let make: unit => t\",\n      \"docstrings\": [\"The maker of stuff!\"]\n    }]\n  }]\n}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/DocumentSymbol.res.txt",
    "content": "DocumentSymbol src/DocumentSymbol.res\n[\n{\n  \"name\": \"MyList\",\n  \"kind\": 2,\n  \"range\": {\"start\": {\"line\": 0, \"character\": 7}, \"end\": {\"line\": 0, \"character\": 25}},\n  \"selectionRange\": {\"start\": {\"line\": 0, \"character\": 7}, \"end\": {\"line\": 0, \"character\": 25}}\n},\n{\n  \"name\": \"Dep\",\n  \"kind\": 2,\n  \"range\": {\"start\": {\"line\": 2, \"character\": 7}, \"end\": {\"line\": 7, \"character\": 1}},\n  \"selectionRange\": {\"start\": {\"line\": 2, \"character\": 7}, \"end\": {\"line\": 7, \"character\": 1}},\n  \"children\": [\n  {\n    \"name\": \"customDouble\",\n    \"kind\": 12,\n    \"range\": {\"start\": {\"line\": 6, \"character\": 2}, \"end\": {\"line\": 6, \"character\": 35}},\n    \"selectionRange\": {\"start\": {\"line\": 6, \"character\": 2}, \"end\": {\"line\": 6, \"character\": 35}}\n  }]\n},\n{\n  \"name\": \"Lib\",\n  \"kind\": 2,\n  \"range\": {\"start\": {\"line\": 9, \"character\": 7}, \"end\": {\"line\": 12, \"character\": 1}},\n  \"selectionRange\": {\"start\": {\"line\": 9, \"character\": 7}, \"end\": {\"line\": 12, \"character\": 1}},\n  \"children\": [\n  {\n    \"name\": \"foo\",\n    \"kind\": 12,\n    \"range\": {\"start\": {\"line\": 10, \"character\": 2}, \"end\": {\"line\": 10, \"character\": 55}},\n    \"selectionRange\": {\"start\": {\"line\": 10, \"character\": 2}, \"end\": {\"line\": 10, \"character\": 55}}\n  },\n  {\n    \"name\": \"next\",\n    \"kind\": 12,\n    \"range\": {\"start\": {\"line\": 11, \"character\": 2}, \"end\": {\"line\": 11, \"character\": 48}},\n    \"selectionRange\": {\"start\": {\"line\": 11, \"character\": 2}, \"end\": {\"line\": 11, \"character\": 48}}\n  }]\n},\n{\n  \"name\": \"op\",\n  \"kind\": 13,\n  \"range\": {\"start\": {\"line\": 14, \"character\": 0}, \"end\": {\"line\": 14, \"character\": 16}},\n  \"selectionRange\": {\"start\": {\"line\": 14, \"character\": 0}, \"end\": {\"line\": 14, \"character\": 16}}\n},\n{\n  \"name\": 
\"ForAuto\",\n  \"kind\": 2,\n  \"range\": {\"start\": {\"line\": 16, \"character\": 7}, \"end\": {\"line\": 20, \"character\": 1}},\n  \"selectionRange\": {\"start\": {\"line\": 16, \"character\": 7}, \"end\": {\"line\": 20, \"character\": 1}},\n  \"children\": [\n  {\n    \"name\": \"t\",\n    \"kind\": 26,\n    \"range\": {\"start\": {\"line\": 17, \"character\": 2}, \"end\": {\"line\": 17, \"character\": 14}},\n    \"selectionRange\": {\"start\": {\"line\": 17, \"character\": 2}, \"end\": {\"line\": 17, \"character\": 14}}\n  },\n  {\n    \"name\": \"abc\",\n    \"kind\": 12,\n    \"range\": {\"start\": {\"line\": 18, \"character\": 2}, \"end\": {\"line\": 18, \"character\": 32}},\n    \"selectionRange\": {\"start\": {\"line\": 18, \"character\": 2}, \"end\": {\"line\": 18, \"character\": 32}}\n  },\n  {\n    \"name\": \"abd\",\n    \"kind\": 12,\n    \"range\": {\"start\": {\"line\": 19, \"character\": 2}, \"end\": {\"line\": 19, \"character\": 32}},\n    \"selectionRange\": {\"start\": {\"line\": 19, \"character\": 2}, \"end\": {\"line\": 19, \"character\": 32}}\n  }]\n},\n{\n  \"name\": \"fa\",\n  \"kind\": 16,\n  \"range\": {\"start\": {\"line\": 22, \"character\": 0}, \"end\": {\"line\": 22, \"character\": 22}},\n  \"selectionRange\": {\"start\": {\"line\": 22, \"character\": 0}, \"end\": {\"line\": 22, \"character\": 22}}\n},\n{\n  \"name\": \"O\",\n  \"kind\": 2,\n  \"range\": {\"start\": {\"line\": 24, \"character\": 7}, \"end\": {\"line\": 29, \"character\": 1}},\n  \"selectionRange\": {\"start\": {\"line\": 24, \"character\": 7}, \"end\": {\"line\": 29, \"character\": 1}},\n  \"children\": [\n  {\n    \"name\": \"Comp\",\n    \"kind\": 2,\n    \"range\": {\"start\": {\"line\": 25, \"character\": 9}, \"end\": {\"line\": 28, \"character\": 3}},\n    \"selectionRange\": {\"start\": {\"line\": 25, \"character\": 9}, \"end\": {\"line\": 28, \"character\": 3}},\n    \"children\": [\n    {\n      \"name\": \"make\",\n      \"kind\": 12,\n      \"range\": 
{\"start\": {\"line\": 26, \"character\": 4}, \"end\": {\"line\": 27, \"character\": 98}},\n      \"selectionRange\": {\"start\": {\"line\": 26, \"character\": 4}, \"end\": {\"line\": 27, \"character\": 98}}\n    }]\n  }]\n},\n{\n  \"name\": \"zzz\",\n  \"kind\": 16,\n  \"range\": {\"start\": {\"line\": 31, \"character\": 0}, \"end\": {\"line\": 31, \"character\": 12}},\n  \"selectionRange\": {\"start\": {\"line\": 31, \"character\": 0}, \"end\": {\"line\": 31, \"character\": 12}}\n}\n]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/DotPipeCompletionSpec.res.txt",
    "content": "Complete src/DotPipeCompletionSpec.res 15:5\nposCursor:[15:5] posNoWhite:[15:4] Found expr:[15:3->15:5]\nPexp_field [15:3->15:4] _:[18:0->15:5]\nCompletable: Cpath Value[n].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[n].\"\"\nContextPath Value[n]\nPath n\nContextPath Value[n]->\nContextPath Value[n]\nPath n\nCPPipe pathFromEnv:SomeModule found:true\nPath SomeModule.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype t = {name: string}\\n```\"}\n  }, {\n    \"label\": \"->SomeModule.withUnlabelledArgumentNotFirst\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(~name: string=?, t) => unit\",\n    \"documentation\": null,\n    \"sortText\": \"withUnlabelledArgumentNotFirst\",\n    \"insertText\": \"->SomeModule.withUnlabelledArgumentNotFirst\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 15, \"character\": 4}, \"end\": {\"line\": 15, \"character\": 5}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeModule.getName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getName\",\n    \"insertText\": \"->SomeModule.getName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 15, \"character\": 4}, \"end\": {\"line\": 15, \"character\": 5}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 44:6\nposCursor:[44:6] posNoWhite:[44:5] Found expr:[44:3->44:6]\nPexp_field [44:3->44:5] _:[47:0->44:6]\nCompletable: Cpath Value[nn].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[nn].\"\"\nContextPath Value[nn]\nPath nn\nContextPath Value[nn]->\nContextPath Value[nn]\nPath nn\nCPPipe 
pathFromEnv:SomeOtherModule found:true\nPath SomeOtherModule.\nPath DotPipeCompletionSpec.CompleteFromThisToo.\nPath DotPipeCompletionSpec.SomeOtherModule.\n[{\n    \"label\": \"nname\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnname: string\\n```\\n\\n```rescript\\ntype typeOutsideModule = {nname: string}\\n```\"}\n  }, {\n    \"label\": \"->SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName\",\n    \"insertText\": \"->SomeOtherModule.getNName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 44, \"character\": 5}, \"end\": {\"line\": 44, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName2\",\n    \"insertText\": \"->SomeOtherModule.getNName2\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 44, \"character\": 5}, \"end\": {\"line\": 44, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->CompleteFromThisToo.a\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"a\",\n    \"insertText\": \"->CompleteFromThisToo.a\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 44, \"character\": 5}, \"end\": {\"line\": 44, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName\",\n    \"insertText\": \"->SomeOtherModule.getNName\",\n    \"additionalTextEdits\": [{\n      \"range\": 
{\"start\": {\"line\": 44, \"character\": 5}, \"end\": {\"line\": 44, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName2\",\n    \"insertText\": \"->SomeOtherModule.getNName2\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 44, \"character\": 5}, \"end\": {\"line\": 44, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 62:5\nposCursor:[62:5] posNoWhite:[62:4] Found expr:[62:3->62:5]\nPexp_field [62:3->62:4] _:[65:0->62:5]\nCompletable: Cpath Value[a].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[a].\"\"\nContextPath Value[a]\nPath a\nContextPath Value[a]->\nContextPath Value[a]\nPath a\nCPPipe pathFromEnv:A found:true\nPath A.\nPath B.\n[{\n    \"label\": \"->A.withA\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"a => unit\",\n    \"documentation\": null,\n    \"sortText\": \"withA\",\n    \"insertText\": \"->A.withA\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 62, \"character\": 4}, \"end\": {\"line\": 62, \"character\": 5}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->B.b\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"A.a => int\",\n    \"documentation\": null,\n    \"sortText\": \"b\",\n    \"insertText\": \"->B.b\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 62, \"character\": 4}, \"end\": {\"line\": 62, \"character\": 5}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 67:6\nposCursor:[67:6] posNoWhite:[67:5] Found expr:[67:3->67:6]\nPexp_field [67:3->67:5] _:[70:0->67:6]\nCompletable: Cpath Value[xx].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 
pervasives\nContextPath Value[xx].\"\"\nContextPath Value[xx]\nPath xx\nContextPath Value[xx]->\nContextPath Value[xx]\nPath xx\nCPPipe pathFromEnv:CompletionFromModule.SomeModule found:false\nPath CompletionFromModule.SomeModule.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype t = {name: string}\\n```\"}\n  }, {\n    \"label\": \"->CompletionFromModule.SomeModule.getName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getName\",\n    \"insertText\": \"->CompletionFromModule.SomeModule.getName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 67, \"character\": 5}, \"end\": {\"line\": 67, \"character\": 6}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 75:9\nposCursor:[75:9] posNoWhite:[75:8] Found expr:[75:3->75:9]\nPexp_field [75:3->75:7] u:[75:8->75:9]\nCompletable: Cpath Value[ffff].u\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[ffff].u\nContextPath Value[ffff]\nPath ffff\nContextPath Value[ffff]->u\nContextPath Value[ffff]\nPath ffff\nPath Js.Array2.u\n[{\n    \"label\": \"->Js.Array2.unshiftMany\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, array<'a>) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nAdds the elements in the second array argument at the beginning of the first\\narray argument, returning the new number of elements in the array. 
*This\\nfunction modifies the original array.* See\\n[`Array.unshift`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/unshift)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nlet arr = [\\\"d\\\", \\\"e\\\"]\\nJs.Array2.unshiftMany(arr, [\\\"a\\\", \\\"b\\\", \\\"c\\\"]) == 5\\narr == [\\\"a\\\", \\\"b\\\", \\\"c\\\", \\\"d\\\", \\\"e\\\"]\\n```\\n\"},\n    \"sortText\": \"unshiftMany\",\n    \"insertText\": \"->Js.Array2.unshiftMany\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 75, \"character\": 7}, \"end\": {\"line\": 75, \"character\": 8}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.Array2.unshift\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a) => int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nAdds the given element to the array, returning the new number of elements in\\nthe array. *This function modifies the original array.* See\\n[`Array.unshift`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/unshift)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nlet arr = [\\\"b\\\", \\\"c\\\", \\\"d\\\"]\\nJs.Array2.unshift(arr, \\\"a\\\") == 4\\narr == [\\\"a\\\", \\\"b\\\", \\\"c\\\", \\\"d\\\"]\\n```\\n\"},\n    \"sortText\": \"unshift\",\n    \"insertText\": \"->Js.Array2.unshift\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 75, \"character\": 7}, \"end\": {\"line\": 75, \"character\": 8}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.Array2.unsafe_get\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, int) => 'a\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nReturns the value at the given position in the array if the position is in\\nbounds; returns the JavaScript value `undefined` otherwise.\\n\\n## Examples\\n\\n```rescript\\nlet arr = [100, 101, 102, 103]\\nJs.Array2.unsafe_get(arr, 3) == 
103\\nJs.Array2.unsafe_get(arr, 4) // returns undefined\\n```\\n\"},\n    \"sortText\": \"unsafe_get\",\n    \"insertText\": \"->Js.Array2.unsafe_get\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 75, \"character\": 7}, \"end\": {\"line\": 75, \"character\": 8}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.Array2.unsafe_set\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(array<'a>, int, 'a) => unit\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nSets the value at the given position in the array if the position is in bounds.\\nIf the index is out of bounds, well, “here there be dragons.“\\n\\n*This function modifies the original array.*\\n\\n## Examples\\n\\n```rescript\\nlet arr = [100, 101, 102, 103]\\nJs.Array2.unsafe_set(arr, 3, 99)\\n// result is [100, 101, 102, 99];\\n\\nJs.Array2.unsafe_set(arr, 4, 88)\\n// result is [100, 101, 102, 99, 88]\\n\\nJs.Array2.unsafe_set(arr, 6, 77)\\n// result is [100, 101, 102, 99, 88, <1 empty item>, 77]\\n\\nJs.Array2.unsafe_set(arr, -1, 66)\\n// you don't want to know.\\n```\\n\"},\n    \"sortText\": \"unsafe_set\",\n    \"insertText\": \"->Js.Array2.unsafe_set\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 75, \"character\": 7}, \"end\": {\"line\": 75, \"character\": 8}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 80:7\nposCursor:[80:7] posNoWhite:[80:6] Found expr:[80:3->80:7]\nPexp_field [80:3->80:6] _:[84:0->80:7]\nCompletable: Cpath Value[nnn].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[nnn].\"\"\nContextPath Value[nnn]\nPath nnn\nContextPath Value[nnn]->\nContextPath Value[nnn]\nPath nnn\nCPPipe pathFromEnv: found:true\nPath DotPipeCompletionSpec.\nPath DotPipeCompletionSpec.SomeOtherModule.\n[{\n    \"label\": \"nname\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": 
{\"kind\": \"markdown\", \"value\": \"```rescript\\nnname: string\\n```\\n\\n```rescript\\ntype typeOutsideModule = {nname: string}\\n```\"}\n  }, {\n    \"label\": \"->doWithTypeOutsideModule\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"doWithTypeOutsideModule\",\n    \"insertText\": \"->doWithTypeOutsideModule\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 80, \"character\": 6}, \"end\": {\"line\": 80, \"character\": 7}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName\",\n    \"insertText\": \"->SomeOtherModule.getNName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 80, \"character\": 6}, \"end\": {\"line\": 80, \"character\": 7}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName2\",\n    \"insertText\": \"->SomeOtherModule.getNName2\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 80, \"character\": 6}, \"end\": {\"line\": 80, \"character\": 7}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 86:43\nposCursor:[86:43] posNoWhite:[86:42] Found expr:[86:3->86:43]\nposCursor:[86:43] posNoWhite:[86:42] Found expr:[86:9->86:43]\nPexp_field [86:9->86:38] filt:[86:39->86:43]\nCompletable: Cpath Value[Js, Array2, filter](Nolabel).filt\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Js, Array2, filter](Nolabel).filt\nContextPath Value[Js, Array2, filter](Nolabel)\nContextPath Value[Js, Array2, filter]\nPath Js.Array2.filter\nContextPath Value[Js, 
Array2, filter](Nolabel, Nolabel)->filt\nContextPath Value[Js, Array2, filter](Nolabel, Nolabel)\nContextPath Value[Js, Array2, filter]\nPath Js.Array2.filter\nPath Js.Array2.filt\n[{\n    \"label\": \"->Js.Array2.filter\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a => bool) => t<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the given predicate function (the second argument) to each element in\\nthe array; the result is an array of those elements for which the predicate\\nfunction returned `true`. See\\n[`Array.filter`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/filter)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nlet nonEmpty = s => s != \\\"\\\"\\nJs.Array2.filter([\\\"abc\\\", \\\"\\\", \\\"\\\", \\\"def\\\", \\\"ghi\\\"], nonEmpty) == [\\\"abc\\\", \\\"def\\\", \\\"ghi\\\"]\\n```\\n\"},\n    \"sortText\": \"filter\",\n    \"insertText\": \"->Js.Array2.filter\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 86, \"character\": 38}, \"end\": {\"line\": 86, \"character\": 39}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.Array2.filteri\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, ('a, int) => bool) => t<'a>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nEach element of the given array are passed to the predicate function. 
The\\nreturn value is an array of all those elements for which the predicate function\\nreturned `true`.\\n\\nSee\\n[`Array.filter`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/filter)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\n// keep only positive elements at odd indices\\nlet positiveOddElement = (item, index) => mod(index, 2) == 1 && item > 0\\n\\nJs.Array2.filteri([6, 3, 5, 8, 7, -4, 1], positiveOddElement) == [3, 8]\\n```\\n\"},\n    \"sortText\": \"filteri\",\n    \"insertText\": \"->Js.Array2.filteri\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 86, \"character\": 38}, \"end\": {\"line\": 86, \"character\": 39}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 89:70\nposCursor:[89:70] posNoWhite:[89:69] Found expr:[89:3->89:70]\nposCursor:[89:70] posNoWhite:[89:69] Found expr:[89:40->89:70]\nPexp_field [89:40->89:63] includ:[89:64->89:70]\nCompletable: Cpath Value[Js, Array2, joinWith](Nolabel).includ\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Js, Array2, joinWith](Nolabel).includ\nContextPath Value[Js, Array2, joinWith](Nolabel)\nContextPath Value[Js, Array2, joinWith]\nPath Js.Array2.joinWith\nContextPath Value[Js, Array2, joinWith](Nolabel, Nolabel)->includ\nContextPath Value[Js, Array2, joinWith](Nolabel, Nolabel)\nContextPath Value[Js, Array2, joinWith]\nPath Js.Array2.joinWith\nPath Js.String2.includ\n[{\n    \"label\": \"->Js.String2.includesFrom\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t, int) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `includes(str, searchValue start)` returns `true` if `searchValue` is\\nfound anywhere within `str` starting at character number `start` (where 0 is\\nthe first character), `false` otherwise.\\n\\nSee 
[`String.includes`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.includesFrom(\\\"programmer\\\", \\\"gram\\\", 1) == true\\nJs.String2.includesFrom(\\\"programmer\\\", \\\"gram\\\", 4) == false\\nJs.String2.includesFrom(`대한민국`, `한`, 1) == true\\n```\\n\"},\n    \"sortText\": \"includesFrom\",\n    \"insertText\": \"->Js.String2.includesFrom\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 89, \"character\": 63}, \"end\": {\"line\": 89, \"character\": 64}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Js.String2.includes\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t, t) => bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nES2015: `includes(str, searchValue)` returns `true` if `searchValue` is found\\nanywhere within `str`, false otherwise.\\n\\nSee [`String.includes`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.includes(\\\"programmer\\\", \\\"gram\\\") == true\\nJs.String2.includes(\\\"programmer\\\", \\\"er\\\") == true\\nJs.String2.includes(\\\"programmer\\\", \\\"pro\\\") == true\\nJs.String2.includes(\\\"programmer.dat\\\", \\\"xyz\\\") == false\\n```\\n\"},\n    \"sortText\": \"includes\",\n    \"insertText\": \"->Js.String2.includes\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 89, \"character\": 63}, \"end\": {\"line\": 89, \"character\": 64}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 94:40\nposCursor:[94:40] posNoWhite:[94:39] Found expr:[94:3->94:40]\nposCursor:[94:40] posNoWhite:[94:39] Found expr:[94:8->94:40]\nPexp_field [94:8->94:30] toUpperCa:[94:31->94:40]\nCompletable: Cpath Value[Js, String2, toLowerCase].toUpperCa\nPackage opens Pervasives.JsxModules.place holder\nResolved 
opens 1 pervasives\nContextPath Value[Js, String2, toLowerCase].toUpperCa\nContextPath Value[Js, String2, toLowerCase]\nPath Js.String2.toLowerCase\nContextPath Value[Js, String2, toLowerCase](Nolabel)->toUpperCa\nContextPath Value[Js, String2, toLowerCase](Nolabel)\nContextPath Value[Js, String2, toLowerCase]\nPath Js.String2.toLowerCase\nPath Js.String2.toUpperCa\n[{\n    \"label\": \"->Js.String2.toUpperCase\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`toUpperCase(str)` converts `str` to upper case using the locale-insensitive\\ncase mappings in the Unicode Character Database. Notice that the conversion can\\nexpand the number of letters in the result; for example the German ß\\ncapitalizes to two Ses in a row.\\n\\nSee [`String.toUpperCase`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/toUpperCase)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.toUpperCase(\\\"abc\\\") == \\\"ABC\\\"\\nJs.String2.toUpperCase(`Straße`) == `STRASSE`\\nJs.String2.toUpperCase(`πς`) == `ΠΣ`\\n```\\n\"},\n    \"sortText\": \"toUpperCase\",\n    \"insertText\": \"->Js.String2.toUpperCase\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 94, \"character\": 30}, \"end\": {\"line\": 94, \"character\": 31}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 97:63\nposCursor:[97:63] posNoWhite:[97:62] Found expr:[97:3->97:63]\nposCursor:[97:63] posNoWhite:[97:62] Found expr:[97:32->97:63]\nPexp_field [97:32->97:54] toLowerC:[97:55->97:63]\nCompletable: Cpath Value[Js, String2, toUpperCase].toLowerC\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Js, String2, toUpperCase].toLowerC\nContextPath Value[Js, String2, toUpperCase]\nPath Js.String2.toUpperCase\nContextPath Value[Js, String2, toUpperCase](Nolabel)->toLowerC\nContextPath Value[Js, 
String2, toUpperCase](Nolabel)\nContextPath Value[Js, String2, toUpperCase]\nPath Js.String2.toUpperCase\nPath Js.String2.toLowerC\n[{\n    \"label\": \"->Js.String2.toLowerCase\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\n`toLowerCase(str)` converts `str` to lower case using the locale-insensitive\\ncase mappings in the Unicode Character Database. Notice that the conversion can\\ngive different results depending upon context, for example with the Greek\\nletter sigma, which has two different lower case forms; one when it is the last\\ncharacter in a string and another when it is not.\\n\\nSee [`String.toLowerCase`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/toLowerCase)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.String2.toLowerCase(\\\"ABC\\\") == \\\"abc\\\"\\nJs.String2.toLowerCase(`ΣΠ`) == `σπ`\\nJs.String2.toLowerCase(`ΠΣ`) == `πς`\\n```\\n\"},\n    \"sortText\": \"toLowerCase\",\n    \"insertText\": \"->Js.String2.toLowerCase\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 97, \"character\": 54}, \"end\": {\"line\": 97, \"character\": 55}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 101:7\nposCursor:[101:7] posNoWhite:[101:6] Found expr:[100:9->104:1]\nposCursor:[101:7] posNoWhite:[101:6] Found expr:[100:10->104:1]\nposCursor:[101:7] posNoWhite:[101:6] Found expr:[101:5->103:3]\nPexp_field [101:5->101:6] t:[103:2->103:3]\nCompletable: Cpath Value[t].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[t].\"\"\nContextPath Value[t]\nPath t\nContextPath Value[t]->\nContextPath Value[t]\nPath t\nCPPipe pathFromEnv: found:true\nPath DotPipeCompletionSpec.\nPath DotPipeCompletionSpec.SomeOtherModule.\n[{\n    \"label\": \"nname\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    
\"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nnname: string\\n```\\n\\n```rescript\\ntype typeOutsideModule = {nname: string}\\n```\"}\n  }, {\n    \"label\": \"->doWithTypeOutsideModule\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"doWithTypeOutsideModule\",\n    \"insertText\": \"->doWithTypeOutsideModule\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 101, \"character\": 6}, \"end\": {\"line\": 101, \"character\": 7}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName\",\n    \"insertText\": \"->SomeOtherModule.getNName\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 101, \"character\": 6}, \"end\": {\"line\": 101, \"character\": 7}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->SomeOtherModule.getNName2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"typeOutsideModule => string\",\n    \"documentation\": null,\n    \"sortText\": \"getNName2\",\n    \"insertText\": \"->SomeOtherModule.getNName2\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 101, \"character\": 6}, \"end\": {\"line\": 101, \"character\": 7}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 108:27\nXXX Not found!\nCompletable: Cpath Module[Dot]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[Dot]\nPath Dot\n[{\n    \"label\": \"DotPipeCompletionSpec\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module DotPipeCompletionSpec\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"DotPipeCompletionSpec\",\n      \"filePath\": \"src/DotPipeCompletionSpec.res\"\n    }\n  
}]\n\nComplete src/DotPipeCompletionSpec.res 111:39\nposCursor:[111:39] posNoWhite:[111:38] Found expr:[111:24->111:40]\nposCursor:[111:39] posNoWhite:[111:38] Found expr:[111:25->111:39]\nPexp_construct CompletionPipe:[111:25->111:39] None\nCompletable: Cpath Value[CompletionPipe]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[CompletionPipe]\nPath CompletionPipe\n[{\n    \"label\": \"CompletionPipeChain\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module CompletionPipeChain\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"CompletionPipeChain\",\n      \"filePath\": \"src/DotPipeCompletionSpec.res\"\n    }\n  }, {\n    \"label\": \"CompletionPipeProperty\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module CompletionPipeProperty\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"CompletionPipeProperty\",\n      \"filePath\": \"src/DotPipeCompletionSpec.res\"\n    }\n  }, {\n    \"label\": \"CompletionPipeSubmodules\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module CompletionPipeSubmodules\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"CompletionPipeSubmodules\",\n      \"filePath\": \"src/DotPipeCompletionSpec.res\"\n    }\n  }]\n\nComplete src/DotPipeCompletionSpec.res 114:44\nposCursor:[114:44] posNoWhite:[114:43] Found expr:[114:24->114:45]\nposCursor:[114:44] posNoWhite:[114:43] Found expr:[114:41->114:44]\nPexp_construct Dot:[114:41->114:44] None\nCompletable: Cpath Value[Dot]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Dot]\nPath Dot\n[{\n    \"label\": \"DotPipeCompletionSpec\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module DotPipeCompletionSpec\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"DotPipeCompletionSpec\",\n      \"filePath\": \"src/DotPipeCompletionSpec.res\"\n    }\n  
}]\n\nComplete src/DotPipeCompletionSpec.res 122:11\nposCursor:[122:11] posNoWhite:[122:10] Found expr:[122:3->122:11]\nPexp_field [122:3->122:10] _:[128:0->122:11]\nCompletable: Cpath Value[someObj].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someObj].\"\"\nContextPath Value[someObj]\nPath someObj\nContextPath Value[someObj]->\nContextPath Value[someObj]\nPath someObj\n[{\n    \"label\": \"[\\\"age\\\"]\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"{\\\"age\\\": int, \\\"name\\\": string}\",\n    \"documentation\": null,\n    \"insertText\": \"[\\\"age\\\"]\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 122, \"character\": 10}, \"end\": {\"line\": 122, \"character\": 11}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"[\\\"name\\\"]\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"{\\\"age\\\": int, \\\"name\\\": string}\",\n    \"documentation\": null,\n    \"insertText\": \"[\\\"name\\\"]\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 122, \"character\": 10}, \"end\": {\"line\": 122, \"character\": 11}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 125:13\nposCursor:[125:13] posNoWhite:[125:12] Found expr:[125:3->125:13]\nPexp_field [125:3->125:10] na:[125:11->125:13]\nCompletable: Cpath Value[someObj].na\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someObj].na\nContextPath Value[someObj]\nPath someObj\nContextPath Value[someObj]->na\nContextPath Value[someObj]\nPath someObj\n[{\n    \"label\": \"[\\\"name\\\"]\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"{\\\"age\\\": int, \\\"name\\\": string}\",\n    \"documentation\": null,\n    \"insertText\": \"[\\\"name\\\"]\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 125, \"character\": 10}, \"end\": {\"line\": 125, \"character\": 
11}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/DotPipeCompletionSpec.res 144:10\nposCursor:[144:10] posNoWhite:[144:9] Found expr:[144:3->144:10]\nPexp_field [144:3->144:9] _:[147:0->144:10]\nCompletable: Cpath Value[button].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[button].\"\"\nContextPath Value[button]\nPath button\nContextPath Value[button]->\nContextPath Value[button]\nPath button\nCPPipe pathFromEnv:DOMAPI found:true\nPath DOMAPI.\nPath DotPipeCompletionSpec.HTMLButtonElement.\n[{\n    \"label\": \"disabled\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ndisabled: bool\\n```\\n\\n```rescript\\ntype htmlButtonElement = {mutable disabled: bool}\\n```\"}\n  }, {\n    \"label\": \"->HTMLButtonElement.checkValidity\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"DOMAPI.htmlButtonElement => bool\",\n    \"documentation\": null,\n    \"sortText\": \"checkValidity\",\n    \"insertText\": \"->HTMLButtonElement.checkValidity\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 144, \"character\": 9}, \"end\": {\"line\": 144, \"character\": 10}},\n      \"newText\": \"\"\n      }]\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/EnvCompletion.res.txt",
    "content": "Complete src/EnvCompletion.res 10:17\nXXX Not found!\nCompletable: Cpattern Value[res]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res]\nPath res\n[{\n    \"label\": \"Okay(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Okay('a)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOkay('a)\\n```\\n\\n```rescript\\ntype someResult<'a, 'b> = Okay('a) | Failure('b)\\n```\"},\n    \"insertText\": \"Okay(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Failure(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Failure('b)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nFailure('b)\\n```\\n\\n```rescript\\ntype someResult<'a, 'b> = Okay('a) | Failure('b)\\n```\"},\n    \"insertText\": \"Failure(${1:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 13:23\nposCursor:[13:23] posNoWhite:[13:22] Found pattern:[13:18->13:24]\nPpat_construct Okay:[13:18->13:22]\nposCursor:[13:23] posNoWhite:[13:22] Found pattern:[13:22->13:24]\nPpat_construct ():[13:22->13:24]\nCompletable: Cpattern Value[res]->variantPayload::Okay($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res]\nPath res\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype things = One | Two\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo\\n```\\n\\n```rescript\\ntype things = One | Two\\n```\"},\n    \"insertText\": \"Two\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 16:26\nposCursor:[16:26] posNoWhite:[16:25] 
Found pattern:[16:18->16:27]\nPpat_construct Failure:[16:18->16:25]\nposCursor:[16:26] posNoWhite:[16:25] Found pattern:[16:25->16:27]\nPpat_construct ():[16:25->16:27]\nCompletable: Cpattern Value[res]->variantPayload::Failure($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res]\nPath res\n[{\n    \"label\": \"\\\"\\\"\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"\\\"$0\\\"\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 19:19\nXXX Not found!\nCompletable: Cpattern Value[use](Nolabel)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[use](Nolabel)\nContextPath Value[use]\nPath use\n[{\n    \"label\": \"{}\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"EnvCompletionOtherFile.response\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype EnvCompletionOtherFile.response = {stuff: theVariant, res: someResult<theVariant, string>}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 22:21\nposCursor:[22:21] posNoWhite:[22:20] Found pattern:[22:20->22:22]\nCompletable: Cpattern Value[use](Nolabel)->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[use](Nolabel)\nContextPath Value[use]\nPath use\n[{\n    \"label\": \"stuff\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"theVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nstuff: theVariant\\n```\\n\\n```rescript\\ntype EnvCompletionOtherFile.response = {stuff: theVariant, res: someResult<theVariant, string>}\\n```\"}\n  }, {\n    \"label\": \"res\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"someResult<theVariant, string>\",\n    \"documentation\": 
{\"kind\": \"markdown\", \"value\": \"```rescript\\nres: someResult<theVariant, string>\\n```\\n\\n```rescript\\ntype EnvCompletionOtherFile.response = {stuff: theVariant, res: someResult<theVariant, string>}\\n```\"}\n  }]\n\nComplete src/EnvCompletion.res 25:27\nposCursor:[25:27] posNoWhite:[25:26] Found pattern:[25:20->25:31]\nCompletable: Cpattern Value[use](Nolabel)->recordField(stuff)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[use](Nolabel)\nContextPath Value[use]\nPath use\n[{\n    \"label\": \"First\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"First\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nFirst\\n```\\n\\n```rescript\\ntype theVariant = First | Second(r1)\\n```\"},\n    \"insertText\": \"First\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Second(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Second(r1)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nSecond(r1)\\n```\\n\\n```rescript\\ntype theVariant = First | Second(r1)\\n```\"},\n    \"insertText\": \"Second(${1:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 28:35\nposCursor:[28:35] posNoWhite:[28:34] Found pattern:[28:20->28:38]\nposCursor:[28:35] posNoWhite:[28:34] Found pattern:[28:28->28:36]\nPpat_construct Second:[28:28->28:34]\nposCursor:[28:35] posNoWhite:[28:34] Found pattern:[28:34->28:36]\nPpat_construct ():[28:34->28:36]\nCompletable: Cpattern Value[use](Nolabel)->recordField(stuff), variantPayload::Second($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[use](Nolabel)\nContextPath Value[use]\nPath use\n[{\n    \"label\": \"{}\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"r1\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype r1 = {age: int}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": 
\"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 31:36\nposCursor:[31:36] posNoWhite:[31:35] Found pattern:[31:20->31:40]\nposCursor:[31:36] posNoWhite:[31:35] Found pattern:[31:28->31:38]\nPpat_construct Second:[31:28->31:34]\nposCursor:[31:36] posNoWhite:[31:35] Found pattern:[31:35->31:37]\nCompletable: Cpattern Value[use](Nolabel)->recordField(stuff), variantPayload::Second($0), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[use](Nolabel)\nContextPath Value[use]\nPath use\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype r1 = {age: int}\\n```\"}\n  }]\n\nComplete src/EnvCompletion.res 34:25\nposCursor:[34:25] posNoWhite:[34:24] Found pattern:[34:20->34:29]\nCompletable: Cpattern Value[use](Nolabel)->recordField(res)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[use](Nolabel)\nContextPath Value[use]\nPath use\n[{\n    \"label\": \"Okay(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Okay('a)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOkay('a)\\n```\\n\\n```rescript\\ntype someResult<'a, 'b> = Okay('a) | Failure('b)\\n```\"},\n    \"insertText\": \"Okay(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Failure(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Failure('b)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nFailure('b)\\n```\\n\\n```rescript\\ntype someResult<'a, 'b> = Okay('a) | Failure('b)\\n```\"},\n    \"insertText\": \"Failure(${1:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 37:31\nposCursor:[37:31] posNoWhite:[37:30] Found pattern:[37:20->37:34]\nposCursor:[37:31] posNoWhite:[37:30] Found 
pattern:[37:26->37:32]\nPpat_construct Okay:[37:26->37:30]\nposCursor:[37:31] posNoWhite:[37:30] Found pattern:[37:30->37:32]\nPpat_construct ():[37:30->37:32]\nCompletable: Cpattern Value[use](Nolabel)->recordField(res), variantPayload::Okay($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[use](Nolabel)\nContextPath Value[use]\nPath use\n[{\n    \"label\": \"First\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"First\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nFirst\\n```\\n\\n```rescript\\ntype theVariant = First | Second(r1)\\n```\"},\n    \"insertText\": \"First\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Second(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Second(r1)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nSecond(r1)\\n```\\n\\n```rescript\\ntype theVariant = First | Second(r1)\\n```\"},\n    \"insertText\": \"Second(${1:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 40:38\nposCursor:[40:38] posNoWhite:[40:37] Found pattern:[40:20->40:42]\nposCursor:[40:38] posNoWhite:[40:37] Found pattern:[40:26->40:40]\nPpat_construct Okay:[40:26->40:30]\nposCursor:[40:38] posNoWhite:[40:37] Found pattern:[40:31->40:39]\nPpat_construct Second:[40:31->40:37]\nposCursor:[40:38] posNoWhite:[40:37] Found pattern:[40:37->40:39]\nPpat_construct ():[40:37->40:39]\nCompletable: Cpattern Value[use](Nolabel)->recordField(res), variantPayload::Okay($0), variantPayload::Second($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[use](Nolabel)\nContextPath Value[use]\nPath use\n[{\n    \"label\": \"{}\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"r1\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype r1 = {age: int}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    
\"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 43:39\nposCursor:[43:39] posNoWhite:[43:38] Found pattern:[43:20->43:44]\nposCursor:[43:39] posNoWhite:[43:38] Found pattern:[43:26->43:42]\nPpat_construct Okay:[43:26->43:30]\nposCursor:[43:39] posNoWhite:[43:38] Found pattern:[43:31->43:41]\nPpat_construct Second:[43:31->43:37]\nposCursor:[43:39] posNoWhite:[43:38] Found pattern:[43:38->43:40]\nCompletable: Cpattern Value[use](Nolabel)->recordField(res), variantPayload::Okay($0), variantPayload::Second($0), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[use](Nolabel)\nContextPath Value[use]\nPath use\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype r1 = {age: int}\\n```\"}\n  }]\n\nComplete src/EnvCompletion.res 52:18\nXXX Not found!\nCompletable: Cpattern Value[res2]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res2]\nPath res2\n[{\n    \"label\": \"{}\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"EnvCompletionOtherFile.someRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype EnvCompletionOtherFile.someRecord = {name: string, theThing: 'thing, theVariant: theVariant}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 55:20\nposCursor:[55:20] posNoWhite:[55:19] Found pattern:[55:19->55:21]\nCompletable: Cpattern Value[res2]->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res2]\nPath res2\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: 
string\\n```\\n\\n```rescript\\ntype EnvCompletionOtherFile.someRecord = {name: string, theThing: 'thing, theVariant: theVariant}\\n```\"}\n  }, {\n    \"label\": \"theThing\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"'thing\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntheThing: 'thing\\n```\\n\\n```rescript\\ntype EnvCompletionOtherFile.someRecord = {name: string, theThing: 'thing, theVariant: theVariant}\\n```\"}\n  }, {\n    \"label\": \"theVariant\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"theVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntheVariant: theVariant\\n```\\n\\n```rescript\\ntype EnvCompletionOtherFile.someRecord = {name: string, theThing: 'thing, theVariant: theVariant}\\n```\"}\n  }]\n\nComplete src/EnvCompletion.res 58:29\nposCursor:[58:29] posNoWhite:[58:28] Found pattern:[58:19->58:33]\nCompletable: Cpattern Value[res2]->recordField(theThing)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res2]\nPath res2\n[{\n    \"label\": \"Four\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Four\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nFour\\n```\\n\\n```rescript\\ntype things2 = Four | Five\\n```\"},\n    \"insertText\": \"Four\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Five\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Five\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nFive\\n```\\n\\n```rescript\\ntype things2 = Four | Five\\n```\"},\n    \"insertText\": \"Five\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/EnvCompletion.res 61:31\nposCursor:[61:31] posNoWhite:[61:30] Found pattern:[61:19->61:35]\nCompletable: Cpattern Value[res2]->recordField(theVariant)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res2]\nPath res2\n[{\n    
\"label\": \"First\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"First\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nFirst\\n```\\n\\n```rescript\\ntype theVariant = First | Second(r1)\\n```\"},\n    \"insertText\": \"First\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Second(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Second(r1)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nSecond(r1)\\n```\\n\\n```rescript\\ntype theVariant = First | Second(r1)\\n```\"},\n    \"insertText\": \"Second(${1:_})\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/EnvCompletionOtherFile.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/ExhaustiveSwitch.res.txt",
    "content": "Complete src/ExhaustiveSwitch.res 8:24\nXXX Not found!\nCompletable: CexhaustiveSwitch Value[withSomeVarian]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[withSomeVarian]\nPath withSomeVarian\n[{\n    \"label\": \"withSomeVariant\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three(option<bool>)\\n```\"}\n  }, {\n    \"label\": \"withSomeVariant (exhaustive switch)\",\n    \"kind\": 15,\n    \"tags\": [],\n    \"detail\": \"insert exhaustive switch for value\",\n    \"documentation\": null,\n    \"filterText\": \"withSomeVariant\",\n    \"insertText\": \"withSomeVariant {\\n   | One => ${1:failwith(\\\"todo\\\")}\\n   | Two => ${2:failwith(\\\"todo\\\")}\\n   | Three(_) => ${3:failwith(\\\"todo\\\")}\\n   }\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/ExhaustiveSwitch.res 11:21\nXXX Not found!\nCompletable: CexhaustiveSwitch Value[withSomePol]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[withSomePol]\nPath withSomePol\n[{\n    \"label\": \"withSomePoly\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"somePolyVariant\",\n    \"documentation\": null\n  }, {\n    \"label\": \"withSomePoly (exhaustive switch)\",\n    \"kind\": 15,\n    \"tags\": [],\n    \"detail\": \"insert exhaustive switch for value\",\n    \"documentation\": null,\n    \"filterText\": \"withSomePoly\",\n    \"insertText\": \"withSomePoly {\\n   | #\\\"switch\\\" => ${1:failwith(\\\"todo\\\")}\\n   | #one => ${2:failwith(\\\"todo\\\")}\\n   | #three(_) => ${3:failwith(\\\"todo\\\")}\\n   | #two => ${4:failwith(\\\"todo\\\")}\\n   | #\\\"exotic ident\\\" => ${5:failwith(\\\"todo\\\")}\\n   }\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/ExhaustiveSwitch.res 14:17\nXXX Not found!\nCompletable: CexhaustiveSwitch 
Value[someBoo]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someBoo]\nPath someBoo\n[{\n    \"label\": \"someBool\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"someBool (exhaustive switch)\",\n    \"kind\": 15,\n    \"tags\": [],\n    \"detail\": \"insert exhaustive switch for value\",\n    \"documentation\": null,\n    \"filterText\": \"someBool\",\n    \"insertText\": \"someBool {\\n   | true => ${1:failwith(\\\"todo\\\")}\\n   | false => ${2:failwith(\\\"todo\\\")}\\n   }\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/ExhaustiveSwitch.res 17:16\nXXX Not found!\nCompletable: CexhaustiveSwitch Value[someOp]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someOp]\nPath someOp\n[{\n    \"label\": \"someOpt\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"option<bool>\",\n    \"documentation\": null\n  }, {\n    \"label\": \"someOpt (exhaustive switch)\",\n    \"kind\": 15,\n    \"tags\": [],\n    \"detail\": \"insert exhaustive switch for value\",\n    \"documentation\": null,\n    \"filterText\": \"someOpt\",\n    \"insertText\": \"someOpt {\\n   | Some($1) => ${2:failwith(\\\"todo\\\")}\\n   | None => ${3:failwith(\\\"todo\\\")}\\n   }\",\n    \"insertTextFormat\": 2\n  }]\n\nXform src/ExhaustiveSwitch.res 30:13\nposCursor:[30:13] posNoWhite:[30:12] Found expr:[30:3->30:17]\nposCursor:[30:13] posNoWhite:[30:12] Found expr:[30:10->30:17]\nCompletable: Cpath Value[x]->\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x]->\nContextPath Value[x]\nPath x\nCPPipe pathFromEnv: found:true\nPath ExhaustiveSwitch.\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n\nXform src/ExhaustiveSwitch.res start: 33:3, end: 33:10\nfound selection: [33:3->33:10] -> [33:6->33:10]\nXXX Not found!\nCompletable: Cpath 
Value[getV](Nolabel)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[getV](Nolabel)\nContextPath Value[getV]\nPath getV\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Exhaustive switch\n\nTextDocumentEdit: ExhaustiveSwitch.res\n{\"start\": {\"line\": 33, \"character\": 3}, \"end\": {\"line\": 33, \"character\": 10}}\nnewText:\n   <--here\n   switch x->getV {\n   | One => failwith(\"TODO\")\n   | Two => failwith(\"TODO\")\n   | Three(_) => failwith(\"TODO\")\n   }\n\nXform src/ExhaustiveSwitch.res 36:4\nXXX Not found!\nCompletable: Cpath Value[vvv]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[vvv]\nPath vvv\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Exhaustive switch\n\nTextDocumentEdit: ExhaustiveSwitch.res\n{\"start\": {\"line\": 36, \"character\": 3}, \"end\": {\"line\": 36, \"character\": 6}}\nnewText:\n   <--here\n   switch vvv {\n   | None => failwith(\"TODO\")\n   | Some(_) => failwith(\"TODO\")\n   | Some(One) => failwith(\"TODO\")\n   | Some(Two) => failwith(\"TODO\")\n   | Some(Three(_)) => failwith(\"TODO\")\n   }\n\n\nComplete src/ExhaustiveSwitch.res 40:24\nXXX Not found!\nCompletable: CexhaustiveSwitch Value[withSomeVarian]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[withSomeVarian]\nPath withSomeVarian\n[{\n    \"label\": \"withSomeVariant\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three(option<bool>)\\n```\"}\n  }, {\n    \"label\": \"withSomeVariant (exhaustive switch)\",\n    \"kind\": 15,\n    \"tags\": [],\n    \"detail\": \"insert exhaustive switch for value\",\n    \"documentation\": null,\n    \"filterText\": \"withSomeVariant\",\n    \"insertText\": \"withSomeVariant {\\n   
| One => ${1:%todo}\\n   | Two => ${2:%todo}\\n   | Three(_) => ${3:%todo}\\n   }\",\n    \"insertTextFormat\": 2\n  }]\n\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Firebase.res.txt",
    "content": "Complete src/Firebase.res 30:9\nposCursor:[30:9] posNoWhite:[30:8] Found expr:[30:5->30:9]\nPexp_field [30:5->30:8] _:[32:0->30:9]\nCompletable: Cpath Value[ref].\"\"\nRaw opens: 1 Firebase.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives Firebase.res\nContextPath Value[ref].\"\"\nContextPath Value[ref]\nPath ref\nContextPath Value[ref]->\nContextPath Value[ref]\nPath ref\nCPPipe pathFromEnv:Firebase.Firestore found:true\nPath Firebase.Firestore.\n[{\n    \"label\": \"id\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nid: string\\n```\\n\\n```rescript\\ntype documentReference<'documentdata> = {\\n  id: string,\\n  path: string,\\n}\\n```\"}\n  }, {\n    \"label\": \"path\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\npath: string\\n```\\n\\n```rescript\\ntype documentReference<'documentdata> = {\\n  id: string,\\n  path: string,\\n}\\n```\"}\n  }, {\n    \"label\": \"->Firestore.getDoc\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"documentReference<\\n  'documentdata,\\n> => Js.Promise.t<documentSnapshot<'documentdata>>\",\n    \"documentation\": null,\n    \"sortText\": \"getDoc\",\n    \"insertText\": \"->Firestore.getDoc\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 30, \"character\": 8}, \"end\": {\"line\": 30, \"character\": 9}},\n      \"newText\": \"\"\n      }]\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Fragment.res.txt",
    "content": "Hover src/Fragment.res 6:19\ngetLocItem #4: heuristic for </Comp> within fragments: take make as makeProps does not work\nthe type is not great but jump to definition works\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nReact.component<{\\\"children\\\": Jsx.element}>\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype React.component<'props> = Jsx.component<'props>\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22React.res%22%2C12%2C0%5D)\\n\"}}\n\nHover src/Fragment.res 9:56\nNothing at that position. Now trying to use completion.\nposCursor:[9:56] posNoWhite:[9:55] Found expr:[9:10->9:67]\nposCursor:[9:56] posNoWhite:[9:55] Found expr:[9:13->9:67]\nposCursor:[9:56] posNoWhite:[9:55] Found expr:[9:13->9:66]\nJSX <SectionHeader:[9:13->9:26] > _children:9:26\nposCursor:[9:56] posNoWhite:[9:55] Found expr:__ghost__[9:10->9:67]\nPexp_construct []:__ghost__[9:10->9:67] None\nCompletable: Cexpression CTypeAtPos()=[]->variantPayload::::($1)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CTypeAtPos()\nnull\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Highlight.res.txt",
    "content": "Highlight src/Highlight.res\nstructure items:39 diagnostics:0 \nLident: M 0:7 Namespace\nLident: C 1:9 Namespace\nLident: Component 1:13 Namespace\nVariable: _c [4:4->4:6]\nJsxTag <: 4:9\nLident: Component 4:10 Namespace\nVariable: _mc [6:4->6:7]\nJsxTag <: 6:10\nLdot: M 6:11 Namespace\nLident: C 6:13 Namespace\nVariable: _d [8:4->8:6]\nJsxTag <: 8:9\nLident: div 8:10 JsxLowercase\nVariable: _d2 [10:4->10:7]\nJsxTag <: 11:2\nLident: div 11:3 JsxLowercase\nLident: div 16:4 JsxLowercase\nJsxTag >: 11:6\nJsxTag >: 16:7\nLdot: React 12:5 Namespace\nLident: string 12:11 Variable\nJsxTag <: 13:4\nLident: div 13:5 JsxLowercase\nLident: div 13:34 JsxLowercase\nJsxTag >: 13:8\nJsxTag >: 13:37\nLdot: React 13:11 Namespace\nLident: string 13:17 Variable\nLdot: React 14:5 Namespace\nLident: string 14:11 Variable\nLdot: React 15:5 Namespace\nLident: string 15:11 Variable\nLident: pair 18:5 Type\nLident: looooooooooooooooooooooooooooooooooooooong_int 20:5 Type\nLident: int 20:54 Type\nLident: looooooooooooooooooooooooooooooooooooooong_string 22:5 Type\nLident: string 22:57 Type\nLident: pairIntString 24:5 Type\nLident: list 24:21 Type\nTypeArg: [25:2->28:3]\nLident: pair 25:2 Type\nTypeArg: [26:4->26:50]\nTypeArg: [27:4->27:53]\nLident: looooooooooooooooooooooooooooooooooooooong_int 26:4 Type\nLident: looooooooooooooooooooooooooooooooooooooong_string 27:4 Type\nBinary operator < [31:12->31:13]\nBinary operator > [31:22->31:23]\nLident: MT 33:12 Type\nLident: DDF 34:9 Namespace\nLident: DDF 39:7 Namespace\nLident: DDF 40:9 Namespace\nLident: MT 39:12 Type\nLident: XX 45:7 Namespace\nLident: YY 46:9 Namespace\nLident: t 47:9 Type\nLident: int 47:13 Type\nLdot: XX 51:5 Namespace\nLident: YY 51:8 Namespace\nLident: tt 53:5 Type\nLident: t 53:10 Type\nLident: T 57:7 Namespace\nLident: someRecord 58:7 Type\nLident: someField 59:4 Property\nLident: int 59:15 Type\nLident: someOtherField 60:4 Property\nLident: string 60:20 Type\nLident: theParam 61:4 Property\nLident: 
someEnum 64:7 Type\nLident: A 64:18 EnumMember\nLident: B 64:22 EnumMember\nLident: C 64:26 EnumMember\nVariable: foo [67:4->67:7]\nVariable: x [67:10->67:11]\nLdot: T 67:17 Namespace\nLident: someField 67:19 Property\nLident: x 67:15 Variable\nVariable: add [69:4->69:7]\nVariable: x [69:21->69:22]\nVariable: world [69:24->69:30]\nLident: x 69:35 Variable\nLident: world 69:39 Variable\nLident: add 71:21 Variable\nJsxTag <: 73:8\nLident: div 73:9 JsxLowercase\nLident: div 73:36 JsxLowercase\nJsxTag >: 73:24\nJsxTag >: 73:39\nJsxTag <: 73:26\nLident: div 73:27 JsxLowercase\nLident: SomeComponent 75:7 Namespace\nLident: Nested 76:9 Namespace\nVariable: make [78:8->78:12]\nVariable: children [78:16->78:25]\nLident: children 79:10 Variable\nJsxTag <: 84:8\nLdot: SomeComponent 84:9 Namespace\nLident: Nested 84:23 Namespace\nLdot: SomeComponent 84:41 Namespace\nLident: Nested 84:55 Namespace\nJsxTag >: 84:29\nJsxTag >: 84:61\nJsxTag <: 84:31\nLident: div 84:32 JsxLowercase\nVariable: toAs [90:4->90:8]\nVariable: x [90:19->90:20]\nLident: x 90:25 Variable\nVariable: _toEquals [91:4->91:13]\nLident: toAs 91:16 Variable\nVariable: to [93:4->93:6]\nLident: to 94:9 Variable\nLident: to 94:14 Variable\nLident: to 94:20 Variable\nLident: to 94:25 Variable\nLident: ToAsProp 98:7 Namespace\nVariable: make [100:6->100:10]\nVariable: to [100:14->100:17]\nLdot: React 101:8 Namespace\nLident: int 101:14 Variable\nLident: to 101:18 Variable\nJsxTag <: 104:8\nLident: ToAsProp 104:9 Namespace\nVariable: true [107:4->107:11]\nLident: true 108:8->108:15 Variable\nVariable: enumInModule [110:4->110:16]\nLdot: T 110:19 Namespace\nLident: A 110:21 EnumMember\nLident: typeInModule 112:5 Type\nLdot: XX 112:20 Namespace\nLdot: YY 112:23 Namespace\nLident: t 112:26 Type\nLident: QQ 114:7 Namespace\nLident: somePolyEnumType 115:7 Type\nLident: list 118:29 Type\nTypeArg: [118:34->118:37]\nLident: int 118:34 Type\nVariable: x [123:8->123:9]\nLident: x 124:9 Variable\nLdot: QQ 126:8 
Namespace\nLident: somePolyEnumType 126:11 Type\nLident: abc 133:9->133:14 Property\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Hover.res.txt",
    "content": "Hover src/Hover.res 0:4\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\"}}\n\nHover src/Hover.res 3:5\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype t = (int, float)\\n```\"}}\n\nHover src/Hover.res 6:7\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule Id: {\\n  type x = int\\n}\\n```\"}}\n\nHover src/Hover.res 19:11\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"\\nThis module is commented\\n```rescript\\nmodule Dep: {\\n  let customDouble: int => int\\n}\\n```\"}}\n\nHover src/Hover.res 22:11\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint => int\\n```\\n---\\nSome doc comment\"}}\n\nHover src/Hover.res 26:6\ngetLocItem #8: heuristic for JSX with at most one child\nheuristic for: [makeProps, make, createElement], give the loc of `make` \n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\"}}\n\nHover src/Hover.res 33:4\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nunit => int\\n```\\n---\\nDoc comment for functionWithTypeAnnotation\"}}\n\nHover src/Hover.res 37:13\ngetLocItem #5: heuristic for JSX and compiler combined:\n~x becomes Props#x\nheuristic for: [Props, x], give loc of `x`\nn1:Props n2:name\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nstring\\n```\"}}\n\nHover src/Hover.res 42:15\ngetLocItem #7: heuristic for JSX on type-annotated labeled (~arg:t):\n(~arg:t) becomes Props#arg\nProps has the location range of arg:t\narg has the location range of arg\nheuristic for: [Props, arg], give loc of `arg`\nn1:Props n2:name\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nstring\\n```\"}}\n\nHover src/Hover.res 46:10\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\"}}\n\nHover src/Hover.res 49:13\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule type Logger = {\\n  let log: string => 
unit\\n}\\n```\"}}\n\nHover src/Hover.res 54:7\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule type Logger = {\\n  let log: string => unit\\n}\\n```\"}}\n\nDefinition src/Hover.res 60:14\n{\"uri\": \"Hover.res\", \"range\": {\"start\": {\"line\": 49, \"character\": 12}, \"end\": {\"line\": 49, \"character\": 18}}}\n\nHover src/Hover.res 63:9\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule IdDefinedTwice: {\\n  let y: int\\n  let _x: int\\n}\\n```\"}}\n\nHover src/Hover.res 74:7\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule A: {\\n  let x: int\\n}\\n```\"}}\n\nHover src/Hover.res 77:7\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule A: {\\n  let x: int\\n}\\n```\"}}\n\nHover src/Hover.res 91:10\nNothing at that position. Now trying to use completion.\nposCursor:[91:10] posNoWhite:[91:8] Found expr:[88:3->91:9]\nJSX <Comp:[88:3->88:7] > _children:88:7\nnull\n\nHover src/Hover.res 98:10\nNothing at that position. 
Now trying to use completion.\nposCursor:[98:10] posNoWhite:[98:9] Found expr:[95:3->98:10]\nJSX <Comp1:[95:3->95:8] > _children:95:8\nnull\n\nHover src/Hover.res 103:25\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nfloat\\n```\"}}\n\nHover src/Hover.res 106:21\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\"}}\n\nHover src/Hover.res 116:16\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nAA.cond<([< #str(string)] as 'a)> => AA.cond<'a>\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype AA.cond<'a> = 'a\\n  constraint 'a = [< #str(string)]\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C110%2C2%5D)\\n\"}}\n\nHover src/Hover.res 119:25\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nAA.cond<([< #str(string)] as 'a)> => AA.cond<'a>\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype AA.cond<'a> = 'a\\n  constraint 'a = [< #str(string)]\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C110%2C2%5D)\\n\"}}\n\nHover src/Hover.res 122:3\nNothing at that position. Now trying to use completion.\nAttribute id:live:[122:0->122:5] label:live\nCompletable: Cdecorator(live)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"The `@live` decorator is for reanalyze, a static analysis tool for ReScript that can do dead code analysis.\\n\\n`@live` tells the dead code analysis that the value should be considered live, even though it might appear to be dead. This is typically used in case of FFI where there are indirect ways to access values. 
It can be added to everything that could otherwise be considered unused by the dead code analysis - values, functions, arguments, records, individual record fields, and so on.\\n\\n[Read more and see examples in the documentation](https://rescript-lang.org/syntax-lookup#live-decorator).\\n\\nHint: Did you know you can run an interactive code analysis in your project by running the command `> ReScript: Start Code Analyzer`? Try it!\"}}\n\nHover src/Hover.res 125:4\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nunit => unit => int\\n```\"}}\n\nHover src/Hover.res 131:4\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n(unit, unit) => int\\n```\"}}\n\nHover src/Hover.res 134:4\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n(unit, unit) => int\\n```\"}}\n\nHover src/Hover.res 137:5\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nunit => unit => int\\n```\"}}\n\nHover src/Hover.res 144:9\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\\n---\\ndoc comment 1\"}}\n\nHover src/Hover.res 148:6\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\\n---\\n doc comment 2 \"}}\n\nHover src/Hover.res 165:23\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nfoo<bar>\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype foo<'a> = {content: 'a, zzz: string}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C161%2C2%5D)\\n\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype bar = {age: int}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C162%2C2%5D)\\n\"}}\n\nHover src/Hover.res 167:22\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nfoobar\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype foobar = foo<bar>\\n```\\nGo to: [Type 
definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C163%2C2%5D)\\n\"}}\n\nComplete src/Hover.res 170:16\nposCursor:[170:16] posNoWhite:[170:15] Found expr:[170:5->170:16]\nPexp_field [170:5->170:15] _:[176:2->170:16]\nCompletable: Cpath Value[x1].content.\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x1].content.\"\"\nContextPath Value[x1].content\nContextPath Value[x1]\nPath x1\nContextPath Value[x1]->content\nContextPath Value[x1]\nPath x1\nCPPipe pathFromEnv: found:true\nPath Hover.content\nContextPath Value[x1].content->\nContextPath Value[x1].content\nContextPath Value[x1]\nPath x1\nContextPath Value[x1]->content\nContextPath Value[x1]\nPath x1\nCPPipe pathFromEnv: found:true\nPath Hover.content\nCPPipe pathFromEnv: found:true\nPath Hover.\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype bar = {age: int}\\n```\"}\n  }]\n\nComplete src/Hover.res 173:16\nposCursor:[173:16] posNoWhite:[173:15] Found expr:[173:5->173:16]\nPexp_field [173:5->173:15] _:[176:2->173:16]\nCompletable: Cpath Value[x2].content.\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x2].content.\"\"\nContextPath Value[x2].content\nContextPath Value[x2]\nPath x2\nContextPath Value[x2]->content\nContextPath Value[x2]\nPath x2\nCPPipe pathFromEnv: found:true\nPath Hover.content\nContextPath Value[x2].content->\nContextPath Value[x2].content\nContextPath Value[x2]\nPath x2\nContextPath Value[x2]->content\nContextPath Value[x2]\nPath x2\nCPPipe pathFromEnv: found:true\nPath Hover.content\nCPPipe pathFromEnv: found:true\nPath Hover.\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: 
int\\n```\\n\\n```rescript\\ntype bar = {age: int}\\n```\"}\n  }]\n\nComplete src/Hover.res 182:16\nposCursor:[182:16] posNoWhite:[182:15] Found expr:[182:5->182:16]\nPexp_field [182:5->182:15] _:[187:0->182:16]\nCompletable: Cpath Value[y1].content.\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[y1].content.\"\"\nContextPath Value[y1].content\nContextPath Value[y1]\nPath y1\nContextPath Value[y1]->content\nContextPath Value[y1]\nPath y1\nCPPipe pathFromEnv: found:true\nPath Hover.content\nContextPath Value[y1].content->\nContextPath Value[y1].content\nContextPath Value[y1]\nPath y1\nContextPath Value[y1]->content\nContextPath Value[y1]\nPath y1\nCPPipe pathFromEnv: found:true\nPath Hover.content\nCPPipe pathFromEnv: found:true\nPath Hover.\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype bar = {age: int}\\n```\"}\n  }]\n\nComplete src/Hover.res 185:16\nposCursor:[185:16] posNoWhite:[185:15] Found expr:[185:5->185:16]\nPexp_field [185:5->185:15] _:[187:0->185:16]\nCompletable: Cpath Value[y2].content.\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[y2].content.\"\"\nContextPath Value[y2].content\nContextPath Value[y2]\nPath y2\nContextPath Value[y2]->content\nContextPath Value[y2]\nPath y2\nCPPipe pathFromEnv: found:true\nPath Hover.content\nContextPath Value[y2].content->\nContextPath Value[y2].content\nContextPath Value[y2]\nPath y2\nContextPath Value[y2]->content\nContextPath Value[y2]\nPath y2\nCPPipe pathFromEnv: found:true\nPath Hover.content\nCPPipe pathFromEnv: found:true\nPath Hover.\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage: int\\n```\\n\\n```rescript\\ntype bar = 
{age: int}\\n```\"}\n  }]\n\nHover src/Hover.res 197:4\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nCompV4.props<int, string> => React.element\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype CompV4.props<'n, 's> = {n?: 'n, s: 's}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C190%2C2%5D)\\n\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype React.element = Jsx.element\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22React.res%22%2C0%2C0%5D)\\n\"}}\n\nHover src/Hover.res 202:16\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nuseR\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype useR = {x: int, y: list<option<r<float>>>}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C200%2C0%5D)\\n\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype r<'a> = {i: 'a, f: float}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C101%2C0%5D)\\n\"}}\n\nHover src/Hover.res 210:13\nNothing at that position. 
Now trying to use completion.\nposCursor:[210:13] posNoWhite:[210:12] Found expr:[210:11->210:14]\nPexp_ident usr:[210:11->210:14]\nCompletable: Cpath Value[usr]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[usr]\nPath usr\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nuseR\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype useR = {x: int, y: list<option<r<float>>>}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C200%2C0%5D)\\n\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype r<'a> = {i: 'a, f: float}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C101%2C0%5D)\\n\"}}\n\nHover src/Hover.res 230:20\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\\n---\\n More Stuff \"}}\n\nHover src/Hover.res 233:17\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\\n---\\n More Stuff \"}}\n\nHover src/Hover.res 245:6\nNothing at that position. Now trying to use completion.\nposCursor:[245:6] posNoWhite:[245:5] Found expr:[245:3->245:14]\nPexp_field [245:3->245:4] someField:[245:5->245:14]\nCompletable: Cpath Value[x].someField\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x].someField\nContextPath Value[x]\nPath x\nContextPath Value[x]->someField\nContextPath Value[x]\nPath x\nCPPipe pathFromEnv: found:true\nPath Hover.someField\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\n{\"contents\": {\"kind\": \"markdown\", \"value\": \" Mighty fine field here. \\n\\n```rescript\\nbool\\n```\"}}\n\nHover src/Hover.res 248:19\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nbool\\n```\\n---\\n Mighty fine field here. 
\"}}\n\nHover src/Hover.res 253:20\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nvariant\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype variant = CoolVariant | OtherCoolVariant\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C251%2C0%5D)\\n\\n---\\n```rescript\\nCoolVariant\\n```\\n---\\n Cool variant! \"}}\n\nHover src/Hover.res 257:23\nNothing at that position. Now trying to use completion.\nposCursor:[257:23] posNoWhite:[257:22] Found expr:[257:22->257:25]\nPexp_ident fff:[257:22->257:25]\nCompletable: Cpath Value[fff]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[fff]\nPath fff\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath string\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nstring\\n```\"}}\n\nHover src/Hover.res 260:33\nNothing at that position. Now trying to use completion.\nposCursor:[260:33] posNoWhite:[260:32] Found expr:[260:31->260:40]\nPexp_ident someField:[260:31->260:40]\nCompletable: Cpath Value[someField]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someField]\nPath someField\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CPatternPath(Value[x])->recordField(someField)\nContextPath Value[x]\nPath x\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nbool\\n```\"}}\n\nHover src/Hover.res 263:8\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"\\n [`Belt.Array`]()\\n\\n  **mutable array**: Utilities functions\\n\\n```rescript\\nmodule Array: {\\n  module Id\\n  module Array\\n  module SortArray\\n  module MutableQueue\\n  module MutableStack\\n  module List\\n  module Range\\n  module Set\\n  module Map\\n  module MutableSet\\n  module MutableMap\\n  module HashSet\\n  module HashMap\\n  module Option\\n  module Result\\n  module Int\\n  module 
Float\\n}\\n```\"}}\n\nHover src/Hover.res 266:6\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype aliased = variant\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype variant = CoolVariant | OtherCoolVariant\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22Hover.res%22%2C251%2C0%5D)\\n\"}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/InlayHint.res.txt",
    "content": "Inlay Hint src/InlayHint.res 1:34\n[{\n    \"position\": {\"line\": 33, \"character\": 14},\n    \"label\": \": int\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 33, \"character\": 9},\n    \"label\": \": string\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 28, \"character\": 9},\n    \"label\": \": foo\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 26, \"character\": 23},\n    \"label\": \": (string, string)\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 18, \"character\": 9},\n    \"label\": \": string\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 16, \"character\": 9},\n    \"label\": \": (string, string)\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 8, \"character\": 10},\n    \"label\": \": int\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 4, \"character\": 8},\n    \"label\": \": char\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 3, \"character\": 9},\n    \"label\": \": float\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 2, \"character\": 10},\n    \"label\": \": int\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}, {\n    \"position\": {\"line\": 1, \"character\": 10},\n    \"label\": \": string\",\n    \"kind\": 1,\n    \"paddingLeft\": true,\n    \"paddingRight\": false\n}]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Jsx2.res.txt",
    "content": "Definition src/Jsx2.res 5:9\ngetLocItem #4: heuristic for </Comp> within fragments: take make as makeProps does not work\nthe type is not great but jump to definition works\n{\"uri\": \"Jsx2.res\", \"range\": {\"start\": {\"line\": 2, \"character\": 6}, \"end\": {\"line\": 2, \"character\": 10}}}\n\nComplete src/Jsx2.res 8:15\nposCursor:[8:15] posNoWhite:[8:14] Found expr:[8:4->8:15]\nJSX <M:[8:4->8:5] second[8:6->8:12]=...[8:13->8:15]> _children:None\nCompletable: Cexpression CJsxPropValue [M] second=fi\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [M] second\nPath M.make\n[]\n\nComplete src/Jsx2.res 11:20\nposCursor:[11:20] posNoWhite:[11:19] Found expr:[11:4->11:20]\nJSX <M:[11:4->11:5] second[11:6->11:12]=...[11:13->11:18] f[11:19->11:20]=...[11:19->11:20]> _children:None\nCompletable: Cjsx([M], f, [second, f])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"first\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }, {\n    \"label\": \"fun\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<string>\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 14:13\nposCursor:[14:13] posNoWhite:[14:12] Found expr:[14:12->14:13]\nJSX <M:[14:12->14:13] > _children:None\nCompletable: Cpath Module[M]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[M]\nPath M\n[{\n    \"label\": \"M\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module M\",\n    \"documentation\": null\n  }, {\n    \"label\": \"Map\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Map\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"Map\",\n      \"filePath\": \"src/Jsx2.res\"\n    }\n  }, {\n    \"label\": \"MapLabels\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module 
MapLabels\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"MapLabels\",\n      \"filePath\": \"src/Jsx2.res\"\n    }\n  }, {\n    \"label\": \"ModuleStuff\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module ModuleStuff\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"ModuleStuff\",\n      \"filePath\": \"src/Jsx2.res\"\n    }\n  }, {\n    \"label\": \"MoreLabels\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module MoreLabels\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"MoreLabels\",\n      \"filePath\": \"src/Jsx2.res\"\n    }\n  }]\n\nComplete src/Jsx2.res 22:19\nposCursor:[22:19] posNoWhite:[22:18] Found expr:[22:4->22:19]\nJSX <M:[22:4->22:5] prop[22:6->22:10]=...[22:12->22:16] k[22:18->22:19]=...[22:18->22:19]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 25:17\nposCursor:[25:17] posNoWhite:[25:16] Found expr:[25:4->25:17]\nJSX <M:[25:4->25:5] prop[25:6->25:10]=...[25:11->25:15] k[25:16->25:17]=...[25:16->25:17]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 28:21\nposCursor:[28:21] posNoWhite:[28:20] Found expr:[28:4->28:21]\nJSX <M:[28:4->28:5] prop[28:6->28:10]=...[28:11->28:19] k[28:20->28:21]=...[28:20->28:21]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n   
 \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 31:24\nposCursor:[31:24] posNoWhite:[31:23] Found expr:[31:4->31:24]\nJSX <M:[31:4->31:5] prop[31:6->31:10]=...[31:11->31:22] k[31:23->31:24]=...[31:23->31:24]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 34:18\nposCursor:[34:18] posNoWhite:[34:17] Found expr:[34:4->34:18]\nJSX <M:[34:4->34:5] prop[34:6->34:10]=...[34:12->34:16] k[34:17->34:18]=...[34:17->34:18]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 37:16\nposCursor:[37:16] posNoWhite:[37:15] Found expr:[37:4->37:16]\nJSX <M:[37:4->37:5] prop[37:6->37:10]=...[37:11->37:14] k[37:15->37:16]=...[37:15->37:16]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 40:17\nposCursor:[40:17] posNoWhite:[40:16] Found expr:[40:4->40:17]\nJSX <M:[40:4->40:5] prop[40:6->40:10]=...[40:11->40:15] k[40:16->40:17]=...[40:16->40:17]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 43:18\nposCursor:[43:18] posNoWhite:[43:17] Found expr:[43:4->43:18]\nJSX <M:[43:4->43:5] 
prop[43:6->43:10]=...[43:11->43:16] k[43:17->43:18]=...[43:17->43:18]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 46:16\nposCursor:[46:16] posNoWhite:[46:15] Found expr:[46:4->46:16]\nJSX <M:[46:4->46:5] prop[46:6->46:10]=...[46:11->46:14] k[46:15->46:16]=...[46:15->46:16]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 49:27\nposCursor:[49:27] posNoWhite:[49:26] Found expr:[49:4->49:27]\nJSX <M:[49:4->49:5] prop[49:6->49:10]=...[49:11->49:25] k[49:26->49:27]=...[49:26->49:27]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 52:38\nposCursor:[52:38] posNoWhite:[52:37] Found expr:[52:4->52:38]\nJSX <M:[52:4->52:5] prop[52:6->52:10]=...[52:11->52:36] k[52:37->52:38]=...[52:37->52:38]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 55:25\nposCursor:[55:25] posNoWhite:[55:24] Found expr:[55:4->55:25]\nJSX <M:[55:4->55:5] prop[55:6->55:10]=...[55:11->55:23] k[55:24->55:25]=...[55:24->55:25]> _children:None\nCompletable: Cjsx([M], k, [prop, k])\nPackage opens 
Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nDefinition src/Jsx2.res 58:11\ngetLocItem #4: heuristic for </Comp> within fragments: take make as makeProps does not work\nthe type is not great but jump to definition works\n{\"uri\": \"Component.res\", \"range\": {\"start\": {\"line\": 1, \"character\": 4}, \"end\": {\"line\": 1, \"character\": 8}}}\n\nComplete src/Jsx2.res 68:10\nposCursor:[68:10] posNoWhite:[68:9] Found expr:[68:4->68:10]\nJSX <Ext:[68:4->68:7] al[68:8->68:10]=...[68:8->68:10]> _children:None\nCompletable: Cjsx([Ext], al, [al])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath Ext.make\n[{\n    \"label\": \"align\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<string>\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 71:11\nposCursor:[71:11] posNoWhite:[71:10] Found expr:[71:4->71:11]\nJSX <M:[71:4->71:5] first[71:6->71:11]=...[71:6->71:11]> _children:None\nCompletable: Cjsx([M], first, [first])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[]\n\nComplete src/Jsx2.res 74:16\nposCursor:[74:16] posNoWhite:[74:15] Found expr:[74:4->74:16]\nJSX <M:[74:4->74:5] first[74:6->74:11]=...[74:12->74:14] k[74:15->74:16]=...[74:15->74:16]> _children:None\nCompletable: Cjsx([M], k, [first, k])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 77:23\nposCursor:[77:23] posNoWhite:[77:22] Found expr:[77:4->77:23]\nJSX <M:[77:4->77:5] first[77:6->77:11]=...[77:19->77:21] k[77:22->77:23]=...[77:22->77:23]> _children:None\nCompletable: Cjsx([M], k, [first, k])\nPackage opens Pervasives.JsxModules.place 
holder\nResolved opens 1 pervasives\nPath M.make\n[{\n    \"label\": \"key\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 80:6\nposCursor:[80:6] posNoWhite:[80:5] Found expr:[80:4->85:69]\nPexp_apply ...[83:20->83:21] (...[80:4->83:19], ...[84:2->85:69])\nposCursor:[80:6] posNoWhite:[80:5] Found expr:[80:4->83:19]\nJSX <M:[80:4->80:5] > _children:80:5\nposCursor:[80:6] posNoWhite:[80:5] Found expr:[80:5->83:20]\nposCursor:[80:6] posNoWhite:[80:5] Found expr:__ghost__[80:5->83:20]\nPexp_construct []:__ghost__[80:5->83:20] None\nposCursor:[80:6] posNoWhite:[80:5] Found expr:[80:4->83:19]\nJSX <M:[80:4->80:5] > _children:80:5\nposCursor:[80:6] posNoWhite:[80:5] Found expr:[80:5->83:20]\nposCursor:[80:6] posNoWhite:[80:5] Found expr:__ghost__[80:5->83:20]\nPexp_construct []:__ghost__[80:5->83:20] None\n[]\n\nComplete src/Jsx2.res 89:16\nposCursor:[89:16] posNoWhite:[89:15] Found expr:[89:4->89:16]\nJSX <WithChildren:[89:4->89:16] > _children:None\nCompletable: Cpath Module[WithChildren]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[WithChildren]\nPath WithChildren\n[{\n    \"label\": \"WithChildren\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module WithChildren\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 91:18\nposCursor:[91:18] posNoWhite:[91:17] Found expr:[91:4->91:18]\nJSX <WithChildren:[91:4->91:16] n[91:17->91:18]=...[91:17->91:18]> _children:None\nCompletable: Cjsx([WithChildren], n, [n])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath WithChildren.make\n[{\n    \"label\": \"name\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 94:18\nposCursor:[94:18] posNoWhite:[94:17] Found pattern:[94:7->94:18]\nposCursor:[94:18] posNoWhite:[94:17] Found type:[94:11->94:18]\nPtyp_constr 
React.e:[94:11->94:18]\nCompletable: Cpath Type[React, e]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[React, e]\nPath React.e\n[{\n    \"label\": \"element\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type element\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype element = Jsx.element\\n```\"}\n  }]\n\nComplete src/Jsx2.res 96:20\nposCursor:[96:20] posNoWhite:[96:19] Found pattern:[96:7->99:6]\nposCursor:[96:20] posNoWhite:[96:19] Found type:[96:11->99:6]\nPtyp_constr ReactDOMR:[96:11->99:6]\nCompletable: Cpath Type[ReactDOMR]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[ReactDOMR]\nPath ReactDOMR\n[{\n    \"label\": \"ReactDOMRe\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module ReactDOMRe\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"ReactDOMRe\",\n      \"filePath\": \"src/Jsx2.res\"\n    }\n  }]\n\nComplete src/Jsx2.res 102:21\nposCursor:[102:21] posNoWhite:[102:20] Found expr:[102:13->102:21]\nPexp_apply ...[102:15->102:16] (...[102:13->102:14], ...[102:17->102:21])\nposCursor:[102:21] posNoWhite:[102:20] Found expr:[102:17->102:21]\nPexp_field [102:17->102:18] th:[102:19->102:21]\nCompletable: Cpath Value[x].th\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[x].th\nContextPath Value[x]\nPath x\n[]\n\nComplete src/Jsx2.res 106:28\nposCursor:[106:28] posNoWhite:[106:27] Found expr:[106:11->106:28]\nPexp_ident DefineSomeFields.:[106:11->106:28]\nCompletable: Cpath Value[DefineSomeFields, \"\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[DefineSomeFields, \"\"]\nPath DefineSomeFields.\n[{\n    \"label\": \"thisValue\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 108:36\nposCursor:[108:36] 
posNoWhite:[108:35] Found expr:[108:11->108:36]\nPexp_apply ...[108:13->108:14] (...[108:11->108:12], ...[108:15->108:36])\nposCursor:[108:36] posNoWhite:[108:35] Found expr:[108:15->108:36]\nPexp_field [108:15->108:16] DefineSomeFields.th:[108:17->108:36]\nCompletable: Cpath Module[DefineSomeFields].th\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[DefineSomeFields].th\nPath DefineSomeFields.th\n[{\n    \"label\": \"thisField\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nthisField: int\\n```\\n\\n```rescript\\ntype r = {thisField: int, thatField: string}\\n```\"}\n  }, {\n    \"label\": \"thatField\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nthatField: string\\n```\\n\\n```rescript\\ntype r = {thisField: int, thatField: string}\\n```\"}\n  }]\n\nComplete src/Jsx2.res 122:20\nposCursor:[122:20] posNoWhite:[122:19] Found expr:[121:3->125:4]\nJSX <div:[121:3->121:6] x[122:5->122:6]=...[122:7->122:20] name[124:4->124:8]=...[124:9->124:11]> _children:125:2\nposCursor:[122:20] posNoWhite:[122:19] Found expr:[122:7->122:20]\nPexp_ident Outer.Inner.h:[122:7->122:20]\nCompletable: Cpath Value[Outer, Inner, h]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Outer, Inner, h]\nPath Outer.Inner.h\n[{\n    \"label\": \"hello\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 129:19\nposCursor:[129:19] posNoWhite:[129:18] Found expr:[128:3->131:9]\nJSX <div:[128:3->128:6] x[129:5->129:6]=...[129:7->131:8]> _children:None\nposCursor:[129:19] posNoWhite:[129:18] Found expr:[129:7->131:8]\nPexp_ident Outer.Inner.:[129:7->131:8]\nCompletable: Cpath Value[Outer, Inner, \"\"]\nPackage opens 
Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Outer, Inner, \"\"]\nPath Outer.Inner.\n[{\n    \"label\": \"hello\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 136:7\nposCursor:[136:7] posNoWhite:[136:6] Found expr:[135:3->138:9]\nJSX <div:[135:3->135:6] x[136:5->136:6]=...[138:4->138:8]> _children:None\nCompletable: Cexpression CJsxPropValue [div] x\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [div] x\nPath ReactDOM.domProps\nPath PervasivesU.JsxDOM.domProps\n[{\n    \"label\": \"\\\"\\\"\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"{\\\"$0\\\"}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Jsx2.res 150:21\nposCursor:[150:21] posNoWhite:[150:20] Found expr:[150:12->150:32]\nJSX <Nested.Co:[150:12->150:21] name[150:22->150:26]=...[150:27->150:29]> _children:150:30\nCompletable: Cpath Module[Nested, Co]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[Nested, Co]\nPath Nested.Co\n[{\n    \"label\": \"Comp\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Comp\",\n    \"documentation\": null\n  }]\n\nComplete src/Jsx2.res 153:19\nposCursor:[153:19] posNoWhite:[153:18] Found expr:[153:12->153:25]\nJSX <Nested.:[153:12->153:24] > _children:None\nCompletable: Cpath Module[Nested, \"\"]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[Nested, \"\"]\nPath Nested.\n[{\n    \"label\": \"Comp\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module Comp\",\n    \"documentation\": null\n  }]\n\nHover src/Jsx2.res 162:12\nNothing at that position. 
Now trying to use completion.\nposCursor:[162:12] posNoWhite:[162:11] Found expr:[162:3->162:21]\nposCursor:[162:12] posNoWhite:[162:11] Found expr:[162:6->162:21]\nposCursor:[162:12] posNoWhite:[162:11] Found expr:[162:6->162:20]\nJSX <Comp:[162:6->162:10] age[162:11->162:14]=...[162:15->162:17]> _children:162:18\nCompletable: Cjsx([Comp], age, [age])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath Comp.make\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\"}}\n\nHover src/Jsx2.res 167:16\nNothing at that position. Now trying to use completion.\nposCursor:[167:16] posNoWhite:[167:15] Found expr:[167:3->167:30]\nposCursor:[167:16] posNoWhite:[167:15] Found expr:[167:7->167:30]\nposCursor:[167:16] posNoWhite:[167:15] Found expr:[167:7->167:25]\nposCursor:[167:16] posNoWhite:[167:15] Found expr:[167:10->167:25]\nposCursor:[167:16] posNoWhite:[167:15] Found expr:[167:10->167:24]\nJSX <Comp:[167:10->167:14] age[167:15->167:18]=...[167:19->167:21]> _children:167:22\nCompletable: Cjsx([Comp], age, [age])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath Comp.make\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\"}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Jsx2.resi.txt",
    "content": "Hover src/Jsx2.resi 1:4\ngetLocItem #1: heuristic for makeProps in interface files\nn1:componentLike n2:unit n3:string\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n(~first: string, ~key: string=?, unit) => {\\\"first\\\": string}\\n```\"}}\n\nHover src/Jsx2.resi 4:4\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\"}}\n\nComplete src/Jsx2.resi 7:19\nposCursor:[7:19] posNoWhite:[7:18] Found type:[7:12->7:19]\nPtyp_constr React.e:[7:12->7:19]\nCompletable: Cpath Type[React, e]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[React, e]\nPath React.e\n[{\n    \"label\": \"element\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type element\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype element = Jsx.element\\n```\"}\n  }]\n\nComplete src/Jsx2.resi 10:18\nposCursor:[10:18] posNoWhite:[10:17] Found type:[10:11->10:18]\nPtyp_constr React.e:[10:11->10:18]\nCompletable: Cpath Type[React, e]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[React, e]\nPath React.e\n[{\n    \"label\": \"element\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"type element\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype element = Jsx.element\\n```\"}\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/JsxV4.res.txt",
    "content": "Definition src/JsxV4.res 8:9\n{\"uri\": \"JsxV4.res\", \"range\": {\"start\": {\"line\": 5, \"character\": 6}, \"end\": {\"line\": 5, \"character\": 10}}}\n\nComplete src/JsxV4.res 11:20\nposCursor:[11:20] posNoWhite:[11:19] Found expr:[11:4->11:20]\nJSX <M4:[11:4->11:6] first[11:7->11:12]=...[11:13->11:18] f[11:19->11:20]=...[11:19->11:20]> _children:None\nCompletable: Cjsx([M4], f, [first, f])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath M4.make\n[{\n    \"label\": \"fun\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"option<string>\",\n    \"documentation\": null\n  }]\n\nHover src/JsxV4.res 14:9\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nReact.component<M4.props<string, string, string>>\\n```\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype React.component<'props> = Jsx.component<'props>\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22React.res%22%2C12%2C0%5D)\\n\\n\\n---\\n\\n```\\n \\n```\\n```rescript\\ntype M4.props<'first, 'fun, 'second> = {\\n  first: 'first,\\n  fun?: 'fun,\\n  second?: 'second,\\n}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22JsxV4.res%22%2C3%2C2%5D)\\n\\n---\\n Doc Comment For M4 \"}}\n\nCreate Interface src/JsxV4.res\nmodule M4: {\n  @react.component\n  let make: (~first: string, ~fun: string=?, ~second: string=?) => React.element\n}\nmodule MM: {\n  @react.component\n  let make: unit => React.element\n}\nmodule Other: {\n  @react.component\n  let make: (~name: string) => React.element\n}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/LongIdentTest.res.txt",
    "content": "Hover src/LongIdentTest.res 2:13\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nint\\n```\"}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/ModuleStuff.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/Objects.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/Patterns.res.txt",
    "content": "Definition src/Patterns.res 19:10\n{\"uri\": \"Patterns.res\", \"range\": {\"start\": {\"line\": 3, \"character\": 7}, \"end\": {\"line\": 3, \"character\": 10}}}\n\nDefinition src/Patterns.res 24:11\n{\"uri\": \"Patterns.res\", \"range\": {\"start\": {\"line\": 9, \"character\": 7}, \"end\": {\"line\": 9, \"character\": 11}}}\n\nDefinition src/Patterns.res 27:11\n{\"uri\": \"Patterns.res\", \"range\": {\"start\": {\"line\": 11, \"character\": 7}, \"end\": {\"line\": 11, \"character\": 8}}}\n\nDefinition src/Patterns.res 30:11\n{\"uri\": \"Patterns.res\", \"range\": {\"start\": {\"line\": 15, \"character\": 9}, \"end\": {\"line\": 15, \"character\": 11}}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/PolyRec.res.txt",
    "content": "Hover src/PolyRec.res 12:10\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n([#Leaf | #Node(int, 'a, 'a)] as 'a)\\n```\"}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/QueryFile.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/RecModules.res.txt",
    "content": "Hover src/RecModules.res 18:12\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule C: {\\n  type t\\n  let createA: t => A.t\\n}\\n```\"}}\n\nHover src/RecModules.res 20:12\n{\"contents\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nmodule A: {\\n  type t\\n  let child: t => B.t\\n}\\n```\"}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/RecordCompletion.res.txt",
    "content": "Complete src/RecordCompletion.res 8:9\nposCursor:[8:9] posNoWhite:[8:8] Found expr:[8:3->8:9]\nCompletable: Cpath Value[t].n->m\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[t].n->m\nContextPath Value[t].n\nContextPath Value[t]\nPath t\nContextPath Value[t]->n\nContextPath Value[t]\nPath t\nCPPipe pathFromEnv: found:true\nPath RecordCompletion.n\nPath Js.Array2.m\n[{\n    \"label\": \"Js.Array2.mapi\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, ('a, int) => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, returning\\na new array. The function acceps two arguments: an item from the array and its\\nindex number. The result array does not have to have elements of the same type\\nas the input array. See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\n// multiply each item in array by its position\\nlet product = (item, index) => item * index\\nJs.Array2.mapi([10, 11, 12], product) == [0, 11, 24]\\n```\\n\"}\n  }, {\n    \"label\": \"Js.Array2.map\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, returning\\na new array. The result array does not have to have elements of the same type\\nas the input array. 
See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.Array2.map([12, 4, 8], x => x * x) == [144, 16, 64]\\nJs.Array2.map([\\\"animal\\\", \\\"vegetable\\\", \\\"mineral\\\"], Js.String.length) == [6, 9, 7]\\n```\\n\"}\n  }]\n\nComplete src/RecordCompletion.res 11:13\nposCursor:[11:13] posNoWhite:[11:12] Found expr:[11:3->11:13]\nCompletable: Cpath Value[t2].n2.n->m\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[t2].n2.n->m\nContextPath Value[t2].n2.n\nContextPath Value[t2].n2\nContextPath Value[t2]\nPath t2\nContextPath Value[t2]->n2\nContextPath Value[t2]\nPath t2\nCPPipe pathFromEnv: found:true\nPath RecordCompletion.n2\nContextPath Value[t2].n2->n\nContextPath Value[t2].n2\nContextPath Value[t2]\nPath t2\nContextPath Value[t2]->n2\nContextPath Value[t2]\nPath t2\nCPPipe pathFromEnv: found:true\nPath RecordCompletion.n2\nCPPipe pathFromEnv: found:true\nPath RecordCompletion.n\nPath Js.Array2.m\n[{\n    \"label\": \"Js.Array2.mapi\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, ('a, int) => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, returning\\na new array. The function acceps two arguments: an item from the array and its\\nindex number. The result array does not have to have elements of the same type\\nas the input array. 
See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\n// multiply each item in array by its position\\nlet product = (item, index) => item * index\\nJs.Array2.mapi([10, 11, 12], product) == [0, 11, 24]\\n```\\n\"}\n  }, {\n    \"label\": \"Js.Array2.map\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'a>, 'a => 'b) => t<'b>\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"\\nApplies the function (the second argument) to each item in the array, returning\\na new array. The result array does not have to have elements of the same type\\nas the input array. See\\n[`Array.map`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/map)\\non MDN.\\n\\n## Examples\\n\\n```rescript\\nJs.Array2.map([12, 4, 8], x => x * x) == [144, 16, 64]\\nJs.Array2.map([\\\"animal\\\", \\\"vegetable\\\", \\\"mineral\\\"], Js.String.length) == [6, 9, 7]\\n```\\n\"}\n  }]\n\nComplete src/RecordCompletion.res 19:7\nposCursor:[19:7] posNoWhite:[19:6] Found expr:[19:3->19:7]\nPexp_field [19:3->19:4] R.:[19:5->19:7]\nCompletable: Cpath Module[R].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[R].\"\"\nPath R.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype t = {name: string}\\n```\"}\n  }]\n\nComplete src/RecordCompletion.res 22:7\nposCursor:[22:7] posNoWhite:[22:6] Found expr:[22:3->22:10]\nPexp_field [22:3->22:4] R.xx:[22:5->22:10]\nCompletable: Cpath Module[R].\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Module[R].\"\"\nPath R.\n[{\n    \"label\": \"name\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": 
\"markdown\", \"value\": \"```rescript\\nname: string\\n```\\n\\n```rescript\\ntype t = {name: string}\\n```\"}\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/RecoveryOnProp.res.txt",
    "content": "Complete src/RecoveryOnProp.res 6:26\nposCursor:[6:26] posNoWhite:[6:25] Found expr:[3:3->11:8]\nJSX <div:[3:3->3:6] onClick[4:4->4:11]=...[4:13->0:-1]> _children:None\nposCursor:[6:26] posNoWhite:[6:25] Found expr:[4:13->8:6]\nposCursor:[6:26] posNoWhite:[6:25] Found expr:[5:6->8:5]\nposCursor:[6:26] posNoWhite:[6:25] Found expr:[6:16->8:5]\nposCursor:[6:26] posNoWhite:[6:25] Found pattern:[6:20->8:5]\nposCursor:[6:26] posNoWhite:[6:25] Found type:[6:23->8:5]\nPtyp_constr Res:[6:23->8:5]\nposCursor:[6:26] posNoWhite:[6:25] Found pattern:[6:20->8:5]\nposCursor:[6:26] posNoWhite:[6:25] Found type:[6:23->8:5]\nPtyp_constr Res:[6:23->8:5]\nCompletable: Cpath Type[Res]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Type[Res]\nPath Res\n[{\n    \"label\": \"RescriptReactErrorBoundary\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module RescriptReactErrorBoundary\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"RescriptReactErrorBoundary\",\n      \"filePath\": \"src/RecoveryOnProp.res\"\n    }\n  }, {\n    \"label\": \"RescriptReactRouter\",\n    \"kind\": 9,\n    \"tags\": [],\n    \"detail\": \"module RescriptReactRouter\",\n    \"documentation\": null,\n    \"data\": {\n      \"modulePath\": \"RescriptReactRouter\",\n      \"filePath\": \"src/RecoveryOnProp.res\"\n    }\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/References.res.txt",
    "content": "References src/References.res 0:4\n[\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 26}, \"end\": {\"line\": 0, \"character\": 27}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 3, \"character\": 27}, \"end\": {\"line\": 3, \"character\": 28}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 7, \"character\": 27}, \"end\": {\"line\": 7, \"character\": 28}}},\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}}},\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 3, \"character\": 8}, \"end\": {\"line\": 3, \"character\": 9}}},\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 7, \"character\": 8}, \"end\": {\"line\": 7, \"character\": 9}}}\n]\n\nReferences src/References.res 9:19\n[\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 9, \"character\": 11}, \"end\": {\"line\": 9, \"character\": 14}}},\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 9, \"character\": 19}, \"end\": {\"line\": 9, \"character\": 21}}}\n]\n\nReferences src/References.res 20:12\n[\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 13, \"character\": 2}, \"end\": {\"line\": 13, \"character\": 13}}},\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 18, \"character\": 11}, \"end\": {\"line\": 18, \"character\": 13}}},\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 20, \"character\": 11}, \"end\": {\"line\": 20, \"character\": 13}}}\n]\n\nReferences src/References.res 23:15\ngetLocItem #4: heuristic for </Comp> within fragments: take make as makeProps does not work\nthe type is not great but jump to definition works\n[\n{\"uri\": \"ReferencesInner.res\", \"range\": {\"start\": {\"line\": 1, \"character\": 28}, \"end\": {\"line\": 1, \"character\": 32}}},\n{\"uri\": \"References.res\", \"range\": {\"start\": {\"line\": 23, \"character\": 
19}, \"end\": {\"line\": 23, \"character\": 23}}},\n{\"uri\": \"ComponentInner.res\", \"range\": {\"start\": {\"line\": 1, \"character\": 4}, \"end\": {\"line\": 1, \"character\": 8}}},\n{\"uri\": \"ComponentInner.resi\", \"range\": {\"start\": {\"line\": 1, \"character\": 4}, \"end\": {\"line\": 1, \"character\": 8}}}\n]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/ReferencesWithInterface.res.txt",
    "content": "References src/ReferencesWithInterface.res 0:4\n[\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 9, \"character\": 52}, \"end\": {\"line\": 9, \"character\": 53}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 12, \"character\": 53}, \"end\": {\"line\": 12, \"character\": 54}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 16, \"character\": 53}, \"end\": {\"line\": 16, \"character\": 54}}},\n{\"uri\": \"ReferencesWithInterface.resi\", \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}}},\n{\"uri\": \"ReferencesWithInterface.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}}}\n]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/ReferencesWithInterface.resi.txt",
    "content": "References src/ReferencesWithInterface.resi 0:4\n[\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 9, \"character\": 52}, \"end\": {\"line\": 9, \"character\": 53}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 12, \"character\": 53}, \"end\": {\"line\": 12, \"character\": 54}}},\n{\"uri\": \"Cross.res\", \"range\": {\"start\": {\"line\": 16, \"character\": 53}, \"end\": {\"line\": 16, \"character\": 54}}},\n{\"uri\": \"ReferencesWithInterface.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}}},\n{\"uri\": \"ReferencesWithInterface.resi\", \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}}}\n]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Rename.res.txt",
    "content": "Rename src/Rename.res 0:4 y\n[\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"Rename.res\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}},\n      \"newText\": \"y\"\n      }, {\n      \"range\": {\"start\": {\"line\": 3, \"character\": 8}, \"end\": {\"line\": 3, \"character\": 9}},\n      \"newText\": \"y\"\n      }, {\n      \"range\": {\"start\": {\"line\": 7, \"character\": 8}, \"end\": {\"line\": 7, \"character\": 9}},\n      \"newText\": \"y\"\n      }]\n  }\n]\n\nRename src/Rename.res 9:19 yy\n[\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"Rename.res\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 9, \"character\": 11}, \"end\": {\"line\": 9, \"character\": 14}},\n      \"newText\": \"yy\"\n      }, {\n      \"range\": {\"start\": {\"line\": 9, \"character\": 19}, \"end\": {\"line\": 9, \"character\": 21}},\n      \"newText\": \"yy\"\n      }]\n  }\n]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/RenameWithInterface.res.txt",
    "content": "Rename src/RenameWithInterface.res 0:4 y\n[\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"RenameWithInterface.resi\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}},\n      \"newText\": \"y\"\n      }]\n  },\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"RenameWithInterface.res\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}},\n      \"newText\": \"y\"\n      }]\n  },\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"Cross.res\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 18, \"character\": 28}, \"end\": {\"line\": 18, \"character\": 29}},\n      \"newText\": \"y\"\n      }, {\n      \"range\": {\"start\": {\"line\": 21, \"character\": 28}, \"end\": {\"line\": 21, \"character\": 29}},\n      \"newText\": \"y\"\n      }]\n  }\n]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/RenameWithInterface.resi.txt",
    "content": "Rename src/RenameWithInterface.resi 0:4 y\n[\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"RenameWithInterface.resi\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}},\n      \"newText\": \"y\"\n      }]\n  },\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"RenameWithInterface.res\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 0, \"character\": 4}, \"end\": {\"line\": 0, \"character\": 5}},\n      \"newText\": \"y\"\n      }]\n  },\n{\n  \"textDocument\": {\n  \"version\": null,\n  \"uri\": \"Cross.res\"\n  },\n  \"edits\": [{\n      \"range\": {\"start\": {\"line\": 18, \"character\": 28}, \"end\": {\"line\": 18, \"character\": 29}},\n      \"newText\": \"y\"\n      }, {\n      \"range\": {\"start\": {\"line\": 21, \"character\": 28}, \"end\": {\"line\": 21, \"character\": 29}},\n      \"newText\": \"y\"\n      }]\n  }\n]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Reprod.res.txt",
    "content": "Complete src/Reprod.res 7:53\nposCursor:[7:53] posNoWhite:[7:52] Found expr:[7:11->7:56]\nPexp_apply ...[7:11->7:20] (~variables7:22->7:31=...[7:32->7:55])\nCompletable: Cexpression CArgument Value[Query, use](~variables)->recordField(location), variantPayload::ByAddress($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CArgument Value[Query, use](~variables)\nContextPath Value[Query, use]\nPath Query.use\n[{\n    \"label\": \"{}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"input_ByAddress\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype input_ByAddress = {city: string}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Reprod.res 33:28\nposCursor:[33:28] posNoWhite:[33:27] Found pattern:[33:21->33:31]\nCompletable: Cpattern Value[record]->recordField(first)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[record]\nPath record\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Two(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(someRecord, 
bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Reprod.res 36:29\nposCursor:[36:29] posNoWhite:[36:28] Found pattern:[36:21->36:32]\nCompletable: Cpattern Value[record]->recordField(second)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[record]\nPath record\n[{\n    \"label\": \"{}\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"SchemaAssets.input_ByAddress\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype SchemaAssets.input_ByAddress = {city: string}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Reprod.res 43:21\nposCursor:[43:21] posNoWhite:[43:20] Found pattern:[43:18->43:22]\nPpat_construct Ok:[43:18->43:20]\nposCursor:[43:21] posNoWhite:[43:20] Found pattern:[43:20->43:22]\nPpat_construct ():[43:20->43:22]\nCompletable: Cpattern Value[res]->variantPayload::Ok($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res]\nPath res\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Two(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": 
\"Three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(someRecord, bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Reprod.res 46:24\nposCursor:[46:24] posNoWhite:[46:23] Found pattern:[46:18->46:25]\nPpat_construct Error:[46:18->46:23]\nposCursor:[46:24] posNoWhite:[46:23] Found pattern:[46:23->46:25]\nPpat_construct ():[46:23->46:25]\nCompletable: Cpattern Value[res]->variantPayload::Error($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[res]\nPath res\n[{\n    \"label\": \"#one\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#one\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#one\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#one\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#three(someRecord, bool)\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"#two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"#two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\n#two(bool)\\n```\\n\\n```rescript\\n[#one | #three(someRecord, bool) | #two(bool)]\\n```\"},\n    \"insertText\": \"#two(${1:_})\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Reprod.res 51:24\nposCursor:[51:24] posNoWhite:[51:23] Found pattern:[51:21->51:25]\nPpat_construct Ok:[51:21->51:23]\nposCursor:[51:24] posNoWhite:[51:23] Found pattern:[51:23->51:25]\nPpat_construct 
():[51:23->51:25]\nCompletable: Cpattern Value[resOpt]->variantPayload::Ok($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[resOpt]\nPath resOpt\n[{\n    \"label\": \"None\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"}\n  }, {\n    \"label\": \"Some(_)\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"someVariant\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Some(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(One)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Some(One)\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(Two(_))\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Some(Two(${1:_}))\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Some(Three(_, _))\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(someRecord, bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Some(Three(${1:_}, ${2:_}))\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/Reprod.res 54:29\nposCursor:[54:29] posNoWhite:[54:28] Found 
pattern:[54:21->54:31]\nPpat_construct Ok:[54:21->54:23]\nposCursor:[54:29] posNoWhite:[54:28] Found pattern:[54:24->54:30]\nPpat_construct Some:[54:24->54:28]\nposCursor:[54:29] posNoWhite:[54:28] Found pattern:[54:28->54:30]\nPpat_construct ():[54:28->54:30]\nCompletable: Cpattern Value[resOpt]->variantPayload::Ok($0), variantPayload::Some($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[resOpt]\nPath resOpt\n[{\n    \"label\": \"One\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"One\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nOne\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"One\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Two(_)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Two(bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nTwo(bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Two(${1:_})\",\n    \"insertTextFormat\": 2\n  }, {\n    \"label\": \"Three(_, _)\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Three(someRecord, bool)\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nThree(someRecord, bool)\\n```\\n\\n```rescript\\ntype someVariant = One | Two(bool) | Three(someRecord, bool)\\n```\"},\n    \"insertText\": \"Three(${1:_}, ${2:_})\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Rxjs.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/RxjsCompletion.res.txt",
    "content": "Complete src/RxjsCompletion.res 26:29\nposCursor:[26:29] posNoWhite:[26:28] Found expr:[10:17->38:1]\nposCursor:[26:29] posNoWhite:[26:28] Found expr:[11:2->32:78]\nposCursor:[26:29] posNoWhite:[26:28] Found expr:[12:4->26:29]\nposCursor:[26:29] posNoWhite:[26:28] Found expr:[14:4->26:29]\nposCursor:[26:29] posNoWhite:[26:28] Found expr:[20:4->26:29]\nposCursor:[26:29] posNoWhite:[26:28] Found expr:[26:7->26:29]\nPexp_field [26:7->26:28] _:[30:2->26:29]\nCompletable: Cpath Value[merge](Nolabel, Nolabel).\"\"\nRaw opens: 1 Rxjs.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives Rxjs.res\nContextPath Value[merge](Nolabel, Nolabel).\"\"\nContextPath Value[merge](Nolabel, Nolabel)\nContextPath Value[merge]\nPath merge\nContextPath Value[merge](Nolabel, Nolabel, Nolabel)->\nContextPath Value[merge](Nolabel, Nolabel, Nolabel)\nContextPath Value[merge]\nPath merge\nCPPipe pathFromEnv:Observable found:true\nPath Rxjs.Observable.\nPath Rxjs.\n[{\n    \"label\": \"->Observable.subscribe\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'t>, 't => unit) => subscription\",\n    \"documentation\": null,\n    \"sortText\": \"subscribe\",\n    \"insertText\": \"->Observable.subscribe\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 26, \"character\": 28}, \"end\": {\"line\": 26, \"character\": 29}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->pipe\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(Observable.t<'t>, operation<'t, 'u>) => Observable.t<'u>\",\n    \"documentation\": null,\n    \"sortText\": \"pipe\",\n    \"insertText\": \"->pipe\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 26, \"character\": 28}, \"end\": {\"line\": 26, \"character\": 29}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->combineLatest\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(\\n  Observable.t<'a>,\\n  
Observable.t<'b>,\\n) => Observable.t<('a, 'b)>\",\n    \"documentation\": null,\n    \"sortText\": \"combineLatest\",\n    \"insertText\": \"->combineLatest\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 26, \"character\": 28}, \"end\": {\"line\": 26, \"character\": 29}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->merge\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(Observable.t<'t>, Observable.t<'t>) => Observable.t<'t>\",\n    \"documentation\": null,\n    \"sortText\": \"merge\",\n    \"insertText\": \"->merge\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 26, \"character\": 28}, \"end\": {\"line\": 26, \"character\": 29}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->pipe2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(\\n  Observable.t<'t>,\\n  operation<'t, 'u>,\\n  operation<'u, 'i>,\\n) => Observable.t<'i>\",\n    \"documentation\": null,\n    \"sortText\": \"pipe2\",\n    \"insertText\": \"->pipe2\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 26, \"character\": 28}, \"end\": {\"line\": 26, \"character\": 29}},\n      \"newText\": \"\"\n      }]\n  }]\n\nComplete src/RxjsCompletion.res 34:30\nposCursor:[34:30] posNoWhite:[34:29] Found expr:[10:17->38:1]\nposCursor:[34:30] posNoWhite:[34:29] Found expr:[10:11->38:1]\nposCursor:[34:30] posNoWhite:[34:29] Found expr:[11:2->34:30]\nposCursor:[34:30] posNoWhite:[34:29] Found expr:[32:2->34:30]\nposCursor:[34:30] posNoWhite:[34:29] Found expr:[34:5->34:30]\nPexp_field [34:5->34:29] _:[38:0->34:30]\nCompletable: Cpath Value[Rxjs, combineLatest](Nolabel, Nolabel).\"\"\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Rxjs, combineLatest](Nolabel, Nolabel).\"\"\nContextPath Value[Rxjs, combineLatest](Nolabel, Nolabel)\nContextPath Value[Rxjs, combineLatest]\nPath Rxjs.combineLatest\nContextPath Value[Rxjs, 
combineLatest](Nolabel, Nolabel, Nolabel)->\nContextPath Value[Rxjs, combineLatest](Nolabel, Nolabel, Nolabel)\nContextPath Value[Rxjs, combineLatest]\nPath Rxjs.combineLatest\nCPPipe pathFromEnv:Observable found:true\nPath Rxjs.Observable.\nPath Rxjs.\n[{\n    \"label\": \"->Rxjs.Observable.subscribe\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(t<'t>, 't => unit) => subscription\",\n    \"documentation\": null,\n    \"sortText\": \"subscribe\",\n    \"insertText\": \"->Rxjs.Observable.subscribe\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 34, \"character\": 29}, \"end\": {\"line\": 34, \"character\": 30}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Rxjs.pipe\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(Observable.t<'t>, operation<'t, 'u>) => Observable.t<'u>\",\n    \"documentation\": null,\n    \"sortText\": \"pipe\",\n    \"insertText\": \"->Rxjs.pipe\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 34, \"character\": 29}, \"end\": {\"line\": 34, \"character\": 30}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Rxjs.combineLatest\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(\\n  Observable.t<'a>,\\n  Observable.t<'b>,\\n) => Observable.t<('a, 'b)>\",\n    \"documentation\": null,\n    \"sortText\": \"combineLatest\",\n    \"insertText\": \"->Rxjs.combineLatest\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 34, \"character\": 29}, \"end\": {\"line\": 34, \"character\": 30}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Rxjs.merge\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(Observable.t<'t>, Observable.t<'t>) => Observable.t<'t>\",\n    \"documentation\": null,\n    \"sortText\": \"merge\",\n    \"insertText\": \"->Rxjs.merge\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 34, \"character\": 29}, \"end\": {\"line\": 34, 
\"character\": 30}},\n      \"newText\": \"\"\n      }]\n  }, {\n    \"label\": \"->Rxjs.pipe2\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"(\\n  Observable.t<'t>,\\n  operation<'t, 'u>,\\n  operation<'u, 'i>,\\n) => Observable.t<'i>\",\n    \"documentation\": null,\n    \"sortText\": \"pipe2\",\n    \"insertText\": \"->Rxjs.pipe2\",\n    \"additionalTextEdits\": [{\n      \"range\": {\"start\": {\"line\": 34, \"character\": 29}, \"end\": {\"line\": 34, \"character\": 30}},\n      \"newText\": \"\"\n      }]\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/SchemaAssets.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/ShadowedBelt.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/SignatureHelp.res.txt",
    "content": "Signature help src/SignatureHelp.res 16:20\nposCursor:[16:19] posNoWhite:[16:18] Found expr:[16:11->16:20]\nPexp_apply ...[16:11->16:19] (...[46:0->16:20])\nposCursor:[16:19] posNoWhite:[16:18] Found expr:[16:11->16:19]\nPexp_ident someFunc:[16:11->16:19]\nCompletable: Cpath Value[someFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someFunc]\nPath someFunc\nargAtCursor: unlabelled<0>\nextracted params: \n[int, ~two: string=?, ~three: unit => unit, ~four: someVariant, unit]\n{\n  \"signatures\": [{\n    \"label\": \"(\\n  int,\\n  ~two: string=?,\\n  ~three: unit => unit,\\n  ~four: someVariant,\\n  unit,\\n) => unit\",\n    \"parameters\": [{\"label\": [4, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [11, 25], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [29, 49], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [53, 71], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C0%2C0%5D)\"}}, {\"label\": [75, 79], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Does stuff. 
\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 19:21\nposCursor:[19:19] posNoWhite:[19:18] Found expr:[19:11->19:21]\nPexp_apply ...[19:11->19:19] (...[19:20->19:21])\nposCursor:[19:19] posNoWhite:[19:18] Found expr:[19:11->19:19]\nPexp_ident someFunc:[19:11->19:19]\nCompletable: Cpath Value[someFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someFunc]\nPath someFunc\nargAtCursor: unlabelled<0>\nextracted params: \n[int, ~two: string=?, ~three: unit => unit, ~four: someVariant, unit]\n{\n  \"signatures\": [{\n    \"label\": \"(\\n  int,\\n  ~two: string=?,\\n  ~three: unit => unit,\\n  ~four: someVariant,\\n  unit,\\n) => unit\",\n    \"parameters\": [{\"label\": [4, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [11, 25], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [29, 49], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [53, 71], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C0%2C0%5D)\"}}, {\"label\": [75, 79], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Does stuff. 
\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 22:29\nposCursor:[22:19] posNoWhite:[22:18] Found expr:[22:11->22:29]\nPexp_apply ...[22:11->22:19] (...[22:20->22:23], ~two22:26->22:29=...[22:26->22:29])\nposCursor:[22:19] posNoWhite:[22:18] Found expr:[22:11->22:19]\nPexp_ident someFunc:[22:11->22:19]\nCompletable: Cpath Value[someFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someFunc]\nPath someFunc\nargAtCursor: ~two\nextracted params: \n[int, ~two: string=?, ~three: unit => unit, ~four: someVariant, unit]\n{\n  \"signatures\": [{\n    \"label\": \"(\\n  int,\\n  ~two: string=?,\\n  ~three: unit => unit,\\n  ~four: someVariant,\\n  unit,\\n) => unit\",\n    \"parameters\": [{\"label\": [4, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [11, 25], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [29, 49], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [53, 71], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C0%2C0%5D)\"}}, {\"label\": [75, 79], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Does stuff. 
\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 25:33\nposCursor:[25:19] posNoWhite:[25:18] Found expr:[25:11->25:35]\nPexp_apply ...[25:11->25:19] (...[25:20->25:23], ~two25:26->25:29=...[25:30->25:35])\nposCursor:[25:19] posNoWhite:[25:18] Found expr:[25:11->25:19]\nPexp_ident someFunc:[25:11->25:19]\nCompletable: Cpath Value[someFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someFunc]\nPath someFunc\nargAtCursor: ~two\nextracted params: \n[int, ~two: string=?, ~three: unit => unit, ~four: someVariant, unit]\n{\n  \"signatures\": [{\n    \"label\": \"(\\n  int,\\n  ~two: string=?,\\n  ~three: unit => unit,\\n  ~four: someVariant,\\n  unit,\\n) => unit\",\n    \"parameters\": [{\"label\": [4, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [11, 25], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [29, 49], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [53, 71], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C0%2C0%5D)\"}}, {\"label\": [75, 79], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Does stuff. 
\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 28:38\nposCursor:[28:19] posNoWhite:[28:18] Found expr:[28:11->28:42]\nPexp_apply ...[28:11->28:19] (...[28:20->28:23], ~two28:26->28:29=...[28:30->28:35], ~four28:38->28:42=...[28:38->28:42])\nposCursor:[28:19] posNoWhite:[28:18] Found expr:[28:11->28:19]\nPexp_ident someFunc:[28:11->28:19]\nCompletable: Cpath Value[someFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someFunc]\nPath someFunc\nargAtCursor: ~four\nextracted params: \n[int, ~two: string=?, ~three: unit => unit, ~four: someVariant, unit]\n{\n  \"signatures\": [{\n    \"label\": \"(\\n  int,\\n  ~two: string=?,\\n  ~three: unit => unit,\\n  ~four: someVariant,\\n  unit,\\n) => unit\",\n    \"parameters\": [{\"label\": [4, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [11, 25], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [29, 49], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [53, 71], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C0%2C0%5D)\"}}, {\"label\": [75, 79], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Does stuff. 
\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 3\n}\n\nSignature help src/SignatureHelp.res 31:42\nposCursor:[31:19] posNoWhite:[31:18] Found expr:[31:11->31:44]\nPexp_apply ...[31:11->31:19] (...[31:20->31:23], ~two31:26->31:29=...[31:30->31:35], ~four31:38->31:42=...[31:43->31:44])\nposCursor:[31:19] posNoWhite:[31:18] Found expr:[31:11->31:19]\nPexp_ident someFunc:[31:11->31:19]\nCompletable: Cpath Value[someFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someFunc]\nPath someFunc\nargAtCursor: ~four\nextracted params: \n[int, ~two: string=?, ~three: unit => unit, ~four: someVariant, unit]\n{\n  \"signatures\": [{\n    \"label\": \"(\\n  int,\\n  ~two: string=?,\\n  ~three: unit => unit,\\n  ~four: someVariant,\\n  unit,\\n) => unit\",\n    \"parameters\": [{\"label\": [4, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [11, 25], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [29, 49], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [53, 71], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C0%2C0%5D)\"}}, {\"label\": [75, 79], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Does stuff. 
\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 3\n}\n\nSignature help src/SignatureHelp.res 34:21\nposCursor:[34:20] posNoWhite:[34:19] Found expr:[34:11->34:21]\nPexp_apply ...[34:11->34:20] (...[46:0->34:21])\nposCursor:[34:20] posNoWhite:[34:19] Found expr:[34:11->34:20]\nPexp_ident otherFunc:[34:11->34:20]\nCompletable: Cpath Value[otherFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[otherFunc]\nPath otherFunc\nargAtCursor: unlabelled<0>\nextracted params: \n[string, int, float]\n{\n  \"signatures\": [{\n    \"label\": \"(string, int, float) => unit\",\n    \"parameters\": [{\"label\": [1, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [9, 12], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [14, 19], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 37:24\nposCursor:[37:20] posNoWhite:[37:19] Found expr:[37:11->37:26]\nPexp_apply ...[37:11->37:20] (...[37:21->37:26])\nposCursor:[37:20] posNoWhite:[37:19] Found expr:[37:11->37:20]\nPexp_ident otherFunc:[37:11->37:20]\nCompletable: Cpath Value[otherFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[otherFunc]\nPath otherFunc\nargAtCursor: unlabelled<0>\nextracted params: \n[string, int, float]\n{\n  \"signatures\": [{\n    \"label\": \"(string, int, float) => unit\",\n    \"parameters\": [{\"label\": [1, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [9, 12], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [14, 19], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 40:35\nposCursor:[40:20] posNoWhite:[40:19] Found expr:[40:11->40:39]\nPexp_apply 
...[40:11->40:20] (...[40:21->40:26], ...[40:28->40:31], ...[40:33->40:38])\nposCursor:[40:20] posNoWhite:[40:19] Found expr:[40:11->40:20]\nPexp_ident otherFunc:[40:11->40:20]\nCompletable: Cpath Value[otherFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[otherFunc]\nPath otherFunc\nargAtCursor: unlabelled<2>\nextracted params: \n[string, int, float]\n{\n  \"signatures\": [{\n    \"label\": \"(string, int, float) => unit\",\n    \"parameters\": [{\"label\": [1, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [9, 12], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [14, 19], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 2\n}\n\nSignature help src/SignatureHelp.res 43:33\nposCursor:[43:29] posNoWhite:[43:28] Found expr:[43:11->43:34]\nPexp_apply ...[43:11->43:29] (~age43:31->43:34=...[43:31->43:34])\nposCursor:[43:29] posNoWhite:[43:28] Found expr:[43:11->43:29]\nPexp_ident Completion.Lib.foo:[43:11->43:29]\nCompletable: Cpath Value[Completion, Lib, foo]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[Completion, Lib, foo]\nPath Completion.Lib.foo\nargAtCursor: ~age\nextracted params: \n[~age: int, ~name: string]\n{\n  \"signatures\": [{\n    \"label\": \"(~age: int, ~name: string) => string\",\n    \"parameters\": [{\"label\": [1, 10], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [12, 25], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 50:24\nposCursor:[50:23] posNoWhite:[50:22] Found expr:[50:11->50:24]\nPexp_apply ...[50:11->50:23] (...[56:0->50:24])\nposCursor:[50:23] posNoWhite:[50:22] Found expr:[50:11->50:23]\nPexp_ident iAmSoSpecial:[50:11->50:23]\nCompletable: Cpath 
Value[iAmSoSpecial]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[iAmSoSpecial]\nPath iAmSoSpecial\nargAtCursor: unlabelled<0>\nextracted params: \n[string]\n{\n  \"signatures\": [{\n    \"label\": \"string => unit\",\n    \"parameters\": [{\"label\": [0, 6], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 53:31\nposCursor:[53:29] posNoWhite:[53:28] Found expr:[53:11->53:31]\nposCursor:[53:29] posNoWhite:[53:28] Found expr:[53:20->53:31]\nPexp_apply ...[53:20->53:29] (...[53:30->53:31])\nposCursor:[53:29] posNoWhite:[53:28] Found expr:[53:20->53:29]\nPexp_ident otherFunc:[53:20->53:29]\nCompletable: Cpath Value[otherFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[otherFunc]\nPath otherFunc\nargAtCursor: unlabelled<1>\nextracted params: \n[string, int, float]\n{\n  \"signatures\": [{\n    \"label\": \"(string, int, float) => unit\",\n    \"parameters\": [{\"label\": [1, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [9, 12], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [14, 19], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 62:17\nposCursor:[62:13] posNoWhite:[62:12] Found expr:[62:11->62:19]\nPexp_apply ...[62:11->62:13] (...[62:14->62:16])\nposCursor:[62:13] posNoWhite:[62:12] Found expr:[62:11->62:13]\nPexp_ident fn:[62:11->62:13]\nCompletable: Cpath Value[fn]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[fn]\nPath fn\nargAtCursor: unlabelled<1>\nextracted params: \n[int, string, int]\n{\n  \"signatures\": [{\n    \"label\": \"(int, string, int) => unit\",\n    \"parameters\": [{\"label\": [1, 4], 
\"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [6, 12], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [14, 17], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 65:17\nposCursor:[65:13] posNoWhite:[65:12] Found expr:[65:11->65:25]\nPexp_apply ...[65:11->65:13] (...[65:14->65:16], ...[65:20->65:24])\nposCursor:[65:13] posNoWhite:[65:12] Found expr:[65:11->65:13]\nPexp_ident fn:[65:11->65:13]\nCompletable: Cpath Value[fn]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[fn]\nPath fn\nargAtCursor: unlabelled<1>\nextracted params: \n[int, string, int]\n{\n  \"signatures\": [{\n    \"label\": \"(int, string, int) => unit\",\n    \"parameters\": [{\"label\": [1, 4], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [6, 12], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [14, 17], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 68:26\nposCursor:[68:13] posNoWhite:[68:12] Found expr:[68:11->68:28]\nPexp_apply ...[68:11->68:13] (...[68:14->68:16], ...[68:18->68:25])\nposCursor:[68:13] posNoWhite:[68:12] Found expr:[68:11->68:13]\nPexp_ident fn:[68:11->68:13]\nCompletable: Cpath Value[fn]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[fn]\nPath fn\nargAtCursor: unlabelled<2>\nextracted params: \n[int, string, int]\n{\n  \"signatures\": [{\n    \"label\": \"(int, string, int) => unit\",\n    \"parameters\": [{\"label\": [1, 4], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [6, 12], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [14, 17], \"documentation\": {\"kind\": \"markdown\", 
\"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 2\n}\n\nSignature help src/SignatureHelp.res 71:29\nposCursor:[71:28] posNoWhite:[71:27] Found expr:[71:11->71:33]\nPexp_apply ...[71:11->71:13] (...[71:16->71:30])\nposCursor:[71:28] posNoWhite:[71:27] Found expr:[71:16->71:30]\nPexp_apply ...[71:16->71:28] (...[71:29->71:30])\nposCursor:[71:28] posNoWhite:[71:27] Found expr:[71:16->71:28]\nPexp_ident iAmSoSpecial:[71:16->71:28]\nCompletable: Cpath Value[iAmSoSpecial]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[iAmSoSpecial]\nPath iAmSoSpecial\nargAtCursor: unlabelled<0>\nextracted params: \n[string]\n{\n  \"signatures\": [{\n    \"label\": \"string => unit\",\n    \"parameters\": [{\"label\": [0, 6], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}]\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 74:40\nposCursor:[74:39] posNoWhite:[74:38] Found expr:[74:11->74:47]\nPexp_apply ...[74:11->74:13] (...[74:16->74:44])\nposCursor:[74:39] posNoWhite:[74:38] Found expr:[74:16->74:44]\nPexp_apply ...[74:16->74:28] (...[74:31->74:41])\nposCursor:[74:39] posNoWhite:[74:38] Found expr:[74:31->74:41]\nPexp_apply ...[74:31->74:39] (...[74:40->74:41])\nposCursor:[74:39] posNoWhite:[74:38] Found expr:[74:31->74:39]\nPexp_ident someFunc:[74:31->74:39]\nCompletable: Cpath Value[someFunc]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someFunc]\nPath someFunc\nargAtCursor: unlabelled<0>\nextracted params: \n[int, ~two: string=?, ~three: unit => unit, ~four: someVariant, unit]\n{\n  \"signatures\": [{\n    \"label\": \"(\\n  int,\\n  ~two: string=?,\\n  ~three: unit => unit,\\n  ~four: someVariant,\\n  unit,\\n) => unit\",\n    \"parameters\": [{\"label\": [4, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [11, 25], \"documentation\": {\"kind\": 
\"markdown\", \"value\": \"\"}}, {\"label\": [29, 49], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [53, 71], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someVariant = One | Two | Three\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C0%2C0%5D)\"}}, {\"label\": [75, 79], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Does stuff. \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 85:16\n{\n  \"signatures\": [{\n    \"label\": \"One({miss?: bool, hit?: bool, stuff?: string})\",\n    \"parameters\": [{\"label\": [0, 0], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [5, 16], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [18, 28], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [30, 44], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" One is cool. \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": -1\n}\n\nSignature help src/SignatureHelp.res 88:18\n{\n  \"signatures\": [{\n    \"label\": \"One({miss?: bool, hit?: bool, stuff?: string})\",\n    \"parameters\": [{\"label\": [0, 0], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [5, 16], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [18, 28], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [30, 44], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" One is cool. 
\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 91:23\n{\n  \"signatures\": [{\n    \"label\": \"One({miss?: bool, hit?: bool, stuff?: string})\",\n    \"parameters\": [{\"label\": [0, 0], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [5, 16], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [18, 28], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [30, 44], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" One is cool. \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 2\n}\n\nSignature help src/SignatureHelp.res 94:15\n{\n  \"signatures\": [{\n    \"label\": \"Two(mySpecialThing)\",\n    \"parameters\": [{\"label\": [4, 18], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype mySpecialThing = string\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C78%2C0%5D)\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Two is fun! \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 97:19\n{\n  \"signatures\": [{\n    \"label\": \"Three(mySpecialThing, array<option<string>>)\",\n    \"parameters\": [{\"label\": [6, 20], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype mySpecialThing = string\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C78%2C0%5D)\"}}, {\"label\": [22, 43], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Three is... 
three \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 100:24\n{\n  \"signatures\": [{\n    \"label\": \"Three(mySpecialThing, array<option<string>>)\",\n    \"parameters\": [{\"label\": [6, 20], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype mySpecialThing = string\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C78%2C0%5D)\"}}, {\"label\": [22, 43], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Three is... three \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 105:9\n{\n  \"signatures\": [{\n    \"label\": \"One({miss?: bool, hit?: bool, stuff?: string})\",\n    \"parameters\": [{\"label\": [0, 0], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [5, 16], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [18, 28], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [30, 44], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" One is cool. \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": -1\n}\n\nSignature help src/SignatureHelp.res 117:42\nargAtCursor: unlabelled<1>\nextracted params: \n[array<int>, int => int]\n{\n  \"signatures\": [{\n    \"label\": \"(array<int>, int => int) => array<int>\",\n    \"parameters\": [{\"label\": [1, 11], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [13, 23], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Main docstring here. 
\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 136:18\nargAtCursor: unlabelled<0>\nextracted params: \n[x, tt]\n{\n  \"signatures\": [{\n    \"label\": \"(x, tt) => string\",\n    \"parameters\": [{\"label\": [1, 2], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype x = {age?: int, name?: string}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C121%2C0%5D)\"}}, {\"label\": [4, 6], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype tt = One\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C127%2C0%5D)\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Some stuff \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 139:22\nargAtCursor: unlabelled<1>\nextracted params: \n[x, tt]\n{\n  \"signatures\": [{\n    \"label\": \"(x, tt) => string\",\n    \"parameters\": [{\"label\": [1, 2], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype x = {age?: int, name?: string}\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C121%2C0%5D)\"}}, {\"label\": [4, 6], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype tt = One\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C127%2C0%5D)\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Some stuff \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 143:8\n{\n  \"signatures\": [{\n    \"label\": \"One({miss?: bool, hit?: bool, stuff?: string})\",\n    \"parameters\": [{\"label\": [0, 0], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [5, 16], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, 
{\"label\": [18, 28], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [30, 44], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" One is cool. \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 2\n}\n\nSignature help src/SignatureHelp.res 145:7\n{\n  \"signatures\": [{\n    \"label\": \"One({miss?: bool, hit?: bool, stuff?: string})\",\n    \"parameters\": [{\"label\": [0, 0], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [5, 16], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [18, 28], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}, {\"label\": [30, 44], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" One is cool. \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 147:7\n{\n  \"signatures\": [{\n    \"label\": \"Two(mySpecialThing)\",\n    \"parameters\": [{\"label\": [4, 18], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype mySpecialThing = string\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C78%2C0%5D)\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Two is fun! 
\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 149:9\n{\n  \"signatures\": [{\n    \"label\": \"Three(mySpecialThing, array<option<string>>)\",\n    \"parameters\": [{\"label\": [6, 20], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype mySpecialThing = string\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C78%2C0%5D)\"}}, {\"label\": [22, 43], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Three is... three \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 151:12\n{\n  \"signatures\": [{\n    \"label\": \"Three(mySpecialThing, array<option<string>>)\",\n    \"parameters\": [{\"label\": [6, 20], \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype mySpecialThing = string\\n```\\nGo to: [Type definition](command:rescript-vscode.go_to_location?%5B%22SignatureHelp.res%22%2C78%2C0%5D)\"}}, {\"label\": [22, 43], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" Three is... 
three \"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 1\n}\n\nSignature help src/SignatureHelp.res 155:14\n{\n  \"signatures\": [{\n    \"label\": \"Ok(bool)\",\n    \"parameters\": [{\"label\": [3, 7], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nresult<bool, 'a>\\n```\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 158:19\n{\n  \"signatures\": [{\n    \"label\": \"Error(string)\",\n    \"parameters\": [{\"label\": [6, 12], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nresult<'a, string>\\n```\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\nSignature help src/SignatureHelp.res 161:16\n{\n  \"signatures\": [{\n    \"label\": \"Some(bool)\",\n    \"parameters\": [{\"label\": [5, 9], \"documentation\": {\"kind\": \"markdown\", \"value\": \"\"}}],\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\noption<bool>\\n```\"}\n  }],\n  \"activeSignature\": 0,\n  \"activeParameter\": 0\n}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Support.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/TableclothMap.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/TableclothMap.resi.txt",
    "content": ""
  },
  {
    "path": "analysis/tests/src/expected/TypeArgCtx.res.txt",
    "content": "Complete src/TypeArgCtx.res 7:36\nposCursor:[7:36] posNoWhite:[7:35] Found pattern:[7:26->7:39]\nPpat_construct Ok:[7:26->7:28]\nposCursor:[7:36] posNoWhite:[7:35] Found pattern:[7:29->7:38]\nCompletable: Cpattern Value[catchResult]->variantPayload::Ok($0), recordField(value)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[catchResult]\nPath catchResult\n[{\n    \"label\": \"{}\",\n    \"kind\": 22,\n    \"tags\": [],\n    \"detail\": \"someTyp\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype someTyp = {test: bool}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/TypeAtPosCompletion.res.txt",
    "content": "Complete src/TypeAtPosCompletion.res 7:17\nposCursor:[7:17] posNoWhite:[7:15] Found expr:[6:16->9:1]\nCompletable: Cexpression CTypeAtPos()->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CTypeAtPos()\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage?: int\\n```\\n\\n```rescript\\ntype optRecord = {name: string, age: option<int>, online: option<bool>}\\n```\"}\n  }, {\n    \"label\": \"online\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nonline?: bool\\n```\\n\\n```rescript\\ntype optRecord = {name: string, age: option<int>, online: option<bool>}\\n```\"}\n  }]\n\nComplete src/TypeAtPosCompletion.res 16:18\nposCursor:[16:18] posNoWhite:[16:16] Found expr:[13:8->19:1]\nPexp_construct One:[13:8->13:11] [13:11->19:1]\nposCursor:[16:18] posNoWhite:[16:16] Found expr:[15:2->18:3]\nCompletable: Cexpression CTypeAtPos()->variantPayload::One($1), recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CTypeAtPos()\n[{\n    \"label\": \"age\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"int\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nage?: int\\n```\\n\\n```rescript\\ntype optRecord = {name: string, age: option<int>, online: option<bool>}\\n```\"}\n  }, {\n    \"label\": \"online\",\n    \"kind\": 5,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\nonline?: bool\\n```\\n\\n```rescript\\ntype optRecord = {name: string, age: option<int>, online: option<bool>}\\n```\"}\n  }]\n\nComplete src/TypeAtPosCompletion.res 22:12\nposCursor:[22:12] posNoWhite:[22:11] Found expr:[21:10->24:1]\nCompletable: Cexpression 
CTypeAtPos()->array\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CTypeAtPos()\n[{\n    \"label\": \"{}\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"optRecord\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"```rescript\\ntype optRecord = {name: string, age: option<int>, online: option<bool>}\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests/src/expected/TypeDefinition.res.txt",
    "content": "TypeDefinition src/TypeDefinition.res 2:9\n{\"uri\": \"TypeDefinition.res\", \"range\": {\"start\": {\"line\": 2, \"character\": 5}, \"end\": {\"line\": 2, \"character\": 11}}}\n\nTypeDefinition src/TypeDefinition.res 5:4\n{\"uri\": \"TypeDefinition.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 0}, \"end\": {\"line\": 0, \"character\": 24}}}\n\nTypeDefinition src/TypeDefinition.res 8:4\n{\"uri\": \"TypeDefinition.res\", \"range\": {\"start\": {\"line\": 2, \"character\": 0}, \"end\": {\"line\": 2, \"character\": 28}}}\n\nTypeDefinition src/TypeDefinition.res 13:4\n{\"uri\": \"TypeDefinition.res\", \"range\": {\"start\": {\"line\": 11, \"character\": 0}, \"end\": {\"line\": 11, \"character\": 26}}}\n\nTypeDefinition src/TypeDefinition.res 16:13\n{\"uri\": \"TypeDefinition.res\", \"range\": {\"start\": {\"line\": 2, \"character\": 0}, \"end\": {\"line\": 2, \"character\": 28}}}\n\nTypeDefinition src/TypeDefinition.res 20:9\n{\"uri\": \"TypeDefinition.res\", \"range\": {\"start\": {\"line\": 0, \"character\": 0}, \"end\": {\"line\": 0, \"character\": 24}}}\n\n"
  },
  {
    "path": "analysis/tests/src/expected/Xform.res.txt",
    "content": "Xform src/Xform.res 6:5\nposCursor:[6:3] posNoWhite:[6:1] Found expr:[6:0->11:1]\nCompletable: Cpath Value[kind]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[kind]\nPath kind\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Replace with switch\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 6, \"character\": 0}, \"end\": {\"line\": 11, \"character\": 1}}\nnewText:\n<--here\nswitch kind {\n| First =>\n  // ^xfm\n  ret(\"First\")\n| _ => ret(\"Not First\")\n}\n\nXform src/Xform.res 13:15\nHit: Replace with switch\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 13, \"character\": 0}, \"end\": {\"line\": 13, \"character\": 79}}\nnewText:\n<--here\nswitch kind {\n| #kind(\"First\", {name: \"abc\", age: 3}) => ret(\"First\")\n| _ => ret(\"Not First\")\n}\n\nXform src/Xform.res 16:5\nHit: Add type annotation\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 16, \"character\": 8}, \"end\": {\"line\": 16, \"character\": 8}}\nnewText:\n        <--here\n        : string\nHit: Add Documentation template\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 16, \"character\": 0}, \"end\": {\"line\": 16, \"character\": 18}}\nnewText:\n<--here\n/**\n\n*/\nlet name = \"hello\"\n\nXform src/Xform.res 19:5\nHit: Add Documentation template\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 19, \"character\": 0}, \"end\": {\"line\": 19, \"character\": 23}}\nnewText:\n<--here\n/**\n\n*/\nlet annotated: int = 34\n\nXform src/Xform.res 26:10\nHit: Add type annotation\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 26, \"character\": 10}, \"end\": {\"line\": 26, \"character\": 11}}\nnewText:\n          <--here\n          (x: option<T.r>)\n\nXform src/Xform.res 30:9\nHit: Add braces to function\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 26, \"character\": 0}, \"end\": {\"line\": 32, \"character\": 3}}\nnewText:\n<--here\nlet foo = x => {\n  //  
    ^xfm\n  switch x {\n  | None => 33\n  | Some(q) => q.T.a + 1\n  //     ^xfm\n  }\n}\n\nXform src/Xform.res 34:21\nHit: Add type annotation\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 34, \"character\": 24}, \"end\": {\"line\": 34, \"character\": 24}}\nnewText:\n                        <--here\n                        : int\n\nXform src/Xform.res 38:5\nHit: Add Documentation template\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 37, \"character\": 0}, \"end\": {\"line\": 38, \"character\": 40}}\nnewText:\n<--here\n/**\n\n*/\n@react.component\nlet make = (~name) => React.string(name)\n\nXform src/Xform.res 41:9\nHit: Add type annotation\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 41, \"character\": 11}, \"end\": {\"line\": 41, \"character\": 11}}\nnewText:\n           <--here\n           : int\n\nXform src/Xform.res 48:21\nposCursor:[48:21] posNoWhite:[48:19] Found expr:[48:15->48:25]\nposCursor:[48:21] posNoWhite:[48:19] Found expr:[48:15->48:25]\nCompletable: Cpath Value[name]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[name]\nPath name\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Add braces to function\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 48, \"character\": 0}, \"end\": {\"line\": 48, \"character\": 25}}\nnewText:\n<--here\nlet noBraces = () => {\n  name\n}\n\nXform src/Xform.res 52:34\nHit: Add braces to function\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 51, \"character\": 0}, \"end\": {\"line\": 54, \"character\": 1}}\nnewText:\n<--here\nlet nested = () => {\n  let _noBraces = (_x, _y, _z) => {\n    \"someNewFunc\"\n  }\n  //                              ^xfm\n}\n\nXform src/Xform.res 62:6\nHit: Add braces to function\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 58, \"character\": 4}, \"end\": {\"line\": 62, \"character\": 7}}\nnewText:\n    <--here\n    let foo = (_x, y, _z) => {\n      switch 
y {\n      | #some => 3\n      | #stuff => 4\n      }\n    }\n\nXform src/Xform.res 72:5\nHit: Extract local module \"ExtractableModule\" to file \"ExtractableModule.res\"\n\nCreateFile: ExtractableModule.res\n\nTextDocumentEdit: ExtractableModule.res\n{\"start\": {\"line\": 0, \"character\": 0}, \"end\": {\"line\": 0, \"character\": 0}}\nnewText:\n<--here\n/** Doc comment. */\ntype t = int\n// A comment here\nlet doStuff = a => a + 1\n// ^xfm\n\n\nTextDocumentEdit: src/Xform.res\n{\"start\": {\"line\": 68, \"character\": 0}, \"end\": {\"line\": 74, \"character\": 1}}\nnewText:\n<--here\n\n\nXform src/Xform.res 80:4\nposCursor:[78:16] posNoWhite:[78:14] Found expr:[78:9->82:1]\nCompletable: Cpath Value[variant]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[variant]\nPath variant\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 80, \"character\": 2}, \"end\": {\"line\": 80, \"character\": 3}}\nnewText:\n  <--here\n  Second | Third | Fourth(_)\n\nXform src/Xform.res 86:4\nposCursor:[84:16] posNoWhite:[84:14] Found expr:[84:9->88:1]\nCompletable: Cpath Value[variant]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[variant]\nPath variant\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 86, \"character\": 2}, \"end\": {\"line\": 86, \"character\": 3}}\nnewText:\n  <--here\n  Third | Fourth(_)\n\nXform src/Xform.res 93:4\nposCursor:[90:16] posNoWhite:[90:14] Found expr:[90:9->95:1]\nCompletable: Cpath Value[variant]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[variant]\nPath variant\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: 
Xform.res\n{\"start\": {\"line\": 93, \"character\": 2}, \"end\": {\"line\": 93, \"character\": 3}}\nnewText:\n  <--here\n  First | Third | Fourth(_)\n\nXform src/Xform.res 101:4\nposCursor:[99:16] posNoWhite:[99:14] Found expr:[99:9->103:1]\nCompletable: Cpath Value[polyvariant]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[polyvariant]\nPath polyvariant\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 101, \"character\": 2}, \"end\": {\"line\": 101, \"character\": 3}}\nnewText:\n  <--here\n  #second | #\"illegal identifier\" | #third(_)\n\nXform src/Xform.res 107:4\nposCursor:[105:16] posNoWhite:[105:14] Found expr:[105:9->109:1]\nCompletable: Cpath Value[polyvariant]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[polyvariant]\nPath polyvariant\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 107, \"character\": 2}, \"end\": {\"line\": 107, \"character\": 3}}\nnewText:\n  <--here\n  #\"illegal identifier\" | #third(_)\n\nXform src/Xform.res 115:4\nposCursor:[113:16] posNoWhite:[113:14] Found expr:[113:9->117:1]\nCompletable: Cpath Value[variantOpt]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[variantOpt]\nPath variantOpt\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 115, \"character\": 2}, \"end\": {\"line\": 115, \"character\": 3}}\nnewText:\n  <--here\n  Some(Second | Third | Fourth(_)) | None\n\nXform src/Xform.res 121:4\nposCursor:[119:16] posNoWhite:[119:14] Found expr:[119:9->123:1]\nCompletable: Cpath Value[variantOpt]\nPackage opens Pervasives.JsxModules.place holder\nResolved 
opens 1 pervasives\nContextPath Value[variantOpt]\nPath variantOpt\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 121, \"character\": 2}, \"end\": {\"line\": 121, \"character\": 3}}\nnewText:\n  <--here\n  Some(Third | Fourth(_)) | None\n\nXform src/Xform.res 127:4\nposCursor:[125:16] posNoWhite:[125:14] Found expr:[125:9->129:1]\nCompletable: Cpath Value[variantOpt]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[variantOpt]\nPath variantOpt\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 127, \"character\": 2}, \"end\": {\"line\": 127, \"character\": 3}}\nnewText:\n  <--here\n  Some(Third | Fourth(_)) | None\n\nXform src/Xform.res 136:4\nposCursor:[133:16] posNoWhite:[133:14] Found expr:[133:9->138:1]\nCompletable: Cpath Value[polyvariantOpt]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[polyvariantOpt]\nPath polyvariantOpt\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 136, \"character\": 2}, \"end\": {\"line\": 136, \"character\": 3}}\nnewText:\n  <--here\n  Some(#\"illegal identifier\" | #second | #third(_))\n\nXform src/Xform.res 142:4\nposCursor:[140:16] posNoWhite:[140:14] Found expr:[140:9->144:1]\nCompletable: Cpath Value[polyvariantOpt]\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[polyvariantOpt]\nPath polyvariantOpt\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nHit: Expand catch-all\n\nTextDocumentEdit: Xform.res\n{\"start\": {\"line\": 142, \"character\": 2}, \"end\": {\"line\": 142, \"character\": 3}}\nnewText:\n  <--here\n  Some(#\"illegal 
identifier\" | #third(_)) | None\n\n"
  },
  {
    "path": "analysis/tests/src/inner/ComponentInner.res",
    "content": "@react.component\nlet make = () => React.null\n"
  },
  {
    "path": "analysis/tests/src/inner/ComponentInner.resi",
    "content": "@react.component\nlet make: unit => React.element\n"
  },
  {
    "path": "analysis/tests/src/inner/ReferencesInner.res",
    "content": "@react.component\nlet make = () => <ComponentInner/>"
  },
  {
    "path": "analysis/tests/src/inner/ReferencesInner.resi",
    "content": "@react.component\nlet make: unit => React.element\n"
  },
  {
    "path": "analysis/tests/src/test.json",
    "content": ""
  },
  {
    "path": "analysis/tests/src/tst.js",
    "content": ""
  },
  {
    "path": "analysis/tests/test.sh",
    "content": "for file in src/*.{res,resi}; do\n  output=\"$(dirname $file)/expected/$(basename $file).txt\"\n  ../../rescript-editor-analysis.exe test $file &> $output\n  # CI. We use LF, and the CI OCaml fork prints CRLF. Convert.\n  if [ \"$RUNNER_OS\" == \"Windows\" ]; then\n    perl -pi -e 's/\\r\\n/\\n/g' -- $output\n  fi\ndone\n\nfor file in not_compiled/*.{res,resi}; do\n  output=\"$(dirname $file)/expected/$(basename $file).txt\"\n  ../../rescript-editor-analysis.exe test $file &> $output\n  # CI. We use LF, and the CI OCaml fork prints CRLF. Convert.\n  if [ \"$RUNNER_OS\" == \"Windows\" ]; then\n    perl -pi -e 's/\\r\\n/\\n/g' -- $output\n  fi\ndone\n\nwarningYellow='\\033[0;33m'\nsuccessGreen='\\033[0;32m'\nreset='\\033[0m'\n\ndiff=$(git ls-files --modified src/expected)\nif [[ $diff = \"\" ]]; then\n  printf \"${successGreen}✅ No unstaged tests difference.${reset}\\n\"\nelse\n  printf \"${warningYellow}⚠️ There are unstaged differences in tests/! Did you break a test?\\n${diff}\\n${reset}\"\n  git --no-pager diff src/expected\n  exit 1\nfi\n"
  },
  {
    "path": "analysis/tests-generic-jsx-transform/Makefile",
    "content": "SHELL = /bin/bash\n\nnode_modules/.bin/rescript:\n\tnpm install\n\nbuild: node_modules/.bin/rescript\n\tnode_modules/.bin/rescript\n\ntest: build\n\t./test.sh\n\nclean:\n\trm -r node_modules lib\n\n.DEFAULT_GOAL := test\n\n.PHONY: clean test\n"
  },
  {
    "path": "analysis/tests-generic-jsx-transform/package.json",
    "content": "{\n  \"scripts\": {\n    \"build\": \"rescript\",\n    \"clean\": \"rescript clean -with-deps\"\n  },\n  \"private\": true,\n  \"dependencies\": {\n    \"rescript\": \"11.1.0-rc.2\"\n  }\n}\n"
  },
  {
    "path": "analysis/tests-generic-jsx-transform/rescript.json",
    "content": "{\n  \"name\": \"test-generic-jsx-transform\",\n  \"sources\": [\n    {\n      \"dir\": \"src\",\n      \"subdirs\": true\n    }\n  ],\n  \"bsc-flags\": [\"-w -33-44-8\"],\n  \"jsx\": { \"module\": \"GenericJsx\" }\n}\n"
  },
  {
    "path": "analysis/tests-generic-jsx-transform/src/GenericJsx.res",
    "content": "/* Below is a number of aliases to the common `Jsx` module */\ntype element = Jsx.element\n\ntype component<'props> = Jsx.component<'props>\n\ntype componentLike<'props, 'return> = Jsx.componentLike<'props, 'return>\n\n@module(\"preact\")\nexternal jsx: (component<'props>, 'props) => element = \"jsx\"\n\n@module(\"preact\")\nexternal jsxKeyed: (component<'props>, 'props, ~key: string=?, @ignore unit) => element = \"jsx\"\n\n@module(\"preact\")\nexternal jsxs: (component<'props>, 'props) => element = \"jsxs\"\n\n@module(\"preact\")\nexternal jsxsKeyed: (component<'props>, 'props, ~key: string=?, @ignore unit) => element = \"jsxs\"\n\n/* These identity functions and static values below are optional, but lets \nyou move things easily to the `element` type. The only required thing to \ndefine though is `array`, which the JSX transform will output. */\nexternal array: array<element> => element = \"%identity\"\n@val external null: element = \"null\"\n\nexternal float: float => element = \"%identity\"\nexternal int: int => element = \"%identity\"\nexternal string: string => element = \"%identity\"\n\n/* These are needed for Fragment (<> </>) support */\ntype fragmentProps = {children?: element}\n\n@module(\"preact\") external jsxFragment: component<fragmentProps> = \"Fragment\"\n\n/* The Elements module is the equivalent to the ReactDOM module in React. This holds things relevant to _lowercase_ JSX elements. */\nmodule Elements = {\n  /* Here you can control what props lowercase JSX elements should have. \n  A base that the React JSX transform uses is provided via JsxDOM.domProps, \n  but you can make this anything. The editor tooling will support \n  autocompletion etc for your specific type. 
*/\n  type props = {\n    testing?: bool,\n    test2?: string,\n    children?: element\n  }\n\n  @module(\"preact\")\n  external jsx: (string, props) => Jsx.element = \"jsx\"\n\n  @module(\"preact\")\n  external div: (string, props) => Jsx.element = \"jsx\"\n\n  @module(\"preact\")\n  external jsxKeyed: (string, props, ~key: string=?, @ignore unit) => Jsx.element = \"jsx\"\n\n  @module(\"preact\")\n  external jsxs: (string, props) => Jsx.element = \"jsxs\"\n\n  @module(\"preact\")\n  external jsxsKeyed: (string, props, ~key: string=?, @ignore unit) => Jsx.element = \"jsxs\"\n\n  external someElement: element => option<element> = \"%identity\"\n}"
  },
  {
    "path": "analysis/tests-generic-jsx-transform/src/GenericJsxCompletion.res",
    "content": "// <div\n//      ^com\n\n// <div testing={}\n//               ^com\n\nmodule SomeComponent = {\n  @jsx.component\n  let make = (~someProp) => {\n    let someString = \"\"\n    let someInt = 12\n    let someArr = [GenericJsx.null]\n    ignore(someInt)\n    ignore(someArr)\n    // someString->st\n    //               ^com\n    open GenericJsx\n    <div>\n      {GenericJsx.string(someProp ++ someString)}\n      <div> {GenericJsx.null} </div>\n      // {someString->st}\n      //                ^com\n    </div>\n  }\n}\n"
  },
  {
    "path": "analysis/tests-generic-jsx-transform/src/expected/GenericJsx.res.txt",
    "content": ""
  },
  {
    "path": "analysis/tests-generic-jsx-transform/src/expected/GenericJsxCompletion.res.txt",
    "content": "Complete src/GenericJsxCompletion.res 0:8\nposCursor:[0:8] posNoWhite:[0:6] Found expr:[0:4->0:7]\nJSX <div:[0:4->0:7] > _children:None\nCompletable: Cjsx([div], \"\", [])\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nPath GenericJsx.Elements.props\n[{\n    \"label\": \"testing\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"test2\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": null\n  }, {\n    \"label\": \"children\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"element\",\n    \"documentation\": null\n  }]\n\nComplete src/GenericJsxCompletion.res 3:17\nposCursor:[3:17] posNoWhite:[3:16] Found expr:[3:4->3:18]\nJSX <div:[3:4->3:7] testing[3:8->3:15]=...[3:16->3:18]> _children:None\nCompletable: Cexpression CJsxPropValue [div] testing->recordBody\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CJsxPropValue [div] testing\nPath GenericJsx.Elements.props\n[{\n    \"label\": \"true\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }, {\n    \"label\": \"false\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"bool\",\n    \"documentation\": null\n  }]\n\nComplete src/GenericJsxCompletion.res 14:21\nposCursor:[14:21] posNoWhite:[14:20] Found expr:[8:13->23:3]\nposCursor:[14:21] posNoWhite:[14:20] Found expr:[8:14->23:3]\nposCursor:[14:21] posNoWhite:[14:20] Found expr:[9:4->22:10]\nposCursor:[14:21] posNoWhite:[14:20] Found expr:[10:4->22:10]\nposCursor:[14:21] posNoWhite:[14:20] Found expr:[11:4->22:10]\nposCursor:[14:21] posNoWhite:[14:20] Found expr:[12:4->22:10]\nposCursor:[14:21] posNoWhite:[14:20] Found expr:[13:4->22:10]\nposCursor:[14:21] posNoWhite:[14:20] Found expr:[14:7->22:10]\nposCursor:[14:21] posNoWhite:[14:20] Found expr:[14:7->14:21]\nCompletable: Cpath Value[someString]->st 
<<jsx>>\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath Value[someString]->st <<jsx>>\nContextPath Value[someString]\nPath someString\nCPPipe pathFromEnv: found:true\nPath GenericJsxCompletion.st\n[{\n    \"label\": \"GenericJsx.string\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `string` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertTextFormat\": 2\n  }]\n\nComplete src/GenericJsxCompletion.res 20:24\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[8:13->23:3]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[8:14->23:3]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[9:4->22:10]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[10:4->22:10]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[11:4->22:10]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[12:4->22:10]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[13:4->22:10]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[16:4->22:10]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[17:5->22:10]\nJSX <div:[17:5->17:8] > _children:17:8\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[17:8->22:4]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[18:7->22:4]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[19:7->22:4]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[19:7->22:4]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[20:10->22:4]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[20:10->22:4]\nposCursor:[20:24] posNoWhite:[20:23] Found expr:[20:10->20:24]\nCompletable: Cpath Value[someString]->st <<jsx>>\nRaw opens: 1 GenericJsx.place holder\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 2 pervasives GenericJsx.res\nContextPath Value[someString]->st <<jsx>>\nContextPath Value[someString]\nPath someString\nCPPipe pathFromEnv: found:true\nPath GenericJsxCompletion.st\n[{\n    \"label\": 
\"string\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"string\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \"Turns `string` into a JSX element so it can be used inside of JSX.\"},\n    \"sortText\": \"A\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests-generic-jsx-transform/test.sh",
    "content": "for file in src/*.res; do\n  output=\"$(dirname $file)/expected/$(basename $file).txt\"\n  ../../rescript-editor-analysis.exe test $file &> $output\n  # CI. We use LF, and the CI OCaml fork prints CRLF. Convert.\n  if [ \"$RUNNER_OS\" == \"Windows\" ]; then\n    perl -pi -e 's/\\r\\n/\\n/g' -- $output\n  fi\ndone\n\nwarningYellow='\\033[0;33m'\nsuccessGreen='\\033[0;32m'\nreset='\\033[0m'\n\ndiff=$(git ls-files --modified src/expected)\nif [[ $diff = \"\" ]]; then\n  printf \"${successGreen}✅ No unstaged tests difference.${reset}\\n\"\nelse\n  printf \"${warningYellow}⚠️ There are unstaged differences in tests/! Did you break a test?\\n${diff}\\n${reset}\"\n  git --no-pager diff src/expected\n  exit 1\nfi\n"
  },
  {
    "path": "analysis/tests-incremental-typechecking/Makefile",
    "content": "SHELL = /bin/bash\n\nnode_modules/.bin/rescript:\n\tnpm install\n\nbuild: node_modules/.bin/rescript\n\tnode_modules/.bin/rescript > /dev/null || true\n\ntest: build\n\t./test.sh\n\nclean:\n\trm -r node_modules lib\n\n.DEFAULT_GOAL := test\n\n.PHONY: clean test\n"
  },
  {
    "path": "analysis/tests-incremental-typechecking/package.json",
    "content": "{\n  \"scripts\": {\n    \"build\": \"rescript\",\n    \"clean\": \"rescript clean -with-deps\"\n  },\n  \"private\": true,\n  \"dependencies\": {\n    \"rescript\": \"11.1.0-rc.2\"\n  }\n}\n"
  },
  {
    "path": "analysis/tests-incremental-typechecking/rescript.json",
    "content": "{\n  \"name\": \"test-generic-jsx-transform\",\n  \"sources\": [\n    {\n      \"dir\": \"src\",\n      \"subdirs\": true\n    }\n  ],\n  \"bsc-flags\": [\"-w -33-44-8\"],\n  \"jsx\": { \"module\": \"GenericJsx\" }\n}\n"
  },
  {
    "path": "analysis/tests-incremental-typechecking/src/ConstructorCompletion__Json.res",
    "content": "let x = Js.Json.Array()\n//                    ^com\n"
  },
  {
    "path": "analysis/tests-incremental-typechecking/src/ConstructorCompletion__Own.res",
    "content": "module WithVariant = {\n  type t = One({miss: bool}) | Two(bool)\n}\n\nlet x = WithVariant.One()\n//                      ^com\n"
  },
  {
    "path": "analysis/tests-incremental-typechecking/src/expected/ConstructorCompletion__Json.res.txt",
    "content": "Complete src/ConstructorCompletion__Json.res 0:22\nposCursor:[0:22] posNoWhite:[0:21] Found expr:[0:8->0:23]\nPexp_construct Js\nJson\nArray:[0:8->0:21] [0:21->0:23]\nCompletable: Cexpression CTypeAtPos()->variantPayload::Array($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CTypeAtPos()\n[{\n    \"label\": \"[]\",\n    \"kind\": 12,\n    \"tags\": [],\n    \"detail\": \"t\",\n    \"documentation\": {\"kind\": \"markdown\", \"value\": \" The JSON data structure \\n\\n```rescript\\ntype t =\\n  | Boolean(bool)\\n  | Null\\n  | String(string)\\n  | Number(float)\\n  | Object(Js.Dict.t<t>)\\n  | Array(array<t>)\\n```\"},\n    \"sortText\": \"A\",\n    \"insertText\": \"[$0]\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests-incremental-typechecking/src/expected/ConstructorCompletion__Own.res.txt",
    "content": "Complete src/ConstructorCompletion__Own.res 4:24\nposCursor:[4:24] posNoWhite:[4:23] Found expr:[4:8->4:25]\nPexp_construct WithVariant\nOne:[4:8->4:23] [4:23->4:25]\nCompletable: Cexpression CTypeAtPos()->variantPayload::One($0)\nPackage opens Pervasives.JsxModules.place holder\nResolved opens 1 pervasives\nContextPath CTypeAtPos()\n[{\n    \"label\": \"{}\",\n    \"kind\": 4,\n    \"tags\": [],\n    \"detail\": \"Inline record\",\n    \"documentation\": null,\n    \"sortText\": \"A\",\n    \"insertText\": \"{$0}\",\n    \"insertTextFormat\": 2\n  }]\n\n"
  },
  {
    "path": "analysis/tests-incremental-typechecking/test.sh",
    "content": "for file in src/*.res; do\n  output=\"$(dirname $file)/expected/$(basename $file).txt\"\n  ../../rescript-editor-analysis.exe test $file &> $output\n  # CI. We use LF, and the CI OCaml fork prints CRLF. Convert.\n  if [ \"$RUNNER_OS\" == \"Windows\" ]; then\n    perl -pi -e 's/\\r\\n/\\n/g' -- $output\n  fi\ndone\n\nwarningYellow='\\033[0;33m'\nsuccessGreen='\\033[0;32m'\nreset='\\033[0m'\n\ndiff=$(git ls-files --modified src/expected)\nif [[ $diff = \"\" ]]; then\n  printf \"${successGreen}✅ No unstaged tests difference.${reset}\\n\"\nelse\n  printf \"${warningYellow}⚠️ There are unstaged differences in tests/! Did you break a test?\\n${diff}\\n${reset}\"\n  git --no-pager diff src/expected\n  exit 1\nfi\n"
  },
  {
    "path": "analysis/vendor/dune",
    "content": "(dirs ext ml res_syntax json js_parser)\n"
  },
  {
    "path": "analysis/vendor/ext/README.md",
    "content": "This folder hosts some of the utils we use in ReScript, internally.\n"
  },
  {
    "path": "analysis/vendor/ext/bs_hash_stubs.ml",
    "content": "\n#ifdef BROWSER \n\n\nlet hash_string : string -> int = Hashtbl.hash\nlet hash_string_int s i = Hashtbl.hash (s,i)\nlet hash_string_small_int :  string -> int  -> int = hash_string_int\nlet hash_stamp_and_name (i:int) (s:string) = Hashtbl.hash(i,s)\nlet hash_int (i:int) = Hashtbl.hash i \nlet string_length_based_compare (x : string ) (y : string) = \n  let len1 = String.length x in \n  let len2 = String.length y in \n  if len1 = len2 then String.compare x y \n  else compare (len1:int) len2\nlet int_unsafe_blit: int array -> int -> int array -> int -> int -> unit = \n  Array.blit\n\n#else\nexternal hash_string :  string -> int = \"caml_bs_hash_string\" [@@noalloc];;\n\nexternal hash_string_int :  string -> int  -> int = \"caml_bs_hash_string_and_int\" [@@noalloc];;\n\nexternal hash_string_small_int :  string -> int  -> int = \"caml_bs_hash_string_and_small_int\" [@@noalloc];;\n\nexternal hash_stamp_and_name : int -> string -> int = \"caml_bs_hash_stamp_and_name\" [@@noalloc];;\n\nexternal hash_small_int : int -> int = \"caml_bs_hash_small_int\" [@@noalloc];;\n\nexternal hash_int :  int  -> int = \"caml_bs_hash_int\" [@@noalloc];;\n\nexternal string_length_based_compare : string -> string -> int  = \"caml_string_length_based_compare\" [@@noalloc];;\n\nexternal    \n  int_unsafe_blit : \n  int array -> int -> int array -> int -> int -> unit = \"caml_int_array_blit\" [@@noalloc];;\n\nexternal set_as_old_file : string -> unit = \"caml_stale_file\"\n#endif\n\n\n"
  },
  {
    "path": "analysis/vendor/ext/bsb_db.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype case = bool\n(** true means upper case*)\n\ntype info =\n  | Intf\n  (* intemediate state *)\n  | Impl\n  | Impl_intf\n\ntype module_info = {\n  mutable info: info;\n  dir: string;\n  case: bool;\n  name_sans_extension: string;\n}\n\ntype map = module_info Map_string.t\n\ntype 'a cat = {mutable lib: 'a; mutable dev: 'a}\n\ntype t = map cat\n(** indexed by the group *)\n"
  },
  {
    "path": "analysis/vendor/ext/bsb_db.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** Store a file called [.bsbuild] that can be communicated between [bsb.exe]\n    and [bsb_helper.exe]. [bsb.exe] stores such data which would be retrieved by\n    [bsb_helper.exe]. 
It is currently used to combine with ocamldep to figure\n    out which module->file it depends on *)\n\ntype case = bool\n\ntype info =\n  | Intf\n  (* intemediate state *)\n  | Impl\n  | Impl_intf\n\ntype module_info = {\n  mutable info: info;\n  dir: string;\n  case: bool;\n  name_sans_extension: string;\n}\n\ntype map = module_info Map_string.t\n\ntype 'a cat = {mutable lib: 'a; mutable dev: 'a}\n\ntype t = map cat\n\n(** store the meta data indexed by {!Bsb_dir_index}\n    {[\n      0 --> lib group\n        1 --> dev 1 group\n                    .\n    ]} *)\n"
  },
  {
    "path": "analysis/vendor/ext/bsc_args.ml",
    "content": "(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype anon_fun = rev_args:string list -> unit\n\ntype string_action =\n  | String_call of (string -> unit)\n  | String_set of string ref\n  | String_optional_set of string option ref\n  | String_list_add of string list ref\n\ntype unit_action =\n  | Unit_call of (unit -> unit)\n  | Unit_lazy of unit lazy_t\n  | Unit_set of bool ref\n  | Unit_clear of bool ref\n\ntype spec = Unit_dummy | Unit of unit_action | String of string_action\n\nexception Bad = Arg.Bad\n\nlet bad_arg s = raise_notrace (Bad s)\n\ntype error = Unknown of string | Missing of string\n\ntype t = spec Ext_spec.t\n\nlet ( +> ) = Ext_buffer.add_string\n\nlet usage_b (buf : Ext_buffer.t) ~usage (speclist : t) =\n  buf +> usage;\n  buf +> \"\\nOptions:\\n\";\n  let max_col = ref 0 in\n  Ext_array.iter speclist (fun (key, _, _) ->\n      if String.length key > !max_col then max_col := String.length key);\n  Ext_array.iter speclist (fun (key, _, doc) ->\n      if not (Ext_string.starts_with doc \"*internal*\") then (\n        buf +> \"  \";\n        buf +> key;\n        buf +> String.make (!max_col - String.length key + 2) ' ';\n        let cur = ref 0 in\n        let doc_length = String.length doc in\n        while !cur < doc_length do\n          match String.index_from_opt doc !cur '\\n' with\n          | None ->\n            if !cur <> 0 then (\n              buf +> \"\\n\";\n              buf +> String.make (!max_col + 4) ' ');\n            buf +> String.sub doc !cur (String.length doc - !cur);\n            cur := doc_length\n          | Some new_line_pos ->\n            if !cur <> 0 then (\n              buf +> \"\\n\";\n              buf +> String.make (!max_col + 4) ' ');\n            buf +> String.sub doc !cur (new_line_pos - !cur);\n            cur := new_line_pos + 1\n        done;\n        buf +> \"\\n\"))\n\nlet stop_raise ~usage ~(error : error) (speclist : t) =\n  let b = Ext_buffer.create 200 in\n  (match error with\n  | Unknown (\"-help\" | \"--help\" | \"-h\") ->\n    usage_b b 
~usage speclist;\n    Ext_buffer.output_buffer stdout b;\n    exit 0\n  | Unknown s ->\n    b +> \"Unknown option \\\"\";\n    b +> s;\n    b +> \"\\\".\\n\"\n  | Missing s ->\n    b +> \"Option \\\"\";\n    b +> s;\n    b +> \"\\\" needs an argument.\\n\");\n  usage_b b ~usage speclist;\n  bad_arg (Ext_buffer.contents b)\n\nlet parse_exn ~usage ~argv ?(start = 1) ?(finish = Array.length argv)\n    (speclist : t) (anonfun : rev_args:string list -> unit) =\n  let current = ref start in\n  let rev_list = ref [] in\n  while !current < finish do\n    let s = argv.(!current) in\n    incr current;\n    if s <> \"\" && s.[0] = '-' then\n      match Ext_spec.assoc3 speclist s with\n      | Some action -> (\n        match action with\n        | Unit_dummy -> ()\n        | Unit r -> (\n          match r with\n          | Unit_set r -> r := true\n          | Unit_clear r -> r := false\n          | Unit_call f -> f ()\n          | Unit_lazy f -> Lazy.force f)\n        | String f -> (\n          if !current >= finish then\n            stop_raise ~usage ~error:(Missing s) speclist\n          else\n            let arg = argv.(!current) in\n            incr current;\n            match f with\n            | String_call f -> f arg\n            | String_set u -> u := arg\n            | String_optional_set s -> s := Some arg\n            | String_list_add s -> s := arg :: !s))\n      | None -> stop_raise ~usage ~error:(Unknown s) speclist\n    else rev_list := s :: !rev_list\n  done;\n  anonfun ~rev_args:!rev_list\n"
  },
  {
    "path": "analysis/vendor/ext/bsc_args.mli",
    "content": "(* Copyright (C) 2020- Authors of ReScript\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype anon_fun = rev_args:string list -> unit\n\ntype string_action =\n  | String_call of (string -> unit)\n  | String_set of string ref\n  | String_optional_set of string option ref\n  | String_list_add of string list ref\n\ntype unit_action =\n  | Unit_call of (unit -> unit)\n  | Unit_lazy of unit lazy_t\n  | Unit_set of bool ref\n  | Unit_clear of bool ref\n\ntype spec = Unit_dummy | Unit of unit_action | String of string_action\n\ntype t = (string * spec * string) array\n\nexception Bad of string\n\nval bad_arg : string -> 'a\n\nval parse_exn :\n  usage:string ->\n  argv:string array ->\n  ?start:int ->\n  ?finish:int ->\n  t ->\n  (rev_args:string list -> unit) ->\n  unit\n"
  },
  {
    "path": "analysis/vendor/ext/bsc_warnings.ml",
    "content": "(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript \n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** See the meanings of the warning codes here:\n    https://caml.inria.fr/pub/docs/manual-ocaml/comp.html#sec281\n\n    - 30 Two labels or constructors of the same name are defined in two mutually\n      recursive types.\n    - 40 Constructor or label name used out of scope.\n\n    - 6 Label omitted in function application.\n    - 7 Method overridden.\n    - 9 Missing fields in a record pattern. 
(*Not always desired, in some cases\n      need [@@@warning \"+9\"] *)\n    - 27 Innocuous unused variable: unused variable that is not bound with let\n      nor as, and doesn’t start with an underscore (_) character.\n    - 29 Unescaped end-of-line in a string constant (non-portable code).\n    - 32 .. 39 Unused blabla\n    - 44 Open statement shadows an already defined identifier.\n    - 45 Open statement shadows an already defined label or constructor.\n    - 48 Implicit elimination of optional arguments.\n      https://caml.inria.fr/mantis/view.php?id=6352\n    - 101 (bsb-specific) unsafe polymorphic comparison. *)\n\n(*\n  The purpose of default warning set is to make it strict while\n  not annoy user too much\n\n  -4 Fragile pattern matching: matching that will remain complete even if additional con- structors are added to one of the variant types matched.\n  We turn it off since common pattern\n   {[\n     match x with | A -> .. |  _ -> false\n   ]}\n\n   -9 Missing fields in a record pattern.\n   only in some special cases that we need all fields being listed\n\n   We encourage people to write code based on type based disambigution\n   40,41,42 are enabled for compatiblity reasons  \n   -40 Constructor or label name used out of scope\n   This is intentional, we should never warn it\n   - 41 Ambiguous constructor or label name.\n     It is turned off since it prevents such cases below:\n   {[\n     type a = A |B \n     type b = A | B | C\n   ]}\n   - 42 Disambiguated constructor or label name (compatibility warning).\n\n   - 50 Unexpected documentation comment.\n\n   - 102 Bs_polymorphic_comparison\n*)\n(* If you change this, don't forget to adapt docs/docson/build-schema.json as well. *)\nlet defaults_w = \"+a-4-9-20-40-41-42-50-61-102\"\n\nlet defaults_warn_error = \"-a+5+6+101+109\"\n(*TODO: add +10*)\n"
  },
  {
    "path": "analysis/vendor/ext/config.ml",
    "content": "let version = \"4.06.1+BS\"\n\nlet standard_library =\n  let ( // ) = Filename.concat in\n  Filename.dirname Sys.executable_name\n  // Filename.parent_dir_name // \"lib\" // \"ocaml\"\n\nlet standard_library_default = standard_library\n\nlet syntax_kind = ref `ml\n\nlet bs_only = ref true\n\nlet unsafe_empty_array = ref false\n\ntype uncurried = Legacy | Uncurried | Swap\nlet uncurried = ref Legacy\n\nand cmi_magic_number = \"Caml1999I022\"\n\nand ast_impl_magic_number = \"Caml1999M022\"\n\nand ast_intf_magic_number = \"Caml1999N022\"\n\nand cmt_magic_number = \"Caml1999T022\"\n\nlet load_path = ref ([] : string list)\n\nlet interface_suffix = ref \".mli\"\n\n(* This is normally the same as in obj.ml, but we have to define it\n   separately because it can differ when we're in the middle of a\n   bootstrapping phase. *)\n\nlet print_config oc =\n  let p name valu = Printf.fprintf oc \"%s: %s\\n\" name valu in\n  p \"version\" version;\n  p \"standard_library_default\" standard_library_default;\n  p \"standard_library\" standard_library;\n\n  (* print the magic number *)\n  p \"cmi_magic_number\" cmi_magic_number;\n  p \"ast_impl_magic_number\" ast_impl_magic_number;\n  p \"ast_intf_magic_number\" ast_intf_magic_number;\n  p \"cmt_magic_number\" cmt_magic_number;\n  flush oc\n"
  },
  {
    "path": "analysis/vendor/ext/config.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* System configuration *)\n\nval version : string\n(* The current version number of the system *)\n\nval standard_library : string\n(* The directory containing the standard libraries *)\n\nval syntax_kind : [`ml | `rescript] ref\n\nval bs_only : bool ref\n\nval unsafe_empty_array : bool ref\n\nval load_path : string list ref\n(* Directories in the search path for .cmi and .cmo files *)\n\nval interface_suffix : string ref\n(* Suffix for interface file names *)\n\nval cmi_magic_number : string\n\n(* Magic number for compiled interface files *)\nval ast_intf_magic_number : string\n\n(* Magic number for file holding an interface syntax tree *)\nval ast_impl_magic_number : string\n\n(* Magic number for file holding an implementation syntax tree *)\nval cmt_magic_number : string\n(* Magic number for compiled interface files *)\n\nval print_config : out_channel -> unit\n\ntype 
uncurried = Legacy | Uncurried | Swap\nval uncurried : uncurried ref\n"
  },
  {
    "path": "analysis/vendor/ext/dune",
    "content": "(library\n (name ext)\n (wrapped false)\n (preprocess\n  (action\n   (run %{bin:cppo} %{env:CPPO_FLAGS=} %{input-file})))\n (flags\n  (:standard -w +a-4-42-40-9-48-70))\n (foreign_stubs\n  (language c)\n  (names ext_basic_hash_stubs)))\n\n(ocamllex ext_json_parse)\n\n(rule\n (targets hash_set_string.ml)\n (deps hash_set.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_STRING %{deps} -o %{targets})))\n\n(rule\n (targets hash_set_int.ml)\n (deps hash_set.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_INT %{deps} -o %{targets})))\n\n(rule\n (targets hash_set_ident.ml)\n (deps hash_set.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_IDENT %{deps} -o %{targets})))\n\n(rule\n (targets hash_set.ml)\n (deps hash_set.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_FUNCTOR %{deps} -o %{targets})))\n\n(rule\n (targets hash_set_poly.ml)\n (deps hash_set.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_POLY %{deps} -o %{targets})))\n\n(rule\n (targets vec_int.ml)\n (deps vec.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_INT %{deps} -o %{targets})))\n\n(rule\n (targets vec.ml)\n (deps vec.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_FUNCTOR %{deps} -o %{targets})))\n\n(rule\n (targets set_string.ml)\n (deps set.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_STRING %{deps} -o %{targets})))\n\n(rule\n (targets set_int.ml)\n (deps set.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_INT %{deps} -o %{targets})))\n\n(rule\n (targets set_ident.ml)\n (deps set.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_IDENT %{deps} -o %{targets})))\n\n(rule\n (targets map_string.ml)\n (deps map.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_STRING %{deps} -o %{targets})))\n\n(rule\n (targets map_int.ml)\n (deps map.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_INT %{deps} -o %{targets})))\n\n(rule\n (targets map_ident.ml)\n (deps map.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_IDENT %{deps} -o %{targets})))\n\n(rule\n (targets ordered_hash_map_local_ident.ml)\n (deps 
ordered_hash_map.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_LOCAL_IDENT %{deps} -o %{targets})))\n\n(rule\n (targets hash_string.ml)\n (deps hash.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_STRING %{deps} -o %{targets})))\n\n(rule\n (targets hash_int.ml)\n (deps hash.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_INT %{deps} -o %{targets})))\n\n(rule\n (targets hash_ident.ml)\n (deps hash.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_IDENT %{deps} -o %{targets})))\n\n(rule\n (targets hash.ml)\n (deps hash.cppo.ml)\n (action\n  (run %{bin:cppo} -D TYPE_FUNCTOR %{deps} -o %{targets})))\n"
  },
  {
    "path": "analysis/vendor/ext/encoding.md",
    "content": "```c\nCAMLprim value caml_ml_string_length(value s)\n{\n  mlsize_t temp;\n  temp = Bosize_val(s) - 1;\n  Assert (Byte (s, temp - Byte (s, temp)) == 0);\n  return Val_long(temp - Byte (s, temp));\n}\n```\n\nLike all heap blocks, strings contain a header defining the size of\nthe string in machine words. The actual block contents are:\n\n- the characters of the string\n- padding bytes to align the block on a word boundary.  \n  The padding is one of\n  00\n  00 01\n  00 00 02\n  00 00 00 03\n  on a 32-bit machine, and up to 00 00 .... 07 on a 64-bit machine.\n\nThus, the string is always zero-terminated, and its length can be\ncomputed as follows:\n\n    number_of_words_in_block * sizeof(word) - last_byte_of_block - 1\n\nThe null-termination comes handy when passing a string to C, but is\nnot relied upon to compute the length (in Caml), allowing the string\nto contain nulls.\n\nso, suppose\n\n\"\" -> `8 - 7 - 1 `\n\"a\" -> `8 - 6 - 1`\n\"0123456\" -> `8 - 0 - 1`\n\"01234567\" -> `2 * 8 - 7 - 1`\n"
  },
  {
    "path": "analysis/vendor/ext/ext_array.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nexternal ( .!() ) : 'a array -> int -> 'a = \"%array_unsafe_get\"\n\nexternal ( .!()<- ) : 'a array -> int -> 'a -> unit = \"%array_unsafe_set\"\n\nlet reverse_range a i len =\n  if len = 0 then ()\n  else\n    for k = 0 to (len - 1) / 2 do\n      let t = a.!(i + k) in\n      a.!(i + k) <- a.!(i + len - 1 - k);\n      a.!(i + len - 1 - k) <- t\n    done\n\nlet reverse_in_place a = reverse_range a 0 (Array.length a)\n\nlet reverse a =\n  let b_len = Array.length a in\n  if b_len = 0 then [||]\n  else\n    let b = Array.copy a in\n    for i = 0 to b_len - 1 do\n      Array.unsafe_set b i (Array.unsafe_get a (b_len - 1 - i))\n    done;\n    b\n\nlet reverse_of_list = function\n  | [] -> [||]\n  | hd :: tl ->\n    let len = List.length tl in\n    let a = Array.make (len + 1) hd in\n    let rec fill i = function\n      | [] -> a\n      | hd :: tl ->\n        Array.unsafe_set a i hd;\n        fill (i - 1) tl\n    in\n    fill (len - 1) tl\n\nlet filter a f =\n  let arr_len = Array.length a in\n  let rec aux acc i =\n    if i = arr_len then reverse_of_list acc\n    else\n      let v = Array.unsafe_get a i in\n      if f v then aux (v :: acc) (i + 1) else aux acc (i + 1)\n  in\n  aux [] 0\n\nlet filter_map a (f : _ -> _ option) =\n  let arr_len = Array.length a in\n  let rec aux acc i =\n    if i = arr_len then reverse_of_list acc\n    else\n      let v = Array.unsafe_get a i in\n      match f v with\n      | Some v -> aux (v :: acc) (i + 1)\n      | None -> aux acc (i + 1)\n  in\n  aux [] 0\n\nlet range from to_ =\n  if from > to_ then invalid_arg \"Ext_array.range\"\n  else Array.init (to_ - from + 1) (fun i -> i + from)\n\nlet map2i f a b =\n  let len = Array.length a in\n  if len <> Array.length b then invalid_arg \"Ext_array.map2i\"\n  else Array.mapi (fun i a -> f i a (Array.unsafe_get b i)) a\n\nlet rec tolist_f_aux a f i res =\n  if i < 0 then res\n  else\n    let v = Array.unsafe_get a i in\n    tolist_f_aux a f (i - 1) (f v :: res)\n\nlet to_list_f a f = 
tolist_f_aux a f (Array.length a - 1) []\n\nlet rec tolist_aux a f i res =\n  if i < 0 then res\n  else\n    tolist_aux a f (i - 1)\n      (match f a.!(i) with\n      | Some v -> v :: res\n      | None -> res)\n\nlet to_list_map a f = tolist_aux a f (Array.length a - 1) []\n\nlet to_list_map_acc a acc f = tolist_aux a f (Array.length a - 1) acc\n\nlet of_list_map a f =\n  match a with\n  | [] -> [||]\n  | [a0] ->\n    let b0 = f a0 in\n    [|b0|]\n  | [a0; a1] ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    [|b0; b1|]\n  | [a0; a1; a2] ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    let b2 = f a2 in\n    [|b0; b1; b2|]\n  | [a0; a1; a2; a3] ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    let b2 = f a2 in\n    let b3 = f a3 in\n    [|b0; b1; b2; b3|]\n  | [a0; a1; a2; a3; a4] ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    let b2 = f a2 in\n    let b3 = f a3 in\n    let b4 = f a4 in\n    [|b0; b1; b2; b3; b4|]\n  | a0 :: a1 :: a2 :: a3 :: a4 :: tl ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    let b2 = f a2 in\n    let b3 = f a3 in\n    let b4 = f a4 in\n    let len = List.length tl + 5 in\n    let arr = Array.make len b0 in\n    Array.unsafe_set arr 1 b1;\n    Array.unsafe_set arr 2 b2;\n    Array.unsafe_set arr 3 b3;\n    Array.unsafe_set arr 4 b4;\n    let rec fill i = function\n      | [] -> arr\n      | hd :: tl ->\n        Array.unsafe_set arr i (f hd);\n        fill (i + 1) tl\n    in\n    fill 5 tl\n\n(** {[\n      # rfind_with_index [|1;2;3|] (=) 2;;\n      - : int = 1\n                # rfind_with_index [|1;2;3|] (=) 1;;\n      - : int = 0\n                # rfind_with_index [|1;2;3|] (=) 3;;\n      - : int = 2\n                # rfind_with_index [|1;2;3|] (=) 4;;\n      - : int = -1\n    ]} *)\nlet rfind_with_index arr cmp v =\n  let len = Array.length arr in\n  let rec aux i =\n    if i < 0 then i\n    else if cmp (Array.unsafe_get arr i) v then i\n    else aux (i - 1)\n  in\n  aux (len - 1)\n\ntype 'a split = No_split | Split of 'a 
array * 'a array\n\nlet find_with_index arr cmp v =\n  let len = Array.length arr in\n  let rec aux i len =\n    if i >= len then -1\n    else if cmp (Array.unsafe_get arr i) v then i\n    else aux (i + 1) len\n  in\n  aux 0 len\n\nlet find_and_split arr cmp v : _ split =\n  let i = find_with_index arr cmp v in\n  if i < 0 then No_split\n  else\n    Split (Array.sub arr 0 i, Array.sub arr (i + 1) (Array.length arr - i - 1))\n\n(** TODO: available since 4.03, use {!Array.exists} *)\n\nlet exists a p =\n  let n = Array.length a in\n  let rec loop i =\n    if i = n then false\n    else if p (Array.unsafe_get a i) then true\n    else loop (succ i)\n  in\n  loop 0\n\nlet is_empty arr = Array.length arr = 0\n\nlet rec unsafe_loop index len p xs ys =\n  if index >= len then true\n  else\n    p (Array.unsafe_get xs index) (Array.unsafe_get ys index)\n    && unsafe_loop (succ index) len p xs ys\n\nlet for_alli a p =\n  let n = Array.length a in\n  let rec loop i =\n    if i = n then true\n    else if p i (Array.unsafe_get a i) then loop (succ i)\n    else false\n  in\n  loop 0\n\nlet for_all2_no_exn xs ys p =\n  let len_xs = Array.length xs in\n  let len_ys = Array.length ys in\n  len_xs = len_ys && unsafe_loop 0 len_xs p xs ys\n\nlet map a f =\n  let open Array in\n  let l = length a in\n  if l = 0 then [||]\n  else\n    let r = make l (f (unsafe_get a 0)) in\n    for i = 1 to l - 1 do\n      unsafe_set r i (f (unsafe_get a i))\n    done;\n    r\n\nlet iter a f =\n  let open Array in\n  for i = 0 to length a - 1 do\n    f (unsafe_get a i)\n  done\n\nlet fold_left a x f =\n  let open Array in\n  let r = ref x in\n  for i = 0 to length a - 1 do\n    r := f !r (unsafe_get a i)\n  done;\n  !r\n\nlet get_or arr i cb =\n  if i >= 0 && i < Array.length arr then Array.unsafe_get arr i else cb ()\n"
  },
  {
    "path": "analysis/vendor/ext/ext_array.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nval reverse_range : 'a array -> int -> int -> unit\n(** Some utilities for {!Array} operations *)\n\nval reverse_in_place : 'a array -> unit\n\nval reverse : 'a array -> 'a array\n\nval reverse_of_list : 'a list -> 'a array\n\nval filter : 'a array -> ('a -> bool) -> 'a array\n\nval filter_map : 'a array -> ('a -> 'b option) -> 'b array\n\nval range : int -> int -> int array\n\nval map2i : (int -> 'a -> 'b -> 'c) -> 'a array -> 'b array -> 'c array\n\nval to_list_f : 'a array -> ('a -> 'b) -> 'b list\n\nval to_list_map : 'a array -> ('a -> 'b option) -> 'b list\n\nval to_list_map_acc : 'a array -> 'b list -> ('a -> 'b option) -> 'b list\n\nval of_list_map : 'a list -> ('a -> 'b) -> 'b array\n\nval rfind_with_index : 'a array -> ('a -> 'b -> bool) -> 'b -> int\n\ntype 'a split = No_split | Split of 'a array * 'a array\n\nval find_and_split : 'a array -> ('a -> 'b -> bool) -> 'b -> 'a split\n\nval exists : 'a array -> ('a -> bool) -> bool\n\nval is_empty : 'a array -> bool\n\nval for_all2_no_exn : 'a array -> 'b array -> ('a -> 'b -> bool) -> bool\n\nval for_alli : 'a array -> (int -> 'a -> bool) -> bool\n\nval map : 'a array -> ('a -> 'b) -> 'b array\n\nval iter : 'a array -> ('a -> unit) -> unit\n\nval fold_left : 'b array -> 'a -> ('a -> 'b -> 'a) -> 'a\n\nval get_or : 'a array -> int -> (unit -> 'a) -> 'a\n"
  },
  {
    "path": "analysis/vendor/ext/ext_basic_hash_stubs.c",
    "content": "#include \"caml/hash.h\"\n#include \"caml/mlvalues.h\"\n#include <string.h>\n#include <stdint.h>\n#include \"caml/memory.h\"\n#include \"caml/osdeps.h\"\n#include \"caml/signals.h\"\n#include \"caml/misc.h\"\n#include <sys/stat.h>\ntypedef uint32_t uint32;\n\n#define FINAL_MIX(h) \\\n  h ^= h >> 16; \\\n  h *= 0x85ebca6b; \\\n  h ^= h >> 13; \\\n  h *= 0xc2b2ae35; \\\n  h ^= h >> 16;\n\n#define ROTL32(x,n) ((x) << n | (x) >> (32-n))\n\n#define MIX(h,d) \\\n  d *= 0xcc9e2d51; \\\n  d = ROTL32(d, 15); \\\n  d *= 0x1b873593; \\\n  h ^= d; \\\n  h = ROTL32(h, 13); \\\n  h = h * 5 + 0xe6546b64;\n\nCAMLprim value caml_bs_hash_string (value obj){\n\n  uint32 h = 0;\n  h = caml_hash_mix_string(h,obj);\n  FINAL_MIX(h);\n  return Val_int(h & 0x3FFFFFFFU);\n}\n\nCAMLprim value caml_bs_hash_int  ( value d){\n  uint32 h = 0; \n  h = caml_hash_mix_intnat(h,d);\n  FINAL_MIX(h);\n  return Val_int(h & 0x3FFFFFFFU);\n}\n\nCAMLprim value caml_bs_hash_string_and_int  (value obj, value d){\n  uint32 h = 0; \n  h = caml_hash_mix_string(h,obj);\n  h = caml_hash_mix_intnat(h,d);\n  FINAL_MIX(h);\n  return Val_int(h & 0x3FFFFFFFU);\n}\n\nCAMLprim value caml_bs_hash_string_and_small_int(value obj, value d){\n  uint32 h = 0;\n  h = caml_hash_mix_string(h,obj);\n  MIX(h,d);\n  FINAL_MIX(h);\n  return Val_int(h & 0x3FFFFFFFU);\n}\n\nCAMLprim value caml_bs_hash_small_int(value d){\n  uint32 h = 0; \n  // intnat stamp = Long_val(d); \n  // FIXME: unused value\n  MIX(h,d);\n  FINAL_MIX(h);\n  return Val_int(h & 0x3FFFFFFFU);\n}\n\nCAMLprim value caml_int_array_blit(\n  value a1, value ofs1, \n  value a2, value ofs2,\n  value n)\n  {\n     memmove((value *)&Field(a2, Long_val(ofs2)),\n            (value *)&Field(a1, Long_val(ofs1)),\n            Long_val(n) * sizeof(value));\n    return Val_unit;\n  }\n/*\n * http://stackoverflow.com/questions/664014/what-integer-hash-function-are-good-that-accepts-an-integer-hash-key\n * https://en.wikipedia.org/wiki/MurmurHash\n * 
http://zimbry.blogspot.it/2011/09/better-bit-mixing-improving-on.html\n * http://eternallyconfuzzled.com/tuts/algorithms/jsw_tut_hashing.aspx\n * We gave up the idea to  hash Ident.t (take only one argument)\n * customized hash function for Ident.t, first \n * argument is stamp, second argument is string \n * It's not just introducing c stubs, we need make a clear line\n * which part of our libraries depends on Ident.t\n */\nCAMLprim value caml_bs_hash_stamp_and_name(value d, value obj ){\n  uint32 h = 0;\n  intnat stamp = Long_val(d); \n  if (stamp){\n    MIX(h,d);\n  } else {\n    h = caml_hash_mix_string(h,obj);\n  }\n  \n  FINAL_MIX(h);\n  return Val_int(h & 0x3FFFFFFFU);\n}\n\n\n\n\n\n// https://github.com/ocaml/ocaml/pull/255/files\n#define Val_long_clang(x)     ((intnat) (((uintnat)(x) << 1)) + 1)\n\nCAMLprim value caml_string_length_based_compare(value s1, value s2)\n{\n  mlsize_t len1, len2;\n  mlsize_t temp;\n  int res;\n  if (s1 == s2) return Val_int(0);\n  \n  len1 = Wosize_val(s1);\n  temp = Bsize_wsize(len1) - 1 ;\n  len1 = temp - Byte(s1,temp);\n\n  len2 = Wosize_val(s2);\n  temp = Bsize_wsize(len2) - 1 ; \n  len2 = temp - Byte(s2,temp);\n\n  if (len1 != len2) \n  { \n    if (len1 < len2 ) {\n      return Val_long_clang(-1);\n    } else {\n      return Val_long_clang(1);\n    }\n  }\n  else {\n    \n    res = memcmp(String_val(s1), String_val(s2), len1);\n    if(res < 0) return Val_long_clang(-1); \n    if(res > 0) return Val_long_clang(1);\n    return Val_long_clang(0);\n    \n  }\n}\n\n\n\n#include <sys/time.h>\n#ifdef _WIN32\n#include <sys/utime.h>\nCAMLprim value caml_stale_file(value path)\n{\n  CAMLparam1(path);\n  struct _utimbuf tv;\n  char * p = caml_stat_strdup(String_val(path));\n  tv.modtime = 0;  \n  caml_enter_blocking_section();\n  _utime(p, &tv);\n  caml_leave_blocking_section();\n  caml_stat_free(p);\n  CAMLreturn(Val_unit);\n}\n#else\nCAMLprim value caml_stale_file(value path)\n{\n  CAMLparam1(path);\n  struct timeval tv[2];\n  char 
* p = caml_stat_strdup_to_os(String_val(path));\n  // unicode friendly\n  tv[0].tv_sec = 0.0;\n  tv[0].tv_usec = 0.0;\n  tv[1].tv_sec = 0.0;\n  tv[1].tv_usec = 0.0;\n  // caml_enter_blocking_section();\n  // not needed for single thread\n  utimes(p, tv);\n  // caml_leave_blocking_section();\n  // not needed for single thread\n  caml_stat_free(p);\n  // TODO: error checking\n  CAMLreturn(Val_unit);\n}\n#endif\n\n\nCAMLprim value caml_sys_is_directory_no_exn(value name)\n{\n  CAMLparam1(name);\n#ifdef _WIN32\n  struct _stati64 st;\n#else\n  struct stat st;\n#endif\n  char_os * p;\n  int ret;\n\n  \n  if(!caml_string_is_c_safe(name)){\n    CAMLreturn(Val_false);\n  }\n\n  p = caml_stat_strdup_to_os(String_val(name));\n  caml_enter_blocking_section();\n  ret = stat_os(p, &st);\n  caml_leave_blocking_section();\n  caml_stat_free(p);\n\n  if (ret == -1) CAMLreturn(Val_false);\n#ifdef S_ISDIR\n  CAMLreturn(Val_bool(S_ISDIR(st.st_mode)));\n#else\n  CAMLreturn(Val_bool(st.st_mode & S_IFDIR));\n#endif\n}\n/* local variables: */\n/* compile-command: \"ocamlopt.opt -c ext_basic_hash_stubs.c\" */\n/* end: */\n\n\n"
  },
  {
    "path": "analysis/vendor/ext/ext_buffer.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*    Pierre Weis and Xavier Leroy, projet Cristal, INRIA Rocquencourt    *)\n(*                                                                        *)\n(*   Copyright 1999 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Extensible buffers *)\n\ntype t = {mutable buffer: bytes; mutable position: int; mutable length: int}\n\nlet create n =\n  let n = if n < 1 then 1 else n in\n  let s = Bytes.create n in\n  {buffer = s; position = 0; length = n}\n\nlet contents b = Bytes.sub_string b.buffer 0 b.position\n(* let to_bytes b = Bytes.sub b.buffer 0 b.position  *)\n\n(* let sub b ofs len =\n   if ofs < 0 || len < 0 || ofs > b.position - len\n   then invalid_arg \"Ext_buffer.sub\"\n   else Bytes.sub_string b.buffer ofs len *)\n\n(* let blit src srcoff dst dstoff len =\n   if len < 0 || srcoff < 0 || srcoff > src.position - len\n             || dstoff < 0 || dstoff > (Bytes.length dst) - len\n   then invalid_arg \"Ext_buffer.blit\"\n   else\n    Bytes.unsafe_blit src.buffer srcoff dst dstoff len *)\n\nlet length b = b.position\n\nlet is_empty b = b.position = 0\n\nlet clear b = b.position <- 
0\n\n(* let reset b =\n   b.position <- 0; b.buffer <- b.initial_buffer;\n   b.length <- Bytes.length b.buffer *)\n\nlet resize b more =\n  let len = b.length in\n  let new_len = ref len in\n  while b.position + more > !new_len do\n    new_len := 2 * !new_len\n  done;\n  let new_buffer = Bytes.create !new_len in\n  (* PR#6148: let's keep using [blit] rather than [unsafe_blit] in\n     this tricky function that is slow anyway. *)\n  Bytes.blit b.buffer 0 new_buffer 0 b.position;\n  b.buffer <- new_buffer;\n  b.length <- !new_len;\n  assert (b.position + more <= b.length)\n\nlet[@inline] add_char b c =\n  let pos = b.position in\n  if pos >= b.length then resize b 1;\n  Bytes.unsafe_set b.buffer pos c;\n  b.position <- pos + 1\n\n(* let add_substring b s offset len =\n   if offset < 0 || len < 0 || offset > String.length s - len\n   then invalid_arg \"Ext_buffer.add_substring/add_subbytes\";\n   let new_position = b.position + len in\n   if new_position > b.length then resize b len;\n   Ext_bytes.unsafe_blit_string s offset b.buffer b.position len;\n   b.position <- new_position *)\n\n(* let add_subbytes b s offset len =\n   add_substring b (Bytes.unsafe_to_string s) offset len *)\n\nlet add_string b s =\n  let len = String.length s in\n  let new_position = b.position + len in\n  if new_position > b.length then resize b len;\n  Ext_bytes.unsafe_blit_string s 0 b.buffer b.position len;\n  b.position <- new_position\n\n(* TODO: micro-optimzie *)\nlet add_string_char b s c =\n  let s_len = String.length s in\n  let len = s_len + 1 in\n  let new_position = b.position + len in\n  if new_position > b.length then resize b len;\n  let b_buffer = b.buffer in\n  Ext_bytes.unsafe_blit_string s 0 b_buffer b.position s_len;\n  Bytes.unsafe_set b_buffer (new_position - 1) c;\n  b.position <- new_position\n\nlet add_char_string b c s =\n  let s_len = String.length s in\n  let len = s_len + 1 in\n  let new_position = b.position + len in\n  if new_position > b.length then resize b 
len;\n  let b_buffer = b.buffer in\n  let b_position = b.position in\n  Bytes.unsafe_set b_buffer b_position c;\n  Ext_bytes.unsafe_blit_string s 0 b_buffer (b_position + 1) s_len;\n  b.position <- new_position\n\n(* equivalent to add_char \" \"; add_char \"$\"; add_string s  *)\nlet add_ninja_prefix_var b s =\n  let s_len = String.length s in\n  let len = s_len + 2 in\n  let new_position = b.position + len in\n  if new_position > b.length then resize b len;\n  let b_buffer = b.buffer in\n  let b_position = b.position in\n  Bytes.unsafe_set b_buffer b_position ' ';\n  Bytes.unsafe_set b_buffer (b_position + 1) '$';\n  Ext_bytes.unsafe_blit_string s 0 b_buffer (b_position + 2) s_len;\n  b.position <- new_position\n\n(* let add_bytes b s = add_string b (Bytes.unsafe_to_string s)\n\n   let add_buffer b bs =\n   add_subbytes b bs.buffer 0 bs.position *)\n\n(* let add_channel b ic len =\n   if len < 0\n    || len > Sys.max_string_length\n    then   (* PR#5004 *)\n    invalid_arg \"Ext_buffer.add_channel\";\n   if b.position + len > b.length then resize b len;\n   really_input ic b.buffer b.position len;\n   b.position <- b.position + len *)\n\nlet output_buffer oc b = output oc b.buffer 0 b.position\n\nexternal unsafe_string : bytes -> int -> int -> Digest.t = \"caml_md5_string\"\n\nlet digest b = unsafe_string b.buffer 0 b.position\n\nlet rec not_equal_aux (b : bytes) (s : string) i len =\n  if i >= len then false\n  else\n    Bytes.unsafe_get b i <> String.unsafe_get s i\n    || not_equal_aux b s (i + 1) len\n\n(** avoid a large copy *)\nlet not_equal (b : t) (s : string) =\n  let b_len = b.position in\n  let s_len = String.length s in\n  b_len <> s_len || not_equal_aux b.buffer s 0 s_len\n\n(** It could be one byte, two bytes, three bytes and four bytes TODO: inline for\n    better performance *)\nlet add_int_1 (b : t) (x : int) =\n  let c = Char.unsafe_chr (x land 0xff) in\n  let pos = b.position in\n  if pos >= b.length then resize b 1;\n  Bytes.unsafe_set b.buffer 
pos c;\n  b.position <- pos + 1\n\nlet add_int_2 (b : t) (x : int) =\n  let c1 = Char.unsafe_chr (x land 0xff) in\n  let c2 = Char.unsafe_chr ((x lsr 8) land 0xff) in\n  let pos = b.position in\n  if pos + 1 >= b.length then resize b 2;\n  let b_buffer = b.buffer in\n  Bytes.unsafe_set b_buffer pos c1;\n  Bytes.unsafe_set b_buffer (pos + 1) c2;\n  b.position <- pos + 2\n\nlet add_int_3 (b : t) (x : int) =\n  let c1 = Char.unsafe_chr (x land 0xff) in\n  let c2 = Char.unsafe_chr ((x lsr 8) land 0xff) in\n  let c3 = Char.unsafe_chr ((x lsr 16) land 0xff) in\n  let pos = b.position in\n  if pos + 2 >= b.length then resize b 3;\n  let b_buffer = b.buffer in\n  Bytes.unsafe_set b_buffer pos c1;\n  Bytes.unsafe_set b_buffer (pos + 1) c2;\n  Bytes.unsafe_set b_buffer (pos + 2) c3;\n  b.position <- pos + 3\n\nlet add_int_4 (b : t) (x : int) =\n  let c1 = Char.unsafe_chr (x land 0xff) in\n  let c2 = Char.unsafe_chr ((x lsr 8) land 0xff) in\n  let c3 = Char.unsafe_chr ((x lsr 16) land 0xff) in\n  let c4 = Char.unsafe_chr ((x lsr 24) land 0xff) in\n  let pos = b.position in\n  if pos + 3 >= b.length then resize b 4;\n  let b_buffer = b.buffer in\n  Bytes.unsafe_set b_buffer pos c1;\n  Bytes.unsafe_set b_buffer (pos + 1) c2;\n  Bytes.unsafe_set b_buffer (pos + 2) c3;\n  Bytes.unsafe_set b_buffer (pos + 3) c4;\n  b.position <- pos + 4\n"
  },
  {
    "path": "analysis/vendor/ext/ext_buffer.mli",
    "content": "(***********************************************************************)\n(*                                                                     *)\n(*                                OCaml                                *)\n(*                                                                     *)\n(*  Pierre Weis and Xavier Leroy, projet Cristal, INRIA Rocquencourt   *)\n(*                                                                     *)\n(*  Copyright 1999 Institut National de Recherche en Informatique et   *)\n(*  en Automatique.  All rights reserved.  This file is distributed    *)\n(*  under the terms of the GNU Library General Public License, with    *)\n(*  the special exception on linking described in file ../LICENSE.     *)\n(*                                                                     *)\n(***********************************************************************)\n\n(** Extensible buffers.\n\n    This module implements buffers that automatically expand as necessary. It\n    provides accumulative concatenation of strings in quasi-linear time (instead\n    of quadratic time when strings are concatenated pairwise). *)\n\n(* ReScript customization: customized for efficient digest *)\n\ntype t\n(** The abstract type of buffers. *)\n\nval create : int -> t\n(** [create n] returns a fresh buffer, initially empty. The [n] parameter is the\n    initial size of the internal byte sequence that holds the buffer contents.\n    That byte sequence is automatically reallocated when more than [n]\n    characters are stored in the buffer, but shrinks back to [n] characters when\n    [reset] is called. For best performance, [n] should be of the same order of\n    magnitude as the number of characters that are expected to be stored in the\n    buffer (for instance, 80 for a buffer that holds one output line). Nothing\n    bad will happen if the buffer grows beyond that limit, however. In doubt,\n    take [n = 16] for instance. 
If [n] is not between 1 and\n    {!Sys.max_string_length}, it will be clipped to that interval. *)\n\nval contents : t -> string\n(** Return a copy of the current contents of the buffer. The buffer itself is\n    unchanged. *)\n\nval length : t -> int\n(** Return the number of characters currently contained in the buffer. *)\n\nval is_empty : t -> bool\n\nval clear : t -> unit\n(** Empty the buffer. *)\n\nval add_char : t -> char -> unit\n(** [add_char b c] appends the character [c] at the end of the buffer [b]. *)\n\nval add_string : t -> string -> unit\n(** [add_string b s] appends the string [s] at the end of the buffer [b]. *)\n\n(* val add_bytes : t -> bytes -> unit *)\n(** [add_string b s] appends the string [s] at the end of the buffer [b].\n    @since 4.02 *)\n\n(* val add_substring : t -> string -> int -> int -> unit *)\n(** [add_substring b s ofs len] takes [len] characters from offset [ofs] in\n    string [s] and appends them at the end of the buffer [b]. *)\n\n(* val add_subbytes : t -> bytes -> int -> int -> unit *)\n(** [add_substring b s ofs len] takes [len] characters from offset [ofs] in byte\n    sequence [s] and appends them at the end of the buffer [b].\n    @since 4.02 *)\n\n(* val add_buffer : t -> t -> unit *)\n(** [add_buffer b1 b2] appends the current contents of buffer [b2] at the end of\n    buffer [b1]. [b2] is not modified. *)\n\n(* val add_channel : t -> in_channel -> int -> unit *)\n(** [add_channel b ic n] reads exactly [n] character from the input channel [ic]\n    and stores them at the end of buffer [b]. Raise [End_of_file] if the channel\n    contains fewer than [n] characters. *)\n\nval output_buffer : out_channel -> t -> unit\n(** [output_buffer oc b] writes the current contents of buffer [b] on the output\n    channel [oc]. 
*)\n\nval digest : t -> Digest.t\n\nval not_equal : t -> string -> bool\n\nval add_int_1 : t -> int -> unit\n\nval add_int_2 : t -> int -> unit\n\nval add_int_3 : t -> int -> unit\n\nval add_int_4 : t -> int -> unit\n\nval add_string_char : t -> string -> char -> unit\n\nval add_ninja_prefix_var : t -> string -> unit\n\nval add_char_string : t -> char -> string -> unit\n"
  },
  {
    "path": "analysis/vendor/ext/ext_bytes.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nexternal unsafe_blit_string : string -> int -> bytes -> int -> int -> unit\n  = \"caml_blit_string\"\n[@@noalloc]\n"
  },
  {
    "path": "analysis/vendor/ext/ext_bytes.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nexternal unsafe_blit_string : string -> int -> bytes -> int -> int -> unit\n  = \"caml_blit_string\"\n[@@noalloc]\n"
  },
  {
    "path": "analysis/vendor/ext/ext_char.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** {!Char.escaped} is locale sensitive in 4.02.3, fixed in the trunk, backport\n    it here *)\n\nlet valid_hex x =\n  match x with\n  | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' -> true\n  | _ -> false\n\nlet is_lower_case c =\n  (c >= 'a' && c <= 'z')\n  || (c >= '\\224' && c <= '\\246')\n  || (c >= '\\248' && c <= '\\254')\n"
  },
  {
    "path": "analysis/vendor/ext/ext_char.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** Extension to Standard char module, avoid locale sensitivity *)\n\nval valid_hex : char -> bool\n\nval is_lower_case : char -> bool\n"
  },
  {
    "path": "analysis/vendor/ext/ext_cmp.ml",
    "content": "type 'a compare = 'a -> 'a -> int\n\ntype ('a, 'id) cmp = 'a compare\n\nexternal get_cmp : ('a, 'id) cmp -> 'a compare = \"%identity\"\n\nmodule type S = sig\n  type id\n\n  type t\n\n  val cmp : (t, id) cmp\nend\n\ntype ('key, 'id) t = (module S with type t = 'key and type id = 'id)\n\nmodule Make (M : sig\n  type t\n\n  val cmp : (t -> t -> int[@bs])\nend) =\nstruct\n  type id\n\n  type t = M.t\n\n  let cmp = M.cmp\nend\n\nlet make (type key) (cmp : (key -> key -> int[@bs])) =\n  let module M = struct\n    type t = key\n\n    let cmp = cmp\n  end in\n  let module N = Make (M) in\n  (module N : S with type t = key)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_cmp.mli",
    "content": "type 'a compare = 'a -> 'a -> int\n\ntype ('a, 'id) cmp\n\nexternal get_cmp : ('a, 'id) cmp -> 'a compare = \"%identity\"\n(** only used for data structures, not exported for client usage *)\n\nmodule type S = sig\n  type id\n\n  type t\n\n  val cmp : (t, id) cmp\nend\n\ntype ('key, 'id) t = (module S with type t = 'key and type id = 'id)\n\nmodule Make (M : sig\n  type t\n\n  val cmp : t compare\nend) : S with type t = M.t\n\nval make : ('a -> 'a -> int) -> (module S with type t = 'a)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_color.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype color = Black | Red | Green | Yellow | Blue | Magenta | Cyan | White\n\ntype style = FG of color | BG of color | Bold | Dim\n\n(* let ansi_of_color = function\n   | Black -> \"0\"\n   | Red -> \"1\"\n   | Green -> \"2\"\n   | Yellow -> \"3\"\n   | Blue -> \"4\"\n   | Magenta -> \"5\"\n   | Cyan -> \"6\"\n   | White -> \"7\" *)\n\nlet code_of_style = function\n  | FG Black -> \"30\"\n  | FG Red -> \"31\"\n  | FG Green -> \"32\"\n  | FG Yellow -> \"33\"\n  | FG Blue -> \"34\"\n  | FG Magenta -> \"35\"\n  | FG Cyan -> \"36\"\n  | FG White -> \"37\"\n  | BG Black -> \"40\"\n  | BG Red -> \"41\"\n  | BG Green -> \"42\"\n  | BG Yellow -> \"43\"\n  | BG Blue -> \"44\"\n  | BG Magenta -> \"45\"\n  | BG Cyan -> \"46\"\n  | BG White -> \"47\"\n  | Bold -> \"1\"\n  | Dim -> \"2\"\n\n(** TODO: add more styles later *)\nlet style_of_tag s =\n  match s with\n  | Format.String_tag \"error\" -> [Bold; FG Red]\n  | Format.String_tag \"warning\" -> [Bold; FG Magenta]\n  | Format.String_tag \"info\" -> [Bold; FG Yellow]\n  | Format.String_tag \"dim\" -> [Dim]\n  | Format.String_tag \"filename\" -> [FG Cyan]\n  | _ -> []\n\nlet ansi_of_tag s =\n  let l = style_of_tag s in\n  let s = String.concat \";\" (Ext_list.map l code_of_style) in\n  \"\\x1b[\" ^ s ^ \"m\"\n\nlet reset_lit = \"\\x1b[0m\"\n"
  },
  {
    "path": "analysis/vendor/ext/ext_color.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype color = Black | Red | Green | Yellow | Blue | Magenta | Cyan | White\n\ntype style = FG of color | BG of color | Bold | Dim\n\nval ansi_of_tag : Format.stag -> string\n(** Input is the tag for example `@{<warning>@}` return escape code *)\n\nval reset_lit : string\n"
  },
  {
    "path": "analysis/vendor/ext/ext_digest.ml",
    "content": "(* Copyright (C) 2019- Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nlet length = 16\n\nlet hex_length = 32\n"
  },
  {
    "path": "analysis/vendor/ext/ext_digest.mli",
    "content": "(* Copyright (C) 2019- Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval length : int\n\nval hex_length : int\n"
  },
  {
    "path": "analysis/vendor/ext/ext_file_extensions.ml",
    "content": "type valid_input = Res | Resi | Intf_ast | Impl_ast | Mlmap | Cmi | Unknown\n\n(** This is per-file based, when [ocamlc] [-c -o another_dir/xx.cmi] it will\n    return (another_dir/xx) *)\n\nlet classify_input ext =\n  match () with\n  | _ when ext = Literals.suffix_ast -> Impl_ast\n  | _ when ext = Literals.suffix_iast -> Intf_ast\n  | _ when ext = Literals.suffix_mlmap -> Mlmap\n  | _ when ext = Literals.suffix_cmi -> Cmi\n  | _ when ext = Literals.suffix_res -> Res\n  | _ when ext = Literals.suffix_resi -> Resi\n  | _ -> Unknown\n"
  },
  {
    "path": "analysis/vendor/ext/ext_filename.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nlet is_dir_sep_unix c = c = '/'\n\nlet is_dir_sep_win_cygwin c = c = '/' || c = '\\\\' || c = ':'\n\nlet is_dir_sep = if Sys.unix then is_dir_sep_unix else is_dir_sep_win_cygwin\n\n(* reference ninja.cc IsKnownShellSafeCharacter *)\nlet maybe_quote (s : string) =\n  let noneed_quote =\n    Ext_string.for_all s (function\n      | '0' .. '9' | 'a' .. 'z' | 'A' .. 'Z' | '_' | '+' | '-' | '.' 
| '/' | '@'\n        ->\n        true\n      | _ -> false)\n  in\n  if noneed_quote then s else Filename.quote s\n\nlet chop_extension_maybe name =\n  let rec search_dot i =\n    if i < 0 || is_dir_sep (String.unsafe_get name i) then name\n    else if String.unsafe_get name i = '.' then String.sub name 0 i\n    else search_dot (i - 1)\n  in\n  search_dot (String.length name - 1)\n\nlet get_extension_maybe name =\n  let name_len = String.length name in\n  let rec search_dot name i name_len =\n    if i < 0 || is_dir_sep (String.unsafe_get name i) then \"\"\n    else if String.unsafe_get name i = '.' then String.sub name i (name_len - i)\n    else search_dot name (i - 1) name_len\n  in\n  search_dot name (name_len - 1) name_len\n\nlet chop_all_extensions_maybe name =\n  let rec search_dot i last =\n    if i < 0 || is_dir_sep (String.unsafe_get name i) then\n      match last with\n      | None -> name\n      | Some i -> String.sub name 0 i\n    else if String.unsafe_get name i = '.' then search_dot (i - 1) (Some i)\n    else search_dot (i - 1) last\n  in\n  search_dot (String.length name - 1) None\n\nlet new_extension name (ext : string) =\n  let rec search_dot name i ext =\n    if i < 0 || is_dir_sep (String.unsafe_get name i) then name ^ ext\n    else if String.unsafe_get name i = '.' 
then (\n      let ext_len = String.length ext in\n      let buf = Bytes.create (i + ext_len) in\n      Bytes.blit_string name 0 buf 0 i;\n      Bytes.blit_string ext 0 buf i ext_len;\n      Bytes.unsafe_to_string buf)\n    else search_dot name (i - 1) ext\n  in\n  search_dot name (String.length name - 1) ext\n\n(** TODO: improve efficiency given a path, calcuate its module name Note that\n    `ocamlc.opt -c aa.xx.mli` gives `aa.xx.cmi` we can not strip all extensions,\n    otherwise we can not tell the difference between \"x.cpp.ml\" and \"x.ml\" *)\nlet module_name name =\n  let rec search_dot i name =\n    if i < 0 then Ext_string.capitalize_ascii name\n    else if String.unsafe_get name i = '.' then Ext_string.capitalize_sub name i\n    else search_dot (i - 1) name\n  in\n  let name = Filename.basename name in\n  let name_len = String.length name in\n  search_dot (name_len - 1) name\n\ntype module_info = {module_name: string; case: bool}\n\nlet rec valid_module_name_aux name off len =\n  if off >= len then true\n  else\n    let c = String.unsafe_get name off in\n    match c with\n    | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '_' | '\\'' | '.' | '[' | ']' ->\n      valid_module_name_aux name (off + 1) len\n    | _ -> false\n\ntype state = Invalid | Upper | Lower\n\nlet valid_module_name name len =\n  if len = 0 then Invalid\n  else\n    let c = String.unsafe_get name 0 in\n    match c with\n    | 'A' .. 'Z' -> if valid_module_name_aux name 1 len then Upper else Invalid\n    | 'a' .. 'z' | '0' .. 
'9' | '_' | '[' | ']' ->\n      if valid_module_name_aux name 1 len then Lower else Invalid\n    | _ -> Invalid\n\nlet as_module ~basename =\n  let rec search_dot i name name_len =\n    if i < 0 then\n      (* Input e.g, [a_b] *)\n      match valid_module_name name name_len with\n      | Invalid -> None\n      | Upper -> Some {module_name = name; case = true}\n      | Lower ->\n        Some {module_name = Ext_string.capitalize_ascii name; case = false}\n    else if String.unsafe_get name i = '.' then\n      (*Input e.g, [A_b] *)\n      match valid_module_name name i with\n      | Invalid -> None\n      | Upper ->\n        Some {module_name = Ext_string.capitalize_sub name i; case = true}\n      | Lower ->\n        Some {module_name = Ext_string.capitalize_sub name i; case = false}\n    else search_dot (i - 1) name name_len\n  in\n  let name_len = String.length basename in\n  search_dot (name_len - 1) basename name_len\n"
  },
  {
    "path": "analysis/vendor/ext/ext_filename.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(* TODO:\n   Change the module name, this code is not really an extension of the standard\n    library but rather specific to JS Module name convention.\n*)\n\n(** An extension module to calculate relative path follow node/npm style. TODO :\n    this short name will have to change upon renaming the file. 
*)\n\nval is_dir_sep : char -> bool\n\nval maybe_quote : string -> string\n\nval chop_extension_maybe : string -> string\n\n(* return an empty string if no extension found *)\nval get_extension_maybe : string -> string\n\nval new_extension : string -> string -> string\n\nval chop_all_extensions_maybe : string -> string\n\n(* OCaml specific abstraction*)\nval module_name : string -> string\n\ntype module_info = {module_name: string; case: bool}\n\nval as_module : basename:string -> module_info option\n"
  },
  {
    "path": "analysis/vendor/ext/ext_fmt.ml",
    "content": "let with_file_as_pp filename f =\n  Ext_pervasives.finally (open_out_bin filename) ~clean:close_out (fun chan ->\n      let fmt = Format.formatter_of_out_channel chan in\n      let v = f fmt in\n      Format.pp_print_flush fmt ();\n      v)\n\nlet failwithf ~loc fmt = Format.ksprintf (fun s -> failwith (loc ^ s)) fmt\n\nlet invalid_argf fmt = Format.ksprintf invalid_arg fmt\n"
  },
  {
    "path": "analysis/vendor/ext/ext_format.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nopen Format\n\ntype t = formatter\n\n(* let string = pp_print_string *)\n\n(* let break = fun fmt -> pp_print_break fmt 0 0\n\n   let break1 =\n   fun fmt -> pp_print_break fmt 0 1\n\n   let space  fmt  =\n   pp_print_break fmt 1 0\n*)\n(* let vgroup fmt indent u =\n   pp_open_vbox fmt indent;\n   let v = u () in\n   pp_close_box fmt ();\n   v\n\n   let group fmt indent u =\n   pp_open_hovbox fmt indent;\n   let v = u () in\n   pp_close_box fmt ();\n   v\n\n   let paren fmt u =\n   string fmt \"(\";\n   let v = u () in\n   string fmt \")\";\n   v\n\n   let brace fmt u =\n   string fmt \"{\";\n   (* break1 fmt ; *)\n   let v = u () in\n   string fmt \"}\";\n   v\n\n   let bracket fmt u =\n   string fmt \"[\";\n   let v = u () in\n   string fmt \"]\";\n   v *)\n\n(* let paren_group st n action =\n   group st n (fun _ -> paren st action)\n\n   let brace_group st n action =\n   group st n (fun _ -> brace st action )\n\n   let brace_vgroup st n action =\n   vgroup st n (fun _ ->\n    string st \"{\";\n    pp_print_break st 0 2;\n    let v = vgroup st 0 action in\n    pp_print_break st 0 0;\n    string st \"}\";\n    v\n              )\n   let bracket_group st n action =\n   group st n (fun _ -> bracket st action)\n\n   let newline fmt = pp_print_newline fmt ()\n\n   let to_out_channel = formatter_of_out_channel\n\n   (* let non_breaking_space  fmt = string fmt \" \" *)\n   (* let set_needed_space_function _ _ = () *)\n   let flush = pp_print_flush\n*)\n(* let list = pp_print_list *)\n\nlet pp_print_queue ?(pp_sep = pp_print_cut) pp_v ppf q =\n  Queue.iter\n    (fun q ->\n      pp_v ppf q;\n      pp_sep ppf ())\n    q\n"
  },
  {
    "path": "analysis/vendor/ext/ext_format.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** Simplified wrapper module for the standard library [Format] module. 
*)\n\ntype t = private Format.formatter\n\n(* val string : t -> string -> unit\n\n   val break : t -> unit\n\n   val break1 : t -> unit\n\n   val space :  t -> unit\n\n   val group : t -> int -> (unit -> 'a) -> 'a\n   (** [group] will record current indentation \n    and indent futher\n *)\n\n   val vgroup : t -> int -> (unit -> 'a) -> 'a\n\n   val paren : t -> (unit -> 'a) -> 'a\n\n   val paren_group : t -> int -> (unit -> 'a) -> 'a\n\n   val brace_group : t -> int -> (unit -> 'a) -> 'a\n\n   val brace_vgroup : t -> int -> (unit -> 'a) -> 'a\n\n   val bracket_group : t -> int -> (unit -> 'a) -> 'a\n\n   val newline : t -> unit\n\n   val to_out_channel : out_channel -> t\n\n   val flush : t -> unit -> unit *)\n\nval pp_print_queue :\n  ?pp_sep:(Format.formatter -> unit -> unit) ->\n  (Format.formatter -> 'a -> unit) ->\n  Format.formatter ->\n  'a Queue.t ->\n  unit\n"
  },
  {
    "path": "analysis/vendor/ext/ext_ident.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * 2017 - Hongbo Zhang, Authors of ReScript\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nlet js_flag = 0b1_000 (* check with ocaml compiler *)\n\n(* let js_module_flag = 0b10_000 (\\* javascript external modules *\\) *)\n(* TODO:\n    check name conflicts with javascript conventions\n   {[\n     Ext_ident.convert \"^\";;\n     - : string = \"$caret\"\n   ]}\n*)\nlet js_object_flag = 0b100_000 (* javascript object flags *)\n\nlet is_js (i : Ident.t) = i.flags land js_flag <> 0\n\nlet is_js_or_global (i : Ident.t) = i.flags land (8 lor 1) <> 0\n\nlet is_js_object (i : Ident.t) = i.flags land js_object_flag <> 0\n\nlet make_js_object (i : Ident.t) = i.flags <- i.flags lor js_object_flag\n\n(* It's a js function hard coded by js api, so when printing,\n   it should preserve the name\n*)\nlet create_js (name : string) : Ident.t = {name; flags = js_flag; stamp = 0}\n\nlet create = Ident.create\n\n(* FIXME: no need for `$' operator *)\nlet create_tmp ?(name = Literals.tmp) () = create name\n\nlet js_module_table : Ident.t Hash_string.t = Hash_string.create 31\n\n(* This is for a js exeternal module, we can change it when printing\n   for example\n   {[\n     var React$1 = require('react');\n     React$1.render(..)\n   ]}\n\n   Given a name, if duplicated, they should  have the same id\n*)\n(* let create_js_module (name : string) : Ident.t =\n    let name =\n     String.concat \"\" @@ Ext_list.map\n     (Ext_string.split name '-')  Ext_string.capitalize_ascii in\n    (* TODO: if we do such transformation, we should avoid       collision for example:\n       react-dom\n       react--dom\n       check collision later\n   *)\n    match Hash_string.find_exn js_module_table name  with\n    | exception Not_found ->\n     let ans = Ident.create name in\n     (* let ans = { v with flags = js_module_flag} in  *)\n     Hash_string.add js_module_table name ans;\n     ans\n    | v -> (* v *) Ident.rename v\n*)\n\nlet[@inline] convert ?(op = false) (c : char) : string =\n  match c with\n  | '*' -> \"$star\"\n  | '\\'' -> \"$p\"\n  | '!' 
-> \"$bang\"\n  | '>' -> \"$great\"\n  | '<' -> \"$less\"\n  | '=' -> \"$eq\"\n  | '+' -> \"$plus\"\n  | '-' -> if op then \"$neg\" else \"$\"\n  | '@' -> \"$at\"\n  | '^' -> \"$caret\"\n  | '/' -> \"$slash\"\n  | '|' -> \"$pipe\"\n  | '.' -> \"$dot\"\n  | '%' -> \"$percent\"\n  | '~' -> \"$tilde\"\n  | '#' -> \"$hash\"\n  | ':' -> \"$colon\"\n  | '?' -> \"$question\"\n  | '&' -> \"$amp\"\n  | '(' -> \"$lpar\"\n  | ')' -> \"$rpar\"\n  | '{' -> \"$lbrace\"\n  | '}' -> \"$lbrace\"\n  | '[' -> \"$lbrack\"\n  | ']' -> \"$rbrack\"\n  | _ -> \"$unknown\"\nlet[@inline] no_escape (c : char) =\n  match c with\n  | 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' | '$' -> true\n  | _ -> false\n\nlet is_uident name =\n  let len = String.length name in\n  if len > 0 then\n    match name.[0] with\n    | 'A' .. 'Z' -> true\n    | _ -> false\n  else false\n\nlet is_uppercase_exotic name =\n  let len = String.length name in\n  len >= 3 && name.[0] = '\\\\' && name.[1] = '\\\"' && name.[len - 1] = '\\\"'\n\nlet unwrap_uppercase_exotic name =\n  if is_uppercase_exotic name then\n    let len = String.length name in\n    String.sub name 2 (len - 3)\n  else name\n\nexception Not_normal_letter of int\nlet name_mangle name =\n  let len = String.length name in\n  try\n    for i = 0 to len - 1 do\n      if not (no_escape (String.unsafe_get name i)) then\n        raise_notrace (Not_normal_letter i)\n    done;\n    name (* Normal letter *)\n  with Not_normal_letter i ->\n    let buffer = Ext_buffer.create len in\n    for j = 0 to len - 1 do\n      let c = String.unsafe_get name j in\n      if no_escape c then Ext_buffer.add_char buffer c\n      else Ext_buffer.add_string buffer (convert ~op:(i = 0) c)\n    done;\n    Ext_buffer.contents buffer\n\n(** [convert name] if [name] is a js keyword or js global, add \"$$\" otherwise do\n    the name mangling to make sure ocaml identifier it is a valid js identifier\n*)\nlet convert (name : string) =\n  let name = unwrap_uppercase_exotic name in\n  if 
Js_reserved_map.is_js_keyword name || Js_reserved_map.is_js_global name\n  then \"$$\" ^ name\n  else name_mangle name\n\n(** keyword could be used in property *)\n\n(* It is currently made a persistent ident to avoid fresh ids\n    which would result in different signature files\n   - other solution: use lazy values\n*)\nlet make_unused () = create \"_\"\n\nlet reset () = Hash_string.clear js_module_table\n\n(* Has to be total order, [x < y]\n   and [x > y] should be consistent\n   flags are not relevant here\n*)\nlet compare (x : Ident.t) (y : Ident.t) =\n  let u = x.stamp - y.stamp in\n  if u = 0 then Ext_string.compare x.name y.name else u\n\nlet equal (x : Ident.t) (y : Ident.t) =\n  if x.stamp <> 0 then x.stamp = y.stamp else y.stamp = 0 && x.name = y.name\n"
  },
  {
    "path": "analysis/vendor/ext/ext_ident.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(** A wrapper around [Ident] module in compiler-libs*)\n\nval is_js : Ident.t -> bool\n\nval is_js_object : Ident.t -> bool\n\nval create_js : string -> Ident.t\n(** create identifiers for predefined [js] global variables *)\n\nval create : string -> Ident.t\n\nval make_js_object : Ident.t -> unit\n\nval reset : unit -> unit\n\nval create_tmp : ?name:string -> unit -> Ident.t\n\nval make_unused : unit -> Ident.t\n\nval is_uident : string -> bool\n\nval is_uppercase_exotic : string -> bool\n\nval unwrap_uppercase_exotic : string -> string\n\nval convert : string -> string\n(** Invariant: if name is not converted, the reference should be equal *)\n\nval is_js_or_global : Ident.t -> bool\n\nval compare : Ident.t -> Ident.t -> int\nval equal : Ident.t -> Ident.t -> bool\n"
  },
  {
    "path": "analysis/vendor/ext/ext_int.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype t = int\n\nlet compare (x : t) (y : t) = Stdlib.compare x y\n\nlet equal (x : t) (y : t) = x = y\n\nlet move = 0x1_0000_0000\n\n(* works only on 64 bit platform *)\nlet int32_unsigned_to_int (n : int32) : int =\n  let i = Int32.to_int n in\n  if i < 0 then i + move else i\n"
  },
  {
    "path": "analysis/vendor/ext/ext_int.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype t = int\n\nval compare : t -> t -> int\n\nval equal : t -> t -> bool\n\nval int32_unsigned_to_int : int32 -> int\n(** works on 64 bit platform only given input as an uint32 and convert it io\n    int64 *)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_io.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(** on 32 bit , there are 16M limitation *)\nlet load_file f =\n  Ext_pervasives.finally (open_in_bin f) ~clean:close_in (fun ic ->\n      let n = in_channel_length ic in\n      let s = Bytes.create n in\n      really_input ic s 0 n;\n      Bytes.unsafe_to_string s)\n\nlet rev_lines_of_chann chan =\n  let rec loop acc chan =\n    match input_line chan with\n    | line -> loop (line :: acc) chan\n    | exception End_of_file ->\n      close_in chan;\n      acc\n  in\n  loop [] chan\n\nlet rev_lines_of_file file =\n  Ext_pervasives.finally ~clean:close_in (open_in_bin file) rev_lines_of_chann\n\nlet write_file f content =\n  Ext_pervasives.finally ~clean:close_out (open_out_bin f) (fun oc ->\n      output_string oc content)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_io.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval load_file : string -> string\n\nval rev_lines_of_file : string -> string list\n\nval rev_lines_of_chann : in_channel -> string list\n\nval write_file : string -> string -> unit\n"
  },
  {
    "path": "analysis/vendor/ext/ext_js_file_kind.ml",
    "content": "(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\ntype case = Upper | Little\n\ntype t = {case: case; suffix: string} [@@warning \"-69\"]\n"
  },
  {
    "path": "analysis/vendor/ext/ext_js_regex.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nlet check_from_end al =\n  let rec aux l seen =\n    match l with\n    | [] -> false\n    | e :: r ->\n      if e < 0 || e > 255 then false\n      else\n        let c = Char.chr e in\n        if c = '/' then true\n        else if Ext_list.exists seen (fun x -> x = c) then false\n          (* flag should not be repeated *)\n        else if c = 'i' || c = 'g' || c = 'm' || c = 'y' || c = 'u' then\n          aux r (c :: seen)\n        else false\n  in\n  aux al []\n\nlet js_regex_checker s =\n  match Ext_utf8.decode_utf8_string s with\n  | [] -> false\n  | 47 (* [Char.code '/' = 47 ]*) :: tail -> check_from_end (List.rev tail)\n  | _ :: _ -> false\n  | exception Ext_utf8.Invalid_utf8 _ -> false\n"
  },
  {
    "path": "analysis/vendor/ext/ext_js_regex.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(* This is a module that checks if js regex is valid or not *)\n\nval js_regex_checker : string -> bool\n"
  },
  {
    "path": "analysis/vendor/ext/ext_json.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype callback =\n  [ `Str of string -> unit\n  | `Str_loc of string -> Lexing.position -> unit\n  | `Flo of string -> unit\n  | `Flo_loc of string -> Lexing.position -> unit\n  | `Bool of bool -> unit\n  | `Obj of Ext_json_types.t Map_string.t -> unit\n  | `Arr of Ext_json_types.t array -> unit\n  | `Arr_loc of\n    Ext_json_types.t array -> Lexing.position -> Lexing.position -> unit\n  | `Null of unit -> unit\n  | `Not_found of unit -> unit\n  | `Id of Ext_json_types.t -> unit ]\n\ntype path = string list\n\ntype status = No_path | Found of Ext_json_types.t | Wrong_type of path\n\nlet test ?(fail = fun () -> ()) key (cb : callback)\n    (m : Ext_json_types.t Map_string.t) =\n  (match (Map_string.find_exn m key, cb) with\n  | exception Not_found -> (\n    match cb with\n    | `Not_found f -> f ()\n    | _ -> fail ())\n  | True _, `Bool cb -> cb true\n  | False _, `Bool cb -> cb false\n  | Flo {flo = s}, `Flo cb -> cb s\n  | Flo {flo = s; loc}, `Flo_loc cb -> cb s loc\n  | Obj {map = b}, `Obj cb -> cb b\n  | Arr {content}, `Arr cb -> cb content\n  | Arr {content; loc_start; loc_end}, `Arr_loc cb ->\n    cb content loc_start loc_end\n  | Null _, `Null cb -> cb ()\n  | Str {str = s}, `Str cb -> cb s\n  | Str {str = s; loc}, `Str_loc cb -> cb s loc\n  | any, `Id cb -> cb any\n  | _, _ -> fail ());\n  m\n\nlet loc_of (x : Ext_json_types.t) =\n  match x with\n  | True p | False p | Null p -> p\n  | Str p -> p.loc\n  | Arr p -> p.loc_start\n  | Obj p -> p.loc\n  | Flo p -> p.loc\n"
  },
  {
    "path": "analysis/vendor/ext/ext_json.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype path = string list\n\ntype status = No_path | Found of Ext_json_types.t | Wrong_type of path\n\ntype callback =\n  [ `Str of string -> unit\n  | `Str_loc of string -> Lexing.position -> unit\n  | `Flo of string -> unit\n  | `Flo_loc of string -> Lexing.position -> unit\n  | `Bool of bool -> unit\n  | `Obj of Ext_json_types.t Map_string.t -> unit\n  | `Arr of Ext_json_types.t array -> unit\n  | `Arr_loc of\n    Ext_json_types.t array -> Lexing.position -> Lexing.position -> unit\n  | `Null of unit -> unit\n  | `Not_found of unit -> unit\n  | `Id of Ext_json_types.t -> unit ]\n\nval test :\n  ?fail:(unit -> unit) ->\n  string ->\n  callback ->\n  Ext_json_types.t Map_string.t ->\n  Ext_json_types.t Map_string.t\n\nval loc_of : Ext_json_types.t -> Ext_position.t\n"
  },
  {
    "path": "analysis/vendor/ext/ext_json_noloc.ml",
    "content": "(* Copyright (C) 2017- Hongbo Zhang, Authors of ReScript\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(* This file is only used in bsb watcher searlization *)\ntype t =\n  | True\n  | False\n  | Null\n  | Flo of string\n  | Str of string\n  | Arr of t array\n  | Obj of t Map_string.t\n\n(** poor man's serialization *)\nlet naive_escaped (unmodified_input : string) : string =\n  let n = ref 0 in\n  let len = String.length unmodified_input in\n  for i = 0 to len - 1 do\n    n :=\n      !n\n      +\n      match String.unsafe_get unmodified_input i with\n      | '\\\"' | '\\\\' | '\\n' | '\\t' | '\\r' | '\\b' -> 2\n      | _ -> 1\n  done;\n  if !n = len then unmodified_input\n  else\n    let result = Bytes.create !n in\n    n := 0;\n    for i = 0 to len - 1 do\n      let open Bytes in\n      (match String.unsafe_get unmodified_input i with\n      | ('\\\"' | '\\\\') as c ->\n        unsafe_set result !n '\\\\';\n        incr n;\n        unsafe_set result !n c\n      | '\\n' ->\n        unsafe_set result !n '\\\\';\n        incr n;\n        unsafe_set result !n 'n'\n      | '\\t' ->\n        unsafe_set result !n '\\\\';\n        incr n;\n        unsafe_set result !n 't'\n      | '\\r' ->\n        unsafe_set result !n '\\\\';\n        incr n;\n        unsafe_set result !n 'r'\n      | '\\b' ->\n        unsafe_set result !n '\\\\';\n        incr n;\n        unsafe_set result !n 'b'\n      | c -> unsafe_set result !n c);\n      incr n\n    done;\n    Bytes.unsafe_to_string result\n\nlet quot x = \"\\\"\" ^ naive_escaped x ^ \"\\\"\"\n\nlet true_ = True\n\nlet false_ = False\n\nlet null = Null\n\nlet str s = Str s\n\nlet flo s = Flo s\n\nlet arr s = Arr s\n\nlet obj s = Obj s\n\nlet kvs s = Obj (Map_string.of_list s)\n\nlet rec encode_buf (x : t) (buf : Buffer.t) : unit =\n  let a str = Buffer.add_string buf str in\n  match x with\n  | Null -> a \"null\"\n  | Str s -> a (quot s)\n  | Flo s ->\n    a s\n    (*\n         since our parsing keep the original float representation, we just dump it as is, there is no cases like [nan] *)\n  | Arr content -> (\n    match 
content with\n    | [||] -> a \"[]\"\n    | _ ->\n      a \"[ \";\n      encode_buf (Array.unsafe_get content 0) buf;\n      for i = 1 to Array.length content - 1 do\n        a \" , \";\n        encode_buf (Array.unsafe_get content i) buf\n      done;\n      a \" ]\")\n  | True -> a \"true\"\n  | False -> a \"false\"\n  | Obj map ->\n    if Map_string.is_empty map then a \"{}\"\n    else (\n      (*prerr_endline \"WEIRD\";\n        prerr_endline (string_of_int @@ Map_string.cardinal map ); *)\n      a \"{ \";\n      let (_ : int) =\n        Map_string.fold map 0 (fun k v i ->\n            if i <> 0 then a \" , \";\n            a (quot k);\n            a \" : \";\n            encode_buf v buf;\n            i + 1)\n      in\n      a \" }\")\n\nlet to_string x =\n  let buf = Buffer.create 1024 in\n  encode_buf x buf;\n  Buffer.contents buf\n\nlet to_channel (oc : out_channel) x =\n  let buf = Buffer.create 1024 in\n  encode_buf x buf;\n  Buffer.output_buffer oc buf\n\nlet to_file name v =\n  let ochan = open_out_bin name in\n  to_channel ochan v;\n  close_out ochan\n"
  },
  {
    "path": "analysis/vendor/ext/ext_json_noloc.mli",
    "content": "(* Copyright (C) 2017- Authors of ReScript\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype t = private\n  | True\n  | False\n  | Null\n  | Flo of string\n  | Str of string\n  | Arr of t array\n  | Obj of t Map_string.t\n\nval true_ : t\n\nval false_ : t\n\nval null : t\n\nval str : string -> t\n\nval flo : string -> t\n\nval arr : t array -> t\n\nval obj : t Map_string.t -> t\n\nval kvs : (string * t) list -> t\n\nval to_string : t -> string\n\nval to_channel : out_channel -> t -> unit\n\nval to_file : string -> t -> unit\n"
  },
  {
    "path": "analysis/vendor/ext/ext_json_parse.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype error\n\nval report_error : Format.formatter -> error -> unit\n\nexception Error of Lexing.position * Lexing.position * error\n\nval parse_json_from_string : string -> Ext_json_types.t\n\nval parse_json_from_chan : string -> in_channel -> Ext_json_types.t\n\nval parse_json_from_file : string -> Ext_json_types.t\n"
  },
  {
    "path": "analysis/vendor/ext/ext_json_parse.mll",
    "content": "{\ntype error =\n  | Illegal_character of char\n  | Unterminated_string\n  | Unterminated_comment\n  | Illegal_escape of string\n  | Unexpected_token \n  | Expect_comma_or_rbracket\n  | Expect_comma_or_rbrace\n  | Expect_colon\n  | Expect_string_or_rbrace \n  | Expect_eof \n  (* | Trailing_comma_in_obj *)\n  (* | Trailing_comma_in_array *)\n\n\nlet fprintf  = Format.fprintf\nlet report_error ppf = function\n  | Illegal_character c ->\n      fprintf ppf \"Illegal character (%s)\" (Char.escaped c)\n  | Illegal_escape s ->\n      fprintf ppf \"Illegal backslash escape in string or character (%s)\" s\n  | Unterminated_string -> \n      fprintf ppf \"Unterminated_string\"\n  | Expect_comma_or_rbracket ->\n    fprintf ppf \"Expect_comma_or_rbracket\"\n  | Expect_comma_or_rbrace -> \n    fprintf ppf \"Expect_comma_or_rbrace\"\n  | Expect_colon -> \n    fprintf ppf \"Expect_colon\"\n  | Expect_string_or_rbrace  -> \n    fprintf ppf \"Expect_string_or_rbrace\"\n  | Expect_eof  -> \n    fprintf ppf \"Expect_eof\"\n  | Unexpected_token \n    ->\n    fprintf ppf \"Unexpected_token\"\n  (* | Trailing_comma_in_obj  *)\n  (*   -> fprintf ppf \"Trailing_comma_in_obj\" *)\n  (* | Trailing_comma_in_array  *)\n  (*   -> fprintf ppf \"Trailing_comma_in_array\" *)\n  | Unterminated_comment \n    -> fprintf ppf \"Unterminated_comment\"\n         \n\nexception Error of Lexing.position * Lexing.position * error\n\n\nlet () = \n  Printexc.register_printer\n    (function x -> \n     match x with \n     | Error (loc_start,loc_end,error) -> \n       Some (Format.asprintf \n          \"@[%a:@ %a@ -@ %a)@]\" \n          report_error  error\n          Ext_position.print loc_start\n          Ext_position.print loc_end\n       )\n\n     | _ -> None\n    )\n\n\n\n\n\ntype token = \n  | Comma\n  | Eof\n  | False\n  | Lbrace\n  | Lbracket\n  | Null\n  | Colon\n  | Number of string\n  | Rbrace\n  | Rbracket\n  | String of string\n  | True   \n  \nlet error  (lexbuf : Lexing.lexbuf) e = 
\n  raise (Error (lexbuf.lex_start_p, lexbuf.lex_curr_p, e))\n\n\nlet lexeme_len (x : Lexing.lexbuf) =\n  x.lex_curr_pos - x.lex_start_pos\n\nlet update_loc (lexbuf : Lexing.lexbuf) diff =\n  let lex_curr_p = lexbuf.lex_curr_p in \n  lexbuf.lex_curr_p <-\n    {\n      lex_curr_p with\n      pos_lnum = lex_curr_p.pos_lnum + 1;\n      pos_bol = lex_curr_p.pos_cnum - diff;\n    }\n\nlet char_for_backslash = function\n  | 'n' -> '\\010'\n  | 'r' -> '\\013'\n  | 'b' -> '\\008'\n  | 't' -> '\\009'\n  | c -> c\n\nlet dec_code c1 c2 c3 =\n  100 * (Char.code c1 - 48) + 10 * (Char.code c2 - 48) + (Char.code c3 - 48)\n\nlet hex_code c1 c2 =\n  let d1 = Char.code c1 in\n  let val1 =\n    if d1 >= 97 then d1 - 87\n    else if d1 >= 65 then d1 - 55\n    else d1 - 48 in\n  let d2 = Char.code c2 in\n  let val2 =\n    if d2 >= 97 then d2 - 87\n    else if d2 >= 65 then d2 - 55\n    else d2 - 48 in\n  val1 * 16 + val2\n\nlet lf = '\\010'\n}\n\nlet lf = '\\010'\nlet lf_cr = ['\\010' '\\013']\nlet dos_newline = \"\\013\\010\"\nlet blank = [' ' '\\009' '\\012']\n\nlet digit = ['0'-'9']\nlet nonzero = ['1'-'9']\nlet digits = digit +\nlet frac = '.' digits\nlet e = ['e' 'E']['+' '-']?\nlet exp = e digits\nlet positive_int = (digit | nonzero digits)\nlet number = '-'? 
positive_int (frac | exp | frac exp) ?\nlet hexdigit = digit | ['a'-'f' 'A'-'F']    \n\nlet comment_start = \"/*\"\nlet comment_end = \"*/\"\n\nrule lex_json buf  = parse\n| blank + { lex_json buf lexbuf}\n| lf | dos_newline { \n    update_loc lexbuf 0;\n    lex_json buf  lexbuf\n  }\n| comment_start { comment buf lexbuf}\n| \"true\" { True}\n| \"false\" {False}\n| \"null\" {Null}\n| \"[\"  {Lbracket}\n| \"]\"  {Rbracket}\n| \"{\"  {Lbrace}\n| \"}\"  {Rbrace}\n| \",\"  {Comma}\n| ':'   {Colon}\n| (\"//\" (_ # lf_cr)*) {lex_json buf lexbuf}\n\n| number { Number (Lexing.lexeme lexbuf)}\n\n| '\"' {\n  let pos = Lexing.lexeme_start_p lexbuf in\n  scan_string buf pos lexbuf;\n  let content = (Buffer.contents  buf) in \n  Buffer.clear buf ;\n  String content \n}\n| eof  {Eof }\n| _ as c  { error lexbuf (Illegal_character c )}\nand comment buf  = parse \n| comment_end {lex_json buf lexbuf}\n| _  {comment buf lexbuf}\n| eof  {error lexbuf Unterminated_comment}\n(* Note this is wrong for JSON conversion *)\n(* We should fix it later *)\nand scan_string buf start = parse\n| '\"' { () }\n| '\\\\' lf [' ' '\\t']*\n  {\n        let len = lexeme_len lexbuf - 2 in\n        update_loc lexbuf len;\n\n        scan_string buf start lexbuf\n      }\n| '\\\\' dos_newline [' ' '\\t']*\n      {\n        let len = lexeme_len lexbuf - 3 in\n        update_loc lexbuf len;\n        scan_string buf start lexbuf\n      }\n| '\\\\' (['\\\\' '\\'' '\"' 'n' 't' 'b' 'r' ' '] as c)\n      {\n        Buffer.add_char buf (char_for_backslash c);\n        scan_string buf start lexbuf\n      }\n| '\\\\' (digit as c1) (digit as c2) (digit as c3) as s \n      {\n        let v = dec_code c1 c2 c3 in\n        if v > 255 then\n          error lexbuf (Illegal_escape s) ;\n        Buffer.add_char buf (Char.chr v);\n\n        scan_string buf start lexbuf\n      }\n| '\\\\' 'x' (hexdigit as c1) (hexdigit as c2)\n      {\n        let v = hex_code c1 c2 in\n        Buffer.add_char buf (Char.chr v);\n\n        
scan_string buf start lexbuf\n      }\n| '\\\\' (_ as c)\n      {\n        Buffer.add_char buf '\\\\';\n        Buffer.add_char buf c;\n\n        scan_string buf start lexbuf\n      }\n| lf\n      {\n        update_loc lexbuf 0;\n        Buffer.add_char buf lf;\n\n        scan_string buf start lexbuf\n      }\n| ([^ '\\\\' '\"'] # lf)+\n      {\n        let ofs = lexbuf.lex_start_pos in\n        let len = lexbuf.lex_curr_pos - ofs in\n        Buffer.add_subbytes buf lexbuf.lex_buffer ofs len;\n\n        scan_string buf start lexbuf\n      }\n| eof\n      {\n        error lexbuf Unterminated_string\n      }\n\n{\n\n\n\n\n\n\nlet  parse_json lexbuf =\n  let buf = Buffer.create 64 in \n  let look_ahead = ref None in\n  let token () : token = \n    match !look_ahead with \n    | None ->  \n      lex_json buf lexbuf \n    | Some x -> \n      look_ahead := None ;\n      x \n  in\n  let push e = look_ahead := Some e in \n  let rec json (lexbuf : Lexing.lexbuf) : Ext_json_types.t = \n    match token () with \n    | True -> True lexbuf.lex_start_p\n    | False -> False lexbuf.lex_start_p\n    | Null -> Null lexbuf.lex_start_p\n    | Number s ->  Flo {flo = s; loc = lexbuf.lex_start_p}  \n    | String s -> Str { str = s; loc =    lexbuf.lex_start_p}\n    | Lbracket -> parse_array  lexbuf.lex_start_p lexbuf.lex_curr_p [] lexbuf\n    | Lbrace -> parse_map lexbuf.lex_start_p Map_string.empty lexbuf\n    |  _ -> error lexbuf Unexpected_token\n\n(* Note if we remove [trailing_comma] support \n    we should report errors (actually more work), for example \n    {[\n    match token () with \n    | Rbracket ->\n      if trailing_comma then\n        error lexbuf Trailing_comma_in_array\n      else\n    ]} \n    {[\n    match token () with \n    | Rbrace -> \n      if trailing_comma then\n        error lexbuf Trailing_comma_in_obj\n      else\n\n    ]}   \n *)\n  and parse_array   loc_start loc_finish acc lexbuf \n    : Ext_json_types.t =\n    match token () with \n    | Rbracket ->\n  
      Arr {loc_start ; content = Ext_array.reverse_of_list acc ; \n              loc_end = lexbuf.lex_curr_p }\n    | x -> \n      push x ;\n      let new_one = json lexbuf in \n      begin match token ()  with \n      | Comma -> \n          parse_array  loc_start loc_finish (new_one :: acc) lexbuf \n      | Rbracket \n        -> Arr {content = (Ext_array.reverse_of_list (new_one::acc));\n                     loc_start ; \n                     loc_end = lexbuf.lex_curr_p }\n      | _ -> \n        error lexbuf Expect_comma_or_rbracket\n      end\n  and parse_map loc_start  acc lexbuf : Ext_json_types.t = \n    match token () with \n    | Rbrace -> \n        Obj { map = acc ; loc = loc_start}\n    | String key -> \n      begin match token () with \n      | Colon ->\n        let value = json lexbuf in\n        begin match token () with \n        | Rbrace -> Obj {map = Map_string.add acc key value  ; loc = loc_start}\n        | Comma -> \n          parse_map loc_start  (Map_string.add acc key value ) lexbuf \n        | _ -> error lexbuf Expect_comma_or_rbrace\n        end\n      | _ -> error lexbuf Expect_colon\n      end\n    | _ -> error lexbuf Expect_string_or_rbrace\n  in \n  let v = json lexbuf in \n  match token () with \n  | Eof -> v \n  | _ -> error lexbuf Expect_eof\n\nlet parse_json_from_string s = \n  parse_json (Lexing.from_string s )\n\nlet parse_json_from_chan fname in_chan = \n  let lexbuf = \n    Ext_position.lexbuf_from_channel_with_fname\n    in_chan fname in \n  parse_json lexbuf \n\nlet parse_json_from_file s = \n  let in_chan = open_in s in \n  let lexbuf = \n    Ext_position.lexbuf_from_channel_with_fname\n    in_chan s in \n  match parse_json lexbuf with \n  | exception e -> close_in in_chan ; raise e\n  | v  -> close_in in_chan;  v\n\n\n\n\n}\n"
  },
  {
    "path": "analysis/vendor/ext/ext_json_types.ml",
    "content": "(* Copyright (C) 2015-2017 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype loc = Lexing.position\n\ntype json_str = {str: string; loc: loc}\n\ntype json_flo = {flo: string; loc: loc}\n\ntype json_array = {content: t array; loc_start: loc; loc_end: loc}\n\nand json_map = {map: t Map_string.t; loc: loc}\n\nand t =\n  | True of loc\n  | False of loc\n  | Null of loc\n  | Flo of json_flo\n  | Str of json_str\n  | Arr of json_array\n  | Obj of json_map\n"
  },
  {
    "path": "analysis/vendor/ext/ext_list.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nexternal ( .!() ) : 'a array -> int -> 'a = \"%array_unsafe_get\"\n\nlet rec map l f =\n  match l with\n  | [] -> []\n  | [x1] ->\n    let y1 = f x1 in\n    [y1]\n  | [x1; x2] ->\n    let y1 = f x1 in\n    let y2 = f x2 in\n    [y1; y2]\n  | [x1; x2; x3] ->\n    let y1 = f x1 in\n    let y2 = f x2 in\n    let y3 = f x3 in\n    [y1; y2; y3]\n  | [x1; x2; x3; x4] ->\n    let y1 = f x1 in\n    let y2 = f x2 in\n    let y3 = f x3 in\n    let y4 = f x4 in\n    [y1; y2; y3; y4]\n  | x1 :: x2 :: x3 :: x4 :: x5 :: tail ->\n    let y1 = f x1 in\n    let y2 = f x2 in\n    let y3 = f x3 in\n    let y4 = f x4 in\n    let y5 = f x5 in\n    y1 :: y2 :: y3 :: y4 :: y5 :: map tail f\n\nlet rec has_string l f =\n  match l with\n  | [] -> false\n  | [x1] -> x1 = f\n  | [x1; x2] -> x1 = f || x2 = f\n  | [x1; x2; x3] -> x1 = f || x2 = f || x3 = f\n  | x1 :: x2 :: x3 :: x4 -> x1 = f || x2 = f || x3 = f || has_string x4 f\n\nlet rec map_combine l1 l2 f =\n  match (l1, l2) with\n  | [], [] -> []\n  | a1 :: l1, a2 :: l2 -> (f a1, a2) :: map_combine l1 l2 f\n  | _, _ -> invalid_arg \"Ext_list.map_combine\"\n\nlet rec arr_list_combine_unsafe arr l i j acc f =\n  if i = j then acc\n  else\n    match l with\n    | [] -> invalid_arg \"Ext_list.combine\"\n    | h :: tl ->\n      (f arr.!(i), h) :: arr_list_combine_unsafe arr tl (i + 1) j acc f\n\nlet combine_array_append arr l acc f =\n  let len = Array.length arr in\n  arr_list_combine_unsafe arr l 0 len acc f\n\nlet combine_array arr l f =\n  let len = Array.length arr in\n  arr_list_combine_unsafe arr l 0 len [] f\n\nlet rec arr_list_filter_map_unasfe arr l i j acc f =\n  if i = j then acc\n  else\n    match l with\n    | [] -> invalid_arg \"Ext_list.arr_list_filter_map_unsafe\"\n    | h :: tl -> (\n      match f arr.!(i) h with\n      | None -> arr_list_filter_map_unasfe arr tl (i + 1) j acc f\n      | Some v -> v :: arr_list_filter_map_unasfe arr tl (i + 1) j acc f)\n\nlet array_list_filter_map arr l f =\n  let len = Array.length arr 
in\n  arr_list_filter_map_unasfe arr l 0 len [] f\n\nlet rec map_split_opt (xs : 'a list) (f : 'a -> 'b option * 'c option) :\n    'b list * 'c list =\n  match xs with\n  | [] -> ([], [])\n  | x :: xs -> (\n    let c, d = f x in\n    let cs, ds = map_split_opt xs f in\n    ( (match c with\n      | Some c -> c :: cs\n      | None -> cs),\n      match d with\n      | Some d -> d :: ds\n      | None -> ds ))\n\nlet rec map_snd l f =\n  match l with\n  | [] -> []\n  | [(v1, x1)] ->\n    let y1 = f x1 in\n    [(v1, y1)]\n  | [(v1, x1); (v2, x2)] ->\n    let y1 = f x1 in\n    let y2 = f x2 in\n    [(v1, y1); (v2, y2)]\n  | [(v1, x1); (v2, x2); (v3, x3)] ->\n    let y1 = f x1 in\n    let y2 = f x2 in\n    let y3 = f x3 in\n    [(v1, y1); (v2, y2); (v3, y3)]\n  | [(v1, x1); (v2, x2); (v3, x3); (v4, x4)] ->\n    let y1 = f x1 in\n    let y2 = f x2 in\n    let y3 = f x3 in\n    let y4 = f x4 in\n    [(v1, y1); (v2, y2); (v3, y3); (v4, y4)]\n  | (v1, x1) :: (v2, x2) :: (v3, x3) :: (v4, x4) :: (v5, x5) :: tail ->\n    let y1 = f x1 in\n    let y2 = f x2 in\n    let y3 = f x3 in\n    let y4 = f x4 in\n    let y5 = f x5 in\n    (v1, y1) :: (v2, y2) :: (v3, y3) :: (v4, y4) :: (v5, y5) :: map_snd tail f\n\nlet rec map_last l f =\n  match l with\n  | [] -> []\n  | [x1] ->\n    let y1 = f true x1 in\n    [y1]\n  | [x1; x2] ->\n    let y1 = f false x1 in\n    let y2 = f true x2 in\n    [y1; y2]\n  | [x1; x2; x3] ->\n    let y1 = f false x1 in\n    let y2 = f false x2 in\n    let y3 = f true x3 in\n    [y1; y2; y3]\n  | [x1; x2; x3; x4] ->\n    let y1 = f false x1 in\n    let y2 = f false x2 in\n    let y3 = f false x3 in\n    let y4 = f true x4 in\n    [y1; y2; y3; y4]\n  | x1 :: x2 :: x3 :: x4 :: tail ->\n    (* make sure that tail is not empty *)\n    let y1 = f false x1 in\n    let y2 = f false x2 in\n    let y3 = f false x3 in\n    let y4 = f false x4 in\n    y1 :: y2 :: y3 :: y4 :: map_last tail f\n\nlet rec mapi_aux lst i f tail =\n  match lst with\n  | [] -> tail\n  | a :: l 
->\n    let r = f i a in\n    r :: mapi_aux l (i + 1) f tail\n\nlet mapi lst f = mapi_aux lst 0 f []\n\nlet mapi_append lst f tail = mapi_aux lst 0 f tail\n\nlet rec last xs =\n  match xs with\n  | [x] -> x\n  | _ :: tl -> last tl\n  | [] -> invalid_arg \"Ext_list.last\"\n\nlet rec append_aux l1 l2 =\n  match l1 with\n  | [] -> l2\n  | [a0] -> a0 :: l2\n  | [a0; a1] -> a0 :: a1 :: l2\n  | [a0; a1; a2] -> a0 :: a1 :: a2 :: l2\n  | [a0; a1; a2; a3] -> a0 :: a1 :: a2 :: a3 :: l2\n  | [a0; a1; a2; a3; a4] -> a0 :: a1 :: a2 :: a3 :: a4 :: l2\n  | a0 :: a1 :: a2 :: a3 :: a4 :: rest ->\n    a0 :: a1 :: a2 :: a3 :: a4 :: append_aux rest l2\n\nlet append l1 l2 =\n  match l2 with\n  | [] -> l1\n  | _ -> append_aux l1 l2\n\nlet append_one l1 x = append_aux l1 [x]\n\nlet rec map_append l1 l2 f =\n  match l1 with\n  | [] -> l2\n  | [a0] -> f a0 :: l2\n  | [a0; a1] ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    b0 :: b1 :: l2\n  | [a0; a1; a2] ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    let b2 = f a2 in\n    b0 :: b1 :: b2 :: l2\n  | [a0; a1; a2; a3] ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    let b2 = f a2 in\n    let b3 = f a3 in\n    b0 :: b1 :: b2 :: b3 :: l2\n  | [a0; a1; a2; a3; a4] ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    let b2 = f a2 in\n    let b3 = f a3 in\n    let b4 = f a4 in\n    b0 :: b1 :: b2 :: b3 :: b4 :: l2\n  | a0 :: a1 :: a2 :: a3 :: a4 :: rest ->\n    let b0 = f a0 in\n    let b1 = f a1 in\n    let b2 = f a2 in\n    let b3 = f a3 in\n    let b4 = f a4 in\n    b0 :: b1 :: b2 :: b3 :: b4 :: map_append rest l2 f\n\nlet rec fold_right l acc f =\n  match l with\n  | [] -> acc\n  | [a0] -> f a0 acc\n  | [a0; a1] -> f a0 (f a1 acc)\n  | [a0; a1; a2] -> f a0 (f a1 (f a2 acc))\n  | [a0; a1; a2; a3] -> f a0 (f a1 (f a2 (f a3 acc)))\n  | [a0; a1; a2; a3; a4] -> f a0 (f a1 (f a2 (f a3 (f a4 acc))))\n  | a0 :: a1 :: a2 :: a3 :: a4 :: rest ->\n    f a0 (f a1 (f a2 (f a3 (f a4 (fold_right rest acc f)))))\n\nlet rec fold_right2 l r acc f =\n 
 match (l, r) with\n  | [], [] -> acc\n  | [a0], [b0] -> f a0 b0 acc\n  | [a0; a1], [b0; b1] -> f a0 b0 (f a1 b1 acc)\n  | [a0; a1; a2], [b0; b1; b2] -> f a0 b0 (f a1 b1 (f a2 b2 acc))\n  | [a0; a1; a2; a3], [b0; b1; b2; b3] ->\n    f a0 b0 (f a1 b1 (f a2 b2 (f a3 b3 acc)))\n  | [a0; a1; a2; a3; a4], [b0; b1; b2; b3; b4] ->\n    f a0 b0 (f a1 b1 (f a2 b2 (f a3 b3 (f a4 b4 acc))))\n  | a0 :: a1 :: a2 :: a3 :: a4 :: arest, b0 :: b1 :: b2 :: b3 :: b4 :: brest ->\n    f a0 b0\n      (f a1 b1 (f a2 b2 (f a3 b3 (f a4 b4 (fold_right2 arest brest acc f)))))\n  | _, _ -> invalid_arg \"Ext_list.fold_right2\"\n\nlet rec fold_right3 l r last acc f =\n  match (l, r, last) with\n  | [], [], [] -> acc\n  | [a0], [b0], [c0] -> f a0 b0 c0 acc\n  | [a0; a1], [b0; b1], [c0; c1] -> f a0 b0 c0 (f a1 b1 c1 acc)\n  | [a0; a1; a2], [b0; b1; b2], [c0; c1; c2] ->\n    f a0 b0 c0 (f a1 b1 c1 (f a2 b2 c2 acc))\n  | [a0; a1; a2; a3], [b0; b1; b2; b3], [c0; c1; c2; c3] ->\n    f a0 b0 c0 (f a1 b1 c1 (f a2 b2 c2 (f a3 b3 c3 acc)))\n  | [a0; a1; a2; a3; a4], [b0; b1; b2; b3; b4], [c0; c1; c2; c3; c4] ->\n    f a0 b0 c0 (f a1 b1 c1 (f a2 b2 c2 (f a3 b3 c3 (f a4 b4 c4 acc))))\n  | ( a0 :: a1 :: a2 :: a3 :: a4 :: arest,\n      b0 :: b1 :: b2 :: b3 :: b4 :: brest,\n      c0 :: c1 :: c2 :: c3 :: c4 :: crest ) ->\n    f a0 b0 c0\n      (f a1 b1 c1\n         (f a2 b2 c2\n            (f a3 b3 c3 (f a4 b4 c4 (fold_right3 arest brest crest acc f)))))\n  | _, _, _ -> invalid_arg \"Ext_list.fold_right3\"\n\nlet rec map2i l r f =\n  match (l, r) with\n  | [], [] -> []\n  | [a0], [b0] -> [f 0 a0 b0]\n  | [a0; a1], [b0; b1] ->\n    let c0 = f 0 a0 b0 in\n    let c1 = f 1 a1 b1 in\n    [c0; c1]\n  | [a0; a1; a2], [b0; b1; b2] ->\n    let c0 = f 0 a0 b0 in\n    let c1 = f 1 a1 b1 in\n    let c2 = f 2 a2 b2 in\n    [c0; c1; c2]\n  | [a0; a1; a2; a3], [b0; b1; b2; b3] ->\n    let c0 = f 0 a0 b0 in\n    let c1 = f 1 a1 b1 in\n    let c2 = f 2 a2 b2 in\n    let c3 = f 3 a3 b3 in\n    [c0; c1; c2; c3]\n  | [a0; a1; 
a2; a3; a4], [b0; b1; b2; b3; b4] ->\n    let c0 = f 0 a0 b0 in\n    let c1 = f 1 a1 b1 in\n    let c2 = f 2 a2 b2 in\n    let c3 = f 3 a3 b3 in\n    let c4 = f 4 a4 b4 in\n    [c0; c1; c2; c3; c4]\n  | a0 :: a1 :: a2 :: a3 :: a4 :: arest, b0 :: b1 :: b2 :: b3 :: b4 :: brest ->\n    let c0 = f 0 a0 b0 in\n    let c1 = f 1 a1 b1 in\n    let c2 = f 2 a2 b2 in\n    let c3 = f 3 a3 b3 in\n    let c4 = f 4 a4 b4 in\n    c0 :: c1 :: c2 :: c3 :: c4 :: map2i arest brest f\n  | _, _ -> invalid_arg \"Ext_list.map2i\"\n\nlet rec map2 l r f =\n  match (l, r) with\n  | [], [] -> []\n  | [a0], [b0] -> [f a0 b0]\n  | [a0; a1], [b0; b1] ->\n    let c0 = f a0 b0 in\n    let c1 = f a1 b1 in\n    [c0; c1]\n  | [a0; a1; a2], [b0; b1; b2] ->\n    let c0 = f a0 b0 in\n    let c1 = f a1 b1 in\n    let c2 = f a2 b2 in\n    [c0; c1; c2]\n  | [a0; a1; a2; a3], [b0; b1; b2; b3] ->\n    let c0 = f a0 b0 in\n    let c1 = f a1 b1 in\n    let c2 = f a2 b2 in\n    let c3 = f a3 b3 in\n    [c0; c1; c2; c3]\n  | [a0; a1; a2; a3; a4], [b0; b1; b2; b3; b4] ->\n    let c0 = f a0 b0 in\n    let c1 = f a1 b1 in\n    let c2 = f a2 b2 in\n    let c3 = f a3 b3 in\n    let c4 = f a4 b4 in\n    [c0; c1; c2; c3; c4]\n  | a0 :: a1 :: a2 :: a3 :: a4 :: arest, b0 :: b1 :: b2 :: b3 :: b4 :: brest ->\n    let c0 = f a0 b0 in\n    let c1 = f a1 b1 in\n    let c2 = f a2 b2 in\n    let c3 = f a3 b3 in\n    let c4 = f a4 b4 in\n    c0 :: c1 :: c2 :: c3 :: c4 :: map2 arest brest f\n  | _, _ -> invalid_arg \"Ext_list.map2\"\n\nlet rec fold_left_with_offset l accu i f =\n  match l with\n  | [] -> accu\n  | a :: l -> fold_left_with_offset l (f a accu i) (i + 1) f\n\nlet rec filter_map xs (f : 'a -> 'b option) =\n  match xs with\n  | [] -> []\n  | y :: ys -> (\n    match f y with\n    | None -> filter_map ys f\n    | Some z -> z :: filter_map ys f)\n\nlet rec exclude (xs : 'a list) (p : 'a -> bool) : 'a list =\n  match xs with\n  | [] -> []\n  | x :: xs -> if p x then exclude xs p else x :: exclude xs p\n\nlet rec 
exclude_with_val l p =\n  match l with\n  | [] -> None\n  | a0 :: xs -> (\n    if p a0 then Some (exclude xs p)\n    else\n      match xs with\n      | [] -> None\n      | a1 :: rest -> (\n        if p a1 then Some (a0 :: exclude rest p)\n        else\n          match exclude_with_val rest p with\n          | None -> None\n          | Some rest -> Some (a0 :: a1 :: rest)))\n\nlet rec same_length xs ys =\n  match (xs, ys) with\n  | [], [] -> true\n  | _ :: xs, _ :: ys -> same_length xs ys\n  | _, _ -> false\n\nlet init n f =\n  match n with\n  | 0 -> []\n  | 1 ->\n    let a0 = f 0 in\n    [a0]\n  | 2 ->\n    let a0 = f 0 in\n    let a1 = f 1 in\n    [a0; a1]\n  | 3 ->\n    let a0 = f 0 in\n    let a1 = f 1 in\n    let a2 = f 2 in\n    [a0; a1; a2]\n  | 4 ->\n    let a0 = f 0 in\n    let a1 = f 1 in\n    let a2 = f 2 in\n    let a3 = f 3 in\n    [a0; a1; a2; a3]\n  | 5 ->\n    let a0 = f 0 in\n    let a1 = f 1 in\n    let a2 = f 2 in\n    let a3 = f 3 in\n    let a4 = f 4 in\n    [a0; a1; a2; a3; a4]\n  | _ -> Array.to_list (Array.init n f)\n\nlet rec rev_append l1 l2 =\n  match l1 with\n  | [] -> l2\n  | [a0] -> a0 :: l2 (* single element is common *)\n  | [a0; a1] -> a1 :: a0 :: l2\n  | a0 :: a1 :: a2 :: rest -> rev_append rest (a2 :: a1 :: a0 :: l2)\n\nlet rev l = rev_append l []\n\nlet rec small_split_at n acc l =\n  if n <= 0 then (rev acc, l)\n  else\n    match l with\n    | x :: xs -> small_split_at (n - 1) (x :: acc) xs\n    | _ -> invalid_arg \"Ext_list.split_at\"\n\nlet split_at l n = small_split_at n [] l\n\nlet rec split_at_last_aux acc x =\n  match x with\n  | [] -> invalid_arg \"Ext_list.split_at_last\"\n  | [x] -> (rev acc, x)\n  | y0 :: ys -> split_at_last_aux (y0 :: acc) ys\n\nlet split_at_last (x : 'a list) =\n  match x with\n  | [] -> invalid_arg \"Ext_list.split_at_last\"\n  | [a0] -> ([], a0)\n  | [a0; a1] -> ([a0], a1)\n  | [a0; a1; a2] -> ([a0; a1], a2)\n  | [a0; a1; a2; a3] -> ([a0; a1; a2], a3)\n  | [a0; a1; a2; a3; a4] -> ([a0; a1; a2; a3], 
a4)\n  | a0 :: a1 :: a2 :: a3 :: a4 :: rest ->\n    let rev, last = split_at_last_aux [] rest in\n    (a0 :: a1 :: a2 :: a3 :: a4 :: rev, last)\n\n(** can not do loop unroll due to state combination *)\nlet filter_mapi xs f =\n  let rec aux i xs =\n    match xs with\n    | [] -> []\n    | y :: ys -> (\n      match f y i with\n      | None -> aux (i + 1) ys\n      | Some z -> z :: aux (i + 1) ys)\n  in\n  aux 0 xs\n\nlet rec filter_map2 xs ys (f : 'a -> 'b -> 'c option) =\n  match (xs, ys) with\n  | [], [] -> []\n  | u :: us, v :: vs -> (\n    match f u v with\n    | None -> filter_map2 us vs f (* idea: rec f us vs instead? *)\n    | Some z -> z :: filter_map2 us vs f)\n  | _ -> invalid_arg \"Ext_list.filter_map2\"\n\nlet rec rev_map_append l1 l2 f =\n  match l1 with\n  | [] -> l2\n  | a :: l -> rev_map_append l (f a :: l2) f\n\n(** It is not worth loop unrolling, it is already tail-call, and we need to be\n    careful about evaluation order when unroll *)\nlet rec flat_map_aux f acc append lx =\n  match lx with\n  | [] -> rev_append acc append\n  | a0 :: rest ->\n    let new_acc =\n      match f a0 with\n      | [] -> acc\n      | [a0] -> a0 :: acc\n      | [a0; a1] -> a1 :: a0 :: acc\n      | a0 :: a1 :: a2 :: rest -> rev_append rest (a2 :: a1 :: a0 :: acc)\n    in\n    flat_map_aux f new_acc append rest\n\nlet flat_map lx f = flat_map_aux f [] [] lx\n\nlet flat_map_append lx append f = flat_map_aux f [] append lx\n\nlet rec length_compare l n =\n  if n < 0 then `Gt\n  else\n    match l with\n    | _ :: xs -> length_compare xs (n - 1)\n    | [] -> if n = 0 then `Eq else `Lt\n\nlet rec length_ge l n =\n  if n > 0 then\n    match l with\n    | _ :: tl -> length_ge tl (n - 1)\n    | [] -> false\n  else true\n\n(** {[\n      length xs = length ys + n\n    ]} *)\nlet rec length_larger_than_n xs ys n =\n  match (xs, ys) with\n  | _, [] -> length_compare xs n = `Eq\n  | _ :: xs, _ :: ys -> length_larger_than_n xs ys n\n  | [], _ -> false\n\nlet rec group (eq : 'a -> 'a 
-> bool) lst =\n  match lst with\n  | [] -> []\n  | x :: xs -> aux eq x (group eq xs)\n\nand aux eq (x : 'a) (xss : 'a list list) : 'a list list =\n  match xss with\n  | [] -> [[x]]\n  | (y0 :: _ as y) :: ys ->\n    (* cannot be empty *)\n    if eq x y0 then (x :: y) :: ys else y :: aux eq x ys\n  | _ :: _ -> assert false\n\nlet stable_group lst eq = group eq lst |> rev\n\nlet rec drop h n =\n  if n < 0 then invalid_arg \"Ext_list.drop\"\n  else if n = 0 then h\n  else\n    match h with\n    | [] -> invalid_arg \"Ext_list.drop\"\n    | _ :: tl -> drop tl (n - 1)\n\nlet rec find_first x p =\n  match x with\n  | [] -> None\n  | x :: l -> if p x then Some x else find_first l p\n\nlet rec find_first_not xs p =\n  match xs with\n  | [] -> None\n  | a :: l -> if p a then find_first_not l p else Some a\n\nlet rec rev_iter l f =\n  match l with\n  | [] -> ()\n  | [x1] -> f x1\n  | [x1; x2] ->\n    f x2;\n    f x1\n  | [x1; x2; x3] ->\n    f x3;\n    f x2;\n    f x1\n  | [x1; x2; x3; x4] ->\n    f x4;\n    f x3;\n    f x2;\n    f x1\n  | x1 :: x2 :: x3 :: x4 :: x5 :: tail ->\n    rev_iter tail f;\n    f x5;\n    f x4;\n    f x3;\n    f x2;\n    f x1\n\nlet rec iter l f =\n  match l with\n  | [] -> ()\n  | [x1] -> f x1\n  | [x1; x2] ->\n    f x1;\n    f x2\n  | [x1; x2; x3] ->\n    f x1;\n    f x2;\n    f x3\n  | [x1; x2; x3; x4] ->\n    f x1;\n    f x2;\n    f x3;\n    f x4\n  | x1 :: x2 :: x3 :: x4 :: x5 :: tail ->\n    f x1;\n    f x2;\n    f x3;\n    f x4;\n    f x5;\n    iter tail f\n\nlet rec for_all lst p =\n  match lst with\n  | [] -> true\n  | a :: l -> p a && for_all l p\n\nlet rec for_all_snd lst p =\n  match lst with\n  | [] -> true\n  | (_, a) :: l -> p a && for_all_snd l p\n\nlet rec for_all2_no_exn l1 l2 p =\n  match (l1, l2) with\n  | [], [] -> true\n  | a1 :: l1, a2 :: l2 -> p a1 a2 && for_all2_no_exn l1 l2 p\n  | _, _ -> false\n\nlet rec find_opt xs p =\n  match xs with\n  | [] -> None\n  | x :: l -> (\n    match p x with\n    | Some _ as v -> v\n    | None 
-> find_opt l p)\n\nlet rec find_def xs p def =\n  match xs with\n  | [] -> def\n  | x :: l -> (\n    match p x with\n    | Some v -> v\n    | None -> find_def l p def)\n\nlet rec split_map l f =\n  match l with\n  | [] -> ([], [])\n  | [x1] ->\n    let a0, b0 = f x1 in\n    ([a0], [b0])\n  | [x1; x2] ->\n    let a1, b1 = f x1 in\n    let a2, b2 = f x2 in\n    ([a1; a2], [b1; b2])\n  | [x1; x2; x3] ->\n    let a1, b1 = f x1 in\n    let a2, b2 = f x2 in\n    let a3, b3 = f x3 in\n    ([a1; a2; a3], [b1; b2; b3])\n  | [x1; x2; x3; x4] ->\n    let a1, b1 = f x1 in\n    let a2, b2 = f x2 in\n    let a3, b3 = f x3 in\n    let a4, b4 = f x4 in\n    ([a1; a2; a3; a4], [b1; b2; b3; b4])\n  | x1 :: x2 :: x3 :: x4 :: x5 :: tail ->\n    let a1, b1 = f x1 in\n    let a2, b2 = f x2 in\n    let a3, b3 = f x3 in\n    let a4, b4 = f x4 in\n    let a5, b5 = f x5 in\n    let ass, bss = split_map tail f in\n    (a1 :: a2 :: a3 :: a4 :: a5 :: ass, b1 :: b2 :: b3 :: b4 :: b5 :: bss)\n\nlet sort_via_array lst cmp =\n  let arr = Array.of_list lst in\n  Array.sort cmp arr;\n  Array.to_list arr\n\nlet sort_via_arrayf lst cmp f =\n  let arr = Array.of_list lst in\n  Array.sort cmp arr;\n  Ext_array.to_list_f arr f\n\nlet rec assoc_by_string lst (k : string) def =\n  match lst with\n  | [] -> (\n    match def with\n    | None -> assert false\n    | Some x -> x)\n  | (k1, v1) :: rest -> if k1 = k then v1 else assoc_by_string rest k def\n\nlet rec assoc_by_int lst (k : int) def =\n  match lst with\n  | [] -> (\n    match def with\n    | None -> assert false\n    | Some x -> x)\n  | (k1, v1) :: rest -> if k1 = k then v1 else assoc_by_int rest k def\n\nlet rec nth_aux l n =\n  match l with\n  | [] -> None\n  | a :: l -> if n = 0 then Some a else nth_aux l (n - 1)\n\nlet nth_opt l n = if n < 0 then None else nth_aux l n\n\nlet rec iter_snd lst f =\n  match lst with\n  | [] -> ()\n  | (_, x) :: xs ->\n    f x;\n    iter_snd xs f\n\nlet rec iter_fst lst f =\n  match lst with\n  | [] -> ()\n  | (x, 
_) :: xs ->\n    f x;\n    iter_fst xs f\n\nlet rec exists l p =\n  match l with\n  | [] -> false\n  | x :: xs -> p x || exists xs p\n\nlet rec exists_fst l p =\n  match l with\n  | [] -> false\n  | (a, _) :: l -> p a || exists_fst l p\n\nlet rec exists_snd l p =\n  match l with\n  | [] -> false\n  | (_, a) :: l -> p a || exists_snd l p\n\nlet rec concat_append (xss : 'a list list) (xs : 'a list) : 'a list =\n  match xss with\n  | [] -> xs\n  | l :: r -> append l (concat_append r xs)\n\nlet rec fold_left l accu f =\n  match l with\n  | [] -> accu\n  | a :: l -> fold_left l (f accu a) f\n\nlet reduce_from_left lst fn =\n  match lst with\n  | first :: rest -> fold_left rest first fn\n  | _ -> invalid_arg \"Ext_list.reduce_from_left\"\n\nlet rec fold_left2 l1 l2 accu f =\n  match (l1, l2) with\n  | [], [] -> accu\n  | a1 :: l1, a2 :: l2 -> fold_left2 l1 l2 (f a1 a2 accu) f\n  | _, _ -> invalid_arg \"Ext_list.fold_left2\"\n\nlet singleton_exn xs =\n  match xs with\n  | [x] -> x\n  | _ -> assert false\n\nlet rec mem_string (xs : string list) (x : string) =\n  match xs with\n  | [] -> false\n  | a :: l -> a = x || mem_string l x\n\nlet filter lst p =\n  let rec find ~p accu lst =\n    match lst with\n    | [] -> rev accu\n    | x :: l -> if p x then find (x :: accu) l ~p else find accu l ~p\n  in\n  find [] lst ~p\n"
  },
  {
    "path": "analysis/vendor/ext/ext_list.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nval map : 'a list -> ('a -> 'b) -> 'b list\n\nval map_combine : 'a list -> 'b list -> ('a -> 'c) -> ('c * 'b) list\n\nval combine_array : 'a array -> 'b list -> ('a -> 'c) -> ('c * 'b) list\n\nval combine_array_append :\n  'a array -> 'b list -> ('c * 'b) list -> ('a -> 'c) -> ('c * 'b) list\n\nval has_string : string list -> string -> bool\n\nval map_split_opt :\n  'a list -> ('a -> 'b option * 'c option) -> 'b list * 'c list\n\nval mapi : 'a list -> (int -> 'a -> 'b) -> 'b list\n\nval mapi_append : 'a list -> (int -> 'a -> 'b) -> 'b list -> 'b list\n\nval map_snd : ('a * 'b) list -> ('b -> 'c) -> ('a * 'c) list\n\nval map_last : 'a list -> (bool -> 'a -> 'b) -> 'b list\n(** [map_last f xs ] will pass [true] to [f] for the last element, [false]\n    otherwise. For empty list, it returns empty *)\n\nval last : 'a list -> 'a\n(** [last l] return the last element raise if the list is empty *)\n\nval append : 'a list -> 'a list -> 'a list\n\nval append_one : 'a list -> 'a -> 'a list\n\nval map_append : 'b list -> 'a list -> ('b -> 'a) -> 'a list\n\nval fold_right : 'a list -> 'b -> ('a -> 'b -> 'b) -> 'b\n\nval fold_right2 : 'a list -> 'b list -> 'c -> ('a -> 'b -> 'c -> 'c) -> 'c\n\nval fold_right3 :\n  'a list -> 'b list -> 'c list -> 'd -> ('a -> 'b -> 'c -> 'd -> 'd) -> 'd\n\nval map2 : 'a list -> 'b list -> ('a -> 'b -> 'c) -> 'c list\n\nval map2i : 'a list -> 'b list -> (int -> 'a -> 'b -> 'c) -> 'c list\n\nval fold_left_with_offset :\n  'a list -> 'acc -> int -> ('a -> 'acc -> int -> 'acc) -> 'acc\n\nval filter_map : 'a list -> ('a -> 'b option) -> 'b list\n(** @unused *)\n\nval exclude : 'a list -> ('a -> bool) -> 'a list\n(** [exclude p l] is the opposite of [filter p l] *)\n\nval exclude_with_val : 'a list -> ('a -> bool) -> 'a list option\n(** [excludes p l] return a tuple [excluded,newl] where [exluded] is true\n    indicates that at least one element is removed,[newl] is the new list where\n    all [p x] for [x] is false *)\n\nval same_length : 'a 
list -> 'b list -> bool\n\nval init : int -> (int -> 'a) -> 'a list\n\nval split_at : 'a list -> int -> 'a list * 'a list\n(** [split_at n l] will split [l] into two lists [a,b], [a] will be of length\n    [n], otherwise, it will raise *)\n\nval split_at_last : 'a list -> 'a list * 'a\n(** [split_at_last l] It is equivalent to [split_at (List.length l - 1) l ] *)\n\nval filter_mapi : 'a list -> ('a -> int -> 'b option) -> 'b list\n\nval filter_map2 : 'a list -> 'b list -> ('a -> 'b -> 'c option) -> 'c list\n\nval length_compare : 'a list -> int -> [`Gt | `Eq | `Lt]\n\nval length_ge : 'a list -> int -> bool\n\n(** {[\n      length xs = length ys + n\n    ]}\n    input n should be positive TODO: input checking *)\n\nval length_larger_than_n : 'a list -> 'a list -> int -> bool\n\nval rev_map_append : 'a list -> 'b list -> ('a -> 'b) -> 'b list\n(** [rev_map_append f l1 l2] [map f l1] and reverse it to append [l2] This weird\n    semantics is due to it is the most efficient operation we can do *)\n\nval flat_map : 'a list -> ('a -> 'b list) -> 'b list\n\nval flat_map_append : 'a list -> 'b list -> ('a -> 'b list) -> 'b list\n\nval stable_group : 'a list -> ('a -> 'a -> bool) -> 'a list list\n(** [stable_group eq lst] Example: Input:\n    {[\n      stable_group ( = ) [1; 2; 3; 4; 3]\n    ]}\n    Output:\n    {[\n      [[1]; [2]; [4]; [3; 3]]\n    ]}\n    TODO: this is O(n^2) behavior which could be improved later *)\n\nval drop : 'a list -> int -> 'a list\n(** [drop n list] raise when [n] is negative raise when list's length is less\n    than [n] *)\n\nval find_first : 'a list -> ('a -> bool) -> 'a option\n\nval find_first_not : 'a list -> ('a -> bool) -> 'a option\n(** [find_first_not p lst ] if all elements in [lst] pass, return [None]\n    otherwise return the first element [e] as [Some e] which fails the predicate\n*)\n\n(** [find_opt f l] returns [None] if all return [None], otherwise returns the\n    first one. 
*)\n\nval find_opt : 'a list -> ('a -> 'b option) -> 'b option\n\nval find_def : 'a list -> ('a -> 'b option) -> 'b -> 'b\n\nval rev_iter : 'a list -> ('a -> unit) -> unit\n\nval iter : 'a list -> ('a -> unit) -> unit\n\nval for_all : 'a list -> ('a -> bool) -> bool\n\nval for_all_snd : ('a * 'b) list -> ('b -> bool) -> bool\n\nval for_all2_no_exn : 'a list -> 'b list -> ('a -> 'b -> bool) -> bool\n(** [for_all2_no_exn p xs ys] return [true] if all satisfied, [false] otherwise\n    or length not equal *)\n\nval split_map : 'a list -> ('a -> 'b * 'c) -> 'b list * 'c list\n(** [f] is applied follow the list order *)\n\nval reduce_from_left : 'a list -> ('a -> 'a -> 'a) -> 'a\n(** [fn] is applied from left to right *)\n\nval sort_via_array : 'a list -> ('a -> 'a -> int) -> 'a list\n\nval sort_via_arrayf : 'a list -> ('a -> 'a -> int) -> ('a -> 'b) -> 'b list\n\nval assoc_by_string : (string * 'a) list -> string -> 'a option -> 'a\n(** [assoc_by_string default key lst] if [key] is found in the list return that\n    val, other unbox the [default], otherwise [assert false ] *)\n\nval assoc_by_int : (int * 'a) list -> int -> 'a option -> 'a\n\nval nth_opt : 'a list -> int -> 'a option\n\nval iter_snd : ('a * 'b) list -> ('b -> unit) -> unit\n\nval iter_fst : ('a * 'b) list -> ('a -> unit) -> unit\n\nval exists : 'a list -> ('a -> bool) -> bool\n\nval exists_fst : ('a * 'b) list -> ('a -> bool) -> bool\n\nval exists_snd : ('a * 'b) list -> ('b -> bool) -> bool\n\nval concat_append : 'a list list -> 'a list -> 'a list\n\nval fold_left2 : 'a list -> 'b list -> 'c -> ('a -> 'b -> 'c -> 'c) -> 'c\n\nval fold_left : 'a list -> 'b -> ('b -> 'a -> 'b) -> 'b\n\nval singleton_exn : 'a list -> 'a\n\nval mem_string : string list -> string -> bool\n\nval filter : 'a list -> ('a -> bool) -> 'a list\n\nval array_list_filter_map :\n  'a array -> 'b list -> ('a -> 'b -> 'c option) -> 'c list\n"
  },
  {
    "path": "analysis/vendor/ext/ext_marshal.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nlet from_string (s : string) = Marshal.from_string s 0\n"
  },
  {
    "path": "analysis/vendor/ext/ext_marshal.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** Extension to the standard library [Marshall] module *)\n\nval from_string : string -> 'a\n"
  },
  {
    "path": "analysis/vendor/ext/ext_module_system.ml",
    "content": "type t = Commonjs | Esmodule | Es6_global\n"
  },
  {
    "path": "analysis/vendor/ext/ext_modulename.ml",
    "content": "(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nlet good_hint_name module_name offset =\n  let len = String.length module_name in\n  len > offset\n  && (function\n       | 'a' .. 'z' | 'A' .. 'Z' -> true\n       | _ -> false)\n       (String.unsafe_get module_name offset)\n  && Ext_string.for_all_from module_name (offset + 1) (function\n       | 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' -> true\n       | _ -> false)\n\nlet rec collect_start buf s off len =\n  if off >= len then ()\n  else\n    let next = succ off in\n    match String.unsafe_get s off with\n    | 'a' .. 
'z' as c ->\n      Ext_buffer.add_char buf (Char.uppercase_ascii c);\n      collect_next buf s next len\n    | 'A' .. 'Z' as c ->\n      Ext_buffer.add_char buf c;\n      collect_next buf s next len\n    | _ -> collect_start buf s next len\n\nand collect_next buf s off len =\n  if off >= len then ()\n  else\n    let next = off + 1 in\n    match String.unsafe_get s off with\n    | ('a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_') as c ->\n      Ext_buffer.add_char buf c;\n      collect_next buf s next len\n    | '.' | '-' -> collect_start buf s next len\n    | _ -> collect_next buf s next len\n\n(** This is for a js exeternal module, we can change it when printing for\n    example\n    {[\n      var React$1 = require('react');\n      React$1.render(..)\n    ]}\n    Given a name, if duplicated, they should have the same id *)\nlet js_id_name_of_hint_name module_name =\n  let i = Ext_string.rindex_neg module_name '/' in\n  if i >= 0 then (\n    let offset = succ i in\n    if good_hint_name module_name offset then\n      Ext_string.capitalize_ascii (Ext_string.tail_from module_name offset)\n    else\n      let str_len = String.length module_name in\n      let buf = Ext_buffer.create str_len in\n      collect_start buf module_name offset str_len;\n      if Ext_buffer.is_empty buf then Ext_string.capitalize_ascii module_name\n      else Ext_buffer.contents buf)\n  else if good_hint_name module_name 0 then\n    Ext_string.capitalize_ascii module_name\n  else\n    let str_len = String.length module_name in\n    let buf = Ext_buffer.create str_len in\n    collect_start buf module_name 0 str_len;\n    if Ext_buffer.is_empty buf then module_name else Ext_buffer.contents buf\n"
  },
  {
    "path": "analysis/vendor/ext/ext_modulename.mli",
    "content": "(* Copyright (C) 2017 Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval js_id_name_of_hint_name : string -> string\n(** Given an JS bundle name, generate a meaningful bounded module name *)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_namespace.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nlet rec rindex_rec s i =\n  if i < 0 then i\n  else\n    let char = String.unsafe_get s i in\n    if Ext_filename.is_dir_sep char then -1\n    else if char = Literals.ns_sep_char then i\n    else rindex_rec s (i - 1)\n\nlet change_ext_ns_suffix name ext =\n  let i = rindex_rec name (String.length name - 1) in\n  if i < 0 then name ^ ext else String.sub name 0 i ^ ext\n(* FIXME: micro-optimizaiton*)\n\nlet try_split_module_name name =\n  let len = String.length name in\n  let i = rindex_rec name (len - 1) in\n  if i < 0 then None\n  else Some (String.sub name (i + 1) (len - i - 1), String.sub name 0 i)\n\nlet js_name_of_modulename s (case : Ext_js_file_kind.case) suffix : string =\n  let s =\n    match case with\n    | Little -> Ext_string.uncapitalize_ascii s\n    | Upper -> s\n  in\n  change_ext_ns_suffix s suffix\n\n(* https://docs.npmjs.com/files/package.json\n   Some rules:\n   The name must be less than or equal to 214 characters. This includes the scope for scoped packages.\n   The name can't start with a dot or an underscore.\n   New packages must not have uppercase letters in the name.\n   The name ends up being part of a URL, an argument on the command line, and a folder name. Therefore, the name can't contain any non-URL-safe characters.\n*)\nlet is_valid_npm_package_name (s : string) =\n  let len = String.length s in\n  len <= 214 (* magic number forced by npm *)\n  && len > 0\n  &&\n  match String.unsafe_get s 0 with\n  | 'a' .. 'z' | '@' ->\n    Ext_string.for_all_from s 1 (fun x ->\n        match x with\n        | 'a' .. 'z' | '0' .. 
'9' | '_' | '-' -> true\n        | _ -> false)\n  | _ -> false\n\nlet namespace_of_package_name (s : string) : string =\n  let len = String.length s in\n  let buf = Ext_buffer.create len in\n  let add capital ch =\n    Ext_buffer.add_char buf (if capital then Char.uppercase_ascii ch else ch)\n  in\n  let rec aux capital off len =\n    if off >= len then ()\n    else\n      let ch = String.unsafe_get s off in\n      match ch with\n      | 'a' .. 'z' | 'A' .. 'Z' | '0' .. '9' | '_' ->\n        add capital ch;\n        aux false (off + 1) len\n      | '/' | '-' -> aux true (off + 1) len\n      | _ -> aux capital (off + 1) len\n  in\n  aux true 0 len;\n  Ext_buffer.contents buf\n"
  },
  {
    "path": "analysis/vendor/ext/ext_namespace.mli",
    "content": "(* Copyright (C) 2017- Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nval try_split_module_name : string -> (string * string) option\n\n(* Note  we have to output uncapitalized file Name,\n   or at least be consistent, since by reading cmi file on Case insensitive OS, we don't really know it is `list.cmi` or `List.cmi`, so that `require (./list.js)` or `require(./List.js)`\n   relevant issues: #1609, #913\n\n   #1933 when removing ns suffix, don't pass the bound\n   of basename\n*)\nval change_ext_ns_suffix : string -> string -> string\n\nval js_name_of_modulename : string -> Ext_js_file_kind.case -> string -> string\n(** [js_name_of_modulename ~little A-Ns] *)\n\n(* TODO handle cases like\n   '@angular/core'\n   its directory structure is like\n   {[\n     @angular\n     |-------- core\n   ]}\n*)\nval is_valid_npm_package_name : string -> bool\n\nval namespace_of_package_name : string -> string\n"
  },
  {
    "path": "analysis/vendor/ext/ext_namespace_encode.ml",
    "content": "(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nlet make ?ns cunit =\n  match ns with\n  | None -> cunit\n  | Some ns -> cunit ^ Literals.ns_sep ^ ns\n"
  },
  {
    "path": "analysis/vendor/ext/ext_namespace_encode.mli",
    "content": "(* Copyright (C) 2020- Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval make : ?ns:string -> string -> string\n(** [make ~ns:\"Ns\" \"a\" ] A typical example would return \"a-Ns\" Note the\n    namespace comes from the output of [namespace_of_package_name] *)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_obj.ml",
    "content": "(* Copyright (C) 2019-Present Hongbo Zhang, Authors of ReScript \n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nlet rec dump r =\n  if Obj.is_int r then string_of_int (Obj.magic r : int)\n  else\n    (* Block. 
*)\n    let rec get_fields acc = function\n      | 0 -> acc\n      | n ->\n        let n = n - 1 in\n        get_fields (Obj.field r n :: acc) n\n    in\n    let rec is_list r =\n      if Obj.is_int r then r = Obj.repr 0 (* [] *)\n      else\n        let s = Obj.size r and t = Obj.tag r in\n        t = 0 && s = 2 && is_list (Obj.field r 1)\n      (* h :: t *)\n    in\n    let rec get_list r =\n      if Obj.is_int r then []\n      else\n        let h = Obj.field r 0 and t = get_list (Obj.field r 1) in\n        h :: t\n    in\n    let opaque name =\n      (* XXX In future, print the address of value 'r'.  Not possible\n       * in pure OCaml at the moment. *)\n      \"<\" ^ name ^ \">\"\n    in\n    let s = Obj.size r and t = Obj.tag r in\n    (* From the tag, determine the type of block. *)\n    match t with\n    | _ when is_list r ->\n      let fields = get_list r in\n      \"[\" ^ String.concat \"; \" (Ext_list.map fields dump) ^ \"]\"\n    | 0 ->\n      let fields = get_fields [] s in\n      \"(\" ^ String.concat \", \" (Ext_list.map fields dump) ^ \")\"\n    | x when x = Obj.lazy_tag ->\n      (* Note that [lazy_tag .. forward_tag] are < no_scan_tag.  Not\n       * clear if very large constructed values could have the same\n       * tag. XXX *)\n      opaque \"lazy\"\n    | x when x = Obj.closure_tag -> opaque \"closure\"\n    | x when x = Obj.object_tag ->\n      let fields = get_fields [] s in\n      let _clasz, id, slots =\n        match fields with\n        | h :: h' :: t -> (h, h', t)\n        | _ -> assert false\n      in\n      (* No information on decoding the class (first field).  So just print\n       * out the ID and the slots. 
*)\n      \"Object #\" ^ dump id ^ \" (\"\n      ^ String.concat \", \" (Ext_list.map slots dump)\n      ^ \")\"\n    | x when x = Obj.infix_tag -> opaque \"infix\"\n    | x when x = Obj.forward_tag -> opaque \"forward\"\n    | x when x < Obj.no_scan_tag ->\n      let fields = get_fields [] s in\n      \"Tag\" ^ string_of_int t ^ \" (\"\n      ^ String.concat \", \" (Ext_list.map fields dump)\n      ^ \")\"\n    | x when x = Obj.string_tag ->\n      \"\\\"\" ^ String.escaped (Obj.magic r : string) ^ \"\\\"\"\n    | x when x = Obj.double_tag -> string_of_float (Obj.magic r : float)\n    | x when x = Obj.abstract_tag -> opaque \"abstract\"\n    | x when x = Obj.custom_tag -> opaque \"custom\"\n    | x when x = Obj.custom_tag -> opaque \"final\"\n    | x when x = Obj.double_array_tag ->\n      \"[|\"\n      ^ String.concat \";\"\n          (Array.to_list\n             (Array.map string_of_float (Obj.magic r : float array)))\n      ^ \"|]\"\n    | _ -> opaque (Printf.sprintf \"unknown: tag %d size %d\" t s)\n\nlet dump v = dump (Obj.repr v)\n\nlet dump_endline ?(__LOC__ = \"\") v =\n  print_endline __LOC__;\n  print_endline (dump v)\n\nlet pp_any fmt v = Format.fprintf fmt \"@[%s@]\" (dump v)\n\nlet bt () =\n  let raw_bt = Printexc.backtrace_slots (Printexc.get_raw_backtrace ()) in\n  match raw_bt with\n  | None -> ()\n  | Some raw_bt ->\n    let acc = ref [] in\n    for i = Array.length raw_bt - 1 downto 0 do\n      let slot = raw_bt.(i) in\n      match Printexc.Slot.location slot with\n      | None -> ()\n      | Some bt -> (\n        match !acc with\n        | [] -> acc := [bt]\n        | hd :: _ -> if hd <> bt then acc := bt :: !acc)\n    done;\n    Ext_list.iter !acc (fun bt ->\n        Printf.eprintf \"File \\\"%s\\\", line %d, characters %d-%d\\n\" bt.filename\n          bt.line_number bt.start_char bt.end_char)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_obj.mli",
    "content": "(* Copyright (C) 2019-Present Authors of ReScript \n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\nval dump : 'a -> string\n\nval dump_endline : ?__LOC__:string -> 'a -> unit\n\nval pp_any : Format.formatter -> 'a -> unit\n\nval bt : unit -> unit\n"
  },
  {
    "path": "analysis/vendor/ext/ext_option.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nlet map v f =\n  match v with\n  | None -> None\n  | Some x -> Some (f x)\n\nlet iter v f =\n  match v with\n  | None -> ()\n  | Some x -> f x\n\nlet exists v f =\n  match v with\n  | None -> false\n  | Some x -> f x\n"
  },
  {
    "path": "analysis/vendor/ext/ext_option.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** Utilities for [option] type *)\n\nval map : 'a option -> ('a -> 'b) -> 'b option\n\nval iter : 'a option -> ('a -> unit) -> unit\n\nval exists : 'a option -> ('a -> bool) -> bool\n"
  },
  {
    "path": "analysis/vendor/ext/ext_path.ml",
    "content": "(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(* [@@@warning \"-37\"] *)\ntype t =\n  (* | File of string  *)\n  | Dir of string\n[@@unboxed]\n\nlet simple_convert_node_path_to_os_path =\n  if Sys.unix then fun x -> x\n  else if Sys.win32 || Sys.cygwin then Ext_string.replace_slash_backward\n  else failwith (\"Unknown OS : \" ^ Sys.os_type)\n\nlet cwd = lazy (Sys.getcwd ())\n\nlet split_by_sep_per_os : string -> string list =\n  if Ext_sys.is_windows_or_cygwin then fun x ->\n    (* on Windows, we can still accept -bs-package-output lib/js *)\n    Ext_string.split_by\n      (fun x ->\n        match x with\n        | '/' | '\\\\' -> true\n        | _ -> false)\n      x\n  else fun x -> Ext_string.split x '/'\n\n(** example\n    {[\n      \"/bb/mbigc/mbig2899/bgit/rescript/jscomp/stdlib/external/pervasives.cmj\"\n        \"/bb/mbigc/mbig2899/bgit/rescript/jscomp/stdlib/ocaml_array.ml\"\n    ]}\n\n    The other way\n    {[\n      \"/bb/mbigc/mbig2899/bgit/rescript/jscomp/stdlib/ocaml_array.ml\"\n        \"/bb/mbigc/mbig2899/bgit/rescript/jscomp/stdlib/external/pervasives.cmj\"\n    ]}\n    {[\n      \"/bb/mbigc/mbig2899/bgit/rescript/jscomp/stdlib//ocaml_array.ml\"\n    ]}\n    {[\n      /a/b\n      /c/d\n    ]} *)\nlet node_relative_path ~from:(file_or_dir_2 : t) (file_or_dir_1 : t) =\n  let relevant_dir1 =\n    match file_or_dir_1 with\n    | Dir x -> x\n    (* | File file1 ->  Filename.dirname file1 *)\n  in\n  let relevant_dir2 =\n    match file_or_dir_2 with\n    | Dir x -> x\n    (* | File file2 -> Filename.dirname file2  *)\n  in\n  let dir1 = split_by_sep_per_os relevant_dir1 in\n  let dir2 = split_by_sep_per_os relevant_dir2 in\n  let rec go (dir1 : string list) (dir2 : string list) =\n    match (dir1, dir2) with\n    | \".\" :: xs, ys -> go xs ys\n    | xs, \".\" :: ys -> go xs ys\n    | x :: xs, y :: ys when x = y -> go xs ys\n    | _, _ -> Ext_list.map_append dir2 dir1 (fun _ -> Literals.node_parent)\n  in\n  match go dir1 dir2 with\n  | x :: _ as ys when x = Literals.node_parent ->\n    
String.concat Literals.node_sep ys\n  | ys -> String.concat Literals.node_sep @@ (Literals.node_current :: ys)\n\nlet node_concat ~dir base = dir ^ Literals.node_sep ^ base\n\nlet node_rebase_file ~from ~to_ file =\n  node_concat\n    ~dir:\n      (if from = to_ then Literals.node_current\n       else node_relative_path ~from:(Dir from) (Dir to_))\n    file\n\n(***\n   {[\n     Filename.concat \".\" \"\";;\n     \"./\"\n   ]}\n*)\nlet combine path1 path2 =\n  if Filename.is_relative path2 then\n    if Ext_string.is_empty path2 then path1\n    else if path1 = Filename.current_dir_name then path2\n    else if path2 = Filename.current_dir_name then path1\n    else Filename.concat path1 path2\n  else path2\n\nlet ( // ) x y =\n  if x = Filename.current_dir_name then y\n  else if y = Filename.current_dir_name then x\n  else Filename.concat x y\n\n(** {[\n      split_aux \"//ghosg//ghsogh/\";;\n      - : string * string list = (\"/\", [\"ghosg\"; \"ghsogh\"])\n    ]}\n    Note that\n    {[\n      Filename.dirname \"/a/\"\n      = \"/\" Filename.dirname \"/a/b/\"\n      = Filename.dirname \"/a/b\" = \"/a\"\n    ]}\n    Special case:\n    {[\n      basename \"//\" = \"/\" basename \"///\" = \"/\"\n    ]}\n    {[\n      basename \"\" = \".\" basename \"\" = \".\" dirname \"\" = \".\" dirname \"\" = \".\"\n    ]} *)\nlet split_aux p =\n  let rec go p acc =\n    let dir = Filename.dirname p in\n    if dir = p then (dir, acc)\n    else\n      let new_path = Filename.basename p in\n      if Ext_string.equal new_path Filename.dir_sep then go dir acc\n        (* We could do more path simplification here\n           leave to [rel_normalized_absolute_path]\n        *)\n      else go dir (new_path :: acc)\n  in\n\n  go p []\n\n(** TODO: optimization if [from] and [to] resolve to the same path, a\n    zero-length string is returned\n\n    This function is useed in [es6-global] and [amdjs-global] format and\n    tailored for `rollup` *)\nlet rel_normalized_absolute_path ~from to_ =\n  
let root1, paths1 = split_aux from in\n  let root2, paths2 = split_aux to_ in\n  if root1 <> root2 then root2\n  else\n    let rec go xss yss =\n      match (xss, yss) with\n      | x :: xs, y :: ys ->\n        if Ext_string.equal x y then go xs ys\n        else if x = Filename.current_dir_name then go xs yss\n        else if y = Filename.current_dir_name then go xss ys\n        else\n          let start =\n            Ext_list.fold_left xs Ext_string.parent_dir_lit (fun acc _ ->\n                acc // Ext_string.parent_dir_lit)\n          in\n          Ext_list.fold_left yss start (fun acc v -> acc // v)\n      | [], [] -> Ext_string.empty\n      | [], y :: ys -> Ext_list.fold_left ys y (fun acc x -> acc // x)\n      | _ :: xs, [] ->\n        Ext_list.fold_left xs Ext_string.parent_dir_lit (fun acc _ ->\n            acc // Ext_string.parent_dir_lit)\n    in\n    let v = go paths1 paths2 in\n\n    if Ext_string.is_empty v then Literals.node_current\n    else if\n      v = \".\" || v = \"..\"\n      || Ext_string.starts_with v \"./\"\n      || Ext_string.starts_with v \"../\"\n    then v\n    else \"./\" ^ v\n\n(*TODO: could be hgighly optimized later\n  {[\n    normalize_absolute_path \"/gsho/./..\";;\n\n    normalize_absolute_path \"/a/b/../c../d/e/f\";;\n\n    normalize_absolute_path \"/gsho/./..\";;\n\n    normalize_absolute_path \"/gsho/./../..\";;\n\n    normalize_absolute_path \"/a/b/c/d\";;\n\n    normalize_absolute_path \"/a/b/c/d/\";;\n\n    normalize_absolute_path \"/a/\";;\n\n    normalize_absolute_path \"/a\";;\n  ]}\n*)\n\n(** See tests in {!Ounit_path_tests} *)\nlet normalize_absolute_path x =\n  let drop_if_exist xs =\n    match xs with\n    | [] -> []\n    | _ :: xs -> xs\n  in\n  let rec normalize_list acc paths =\n    match paths with\n    | [] -> acc\n    | x :: xs ->\n      if Ext_string.equal x Ext_string.current_dir_lit then\n        normalize_list acc xs\n      else if Ext_string.equal x Ext_string.parent_dir_lit then\n        normalize_list 
(drop_if_exist acc) xs\n      else normalize_list (x :: acc) xs\n  in\n  let root, paths = split_aux x in\n  let rev_paths = normalize_list [] paths in\n  let rec go acc rev_paths =\n    match rev_paths with\n    | [] -> Filename.concat root acc\n    | last :: rest -> go (Filename.concat last acc) rest\n  in\n  match rev_paths with\n  | [] -> root\n  | last :: rest -> go last rest\n\nlet absolute_path cwd s =\n  let process s =\n    let s = if Filename.is_relative s then Lazy.force cwd // s else s in\n    (* Now simplify . and .. components *)\n    let rec aux s =\n      let base, dir = (Filename.basename s, Filename.dirname s) in\n      if dir = s then dir\n      else if base = Filename.current_dir_name then aux dir\n      else if base = Filename.parent_dir_name then Filename.dirname (aux dir)\n      else aux dir // base\n    in\n    aux s\n  in\n  process s\n\nlet absolute_cwd_path s = absolute_path cwd s\n\n(* let absolute cwd s =\n   match s with\n   | File x -> File (absolute_path cwd x )\n   | Dir x -> Dir (absolute_path cwd x) *)\n\nlet concat dirname filename =\n  if filename = Filename.current_dir_name then dirname\n  else if dirname = Filename.current_dir_name then filename\n  else Filename.concat dirname filename\n\nlet check_suffix_case = Ext_string.ends_with\n\n(* Input must be absolute directory *)\nlet rec find_root_filename ~cwd filenames =\n  let file_exists =\n    Ext_list.exists filenames (fun filename ->\n        Sys.file_exists (Filename.concat cwd filename))\n  in\n  if file_exists then cwd\n  else\n    let cwd' = Filename.dirname cwd in\n    if String.length cwd' < String.length cwd then\n      find_root_filename ~cwd:cwd' filenames\n    else\n      Ext_fmt.failwithf ~loc:__LOC__ \"%s not found from %s\" (List.hd filenames)\n        cwd\n\nlet find_config_dir cwd =\n  find_root_filename ~cwd [Literals.rescript_json; Literals.bsconfig_json]\n\nlet package_dir = lazy (find_config_dir (Lazy.force cwd))\n"
  },
  {
    "path": "analysis/vendor/ext/ext_path.mli",
    "content": "(* Copyright (C) 2017 Authors of ReScript\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype t\n\nval simple_convert_node_path_to_os_path : string -> string\n(** Js_output is node style, which means separator is only '/'\n\n    if the path contains 'node_modules', [node_relative_path] will discard its\n    prefix and just treat it as a library instead *)\n\nval combine : string -> string -> string\n(** [combine path1 path2] 1. add some simplifications when concatenating 2. 
when\n    [path2] is absolute, return [path2] *)\n\n(** {[\n      get_extension \"a.txt\" = \".txt\" get_extension \"a\" = \"\"\n    ]} *)\n\nval node_rebase_file : from:string -> to_:string -> string -> string\n\nval rel_normalized_absolute_path : from:string -> string -> string\n(** TODO: could be highly optimized if [from] and [to] resolve to the same path,\n    a zero-length string is returned Given that two paths are directory\n\n    A typical use case is\n    {[\n      Filename.concat\n        (rel_normalized_absolute_path cwd (Filename.dirname a))\n        (Filename.basename a)\n    ]} *)\n\nval normalize_absolute_path : string -> string\n\nval absolute_cwd_path : string -> string\n\nval concat : string -> string -> string\n(** [concat dirname filename] The same as {!Filename.concat} except a tiny\n    optimization for current directory simplification *)\n\nval check_suffix_case : string -> string -> bool\n\n(* It is lazy so that it will not hit errors when in script mode *)\nval package_dir : string Lazy.t\n"
  },
  {
    "path": "analysis/vendor/ext/ext_pervasives.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nexternal reraise : exn -> 'a = \"%reraise\"\n\nlet finally v ~clean:action f =\n  match f v with\n  | exception e ->\n    action v;\n    reraise e\n  | e ->\n    action v;\n    e\n\n(* let try_it f  =\n   try ignore (f ()) with _ -> () *)\n\nlet with_file_as_chan filename f =\n  finally (open_out_bin filename) ~clean:close_out f\n\nlet max_int (x : int) y = if x >= y then x else y\n\nlet min_int (x : int) y = if x < y then x else y\n\nlet max_int_option (x : int option) y =\n  match (x, y) with\n  | None, _ -> y\n  | Some _, None -> x\n  | Some x0, Some y0 -> if x0 >= y0 then x else y\n\n(* external id : 'a -> 'a = \"%identity\" *)\n\n(*\n   let hash_variant s =\n     let accu = ref 0 in\n     for i = 0 to String.length s - 1 do\n       accu := 223 * !accu + Char.code s.[i]\n     done;\n     (* reduce to 31 bits *)\n     accu := !accu land (1 lsl 31 - 1);\n     (* make it signed for 64 bits architectures *)\n     if !accu > 0x3FFFFFFF then !accu - (1 lsl 31) else !accu *)\n\n(* let todo loc =\n   failwith (loc ^ \" Not supported yet\")\n*)\n\nlet rec int_of_string_aux s acc off len =\n  if off >= len then acc\n  else\n    let d = Char.code (String.unsafe_get s off) - 48 in\n    if d >= 0 && d <= 9 then int_of_string_aux s ((10 * acc) + d) (off + 1) len\n    else -1\n(* error *)\n\nlet nat_of_string_exn (s : string) =\n  let acc = int_of_string_aux s 0 0 (String.length s) in\n  if acc < 0 then invalid_arg s else acc\n\n(** return index *)\nlet parse_nat_of_string (s : string) (cursor : int ref) =\n  let current = !cursor in\n  assert (current >= 0);\n  let acc = ref 0 in\n  let s_len = String.length s in\n  let todo = ref true in\n  let cur = ref current in\n  while !todo && !cursor < s_len do\n    let d = Char.code (String.unsafe_get s !cur) - 48 in\n    if d >= 0 && d <= 9 then (\n      acc := (10 * !acc) + d;\n      incr cur)\n    else todo := false\n  done;\n  cursor := !cur;\n  !acc\n"
  },
  {
    "path": "analysis/vendor/ext/ext_pervasives.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(** Extension to standard library [Pervavives] module, safe to open *)\n\nexternal reraise : exn -> 'a = \"%reraise\"\n\nval finally : 'a -> clean:('a -> unit) -> ('a -> 'b) -> 'b\n\n(* val try_it : (unit -> 'a) ->  unit  *)\n\nval with_file_as_chan : string -> (out_channel -> 'a) -> 'a\n\nval max_int : int -> int -> int\n\nval min_int : int -> int -> int\n\nval max_int_option : int option -> int option -> int option\n\n(* external id : 'a -> 'a = \"%identity\" *)\n\n(** Copied from {!Btype.hash_variant}: need sync up and add test case *)\n(* val hash_variant : string -> int *)\n\n(* val todo : string -> 'a *)\n\nval nat_of_string_exn : string -> int\n\nval parse_nat_of_string : string -> int ref -> int\n"
  },
  {
    "path": "analysis/vendor/ext/ext_position.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype t = Lexing.position = {\n  pos_fname: string;\n  pos_lnum: int;\n  pos_bol: int;\n  pos_cnum: int;\n}\n\nlet offset (x : t) (y : t) =\n  {\n    x with\n    pos_lnum = x.pos_lnum + y.pos_lnum - 1;\n    pos_cnum = x.pos_cnum + y.pos_cnum;\n    pos_bol = (if y.pos_lnum = 1 then x.pos_bol else x.pos_cnum + y.pos_bol);\n  }\n\nlet print fmt (pos : t) =\n  Format.fprintf fmt \"(line %d, column %d)\" pos.pos_lnum\n    (pos.pos_cnum - pos.pos_bol)\n\nlet lexbuf_from_channel_with_fname ic fname =\n  let x = Lexing.from_function (fun buf n -> input ic buf 0 n) in\n  let pos : t =\n    {\n      pos_fname = fname;\n      pos_lnum = 1;\n      pos_bol = 0;\n      pos_cnum = 0 (* copied from zero_pos*);\n    }\n  in\n  x.lex_start_p <- pos;\n  x.lex_curr_p <- pos;\n  x\n"
  },
  {
    "path": "analysis/vendor/ext/ext_position.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype t = Lexing.position = {\n  pos_fname: string;\n  pos_lnum: int;\n  pos_bol: int;\n  pos_cnum: int;\n}\n\nval offset : t -> t -> t\n(** [offset pos newpos] return a new position here [newpos] is zero based, the\n    use case is that at position [pos], we get a string and Lexing from that\n    string, therefore, we get a [newpos] and we need rebase it on top of [pos]\n*)\n\nval lexbuf_from_channel_with_fname : in_channel -> string -> Lexing.lexbuf\n\nval print : Format.formatter -> t -> unit\n"
  },
  {
    "path": "analysis/vendor/ext/ext_pp.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nmodule L = struct\n  let space = \" \"\n\n  let indent_str = \"  \"\nend\n\nlet indent_length = String.length L.indent_str\n\ntype t = {\n  output_string: string -> unit;\n  output_char: char -> unit;\n  flush: unit -> unit;\n  mutable indent_level: int;\n  mutable last_new_line: bool;\n      (* only when we print newline, we print the indent *)\n}\n\nlet from_channel chan =\n  {\n    output_string = (fun s -> output_string chan s);\n    output_char = (fun c -> output_char chan c);\n    flush = (fun _ -> flush chan);\n    indent_level = 0;\n    last_new_line = false;\n  }\n\nlet from_buffer buf =\n  {\n    output_string = (fun s -> Buffer.add_string buf s);\n    output_char = (fun c -> Buffer.add_char buf c);\n    flush = (fun _ -> ());\n    indent_level = 0;\n    last_new_line = false;\n  }\n\n(* If we have [newline] in [s],\n   all indentations will be broken\n   in the future, we can detect this in [s]\n*)\nlet string t s =\n  t.output_string s;\n  t.last_new_line <- false\n\nlet newline t =\n  if not t.last_new_line then (\n    t.output_char '\\n';\n    for _ = 0 to t.indent_level - 1 do\n      t.output_string L.indent_str\n    done;\n    t.last_new_line <- true)\n\nlet at_least_two_lines t =\n  if not t.last_new_line then t.output_char '\\n';\n  t.output_char '\\n';\n  for _ = 0 to t.indent_level - 1 do\n    t.output_string L.indent_str\n  done;\n  t.last_new_line <- true\n\nlet force_newline t =\n  t.output_char '\\n';\n  for _ = 0 to t.indent_level - 1 do\n    t.output_string L.indent_str\n  done;\n  t.last_new_line <- true\n\nlet space t = string t L.space\n\nlet nspace t n = string t (String.make n ' ')\n\nlet group t i action =\n  if i = 0 then action ()\n  else\n    let old = t.indent_level in\n    t.indent_level <- t.indent_level + i;\n    Ext_pervasives.finally ~clean:(fun _ -> t.indent_level <- old) () action\n\nlet vgroup = group\n\nlet paren t action =\n  string t \"(\";\n  let v = action () in\n  string t \")\";\n  v\n\nlet brace fmt u =\n  
string fmt \"{\";\n  (* break1 fmt ; *)\n  let v = u () in\n  string fmt \"}\";\n  v\n\nlet bracket fmt u =\n  string fmt \"[\";\n  let v = u () in\n  string fmt \"]\";\n  v\n\nlet brace_vgroup st n action =\n  string st \"{\";\n  let v =\n    vgroup st n (fun _ ->\n        newline st;\n        let v = action () in\n        v)\n  in\n  force_newline st;\n  string st \"}\";\n  v\n\nlet bracket_vgroup st n action =\n  string st \"[\";\n  let v =\n    vgroup st n (fun _ ->\n        newline st;\n        let v = action () in\n        v)\n  in\n  force_newline st;\n  string st \"]\";\n  v\n\nlet bracket_group st n action = group st n (fun _ -> bracket st action)\n\nlet paren_vgroup st n action =\n  string st \"(\";\n  let v =\n    group st n (fun _ ->\n        newline st;\n        let v = action () in\n        v)\n  in\n  newline st;\n  string st \")\";\n  v\n\nlet paren_group st n action = group st n (fun _ -> paren st action)\n\nlet cond_paren_group st b action =\n  if b then paren_group st 0 action else action ()\n\nlet brace_group st n action = group st n (fun _ -> brace st action)\n\n(* let indent t n =\n   t.indent_level <- t.indent_level + n *)\n\nlet flush t () = t.flush ()\n"
  },
  {
    "path": "analysis/vendor/ext/ext_pp.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype t\n(** A simple pretty printer\n\n    Advantage compared with [Format], [P.newline] does not screw the layout,\n    have better control when do a newline (sicne JS has ASI) Easy to tweak\n\n    - be a little smarter\n    - buffer the last line, so that we can do a smart newline, when it's really\n      safe to do so *)\n\nval indent_length : int\n\nval string : t -> string -> unit\n\nval space : t -> unit\n\nval nspace : t -> int -> unit\n\nval group : t -> int -> (unit -> 'a) -> 'a\n(** [group] will record current indentation and indent futher *)\n\nval vgroup : t -> int -> (unit -> 'a) -> 'a\n\nval paren : t -> (unit -> 'a) -> 'a\n\nval brace : t -> (unit -> 'a) -> 'a\n\nval paren_group : t -> int -> (unit -> 'a) -> 'a\n\nval cond_paren_group : t -> bool -> (unit -> 'a) -> 'a\n\nval paren_vgroup : t -> int -> (unit -> 'a) -> 'a\n\nval brace_group : t -> int -> (unit -> 'a) -> 'a\n\nval brace_vgroup : t -> int -> (unit -> 'a) -> 'a\n\nval bracket_group : t -> int -> (unit -> 'a) -> 'a\n\nval bracket_vgroup : t -> int -> (unit -> 'a) -> 'a\n\nval newline : t -> unit\n\nval at_least_two_lines : t -> unit\n\nval from_channel : out_channel -> t\n\nval from_buffer : Buffer.t -> t\n\nval flush : t -> unit -> unit\n"
  },
  {
    "path": "analysis/vendor/ext/ext_pp_scope.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype t = int Map_int.t Map_string.t\n\n(*\n   -- \"name\" --> int map -- stamp --> index suffix\n*)\nlet empty : t = Map_string.empty\n\nlet rec print fmt v =\n  Format.fprintf fmt \"@[<v>{\";\n  Map_string.iter v (fun k m ->\n      Format.fprintf fmt \"%s: @[%a@],@ \" k print_int_map m);\n  Format.fprintf fmt \"}@]\"\n\nand print_int_map fmt m =\n  Map_int.iter m (fun k v -> Format.fprintf fmt \"%d - %d\" k v)\n\nlet add_ident ~mangled:name (stamp : int) (cxt : t) : int * t =\n  match Map_string.find_opt cxt name with\n  | None -> (0, Map_string.add cxt name (Map_int.add Map_int.empty stamp 0))\n  | Some imap -> (\n    match Map_int.find_opt imap stamp with\n    | None ->\n      let v = Map_int.cardinal imap in\n      (v, Map_string.add cxt name (Map_int.add imap stamp v))\n    | Some i -> (i, cxt))\n\n(** same as {!Js_dump.ident} except it generates a string instead of doing the\n    printing For fast/debug mode, we can generate the name as\n    [Printf.sprintf \"%s$%d\" name id.stamp] which is not relevant to the context\n\n    Attention:\n    - $$Array.length, due to the fact that global module is always printed in\n      the begining(via imports), so you get a gurantee, (global modules will not\n      be printed as [List$1])\n\n    However, this means we loose the ability of dynamic loading, is it a big\n    deal? we can fix this by a scanning first, since we already know which\n    modules are global\n\n    check [test/test_global_print.ml] for regression\n    - collision It is obvious that for the same identifier that they print the\n      same name.\n\n    It also needs to be hold that for two different identifiers, they print\n    different names:\n    - This happens when they escape to the same name and share the same stamp So\n      the key has to be mangled name + stamp otherwise, if two identifier\n      happens to have same mangled name, if we use the original name as key,\n      they can have same id (like 0). 
then it caused a collision\n\n    Here we can guarantee that if mangled name and stamp are not all the same\n    they can not have a collision *)\nlet str_of_ident (cxt : t) (id : Ident.t) : string * t =\n  if Ext_ident.is_js id then (* reserved by compiler *)\n    (id.name, cxt)\n  else\n    let id_name = id.name in\n    let name = Ext_ident.convert id_name in\n    let i, new_cxt = add_ident ~mangled:name id.stamp cxt in\n    ((if i == 0 then name else Printf.sprintf \"%s$%d\" name i), new_cxt)\n\nlet ident (cxt : t) f (id : Ident.t) : t =\n  let str, cxt = str_of_ident cxt id in\n  Ext_pp.string f str;\n  cxt\n\nlet merge (cxt : t) (set : Set_ident.t) =\n  Set_ident.fold set cxt (fun ident acc ->\n      snd (add_ident ~mangled:(Ext_ident.convert ident.name) ident.stamp acc))\n\n(* Assume that all idents are already in [scope]\n   so both [param/0] and [param/1] are in idents, we don't need\n   update twice,  once is enough\n*)\nlet sub_scope (scope : t) (idents : Set_ident.t) : t =\n  Set_ident.fold idents empty (fun {name} acc ->\n      let mangled = Ext_ident.convert name in\n      match Map_string.find_exn scope mangled with\n      | exception Not_found -> assert false\n      | imap ->\n        if Map_string.mem acc mangled then acc\n        else Map_string.add acc mangled imap)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_pp_scope.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** Scope type to improve identifier name printing Defines scope type [t], so\n    that the pretty printer would print more beautiful code: print [identifer]\n    instead of [identifier$1234] when it can *)\n\ntype t\n\nval empty : t\n\nval print : Format.formatter -> t -> unit\n\nval sub_scope : t -> Set_ident.t -> t\n\nval merge : t -> Set_ident.t -> t\n\nval str_of_ident : t -> Ident.t -> string * t\n\nval ident : t -> Ext_pp.t -> Ident.t -> t\n"
  },
  {
    "path": "analysis/vendor/ext/ext_ref.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nlet non_exn_protect r v body =\n  let old = !r in\n  r := v;\n  let res = body () in\n  r := old;\n  res\n\nlet protect r v body =\n  let old = !r in\n  try\n    r := v;\n    let res = body () in\n    r := old;\n    res\n  with x ->\n    r := old;\n    raise x\n\nlet non_exn_protect2 r1 r2 v1 v2 body =\n  let old1 = !r1 in\n  let old2 = !r2 in\n  r1 := v1;\n  r2 := v2;\n  let res = body () in\n  r1 := old1;\n  r2 := old2;\n  res\n\nlet protect2 r1 r2 v1 v2 body =\n  let old1 = !r1 in\n  let old2 = !r2 in\n  try\n    r1 := v1;\n    r2 := v2;\n    let res = body () in\n    r1 := old1;\n    r2 := old2;\n    res\n  with x ->\n    r1 := old1;\n    r2 := old2;\n    raise x\n\nlet protect_list rvs body =\n  let olds = Ext_list.map rvs (fun (x, _) -> !x) in\n  let () = List.iter (fun (x, y) -> x := y) rvs in\n  try\n    let res = body () in\n    List.iter2 (fun (x, _) old -> x := old) rvs olds;\n    res\n  with e ->\n    List.iter2 (fun (x, _) old -> x := old) rvs olds;\n    raise e\n"
  },
  {
    "path": "analysis/vendor/ext/ext_ref.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** [non_exn_protect ref value f] assusme [f()] would not raise *)\n\nval non_exn_protect : 'a ref -> 'a -> (unit -> 'b) -> 'b\n\nval protect : 'a ref -> 'a -> (unit -> 'b) -> 'b\n\nval protect2 : 'a ref -> 'b ref -> 'a -> 'b -> (unit -> 'c) -> 'c\n\nval non_exn_protect2 : 'a ref -> 'b ref -> 'a -> 'b -> (unit -> 'c) -> 'c\n(** [non_exn_protect2 refa refb va vb f ] assume [f ()] would not raise *)\n\nval protect_list : ('a ref * 'a) list -> (unit -> 'b) -> 'b\n"
  },
  {
    "path": "analysis/vendor/ext/ext_scc.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype node = Vec_int.t\n\n(** [int] as data for this algorithm Pros: 1. Easy to eoncode algorithm\n    (especially given that the capacity of node is known) 2. Algorithms itself\n    are much more efficient 3. Node comparison semantics is clear 4. Easy to\n    print output Cons: 1. 
post processing input data *)\nlet min_int (x : int) y = if x < y then x else y\n\nlet graph e =\n  let index = ref 0 in\n  let s = Vec_int.empty () in\n\n  let output = Int_vec_vec.empty () in\n  (* collect output *)\n  let node_numes = Array.length e in\n\n  let on_stack_array = Array.make node_numes false in\n  let index_array = Array.make node_numes (-1) in\n  let lowlink_array = Array.make node_numes (-1) in\n\n  let rec scc v_data =\n    let new_index = !index + 1 in\n    index := new_index;\n    Vec_int.push s v_data;\n\n    index_array.(v_data) <- new_index;\n    lowlink_array.(v_data) <- new_index;\n    on_stack_array.(v_data) <- true;\n    let v = e.(v_data) in\n    Vec_int.iter v (fun w_data ->\n        if Array.unsafe_get index_array w_data < 0 then (\n          (* not processed *)\n          scc w_data;\n          Array.unsafe_set lowlink_array v_data\n            (min_int\n               (Array.unsafe_get lowlink_array v_data)\n               (Array.unsafe_get lowlink_array w_data)))\n        else if Array.unsafe_get on_stack_array w_data then\n          (* successor is in stack and hence in current scc *)\n          Array.unsafe_set lowlink_array v_data\n            (min_int\n               (Array.unsafe_get lowlink_array v_data)\n               (Array.unsafe_get lowlink_array w_data)));\n\n    if\n      Array.unsafe_get lowlink_array v_data\n      = Array.unsafe_get index_array v_data\n    then (\n      (* start a new scc *)\n      let s_len = Vec_int.length s in\n      let last_index = ref (s_len - 1) in\n      let u = ref (Vec_int.unsafe_get s !last_index) in\n      while !u <> v_data do\n        Array.unsafe_set on_stack_array !u false;\n        last_index := !last_index - 1;\n        u := Vec_int.unsafe_get s !last_index\n      done;\n      on_stack_array.(v_data) <- false;\n      (* necessary *)\n      Int_vec_vec.push output\n        (Vec_int.get_and_delete_range s !last_index (s_len - !last_index)))\n  in\n  for i = 0 to node_numes - 1 do\n   
 if Array.unsafe_get index_array i < 0 then scc i\n  done;\n  output\n\nlet graph_check v =\n  let v = graph v in\n  ( Int_vec_vec.length v,\n    Int_vec_vec.fold_left (fun acc x -> Vec_int.length x :: acc) [] v )\n"
  },
  {
    "path": "analysis/vendor/ext/ext_scc.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype node = Vec_int.t\n\nval graph : Vec_int.t array -> Int_vec_vec.t\n(** Assume input is int array with offset from 0 Typical input\n    {[\n      [|\n        [ 1 ; 2 ]; // 0 -> 1,  0 -> 2\n                     [ 1 ];   // 0 -> 1\n          [ 2 ]  // 0 -> 2\n      |]\n    ]}\n    Note that we can tell how many nodes by calculating [Array.length] of the\n    input *)\n\nval graph_check : node array -> int * int list\n(** Used for unit test *)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_spec.ml",
    "content": "(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(* A small module which is also used by {!Bsb_helper} *)\ntype 'a t = (string * 'a * string) array\n\nlet rec unsafe_loop i (l : 'a t) n x =\n  if i = n then None\n  else\n    let y1, y2, _ = Array.unsafe_get l i in\n    if y1 = x then Some y2 else unsafe_loop (i + 1) l n x\n\nlet assoc3 (l : 'a t) (x : string) : 'a option =\n  let n = Array.length l in\n  unsafe_loop 0 l n x\n"
  },
  {
    "path": "analysis/vendor/ext/ext_spec.mli",
    "content": "type 'a t = (string * 'a * string) array\n\nval assoc3 : 'a t -> string -> 'a option\n"
  },
  {
    "path": "analysis/vendor/ext/ext_string.ml",
    "content": "(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P.\n * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n\n\n\n\n\n\n(*\n   {[ split \" test_unsafe_obj_ffi_ppx.cmi\" ~keep_empty:false ' ']}\n*)\nlet split_by ?(keep_empty=false) is_delim str =\n  let len = String.length str in\n  let rec loop acc last_pos pos =\n    if pos = -1 then\n      if last_pos = 0 && not keep_empty then\n\n        acc\n      else \n        String.sub str 0 last_pos :: acc\n    else\n    if is_delim str.[pos] then\n      let new_len = (last_pos - pos - 1) in\n      if new_len <> 0 || keep_empty then \n        let v = String.sub str (pos + 1) new_len in\n        loop ( v :: acc)\n          pos (pos - 1)\n      else loop acc pos (pos - 1)\n    else loop acc last_pos (pos - 1)\n  in\n  loop [] len (len - 1)\n\nlet trim s = \n  let i = ref 0  in\n  let j = String.length s in \n  while !i < j &&  \n        let u = String.unsafe_get s !i in \n        u = '\\t' || u = '\\n' || u = ' ' \n  do \n    incr i;\n  done;\n  let k = ref (j - 1)  in \n  while !k >= !i && \n        let u = String.unsafe_get s !k in \n        u = '\\t' || u = '\\n' || u = ' ' do \n    decr k ;\n  done;\n  String.sub s !i (!k - !i + 1)\n\nlet split ?keep_empty  str on = \n  if str = \"\" then [] else \n    split_by ?keep_empty (fun x -> (x : char) = on) str  ;;\n\nlet quick_split_by_ws str : string list = \n  split_by ~keep_empty:false (fun x -> x = '\\t' || x = '\\n' || x = ' ') str\n\nlet starts_with s beg = \n  let beg_len = String.length beg in\n  let s_len = String.length s in\n  beg_len <=  s_len &&\n  (let i = ref 0 in\n   while !i <  beg_len \n         && String.unsafe_get s !i =\n            String.unsafe_get beg !i do \n     incr i \n   done;\n   !i = beg_len\n  )\n\nlet rec ends_aux s end_ j k = \n  if k < 0 then (j + 1)\n  else if String.unsafe_get s j = String.unsafe_get end_ k then \n    ends_aux s end_ (j - 1) (k - 1)\n  else  -1   \n\n(** return an index which is minus when [s] does not \n    end with [beg]\n*)\nlet ends_with_index s end_ : int = \n  let s_finish = String.length s - 1 in\n  let s_beg = 
String.length end_ - 1 in\n  if s_beg > s_finish then -1\n  else\n    ends_aux s end_ s_finish s_beg\n\nlet ends_with s end_ = ends_with_index s end_ >= 0 \n\nlet ends_with_then_chop s beg = \n  let i =  ends_with_index s beg in \n  if i >= 0 then Some (String.sub s 0 i) \n  else None\n\n(* let check_suffix_case = ends_with  *)\n(* let check_suffix_case_then_chop = ends_with_then_chop *)\n\n(* let check_any_suffix_case s suffixes = \n   Ext_list.exists suffixes (fun x -> check_suffix_case s x)  *)\n\n(* let check_any_suffix_case_then_chop s suffixes = \n   let rec aux suffixes = \n    match suffixes with \n    | [] -> None \n    | x::xs -> \n      let id = ends_with_index s x in \n      if id >= 0 then Some (String.sub s 0 id)\n      else aux xs in \n   aux suffixes     *)\n\n\n\n\n(* it is unsafe to expose such API as unsafe since \n   user can provide bad input range \n\n*)\nlet rec unsafe_for_all_range s ~start ~finish p =     \n  start > finish ||\n  p (String.unsafe_get s start) && \n  unsafe_for_all_range s ~start:(start + 1) ~finish p\n\nlet for_all_from s start  p = \n  let len = String.length s in \n  if start < 0  then invalid_arg \"Ext_string.for_all_from\"\n  else unsafe_for_all_range s ~start ~finish:(len - 1) p \n\n\nlet for_all s (p : char -> bool)  =   \n  unsafe_for_all_range s ~start:0  ~finish:(String.length s - 1) p \n\nlet is_empty s = String.length s = 0\n\n\nlet repeat n s  =\n  let len = String.length s in\n  let res = Bytes.create(n * len) in\n  for i = 0 to pred n do\n    String.blit s 0 res (i * len) len\n  done;\n  Bytes.to_string res\n\n\n\n\nlet unsafe_is_sub ~sub i s j ~len =\n  let rec check k =\n    if k = len\n    then true\n    else \n      String.unsafe_get sub (i+k) = \n      String.unsafe_get s (j+k) && check (k+1)\n  in\n  j+len <= String.length s && check 0\n\n\n\nlet find ?(start=0) ~sub s =\n  let exception Local_exit in\n  let n = String.length sub in\n  let s_len = String.length s in \n  let i = ref start in  \n  try\n    
while !i + n <= s_len do\n      if unsafe_is_sub ~sub 0 s !i ~len:n then\n        raise_notrace Local_exit;\n      incr i\n    done;\n    -1\n  with Local_exit ->\n    !i\n\nlet contain_substring s sub = \n  find s ~sub >= 0 \n\n(** TODO: optimize \n    avoid nonterminating when string is empty \n*)\nlet non_overlap_count ~sub s = \n  let sub_len = String.length sub in \n  let rec aux  acc off = \n    let i = find ~start:off ~sub s  in \n    if i < 0 then acc \n    else aux (acc + 1) (i + sub_len) in\n  if String.length sub = 0 then invalid_arg \"Ext_string.non_overlap_count\"\n  else aux 0 0  \n\n\nlet rfind ~sub s =\n  let exception Local_exit in   \n  let n = String.length sub in\n  let i = ref (String.length s - n) in\n  try\n    while !i >= 0 do\n      if unsafe_is_sub ~sub 0 s !i ~len:n then \n        raise_notrace Local_exit;\n      decr i\n    done;\n    -1\n  with Local_exit ->\n    !i\n\nlet tail_from s x = \n  let len = String.length s  in \n  if  x > len then invalid_arg (\"Ext_string.tail_from \" ^s ^ \" : \"^ string_of_int x )\n  else String.sub s x (len - x)\n\nlet equal (x : string) y  = x = y\n\n(* let rec index_rec s lim i c =\n   if i >= lim then -1 else\n   if String.unsafe_get s i = c then i \n   else index_rec s lim (i + 1) c *)\n\n\n\nlet rec index_rec_count s lim i c count =\n  if i >= lim then -1 else\n  if String.unsafe_get s i = c then \n    if count = 1 then i \n    else index_rec_count s lim (i + 1) c (count - 1)\n  else index_rec_count s lim (i + 1) c count\n\nlet index_count s i c count =     \n  let lim = String.length s in \n  if i < 0 || i >= lim || count < 1 then \n    invalid_arg (\"index_count: ( \" ^string_of_int i ^ \",\" ^string_of_int count ^ \")\" );\n  index_rec_count s lim i c count \n\n(* let index_next s i c =   \n   index_count s i c 1  *)\n\n(* let extract_until s cursor c =       \n   let len = String.length s in   \n   let start = !cursor in \n   if start < 0 || start >= len then (\n    cursor := -1;\n    \"\"\n    
)\n   else \n    let i = index_rec s len start c in   \n    let finish = \n      if i < 0 then (      \n        cursor := -1 ;\n        len \n      )\n      else (\n        cursor := i + 1;\n        i \n      ) in \n    String.sub s start (finish - start) *)\n\nlet rec rindex_rec s i c =\n  if i < 0 then i else\n  if String.unsafe_get s i = c then i else rindex_rec s (i - 1) c;;\n\nlet rec rindex_rec_opt s i c =\n  if i < 0 then None else\n  if String.unsafe_get s i = c then Some i else rindex_rec_opt s (i - 1) c;;\n\nlet rindex_neg s c = \n  rindex_rec s (String.length s - 1) c;;\n\nlet rindex_opt s c = \n  rindex_rec_opt s (String.length s - 1) c;;\n\n\n(** TODO: can be improved to return a positive integer instead *)\nlet rec unsafe_no_char x ch i  last_idx = \n  i > last_idx  || \n  (String.unsafe_get x i <> ch && unsafe_no_char x ch (i + 1)  last_idx)\n\nlet rec unsafe_no_char_idx x ch i last_idx = \n  if i > last_idx  then -1 \n  else \n  if String.unsafe_get x i <> ch then \n    unsafe_no_char_idx x ch (i + 1)  last_idx\n  else i\n\nlet no_char x ch i len  : bool =\n  let str_len = String.length x in \n  if i < 0 || i >= str_len || len >= str_len then invalid_arg \"Ext_string.no_char\"   \n  else unsafe_no_char x ch i len \n\n\nlet no_slash x = \n  unsafe_no_char x '/' 0 (String.length x - 1)\n\nlet no_slash_idx x = \n  unsafe_no_char_idx x '/' 0 (String.length x - 1)\n\nlet no_slash_idx_from x from = \n  let last_idx = String.length x - 1  in \n  assert (from >= 0); \n  unsafe_no_char_idx x '/' from last_idx\n\nlet replace_slash_backward (x : string ) = \n  let len = String.length x in \n  if unsafe_no_char x '/' 0  (len - 1) then x \n  else \n    String.map (function \n        | '/' -> '\\\\'\n        | x -> x ) x \n\nlet replace_backward_slash (x : string)=\n  let len = String.length x in\n  if unsafe_no_char x '\\\\' 0  (len -1) then x \n  else  \n    String.map (function \n        |'\\\\'-> '/'\n        | x -> x) x\n\nlet empty = \"\"\n\n#ifdef BROWSER 
\nlet compare = Bs_hash_stubs.string_length_based_compare\n#else\nexternal compare : string -> string -> int = \"caml_string_length_based_compare\" [@@noalloc];;    \n#endif\nlet single_space = \" \"\nlet single_colon = \":\"\n\nlet concat_array sep (s : string array) =   \n  let s_len = Array.length s in \n  match s_len with \n  | 0 -> empty \n  | 1 -> Array.unsafe_get s 0\n  | _ ->     \n    let sep_len = String.length sep in \n    let len = ref 0 in \n    for i = 0 to  s_len - 1 do \n      len := !len + String.length (Array.unsafe_get s i)\n    done;\n    let target = \n      Bytes.create \n        (!len + (s_len - 1) * sep_len ) in    \n    let hd = (Array.unsafe_get s 0) in     \n    let hd_len = String.length hd in \n    String.unsafe_blit hd  0  target 0 hd_len;   \n    let current_offset = ref hd_len in     \n    for i = 1 to s_len - 1 do \n      String.unsafe_blit sep 0 target  !current_offset sep_len;\n      let cur = Array.unsafe_get s i in \n      let cur_len = String.length cur in     \n      let new_off_set = (!current_offset + sep_len ) in\n      String.unsafe_blit cur 0 target new_off_set cur_len; \n      current_offset := \n        new_off_set + cur_len ; \n    done;\n    Bytes.unsafe_to_string target   \n\nlet concat3 a b c = \n  let a_len = String.length a in \n  let b_len = String.length b in \n  let c_len = String.length c in \n  let len = a_len + b_len + c_len in \n  let target = Bytes.create len in \n  String.unsafe_blit a 0 target 0 a_len ; \n  String.unsafe_blit b 0 target a_len b_len;\n  String.unsafe_blit c 0 target (a_len + b_len) c_len;\n  Bytes.unsafe_to_string target\n\nlet concat4 a b c d =\n  let a_len = String.length a in \n  let b_len = String.length b in \n  let c_len = String.length c in \n  let d_len = String.length d in \n  let len = a_len + b_len + c_len + d_len in \n\n  let target = Bytes.create len in \n  String.unsafe_blit a 0 target 0 a_len ; \n  String.unsafe_blit b 0 target a_len b_len;\n  String.unsafe_blit c 0 target 
(a_len + b_len) c_len;\n  String.unsafe_blit d 0 target (a_len + b_len + c_len) d_len;\n  Bytes.unsafe_to_string target\n\n\nlet concat5 a b c d e =\n  let a_len = String.length a in \n  let b_len = String.length b in \n  let c_len = String.length c in \n  let d_len = String.length d in \n  let e_len = String.length e in \n  let len = a_len + b_len + c_len + d_len + e_len in \n\n  let target = Bytes.create len in \n  String.unsafe_blit a 0 target 0 a_len ; \n  String.unsafe_blit b 0 target a_len b_len;\n  String.unsafe_blit c 0 target (a_len + b_len) c_len;\n  String.unsafe_blit d 0 target (a_len + b_len + c_len) d_len;\n  String.unsafe_blit e 0 target (a_len + b_len + c_len + d_len) e_len;\n  Bytes.unsafe_to_string target\n\n\n\nlet inter2 a b = \n  concat3 a single_space b \n\n\nlet inter3 a b c = \n  concat5 a  single_space  b  single_space  c \n\n\n\n\n\nlet inter4 a b c d =\n  concat_array single_space [| a; b ; c; d|]\n\n\nlet parent_dir_lit = \"..\"    \nlet current_dir_lit = \".\"\n\n\n(* reference {!Bytes.unppercase} *)\nlet capitalize_ascii (s : string) : string = \n  if String.length s = 0 then s \n  else \n    begin\n      let c = String.unsafe_get s 0 in \n      if (c >= 'a' && c <= 'z')\n      || (c >= '\\224' && c <= '\\246')\n      || (c >= '\\248' && c <= '\\254') then \n        let uc = Char.unsafe_chr (Char.code c - 32) in \n        let bytes = Bytes.of_string s in\n        Bytes.unsafe_set bytes 0 uc;\n        Bytes.unsafe_to_string bytes \n      else s \n    end\n\nlet capitalize_sub (s : string) len : string = \n  let slen = String.length s in \n  if  len < 0 || len > slen then invalid_arg \"Ext_string.capitalize_sub\"\n  else \n  if len = 0 then \"\"\n  else \n    let bytes = Bytes.create len in \n    let uc = \n      let c = String.unsafe_get s 0 in \n      if (c >= 'a' && c <= 'z')\n      || (c >= '\\224' && c <= '\\246')\n      || (c >= '\\248' && c <= '\\254') then \n        Char.unsafe_chr (Char.code c - 32) else c in \n    
Bytes.unsafe_set bytes 0 uc;\n    for i = 1 to len - 1 do \n      Bytes.unsafe_set bytes i (String.unsafe_get s i)\n    done ;\n    Bytes.unsafe_to_string bytes \n\n\n\nlet uncapitalize_ascii =\n  String.uncapitalize_ascii\n\nlet lowercase_ascii = String.lowercase_ascii\n\nexternal (.![]) : string -> int -> int = \"%string_unsafe_get\"\n\nlet get_int_1_unsafe (x : string) off : int = \n  x.![off]\n\nlet get_int_2_unsafe (x : string) off : int =   \n  x.![off] lor   \n  x.![off+1] lsl 8\n\nlet get_int_3_unsafe (x : string) off : int = \n  x.![off] lor   \n  x.![off+1] lsl 8  lor \n  x.![off+2] lsl 16\n\n\nlet get_int_4_unsafe (x : string) off : int =     \n  x.![off] lor   \n  x.![off+1] lsl 8  lor \n  x.![off+2] lsl 16 lor\n  x.![off+3] lsl 24 \n\nlet get_1_2_3_4 (x : string) ~off len : int =  \n  if len = 1 then get_int_1_unsafe x off \n  else if len = 2 then get_int_2_unsafe x off \n  else if len = 3 then get_int_3_unsafe x off \n  else if len = 4 then get_int_4_unsafe x off \n  else assert false\n\nlet unsafe_sub  x offs len =\n  let b = Bytes.create len in \n  Ext_bytes.unsafe_blit_string x offs b 0 len;\n  (Bytes.unsafe_to_string b)\n\nlet is_valid_hash_number (x:string) = \n  let len = String.length x in \n  len > 0 && (\n    let a = x.![0] in \n    a <= 57 &&\n    (if len > 1 then \n       a > 48 && \n       for_all_from x 1 (function '0' .. '9' -> true | _ -> false)\n     else\n       a >= 48 )\n  ) \n\n\nlet hash_number_as_i32_exn \n    ( x : string) : int32 = \n  Int32.of_string x    \n\n\nlet first_marshal_char (x : string) = \n    x <> \"\"   &&\n    ( String.unsafe_get x  0 = '\\132')\n"
  },
  {
    "path": "analysis/vendor/ext/ext_string.mli",
    "content": "(* Copyright (C) 2015 - 2016 Bloomberg Finance L.P.\n * Copyright (C) 2017 - Hongbo Zhang, Authors of ReScript\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n\n\n\n\n\n\n\n(** Extension to the standard library [String] module, fixed some bugs like\n    avoiding locale sensitivity *) \n\n(** default is false *)    \nval split_by : ?keep_empty:bool -> (char -> bool) -> string -> string list\n\n\n(** remove whitespace letters ('\\t', '\\n', ' ') on both side*)\nval trim : string -> string \n\n\n(** default is false *)\nval split : ?keep_empty:bool -> string -> char -> string list\n\n(** split by space chars for quick scripting *)\nval quick_split_by_ws : string -> string list \n\n\n\nval starts_with : string -> string -> bool\n\n(**\n   return [-1] when not found, the returned index is useful \n   see [ends_with_then_chop]\n*)\nval ends_with_index : string -> string -> int\n\nval ends_with : string -> string -> bool\n\n(**\n   [ends_with_then_chop name ext]\n   @example:\n   {[\n     ends_with_then_chop \"a.cmj\" \".cmj\"\n       \"a\"\n   ]}\n   This is useful in controlled or file case sensitve system\n*)\nval ends_with_then_chop : string -> string -> string option\n\n\n\n\n(**\n   [for_all_from  s start p]\n   if [start] is negative, it raises,\n   if [start] is too large, it returns true\n*)\nval for_all_from:\n  string -> \n  int -> \n  (char -> bool) -> \n  bool \n\nval for_all : \n  string -> \n  (char -> bool) -> \n  bool\n\nval is_empty : string -> bool\n\nval repeat : int -> string -> string \n\nval equal : string -> string -> bool\n\n(**\n   [extract_until s cursor sep]\n   When [sep] not found, the cursor is updated to -1,\n   otherwise cursor is increased to 1 + [sep_position]\n   User can not determine whether it is found or not by\n   telling the return string is empty since \n   \"\\n\\n\" would result in an empty string too.\n*)\n(* val extract_until:\n   string -> \n   int ref -> (* cursor to be updated *)\n   char -> \n   string *)\n\nval index_count:  \n  string -> \n  int ->\n  char -> \n  int -> \n  int \n\n(* val index_next :\n   string -> \n   int ->\n   char -> \n   int  *)\n\n\n(**\n   [find 
~start ~sub s]\n   returns [-1] if not found\n*)\nval find : ?start:int -> sub:string -> string -> int\n\nval contain_substring : string -> string -> bool \n\nval non_overlap_count : sub:string -> string -> int \n\nval rfind : sub:string -> string -> int\n\n(** [tail_from s 1]\n    return a substring from offset 1 (inclusive)\n*)\nval tail_from : string -> int -> string\n\n\n(** returns negative number if not found *)\nval rindex_neg : string -> char -> int \n\nval rindex_opt : string -> char -> int option\n\n\nval no_char : string -> char -> int -> int -> bool \n\n\nval no_slash : string -> bool \n\n(** return negative means no slash, otherwise [i] means the place for first slash *)\nval no_slash_idx : string -> int \n\nval no_slash_idx_from : string -> int -> int \n\n(** if no conversion happens, reference equality holds *)\nval replace_slash_backward : string -> string \n\n(** if no conversion happens, reference equality holds *)\nval replace_backward_slash : string -> string \n\nval empty : string \n\n#ifdef BROWSER \nval compare :  string -> string -> int\n#else\nexternal compare : string -> string -> int = \"caml_string_length_based_compare\" [@@noalloc];;  \n#endif  \nval single_space : string\n\nval concat3 : string -> string -> string -> string \nval concat4 : string -> string -> string -> string -> string \nval concat5 : string -> string -> string -> string -> string -> string  \nval inter2 : string -> string -> string\nval inter3 : string -> string -> string -> string \nval inter4 : string -> string -> string -> string -> string\nval concat_array : string -> string array -> string \n\nval single_colon : string \n\nval parent_dir_lit : string\nval current_dir_lit : string\n\nval capitalize_ascii : string -> string\n\nval capitalize_sub:\n  string -> \n  int -> \n  string\n\nval uncapitalize_ascii : string -> string\n\nval lowercase_ascii : string -> string \n\n(** Play parity to {!Ext_buffer.add_int_1} *)\n(* val get_int_1 : string -> int -> int \n   val 
get_int_2 : string -> int -> int \n   val get_int_3 : string -> int -> int \n   val get_int_4 : string -> int -> int  *)\n\nval get_1_2_3_4 : \n  string -> \n  off:int ->  \n  int -> \n  int \n\nval unsafe_sub :   \n  string -> \n  int -> \n  int -> \n  string\n\nval is_valid_hash_number:\n  string -> \n  bool\n\nval hash_number_as_i32_exn:\n  string ->\n  int32\n\nval first_marshal_char:  \n  string -> \n  bool\n"
  },
  {
    "path": "analysis/vendor/ext/ext_string_array.ml",
    "content": "(* Copyright (C) 2020 - Present Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(* Invariant: the same as encoding Map_string.compare_key  *)\nlet cmp = Ext_string.compare\n\nlet rec binary_search_aux (arr : string array) (lo : int) (hi : int)\n    (key : string) : _ option =\n  let mid = (lo + hi) / 2 in\n  let mid_val = Array.unsafe_get arr mid in\n  let c = cmp key mid_val in\n  if c = 0 then Some mid\n  else if c < 0 then\n    (*  a[lo] =< key < a[mid] <= a[hi] *)\n    if hi = mid then\n      let lo_val = Array.unsafe_get arr lo in\n      if lo_val = key then Some lo else None\n    else binary_search_aux arr lo mid key\n  else if\n    (*  a[lo] =< a[mid] < key <= a[hi] *)\n    lo = mid\n  then\n    let hi_val = Array.unsafe_get arr hi in\n    if hi_val = key then Some hi else None\n  else binary_search_aux arr mid hi key\n\nlet find_sorted sorted key : int option =\n  let len = Array.length sorted in\n  if len = 0 then None\n  else\n    let lo = Array.unsafe_get sorted 0 in\n    let c = cmp key lo in\n    if c < 0 then None\n    else\n      let hi = Array.unsafe_get sorted (len - 1) in\n      let c2 = cmp key hi in\n      if c2 > 0 then None else binary_search_aux sorted 0 (len - 1) key\n\nlet rec binary_search_assoc (arr : (string * _) array) (lo : int) (hi : int)\n    (key : string) : _ option =\n  let mid = (lo + hi) / 2 in\n  let mid_val = Array.unsafe_get arr mid in\n  let c = cmp key (fst mid_val) in\n  if c = 0 then Some (snd mid_val)\n  else if c < 0 then\n    (*  a[lo] =< key < a[mid] <= a[hi] *)\n    if hi = mid then\n      let lo_val = Array.unsafe_get arr lo in\n      if fst lo_val = key then Some (snd lo_val) else None\n    else binary_search_assoc arr lo mid key\n  else if\n    (*  a[lo] =< a[mid] < key <= a[hi] *)\n    lo = mid\n  then\n    let hi_val = Array.unsafe_get arr hi in\n    if fst hi_val = key then Some (snd hi_val) else None\n  else binary_search_assoc arr mid hi key\n\nlet find_sorted_assoc (type a) (sorted : (string * a) array) (key : string) :\n    a option =\n  let len = Array.length sorted in\n  if len 
= 0 then None\n  else\n    let lo = Array.unsafe_get sorted 0 in\n    let c = cmp key (fst lo) in\n    if c < 0 then None\n    else\n      let hi = Array.unsafe_get sorted (len - 1) in\n      let c2 = cmp key (fst hi) in\n      if c2 > 0 then None else binary_search_assoc sorted 0 (len - 1) key\n"
  },
  {
    "path": "analysis/vendor/ext/ext_string_array.mli",
    "content": "(* Copyright (C) 2020 - Present Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval cmp : string -> string -> int\n\nval find_sorted : string array -> string -> int option\n\nval find_sorted_assoc : (string * 'a) array -> string -> 'a option\n"
  },
  {
    "path": "analysis/vendor/ext/ext_sys.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** TODO: not exported yet, wait for Windows Fix*)\n#ifdef BROWSER\nlet is_directory_no_exn f =\n  try Sys.is_directory f with _ -> false  \n#else\nexternal is_directory_no_exn : string -> bool = \"caml_sys_is_directory_no_exn\"\n#endif\n\n\nlet is_windows_or_cygwin = Sys.win32 || Sys.cygwin\n\n\n"
  },
  {
    "path": "analysis/vendor/ext/ext_sys.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval is_directory_no_exn : string -> bool\n\nval is_windows_or_cygwin : bool\n"
  },
  {
    "path": "analysis/vendor/ext/ext_topsort.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype edges = {id: int; deps: Vec_int.t}\n\nmodule Edge_vec = Vec.Make (struct\n  type t = edges\n\n  let null = {id = 0; deps = Vec_int.empty ()}\nend)\n\ntype t = Edge_vec.t\n\n(** This graph is different the graph used in [scc] graph, since we need dynamic\n    shrink the graph, so for each vector the first node is it self , it will\n    also change the input.\n\n    TODO: error handling (cycle handling) and defensive bad input (missing edges\n    etc) *)\n\nlet layered_dfs (g : t) =\n  let queue = Queue.create () in\n  let rec aux g =\n    let new_entries =\n      Edge_vec.inplace_filter_with\n        (fun (x : edges) -> not (Vec_int.is_empty x.deps))\n        ~cb_no:(fun x acc -> Set_int.add acc x.id)\n        Set_int.empty g\n    in\n    if not (Set_int.is_empty new_entries) then (\n      Queue.push new_entries queue;\n      Edge_vec.iter g (fun edges ->\n          Vec_int.inplace_filter\n            (fun x -> not (Set_int.mem new_entries x))\n            edges.deps);\n      aux g)\n  in\n  aux g;\n  queue\n"
  },
  {
    "path": "analysis/vendor/ext/ext_topsort.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype edges = {id: int; deps: Vec_int.t}\n\nmodule Edge_vec : Vec_gen.S with type elt = edges\n\ntype t = Edge_vec.t\n\nval layered_dfs : t -> Set_int.t Queue.t\n(** the input will be modified , *)\n"
  },
  {
    "path": "analysis/vendor/ext/ext_utf8.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype byte = Single of int | Cont of int | Leading of int * int | Invalid\n\n(** [classify chr] returns the {!byte} corresponding to [chr] *)\nlet classify chr =\n  let c = int_of_char chr in\n  (* Classify byte according to leftmost 0 bit *)\n  if c land 0b1000_0000 = 0 then Single c\n  else if\n    (* c 0b0____*)\n    c land 0b0100_0000 = 0\n  then Cont (c land 0b0011_1111)\n  else if\n    (* c 0b10___*)\n    c land 0b0010_0000 = 0\n  then Leading (1, c land 0b0001_1111)\n  else if\n    (* c 0b110__*)\n    c land 0b0001_0000 = 0\n  then Leading (2, c land 0b0000_1111)\n  else if\n    (* c 0b1110_ *)\n    c land 0b0000_1000 = 0\n  then Leading (3, c land 0b0000_0111)\n  else if\n    (* c 0b1111_0___*)\n    c land 0b0000_0100 = 0\n  then Leading (4, c land 0b0000_0011)\n  else if\n    (* c 0b1111_10__*)\n    c land 0b0000_0010 = 0\n  then Leading (5, c land 0b0000_0001) (* c 0b1111_110__ *)\n  else Invalid\n\nexception Invalid_utf8 of string\n\n(* when the first char is [Leading],\n   TODO: need more error checking\n   when out of bond\n*)\nlet rec follow s n (c : int) offset =\n  if n = 0 then (c, offset)\n  else\n    match classify s.[offset + 1] with\n    | Cont cc -> follow s (n - 1) ((c lsl 6) lor (cc land 0x3f)) (offset + 1)\n    | _ -> raise (Invalid_utf8 \"Continuation byte expected\")\n\nlet rec next s ~remaining offset =\n  if remaining = 0 then offset\n  else\n    match classify s.[offset + 1] with\n    | Cont _cc -> next s ~remaining:(remaining - 1) (offset + 1)\n    | _ -> -1\n    | exception _ -> -1\n(* it can happen when out of bound *)\n\nlet decode_utf8_string s =\n  let lst = ref [] in\n  let add elem = lst := elem :: !lst in\n  let rec decode_utf8_cont s i s_len =\n    if i = s_len then ()\n    else\n      match classify s.[i] with\n      | Single c ->\n        add c;\n        decode_utf8_cont s (i + 1) s_len\n      | Cont _ -> raise (Invalid_utf8 \"Unexpected continuation byte\")\n      | Leading (n, c) ->\n        let c', i' = follow s n c 
i in\n        add c';\n        decode_utf8_cont s (i' + 1) s_len\n      | Invalid -> raise (Invalid_utf8 \"Invalid byte\")\n  in\n  decode_utf8_cont s 0 (String.length s);\n  List.rev !lst\n\n(** To decode {j||j} we need verify in the ast so that we have better error \n    location, then we do the decode later\n*)\n\n(* let verify s loc =\n   assert false *)\n\nlet encode_codepoint c =\n  (* reused from syntax/src/res_utf8.ml *)\n  let h2 = 0b1100_0000 in\n  let h3 = 0b1110_0000 in\n  let h4 = 0b1111_0000 in\n  let cont_mask = 0b0011_1111 in\n  if c <= 127 then (\n    let bytes = (Bytes.create [@doesNotRaise]) 1 in\n    Bytes.unsafe_set bytes 0 (Char.unsafe_chr c);\n    Bytes.unsafe_to_string bytes)\n  else if c <= 2047 then (\n    let bytes = (Bytes.create [@doesNotRaise]) 2 in\n    Bytes.unsafe_set bytes 0 (Char.unsafe_chr (h2 lor (c lsr 6)));\n    Bytes.unsafe_set bytes 1\n      (Char.unsafe_chr (0b1000_0000 lor (c land cont_mask)));\n    Bytes.unsafe_to_string bytes)\n  else if c <= 65535 then (\n    let bytes = (Bytes.create [@doesNotRaise]) 3 in\n    Bytes.unsafe_set bytes 0 (Char.unsafe_chr (h3 lor (c lsr 12)));\n    Bytes.unsafe_set bytes 1\n      (Char.unsafe_chr (0b1000_0000 lor ((c lsr 6) land cont_mask)));\n    Bytes.unsafe_set bytes 2\n      (Char.unsafe_chr (0b1000_0000 lor (c land cont_mask)));\n    Bytes.unsafe_to_string bytes)\n  else\n    (* if c <= max then *)\n    let bytes = (Bytes.create [@doesNotRaise]) 4 in\n    Bytes.unsafe_set bytes 0 (Char.unsafe_chr (h4 lor (c lsr 18)));\n    Bytes.unsafe_set bytes 1\n      (Char.unsafe_chr (0b1000_0000 lor ((c lsr 12) land cont_mask)));\n    Bytes.unsafe_set bytes 2\n      (Char.unsafe_chr (0b1000_0000 lor ((c lsr 6) land cont_mask)));\n    Bytes.unsafe_set bytes 3\n      (Char.unsafe_chr (0b1000_0000 lor (c land cont_mask)));\n    Bytes.unsafe_to_string bytes\n"
  },
  {
    "path": "analysis/vendor/ext/ext_utf8.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype byte = Single of int | Cont of int | Leading of int * int | Invalid\n\nval classify : char -> byte\n\nval follow : string -> int -> int -> int -> int * int\n\nval next : string -> remaining:int -> int -> int\n(** return [-1] if failed *)\n\nexception Invalid_utf8 of string\n\nval decode_utf8_string : string -> int list\n\nval encode_codepoint : int -> string\n"
  },
  {
    "path": "analysis/vendor/ext/ext_util.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(** {[\n      (power_2_above 16 63 = 64) (power_2_above 16 76 = 128)\n    ]} *)\nlet rec power_2_above x n =\n  if x >= n then x\n  else if x * 2 > Sys.max_array_length then x\n  else power_2_above (x * 2) n\n\nlet stats_to_string\n    ({num_bindings; num_buckets; max_bucket_length; bucket_histogram} :\n      Hashtbl.statistics) =\n  Printf.sprintf \"bindings: %d,buckets: %d, longest: %d, hist:[%s]\" num_bindings\n    num_buckets max_bucket_length\n    (String.concat \",\"\n       (Array.to_list (Array.map string_of_int bucket_histogram)))\n\nlet string_of_int_as_char (i : int) : string =\n  if i <= 255 && i >= 0 then Format.asprintf \"%C\" (Char.unsafe_chr i)\n  else\n    let str =\n      match Char.unsafe_chr i with\n      | '\\'' -> \"\\\\'\"\n      | '\\\\' -> \"\\\\\\\\\"\n      | '\\n' -> \"\\\\n\"\n      | '\\t' -> \"\\\\t\"\n      | '\\r' -> \"\\\\r\"\n      | '\\b' -> \"\\\\b\"\n      | ' ' .. '~' as c ->\n        let s = (Bytes.create [@doesNotRaise]) 1 in\n        Bytes.unsafe_set s 0 c;\n        Bytes.unsafe_to_string s\n      | _ -> Ext_utf8.encode_codepoint i\n    in\n    Printf.sprintf \"\\'%s\\'\" str\n"
  },
  {
    "path": "analysis/vendor/ext/ext_util.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval power_2_above : int -> int -> int\n\nval stats_to_string : Hashtbl.statistics -> string\n\nval string_of_int_as_char : int -> string\n"
  },
  {
    "path": "analysis/vendor/ext/hash.cppo.ml",
    "content": "#if defined TYPE_IDENT\ntype key = Ident.t \ntype 'a t = (key, 'a)  Hash_gen.t \nlet key_index (h : _ t ) (key : key) =\n  (Bs_hash_stubs.hash_stamp_and_name  key.stamp key.name ) land (Array.length h.data - 1)\n(* (Bs_hash_stubs.hash_string_int  key.name key.stamp ) land (Array.length h.data - 1) *)\nlet eq_key = Ext_ident.equal \n#elif defined TYPE_STRING\ntype key = string\ntype 'a t = (key, 'a)  Hash_gen.t \nlet key_index (h : _ t ) (key : key) =\n  (Bs_hash_stubs.hash_string  key ) land (Array.length h.data - 1)\nlet eq_key = Ext_string.equal \n#elif defined TYPE_INT\ntype key = int \ntype 'a t = (key, 'a)  Hash_gen.t \nlet key_index (h : _ t ) (key : key) =\n  (Bs_hash_stubs.hash_int  key ) land (Array.length h.data - 1)\nlet eq_key = Ext_int.equal   \n\n#elif defined TYPE_FUNCTOR\nmodule Make (Key : Hashtbl.HashedType) = struct \n  type key = Key.t \n  type 'a t = (key, 'a)  Hash_gen.t \n  let key_index (h : _ t ) (key : key) =\n    (Key.hash  key ) land (Array.length h.data - 1)\n  let eq_key = Key.equal   \n\n#else\n      [%error \"unknown type\"]\n#endif\n\n  type ('a, 'b) bucket = ('a,'b) Hash_gen.bucket\n  let create = Hash_gen.create\n  let clear = Hash_gen.clear\n  let reset = Hash_gen.reset\n  let iter = Hash_gen.iter\n  let to_list = Hash_gen.to_list\n  let fold = Hash_gen.fold\n  let length = Hash_gen.length\n  (* let stats = Hash_gen.stats *)\n\n\n\n  let add (h : _ t) key data =\n    let i = key_index h key in\n    let h_data = h.data in   \n    Array.unsafe_set h_data i (Cons{key; data; next=Array.unsafe_get h_data i});\n    h.size <- h.size + 1;\n    if h.size > Array.length h_data lsl 1 then Hash_gen.resize key_index h\n\n  (* after upgrade to 4.04 we should provide an efficient [replace_or_init] *)\n  let add_or_update \n      (h : 'a t) \n      (key : key) \n      ~update:(modf : 'a -> 'a) \n      (default :  'a) : unit =\n    let rec find_bucket (bucketlist : _ bucket) : bool =\n      match bucketlist with\n      | Cons rhs  
->\n        if eq_key rhs.key key then begin rhs.data <- modf rhs.data; false end\n        else find_bucket rhs.next\n      | Empty -> true in\n    let i = key_index h key in \n    let h_data = h.data in \n    if find_bucket (Array.unsafe_get h_data i) then\n      begin \n        Array.unsafe_set h_data i  (Cons{key; data=default; next = Array.unsafe_get h_data i});\n        h.size <- h.size + 1 ;\n        if h.size > Array.length h_data lsl 1 then Hash_gen.resize key_index h \n      end\n\n  let remove (h : _ t ) key =\n    let i = key_index h key in\n    let h_data = h.data in \n    Hash_gen.remove_bucket h i key ~prec:Empty (Array.unsafe_get h_data i) eq_key\n\n  (* for short bucket list, [find_rec is not called ] *)\n  let rec find_rec key (bucketlist : _ bucket) = match bucketlist with  \n    | Empty ->\n      raise Not_found\n    | Cons rhs  ->\n      if eq_key key rhs.key then rhs.data else find_rec key rhs.next\n\n  let find_exn (h : _ t) key =\n    match Array.unsafe_get h.data (key_index h key) with\n    | Empty -> raise Not_found\n    | Cons rhs  ->\n      if eq_key key rhs.key then rhs.data else\n        match rhs.next with\n        | Empty -> raise Not_found\n        | Cons rhs  ->\n          if eq_key key rhs.key then rhs.data else\n            match rhs.next with\n            | Empty -> raise Not_found\n            | Cons rhs ->\n              if eq_key key rhs.key  then rhs.data else find_rec key rhs.next\n\n  let find_opt (h : _ t) key =\n    Hash_gen.small_bucket_opt eq_key key (Array.unsafe_get h.data (key_index h key))\n\n  let find_key_opt (h : _ t) key =\n    Hash_gen.small_bucket_key_opt eq_key key (Array.unsafe_get h.data (key_index h key))\n\n  let find_default (h : _ t) key default = \n    Hash_gen.small_bucket_default eq_key key default (Array.unsafe_get h.data (key_index h key))\n\n  let find_all (h : _ t) key =\n    let rec find_in_bucket (bucketlist : _ bucket) = match bucketlist with \n      | Empty ->\n        []\n      | Cons rhs  
->\n        if eq_key key rhs.key\n        then rhs.data :: find_in_bucket rhs.next\n        else find_in_bucket rhs.next in\n    find_in_bucket (Array.unsafe_get h.data (key_index h key))\n\n\n  let replace h key data =\n    let i = key_index h key in\n    let h_data = h.data in \n    let l = Array.unsafe_get h_data i in\n    if Hash_gen.replace_bucket key data l eq_key then \n      begin \n        Array.unsafe_set h_data i (Cons{key; data; next=l});\n        h.size <- h.size + 1;\n        if h.size > Array.length h_data lsl 1 then Hash_gen.resize key_index h;\n      end \n\n  let mem (h : _ t) key = \n    Hash_gen.small_bucket_mem \n      (Array.unsafe_get h.data (key_index h key))\n      eq_key key \n\n\n  let of_list2 ks vs = \n    let len = List.length ks in \n    let map = create len in \n    List.iter2 (fun k v -> add map k v) ks vs ; \n    map\n\n#if defined TYPE_FUNCTOR\nend\n#endif\n"
  },
  {
    "path": "analysis/vendor/ext/hash.mli",
    "content": "module Make (Key : Hashtbl.HashedType) : Hash_gen.S with type key = Key.t\n"
  },
  {
    "path": "analysis/vendor/ext/hash_gen.ml",
    "content": "(***********************************************************************)\n(*                                                                     *)\n(*                                OCaml                                *)\n(*                                                                     *)\n(*            Xavier Leroy, projet Cristal, INRIA Rocquencourt         *)\n(*                                                                     *)\n(*  Copyright 1996 Institut National de Recherche en Informatique et   *)\n(*  en Automatique.  All rights reserved.  This file is distributed    *)\n(*  under the terms of the GNU Library General Public License, with    *)\n(*  the special exception on linking described in file ../LICENSE.     *)\n(*                                                                     *)\n(***********************************************************************)\n\n(* Hash tables *)\n\n(* We do dynamic hashing, and resize the table and rehash the elements\n   when buckets become too long. 
*)\n\ntype ('a, 'b) bucket =\n  | Empty\n  | Cons of {mutable key: 'a; mutable data: 'b; mutable next: ('a, 'b) bucket}\n\ntype ('a, 'b) t = {\n  mutable size: int;\n  (* number of entries *)\n  mutable data: ('a, 'b) bucket array;\n  (* the buckets *)\n  initial_size: int; (* initial array size *)\n}\n\nlet create initial_size =\n  let s = Ext_util.power_2_above 16 initial_size in\n  {initial_size = s; size = 0; data = Array.make s Empty}\n\nlet clear h =\n  h.size <- 0;\n  let len = Array.length h.data in\n  for i = 0 to len - 1 do\n    Array.unsafe_set h.data i Empty\n  done\n\nlet reset h =\n  h.size <- 0;\n  h.data <- Array.make h.initial_size Empty\n\nlet length h = h.size\n\nlet resize indexfun h =\n  let odata = h.data in\n  let osize = Array.length odata in\n  let nsize = osize * 2 in\n  if nsize < Sys.max_array_length then (\n    let ndata = Array.make nsize Empty in\n    let ndata_tail = Array.make nsize Empty in\n    h.data <- ndata;\n    (* so that indexfun sees the new bucket count *)\n    let rec insert_bucket = function\n      | Empty -> ()\n      | Cons {key; next} as cell ->\n        let nidx = indexfun h key in\n        (match Array.unsafe_get ndata_tail nidx with\n        | Empty -> Array.unsafe_set ndata nidx cell\n        | Cons tail -> tail.next <- cell);\n        Array.unsafe_set ndata_tail nidx cell;\n        insert_bucket next\n    in\n    for i = 0 to osize - 1 do\n      insert_bucket (Array.unsafe_get odata i)\n    done;\n    for i = 0 to nsize - 1 do\n      match Array.unsafe_get ndata_tail i with\n      | Empty -> ()\n      | Cons tail -> tail.next <- Empty\n    done)\n\nlet iter h f =\n  let rec do_bucket = function\n    | Empty -> ()\n    | Cons l ->\n      f l.key l.data;\n      do_bucket l.next\n  in\n  let d = h.data in\n  for i = 0 to Array.length d - 1 do\n    do_bucket (Array.unsafe_get d i)\n  done\n\nlet fold h init f =\n  let rec do_bucket b accu =\n    match b with\n    | Empty -> accu\n    | Cons l -> do_bucket l.next (f 
l.key l.data accu)\n  in\n  let d = h.data in\n  let accu = ref init in\n  for i = 0 to Array.length d - 1 do\n    accu := do_bucket (Array.unsafe_get d i) !accu\n  done;\n  !accu\n\nlet to_list h f = fold h [] (fun k data acc -> f k data :: acc)\n\nlet rec small_bucket_mem (lst : _ bucket) eq key =\n  match lst with\n  | Empty -> false\n  | Cons lst -> (\n    eq key lst.key\n    ||\n    match lst.next with\n    | Empty -> false\n    | Cons lst -> (\n      eq key lst.key\n      ||\n      match lst.next with\n      | Empty -> false\n      | Cons lst -> eq key lst.key || small_bucket_mem lst.next eq key))\n\nlet rec small_bucket_opt eq key (lst : _ bucket) : _ option =\n  match lst with\n  | Empty -> None\n  | Cons lst -> (\n    if eq key lst.key then Some lst.data\n    else\n      match lst.next with\n      | Empty -> None\n      | Cons lst -> (\n        if eq key lst.key then Some lst.data\n        else\n          match lst.next with\n          | Empty -> None\n          | Cons lst ->\n            if eq key lst.key then Some lst.data\n            else small_bucket_opt eq key lst.next))\n\nlet rec small_bucket_key_opt eq key (lst : _ bucket) : _ option =\n  match lst with\n  | Empty -> None\n  | Cons {key = k; next} -> (\n    if eq key k then Some k\n    else\n      match next with\n      | Empty -> None\n      | Cons {key = k; next} -> (\n        if eq key k then Some k\n        else\n          match next with\n          | Empty -> None\n          | Cons {key = k; next} ->\n            if eq key k then Some k else small_bucket_key_opt eq key next))\n\nlet rec small_bucket_default eq key default (lst : _ bucket) =\n  match lst with\n  | Empty -> default\n  | Cons lst -> (\n    if eq key lst.key then lst.data\n    else\n      match lst.next with\n      | Empty -> default\n      | Cons lst -> (\n        if eq key lst.key then lst.data\n        else\n          match lst.next with\n          | Empty -> default\n          | Cons lst ->\n            if eq key lst.key then 
lst.data\n            else small_bucket_default eq key default lst.next))\n\nlet rec remove_bucket h (i : int) key ~(prec : _ bucket) (buck : _ bucket)\n    eq_key =\n  match buck with\n  | Empty -> ()\n  | Cons {key = k; next} ->\n    if eq_key k key then (\n      h.size <- h.size - 1;\n      match prec with\n      | Empty -> Array.unsafe_set h.data i next\n      | Cons c -> c.next <- next)\n    else remove_bucket h i key ~prec:buck next eq_key\n\nlet rec replace_bucket key data (buck : _ bucket) eq_key =\n  match buck with\n  | Empty -> true\n  | Cons slot ->\n    if eq_key slot.key key then (\n      slot.key <- key;\n      slot.data <- data;\n      false)\n    else replace_bucket key data slot.next eq_key\n\nmodule type S = sig\n  type key\n\n  type 'a t\n\n  val create : int -> 'a t\n\n  val clear : 'a t -> unit\n\n  val reset : 'a t -> unit\n\n  val add : 'a t -> key -> 'a -> unit\n\n  val add_or_update : 'a t -> key -> update:('a -> 'a) -> 'a -> unit\n\n  val remove : 'a t -> key -> unit\n\n  val find_exn : 'a t -> key -> 'a\n\n  val find_all : 'a t -> key -> 'a list\n\n  val find_opt : 'a t -> key -> 'a option\n\n  val find_key_opt : 'a t -> key -> key option\n  (** return the key found in the hashtbl. Use case: when you find the key\n      existed in hashtbl, you want to use the one stored in the hashtbl. (they\n      are semantically equivlanent, but may have other information different) *)\n\n  val find_default : 'a t -> key -> 'a -> 'a\n\n  val replace : 'a t -> key -> 'a -> unit\n\n  val mem : 'a t -> key -> bool\n\n  val iter : 'a t -> (key -> 'a -> unit) -> unit\n\n  val fold : 'a t -> 'b -> (key -> 'a -> 'b -> 'b) -> 'b\n\n  val length : 'a t -> int\n\n  (* val stats: 'a t -> Hashtbl.statistics *)\n  val to_list : 'a t -> (key -> 'a -> 'c) -> 'c list\n\n  val of_list2 : key list -> 'a list -> 'a t\nend\n"
  },
  {
    "path": "analysis/vendor/ext/hash_ident.mli",
    "content": "include Hash_gen.S with type key = Ident.t\n"
  },
  {
    "path": "analysis/vendor/ext/hash_int.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Hash_gen.S with type key = int\n"
  },
  {
    "path": "analysis/vendor/ext/hash_set.cppo.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n[@@@warning \"-32\"] (* FIXME *)\n#ifdef TYPE_INT\ntype key = int\nlet key_index (h :  _ Hash_set_gen.t ) (key : key) =\n  (Bs_hash_stubs.hash_int  key) land (Array.length h.data - 1)\nlet eq_key = Ext_int.equal \ntype  t = key  Hash_set_gen.t \n#elif defined TYPE_STRING\ntype key = string \nlet key_index (h :  _ Hash_set_gen.t ) (key : key) =\n  (Bs_hash_stubs.hash_string  key) land (Array.length h.data - 1)\nlet eq_key = Ext_string.equal \ntype  t = key  Hash_set_gen.t \n#elif defined TYPE_IDENT\ntype key = Ident.t\nlet key_index (h :  _ Hash_set_gen.t ) (key : key) =\n  (Bs_hash_stubs.hash_string_int  key.name key.stamp) land (Array.length h.data - 1)\nlet eq_key = Ext_ident.equal\ntype t = key Hash_set_gen.t\n#elif defined TYPE_FUNCTOR\nmodule Make (H: Hashtbl.HashedType) : (Hash_set_gen.S with type key = H.t) = struct \n  type key = H.t \n  let eq_key = H.equal\n  let key_index (h :  _ Hash_set_gen.t ) key =\n    (H.hash  key) land (Array.length h.data - 1)\n  type t = key Hash_set_gen.t\n\n#elif defined TYPE_POLY\n  [@@@warning \"-3\"]\n  (* we used cppo the mixture does not work*)\n  external seeded_hash_param :\n    int -> int -> int -> 'a -> int = \"caml_hash\" \"noalloc\"\n  let key_index (h :  _ Hash_set_gen.t ) (key : 'a) =\n    seeded_hash_param 10 100 0 key land (Array.length h.data - 1)\n  let eq_key = (=)\n  type  'a t = 'a Hash_set_gen.t \n#else \n      [%error \"unknown type\"]\n#endif \n\n\n      let create = Hash_set_gen.create\n  let clear = Hash_set_gen.clear\n  let reset = Hash_set_gen.reset\n  (* let copy = Hash_set_gen.copy *)\n  let iter = Hash_set_gen.iter\n  let fold = Hash_set_gen.fold\n  let length = Hash_set_gen.length\n  (* let stats = Hash_set_gen.stats *)\n  let to_list = Hash_set_gen.to_list\n\n\n\n  let remove (h : _ Hash_set_gen.t ) key =\n    let i = key_index h key in\n    let h_data = h.data in \n    Hash_set_gen.remove_bucket h i key ~prec:Empty (Array.unsafe_get h_data i) eq_key    \n\n\n\n  let add (h : _ 
Hash_set_gen.t) key =\n    let i = key_index h key  in \n    let h_data = h.data in \n    let old_bucket = (Array.unsafe_get h_data i) in\n    if not (Hash_set_gen.small_bucket_mem eq_key key old_bucket) then \n      begin \n        Array.unsafe_set h_data i (Cons {key = key ; next =  old_bucket});\n        h.size <- h.size + 1 ;\n        if h.size > Array.length h_data lsl 1 then Hash_set_gen.resize key_index h\n      end\n\n  let of_array arr = \n    let len = Array.length arr in \n    let tbl = create len in \n    for i = 0 to len - 1  do\n      add tbl (Array.unsafe_get arr i);\n    done ;\n    tbl \n\n\n  let check_add (h : _ Hash_set_gen.t) key : bool =\n    let i = key_index h key  in \n    let h_data = h.data in  \n    let old_bucket = (Array.unsafe_get h_data i) in\n    if not (Hash_set_gen.small_bucket_mem eq_key key old_bucket) then \n      begin \n        Array.unsafe_set h_data i  (Cons { key = key ; next =  old_bucket});\n        h.size <- h.size + 1 ;\n        if h.size > Array.length h_data lsl 1 then Hash_set_gen.resize key_index h;\n        true \n      end\n    else false \n\n\n  let mem (h :  _ Hash_set_gen.t) key =\n    Hash_set_gen.small_bucket_mem eq_key key (Array.unsafe_get h.data (key_index h key)) \n\n#ifdef TYPE_FUNCTOR\nend\n#endif\n\n"
  },
  {
    "path": "analysis/vendor/ext/hash_set.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(** Ideas are based on {!Hash}, however, {!Hash.add} does not really optimize\n    and has a bad semantics for {!Hash_set}, This module fixes the semantics of\n    [add]. [remove] is not optimized since it is not used too much *)\n\n(** A naive t implementation on top of [hashtbl], the value is [unit]*)\nmodule Make (H : Hashtbl.HashedType) : Hash_set_gen.S with type key = H.t\n"
  },
  {
    "path": "analysis/vendor/ext/hash_set_gen.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\n(* We do dynamic hashing, and resize the table and rehash the elements\n   when buckets become too long. 
*)\n\ntype 'a bucket = Empty | Cons of {mutable key: 'a; mutable next: 'a bucket}\n\ntype 'a t = {\n  mutable size: int;\n  (* number of entries *)\n  mutable data: 'a bucket array;\n  (* the buckets *)\n  initial_size: int; (* initial array size *)\n}\n\nlet create initial_size =\n  let s = Ext_util.power_2_above 16 initial_size in\n  {initial_size = s; size = 0; data = Array.make s Empty}\n\nlet clear h =\n  h.size <- 0;\n  let len = Array.length h.data in\n  for i = 0 to len - 1 do\n    Array.unsafe_set h.data i Empty\n  done\n\nlet reset h =\n  h.size <- 0;\n  h.data <- Array.make h.initial_size Empty\n\nlet length h = h.size\n\nlet resize indexfun h =\n  let odata = h.data in\n  let osize = Array.length odata in\n  let nsize = osize * 2 in\n  if nsize < Sys.max_array_length then (\n    let ndata = Array.make nsize Empty in\n    let ndata_tail = Array.make nsize Empty in\n    h.data <- ndata;\n    (* so that indexfun sees the new bucket count *)\n    let rec insert_bucket = function\n      | Empty -> ()\n      | Cons {key; next} as cell ->\n        let nidx = indexfun h key in\n        (match Array.unsafe_get ndata_tail nidx with\n        | Empty -> Array.unsafe_set ndata nidx cell\n        | Cons tail -> tail.next <- cell);\n        Array.unsafe_set ndata_tail nidx cell;\n        insert_bucket next\n    in\n    for i = 0 to osize - 1 do\n      insert_bucket (Array.unsafe_get odata i)\n    done;\n    for i = 0 to nsize - 1 do\n      match Array.unsafe_get ndata_tail i with\n      | Empty -> ()\n      | Cons tail -> tail.next <- Empty\n    done)\n\nlet iter h f =\n  let rec do_bucket = function\n    | Empty -> ()\n    | Cons l ->\n      f l.key;\n      do_bucket l.next\n  in\n  let d = h.data in\n  for i = 0 to Array.length d - 1 do\n    do_bucket (Array.unsafe_get d i)\n  done\n\nlet fold h init f =\n  let rec do_bucket b accu =\n    match b with\n    | Empty -> accu\n    | Cons l -> do_bucket l.next (f l.key accu)\n  in\n  let d = h.data in\n  let accu = ref 
init in\n  for i = 0 to Array.length d - 1 do\n    accu := do_bucket (Array.unsafe_get d i) !accu\n  done;\n  !accu\n\nlet to_list set = fold set [] List.cons\n\nlet rec small_bucket_mem eq key lst =\n  match lst with\n  | Empty -> false\n  | Cons lst -> (\n    eq key lst.key\n    ||\n    match lst.next with\n    | Empty -> false\n    | Cons lst -> (\n      eq key lst.key\n      ||\n      match lst.next with\n      | Empty -> false\n      | Cons lst -> eq key lst.key || small_bucket_mem eq key lst.next))\n\nlet rec remove_bucket (h : _ t) (i : int) key ~(prec : _ bucket)\n    (buck : _ bucket) eq_key =\n  match buck with\n  | Empty -> ()\n  | Cons {key = k; next} ->\n    if eq_key k key then (\n      h.size <- h.size - 1;\n      match prec with\n      | Empty -> Array.unsafe_set h.data i next\n      | Cons c -> c.next <- next)\n    else remove_bucket h i key ~prec:buck next eq_key\n\nmodule type S = sig\n  type key\n\n  type t\n\n  val create : int -> t\n\n  val clear : t -> unit\n\n  val reset : t -> unit\n\n  (* val copy: t -> t *)\n  val remove : t -> key -> unit\n\n  val add : t -> key -> unit\n\n  val of_array : key array -> t\n\n  val check_add : t -> key -> bool\n\n  val mem : t -> key -> bool\n\n  val iter : t -> (key -> unit) -> unit\n\n  val fold : t -> 'b -> (key -> 'b -> 'b) -> 'b\n\n  val length : t -> int\n\n  (* val stats:  t -> Hashtbl.statistics *)\n  val to_list : t -> key list\nend\n"
  },
  {
    "path": "analysis/vendor/ext/hash_set_ident.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Hash_set_gen.S with type key = Ident.t\n"
  },
  {
    "path": "analysis/vendor/ext/hash_set_ident_mask.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(** A speicalized datastructure for scc algorithm *)\n\ntype ident = Ident.t\n\ntype bucket = Empty | Cons of {ident: ident; mutable mask: bool; rest: bucket}\n\ntype t = {\n  mutable size: int;\n  mutable data: bucket array;\n  mutable mask_size: int; (* mark how many idents are marked *)\n}\n\nlet key_index_by_ident (h : t) (key : Ident.t) =\n  Bs_hash_stubs.hash_string_int key.name key.stamp land (Array.length h.data - 1)\n\nlet create initial_size =\n  let s = Ext_util.power_2_above 8 initial_size in\n  {size = 0; data = Array.make s Empty; mask_size = 0}\n\nlet iter_and_unmask h f =\n  let rec iter_bucket buckets =\n    match buckets with\n    | Empty -> ()\n    | Cons k ->\n      let k_mask = k.mask in\n      f k.ident k_mask;\n      if k_mask then (\n        k.mask <- false;\n        (* we can set [h.mask_size] to zero,\n           however, it would result inconsistent state\n           once [f] throw\n        *)\n        h.mask_size <- h.mask_size - 1);\n      iter_bucket k.rest\n  in\n  let d = h.data in\n  for i = 0 to Array.length d - 1 do\n    iter_bucket (Array.unsafe_get d i)\n  done\n\nlet rec small_bucket_mem key lst =\n  match lst with\n  | Empty -> false\n  | Cons rst -> (\n    Ext_ident.equal key rst.ident\n    ||\n    match rst.rest with\n    | Empty -> false\n    | Cons rst -> (\n      Ext_ident.equal key rst.ident\n      ||\n      match rst.rest with\n      | Empty -> false\n      | Cons rst ->\n        Ext_ident.equal key rst.ident || small_bucket_mem key rst.rest))\n\nlet resize indexfun h =\n  let odata = h.data in\n  let osize = Array.length odata in\n  let nsize = osize * 2 in\n  if nsize < Sys.max_array_length then (\n    let ndata = Array.make nsize Empty in\n    h.data <- ndata;\n    (* so that indexfun sees the new bucket count *)\n    let rec insert_bucket = function\n      | Empty -> ()\n      | Cons {ident = key; mask; rest} ->\n        let nidx = indexfun h key in\n        Array.unsafe_set ndata nidx\n          (Cons {ident 
= key; mask; rest = Array.unsafe_get ndata nidx});\n        insert_bucket rest\n    in\n    for i = 0 to osize - 1 do\n      insert_bucket (Array.unsafe_get odata i)\n    done)\n\nlet add_unmask (h : t) (key : Ident.t) =\n  let i = key_index_by_ident h key in\n  let h_data = h.data in\n  let old_bucket = Array.unsafe_get h_data i in\n  if not (small_bucket_mem key old_bucket) then (\n    Array.unsafe_set h_data i\n      (Cons {ident = key; mask = false; rest = old_bucket});\n    h.size <- h.size + 1;\n    if h.size > Array.length h_data lsl 1 then resize key_index_by_ident h)\n\nlet rec small_bucket_mask key lst =\n  match lst with\n  | Empty -> false\n  | Cons rst -> (\n    if Ext_ident.equal key rst.ident then\n      if rst.mask then false\n      else (\n        rst.mask <- true;\n        true)\n    else\n      match rst.rest with\n      | Empty -> false\n      | Cons rst -> (\n        if Ext_ident.equal key rst.ident then\n          if rst.mask then false\n          else (\n            rst.mask <- true;\n            true)\n        else\n          match rst.rest with\n          | Empty -> false\n          | Cons rst ->\n            if Ext_ident.equal key rst.ident then\n              if rst.mask then false\n              else (\n                rst.mask <- true;\n                true)\n            else small_bucket_mask key rst.rest))\n\nlet mask_and_check_all_hit (h : t) (key : Ident.t) =\n  if small_bucket_mask key (Array.unsafe_get h.data (key_index_by_ident h key))\n  then h.mask_size <- h.mask_size + 1;\n  h.size = h.mask_size\n"
  },
  {
    "path": "analysis/vendor/ext/hash_set_ident_mask.mli",
    "content": "type ident = Ident.t\n(** Based on [hash_set] specialized for mask operations *)\n\ntype t\n\nval create : int -> t\n\n(* add one ident\n   ident is unmaksed by default\n*)\nval add_unmask : t -> ident -> unit\n\nval mask_and_check_all_hit : t -> ident -> bool\n(** [check_mask h key] if [key] exists mask it otherwise nothing return true if\n    all keys are masked otherwise false *)\n\nval iter_and_unmask : t -> (ident -> bool -> unit) -> unit\n(** [iter_and_unmask f h] iterating the collection and mask all idents, dont\n    consul the collection in function [f] TODO: what happens if an exception\n    raised in the callback, would the hashtbl still be in consistent state? *)\n"
  },
  {
    "path": "analysis/vendor/ext/hash_set_int.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Hash_set_gen.S with type key = int\n"
  },
  {
    "path": "analysis/vendor/ext/hash_set_poly.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype 'a t\n\nval create : int -> 'a t\n\nval clear : 'a t -> unit\n\nval reset : 'a t -> unit\n\n(* val copy : 'a t -> 'a t *)\n\nval add : 'a t -> 'a -> unit\n\nval remove : 'a t -> 'a -> unit\n\nval mem : 'a t -> 'a -> bool\n\nval iter : 'a t -> ('a -> unit) -> unit\n\nval to_list : 'a t -> 'a list\n\nval length : 'a t -> int\n\n(* val stats:  'a t -> Hashtbl.statistics *)\n"
  },
  {
    "path": "analysis/vendor/ext/hash_set_string.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Hash_set_gen.S with type key = string\n"
  },
  {
    "path": "analysis/vendor/ext/hash_string.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Hash_gen.S with type key = string\n"
  },
  {
    "path": "analysis/vendor/ext/ident.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Format\n\ntype t = {stamp: int; name: string; mutable flags: int}\n\nlet[@inlnie] max (x : int) y = if x >= y then x else y\nlet global_flag = 1\nlet predef_exn_flag = 2\n\n(* A stamp of 0 denotes a persistent identifier *)\n\nlet currentstamp = ref 0\n\nlet create s =\n  incr currentstamp;\n  {name = s; stamp = !currentstamp; flags = 0}\n\nlet create_predef_exn s =\n  incr currentstamp;\n  {name = s; stamp = !currentstamp; flags = predef_exn_flag}\n\nlet create_persistent s = {name = s; stamp = 0; flags = global_flag}\n\nlet rename i =\n  incr currentstamp;\n  {i with stamp = !currentstamp}\n\nlet name i = i.name\n\nlet unique_name i = i.name ^ \"_\" ^ string_of_int i.stamp\n\nlet unique_toplevel_name i = i.name ^ \"/\" ^ string_of_int i.stamp\n\nlet persistent i = i.stamp = 0\n\nlet equal i1 i2 = i1.name = i2.name\n\nlet same ({stamp; name} : t) i2 =\n  if stamp <> 0 
then stamp = i2.stamp else i2.stamp = 0 && name = i2.name\n\nlet binding_time i = i.stamp\n\nlet current_time () = !currentstamp\nlet set_current_time t = currentstamp := max !currentstamp t\n\nlet reinit_level = ref (-1)\n\nlet reinit () =\n  if !reinit_level < 0 then reinit_level := !currentstamp\n  else currentstamp := !reinit_level\n\nlet hide i = {i with stamp = -1}\n\nlet make_global i = i.flags <- i.flags lor global_flag\n\nlet global i = i.flags land global_flag <> 0\n\nlet is_predef_exn i = i.flags land predef_exn_flag <> 0\n\nlet print ppf i =\n  match i.stamp with\n  | 0 -> fprintf ppf \"%s!\" i.name\n  | -1 -> fprintf ppf \"%s#\" i.name\n  | n -> fprintf ppf \"%s/%i%s\" i.name n (if global i then \"g\" else \"\")\n\ntype 'a tbl = Empty | Node of 'a tbl * 'a data * 'a tbl * int\n\nand 'a data = {ident: t; data: 'a; previous: 'a data option}\n\nlet empty = Empty\n\n(* Inline expansion of height for better speed\n * let height = function\n *     Empty -> 0\n *   | Node(_,_,_,h) -> h\n *)\n\nlet mknode l d r =\n  let hl =\n    match l with\n    | Empty -> 0\n    | Node (_, _, _, h) -> h\n  and hr =\n    match r with\n    | Empty -> 0\n    | Node (_, _, _, h) -> h\n  in\n  Node (l, d, r, if hl >= hr then hl + 1 else hr + 1)\n\nlet balance l d r =\n  let hl =\n    match l with\n    | Empty -> 0\n    | Node (_, _, _, h) -> h\n  and hr =\n    match r with\n    | Empty -> 0\n    | Node (_, _, _, h) -> h\n  in\n  if hl > hr + 1 then\n    match l with\n    | Node (ll, ld, lr, _)\n      when (match ll with\n           | Empty -> 0\n           | Node (_, _, _, h) -> h)\n           >=\n           match lr with\n           | Empty -> 0\n           | Node (_, _, _, h) -> h ->\n      mknode ll ld (mknode lr d r)\n    | Node (ll, ld, Node (lrl, lrd, lrr, _), _) ->\n      mknode (mknode ll ld lrl) lrd (mknode lrr d r)\n    | _ -> assert false\n  else if hr > hl + 1 then\n    match r with\n    | Node (rl, rd, rr, _)\n      when (match rr with\n           | Empty -> 0\n     
      | Node (_, _, _, h) -> h)\n           >=\n           match rl with\n           | Empty -> 0\n           | Node (_, _, _, h) -> h ->\n      mknode (mknode l d rl) rd rr\n    | Node (Node (rll, rld, rlr, _), rd, rr, _) ->\n      mknode (mknode l d rll) rld (mknode rlr rd rr)\n    | _ -> assert false\n  else mknode l d r\n\nlet rec add id data = function\n  | Empty -> Node (Empty, {ident = id; data; previous = None}, Empty, 1)\n  | Node (l, k, r, h) ->\n    let c = compare id.name k.ident.name in\n    if c = 0 then Node (l, {ident = id; data; previous = Some k}, r, h)\n    else if c < 0 then balance (add id data l) k r\n    else balance l k (add id data r)\n\nlet rec find_stamp s = function\n  | None -> raise Not_found\n  | Some k -> if k.ident.stamp = s then k.data else find_stamp s k.previous\n\nlet rec find_same id = function\n  | Empty -> raise Not_found\n  | Node (l, k, r, _) ->\n    let c = compare id.name k.ident.name in\n    if c = 0 then\n      if id.stamp = k.ident.stamp then k.data\n      else find_stamp id.stamp k.previous\n    else find_same id (if c < 0 then l else r)\n\nlet rec find_name name = function\n  | Empty -> raise Not_found\n  | Node (l, k, r, _) ->\n    let c = compare name k.ident.name in\n    if c = 0 then (k.ident, k.data) else find_name name (if c < 0 then l else r)\n\nlet rec get_all = function\n  | None -> []\n  | Some k -> (k.ident, k.data) :: get_all k.previous\n\nlet rec find_all name = function\n  | Empty -> []\n  | Node (l, k, r, _) ->\n    let c = compare name k.ident.name in\n    if c = 0 then (k.ident, k.data) :: get_all k.previous\n    else find_all name (if c < 0 then l else r)\n\nlet rec fold_aux f stack accu = function\n  | Empty -> (\n    match stack with\n    | [] -> accu\n    | a :: l -> fold_aux f l accu a)\n  | Node (l, k, r, _) -> fold_aux f (l :: stack) (f k accu) r\n\nlet fold_name f tbl accu = fold_aux (fun k -> f k.ident k.data) [] accu tbl\n\nlet rec fold_data f d accu =\n  match d with\n  | None -> accu\n  | 
Some k -> f k.ident k.data (fold_data f k.previous accu)\n\nlet fold_all f tbl accu = fold_aux (fun k -> fold_data f (Some k)) [] accu tbl\n\n(* let keys tbl = fold_name (fun k _ accu -> k::accu) tbl [] *)\n\nlet rec iter f = function\n  | Empty -> ()\n  | Node (l, k, r, _) ->\n    iter f l;\n    f k.ident k.data;\n    iter f r\n\n(* Idents for sharing keys *)\n\n(* They should be 'totally fresh' -> neg numbers *)\nlet key_name = \"\"\n\nlet make_key_generator () =\n  let c = ref 1 in\n  fun id ->\n    let stamp = !c in\n    decr c;\n    {id with name = key_name; stamp}\n\nlet compare x y =\n  let c = x.stamp - y.stamp in\n  if c <> 0 then c\n  else\n    let c = compare x.name y.name in\n    if c <> 0 then c else compare x.flags y.flags\n\nlet output oc id = output_string oc (unique_name id)\nlet hash i = Char.code i.name.[0] lxor i.stamp\n\nlet original_equal = equal\ninclude Identifiable.Make (struct\n  type nonrec t = t\n  let compare = compare\n  let output = output\n  let print = print\n  let hash = hash\n  let equal = same\nend)\nlet equal = original_equal\n"
  },
  {
    "path": "analysis/vendor/ext/ident.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Identifiers (unique names) *)\n\ntype t = {stamp: int; name: string; mutable flags: int}\n\ninclude Identifiable.S with type t := t\n(* Notes:\n   - [equal] compares identifiers by name\n   - [compare x y] is 0 if [same x y] is true.\n   - [compare] compares identifiers by binding location\n*)\n\nval create : string -> t\nval create_persistent : string -> t\nval create_predef_exn : string -> t\nval rename : t -> t\nval name : t -> string\nval unique_name : t -> string\nval unique_toplevel_name : t -> string\nval persistent : t -> bool\nval same : t -> t -> bool\n(* Compare identifiers by binding location.\n   Two identifiers are the same either if they are both\n   non-persistent and have been created by the same call to\n   [new], or if they are both persistent and have the same\n   name. 
*)\n\nval compare : t -> t -> int\nval hide : t -> t\n(* Return an identifier with same name as the given identifier,\n   but stamp different from any stamp returned by new.\n   When put in a 'a tbl, this identifier can only be looked\n   up by name. *)\n\nval make_global : t -> unit\nval global : t -> bool\nval is_predef_exn : t -> bool\n\nval binding_time : t -> int\nval current_time : unit -> int\nval set_current_time : int -> unit\nval reinit : unit -> unit\n\ntype 'a tbl\n(* Association tables from identifiers to type 'a. *)\n\nval empty : 'a tbl\nval add : t -> 'a -> 'a tbl -> 'a tbl\nval find_same : t -> 'a tbl -> 'a\nval find_name : string -> 'a tbl -> t * 'a\nval find_all : string -> 'a tbl -> (t * 'a) list\nval fold_name : (t -> 'a -> 'b -> 'b) -> 'a tbl -> 'b -> 'b\nval fold_all : (t -> 'a -> 'b -> 'b) -> 'a tbl -> 'b -> 'b\nval iter : (t -> 'a -> unit) -> 'a tbl -> unit\n\n(* Idents for sharing keys *)\n\nval make_key_generator : unit -> t -> t\n"
  },
  {
    "path": "analysis/vendor/ext/identifiable.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                       Pierre Chambart, OCamlPro                        *)\n(*           Mark Shinwell and Leo White, Jane Street Europe              *)\n(*                                                                        *)\n(*   Copyright 2013--2016 OCamlPro SAS                                    *)\n(*   Copyright 2014--2016 Jane Street Group LLC                           *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          
*)\n(*                                                                        *)\n(**************************************************************************)\n\nmodule type Thing = sig\n  type t\n\n  include Hashtbl.HashedType with type t := t\n  include Map.OrderedType with type t := t\n\n  val output : out_channel -> t -> unit\n  val print : Format.formatter -> t -> unit\nend\n\nmodule type Set = sig\n  module T : Set.OrderedType\n  include Set.S with type elt = T.t and type t = Set.Make(T).t\n\n  val output : out_channel -> t -> unit\n  val print : Format.formatter -> t -> unit\n  val to_string : t -> string\n  val of_list : elt list -> t\n  val map : (elt -> elt) -> t -> t\nend\n\nmodule type Map = sig\n  module T : Map.OrderedType\n  include Map.S with type key = T.t and type 'a t = 'a Map.Make(T).t\n\n  val filter_map : (key -> 'a -> 'b option) -> 'a t -> 'b t\n  val of_list : (key * 'a) list -> 'a t\n\n  val disjoint_union :\n    ?eq:('a -> 'a -> bool) ->\n    ?print:(Format.formatter -> 'a -> unit) ->\n    'a t ->\n    'a t ->\n    'a t\n\n  val union_right : 'a t -> 'a t -> 'a t\n\n  val union_left : 'a t -> 'a t -> 'a t\n\n  val union_merge : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t\n  val rename : key t -> key -> key\n  val map_keys : (key -> key) -> 'a t -> 'a t\n  val keys : 'a t -> Set.Make(T).t\n  val data : 'a t -> 'a list\n  val of_set : (key -> 'a) -> Set.Make(T).t -> 'a t\n  val transpose_keys_and_data : key t -> key t\n  val transpose_keys_and_data_set : key t -> Set.Make(T).t t\n  val print :\n    (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit\nend\n\nmodule type Tbl = sig\n  module T : sig\n    type t\n    include Map.OrderedType with type t := t\n    include Hashtbl.HashedType with type t := t\n  end\n  include Hashtbl.S with type key = T.t and type 'a t = 'a Hashtbl.Make(T).t\n\n  val to_list : 'a t -> (T.t * 'a) list\n  val of_list : (T.t * 'a) list -> 'a t\n\n  val to_map : 'a t -> 'a Map.Make(T).t\n  val of_map : 'a 
Map.Make(T).t -> 'a t\n  val memoize : 'a t -> (key -> 'a) -> key -> 'a\n  val map : 'a t -> ('a -> 'b) -> 'b t\nend\n\nmodule Pair (A : Thing) (B : Thing) : Thing with type t = A.t * B.t = struct\n  type t = A.t * B.t\n\n  let compare (a1, b1) (a2, b2) =\n    let c = A.compare a1 a2 in\n    if c <> 0 then c else B.compare b1 b2\n\n  let output oc (a, b) = Printf.fprintf oc \" (%a, %a)\" A.output a B.output b\n  let hash (a, b) = Hashtbl.hash (A.hash a, B.hash b)\n  let equal (a1, b1) (a2, b2) = A.equal a1 a2 && B.equal b1 b2\n  let print ppf (a, b) = Format.fprintf ppf \" (%a, @ %a)\" A.print a B.print b\nend\n\nmodule Make_map (T : Thing) = struct\n  include Map.Make (T)\n\n  let filter_map f t =\n    fold\n      (fun id v map ->\n        match f id v with\n        | None -> map\n        | Some r -> add id r map)\n      t empty\n\n  let of_list l = List.fold_left (fun map (id, v) -> add id v map) empty l\n\n  let disjoint_union ?eq ?print m1 m2 =\n    union\n      (fun id v1 v2 ->\n        let ok =\n          match eq with\n          | None -> false\n          | Some eq -> eq v1 v2\n        in\n        if not ok then\n          let err =\n            match print with\n            | None -> Format.asprintf \"Map.disjoint_union %a\" T.print id\n            | Some print ->\n              Format.asprintf \"Map.disjoint_union %a => %a <> %a\" T.print id\n                print v1 print v2\n          in\n          Misc.fatal_error err\n        else Some v1)\n      m1 m2\n\n  let union_right m1 m2 =\n    merge\n      (fun _id x y ->\n        match (x, y) with\n        | None, None -> None\n        | None, Some v | Some v, None | Some _, Some v -> Some v)\n      m1 m2\n\n  let union_left m1 m2 = union_right m2 m1\n\n  let union_merge f m1 m2 =\n    let aux _ m1 m2 =\n      match (m1, m2) with\n      | None, m | m, None -> m\n      | Some m1, Some m2 -> Some (f m1 m2)\n    in\n    merge aux m1 m2\n\n  let rename m v = try find v m with Not_found -> v\n\n  let map_keys f m 
= of_list (List.map (fun (k, v) -> (f k, v)) (bindings m))\n\n  let print f ppf s =\n    let elts ppf s =\n      iter (fun id v -> Format.fprintf ppf \"@ (@[%a@ %a@])\" T.print id f v) s\n    in\n    Format.fprintf ppf \"@[<1>{@[%a@ @]}@]\" elts s\n\n  module T_set = Set.Make (T)\n\n  let keys map = fold (fun k _ set -> T_set.add k set) map T_set.empty\n\n  let data t = List.map snd (bindings t)\n\n  let of_set f set = T_set.fold (fun e map -> add e (f e) map) set empty\n\n  let transpose_keys_and_data map = fold (fun k v m -> add v k m) map empty\n  let transpose_keys_and_data_set map =\n    fold\n      (fun k v m ->\n        let set =\n          match find v m with\n          | exception Not_found -> T_set.singleton k\n          | set -> T_set.add k set\n        in\n        add v set m)\n      map empty\nend\n\nmodule Make_set (T : Thing) = struct\n  include Set.Make (T)\n\n  let output oc s =\n    Printf.fprintf oc \" ( \";\n    iter (fun v -> Printf.fprintf oc \"%a \" T.output v) s;\n    Printf.fprintf oc \")\"\n\n  let print ppf s =\n    let elts ppf s = iter (fun e -> Format.fprintf ppf \"@ %a\" T.print e) s in\n    Format.fprintf ppf \"@[<1>{@[%a@ @]}@]\" elts s\n\n  let to_string s = Format.asprintf \"%a\" print s\n\n  let of_list l =\n    match l with\n    | [] -> empty\n    | [t] -> singleton t\n    | t :: q -> List.fold_left (fun acc e -> add e acc) (singleton t) q\n\n  let map f s = of_list (List.map f (elements s))\nend\n\nmodule Make_tbl (T : Thing) = struct\n  include Hashtbl.Make (T)\n\n  module T_map = Make_map (T)\n\n  let to_list t = fold (fun key datum elts -> (key, datum) :: elts) t []\n\n  let of_list elts =\n    let t = create 42 in\n    List.iter (fun (key, datum) -> add t key datum) elts;\n    t\n\n  let to_map v = fold T_map.add v T_map.empty\n\n  let of_map m =\n    let t = create (T_map.cardinal m) in\n    T_map.iter (fun k v -> add t k v) m;\n    t\n\n  let memoize t f key =\n    try find t key\n    with Not_found ->\n      let r = f 
key in\n      add t key r;\n      r\n\n  let map t f = of_map (T_map.map f (to_map t))\nend\n\nmodule type S = sig\n  type t\n\n  module T : Thing with type t = t\n  include Thing with type t := T.t\n\n  module Set : Set with module T := T\n  module Map : Map with module T := T\n  module Tbl : Tbl with module T := T\nend\n\nmodule Make (T : Thing) = struct\n  module T = T\n  include T\n\n  module Set = Make_set (T)\n  module Map = Make_map (T)\n  module Tbl = Make_tbl (T)\nend\n"
  },
  {
    "path": "analysis/vendor/ext/identifiable.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                       Pierre Chambart, OCamlPro                        *)\n(*           Mark Shinwell and Leo White, Jane Street Europe              *)\n(*                                                                        *)\n(*   Copyright 2013--2016 OCamlPro SAS                                    *)\n(*   Copyright 2014--2016 Jane Street Group LLC                           *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Uniform interface for common data structures over various things. 
*)\n\nmodule type Thing = sig\n  type t\n\n  include Hashtbl.HashedType with type t := t\n  include Map.OrderedType with type t := t\n\n  val output : out_channel -> t -> unit\n  val print : Format.formatter -> t -> unit\nend\n\nmodule Pair : functor (A : Thing) (B : Thing) -> Thing with type t = A.t * B.t\n\nmodule type Set = sig\n  module T : Set.OrderedType\n  include Set.S with type elt = T.t and type t = Set.Make(T).t\n\n  val output : out_channel -> t -> unit\n  val print : Format.formatter -> t -> unit\n  val to_string : t -> string\n  val of_list : elt list -> t\n  val map : (elt -> elt) -> t -> t\nend\n\nmodule type Map = sig\n  module T : Map.OrderedType\n  include Map.S with type key = T.t and type 'a t = 'a Map.Make(T).t\n\n  val filter_map : (key -> 'a -> 'b option) -> 'a t -> 'b t\n  val of_list : (key * 'a) list -> 'a t\n\n  val disjoint_union :\n    ?eq:('a -> 'a -> bool) ->\n    ?print:(Format.formatter -> 'a -> unit) ->\n    'a t ->\n    'a t ->\n    'a t\n  (** [disjoint_union m1 m2] contains all bindings from [m1] and [m2]. If some\n      binding is present in both and the associated value is not equal, a\n      Fatal_error is raised *)\n\n  val union_right : 'a t -> 'a t -> 'a t\n  (** [union_right m1 m2] contains all bindings from [m1] and [m2]. 
If some\n      binding is present in both, the one from [m2] is taken *)\n\n  val union_left : 'a t -> 'a t -> 'a t\n  (** [union_left m1 m2 = union_right m2 m1] *)\n\n  val union_merge : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t\n  val rename : key t -> key -> key\n  val map_keys : (key -> key) -> 'a t -> 'a t\n  val keys : 'a t -> Set.Make(T).t\n  val data : 'a t -> 'a list\n  val of_set : (key -> 'a) -> Set.Make(T).t -> 'a t\n  val transpose_keys_and_data : key t -> key t\n  val transpose_keys_and_data_set : key t -> Set.Make(T).t t\n  val print :\n    (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a t -> unit\nend\n\nmodule type Tbl = sig\n  module T : sig\n    type t\n    include Map.OrderedType with type t := t\n    include Hashtbl.HashedType with type t := t\n  end\n  include Hashtbl.S with type key = T.t and type 'a t = 'a Hashtbl.Make(T).t\n\n  val to_list : 'a t -> (T.t * 'a) list\n  val of_list : (T.t * 'a) list -> 'a t\n\n  val to_map : 'a t -> 'a Map.Make(T).t\n  val of_map : 'a Map.Make(T).t -> 'a t\n  val memoize : 'a t -> (key -> 'a) -> key -> 'a\n  val map : 'a t -> ('a -> 'b) -> 'b t\nend\n\nmodule type S = sig\n  type t\n\n  module T : Thing with type t = t\n  include Thing with type t := T.t\n\n  module Set : Set with module T := T\n  module Map : Map with module T := T\n  module Tbl : Tbl with module T := T\nend\n\nmodule Make (T : Thing) : S with type t := T.t\n"
  },
  {
    "path": "analysis/vendor/ext/int_vec_util.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nlet rec unsafe_mem_aux arr i (key : int) bound =\n  if i <= bound then\n    if Array.unsafe_get arr i = (key : int) then true\n    else unsafe_mem_aux arr (i + 1) key bound\n  else false\n\nlet mem key (x : Vec_int.t) =\n  let internal_array = Vec_int.unsafe_internal_array x in\n  let len = Vec_int.length x in\n  unsafe_mem_aux internal_array 0 key (len - 1)\n"
  },
  {
    "path": "analysis/vendor/ext/int_vec_util.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval mem : int -> Vec_int.t -> bool\n"
  },
  {
    "path": "analysis/vendor/ext/int_vec_vec.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Vec.Make (struct\n  type t = Vec_int.t\n\n  let null = Vec_int.empty ()\nend)\n"
  },
  {
    "path": "analysis/vendor/ext/int_vec_vec.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Vec_gen.S with type elt = Vec_int.t\n"
  },
  {
    "path": "analysis/vendor/ext/js_reserved_map.ml",
    "content": "(* Copyright (C) 2019-Present Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nmodule STbl = struct\n  include Hashtbl.Make (String)\n\n  let of_array arr =\n    let tbl = create (Array.length arr) in\n    let () = Array.iter (fun el -> add tbl el ()) arr in\n    tbl\nend\n\n(** Words that can never be identifier's name.\n\n    See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar#reserved_words\n *)\nlet js_keywords = STbl.of_array [|\n  \"break\";\n  \"case\";\n  \"catch\";\n  \"class\";\n  \"const\";\n  \"continue\";\n  \"debugger\";\n  \"default\";\n  \"delete\";\n  \"do\";\n  \"else\";\n  \"export\";\n  \"extends\";\n  \"false\";\n  \"finally\";\n  \"for\";\n  \"function\";\n  \"if\";\n  \"import\";\n  \"in\";\n  \"instanceof\";\n  \"new\";\n  \"null\";\n  \"return\";\n  \"super\";\n  \"switch\";\n  \"this\";\n  \"throw\";\n  \"true\";\n  \"try\";\n  \"typeof\";\n  \"var\";\n  \"void\";\n  \"while\";\n  \"with\";\n  (* The following are also reserved in strict context, including ESM *)\n  \"let\";\n  \"static\";\n  \"yield\";\n  (* `await` is reserved in async context, including ESM *)\n  \"await\";\n  (* Future reserved words *)\n  \"enum\";\n  \"implements\";\n  \"interface\";\n  \"package\";\n  \"private\";\n  \"protected\";\n  \"public\";\n|]\n\nlet is_js_keyword s = STbl.mem js_keywords s\n\n(** Identifiers with special meanings.\n\n    They can have different meanings depending on the context when used as identifier names, so it should be done carefully.\n\n    See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar#identifiers_with_special_meanings\n\n    However, these names are actually used with no problems today. 
Preventing this can be annoying.\n *)\nlet js_special_words = STbl.of_array [|\n  \"arguments\";\n  \"as\";\n  \"async\";\n  \"eval\";\n  \"from\";\n  \"get\";\n  \"of\";\n  \"set\";\n|]\n\nlet is_js_special_word s = STbl.mem js_special_words s\n\n(** Identifier names _might_ need to care about *)\nlet js_globals = STbl.of_array [|\n  (* JavaScript standards built-ins\n     See https://developer.mozilla.org/ko/docs/Web/JavaScript/Reference/Global_Objects\n  *)\n  \"AggregateError\";\n  \"Array\";\n  \"ArrayBuffer\";\n  \"AsyncFunction\";\n  \"AsyncGenerator\";\n  \"AsyncGeneratorFunction\";\n  \"AsyncIterator\";\n  \"Atomics\";\n  \"BigInt\";\n  \"BigInt64Array\";\n  \"BigUint64Array\";\n  \"Boolean\";\n  \"DataView\";\n  \"Date\";\n  \"decodeURI\";\n  \"decodeURIComponent\";\n  \"encodeURI\";\n  \"encodeURIComponent\";\n  \"Error\";\n  \"eval\";\n  \"EvalError\";\n  \"FinalizationRegistry\";\n  \"Float16Array\";\n  \"Float32Array\";\n  \"Float64Array\";\n  \"Function\";\n  \"Generator\";\n  \"GeneratorFunction\";\n  \"globalThis\";\n  \"Infinity\";\n  \"Int16Array\";\n  \"Int32Array\";\n  \"Int8Array\";\n  \"Intl\";\n  \"isFinite\";\n  \"isNaN\";\n  \"Iterator\";\n  \"JSON\";\n  \"Map\";\n  \"Math\";\n  \"NaN\";\n  \"Number\";\n  \"Object\";\n  \"parseFloat\";\n  \"parseInt\";\n  \"Promise\";\n  \"Proxy\";\n  \"RangeError\";\n  \"ReferenceError\";\n  \"Reflect\";\n  \"RegExp\";\n  \"Set\";\n  \"SharedArrayBuffer\";\n  \"String\";\n  \"Symbol\";\n  \"SyntaxError\";\n  \"TypedArray\";\n  \"TypeError\";\n  \"Uint16Array\";\n  \"Uint32Array\";\n  \"Uint8Array\";\n  \"Uint8ClampedArray\";\n  \"undefined\";\n  \"URIError\";\n  \"WeakMap\";\n  \"WeakRef\";\n  \"WeakSet\";\n\n  (* A few of the HTML standard globals\n  \n     See https://developer.mozilla.org/en-US/docs/Web/API/Window\n     See https://developer.mozilla.org/en-US/docs/Web/API/WorkerGlobalScope\n    \n     But we don't actually need to protect these names.\n   \n  \"window\";\n  \"self\";\n  \"document\";\n 
 \"location\";\n  \"navigator\";\n  \"origin\";\n  *)\n\n  (* A few of the Node.js globals\n  \n     Specifically related to the CommonJS module system\n     They cannot be redeclared in nested scope.\n  *)\n  \"__dirname\";\n  \"__filename\";\n  \"require\";\n  \"module\";\n  \"exports\";\n\n  (* Bun's global namespace *)\n  \"Bun\";\n\n  (* Deno's global namespace *)\n  \"Deno\";\n|]\n\nlet is_js_global s = STbl.mem js_globals s\n"
  },
  {
    "path": "analysis/vendor/ext/js_reserved_map.mli",
    "content": "(* Copyright (C) 2019-Present Authors of ReScript\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval is_js_keyword : string -> bool\n\nval is_js_special_word : string -> bool\n\nval is_js_global : string -> bool\n"
  },
  {
    "path": "analysis/vendor/ext/js_runtime_modules.ml",
    "content": "(* Copyright (C) 2017 Hongbo Zhang, Authors of ReScript\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nlet exceptions = \"Caml_exceptions\"\n\nlet io = \"Caml_io\"\n\nlet sys = \"Caml_sys\"\n\nlet lexer = \"Caml_lexer\"\n\nlet parser = \"Caml_parser\"\n\nlet obj_runtime = \"Caml_obj\"\n\nlet array = \"Caml_array\"\n\nlet format = \"Caml_format\"\n\nlet string = \"Caml_string\"\n\nlet bytes = \"Caml_bytes\"\nlet bytes_ = \"Bytes\"\n\nlet float = \"Caml_float\"\n\nlet hash_primitive = \"Caml_hash_primitive\"\n\nlet hash = \"Caml_hash\"\n\nlet curry = \"Curry\"\n\nlet caml_primitive = \"Caml\"\n\nlet int64 = \"Caml_int64\"\n\nlet md5 = \"Caml_md5\"\n\nlet int32 = \"Caml_int32\"\n\nlet bigint = \"Caml_bigint\"\n\nlet option = \"Caml_option\"\n\nlet module_ = \"Caml_module\"\n\nlet external_polyfill = \"Caml_external_polyfill\"\n\nlet caml_js_exceptions = \"Caml_js_exceptions\"\n\nlet caml_splice_call = \"Caml_splice_call\"\n\nlet deriving = \"Runtime_deriving\"\n\nlet promise = \"Runtime_promise\"\n\nlet astExtensions = \"Runtime_ast_extensions\"\n"
  },
  {
    "path": "analysis/vendor/ext/literals.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nlet js_array_ctor = \"Array\"\n\nlet js_type_number = \"number\"\n\nlet js_type_string = \"string\"\n\nlet js_type_object = \"object\"\n\nlet js_type_boolean = \"boolean\"\n\nlet js_undefined = \"undefined\"\n\nlet js_prop_length = \"length\"\n\nlet prim = \"prim\"\n\nlet param = \"param\"\n\nlet partial_arg = \"partial_arg\"\n\nlet tmp = \"tmp\"\n\nlet create = \"create\" (* {!Caml_exceptions.create}*)\n\nlet runtime = \"runtime\" (* runtime directory *)\n\nlet stdlib = \"stdlib\"\n\nlet imul = \"imul\" (* signed int32 mul *)\n\nlet setter_suffix = \"#=\"\n\nlet setter_suffix_len = String.length setter_suffix\n\nlet debugger = \"debugger\"\n\nlet fn_run = \"fn_run\"\n\nlet method_run = \"method_run\"\n\nlet fn_method = \"fn_method\"\n\nlet fn_mk = \"fn_mk\"\n(*let js_fn_runmethod = \"js_fn_runmethod\"*)\n\n(** nodejs *)\nlet node_modules = \"node_modules\"\n\nlet node_modules_length = String.length \"node_modules\"\n\nlet package_json = \"package.json\"\n\nlet bsconfig_json = \"bsconfig.json\"\n\nlet rescript_json = \"rescript.json\"\n\nlet build_ninja = \"build.ninja\"\n\n(* Name of the library file created for each external dependency. 
*)\nlet library_file = \"lib\"\n\nlet suffix_a = \".a\"\n\nlet suffix_cmj = \".cmj\"\n\nlet suffix_cmo = \".cmo\"\n\nlet suffix_cma = \".cma\"\n\nlet suffix_cmi = \".cmi\"\n\nlet suffix_cmx = \".cmx\"\n\nlet suffix_cmxa = \".cmxa\"\n\nlet suffix_mll = \".mll\"\n\nlet suffix_res = \".res\"\n\nlet suffix_resi = \".resi\"\n\nlet suffix_mlmap = \".mlmap\"\n\nlet suffix_cmt = \".cmt\"\n\nlet suffix_cmti = \".cmti\"\n\nlet suffix_ast = \".ast\"\n\nlet suffix_iast = \".iast\"\n\nlet suffix_d = \".d\"\n\nlet suffix_js = \".js\"\n\nlet suffix_gen_js = \".gen.js\"\n\nlet suffix_gen_tsx = \".gen.tsx\"\n\nlet esmodule = \"esmodule\"\n\nlet commonjs = \"commonjs\"\n\nlet es6 = \"es6\"\n(* [@@deprecated \"Will be removed in v12\"] *)\n\nlet es6_global = \"es6-global\"\n(* [@@deprecated \"Will be removed in v12\"] *)\n\nlet unused_attribute = \"Unused attribute \"\n\n(** Used when produce node compatible paths *)\nlet node_sep = \"/\"\n\nlet node_parent = \"..\"\n\nlet node_current = \".\"\n\nlet gentype_import1 = \"genType.import\"\nlet gentype_import2 = \"gentype.import\"\n\nlet bsbuild_cache = \".bsbuild\"\n\nlet sourcedirs_meta = \".sourcedirs.json\"\n\n(* Note the build system should check the validity of filenames\n   espeically, it should not contain '-'\n*)\nlet ns_sep_char = '-'\n\nlet ns_sep = \"-\"\n\nlet exception_id = \"RE_EXN_ID\"\n\nlet polyvar_hash = \"NAME\"\n\nlet polyvar_value = \"VAL\"\n\nlet cons = \"::\"\n\nlet hd = \"hd\"\n\nlet tl = \"tl\"\n\nlet lazy_done = \"LAZY_DONE\"\n\nlet lazy_val = \"VAL\"\n\nlet pure = \"@__PURE__\"\n"
  },
  {
    "path": "analysis/vendor/ext/map.cppo.ml",
    "content": "\n(* we don't create [map_poly], since some operations require raise an exception which carries [key] *)\n\n#ifdef TYPE_STRING\ntype key = string \nlet compare_key = Ext_string.compare\nlet [@inline] eq_key (x : key) y = x = y\n#elif defined TYPE_INT\ntype key = int\nlet compare_key = Ext_int.compare\nlet [@inline] eq_key (x : key) y = x = y\n#elif defined TYPE_IDENT\ntype key = Ident.t\nlet compare_key = Ext_ident.compare\nlet [@inline] eq_key (x : key) y = Ident.same x y\n#else\n    [%error \"unknown type\"]\n#endif\n    (* let [@inline] (=) (a : int) b = a = b *)\ntype + 'a t = (key,'a) Map_gen.t\n\nlet empty = Map_gen.empty \nlet is_empty = Map_gen.is_empty\nlet iter = Map_gen.iter\nlet fold = Map_gen.fold\nlet for_all = Map_gen.for_all \nlet exists = Map_gen.exists \nlet singleton = Map_gen.singleton \nlet cardinal = Map_gen.cardinal\nlet bindings = Map_gen.bindings\nlet to_sorted_array = Map_gen.to_sorted_array\nlet to_sorted_array_with_f = Map_gen.to_sorted_array_with_f\nlet keys = Map_gen.keys\n\n\n\nlet map = Map_gen.map \nlet mapi = Map_gen.mapi\nlet bal = Map_gen.bal \nlet height = Map_gen.height \n\n\nlet rec add (tree : _ Map_gen.t as 'a) x data  : 'a = match tree with \n  | Empty ->\n    singleton x data\n  | Leaf {k;v} ->\n    let c = compare_key x k in \n    if c = 0 then singleton x data else\n    if c < 0 then \n      Map_gen.unsafe_two_elements x data k v \n    else \n      Map_gen.unsafe_two_elements k v x data  \n  | Node {l; k ; v ; r; h} ->\n    let c = compare_key x k in\n    if c = 0 then\n      Map_gen.unsafe_node x data l r h (* at least need update data *)\n    else if c < 0 then\n      bal (add l x data ) k v r\n    else\n      bal l k v (add r x data )\n\n\nlet rec adjust (tree : _ Map_gen.t as 'a) x replace  : 'a = \n  match tree with \n  | Empty ->\n    singleton x (replace None)\n  | Leaf {k ; v} -> \n    let c = compare_key x k in \n    if c = 0 then singleton x (replace (Some v)) else \n    if c < 0 then \n      
Map_gen.unsafe_two_elements x (replace None) k v   \n    else\n      Map_gen.unsafe_two_elements k v x (replace None)   \n  | Node ({l; k ; r} as tree) ->\n    let c = compare_key x k in\n    if c = 0 then\n      Map_gen.unsafe_node x (replace  (Some tree.v)) l r tree.h\n    else if c < 0 then\n      bal (adjust l x  replace ) k tree.v r\n    else\n      bal l k tree.v (adjust r x  replace )\n\n\nlet rec find_exn (tree : _ Map_gen.t ) x = match tree with \n  | Empty ->\n    raise Not_found\n  | Leaf leaf -> \n    if eq_key x leaf.k then leaf.v else raise Not_found  \n  | Node tree ->\n    let c = compare_key x tree.k in\n    if c = 0 then tree.v\n    else find_exn (if c < 0 then tree.l else tree.r) x\n\nlet rec find_opt (tree : _ Map_gen.t ) x = match tree with \n  | Empty -> None \n  | Leaf leaf -> \n    if eq_key x leaf.k then Some leaf.v else None\n  | Node tree ->\n    let c = compare_key x tree.k in\n    if c = 0 then Some tree.v\n    else find_opt (if c < 0 then tree.l else tree.r) x\n\nlet rec find_default (tree : _ Map_gen.t ) x  default     = match tree with \n  | Empty -> default  \n  | Leaf leaf -> \n    if eq_key x leaf.k then  leaf.v else default\n  | Node tree ->\n    let c = compare_key x tree.k in\n    if c = 0 then tree.v\n    else find_default (if c < 0 then tree.l else tree.r) x default\n\nlet rec mem (tree : _ Map_gen.t )  x= match tree with \n  | Empty ->\n    false\n  | Leaf leaf -> eq_key x leaf.k \n  | Node{l; k ;  r} ->\n    let c = compare_key x k in\n    c = 0 || mem (if c < 0 then l else r) x \n\nlet rec remove (tree : _ Map_gen.t as 'a) x : 'a = match tree with \n  | Empty -> empty\n  | Leaf leaf -> \n    if eq_key x leaf.k then empty \n    else tree\n  | Node{l; k ; v; r} ->\n    let c = compare_key x k in\n    if c = 0 then\n      Map_gen.merge l r\n    else if c < 0 then\n      bal (remove l x) k v r\n    else\n      bal l k v (remove r x )\n\ntype 'a split = \n  | Yes of {l : (key,'a) Map_gen.t; r : (key,'a)Map_gen.t ; v : 'a}\n  | 
No of {l : (key,'a) Map_gen.t; r : (key,'a)Map_gen.t }\n\n\nlet rec split  (tree : (key,'a) Map_gen.t) x : 'a split  = \n  match tree with \n  | Empty ->\n    No {l = empty; r = empty}\n  | Leaf leaf -> \n    let c = compare_key x leaf.k in \n    if c = 0 then Yes {l = empty; v= leaf.v; r = empty} \n    else if c < 0 then No { l = empty; r = tree }\n    else  No { l = tree; r = empty}\n  | Node {l; k ; v ; r} ->\n    let c = compare_key x k in\n    if c = 0 then Yes {l; v; r}\n    else if c < 0 then      \n      match  split l x with \n      | Yes result -> Yes {result with r = Map_gen.join result.r k v r }\n      | No result -> No {result with r = Map_gen.join result.r k v r } \n    else\n      match split r x with \n      | Yes result -> \n        Yes {result with l = Map_gen.join l k v result.l}\n      | No result -> \n        No {result with l = Map_gen.join l k v result.l}\n\n\nlet rec disjoint_merge_exn  \n    (s1 : _ Map_gen.t) \n    (s2  : _ Map_gen.t) \n    fail : _ Map_gen.t =\n  match s1 with\n  | Empty -> s2  \n  | Leaf ({k } as l1)  -> \n    begin match s2 with \n      | Empty -> s1 \n      | Leaf l2 -> \n        let c = compare_key k l2.k in \n        if c = 0 then raise_notrace (fail k l1.v l2.v)\n        else if c < 0 then Map_gen.unsafe_two_elements l1.k l1.v l2.k l2.v\n        else Map_gen.unsafe_two_elements l2.k l2.v k l1.v\n      | Node _ -> \n        adjust s2 k (fun data -> \n            match data with \n            |  None -> l1.v\n            | Some s2v  -> raise_notrace (fail k l1.v s2v)\n          )        \n    end\n  | Node ({k} as xs1) -> \n    if  xs1.h >= height s2 then\n      begin match split s2 k with \n        | No {l; r} -> \n          Map_gen.join \n            (disjoint_merge_exn  xs1.l l fail)\n            k \n            xs1.v \n            (disjoint_merge_exn xs1.r r fail)\n        | Yes { v =  s2v} ->\n          raise_notrace (fail k xs1.v s2v)\n      end        \n    else let [@warning \"-8\"] (Node ({k} as s2) : _ 
Map_gen.t)  = s2 in \n      begin match  split s1 k with \n        | No {l;  r} -> \n          Map_gen.join \n            (disjoint_merge_exn  l s2.l fail) k s2.v \n            (disjoint_merge_exn  r s2.r fail)\n        | Yes { v = s1v} -> \n          raise_notrace (fail k s1v s2.v)\n      end\n\n\n\n\n\n\nlet add_list (xs : _ list ) init = \n  Ext_list.fold_left xs init (fun  acc (k,v) -> add acc k v )\n\nlet of_list xs = add_list xs empty\n\nlet of_array xs = \n  Ext_array.fold_left xs empty (fun acc (k,v) -> add acc k v ) \n"
  },
  {
    "path": "analysis/vendor/ext/map_gen.ml",
    "content": "(***********************************************************************)\n(*                                                                     *)\n(*                                OCaml                                *)\n(*                                                                     *)\n(*            Xavier Leroy, projet Cristal, INRIA Rocquencourt         *)\n(*                                                                     *)\n(*  Copyright 1996 Institut National de Recherche en Informatique et   *)\n(*  en Automatique.  All rights reserved.  This file is distributed    *)\n(*  under the terms of the GNU Library General Public License, with    *)\n(*  the special exception on linking described in file ../LICENSE.     *)\n(*                                                                     *)\n(***********************************************************************)\n\n[@@@warnerror \"+55\"]\n(* adapted from stdlib *)\n\ntype ('key, 'a) t0 =\n  | Empty\n  | Leaf of {k: 'key; v: 'a}\n  | Node of {l: ('key, 'a) t0; k: 'key; v: 'a; r: ('key, 'a) t0; h: int}\n\ntype ('key, 'a) parital_node = {\n  l: ('key, 'a) t0;\n  k: 'key;\n  v: 'a;\n  r: ('key, 'a) t0;\n  h: int;\n}\n\nexternal ( ~! 
) : ('key, 'a) t0 -> ('key, 'a) parital_node = \"%identity\"\n\nlet empty = Empty\n\nlet rec map x f =\n  match x with\n  | Empty -> Empty\n  | Leaf {k; v} -> Leaf {k; v = f v}\n  | Node ({l; v; r} as x) ->\n    let l' = map l f in\n    let d' = f v in\n    let r' = map r f in\n    Node {x with l = l'; v = d'; r = r'}\n\nlet rec mapi x f =\n  match x with\n  | Empty -> Empty\n  | Leaf {k; v} -> Leaf {k; v = f k v}\n  | Node ({l; k; v; r} as x) ->\n    let l' = mapi l f in\n    let v' = f k v in\n    let r' = mapi r f in\n    Node {x with l = l'; v = v'; r = r'}\n\nlet[@inline] calc_height a b = (if a >= b then a else b) + 1\n\nlet[@inline] singleton k v = Leaf {k; v}\n\nlet[@inline] height = function\n  | Empty -> 0\n  | Leaf _ -> 1\n  | Node {h} -> h\n\nlet[@inline] unsafe_node k v l r h = Node {l; k; v; r; h}\n\nlet[@inline] unsafe_two_elements k1 v1 k2 v2 =\n  unsafe_node k2 v2 (singleton k1 v1) empty 2\n\nlet[@inline] unsafe_node_maybe_leaf k v l r h =\n  if h = 1 then Leaf {k; v} else Node {l; k; v; r; h}\n\ntype ('key, +'a) t = ('key, 'a) t0 = private\n  | Empty\n  | Leaf of {k: 'key; v: 'a}\n  | Node of {l: ('key, 'a) t; k: 'key; v: 'a; r: ('key, 'a) t; h: int}\n\nlet rec cardinal_aux acc = function\n  | Empty -> acc\n  | Leaf _ -> acc + 1\n  | Node {l; r} -> cardinal_aux (cardinal_aux (acc + 1) r) l\n\nlet cardinal s = cardinal_aux 0 s\n\nlet rec bindings_aux accu = function\n  | Empty -> accu\n  | Leaf {k; v} -> (k, v) :: accu\n  | Node {l; k; v; r} -> bindings_aux ((k, v) :: bindings_aux accu r) l\n\nlet bindings s = bindings_aux [] s\n\nlet rec fill_array_with_f (s : _ t) i arr f : int =\n  match s with\n  | Empty -> i\n  | Leaf {k; v} ->\n    Array.unsafe_set arr i (f k v);\n    i + 1\n  | Node {l; k; v; r} ->\n    let inext = fill_array_with_f l i arr f in\n    Array.unsafe_set arr inext (f k v);\n    fill_array_with_f r (inext + 1) arr f\n\nlet rec fill_array_aux (s : _ t) i arr : int =\n  match s with\n  | Empty -> i\n  | Leaf {k; v} ->\n    
Array.unsafe_set arr i (k, v);\n    i + 1\n  | Node {l; k; v; r} ->\n    let inext = fill_array_aux l i arr in\n    Array.unsafe_set arr inext (k, v);\n    fill_array_aux r (inext + 1) arr\n\nlet to_sorted_array (s : ('key, 'a) t) : ('key * 'a) array =\n  match s with\n  | Empty -> [||]\n  | Leaf {k; v} -> [|(k, v)|]\n  | Node {l; k; v; r} ->\n    let len = cardinal_aux (cardinal_aux 1 r) l in\n    let arr = Array.make len (k, v) in\n    ignore (fill_array_aux s 0 arr : int);\n    arr\n\nlet to_sorted_array_with_f (type key a b) (s : (key, a) t) (f : key -> a -> b) :\n    b array =\n  match s with\n  | Empty -> [||]\n  | Leaf {k; v} -> [|f k v|]\n  | Node {l; k; v; r} ->\n    let len = cardinal_aux (cardinal_aux 1 r) l in\n    let arr = Array.make len (f k v) in\n    ignore (fill_array_with_f s 0 arr f : int);\n    arr\n\nlet rec keys_aux accu = function\n  | Empty -> accu\n  | Leaf {k} -> k :: accu\n  | Node {l; k; r} -> keys_aux (k :: keys_aux accu r) l\n\nlet keys s = keys_aux [] s\n\nlet bal l x d r =\n  let hl = height l in\n  let hr = height r in\n  if hl > hr + 2 then\n    let {l = ll; r = lr; v = lv; k = lk; h = _} = ~!l in\n    let hll = height ll in\n    let hlr = height lr in\n    if hll >= hlr then\n      let hnode = calc_height hlr hr in\n      unsafe_node lk lv ll\n        (unsafe_node_maybe_leaf x d lr r hnode)\n        (calc_height hll hnode)\n    else\n      let {l = lrl; r = lrr; k = lrk; v = lrv} = ~!lr in\n      let hlrl = height lrl in\n      let hlrr = height lrr in\n      let hlnode = calc_height hll hlrl in\n      let hrnode = calc_height hlrr hr in\n      unsafe_node lrk lrv\n        (unsafe_node_maybe_leaf lk lv ll lrl hlnode)\n        (unsafe_node_maybe_leaf x d lrr r hrnode)\n        (calc_height hlnode hrnode)\n  else if hr > hl + 2 then\n    let {l = rl; r = rr; k = rk; v = rv} = ~!r in\n    let hrr = height rr in\n    let hrl = height rl in\n    if hrr >= hrl then\n      let hnode = calc_height hl hrl in\n      unsafe_node rk rv\n     
   (unsafe_node_maybe_leaf x d l rl hnode)\n        rr (calc_height hnode hrr)\n    else\n      let {l = rll; r = rlr; k = rlk; v = rlv} = ~!rl in\n      let hrll = height rll in\n      let hrlr = height rlr in\n      let hlnode = calc_height hl hrll in\n      let hrnode = calc_height hrlr hrr in\n      unsafe_node rlk rlv\n        (unsafe_node_maybe_leaf x d l rll hlnode)\n        (unsafe_node_maybe_leaf rk rv rlr rr hrnode)\n        (calc_height hlnode hrnode)\n  else unsafe_node_maybe_leaf x d l r (calc_height hl hr)\n\nlet[@inline] is_empty = function\n  | Empty -> true\n  | _ -> false\n\nlet rec min_binding_exn = function\n  | Empty -> raise Not_found\n  | Leaf {k; v} -> (k, v)\n  | Node {l; k; v} -> (\n    match l with\n    | Empty -> (k, v)\n    | Leaf _ | Node _ -> min_binding_exn l)\n\nlet rec remove_min_binding = function\n  | Empty -> invalid_arg \"Map.remove_min_elt\"\n  | Leaf _ -> empty\n  | Node {l = Empty; r} -> r\n  | Node {l; k; v; r} -> bal (remove_min_binding l) k v r\n\nlet merge t1 t2 =\n  match (t1, t2) with\n  | Empty, t -> t\n  | t, Empty -> t\n  | _, _ ->\n    let x, d = min_binding_exn t2 in\n    bal t1 x d (remove_min_binding t2)\n\nlet rec iter x f =\n  match x with\n  | Empty -> ()\n  | Leaf {k; v} -> (f k v : unit)\n  | Node {l; k; v; r} ->\n    iter l f;\n    f k v;\n    iter r f\n\nlet rec fold m accu f =\n  match m with\n  | Empty -> accu\n  | Leaf {k; v} -> f k v accu\n  | Node {l; k; v; r} -> fold r (f k v (fold l accu f)) f\n\nlet rec for_all x p =\n  match x with\n  | Empty -> true\n  | Leaf {k; v} -> p k v\n  | Node {l; k; v; r} -> p k v && for_all l p && for_all r p\n\nlet rec exists x p =\n  match x with\n  | Empty -> false\n  | Leaf {k; v} -> p k v\n  | Node {l; k; v; r} -> p k v || exists l p || exists r p\n\n(* Beware: those two functions assume that the added k is *strictly*\n   smaller (or bigger) than all the present keys in the tree; it\n   does not test for equality with the current min (or max) key.\n\n   Indeed, 
they are only used during the \"join\" operation which\n   respects this precondition.\n*)\n\nlet rec add_min k v = function\n  | Empty -> singleton k v\n  | Leaf l -> unsafe_two_elements k v l.k l.v\n  | Node tree -> bal (add_min k v tree.l) tree.k tree.v tree.r\n\nlet rec add_max k v = function\n  | Empty -> singleton k v\n  | Leaf l -> unsafe_two_elements l.k l.v k v\n  | Node tree -> bal tree.l tree.k tree.v (add_max k v tree.r)\n\n(* Same as create and bal, but no assumptions are made on the\n   relative heights of l and r. *)\n\nlet rec join l v d r =\n  match l with\n  | Empty -> add_min v d r\n  | Leaf leaf -> add_min leaf.k leaf.v (add_min v d r)\n  | Node xl -> (\n    match r with\n    | Empty -> add_max v d l\n    | Leaf leaf -> add_max leaf.k leaf.v (add_max v d l)\n    | Node xr ->\n      let lh = xl.h in\n      let rh = xr.h in\n      if lh > rh + 2 then bal xl.l xl.k xl.v (join xl.r v d r)\n      else if rh > lh + 2 then bal (join l v d xr.l) xr.k xr.v xr.r\n      else unsafe_node v d l r (calc_height lh rh))\n\n(* Merge two trees l and r into one.\n   All elements of l must precede the elements of r.\n   No assumption on the heights of l and r. *)\n\nlet concat t1 t2 =\n  match (t1, t2) with\n  | Empty, t -> t\n  | t, Empty -> t\n  | _, _ ->\n    let x, d = min_binding_exn t2 in\n    join t1 x d (remove_min_binding t2)\n\nlet concat_or_join t1 v d t2 =\n  match d with\n  | Some d -> join t1 v d t2\n  | None -> concat t1 t2\n\nmodule type S = sig\n  type key\n\n  type +'a t\n\n  val empty : 'a t\n\n  val compare_key : key -> key -> int\n\n  val is_empty : 'a t -> bool\n\n  val mem : 'a t -> key -> bool\n\n  val to_sorted_array : 'a t -> (key * 'a) array\n\n  val to_sorted_array_with_f : 'a t -> (key -> 'a -> 'b) -> 'b array\n\n  val add : 'a t -> key -> 'a -> 'a t\n  (** [add x y m] If [x] was already bound in [m], its previous binding\n      disappears. 
*)\n\n  val adjust : 'a t -> key -> ('a option -> 'a) -> 'a t\n  (** [adjust acc k replace ] if not exist [add (replace None ], otherwise\n      [add k v (replace (Some old))] *)\n\n  val singleton : key -> 'a -> 'a t\n\n  val remove : 'a t -> key -> 'a t\n  (** [remove x m] returns a map containing the same bindings as [m], except for\n      [x] which is unbound in the returned map. *)\n\n  (* val merge:\n       'a t -> 'b t ->\n       (key -> 'a option -> 'b option -> 'c option) ->  'c t *)\n  (** [merge f m1 m2] computes a map whose keys is a subset of keys of [m1] and\n      of [m2]. The presence of each such binding, and the corresponding value,\n      is determined with the function [f].\n      @since 3.12.0 *)\n\n  val disjoint_merge_exn : 'a t -> 'a t -> (key -> 'a -> 'a -> exn) -> 'a t\n  (* merge two maps, will raise if they have the same key *)\n\n  val iter : 'a t -> (key -> 'a -> unit) -> unit\n  (** [iter f m] applies [f] to all bindings in map [m]. The bindings are passed\n      to [f] in increasing order. *)\n\n  val fold : 'a t -> 'b -> (key -> 'a -> 'b -> 'b) -> 'b\n  (** [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)], where [k1 ... kN]\n      are the keys of all bindings in [m] (in increasing order) *)\n\n  val for_all : 'a t -> (key -> 'a -> bool) -> bool\n  (** [for_all p m] checks if all the bindings of the map. order unspecified *)\n\n  val exists : 'a t -> (key -> 'a -> bool) -> bool\n  (** [exists p m] checks if at least one binding of the map satisfy the\n      predicate [p]. order unspecified *)\n\n  (* val filter: 'a t -> (key -> 'a -> bool) -> 'a t *)\n  (** [filter p m] returns the map with all the bindings in [m] that satisfy\n      predicate [p]. 
order unspecified *)\n\n  (* val partition: 'a t -> (key -> 'a -> bool) ->  'a t * 'a t *)\n  (** [partition p m] returns a pair of maps [(m1, m2)], where [m1] contains all\n      the bindings of [s] that satisfy the predicate [p], and [m2] is the map\n      with all the bindings of [s] that do not satisfy [p]. *)\n\n  val cardinal : 'a t -> int\n  (** Return the number of bindings of a map. *)\n\n  val bindings : 'a t -> (key * 'a) list\n  (** Return the list of all bindings of the given map. The returned list is\n      sorted in increasing order with respect to the ordering *)\n\n  val keys : 'a t -> key list\n  (* Increasing order *)\n\n  (* val split: 'a t -> key -> 'a t * 'a option * 'a t *)\n  (** [split x m] returns a triple [(l, data, r)], where [l] is the map with all\n      the bindings of [m] whose key is strictly less than [x]; [r] is the map\n      with all the bindings of [m] whose key is strictly greater than [x];\n      [data] is [None] if [m] contains no binding for [x], or [Some v] if [m]\n      binds [v] to [x].\n      @since 3.12.0 *)\n\n  val find_exn : 'a t -> key -> 'a\n  (** [find x m] returns the current binding of [x] in [m], or raises\n      [Not_found] if no such binding exists. *)\n\n  val find_opt : 'a t -> key -> 'a option\n\n  val find_default : 'a t -> key -> 'a -> 'a\n\n  val map : 'a t -> ('a -> 'b) -> 'b t\n  (** [map f m] returns a map with same domain as [m], where the associated\n      value [a] of all bindings of [m] has been replaced by the result of the\n      application of [f] to [a]. The bindings are passed to [f] in increasing\n      order with respect to the ordering over the type of the keys. *)\n\n  val mapi : 'a t -> (key -> 'a -> 'b) -> 'b t\n  (** Same as {!Map.S.map}, but the function receives as arguments both the key\n      and the associated value for each binding of the map. 
*)\n\n  val of_list : (key * 'a) list -> 'a t\n\n  val of_array : (key * 'a) array -> 'a t\n\n  val add_list : (key * 'b) list -> 'b t -> 'b t\nend\n"
  },
  {
    "path": "analysis/vendor/ext/map_gen.mli",
    "content": "type ('key, +'a) t = private\n  | Empty\n  | Leaf of {k: 'key; v: 'a}\n  | Node of {l: ('key, 'a) t; k: 'key; v: 'a; r: ('key, 'a) t; h: int}\n\nval cardinal : ('a, 'b) t -> int\n\nval bindings : ('a, 'b) t -> ('a * 'b) list\n\nval fill_array_with_f : ('a, 'b) t -> int -> 'c array -> ('a -> 'b -> 'c) -> int\n\nval fill_array_aux : ('a, 'b) t -> int -> ('a * 'b) array -> int\n\nval to_sorted_array : ('key, 'a) t -> ('key * 'a) array\n\nval to_sorted_array_with_f : ('a, 'b) t -> ('a -> 'b -> 'c) -> 'c array\n\nval keys : ('a, 'b) t -> 'a list\n\nval height : ('a, 'b) t -> int\n\nval singleton : 'a -> 'b -> ('a, 'b) t\n\nval unsafe_node : 'a -> 'b -> ('a, 'b) t -> ('a, 'b) t -> int -> ('a, 'b) t\n\nval unsafe_two_elements : 'a -> 'b -> 'a -> 'b -> ('a, 'b) t\n(** smaller comes first *)\n\nval bal : ('a, 'b) t -> 'a -> 'b -> ('a, 'b) t -> ('a, 'b) t\n\nval empty : ('a, 'b) t\n\nval is_empty : ('a, 'b) t -> bool\n\nval merge : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t\n\nval iter : ('a, 'b) t -> ('a -> 'b -> unit) -> unit\n\nval map : ('a, 'b) t -> ('b -> 'c) -> ('a, 'c) t\n\nval mapi : ('a, 'b) t -> ('a -> 'b -> 'c) -> ('a, 'c) t\n\nval fold : ('a, 'b) t -> 'c -> ('a -> 'b -> 'c -> 'c) -> 'c\n\nval for_all : ('a, 'b) t -> ('a -> 'b -> bool) -> bool\n\nval exists : ('a, 'b) t -> ('a -> 'b -> bool) -> bool\n\nval join : ('a, 'b) t -> 'a -> 'b -> ('a, 'b) t -> ('a, 'b) t\n\nval concat : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t\n\nval concat_or_join : ('a, 'b) t -> 'a -> 'b option -> ('a, 'b) t -> ('a, 'b) t\n\nmodule type S = sig\n  type key\n\n  type +'a t\n\n  val empty : 'a t\n\n  val compare_key : key -> key -> int\n\n  val is_empty : 'a t -> bool\n\n  val mem : 'a t -> key -> bool\n\n  val to_sorted_array : 'a t -> (key * 'a) array\n\n  val to_sorted_array_with_f : 'a t -> (key -> 'a -> 'b) -> 'b array\n\n  val add : 'a t -> key -> 'a -> 'a t\n\n  val adjust : 'a t -> key -> ('a option -> 'a) -> 'a t\n\n  val singleton : key -> 'a -> 'a t\n\n  val remove : 
'a t -> key -> 'a t\n\n  (* val merge :\n     'a t -> 'b t -> (key -> 'a option -> 'b option -> 'c option) -> 'c t *)\n  val disjoint_merge_exn : 'a t -> 'a t -> (key -> 'a -> 'a -> exn) -> 'a t\n\n  val iter : 'a t -> (key -> 'a -> unit) -> unit\n\n  val fold : 'a t -> 'b -> (key -> 'a -> 'b -> 'b) -> 'b\n\n  val for_all : 'a t -> (key -> 'a -> bool) -> bool\n\n  val exists : 'a t -> (key -> 'a -> bool) -> bool\n\n  (* val filter : 'a t -> (key -> 'a -> bool) -> 'a t *)\n  (* val partition : 'a t -> (key -> 'a -> bool) -> 'a t * 'a t *)\n  val cardinal : 'a t -> int\n\n  val bindings : 'a t -> (key * 'a) list\n\n  val keys : 'a t -> key list\n  (* val choose : 'a t -> key * 'a *)\n\n  val find_exn : 'a t -> key -> 'a\n\n  val find_opt : 'a t -> key -> 'a option\n\n  val find_default : 'a t -> key -> 'a -> 'a\n\n  val map : 'a t -> ('a -> 'b) -> 'b t\n\n  val mapi : 'a t -> (key -> 'a -> 'b) -> 'b t\n\n  val of_list : (key * 'a) list -> 'a t\n\n  val of_array : (key * 'a) array -> 'a t\n\n  val add_list : (key * 'b) list -> 'b t -> 'b t\nend\n"
  },
  {
    "path": "analysis/vendor/ext/map_ident.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Map_gen.S with type key = Ident.t\n"
  },
  {
    "path": "analysis/vendor/ext/map_int.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Map_gen.S with type key = int\n"
  },
  {
    "path": "analysis/vendor/ext/map_string.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Map_gen.S with type key = string\n"
  },
  {
    "path": "analysis/vendor/ext/misc.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Errors *)\n\nexception Fatal_error\n\nlet fatal_error msg =\n  prerr_string \">> Fatal error: \";\n  prerr_endline msg;\n  raise Fatal_error\n\nlet fatal_errorf fmt = Format.kasprintf fatal_error fmt\n\n(* Exceptions *)\n\nlet try_finally work cleanup =\n  let result =\n    try work ()\n    with e ->\n      cleanup ();\n      raise e\n  in\n  cleanup ();\n  result\n\ntype ref_and_value = R : 'a ref * 'a -> ref_and_value\n\nlet protect_refs =\n  let set_refs l = List.iter (fun (R (r, v)) -> r := v) l in\n  fun refs f ->\n    let backup = List.map (fun (R (r, _)) -> R (r, !r)) refs in\n    set_refs refs;\n    match f () with\n    | x ->\n      set_refs backup;\n      x\n    | exception e ->\n      set_refs backup;\n      raise e\n\n(* List functions *)\n\nlet rec map_end f l1 l2 =\n  match l1 with\n  | [] -> l2\n  | hd :: tl -> f hd :: map_end f tl l2\n\nlet rec 
map_left_right f = function\n  | [] -> []\n  | hd :: tl ->\n    let res = f hd in\n    res :: map_left_right f tl\n\nlet rec for_all2 pred l1 l2 =\n  match (l1, l2) with\n  | [], [] -> true\n  | hd1 :: tl1, hd2 :: tl2 -> pred hd1 hd2 && for_all2 pred tl1 tl2\n  | _, _ -> false\n\nlet rec replicate_list elem n =\n  if n <= 0 then [] else elem :: replicate_list elem (n - 1)\n\nlet rec list_remove x = function\n  | [] -> []\n  | hd :: tl -> if hd = x then tl else hd :: list_remove x tl\n\nlet rec split_last = function\n  | [] -> assert false\n  | [x] -> ([], x)\n  | hd :: tl ->\n    let lst, last = split_last tl in\n    (hd :: lst, last)\n\nmodule Stdlib = struct\n  module List = struct\n    type 'a t = 'a list\n\n    let rec compare cmp l1 l2 =\n      match (l1, l2) with\n      | [], [] -> 0\n      | [], _ :: _ -> -1\n      | _ :: _, [] -> 1\n      | h1 :: t1, h2 :: t2 ->\n        let c = cmp h1 h2 in\n        if c <> 0 then c else compare cmp t1 t2\n\n    let rec equal eq l1 l2 =\n      match (l1, l2) with\n      | [], [] -> true\n      | hd1 :: tl1, hd2 :: tl2 -> eq hd1 hd2 && equal eq tl1 tl2\n      | _, _ -> false\n\n    let filter_map f l =\n      let rec aux acc l =\n        match l with\n        | [] -> List.rev acc\n        | h :: t -> (\n          match f h with\n          | None -> aux acc t\n          | Some v -> aux (v :: acc) t)\n      in\n      aux [] l\n\n    let map2_prefix f l1 l2 =\n      let rec aux acc l1 l2 =\n        match (l1, l2) with\n        | [], _ -> (List.rev acc, l2)\n        | _ :: _, [] -> raise (Invalid_argument \"map2_prefix\")\n        | h1 :: t1, h2 :: t2 ->\n          let h = f h1 h2 in\n          aux (h :: acc) t1 t2\n      in\n      aux [] l1 l2\n\n    let some_if_all_elements_are_some l =\n      let rec aux acc l =\n        match l with\n        | [] -> Some (List.rev acc)\n        | None :: _ -> None\n        | Some h :: t -> aux (h :: acc) t\n      in\n      aux [] l\n\n    let split_at n l =\n      let rec aux n acc l =\n    
    if n = 0 then (List.rev acc, l)\n        else\n          match l with\n          | [] -> raise (Invalid_argument \"split_at\")\n          | t :: q -> aux (n - 1) (t :: acc) q\n      in\n      aux n [] l\n  end\n\n  module Option = struct\n    type 'a t = 'a option\n\n    let equal eq o1 o2 =\n      match (o1, o2) with\n      | None, None -> true\n      | Some e1, Some e2 -> eq e1 e2\n      | _, _ -> false\n\n    let iter f = function\n      | Some x -> f x\n      | None -> ()\n\n    let map f = function\n      | Some x -> Some (f x)\n      | None -> None\n\n    let fold f a b =\n      match a with\n      | None -> b\n      | Some a -> f a b\n\n    let value_default f ~default a =\n      match a with\n      | None -> default\n      | Some a -> f a\n  end\n\n  module Array = struct\n    let exists2 p a1 a2 =\n      let n = Array.length a1 in\n      if Array.length a2 <> n then invalid_arg \"Misc.Stdlib.Array.exists2\";\n      let rec loop i =\n        if i = n then false\n        else if p (Array.unsafe_get a1 i) (Array.unsafe_get a2 i) then true\n        else loop (succ i)\n      in\n      loop 0\n  end\nend\n\nlet may = Stdlib.Option.iter\nlet may_map = Stdlib.Option.map\n\n(* File functions *)\n\nlet find_in_path path name =\n  if not (Filename.is_implicit name) then\n    if Sys.file_exists name then name else raise Not_found\n  else\n    let rec try_dir = function\n      | [] -> raise Not_found\n      | dir :: rem ->\n        let fullname = Filename.concat dir name in\n        if Sys.file_exists fullname then fullname else try_dir rem\n    in\n    try_dir path\n\nlet find_in_path_rel path name =\n  let rec simplify s =\n    let open Filename in\n    let base = basename s in\n    let dir = dirname s in\n    if dir = s then dir\n    else if base = current_dir_name then simplify dir\n    else concat (simplify dir) base\n  in\n  let rec try_dir = function\n    | [] -> raise Not_found\n    | dir :: rem ->\n      let fullname = simplify (Filename.concat dir name) 
in\n      if Sys.file_exists fullname then fullname else try_dir rem\n  in\n  try_dir path\n\nlet find_in_path_uncap path name =\n  let uname = String.uncapitalize_ascii name in\n  let rec try_dir = function\n    | [] -> raise Not_found\n    | dir :: rem ->\n      let fullname = Filename.concat dir name\n      and ufullname = Filename.concat dir uname in\n      if Sys.file_exists ufullname then ufullname\n      else if Sys.file_exists fullname then fullname\n      else try_dir rem\n  in\n  try_dir path\n\nlet remove_file filename =\n  try if Sys.file_exists filename then Sys.remove filename\n  with Sys_error _msg -> ()\n\n(* Expand a -I option: if it starts with +, make it relative to the standard\n   library directory *)\n\nlet expand_directory alt s =\n  if String.length s > 0 && s.[0] = '+' then\n    Filename.concat alt (String.sub s 1 (String.length s - 1))\n  else s\n\n(* Hashtable functions *)\n\nlet create_hashtable size init =\n  let tbl = Hashtbl.create size in\n  List.iter (fun (key, data) -> Hashtbl.add tbl key data) init;\n  tbl\n\n(* File copy *)\n\nlet copy_file ic oc =\n  let buff = Bytes.create 0x1000 in\n  let rec copy () =\n    let n = input ic buff 0 0x1000 in\n    if n = 0 then ()\n    else (\n      output oc buff 0 n;\n      copy ())\n  in\n  copy ()\n\nlet copy_file_chunk ic oc len =\n  let buff = Bytes.create 0x1000 in\n  let rec copy n =\n    if n <= 0 then ()\n    else\n      let r = input ic buff 0 (min n 0x1000) in\n      if r = 0 then raise End_of_file\n      else (\n        output oc buff 0 r;\n        copy (n - r))\n  in\n  copy len\n\nlet string_of_file ic =\n  let b = Buffer.create 0x10000 in\n  let buff = Bytes.create 0x1000 in\n  let rec copy () =\n    let n = input ic buff 0 0x1000 in\n    if n = 0 then Buffer.contents b\n    else (\n      Buffer.add_subbytes b buff 0 n;\n      copy ())\n  in\n  copy ()\n\nlet output_to_bin_file_directly filename fn =\n  let oc = open_out_bin filename in\n  match fn filename oc with\n  | v ->\n    
close_out oc;\n    v\n  | exception e ->\n    close_out oc;\n    raise e\n\nlet output_to_file_via_temporary ?(mode = [Open_text]) filename fn =\n  let temp_filename, oc =\n    Filename.open_temp_file ~mode ~perms:0o666\n      ~temp_dir:(Filename.dirname filename)\n      (Filename.basename filename)\n      \".tmp\"\n  in\n  (* The 0o666 permissions will be modified by the umask.  It's just\n     like what [open_out] and [open_out_bin] do.\n     With temp_dir = dirname filename, we ensure that the returned\n     temp file is in the same directory as filename itself, making\n     it safe to rename temp_filename to filename later.\n     With prefix = basename filename, we are almost certain that\n     the first generated name will be unique.  A fixed prefix\n     would work too but might generate more collisions if many\n     files are being produced simultaneously in the same directory. *)\n  match fn temp_filename oc with\n  | res -> (\n    close_out oc;\n    try\n      Sys.rename temp_filename filename;\n      res\n    with exn ->\n      remove_file temp_filename;\n      raise exn)\n  | exception exn ->\n    close_out oc;\n    remove_file temp_filename;\n    raise exn\n\n(* Integer operations *)\n\nlet rec log2 n = if n <= 1 then 0 else 1 + log2 (n asr 1)\n\nlet align n a = if n >= 0 then (n + a - 1) land -a else n land -a\n\nlet no_overflow_add a b = a lxor b lor (a lxor lnot (a + b)) < 0\n\nlet no_overflow_sub a b = a lxor lnot b lor (b lxor (a - b)) < 0\n\nlet no_overflow_mul a b = b <> 0 && a * b / b = a\n\nlet no_overflow_lsl a k =\n  0 <= k && k < Sys.word_size && min_int asr k <= a && a <= max_int asr k\n\nmodule Int_literal_converter = struct\n  (* To convert integer literals, allowing max_int + 1 (PR#4210) *)\n  let cvt_int_aux str neg of_string =\n    if String.length str = 0 || str.[0] = '-' then of_string str\n    else neg (of_string (\"-\" ^ str))\n  let int s = cvt_int_aux s ( ~- ) int_of_string\n  let int32 s = cvt_int_aux s Int32.neg 
Int32.of_string\n  let int64 s = cvt_int_aux s Int64.neg Int64.of_string\nend\n\n(* String operations *)\n\nlet chop_extensions file =\n  let dirname = Filename.dirname file and basename = Filename.basename file in\n  try\n    let pos = String.index basename '.' in\n    let basename = String.sub basename 0 pos in\n    if Filename.is_implicit file && dirname = Filename.current_dir_name then\n      basename\n    else Filename.concat dirname basename\n  with Not_found -> file\n\nlet search_substring pat str start =\n  let rec search i j =\n    if j >= String.length pat then i\n    else if i + j >= String.length str then raise Not_found\n    else if str.[i + j] = pat.[j] then search i (j + 1)\n    else search (i + 1) 0\n  in\n  search start 0\n\nlet replace_substring ~before ~after str =\n  let rec search acc curr =\n    match search_substring before str curr with\n    | next ->\n      let prefix = String.sub str curr (next - curr) in\n      search (prefix :: acc) (next + String.length before)\n    | exception Not_found ->\n      let suffix = String.sub str curr (String.length str - curr) in\n      List.rev (suffix :: acc)\n  in\n  String.concat after (search [] 0)\n\nlet rev_split_words s =\n  let rec split1 res i =\n    if i >= String.length s then res\n    else\n      match s.[i] with\n      | ' ' | '\\t' | '\\r' | '\\n' -> split1 res (i + 1)\n      | _ -> split2 res i (i + 1)\n  and split2 res i j =\n    if j >= String.length s then String.sub s i (j - i) :: res\n    else\n      match s.[j] with\n      | ' ' | '\\t' | '\\r' | '\\n' ->\n        split1 (String.sub s i (j - i) :: res) (j + 1)\n      | _ -> split2 res i (j + 1)\n  in\n  split1 [] 0\n\nlet get_ref r =\n  let v = !r in\n  r := [];\n  v\n\nlet fst3 (x, _, _) = x\nlet snd3 (_, x, _) = x\nlet thd3 (_, _, x) = x\n\nlet fst4 (x, _, _, _) = x\nlet snd4 (_, x, _, _) = x\nlet thd4 (_, _, x, _) = x\nlet for4 (_, _, _, x) = x\n\nmodule LongString = struct\n  type t = bytes array\n\n  let create str_size =\n    let 
tbl_size = (str_size / Sys.max_string_length) + 1 in\n    let tbl = Array.make tbl_size Bytes.empty in\n    for i = 0 to tbl_size - 2 do\n      tbl.(i) <- Bytes.create Sys.max_string_length\n    done;\n    tbl.(tbl_size - 1) <- Bytes.create (str_size mod Sys.max_string_length);\n    tbl\n\n  let length tbl =\n    let tbl_size = Array.length tbl in\n    (Sys.max_string_length * (tbl_size - 1)) + Bytes.length tbl.(tbl_size - 1)\n\n  let get tbl ind =\n    Bytes.get tbl.(ind / Sys.max_string_length) (ind mod Sys.max_string_length)\n\n  let set tbl ind c =\n    Bytes.set\n      tbl.(ind / Sys.max_string_length)\n      (ind mod Sys.max_string_length)\n      c\n\n  let blit src srcoff dst dstoff len =\n    for i = 0 to len - 1 do\n      set dst (dstoff + i) (get src (srcoff + i))\n    done\n\n  let output oc tbl pos len =\n    for i = pos to pos + len - 1 do\n      output_char oc (get tbl i)\n    done\n\n  let unsafe_blit_to_bytes src srcoff dst dstoff len =\n    for i = 0 to len - 1 do\n      Bytes.unsafe_set dst (dstoff + i) (get src (srcoff + i))\n    done\n\n  let input_bytes ic len =\n    let tbl = create len in\n    Array.iter (fun str -> really_input ic str 0 (Bytes.length str)) tbl;\n    tbl\nend\n\nlet edit_distance a b cutoff =\n  let la, lb = (String.length a, String.length b) in\n  let cutoff =\n    (* using max_int for cutoff would cause overflows in (i + cutoff + 1);\n       we bring it back to the (max la lb) worstcase *)\n    min (max la lb) cutoff\n  in\n  if abs (la - lb) > cutoff then None\n  else\n    (* initialize with 'cutoff + 1' so that not-yet-written-to cases have\n       the worst possible cost; this is useful when computing the cost of\n       a case just at the boundary of the cutoff diagonal. 
*)\n    let m = Array.make_matrix (la + 1) (lb + 1) (cutoff + 1) in\n    m.(0).(0) <- 0;\n    for i = 1 to la do\n      m.(i).(0) <- i\n    done;\n    for j = 1 to lb do\n      m.(0).(j) <- j\n    done;\n    for i = 1 to la do\n      for j = max 1 (i - cutoff - 1) to min lb (i + cutoff + 1) do\n        let cost = if a.[i - 1] = b.[j - 1] then 0 else 1 in\n        let best =\n          (* insert, delete or substitute *)\n          min (1 + min m.(i - 1).(j) m.(i).(j - 1)) (m.(i - 1).(j - 1) + cost)\n        in\n        let best =\n          (* swap two adjacent letters; we use \"cost\" again in case of\n             a swap between two identical letters; this is slightly\n             redundant as this is a double-substitution case, but it\n             was done this way in most online implementations and\n             imitation has its virtues *)\n          if\n            not\n              (i > 1 && j > 1 && a.[i - 1] = b.[j - 2] && a.[i - 2] = b.[j - 1])\n          then best\n          else min best (m.(i - 2).(j - 2) + cost)\n        in\n        m.(i).(j) <- best\n      done\n    done;\n    let result = m.(la).(lb) in\n    if result > cutoff then None else Some result\n\nlet spellcheck env name =\n  let cutoff =\n    match String.length name with\n    | 1 | 2 -> 0\n    | 3 | 4 -> 1\n    | 5 | 6 -> 2\n    | _ -> 3\n  in\n  let compare target acc head =\n    match edit_distance target head cutoff with\n    | None -> acc\n    | Some dist ->\n      let best_choice, best_dist = acc in\n      if dist < best_dist then ([head], dist)\n      else if dist = best_dist then (head :: best_choice, dist)\n      else acc\n  in\n  fst (List.fold_left (compare name) ([], max_int) env)\n\nlet did_you_mean ppf get_choices =\n  (* flush now to get the error report early, in the (unheard of) case\n     where the search in the get_choices function would take a bit of\n     time; in the worst case, the user has seen the error, she can\n     interrupt the process before the 
spell-checking terminates. *)\n  Format.fprintf ppf \"@?\";\n  match get_choices () with\n  | [] -> ()\n  | choices ->\n    let rest, last = split_last choices in\n    Format.fprintf ppf \"@\\nHint: Did you mean %s%s%s?@?\"\n      (String.concat \", \" rest)\n      (if rest = [] then \"\" else \" or \")\n      last\n\nlet cut_at s c =\n  let pos = String.index s c in\n  (String.sub s 0 pos, String.sub s (pos + 1) (String.length s - pos - 1))\n\nmodule StringSet = Set.Make (struct\n  type t = string\n  let compare = compare\nend)\nmodule StringMap = Map.Make (struct\n  type t = string\n  let compare = compare\nend)\n\n(* Color handling *)\nmodule Color = struct\n  (* use ANSI color codes, see https://en.wikipedia.org/wiki/ANSI_escape_code *)\n  type color = Black | Red | Green | Yellow | Blue | Magenta | Cyan | White\n\n  type style =\n    | FG of color (* foreground *)\n    | BG of color (* background *)\n    | Bold\n    | Reset\n    | Dim\n\n  let ansi_of_color = function\n    | Black -> \"0\"\n    | Red -> \"1\"\n    | Green -> \"2\"\n    | Yellow -> \"3\"\n    | Blue -> \"4\"\n    | Magenta -> \"5\"\n    | Cyan -> \"6\"\n    | White -> \"7\"\n\n  let code_of_style = function\n    | FG c -> \"3\" ^ ansi_of_color c\n    | BG c -> \"4\" ^ ansi_of_color c\n    | Bold -> \"1\"\n    | Reset -> \"0\"\n    | Dim -> \"2\"\n\n  let ansi_of_style_l l =\n    let s =\n      match l with\n      | [] -> code_of_style Reset\n      | [s] -> code_of_style s\n      | _ -> String.concat \";\" (List.map code_of_style l)\n    in\n    \"\\x1b[\" ^ s ^ \"m\"\n\n  type styles = {error: style list; warning: style list; loc: style list}\n\n  let default_styles =\n    {warning = [Bold; FG Magenta]; error = [Bold; FG Red]; loc = [Bold]}\n\n  let cur_styles = ref default_styles\n  let get_styles () = !cur_styles\n  let set_styles s = cur_styles := s\n\n  (* map a tag to a style, if the tag is known.\n     @raise Not_found otherwise *)\n  let style_of_tag s =\n    match s with\n    | 
Format.String_tag \"error\" -> !cur_styles.error\n    | Format.String_tag \"warning\" -> !cur_styles.warning\n    | Format.String_tag \"loc\" -> !cur_styles.loc\n    | Format.String_tag \"info\" -> [Bold; FG Yellow]\n    | Format.String_tag \"dim\" -> [Dim]\n    | Format.String_tag \"filename\" -> [FG Cyan]\n    | _ -> raise Not_found\n\n  let color_enabled = ref true\n\n  (* either prints the tag of [s] or delegates to [or_else] *)\n  let mark_open_tag ~or_else s =\n    try\n      let style = style_of_tag s in\n      if !color_enabled then ansi_of_style_l style else \"\"\n    with Not_found -> or_else s\n\n  let mark_close_tag ~or_else s =\n    try\n      let _ = style_of_tag s in\n      if !color_enabled then ansi_of_style_l [Reset] else \"\"\n    with Not_found -> or_else s\n\n  (* add color handling to formatter [ppf] *)\n  let set_color_tag_handling ppf =\n    let open Format in\n    let functions = pp_get_formatter_stag_functions ppf () in\n    let functions' =\n      {\n        functions with\n        mark_open_stag = mark_open_tag ~or_else:functions.mark_open_stag;\n        mark_close_stag = mark_close_tag ~or_else:functions.mark_close_stag;\n      }\n    in\n    pp_set_mark_tags ppf true;\n    (* enable tags *)\n    pp_set_formatter_stag_functions ppf functions';\n    (* also setup margins *)\n    pp_set_margin ppf (pp_get_margin std_formatter ());\n    ()\n\n  external isatty : out_channel -> bool = \"caml_sys_isatty\"\n\n  (* reasonable heuristic on whether colors should be enabled *)\n  let should_enable_color () =\n    let term = try Sys.getenv \"TERM\" with Not_found -> \"\" in\n    term <> \"dumb\" && term <> \"\" && isatty stderr\n\n  type setting = Auto | Always | Never\n\n  let setup =\n    let first = ref true in\n    (* initialize only once *)\n    let formatter_l =\n      [Format.std_formatter; Format.err_formatter; Format.str_formatter]\n    in\n    fun o ->\n      if !first then (\n        first := false;\n        Format.set_mark_tags true;\n 
       List.iter set_color_tag_handling formatter_l;\n        color_enabled :=\n          match o with\n          | Some Always -> true\n          | Some Auto -> should_enable_color ()\n          | Some Never -> false\n          | None -> should_enable_color ());\n      ()\nend\n\nlet normalise_eol s =\n  let b = Buffer.create 80 in\n  for i = 0 to String.length s - 1 do\n    if s.[i] <> '\\r' then Buffer.add_char b s.[i]\n  done;\n  Buffer.contents b\n\nlet delete_eol_spaces src =\n  let len_src = String.length src in\n  let dst = Bytes.create len_src in\n  let rec loop i_src i_dst =\n    if i_src = len_src then i_dst\n    else\n      match src.[i_src] with\n      | ' ' | '\\t' -> loop_spaces 1 (i_src + 1) i_dst\n      | c ->\n        Bytes.set dst i_dst c;\n        loop (i_src + 1) (i_dst + 1)\n  and loop_spaces spaces i_src i_dst =\n    if i_src = len_src then i_dst\n    else\n      match src.[i_src] with\n      | ' ' | '\\t' -> loop_spaces (spaces + 1) (i_src + 1) i_dst\n      | '\\n' ->\n        Bytes.set dst i_dst '\\n';\n        loop (i_src + 1) (i_dst + 1)\n      | _ ->\n        for n = 0 to spaces do\n          Bytes.set dst (i_dst + n) src.[i_src - spaces + n]\n        done;\n        loop (i_src + 1) (i_dst + spaces + 1)\n  in\n  let stop = loop 0 0 in\n  Bytes.sub_string dst 0 stop\n\ntype hook_info = {sourcefile: string}\n\nexception\n  HookExnWrapper of {error: exn; hook_name: string; hook_info: hook_info}\n\nexception HookExn of exn\n\nlet raise_direct_hook_exn e = raise (HookExn e)\n\nlet fold_hooks list hook_info ast =\n  List.fold_left\n    (fun ast (hook_name, f) ->\n      try f hook_info ast with\n      | HookExn e -> raise e\n      | error -> raise (HookExnWrapper {error; hook_name; hook_info})\n      (* when explicit reraise with backtrace will be available,\n         it should be used here *))\n    ast (List.sort compare list)\n\nmodule type HookSig = sig\n  type t\n\n  val add_hook : string -> (hook_info -> t -> t) -> unit\n  val apply_hooks 
: hook_info -> t -> t\nend\n\nmodule MakeHooks (M : sig\n  type t\nend) : HookSig with type t = M.t = struct\n  type t = M.t\n\n  let hooks = ref []\n  let add_hook name f = hooks := (name, f) :: !hooks\n  let apply_hooks sourcefile intf = fold_hooks !hooks sourcefile intf\nend\n"
  },
  {
    "path": "analysis/vendor/ext/misc.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Miscellaneous useful types and functions *)\n\nval fatal_error : string -> 'a\nval fatal_errorf : ('a, Format.formatter, unit, 'b) format4 -> 'a\nexception Fatal_error\n\nval try_finally : (unit -> 'a) -> (unit -> unit) -> 'a\n\nval map_end : ('a -> 'b) -> 'a list -> 'b list -> 'b list\n(* [map_end f l t] is [map f l @ t], just more efficient. *)\n\nval map_left_right : ('a -> 'b) -> 'a list -> 'b list\n(* Like [List.map], with guaranteed left-to-right evaluation order *)\n\nval for_all2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool\n(* Same as [List.for_all] but for a binary predicate.\n   In addition, this [for_all2] never fails: given two lists\n   with different lengths, it returns false. *)\n\nval replicate_list : 'a -> int -> 'a list\n(* [replicate_list elem n] is the list with [n] elements\n   all identical to [elem]. 
*)\n\nval list_remove : 'a -> 'a list -> 'a list\n(* [list_remove x l] returns a copy of [l] with the first\n   element equal to [x] removed. *)\n\nval split_last : 'a list -> 'a list * 'a\n(* Return the last element and the other elements of the given list. *)\n\nval may : ('a -> unit) -> 'a option -> unit\nval may_map : ('a -> 'b) -> 'a option -> 'b option\n\ntype ref_and_value = R : 'a ref * 'a -> ref_and_value\n\nval protect_refs : ref_and_value list -> (unit -> 'a) -> 'a\n(** [protect_refs l f] temporarily sets [r] to [v] for each [R (r, v)] in [l]\n    while executing [f]. The previous contents of the references is restored\n    even if [f] raises an exception. *)\n\nmodule Stdlib : sig\n  module List : sig\n    type 'a t = 'a list\n\n    val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int\n    (** The lexicographic order supported by the provided order. There is no\n        constraint on the relative lengths of the lists. *)\n\n    val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool\n    (** Returns [true] iff the given lists have the same length and content with\n        respect to the given equality function. *)\n\n    val filter_map : ('a -> 'b option) -> 'a t -> 'b t\n    (** [filter_map f l] applies [f] to every element of [l], filters out the\n        [None] elements and returns the list of the arguments of the [Some]\n        elements. *)\n\n    val some_if_all_elements_are_some : 'a option t -> 'a t option\n    (** If all elements of the given list are [Some _] then [Some xs] is\n        returned with the [xs] being the contents of those [Some]s, with order\n        preserved. Otherwise return [None]. *)\n\n    val map2_prefix : ('a -> 'b -> 'c) -> 'a t -> 'b t -> 'c t * 'b t\n    (** [let r1, r2 = map2_prefix f l1 l2] If [l1] is of length n and\n        [l2 = h2 @ t2] with h2 of length n, r1 is [List.map2 f l1 h2] and r2 is\n        t2. 
*)\n\n    val split_at : int -> 'a t -> 'a t * 'a t\n    (** [split_at n l] returns the pair [before, after] where [before] is the\n        [n] first elements of [l] and [after] the remaining ones. If [l] has\n        less than [n] elements, raises Invalid_argument. *)\n  end\n\n  module Option : sig\n    type 'a t = 'a option\n\n    val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool\n\n    val iter : ('a -> unit) -> 'a t -> unit\n    val map : ('a -> 'b) -> 'a t -> 'b t\n    val fold : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b\n    val value_default : ('a -> 'b) -> default:'b -> 'a t -> 'b\n  end\n\n  module Array : sig\n    val exists2 : ('a -> 'b -> bool) -> 'a array -> 'b array -> bool\n    (* Same as [Array.exists], but for a two-argument predicate. Raise\n       Invalid_argument if the two arrays are determined to have\n       different lengths. *)\n  end\nend\n\nval find_in_path : string list -> string -> string\n(* Search a file in a list of directories. *)\n\nval find_in_path_rel : string list -> string -> string\n(* Search a relative file in a list of directories. *)\n\nval find_in_path_uncap : string list -> string -> string\n(* Same, but search also for uncapitalized name, i.e.\n   if name is Foo.ml, allow /path/Foo.ml and /path/foo.ml\n   to match. *)\n\nval remove_file : string -> unit\n(* Delete the given file if it exists. Never raise an error. *)\n\nval expand_directory : string -> string -> string\n(* [expand_directory alt file] eventually expands a [+] at the\n   beginning of file into [alt] (an alternate root directory) *)\n\nval create_hashtable : int -> ('a * 'b) list -> ('a, 'b) Hashtbl.t\n(* Create a hashtable of the given size and fills it with the\n   given bindings. *)\n\nval copy_file : in_channel -> out_channel -> unit\n(* [copy_file ic oc] reads the contents of file [ic] and copies\n   them to [oc]. It stops when encountering EOF on [ic]. 
*)\n\nval copy_file_chunk : in_channel -> out_channel -> int -> unit\n(* [copy_file_chunk ic oc n] reads [n] bytes from [ic] and copies\n   them to [oc]. It raises [End_of_file] when encountering\n   EOF on [ic]. *)\n\nval string_of_file : in_channel -> string\n(* [string_of_file ic] reads the contents of file [ic] and copies\n   them to a string. It stops when encountering EOF on [ic]. *)\n\nval output_to_bin_file_directly : string -> (string -> out_channel -> 'a) -> 'a\n\nval output_to_file_via_temporary :\n  ?mode:open_flag list -> string -> (string -> out_channel -> 'a) -> 'a\n(* Produce output in temporary file, then rename it\n   (as atomically as possible) to the desired output file name.\n   [output_to_file_via_temporary filename fn] opens a temporary file\n   which is passed to [fn] (name + output channel).  When [fn] returns,\n   the channel is closed and the temporary file is renamed to\n   [filename]. *)\n\nval log2 : int -> int\n(* [log2 n] returns [s] such that [n = 1 lsl s]\n   if [n] is a power of 2*)\n\nval align : int -> int -> int\n(* [align n a] rounds [n] upwards to a multiple of [a]\n   (a power of 2). *)\n\nval no_overflow_add : int -> int -> bool\n(* [no_overflow_add n1 n2] returns [true] if the computation of\n   [n1 + n2] does not overflow. *)\n\nval no_overflow_sub : int -> int -> bool\n(* [no_overflow_sub n1 n2] returns [true] if the computation of\n   [n1 - n2] does not overflow. *)\n\nval no_overflow_mul : int -> int -> bool\n(* [no_overflow_mul n1 n2] returns [true] if the computation of\n   [n1 * n2] does not overflow. *)\n\nval no_overflow_lsl : int -> int -> bool\n(* [no_overflow_lsl n k] returns [true] if the computation of\n   [n lsl k] does not overflow. *)\n\nmodule Int_literal_converter : sig\n  val int : string -> int\n  val int32 : string -> int32\n  val int64 : string -> int64\nend\n\nval chop_extensions : string -> string\n(* Return the given file name without its extensions. 
The extensions\n   is the longest suffix starting with a period and not including\n   a directory separator, [.xyz.uvw] for instance.\n\n   Return the given name if it does not contain an extension. *)\n\nval search_substring : string -> string -> int -> int\n(* [search_substring pat str start] returns the position of the first\n   occurrence of string [pat] in string [str].  Search starts\n   at offset [start] in [str].  Raise [Not_found] if [pat]\n   does not occur. *)\n\nval replace_substring : before:string -> after:string -> string -> string\n(* [replace_substring ~before ~after str] replaces all\n   occurrences of [before] with [after] in [str] and returns\n   the resulting string. *)\n\nval rev_split_words : string -> string list\n(* [rev_split_words s] splits [s] in blank-separated words, and returns\n   the list of words in reverse order. *)\n\nval get_ref : 'a list ref -> 'a list\n(* [get_ref lr] returns the content of the list reference [lr] and reset\n   its content to the empty list. *)\n\nval fst3 : 'a * 'b * 'c -> 'a\nval snd3 : 'a * 'b * 'c -> 'b\nval thd3 : 'a * 'b * 'c -> 'c\n\nval fst4 : 'a * 'b * 'c * 'd -> 'a\nval snd4 : 'a * 'b * 'c * 'd -> 'b\nval thd4 : 'a * 'b * 'c * 'd -> 'c\nval for4 : 'a * 'b * 'c * 'd -> 'd\n\nmodule LongString : sig\n  type t = bytes array\n  val create : int -> t\n  val length : t -> int\n  val get : t -> int -> char\n  val set : t -> int -> char -> unit\n  val blit : t -> int -> t -> int -> int -> unit\n  val output : out_channel -> t -> int -> int -> unit\n  val unsafe_blit_to_bytes : t -> int -> bytes -> int -> int -> unit\n  val input_bytes : in_channel -> int -> t\nend\n\nval edit_distance : string -> string -> int -> int option\n(** [edit_distance a b cutoff] computes the edit distance between strings [a]\n    and [b]. 
To help efficiency, it uses a cutoff: if the distance [d] is\n    smaller than [cutoff], it returns [Some d], else [None].\n\n    The distance algorithm currently used is Damerau-Levenshtein: it computes\n    the number of insertion, deletion, substitution of letters, or swapping of\n    adjacent letters to go from one word to the other. The particular algorithm\n    may change in the future. *)\n\nval spellcheck : string list -> string -> string list\n(** [spellcheck env name] takes a list of names [env] that exist in the current\n    environment and an erroneous [name], and returns a list of suggestions taken\n    from [env], that are close enough to [name] that it may be a typo for one of\n    them. *)\n\nval did_you_mean : Format.formatter -> (unit -> string list) -> unit\n(** [did_you_mean ppf get_choices] hints that the user may have meant one of the\n    option returned by calling [get_choices]. It does nothing if the returned\n    list is empty.\n\n    The [unit -> ...] thunking is meant to delay any potentially-slow\n    computation (typically computing edit-distance with many things from the\n    current environment) to when the hint message is to be printed. You should\n    print an understandable error message before calling [did_you_mean], so that\n    users get a clear notification of the failure even if producing the hint is\n    slow. 
*)\n\nval cut_at : string -> char -> string * string\n(** [String.cut_at s c] returns a pair containing the sub-string before the\n    first occurrence of [c] in [s], and the sub-string after the first\n    occurrence of [c] in [s].\n    [let (before, after) = String.cut_at s c in before ^ String.make 1 c ^\n     after] is the identity if [s] contains [c].\n\n    Raise [Not_found] if the character does not appear in the string\n    @since 4.01 *)\n\nmodule StringSet : Set.S with type elt = string\nmodule StringMap : Map.S with type key = string\n(* TODO: replace all custom instantiations of StringSet/StringMap in various\n   compiler modules with this one. *)\n\n(* Color handling *)\nmodule Color : sig\n  type color = Black | Red | Green | Yellow | Blue | Magenta | Cyan | White\n\n  type style =\n    | FG of color (* foreground *)\n    | BG of color (* background *)\n    | Bold\n    | Reset\n    | Dim\n\n  val ansi_of_style_l : style list -> string\n  (* ANSI escape sequence for the given style *)\n\n  type styles = {error: style list; warning: style list; loc: style list}\n\n  val default_styles : styles\n  val get_styles : unit -> styles\n  val set_styles : styles -> unit\n\n  type setting = Auto | Always | Never\n\n  val setup : setting option -> unit\n  (* [setup opt] will enable or disable color handling on standard formatters\n     according to the value of color setting [opt].\n     Only the first call to this function has an effect. *)\n\n  val set_color_tag_handling : Format.formatter -> unit\n  (* adds functions to support color tags to the given formatter. *)\nend\n\nval normalise_eol : string -> string\n(** [normalise_eol s] returns a fresh copy of [s] with any '\\r' characters\n    removed. Intended for pre-processing text which will subsequently be printed\n    on a channel which performs EOL transformations (i.e. 
Windows) *)\n\nval delete_eol_spaces : string -> string\n(** [delete_eol_spaces s] returns a fresh copy of [s] with any end of line\n    spaces removed. Intended to normalize the output of the toplevel for tests.\n*)\n\n(** {1 Hook machinery}\n\n    Hooks machinery: [add_hook name f] will register a function that will be\n    called on the argument of a later call to [apply_hooks]. Hooks are applied\n    in the lexicographical order of their names. *)\n\ntype hook_info = {sourcefile: string}\n\nexception\n  HookExnWrapper of {error: exn; hook_name: string; hook_info: hook_info}\n(** An exception raised by a hook will be wrapped into a [HookExnWrapper]\n    constructor by the hook machinery. *)\n\nval raise_direct_hook_exn : exn -> 'a\n(** A hook can use [raise_direct_hook_exn] to raise an exception that will\n    not be wrapped into a {!HookExnWrapper}. *)\n\nmodule type HookSig = sig\n  type t\n  val add_hook : string -> (hook_info -> t -> t) -> unit\n  val apply_hooks : hook_info -> t -> t\nend\n\nmodule MakeHooks : functor\n  (M : sig\n     type t\n   end)\n  -> HookSig with type t = M.t\n"
  },
  {
    "path": "analysis/vendor/ext/ordered_hash_map.cppo.ml",
    "content": "#if defined TYPE_FUNCTOR\nmodule Make(H: Hashtbl.HashedType): (S with type key = H.t) =\nstruct\n  type key = H.t\n  type   'value t = (key,'value) Ordered_hash_map_gen.t\n  let key_index (h : _ t) key =\n    (H.hash  key) land (Array.length h.data - 1)\n  let equal_key = H.equal\n#elif defined TYPE_LOCAL_IDENT\n  type key = Ident.t\n  type   'value t = (key,'value) Ordered_hash_map_gen.t\n  let key_index (h : _ t) (key : key) =\n    (Bs_hash_stubs.hash_int  key.stamp) land (Array.length h.data - 1)\n  let equal_key = Ext_ident.equal\n\n#else\n      [%error \"unknown type\"]\n#endif \n\n  open Ordered_hash_map_gen\n\n  let create = create\n  let clear = clear\n  let reset = reset\n\n  let iter = iter\n  let fold = fold\n  let length = length\n\n  let elements = elements\n  let choose = choose\n  let to_sorted_array = to_sorted_array\n\n\n\n  let rec small_bucket_mem key lst =\n    match lst with \n    | Empty -> false \n    | Cons rhs -> \n      equal_key key rhs.key ||\n      match rhs.next with \n      | Empty -> false \n      | Cons rhs -> \n        equal_key key rhs.key ||\n        match rhs.next with \n        | Empty -> false \n        | Cons rhs -> \n          equal_key key rhs.key ||\n          small_bucket_mem key rhs.next\n\n  let rec small_bucket_rank key lst =\n    match lst with \n    | Empty -> -1\n    | Cons rhs -> \n      if equal_key key rhs.key then rhs.ord \n      else match rhs.next with \n        | Empty -> -1 \n        | Cons rhs -> \n          if equal_key key rhs.key then rhs.ord else\n            match rhs.next with \n            | Empty -> -1 \n            | Cons rhs -> \n              if equal_key key rhs.key then rhs.ord else\n                small_bucket_rank key rhs.next\n\n  let rec small_bucket_find_value  key (lst : (_,_) bucket)   =\n    match lst with \n    | Empty -> raise Not_found\n    | Cons rhs -> \n      if equal_key key rhs.key then rhs.data\n      else match rhs.next with \n        | Empty -> raise 
Not_found \n        | Cons rhs -> \n          if equal_key key  rhs.key then rhs.data else\n            match rhs.next with \n            | Empty -> raise Not_found \n            | Cons rhs -> \n              if equal_key key rhs.key then rhs.data else\n                small_bucket_find_value key rhs.next \n\n  let add h key value =\n    let i = key_index h key  in \n    if not (small_bucket_mem key  h.data.(i)) then \n      begin \n        h.data.(i) <- Cons {key; ord = h.size; data = value; next =  h.data.(i)};\n        h.size <- h.size + 1 ;\n        if h.size > Array.length h.data lsl 1 then resize key_index h\n      end\n\n  let mem h key =\n    small_bucket_mem key (Array.unsafe_get h.data (key_index h key)) \n  let rank h key = \n    small_bucket_rank key(Array.unsafe_get h.data (key_index h key))  \n\n  let find_value h key =\n    small_bucket_find_value key (Array.unsafe_get h.data (key_index h key))\n\n\n#if defined TYPE_FUNCTOR\nend\n#endif\n\n\n\n\n\n\n\n\n\n\n"
  },
  {
    "path": "analysis/vendor/ext/ordered_hash_map_gen.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(** Hash based data structure which does not support [remove], so that the adding\n    order is strict and continuous *)\n\nmodule type S = sig\n  type key\n\n  type 'value t\n\n  val create : int -> 'value t\n\n  val clear : 'value t -> unit\n\n  val reset : 'value t -> unit\n\n  val add : 'value t -> key -> 'value -> unit\n\n  val mem : 'value t -> key -> bool\n\n  val rank : 'value t -> key -> int (* -1 if not found*)\n\n  val find_value : 'value t -> key -> 'value (* raise if not found*)\n\n  val iter : 'value t -> (key -> 'value -> int -> unit) -> unit\n\n  val fold : 'value t -> 'b -> (key -> 'value -> int -> 'b -> 'b) -> 'b\n\n  val length : 'value t -> int\n\n  val elements : 'value t -> key list\n\n  val choose : 'value t -> key\n\n  val to_sorted_array : 'value t -> key array\nend\n\n(* We do dynamic hashing, and resize the table and rehash the elements\n   when buckets become too long. *)\ntype ('a, 'b) bucket =\n  | Empty\n  | Cons of {key: 'a; ord: int; data: 'b; next: ('a, 'b) bucket}\n\ntype ('a, 'b) t = {\n  mutable size: int;\n  (* number of entries *)\n  mutable data: ('a, 'b) bucket array;\n  (* the buckets *)\n  initial_size: int; (* initial array size *)\n}\n\nlet create initial_size =\n  let s = Ext_util.power_2_above 16 initial_size in\n  {initial_size = s; size = 0; data = Array.make s Empty}\n\nlet clear h =\n  h.size <- 0;\n  let len = Array.length h.data in\n  for i = 0 to len - 1 do\n    Array.unsafe_set h.data i Empty\n  done\n\nlet reset h =\n  h.size <- 0;\n  h.data <- Array.make h.initial_size Empty\n\nlet length h = h.size\n\nlet resize indexfun h =\n  let odata = h.data in\n  let osize = Array.length odata in\n  let nsize = osize * 2 in\n  if nsize < Sys.max_array_length then (\n    let ndata = Array.make nsize Empty in\n    h.data <- ndata;\n    (* so that indexfun sees the new bucket count *)\n    let rec insert_bucket = function\n      | Empty -> ()\n      | Cons {key; ord; data; next} ->\n        let nidx = indexfun h key 
in\n        Array.unsafe_set ndata nidx\n          (Cons {key; ord; data; next = Array.unsafe_get ndata nidx});\n        insert_bucket next\n    in\n    for i = 0 to osize - 1 do\n      insert_bucket (Array.unsafe_get odata i)\n    done)\n\nlet iter h f =\n  let rec do_bucket = function\n    | Empty -> ()\n    | Cons {key; ord; data; next} ->\n      f key data ord;\n      do_bucket next\n  in\n  let d = h.data in\n  for i = 0 to Array.length d - 1 do\n    do_bucket (Array.unsafe_get d i)\n  done\n\nlet choose h =\n  let rec aux arr offset len =\n    if offset >= len then raise Not_found\n    else\n      match Array.unsafe_get arr offset with\n      | Empty -> aux arr (offset + 1) len\n      | Cons {key = k; _} -> k\n  in\n  aux h.data 0 (Array.length h.data)\n\nlet to_sorted_array h =\n  if h.size = 0 then [||]\n  else\n    let v = choose h in\n    let arr = Array.make h.size v in\n    iter h (fun k _ i -> Array.unsafe_set arr i k);\n    arr\n\nlet fold h init f =\n  let rec do_bucket b accu =\n    match b with\n    | Empty -> accu\n    | Cons {key; ord; data; next} -> do_bucket next (f key data ord accu)\n  in\n  let d = h.data in\n  let accu = ref init in\n  for i = 0 to Array.length d - 1 do\n    accu := do_bucket (Array.unsafe_get d i) !accu\n  done;\n  !accu\n\nlet elements set = fold set [] (fun k _ _ acc -> k :: acc)\n\nlet rec bucket_length acc (x : _ bucket) =\n  match x with\n  | Empty -> 0\n  | Cons rhs -> bucket_length (acc + 1) rhs.next\n"
  },
  {
    "path": "analysis/vendor/ext/ordered_hash_map_local_ident.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Ordered_hash_map_gen.S with type key = Ident.t\n(** Hash algorithm only hash stamp, this makes sense when all identifiers are\n    local (no global) *)\n"
  },
  {
    "path": "analysis/vendor/ext/set.cppo.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n\n#if defined TYPE_STRING \ntype elt = string\nlet compare_elt = Ext_string.compare \nlet [@inline] eq_elt (x : elt) y = x = y\nlet print_elt = Format.pp_print_string\n#elif defined TYPE_IDENT\ntype elt = Ident.t\nlet compare_elt (x : elt) (y : elt) = \n  let a =  Stdlib.compare (x.stamp : int) y.stamp in \n  if a <> 0 then a \n  else \n    let b = Stdlib.compare (x.name : string) y.name in \n    if b <> 0 then b \n    else Stdlib.compare (x.flags : int) y.flags     \nlet [@inline] eq_elt (x : elt) y = Ident.same x y\nlet print_elt = Ident.print\n#elif defined TYPE_INT\ntype elt = int \nlet compare_elt = Ext_int.compare \nlet print_elt = Format.pp_print_int\nlet [@inline] eq_elt (x : elt) y = x = y\n#else \n[%error \"unknown type\" ]\n#endif\n\n\n(* let (=) (a:int) b = a = b *)\n\ntype ('a ) t0 = 'a Set_gen.t \n\ntype  t = elt t0\n\nlet empty = Set_gen.empty \nlet is_empty = Set_gen.is_empty\nlet iter = Set_gen.iter\nlet fold = Set_gen.fold\nlet for_all = Set_gen.for_all \nlet exists = Set_gen.exists \nlet singleton = Set_gen.singleton \nlet cardinal = Set_gen.cardinal\nlet elements = Set_gen.elements\nlet choose = Set_gen.choose \n\nlet of_sorted_array = Set_gen.of_sorted_array\n\nlet rec mem (tree : t) (x : elt) =  match tree with \n  | Empty -> false\n  | Leaf v -> eq_elt x  v \n  | Node{l; v; r} ->\n    let c = compare_elt x v in\n    c = 0 || mem (if c < 0 then l else r) x\n\ntype split = \n  | Yes of  {l : t ;  r :  t }\n  | No of { l : t; r : t}  \n\nlet [@inline] split_l (x : split) = \n  match x with \n  | Yes {l} | No {l} -> l \n\nlet [@inline] split_r (x : split) = \n  match x with \n  | Yes {r} | No {r} -> r       \n\nlet [@inline] split_pres (x : split) = match x with | Yes _ -> true | No _ -> false   \n\nlet rec split (tree : t) x : split =  match tree with \n  | Empty ->\n     No {l = empty;  r = empty}\n  | Leaf v ->   \n    let c = compare_elt x v in\n    if c = 0 then Yes {l = empty; r = empty}\n    else if c < 0 then\n      No {l = empty;  
r = tree}\n    else\n      No {l = tree;  r = empty}\n  | Node {l; v; r} ->\n    let c = compare_elt x v in\n    if c = 0 then Yes {l; r}\n    else if c < 0 then\n      match split l x with \n      | Yes result -> \n        Yes { result with r = Set_gen.internal_join result.r v r }\n      | No result ->\n        No { result with r= Set_gen.internal_join result.r v r }\n    else\n      match split r x with\n      | Yes result -> \n        Yes {result with l = Set_gen.internal_join l v result.l}\n      | No result ->   \n        No {result with l = Set_gen.internal_join l v result.l}\n\nlet rec add (tree : t) x : t =  match tree with \n  | Empty -> singleton x\n  | Leaf v -> \n    let c = compare_elt x v in\n    if c = 0 then tree else     \n    if c < 0 then \n      Set_gen.unsafe_two_elements x v\n    else \n      Set_gen.unsafe_two_elements v x \n  | Node {l; v; r} as t ->\n    let c = compare_elt x v in\n    if c = 0 then t else\n    if c < 0 then Set_gen.bal (add l x ) v r else Set_gen.bal l v (add r x )\n\nlet rec union (s1 : t) (s2 : t) : t  =\n  match (s1, s2) with\n  | (Empty, t) \n  | (t, Empty) -> t\n  | Node _, Leaf v2 ->\n    add s1 v2 \n  | Leaf v1, Node _ -> \n    add s2 v1 \n  | Leaf x, Leaf v -> \n    let c = compare_elt x v in\n    if c = 0 then s1 else     \n    if c < 0 then \n      Set_gen.unsafe_two_elements x v\n    else \n      Set_gen.unsafe_two_elements v x\n  | Node{l=l1; v=v1; r=r1; h=h1}, Node{l=l2; v=v2; r=r2; h=h2} ->\n    if h1 >= h2 then    \n      let split_result =  split s2 v1 in\n      Set_gen.internal_join \n        (union l1 (split_l split_result)) v1 \n        (union r1 (split_r split_result))  \n    else    \n      let split_result =  split s1 v2 in\n      Set_gen.internal_join \n        (union (split_l split_result) l2) v2 \n        (union (split_r split_result) r2)\n\n\nlet rec inter (s1 : t)  (s2 : t) : t  =\n  match (s1, s2) with\n  | (Empty, _) \n  | (_, Empty) -> empty  \n  | Leaf v, _ -> \n    if mem s2 v then s1 else 
empty\n  | Node ({ v } as s1), _ ->\n    let result = split s2 v in \n    if split_pres result then \n      Set_gen.internal_join \n        (inter s1.l (split_l result)) \n        v \n        (inter s1.r (split_r result))\n    else\n      Set_gen.internal_concat \n        (inter s1.l (split_l result)) \n        (inter s1.r (split_r result))\n\n\nlet rec diff (s1 : t) (s2 : t) : t  =\n  match (s1, s2) with\n  | (Empty, _) -> empty\n  | (t1, Empty) -> t1\n  | Leaf v, _-> \n    if mem s2 v then empty else s1 \n  | (Node({ v} as s1), _) ->\n    let result =  split s2 v in\n    if split_pres result then \n      Set_gen.internal_concat \n        (diff s1.l (split_l result)) \n        (diff s1.r (split_r result))    \n    else\n      Set_gen.internal_join \n        (diff s1.l (split_l result))\n        v \n        (diff s1.r (split_r result))\n\n\n\n\n\n\n\nlet rec remove (tree : t)  (x : elt) : t = match tree with \n  | Empty -> empty (* This case actually would be never reached *)\n  | Leaf v ->     \n    if eq_elt x  v then empty else tree    \n  | Node{l; v; r} ->\n    let c = compare_elt x v in\n    if c = 0 then Set_gen.internal_merge l r else\n    if c < 0 then Set_gen.bal (remove l x) v r else Set_gen.bal l v (remove r x )\n\n(* let compare s1 s2 = Set_gen.compare ~cmp:compare_elt s1 s2  *)\n\n\n\nlet of_list l =\n  match l with\n  | [] -> empty\n  | [x0] -> singleton x0\n  | [x0; x1] -> add (singleton x0) x1 \n  | [x0; x1; x2] -> add (add (singleton x0)  x1) x2 \n  | [x0; x1; x2; x3] -> add (add (add (singleton x0) x1 ) x2 ) x3 \n  | [x0; x1; x2; x3; x4] -> add (add (add (add (singleton x0) x1) x2 ) x3 ) x4 \n  | _ -> \n    let arrs = Array.of_list l in \n    Array.sort compare_elt arrs ; \n    of_sorted_array arrs\n\n\n\n(* also check order *)\nlet invariant t =\n  Set_gen.check t ;\n  Set_gen.is_ordered ~cmp:compare_elt t          \n\nlet print fmt s = \n  Format.fprintf \n   fmt   \"@[<v>{%a}@]@.\"\n    (fun fmt s   -> \n       iter s\n         (fun e -> 
Format.fprintf fmt \"@[<v>%a@],@ \" \n         print_elt e) \n    )\n    s     \n\n\n\n\n\n"
  },
  {
    "path": "analysis/vendor/ext/set_gen.ml",
    "content": "(***********************************************************************)\n(*                                                                     *)\n(*                                OCaml                                *)\n(*                                                                     *)\n(*            Xavier Leroy, projet Cristal, INRIA Rocquencourt         *)\n(*                                                                     *)\n(*  Copyright 1996 Institut National de Recherche en Informatique et   *)\n(*  en Automatique.  All rights reserved.  This file is distributed    *)\n(*  under the terms of the GNU Library General Public License, with    *)\n(*  the special exception on linking described in file ../LICENSE.     *)\n(*                                                                     *)\n(***********************************************************************)\n[@@@warnerror \"+55\"]\n\n(* balanced tree based on stdlib distribution *)\n\ntype 'a t0 = Empty | Leaf of 'a | Node of {l: 'a t0; v: 'a; r: 'a t0; h: int}\n\ntype 'a partial_node = {l: 'a t0; v: 'a; r: 'a t0; h: int}\n\nexternal ( ~! ) : 'a t0 -> 'a partial_node = \"%identity\"\n\nlet empty = Empty\n\nlet[@inline] height = function\n  | Empty -> 0\n  | Leaf _ -> 1\n  | Node {h} -> h\n\nlet[@inline] calc_height a b = (if a >= b then a else b) + 1\n\n(*\n     Invariants:\n     1. {[ l < v < r]}\n     2. l and r balanced\n     3. 
[height l] - [height r] <= 2\n*)\nlet[@inline] unsafe_node v l r h = Node {l; v; r; h}\n\nlet[@inline] unsafe_node_maybe_leaf v l r h =\n  if h = 1 then Leaf v else Node {l; v; r; h}\n\nlet[@inline] singleton x = Leaf x\n\nlet[@inline] unsafe_two_elements x v = unsafe_node v (singleton x) empty 2\n\ntype 'a t = 'a t0 = private\n  | Empty\n  | Leaf of 'a\n  | Node of {l: 'a t0; v: 'a; r: 'a t0; h: int}\n\n(* Smallest and greatest element of a set *)\n\nlet rec min_exn = function\n  | Empty -> raise Not_found\n  | Leaf v -> v\n  | Node {l; v} -> (\n    match l with\n    | Empty -> v\n    | Leaf _ | Node _ -> min_exn l)\n\nlet[@inline] is_empty = function\n  | Empty -> true\n  | _ -> false\n\nlet rec cardinal_aux acc = function\n  | Empty -> acc\n  | Leaf _ -> acc + 1\n  | Node {l; r} -> cardinal_aux (cardinal_aux (acc + 1) r) l\n\nlet cardinal s = cardinal_aux 0 s\n\nlet rec elements_aux accu = function\n  | Empty -> accu\n  | Leaf v -> v :: accu\n  | Node {l; v; r} -> elements_aux (v :: elements_aux accu r) l\n\nlet elements s = elements_aux [] s\n\nlet choose = min_exn\n\nlet rec iter x f =\n  match x with\n  | Empty -> ()\n  | Leaf v -> f v\n  | Node {l; v; r} ->\n    iter l f;\n    f v;\n    iter r f\n\nlet rec fold s accu f =\n  match s with\n  | Empty -> accu\n  | Leaf v -> f v accu\n  | Node {l; v; r} -> fold r (f v (fold l accu f)) f\n\nlet rec for_all x p =\n  match x with\n  | Empty -> true\n  | Leaf v -> p v\n  | Node {l; v; r} -> p v && for_all l p && for_all r p\n\nlet rec exists x p =\n  match x with\n  | Empty -> false\n  | Leaf v -> p v\n  | Node {l; v; r} -> p v || exists l p || exists r p\n\nexception Height_invariant_broken\n\nexception Height_diff_borken\n\nlet rec check_height_and_diff = function\n  | Empty -> 0\n  | Leaf _ -> 1\n  | Node {l; r; h} ->\n    let hl = check_height_and_diff l in\n    let hr = check_height_and_diff r in\n    if h <> calc_height hl hr then raise Height_invariant_broken\n    else\n      let diff = abs (hl - hr) in\n     
 if diff > 2 then raise Height_diff_borken else h\n\nlet check tree = ignore (check_height_and_diff tree)\n\n(* Same as create, but performs one step of rebalancing if necessary.\n    Invariants:\n    1. {[ l < v < r ]}\n    2. l and r balanced\n    3. | height l - height r | <= 3.\n\n    Proof by indunction\n\n    Lemma: the height of  [bal l v r] will bounded by [max l r] + 1\n*)\nlet bal l v r : _ t =\n  let hl = height l in\n  let hr = height r in\n  if hl > hr + 2 then\n    let {l = ll; r = lr; v = lv; h = _} = ~!l in\n    let hll = height ll in\n    let hlr = height lr in\n    if hll >= hlr then\n      let hnode = calc_height hlr hr in\n      unsafe_node lv ll\n        (unsafe_node_maybe_leaf v lr r hnode)\n        (calc_height hll hnode)\n    else\n      let {l = lrl; r = lrr; v = lrv} = ~!lr in\n      let hlrl = height lrl in\n      let hlrr = height lrr in\n      let hlnode = calc_height hll hlrl in\n      let hrnode = calc_height hlrr hr in\n      unsafe_node lrv\n        (unsafe_node_maybe_leaf lv ll lrl hlnode)\n        (unsafe_node_maybe_leaf v lrr r hrnode)\n        (calc_height hlnode hrnode)\n  else if hr > hl + 2 then\n    let {l = rl; r = rr; v = rv} = ~!r in\n    let hrr = height rr in\n    let hrl = height rl in\n    if hrr >= hrl then\n      let hnode = calc_height hl hrl in\n      unsafe_node rv\n        (unsafe_node_maybe_leaf v l rl hnode)\n        rr (calc_height hnode hrr)\n    else\n      let {l = rll; r = rlr; v = rlv} = ~!rl in\n      let hrll = height rll in\n      let hrlr = height rlr in\n      let hlnode = calc_height hl hrll in\n      let hrnode = calc_height hrlr hrr in\n      unsafe_node rlv\n        (unsafe_node_maybe_leaf v l rll hlnode)\n        (unsafe_node_maybe_leaf rv rlr rr hrnode)\n        (calc_height hlnode hrnode)\n  else unsafe_node_maybe_leaf v l r (calc_height hl hr)\n\nlet rec remove_min_elt = function\n  | Empty -> invalid_arg \"Set.remove_min_elt\"\n  | Leaf _ -> empty\n  | Node {l = Empty; r} -> r\n  | Node {l; 
v; r} -> bal (remove_min_elt l) v r\n\n(*\n    All elements of l must precede the elements of r.\n        Assume | height l - height r | <= 2.\n    weak form of [concat]\n*)\n\nlet internal_merge l r =\n  match (l, r) with\n  | Empty, t -> t\n  | t, Empty -> t\n  | _, _ -> bal l (min_exn r) (remove_min_elt r)\n\n(* Beware: those two functions assume that the added v is *strictly*\n    smaller (or bigger) than all the present elements in the tree; it\n    does not test for equality with the current min (or max) element.\n    Indeed, they are only used during the \"join\" operation which\n    respects this precondition.\n*)\n\nlet rec add_min v = function\n  | Empty -> singleton v\n  | Leaf x -> unsafe_two_elements v x\n  | Node n -> bal (add_min v n.l) n.v n.r\n\nlet rec add_max v = function\n  | Empty -> singleton v\n  | Leaf x -> unsafe_two_elements x v\n  | Node n -> bal n.l n.v (add_max v n.r)\n\n(** Invariants: 1. l < v < r 2. l and r are balanced\n\n    Proof by induction The height of output will be ~~ (max (height l) (height\n    r) + 2) Also use the lemma from [bal] *)\nlet rec internal_join l v r =\n  match (l, r) with\n  | Empty, _ -> add_min v r\n  | _, Empty -> add_max v l\n  | Leaf lv, Node {h = rh} ->\n    if rh > 3 then add_min lv (add_min v r) (* FIXME: could inlined *)\n    else unsafe_node v l r (rh + 1)\n  | Leaf _, Leaf _ -> unsafe_node v l r 2\n  | Node {h = lh}, Leaf rv ->\n    if lh > 3 then add_max rv (add_max v l) else unsafe_node v l r (lh + 1)\n  | Node {l = ll; v = lv; r = lr; h = lh}, Node {l = rl; v = rv; r = rr; h = rh}\n    ->\n    if lh > rh + 2 then\n      (* proof by induction:\n         now [height of ll] is [lh - 1]\n      *)\n      bal ll lv (internal_join lr v r)\n    else if rh > lh + 2 then bal (internal_join l v rl) rv rr\n    else unsafe_node v l r (calc_height lh rh)\n\n(*\n    Required Invariants: \n    [t1] < [t2]  \n*)\nlet internal_concat t1 t2 =\n  match (t1, t2) with\n  | Empty, t -> t\n  | t, Empty -> t\n  | _, _ 
-> internal_join t1 (min_exn t2) (remove_min_elt t2)\n\nlet rec partition x p =\n  match x with\n  | Empty -> (empty, empty)\n  | Leaf v ->\n    let pv = p v in\n    if pv then (x, empty) else (empty, x)\n  | Node {l; v; r} ->\n    (* call [p] in the expected left-to-right order *)\n    let lt, lf = partition l p in\n    let pv = p v in\n    let rt, rf = partition r p in\n    if pv then (internal_join lt v rt, internal_concat lf rf)\n    else (internal_concat lt rt, internal_join lf v rf)\n\nlet of_sorted_array l =\n  let rec sub start n l =\n    if n = 0 then empty\n    else if n = 1 then\n      let x0 = Array.unsafe_get l start in\n      singleton x0\n    else if n = 2 then\n      let x0 = Array.unsafe_get l start in\n      let x1 = Array.unsafe_get l (start + 1) in\n      unsafe_node x1 (singleton x0) empty 2\n    else if n = 3 then\n      let x0 = Array.unsafe_get l start in\n      let x1 = Array.unsafe_get l (start + 1) in\n      let x2 = Array.unsafe_get l (start + 2) in\n      unsafe_node x1 (singleton x0) (singleton x2) 2\n    else\n      let nl = n / 2 in\n      let left = sub start nl l in\n      let mid = start + nl in\n      let v = Array.unsafe_get l mid in\n      let right = sub (mid + 1) (n - nl - 1) l in\n      unsafe_node v left right (calc_height (height left) (height right))\n  in\n  sub 0 (Array.length l) l\n\nlet is_ordered ~cmp tree =\n  let rec is_ordered_min_max tree =\n    match tree with\n    | Empty -> `Empty\n    | Leaf v -> `V (v, v)\n    | Node {l; v; r} -> (\n      match is_ordered_min_max l with\n      | `No -> `No\n      | `Empty -> (\n        match is_ordered_min_max r with\n        | `No -> `No\n        | `Empty -> `V (v, v)\n        | `V (l, r) -> if cmp v l < 0 then `V (v, r) else `No)\n      | `V (min_v, max_v) -> (\n        match is_ordered_min_max r with\n        | `No -> `No\n        | `Empty -> if cmp max_v v < 0 then `V (min_v, v) else `No\n        | `V (min_v_r, max_v_r) ->\n          if cmp max_v min_v_r < 0 then `V 
(min_v, max_v_r) else `No))\n  in\n  is_ordered_min_max tree <> `No\n\nlet invariant ~cmp t =\n  check t;\n  is_ordered ~cmp t\n\nmodule type S = sig\n  type elt\n\n  type t\n\n  val empty : t\n\n  val is_empty : t -> bool\n\n  val iter : t -> (elt -> unit) -> unit\n\n  val fold : t -> 'a -> (elt -> 'a -> 'a) -> 'a\n\n  val for_all : t -> (elt -> bool) -> bool\n\n  val exists : t -> (elt -> bool) -> bool\n\n  val singleton : elt -> t\n\n  val cardinal : t -> int\n\n  val elements : t -> elt list\n\n  val choose : t -> elt\n\n  val mem : t -> elt -> bool\n\n  val add : t -> elt -> t\n\n  val remove : t -> elt -> t\n\n  val union : t -> t -> t\n\n  val inter : t -> t -> t\n\n  val diff : t -> t -> t\n\n  val of_list : elt list -> t\n\n  val of_sorted_array : elt array -> t\n\n  val invariant : t -> bool\n\n  val print : Format.formatter -> t -> unit\nend\n"
  },
  {
    "path": "analysis/vendor/ext/set_gen.mli",
    "content": "type 'a t = private\n  | Empty\n  | Leaf of 'a\n  | Node of {l: 'a t; v: 'a; r: 'a t; h: int}\n\nval empty : 'a t\n\nval is_empty : 'a t -> bool\n\nval unsafe_two_elements : 'a -> 'a -> 'a t\n\nval cardinal : 'a t -> int\n\nval elements : 'a t -> 'a list\n\nval choose : 'a t -> 'a\n\nval iter : 'a t -> ('a -> unit) -> unit\n\nval fold : 'a t -> 'c -> ('a -> 'c -> 'c) -> 'c\n\nval for_all : 'a t -> ('a -> bool) -> bool\n\nval exists : 'a t -> ('a -> bool) -> bool\n\nval check : 'a t -> unit\n\nval bal : 'a t -> 'a -> 'a t -> 'a t\n\nval remove_min_elt : 'a t -> 'a t\n\nval singleton : 'a -> 'a t\n\nval internal_merge : 'a t -> 'a t -> 'a t\n\nval internal_join : 'a t -> 'a -> 'a t -> 'a t\n\nval internal_concat : 'a t -> 'a t -> 'a t\n\nval partition : 'a t -> ('a -> bool) -> 'a t * 'a t\n\nval of_sorted_array : 'a array -> 'a t\n\nval is_ordered : cmp:('a -> 'a -> int) -> 'a t -> bool\n\nval invariant : cmp:('a -> 'a -> int) -> 'a t -> bool\n\nmodule type S = sig\n  type elt\n\n  type t\n\n  val empty : t\n\n  val is_empty : t -> bool\n\n  val iter : t -> (elt -> unit) -> unit\n\n  val fold : t -> 'a -> (elt -> 'a -> 'a) -> 'a\n\n  val for_all : t -> (elt -> bool) -> bool\n\n  val exists : t -> (elt -> bool) -> bool\n\n  val singleton : elt -> t\n\n  val cardinal : t -> int\n\n  val elements : t -> elt list\n\n  val choose : t -> elt\n\n  val mem : t -> elt -> bool\n\n  val add : t -> elt -> t\n\n  val remove : t -> elt -> t\n\n  val union : t -> t -> t\n\n  val inter : t -> t -> t\n\n  val diff : t -> t -> t\n\n  val of_list : elt list -> t\n\n  val of_sorted_array : elt array -> t\n\n  val invariant : t -> bool\n\n  val print : Format.formatter -> t -> unit\nend\n"
  },
  {
    "path": "analysis/vendor/ext/set_ident.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Set_gen.S with type elt = Ident.t\n"
  },
  {
    "path": "analysis/vendor/ext/set_int.mli",
    "content": "include Set_gen.S with type elt = int\n"
  },
  {
    "path": "analysis/vendor/ext/set_string.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Set_gen.S with type elt = string\n"
  },
  {
    "path": "analysis/vendor/ext/union_find.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype t = {id: int array; sz: int array; mutable components: int}\n\nlet init n =\n  let id = Array.make n 0 in\n  for i = 0 to n - 1 do\n    Array.unsafe_set id i i\n  done;\n  {id; sz = Array.make n 1; components = n}\n\nlet rec find_aux id_store p =\n  let parent = Array.unsafe_get id_store p in\n  if p <> parent then find_aux id_store parent else p\n\nlet find store p = find_aux store.id p\n\nlet union store p q =\n  let id_store = store.id in\n  let p_root = find_aux id_store p in\n  let q_root = find_aux id_store q in\n  if p_root <> q_root then\n    let () = store.components <- store.components - 1 in\n    let sz_store = store.sz in\n    let sz_p_root = Array.unsafe_get sz_store p_root in\n    let sz_q_root = Array.unsafe_get sz_store q_root in\n    let bigger = sz_p_root + sz_q_root in\n    (* Smaller root point to larger to make\n       it more balanced\n       it will introduce a cost for small root find,\n       but major will not be impacted\n    *)\n    if sz_p_root < sz_q_root then (\n      Array.unsafe_set id_store p q_root;\n      Array.unsafe_set id_store p_root q_root;\n      Array.unsafe_set sz_store q_root bigger (* little optimization *))\n    else (\n      Array.unsafe_set id_store q p_root;\n      Array.unsafe_set id_store q_root p_root;\n      Array.unsafe_set sz_store p_root bigger (* little optimization *))\n\nlet count store = store.components\n"
  },
  {
    "path": "analysis/vendor/ext/union_find.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype t\n\nval init : int -> t\n\nval find : t -> int -> int\n\nval union : t -> int -> int -> unit\n\nval count : t -> int\n"
  },
  {
    "path": "analysis/vendor/ext/vec.cppo.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n\nlet [@inline] min (x :int) y = if x < y then x else y\n\n#if defined TYPE_FUNCTOR\nexternal unsafe_blit : \n    'a array -> int -> 'a array -> int -> int -> unit = \"caml_array_blit\"\nmodule Make ( Resize :  Vec_gen.ResizeType) = struct\n  type elt = Resize.t \n\n  let null = Resize.null \n  \n#elif defined TYPE_INT\n\ntype elt = int \nlet null = 0 (* can be optimized *)\nlet unsafe_blit = Bs_hash_stubs.int_unsafe_blit\n#else \n[%error \"unknown type\"]\n#endif\n\nexternal unsafe_sub : 'a array -> int -> int -> 'a array = \"caml_array_sub\"\n\ntype  t = {\n  mutable arr : elt array ;\n  mutable len : int ;  \n}\n\nlet length d = d.len\n\nlet compact d =\n  let d_arr = d.arr in \n  if d.len <> Array.length d_arr then \n    begin\n      let newarr = unsafe_sub d_arr 0 d.len in \n      d.arr <- newarr\n    end\nlet singleton v = \n  {\n    len = 1 ; \n    arr = [|v|]\n  }\n\nlet empty () =\n  {\n    len = 0;\n    arr = [||];\n  }\n\nlet is_empty d =\n  d.len = 0\n\nlet reset d = \n  d.len <- 0; \n  d.arr <- [||]\n\n\n(* For [to_*] operations, we should be careful to call {!Array.*} function \n   in case we operate on the whole array\n*)\nlet to_list d =\n  let rec loop (d_arr : elt array) idx accum =\n    if idx < 0 then accum else loop d_arr (idx - 1) (Array.unsafe_get d_arr idx :: accum)\n  in\n  loop d.arr (d.len - 1) []\n\n\nlet of_list lst =\n  let arr = Array.of_list lst in \n  { arr ; len = Array.length arr}\n\n\nlet to_array d = \n  unsafe_sub d.arr 0 d.len\n\nlet of_array src =\n  {\n    len = Array.length src;\n    arr = Array.copy src;\n    (* okay to call {!Array.copy}*)\n  }\nlet of_sub_array arr off len = \n  { \n    len = len ; \n    arr = Array.sub arr off len  \n  }  \nlet unsafe_internal_array v = v.arr  \n(* we can not call {!Array.copy} *)\nlet copy src =\n  let len = src.len in\n  {\n    len ;\n    arr = unsafe_sub src.arr 0 len ;\n  }\n\n(* FIXME *)\nlet reverse_in_place src = \n  Ext_array.reverse_range src.arr 0 src.len \n\n\n\n\n(* 
{!Array.sub} is not enough for error checking, it \n   may contain some garbage\n *)\nlet sub (src : t) start len =\n  let src_len = src.len in \n  if len < 0 || start > src_len - len then invalid_arg \"Vec.sub\"\n  else \n  { len ; \n    arr = unsafe_sub src.arr start len }\n\nlet iter d  f = \n  let arr = d.arr in \n  for i = 0 to d.len - 1 do\n    f (Array.unsafe_get arr i)\n  done\n\nlet iteri d f =\n  let arr = d.arr in\n  for i = 0 to d.len - 1 do\n    f i (Array.unsafe_get arr i)\n  done\n\nlet iter_range d ~from ~to_ f =\n  if from < 0 || to_ >= d.len then invalid_arg \"Vec.iter_range\"\n  else \n    let d_arr = d.arr in \n    for i = from to to_ do \n      f  (Array.unsafe_get d_arr i)\n    done\n\nlet iteri_range d ~from ~to_ f =\n  if from < 0 || to_ >= d.len then invalid_arg \"Vec.iteri_range\"\n  else \n    let d_arr = d.arr in \n    for i = from to to_ do \n      f i (Array.unsafe_get d_arr i)\n    done\n\nlet map_into_array f src =\n  let src_len = src.len in \n  let src_arr = src.arr in \n  if src_len = 0 then [||]\n  else \n    let first_one = f (Array.unsafe_get src_arr 0) in \n    let arr = Array.make  src_len  first_one in\n    for i = 1 to src_len - 1 do\n      Array.unsafe_set arr i (f (Array.unsafe_get src_arr i))\n    done;\n    arr \nlet map_into_list f src = \n  let src_len = src.len in \n  let src_arr = src.arr in \n  if src_len = 0 then []\n  else \n    let acc = ref [] in         \n    for i =  src_len - 1 downto 0 do\n      acc := f (Array.unsafe_get src_arr i) :: !acc\n    done;\n    !acc\n\nlet mapi f src =\n  let len = src.len in \n  if len = 0 then { len ; arr = [| |] }\n  else \n    let src_arr = src.arr in \n    let arr = Array.make len (Array.unsafe_get src_arr 0) in\n    for i = 1 to len - 1 do\n      Array.unsafe_set arr i (f i (Array.unsafe_get src_arr i))\n    done;\n    {\n      len ;\n      arr ;\n    }\n\nlet fold_left f x a =\n  let rec loop a_len (a_arr : elt array) idx x =\n    if idx >= a_len then x else \n      loop 
a_len a_arr (idx + 1) (f x (Array.unsafe_get a_arr idx))\n  in\n  loop a.len a.arr 0 x\n\nlet fold_right f a x =\n  let rec loop (a_arr : elt array) idx x =\n    if idx < 0 then x\n    else loop a_arr (idx - 1) (f (Array.unsafe_get a_arr idx) x)\n  in\n  loop a.arr (a.len - 1) x\n\n(**  \n   [filter] and [inplace_filter]\n*)\nlet filter f d =\n  let new_d = copy d in \n  let new_d_arr = new_d.arr in \n  let d_arr = d.arr in\n  let p = ref 0 in\n  for i = 0 to d.len  - 1 do\n    let x = Array.unsafe_get d_arr i in\n    (* TODO: can be optimized for segments blit *)\n    if f x  then\n      begin\n        Array.unsafe_set new_d_arr !p x;\n        incr p;\n      end;\n  done;\n  new_d.len <- !p;\n  new_d \n\nlet equal eq x y : bool = \n  if x.len <> y.len then false \n  else \n    let rec aux x_arr y_arr i =\n      if i < 0 then true else  \n      if eq (Array.unsafe_get x_arr i) (Array.unsafe_get y_arr i) then \n        aux x_arr y_arr (i - 1)\n      else false in \n    aux x.arr y.arr (x.len - 1)\n\nlet get d i = \n  if i < 0 || i >= d.len then invalid_arg \"Vec.get\"\n  else Array.unsafe_get d.arr i\nlet unsafe_get d i = Array.unsafe_get d.arr i \nlet last d = \n  if d.len <= 0 then invalid_arg   \"Vec.last\"\n  else Array.unsafe_get d.arr (d.len - 1)\n\nlet capacity d = Array.length d.arr\n\n(* Attention can not use {!Array.exists} since the bound is not the same *)  \nlet exists p d = \n  let a = d.arr in \n  let n = d.len in   \n  let rec loop i =\n    if i = n then false\n    else if p (Array.unsafe_get a i) then true\n    else loop (succ i) in\n  loop 0\n\nlet map f src =\n  let src_len = src.len in \n  if src_len = 0 then { len = 0 ; arr = [||]}\n  (* TODO: we may share the empty array \n     but sharing mutable state is very challenging, \n     the tricky part is to avoid mutating the immutable array,\n     here it looks fine -- \n     invariant: whenever [.arr] mutated, make sure  it is not an empty array\n     Actually no: since starting from an empty 
array \n     {[\n       push v (* the address of v should not be changed *)\n     ]}\n  *)\n  else \n    let src_arr = src.arr in \n    let first = f (Array.unsafe_get src_arr 0 ) in \n    let arr = Array.make  src_len first in\n    for i = 1 to src_len - 1 do\n      Array.unsafe_set arr i (f (Array.unsafe_get src_arr i))\n    done;\n    {\n      len = src_len;\n      arr = arr;\n    }\n\nlet init len f =\n  if len < 0 then invalid_arg  \"Vec.init\"\n  else if len = 0 then { len = 0 ; arr = [||] }\n  else \n    let first = f 0 in \n    let arr = Array.make len first in\n    for i = 1 to len - 1 do\n      Array.unsafe_set arr i (f i)\n    done;\n    {\n\n      len ;\n      arr \n    }\n\n\n\n  let make initsize : t =\n    if initsize < 0 then invalid_arg  \"Vec.make\" ;\n    {\n\n      len = 0;\n      arr = Array.make  initsize null ;\n    }\n\n\n\n  let reserve (d : t ) s = \n    let d_len = d.len in \n    let d_arr = d.arr in \n    if s < d_len || s < Array.length d_arr then ()\n    else \n      let new_capacity = min Sys.max_array_length s in \n      let new_d_arr = Array.make new_capacity null in \n       unsafe_blit d_arr 0 new_d_arr 0 d_len;\n      d.arr <- new_d_arr \n\n  let push (d : t) v  =\n    let d_len = d.len in\n    let d_arr = d.arr in \n    let d_arr_len = Array.length d_arr in\n    if d_arr_len = 0 then\n      begin \n        d.len <- 1 ;\n        d.arr <- [| v |]\n      end\n    else  \n      begin \n        if d_len = d_arr_len then \n          begin\n            if d_len >= Sys.max_array_length then \n              failwith \"exceeds max_array_length\";\n            let new_capacity = min Sys.max_array_length d_len * 2 \n            (* [d_len] can not be zero, so [*2] will enlarge   *)\n            in\n            let new_d_arr = Array.make new_capacity null in \n            d.arr <- new_d_arr;\n             unsafe_blit d_arr 0 new_d_arr 0 d_len ;\n          end;\n        d.len <- d_len + 1;\n        Array.unsafe_set d.arr d_len v\n      
end\n\n(** delete element at offset [idx], will raise exception when have invalid input *)\n  let delete (d : t) idx =\n    let d_len = d.len in \n    if idx < 0 || idx >= d_len then invalid_arg \"Vec.delete\" ;\n    let arr = d.arr in \n     unsafe_blit arr (idx + 1) arr idx  (d_len - idx - 1);\n    let idx = d_len - 1 in \n    d.len <- idx\n#ifdef TYPE_INT\n#else \n    ;\n    Array.unsafe_set arr idx  null\n#endif    \n    \n(** pop the last element, a specialized version of [delete] *)\n  let pop (d : t) = \n    let idx  = d.len - 1  in\n    if idx < 0 then invalid_arg \"Vec.pop\";\n    d.len <- idx\n#ifdef TYPE_INT    \n#else     \n    ;    \n    Array.unsafe_set d.arr idx null\n#endif\n  \n(** pop and return the last element *)  \n  let get_last_and_pop (d : t) = \n    let idx  = d.len - 1  in\n    if idx < 0 then invalid_arg \"Vec.get_last_and_pop\";\n    let last = Array.unsafe_get d.arr idx in \n    d.len <- idx \n#ifdef TYPE_INT    \n#else \n    ;\n    Array.unsafe_set d.arr idx null\n#endif\n    ;\n    last \n\n(** delete elements start from [idx] with length [len] *)\n  let delete_range (d : t) idx len =\n    let d_len = d.len in \n    if len < 0 || idx < 0 || idx + len > d_len then invalid_arg  \"Vec.delete_range\"  ;\n    let arr = d.arr in \n     unsafe_blit arr (idx + len) arr idx (d_len  - idx - len);\n    d.len <- d_len - len\n#ifdef TYPE_INT \n#else    \n    ;\n    for i = d_len - len to d_len - 1 do\n      Array.unsafe_set arr i null\n    done\n#endif    \n\n(** delete elements from [idx] with length [len] return the deleted elements as a new vec*)\n  let get_and_delete_range (d : t) idx len : t = \n    let d_len = d.len in \n    if len < 0 || idx < 0 || idx + len > d_len then invalid_arg  \"Vec.get_and_delete_range\"  ;\n    let arr = d.arr in \n    let value =  unsafe_sub arr idx len in\n     unsafe_blit arr (idx + len) arr idx (d_len  - idx - len);\n    d.len <- d_len - len; \n#ifdef TYPE_INT    \n#else \n    for i = d_len - len to d_len - 1 
do\n      Array.unsafe_set arr i null\n    done;\n#endif     \n    {len = len ; arr = value}\n\n\n  (** Below are simple wrapper around normal Array operations *)  \n\n  let clear (d : t ) =\n#ifdef TYPE_INT\n#else \n    for i = 0 to d.len - 1 do \n      Array.unsafe_set d.arr i null\n    done;\n#endif    \n    d.len <- 0\n\n\n\n  let inplace_filter f (d : t) : unit = \n    let d_arr = d.arr in     \n    let d_len = d.len in\n    let p = ref 0 in\n    for i = 0 to d_len - 1 do \n      let x = Array.unsafe_get d_arr i in \n      if f x then \n        begin \n          let curr_p = !p in \n          (if curr_p <> i then \n             Array.unsafe_set d_arr curr_p x) ;\n          incr p\n        end\n    done ;\n    let last = !p  in \n#ifdef TYPE_INT \n    d.len <-  last \n    (* INT , there is not need to reset it, since it will cause GC behavior *)\n#else         \n    delete_range d last  (d_len - last)\n#endif \n\n  let inplace_filter_from start f (d : t) : unit = \n    if start < 0 then invalid_arg \"Vec.inplace_filter_from\"; \n    let d_arr = d.arr in     \n    let d_len = d.len in\n    let p = ref start in    \n    for i = start to d_len - 1 do \n      let x = Array.unsafe_get d_arr i in \n      if f x then \n        begin \n          let curr_p = !p in \n          (if curr_p <> i then \n             Array.unsafe_set d_arr curr_p x) ;\n          incr p\n        end\n    done ;\n    let last = !p  in \n#ifdef TYPE_INT \n    d.len <-  last \n#else         \n    delete_range d last  (d_len - last)\n#endif \n\n\n(** inplace filter the elements and accumulate the non-filtered elements *)\n  let inplace_filter_with  f ~cb_no acc (d : t)  = \n    let d_arr = d.arr in     \n    let p = ref 0 in\n    let d_len = d.len in\n    let acc = ref acc in \n    for i = 0 to d_len - 1 do \n      let x = Array.unsafe_get d_arr i in \n      if f x then \n        begin \n          let curr_p = !p in \n          (if curr_p <> i then \n             Array.unsafe_set d_arr curr_p x) 
;\n          incr p\n        end\n      else \n        acc := cb_no  x  !acc\n    done ;\n    let last = !p  in \n#ifdef TYPE_INT \n    d.len <-  last \n    (* INT , there is not need to reset it, since it will cause GC behavior *)\n#else         \n    delete_range d last  (d_len - last)\n#endif \n    ; !acc \n\n\n\n#ifdef TYPE_FUNCTOR\nend\n#endif \n"
  },
  {
    "path": "analysis/vendor/ext/vec.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nmodule Make (Resize : Vec_gen.ResizeType) : Vec_gen.S with type elt = Resize.t\n"
  },
  {
    "path": "analysis/vendor/ext/vec_gen.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nmodule type ResizeType = sig\n  type t\n\n  val null : t\n  (* used to populate new allocated array checkout {!Obj.new_block} for more performance *)\nend\n\nmodule type S = sig\n  type elt\n\n  type t\n\n  val length : t -> int\n\n  val compact : t -> unit\n\n  val singleton : elt -> t\n\n  val empty : unit -> t\n\n  val make : int -> t\n\n  val init : int -> (int -> elt) -> t\n\n  val is_empty : t -> bool\n\n  val of_sub_array : elt array -> int -> int -> t\n\n  val unsafe_internal_array : t -> elt array\n  (** Exposed for some APIs which only take array as input, when exposed *)\n\n  val reserve : t -> int -> unit\n\n  val push : t -> elt -> unit\n\n  val delete : t -> int -> unit\n\n  val pop : t -> unit\n\n  val get_last_and_pop : t -> elt\n\n  val delete_range : t -> int -> int -> unit\n\n  val get_and_delete_range : t -> int -> int -> t\n\n  val clear : t -> unit\n\n  val reset : t -> unit\n\n  val to_list : t -> elt list\n\n  val of_list : elt list -> t\n\n  val to_array : t -> elt array\n\n  val of_array : elt array -> t\n\n  val copy : t -> t\n\n  val reverse_in_place : t -> unit\n\n  val iter : t -> (elt -> unit) -> unit\n\n  val iteri : t -> (int -> elt -> unit) -> unit\n\n  val iter_range : t -> from:int -> to_:int -> (elt -> unit) -> unit\n\n  val iteri_range : t -> from:int -> to_:int -> (int -> elt -> unit) -> unit\n\n  val map : (elt -> elt) -> t -> t\n\n  val mapi : (int -> elt -> elt) -> t -> t\n\n  val map_into_array : (elt -> 'f) -> t -> 'f array\n\n  val map_into_list : (elt -> 'f) -> t -> 'f list\n\n  val fold_left : ('f -> elt -> 'f) -> 'f -> t -> 'f\n\n  val fold_right : (elt -> 'g -> 'g) -> t -> 'g -> 'g\n\n  val filter : (elt -> bool) -> t -> t\n\n  val inplace_filter : (elt -> bool) -> t -> unit\n\n  val inplace_filter_with :\n    (elt -> bool) -> cb_no:(elt -> 'a -> 'a) -> 'a -> t -> 'a\n\n  val inplace_filter_from : int -> (elt -> bool) -> t -> unit\n\n  val equal : (elt -> elt -> bool) -> t -> t -> bool\n\n  val get : t -> int 
-> elt\n\n  val unsafe_get : t -> int -> elt\n\n  val last : t -> elt\n\n  val capacity : t -> int\n\n  val exists : (elt -> bool) -> t -> bool\n\n  val sub : t -> int -> int -> t\nend\n"
  },
  {
    "path": "analysis/vendor/ext/vec_int.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ninclude Vec_gen.S with type elt = int\n"
  },
  {
    "path": "analysis/vendor/ext/warnings.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Pierre Weis && Damien Doligez, INRIA Rocquencourt          *)\n(*                                                                        *)\n(*   Copyright 1998 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* When you change this, you need to update the documentation:\n   - man/ocamlc.m\n   - man/ocamlopt.m\n   - manual/manual/cmds/comp.etex\n   - manual/manual/cmds/native.etex\n*)\n\ntype loc = {\n  loc_start: Lexing.position;\n  loc_end: Lexing.position;\n  loc_ghost: bool;\n}\n\ntype top_level_unit_help = FunctionCall | Other\n\ntype t =\n  | Comment_start (*  1 *)\n  | Comment_not_end (*  2 *)\n  | Deprecated of string * loc * loc (*  3 *)\n  | Fragile_match of string (*  4 *)\n  | Partial_application (*  5 *)\n  | Method_override of string list (*  7 *)\n  | Partial_match of string (*  8 *)\n  | Non_closed_record_pattern of string (*  9 *)\n  | Statement_type (* 10 *)\n  | Unused_match (* 11 *)\n  | Unused_pat (* 12 *)\n  | Instance_variable_override of string list (* 13 *)\n  | Illegal_backslash (* 14 *)\n  | Implicit_public_methods of string list (* 15 *)\n  | 
Unerasable_optional_argument (* 16 *)\n  | Unused_argument (* 20 *)\n  | Nonreturning_statement (* 21 *)\n  | Preprocessor of string (* 22 *)\n  | Useless_record_with (* 23 *)\n  | Bad_module_name of string (* 24 *)\n  | All_clauses_guarded (* 8, used to be 25 *)\n  | Unused_var of string (* 26 *)\n  | Unused_var_strict of string (* 27 *)\n  | Wildcard_arg_to_constant_constr (* 28 *)\n  | Eol_in_string (* 29 *)\n  | Duplicate_definitions of string * string * string * string (*30 *)\n  | Unused_value_declaration of string (* 32 *)\n  | Unused_open of string (* 33 *)\n  | Unused_type_declaration of string (* 34 *)\n  | Unused_for_index of string (* 35 *)\n  | Unused_constructor of string * bool * bool (* 37 *)\n  | Unused_extension of string * bool * bool * bool (* 38 *)\n  | Unused_rec_flag (* 39 *)\n  | Ambiguous_name of string list * string list * bool (* 41 *)\n  | Nonoptional_label of string (* 43 *)\n  | Open_shadow_identifier of string * string (* 44 *)\n  | Open_shadow_label_constructor of string * string (* 45 *)\n  | Attribute_payload of string * string (* 47 *)\n  | Eliminated_optional_arguments of string list (* 48 *)\n  | No_cmi_file of string * string option (* 49 *)\n  | Bad_docstring of bool (* 50 *)\n  | Fragile_literal_pattern (* 52 *)\n  | Misplaced_attribute of string (* 53 *)\n  | Duplicated_attribute of string (* 54 *)\n  | Unreachable_case (* 56 *)\n  | Ambiguous_pattern of string list (* 57 *)\n  | Unused_module of string (* 60 *)\n  | Constraint_on_gadt (* 62 *)\n  | Bs_unused_attribute of string (* 101 *)\n  | Bs_polymorphic_comparison (* 102 *)\n  | Bs_ffi_warning of string (* 103 *)\n  | Bs_derive_warning of string (* 104 *)\n  | Bs_fragile_external of string (* 105 *)\n  | Bs_unimplemented_primitive of string (* 106 *)\n  | Bs_integer_literal_overflow (* 107 *)\n  | Bs_uninterpreted_delimiters of string (* 108 *)\n  | Bs_toplevel_expression_unit of\n      (string * top_level_unit_help) option (* 109 *)\n  | Bs_todo of string option (* 110 
*)\n\n(* If you remove a warning, leave a hole in the numbering.  NEVER change\n   the numbers of existing warnings.\n   If you add a new warning, add it at the end with a new number;\n   do NOT reuse one of the holes.\n*)\n\nlet number = function\n  | Comment_start -> 1\n  | Comment_not_end -> 2\n  | Deprecated _ -> 3\n  | Fragile_match _ -> 4\n  | Partial_application -> 5\n  | Method_override _ -> 7\n  | Partial_match _ -> 8\n  | Non_closed_record_pattern _ -> 9\n  | Statement_type -> 10\n  | Unused_match -> 11\n  | Unused_pat -> 12\n  | Instance_variable_override _ -> 13\n  | Illegal_backslash -> 14\n  | Implicit_public_methods _ -> 15\n  | Unerasable_optional_argument -> 16\n  | Unused_argument -> 20\n  | Nonreturning_statement -> 21\n  | Preprocessor _ -> 22\n  | Useless_record_with -> 23\n  | Bad_module_name _ -> 24\n  | All_clauses_guarded -> 8 (* used to be 25 *)\n  | Unused_var _ -> 26\n  | Unused_var_strict _ -> 27\n  | Wildcard_arg_to_constant_constr -> 28\n  | Eol_in_string -> 29\n  | Duplicate_definitions _ -> 30\n  | Unused_value_declaration _ -> 32\n  | Unused_open _ -> 33\n  | Unused_type_declaration _ -> 34\n  | Unused_for_index _ -> 35\n  | Unused_constructor _ -> 37\n  | Unused_extension _ -> 38\n  | Unused_rec_flag -> 39\n  | Ambiguous_name _ -> 41\n  | Nonoptional_label _ -> 43\n  | Open_shadow_identifier _ -> 44\n  | Open_shadow_label_constructor _ -> 45\n  | Attribute_payload _ -> 47\n  | Eliminated_optional_arguments _ -> 48\n  | No_cmi_file _ -> 49\n  | Bad_docstring _ -> 50\n  | Fragile_literal_pattern -> 52\n  | Misplaced_attribute _ -> 53\n  | Duplicated_attribute _ -> 54\n  | Unreachable_case -> 56\n  | Ambiguous_pattern _ -> 57\n  | Unused_module _ -> 60\n  | Constraint_on_gadt -> 62\n  | Bs_unused_attribute _ -> 101\n  | Bs_polymorphic_comparison -> 102\n  | Bs_ffi_warning _ -> 103\n  | Bs_derive_warning _ -> 104\n  | Bs_fragile_external _ -> 105\n  | Bs_unimplemented_primitive _ -> 106\n  | Bs_integer_literal_overflow -> 107\n  | 
Bs_uninterpreted_delimiters _ -> 108\n  | Bs_toplevel_expression_unit _ -> 109\n  | Bs_todo _ -> 110\n\nlet last_warning_number = 110\n\nlet letter_all =\n  let rec loop i = if i = 0 then [] else i :: loop (i - 1) in\n  loop last_warning_number\n\n(* Must be the max number returned by the [number] function. *)\n\nlet letter = function\n  | 'a' -> letter_all\n  | 'b' -> []\n  | 'c' -> [1; 2]\n  | 'd' -> [3]\n  | 'e' -> [4]\n  | 'f' -> [5]\n  | 'g' -> []\n  | 'h' -> []\n  | 'i' -> []\n  | 'j' -> []\n  | 'k' -> [32; 33; 34; 35; 36; 37; 38; 39]\n  | 'l' -> [6]\n  | 'm' -> [7]\n  | 'n' -> []\n  | 'o' -> []\n  | 'p' -> [8]\n  | 'q' -> []\n  | 'r' -> [9]\n  | 's' -> [10]\n  | 't' -> []\n  | 'u' -> [11; 12]\n  | 'v' -> [13]\n  | 'w' -> []\n  | 'x' -> [14; 15; 16; 17; 18; 19; 20; 21; 22; 23; 24; 30]\n  | 'y' -> [26]\n  | 'z' -> [27]\n  | _ -> assert false\n\ntype state = {active: bool array; error: bool array}\n\nlet current =\n  ref\n    {\n      active = Array.make (last_warning_number + 1) true;\n      error = Array.make (last_warning_number + 1) false;\n    }\n\nlet disabled = ref false\n\nlet without_warnings f = Misc.protect_refs [Misc.R (disabled, true)] f\n\nlet backup () = !current\n\nlet restore x = current := x\n\nlet is_active x = (not !disabled) && !current.active.(number x)\n\nlet is_error x = (not !disabled) && !current.error.(number x)\n\nlet mk_lazy f =\n  let state = backup () in\n  lazy\n    (let prev = backup () in\n     restore state;\n     try\n       let r = f () in\n       restore prev;\n       r\n     with exn ->\n       restore prev;\n       raise exn)\n\nlet parse_opt error active flags s =\n  let set i = flags.(i) <- true in\n  let clear i = flags.(i) <- false in\n  let set_all i =\n    active.(i) <- true;\n    error.(i) <- true\n  in\n  let error () = raise (Arg.Bad \"Ill-formed list of warnings\") in\n  let rec get_num n i =\n    if i >= String.length s then (i, n)\n    else\n      match s.[i] with\n      | '0' .. 
'9' ->\n        get_num ((10 * n) + Char.code s.[i] - Char.code '0') (i + 1)\n      | _ -> (i, n)\n  in\n  let get_range i =\n    let i, n1 = get_num 0 i in\n    if i + 2 < String.length s && s.[i] = '.' && s.[i + 1] = '.' then (\n      let i, n2 = get_num 0 (i + 2) in\n      if n2 < n1 then error ();\n      (i, n1, n2))\n    else (i, n1, n1)\n  in\n  let rec loop i =\n    if i >= String.length s then ()\n    else\n      match s.[i] with\n      | 'A' .. 'Z' ->\n        List.iter set (letter (Char.lowercase_ascii s.[i]));\n        loop (i + 1)\n      | 'a' .. 'z' ->\n        List.iter clear (letter s.[i]);\n        loop (i + 1)\n      | '+' -> loop_letter_num set (i + 1)\n      | '-' -> loop_letter_num clear (i + 1)\n      | '@' -> loop_letter_num set_all (i + 1)\n      | _ -> error ()\n  and loop_letter_num myset i =\n    if i >= String.length s then error ()\n    else\n      match s.[i] with\n      | '0' .. '9' ->\n        let i, n1, n2 = get_range i in\n        for n = n1 to Ext_pervasives.min_int n2 last_warning_number do\n          myset n\n        done;\n        loop i\n      | 'A' .. 'Z' ->\n        List.iter myset (letter (Char.lowercase_ascii s.[i]));\n        loop (i + 1)\n      | 'a' .. 
'z' ->\n        List.iter myset (letter s.[i]);\n        loop (i + 1)\n      | _ -> error ()\n  in\n  loop 0\n\nlet parse_options errflag s =\n  let error = Array.copy !current.error in\n  let active = Array.copy !current.active in\n  parse_opt error active (if errflag then error else active) s;\n  current := {error; active}\n\nlet reset () =\n  parse_options false Bsc_warnings.defaults_w;\n  parse_options true Bsc_warnings.defaults_warn_error\n\nlet () = reset ()\n\nlet message = function\n  | Comment_start -> \"this is the start of a comment.\"\n  | Comment_not_end -> \"this is not the end of a comment.\"\n  | Deprecated (s, _, _) ->\n    (* Reduce \\r\\n to \\n:\n         - Prevents any \\r characters being printed on Unix when processing\n           Windows sources\n         - Prevents \\r\\r\\n being generated on Windows, which affects the\n           testsuite\n    *)\n    \"deprecated: \" ^ Misc.normalise_eol s\n  | Fragile_match \"\" -> \"this pattern-matching is fragile.\"\n  | Fragile_match s ->\n    \"this pattern-matching is fragile.\\n\\\n     It will remain exhaustive when constructors are added to type \" ^ s ^ \".\"\n  | Partial_application ->\n    \"this function application is partial,\\nmaybe some arguments are missing.\"\n  | Method_override [lab] -> \"the method \" ^ lab ^ \" is overridden.\"\n  | Method_override (cname :: slist) ->\n    String.concat \" \"\n      (\"the following methods are overridden by the class\" :: cname :: \":\\n \"\n     :: slist)\n  | Method_override [] -> assert false\n  | Partial_match \"\" ->\n    \"You forgot to handle a possible case here, though we don't have more \\\n     information on the value.\"\n  | Partial_match s ->\n    \"You forgot to handle a possible case here, for example: \\n  \" ^ s\n  | Non_closed_record_pattern s ->\n    \"the following labels are not bound in this record pattern: \" ^ s\n    ^ \"\\nEither bind these labels explicitly or add ', _' to the pattern.\"\n  | Statement_type ->\n    
\"This expression returns a value, but you're not doing anything with it. \\\n     If this is on purpose, wrap it with `ignore`.\"\n  | Unused_match -> \"this match case is unused.\"\n  | Unused_pat -> \"this sub-pattern is unused.\"\n  | Instance_variable_override [lab] ->\n    \"the instance variable \" ^ lab ^ \" is overridden.\\n\"\n    ^ \"The behaviour changed in ocaml 3.10 (previous behaviour was hiding.)\"\n  | Instance_variable_override (cname :: slist) ->\n    String.concat \" \"\n      (\"the following instance variables are overridden by the class\" :: cname\n     :: \":\\n \" :: slist)\n    ^ \"\\nThe behaviour changed in ocaml 3.10 (previous behaviour was hiding.)\"\n  | Instance_variable_override [] -> assert false\n  | Illegal_backslash -> \"illegal backslash escape in string.\"\n  | Implicit_public_methods l ->\n    \"the following private methods were made public implicitly:\\n \"\n    ^ String.concat \" \" l ^ \".\"\n  | Unerasable_optional_argument ->\n    String.concat \"\"\n      [\n        \"This optional parameter in final position will, in practice, not be \\\n         optional.\\n\";\n        \"  Reorder the parameters so that at least one non-optional one is in \\\n         final position or, if all parameters are optional, insert a final \\\n         ().\\n\\n\";\n        \"  Explanation: If the final parameter is optional, it'd be unclear \\\n         whether a function application that omits it should be considered \\\n         fully applied, or partially applied. Imagine writing `let title = \\\n         display(\\\"hello!\\\")`, only to realize `title` isn't your desired \\\n         result, but a curried call that takes a final optional argument, e.g. \\\n         `~showDate`.\\n\\n\";\n        \"  Formal rule: an optional argument is considered intentionally \\\n         omitted when the 1st positional (i.e. 
neither labeled nor optional) \\\n         argument defined after it is passed in.\";\n      ]\n  | Unused_argument -> \"this argument will not be used by the function.\"\n  | Nonreturning_statement ->\n    \"this statement never returns (or has an unsound type.)\"\n  | Preprocessor s -> s\n  | Useless_record_with ->\n    \"All the fields are already explicitly listed in this record. You can \\\n     remove the `...` spread.\"\n  | Bad_module_name modname ->\n    \"This file's name is potentially invalid. The build systems conventionally \\\n     turn a file name into a module name by upper-casing the first letter. \"\n    ^ modname ^ \" isn't a valid module name.\\n\"\n    ^ \"Note: some build systems might e.g. turn kebab-case into CamelCase \\\n       module, which is why this isn't a hard error.\"\n  | All_clauses_guarded ->\n    \"this pattern-matching is not exhaustive.\\n\\\n     All clauses in this pattern-matching are guarded.\"\n  | Unused_var v | Unused_var_strict v -> \"unused variable \" ^ v ^ \".\"\n  | Wildcard_arg_to_constant_constr ->\n    \"wildcard pattern given as argument to a constant constructor\"\n  | Eol_in_string ->\n    \"unescaped end-of-line in a string constant (non-portable code)\"\n  | Duplicate_definitions (kind, cname, tc1, tc2) ->\n    Printf.sprintf \"the %s %s is defined in both types %s and %s.\" kind cname\n      tc1 tc2\n  | Unused_value_declaration v -> \"unused value \" ^ v ^ \".\"\n  | Unused_open s -> \"unused open \" ^ s ^ \".\"\n  | Unused_type_declaration s -> \"unused type \" ^ s ^ \".\"\n  | Unused_for_index s -> \"unused for-loop index \" ^ s ^ \".\"\n  | Unused_constructor (s, false, false) -> \"unused constructor \" ^ s ^ \".\"\n  | Unused_constructor (s, true, _) ->\n    \"constructor \" ^ s\n    ^ \" is never used to build values.\\n\\\n       (However, this constructor appears in patterns.)\"\n  | Unused_constructor (s, false, true) ->\n    \"constructor \" ^ s\n    ^ \" is never used to build values.\\nIts 
type is exported as a private type.\"\n  | Unused_extension (s, is_exception, cu_pattern, cu_privatize) -> (\n    let kind = if is_exception then \"exception\" else \"extension constructor\" in\n    let name = kind ^ \" \" ^ s in\n    match (cu_pattern, cu_privatize) with\n    | false, false -> \"unused \" ^ name\n    | true, _ ->\n      name\n      ^ \" is never used to build values.\\n\\\n         (However, this constructor appears in patterns.)\"\n    | false, true ->\n      name\n      ^ \" is never used to build values.\\n\\\n         It is exported or rebound as a private extension.\")\n  | Unused_rec_flag -> \"unused rec flag.\"\n  | Ambiguous_name ([s], tl, false) ->\n    s ^ \" belongs to several types: \" ^ String.concat \" \" tl\n    ^ \"\\nThe first one was selected. Please disambiguate if this is wrong.\"\n  | Ambiguous_name (_, _, false) -> assert false\n  | Ambiguous_name (_slist, tl, true) ->\n    \"these field labels belong to several types: \" ^ String.concat \" \" tl\n    ^ \"\\nThe first one was selected. Please disambiguate if this is wrong.\"\n  | Nonoptional_label s -> \"the label \" ^ s ^ \" is not optional.\"\n  | Open_shadow_identifier (kind, s) ->\n    Printf.sprintf\n      \"this open statement shadows the %s identifier %s (which is later used)\"\n      kind s\n  | Open_shadow_label_constructor (kind, s) ->\n    Printf.sprintf \"this open statement shadows the %s %s (which is later used)\"\n      kind s\n  | Attribute_payload (a, s) ->\n    Printf.sprintf \"illegal payload for attribute '%s'.\\n%s\" a s\n  | Eliminated_optional_arguments sl ->\n    Printf.sprintf \"implicit elimination of optional argument%s %s\"\n      (if List.length sl = 1 then \"\" else \"s\")\n      (String.concat \", \" sl)\n  | No_cmi_file (name, None) ->\n    \"no cmi file was found in path for module \" ^ name\n  | No_cmi_file (name, Some msg) ->\n    Printf.sprintf \"no valid cmi file was found in path for module %s. 
%s\" name\n      msg\n  | Bad_docstring unattached ->\n    if unattached then \"unattached documentation comment (ignored)\"\n    else \"ambiguous documentation comment\"\n  | Fragile_literal_pattern ->\n    Printf.sprintf\n      \"Code should not depend on the actual values of\\n\\\n       this constructor's arguments. They are only for information\\n\\\n       and may change in future versions. (See manual section 8.5)\"\n  | Unreachable_case ->\n    \"this match case is unreachable.\\n\\\n     Consider replacing it with a refutation case '<pat> -> .'\"\n  | Misplaced_attribute attr_name ->\n    Printf.sprintf \"the %S attribute cannot appear in this context\" attr_name\n  | Duplicated_attribute attr_name ->\n    Printf.sprintf \"the %S attribute is used more than once on this expression\"\n      attr_name\n  | Ambiguous_pattern vars ->\n    let msg =\n      let vars = List.sort String.compare vars in\n      match vars with\n      | [] -> assert false\n      | [x] -> \"variable \" ^ x\n      | _ :: _ -> \"variables \" ^ String.concat \",\" vars\n    in\n    Printf.sprintf\n      \"Ambiguous or-pattern variables under guard;\\n\\\n       %s may match different arguments. 
(See manual section 8.5)\"\n      msg\n  | Unused_module s -> \"unused module \" ^ s ^ \".\"\n  | Constraint_on_gadt ->\n    \"Type constraints do not apply to GADT cases of variant types.\"\n  | Bs_unused_attribute s ->\n    \"Unused attribute: @\" ^ s\n    ^ \"\\n\\\n       This attribute has no effect here.\\n\\\n       For example, some attributes are only meaningful in externals.\\n\"\n  | Bs_polymorphic_comparison ->\n    \"Polymorphic comparison introduced (maybe unsafe)\"\n  | Bs_ffi_warning s -> \"FFI warning: \" ^ s\n  | Bs_derive_warning s -> \"@deriving warning: \" ^ s\n  | Bs_fragile_external s ->\n    s\n    ^ \" : using an empty string as a shorthand to infer the external's name \\\n       from the value's name is dangerous when refactoring, and therefore \\\n       deprecated\"\n  | Bs_unimplemented_primitive s -> \"Unimplemented primitive used:\" ^ s\n  | Bs_integer_literal_overflow ->\n    \"Integer literal exceeds the range of representable integers of type int\"\n  | Bs_uninterpreted_delimiters s -> \"Uninterpreted delimiters \" ^ s\n  | Bs_toplevel_expression_unit help ->\n    Printf.sprintf\n      \"This%sis at the top level and is expected to return `unit`. But it's \\\n       returning %s.\\n\\n\\\n      \\  In ReScript, anything at the top level must evaluate to `unit`. 
You \\\n       can fix this by assigning the expression to a value, or piping it into \\\n       the `ignore` function.%s\"\n      (match help with\n      | Some (_, FunctionCall) -> \" function call \"\n      | _ -> \" \")\n      (match help with\n      | Some (return_type, _) -> Printf.sprintf \"`%s`\" return_type\n      | None -> \"something that is not `unit`\")\n      (match help with\n      | Some (_, help_typ) ->\n        let help_text =\n          match help_typ with\n          | FunctionCall -> \"yourFunctionCall()\"\n          | Other -> \"yourExpression\"\n        in\n        Printf.sprintf\n          \"\\n\\n\\\n          \\  Possible solutions:\\n\\\n          \\  - Assigning to a value that is then ignored: `let _ = %s`\\n\\\n          \\  - Piping into the built-in ignore function to ignore the result: \\\n           `%s->ignore`\"\n          help_text help_text\n      | _ -> \"\")\n  | Bs_todo maybe_text ->\n    (match maybe_text with\n    | None -> \"Todo found.\"\n    | Some todo -> \"Todo found: \" ^ todo)\n    ^ \"\\n\\n\\\n      \\  This code is not implemented yet and will crash at runtime. 
Make sure \\\n       you implement this before running the code.\"\n\nlet sub_locs = function\n  | Deprecated (_, def, use) ->\n    [(def, \"Definition\"); (use, \"Expected signature\")]\n  | _ -> []\n\nlet has_warnings = ref false\n\nlet nerrors = ref 0\n\ntype reporting_information = {\n  number: int;\n  message: string;\n  is_error: bool;\n  sub_locs: (loc * string) list;\n}\n\nlet report w =\n  match is_active w with\n  | false -> `Inactive\n  | true ->\n    has_warnings := true;\n    if is_error w then incr nerrors;\n    `Active\n      {\n        number = number w;\n        message = message w;\n        is_error = is_error w;\n        sub_locs = sub_locs w;\n      }\n\nexception Errors\n\nlet reset_fatal () = nerrors := 0\n\nlet check_fatal () =\n  if !nerrors > 0 then (\n    nerrors := 0;\n    raise Errors)\n\nlet descriptions =\n  [\n    (1, \"Suspicious-looking start-of-comment mark.\");\n    (2, \"Suspicious-looking end-of-comment mark.\");\n    (3, \"Deprecated feature.\");\n    ( 4,\n      \"Fragile pattern matching: matching that will remain complete even\\n\\\n      \\    if additional constructors are added to one of the variant types\\n\\\n      \\    matched.\" );\n    ( 5,\n      \"Partially applied function: expression whose result has function\\n\\\n      \\    type and is ignored.\" );\n    (6, \"Label omitted in function application.\");\n    (7, \"Method overridden.\");\n    (8, \"Partial match: missing cases in pattern-matching.\");\n    (9, \"Missing fields in a record pattern.\");\n    ( 10,\n      \"Expression on the left-hand side of a sequence that doesn't have type\\n\\\n      \\    \\\"unit\\\" (and that is not a function, see warning number 5).\" );\n    (11, \"Redundant case in a pattern matching (unused match case).\");\n    (12, \"Redundant sub-pattern in a pattern-matching.\");\n    (13, \"Instance variable overridden.\");\n    (14, \"Illegal backslash escape in a string constant.\");\n    (15, \"Private method made public 
implicitly.\");\n    (16, \"Unerasable optional argument.\");\n    (17, \"Undeclared virtual method.\");\n    (18, \"Non-principal type.\");\n    (19, \"Type without principality.\");\n    (20, \"Unused function argument.\");\n    (21, \"Non-returning statement.\");\n    (22, \"Preprocessor warning.\");\n    (23, \"Useless record \\\"with\\\" clause.\");\n    ( 24,\n      \"Bad module name: the source file name is not a valid OCaml module name.\"\n    );\n    (25, \"Deprecated: now part of warning 8.\");\n    ( 26,\n      \"Suspicious unused variable: unused variable that is bound\\n\\\n      \\    with \\\"let\\\" or \\\"as\\\", and doesn't start with an underscore (\\\"_\\\")\\n\\\n      \\    character.\" );\n    ( 27,\n      \"Innocuous unused variable: unused variable that is not bound with\\n\\\n      \\    \\\"let\\\" nor \\\"as\\\", and doesn't start with an underscore (\\\"_\\\")\\n\\\n      \\    character.\" );\n    (28, \"Wildcard pattern given as argument to a constant constructor.\");\n    (29, \"Unescaped end-of-line in a string constant (non-portable code).\");\n    ( 30,\n      \"Two labels or constructors of the same name are defined in two\\n\\\n      \\    mutually recursive types.\" );\n    (31, \"A module is linked twice in the same executable.\");\n    (32, \"Unused value declaration.\");\n    (33, \"Unused open statement.\");\n    (34, \"Unused type declaration.\");\n    (35, \"Unused for-loop index.\");\n    (36, \"Unused ancestor variable.\");\n    (37, \"Unused constructor.\");\n    (38, \"Unused extension constructor.\");\n    (39, \"Unused rec flag.\");\n    (41, \"Ambiguous constructor or label name.\");\n    (43, \"Nonoptional label applied as optional.\");\n    (44, \"Open statement shadows an already defined identifier.\");\n    (45, \"Open statement shadows an already defined label or constructor.\");\n    (46, \"Error in environment variable.\");\n    (47, \"Illegal attribute payload.\");\n    (48, \"Implicit elimination of 
optional arguments.\");\n    (49, \"Absent cmi file when looking up module alias.\");\n    (50, \"Unexpected documentation comment.\");\n    (51, \"Warning on non-tail calls if @tailcall present.\");\n    (52, \"Fragile constant pattern.\");\n    (53, \"Attribute cannot appear in this context\");\n    (54, \"Attribute used more than once on an expression\");\n    (55, \"Inlining impossible\");\n    (56, \"Unreachable case in a pattern-matching (based on type information).\");\n    (57, \"Ambiguous or-pattern variables under guard\");\n    (59, \"Assignment to non-mutable value\");\n    (60, \"Unused module declaration\");\n    (61, \"Unboxable type in primitive declaration\");\n    (62, \"Type constraint on GADT type declaration\");\n    (101, \"Unused bs attributes\");\n    (102, \"Polymorphic comparison introduced (maybe unsafe)\");\n    (103, \"Fragile FFI definitions\");\n    (104, \"@deriving warning with customized message \");\n    ( 105,\n      \"External name is inferred from val name is unsafe from refactoring when \\\n       changing value name\" );\n    (106, \"Unimplemented primitive used:\");\n    ( 107,\n      \"Integer literal exceeds the range of representable integers of type int\"\n    );\n    (108, \"Uninterpreted delimiters (for unicode)\");\n    (109, \"Toplevel expression has unit type\");\n    (110, \"Todo found\");\n  ]\n\nlet help_warnings () =\n  List.iter (fun (i, s) -> Printf.printf \"%3i %s\\n\" i s) descriptions;\n  print_endline \"  A all warnings\";\n  for i = Char.code 'b' to Char.code 'z' do\n    let c = Char.chr i in\n    match letter c with\n    | [] -> ()\n    | [n] ->\n      Printf.printf \"  %c Alias for warning %i.\\n\" (Char.uppercase_ascii c) n\n    | l ->\n      Printf.printf \"  %c warnings %s.\\n\" (Char.uppercase_ascii c)\n        (String.concat \", \" (List.map string_of_int l))\n  done;\n  exit 0\n"
  },
  {
    "path": "analysis/vendor/ext/warnings.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Pierre Weis && Damien Doligez, INRIA Rocquencourt          *)\n(*                                                                        *)\n(*   Copyright 1998 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype loc = {\n  loc_start: Lexing.position;\n  loc_end: Lexing.position;\n  loc_ghost: bool;\n}\n\ntype top_level_unit_help = FunctionCall | Other\n\ntype t =\n  | Comment_start (*  1 *)\n  | Comment_not_end (*  2 *)\n  | Deprecated of string * loc * loc (*  3 *)\n  | Fragile_match of string (*  4 *)\n  | Partial_application (*  5 *)\n  | Method_override of string list (*  7 *)\n  | Partial_match of string (*  8 *)\n  | Non_closed_record_pattern of string (*  9 *)\n  | Statement_type (* 10 *)\n  | Unused_match (* 11 *)\n  | Unused_pat (* 12 *)\n  | Instance_variable_override of string list (* 13 *)\n  | Illegal_backslash (* 14 *)\n  | Implicit_public_methods of string list (* 15 *)\n  | Unerasable_optional_argument (* 16 *)\n  | Unused_argument (* 20 *)\n  | Nonreturning_statement (* 21 *)\n  | Preprocessor of string (* 22 *)\n  | Useless_record_with (* 23 *)\n  | 
Bad_module_name of string (* 24 *)\n  | All_clauses_guarded (* 8, used to be 25 *)\n  | Unused_var of string (* 26 *)\n  | Unused_var_strict of string (* 27 *)\n  | Wildcard_arg_to_constant_constr (* 28 *)\n  | Eol_in_string (* 29 *)\n  | Duplicate_definitions of string * string * string * string (* 30 *)\n  | Unused_value_declaration of string (* 32 *)\n  | Unused_open of string (* 33 *)\n  | Unused_type_declaration of string (* 34 *)\n  | Unused_for_index of string (* 35 *)\n  | Unused_constructor of string * bool * bool (* 37 *)\n  | Unused_extension of string * bool * bool * bool (* 38 *)\n  | Unused_rec_flag (* 39 *)\n  | Ambiguous_name of string list * string list * bool (* 41 *)\n  | Nonoptional_label of string (* 43 *)\n  | Open_shadow_identifier of string * string (* 44 *)\n  | Open_shadow_label_constructor of string * string (* 45 *)\n  | Attribute_payload of string * string (* 47 *)\n  | Eliminated_optional_arguments of string list (* 48 *)\n  | No_cmi_file of string * string option (* 49 *)\n  | Bad_docstring of bool (* 50 *)\n  | Fragile_literal_pattern (* 52 *)\n  | Misplaced_attribute of string (* 53 *)\n  | Duplicated_attribute of string (* 54 *)\n  | Unreachable_case (* 56 *)\n  | Ambiguous_pattern of string list (* 57 *)\n  | Unused_module of string (* 60 *)\n  | Constraint_on_gadt (* 62 *)\n  | Bs_unused_attribute of string (* 101 *)\n  | Bs_polymorphic_comparison (* 102 *)\n  | Bs_ffi_warning of string (* 103 *)\n  | Bs_derive_warning of string (* 104 *)\n  | Bs_fragile_external of string (* 105 *)\n  | Bs_unimplemented_primitive of string (* 106 *)\n  | Bs_integer_literal_overflow (* 107 *)\n  | Bs_uninterpreted_delimiters of string (* 108 *)\n  | Bs_toplevel_expression_unit of\n      (string * top_level_unit_help) option (* 109 *)\n  | Bs_todo of string option (* 110 *)\n\nval parse_options : bool -> string -> unit\n\nval without_warnings : (unit -> 'a) -> 'a\n\nval is_active : t -> bool\n\nval is_error : t -> bool\n\ntype 
reporting_information = {\n  number: int;\n  message: string;\n  is_error: bool;\n  sub_locs: (loc * string) list;\n}\n\nval report : t -> [`Active of reporting_information | `Inactive]\n\nexception Errors\n\nval check_fatal : unit -> unit\n\nval reset_fatal : unit -> unit\n\nval help_warnings : unit -> unit\n\ntype state\n\nval backup : unit -> state\n\nval restore : state -> unit\n\nval mk_lazy : (unit -> 'a) -> 'a Lazy.t\n(** Like [Lazy.of_fun], but the function is applied with the warning settings at\n    the time [mk_lazy] is called. *)\n\nval has_warnings : bool ref\n\nval nerrors : int ref\n\nval message : t -> string\n\nval number : t -> int\n\nval reset : unit -> unit\n"
  },
  {
    "path": "analysis/vendor/js_parser/comment_attachment.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Ast = Flow_ast\nopen Flow_ast\nopen Parser_env\n\nlet id = Flow_ast_mapper.id\n\nlet map_loc = Flow_ast_mapper.map_loc\n\nlet map_opt = Flow_ast_mapper.map_opt\n\nlet id_list_last (map : 'a -> 'a) (lst : 'a list) : 'a list =\n  match List.rev lst with\n  | [] -> lst\n  | hd :: tl ->\n    let hd' = map hd in\n    if hd == hd' then\n      lst\n    else\n      List.rev (hd' :: tl)\n\n(* Mapper that removes all trailing comments that appear after a given position in an AST node *)\nclass ['loc] trailing_comments_remover ~after_pos =\n  object (this)\n    inherit ['loc] Flow_ast_mapper.mapper\n\n    method! syntax comments =\n      let open Syntax in\n      let { trailing; _ } = comments in\n      let trailing' =\n        List.filter (fun (loc, _) -> Loc.(pos_cmp loc.start after_pos < 0)) trailing\n      in\n      if List.length trailing = List.length trailing' then\n        comments\n      else\n        { comments with trailing = trailing' }\n\n    method! array _loc expr =\n      let open Ast.Expression.Array in\n      let { comments; _ } = expr in\n      id this#syntax_opt comments expr (fun comments' -> { expr with comments = comments' })\n\n    method! array_type t =\n      let open Ast.Type.Array in\n      let { comments; _ } = t in\n      id this#syntax_opt comments t (fun comments' -> { t with comments = comments' })\n\n    method! assignment _loc expr =\n      let open Ast.Expression.Assignment in\n      let { right; comments; _ } = expr in\n      let right' = this#expression right in\n      let comments' = this#syntax_opt comments in\n      if right == right' && comments == comments' then\n        expr\n      else\n        { expr with right = right'; comments = comments' }\n\n    method! 
binary _loc expr =\n      let open Ast.Expression.Binary in\n      let { right; comments; _ } = expr in\n      let right' = this#expression right in\n      let comments' = this#syntax_opt comments in\n      if right == right' && comments == comments' then\n        expr\n      else\n        { expr with right = right'; comments = comments' }\n\n    method! block _loc stmt =\n      let open Ast.Statement.Block in\n      let { comments; _ } = stmt in\n      id this#syntax_opt comments stmt (fun comments' -> { stmt with comments = comments' })\n\n    method! call _annot expr =\n      let open Ast.Expression.Call in\n      let { arguments; comments; _ } = expr in\n      let arguments' = this#call_arguments arguments in\n      let comments' = this#syntax_opt comments in\n      if arguments == arguments' && comments == comments' then\n        expr\n      else\n        { expr with arguments = arguments'; comments = comments' }\n\n    method! call_arguments arg_list =\n      let open Ast.Expression.ArgList in\n      let (loc, { arguments; comments }) = arg_list in\n      id this#syntax_opt comments arg_list (fun comments' ->\n          (loc, { arguments; comments = comments' })\n      )\n\n    method! call_type_args targs =\n      let open Ast.Expression.CallTypeArgs in\n      let (loc, { arguments; comments }) = targs in\n      id this#syntax_opt comments targs (fun comments' -> (loc, { arguments; comments = comments' }))\n\n    method! class_ _loc cls =\n      let open Ast.Class in\n      let { body; comments; _ } = cls in\n      let body' = this#class_body body in\n      let comments' = this#syntax_opt comments in\n      if body == body' && comments == comments' then\n        cls\n      else\n        { cls with body = body'; comments = comments' }\n\n    method! 
class_body body =\n      let open Ast.Class.Body in\n      let (loc, { body = _body; comments }) = body in\n      id this#syntax_opt comments body (fun comments' ->\n          (loc, { body = _body; comments = comments' })\n      )\n\n    method! class_extends _loc extends =\n      let open Ast.Class.Extends in\n      let { expr; targs; _ } = extends in\n      if targs = None then\n        id this#expression expr extends (fun expr' -> { extends with expr = expr' })\n      else\n        id (map_opt this#type_args) targs extends (fun targs' -> { extends with targs = targs' })\n\n    method! class_implements implements =\n      let open Ast.Class.Implements in\n      let (loc, { interfaces; comments }) = implements in\n      id (id_list_last this#class_implements_interface) interfaces implements (fun interfaces' ->\n          (loc, { interfaces = interfaces'; comments })\n      )\n\n    method! class_implements_interface interface =\n      let open Ast.Class.Implements.Interface in\n      let (loc, { id = id_; targs }) = interface in\n      if targs = None then\n        id this#identifier id_ interface (fun id' -> (loc, { id = id'; targs }))\n      else\n        id (map_opt this#type_args) targs interface (fun targs' ->\n            (loc, { id = id_; targs = targs' })\n        )\n\n    method! computed_key key =\n      let open Ast.ComputedKey in\n      let (loc, { expression; comments }) = key in\n      id this#syntax_opt comments key (fun comments' -> (loc, { expression; comments = comments' }))\n\n    method! conditional _loc expr =\n      let open Ast.Expression.Conditional in\n      let { alternate; comments; _ } = expr in\n      let alternate' = this#expression alternate in\n      let comments' = this#syntax_opt comments in\n      if alternate == alternate' && comments == comments' then\n        expr\n      else\n        { expr with alternate = alternate'; comments = comments' }\n\n    method! 
function_ _loc func =\n      let open Ast.Function in\n      let { body; comments; _ } = func in\n      let body' = this#function_body_any body in\n      let comments' = this#syntax_opt comments in\n      if body == body' && comments == comments' then\n        func\n      else\n        { func with body = body'; comments = comments' }\n\n    method! function_params (loc, params) =\n      let open Ast.Function.Params in\n      let { comments; _ } = params in\n      id this#syntax_opt comments (loc, params) (fun comments' ->\n          (loc, { params with comments = comments' })\n      )\n\n    method! function_type _loc func =\n      let open Ast.Type.Function in\n      let { return; comments; _ } = func in\n      let return' = this#type_ return in\n      let comments' = this#syntax_opt comments in\n      if return == return' && comments == comments' then\n        func\n      else\n        { func with return = return'; comments = comments' }\n\n    method! generic_identifier_type git =\n      let open Ast.Type.Generic.Identifier in\n      match git with\n      | Unqualified i -> id this#identifier i git (fun i -> Unqualified i)\n      | Qualified (loc, ({ id; _ } as qualified)) ->\n        let id' = this#identifier id in\n        if id == id' then\n          git\n        else\n          Qualified (loc, { qualified with id = id' })\n\n    method! import _loc expr =\n      let open Ast.Expression.Import in\n      let { comments; _ } = expr in\n      id this#syntax_opt comments expr (fun comments' -> { expr with comments = comments' })\n\n    method! interface_type _loc t =\n      let open Ast.Type.Interface in\n      let { body; comments; _ } = t in\n      let body' = map_loc this#object_type body in\n      let comments' = this#syntax_opt comments in\n      if body == body' && comments == comments' then\n        t\n      else\n        { t with body = body'; comments = comments' }\n\n    method! 
intersection_type _loc t =\n      let { Ast.Type.Intersection.types = (t0, t1, ts); comments } = t in\n      let (t1', ts') =\n        match ts with\n        | [] -> (this#type_ t1, [])\n        | _ -> (t1, id_list_last this#type_ ts)\n      in\n      let comments' = this#syntax_opt comments in\n      if t1 == t1' && ts == ts' && comments == comments' then\n        t\n      else\n        { Ast.Type.Intersection.types = (t0, t1', ts'); comments = comments' }\n\n    method! jsx_element _loc elem =\n      let open Ast.JSX in\n      let { comments; _ } = elem in\n      id this#syntax_opt comments elem (fun comments' -> { elem with comments = comments' })\n\n    method! jsx_fragment _loc frag =\n      let open Ast.JSX in\n      let { frag_comments = comments; _ } = frag in\n      id this#syntax_opt comments frag (fun comments' -> { frag with frag_comments = comments' })\n\n    method! logical _loc expr =\n      let open Ast.Expression.Logical in\n      let { right; comments; _ } = expr in\n      let right' = this#expression right in\n      let comments' = this#syntax_opt comments in\n      if right == right' && comments == comments' then\n        expr\n      else\n        { expr with right = right'; comments = comments' }\n\n    method! 
new_ _loc expr =\n      let open Ast.Expression.New in\n      let { callee; targs; arguments; comments } = expr in\n      let comments' = this#syntax_opt comments in\n      match (targs, arguments) with\n      (* new Callee<T>() *)\n      | (_, Some _) ->\n        let arguments' = map_opt this#call_arguments arguments in\n        if arguments == arguments' && comments == comments' then\n          expr\n        else\n          { expr with arguments = arguments'; comments = comments' }\n      (* new Callee<T> *)\n      | (Some _, _) ->\n        let targs' = map_opt this#call_type_args targs in\n        if targs == targs' && comments == comments' then\n          expr\n        else\n          { expr with targs = targs'; comments = comments' }\n      (* new Callee *)\n      | (None, None) ->\n        let callee' = this#expression callee in\n        if callee == callee' && comments == comments' then\n          expr\n        else\n          { expr with callee = callee'; comments = comments' }\n\n    method! member _loc expr =\n      let open Ast.Expression.Member in\n      let { property; comments; _ } = expr in\n      let property' = this#member_property property in\n      let comments' = this#syntax_opt comments in\n      if property == property' && comments == comments' then\n        expr\n      else\n        { expr with property = property'; comments = comments' }\n\n    method! object_ _loc expr =\n      let open Ast.Expression.Object in\n      let { comments; _ } = expr in\n      id this#syntax_opt comments expr (fun comments' -> { expr with comments = comments' })\n\n    method! object_type _loc obj =\n      let open Ast.Type.Object in\n      let { comments; _ } = obj in\n      id this#syntax_opt comments obj (fun comments' -> { obj with comments = comments' })\n\n    method! 
predicate pred =\n      let open Ast.Type.Predicate in\n      let (loc, { kind; comments }) = pred in\n      id this#syntax_opt comments pred (fun comments' -> (loc, { kind; comments = comments' }))\n\n    method! sequence _loc expr =\n      let open Ast.Expression.Sequence in\n      let { expressions; comments } = expr in\n      let expressions' = id_list_last this#expression expressions in\n      let comments' = this#syntax_opt comments in\n      if expressions == expressions' && comments == comments' then\n        expr\n      else\n        { expressions = expressions'; comments = comments' }\n\n    method! template_literal _loc expr =\n      let open Ast.Expression.TemplateLiteral in\n      let { comments; _ } = expr in\n      id this#syntax_opt comments expr (fun comments' -> { expr with comments = comments' })\n\n    method! tuple_type t =\n      let open Ast.Type.Tuple in\n      let { comments; _ } = t in\n      id this#syntax_opt comments t (fun comments' -> { t with comments = comments' })\n\n    method! type_cast _loc expr =\n      let open Ast.Expression.TypeCast in\n      let { comments; _ } = expr in\n      id this#syntax_opt comments expr (fun comments' -> { expr with comments = comments' })\n\n    method! type_params tparams =\n      let open Ast.Type.TypeParams in\n      let (loc, { params; comments }) = tparams in\n      id this#syntax_opt comments tparams (fun comments' -> (loc, { params; comments = comments' }))\n\n    method! union_type _loc t =\n      let { Ast.Type.Union.types = (t0, t1, ts); comments } = t in\n      let (t1', ts') =\n        match ts with\n        | [] -> (this#type_ t1, [])\n        | _ -> (t1, id_list_last this#type_ ts)\n      in\n      let comments' = this#syntax_opt comments in\n      if t1 == t1' && ts == ts' && comments == comments' then\n        t\n      else\n        { Ast.Type.Union.types = (t0, t1', ts'); comments = comments' }\n\n    method! 
variable_declarator ~kind decl =\n      let open Ast.Statement.VariableDeclaration.Declarator in\n      let (loc, { id = ident; init }) = decl in\n      match init with\n      | None ->\n        id (this#variable_declarator_pattern ~kind) ident decl (fun ident' ->\n            (loc, { id = ident'; init })\n        )\n      | Some init ->\n        id this#expression init decl (fun init' -> (loc, { id = ident; init = Some init' }))\n  end\n\ntype trailing_and_remover_result = {\n  trailing: Loc.t Comment.t list;\n  remove_trailing: 'a. 'a -> (Loc.t trailing_comments_remover -> 'a -> 'a) -> 'a;\n}\n\n(* Returns a remover function which removes comments beginning after the previous token.\n   No trailing comments are returned, since all comments since the last loc should be removed. *)\nlet trailing_and_remover_after_last_loc : Parser_env.env -> trailing_and_remover_result =\n fun env ->\n  let open Loc in\n  let remover =\n    match Parser_env.last_loc env with\n    | None -> None\n    | Some _ when not (Peek.has_eaten_comments env) -> None\n    | Some last_loc ->\n      Parser_env.consume_comments_until env last_loc._end;\n      let remover = new trailing_comments_remover ~after_pos:last_loc._end in\n      Some remover\n  in\n  {\n    trailing = [];\n    remove_trailing =\n      (fun node f ->\n        match remover with\n        | None -> node\n        | Some remover -> f remover node);\n  }\n\n(* Consumes and returns comments on the same line as the previous token. Also returns a remover\n   function which can be used to remove comments beginning after the previous token's line. 
*)\nlet trailing_and_remover_after_last_line : Parser_env.env -> trailing_and_remover_result =\n fun env ->\n  let open Loc in\n  let (trailing, remover) =\n    match Parser_env.last_loc env with\n    | None -> ([], None)\n    | Some _ when not (Peek.has_eaten_comments env) -> (Eat.comments_until_next_line env, None)\n    | Some last_loc ->\n      Parser_env.consume_comments_until env last_loc._end;\n      let trailing = Eat.comments_until_next_line env in\n      let next_line_start = { line = last_loc._end.line + 1; column = 0 } in\n      let remover = new trailing_comments_remover ~after_pos:next_line_start in\n      (trailing, Some remover)\n  in\n  {\n    trailing;\n    remove_trailing =\n      (fun node f ->\n        match remover with\n        | None -> node\n        | Some remover -> f remover node);\n  }\n\nlet trailing_and_remover : Parser_env.env -> trailing_and_remover_result =\n fun env ->\n  if Peek.is_line_terminator env then\n    trailing_and_remover_after_last_line env\n  else\n    trailing_and_remover_after_last_loc env\n\nlet id_remove_trailing env id =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing id (fun remover id -> remover#identifier id)\n\nlet expression_remove_trailing env expr =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing expr (fun remover expr -> remover#expression expr)\n\nlet block_remove_trailing env block =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing block (fun remover (loc, str) -> (loc, remover#block loc str))\n\nlet type_params_remove_trailing env tparams =\n  match tparams with\n  | None -> None\n  | Some tparams ->\n    let { remove_trailing; _ } = trailing_and_remover env in\n    Some (remove_trailing tparams (fun remover tparams -> remover#type_params tparams))\n\nlet type_remove_trailing env ty =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing ty (fun remover ty -> remover#type_ ty)\n\nlet 
type_annotation_hint_remove_trailing env annot =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing annot (fun remover annot -> remover#type_annotation_hint annot)\n\nlet function_params_remove_trailing env params =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing params (fun remover params -> remover#function_params params)\n\nlet predicate_remove_trailing env pred =\n  match pred with\n  | None -> None\n  | Some pred ->\n    let { remove_trailing; _ } = trailing_and_remover env in\n    Some (remove_trailing pred (fun remover pred -> remover#predicate pred))\n\nlet object_key_remove_trailing env key =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing key (fun remover key -> remover#object_key key)\n\nlet generic_type_remove_trailing env ty =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing ty (fun remover ty -> map_loc remover#generic_type ty)\n\nlet generic_type_list_remove_trailing env extends =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing extends (fun remover extends ->\n      id_list_last (map_loc remover#generic_type) extends\n  )\n\nlet class_implements_remove_trailing env implements =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing implements (fun remover impl -> remover#class_implements impl)\n\nlet string_literal_remove_trailing env str =\n  let { remove_trailing; _ } = trailing_and_remover env in\n  remove_trailing str (fun remover (loc, str) -> (loc, remover#string_literal_type loc str))\n\nlet statement_add_comments\n    ((loc, stmt) : (Loc.t, Loc.t) Statement.t) (comments : (Loc.t, unit) Syntax.t option) :\n    (Loc.t, Loc.t) Statement.t =\n  let open Statement in\n  let merge_comments inner = Flow_ast_utils.merge_comments ~inner ~outer:comments in\n  let merge_comments_with_internal inner =\n    Flow_ast_utils.merge_comments_with_internal ~inner ~outer:comments\n  in\n  ( 
loc,\n    match stmt with\n    | Block ({ Block.comments; _ } as s) ->\n      Block { s with Block.comments = merge_comments_with_internal comments }\n    | Break ({ Break.comments; _ } as s) ->\n      Break { s with Break.comments = merge_comments comments }\n    | ClassDeclaration ({ Class.comments; _ } as s) ->\n      ClassDeclaration { s with Class.comments = merge_comments comments }\n    | Continue ({ Continue.comments; _ } as s) ->\n      Continue { s with Continue.comments = merge_comments comments }\n    | Debugger { Debugger.comments } -> Debugger { Debugger.comments = merge_comments comments }\n    | DeclareClass ({ DeclareClass.comments; _ } as s) ->\n      DeclareClass { s with DeclareClass.comments = merge_comments comments }\n    | DeclareExportDeclaration ({ DeclareExportDeclaration.comments; _ } as s) ->\n      DeclareExportDeclaration\n        { s with DeclareExportDeclaration.comments = merge_comments comments }\n    | DeclareFunction ({ DeclareFunction.comments; _ } as s) ->\n      DeclareFunction { s with DeclareFunction.comments = merge_comments comments }\n    | DeclareInterface ({ Interface.comments; _ } as s) ->\n      DeclareInterface { s with Interface.comments = merge_comments comments }\n    | DeclareModule ({ DeclareModule.comments; _ } as s) ->\n      DeclareModule { s with DeclareModule.comments = merge_comments comments }\n    | DeclareModuleExports ({ DeclareModuleExports.comments; _ } as s) ->\n      DeclareModuleExports { s with DeclareModuleExports.comments = merge_comments comments }\n    | DeclareTypeAlias ({ TypeAlias.comments; _ } as s) ->\n      DeclareTypeAlias { s with TypeAlias.comments = merge_comments comments }\n    | DeclareOpaqueType ({ OpaqueType.comments; _ } as s) ->\n      DeclareOpaqueType { s with OpaqueType.comments = merge_comments comments }\n    | DeclareVariable ({ DeclareVariable.comments; _ } as s) ->\n      DeclareVariable { s with DeclareVariable.comments = merge_comments comments }\n    | DoWhile ({ 
DoWhile.comments; _ } as s) ->\n      DoWhile { s with DoWhile.comments = merge_comments comments }\n    | Empty { Empty.comments } -> Empty { Empty.comments = merge_comments comments }\n    | EnumDeclaration ({ EnumDeclaration.comments; _ } as s) ->\n      EnumDeclaration { s with EnumDeclaration.comments = merge_comments comments }\n    | ExportDefaultDeclaration ({ ExportDefaultDeclaration.comments; _ } as s) ->\n      ExportDefaultDeclaration\n        { s with ExportDefaultDeclaration.comments = merge_comments comments }\n    | ExportNamedDeclaration ({ ExportNamedDeclaration.comments; _ } as s) ->\n      ExportNamedDeclaration { s with ExportNamedDeclaration.comments = merge_comments comments }\n    | Expression ({ Expression.comments; _ } as s) ->\n      Expression { s with Expression.comments = merge_comments comments }\n    | For ({ For.comments; _ } as s) -> For { s with For.comments = merge_comments comments }\n    | ForIn ({ ForIn.comments; _ } as s) ->\n      ForIn { s with ForIn.comments = merge_comments comments }\n    | ForOf ({ ForOf.comments; _ } as s) ->\n      ForOf { s with ForOf.comments = merge_comments comments }\n    | FunctionDeclaration ({ Function.comments; _ } as s) ->\n      FunctionDeclaration { s with Function.comments = merge_comments comments }\n    | If ({ If.comments; _ } as s) -> If { s with If.comments = merge_comments comments }\n    | ImportDeclaration ({ ImportDeclaration.comments; _ } as s) ->\n      ImportDeclaration { s with ImportDeclaration.comments = merge_comments comments }\n    | InterfaceDeclaration ({ Interface.comments; _ } as s) ->\n      InterfaceDeclaration { s with Interface.comments = merge_comments comments }\n    | Labeled ({ Labeled.comments; _ } as s) ->\n      Labeled { s with Labeled.comments = merge_comments comments }\n    | Return ({ Return.comments; _ } as s) ->\n      Return { s with Return.comments = merge_comments comments }\n    | Switch ({ Switch.comments; _ } as s) ->\n      Switch { s with 
Switch.comments = merge_comments comments }\n    | Throw ({ Throw.comments; _ } as s) ->\n      Throw { s with Throw.comments = merge_comments comments }\n    | Try ({ Try.comments; _ } as s) -> Try { s with Try.comments = merge_comments comments }\n    | TypeAlias ({ TypeAlias.comments; _ } as s) ->\n      TypeAlias { s with TypeAlias.comments = merge_comments comments }\n    | OpaqueType ({ OpaqueType.comments; _ } as s) ->\n      OpaqueType { s with OpaqueType.comments = merge_comments comments }\n    | VariableDeclaration ({ VariableDeclaration.comments; _ } as s) ->\n      VariableDeclaration { s with VariableDeclaration.comments = merge_comments comments }\n    | While ({ While.comments; _ } as s) ->\n      While { s with While.comments = merge_comments comments }\n    | With ({ With.comments; _ } as s) -> With { s with With.comments = merge_comments comments }\n  )\n\n(* Collects the first leading and last trailing comment on an AST node or its children.\n   The first leading comment is the first attached comment that begins before the given node's loc,\n   and the last trailing comment is the last attached comment that begins after the given node's loc. *)\nclass ['loc] comment_bounds_collector ~loc =\n  object (this)\n    inherit ['loc] Flow_ast_mapper.mapper\n\n    val mutable first_leading = None\n\n    val mutable last_trailing = None\n\n    method comment_bounds = (first_leading, last_trailing)\n\n    method collect_comments : 'internal. 
('loc, 'internal) Syntax.t -> unit =\n      function\n      | { Syntax.leading; trailing; _ } ->\n        List.iter this#visit_leading_comment leading;\n        List.iter this#visit_trailing_comment trailing\n\n    method collect_comments_opt =\n      function\n      | None -> ()\n      | Some comments -> this#collect_comments comments\n\n    method visit_leading_comment ((comment_loc, _) as comment) =\n      let open Loc in\n      match first_leading with\n      | None -> if pos_cmp comment_loc.start loc.start < 0 then first_leading <- Some comment\n      | Some (current_first_loc, _) ->\n        if pos_cmp comment_loc.start current_first_loc.start < 0 then first_leading <- Some comment\n\n    method visit_trailing_comment ((comment_loc, _) as comment) =\n      let open Loc in\n      match last_trailing with\n      | None -> if pos_cmp comment_loc.start loc._end >= 0 then last_trailing <- Some comment\n      | Some (current_last_loc, _) ->\n        if pos_cmp current_last_loc.start comment_loc.start < 0 then last_trailing <- Some comment\n\n    method! syntax comments =\n      this#collect_comments comments;\n      comments\n\n    method! block _loc block =\n      let { Statement.Block.comments; _ } = block in\n      this#collect_comments_opt comments;\n      block\n  end\n\n(* Given an AST node and a function to collect all its comments, return the first leading\n   and last trailing comment on the node. 
*)\nlet comment_bounds loc node f =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (f collector node);\n  collector#comment_bounds\n\n(* Expand node's loc to include its attached comments *)\nlet expand_loc_with_comment_bounds loc (first_leading, last_trailing) =\n  let open Loc in\n  let start =\n    match first_leading with\n    | None -> loc\n    | Some (first_leading_loc, _) -> first_leading_loc\n  in\n  let _end =\n    match last_trailing with\n    | None -> loc\n    | Some (last_trailing_loc, _) -> last_trailing_loc\n  in\n  btwn start _end\n\n(* Remove the trailing comment bound if it is a line comment *)\nlet comment_bounds_without_trailing_line_comment (leading, trailing) =\n  match trailing with\n  | Some (_, { Ast.Comment.kind = Ast.Comment.Line; _ }) -> (leading, None)\n  | _ -> (leading, trailing)\n\nlet collect_without_trailing_line_comment collector =\n  comment_bounds_without_trailing_line_comment collector#comment_bounds\n\n(* Return the first leading and last trailing comment of a statement *)\nlet statement_comment_bounds ((loc, _) as stmt : (Loc.t, Loc.t) Statement.t) :\n    Loc.t Comment.t option * Loc.t Comment.t option =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#statement stmt);\n  collector#comment_bounds\n\nlet expression_comment_bounds ((loc, _) as expr) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#expression expr);\n  collector#comment_bounds\n\nlet type_comment_bounds ((loc, _) as ty) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#type_ ty);\n  collector#comment_bounds\n\nlet block_comment_bounds (loc, block) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#block loc block);\n  collector#comment_bounds\n\nlet object_property_comment_bounds property =\n  let open Ast.Expression.Object in\n  let collector =\n    match property with\n    | Property ((loc, _) as p) ->\n      let collector = new 
comment_bounds_collector ~loc in\n      ignore (collector#object_property p);\n      collector\n    | SpreadProperty ((loc, _) as p) ->\n      let collector = new comment_bounds_collector ~loc in\n      ignore (collector#spread_property p);\n      collector\n  in\n  collect_without_trailing_line_comment collector\n\nlet object_type_property_comment_bounds property =\n  let open Ast.Type.Object in\n  let collector =\n    match property with\n    | Property ((loc, _) as p) ->\n      let collector = new comment_bounds_collector ~loc in\n      ignore (collector#object_property_type p);\n      collector\n    | SpreadProperty ((loc, _) as p) ->\n      let collector = new comment_bounds_collector ~loc in\n      ignore (collector#object_spread_property_type p);\n      collector\n    | Indexer ((loc, _) as p) ->\n      let collector = new comment_bounds_collector ~loc in\n      ignore (collector#object_indexer_property_type p);\n      collector\n    | InternalSlot ((loc, _) as p) ->\n      let collector = new comment_bounds_collector ~loc in\n      ignore (collector#object_internal_slot_property_type p);\n      collector\n    | CallProperty ((loc, _) as p) ->\n      let collector = new comment_bounds_collector ~loc in\n      ignore (collector#object_call_property_type p);\n      collector\n  in\n  collect_without_trailing_line_comment collector\n\nlet object_pattern_property_comment_bounds loc property =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#pattern_object_p property);\n  collect_without_trailing_line_comment collector\n\nlet switch_case_comment_bounds (loc, case) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#switch_case (loc, case));\n  collector#comment_bounds\n\nlet function_param_comment_bounds (loc, param) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#function_param (loc, param));\n  collect_without_trailing_line_comment collector\n\nlet 
function_rest_param_comment_bounds (loc, param) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#function_rest_param (loc, param));\n  collect_without_trailing_line_comment collector\n\nlet function_this_param_comment_bounds (loc, param) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#function_this_param (loc, param));\n  collect_without_trailing_line_comment collector\n\nlet function_type_param_comment_bounds (loc, param) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#function_param_type (loc, param));\n  collect_without_trailing_line_comment collector\n\nlet function_type_rest_param_comment_bounds (loc, param) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#function_rest_param_type (loc, param));\n  collect_without_trailing_line_comment collector\n\nlet function_type_this_param_comment_bounds (loc, param) =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#function_this_param_type (loc, param));\n  collect_without_trailing_line_comment collector\n\nlet array_element_comment_bounds loc element =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#array_element element);\n  collect_without_trailing_line_comment collector\n\nlet array_pattern_element_comment_bounds loc element =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#pattern_array_e element);\n  collect_without_trailing_line_comment collector\n\nlet expression_or_spread_comment_bounds loc expr_or_spread =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#expression_or_spread expr_or_spread);\n  collect_without_trailing_line_comment collector\n\nlet call_type_arg_comment_bounds loc arg =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#call_type_arg arg);\n  collect_without_trailing_line_comment collector\n\nlet type_param_comment_bounds (loc, param) 
=\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#type_param (loc, param));\n  collect_without_trailing_line_comment collector\n\nlet function_body_comment_bounds body =\n  let loc =\n    match body with\n    | Ast.Function.BodyBlock (loc, _) -> loc\n    | Ast.Function.BodyExpression (loc, _) -> loc\n  in\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#function_body_any body);\n  collector#comment_bounds\n\nlet if_alternate_statement_comment_bounds loc alternate =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#if_alternate_statement loc alternate);\n  collector#comment_bounds\n\nlet member_property_comment_bounds loc property =\n  let collector = new comment_bounds_collector ~loc in\n  ignore (collector#member_property property);\n  collector#comment_bounds\n"
  },
  {
    "path": "analysis/vendor/js_parser/declaration_parser.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Ast = Flow_ast\nopen Token\nopen Parser_common\nopen Parser_env\nopen Flow_ast\nopen Comment_attachment\n\nmodule type DECLARATION = sig\n  val async : env -> bool * Loc.t Comment.t list\n\n  val generator : env -> bool * Loc.t Comment.t list\n\n  val variance : env -> bool -> bool -> Loc.t Variance.t option\n\n  val function_params : await:bool -> yield:bool -> env -> (Loc.t, Loc.t) Ast.Function.Params.t\n\n  val function_body :\n    env ->\n    async:bool ->\n    generator:bool ->\n    expression:bool ->\n    simple_params:bool ->\n    (Loc.t, Loc.t) Function.body * bool\n\n  val strict_post_check :\n    env ->\n    contains_use_strict:bool ->\n    (Loc.t, Loc.t) Identifier.t option ->\n    (Loc.t, Loc.t) Ast.Function.Params.t ->\n    unit\n\n  val let_ :\n    env ->\n    (Loc.t, Loc.t) Statement.VariableDeclaration.Declarator.t list\n    * Loc.t Ast.Comment.t list\n    * (Loc.t * Parse_error.t) list\n\n  val const :\n    env ->\n    (Loc.t, Loc.t) Statement.VariableDeclaration.Declarator.t list\n    * Loc.t Ast.Comment.t list\n    * (Loc.t * Parse_error.t) list\n\n  val var :\n    env ->\n    (Loc.t, Loc.t) Statement.VariableDeclaration.Declarator.t list\n    * Loc.t Ast.Comment.t list\n    * (Loc.t * Parse_error.t) list\n\n  val _function : env -> (Loc.t, Loc.t) Statement.t\n\n  val enum_declaration : env -> (Loc.t, Loc.t) Statement.t\nend\n\nmodule Declaration (Parse : Parser_common.PARSER) (Type : Type_parser.TYPE) : DECLARATION = struct\n  module Enum = Enum_parser.Enum (Parse)\n\n  let check_param =\n    let rec pattern ((env, _) as check_env) (loc, p) =\n      Pattern.(\n        match p with\n        | Object o -> _object check_env o\n        | Array arr -> _array check_env arr\n        | Identifier id -> identifier_pattern check_env id\n  
      | Expression _ ->\n          error_at env (loc, Parse_error.ExpectedPatternFoundExpression);\n          check_env\n      )\n    and _object check_env o = List.fold_left object_property check_env o.Pattern.Object.properties\n    and object_property check_env =\n      let open Pattern.Object in\n      function\n      | Property (_, property) ->\n        Property.(\n          let check_env =\n            match property.key with\n            | Identifier id -> identifier_no_dupe_check check_env id\n            | _ -> check_env\n          in\n          pattern check_env property.pattern\n        )\n      | RestElement (_, { Pattern.RestElement.argument; comments = _ }) ->\n        pattern check_env argument\n    and _array check_env arr = List.fold_left array_element check_env arr.Pattern.Array.elements\n    and array_element check_env =\n      let open Pattern.Array in\n      function\n      | Hole _ -> check_env\n      | Element (_, { Element.argument; default = _ }) -> pattern check_env argument\n      | RestElement (_, { Pattern.RestElement.argument; comments = _ }) ->\n        pattern check_env argument\n    and identifier_pattern check_env { Pattern.Identifier.name = id; _ } = identifier check_env id\n    and identifier (env, param_names) ((loc, { Identifier.name; comments = _ }) as id) =\n      if SSet.mem name param_names then error_at env (loc, Parse_error.StrictParamDupe);\n      let (env, param_names) = identifier_no_dupe_check (env, param_names) id in\n      (env, SSet.add name param_names)\n    and identifier_no_dupe_check (env, param_names) (loc, { Identifier.name; comments = _ }) =\n      if is_restricted name then strict_error_at env (loc, Parse_error.StrictParamName);\n      if is_future_reserved name || is_strict_reserved name then\n        strict_error_at env (loc, Parse_error.StrictReservedWord);\n      (env, param_names)\n    in\n    pattern\n\n  let strict_post_check env ~contains_use_strict id params =\n    let strict_mode = 
Parser_env.in_strict_mode env in\n    let simple = is_simple_parameter_list params in\n    let (_, { Ast.Function.Params.params; rest; this_ = _; comments = _ }) = params in\n    (* If we were already in strict mode and therefore already threw strict\n       errors, we want to do these checks outside of strict mode. If we\n       were in non-strict mode but the function contains \"use strict\", then\n       we want to do these checks in strict mode *)\n    let env =\n      if strict_mode then\n        with_strict false env\n      else\n        with_strict contains_use_strict env\n    in\n    if contains_use_strict || strict_mode || not simple then (\n      (match id with\n      | Some (loc, { Identifier.name; comments = _ }) ->\n        if is_restricted name then strict_error_at env (loc, Parse_error.StrictFunctionName);\n        if is_future_reserved name || is_strict_reserved name then\n          strict_error_at env (loc, Parse_error.StrictReservedWord)\n      | None -> ());\n      let acc =\n        List.fold_left\n          (fun acc (_, { Function.Param.argument; default = _ }) -> check_param acc argument)\n          (env, SSet.empty)\n          params\n      in\n      match rest with\n      | Some (_, { Function.RestParam.argument; comments = _ }) -> ignore (check_param acc argument)\n      | None -> ()\n    )\n\n  let function_params =\n    let rec param =\n      with_loc (fun env ->\n          if Peek.token env = T_THIS then error env Parse_error.ThisParamMustBeFirst;\n          let argument = Parse.pattern env Parse_error.StrictParamName in\n          let default =\n            if Peek.token env = T_ASSIGN then (\n              Expect.token env T_ASSIGN;\n              Some (Parse.assignment env)\n            ) else\n              None\n          in\n          { Function.Param.argument; default }\n      )\n    and param_list env acc =\n      match Peek.token env with\n      | (T_EOF | T_RPAREN | T_ELLIPSIS) as t ->\n        let rest =\n          if t = 
T_ELLIPSIS then\n            let leading = Peek.comments env in\n            let (loc, id) =\n              with_loc\n                (fun env ->\n                  Expect.token env T_ELLIPSIS;\n                  Parse.pattern env Parse_error.StrictParamName)\n                env\n            in\n            Some\n              ( loc,\n                {\n                  Function.RestParam.argument = id;\n                  comments = Flow_ast_utils.mk_comments_opt ~leading ();\n                }\n              )\n          else\n            None\n        in\n        if Peek.token env <> T_RPAREN then error env Parse_error.ParameterAfterRestParameter;\n        (List.rev acc, rest)\n      | _ ->\n        let the_param = param env in\n        if Peek.token env <> T_RPAREN then Expect.token env T_COMMA;\n        param_list env (the_param :: acc)\n    in\n    let this_param_annotation env =\n      if should_parse_types env && Peek.token env = T_THIS then (\n        let leading = Peek.comments env in\n        let (this_loc, this_param) =\n          with_loc\n            (fun env ->\n              Expect.token env T_THIS;\n              if Peek.token env <> T_COLON then begin\n                error env Parse_error.ThisParamAnnotationRequired;\n                None\n              end else\n                Some (Type.annotation env))\n            env\n        in\n        match this_param with\n        | None -> None\n        | Some annot ->\n          if Peek.token env = T_COMMA then Eat.token env;\n          Some\n            ( this_loc,\n              {\n                Ast.Function.ThisParam.annot;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ();\n              }\n            )\n      ) else\n        None\n    in\n    fun ~await ~yield ->\n      with_loc (fun env ->\n          let env =\n            env\n            |> with_allow_await await\n            |> with_allow_yield yield\n            |> with_in_formal_parameters true\n          in\n       
   let leading = Peek.comments env in\n          Expect.token env T_LPAREN;\n          let this_ = this_param_annotation env in\n          let (params, rest) = param_list env [] in\n          let internal = Peek.comments env in\n          Expect.token env T_RPAREN;\n          let trailing = Eat.trailing_comments env in\n          {\n            Ast.Function.Params.params;\n            rest;\n            comments = Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n            this_;\n          }\n      )\n\n  let function_body env ~async ~generator ~expression ~simple_params =\n    let env = enter_function env ~async ~generator ~simple_params in\n    let (body_block, contains_use_strict) = Parse.function_block_body env ~expression in\n    (Function.BodyBlock body_block, contains_use_strict)\n\n  let variance env is_async is_generator =\n    let loc = Peek.loc env in\n    let variance =\n      match Peek.token env with\n      | T_PLUS ->\n        let leading = Peek.comments env in\n        Eat.token env;\n        Some\n          ( loc,\n            { Variance.kind = Variance.Plus; comments = Flow_ast_utils.mk_comments_opt ~leading () }\n          )\n      | T_MINUS ->\n        let leading = Peek.comments env in\n        Eat.token env;\n        Some\n          ( loc,\n            {\n              Variance.kind = Variance.Minus;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            }\n          )\n      | _ -> None\n    in\n    match variance with\n    | Some (loc, _) when is_async || is_generator ->\n      error_at env (loc, Parse_error.UnexpectedVariance);\n      None\n    | _ -> variance\n\n  let generator env =\n    if Peek.token env = T_MULT then (\n      let leading = Peek.comments env in\n      Eat.token env;\n      (true, leading)\n    ) else\n      (false, [])\n\n  (* Returns true and consumes a token if the token is `async` and the token after it is on\n     the same line (see 
https://tc39.github.io/ecma262/#sec-async-function-definitions) *)\n  let async env =\n    if Peek.token env = T_ASYNC && not (Peek.ith_is_line_terminator ~i:1 env) then\n      let leading = Peek.comments env in\n      let () = Eat.token env in\n      (true, leading)\n    else\n      (false, [])\n\n  let _function =\n    with_loc (fun env ->\n        let (async, leading_async) = async env in\n        let (sig_loc, (generator, tparams, id, params, return, predicate, leading)) =\n          with_loc\n            (fun env ->\n              let leading_function = Peek.comments env in\n              Expect.token env T_FUNCTION;\n              let (generator, leading_generator) = generator env in\n              let leading = List.concat [leading_async; leading_function; leading_generator] in\n              let (tparams, id) =\n                match (in_export_default env, Peek.token env) with\n                | (true, T_LPAREN) -> (None, None)\n                | (true, T_LESS_THAN) ->\n                  let tparams = type_params_remove_trailing env (Type.type_params env) in\n                  let id =\n                    if Peek.token env = T_LPAREN then\n                      None\n                    else\n                      let id =\n                        id_remove_trailing\n                          env\n                          (Parse.identifier ~restricted_error:Parse_error.StrictFunctionName env)\n                      in\n                      Some id\n                  in\n                  (tparams, id)\n                | _ ->\n                  let id =\n                    if Peek.is_identifier env then\n                      id_remove_trailing\n                        env\n                        (Parse.identifier ~restricted_error:Parse_error.StrictFunctionName env)\n                    else (\n                      (* don't consume the identifier here like Parse.identifier does. 
*)\n                      error_nameless_declaration env \"function\";\n                      (Peek.loc env, { Identifier.name = \"\"; comments = None })\n                    )\n                  in\n                  let tparams = type_params_remove_trailing env (Type.type_params env) in\n                  (tparams, Some id)\n              in\n              let params =\n                let params = function_params ~await:async ~yield:generator env in\n                if Peek.token env = T_COLON then\n                  params\n                else\n                  function_params_remove_trailing env params\n              in\n              let (return, predicate) = Type.annotation_and_predicate_opt env in\n              let (return, predicate) =\n                match predicate with\n                | None -> (type_annotation_hint_remove_trailing env return, predicate)\n                | Some _ -> (return, predicate_remove_trailing env predicate)\n              in\n              (generator, tparams, id, params, return, predicate, leading))\n            env\n        in\n        let simple_params = is_simple_parameter_list params in\n        let (body, contains_use_strict) =\n          function_body env ~async ~generator ~expression:false ~simple_params\n        in\n        strict_post_check env ~contains_use_strict id params;\n        Statement.FunctionDeclaration\n          {\n            Function.id;\n            params;\n            body;\n            generator;\n            async;\n            predicate;\n            return;\n            tparams;\n            sig_loc;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ();\n          }\n    )\n\n  let variable_declaration_list =\n    let variable_declaration env =\n      let (loc, (decl, err)) =\n        with_loc\n          (fun env ->\n            let id = Parse.pattern env Parse_error.StrictVarName in\n            let (init, err) =\n              if Eat.maybe env T_ASSIGN then\n                
(Some (Parse.assignment env), None)\n              else\n                match id with\n                | (_, Ast.Pattern.Identifier _) -> (None, None)\n                | (loc, _) -> (None, Some (loc, Parse_error.NoUninitializedDestructuring))\n            in\n            (Ast.Statement.VariableDeclaration.Declarator.{ id; init }, err))\n          env\n      in\n      ((loc, decl), err)\n    in\n    let rec helper env decls errs =\n      let (decl, err) = variable_declaration env in\n      let decls = decl :: decls in\n      let errs =\n        match err with\n        | Some x -> x :: errs\n        | None -> errs\n      in\n      if Eat.maybe env T_COMMA then\n        helper env decls errs\n      else\n        (List.rev decls, List.rev errs)\n    in\n    (fun env -> helper env [] [])\n\n  let declarations token env =\n    let leading = Peek.comments env in\n    Expect.token env token;\n    let (declarations, errs) = variable_declaration_list env in\n    (declarations, leading, errs)\n\n  let var = declarations T_VAR\n\n  let const env =\n    let env = env |> with_no_let true in\n    let (declarations, leading_comments, errs) = declarations T_CONST env in\n    (* Make sure all consts defined are initialized *)\n    let errs =\n      List.fold_left\n        (fun errs decl ->\n          match decl with\n          | (loc, { Statement.VariableDeclaration.Declarator.init = None; _ }) ->\n            (loc, Parse_error.NoUninitializedConst) :: errs\n          | _ -> errs)\n        errs\n        declarations\n    in\n    (declarations, leading_comments, List.rev errs)\n\n  let let_ env =\n    let env = env |> with_no_let true in\n    declarations T_LET env\n\n  let enum_declaration = Enum.declaration\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/dune",
    "content": "(library\n (name js_parser)\n (wrapped false)\n (flags\n  (:standard -w +a-4-44-48-50-70)))\n"
  },
  {
    "path": "analysis/vendor/js_parser/enum_common.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\nopen Primitive_deriving\ntype explicit_type =\n  | Boolean\n  | Number\n  | String\n  | Symbol\n[@@deriving_inline compare]\nlet _ = fun (_ : explicit_type) -> ()\nlet compare_explicit_type =\n  (Ppx_compare_lib.polymorphic_compare : explicit_type ->\n                                           explicit_type -> int)\nlet _ = compare_explicit_type\n[@@@end]\nlet string_of_explicit_type = function\n  | Boolean -> \"boolean\"\n  | Number -> \"number\"\n  | String -> \"string\"\n  | Symbol -> \"symbol\"\n"
  },
  {
    "path": "analysis/vendor/js_parser/enum_parser.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nopen Flow_ast\nopen Parser_common\nopen Parser_env\nopen Token\n\nmodule Enum (Parse : Parser_common.PARSER) : sig\n  val declaration : env -> (Loc.t, Loc.t) Statement.t\nend = struct\n  open Flow_ast.Statement.EnumDeclaration\n\n  type members = {\n    boolean_members: (Loc.t BooleanLiteral.t, Loc.t) InitializedMember.t list;\n    number_members: (Loc.t NumberLiteral.t, Loc.t) InitializedMember.t list;\n    string_members: (Loc.t StringLiteral.t, Loc.t) InitializedMember.t list;\n    defaulted_members: Loc.t DefaultedMember.t list;\n  }\n\n  type acc = {\n    members: members;\n    seen_names: SSet.t;\n    has_unknown_members: bool;\n    internal_comments: Loc.t Comment.t list;\n  }\n\n  type init =\n    | NoInit\n    | InvalidInit of Loc.t\n    | BooleanInit of Loc.t * Loc.t BooleanLiteral.t\n    | NumberInit of Loc.t * Loc.t NumberLiteral.t\n    | StringInit of Loc.t * Loc.t StringLiteral.t\n\n  let empty_members =\n    { boolean_members = []; number_members = []; string_members = []; defaulted_members = [] }\n\n  let empty_acc =\n    {\n      members = empty_members;\n      seen_names = SSet.empty;\n      has_unknown_members = false;\n      internal_comments = [];\n    }\n\n  let end_of_member_init env =\n    match Peek.token env with\n    | T_SEMICOLON\n    | T_COMMA\n    | T_RCURLY ->\n      true\n    | _ -> false\n\n  let member_init env =\n    let loc = Peek.loc env in\n    let leading = Peek.comments env in\n    match Peek.token env with\n    | T_NUMBER { kind; raw } ->\n      let value = Parse.number env kind raw in\n      let trailing = Eat.trailing_comments env in\n      if end_of_member_init env then\n        NumberInit\n          ( loc,\n            {\n              NumberLiteral.value;\n              raw;\n              comments = 
Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            }\n          )\n      else\n        InvalidInit loc\n    | T_STRING (loc, value, raw, octal) ->\n      if octal then strict_error env Parse_error.StrictOctalLiteral;\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      if end_of_member_init env then\n        StringInit\n          ( loc,\n            {\n              StringLiteral.value;\n              raw;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            }\n          )\n      else\n        InvalidInit loc\n    | (T_TRUE | T_FALSE) as token ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      if end_of_member_init env then\n        BooleanInit\n          ( loc,\n            {\n              BooleanLiteral.value = token = T_TRUE;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            }\n          )\n      else\n        InvalidInit loc\n    | _ ->\n      Eat.token env;\n      InvalidInit loc\n\n  let member_raw =\n    with_loc (fun env ->\n        let id = identifier_name env in\n        let init =\n          match Peek.token env with\n          | T_ASSIGN ->\n            Expect.token env T_ASSIGN;\n            member_init env\n          | T_COLON ->\n            let (_, { Identifier.name = member_name; _ }) = id in\n            error env (Parse_error.EnumInvalidInitializerSeparator { member_name });\n            Expect.token env T_COLON;\n            member_init env\n          | _ -> NoInit\n        in\n        (id, init)\n    )\n\n  let check_explicit_type_mismatch env ~enum_name ~explicit_type ~member_name literal_type loc =\n    match explicit_type with\n    | Some enum_type when enum_type <> literal_type ->\n      error_at\n        env\n        (loc, Parse_error.EnumInvalidMemberInitializer { enum_name; explicit_type; member_name })\n    | _ -> ()\n\n  let is_a_to_z c = c >= 'a' && c <= 'z'\n\n  let enum_member 
~enum_name ~explicit_type acc env =\n    let { members; seen_names; _ } = acc in\n    let (member_loc, (id, init)) = member_raw env in\n    let (id_loc, { Identifier.name = member_name; _ }) = id in\n    (* if we parsed an empty name, something has gone wrong and we should abort analysis *)\n    if member_name = \"\" then\n      acc\n    else (\n      if is_a_to_z @@ member_name.[0] then\n        error_at env (id_loc, Parse_error.EnumInvalidMemberName { enum_name; member_name });\n      if SSet.mem member_name seen_names then\n        error_at env (id_loc, Parse_error.EnumDuplicateMemberName { enum_name; member_name });\n      let acc = { acc with seen_names = SSet.add member_name seen_names } in\n      let check_explicit_type_mismatch =\n        check_explicit_type_mismatch env ~enum_name ~explicit_type ~member_name\n      in\n      match init with\n      | BooleanInit (loc, value) ->\n        check_explicit_type_mismatch Enum_common.Boolean loc;\n        let member = (member_loc, { InitializedMember.id; init = (loc, value) }) in\n        { acc with members = { members with boolean_members = member :: members.boolean_members } }\n      | NumberInit (loc, value) ->\n        check_explicit_type_mismatch Enum_common.Number loc;\n        let member = (member_loc, { InitializedMember.id; init = (loc, value) }) in\n        { acc with members = { members with number_members = member :: members.number_members } }\n      | StringInit (loc, value) ->\n        check_explicit_type_mismatch Enum_common.String loc;\n        let member = (member_loc, { InitializedMember.id; init = (loc, value) }) in\n        { acc with members = { members with string_members = member :: members.string_members } }\n      | InvalidInit loc ->\n        error_at\n          env\n          (loc, Parse_error.EnumInvalidMemberInitializer { enum_name; explicit_type; member_name });\n        acc\n      | NoInit ->\n        begin\n          match explicit_type with\n          | Some Enum_common.Boolean 
->\n            error_at\n              env\n              (member_loc, Parse_error.EnumBooleanMemberNotInitialized { enum_name; member_name });\n            acc\n          | Some Enum_common.Number ->\n            error_at\n              env\n              (member_loc, Parse_error.EnumNumberMemberNotInitialized { enum_name; member_name });\n            acc\n          | Some Enum_common.String\n          | Some Enum_common.Symbol\n          | None ->\n            let member = (member_loc, { DefaultedMember.id }) in\n            {\n              acc with\n              members = { members with defaulted_members = member :: members.defaulted_members };\n            }\n        end\n    )\n\n  let rec enum_members ~enum_name ~explicit_type acc env =\n    match Peek.token env with\n    | T_RCURLY\n    | T_EOF ->\n      ( {\n          boolean_members = List.rev acc.members.boolean_members;\n          number_members = List.rev acc.members.number_members;\n          string_members = List.rev acc.members.string_members;\n          defaulted_members = List.rev acc.members.defaulted_members;\n        },\n        acc.has_unknown_members,\n        acc.internal_comments\n      )\n    | T_ELLIPSIS ->\n      let loc = Peek.loc env in\n      (* Internal comments may appear before the ellipsis *)\n      let internal_comments = Peek.comments env in\n      Eat.token env;\n      (match Peek.token env with\n      | T_RCURLY\n      | T_EOF ->\n        ()\n      | T_COMMA ->\n        Expect.token env T_COMMA;\n        let trailing_comma =\n          match Peek.token env with\n          | T_RCURLY\n          | T_EOF ->\n            true\n          | _ -> false\n        in\n        error_at env (loc, Parse_error.EnumInvalidEllipsis { trailing_comma })\n      | _ -> error_at env (loc, Parse_error.EnumInvalidEllipsis { trailing_comma = false }));\n      enum_members\n        ~enum_name\n        ~explicit_type\n        { acc with has_unknown_members = true; internal_comments }\n        env\n   
 | _ ->\n      let acc = enum_member ~enum_name ~explicit_type acc env in\n      (match Peek.token env with\n      | T_RCURLY\n      | T_EOF ->\n        ()\n      | T_SEMICOLON ->\n        error env Parse_error.EnumInvalidMemberSeparator;\n        Expect.token env T_SEMICOLON\n      | _ -> Expect.token env T_COMMA);\n      enum_members ~enum_name ~explicit_type acc env\n\n  let string_body\n      ~env ~enum_name ~is_explicit ~has_unknown_members string_members defaulted_members comments =\n    let initialized_len = List.length string_members in\n    let defaulted_len = List.length defaulted_members in\n    let defaulted_body () =\n      StringBody\n        {\n          StringBody.members = StringBody.Defaulted defaulted_members;\n          explicit_type = is_explicit;\n          has_unknown_members;\n          comments;\n        }\n    in\n    let initialized_body () =\n      StringBody\n        {\n          StringBody.members = StringBody.Initialized string_members;\n          explicit_type = is_explicit;\n          has_unknown_members;\n          comments;\n        }\n    in\n    match (initialized_len, defaulted_len) with\n    | (0, 0)\n    | (0, _) ->\n      defaulted_body ()\n    | (_, 0) -> initialized_body ()\n    | _ when defaulted_len > initialized_len ->\n      List.iter\n        (fun (loc, _) ->\n          error_at env (loc, Parse_error.EnumStringMemberInconsistentlyInitailized { enum_name }))\n        string_members;\n      defaulted_body ()\n    | _ ->\n      List.iter\n        (fun (loc, _) ->\n          error_at env (loc, Parse_error.EnumStringMemberInconsistentlyInitailized { enum_name }))\n        defaulted_members;\n      initialized_body ()\n\n  let parse_explicit_type ~enum_name env =\n    if Eat.maybe env T_OF then (\n      Eat.push_lex_mode env Lex_mode.TYPE;\n      let result =\n        match Peek.token env with\n        | T_BOOLEAN_TYPE BOOLEAN -> Some Enum_common.Boolean\n        | T_NUMBER_TYPE -> Some Enum_common.Number\n        | 
T_STRING_TYPE -> Some Enum_common.String\n        | T_SYMBOL_TYPE -> Some Enum_common.Symbol\n        | T_IDENTIFIER { value; _ } ->\n          let supplied_type = Some value in\n          error env (Parse_error.EnumInvalidExplicitType { enum_name; supplied_type });\n          None\n        | _ ->\n          error env (Parse_error.EnumInvalidExplicitType { enum_name; supplied_type = None });\n          None\n      in\n      Eat.token env;\n      Eat.pop_lex_mode env;\n      result\n    ) else\n      None\n\n  let enum_body ~enum_name ~name_loc =\n    with_loc (fun env ->\n        let explicit_type = parse_explicit_type ~enum_name env in\n        let leading =\n          if explicit_type <> None then\n            Peek.comments env\n          else\n            []\n        in\n        Expect.token env T_LCURLY;\n        let (members, has_unknown_members, internal) =\n          enum_members ~enum_name ~explicit_type empty_acc env\n        in\n        let internal = internal @ Peek.comments env in\n        Expect.token env T_RCURLY;\n        let trailing =\n          match Peek.token env with\n          | T_EOF\n          | T_RCURLY ->\n            Eat.trailing_comments env\n          | _ when Peek.is_line_terminator env -> Eat.comments_until_next_line env\n          | _ -> []\n        in\n        let comments =\n          Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ()\n        in\n        let body =\n          match explicit_type with\n          | Some Enum_common.Boolean ->\n            BooleanBody\n              {\n                BooleanBody.members = members.boolean_members;\n                explicit_type = true;\n                has_unknown_members;\n                comments;\n              }\n          | Some Enum_common.Number ->\n            NumberBody\n              {\n                NumberBody.members = members.number_members;\n                explicit_type = true;\n                has_unknown_members;\n                
comments;\n              }\n          | Some Enum_common.String ->\n            string_body\n              ~env\n              ~enum_name\n              ~is_explicit:true\n              ~has_unknown_members\n              members.string_members\n              members.defaulted_members\n              comments\n          | Some Enum_common.Symbol ->\n            SymbolBody\n              { SymbolBody.members = members.defaulted_members; has_unknown_members; comments }\n          | None ->\n            let bools_len = List.length members.boolean_members in\n            let nums_len = List.length members.number_members in\n            let strs_len = List.length members.string_members in\n            let defaulted_len = List.length members.defaulted_members in\n            let empty () =\n              StringBody\n                {\n                  StringBody.members = StringBody.Defaulted [];\n                  explicit_type = false;\n                  has_unknown_members;\n                  comments;\n                }\n            in\n            begin\n              match (bools_len, nums_len, strs_len, defaulted_len) with\n              | (0, 0, 0, 0) -> empty ()\n              | (0, 0, _, _) ->\n                string_body\n                  ~env\n                  ~enum_name\n                  ~is_explicit:false\n                  ~has_unknown_members\n                  members.string_members\n                  members.defaulted_members\n                  comments\n              | (_, 0, 0, _) when bools_len >= defaulted_len ->\n                List.iter\n                  (fun (loc, { DefaultedMember.id = (_, { Identifier.name = member_name; _ }) }) ->\n                    error_at\n                      env\n                      (loc, Parse_error.EnumBooleanMemberNotInitialized { enum_name; member_name }))\n                  members.defaulted_members;\n                BooleanBody\n                  {\n                    BooleanBody.members = 
members.boolean_members;\n                    explicit_type = false;\n                    has_unknown_members;\n                    comments;\n                  }\n              | (0, _, 0, _) when nums_len >= defaulted_len ->\n                List.iter\n                  (fun (loc, { DefaultedMember.id = (_, { Identifier.name = member_name; _ }) }) ->\n                    error_at\n                      env\n                      (loc, Parse_error.EnumNumberMemberNotInitialized { enum_name; member_name }))\n                  members.defaulted_members;\n                NumberBody\n                  {\n                    NumberBody.members = members.number_members;\n                    explicit_type = false;\n                    has_unknown_members;\n                    comments;\n                  }\n              | _ ->\n                error_at env (name_loc, Parse_error.EnumInconsistentMemberValues { enum_name });\n                empty ()\n            end\n        in\n        body\n    )\n\n  let declaration =\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_ENUM;\n        let id = Parse.identifier env in\n        let (name_loc, { Identifier.name = enum_name; _ }) = id in\n        let body = enum_body ~enum_name ~name_loc env in\n        let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n        Statement.EnumDeclaration { id; body; comments }\n    )\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/expression_parser.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Ast = Flow_ast\nopen Token\nopen Parser_env\nopen Flow_ast\nopen Parser_common\nopen Comment_attachment\n\nmodule type EXPRESSION = sig\n  val assignment : env -> (Loc.t, Loc.t) Expression.t\n\n  val assignment_cover : env -> pattern_cover\n\n  val conditional : env -> (Loc.t, Loc.t) Expression.t\n\n  val is_assignable_lhs : (Loc.t, Loc.t) Expression.t -> bool\n\n  val left_hand_side : env -> (Loc.t, Loc.t) Expression.t\n\n  val number : env -> number_type -> string -> float\n\n  val sequence :\n    env -> start_loc:Loc.t -> (Loc.t, Loc.t) Expression.t list -> (Loc.t, Loc.t) Expression.t\nend\n\nmodule Expression\n    (Parse : PARSER)\n    (Type : Type_parser.TYPE)\n    (Declaration : Declaration_parser.DECLARATION)\n    (Pattern_cover : Pattern_cover.COVER) : EXPRESSION = struct\n  type op_precedence =\n    | Left_assoc of int\n    | Right_assoc of int\n\n  type group_cover =\n    | Group_expr of (Loc.t, Loc.t) Expression.t\n    | Group_typecast of (Loc.t, Loc.t) Expression.TypeCast.t\n\n  let is_tighter a b =\n    let a_prec =\n      match a with\n      | Left_assoc x -> x\n      | Right_assoc x -> x - 1\n    in\n    let b_prec =\n      match b with\n      | Left_assoc x -> x\n      | Right_assoc x -> x\n    in\n    a_prec >= b_prec\n\n  let is_assignable_lhs =\n    let open Expression in\n    function\n    | ( _,\n        MetaProperty\n          {\n            MetaProperty.meta = (_, { Identifier.name = \"new\"; comments = _ });\n            property = (_, { Identifier.name = \"target\"; comments = _ });\n            comments = _;\n          }\n      ) ->\n      false\n    | ( _,\n        MetaProperty\n          {\n            MetaProperty.meta = (_, { Identifier.name = \"import\"; comments = _ });\n            property = (_, { Identifier.name = 
\"meta\"; comments = _ });\n            comments = _;\n          }\n      ) ->\n      false\n    (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *)\n    | (_, Array _)\n    | (_, Identifier _)\n    | (_, Member _)\n    | (_, MetaProperty _)\n    | (_, Object _) ->\n      true\n    | (_, ArrowFunction _)\n    | (_, Assignment _)\n    | (_, Binary _)\n    | (_, Call _)\n    | (_, Class _)\n    | (_, Comprehension _)\n    | (_, Conditional _)\n    | (_, Function _)\n    | (_, Generator _)\n    | (_, Import _)\n    | (_, JSXElement _)\n    | (_, JSXFragment _)\n    | (_, Literal _)\n    | (_, Logical _)\n    | (_, New _)\n    | (_, OptionalCall _)\n    | (_, OptionalMember _)\n    | (_, Sequence _)\n    | (_, Super _)\n    | (_, TaggedTemplate _)\n    | (_, TemplateLiteral _)\n    | (_, This _)\n    | (_, TypeCast _)\n    | (_, Unary _)\n    | (_, Update _)\n    | (_, Yield _) ->\n      false\n\n  let as_expression = Pattern_cover.as_expression\n\n  let as_pattern = Pattern_cover.as_pattern\n\n  (* AssignmentExpression :\n   *   [+Yield] YieldExpression\n   *   ConditionalExpression\n   *   LeftHandSideExpression = AssignmentExpression\n   *   LeftHandSideExpression AssignmentOperator AssignmentExpression\n   *   ArrowFunctionFunction\n   *\n   *   Originally we were parsing this without backtracking, but\n   *   ArrowFunctionExpression got too tricky. 
Oh well.\n   *)\n  let rec assignment_cover =\n    let assignment_but_not_arrow_function_cover env =\n      let start_loc = Peek.loc env in\n      let expr_or_pattern = conditional_cover env in\n      match assignment_op env with\n      | Some operator ->\n        let expr =\n          with_loc\n            ~start_loc\n            (fun env ->\n              let left = as_pattern env expr_or_pattern in\n              let right = assignment env in\n              Expression.(Assignment { Assignment.operator; left; right; comments = None }))\n            env\n        in\n        Cover_expr expr\n      | _ -> expr_or_pattern\n    in\n    let error_callback _ = function\n      (* Don't rollback on these errors. *)\n      | Parse_error.StrictReservedWord -> ()\n      (* Everything else causes a rollback *)\n      | _ -> raise Try.Rollback\n      (* So we may or may not be parsing the first part of an arrow function\n       * (the part before the =>). We might end up parsing that whole thing or\n       * we might end up parsing only part of it and thinking we're done. 
We\n       * need to look at the next token to figure out if we really parsed an\n       * assignment expression or if this is just the beginning of an arrow\n       * function *)\n    in\n    let try_assignment_but_not_arrow_function env =\n      let env = env |> with_error_callback error_callback in\n      let ret = assignment_but_not_arrow_function_cover env in\n      match Peek.token env with\n      | T_ARROW ->\n        (* x => 123 *)\n        raise Try.Rollback\n      | T_COLON\n        when match last_token env with\n             | Some T_RPAREN -> true\n             | _ -> false ->\n        (* (x): number => 123 *)\n        raise Try.Rollback\n      (* async x => 123 -- and we've already parsed async as an identifier\n       * expression *)\n      | _ when Peek.is_identifier env ->\n        begin\n          match ret with\n          | Cover_expr (_, Expression.Identifier (_, { Identifier.name = \"async\"; comments = _ }))\n            when not (Peek.is_line_terminator env) ->\n            raise Try.Rollback\n          | _ -> ret\n        end\n      | _ -> ret\n    in\n    fun env ->\n      match (Peek.token env, Peek.is_identifier env) with\n      | (T_YIELD, _) when allow_yield env -> Cover_expr (yield env)\n      | ((T_LPAREN as t), _)\n      | ((T_LESS_THAN as t), _)\n      | ((T_THIS as t), _)\n      | (t, true) ->\n        (* Ok, we don't know if this is going to be an arrow function or a\n         * regular assignment expression. Let's first try to parse it as an\n         * assignment expression. 
If that fails we'll try an arrow function.\n         * Unless it begins with `async <` in which case we first try parsing\n         * it as an arrow function, and then an assignment expression.\n         *)\n        let (initial, secondary) =\n          if t = T_ASYNC && should_parse_types env && Peek.ith_token ~i:1 env = T_LESS_THAN then\n            (try_arrow_function, try_assignment_but_not_arrow_function)\n          else\n            (try_assignment_but_not_arrow_function, try_arrow_function)\n        in\n        (match Try.to_parse env initial with\n        | Try.ParsedSuccessfully expr -> expr\n        | Try.FailedToParse ->\n          (match Try.to_parse env secondary with\n          | Try.ParsedSuccessfully expr -> expr\n          | Try.FailedToParse ->\n            (* Well shoot. It doesn't parse cleanly as a normal\n             * expression or as an arrow_function. Let's treat it as a\n             * normal assignment expression gone wrong *)\n            assignment_but_not_arrow_function_cover env))\n      | _ -> assignment_but_not_arrow_function_cover env\n\n  and assignment env = as_expression env (assignment_cover env)\n\n  and yield env =\n    with_loc\n      (fun env ->\n        if in_formal_parameters env then error env Parse_error.YieldInFormalParameters;\n        let leading = Peek.comments env in\n        let start_loc = Peek.loc env in\n        Expect.token env T_YIELD;\n        let end_loc = Peek.loc env in\n        let (argument, delegate) =\n          if Peek.is_implicit_semicolon env then\n            (None, false)\n          else\n            let delegate = Eat.maybe env T_MULT in\n            let has_argument =\n              match Peek.token env with\n              | T_SEMICOLON\n              | T_RBRACKET\n              | T_RCURLY\n              | T_RPAREN\n              | T_COLON\n              | T_COMMA ->\n                false\n              | _ -> true\n            in\n            let argument =\n              if delegate || 
has_argument then\n                Some (assignment env)\n              else\n                None\n            in\n            (argument, delegate)\n        in\n        let trailing =\n          match argument with\n          | None -> Eat.trailing_comments env\n          | Some _ -> []\n        in\n        let open Expression in\n        Yield\n          Yield.\n            {\n              argument;\n              delegate;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n              result_out = Loc.btwn start_loc end_loc;\n            }\n          )\n      env\n\n  and is_lhs =\n    let open Expression in\n    function\n    | ( _,\n        MetaProperty\n          {\n            MetaProperty.meta = (_, { Identifier.name = \"new\"; comments = _ });\n            property = (_, { Identifier.name = \"target\"; comments = _ });\n            comments = _;\n          }\n      ) ->\n      false\n    | ( _,\n        MetaProperty\n          {\n            MetaProperty.meta = (_, { Identifier.name = \"import\"; comments = _ });\n            property = (_, { Identifier.name = \"meta\"; comments = _ });\n            comments = _;\n          }\n      ) ->\n      false\n    (* #sec-static-semantics-static-semantics-isvalidsimpleassignmenttarget *)\n    | (_, Identifier _)\n    | (_, Member _)\n    | (_, MetaProperty _) ->\n      true\n    | (_, Array _)\n    | (_, ArrowFunction _)\n    | (_, Assignment _)\n    | (_, Binary _)\n    | (_, Call _)\n    | (_, Class _)\n    | (_, Comprehension _)\n    | (_, Conditional _)\n    | (_, Function _)\n    | (_, Generator _)\n    | (_, Import _)\n    | (_, JSXElement _)\n    | (_, JSXFragment _)\n    | (_, Literal _)\n    | (_, Logical _)\n    | (_, New _)\n    | (_, Object _)\n    | (_, OptionalCall _)\n    | (_, OptionalMember _)\n    | (_, Sequence _)\n    | (_, Super _)\n    | (_, TaggedTemplate _)\n    | (_, TemplateLiteral _)\n    | (_, This _)\n    | (_, TypeCast _)\n    | (_, Unary _)\n    | (_, 
Update _)\n    | (_, Yield _) ->\n      false\n\n  and assignment_op env =\n    let op =\n      let open Expression.Assignment in\n      match Peek.token env with\n      | T_RSHIFT3_ASSIGN -> Some (Some RShift3Assign)\n      | T_RSHIFT_ASSIGN -> Some (Some RShiftAssign)\n      | T_LSHIFT_ASSIGN -> Some (Some LShiftAssign)\n      | T_BIT_XOR_ASSIGN -> Some (Some BitXorAssign)\n      | T_BIT_OR_ASSIGN -> Some (Some BitOrAssign)\n      | T_BIT_AND_ASSIGN -> Some (Some BitAndAssign)\n      | T_MOD_ASSIGN -> Some (Some ModAssign)\n      | T_DIV_ASSIGN -> Some (Some DivAssign)\n      | T_MULT_ASSIGN -> Some (Some MultAssign)\n      | T_EXP_ASSIGN -> Some (Some ExpAssign)\n      | T_MINUS_ASSIGN -> Some (Some MinusAssign)\n      | T_PLUS_ASSIGN -> Some (Some PlusAssign)\n      | T_NULLISH_ASSIGN -> Some (Some NullishAssign)\n      | T_AND_ASSIGN -> Some (Some AndAssign)\n      | T_OR_ASSIGN -> Some (Some OrAssign)\n      | T_ASSIGN -> Some None\n      | _ -> None\n    in\n    if op <> None then Eat.token env;\n    op\n\n  (* ConditionalExpression :\n   *   LogicalExpression\n   *   LogicalExpression ? 
AssignmentExpression : AssignmentExpression\n   *)\n  and conditional_cover env =\n    let start_loc = Peek.loc env in\n    let expr = logical_cover env in\n    if Peek.token env = T_PLING then (\n      Eat.token env;\n\n      (* no_in is ignored for the consequent *)\n      let env' = env |> with_no_in false in\n      let consequent = assignment env' in\n      Expect.token env T_COLON;\n      let (loc, alternate) = with_loc ~start_loc assignment env in\n      Cover_expr\n        ( loc,\n          let open Expression in\n          Conditional\n            { Conditional.test = as_expression env expr; consequent; alternate; comments = None }\n        )\n    ) else\n      expr\n\n  and conditional env = as_expression env (conditional_cover env)\n\n  (*\n   * LogicalANDExpression :\n   *   BinaryExpression\n   *   LogicalANDExpression && BitwiseORExpression\n   *\n   * LogicalORExpression :\n   *   LogicalANDExpression\n   *   LogicalORExpression || LogicalANDExpression\n   *   LogicalORExpression ?? LogicalANDExpression\n   *\n   * LogicalExpression :\n   *   LogicalORExpression\n   *)\n  and logical_cover =\n    let open Expression in\n    let make_logical env left right operator loc =\n      let left = as_expression env left in\n      let right = as_expression env right in\n      Cover_expr (loc, Logical { Logical.operator; left; right; comments = None })\n    in\n    let rec logical_and env left lloc =\n      match Peek.token env with\n      | T_AND ->\n        Eat.token env;\n        let (rloc, right) = with_loc binary_cover env in\n        let loc = Loc.btwn lloc rloc in\n        let left = make_logical env left right Logical.And loc in\n        (* `a && b ?? c` is an error, but to recover, try to parse it like `(a && b) ?? c`. 
*)\n        let (loc, left) = coalesce ~allowed:false env left loc in\n        logical_and env left loc\n      | _ -> (lloc, left)\n    and logical_or env left lloc =\n      match Peek.token env with\n      | T_OR ->\n        Eat.token env;\n        let (rloc, right) = with_loc binary_cover env in\n        let (rloc, right) = logical_and env right rloc in\n        let loc = Loc.btwn lloc rloc in\n        let left = make_logical env left right Logical.Or loc in\n        (* `a || b ?? c` is an error, but to recover, try to parse it like `(a || b) ?? c`. *)\n        let (loc, left) = coalesce ~allowed:false env left loc in\n        logical_or env left loc\n      | _ -> (lloc, left)\n    and coalesce ~allowed env left lloc =\n      match Peek.token env with\n      | T_PLING_PLING ->\n        if not allowed then error env (Parse_error.NullishCoalescingUnexpectedLogical \"??\");\n\n        Expect.token env T_PLING_PLING;\n        let (rloc, right) = with_loc binary_cover env in\n        let (rloc, right) =\n          match Peek.token env with\n          | (T_AND | T_OR) as t ->\n            (* `a ?? b || c` is an error. To recover, treat it like `a ?? (b || c)`. 
*)\n            error env (Parse_error.NullishCoalescingUnexpectedLogical (Token.value_of_token t));\n            let (rloc, right) = logical_and env right rloc in\n            logical_or env right rloc\n          | _ -> (rloc, right)\n        in\n        let loc = Loc.btwn lloc rloc in\n        coalesce ~allowed:true env (make_logical env left right Logical.NullishCoalesce loc) loc\n      | _ -> (lloc, left)\n    in\n    fun env ->\n      let (loc, left) = with_loc binary_cover env in\n      let (_, left) =\n        match Peek.token env with\n        | T_PLING_PLING -> coalesce ~allowed:true env left loc\n        | _ ->\n          let (loc, left) = logical_and env left loc in\n          logical_or env left loc\n      in\n      left\n\n  and binary_cover =\n    let binary_op env =\n      let ret =\n        let open Expression.Binary in\n        match Peek.token env with\n        (* Most BinaryExpression operators are left associative *)\n        (* Lowest pri *)\n        | T_BIT_OR -> Some (BitOr, Left_assoc 2)\n        | T_BIT_XOR -> Some (Xor, Left_assoc 3)\n        | T_BIT_AND -> Some (BitAnd, Left_assoc 4)\n        | T_EQUAL -> Some (Equal, Left_assoc 5)\n        | T_STRICT_EQUAL -> Some (StrictEqual, Left_assoc 5)\n        | T_NOT_EQUAL -> Some (NotEqual, Left_assoc 5)\n        | T_STRICT_NOT_EQUAL -> Some (StrictNotEqual, Left_assoc 5)\n        | T_LESS_THAN -> Some (LessThan, Left_assoc 6)\n        | T_LESS_THAN_EQUAL -> Some (LessThanEqual, Left_assoc 6)\n        | T_GREATER_THAN -> Some (GreaterThan, Left_assoc 6)\n        | T_GREATER_THAN_EQUAL -> Some (GreaterThanEqual, Left_assoc 6)\n        | T_IN ->\n          if no_in env then\n            None\n          else\n            Some (In, Left_assoc 6)\n        | T_INSTANCEOF -> Some (Instanceof, Left_assoc 6)\n        | T_LSHIFT -> Some (LShift, Left_assoc 7)\n        | T_RSHIFT -> Some (RShift, Left_assoc 7)\n        | T_RSHIFT3 -> Some (RShift3, Left_assoc 7)\n        | T_PLUS -> Some (Plus, Left_assoc 
8)\n        | T_MINUS -> Some (Minus, Left_assoc 8)\n        | T_MULT -> Some (Mult, Left_assoc 9)\n        | T_DIV -> Some (Div, Left_assoc 9)\n        | T_MOD -> Some (Mod, Left_assoc 9)\n        | T_EXP -> Some (Exp, Right_assoc 10)\n        (* Highest priority *)\n        | _ -> None\n      in\n      if ret <> None then Eat.token env;\n      ret\n    in\n    let make_binary left right operator loc =\n      (loc, Expression.(Binary Binary.{ operator; left; right; comments = None }))\n    in\n    let rec add_to_stack right (rop, rpri) rloc = function\n      | (left, (lop, lpri), lloc) :: rest when is_tighter lpri rpri ->\n        let loc = Loc.btwn lloc rloc in\n        let right = make_binary left right lop loc in\n        add_to_stack right (rop, rpri) loc rest\n      | stack -> (right, (rop, rpri), rloc) :: stack\n    in\n    let rec collapse_stack right rloc = function\n      | [] -> right\n      | (left, (lop, _), lloc) :: rest ->\n        let loc = Loc.btwn lloc rloc in\n        collapse_stack (make_binary left right lop loc) loc rest\n    in\n    let rec helper env stack =\n      let (right_loc, (is_unary, right)) =\n        with_loc\n          (fun env ->\n            let is_unary = peek_unary_op env <> None in\n            let right = unary_cover (env |> with_no_in false) in\n            (is_unary, right))\n          env\n      in\n      ( if Peek.token env = T_LESS_THAN then\n        match right with\n        | Cover_expr (_, Expression.JSXElement _) -> error env Parse_error.AdjacentJSXElements\n        | _ -> ()\n      );\n      match (stack, binary_op env) with\n      | ([], None) -> right\n      | (_, None) ->\n        let right = as_expression env right in\n        Cover_expr (collapse_stack right right_loc stack)\n      | (_, Some (rop, rpri)) ->\n        if is_unary && rop = Expression.Binary.Exp then\n          error_at env (right_loc, Parse_error.InvalidLHSInExponentiation);\n        let right = as_expression env right in\n        helper env 
(add_to_stack right (rop, rpri) right_loc stack)\n    in\n    (fun env -> helper env [])\n\n  and peek_unary_op env =\n    let open Expression.Unary in\n    match Peek.token env with\n    | T_NOT -> Some Not\n    | T_BIT_NOT -> Some BitNot\n    | T_PLUS -> Some Plus\n    | T_MINUS -> Some Minus\n    | T_TYPEOF -> Some Typeof\n    | T_VOID -> Some Void\n    | T_DELETE -> Some Delete\n    (* If we are in a unary expression context, and within an async function,\n     * assume that a use of \"await\" is intended as a keyword, not an ordinary\n     * identifier. This is a little bit inconsistent, since it can be used as\n     * an identifier in other contexts (such as a variable name), but it's how\n     * Babel does it. *)\n    | T_AWAIT when allow_await env -> Some Await\n    | _ -> None\n\n  and unary_cover env =\n    let start_loc = Peek.loc env in\n    let leading = Peek.comments env in\n    let op = peek_unary_op env in\n    match op with\n    | None ->\n      let op =\n        let open Expression.Update in\n        match Peek.token env with\n        | T_INCR -> Some Increment\n        | T_DECR -> Some Decrement\n        | _ -> None\n      in\n      (match op with\n      | None -> postfix_cover env\n      | Some operator ->\n        Eat.token env;\n        let (loc, argument) = with_loc ~start_loc unary env in\n        if not (is_lhs argument) then error_at env (fst argument, Parse_error.InvalidLHSInAssignment);\n        (match argument with\n        | (_, Expression.Identifier (_, { Identifier.name; comments = _ })) when is_restricted name\n          ->\n          strict_error env Parse_error.StrictLHSPrefix\n        | _ -> ());\n        Cover_expr\n          ( loc,\n            Expression.(\n              Update\n                {\n                  Update.operator;\n                  prefix = true;\n                  argument;\n                  comments = Flow_ast_utils.mk_comments_opt ~leading ();\n                }\n            )\n          ))\n    | Some 
operator ->\n      Eat.token env;\n      let (loc, argument) = with_loc ~start_loc unary env in\n      let open Expression in\n      (match (operator, argument) with\n      | (Unary.Delete, (_, Identifier _)) -> strict_error_at env (loc, Parse_error.StrictDelete)\n      | (Unary.Delete, (_, Member member)) ->\n        begin\n          match member.Ast.Expression.Member.property with\n          | Ast.Expression.Member.PropertyPrivateName _ ->\n            error_at env (loc, Parse_error.PrivateDelete)\n          | _ -> ()\n        end\n      | _ -> ());\n      Cover_expr\n        ( loc,\n          let open Expression in\n          Unary { Unary.operator; argument; comments = Flow_ast_utils.mk_comments_opt ~leading () }\n        )\n\n  and unary env = as_expression env (unary_cover env)\n\n  and postfix_cover env =\n    let argument = left_hand_side_cover env in\n    (* No line terminator allowed before operator *)\n    if Peek.is_line_terminator env then\n      argument\n    else\n      let op =\n        let open Expression.Update in\n        match Peek.token env with\n        | T_INCR -> Some Increment\n        | T_DECR -> Some Decrement\n        | _ -> None\n      in\n      match op with\n      | None -> argument\n      | Some operator ->\n        let argument = as_expression env argument in\n        if not (is_lhs argument) then error_at env (fst argument, Parse_error.InvalidLHSInAssignment);\n        (match argument with\n        | (_, Expression.Identifier (_, { Identifier.name; comments = _ })) when is_restricted name\n          ->\n          strict_error env Parse_error.StrictLHSPostfix\n        | _ -> ());\n        let end_loc = Peek.loc env in\n        Eat.token env;\n        let trailing = Eat.trailing_comments env in\n        let loc = Loc.btwn (fst argument) end_loc in\n        Cover_expr\n          ( loc,\n            Expression.(\n              Update\n                {\n                  Update.operator;\n                  prefix = false;\n             
     argument;\n                  comments = Flow_ast_utils.mk_comments_opt ~trailing ();\n                }\n            )\n          )\n\n  and left_hand_side_cover env =\n    let start_loc = Peek.loc env in\n    let allow_new = not (no_new env) in\n    let env = with_no_new false env in\n    let expr =\n      match Peek.token env with\n      | T_NEW when allow_new -> Cover_expr (new_expression env)\n      | T_IMPORT -> Cover_expr (import env)\n      | T_SUPER -> Cover_expr (super env)\n      | _ when Peek.is_function env -> Cover_expr (_function env)\n      | _ -> primary_cover env\n    in\n    call_cover env start_loc expr\n\n  and left_hand_side env = as_expression env (left_hand_side_cover env)\n\n  and super env =\n    let (allowed, call_allowed) =\n      match allow_super env with\n      | No_super -> (false, false)\n      | Super_prop -> (true, false)\n      | Super_prop_or_call -> (true, true)\n    in\n    let loc = Peek.loc env in\n    let leading = Peek.comments env in\n    Expect.token env T_SUPER;\n    let trailing = Eat.trailing_comments env in\n    let super =\n      ( loc,\n        Expression.Super\n          { Expression.Super.comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n      )\n    in\n    match Peek.token env with\n    | T_PERIOD\n    | T_LBRACKET ->\n      let super =\n        if not allowed then (\n          error_at env (loc, Parse_error.UnexpectedSuper);\n          (loc, Expression.Identifier (Flow_ast_utils.ident_of_source (loc, \"super\")))\n        ) else\n          super\n      in\n      call ~allow_optional_chain:false env loc super\n    | T_LPAREN ->\n      let super =\n        if not call_allowed then (\n          error_at env (loc, Parse_error.UnexpectedSuperCall);\n          (loc, Expression.Identifier (Flow_ast_utils.ident_of_source (loc, \"super\")))\n        ) else\n          super\n      in\n      call ~allow_optional_chain:false env loc super\n    | _ ->\n      if not allowed then\n        error_at env 
(loc, Parse_error.UnexpectedSuper)\n      else\n        error_unexpected ~expected:\"either a call or access of `super`\" env;\n      super\n\n  and import env =\n    with_loc\n      (fun env ->\n        let leading = Peek.comments env in\n        let start_loc = Peek.loc env in\n        Expect.token env T_IMPORT;\n        if Eat.maybe env T_PERIOD then (\n          (* import.meta *)\n          let import_ident = Flow_ast_utils.ident_of_source (start_loc, \"import\") in\n          let meta_loc = Peek.loc env in\n          Expect.identifier env \"meta\";\n          let meta_ident = Flow_ast_utils.ident_of_source (meta_loc, \"meta\") in\n          let trailing = Eat.trailing_comments env in\n          Expression.MetaProperty\n            {\n              Expression.MetaProperty.meta = import_ident;\n              property = meta_ident;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            }\n        ) else\n          let leading_arg = Peek.comments env in\n          Expect.token env T_LPAREN;\n          let argument = add_comments (assignment (with_no_in false env)) ~leading:leading_arg in\n          Expect.token env T_RPAREN;\n          let trailing = Eat.trailing_comments env in\n          Expression.Import\n            {\n              Expression.Import.argument;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            })\n      env\n\n  and call_cover ?(allow_optional_chain = true) ?(in_optional_chain = false) env start_loc left =\n    let left = member_cover ~allow_optional_chain ~in_optional_chain env start_loc left in\n    let optional =\n      match last_token env with\n      | Some T_PLING_PERIOD -> true\n      | _ -> false\n    in\n    let left_to_callee env =\n      let { remove_trailing; _ } = trailing_and_remover env in\n      remove_trailing (as_expression env left) (fun remover left -> remover#expression left)\n    in\n    let arguments ?targs env callee =\n      let (args_loc, 
arguments) = arguments env in\n      let loc = Loc.btwn start_loc args_loc in\n      let call =\n        { Expression.Call.callee; targs; arguments = (args_loc, arguments); comments = None }\n      in\n      let call =\n        if optional || in_optional_chain then\n          let open Expression in\n          OptionalCall { OptionalCall.call; optional; filtered_out = loc }\n        else\n          Expression.Call call\n      in\n      let in_optional_chain = in_optional_chain || optional in\n      call_cover ~allow_optional_chain ~in_optional_chain env start_loc (Cover_expr (loc, call))\n    in\n    if no_call env then\n      left\n    else\n      match Peek.token env with\n      | T_LPAREN -> arguments env (left_to_callee env)\n      | T_LSHIFT\n      | T_LESS_THAN\n        when should_parse_types env ->\n        (* If we are parsing types, then f<T>(e) is a function call with a\n           type application. If we aren't, it's a nested binary expression. *)\n        let error_callback _ _ = raise Try.Rollback in\n        let env = env |> with_error_callback error_callback in\n        (* Parameterized call syntax is ambiguous, so we fall back to\n           standard parsing if it fails. 
*)\n        Try.or_else env ~fallback:left (fun env ->\n            let callee = left_to_callee env in\n            let targs = call_type_args env in\n            arguments ?targs env callee\n        )\n      | _ -> left\n\n  and call ?(allow_optional_chain = true) env start_loc left =\n    as_expression env (call_cover ~allow_optional_chain env start_loc (Cover_expr left))\n\n  and new_expression env =\n    with_loc\n      (fun env ->\n        let start_loc = Peek.loc env in\n        let leading = Peek.comments env in\n        Expect.token env T_NEW;\n\n        if in_function env && Peek.token env = T_PERIOD then (\n          let trailing = Eat.trailing_comments env in\n          Eat.token env;\n          let meta =\n            Flow_ast_utils.ident_of_source\n              (start_loc, \"new\")\n              ?comments:(Flow_ast_utils.mk_comments_opt ~leading ~trailing ())\n          in\n          match Peek.token env with\n          | T_IDENTIFIER { raw = \"target\"; _ } ->\n            let property = Parse.identifier env in\n            Expression.(MetaProperty MetaProperty.{ meta; property; comments = None })\n          | _ ->\n            error_unexpected ~expected:\"the identifier `target`\" env;\n            Eat.token env;\n\n            (* skip unknown identifier *)\n            Expression.Identifier meta\n          (* return `new` identifier *)\n        ) else\n          let callee_loc = Peek.loc env in\n          let expr =\n            match Peek.token env with\n            | T_NEW -> new_expression env\n            | T_SUPER -> super (env |> with_no_call true)\n            | _ when Peek.is_function env -> _function env\n            | _ -> primary env\n          in\n          let callee =\n            member ~allow_optional_chain:false (env |> with_no_call true) callee_loc expr\n          in\n          (* You can do something like\n           *   new raw`42`\n           *)\n          let callee =\n            let callee =\n              match Peek.token 
env with\n              | T_TEMPLATE_PART part -> tagged_template env callee_loc callee part\n              | _ -> callee\n            in\n            (* Remove trailing comments if the callee is followed by args or type args *)\n            if Peek.token env = T_LPAREN || (should_parse_types env && Peek.token env = T_LESS_THAN)\n            then\n              let { remove_trailing; _ } = trailing_and_remover env in\n              remove_trailing callee (fun remover callee -> remover#expression callee)\n            else\n              callee\n          in\n          let targs =\n            (* If we are parsing types, then new C<T>(e) is a constructor with a\n               type application. If we aren't, it's a nested binary expression. *)\n            if should_parse_types env then\n              (* Parameterized call syntax is ambiguous, so we fall back to\n                 standard parsing if it fails. *)\n              let error_callback _ _ = raise Try.Rollback in\n              let env = env |> with_error_callback error_callback in\n              Try.or_else env ~fallback:None call_type_args\n            else\n              None\n          in\n          let arguments =\n            match Peek.token env with\n            | T_LPAREN -> Some (arguments env)\n            | _ -> None\n          in\n          let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n          Expression.(New New.{ callee; targs; arguments; comments }))\n      env\n\n  and call_type_args =\n    let args =\n      let rec args_helper env acc =\n        match Peek.token env with\n        | T_EOF\n        | T_GREATER_THAN ->\n          List.rev acc\n        | _ ->\n          let t =\n            match Peek.token env with\n            | T_IDENTIFIER { value = \"_\"; _ } ->\n              let loc = Peek.loc env in\n              let leading = Peek.comments env in\n              Expect.identifier env \"_\";\n              let trailing = Eat.trailing_comments env in\n              
Expression.CallTypeArg.Implicit\n                ( loc,\n                  {\n                    Expression.CallTypeArg.Implicit.comments =\n                      Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n                  }\n                )\n            | _ -> Expression.CallTypeArg.Explicit (Type._type env)\n          in\n          let acc = t :: acc in\n          if Peek.token env <> T_GREATER_THAN then Expect.token env T_COMMA;\n          args_helper env acc\n      in\n      fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_LESS_THAN;\n        let arguments = args_helper env [] in\n        let internal = Peek.comments env in\n        Expect.token env T_GREATER_THAN;\n        let trailing =\n          if Peek.token env = T_LPAREN then\n            let { trailing; _ } = trailing_and_remover env in\n            trailing\n          else\n            Eat.trailing_comments env\n        in\n        {\n          Expression.CallTypeArgs.arguments;\n          comments = Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n        }\n    in\n    fun env ->\n      Eat.push_lex_mode env Lex_mode.TYPE;\n      let node =\n        if Peek.token env = T_LESS_THAN then\n          Some (with_loc args env)\n        else\n          None\n      in\n      Eat.pop_lex_mode env;\n      node\n\n  and arguments =\n    let spread_element env =\n      let leading = Peek.comments env in\n      Expect.token env T_ELLIPSIS;\n      let argument = assignment env in\n      Expression.SpreadElement.{ argument; comments = Flow_ast_utils.mk_comments_opt ~leading () }\n    in\n    let argument env =\n      match Peek.token env with\n      | T_ELLIPSIS -> Expression.Spread (with_loc spread_element env)\n      | _ -> Expression.Expression (assignment env)\n    in\n    let rec arguments' env acc =\n      match Peek.token env with\n      | T_EOF\n      | T_RPAREN ->\n        List.rev acc\n      | _ ->\n        let acc = argument 
env :: acc in\n        if Peek.token env <> T_RPAREN then Expect.token env T_COMMA;\n        arguments' env acc\n    in\n    fun env ->\n      with_loc\n        (fun env ->\n          let leading = Peek.comments env in\n          Expect.token env T_LPAREN;\n          let args = arguments' env [] in\n          let internal = Peek.comments env in\n          Expect.token env T_RPAREN;\n          let trailing = Eat.trailing_comments env in\n          {\n            Expression.ArgList.arguments = args;\n            comments = Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n          })\n        env\n\n  and member_cover =\n    let dynamic\n        ?(allow_optional_chain = true)\n        ?(in_optional_chain = false)\n        ?(optional = false)\n        env\n        start_loc\n        left =\n      let expr = Parse.expression (env |> with_no_call false) in\n      let last_loc = Peek.loc env in\n      Expect.token env T_RBRACKET;\n      let trailing = Eat.trailing_comments env in\n      let loc = Loc.btwn start_loc last_loc in\n      let member =\n        {\n          Expression.Member._object = as_expression env left;\n          property = Expression.Member.PropertyExpression expr;\n          comments = Flow_ast_utils.mk_comments_opt ~trailing ();\n        }\n      in\n\n      let member =\n        if in_optional_chain then\n          let open Expression in\n          OptionalMember { OptionalMember.member; optional; filtered_out = loc }\n        else\n          Expression.Member member\n      in\n      call_cover ~allow_optional_chain ~in_optional_chain env start_loc (Cover_expr (loc, member))\n    in\n    let static\n        ?(allow_optional_chain = true)\n        ?(in_optional_chain = false)\n        ?(optional = false)\n        env\n        start_loc\n        left =\n      let open Expression.Member in\n      let (id_loc, property) =\n        match Peek.token env with\n        | T_POUND ->\n          let ((id_loc, { 
Ast.PrivateName.name; _ }) as id) = private_identifier env in\n          add_used_private env name id_loc;\n          (id_loc, PropertyPrivateName id)\n        | _ ->\n          let ((id_loc, _) as id) = identifier_name env in\n          (id_loc, PropertyIdentifier id)\n      in\n      let loc = Loc.btwn start_loc id_loc in\n      (* super.PrivateName is a syntax error *)\n      begin\n        match (left, property) with\n        | (Cover_expr (_, Ast.Expression.Super _), PropertyPrivateName _) ->\n          error_at env (loc, Parse_error.SuperPrivate)\n        | _ -> ()\n      end;\n      let member =\n        Expression.Member.{ _object = as_expression env left; property; comments = None }\n      in\n      let member =\n        if in_optional_chain then\n          let open Expression in\n          OptionalMember { OptionalMember.member; optional; filtered_out = loc }\n        else\n          Expression.Member member\n      in\n      call_cover ~allow_optional_chain ~in_optional_chain env start_loc (Cover_expr (loc, member))\n    in\n    fun ?(allow_optional_chain = true) ?(in_optional_chain = false) env start_loc left ->\n      match Peek.token env with\n      | T_PLING_PERIOD ->\n        if not allow_optional_chain then error env Parse_error.OptionalChainNew;\n\n        Expect.token env T_PLING_PERIOD;\n        begin\n          match Peek.token env with\n          | T_TEMPLATE_PART _ ->\n            error env Parse_error.OptionalChainTemplate;\n            left\n          | T_LPAREN -> left\n          | T_LESS_THAN when should_parse_types env -> left\n          | T_LBRACKET ->\n            Eat.token env;\n            dynamic ~allow_optional_chain ~in_optional_chain:true ~optional:true env start_loc left\n          | _ ->\n            static ~allow_optional_chain ~in_optional_chain:true ~optional:true env start_loc left\n        end\n      | T_LBRACKET ->\n        Eat.token env;\n        dynamic ~allow_optional_chain ~in_optional_chain env start_loc left\n      | 
T_PERIOD ->\n        Eat.token env;\n        static ~allow_optional_chain ~in_optional_chain env start_loc left\n      | T_TEMPLATE_PART part ->\n        if in_optional_chain then error env Parse_error.OptionalChainTemplate;\n\n        let expr = tagged_template env start_loc (as_expression env left) part in\n        call_cover ~allow_optional_chain:false env start_loc (Cover_expr expr)\n      | _ -> left\n\n  and member ?(allow_optional_chain = true) env start_loc left =\n    as_expression env (member_cover ~allow_optional_chain env start_loc (Cover_expr left))\n\n  and _function env =\n    with_loc\n      (fun env ->\n        let (async, leading_async) = Declaration.async env in\n        let (sig_loc, (id, params, generator, predicate, return, tparams, leading)) =\n          with_loc\n            (fun env ->\n              let leading_function = Peek.comments env in\n              Expect.token env T_FUNCTION;\n              let (generator, leading_generator) = Declaration.generator env in\n              let leading = List.concat [leading_async; leading_function; leading_generator] in\n              (* `await` is a keyword in async functions:\n                 - proposal-async-iteration/#prod-AsyncGeneratorExpression\n                 - #prod-AsyncFunctionExpression *)\n              let await = async in\n              (* `yield` is a keyword in generator functions:\n                 - proposal-async-iteration/#prod-AsyncGeneratorExpression\n                 - #prod-GeneratorExpression *)\n              let yield = generator in\n              let (id, tparams) =\n                if Peek.token env = T_LPAREN then\n                  (None, None)\n                else\n                  let id =\n                    match Peek.token env with\n                    | T_LESS_THAN -> None\n                    | _ ->\n                      let env = env |> with_allow_await await |> with_allow_yield yield in\n                      let id =\n                        
id_remove_trailing\n                          env\n                          (Parse.identifier ~restricted_error:Parse_error.StrictFunctionName env)\n                      in\n                      Some id\n                  in\n                  let tparams = type_params_remove_trailing env (Type.type_params env) in\n                  (id, tparams)\n              in\n              (* #sec-function-definitions-static-semantics-early-errors *)\n              let env = env |> with_allow_super No_super in\n              let params =\n                let params = Declaration.function_params ~await ~yield env in\n                if Peek.token env = T_COLON then\n                  params\n                else\n                  function_params_remove_trailing env params\n              in\n              let (return, predicate) = Type.annotation_and_predicate_opt env in\n              let (return, predicate) =\n                match predicate with\n                | None -> (type_annotation_hint_remove_trailing env return, predicate)\n                | Some _ -> (return, predicate_remove_trailing env predicate)\n              in\n              (id, params, generator, predicate, return, tparams, leading))\n            env\n        in\n        let simple_params = is_simple_parameter_list params in\n        let (body, contains_use_strict) =\n          Declaration.function_body env ~async ~generator ~expression:true ~simple_params\n        in\n        Declaration.strict_post_check env ~contains_use_strict id params;\n        Expression.Function\n          {\n            Function.id;\n            params;\n            body;\n            generator;\n            async;\n            predicate;\n            return;\n            tparams;\n            sig_loc;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ();\n          })\n      env\n\n  and number env kind raw =\n    let value =\n      match kind with\n      | LEGACY_OCTAL ->\n        strict_error env 
Parse_error.StrictOctalLiteral;\n        begin\n          try Int64.to_float (Int64.of_string (\"0o\" ^ raw)) with\n          | Failure _ -> failwith (\"Invalid legacy octal \" ^ raw)\n        end\n      | LEGACY_NON_OCTAL ->\n        strict_error env Parse_error.StrictNonOctalLiteral;\n        begin\n          try float_of_string raw with\n          | Failure _ -> failwith (\"Invalid number \" ^ raw)\n        end\n      | BINARY\n      | OCTAL ->\n        begin\n          try Int64.to_float (Int64.of_string raw) with\n          | Failure _ -> failwith (\"Invalid binary/octal \" ^ raw)\n        end\n      | NORMAL ->\n        begin\n          try float_of_string raw with\n          | Failure _ -> failwith (\"Invalid number \" ^ raw)\n        end\n    in\n    Expect.token env (T_NUMBER { kind; raw });\n    value\n\n  and bigint_strip_n raw =\n    let size = String.length raw in\n    let str =\n      if size != 0 && raw.[size - 1] == 'n' then\n        String.sub raw 0 (size - 1)\n      else\n        raw\n    in\n    str\n\n  and bigint env kind raw =\n    let postraw = bigint_strip_n raw in\n    let value = Int64.of_string_opt postraw in\n    Expect.token env (T_BIGINT { kind; raw });\n    value\n\n  and primary_cover env =\n    let loc = Peek.loc env in\n    let leading = Peek.comments env in\n    match Peek.token env with\n    | T_THIS ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Cover_expr\n        ( loc,\n          Expression.This\n            { Expression.This.comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n        )\n    | T_NUMBER { kind; raw } ->\n      let value = Literal.Number (number env kind raw) in\n      let trailing = Eat.trailing_comments env in\n      Cover_expr\n        ( loc,\n          let open Expression in\n          Literal\n            { Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n        )\n    | T_BIGINT { kind; raw } ->\n      let value = 
Literal.BigInt (bigint env kind raw) in\n      let trailing = Eat.trailing_comments env in\n      Cover_expr\n        ( loc,\n          let open Expression in\n          Literal\n            { Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n        )\n    | T_STRING (loc, value, raw, octal) ->\n      if octal then strict_error env Parse_error.StrictOctalLiteral;\n      Eat.token env;\n      let value = Literal.String value in\n      let trailing = Eat.trailing_comments env in\n      Cover_expr\n        ( loc,\n          Expression.Literal\n            { Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n        )\n    | (T_TRUE | T_FALSE) as token ->\n      Eat.token env;\n      let truthy = token = T_TRUE in\n      let raw =\n        if truthy then\n          \"true\"\n        else\n          \"false\"\n      in\n      let value = Literal.Boolean truthy in\n      let trailing = Eat.trailing_comments env in\n      Cover_expr\n        ( loc,\n          Expression.Literal\n            { Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n        )\n    | T_NULL ->\n      Eat.token env;\n      let raw = \"null\" in\n      let value = Literal.Null in\n      let trailing = Eat.trailing_comments env in\n      Cover_expr\n        ( loc,\n          Expression.Literal\n            { Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n        )\n    | T_LPAREN -> Cover_expr (group env)\n    | T_LCURLY ->\n      let (loc, obj, errs) = Parse.object_initializer env in\n      Cover_patt ((loc, Expression.Object obj), errs)\n    | T_LBRACKET ->\n      let (loc, (arr, errs)) = with_loc array_initializer env in\n      Cover_patt ((loc, Expression.Array arr), errs)\n    | T_DIV\n    | T_DIV_ASSIGN ->\n      Cover_expr (regexp env)\n    | T_LESS_THAN ->\n      let (loc, expression) =\n        match Parse.jsx_element_or_fragment env with\n    
    | (loc, `Element e) -> (loc, Expression.JSXElement e)\n        | (loc, `Fragment f) -> (loc, Expression.JSXFragment f)\n      in\n      Cover_expr (loc, expression)\n    | T_TEMPLATE_PART part ->\n      let (loc, template) = template_literal env part in\n      Cover_expr (loc, Expression.TemplateLiteral template)\n    | T_CLASS -> Cover_expr (Parse.class_expression env)\n    | _ when Peek.is_identifier env ->\n      let id = Parse.identifier env in\n      Cover_expr (fst id, Expression.Identifier id)\n    | t ->\n      error_unexpected env;\n\n      (* Let's get rid of the bad token *)\n      begin\n        match t with\n        | T_ERROR _ -> Eat.token env\n        | _ -> ()\n      end;\n\n      (* Really no idea how to recover from this. I suppose a null\n       * expression is as good as anything *)\n      let value = Literal.Null in\n      let raw = \"null\" in\n      let trailing = [] in\n      Cover_expr\n        ( loc,\n          let open Expression in\n          Literal\n            { Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n        )\n\n  and primary env = as_expression env (primary_cover env)\n\n  and template_literal =\n    let rec template_parts env quasis expressions =\n      let expr = Parse.expression env in\n      let expressions = expr :: expressions in\n      match Peek.token env with\n      | T_RCURLY ->\n        Eat.push_lex_mode env Lex_mode.TEMPLATE;\n        let (loc, part, is_tail) =\n          match Peek.token env with\n          | T_TEMPLATE_PART (loc, { cooked; raw; _ }, tail) ->\n            let open Ast.Expression.TemplateLiteral in\n            Eat.token env;\n            (loc, { Element.value = { Element.cooked; raw }; tail }, tail)\n          | _ -> assert false\n        in\n        Eat.pop_lex_mode env;\n        let quasis = (loc, part) :: quasis in\n        if is_tail then\n          (loc, List.rev quasis, List.rev expressions)\n        else\n          template_parts env quasis 
expressions\n      | _ ->\n        (* Malformed template *)\n        error_unexpected ~expected:\"a template literal part\" env;\n        let imaginary_quasi =\n          ( fst expr,\n            {\n              Expression.TemplateLiteral.Element.value =\n                { Expression.TemplateLiteral.Element.raw = \"\"; cooked = \"\" };\n              tail = true;\n            }\n          )\n        in\n        (fst expr, List.rev (imaginary_quasi :: quasis), List.rev expressions)\n    in\n    fun env ((start_loc, { cooked; raw; _ }, is_tail) as part) ->\n      let leading = Peek.comments env in\n      Expect.token env (T_TEMPLATE_PART part);\n      let (end_loc, quasis, expressions) =\n        let head =\n          ( start_loc,\n            {\n              Ast.Expression.TemplateLiteral.Element.value =\n                { Ast.Expression.TemplateLiteral.Element.cooked; raw };\n              tail = is_tail;\n            }\n          )\n        in\n\n        if is_tail then\n          (start_loc, [head], [])\n        else\n          template_parts env [head] []\n      in\n      let trailing = Eat.trailing_comments env in\n      let loc = Loc.btwn start_loc end_loc in\n      ( loc,\n        {\n          Expression.TemplateLiteral.quasis;\n          expressions;\n          comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n        }\n      )\n\n  and tagged_template env start_loc tag part =\n    let tag = expression_remove_trailing env tag in\n    let quasi = template_literal env part in\n    ( Loc.btwn start_loc (fst quasi),\n      Expression.(TaggedTemplate TaggedTemplate.{ tag; quasi; comments = None })\n    )\n\n  and group env =\n    let leading = Peek.comments env in\n    let (loc, cover) =\n      with_loc\n        (fun env ->\n          Expect.token env T_LPAREN;\n          let expr_start_loc = Peek.loc env in\n          let expression = assignment env in\n          let ret =\n            match Peek.token env with\n            | T_COLON ->\n      
        let annot = Type.annotation env in\n              Group_typecast Expression.TypeCast.{ expression; annot; comments = None }\n            | T_COMMA -> Group_expr (sequence env ~start_loc:expr_start_loc [expression])\n            | _ -> Group_expr expression\n          in\n          Expect.token env T_RPAREN;\n          ret)\n        env\n    in\n    let trailing = Eat.trailing_comments env in\n    let ret =\n      match cover with\n      | Group_expr expr -> expr\n      | Group_typecast cast -> (loc, Expression.TypeCast cast)\n    in\n    add_comments ret ~leading ~trailing\n\n  and add_comments ?(leading = []) ?(trailing = []) (loc, expression) =\n    let merge_comments inner =\n      Flow_ast_utils.merge_comments\n        ~inner\n        ~outer:(Flow_ast_utils.mk_comments_opt ~leading ~trailing ())\n    in\n    let merge_comments_with_internal inner =\n      Flow_ast_utils.merge_comments_with_internal\n        ~inner\n        ~outer:(Flow_ast_utils.mk_comments_opt ~leading ~trailing ())\n    in\n    let open Expression in\n    ( loc,\n      match expression with\n      | Array ({ Array.comments; _ } as e) ->\n        Array { e with Array.comments = merge_comments_with_internal comments }\n      | ArrowFunction ({ Function.comments; _ } as e) ->\n        ArrowFunction { e with Function.comments = merge_comments comments }\n      | Assignment ({ Assignment.comments; _ } as e) ->\n        Assignment { e with Assignment.comments = merge_comments comments }\n      | Binary ({ Binary.comments; _ } as e) ->\n        Binary { e with Binary.comments = merge_comments comments }\n      | Call ({ Call.comments; _ } as e) -> Call { e with Call.comments = merge_comments comments }\n      | Class ({ Class.comments; _ } as e) ->\n        Class { e with Class.comments = merge_comments comments }\n      | Conditional ({ Conditional.comments; _ } as e) ->\n        Conditional { e with Conditional.comments = merge_comments comments }\n      | Function ({ Function.comments; _ 
} as e) ->\n        Function { e with Function.comments = merge_comments comments }\n      | Identifier (loc, ({ Identifier.comments; _ } as e)) ->\n        Identifier (loc, { e with Identifier.comments = merge_comments comments })\n      | Import ({ Import.comments; _ } as e) ->\n        Import { e with Import.comments = merge_comments comments }\n      | JSXElement ({ JSX.comments; _ } as e) ->\n        JSXElement { e with JSX.comments = merge_comments comments }\n      | JSXFragment ({ JSX.frag_comments; _ } as e) ->\n        JSXFragment { e with JSX.frag_comments = merge_comments frag_comments }\n      | Literal ({ Literal.comments; _ } as e) ->\n        Literal { e with Literal.comments = merge_comments comments }\n      | Logical ({ Logical.comments; _ } as e) ->\n        Logical { e with Logical.comments = merge_comments comments }\n      | Member ({ Member.comments; _ } as e) ->\n        Member { e with Member.comments = merge_comments comments }\n      | MetaProperty ({ MetaProperty.comments; _ } as e) ->\n        MetaProperty { e with MetaProperty.comments = merge_comments comments }\n      | New ({ New.comments; _ } as e) -> New { e with New.comments = merge_comments comments }\n      | Object ({ Object.comments; _ } as e) ->\n        Object { e with Object.comments = merge_comments_with_internal comments }\n      | OptionalCall ({ OptionalCall.call = { Call.comments; _ } as call; _ } as optional_call) ->\n        OptionalCall\n          {\n            optional_call with\n            OptionalCall.call = { call with Call.comments = merge_comments comments };\n          }\n      | OptionalMember\n          ({ OptionalMember.member = { Member.comments; _ } as member; _ } as optional_member) ->\n        OptionalMember\n          {\n            optional_member with\n            OptionalMember.member = { member with Member.comments = merge_comments comments };\n          }\n      | Sequence ({ Sequence.comments; _ } as e) ->\n        Sequence { e with 
Sequence.comments = merge_comments comments }\n      | Super { Super.comments; _ } -> Super { Super.comments = merge_comments comments }\n      | TaggedTemplate ({ TaggedTemplate.comments; _ } as e) ->\n        TaggedTemplate { e with TaggedTemplate.comments = merge_comments comments }\n      | TemplateLiteral ({ TemplateLiteral.comments; _ } as e) ->\n        TemplateLiteral { e with TemplateLiteral.comments = merge_comments comments }\n      | This { This.comments; _ } -> This { This.comments = merge_comments comments }\n      | TypeCast ({ TypeCast.comments; _ } as e) ->\n        TypeCast { e with TypeCast.comments = merge_comments comments }\n      | Unary ({ Unary.comments; _ } as e) ->\n        Unary { e with Unary.comments = merge_comments comments }\n      | Update ({ Update.comments; _ } as e) ->\n        Update { e with Update.comments = merge_comments comments }\n      | Yield ({ Yield.comments; _ } as e) ->\n        Yield { e with Yield.comments = merge_comments comments }\n      (* TODO: Delete once all expressions support comment attachment *)\n      | _ -> expression\n    )\n\n  and array_initializer =\n    let rec elements env (acc, errs) =\n      match Peek.token env with\n      | T_EOF\n      | T_RBRACKET ->\n        (List.rev acc, Pattern_cover.rev_errors errs)\n      | T_COMMA ->\n        let loc = Peek.loc env in\n        Eat.token env;\n        elements env (Expression.Array.Hole loc :: acc, errs)\n      | T_ELLIPSIS ->\n        let leading = Peek.comments env in\n        let (loc, (argument, new_errs)) =\n          with_loc\n            (fun env ->\n              Eat.token env;\n              match assignment_cover env with\n              | Cover_expr argument -> (argument, Pattern_cover.empty_errors)\n              | Cover_patt (argument, new_errs) -> (argument, new_errs))\n            env\n        in\n        let elem =\n          Expression.(\n            Array.Spread\n              ( loc,\n                SpreadElement.{ argument; 
comments = Flow_ast_utils.mk_comments_opt ~leading () }\n              )\n          )\n        in\n        let is_last = Peek.token env = T_RBRACKET in\n        (* if this array is interpreted as a pattern, the spread becomes an AssignmentRestElement\n           which must be the last element. We can easily error about additional elements since\n           they will be in the element list, but a trailing elision, like `[...x,]`, is not part\n           of the AST. so, keep track of the error so we can raise it if this is a pattern. *)\n        let new_errs =\n          if (not is_last) && Peek.ith_token ~i:1 env = T_RBRACKET then\n            let if_patt = (loc, Parse_error.ElementAfterRestElement) :: new_errs.if_patt in\n            { new_errs with if_patt }\n          else\n            new_errs\n        in\n        if not is_last then Expect.token env T_COMMA;\n        let acc = elem :: acc in\n        let errs = Pattern_cover.rev_append_errors new_errs errs in\n        elements env (acc, errs)\n      | _ ->\n        let (elem, new_errs) =\n          match assignment_cover env with\n          | Cover_expr elem -> (elem, Pattern_cover.empty_errors)\n          | Cover_patt (elem, new_errs) -> (elem, new_errs)\n        in\n        if Peek.token env <> T_RBRACKET then Expect.token env T_COMMA;\n        let acc = Expression.Array.Expression elem :: acc in\n        let errs = Pattern_cover.rev_append_errors new_errs errs in\n        elements env (acc, errs)\n    in\n    fun env ->\n      let leading = Peek.comments env in\n      Expect.token env T_LBRACKET;\n      let (elems, errs) = elements env ([], Pattern_cover.empty_errors) in\n      let internal = Peek.comments env in\n      Expect.token env T_RBRACKET;\n      let trailing = Eat.trailing_comments env in\n      ( {\n          Ast.Expression.Array.elements = elems;\n          comments = Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n        },\n        errs\n      )\n\n  and regexp 
env =\n    Eat.push_lex_mode env Lex_mode.REGEXP;\n    let loc = Peek.loc env in\n    let leading = Peek.comments env in\n    let tkn = Peek.token env in\n    let (raw, pattern, raw_flags, trailing) =\n      match tkn with\n      | T_REGEXP (_, pattern, flags) ->\n        Eat.token env;\n        let trailing = Eat.trailing_comments env in\n        let raw = \"/\" ^ pattern ^ \"/\" ^ flags in\n        (raw, pattern, flags, trailing)\n      | _ ->\n        error_unexpected ~expected:\"a regular expression\" env;\n        (\"\", \"\", \"\", [])\n    in\n    Eat.pop_lex_mode env;\n    let filtered_flags = Buffer.create (String.length raw_flags) in\n    String.iter\n      (function\n        | ('d' | 'g' | 'i' | 'm' | 's' | 'u' | 'y') as c -> Buffer.add_char filtered_flags c\n        | _ -> ())\n      raw_flags;\n    let flags = Buffer.contents filtered_flags in\n    if flags <> raw_flags then error env (Parse_error.InvalidRegExpFlags raw_flags);\n    let value = Literal.(RegExp { RegExp.pattern; flags }) in\n    ( loc,\n      let open Expression in\n      Literal\n        { Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n    )\n\n  and try_arrow_function =\n    (* Certain errors (almost all errors) cause a rollback *)\n    let error_callback _ =\n      Parse_error.(\n        function\n        (* Don't rollback on these errors. 
*)\n        | StrictParamName\n        | StrictReservedWord\n        | ParameterAfterRestParameter\n        | NewlineBeforeArrow\n        | YieldInFormalParameters\n        | ThisParamBannedInArrowFunctions ->\n          ()\n        (* Everything else causes a rollback *)\n        | _ -> raise Try.Rollback\n      )\n    in\n    let concise_function_body env =\n      match Peek.token env with\n      | T_LCURLY ->\n        let (body_block, contains_use_strict) = Parse.function_block_body env ~expression:true in\n        (Function.BodyBlock body_block, contains_use_strict)\n      | _ ->\n        let expr = Parse.assignment env in\n        (Function.BodyExpression expr, false)\n    in\n    fun env ->\n      let env = env |> with_error_callback error_callback in\n      let start_loc = Peek.loc env in\n      (* a T_ASYNC could either be a parameter name or it could be indicating\n       * that it's an async function *)\n      let (async, leading) =\n        if Peek.ith_token ~i:1 env <> T_ARROW then\n          Declaration.async env\n        else\n          (false, [])\n      in\n      let (sig_loc, (tparams, params, return, predicate)) =\n        with_loc\n          (fun env ->\n            let tparams = type_params_remove_trailing env (Type.type_params env) in\n            (* Disallow all fancy features for identifier => body *)\n            if Peek.is_identifier env && tparams = None then\n              let ((loc, _) as name) =\n                Parse.identifier ~restricted_error:Parse_error.StrictParamName env\n              in\n              let param =\n                ( loc,\n                  {\n                    Ast.Function.Param.argument =\n                      ( loc,\n                        Pattern.Identifier\n                          {\n                            Pattern.Identifier.name;\n                            annot = Ast.Type.Missing (Peek.loc_skip_lookahead env);\n                            optional = false;\n                          }\n        
              );\n                    default = None;\n                  }\n                )\n              in\n              ( tparams,\n                ( loc,\n                  {\n                    Ast.Function.Params.params = [param];\n                    rest = None;\n                    comments = None;\n                    this_ = None;\n                  }\n                ),\n                Ast.Type.Missing Loc.{ loc with start = loc._end },\n                None\n              )\n            else\n              let params =\n                let yield = allow_yield env in\n                let await = allow_await env in\n                Declaration.function_params ~await ~yield env\n              in\n              (* There's an ambiguity if you use a function type as the return\n               * type for an arrow function. So we disallow anonymous function\n               * types in arrow function return types unless the function type is\n               * enclosed in parens *)\n              let (return, predicate) =\n                env |> with_no_anon_function_type true |> Type.annotation_and_predicate_opt\n              in\n              (tparams, params, return, predicate))\n          env\n      in\n      (* It's hard to tell if an invalid expression was intended to be an\n       * arrow function before we see the =>. If there are no params, that\n       * implies \"()\" which is only ever found in arrow params. Similarly,\n       * rest params indicate arrow functions. 
Therefore, if we see a rest\n       * param or an empty param list then we can disable the rollback and\n       * instead generate errors as if we were parsing an arrow function *)\n      let env =\n        match params with\n        | (_, { Ast.Function.Params.params = _; rest = Some _; this_ = None; comments = _ })\n        | (_, { Ast.Function.Params.params = []; rest = _; this_ = None; comments = _ }) ->\n          without_error_callback env\n        | _ -> env\n      in\n\n      (* Disallow this param annotations in arrow functions *)\n      let params =\n        match params with\n        | (loc, ({ Ast.Function.Params.this_ = Some (this_loc, _); _ } as params)) ->\n          error_at env (this_loc, Parse_error.ThisParamBannedInArrowFunctions);\n          (loc, { params with Ast.Function.Params.this_ = None })\n        | _ -> params\n      in\n      let simple_params = is_simple_parameter_list params in\n\n      if Peek.is_line_terminator env && Peek.token env = T_ARROW then\n        error env Parse_error.NewlineBeforeArrow;\n      Expect.token env T_ARROW;\n\n      (* Now we know for sure this is an arrow function *)\n      let env = without_error_callback env in\n      (* arrow functions can't be generators *)\n      let env = enter_function env ~async ~generator:false ~simple_params in\n      let (end_loc, (body, contains_use_strict)) = with_loc concise_function_body env in\n      Declaration.strict_post_check env ~contains_use_strict None params;\n      let loc = Loc.btwn start_loc end_loc in\n      Cover_expr\n        ( loc,\n          let open Expression in\n          ArrowFunction\n            {\n              Function.id = None;\n              params;\n              body;\n              async;\n              generator = false;\n              (* arrow functions cannot be generators *)\n              predicate;\n              return;\n              tparams;\n              sig_loc;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n   
         }\n        )\n\n  and sequence =\n    let rec helper acc env =\n      match Peek.token env with\n      | T_COMMA ->\n        Eat.token env;\n        let expr = assignment env in\n        helper (expr :: acc) env\n      | _ ->\n        let expressions = List.rev acc in\n        Expression.(Sequence Sequence.{ expressions; comments = None })\n    in\n    (fun env ~start_loc acc -> with_loc ~start_loc (helper acc) env)\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/file_key.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\nopen Primitive_deriving\n\ntype t =\n  | LibFile of string\n  | SourceFile of string\n  | JsonFile of string\n  (* A resource that might get required, like .css, .jpg, etc. We don't parse\n     these, just check that they exist *)\n  | ResourceFile of string\n[@@deriving_inline equal]\nlet _ = fun (_ : t) -> ()\nlet equal =\n  (fun a__001_ ->\n     fun b__002_ ->\n       if Ppx_compare_lib.phys_equal a__001_ b__002_\n       then true\n       else\n         (match (a__001_, b__002_) with\n          | (LibFile _a__003_, LibFile _b__004_) ->\n              equal_string _a__003_ _b__004_\n          | (LibFile _, _) -> false\n          | (_, LibFile _) -> false\n          | (SourceFile _a__005_, SourceFile _b__006_) ->\n              equal_string _a__005_ _b__006_\n          | (SourceFile _, _) -> false\n          | (_, SourceFile _) -> false\n          | (JsonFile _a__007_, JsonFile _b__008_) ->\n              equal_string _a__007_ _b__008_\n          | (JsonFile _, _) -> false\n          | (_, JsonFile _) -> false\n          | (ResourceFile _a__009_, ResourceFile _b__010_) ->\n              equal_string _a__009_ _b__010_) : t -> t -> bool)\nlet _ = equal\n[@@@end]\nlet to_string = function\n  | LibFile x\n  | SourceFile x\n  | JsonFile x\n  | ResourceFile x ->\n    x\n\nlet to_path = function\n  | LibFile x\n  | SourceFile x\n  | JsonFile x\n  | ResourceFile x ->\n    Ok x\n\nlet compare =\n  (* libs, then source and json files at the same priority since JSON files are\n   * basically source files. 
We don't actually read resource files so they come\n   * last *)\n  let order_of_filename = function\n    | LibFile _ -> 1\n    | SourceFile _ -> 2\n    | JsonFile _ -> 2\n    | ResourceFile _ -> 3\n  in\n  fun a b ->\n    let k = order_of_filename a - order_of_filename b in\n    if k <> 0 then\n      k\n    else\n      String.compare (to_string a) (to_string b)\n\nlet compare_opt a b =\n  match (a, b) with\n  | (Some _, None) -> -1\n  | (None, Some _) -> 1\n  | (None, None) -> 0\n  | (Some a, Some b) -> compare a b\n\nlet is_lib_file = function\n  | LibFile _ -> true\n  | SourceFile _ -> false\n  | JsonFile _ -> false\n  | ResourceFile _ -> false\n\nlet map f = function\n  | LibFile filename -> LibFile (f filename)\n  | SourceFile filename -> SourceFile (f filename)\n  | JsonFile filename -> JsonFile (f filename)\n  | ResourceFile filename -> ResourceFile (f filename)\n\nlet exists f = function\n  | LibFile filename\n  | SourceFile filename\n  | JsonFile filename\n  | ResourceFile filename ->\n    f filename\n\nlet check_suffix filename suffix = exists (fun fn -> Filename.check_suffix fn suffix) filename\nlet chop_suffix filename suffix = map (fun fn -> Filename.chop_suffix fn suffix) filename\nlet with_suffix filename suffix = map (fun fn -> fn ^ suffix) filename\n"
  },
  {
    "path": "analysis/vendor/js_parser/flow_LICENSE",
    "content": "MIT License\n\nCopyright (c) 2013-present, Facebook, Inc.\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "analysis/vendor/js_parser/flow_ast.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule rec Syntax : sig\n  type ('M, 'internal) t = {\n    leading: 'M Comment.t list;\n    trailing: 'M Comment.t list;\n    internal: 'internal;\n  }\nend =\n  Syntax\n\nand Identifier : sig\n  type ('M, 'T) t = 'T * 'M t'\n\n  and 'M t' = {\n    name: string;\n    comments: ('M, unit) Syntax.t option;\n  }\nend =\n  Identifier\n\nand PrivateName : sig\n  type 'M t = 'M * 'M t'\n\n  and 'M t' = {\n    name: string;\n    comments: ('M, unit) Syntax.t option;\n  }\nend =\n  PrivateName\n\nand Literal : sig\n  module RegExp : sig\n    type t = {\n      pattern: string;\n      flags: string;\n    }\n  end\n\n  (* Literals also carry along their raw value *)\n  type 'M t = {\n    value: value;\n    raw: string;\n    comments: ('M, unit) Syntax.t option;\n  }\n\n  and value =\n    | String of string\n    | Boolean of bool\n    | Null\n    | Number of float\n    | BigInt of int64 option\n    | RegExp of RegExp.t\nend =\n  Literal\n\nand StringLiteral : sig\n  type 'M t = {\n    value: string;\n    raw: string;\n    comments: ('M, unit) Syntax.t option;\n  }\nend =\n  StringLiteral\n\nand NumberLiteral : sig\n  type 'M t = {\n    value: float;\n    raw: string;\n    comments: ('M, unit) Syntax.t option;\n  }\nend =\n  NumberLiteral\n\nand BigIntLiteral : sig\n  type 'M t = {\n    (* This will be None if we couldn't parse `raw`. 
That could be if the number is out of range or invalid (like a float) *)\n    value: int64 option;\n    raw: string;\n    comments: ('M, unit) Syntax.t option;\n  }\nend =\n  BigIntLiteral\n\nand BooleanLiteral : sig\n  type 'M t = {\n    value: bool;\n    comments: ('M, unit) Syntax.t option;\n  }\nend =\n  BooleanLiteral\n\nand Variance : sig\n  type 'M t = 'M * 'M t'\n\n  and kind =\n    | Plus\n    | Minus\n\n  and 'M t' = {\n    kind: kind;\n    comments: ('M, unit) Syntax.t option;\n  }\nend =\n  Variance\n\nand ComputedKey : sig\n  type ('M, 'T) t = 'M * ('M, 'T) ComputedKey.t'\n\n  and ('M, 'T) t' = {\n    expression: ('M, 'T) Expression.t;\n    comments: ('M, unit) Syntax.t option;\n  }\nend =\n  ComputedKey\n\nand Type : sig\n  module Function : sig\n    module Param : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        name: ('M, 'T) Identifier.t option;\n        annot: ('M, 'T) Type.t;\n        optional: bool;\n      }\n    end\n\n    module RestParam : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        argument: ('M, 'T) Param.t;\n        comments: ('M, unit) Syntax.t option;\n      }\n    end\n\n    module ThisParam : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        annot: ('M, 'T) Type.annotation;\n        comments: ('M, unit) Syntax.t option;\n      }\n    end\n\n    module Params : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        this_: ('M, 'T) ThisParam.t option;\n        params: ('M, 'T) Param.t list;\n        rest: ('M, 'T) RestParam.t option;\n        comments: ('M, 'M Comment.t list) Syntax.t option;\n      }\n    end\n\n    type ('M, 'T) t = {\n      tparams: ('M, 'T) Type.TypeParams.t option;\n      params: ('M, 'T) Params.t;\n      return: ('M, 'T) Type.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Generic : sig\n    module Identifier : sig\n      type ('M, 'T) t =\n        | 
Unqualified of ('M, 'T) Identifier.t\n        | Qualified of ('M, 'T) qualified\n\n      and ('M, 'T) qualified = 'M * ('M, 'T) qualified'\n\n      and ('M, 'T) qualified' = {\n        qualification: ('M, 'T) t;\n        id: ('M, 'T) Identifier.t;\n      }\n    end\n\n    type ('M, 'T) t = {\n      id: ('M, 'T) Identifier.t;\n      targs: ('M, 'T) Type.TypeArgs.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module IndexedAccess : sig\n    type ('M, 'T) t = {\n      _object: ('M, 'T) Type.t;\n      index: ('M, 'T) Type.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module OptionalIndexedAccess : sig\n    type ('M, 'T) t = {\n      indexed_access: ('M, 'T) IndexedAccess.t;\n      optional: bool;\n    }\n  end\n\n  module Object : sig\n    module Property : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        key: ('M, 'T) Expression.Object.Property.key;\n        value: ('M, 'T) value;\n        optional: bool;\n        static: bool;\n        proto: bool;\n        _method: bool;\n        variance: 'M Variance.t option;\n        comments: ('M, unit) Syntax.t option;\n      }\n\n      and ('M, 'T) value =\n        | Init of ('M, 'T) Type.t\n        | Get of ('M * ('M, 'T) Function.t)\n        | Set of ('M * ('M, 'T) Function.t)\n    end\n\n    module SpreadProperty : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        argument: ('M, 'T) Type.t;\n        comments: ('M, unit) Syntax.t option;\n      }\n    end\n\n    module Indexer : sig\n      type ('M, 'T) t' = {\n        id: ('M, 'M) Identifier.t option;\n        key: ('M, 'T) Type.t;\n        value: ('M, 'T) Type.t;\n        static: bool;\n        variance: 'M Variance.t option;\n        comments: ('M, unit) Syntax.t option;\n      }\n\n      and ('M, 'T) t = 'M * ('M, 'T) t'\n    end\n\n    module CallProperty : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        value: 'M * 
('M, 'T) Function.t;\n        static: bool;\n        comments: ('M, unit) Syntax.t option;\n      }\n    end\n\n    module InternalSlot : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        id: ('M, 'M) Identifier.t;\n        value: ('M, 'T) Type.t;\n        optional: bool;\n        static: bool;\n        _method: bool;\n        comments: ('M, unit) Syntax.t option;\n      }\n    end\n\n    type ('M, 'T) t = {\n      exact: bool;\n      (* Inexact indicates the presence of ... in the object. It is more\n       * easily understood if exact is read as \"explicitly exact\" and \"inexact\"\n       * is read as \"explicitly inexact\".\n       *\n       * This confusion will go away when we get rid of the exact flag in favor\n       * of inexact as part of the work to make object types exact by default.\n       * *)\n      inexact: bool;\n      properties: ('M, 'T) property list;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n\n    and ('M, 'T) property =\n      | Property of ('M, 'T) Property.t\n      | SpreadProperty of ('M, 'T) SpreadProperty.t\n      | Indexer of ('M, 'T) Indexer.t\n      | CallProperty of ('M, 'T) CallProperty.t\n      | InternalSlot of ('M, 'T) InternalSlot.t\n  end\n\n  module Interface : sig\n    type ('M, 'T) t = {\n      body: 'M * ('M, 'T) Object.t;\n      extends: ('M * ('M, 'T) Generic.t) list;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Nullable : sig\n    type ('M, 'T) t = {\n      argument: ('M, 'T) Type.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Typeof : sig\n    module Target : sig\n      type ('M, 'T) t =\n        | Unqualified of ('M, 'T) Identifier.t\n        | Qualified of ('M, 'T) qualified\n\n      and ('M, 'T) qualified' = {\n        qualification: ('M, 'T) t;\n        id: ('M, 'T) Identifier.t;\n      }\n\n      and ('M, 'T) qualified = 'T * ('M, 'T) qualified'\n    end\n\n    type ('M, 'T) t = {\n      argument: ('M, 'T) 
Target.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Tuple : sig\n    type ('M, 'T) t = {\n      types: ('M, 'T) Type.t list;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Array : sig\n    type ('M, 'T) t = {\n      argument: ('M, 'T) Type.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Union : sig\n    type ('M, 'T) t = {\n      types: ('M, 'T) Type.t * ('M, 'T) Type.t * ('M, 'T) Type.t list;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Intersection : sig\n    type ('M, 'T) t = {\n      types: ('M, 'T) Type.t * ('M, 'T) Type.t * ('M, 'T) Type.t list;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  type ('M, 'T) t = 'T * ('M, 'T) t'\n\n  (* Yes, we could add a little complexity here to show that Any and Void\n   * should never be declared nullable, but that check can happen later *)\n  and ('M, 'T) t' =\n    | Any of ('M, unit) Syntax.t option\n    | Mixed of ('M, unit) Syntax.t option\n    | Empty of ('M, unit) Syntax.t option\n    | Void of ('M, unit) Syntax.t option\n    | Null of ('M, unit) Syntax.t option\n    | Number of ('M, unit) Syntax.t option\n    | BigInt of ('M, unit) Syntax.t option\n    | String of ('M, unit) Syntax.t option\n    | Boolean of ('M, unit) Syntax.t option\n    | Symbol of ('M, unit) Syntax.t option\n    | Exists of ('M, unit) Syntax.t option\n    | Nullable of ('M, 'T) Nullable.t\n    | Function of ('M, 'T) Function.t\n    | Object of ('M, 'T) Object.t\n    | Interface of ('M, 'T) Interface.t\n    | Array of ('M, 'T) Array.t\n    | Generic of ('M, 'T) Generic.t\n    | IndexedAccess of ('M, 'T) IndexedAccess.t\n    | OptionalIndexedAccess of ('M, 'T) OptionalIndexedAccess.t\n    | Union of ('M, 'T) Union.t\n    | Intersection of ('M, 'T) Intersection.t\n    | Typeof of ('M, 'T) Typeof.t\n    | Tuple of ('M, 'T) Tuple.t\n    | StringLiteral of 'M StringLiteral.t\n    | NumberLiteral of 'M NumberLiteral.t\n    | 
BigIntLiteral of 'M BigIntLiteral.t\n    | BooleanLiteral of 'M BooleanLiteral.t\n\n  (* Type.annotation is a concrete syntax node with a location that starts at\n   * the colon and ends after the type. For example, \"var a: number\", the\n   * identifier a would have a property annot which contains a\n   * Type.annotation with a location from column 6-14 *)\n  and ('M, 'T) annotation = 'M * ('M, 'T) t\n\n  and ('M, 'T) annotation_or_hint =\n    | Missing of 'T\n    | Available of ('M, 'T) Type.annotation\n\n  module TypeParam : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      name: ('M, 'M) Identifier.t;\n      bound: ('M, 'T) Type.annotation_or_hint;\n      variance: 'M Variance.t option;\n      default: ('M, 'T) Type.t option;\n    }\n  end\n\n  module TypeParams : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      params: ('M, 'T) TypeParam.t list;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  module TypeArgs : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      arguments: ('M, 'T) Type.t list;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  module Predicate : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      kind: ('M, 'T) kind;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and ('M, 'T) kind =\n      | Declared of ('M, 'T) Expression.t\n      | Inferred\n  end\nend =\n  Type\n\nand Statement : sig\n  module Block : sig\n    type ('M, 'T) t = {\n      body: ('M, 'T) Statement.t list;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  module If : sig\n    module Alternate : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        body: ('M, 'T) Statement.t;\n        comments: ('M, unit) Syntax.t option;\n      }\n    end\n\n    type ('M, 'T) t = {\n      test: ('M, 'T) Expression.t;\n      consequent: ('M, 'T) Statement.t;\n      
alternate: ('M, 'T) Alternate.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Labeled : sig\n    type ('M, 'T) t = {\n      label: ('M, 'M) Identifier.t;\n      body: ('M, 'T) Statement.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Break : sig\n    type 'M t = {\n      label: ('M, 'M) Identifier.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Continue : sig\n    type 'M t = {\n      label: ('M, 'M) Identifier.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Debugger : sig\n    type 'M t = { comments: ('M, unit) Syntax.t option }\n  end\n\n  module With : sig\n    type ('M, 'T) t = {\n      _object: ('M, 'T) Expression.t;\n      body: ('M, 'T) Statement.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module TypeAlias : sig\n    type ('M, 'T) t = {\n      id: ('M, 'T) Identifier.t;\n      tparams: ('M, 'T) Type.TypeParams.t option;\n      right: ('M, 'T) Type.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module OpaqueType : sig\n    type ('M, 'T) t = {\n      id: ('M, 'T) Identifier.t;\n      tparams: ('M, 'T) Type.TypeParams.t option;\n      impltype: ('M, 'T) Type.t option;\n      supertype: ('M, 'T) Type.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Switch : sig\n    module Case : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        test: ('M, 'T) Expression.t option;\n        consequent: ('M, 'T) Statement.t list;\n        comments: ('M, unit) Syntax.t option;\n      }\n    end\n\n    type ('M, 'T) t = {\n      discriminant: ('M, 'T) Expression.t;\n      cases: ('M, 'T) Case.t list;\n      comments: ('M, unit) Syntax.t option;\n      exhaustive_out: 'T;\n    }\n  end\n\n  module Return : sig\n    type ('M, 'T) t = {\n      argument: ('M, 'T) Expression.t option;\n      comments: ('M, unit) Syntax.t option;\n      return_out: 'T;\n    
}\n  end\n\n  module Throw : sig\n    type ('M, 'T) t = {\n      argument: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Try : sig\n    module CatchClause : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        param: ('M, 'T) Pattern.t option;\n        body: 'M * ('M, 'T) Block.t;\n        comments: ('M, unit) Syntax.t option;\n      }\n    end\n\n    type ('M, 'T) t = {\n      block: 'M * ('M, 'T) Block.t;\n      handler: ('M, 'T) CatchClause.t option;\n      finalizer: ('M * ('M, 'T) Block.t) option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module VariableDeclaration : sig\n    module Declarator : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        id: ('M, 'T) Pattern.t;\n        init: ('M, 'T) Expression.t option;\n      }\n    end\n\n    type ('M, 'T) t = {\n      declarations: ('M, 'T) Declarator.t list;\n      kind: kind;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and kind =\n      | Var\n      | Let\n      | Const\n  end\n\n  module While : sig\n    type ('M, 'T) t = {\n      test: ('M, 'T) Expression.t;\n      body: ('M, 'T) Statement.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module DoWhile : sig\n    type ('M, 'T) t = {\n      body: ('M, 'T) Statement.t;\n      test: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module For : sig\n    type ('M, 'T) t = {\n      init: ('M, 'T) init option;\n      test: ('M, 'T) Expression.t option;\n      update: ('M, 'T) Expression.t option;\n      body: ('M, 'T) Statement.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and ('M, 'T) init =\n      | InitDeclaration of ('M * ('M, 'T) VariableDeclaration.t)\n      | InitExpression of ('M, 'T) Expression.t\n  end\n\n  module ForIn : sig\n    type ('M, 'T) t = {\n      left: ('M, 'T) left;\n      right: ('M, 'T) Expression.t;\n      body: ('M, 'T) 
Statement.t;\n      each: bool;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and ('M, 'T) left =\n      | LeftDeclaration of ('M * ('M, 'T) VariableDeclaration.t)\n      | LeftPattern of ('M, 'T) Pattern.t\n  end\n\n  module ForOf : sig\n    type ('M, 'T) t = {\n      left: ('M, 'T) left;\n      right: ('M, 'T) Expression.t;\n      body: ('M, 'T) Statement.t;\n      await: bool;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and ('M, 'T) left =\n      | LeftDeclaration of ('M * ('M, 'T) VariableDeclaration.t)\n      | LeftPattern of ('M, 'T) Pattern.t\n  end\n\n  module EnumDeclaration : sig\n    module DefaultedMember : sig\n      type 'M t = 'M * 'M t'\n      and 'M t' = { id: ('M, 'M) Identifier.t }\n    end\n\n    module InitializedMember : sig\n      type ('I, 'M) t = 'M * ('I, 'M) t'\n\n      and ('I, 'M) t' = {\n        id: ('M, 'M) Identifier.t;\n        init: 'M * 'I;\n      }\n    end\n\n    module BooleanBody : sig\n      type 'M t = {\n        members: ('M BooleanLiteral.t, 'M) InitializedMember.t list;\n        explicit_type: bool;\n        has_unknown_members: bool;\n        comments: ('M, 'M Comment.t list) Syntax.t option;\n      }\n    end\n\n    module NumberBody : sig\n      type 'M t = {\n        members: ('M NumberLiteral.t, 'M) InitializedMember.t list;\n        explicit_type: bool;\n        has_unknown_members: bool;\n        comments: ('M, 'M Comment.t list) Syntax.t option;\n      }\n    end\n\n    module StringBody : sig\n      type 'M t = {\n        members: ('M StringLiteral.t, 'M) members;\n        explicit_type: bool;\n        has_unknown_members: bool;\n        comments: ('M, 'M Comment.t list) Syntax.t option;\n      }\n\n      and ('I, 'M) members =\n        | Defaulted of 'M DefaultedMember.t list\n        | Initialized of ('I, 'M) InitializedMember.t list\n    end\n\n    module SymbolBody : sig\n      type 'M t = {\n        members: 'M DefaultedMember.t list;\n        has_unknown_members: bool;\n        
comments: ('M, 'M Comment.t list) Syntax.t option;\n      }\n    end\n\n    type ('M, 'T) t = {\n      id: ('M, 'T) Identifier.t;\n      body: 'M body;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and 'M body = 'M * 'M body'\n\n    and 'M body' =\n      | BooleanBody of 'M BooleanBody.t\n      | NumberBody of 'M NumberBody.t\n      | StringBody of 'M StringBody.t\n      | SymbolBody of 'M SymbolBody.t\n  end\n\n  module Interface : sig\n    type ('M, 'T) t = {\n      id: ('M, 'T) Identifier.t;\n      tparams: ('M, 'T) Type.TypeParams.t option;\n      extends: ('M * ('M, 'T) Type.Generic.t) list;\n      body: 'M * ('M, 'T) Type.Object.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module DeclareClass : sig\n    type ('M, 'T) t = {\n      id: ('M, 'T) Identifier.t;\n      tparams: ('M, 'T) Type.TypeParams.t option;\n      body: 'M * ('M, 'T) Type.Object.t;\n      extends: ('M * ('M, 'T) Type.Generic.t) option;\n      mixins: ('M * ('M, 'T) Type.Generic.t) list;\n      implements: ('M, 'T) Class.Implements.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module DeclareVariable : sig\n    type ('M, 'T) t = {\n      id: ('M, 'T) Identifier.t;\n      annot: ('M, 'T) Type.annotation;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module DeclareFunction : sig\n    type ('M, 'T) t = {\n      id: ('M, 'T) Identifier.t;\n      annot: ('M, 'T) Type.annotation;\n      predicate: ('M, 'T) Type.Predicate.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module DeclareModule : sig\n    type ('M, 'T) id =\n      | Identifier of ('M, 'T) Identifier.t\n      | Literal of ('T * 'M StringLiteral.t)\n\n    and module_kind =\n      | CommonJS\n      | ES\n\n    and ('M, 'T) t = {\n      id: ('M, 'T) id;\n      body: 'M * ('M, 'T) Block.t;\n      kind: module_kind;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module DeclareModuleExports : sig\n    type ('M, 'T) t = {\n    
  annot: ('M, 'T) Type.annotation;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module ExportNamedDeclaration : sig\n    module ExportSpecifier : sig\n      type 'M t = 'M * 'M t'\n\n      and 'M t' = {\n        local: ('M, 'M) Identifier.t;\n        exported: ('M, 'M) Identifier.t option;\n      }\n    end\n\n    module ExportBatchSpecifier : sig\n      type 'M t = 'M * ('M, 'M) Identifier.t option\n    end\n\n    type ('M, 'T) t = {\n      declaration: ('M, 'T) Statement.t option;\n      specifiers: 'M specifier option;\n      source: ('M * 'M StringLiteral.t) option;\n      export_kind: Statement.export_kind;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and 'M specifier =\n      | ExportSpecifiers of 'M ExportSpecifier.t list\n      | ExportBatchSpecifier of 'M ExportBatchSpecifier.t\n  end\n\n  module ExportDefaultDeclaration : sig\n    type ('M, 'T) t = {\n      default: 'M;\n      declaration: ('M, 'T) declaration;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and ('M, 'T) declaration =\n      | Declaration of ('M, 'T) Statement.t\n      | Expression of ('M, 'T) Expression.t\n  end\n\n  module DeclareExportDeclaration : sig\n    type ('M, 'T) declaration =\n      (* declare export var *)\n      | Variable of ('M * ('M, 'T) DeclareVariable.t)\n      (* declare export function *)\n      | Function of ('M * ('M, 'T) DeclareFunction.t)\n      (* declare export class *)\n      | Class of ('M * ('M, 'T) DeclareClass.t)\n      (* declare export default [type]\n       * this corresponds to things like\n       * export default 1+1; *)\n      | DefaultType of ('M, 'T) Type.t\n      (* declare export type *)\n      | NamedType of ('M * ('M, 'T) TypeAlias.t)\n      (* declare export opaque type *)\n      | NamedOpaqueType of ('M * ('M, 'T) OpaqueType.t)\n      (* declare export interface *)\n      | Interface of ('M * ('M, 'T) Interface.t)\n\n    and ('M, 'T) t = {\n      default: 'M option;\n      declaration: ('M, 'T) 
declaration option;\n      specifiers: 'M ExportNamedDeclaration.specifier option;\n      source: ('M * 'M StringLiteral.t) option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module ImportDeclaration : sig\n    type import_kind =\n      | ImportType\n      | ImportTypeof\n      | ImportValue\n\n    and ('M, 'T) specifier =\n      | ImportNamedSpecifiers of ('M, 'T) named_specifier list\n      | ImportNamespaceSpecifier of ('M * ('M, 'T) Identifier.t)\n\n    and ('M, 'T) named_specifier = {\n      kind: import_kind option;\n      local: ('M, 'T) Identifier.t option;\n      remote: ('M, 'T) Identifier.t;\n    }\n\n    and ('M, 'T) t = {\n      import_kind: import_kind;\n      source: 'T * 'M StringLiteral.t;\n      default: ('M, 'T) Identifier.t option;\n      specifiers: ('M, 'T) specifier option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Expression : sig\n    type ('M, 'T) t = {\n      expression: ('M, 'T) Expression.t;\n      directive: string option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Empty : sig\n    type 'M t = { comments: ('M, unit) Syntax.t option }\n  end\n\n  type export_kind =\n    | ExportType\n    | ExportValue\n\n  and ('M, 'T) t = 'M * ('M, 'T) t'\n\n  and ('M, 'T) t' =\n    | Block of ('M, 'T) Block.t\n    | Break of 'M Break.t\n    | ClassDeclaration of ('M, 'T) Class.t\n    | Continue of 'M Continue.t\n    | Debugger of 'M Debugger.t\n    | DeclareClass of ('M, 'T) DeclareClass.t\n    | DeclareExportDeclaration of ('M, 'T) DeclareExportDeclaration.t\n    | DeclareFunction of ('M, 'T) DeclareFunction.t\n    | DeclareInterface of ('M, 'T) Interface.t\n    | DeclareModule of ('M, 'T) DeclareModule.t\n    | DeclareModuleExports of ('M, 'T) DeclareModuleExports.t\n    | DeclareTypeAlias of ('M, 'T) TypeAlias.t\n    | DeclareOpaqueType of ('M, 'T) OpaqueType.t\n    | DeclareVariable of ('M, 'T) DeclareVariable.t\n    | DoWhile of ('M, 'T) DoWhile.t\n    | Empty of 'M 
Empty.t\n    | EnumDeclaration of ('M, 'T) EnumDeclaration.t\n    | ExportDefaultDeclaration of ('M, 'T) ExportDefaultDeclaration.t\n    | ExportNamedDeclaration of ('M, 'T) ExportNamedDeclaration.t\n    | Expression of ('M, 'T) Expression.t\n    | For of ('M, 'T) For.t\n    | ForIn of ('M, 'T) ForIn.t\n    | ForOf of ('M, 'T) ForOf.t\n    | FunctionDeclaration of ('M, 'T) Function.t\n    | If of ('M, 'T) If.t\n    | ImportDeclaration of ('M, 'T) ImportDeclaration.t\n    | InterfaceDeclaration of ('M, 'T) Interface.t\n    | Labeled of ('M, 'T) Labeled.t\n    | Return of ('M, 'T) Return.t\n    | Switch of ('M, 'T) Switch.t\n    | Throw of ('M, 'T) Throw.t\n    | Try of ('M, 'T) Try.t\n    | TypeAlias of ('M, 'T) TypeAlias.t\n    | OpaqueType of ('M, 'T) OpaqueType.t\n    | VariableDeclaration of ('M, 'T) VariableDeclaration.t\n    | While of ('M, 'T) While.t\n    | With of ('M, 'T) With.t\nend =\n  Statement\n\nand Expression : sig\n  module CallTypeArg : sig\n    module Implicit : sig\n      type ('M, 'T) t = 'T * 'M t'\n      and 'M t' = { comments: ('M, unit) Syntax.t option }\n    end\n\n    type ('M, 'T) t =\n      | Explicit of ('M, 'T) Type.t\n      | Implicit of ('M, 'T) Implicit.t\n  end\n\n  module CallTypeArgs : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      arguments: ('M, 'T) CallTypeArg.t list;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  module SpreadElement : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      argument: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Array : sig\n    type ('M, 'T) element =\n      | Expression of ('M, 'T) Expression.t\n      | Spread of ('M, 'T) SpreadElement.t\n      | Hole of 'M\n\n    type ('M, 'T) t = {\n      elements: ('M, 'T) element list;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  module TemplateLiteral : sig\n    module Element : sig\n      
type value = {\n        raw: string;\n        cooked: string;\n      }\n\n      and 'M t = 'M * t'\n\n      and t' = {\n        value: value;\n        tail: bool;\n      }\n    end\n\n    type ('M, 'T) t = {\n      quasis: 'M Element.t list;\n      expressions: ('M, 'T) Expression.t list;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module TaggedTemplate : sig\n    type ('M, 'T) t = {\n      tag: ('M, 'T) Expression.t;\n      quasi: 'M * ('M, 'T) TemplateLiteral.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Object : sig\n    module Property : sig\n      type ('M, 'T) key =\n        | Literal of ('T * 'M Literal.t)\n        | Identifier of ('M, 'T) Identifier.t\n        | PrivateName of 'M PrivateName.t\n        | Computed of ('M, 'T) ComputedKey.t\n\n      and ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' =\n        | Init of {\n            key: ('M, 'T) key;\n            value: ('M, 'T) Expression.t;\n            shorthand: bool;\n          }\n        | Method of {\n            key: ('M, 'T) key;\n            value: 'M * ('M, 'T) Function.t;\n          }\n        | Get of {\n            key: ('M, 'T) key;\n            value: 'M * ('M, 'T) Function.t;\n            comments: ('M, unit) Syntax.t option;\n          }\n        | Set of {\n            key: ('M, 'T) key;\n            value: 'M * ('M, 'T) Function.t;\n            comments: ('M, unit) Syntax.t option;\n          }\n    end\n\n    module SpreadProperty : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        argument: ('M, 'T) Expression.t;\n        comments: ('M, unit) Syntax.t option;\n      }\n    end\n\n    type ('M, 'T) property =\n      | Property of ('M, 'T) Property.t\n      | SpreadProperty of ('M, 'T) SpreadProperty.t\n\n    and ('M, 'T) t = {\n      properties: ('M, 'T) property list;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  module Sequence : sig\n    type ('M, 'T) t = {\n      
expressions: ('M, 'T) Expression.t list;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Unary : sig\n    type operator =\n      | Minus\n      | Plus\n      | Not\n      | BitNot\n      | Typeof\n      | Void\n      | Delete\n      | Await\n\n    and ('M, 'T) t = {\n      operator: operator;\n      argument: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Binary : sig\n    type operator =\n      | Equal\n      | NotEqual\n      | StrictEqual\n      | StrictNotEqual\n      | LessThan\n      | LessThanEqual\n      | GreaterThan\n      | GreaterThanEqual\n      | LShift\n      | RShift\n      | RShift3\n      | Plus\n      | Minus\n      | Mult\n      | Exp\n      | Div\n      | Mod\n      | BitOr\n      | Xor\n      | BitAnd\n      | In\n      | Instanceof\n\n    and ('M, 'T) t = {\n      operator: operator;\n      left: ('M, 'T) Expression.t;\n      right: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Assignment : sig\n    type operator =\n      | PlusAssign\n      | MinusAssign\n      | MultAssign\n      | ExpAssign\n      | DivAssign\n      | ModAssign\n      | LShiftAssign\n      | RShiftAssign\n      | RShift3Assign\n      | BitOrAssign\n      | BitXorAssign\n      | BitAndAssign\n      | NullishAssign\n      | AndAssign\n      | OrAssign\n\n    and ('M, 'T) t = {\n      operator: operator option;\n      left: ('M, 'T) Pattern.t;\n      right: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Update : sig\n    type operator =\n      | Increment\n      | Decrement\n\n    and ('M, 'T) t = {\n      operator: operator;\n      argument: ('M, 'T) Expression.t;\n      prefix: bool;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Logical : sig\n    type operator =\n      | Or\n      | And\n      | NullishCoalesce\n\n    and ('M, 'T) t = {\n      operator: operator;\n      left: ('M, 'T) 
Expression.t;\n      right: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Conditional : sig\n    type ('M, 'T) t = {\n      test: ('M, 'T) Expression.t;\n      consequent: ('M, 'T) Expression.t;\n      alternate: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  type ('M, 'T) expression_or_spread =\n    | Expression of ('M, 'T) Expression.t\n    | Spread of ('M, 'T) SpreadElement.t\n\n  module ArgList : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      arguments: ('M, 'T) expression_or_spread list;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  module New : sig\n    type ('M, 'T) t = {\n      callee: ('M, 'T) Expression.t;\n      targs: ('M, 'T) Expression.CallTypeArgs.t option;\n      arguments: ('M, 'T) ArgList.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Call : sig\n    type ('M, 'T) t = {\n      callee: ('M, 'T) Expression.t;\n      targs: ('M, 'T) Expression.CallTypeArgs.t option;\n      arguments: ('M, 'T) ArgList.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module OptionalCall : sig\n    type ('M, 'T) t = {\n      call: ('M, 'T) Call.t;\n      filtered_out: 'T;\n      optional: bool;\n    }\n  end\n\n  module Member : sig\n    type ('M, 'T) property =\n      | PropertyIdentifier of ('M, 'T) Identifier.t\n      | PropertyPrivateName of 'M PrivateName.t\n      | PropertyExpression of ('M, 'T) Expression.t\n\n    and ('M, 'T) t = {\n      _object: ('M, 'T) Expression.t;\n      property: ('M, 'T) property;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module OptionalMember : sig\n    type ('M, 'T) t = {\n      member: ('M, 'T) Member.t;\n      filtered_out: 'T;\n      optional: bool;\n    }\n  end\n\n  module Yield : sig\n    type ('M, 'T) t = {\n      argument: ('M, 'T) Expression.t option;\n      comments: ('M, unit) Syntax.t option;\n      
delegate: bool;\n      result_out: 'T;\n    }\n  end\n\n  module Comprehension : sig\n    module Block : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        left: ('M, 'T) Pattern.t;\n        right: ('M, 'T) Expression.t;\n        each: bool;\n      }\n    end\n\n    type ('M, 'T) t = {\n      blocks: ('M, 'T) Block.t list;\n      filter: ('M, 'T) Expression.t option;\n    }\n  end\n\n  module Generator : sig\n    type ('M, 'T) t = {\n      blocks: ('M, 'T) Comprehension.Block.t list;\n      filter: ('M, 'T) Expression.t option;\n    }\n  end\n\n  module TypeCast : sig\n    type ('M, 'T) t = {\n      expression: ('M, 'T) Expression.t;\n      annot: ('M, 'T) Type.annotation;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module MetaProperty : sig\n    type 'M t = {\n      meta: ('M, 'M) Identifier.t;\n      property: ('M, 'M) Identifier.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module This : sig\n    type 'M t = { comments: ('M, unit) Syntax.t option }\n  end\n\n  module Super : sig\n    type 'M t = { comments: ('M, unit) Syntax.t option }\n  end\n\n  module Import : sig\n    type ('M, 'T) t = {\n      argument: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  type ('M, 'T) t = 'T * ('M, 'T) t'\n\n  and ('M, 'T) t' =\n    | Array of ('M, 'T) Array.t\n    | ArrowFunction of ('M, 'T) Function.t\n    | Assignment of ('M, 'T) Assignment.t\n    | Binary of ('M, 'T) Binary.t\n    | Call of ('M, 'T) Call.t\n    | Class of ('M, 'T) Class.t\n    | Comprehension of ('M, 'T) Comprehension.t\n    | Conditional of ('M, 'T) Conditional.t\n    | Function of ('M, 'T) Function.t\n    | Generator of ('M, 'T) Generator.t\n    | Identifier of ('M, 'T) Identifier.t\n    | Import of ('M, 'T) Import.t\n    | JSXElement of ('M, 'T) JSX.element\n    | JSXFragment of ('M, 'T) JSX.fragment\n    | Literal of 'M Literal.t\n    | Logical of ('M, 'T) Logical.t\n    | Member of ('M, 'T) 
Member.t\n    | MetaProperty of 'M MetaProperty.t\n    | New of ('M, 'T) New.t\n    | Object of ('M, 'T) Object.t\n    | OptionalCall of ('M, 'T) OptionalCall.t\n    | OptionalMember of ('M, 'T) OptionalMember.t\n    | Sequence of ('M, 'T) Sequence.t\n    | Super of 'M Super.t\n    | TaggedTemplate of ('M, 'T) TaggedTemplate.t\n    | TemplateLiteral of ('M, 'T) TemplateLiteral.t\n    | This of 'M This.t\n    | TypeCast of ('M, 'T) TypeCast.t\n    | Unary of ('M, 'T) Unary.t\n    | Update of ('M, 'T) Update.t\n    | Yield of ('M, 'T) Yield.t\nend =\n  Expression\n\nand JSX : sig\n  module Identifier : sig\n    type ('M, 'T) t = 'T * 'M t'\n\n    and 'M t' = {\n      name: string;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module NamespacedName : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      namespace: ('M, 'T) Identifier.t;\n      name: ('M, 'T) Identifier.t;\n    }\n  end\n\n  module ExpressionContainer : sig\n    type ('M, 'T) t = {\n      expression: ('M, 'T) expression;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n\n    and ('M, 'T) expression =\n      | Expression of ('M, 'T) Expression.t\n      | EmptyExpression\n  end\n\n  module Text : sig\n    type t = {\n      value: string;\n      raw: string;\n    }\n  end\n\n  module Attribute : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) name =\n      | Identifier of ('M, 'T) Identifier.t\n      | NamespacedName of ('M, 'T) NamespacedName.t\n\n    and ('M, 'T) value =\n      | Literal of 'T * 'M Literal.t\n      | ExpressionContainer of 'T * ('M, 'T) ExpressionContainer.t\n\n    and ('M, 'T) t' = {\n      name: ('M, 'T) name;\n      value: ('M, 'T) value option;\n    }\n  end\n\n  module SpreadAttribute : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      argument: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module MemberExpression : sig\n    type ('M, 'T) t 
= 'M * ('M, 'T) t'\n\n    and ('M, 'T) _object =\n      | Identifier of ('M, 'T) Identifier.t\n      | MemberExpression of ('M, 'T) t\n\n    and ('M, 'T) t' = {\n      _object: ('M, 'T) _object;\n      property: ('M, 'T) Identifier.t;\n    }\n  end\n\n  type ('M, 'T) name =\n    | Identifier of ('M, 'T) Identifier.t\n    | NamespacedName of ('M, 'T) NamespacedName.t\n    | MemberExpression of ('M, 'T) MemberExpression.t\n\n  module Opening : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) attribute =\n      | Attribute of ('M, 'T) Attribute.t\n      | SpreadAttribute of ('M, 'T) SpreadAttribute.t\n\n    and ('M, 'T) t' = {\n      name: ('M, 'T) name;\n      self_closing: bool;\n      attributes: ('M, 'T) attribute list;\n    }\n  end\n\n  module Closing : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n    and ('M, 'T) t' = { name: ('M, 'T) name }\n  end\n\n  module SpreadChild : sig\n    type ('M, 'T) t = {\n      expression: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  type ('M, 'T) child = 'M * ('M, 'T) child'\n\n  and ('M, 'T) child' =\n    | Element of ('M, 'T) element\n    | Fragment of ('M, 'T) fragment\n    | ExpressionContainer of ('M, 'T) ExpressionContainer.t\n    | SpreadChild of ('M, 'T) SpreadChild.t\n    | Text of Text.t\n\n  and ('M, 'T) element = {\n    opening_element: ('M, 'T) Opening.t;\n    closing_element: ('M, 'T) Closing.t option;\n    children: 'M * ('M, 'T) child list;\n    comments: ('M, unit) Syntax.t option;\n  }\n\n  and ('M, 'T) fragment = {\n    frag_opening_element: 'M;\n    frag_closing_element: 'M;\n    frag_children: 'M * ('M, 'T) child list;\n    frag_comments: ('M, unit) Syntax.t option;\n  }\nend =\n  JSX\n\nand Pattern : sig\n  module RestElement : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      argument: ('M, 'T) Pattern.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Object : sig\n    module Property : sig\n      
type ('M, 'T) key =\n        | Literal of ('M * 'M Literal.t)\n        | Identifier of ('M, 'T) Identifier.t\n        | Computed of ('M, 'T) ComputedKey.t\n\n      and ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        key: ('M, 'T) key;\n        pattern: ('M, 'T) Pattern.t;\n        default: ('M, 'T) Expression.t option;\n        shorthand: bool;\n      }\n    end\n\n    type ('M, 'T) property =\n      | Property of ('M, 'T) Property.t\n      | RestElement of ('M, 'T) RestElement.t\n\n    and ('M, 'T) t = {\n      properties: ('M, 'T) property list;\n      annot: ('M, 'T) Type.annotation_or_hint;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  module Array : sig\n    module Element : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        argument: ('M, 'T) Pattern.t;\n        default: ('M, 'T) Expression.t option;\n      }\n    end\n\n    type ('M, 'T) element =\n      | Element of ('M, 'T) Element.t\n      | RestElement of ('M, 'T) RestElement.t\n      | Hole of 'M\n\n    and ('M, 'T) t = {\n      elements: ('M, 'T) element list;\n      annot: ('M, 'T) Type.annotation_or_hint;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  module Identifier : sig\n    type ('M, 'T) t = {\n      name: ('M, 'T) Identifier.t;\n      annot: ('M, 'T) Type.annotation_or_hint;\n      optional: bool;\n    }\n  end\n\n  type ('M, 'T) t = 'T * ('M, 'T) t'\n\n  and ('M, 'T) t' =\n    | Object of ('M, 'T) Object.t\n    | Array of ('M, 'T) Array.t\n    | Identifier of ('M, 'T) Identifier.t\n    | Expression of ('M, 'T) Expression.t\nend =\n  Pattern\n\nand Comment : sig\n  type 'M t = 'M * t'\n\n  and kind =\n    | Block\n    | Line\n\n  and t' = {\n    kind: kind;\n    text: string;\n    on_newline: bool;\n  }\nend =\n  Comment\n\nand Class : sig\n  module Method : sig\n    type ('M, 'T) t = 'T * ('M, 'T) t'\n\n    and kind =\n      | Constructor\n      | Method\n      | Get\n      | 
Set\n\n    and ('M, 'T) t' = {\n      kind: kind;\n      key: ('M, 'T) Expression.Object.Property.key;\n      value: 'M * ('M, 'T) Function.t;\n      static: bool;\n      decorators: ('M, 'T) Class.Decorator.t list;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Property : sig\n    type ('M, 'T) t = 'T * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      key: ('M, 'T) Expression.Object.Property.key;\n      value: ('M, 'T) value;\n      annot: ('M, 'T) Type.annotation_or_hint;\n      static: bool;\n      variance: 'M Variance.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and ('M, 'T) value =\n      | Declared\n      | Uninitialized\n      | Initialized of ('M, 'T) Expression.t\n  end\n\n  module PrivateField : sig\n    type ('M, 'T) t = 'T * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      key: 'M PrivateName.t;\n      value: ('M, 'T) Class.Property.value;\n      annot: ('M, 'T) Type.annotation_or_hint;\n      static: bool;\n      variance: 'M Variance.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Extends : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      expr: ('M, 'T) Expression.t;\n      targs: ('M, 'T) Type.TypeArgs.t option;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Implements : sig\n    module Interface : sig\n      type ('M, 'T) t = 'M * ('M, 'T) t'\n\n      and ('M, 'T) t' = {\n        id: ('M, 'T) Identifier.t;\n        targs: ('M, 'T) Type.TypeArgs.t option;\n      }\n    end\n\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      interfaces: ('M, 'T) Interface.t list;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Body : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      body: ('M, 'T) element list;\n      comments: ('M, unit) Syntax.t option;\n    }\n\n    and ('M, 'T) element =\n      | Method of ('M, 'T) Method.t\n      | Property of ('M, 'T) 
Property.t\n      | PrivateField of ('M, 'T) PrivateField.t\n  end\n\n  module Decorator : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      expression: ('M, 'T) Expression.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  type ('M, 'T) t = {\n    id: ('M, 'T) Identifier.t option;\n    body: ('M, 'T) Class.Body.t;\n    tparams: ('M, 'T) Type.TypeParams.t option;\n    extends: ('M, 'T) Extends.t option;\n    implements: ('M, 'T) Implements.t option;\n    class_decorators: ('M, 'T) Decorator.t list;\n    comments: ('M, unit) Syntax.t option;\n  }\nend =\n  Class\n\nand Function : sig\n  module RestParam : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      argument: ('M, 'T) Pattern.t;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Param : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      argument: ('M, 'T) Pattern.t;\n      default: ('M, 'T) Expression.t option;\n    }\n  end\n\n  module ThisParam : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      annot: ('M, 'T) Type.annotation;\n      comments: ('M, unit) Syntax.t option;\n    }\n  end\n\n  module Params : sig\n    type ('M, 'T) t = 'M * ('M, 'T) t'\n\n    and ('M, 'T) t' = {\n      this_: ('M, 'T) ThisParam.t option;\n      params: ('M, 'T) Param.t list;\n      rest: ('M, 'T) RestParam.t option;\n      comments: ('M, 'M Comment.t list) Syntax.t option;\n    }\n  end\n\n  type ('M, 'T) t = {\n    id: ('M, 'T) Identifier.t option;\n    params: ('M, 'T) Params.t;\n    body: ('M, 'T) body;\n    async: bool;\n    generator: bool;\n    predicate: ('M, 'T) Type.Predicate.t option;\n    return: ('M, 'T) Type.annotation_or_hint;\n    tparams: ('M, 'T) Type.TypeParams.t option;\n    comments: ('M, unit) Syntax.t option;\n    (* Location of the signature portion of a function, e.g.\n     * function foo(): void {}\n     * ^^^^^^^^^^^^^^^^^^^^\n     *)\n    sig_loc: 'M;\n  
}\n\n  and ('M, 'T) body =\n    | BodyBlock of ('M * ('M, 'T) Statement.Block.t)\n    | BodyExpression of ('M, 'T) Expression.t\nend =\n  Function\n\nand Program : sig\n  type ('M, 'T) t = 'M * ('M, 'T) t'\n\n  and ('M, 'T) t' = {\n    statements: ('M, 'T) Statement.t list;\n    comments: ('M, unit) Syntax.t option;\n    all_comments: 'M Comment.t list;\n  }\nend =\n  Program\n"
  },
  {
    "path": "analysis/vendor/js_parser/flow_ast_mapper.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Ast = Flow_ast\n\nlet map_opt : 'node. ('node -> 'node) -> 'node option -> 'node option =\n fun map opt ->\n  match opt with\n  | Some item ->\n    let item' = map item in\n    if item == item' then\n      opt\n    else\n      Some item'\n  | None -> opt\n\nlet id_loc : 'node 'a. ('loc -> 'node -> 'node) -> 'loc -> 'node -> 'a -> ('node -> 'a) -> 'a =\n fun map loc item same diff ->\n  let item' = map loc item in\n  if item == item' then\n    same\n  else\n    diff item'\n\nlet id : 'node 'a. ('node -> 'node) -> 'node -> 'a -> ('node -> 'a) -> 'a =\n fun map item same diff ->\n  let item' = map item in\n  if item == item' then\n    same\n  else\n    diff item'\n\nlet map_loc : 'node. ('loc -> 'node -> 'node) -> 'loc * 'node -> 'loc * 'node =\n fun map same ->\n  let (loc, item) = same in\n  id_loc map loc item same (fun diff -> (loc, diff))\n\nlet map_loc_opt : 'node. 
('loc -> 'node -> 'node) -> ('loc * 'node) option -> ('loc * 'node) option\n    =\n fun map same ->\n  map_opt\n    (fun same ->\n      let (loc, item) = same in\n      id_loc map loc item same (fun diff -> (loc, diff)))\n    same\n\nlet map_list map lst =\n  let (rev_lst, changed) =\n    List.fold_left\n      (fun (lst', changed) item ->\n        let item' = map item in\n        (item' :: lst', changed || item' != item))\n      ([], false)\n      lst\n  in\n  if changed then\n    List.rev rev_lst\n  else\n    lst\n\nlet map_list_multiple map lst =\n  let (rev_lst, changed) =\n    List.fold_left\n      (fun (lst', changed) item ->\n        match map item with\n        | [] -> (lst', true)\n        | [item'] -> (item' :: lst', changed || item != item')\n        | items' -> (List.rev_append items' lst', true))\n      ([], false)\n      lst\n  in\n  if changed then\n    List.rev rev_lst\n  else\n    lst\n\nclass ['loc] mapper =\n  object (this)\n    method program (program : ('loc, 'loc) Ast.Program.t) =\n      let open Ast.Program in\n      let (loc, { statements; comments; all_comments }) = program in\n      let statements' = this#toplevel_statement_list statements in\n      let comments' = this#syntax_opt comments in\n      let all_comments' = map_list this#comment all_comments in\n      if statements == statements' && comments == comments' && all_comments == all_comments' then\n        program\n      else\n        (loc, { statements = statements'; comments = comments'; all_comments = all_comments' })\n\n    method statement (stmt : ('loc, 'loc) Ast.Statement.t) =\n      let open Ast.Statement in\n      match stmt with\n      | (loc, Block block) -> id_loc this#block loc block stmt (fun block -> (loc, Block block))\n      | (loc, Break break) -> id_loc this#break loc break stmt (fun break -> (loc, Break break))\n      | (loc, ClassDeclaration cls) ->\n        id_loc this#class_declaration loc cls stmt (fun cls -> (loc, ClassDeclaration cls))\n      | (loc, Continue 
cont) -> id_loc this#continue loc cont stmt (fun cont -> (loc, Continue cont))\n      | (loc, Debugger dbg) -> id_loc this#debugger loc dbg stmt (fun dbg -> (loc, Debugger dbg))\n      | (loc, DeclareClass stuff) ->\n        id_loc this#declare_class loc stuff stmt (fun stuff -> (loc, DeclareClass stuff))\n      | (loc, DeclareExportDeclaration decl) ->\n        id_loc this#declare_export_declaration loc decl stmt (fun decl ->\n            (loc, DeclareExportDeclaration decl)\n        )\n      | (loc, DeclareFunction stuff) ->\n        id_loc this#declare_function loc stuff stmt (fun stuff -> (loc, DeclareFunction stuff))\n      | (loc, DeclareInterface stuff) ->\n        id_loc this#declare_interface loc stuff stmt (fun stuff -> (loc, DeclareInterface stuff))\n      | (loc, DeclareModule m) ->\n        id_loc this#declare_module loc m stmt (fun m -> (loc, DeclareModule m))\n      | (loc, DeclareTypeAlias stuff) ->\n        id_loc this#declare_type_alias loc stuff stmt (fun stuff -> (loc, DeclareTypeAlias stuff))\n      | (loc, DeclareVariable stuff) ->\n        id_loc this#declare_variable loc stuff stmt (fun stuff -> (loc, DeclareVariable stuff))\n      | (loc, DeclareModuleExports annot) ->\n        id_loc this#declare_module_exports loc annot stmt (fun annot ->\n            (loc, DeclareModuleExports annot)\n        )\n      | (loc, DoWhile stuff) ->\n        id_loc this#do_while loc stuff stmt (fun stuff -> (loc, DoWhile stuff))\n      | (loc, Empty empty) -> id_loc this#empty loc empty stmt (fun empty -> (loc, Empty empty))\n      | (loc, EnumDeclaration enum) ->\n        id_loc this#enum_declaration loc enum stmt (fun enum -> (loc, EnumDeclaration enum))\n      | (loc, ExportDefaultDeclaration decl) ->\n        id_loc this#export_default_declaration loc decl stmt (fun decl ->\n            (loc, ExportDefaultDeclaration decl)\n        )\n      | (loc, ExportNamedDeclaration decl) ->\n        id_loc this#export_named_declaration loc decl stmt (fun decl ->\n    
        (loc, ExportNamedDeclaration decl)\n        )\n      | (loc, Expression expr) ->\n        id_loc this#expression_statement loc expr stmt (fun expr -> (loc, Expression expr))\n      | (loc, For for_stmt) ->\n        id_loc this#for_statement loc for_stmt stmt (fun for_stmt -> (loc, For for_stmt))\n      | (loc, ForIn stuff) ->\n        id_loc this#for_in_statement loc stuff stmt (fun stuff -> (loc, ForIn stuff))\n      | (loc, ForOf stuff) ->\n        id_loc this#for_of_statement loc stuff stmt (fun stuff -> (loc, ForOf stuff))\n      | (loc, FunctionDeclaration func) ->\n        id_loc this#function_declaration loc func stmt (fun func -> (loc, FunctionDeclaration func))\n      | (loc, If if_stmt) ->\n        id_loc this#if_statement loc if_stmt stmt (fun if_stmt -> (loc, If if_stmt))\n      | (loc, ImportDeclaration decl) ->\n        id_loc this#import_declaration loc decl stmt (fun decl -> (loc, ImportDeclaration decl))\n      | (loc, InterfaceDeclaration stuff) ->\n        id_loc this#interface_declaration loc stuff stmt (fun stuff ->\n            (loc, InterfaceDeclaration stuff)\n        )\n      | (loc, Labeled label) ->\n        id_loc this#labeled_statement loc label stmt (fun label -> (loc, Labeled label))\n      | (loc, OpaqueType otype) ->\n        id_loc this#opaque_type loc otype stmt (fun otype -> (loc, OpaqueType otype))\n      | (loc, Return ret) -> id_loc this#return loc ret stmt (fun ret -> (loc, Return ret))\n      | (loc, Switch switch) ->\n        id_loc this#switch loc switch stmt (fun switch -> (loc, Switch switch))\n      | (loc, Throw throw) -> id_loc this#throw loc throw stmt (fun throw -> (loc, Throw throw))\n      | (loc, Try try_stmt) ->\n        id_loc this#try_catch loc try_stmt stmt (fun try_stmt -> (loc, Try try_stmt))\n      | (loc, VariableDeclaration decl) ->\n        id_loc this#variable_declaration loc decl stmt (fun decl -> (loc, VariableDeclaration decl))\n      | (loc, While stuff) -> id_loc this#while_ loc stuff stmt 
(fun stuff -> (loc, While stuff))\n      | (loc, With stuff) -> id_loc this#with_ loc stuff stmt (fun stuff -> (loc, With stuff))\n      | (loc, TypeAlias stuff) ->\n        id_loc this#type_alias loc stuff stmt (fun stuff -> (loc, TypeAlias stuff))\n      | (loc, DeclareOpaqueType otype) ->\n        id_loc this#opaque_type loc otype stmt (fun otype -> (loc, OpaqueType otype))\n\n    method comment (c : 'loc Ast.Comment.t) = c\n\n    method syntax_opt\n        : 'internal. ('loc, 'internal) Ast.Syntax.t option -> ('loc, 'internal) Ast.Syntax.t option\n        =\n      map_opt this#syntax\n\n    method syntax : 'internal. ('loc, 'internal) Ast.Syntax.t -> ('loc, 'internal) Ast.Syntax.t =\n      fun attached ->\n        let open Ast.Syntax in\n        let { leading; trailing; internal } = attached in\n        let leading' = map_list this#comment leading in\n        let trailing' = map_list this#comment trailing in\n        if leading == leading' && trailing == trailing' then\n          attached\n        else\n          { leading = leading'; trailing = trailing'; internal }\n\n    method expression (expr : ('loc, 'loc) Ast.Expression.t) =\n      let open Ast.Expression in\n      match expr with\n      | (loc, Array x) -> id_loc this#array loc x expr (fun x -> (loc, Array x))\n      | (loc, ArrowFunction x) ->\n        id_loc this#arrow_function loc x expr (fun x -> (loc, ArrowFunction x))\n      | (loc, Assignment x) -> id_loc this#assignment loc x expr (fun x -> (loc, Assignment x))\n      | (loc, Binary x) -> id_loc this#binary loc x expr (fun x -> (loc, Binary x))\n      | (loc, Call x) -> id_loc this#call loc x expr (fun x -> (loc, Call x))\n      | (loc, Class x) -> id_loc this#class_expression loc x expr (fun x -> (loc, Class x))\n      | (loc, Comprehension x) ->\n        id_loc this#comprehension loc x expr (fun x -> (loc, Comprehension x))\n      | (loc, Conditional x) -> id_loc this#conditional loc x expr (fun x -> (loc, Conditional x))\n      | (loc, 
Function x) -> id_loc this#function_expression loc x expr (fun x -> (loc, Function x))\n      | (loc, Generator x) -> id_loc this#generator loc x expr (fun x -> (loc, Generator x))\n      | (loc, Identifier x) -> id this#identifier x expr (fun x -> (loc, Identifier x))\n      | (loc, Import x) -> id (this#import loc) x expr (fun x -> (loc, Import x))\n      | (loc, JSXElement x) -> id_loc this#jsx_element loc x expr (fun x -> (loc, JSXElement x))\n      | (loc, JSXFragment x) -> id_loc this#jsx_fragment loc x expr (fun x -> (loc, JSXFragment x))\n      | (loc, Literal x) -> id_loc this#literal loc x expr (fun x -> (loc, Literal x))\n      | (loc, Logical x) -> id_loc this#logical loc x expr (fun x -> (loc, Logical x))\n      | (loc, Member x) -> id_loc this#member loc x expr (fun x -> (loc, Member x))\n      | (loc, MetaProperty x) ->\n        id_loc this#meta_property loc x expr (fun x -> (loc, MetaProperty x))\n      | (loc, New x) -> id_loc this#new_ loc x expr (fun x -> (loc, New x))\n      | (loc, Object x) -> id_loc this#object_ loc x expr (fun x -> (loc, Object x))\n      | (loc, OptionalCall x) -> id (this#optional_call loc) x expr (fun x -> (loc, OptionalCall x))\n      | (loc, OptionalMember x) ->\n        id_loc this#optional_member loc x expr (fun x -> (loc, OptionalMember x))\n      | (loc, Sequence x) -> id_loc this#sequence loc x expr (fun x -> (loc, Sequence x))\n      | (loc, Super x) -> id_loc this#super_expression loc x expr (fun x -> (loc, Super x))\n      | (loc, TaggedTemplate x) ->\n        id_loc this#tagged_template loc x expr (fun x -> (loc, TaggedTemplate x))\n      | (loc, TemplateLiteral x) ->\n        id_loc this#template_literal loc x expr (fun x -> (loc, TemplateLiteral x))\n      | (loc, This x) -> id_loc this#this_expression loc x expr (fun x -> (loc, This x))\n      | (loc, TypeCast x) -> id_loc this#type_cast loc x expr (fun x -> (loc, TypeCast x))\n      | (loc, Unary x) -> id_loc this#unary_expression loc x expr (fun x -> (loc, 
Unary x))\n      | (loc, Update x) -> id_loc this#update_expression loc x expr (fun x -> (loc, Update x))\n      | (loc, Yield x) -> id_loc this#yield loc x expr (fun x -> (loc, Yield x))\n\n    method array _loc (expr : ('loc, 'loc) Ast.Expression.Array.t) =\n      let open Ast.Expression in\n      let { Array.elements; comments } = expr in\n      let elements' = map_list this#array_element elements in\n      let comments' = this#syntax_opt comments in\n      if elements == elements' && comments == comments' then\n        expr\n      else\n        { Array.elements = elements'; comments = comments' }\n\n    method array_element element =\n      let open Ast.Expression.Array in\n      match element with\n      | Expression expr -> id this#expression expr element (fun expr -> Expression expr)\n      | Spread spread -> id this#spread_element spread element (fun spread -> Spread spread)\n      | Hole _ -> element\n\n    method arrow_function loc (expr : ('loc, 'loc) Ast.Function.t) = this#function_ loc expr\n\n    method assignment _loc (expr : ('loc, 'loc) Ast.Expression.Assignment.t) =\n      let open Ast.Expression.Assignment in\n      let { operator = _; left; right; comments } = expr in\n      let left' = this#assignment_pattern left in\n      let right' = this#expression right in\n      let comments' = this#syntax_opt comments in\n      if left == left' && right == right' && comments == comments' then\n        expr\n      else\n        { expr with left = left'; right = right'; comments = comments' }\n\n    method binary _loc (expr : ('loc, 'loc) Ast.Expression.Binary.t) =\n      let open Ast.Expression.Binary in\n      let { operator = _; left; right; comments } = expr in\n      let left' = this#expression left in\n      let right' = this#expression right in\n      let comments' = this#syntax_opt comments in\n      if left == left' && right == right' && comments == comments' then\n        expr\n      else\n        { expr with left = left'; right = right'; 
comments = comments' }\n\n    method block _loc (stmt : ('loc, 'loc) Ast.Statement.Block.t) =\n      let open Ast.Statement.Block in\n      let { body; comments } = stmt in\n      let body' = this#statement_list body in\n      let comments' = this#syntax_opt comments in\n      if body == body' && comments == comments' then\n        stmt\n      else\n        { body = body'; comments = comments' }\n\n    method break _loc (break : 'loc Ast.Statement.Break.t) =\n      let open Ast.Statement.Break in\n      let { label; comments } = break in\n      let label' = map_opt this#label_identifier label in\n      let comments' = this#syntax_opt comments in\n      if label == label' && comments == comments' then\n        break\n      else\n        { label = label'; comments = comments' }\n\n    method call _loc (expr : ('loc, 'loc) Ast.Expression.Call.t) =\n      let open Ast.Expression.Call in\n      let { callee; targs; arguments; comments } = expr in\n      let callee' = this#expression callee in\n      let targs' = map_opt this#call_type_args targs in\n      let arguments' = this#call_arguments arguments in\n      let comments' = this#syntax_opt comments in\n      if callee == callee' && targs == targs' && arguments == arguments' && comments == comments'\n      then\n        expr\n      else\n        { callee = callee'; targs = targs'; arguments = arguments'; comments = comments' }\n\n    method call_arguments (arg_list : ('loc, 'loc) Ast.Expression.ArgList.t) =\n      let open Ast.Expression.ArgList in\n      let (loc, { arguments; comments }) = arg_list in\n      let arguments' = map_list this#expression_or_spread arguments in\n      let comments' = this#syntax_opt comments in\n      if arguments == arguments' && comments == comments' then\n        arg_list\n      else\n        (loc, { arguments = arguments'; comments = comments' })\n\n    method optional_call loc (expr : ('loc, 'loc) Ast.Expression.OptionalCall.t) =\n      let open Ast.Expression.OptionalCall in\n      
let { call; optional = _; filtered_out = _ } = expr in\n      let call' = this#call loc call in\n      if call == call' then\n        expr\n      else\n        { expr with call = call' }\n\n    method call_type_args (targs : ('loc, 'loc) Ast.Expression.CallTypeArgs.t) =\n      let open Ast.Expression.CallTypeArgs in\n      let (loc, { arguments; comments }) = targs in\n      let arguments' = map_list this#call_type_arg arguments in\n      let comments' = this#syntax_opt comments in\n      if arguments == arguments' && comments == comments' then\n        targs\n      else\n        (loc, { arguments = arguments'; comments = comments' })\n\n    method call_type_arg t =\n      let open Ast.Expression.CallTypeArg in\n      match t with\n      | Explicit x ->\n        let x' = this#type_ x in\n        if x' == x then\n          t\n        else\n          Explicit x'\n      | Implicit (loc, { Implicit.comments }) ->\n        let comments' = this#syntax_opt comments in\n        if comments == comments' then\n          t\n        else\n          Implicit (loc, { Implicit.comments = comments' })\n\n    method catch_body (body : 'loc * ('loc, 'loc) Ast.Statement.Block.t) = map_loc this#block body\n\n    method catch_clause _loc (clause : ('loc, 'loc) Ast.Statement.Try.CatchClause.t') =\n      let open Ast.Statement.Try.CatchClause in\n      let { param; body; comments } = clause in\n      let param' = map_opt this#catch_clause_pattern param in\n      let body' = this#catch_body body in\n      let comments' = this#syntax_opt comments in\n      if param == param' && body == body' && comments == comments' then\n        clause\n      else\n        { param = param'; body = body'; comments = comments' }\n\n    method class_declaration loc (cls : ('loc, 'loc) Ast.Class.t) = this#class_ loc cls\n\n    method class_expression loc (cls : ('loc, 'loc) Ast.Class.t) = this#class_ loc cls\n\n    method class_ _loc (cls : ('loc, 'loc) Ast.Class.t) =\n      let open Ast.Class in\n      let { 
id; body; tparams; extends; implements; class_decorators; comments } = cls in\n      let id' = map_opt this#class_identifier id in\n      let tparams' = map_opt this#type_params tparams in\n      let body' = this#class_body body in\n      let extends' = map_opt (map_loc this#class_extends) extends in\n      let implements' = map_opt this#class_implements implements in\n      let class_decorators' = map_list this#class_decorator class_decorators in\n      let comments' = this#syntax_opt comments in\n      if\n        id == id'\n        && body == body'\n        && extends == extends'\n        && implements == implements'\n        && class_decorators == class_decorators'\n        && comments == comments'\n        && tparams == tparams'\n      then\n        cls\n      else\n        {\n          id = id';\n          body = body';\n          extends = extends';\n          implements = implements';\n          class_decorators = class_decorators';\n          comments = comments';\n          tparams = tparams';\n        }\n\n    method class_extends _loc (extends : ('loc, 'loc) Ast.Class.Extends.t') =\n      let open Ast.Class.Extends in\n      let { expr; targs; comments } = extends in\n      let expr' = this#expression expr in\n      let targs' = map_opt this#type_args targs in\n      let comments' = this#syntax_opt comments in\n      if expr == expr' && targs == targs' && comments == comments' then\n        extends\n      else\n        { expr = expr'; targs = targs'; comments = comments' }\n\n    method class_identifier (ident : ('loc, 'loc) Ast.Identifier.t) =\n      this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let ident\n\n    method class_body (cls_body : ('loc, 'loc) Ast.Class.Body.t) =\n      let open Ast.Class.Body in\n      let (loc, { body; comments }) = cls_body in\n      let body' = map_list this#class_element body in\n      let comments' = this#syntax_opt comments in\n      if body == body' && comments == comments' then\n        cls_body\n  
    else\n        (loc, { body = body'; comments = comments' })\n\n    method class_decorator (dec : ('loc, 'loc) Ast.Class.Decorator.t) =\n      let open Ast.Class.Decorator in\n      let (loc, { expression; comments }) = dec in\n      let expression' = this#expression expression in\n      let comments' = this#syntax_opt comments in\n      if expression == expression' && comments == comments' then\n        dec\n      else\n        (loc, { expression = expression'; comments = comments' })\n\n    method class_element (elem : ('loc, 'loc) Ast.Class.Body.element) =\n      let open Ast.Class.Body in\n      match elem with\n      | Method (loc, meth) -> id_loc this#class_method loc meth elem (fun meth -> Method (loc, meth))\n      | Property (loc, prop) ->\n        id_loc this#class_property loc prop elem (fun prop -> Property (loc, prop))\n      | PrivateField (loc, field) ->\n        id_loc this#class_private_field loc field elem (fun field -> PrivateField (loc, field))\n\n    method class_implements (implements : ('loc, 'loc) Ast.Class.Implements.t) =\n      let open Ast.Class.Implements in\n      let (loc, { interfaces; comments }) = implements in\n      let interfaces' = map_list this#class_implements_interface interfaces in\n      let comments' = this#syntax_opt comments in\n      if interfaces == interfaces' && comments == comments' then\n        implements\n      else\n        (loc, { interfaces = interfaces'; comments = comments' })\n\n    method class_implements_interface (interface : ('loc, 'loc) Ast.Class.Implements.Interface.t) =\n      let open Ast.Class.Implements.Interface in\n      let (loc, { id; targs }) = interface in\n      let id' = this#type_identifier_reference id in\n      let targs' = map_opt this#type_args targs in\n      if id == id' && targs == targs' then\n        interface\n      else\n        (loc, { id = id'; targs = targs' })\n\n    method class_method _loc (meth : ('loc, 'loc) Ast.Class.Method.t') =\n      let open Ast.Class.Method 
in\n      let { kind = _; key; value; static = _; decorators; comments } = meth in\n      let key' = this#object_key key in\n      let value' = map_loc this#function_expression_or_method value in\n      let decorators' = map_list this#class_decorator decorators in\n      let comments' = this#syntax_opt comments in\n      if key == key' && value == value' && decorators == decorators' && comments == comments' then\n        meth\n      else\n        { meth with key = key'; value = value'; decorators = decorators'; comments = comments' }\n\n    method class_property _loc (prop : ('loc, 'loc) Ast.Class.Property.t') =\n      let open Ast.Class.Property in\n      let { key; value; annot; static = _; variance; comments } = prop in\n      let key' = this#object_key key in\n      let value' = this#class_property_value value in\n      let annot' = this#type_annotation_hint annot in\n      let variance' = this#variance_opt variance in\n      let comments' = this#syntax_opt comments in\n      if\n        key == key'\n        && value == value'\n        && annot' == annot\n        && variance' == variance\n        && comments' == comments\n      then\n        prop\n      else\n        {\n          prop with\n          key = key';\n          value = value';\n          annot = annot';\n          variance = variance';\n          comments = comments';\n        }\n\n    method class_property_value (value : ('loc, 'loc) Ast.Class.Property.value) =\n      let open Ast.Class.Property in\n      match value with\n      | Declared -> value\n      | Uninitialized -> value\n      | Initialized x ->\n        let x' = this#expression x in\n        if x == x' then\n          value\n        else\n          Initialized x'\n\n    method class_private_field _loc (prop : ('loc, 'loc) Ast.Class.PrivateField.t') =\n      let open Ast.Class.PrivateField in\n      let { key; value; annot; static = _; variance; comments } = prop in\n      let key' = this#private_name key in\n      let value' = 
this#class_property_value value in\n      let annot' = this#type_annotation_hint annot in\n      let variance' = this#variance_opt variance in\n      let comments' = this#syntax_opt comments in\n      if\n        key == key'\n        && value == value'\n        && annot' == annot\n        && variance' == variance\n        && comments' == comments\n      then\n        prop\n      else\n        {\n          prop with\n          key = key';\n          value = value';\n          annot = annot';\n          variance = variance';\n          comments = comments';\n        }\n\n    (* TODO *)\n    method comprehension _loc (expr : ('loc, 'loc) Ast.Expression.Comprehension.t) = expr\n\n    method conditional _loc (expr : ('loc, 'loc) Ast.Expression.Conditional.t) =\n      let open Ast.Expression.Conditional in\n      let { test; consequent; alternate; comments } = expr in\n      let test' = this#predicate_expression test in\n      let consequent' = this#expression consequent in\n      let alternate' = this#expression alternate in\n      let comments' = this#syntax_opt comments in\n      if\n        test == test'\n        && consequent == consequent'\n        && alternate == alternate'\n        && comments == comments'\n      then\n        expr\n      else\n        { test = test'; consequent = consequent'; alternate = alternate'; comments = comments' }\n\n    method continue _loc (cont : 'loc Ast.Statement.Continue.t) =\n      let open Ast.Statement.Continue in\n      let { label; comments } = cont in\n      let label' = map_opt this#label_identifier label in\n      let comments' = this#syntax_opt comments in\n      if label == label' && comments == comments' then\n        cont\n      else\n        { label = label'; comments = comments' }\n\n    method debugger _loc (dbg : 'loc Ast.Statement.Debugger.t) =\n      let open Ast.Statement.Debugger in\n      let { comments } = dbg in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        
dbg\n      else\n        { comments = comments' }\n\n    method declare_class _loc (decl : ('loc, 'loc) Ast.Statement.DeclareClass.t) =\n      let open Ast.Statement.DeclareClass in\n      let { id = ident; tparams; body; extends; mixins; implements; comments } = decl in\n      let id' = this#class_identifier ident in\n      let tparams' = map_opt this#type_params tparams in\n      let body' = map_loc this#object_type body in\n      let extends' = map_opt (map_loc this#generic_type) extends in\n      let mixins' = map_list (map_loc this#generic_type) mixins in\n      let implements' = map_opt this#class_implements implements in\n      let comments' = this#syntax_opt comments in\n      if\n        id' == ident\n        && tparams' == tparams\n        && body' == body\n        && extends' == extends\n        && mixins' == mixins\n        && implements' == implements\n        && comments' == comments\n      then\n        decl\n      else\n        {\n          id = id';\n          tparams = tparams';\n          body = body';\n          extends = extends';\n          mixins = mixins';\n          implements = implements';\n          comments = comments';\n        }\n\n    method declare_export_declaration\n        _loc (decl : ('loc, 'loc) Ast.Statement.DeclareExportDeclaration.t) =\n      let open Ast.Statement.DeclareExportDeclaration in\n      let { default; source; specifiers; declaration; comments } = decl in\n      let source' = map_loc_opt this#export_source source in\n      let specifiers' = map_opt this#export_named_specifier specifiers in\n      let declaration' = map_opt this#declare_export_declaration_decl declaration in\n      let comments' = this#syntax_opt comments in\n      if\n        source == source'\n        && specifiers == specifiers'\n        && declaration == declaration'\n        && comments == comments'\n      then\n        decl\n      else\n        {\n          default;\n          source = source';\n          specifiers = specifiers';\n         
 declaration = declaration';\n          comments = comments';\n        }\n\n    method declare_export_declaration_decl\n        (decl : ('loc, 'loc) Ast.Statement.DeclareExportDeclaration.declaration) =\n      let open Ast.Statement.DeclareExportDeclaration in\n      match decl with\n      | Variable (loc, dv) ->\n        let dv' = this#declare_variable loc dv in\n        if dv' == dv then\n          decl\n        else\n          Variable (loc, dv')\n      | Function (loc, df) ->\n        let df' = this#declare_function loc df in\n        if df' == df then\n          decl\n        else\n          Function (loc, df')\n      | Class (loc, dc) ->\n        let dc' = this#declare_class loc dc in\n        if dc' == dc then\n          decl\n        else\n          Class (loc, dc')\n      | DefaultType t ->\n        let t' = this#type_ t in\n        if t' == t then\n          decl\n        else\n          DefaultType t'\n      | NamedType (loc, ta) ->\n        let ta' = this#type_alias loc ta in\n        if ta' == ta then\n          decl\n        else\n          NamedType (loc, ta')\n      | NamedOpaqueType (loc, ot) ->\n        let ot' = this#opaque_type loc ot in\n        if ot' == ot then\n          decl\n        else\n          NamedOpaqueType (loc, ot')\n      | Interface (loc, i) ->\n        let i' = this#interface loc i in\n        if i' == i then\n          decl\n        else\n          Interface (loc, i')\n\n    method declare_function _loc (decl : ('loc, 'loc) Ast.Statement.DeclareFunction.t) =\n      let open Ast.Statement.DeclareFunction in\n      let { id = ident; annot; predicate; comments } = decl in\n      let id' = this#function_identifier ident in\n      let annot' = this#type_annotation annot in\n      let predicate' = map_opt this#predicate predicate in\n      let comments' = this#syntax_opt comments in\n      if id' == ident && annot' == annot && predicate' == predicate && comments' == comments then\n        decl\n      else\n        { id = id'; annot 
= annot'; predicate = predicate'; comments = comments' }\n\n    method declare_interface loc (decl : ('loc, 'loc) Ast.Statement.Interface.t) =\n      this#interface loc decl\n\n    method declare_module _loc (m : ('loc, 'loc) Ast.Statement.DeclareModule.t) =\n      let open Ast.Statement.DeclareModule in\n      let { id; body; kind; comments } = m in\n      let body' = map_loc this#block body in\n      let comments' = this#syntax_opt comments in\n      if body' == body && comments == comments' then\n        m\n      else\n        { id; body = body'; kind; comments = comments' }\n\n    method declare_module_exports _loc (exports : ('loc, 'loc) Ast.Statement.DeclareModuleExports.t)\n        =\n      let open Ast.Statement.DeclareModuleExports in\n      let { annot; comments } = exports in\n      let annot' = this#type_annotation annot in\n      let comments' = this#syntax_opt comments in\n      if annot == annot' && comments == comments' then\n        exports\n      else\n        { annot = annot'; comments = comments' }\n\n    method declare_type_alias loc (decl : ('loc, 'loc) Ast.Statement.TypeAlias.t) =\n      this#type_alias loc decl\n\n    method declare_variable _loc (decl : ('loc, 'loc) Ast.Statement.DeclareVariable.t) =\n      let open Ast.Statement.DeclareVariable in\n      let { id = ident; annot; comments } = decl in\n      let id' = this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident in\n      let annot' = this#type_annotation annot in\n      let comments' = this#syntax_opt comments in\n      if id' == ident && annot' == annot && comments' == comments then\n        decl\n      else\n        { id = id'; annot = annot'; comments = comments' }\n\n    method do_while _loc (stuff : ('loc, 'loc) Ast.Statement.DoWhile.t) =\n      let open Ast.Statement.DoWhile in\n      let { body; test; comments } = stuff in\n      let body' = this#statement body in\n      let test' = this#predicate_expression test in\n      let comments' = this#syntax_opt 
comments in\n      if body == body' && test == test' && comments == comments' then\n        stuff\n      else\n        { body = body'; test = test'; comments = comments' }\n\n    method empty _loc empty =\n      let open Ast.Statement.Empty in\n      let { comments } = empty in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        empty\n      else\n        { comments = comments' }\n\n    method enum_declaration _loc (enum : ('loc, 'loc) Ast.Statement.EnumDeclaration.t) =\n      let open Ast.Statement.EnumDeclaration in\n      let { id = ident; body; comments } = enum in\n      let id' = this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Const ident in\n      let body' = this#enum_body body in\n      let comments' = this#syntax_opt comments in\n      if ident == id' && body == body' && comments == comments' then\n        enum\n      else\n        { id = id'; body = body'; comments = comments' }\n\n    method enum_body (body : 'loc Ast.Statement.EnumDeclaration.body) =\n      let open Ast.Statement.EnumDeclaration in\n      match body with\n      | (loc, BooleanBody boolean_body) ->\n        id this#enum_boolean_body boolean_body body (fun body -> (loc, BooleanBody body))\n      | (loc, NumberBody number_body) ->\n        id this#enum_number_body number_body body (fun body -> (loc, NumberBody body))\n      | (loc, StringBody string_body) ->\n        id this#enum_string_body string_body body (fun body -> (loc, StringBody body))\n      | (loc, SymbolBody symbol_body) ->\n        id this#enum_symbol_body symbol_body body (fun body -> (loc, SymbolBody body))\n\n    method enum_boolean_body (body : 'loc Ast.Statement.EnumDeclaration.BooleanBody.t) =\n      let open Ast.Statement.EnumDeclaration.BooleanBody in\n      let { members; explicit_type = _; has_unknown_members = _; comments } = body in\n      let members' = map_list this#enum_boolean_member members in\n      let comments' = this#syntax_opt comments in\n     
 if members == members' && comments == comments' then\n        body\n      else\n        { body with members = members'; comments = comments' }\n\n    method enum_number_body (body : 'loc Ast.Statement.EnumDeclaration.NumberBody.t) =\n      let open Ast.Statement.EnumDeclaration.NumberBody in\n      let { members; explicit_type = _; has_unknown_members = _; comments } = body in\n      let members' = map_list this#enum_number_member members in\n      let comments' = this#syntax_opt comments in\n      if members == members' && comments == comments' then\n        body\n      else\n        { body with members = members'; comments = comments' }\n\n    method enum_string_body (body : 'loc Ast.Statement.EnumDeclaration.StringBody.t) =\n      let open Ast.Statement.EnumDeclaration.StringBody in\n      let { members; explicit_type = _; has_unknown_members = _; comments } = body in\n      let members' =\n        match members with\n        | Defaulted m -> id (map_list this#enum_defaulted_member) m members (fun m -> Defaulted m)\n        | Initialized m -> id (map_list this#enum_string_member) m members (fun m -> Initialized m)\n      in\n      let comments' = this#syntax_opt comments in\n      if members == members' && comments == comments' then\n        body\n      else\n        { body with members = members'; comments = comments' }\n\n    method enum_symbol_body (body : 'loc Ast.Statement.EnumDeclaration.SymbolBody.t) =\n      let open Ast.Statement.EnumDeclaration.SymbolBody in\n      let { members; has_unknown_members = _; comments } = body in\n      let members' = map_list this#enum_defaulted_member members in\n      let comments' = this#syntax_opt comments in\n      if members == members' && comments == comments' then\n        body\n      else\n        { body with members = members'; comments = comments' }\n\n    method enum_defaulted_member (member : 'loc Ast.Statement.EnumDeclaration.DefaultedMember.t) =\n      let open Ast.Statement.EnumDeclaration.DefaultedMember 
in\n      let (loc, { id = ident }) = member in\n      let id' = this#enum_member_identifier ident in\n      if ident == id' then\n        member\n      else\n        (loc, { id = id' })\n\n    method enum_boolean_member\n        (member :\n          ('loc Ast.BooleanLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t\n          ) =\n      let open Ast.Statement.EnumDeclaration.InitializedMember in\n      let (loc, { id = ident; init }) = member in\n      let id' = this#enum_member_identifier ident in\n      if ident == id' then\n        member\n      else\n        (loc, { id = id'; init })\n\n    method enum_number_member\n        (member : ('loc Ast.NumberLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t)\n        =\n      let open Ast.Statement.EnumDeclaration.InitializedMember in\n      let (loc, { id = ident; init }) = member in\n      let id' = this#enum_member_identifier ident in\n      if ident == id' then\n        member\n      else\n        (loc, { id = id'; init })\n\n    method enum_string_member\n        (member : ('loc Ast.StringLiteral.t, 'loc) Ast.Statement.EnumDeclaration.InitializedMember.t)\n        =\n      let open Ast.Statement.EnumDeclaration.InitializedMember in\n      let (loc, { id = ident; init }) = member in\n      let id' = this#enum_member_identifier ident in\n      if ident == id' then\n        member\n      else\n        (loc, { id = id'; init })\n\n    method enum_member_identifier (id : ('loc, 'loc) Ast.Identifier.t) = this#identifier id\n\n    method export_default_declaration\n        _loc (decl : ('loc, 'loc) Ast.Statement.ExportDefaultDeclaration.t) =\n      let open Ast.Statement.ExportDefaultDeclaration in\n      let { default; declaration; comments } = decl in\n      let declaration' = this#export_default_declaration_decl declaration in\n      let comments' = this#syntax_opt comments in\n      if declaration' == declaration && comments' == comments then\n        decl\n      else\n        { 
default; declaration = declaration'; comments = comments' }\n\n    method export_default_declaration_decl\n        (decl : ('loc, 'loc) Ast.Statement.ExportDefaultDeclaration.declaration) =\n      let open Ast.Statement.ExportDefaultDeclaration in\n      match decl with\n      | Declaration stmt -> id this#statement stmt decl (fun stmt -> Declaration stmt)\n      | Expression expr -> id this#expression expr decl (fun expr -> Expression expr)\n\n    method export_named_declaration _loc (decl : ('loc, 'loc) Ast.Statement.ExportNamedDeclaration.t)\n        =\n      let open Ast.Statement.ExportNamedDeclaration in\n      let { export_kind; source; specifiers; declaration; comments } = decl in\n      let source' = map_loc_opt this#export_source source in\n      let specifiers' = map_opt this#export_named_specifier specifiers in\n      let declaration' = map_opt this#statement declaration in\n      let comments' = this#syntax_opt comments in\n      if\n        source == source'\n        && specifiers == specifiers'\n        && declaration == declaration'\n        && comments == comments'\n      then\n        decl\n      else\n        {\n          export_kind;\n          source = source';\n          specifiers = specifiers';\n          declaration = declaration';\n          comments = comments';\n        }\n\n    method export_named_declaration_specifier\n        (spec : 'loc Ast.Statement.ExportNamedDeclaration.ExportSpecifier.t) =\n      let open Ast.Statement.ExportNamedDeclaration.ExportSpecifier in\n      let (loc, { local; exported }) = spec in\n      let local' = this#identifier local in\n      let exported' = map_opt this#identifier exported in\n      if local == local' && exported == exported' then\n        spec\n      else\n        (loc, { local = local'; exported = exported' })\n\n    method export_batch_specifier\n        (spec : 'loc Ast.Statement.ExportNamedDeclaration.ExportBatchSpecifier.t) =\n      let (loc, id_opt) = spec in\n      let id_opt' = map_opt 
this#identifier id_opt in\n      if id_opt == id_opt' then\n        spec\n      else\n        (loc, id_opt')\n\n    method export_named_specifier (spec : 'loc Ast.Statement.ExportNamedDeclaration.specifier) =\n      let open Ast.Statement.ExportNamedDeclaration in\n      match spec with\n      | ExportSpecifiers spec_list ->\n        let spec_list' = map_list this#export_named_declaration_specifier spec_list in\n        if spec_list == spec_list' then\n          spec\n        else\n          ExportSpecifiers spec_list'\n      | ExportBatchSpecifier batch ->\n        let batch' = this#export_batch_specifier batch in\n        if batch == batch' then\n          spec\n        else\n          ExportBatchSpecifier batch'\n\n    method export_source _loc (source : 'loc Ast.StringLiteral.t) =\n      let open Ast.StringLiteral in\n      let { value; raw; comments } = source in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        source\n      else\n        { value; raw; comments = comments' }\n\n    method expression_statement _loc (stmt : ('loc, 'loc) Ast.Statement.Expression.t) =\n      let open Ast.Statement.Expression in\n      let { expression = expr; directive; comments } = stmt in\n      let expr' = this#expression expr in\n      let comments' = this#syntax_opt comments in\n      if expr == expr' && comments == comments' then\n        stmt\n      else\n        { expression = expr'; directive; comments = comments' }\n\n    method expression_or_spread expr_or_spread =\n      let open Ast.Expression in\n      match expr_or_spread with\n      | Expression expr -> id this#expression expr expr_or_spread (fun expr -> Expression expr)\n      | Spread spread -> id this#spread_element spread expr_or_spread (fun spread -> Spread spread)\n\n    method for_in_statement _loc (stmt : ('loc, 'loc) Ast.Statement.ForIn.t) =\n      let open Ast.Statement.ForIn in\n      let { left; right; body; each; comments } = stmt in\n      let left' = 
this#for_in_statement_lhs left in\n      let right' = this#expression right in\n      let body' = this#statement body in\n      let comments' = this#syntax_opt comments in\n      if left == left' && right == right' && body == body' && comments == comments' then\n        stmt\n      else\n        { left = left'; right = right'; body = body'; each; comments = comments' }\n\n    method for_in_statement_lhs (left : ('loc, 'loc) Ast.Statement.ForIn.left) =\n      let open Ast.Statement.ForIn in\n      match left with\n      | LeftDeclaration decl ->\n        id this#for_in_left_declaration decl left (fun decl -> LeftDeclaration decl)\n      | LeftPattern patt ->\n        id this#for_in_assignment_pattern patt left (fun patt -> LeftPattern patt)\n\n    method for_in_left_declaration left =\n      let (loc, decl) = left in\n      id_loc this#variable_declaration loc decl left (fun decl -> (loc, decl))\n\n    method for_of_statement _loc (stuff : ('loc, 'loc) Ast.Statement.ForOf.t) =\n      let open Ast.Statement.ForOf in\n      let { left; right; body; await; comments } = stuff in\n      let left' = this#for_of_statement_lhs left in\n      let right' = this#expression right in\n      let body' = this#statement body in\n      let comments' = this#syntax_opt comments in\n      if left == left' && right == right' && body == body' && comments == comments' then\n        stuff\n      else\n        { left = left'; right = right'; body = body'; await; comments = comments' }\n\n    method for_of_statement_lhs (left : ('loc, 'loc) Ast.Statement.ForOf.left) =\n      let open Ast.Statement.ForOf in\n      match left with\n      | LeftDeclaration decl ->\n        id this#for_of_left_declaration decl left (fun decl -> LeftDeclaration decl)\n      | LeftPattern patt ->\n        id this#for_of_assignment_pattern patt left (fun patt -> LeftPattern patt)\n\n    method for_of_left_declaration left =\n      let (loc, decl) = left in\n      id_loc this#variable_declaration loc decl left (fun 
decl -> (loc, decl))\n\n    method for_statement _loc (stmt : ('loc, 'loc) Ast.Statement.For.t) =\n      let open Ast.Statement.For in\n      let { init; test; update; body; comments } = stmt in\n      let init' = map_opt this#for_statement_init init in\n      let test' = map_opt this#predicate_expression test in\n      let update' = map_opt this#expression update in\n      let body' = this#statement body in\n      let comments' = this#syntax_opt comments in\n      if\n        init == init'\n        && test == test'\n        && update == update'\n        && body == body'\n        && comments == comments'\n      then\n        stmt\n      else\n        { init = init'; test = test'; update = update'; body = body'; comments = comments' }\n\n    method for_statement_init (init : ('loc, 'loc) Ast.Statement.For.init) =\n      let open Ast.Statement.For in\n      match init with\n      | InitDeclaration decl ->\n        id this#for_init_declaration decl init (fun decl -> InitDeclaration decl)\n      | InitExpression expr -> id this#expression expr init (fun expr -> InitExpression expr)\n\n    method for_init_declaration init =\n      let (loc, decl) = init in\n      id_loc this#variable_declaration loc decl init (fun decl -> (loc, decl))\n\n    method function_param_type (fpt : ('loc, 'loc) Ast.Type.Function.Param.t) =\n      let open Ast.Type.Function.Param in\n      let (loc, { annot; name; optional }) = fpt in\n      let annot' = this#type_ annot in\n      let name' = map_opt this#identifier name in\n      if annot' == annot && name' == name then\n        fpt\n      else\n        (loc, { annot = annot'; name = name'; optional })\n\n    method function_rest_param_type (frpt : ('loc, 'loc) Ast.Type.Function.RestParam.t) =\n      let open Ast.Type.Function.RestParam in\n      let (loc, { argument; comments }) = frpt in\n      let argument' = this#function_param_type argument in\n      let comments' = this#syntax_opt comments in\n      if argument' == argument && comments' 
== comments then\n        frpt\n      else\n        (loc, { argument = argument'; comments = comments' })\n\n    method function_this_param_type (this_param : ('loc, 'loc) Ast.Type.Function.ThisParam.t) =\n      let open Ast.Type.Function.ThisParam in\n      let (loc, { annot; comments }) = this_param in\n      let annot' = this#type_annotation annot in\n      let comments' = this#syntax_opt comments in\n      if annot' == annot && comments' == comments then\n        this_param\n      else\n        (loc, { annot = annot'; comments = comments' })\n\n    method function_type _loc (ft : ('loc, 'loc) Ast.Type.Function.t) =\n      let open Ast.Type.Function in\n      let {\n        params = (params_loc, { Params.this_; params = ps; rest = rpo; comments = params_comments });\n        return;\n        tparams;\n        comments = func_comments;\n      } =\n        ft\n      in\n      let tparams' = map_opt this#type_params tparams in\n      let this_' = map_opt this#function_this_param_type this_ in\n      let ps' = map_list this#function_param_type ps in\n      let rpo' = map_opt this#function_rest_param_type rpo in\n      let return' = this#type_ return in\n      let func_comments' = this#syntax_opt func_comments in\n      let params_comments' = this#syntax_opt params_comments in\n      if\n        ps' == ps\n        && rpo' == rpo\n        && return' == return\n        && tparams' == tparams\n        && func_comments' == func_comments\n        && params_comments' == params_comments\n        && this_' == this_\n      then\n        ft\n      else\n        {\n          params =\n            ( params_loc,\n              { Params.this_ = this_'; params = ps'; rest = rpo'; comments = params_comments' }\n            );\n          return = return';\n          tparams = tparams';\n          comments = func_comments';\n        }\n\n    method label_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = this#identifier ident\n\n    method object_property_value_type (opvt : ('loc, 
'loc) Ast.Type.Object.Property.value) =\n      let open Ast.Type.Object.Property in\n      match opvt with\n      | Init t -> id this#type_ t opvt (fun t -> Init t)\n      | Get t -> id this#object_type_property_getter t opvt (fun t -> Get t)\n      | Set t -> id this#object_type_property_setter t opvt (fun t -> Set t)\n\n    method object_type_property_getter getter =\n      let (loc, ft) = getter in\n      id_loc this#function_type loc ft getter (fun ft -> (loc, ft))\n\n    method object_type_property_setter setter =\n      let (loc, ft) = setter in\n      id_loc this#function_type loc ft setter (fun ft -> (loc, ft))\n\n    method object_property_type (opt : ('loc, 'loc) Ast.Type.Object.Property.t) =\n      let open Ast.Type.Object.Property in\n      let (loc, { key; value; optional; static; proto; _method; variance; comments }) = opt in\n      let key' = this#object_key key in\n      let value' = this#object_property_value_type value in\n      let variance' = this#variance_opt variance in\n      let comments' = this#syntax_opt comments in\n      if key' == key && value' == value && variance' == variance && comments' == comments then\n        opt\n      else\n        ( loc,\n          {\n            key = key';\n            value = value';\n            optional;\n            static;\n            proto;\n            _method;\n            variance = variance';\n            comments = comments';\n          }\n        )\n\n    method object_spread_property_type (opt : ('loc, 'loc) Ast.Type.Object.SpreadProperty.t) =\n      let open Ast.Type.Object.SpreadProperty in\n      let (loc, { argument; comments }) = opt in\n      let argument' = this#type_ argument in\n      let comments' = this#syntax_opt comments in\n      if argument' == argument && comments == comments' then\n        opt\n      else\n        (loc, { argument = argument'; comments = comments' })\n\n    method object_indexer_property_type (opt : ('loc, 'loc) Ast.Type.Object.Indexer.t) =\n      let open 
Ast.Type.Object.Indexer in\n      let (loc, { id; key; value; static; variance; comments }) = opt in\n      let key' = this#type_ key in\n      let value' = this#type_ value in\n      let variance' = this#variance_opt variance in\n      let comments' = this#syntax_opt comments in\n      if key' == key && value' == value && variance' == variance && comments' == comments then\n        opt\n      else\n        (loc, { id; key = key'; value = value'; static; variance = variance'; comments = comments' })\n\n    method object_internal_slot_property_type (slot : ('loc, 'loc) Ast.Type.Object.InternalSlot.t) =\n      let open Ast.Type.Object.InternalSlot in\n      let (loc, { id; value; optional; static; _method; comments }) = slot in\n      let id' = this#identifier id in\n      let value' = this#type_ value in\n      let comments' = this#syntax_opt comments in\n      if id == id' && value == value' && comments == comments' then\n        slot\n      else\n        (loc, { id = id'; value = value'; optional; static; _method; comments = comments' })\n\n    method object_call_property_type (call : ('loc, 'loc) Ast.Type.Object.CallProperty.t) =\n      let open Ast.Type.Object.CallProperty in\n      let (loc, { value = (value_loc, value); static; comments }) = call in\n      let value' = this#function_type value_loc value in\n      let comments' = this#syntax_opt comments in\n      if value == value' && comments == comments' then\n        call\n      else\n        (loc, { value = (value_loc, value'); static; comments = comments' })\n\n    method object_type _loc (ot : ('loc, 'loc) Ast.Type.Object.t) =\n      let open Ast.Type.Object in\n      let { properties; exact; inexact; comments } = ot in\n      let properties' =\n        map_list\n          (fun p ->\n            match p with\n            | Property p' -> id this#object_property_type p' p (fun p' -> Property p')\n            | SpreadProperty p' ->\n              id this#object_spread_property_type p' p (fun p' -> 
SpreadProperty p')\n            | Indexer p' -> id this#object_indexer_property_type p' p (fun p' -> Indexer p')\n            | InternalSlot p' ->\n              id this#object_internal_slot_property_type p' p (fun p' -> InternalSlot p')\n            | CallProperty p' -> id this#object_call_property_type p' p (fun p' -> CallProperty p'))\n          properties\n      in\n      let comments' = this#syntax_opt comments in\n      if properties' == properties && comments == comments' then\n        ot\n      else\n        { properties = properties'; exact; inexact; comments = comments' }\n\n    method interface_type _loc (i : ('loc, 'loc) Ast.Type.Interface.t) =\n      let open Ast.Type.Interface in\n      let { extends; body; comments } = i in\n      let extends' = map_list (map_loc this#generic_type) extends in\n      let body' = map_loc this#object_type body in\n      let comments' = this#syntax_opt comments in\n      if extends' == extends && body' == body && comments == comments' then\n        i\n      else\n        { extends = extends'; body = body'; comments = comments' }\n\n    method generic_identifier_type (git : ('loc, 'loc) Ast.Type.Generic.Identifier.t) =\n      let open Ast.Type.Generic.Identifier in\n      match git with\n      | Unqualified i -> id this#type_identifier_reference i git (fun i -> Unqualified i)\n      | Qualified i -> id this#generic_qualified_identifier_type i git (fun i -> Qualified i)\n\n    method generic_qualified_identifier_type qual =\n      let open Ast.Type.Generic.Identifier in\n      let (loc, { qualification; id }) = qual in\n      let qualification' = this#generic_identifier_type qualification in\n      let id' = this#member_type_identifier id in\n      if qualification' == qualification && id' == id then\n        qual\n      else\n        (loc, { qualification = qualification'; id = id' })\n\n    method member_type_identifier id = this#identifier id\n\n    method variance (variance : 'loc Ast.Variance.t) =\n      let (loc, { 
Ast.Variance.kind; comments }) = variance in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        variance\n      else\n        (loc, { Ast.Variance.kind; comments = comments' })\n\n    method variance_opt (opt : 'loc Ast.Variance.t option) = map_opt this#variance opt\n\n    method type_args (targs : ('loc, 'loc) Ast.Type.TypeArgs.t) =\n      let open Ast.Type.TypeArgs in\n      let (loc, { arguments; comments }) = targs in\n      let arguments' = map_list this#type_ arguments in\n      let comments' = this#syntax_opt comments in\n      if arguments == arguments' && comments == comments' then\n        targs\n      else\n        (loc, { arguments = arguments'; comments = comments' })\n\n    method type_params (tparams : ('loc, 'loc) Ast.Type.TypeParams.t) =\n      let open Ast.Type.TypeParams in\n      let (loc, { params = tps; comments }) = tparams in\n      let tps' = map_list this#type_param tps in\n      let comments' = this#syntax_opt comments in\n      if tps' == tps && comments' == comments then\n        tparams\n      else\n        (loc, { params = tps'; comments = comments' })\n\n    method type_param (tparam : ('loc, 'loc) Ast.Type.TypeParam.t) =\n      let open Ast.Type.TypeParam in\n      let (loc, { name; bound; variance; default }) = tparam in\n      let bound' = this#type_annotation_hint bound in\n      let variance' = this#variance_opt variance in\n      let default' = map_opt this#type_ default in\n      let name' = this#binding_type_identifier name in\n      if name' == name && bound' == bound && variance' == variance && default' == default then\n        tparam\n      else\n        (loc, { name = name'; bound = bound'; variance = variance'; default = default' })\n\n    method generic_type _loc (gt : ('loc, 'loc) Ast.Type.Generic.t) =\n      let open Ast.Type.Generic in\n      let { id; targs; comments } = gt in\n      let id' = this#generic_identifier_type id in\n      let targs' = map_opt this#type_args 
targs in\n      let comments' = this#syntax_opt comments in\n      if id' == id && targs' == targs && comments' == comments then\n        gt\n      else\n        { id = id'; targs = targs'; comments = comments' }\n\n    method indexed_access _loc (ia : ('loc, 'loc) Ast.Type.IndexedAccess.t) =\n      let open Ast.Type.IndexedAccess in\n      let { _object; index; comments } = ia in\n      let _object' = this#type_ _object in\n      let index' = this#type_ index in\n      let comments' = this#syntax_opt comments in\n      if _object' == _object && index' == index && comments' == comments then\n        ia\n      else\n        { _object = _object'; index = index'; comments = comments' }\n\n    method optional_indexed_access loc (ia : ('loc, 'loc) Ast.Type.OptionalIndexedAccess.t) =\n      let open Ast.Type.OptionalIndexedAccess in\n      let { indexed_access; optional } = ia in\n      let indexed_access' = this#indexed_access loc indexed_access in\n      if indexed_access' == indexed_access then\n        ia\n      else\n        { indexed_access = indexed_access'; optional }\n\n    method string_literal_type _loc (lit : 'loc Ast.StringLiteral.t) =\n      let open Ast.StringLiteral in\n      let { value; raw; comments } = lit in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        lit\n      else\n        { value; raw; comments = comments' }\n\n    method number_literal_type _loc (lit : 'loc Ast.NumberLiteral.t) =\n      let open Ast.NumberLiteral in\n      let { value; raw; comments } = lit in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        lit\n      else\n        { value; raw; comments = comments' }\n\n    method bigint_literal_type _loc (lit : 'loc Ast.BigIntLiteral.t) =\n      let open Ast.BigIntLiteral in\n      let { value; raw; comments } = lit in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        lit\n      else\n        { 
value; raw; comments = comments' }\n\n    method boolean_literal_type _loc (lit : 'loc Ast.BooleanLiteral.t) =\n      let open Ast.BooleanLiteral in\n      let { value; comments } = lit in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        lit\n      else\n        { value; comments = comments' }\n\n    method nullable_type (t : ('loc, 'loc) Ast.Type.Nullable.t) =\n      let open Ast.Type.Nullable in\n      let { argument; comments } = t in\n      let argument' = this#type_ argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        t\n      else\n        { argument = argument'; comments = comments' }\n\n    method typeof_type (t : ('loc, 'loc) Ast.Type.Typeof.t) =\n      let open Ast.Type.Typeof in\n      let { argument; comments } = t in\n      let argument' = this#typeof_expression argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        t\n      else\n        { argument = argument'; comments = comments' }\n\n    method typeof_expression (git : ('loc, 'loc) Ast.Type.Typeof.Target.t) =\n      let open Ast.Type.Typeof.Target in\n      match git with\n      | Unqualified i -> id this#typeof_identifier i git (fun i -> Unqualified i)\n      | Qualified i -> id this#typeof_qualified_identifier i git (fun i -> Qualified i)\n\n    method typeof_identifier id = this#identifier id\n\n    method typeof_member_identifier id = this#identifier id\n\n    method typeof_qualified_identifier qual =\n      let open Ast.Type.Typeof.Target in\n      let (loc, { qualification; id }) = qual in\n      let qualification' = this#typeof_expression qualification in\n      let id' = this#typeof_member_identifier id in\n      if qualification' == qualification && id' == id then\n        qual\n      else\n        (loc, { qualification = qualification'; id = id' })\n\n    method tuple_type (t : ('loc, 
'loc) Ast.Type.Tuple.t) =\n      let open Ast.Type.Tuple in\n      let { types; comments } = t in\n      let types' = map_list this#type_ types in\n      let comments' = this#syntax_opt comments in\n      if types == types' && comments == comments' then\n        t\n      else\n        { types = types'; comments = comments' }\n\n    method array_type (t : ('loc, 'loc) Ast.Type.Array.t) =\n      let open Ast.Type.Array in\n      let { argument; comments } = t in\n      let argument' = this#type_ argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        t\n      else\n        { argument = argument'; comments = comments' }\n\n    method union_type _loc (t : ('loc, 'loc) Ast.Type.Union.t) =\n      let open Ast.Type.Union in\n      let { types = (t0, t1, ts); comments } = t in\n      let t0' = this#type_ t0 in\n      let t1' = this#type_ t1 in\n      let ts' = map_list this#type_ ts in\n      let comments' = this#syntax_opt comments in\n      if t0' == t0 && t1' == t1 && ts' == ts && comments' == comments then\n        t\n      else\n        { types = (t0', t1', ts'); comments = comments' }\n\n    method intersection_type _loc (t : ('loc, 'loc) Ast.Type.Intersection.t) =\n      let open Ast.Type.Intersection in\n      let { types = (t0, t1, ts); comments } = t in\n      let t0' = this#type_ t0 in\n      let t1' = this#type_ t1 in\n      let ts' = map_list this#type_ ts in\n      let comments' = this#syntax_opt comments in\n      if t0' == t0 && t1' == t1 && ts' == ts && comments' == comments then\n        t\n      else\n        { types = (t0', t1', ts'); comments = comments' }\n\n    method type_ (t : ('loc, 'loc) Ast.Type.t) =\n      let open Ast.Type in\n      match t with\n      | (loc, Any comments) -> id this#syntax_opt comments t (fun comments -> (loc, Any comments))\n      | (loc, Mixed comments) ->\n        id this#syntax_opt comments t (fun comments -> (loc, Mixed comments))\n      | 
(loc, Empty comments) ->\n        id this#syntax_opt comments t (fun comments -> (loc, Empty comments))\n      | (loc, Void comments) -> id this#syntax_opt comments t (fun comments -> (loc, Void comments))\n      | (loc, Null comments) -> id this#syntax_opt comments t (fun comments -> (loc, Null comments))\n      | (loc, Symbol comments) ->\n        id this#syntax_opt comments t (fun comments -> (loc, Symbol comments))\n      | (loc, Number comments) ->\n        id this#syntax_opt comments t (fun comments -> (loc, Number comments))\n      | (loc, BigInt comments) ->\n        id this#syntax_opt comments t (fun comments -> (loc, BigInt comments))\n      | (loc, String comments) ->\n        id this#syntax_opt comments t (fun comments -> (loc, String comments))\n      | (loc, Boolean comments) ->\n        id this#syntax_opt comments t (fun comments -> (loc, Boolean comments))\n      | (loc, Exists comments) ->\n        id this#syntax_opt comments t (fun comments -> (loc, Exists comments))\n      | (loc, Nullable t') -> id this#nullable_type t' t (fun t' -> (loc, Nullable t'))\n      | (loc, Array t') -> id this#array_type t' t (fun t' -> (loc, Array t'))\n      | (loc, Typeof t') -> id this#typeof_type t' t (fun t' -> (loc, Typeof t'))\n      | (loc, Function ft) -> id_loc this#function_type loc ft t (fun ft -> (loc, Function ft))\n      | (loc, Object ot) -> id_loc this#object_type loc ot t (fun ot -> (loc, Object ot))\n      | (loc, Interface i) -> id_loc this#interface_type loc i t (fun i -> (loc, Interface i))\n      | (loc, Generic gt) -> id_loc this#generic_type loc gt t (fun gt -> (loc, Generic gt))\n      | (loc, IndexedAccess ia) ->\n        id_loc this#indexed_access loc ia t (fun ia -> (loc, IndexedAccess ia))\n      | (loc, OptionalIndexedAccess ia) ->\n        id_loc this#optional_indexed_access loc ia t (fun ia -> (loc, OptionalIndexedAccess ia))\n      | (loc, StringLiteral lit) ->\n        id_loc this#string_literal_type loc lit t (fun lit -> (loc, 
StringLiteral lit))\n      | (loc, NumberLiteral lit) ->\n        id_loc this#number_literal_type loc lit t (fun lit -> (loc, NumberLiteral lit))\n      | (loc, BigIntLiteral lit) ->\n        id_loc this#bigint_literal_type loc lit t (fun lit -> (loc, BigIntLiteral lit))\n      | (loc, BooleanLiteral lit) ->\n        id_loc this#boolean_literal_type loc lit t (fun lit -> (loc, BooleanLiteral lit))\n      | (loc, Union t') -> id_loc this#union_type loc t' t (fun t' -> (loc, Union t'))\n      | (loc, Intersection t') ->\n        id_loc this#intersection_type loc t' t (fun t' -> (loc, Intersection t'))\n      | (loc, Tuple t') -> id this#tuple_type t' t (fun t' -> (loc, Tuple t'))\n\n    method type_annotation (annot : ('loc, 'loc) Ast.Type.annotation) =\n      let (loc, a) = annot in\n      id this#type_ a annot (fun a -> (loc, a))\n\n    method type_annotation_hint (return : ('M, 'T) Ast.Type.annotation_or_hint) =\n      let open Ast.Type in\n      match return with\n      | Available annot ->\n        let annot' = this#type_annotation annot in\n        if annot' == annot then\n          return\n        else\n          Available annot'\n      | Missing _loc -> return\n\n    method function_declaration loc (stmt : ('loc, 'loc) Ast.Function.t) = this#function_ loc stmt\n\n    method function_expression loc (stmt : ('loc, 'loc) Ast.Function.t) =\n      this#function_expression_or_method loc stmt\n\n    (** previously, we conflated [function_expression] and [class_method]. callers should be\n        updated to override those individually. 
*)\n    method function_expression_or_method loc (stmt : ('loc, 'loc) Ast.Function.t) =\n      this#function_ loc stmt\n    \n    (* Internal helper for function declarations, function expressions and arrow functions *)\n    method function_ _loc (expr : ('loc, 'loc) Ast.Function.t) =\n      let open Ast.Function in\n      let {\n        id = ident;\n        params;\n        body;\n        async;\n        generator;\n        predicate;\n        return;\n        tparams;\n        sig_loc;\n        comments;\n      } =\n        expr\n      in\n      let ident' = map_opt this#function_identifier ident in\n      let tparams' = map_opt this#type_params tparams in\n      let params' = this#function_params params in\n      let return' = this#type_annotation_hint return in\n      let body' = this#function_body_any body in\n      let predicate' = map_opt this#predicate predicate in\n      let comments' = this#syntax_opt comments in\n      if\n        ident == ident'\n        && params == params'\n        && body == body'\n        && predicate == predicate'\n        && return == return'\n        && tparams == tparams'\n        && comments == comments'\n      then\n        expr\n      else\n        {\n          id = ident';\n          params = params';\n          return = return';\n          body = body';\n          async;\n          generator;\n          predicate = predicate';\n          tparams = tparams';\n          sig_loc;\n          comments = comments';\n        }\n\n    method function_params (params : ('loc, 'loc) Ast.Function.Params.t) =\n      let open Ast.Function in\n      let (loc, { Params.params = params_list; rest; comments; this_ }) = params in\n      let params_list' = map_list this#function_param params_list in\n      let rest' = map_opt this#function_rest_param rest in\n      let this_' = map_opt this#function_this_param this_ in\n      let comments' = this#syntax_opt comments in\n      if params_list == params_list' && rest == rest' && comments == 
comments' && this_ == this_'\n      then\n        params\n      else\n        (loc, { Params.params = params_list'; rest = rest'; comments = comments'; this_ = this_' })\n\n    method function_this_param (this_param : ('loc, 'loc) Ast.Function.ThisParam.t) =\n      let open Ast.Function.ThisParam in\n      let (loc, { annot; comments }) = this_param in\n      let annot' = this#type_annotation annot in\n      let comments' = this#syntax_opt comments in\n      if annot' == annot && comments' == comments then\n        this_param\n      else\n        (loc, { annot = annot'; comments = comments' })\n\n    method function_param (param : ('loc, 'loc) Ast.Function.Param.t) =\n      let open Ast.Function.Param in\n      let (loc, { argument; default }) = param in\n      let argument' = this#function_param_pattern argument in\n      let default' = map_opt this#expression default in\n      if argument == argument' && default == default' then\n        param\n      else\n        (loc, { argument = argument'; default = default' })\n\n    method function_body_any (body : ('loc, 'loc) Ast.Function.body) =\n      match body with\n      | Ast.Function.BodyBlock block ->\n        id this#function_body block body (fun block -> Ast.Function.BodyBlock block)\n      | Ast.Function.BodyExpression expr ->\n        id this#expression expr body (fun expr -> Ast.Function.BodyExpression expr)\n\n    method function_body (body : 'loc * ('loc, 'loc) Ast.Statement.Block.t) =\n      let (loc, block) = body in\n      id_loc this#block loc block body (fun block -> (loc, block))\n\n    method function_identifier (ident : ('loc, 'loc) Ast.Identifier.t) =\n      this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Var ident\n\n    (* TODO *)\n    method generator _loc (expr : ('loc, 'loc) Ast.Expression.Generator.t) = expr\n\n    method identifier (id : ('loc, 'loc) Ast.Identifier.t) =\n      let open Ast.Identifier in\n      let (loc, { name; comments }) = id in\n      let comments' = 
this#syntax_opt comments in\n      if comments == comments' then\n        id\n      else\n        (loc, { name; comments = comments' })\n\n    method type_identifier (id : ('loc, 'loc) Ast.Identifier.t) = this#identifier id\n\n    method type_identifier_reference (id : ('loc, 'loc) Ast.Identifier.t) = this#type_identifier id\n\n    method binding_type_identifier (id : ('loc, 'loc) Ast.Identifier.t) = this#type_identifier id\n\n    method interface _loc (interface : ('loc, 'loc) Ast.Statement.Interface.t) =\n      let open Ast.Statement.Interface in\n      let { id = ident; tparams; extends; body; comments } = interface in\n      let id' = this#binding_type_identifier ident in\n      let tparams' = map_opt this#type_params tparams in\n      let extends' = map_list (map_loc this#generic_type) extends in\n      let body' = map_loc this#object_type body in\n      let comments' = this#syntax_opt comments in\n      if\n        id' == ident\n        && tparams' == tparams\n        && extends' == extends\n        && body' == body\n        && comments' == comments\n      then\n        interface\n      else\n        { id = id'; tparams = tparams'; extends = extends'; body = body'; comments = comments' }\n\n    method interface_declaration loc (decl : ('loc, 'loc) Ast.Statement.Interface.t) =\n      this#interface loc decl\n\n    method private_name (id : 'loc Ast.PrivateName.t) =\n      let open Ast.PrivateName in\n      let (loc, { name; comments }) = id in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        id\n      else\n        (loc, { name; comments = comments' })\n\n    method computed_key (key : ('loc, 'loc) Ast.ComputedKey.t) =\n      let open Ast.ComputedKey in\n      let (loc, { expression; comments }) = key in\n      let expression' = this#expression expression in\n      let comments' = this#syntax_opt comments in\n      if expression == expression' && comments == comments' then\n        key\n      else\n        (loc, 
{ expression = expression'; comments = comments' })\n\n    method import _loc (expr : ('loc, 'loc) Ast.Expression.Import.t) =\n      let open Ast.Expression.Import in\n      let { argument; comments } = expr in\n      let argument' = this#expression argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        expr\n      else\n        { argument = argument'; comments = comments' }\n\n    method if_consequent_statement ~has_else (stmt : ('loc, 'loc) Ast.Statement.t) =\n      ignore has_else;\n      this#statement stmt\n\n    method if_alternate_statement _loc (altern : ('loc, 'loc) Ast.Statement.If.Alternate.t') =\n      let open Ast.Statement.If.Alternate in\n      let { body; comments } = altern in\n      let body' = this#statement body in\n      let comments' = this#syntax_opt comments in\n      if body == body' && comments == comments' then\n        altern\n      else\n        { body = body'; comments = comments' }\n\n    method if_statement _loc (stmt : ('loc, 'loc) Ast.Statement.If.t) =\n      let open Ast.Statement.If in\n      let { test; consequent; alternate; comments } = stmt in\n      let test' = this#predicate_expression test in\n      let consequent' = this#if_consequent_statement ~has_else:(alternate <> None) consequent in\n      let alternate' = map_opt (map_loc this#if_alternate_statement) alternate in\n      let comments' = this#syntax_opt comments in\n      if\n        test == test'\n        && consequent == consequent'\n        && alternate == alternate'\n        && comments == comments'\n      then\n        stmt\n      else\n        { test = test'; consequent = consequent'; alternate = alternate'; comments = comments' }\n\n    method import_declaration _loc (decl : ('loc, 'loc) Ast.Statement.ImportDeclaration.t) =\n      let open Ast.Statement.ImportDeclaration in\n      let { import_kind; source; specifiers; default; comments } = decl in\n      let source' = map_loc 
this#import_source source in\n      let specifiers' = map_opt (this#import_specifier ~import_kind) specifiers in\n      let default' = map_opt (this#import_default_specifier ~import_kind) default in\n      let comments' = this#syntax_opt comments in\n      if\n        source == source'\n        && specifiers == specifiers'\n        && default == default'\n        && comments == comments'\n      then\n        decl\n      else\n        {\n          import_kind;\n          source = source';\n          specifiers = specifiers';\n          default = default';\n          comments = comments';\n        }\n\n    method import_source _loc (source : 'loc Ast.StringLiteral.t) =\n      let open Ast.StringLiteral in\n      let { value; raw; comments } = source in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        source\n      else\n        { value; raw; comments = comments' }\n\n    method import_specifier\n        ~import_kind (specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.specifier) =\n      let open Ast.Statement.ImportDeclaration in\n      match specifier with\n      | ImportNamedSpecifiers named_specifiers ->\n        let named_specifiers' =\n          map_list (this#import_named_specifier ~import_kind) named_specifiers\n        in\n        if named_specifiers == named_specifiers' then\n          specifier\n        else\n          ImportNamedSpecifiers named_specifiers'\n      | ImportNamespaceSpecifier (loc, ident) ->\n        id_loc (this#import_namespace_specifier ~import_kind) loc ident specifier (fun ident ->\n            ImportNamespaceSpecifier (loc, ident)\n        )\n\n    method remote_identifier id = this#identifier id\n\n    method import_named_specifier\n        ~(import_kind : Ast.Statement.ImportDeclaration.import_kind)\n        (specifier : ('loc, 'loc) Ast.Statement.ImportDeclaration.named_specifier) =\n      let open Ast.Statement.ImportDeclaration in\n      let { kind; local; remote } = specifier 
in\n      let (is_type_remote, is_type_local) =\n        match (import_kind, kind) with\n        | (ImportType, _)\n        | (_, Some ImportType) ->\n          (true, true)\n        | (ImportTypeof, _)\n        | (_, Some ImportTypeof) ->\n          (false, true)\n        | _ -> (false, false)\n      in\n      let remote' =\n        match local with\n        | None ->\n          if is_type_remote then\n            this#binding_type_identifier remote\n          else\n            this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let remote\n        | Some _ -> this#remote_identifier remote\n      in\n      let local' =\n        match local with\n        | None -> None\n        | Some ident ->\n          let local_visitor =\n            if is_type_local then\n              this#binding_type_identifier\n            else\n              this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let\n          in\n          id local_visitor ident local (fun ident -> Some ident)\n      in\n      if local == local' && remote == remote' then\n        specifier\n      else\n        { kind; local = local'; remote = remote' }\n\n    method import_default_specifier ~import_kind (id : ('loc, 'loc) Ast.Identifier.t) =\n      let open Ast.Statement.ImportDeclaration in\n      let local_visitor =\n        match import_kind with\n        | ImportType\n        | ImportTypeof ->\n          this#binding_type_identifier\n        | _ -> this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let\n      in\n      local_visitor id\n\n    method import_namespace_specifier ~import_kind _loc (id : ('loc, 'loc) Ast.Identifier.t) =\n      let open Ast.Statement.ImportDeclaration in\n      let local_visitor =\n        match import_kind with\n        | ImportType\n        | ImportTypeof ->\n          this#binding_type_identifier\n        | _ -> this#pattern_identifier ~kind:Ast.Statement.VariableDeclaration.Let\n      in\n      local_visitor id\n\n    method jsx_element 
_loc (expr : ('loc, 'loc) Ast.JSX.element) =\n      let open Ast.JSX in\n      let { opening_element; closing_element; children; comments } = expr in\n      let opening_element' = this#jsx_opening_element opening_element in\n      let closing_element' = map_opt this#jsx_closing_element closing_element in\n      let children' = this#jsx_children children in\n      let comments' = this#syntax_opt comments in\n      if\n        opening_element == opening_element'\n        && closing_element == closing_element'\n        && children == children'\n        && comments == comments'\n      then\n        expr\n      else\n        {\n          opening_element = opening_element';\n          closing_element = closing_element';\n          children = children';\n          comments = comments';\n        }\n\n    method jsx_fragment _loc (expr : ('loc, 'loc) Ast.JSX.fragment) =\n      let open Ast.JSX in\n      let { frag_children; frag_comments; _ } = expr in\n      let children' = this#jsx_children frag_children in\n      let frag_comments' = this#syntax_opt frag_comments in\n      if frag_children == children' && frag_comments == frag_comments' then\n        expr\n      else\n        { expr with frag_children = children'; frag_comments = frag_comments' }\n\n    method jsx_opening_element (elem : ('loc, 'loc) Ast.JSX.Opening.t) =\n      let open Ast.JSX.Opening in\n      let (loc, { name; self_closing; attributes }) = elem in\n      let name' = this#jsx_element_name name in\n      let attributes' = map_list this#jsx_opening_attribute attributes in\n      if name == name' && attributes == attributes' then\n        elem\n      else\n        (loc, { name = name'; self_closing; attributes = attributes' })\n\n    method jsx_closing_element (elem : ('loc, 'loc) Ast.JSX.Closing.t) =\n      let open Ast.JSX.Closing in\n      let (loc, { name }) = elem in\n      let name' = this#jsx_element_name name in\n      if name == name' then\n        elem\n      else\n        (loc, { name = name' 
})\n\n    method jsx_opening_attribute (jsx_attr : ('loc, 'loc) Ast.JSX.Opening.attribute) =\n      let open Ast.JSX.Opening in\n      match jsx_attr with\n      | Attribute attr -> id this#jsx_attribute attr jsx_attr (fun attr -> Attribute attr)\n      | SpreadAttribute (loc, attr) ->\n        id_loc this#jsx_spread_attribute loc attr jsx_attr (fun attr -> SpreadAttribute (loc, attr))\n\n    method jsx_spread_attribute _loc (attr : ('loc, 'loc) Ast.JSX.SpreadAttribute.t') =\n      let open Ast.JSX.SpreadAttribute in\n      let { argument; comments } = attr in\n      let argument' = this#expression argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        attr\n      else\n        { argument = argument'; comments = comments' }\n\n    method jsx_attribute (attr : ('loc, 'loc) Ast.JSX.Attribute.t) =\n      let open Ast.JSX.Attribute in\n      let (loc, { name; value }) = attr in\n      let name' = this#jsx_attribute_name name in\n      let value' = map_opt this#jsx_attribute_value value in\n      if name == name' && value == value' then\n        attr\n      else\n        (loc, { name = name'; value = value' })\n\n    method jsx_attribute_name (name : ('loc, 'loc) Ast.JSX.Attribute.name) =\n      let open Ast.JSX.Attribute in\n      match name with\n      | Identifier ident ->\n        id this#jsx_attribute_name_identifier ident name (fun ident -> Identifier ident)\n      | NamespacedName ns ->\n        id this#jsx_attribute_name_namespaced ns name (fun ns -> NamespacedName ns)\n\n    method jsx_attribute_name_identifier ident = this#jsx_identifier ident\n\n    method jsx_attribute_name_namespaced ns = this#jsx_namespaced_name ns\n\n    method jsx_attribute_value (value : ('loc, 'loc) Ast.JSX.Attribute.value) =\n      let open Ast.JSX.Attribute in\n      match value with\n      | Literal (loc, lit) ->\n        id_loc this#jsx_attribute_value_literal loc lit value (fun lit -> Literal (loc, 
lit))\n      | ExpressionContainer (loc, expr) ->\n        id_loc this#jsx_attribute_value_expression loc expr value (fun expr ->\n            ExpressionContainer (loc, expr)\n        )\n\n    method jsx_attribute_value_expression loc (jsx_expr : ('loc, 'loc) Ast.JSX.ExpressionContainer.t)\n        =\n      this#jsx_expression loc jsx_expr\n\n    method jsx_attribute_value_literal loc (lit : 'loc Ast.Literal.t) = this#literal loc lit\n\n    method jsx_children ((loc, children) as orig : 'loc * ('loc, 'loc) Ast.JSX.child list) =\n      let children' = map_list this#jsx_child children in\n      if children == children' then\n        orig\n      else\n        (loc, children')\n\n    method jsx_child (child : ('loc, 'loc) Ast.JSX.child) =\n      let open Ast.JSX in\n      match child with\n      | (loc, Element elem) ->\n        id_loc this#jsx_element loc elem child (fun elem -> (loc, Element elem))\n      | (loc, Fragment frag) ->\n        id_loc this#jsx_fragment loc frag child (fun frag -> (loc, Fragment frag))\n      | (loc, ExpressionContainer expr) ->\n        id_loc this#jsx_expression loc expr child (fun expr -> (loc, ExpressionContainer expr))\n      | (loc, SpreadChild spread) ->\n        id this#jsx_spread_child spread child (fun spread -> (loc, SpreadChild spread))\n      | (_loc, Text _) -> child\n\n    method jsx_expression _loc (jsx_expr : ('loc, 'loc) Ast.JSX.ExpressionContainer.t) =\n      let open Ast.JSX.ExpressionContainer in\n      let { expression; comments } = jsx_expr in\n      let comments' = this#syntax_opt comments in\n      match expression with\n      | Expression expr ->\n        let expr' = this#expression expr in\n        if expr == expr' && comments == comments' then\n          jsx_expr\n        else\n          { expression = Expression expr'; comments = comments' }\n      | EmptyExpression ->\n        if comments == comments' then\n          jsx_expr\n        else\n          { expression = EmptyExpression; comments = comments' }\n\n   
 method jsx_spread_child (jsx_spread_child : ('loc, 'loc) Ast.JSX.SpreadChild.t) =\n      let open Ast.JSX.SpreadChild in\n      let { expression; comments } = jsx_spread_child in\n      let expression' = this#expression expression in\n      let comments' = this#syntax_opt comments in\n      if expression == expression' && comments == comments' then\n        jsx_spread_child\n      else\n        { expression = expression'; comments = comments' }\n\n    method jsx_element_name (name : ('loc, 'loc) Ast.JSX.name) =\n      let open Ast.JSX in\n      match name with\n      | Identifier ident ->\n        id this#jsx_element_name_identifier ident name (fun ident -> Identifier ident)\n      | NamespacedName ns ->\n        id this#jsx_element_name_namespaced ns name (fun ns -> NamespacedName ns)\n      | MemberExpression expr ->\n        id this#jsx_element_name_member_expression expr name (fun expr -> MemberExpression expr)\n\n    method jsx_element_name_identifier ident = this#jsx_identifier ident\n\n    method jsx_element_name_namespaced ns = this#jsx_namespaced_name ns\n\n    method jsx_element_name_member_expression expr = this#jsx_member_expression expr\n\n    method jsx_namespaced_name (namespaced_name : ('loc, 'loc) Ast.JSX.NamespacedName.t) =\n      let open Ast.JSX in\n      NamespacedName.(\n        let (loc, { namespace; name }) = namespaced_name in\n        let namespace' = this#jsx_identifier namespace in\n        let name' = this#jsx_identifier name in\n        if namespace == namespace' && name == name' then\n          namespaced_name\n        else\n          (loc, { namespace = namespace'; name = name' })\n      )\n\n    method jsx_member_expression (member_exp : ('loc, 'loc) Ast.JSX.MemberExpression.t) =\n      let open Ast.JSX in\n      let (loc, { MemberExpression._object; MemberExpression.property }) = member_exp in\n      let _object' = this#jsx_member_expression_object _object in\n      let property' = this#jsx_identifier property in\n      if _object 
== _object' && property == property' then\n        member_exp\n      else\n        (loc, MemberExpression.{ _object = _object'; property = property' })\n\n    method jsx_member_expression_object (_object : ('loc, 'loc) Ast.JSX.MemberExpression._object) =\n      let open Ast.JSX.MemberExpression in\n      match _object with\n      | Identifier ident ->\n        id this#jsx_member_expression_identifier ident _object (fun ident -> Identifier ident)\n      | MemberExpression nested_exp ->\n        id this#jsx_member_expression nested_exp _object (fun exp -> MemberExpression exp)\n\n    method jsx_member_expression_identifier ident = this#jsx_element_name_identifier ident\n\n    method jsx_identifier (id : ('loc, 'loc) Ast.JSX.Identifier.t) =\n      let open Ast.JSX.Identifier in\n      let (loc, { name; comments }) = id in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        id\n      else\n        (loc, { name; comments = comments' })\n\n    method labeled_statement _loc (stmt : ('loc, 'loc) Ast.Statement.Labeled.t) =\n      let open Ast.Statement.Labeled in\n      let { label; body; comments } = stmt in\n      let label' = this#label_identifier label in\n      let body' = this#statement body in\n      let comments' = this#syntax_opt comments in\n      if label == label' && body == body' && comments == comments' then\n        stmt\n      else\n        { label = label'; body = body'; comments = comments' }\n\n    method literal _loc (expr : 'loc Ast.Literal.t) =\n      let open Ast.Literal in\n      let { value; raw; comments } = expr in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        expr\n      else\n        { value; raw; comments = comments' }\n\n    method logical _loc (expr : ('loc, 'loc) Ast.Expression.Logical.t) =\n      let open Ast.Expression.Logical in\n      let { operator = _; left; right; comments } = expr in\n      let left' = this#expression left in\n      let 
right' = this#expression right in\n      let comments' = this#syntax_opt comments in\n      if left == left' && right == right' && comments == comments' then\n        expr\n      else\n        { expr with left = left'; right = right'; comments = comments' }\n\n    method member _loc (expr : ('loc, 'loc) Ast.Expression.Member.t) =\n      let open Ast.Expression.Member in\n      let { _object; property; comments } = expr in\n      let _object' = this#expression _object in\n      let property' = this#member_property property in\n      let comments' = this#syntax_opt comments in\n      if _object == _object' && property == property' && comments == comments' then\n        expr\n      else\n        { _object = _object'; property = property'; comments = comments' }\n\n    method optional_member loc (expr : ('loc, 'loc) Ast.Expression.OptionalMember.t) =\n      let open Ast.Expression.OptionalMember in\n      let { member; optional = _; filtered_out = _ } = expr in\n      let member' = this#member loc member in\n      if member == member' then\n        expr\n      else\n        { expr with member = member' }\n\n    method member_property (expr : ('loc, 'loc) Ast.Expression.Member.property) =\n      let open Ast.Expression.Member in\n      match expr with\n      | PropertyIdentifier ident ->\n        id this#member_property_identifier ident expr (fun ident -> PropertyIdentifier ident)\n      | PropertyPrivateName ident ->\n        id this#member_private_name ident expr (fun ident -> PropertyPrivateName ident)\n      | PropertyExpression e ->\n        id this#member_property_expression e expr (fun e -> PropertyExpression e)\n\n    method member_property_identifier (ident : ('loc, 'loc) Ast.Identifier.t) =\n      this#identifier ident\n\n    method member_private_name (name : 'loc Ast.PrivateName.t) = this#private_name name\n\n    method member_property_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr\n\n    method meta_property _loc (expr : 'loc 
Ast.Expression.MetaProperty.t) =\n      let open Ast.Expression.MetaProperty in\n      let { meta; property; comments } = expr in\n      let meta' = this#identifier meta in\n      let property' = this#identifier property in\n      let comments' = this#syntax_opt comments in\n      if meta == meta' && property == property' && comments == comments' then\n        expr\n      else\n        { meta = meta'; property = property'; comments = comments' }\n\n    method new_ _loc (expr : ('loc, 'loc) Ast.Expression.New.t) =\n      let open Ast.Expression.New in\n      let { callee; targs; arguments; comments } = expr in\n      let callee' = this#expression callee in\n      let targs' = map_opt this#call_type_args targs in\n      let arguments' = map_opt this#call_arguments arguments in\n      let comments' = this#syntax_opt comments in\n      if callee == callee' && targs == targs' && arguments == arguments' && comments == comments'\n      then\n        expr\n      else\n        { callee = callee'; targs = targs'; arguments = arguments'; comments = comments' }\n\n    method object_ _loc (expr : ('loc, 'loc) Ast.Expression.Object.t) =\n      let open Ast.Expression.Object in\n      let { properties; comments } = expr in\n      let properties' =\n        map_list\n          (fun prop ->\n            match prop with\n            | Property p ->\n              let p' = this#object_property p in\n              if p == p' then\n                prop\n              else\n                Property p'\n            | SpreadProperty s ->\n              let s' = this#spread_property s in\n              if s == s' then\n                prop\n              else\n                SpreadProperty s')\n          properties\n      in\n      let comments' = this#syntax_opt comments in\n      if properties == properties' && comments == comments' then\n        expr\n      else\n        { properties = properties'; comments = comments' }\n\n    method object_property (prop : ('loc, 'loc) 
Ast.Expression.Object.Property.t) =\n      let open Ast.Expression.Object.Property in\n      match prop with\n      | (loc, Init { key; value; shorthand }) ->\n        let key' = this#object_key key in\n        let value' = this#expression value in\n        let shorthand' =\n          (* Try to figure out if shorthand should still be true--if\n             key and value change differently, it should become false *)\n          shorthand\n          &&\n          match (key', value') with\n          | ( Identifier (_, { Ast.Identifier.name = key_name; _ }),\n              (_, Ast.Expression.Identifier (_, { Ast.Identifier.name = value_name; _ }))\n            ) ->\n            String.equal key_name value_name\n          | _ -> key == key' && value == value'\n        in\n        if key == key' && value == value' && shorthand == shorthand' then\n          prop\n        else\n          (loc, Init { key = key'; value = value'; shorthand = shorthand' })\n      | (loc, Method { key; value = fn }) ->\n        let key' = this#object_key key in\n        let fn' = map_loc this#function_expression_or_method fn in\n        if key == key' && fn == fn' then\n          prop\n        else\n          (loc, Method { key = key'; value = fn' })\n      | (loc, Get { key; value = fn; comments }) ->\n        let key' = this#object_key key in\n        let fn' = map_loc this#function_expression_or_method fn in\n        let comments' = this#syntax_opt comments in\n        if key == key' && fn == fn' && comments == comments' then\n          prop\n        else\n          (loc, Get { key = key'; value = fn'; comments = comments' })\n      | (loc, Set { key; value = fn; comments }) ->\n        let key' = this#object_key key in\n        let fn' = map_loc this#function_expression_or_method fn in\n        let comments' = this#syntax_opt comments in\n        if key == key' && fn == fn' && comments == comments' then\n          prop\n        else\n          (loc, Set { key = key'; value = fn'; comments 
= comments' })\n\n    method object_key (key : ('loc, 'loc) Ast.Expression.Object.Property.key) =\n      let open Ast.Expression.Object.Property in\n      match key with\n      | Literal literal -> id this#object_key_literal literal key (fun lit -> Literal lit)\n      | Identifier ident -> id this#object_key_identifier ident key (fun ident -> Identifier ident)\n      | PrivateName ident -> id this#private_name ident key (fun ident -> PrivateName ident)\n      | Computed computed -> id this#object_key_computed computed key (fun expr -> Computed expr)\n\n    method object_key_literal (literal : 'loc * 'loc Ast.Literal.t) =\n      let (loc, lit) = literal in\n      id_loc this#literal loc lit literal (fun lit -> (loc, lit))\n\n    method object_key_identifier (ident : ('loc, 'loc) Ast.Identifier.t) = this#identifier ident\n\n    method object_key_computed (key : ('loc, 'loc) Ast.ComputedKey.t) = this#computed_key key\n\n    method opaque_type _loc (otype : ('loc, 'loc) Ast.Statement.OpaqueType.t) =\n      let open Ast.Statement.OpaqueType in\n      let { id; tparams; impltype; supertype; comments } = otype in\n      let id' = this#binding_type_identifier id in\n      let tparams' = map_opt this#type_params tparams in\n      let impltype' = map_opt this#type_ impltype in\n      let supertype' = map_opt this#type_ supertype in\n      let comments' = this#syntax_opt comments in\n      if\n        id == id'\n        && impltype == impltype'\n        && tparams == tparams'\n        && impltype == impltype'\n        && supertype == supertype'\n        && comments == comments'\n      then\n        otype\n      else\n        {\n          id = id';\n          tparams = tparams';\n          impltype = impltype';\n          supertype = supertype';\n          comments = comments';\n        }\n\n    method function_param_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =\n      this#binding_pattern ~kind:Ast.Statement.VariableDeclaration.Let expr\n\n    method 
variable_declarator_pattern ~kind (expr : ('loc, 'loc) Ast.Pattern.t) =\n      this#binding_pattern ~kind expr\n\n    method catch_clause_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =\n      this#binding_pattern ~kind:Ast.Statement.VariableDeclaration.Let expr\n\n    method for_in_assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =\n      this#assignment_pattern expr\n\n    method for_of_assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) =\n      this#assignment_pattern expr\n\n    method binding_pattern\n        ?(kind = Ast.Statement.VariableDeclaration.Var) (expr : ('loc, 'loc) Ast.Pattern.t) =\n      this#pattern ~kind expr\n\n    method assignment_pattern (expr : ('loc, 'loc) Ast.Pattern.t) = this#pattern expr\n\n    (* NOTE: Patterns are highly overloaded. A pattern can be a binding pattern,\n       which has a kind (Var/Let/Const, with Var being the default for all pre-ES5\n       bindings), or an assignment pattern, which has no kind. Subterms that are\n       patterns inherit the kind (or lack thereof). 
*)\n    method pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =\n      let open Ast.Pattern in\n      let (loc, patt) = expr in\n      let patt' =\n        match patt with\n        | Object { Object.properties; annot; comments } ->\n          let properties' = map_list (this#pattern_object_p ?kind) properties in\n          let annot' = this#type_annotation_hint annot in\n          let comments' = this#syntax_opt comments in\n          if properties' == properties && annot' == annot && comments' == comments then\n            patt\n          else\n            Object { Object.properties = properties'; annot = annot'; comments = comments' }\n        | Array { Array.elements; annot; comments } ->\n          let elements' = map_list (this#pattern_array_e ?kind) elements in\n          let annot' = this#type_annotation_hint annot in\n          let comments' = this#syntax_opt comments in\n          if comments == comments' && elements' == elements && annot' == annot then\n            patt\n          else\n            Array { Array.elements = elements'; annot = annot'; comments = comments' }\n        | Identifier { Identifier.name; annot; optional } ->\n          let name' = this#pattern_identifier ?kind name in\n          let annot' = this#type_annotation_hint annot in\n          if name == name' && annot == annot' then\n            patt\n          else\n            Identifier { Identifier.name = name'; annot = annot'; optional }\n        | Expression e -> id this#pattern_expression e patt (fun e -> Expression e)\n      in\n      if patt == patt' then\n        expr\n      else\n        (loc, patt')\n\n    method pattern_identifier ?kind (ident : ('loc, 'loc) Ast.Identifier.t) =\n      ignore kind;\n      this#identifier ident\n\n    method pattern_literal ?kind loc (expr : 'loc Ast.Literal.t) =\n      ignore kind;\n      this#literal loc expr\n\n    method pattern_object_p ?kind (p : ('loc, 'loc) Ast.Pattern.Object.property) =\n      let open Ast.Pattern.Object in\n      
match p with\n      | Property prop -> id (this#pattern_object_property ?kind) prop p (fun prop -> Property prop)\n      | RestElement prop ->\n        id (this#pattern_object_rest_property ?kind) prop p (fun prop -> RestElement prop)\n\n    method pattern_object_property ?kind (prop : ('loc, 'loc) Ast.Pattern.Object.Property.t) =\n      let open Ast.Pattern.Object.Property in\n      let (loc, { key; pattern; default; shorthand }) = prop in\n      let key' = this#pattern_object_property_key ?kind key in\n      let pattern' = this#pattern_object_property_pattern ?kind pattern in\n      let default' = map_opt this#expression default in\n      let shorthand' =\n        (* Try to figure out if shorthand should still be true--if\n            key and value change differently, it should become false *)\n        shorthand\n        &&\n        match (key', pattern') with\n        | ( Identifier (_, { Ast.Identifier.name = key_name; _ }),\n            ( _,\n              Ast.Pattern.Identifier\n                { Ast.Pattern.Identifier.name = (_, { Ast.Identifier.name = value_name; _ }); _ }\n            )\n          ) ->\n          String.equal key_name value_name\n        | _ -> key == key' && pattern == pattern'\n      in\n      if key' == key && pattern' == pattern && default' == default && shorthand == shorthand' then\n        prop\n      else\n        (loc, { key = key'; pattern = pattern'; default = default'; shorthand = shorthand' })\n\n    method pattern_object_property_key ?kind (key : ('loc, 'loc) Ast.Pattern.Object.Property.key) =\n      let open Ast.Pattern.Object.Property in\n      match key with\n      | Literal lit ->\n        id (this#pattern_object_property_literal_key ?kind) lit key (fun lit' -> Literal lit')\n      | Identifier identifier ->\n        id (this#pattern_object_property_identifier_key ?kind) identifier key (fun id' ->\n            Identifier id'\n        )\n      | Computed expr ->\n        id (this#pattern_object_property_computed_key ?kind) 
expr key (fun expr' -> Computed expr')\n\n    method pattern_object_property_literal_key ?kind (literal : 'loc * 'loc Ast.Literal.t) =\n      let (loc, key) = literal in\n      id_loc (this#pattern_literal ?kind) loc key literal (fun key' -> (loc, key'))\n\n    method pattern_object_property_identifier_key ?kind (key : ('loc, 'loc) Ast.Identifier.t) =\n      this#pattern_identifier ?kind key\n\n    method pattern_object_property_computed_key ?kind (key : ('loc, 'loc) Ast.ComputedKey.t) =\n      ignore kind;\n      this#computed_key key\n\n    method pattern_object_rest_property ?kind (prop : ('loc, 'loc) Ast.Pattern.RestElement.t) =\n      let open Ast.Pattern.RestElement in\n      let (loc, { argument; comments }) = prop in\n      let argument' = this#pattern_object_rest_property_pattern ?kind argument in\n      let comments' = this#syntax_opt comments in\n      if argument' == argument && comments == comments' then\n        prop\n      else\n        (loc, { argument = argument'; comments = comments' })\n\n    method pattern_object_property_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =\n      this#pattern ?kind expr\n\n    method pattern_object_rest_property_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =\n      this#pattern ?kind expr\n\n    method pattern_array_e ?kind (e : ('loc, 'loc) Ast.Pattern.Array.element) =\n      let open Ast.Pattern.Array in\n      match e with\n      | Hole _ -> e\n      | Element elem -> id (this#pattern_array_element ?kind) elem e (fun elem -> Element elem)\n      | RestElement elem ->\n        id (this#pattern_array_rest_element ?kind) elem e (fun elem -> RestElement elem)\n\n    method pattern_array_element ?kind (elem : ('loc, 'loc) Ast.Pattern.Array.Element.t) =\n      let open Ast.Pattern.Array.Element in\n      let (loc, { argument; default }) = elem in\n      let argument' = this#pattern_array_element_pattern ?kind argument in\n      let default' = map_opt this#expression default in\n      if argument == argument' 
&& default == default' then\n        elem\n      else\n        (loc, { argument = argument'; default = default' })\n\n    method pattern_array_element_pattern ?kind (patt : ('loc, 'loc) Ast.Pattern.t) =\n      this#pattern ?kind patt\n\n    method pattern_array_rest_element ?kind (elem : ('loc, 'loc) Ast.Pattern.RestElement.t) =\n      let open Ast.Pattern.RestElement in\n      let (loc, { argument; comments }) = elem in\n      let argument' = this#pattern_array_rest_element_pattern ?kind argument in\n      let comments' = this#syntax_opt comments in\n      if argument' == argument && comments == comments' then\n        elem\n      else\n        (loc, { argument = argument'; comments = comments' })\n\n    method pattern_array_rest_element_pattern ?kind (expr : ('loc, 'loc) Ast.Pattern.t) =\n      this#pattern ?kind expr\n\n    method pattern_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr\n\n    method predicate (pred : ('loc, 'loc) Ast.Type.Predicate.t) =\n      let open Ast.Type.Predicate in\n      let (loc, { kind; comments }) = pred in\n      let kind' =\n        match kind with\n        | Inferred -> kind\n        | Declared expr -> id this#expression expr kind (fun expr' -> Declared expr')\n      in\n      let comments' = this#syntax_opt comments in\n      if kind == kind' && comments == comments' then\n        pred\n      else\n        (loc, { kind = kind'; comments = comments' })\n\n    method predicate_expression (expr : ('loc, 'loc) Ast.Expression.t) = this#expression expr\n\n    method function_rest_param (expr : ('loc, 'loc) Ast.Function.RestParam.t) =\n      let open Ast.Function.RestParam in\n      let (loc, { argument; comments }) = expr in\n      let argument' = this#function_param_pattern argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        expr\n      else\n        (loc, { argument = argument'; comments = comments' })\n\n    method return _loc 
(stmt : ('loc, 'loc) Ast.Statement.Return.t) =\n      let open Ast.Statement.Return in\n      let { argument; comments; return_out } = stmt in\n      let argument' = map_opt this#expression argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        stmt\n      else\n        { argument = argument'; comments = comments'; return_out }\n\n    method sequence _loc (expr : ('loc, 'loc) Ast.Expression.Sequence.t) =\n      let open Ast.Expression.Sequence in\n      let { expressions; comments } = expr in\n      let expressions' = map_list this#expression expressions in\n      let comments' = this#syntax_opt comments in\n      if expressions == expressions' && comments == comments' then\n        expr\n      else\n        { expressions = expressions'; comments = comments' }\n\n    method toplevel_statement_list (stmts : ('loc, 'loc) Ast.Statement.t list) =\n      this#statement_list stmts\n\n    method statement_list (stmts : ('loc, 'loc) Ast.Statement.t list) =\n      map_list_multiple this#statement_fork_point stmts\n\n    method statement_fork_point (stmt : ('loc, 'loc) Ast.Statement.t) = [this#statement stmt]\n\n    method spread_element (expr : ('loc, 'loc) Ast.Expression.SpreadElement.t) =\n      let open Ast.Expression.SpreadElement in\n      let (loc, { argument; comments }) = expr in\n      let argument' = this#expression argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        expr\n      else\n        (loc, { argument = argument'; comments = comments' })\n\n    method spread_property (expr : ('loc, 'loc) Ast.Expression.Object.SpreadProperty.t) =\n      let open Ast.Expression.Object.SpreadProperty in\n      let (loc, { argument; comments }) = expr in\n      let argument' = this#expression argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n   
     expr\n      else\n        (loc, { argument = argument'; comments = comments' })\n\n    method super_expression _loc (expr : 'loc Ast.Expression.Super.t) =\n      let open Ast.Expression.Super in\n      let { comments } = expr in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        expr\n      else\n        { comments = comments' }\n\n    method switch _loc (switch : ('loc, 'loc) Ast.Statement.Switch.t) =\n      let open Ast.Statement.Switch in\n      let { discriminant; cases; comments; exhaustive_out } = switch in\n      let discriminant' = this#expression discriminant in\n      let cases' = map_list this#switch_case cases in\n      let comments' = this#syntax_opt comments in\n      if discriminant == discriminant' && cases == cases' && comments == comments' then\n        switch\n      else\n        { discriminant = discriminant'; cases = cases'; comments = comments'; exhaustive_out }\n\n    method switch_case (case : ('loc, 'loc) Ast.Statement.Switch.Case.t) =\n      let open Ast.Statement.Switch.Case in\n      let (loc, { test; consequent; comments }) = case in\n      let test' = map_opt this#expression test in\n      let consequent' = this#statement_list consequent in\n      let comments' = this#syntax_opt comments in\n      if test == test' && consequent == consequent' && comments == comments' then\n        case\n      else\n        (loc, { test = test'; consequent = consequent'; comments = comments' })\n\n    method tagged_template _loc (expr : ('loc, 'loc) Ast.Expression.TaggedTemplate.t) =\n      let open Ast.Expression.TaggedTemplate in\n      let { tag; quasi; comments } = expr in\n      let tag' = this#expression tag in\n      let quasi' = map_loc this#template_literal quasi in\n      let comments' = this#syntax_opt comments in\n      if tag == tag' && quasi == quasi' && comments == comments' then\n        expr\n      else\n        { tag = tag'; quasi = quasi'; comments = comments' }\n\n    method 
template_literal _loc (expr : ('loc, 'loc) Ast.Expression.TemplateLiteral.t) =\n      let open Ast.Expression.TemplateLiteral in\n      let { quasis; expressions; comments } = expr in\n      let quasis' = map_list this#template_literal_element quasis in\n      let expressions' = map_list this#expression expressions in\n      let comments' = this#syntax_opt comments in\n      if quasis == quasis' && expressions == expressions' && comments == comments' then\n        expr\n      else\n        { quasis = quasis'; expressions = expressions'; comments = comments' }\n\n    (* TODO *)\n    method template_literal_element (elem : 'loc Ast.Expression.TemplateLiteral.Element.t) = elem\n\n    method this_expression _loc (expr : 'loc Ast.Expression.This.t) =\n      let open Ast.Expression.This in\n      let { comments } = expr in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' then\n        expr\n      else\n        { comments = comments' }\n\n    method throw _loc (stmt : ('loc, 'loc) Ast.Statement.Throw.t) =\n      let open Ast.Statement.Throw in\n      let { argument; comments } = stmt in\n      let argument' = this#expression argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        stmt\n      else\n        { argument = argument'; comments = comments' }\n\n    method try_catch _loc (stmt : ('loc, 'loc) Ast.Statement.Try.t) =\n      let open Ast.Statement.Try in\n      let { block; handler; finalizer; comments } = stmt in\n      let block' = map_loc this#block block in\n      let handler' =\n        match handler with\n        | Some (loc, clause) ->\n          id_loc this#catch_clause loc clause handler (fun clause -> Some (loc, clause))\n        | None -> handler\n      in\n      let finalizer' =\n        match finalizer with\n        | Some (finalizer_loc, block) ->\n          id_loc this#block finalizer_loc block finalizer (fun block -> Some (finalizer_loc, 
block))\n        | None -> finalizer\n      in\n      let comments' = this#syntax_opt comments in\n      if block == block' && handler == handler' && finalizer == finalizer' && comments == comments'\n      then\n        stmt\n      else\n        { block = block'; handler = handler'; finalizer = finalizer'; comments = comments' }\n\n    method type_cast _loc (expr : ('loc, 'loc) Ast.Expression.TypeCast.t) =\n      let open Ast.Expression.TypeCast in\n      let { expression; annot; comments } = expr in\n      let expression' = this#expression expression in\n      let annot' = this#type_annotation annot in\n      let comments' = this#syntax_opt comments in\n      if expression' == expression && annot' == annot && comments' == comments then\n        expr\n      else\n        { expression = expression'; annot = annot'; comments = comments' }\n\n    method unary_expression _loc (expr : ('loc, 'loc) Flow_ast.Expression.Unary.t) =\n      let open Flow_ast.Expression.Unary in\n      let { argument; operator = _; comments } = expr in\n      let argument' = this#expression argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        expr\n      else\n        { expr with argument = argument'; comments = comments' }\n\n    method update_expression _loc (expr : ('loc, 'loc) Ast.Expression.Update.t) =\n      let open Ast.Expression.Update in\n      let { argument; operator = _; prefix = _; comments } = expr in\n      let argument' = this#expression argument in\n      let comments' = this#syntax_opt comments in\n      if argument == argument' && comments == comments' then\n        expr\n      else\n        { expr with argument = argument'; comments = comments' }\n\n    method variable_declaration _loc (decl : ('loc, 'loc) Ast.Statement.VariableDeclaration.t) =\n      let open Ast.Statement.VariableDeclaration in\n      let { declarations; kind; comments } = decl in\n      let decls' = map_list 
(this#variable_declarator ~kind) declarations in\n      let comments' = this#syntax_opt comments in\n      if declarations == decls' && comments == comments' then\n        decl\n      else\n        { declarations = decls'; kind; comments = comments' }\n\n    method variable_declarator\n        ~kind (decl : ('loc, 'loc) Ast.Statement.VariableDeclaration.Declarator.t) =\n      let open Ast.Statement.VariableDeclaration.Declarator in\n      let (loc, { id; init }) = decl in\n      let id' = this#variable_declarator_pattern ~kind id in\n      let init' = map_opt this#expression init in\n      if id == id' && init == init' then\n        decl\n      else\n        (loc, { id = id'; init = init' })\n\n    method while_ _loc (stuff : ('loc, 'loc) Ast.Statement.While.t) =\n      let open Ast.Statement.While in\n      let { test; body; comments } = stuff in\n      let test' = this#predicate_expression test in\n      let body' = this#statement body in\n      let comments' = this#syntax_opt comments in\n      if test == test' && body == body' && comments == comments' then\n        stuff\n      else\n        { test = test'; body = body'; comments = comments' }\n\n    method with_ _loc (stuff : ('loc, 'loc) Ast.Statement.With.t) =\n      let open Ast.Statement.With in\n      let { _object; body; comments } = stuff in\n      let _object' = this#expression _object in\n      let body' = this#statement body in\n      let comments' = this#syntax_opt comments in\n      if _object == _object' && body == body' && comments == comments' then\n        stuff\n      else\n        { _object = _object'; body = body'; comments = comments' }\n\n    method type_alias _loc (stuff : ('loc, 'loc) Ast.Statement.TypeAlias.t) =\n      let open Ast.Statement.TypeAlias in\n      let { id; tparams; right; comments } = stuff in\n      let id' = this#binding_type_identifier id in\n      let tparams' = map_opt this#type_params tparams in\n      let right' = this#type_ right in\n      let comments' = 
this#syntax_opt comments in\n      if id == id' && right == right' && tparams == tparams' && comments == comments' then\n        stuff\n      else\n        { id = id'; tparams = tparams'; right = right'; comments = comments' }\n\n    method yield _loc (expr : ('loc, 'loc) Ast.Expression.Yield.t) =\n      let open Ast.Expression.Yield in\n      let { argument; delegate; comments; result_out } = expr in\n      let argument' = map_opt this#expression argument in\n      let comments' = this#syntax_opt comments in\n      if comments == comments' && argument == argument' then\n        expr\n      else\n        { argument = argument'; delegate; comments = comments'; result_out }\n  end\n\nlet fold_program (mappers : 'a mapper list) ast =\n  List.fold_left (fun ast (m : 'a mapper) -> m#program ast) ast mappers\n"
  },
  {
    "path": "analysis/vendor/js_parser/flow_ast_utils.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nopen Flow_ast\n\ntype 'loc binding = 'loc * string\ntype 'loc ident = 'loc * string\ntype 'loc source = 'loc * string\n\nlet rec fold_bindings_of_pattern =\n  Pattern.(\n    let property f acc =\n      Object.(\n        function\n        | Property (_, { Property.pattern = p; _ })\n        | RestElement (_, { RestElement.argument = p; comments = _ }) ->\n          fold_bindings_of_pattern f acc p\n      )\n    in\n    let element f acc =\n      Array.(\n        function\n        | Hole _ -> acc\n        | Element (_, { Element.argument = p; default = _ })\n        | RestElement (_, { RestElement.argument = p; comments = _ }) ->\n          fold_bindings_of_pattern f acc p\n      )\n    in\n    fun f acc -> function\n      | (_, Identifier { Identifier.name; _ }) -> f acc name\n      | (_, Object { Object.properties; _ }) -> List.fold_left (property f) acc properties\n      | (_, Array { Array.elements; _ }) -> List.fold_left (element f) acc elements\n      (* This is for assignment and default param destructuring `[a.b=1]=c`, ignore these for now. 
*)\n      | (_, Expression _) -> acc\n  )\n\nlet fold_bindings_of_variable_declarations f acc declarations =\n  let open Flow_ast.Statement.VariableDeclaration in\n  List.fold_left\n    (fun acc -> function\n      | (_, { Declarator.id = pattern; _ }) ->\n        let has_anno =\n          (* Only the toplevel annotation in a pattern is meaningful *)\n          let open Flow_ast.Pattern in\n          match pattern with\n          | (_, Array { Array.annot = Flow_ast.Type.Available _; _ })\n          | (_, Object { Object.annot = Flow_ast.Type.Available _; _ })\n          | (_, Identifier { Identifier.annot = Flow_ast.Type.Available _; _ }) ->\n            true\n          | _ -> false\n        in\n        fold_bindings_of_pattern (f has_anno) acc pattern)\n    acc\n    declarations\n\nlet partition_directives statements =\n  let open Flow_ast.Statement in\n  let rec helper directives = function\n    | ((_, Expression { Expression.directive = Some _; _ }) as directive) :: rest ->\n      helper (directive :: directives) rest\n    | rest -> (List.rev directives, rest)\n  in\n  helper [] statements\n\nlet hoist_function_declarations stmts =\n  let open Flow_ast.Statement in\n  let (func_decs, other_stmts) =\n    List.partition\n      (function\n        (* function f() {} *)\n        | (_, FunctionDeclaration { Flow_ast.Function.id = Some _; _ })\n        (* export function f() {} *)\n        | ( _,\n            ExportNamedDeclaration\n              {\n                ExportNamedDeclaration.declaration =\n                  Some (_, FunctionDeclaration { Flow_ast.Function.id = Some _; _ });\n                _;\n              }\n          )\n        (* export default function f() {} *)\n        | ( _,\n            ExportDefaultDeclaration\n              {\n                ExportDefaultDeclaration.declaration =\n                  ExportDefaultDeclaration.Declaration\n                    (_, FunctionDeclaration { Flow_ast.Function.id = Some _; _ });\n                _;\n      
        }\n          )\n        (* declare function f(): void; *)\n        | (_, DeclareFunction _)\n        (* declare export function f(): void; *)\n        | ( _,\n            DeclareExportDeclaration DeclareExportDeclaration.{ declaration = Some (Function _); _ }\n          ) ->\n          true\n        | _ -> false)\n      stmts\n  in\n  func_decs @ other_stmts\n\nlet negate_number_literal (value, raw) =\n  let raw_len = String.length raw in\n  let raw =\n    if raw_len > 0 && raw.[0] = '-' then\n      String.sub raw 1 (raw_len - 1)\n    else\n      \"-\" ^ raw\n  in\n  (~-.value, raw)\n\nlet is_call_to_invariant callee =\n  match callee with\n  | (_, Expression.Identifier (_, { Identifier.name = \"invariant\"; _ })) -> true\n  | _ -> false\n\nlet is_call_to_is_array callee =\n  match callee with\n  | ( _,\n      Flow_ast.Expression.Member\n        {\n          Flow_ast.Expression.Member._object =\n            ( _,\n              Flow_ast.Expression.Identifier\n                (_, { Flow_ast.Identifier.name = \"Array\"; comments = _ })\n            );\n          property =\n            Flow_ast.Expression.Member.PropertyIdentifier\n              (_, { Flow_ast.Identifier.name = \"isArray\"; comments = _ });\n          comments = _;\n        }\n    ) ->\n    true\n  | _ -> false\n\nlet is_call_to_object_dot_freeze callee =\n  match callee with\n  | ( _,\n      Flow_ast.Expression.Member\n        {\n          Flow_ast.Expression.Member._object =\n            ( _,\n              Flow_ast.Expression.Identifier\n                (_, { Flow_ast.Identifier.name = \"Object\"; comments = _ })\n            );\n          property =\n            Flow_ast.Expression.Member.PropertyIdentifier\n              (_, { Flow_ast.Identifier.name = \"freeze\"; comments = _ });\n          comments = _;\n        }\n    ) ->\n    true\n  | _ -> false\n\nlet is_call_to_object_static_method callee =\n  match callee with\n  | ( _,\n      Flow_ast.Expression.Member\n        {\n          
Flow_ast.Expression.Member._object =\n            ( _,\n              Flow_ast.Expression.Identifier\n                (_, { Flow_ast.Identifier.name = \"Object\"; comments = _ })\n            );\n          property = Flow_ast.Expression.Member.PropertyIdentifier _;\n          comments = _;\n        }\n    ) ->\n    true\n  | _ -> false\n\nlet loc_of_statement = fst\nlet loc_of_expression = fst\nlet loc_of_pattern = fst\nlet loc_of_ident = fst\nlet name_of_ident (_, { Identifier.name; comments = _ }) = name\nlet source_of_ident (loc, { Identifier.name; comments = _ }) = (loc, name)\nlet ident_of_source ?comments (loc, name) = (loc, { Identifier.name; comments })\nlet mk_comments ?(leading = []) ?(trailing = []) a = { Syntax.leading; trailing; internal = a }\n\nlet mk_comments_opt ?(leading = []) ?(trailing = []) () =\n  match (leading, trailing) with\n  | ([], []) -> None\n  | (_, _) -> Some (mk_comments ~leading ~trailing ())\n\nlet mk_comments_with_internal_opt ?(leading = []) ?(trailing = []) ~internal () =\n  match (leading, trailing, internal) with\n  | ([], [], []) -> None\n  | _ -> Some (mk_comments ~leading ~trailing internal)\n\nlet merge_comments ~inner ~outer =\n  let open Syntax in\n  match (inner, outer) with\n  | (None, c)\n  | (c, None) ->\n    c\n  | (Some inner, Some outer) ->\n    mk_comments_opt\n      ~leading:(outer.leading @ inner.leading)\n      ~trailing:(inner.trailing @ outer.trailing)\n      ()\n\nlet merge_comments_with_internal ~inner ~outer =\n  match (inner, outer) with\n  | (inner, None) -> inner\n  | (None, Some { Syntax.leading; trailing; _ }) ->\n    mk_comments_with_internal_opt ~leading ~trailing ~internal:[] ()\n  | ( Some { Syntax.leading = inner_leading; trailing = inner_trailing; internal },\n      Some { Syntax.leading = outer_leading; trailing = outer_trailing; _ }\n    ) ->\n    mk_comments_with_internal_opt\n      ~leading:(outer_leading @ inner_leading)\n      ~trailing:(inner_trailing @ outer_trailing)\n      
~internal\n      ()\n\nlet split_comments comments =\n  match comments with\n  | None -> (None, None)\n  | Some { Syntax.leading; trailing; _ } ->\n    (mk_comments_opt ~leading (), mk_comments_opt ~trailing ())\n\nlet string_of_assignment_operator op =\n  let open Flow_ast.Expression.Assignment in\n  match op with\n  | PlusAssign -> \"+=\"\n  | MinusAssign -> \"-=\"\n  | MultAssign -> \"*=\"\n  | ExpAssign -> \"**=\"\n  | DivAssign -> \"/=\"\n  | ModAssign -> \"%=\"\n  | LShiftAssign -> \"<<=\"\n  | RShiftAssign -> \">>=\"\n  | RShift3Assign -> \">>>=\"\n  | BitOrAssign -> \"|=\"\n  | BitXorAssign -> \"^=\"\n  | BitAndAssign -> \"&=\"\n  | NullishAssign -> \"??=\"\n  | AndAssign -> \"&&=\"\n  | OrAssign -> \"||=\"\n\nlet string_of_binary_operator op =\n  let open Flow_ast.Expression.Binary in\n  match op with\n  | Equal -> \"==\"\n  | NotEqual -> \"!=\"\n  | StrictEqual -> \"===\"\n  | StrictNotEqual -> \"!==\"\n  | LessThan -> \"<\"\n  | LessThanEqual -> \"<=\"\n  | GreaterThan -> \">\"\n  | GreaterThanEqual -> \">=\"\n  | LShift -> \"<<\"\n  | RShift -> \">>\"\n  | RShift3 -> \">>>\"\n  | Plus -> \"+\"\n  | Minus -> \"-\"\n  | Mult -> \"*\"\n  | Exp -> \"**\"\n  | Div -> \"/\"\n  | Mod -> \"%\"\n  | BitOr -> \"|\"\n  | Xor -> \"^\"\n  | BitAnd -> \"&\"\n  | In -> \"in\"\n  | Instanceof -> \"instanceof\"\n\nmodule ExpressionSort = struct\n  type t =\n    | Array\n    | ArrowFunction\n    | Assignment\n    | Binary\n    | Call\n    | Class\n    | Comprehension\n    | Conditional\n    | Function\n    | Generator\n    | Identifier\n    | Import\n    | JSXElement\n    | JSXFragment\n    | Literal\n    | Logical\n    | Member\n    | MetaProperty\n    | New\n    | Object\n    | OptionalCall\n    | OptionalMember\n    | Sequence\n    | Super\n    | TaggedTemplate\n    | TemplateLiteral\n    | This\n    | TypeCast\n    | Unary\n    | Update\n    | Yield\n\n  let to_string = function\n    | Array -> \"array\"\n    | ArrowFunction -> \"arrow function\"\n    | Assignment -> 
\"assignment expression\"\n    | Binary -> \"binary expression\"\n    | Call -> \"call expression\"\n    | Class -> \"class\"\n    | Comprehension -> \"comprehension expression\"\n    | Conditional -> \"conditional expression\"\n    | Function -> \"function\"\n    | Generator -> \"generator\"\n    | Identifier -> \"identifier\"\n    | Import -> \"import expression\"\n    | JSXElement -> \"JSX element\"\n    | JSXFragment -> \"JSX fragment\"\n    | Literal -> \"literal\"\n    | Logical -> \"logical expression\"\n    | Member -> \"member expression\"\n    | MetaProperty -> \"metaproperty expression\"\n    | New -> \"new expression\"\n    | Object -> \"object\"\n    | OptionalCall -> \"optional call expression\"\n    | OptionalMember -> \"optional member expression\"\n    | Sequence -> \"sequence\"\n    | Super -> \"`super` reference\"\n    | TaggedTemplate -> \"tagged template expression\"\n    | TemplateLiteral -> \"template literal\"\n    | This -> \"`this` reference\"\n    | TypeCast -> \"type cast\"\n    | Unary -> \"unary expression\"\n    | Update -> \"update expression\"\n    | Yield -> \"yield expression\"\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/flow_ast_utils.mli",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\ntype 'loc binding = 'loc * string\n\ntype 'loc ident = 'loc * string \n\ntype 'loc source = 'loc * string \n\nval fold_bindings_of_pattern :\n  ('a -> ('m, 't) Flow_ast.Identifier.t -> 'a) -> 'a -> ('m, 't) Flow_ast.Pattern.t -> 'a\n\nval fold_bindings_of_variable_declarations :\n  (bool -> 'a -> ('m, 't) Flow_ast.Identifier.t -> 'a) ->\n  'a ->\n  ('m, 't) Flow_ast.Statement.VariableDeclaration.Declarator.t list ->\n  'a\n\nval partition_directives :\n  (Loc.t, Loc.t) Flow_ast.Statement.t list ->\n  (Loc.t, Loc.t) Flow_ast.Statement.t list * (Loc.t, Loc.t) Flow_ast.Statement.t list\n\nval hoist_function_declarations :\n  ('a, 'b) Flow_ast.Statement.t list -> ('a, 'b) Flow_ast.Statement.t list\n\nval is_call_to_invariant : ('a, 'b) Flow_ast.Expression.t -> bool\n\nval is_call_to_is_array : ('a, 'b) Flow_ast.Expression.t -> bool\n\nval is_call_to_object_dot_freeze : ('a, 'b) Flow_ast.Expression.t -> bool\n\nval is_call_to_object_static_method : ('a, 'b) Flow_ast.Expression.t -> bool\n\nval negate_number_literal : float * string -> float * string\n\nval loc_of_expression : ('a, 'a) Flow_ast.Expression.t -> 'a\n\nval loc_of_statement : ('a, 'a) Flow_ast.Statement.t -> 'a\n\nval loc_of_pattern : ('a, 'a) Flow_ast.Pattern.t -> 'a\n\nval loc_of_ident : ('a, 'a) Flow_ast.Identifier.t -> 'a\n\nval name_of_ident : ('loc, 'a) Flow_ast.Identifier.t -> string\n\nval source_of_ident : ('a, 'a) Flow_ast.Identifier.t -> 'a source\n\nval ident_of_source :\n  ?comments:('a, unit) Flow_ast.Syntax.t -> 'a source -> ('a, 'a) Flow_ast.Identifier.t\n\nval mk_comments :\n  ?leading:'loc Flow_ast.Comment.t list ->\n  ?trailing:'loc Flow_ast.Comment.t list ->\n  'a ->\n  ('loc, 'a) Flow_ast.Syntax.t\n\nval mk_comments_opt :\n  ?leading:'loc Flow_ast.Comment.t list ->\n  
?trailing:'loc Flow_ast.Comment.t list ->\n  unit ->\n  ('loc, unit) Flow_ast.Syntax.t option\n\nval mk_comments_with_internal_opt :\n  ?leading:'loc Flow_ast.Comment.t list ->\n  ?trailing:'loc Flow_ast.Comment.t list ->\n  internal:'loc Flow_ast.Comment.t list ->\n  unit ->\n  ('loc, 'loc Flow_ast.Comment.t list) Flow_ast.Syntax.t option\n\nval merge_comments :\n  inner:('M, unit) Flow_ast.Syntax.t option ->\n  outer:('M, unit) Flow_ast.Syntax.t option ->\n  ('M, unit) Flow_ast.Syntax.t option\n\nval merge_comments_with_internal :\n  inner:('M, 'loc Flow_ast.Comment.t list) Flow_ast.Syntax.t option ->\n  outer:('M, 'a) Flow_ast.Syntax.t option ->\n  ('M, 'loc Flow_ast.Comment.t list) Flow_ast.Syntax.t option\n\nval split_comments :\n  ('loc, unit) Flow_ast.Syntax.t option ->\n  ('loc, unit) Flow_ast.Syntax.t option * ('loc, unit) Flow_ast.Syntax.t option\n\nmodule ExpressionSort : sig\n  type t =\n    | Array\n    | ArrowFunction\n    | Assignment\n    | Binary\n    | Call\n    | Class\n    | Comprehension\n    | Conditional\n    | Function\n    | Generator\n    | Identifier\n    | Import\n    | JSXElement\n    | JSXFragment\n    | Literal\n    | Logical\n    | Member\n    | MetaProperty\n    | New\n    | Object\n    | OptionalCall\n    | OptionalMember\n    | Sequence\n    | Super\n    | TaggedTemplate\n    | TemplateLiteral\n    | This\n    | TypeCast\n    | Unary\n    | Update\n    | Yield\n  \n\n  val to_string : t -> string\nend\n\nval string_of_assignment_operator : Flow_ast.Expression.Assignment.operator -> string\n\nval string_of_binary_operator : Flow_ast.Expression.Binary.operator -> string\n"
  },
  {
    "path": "analysis/vendor/js_parser/flow_lexer.ml",
    "content": "\nlet __sedlex_table_58 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\"\nlet __sedlex_table_2 =\n  \"\\001\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\"\nlet __sedlex_table_17 =\n  \"\\001\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\"\nlet __sedlex_table_28 =\n  
\"\\001\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\"\nlet __sedlex_table_41 =\n  \"\\001\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\003\\004\\004\\004\\004\\004\\004\\004\\004\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\"\nlet __sedlex_table_52 =\n  \"\\001\\000\\000\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\"\nlet __sedlex_table_70 =\n  
\"\\001\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\"\nlet __sedlex_table_47 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\002\\001\\001\\003\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\004\"\nlet __sedlex_table_57 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_29 =\n  \"\\001\\002\\000\\003\\004\\004\\004\\004\\004\\004\\004\\004\\004\"\nlet __sedlex_table_30 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_42 =\n  \"\\001\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\"\nlet __sedlex_table_5 = \"\\001\\002\"\nlet __sedlex_table_3 =\n  
\"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\"\nlet __sedlex_table_21 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_60 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_83 =\n  
\"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\004\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\005\\002\\002\\002\\006\\005\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\005\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\005\\002\\007\"\nlet __sedlex_table_18 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_23 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_43 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\004\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\005\\006\\006\\006\\006\\006\\006\\006\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\b\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\n\\002\\002\\002\\011\\002\\012\\r\\014\\002\\015\"\nlet __sedlex_table_76 =\n  
\"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\002\\003\\002\\002\\004\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\002\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\005\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\006\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\002\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\002\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_82 =\n  \"\\001\\000\\001\\000\\000\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\"\nlet __sedlex_table_10 = \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\002\"\nlet __sedlex_table_12 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\"\nlet __sedlex_table_33 =\n  
\"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_45 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\002\\001\\001\\003\"\nlet __sedlex_table_78 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\004\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\005\\006\"\nlet __sedlex_table_88 =\n  \"\\001\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\"\nlet __sedlex_table_11 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\002\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet 
__sedlex_table_14 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_16 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\003\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_22 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_27 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\004\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_32 =\n  
\"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\004\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\005\\000\\001\\001\\001\\001\\004\\001\\001\\001\\001\\001\\001\\001\\001\\006\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_38 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_46 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\003\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\003\\001\\001\\001\\001\\001\\001\\001\\001\\004\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_49 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\003\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\004\\000\\001\\001\\001\\001\\003\\001\\001\\001\\001\\001\\001\\001\\001\\005\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_55 =\n  
\"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_59 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\003\\003\\003\\003\\003\\003\\003\\003\\004\\004\\000\\000\\000\\000\\000\\000\\000\\001\\005\\001\\001\\006\\001\\001\\001\\001\\001\\001\\001\\001\\001\\007\\001\\001\\001\\001\\001\\001\\001\\001\\b\\001\\001\\000\\000\\000\\000\\000\\000\\001\\005\\001\\001\\006\\001\\001\\001\\001\\001\\001\\001\\001\\t\\007\\001\\001\\001\\001\\001\\001\\001\\001\\b\\001\\001\"\nlet __sedlex_table_62 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_63 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\003\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\004\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_65 =\n  
\"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\003\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\004\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_68 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\004\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\005\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_73 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\003\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\004\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_75 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\003\\003\\003\\003\\003\\003\\003\\003\\004\\004\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\005\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_77 =\n  
\"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\003\\000\\002\\002\\002\\002\\002\\002\\001\\001\\001\\001\\001\\001\\001\\004\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_89 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_1 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\004\\003\\003\\005\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\006\\007\\b\\t\\n\\011\\007\\012\\r\\014\\015\\016\\017\\018\\019\\020\\021\\021\\021\\021\\021\\021\\021\\021\\021\\022\\023\\024\\025\\026\\027\\028\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\029\\030\\031 
\\t!\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\\"#$%\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\
t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\t\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\002\\t\\t\\002\\002\\t\\t\\t\\t\\002\\t\\002\\002\\002\\002\\002\\002\\t\\002\\t\\t\\t\\002\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\
\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\002\\002\\t\\002\\002\\002\\002\\002\\002\\
002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\
t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\002\\002\\t\\t\\t\\t\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\002\\t\\t\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\002\\t\\t\\t\\t\\t\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\
002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\002\\t\\t\\t\\t\\t\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\t\\t\\t\\t\\t\\t\\002\\002\\002\\t\\t\\t\\002\\t\\t\\t\\t\\002\\002\\002\\t\\t\\002\\t\\002\\t\\t\\002\\002\\002\\t\\t\\002\\002\\002\\t\\t\\t\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\002\\002\\t\\002\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\t\\t\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\t\\002\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\t\\t\\t\\t\\t\\002\\t\\002\\002\\002
\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\t\\t\\t\\t\\002\\002\\002\\t\\002\\002\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\00
2\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\002\\002\\002\\002\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\
\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\
\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\003\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\00
2\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\
002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002
\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\002\\t\\t\\002\\002\\002\\t\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\t\\002\\t\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\002\\t\\002\\002\\002\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\t\\t\\t\\t\\002\\002\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\\002\\002\\t\\t\\t\\002\\t\\t\\t\\t\\t\\t\\t\\002\\002\\002\"\nlet __sedlex_table_61 =\n  
\"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\004\\003\\003\\005\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\006\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\0
02\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\"\nlet __sedlex_table_66 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\004\"\nlet __sedlex_table_72 = \"\\001\\000\\000\\000\\000\\002\"\nlet __sedlex_table_74 =\n  
\"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\004\\003\\003\\005\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\006\\002\\007\\b\\t\\006\\n\\011\\012\\r\\014\\015\\016\\017\\018\\019\\019\\019\\019\\019\\019\\019\\019\\019\\020\\021\\022\\023\\024\\025\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\026\\027\\028\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\029\\030\\031\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\002\\007\\007\\002\\002\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\002\\007\\002\\007\\007\\007\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\007\\007\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\002\\007\\007\\002\\007\\002\\007\\007\\002\\002\\002\\007\\007\\002\\002\\002\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\007\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\002\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\002\
\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\003\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\
\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\007\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\"\nlet __sedlex_table_91 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\004\\003\\003\\005\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\006\\002\\007\\002\\002\\006\\002\\002\\002\\002\\002\\002\\b\\t\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\n\\002\\011\\012\\r\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\014\\002\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\015\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\002\\007\\007\\002\\002\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\002\\007\\002\\007\\007\\007\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\002\\002\\002\\002\\002\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\
\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\007\\007\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\002\\007\\007\\002\\007\\002\\007\\007\\002\\002\\002\\007\\007\\002\\002\\002\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\007\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\007\\002\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\002\\002\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\002\\002\\002\\007\\002\\002\\002\\007\\007\\002\\002\\002\
\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\003\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\
\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\
\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\002\\007\\007\\002\\002\\002\\007\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\
\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\007\\002\\007\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\007\\002\\002\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\007\\007\\007\\007\\002\\002\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\\002\\002\\007\\007\\007\\002\\007\\007\\007\\007\\007\\007\\007\\002\\002\\002\"\nlet __sedlex_table_51 = 
\"\\001\\000\\000\\002\"\nlet __sedlex_table_8 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\004\\002\\002\\002\\002\\004\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\005\"\nlet __sedlex_table_20 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\"\nlet __sedlex_table_69 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\004\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\005\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\006\\007\"\nlet __sedlex_table_15 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_48 =\n  
\"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_81 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_9 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\"\nlet __sedlex_table_26 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\003\"\nlet __sedlex_table_35 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_67 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_36 = \"\\001\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_39 =\n  
\"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\004\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\005\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\006\\002\\002\\002\\007\"\nlet __sedlex_table_50 =\n  \"\\001\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\"\nlet __sedlex_table_90 =\n  \"\\001\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\"\nlet __sedlex_table_37 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\"\nlet __sedlex_table_7 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_13 = \"\\001\\000\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\"\nlet __sedlex_table_53 =\n  
\"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_87 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\002\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_34 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_54 = \"\\001\\000\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\"\nlet __sedlex_table_71 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\002\\000\\002\\000\\000\\003\\004\\004\\004\\004\\004\\004\\004\\004\\004\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_80 = \"\\001\\001\\001\\001\\001\\001\\001\\001\\002\\002\"\nlet __sedlex_table_4 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_79 =\n  
\"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\004\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\005\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\006\"\nlet __sedlex_table_84 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\002\\002\\002\\002\\002\\002\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\002\\004\\002\\002\\003\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\"\nlet __sedlex_table_85 =\n  \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\002\\002\\002\\002\\002\\002\\003\\002\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\002\\002\\002\\002\\002\\002\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\002\\004\\002\\002\\003\\002\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\\003\"\nlet __sedlex_table_64 =\n  
\"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\002\\000\\000\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\000\\001\\001\\000\\000\\001\\001\\001\\001\\000\\001\\000\\000\\000\\000\\000\\000\\001\\000\\001\\001\\001\\000\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\001\\001\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\0
00\\000\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\000\\000\\001\\001\\001\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\001\\001\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\001\\001\\001\\001\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\000\\000\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\001\\001\\001\\001\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\001\\001\\001\\000\\001\\001\\001\\001\\000\\000\\000\\001\\001\\000\\001\\000\\001\\001\\000\\000\\000\\001\\001\\000\\000\\000\\001\\001\\001\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\000\\000\\001\\000\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\0
01\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\001\\000\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\001\\001\\001\\001\\001\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\001\\001\\001\\001\\000\\000\\000\\001\\000\\000\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\000\\000\\000\\000\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\000\\000\\001\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\001\\000\\001\\000\\001\\001\\001\\001\\0
01\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\000\\001\\000\\000\\000\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_86 = \"\\001\\000\\002\"\nlet __sedlex_table_6 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\"\nlet __sedlex_table_24 =\n  \"\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_31 = \"\\001\\002\\002\\002\\002\\002\\002\\002\\002\\002\"\nlet __sedlex_table_25 =\n  
\"\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\"\nlet __sedlex_table_56 =\n  \"\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_44 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_19 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_table_40 =\n  \"\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\"\nlet __sedlex_partition_94 c =\n  if c <= 120 then (-1) else if c <= 121 then 0 else (-1)\nlet __sedlex_partition_50 c =\n  if c <= 8191\n  then (Char.code (String.unsafe_get __sedlex_table_1 (c - (-1)))) - 1\n  else\n    if c <= 194559\n    then\n      (if c <= 69599\n       then\n         (if c <= 43711\n          then\n            (if c <= 12703\n             then\n               (if c <= 11519\n                then\n                  (if c <= 8489\n                   then\n                     (if c <= 8454\n                      then\n                        (if c <= 8304\n                         then\n                 
          (if c <= 8238\n                            then\n                              (if c <= 8231\n                               then (if c <= 8202 then 2 else 1)\n                               else if c <= 8233 then 3 else 1)\n                            else\n                              if c <= 8286\n                              then (if c <= 8239 then 2 else 1)\n                              else if c <= 8287 then 2 else 1)\n                         else\n                           if c <= 8335\n                           then\n                             (if c <= 8318\n                              then (if c <= 8305 then 8 else 1)\n                              else if c <= 8319 then 8 else 1)\n                           else\n                             if c <= 8449\n                             then (if c <= 8348 then 8 else 1)\n                             else if c <= 8450 then 8 else 1)\n                      else\n                        if c <= 8477\n                        then\n                          (if c <= 8468\n                           then\n                             (if c <= 8457\n                              then (if c <= 8455 then 8 else 1)\n                              else if c <= 8467 then 8 else 1)\n                           else\n                             if c <= 8471\n                             then (if c <= 8469 then 8 else 1)\n                             else 8)\n                        else\n                          if c <= 8485\n                          then\n                            (if c <= 8483\n                             then 1\n                             else if c <= 8484 then 8 else 1)\n                          else\n                            if c <= 8487\n                            then (if c <= 8486 then 8 else 1)\n                            else if c <= 8488 then 8 else 1)\n                   else\n                     if c <= 8543\n                     then\n                       
(if c <= 8505\n                        then 8\n                        else\n                          if c <= 8516\n                          then\n                            (if c <= 8507\n                             then 1\n                             else if c <= 8511 then 8 else 1)\n                          else\n                            if c <= 8525\n                            then (if c <= 8521 then 8 else 1)\n                            else if c <= 8526 then 8 else 1)\n                     else\n                       if c <= 11389\n                       then\n                         (if c <= 8584\n                          then 8\n                          else if c <= 11263 then 1 else 8)\n                       else\n                         if c <= 11498\n                         then (if c <= 11492 then 8 else 1)\n                         else\n                           if c <= 11505\n                           then (if c <= 11502 then 8 else 1)\n                           else if c <= 11507 then 8 else 1)\n                else\n                  if c <= 12294\n                  then\n                    (if c <= 11695\n                     then\n                       (if c <= 11630\n                        then\n                          (if c <= 11564\n                           then\n                             (if c <= 11558\n                              then (if c <= 11557 then 8 else 1)\n                              else if c <= 11559 then 8 else 1)\n                           else\n                             if c <= 11567\n                             then (if c <= 11565 then 8 else 1)\n                             else if c <= 11623 then 8 else 1)\n                        else\n                          if c <= 11679\n                          then\n                            (if c <= 11647\n                             then (if c <= 11631 then 8 else 1)\n                             else if c <= 11670 then 8 else 1)\n        
                  else\n                            if c <= 11687\n                            then (if c <= 11686 then 8 else 1)\n                            else if c <= 11694 then 8 else 1)\n                     else\n                       if c <= 11727\n                       then\n                         (if c <= 11711\n                          then\n                            (if c <= 11703\n                             then (if c <= 11702 then 8 else 1)\n                             else if c <= 11710 then 8 else 1)\n                          else\n                            if c <= 11719\n                            then (if c <= 11718 then 8 else 1)\n                            else if c <= 11726 then 8 else 1)\n                       else\n                         if c <= 12287\n                         then\n                           (if c <= 11735\n                            then (if c <= 11734 then 8 else 1)\n                            else if c <= 11742 then 8 else 1)\n                         else\n                           if c <= 12292\n                           then (if c <= 12288 then 2 else 1)\n                           else 8)\n                  else\n                    if c <= 12442\n                    then\n                      (if c <= 12343\n                       then\n                         (if c <= 12320\n                          then (if c <= 12295 then 8 else 1)\n                          else\n                            if c <= 12336\n                            then (if c <= 12329 then 8 else 1)\n                            else if c <= 12341 then 8 else 1)\n                       else\n                         if c <= 12348\n                         then 8\n                         else\n                           if c <= 12352\n                           then 1\n                           else if c <= 12438 then 8 else 1)\n                    else\n                      if c <= 12539\n                      then\n  
                      (if c <= 12447\n                         then 8\n                         else\n                           if c <= 12448\n                           then 1\n                           else if c <= 12538 then 8 else 1)\n                      else\n                        if c <= 12548\n                        then (if c <= 12543 then 8 else 1)\n                        else\n                          if c <= 12592\n                          then (if c <= 12591 then 8 else 1)\n                          else if c <= 12686 then 8 else 1)\n             else\n               if c <= 42999\n               then\n                 (if c <= 42653\n                  then\n                    (if c <= 42239\n                     then\n                       (if c <= 40981\n                        then\n                          (if c <= 13311\n                           then\n                             (if c <= 12783\n                              then (if c <= 12735 then 8 else 1)\n                              else if c <= 12799 then 8 else 1)\n                           else\n                             if c <= 19967\n                             then (if c <= 19903 then 8 else 1)\n                             else 8)\n                        else\n                          if c <= 42191\n                          then (if c <= 42124 then 8 else 1)\n                          else if c <= 42237 then 8 else 1)\n                     else\n                       if c <= 42559\n                       then\n                         (if c <= 42511\n                          then (if c <= 42508 then 8 else 1)\n                          else\n                            if c <= 42537\n                            then (if c <= 42527 then 8 else 1)\n                            else if c <= 42539 then 8 else 1)\n                       else\n                         if c <= 42622\n                         then (if c <= 42606 then 8 else 1)\n                         
else 8)\n                  else\n                    if c <= 42890\n                    then\n                      (if c <= 42785\n                       then\n                         (if c <= 42735\n                          then (if c <= 42655 then 1 else 8)\n                          else\n                            if c <= 42774\n                            then 1\n                            else if c <= 42783 then 8 else 1)\n                       else\n                         if c <= 42887\n                         then 8\n                         else if c <= 42888 then 8 else 1)\n                    else\n                      if c <= 42962\n                      then\n                        (if c <= 42954\n                         then 8\n                         else\n                           if c <= 42959\n                           then 1\n                           else if c <= 42961 then 8 else 1)\n                      else\n                        if c <= 42993\n                        then\n                          (if c <= 42964\n                           then (if c <= 42963 then 8 else 1)\n                           else if c <= 42969 then 8 else 1)\n                        else 8)\n               else\n                 if c <= 43470\n                 then\n                   (if c <= 43137\n                    then\n                      (if c <= 43010\n                       then\n                         (if c <= 43002\n                          then 8\n                          else if c <= 43009 then 8 else 1)\n                       else\n                         if c <= 43019\n                         then\n                           (if c <= 43014\n                            then (if c <= 43013 then 8 else 1)\n                            else if c <= 43018 then 8 else 1)\n                         else\n                           if c <= 43071\n                           then (if c <= 43042 then 8 else 1)\n                       
    else if c <= 43123 then 8 else 1)\n                    else\n                      if c <= 43273\n                      then\n                        (if c <= 43258\n                         then\n                           (if c <= 43249\n                            then (if c <= 43187 then 8 else 1)\n                            else if c <= 43255 then 8 else 1)\n                         else\n                           if c <= 43260\n                           then (if c <= 43259 then 8 else 1)\n                           else if c <= 43262 then 8 else 1)\n                      else\n                        if c <= 43359\n                        then\n                          (if c <= 43311\n                           then (if c <= 43301 then 8 else 1)\n                           else if c <= 43334 then 8 else 1)\n                        else\n                          if c <= 43395\n                          then (if c <= 43388 then 8 else 1)\n                          else if c <= 43442 then 8 else 1)\n                 else\n                   if c <= 43615\n                   then\n                     (if c <= 43513\n                      then\n                        (if c <= 43493\n                         then\n                           (if c <= 43487\n                            then (if c <= 43471 then 8 else 1)\n                            else if c <= 43492 then 8 else 1)\n                         else if c <= 43503 then 8 else 1)\n                      else\n                        if c <= 43583\n                        then\n                          (if c <= 43519\n                           then (if c <= 43518 then 8 else 1)\n                           else if c <= 43560 then 8 else 1)\n                        else\n                          if c <= 43587\n                          then (if c <= 43586 then 8 else 1)\n                          else if c <= 43595 then 8 else 1)\n                   else\n                     if c <= 43645\n      
               then\n                       (if c <= 43638\n                        then 8\n                        else\n                          if c <= 43641\n                          then 1\n                          else if c <= 43642 then 8 else 1)\n                     else\n                       if c <= 43700\n                       then\n                         (if c <= 43696\n                          then (if c <= 43695 then 8 else 1)\n                          else if c <= 43697 then 8 else 1)\n                       else\n                         if c <= 43704\n                         then (if c <= 43702 then 8 else 1)\n                         else if c <= 43709 then 8 else 1)\n          else\n            if c <= 66377\n            then\n              (if c <= 64325\n               then\n                 (if c <= 43887\n                  then\n                    (if c <= 43784\n                     then\n                       (if c <= 43743\n                        then\n                          (if c <= 43738\n                           then\n                             (if c <= 43713\n                              then (if c <= 43712 then 8 else 1)\n                              else if c <= 43714 then 8 else 1)\n                           else if c <= 43741 then 8 else 1)\n                        else\n                          if c <= 43764\n                          then\n                            (if c <= 43761\n                             then (if c <= 43754 then 8 else 1)\n                             else 8)\n                          else\n                            if c <= 43776\n                            then 1\n                            else if c <= 43782 then 8 else 1)\n                     else\n                       if c <= 43823\n                       then\n                         (if c <= 43807\n                          then\n                            (if c <= 43792\n                             then (if c <= 
43790 then 8 else 1)\n                             else if c <= 43798 then 8 else 1)\n                          else\n                            if c <= 43815\n                            then (if c <= 43814 then 8 else 1)\n                            else if c <= 43822 then 8 else 1)\n                       else\n                         if c <= 43880\n                         then\n                           (if c <= 43867\n                            then (if c <= 43866 then 8 else 1)\n                            else 8)\n                         else if c <= 43881 then 8 else 1)\n                  else\n                    if c <= 64274\n                    then\n                      (if c <= 55242\n                       then\n                         (if c <= 44031\n                          then (if c <= 44002 then 8 else 1)\n                          else\n                            if c <= 55215\n                            then (if c <= 55203 then 8 else 1)\n                            else if c <= 55238 then 8 else 1)\n                       else\n                         if c <= 64111\n                         then\n                           (if c <= 63743\n                            then (if c <= 55291 then 8 else 1)\n                            else if c <= 64109 then 8 else 1)\n                         else\n                           if c <= 64255\n                           then (if c <= 64217 then 8 else 1)\n                           else if c <= 64262 then 8 else 1)\n                    else\n                      if c <= 64311\n                      then\n                        (if c <= 64286\n                         then\n                           (if c <= 64284\n                            then (if c <= 64279 then 8 else 1)\n                            else if c <= 64285 then 8 else 1)\n                         else\n                           if c <= 64297\n                           then (if c <= 64296 then 8 else 1)\n               
            else if c <= 64310 then 8 else 1)\n                      else\n                        if c <= 64319\n                        then\n                          (if c <= 64317\n                           then (if c <= 64316 then 8 else 1)\n                           else if c <= 64318 then 8 else 1)\n                        else\n                          if c <= 64322\n                          then (if c <= 64321 then 8 else 1)\n                          else if c <= 64324 then 8 else 1)\n               else\n                 if c <= 65481\n                 then\n                   (if c <= 65312\n                    then\n                      (if c <= 65007\n                       then\n                         (if c <= 64847\n                          then\n                            (if c <= 64466\n                             then (if c <= 64433 then 8 else 1)\n                             else if c <= 64829 then 8 else 1)\n                          else\n                            if c <= 64913\n                            then (if c <= 64911 then 8 else 1)\n                            else if c <= 64967 then 8 else 1)\n                       else\n                         if c <= 65141\n                         then\n                           (if c <= 65135\n                            then (if c <= 65019 then 8 else 1)\n                            else if c <= 65140 then 8 else 1)\n                         else\n                           if c <= 65278\n                           then (if c <= 65276 then 8 else 1)\n                           else if c <= 65279 then 2 else 1)\n                    else\n                      if c <= 65437\n                      then\n                        (if c <= 65381\n                         then\n                           (if c <= 65344\n                            then (if c <= 65338 then 8 else 1)\n                            else if c <= 65370 then 8 else 1)\n                         else 8)\n         
             else\n                        if c <= 65470\n                        then 8\n                        else\n                          if c <= 65473\n                          then 1\n                          else if c <= 65479 then 8 else 1)\n                 else\n                   if c <= 65615\n                   then\n                     (if c <= 65548\n                      then\n                        (if c <= 65497\n                         then\n                           (if c <= 65489\n                            then (if c <= 65487 then 8 else 1)\n                            else if c <= 65495 then 8 else 1)\n                         else\n                           if c <= 65535\n                           then (if c <= 65500 then 8 else 1)\n                           else if c <= 65547 then 8 else 1)\n                      else\n                        if c <= 65595\n                        then\n                          (if c <= 65575\n                           then (if c <= 65574 then 8 else 1)\n                           else if c <= 65594 then 8 else 1)\n                        else\n                          if c <= 65598\n                          then (if c <= 65597 then 8 else 1)\n                          else if c <= 65613 then 8 else 1)\n                   else\n                     if c <= 66207\n                     then\n                       (if c <= 65855\n                        then\n                          (if c <= 65663\n                           then (if c <= 65629 then 8 else 1)\n                           else if c <= 65786 then 8 else 1)\n                        else\n                          if c <= 66175\n                          then (if c <= 65908 then 8 else 1)\n                          else if c <= 66204 then 8 else 1)\n                     else\n                       if c <= 66348\n                       then\n                         (if c <= 66303\n                          then (if c <= 66256 
then 8 else 1)\n                          else if c <= 66335 then 8 else 1)\n                       else 8)\n            else\n              if c <= 67646\n              then\n                (if c <= 66963\n                 then\n                   (if c <= 66717\n                    then\n                      (if c <= 66463\n                       then\n                         (if c <= 66383\n                          then (if c <= 66378 then 8 else 1)\n                          else\n                            if c <= 66431\n                            then (if c <= 66421 then 8 else 1)\n                            else if c <= 66461 then 8 else 1)\n                       else\n                         if c <= 66512\n                         then\n                           (if c <= 66503\n                            then (if c <= 66499 then 8 else 1)\n                            else if c <= 66511 then 8 else 1)\n                         else\n                           if c <= 66559\n                           then (if c <= 66517 then 8 else 1)\n                           else 8)\n                    else\n                      if c <= 66863\n                      then\n                        (if c <= 66775\n                         then\n                           (if c <= 66735\n                            then 1\n                            else if c <= 66771 then 8 else 1)\n                         else\n                           if c <= 66815\n                           then (if c <= 66811 then 8 else 1)\n                           else if c <= 66855 then 8 else 1)\n                      else\n                        if c <= 66939\n                        then\n                          (if c <= 66927\n                           then (if c <= 66915 then 8 else 1)\n                           else if c <= 66938 then 8 else 1)\n                        else\n                          if c <= 66955\n                          then (if c <= 66954 then 8 
else 1)\n                          else if c <= 66962 then 8 else 1)\n                 else\n                   if c <= 67455\n                   then\n                     (if c <= 67002\n                      then\n                        (if c <= 66978\n                         then\n                           (if c <= 66966\n                            then (if c <= 66965 then 8 else 1)\n                            else if c <= 66977 then 8 else 1)\n                         else\n                           if c <= 66994\n                           then (if c <= 66993 then 8 else 1)\n                           else if c <= 67001 then 8 else 1)\n                      else\n                        if c <= 67391\n                        then\n                          (if c <= 67071\n                           then (if c <= 67004 then 8 else 1)\n                           else if c <= 67382 then 8 else 1)\n                        else\n                          if c <= 67423\n                          then (if c <= 67413 then 8 else 1)\n                          else if c <= 67431 then 8 else 1)\n                   else\n                     if c <= 67591\n                     then\n                       (if c <= 67505\n                        then\n                          (if c <= 67462\n                           then (if c <= 67461 then 8 else 1)\n                           else if c <= 67504 then 8 else 1)\n                        else\n                          if c <= 67583\n                          then (if c <= 67514 then 8 else 1)\n                          else if c <= 67589 then 8 else 1)\n                     else\n                       if c <= 67638\n                       then\n                         (if c <= 67593\n                          then (if c <= 67592 then 8 else 1)\n                          else if c <= 67637 then 8 else 1)\n                       else\n                         if c <= 67643\n                         then (if c <= 
67640 then 8 else 1)\n                         else if c <= 67644 then 8 else 1)\n              else\n                if c <= 68296\n                then\n                  (if c <= 68029\n                   then\n                     (if c <= 67827\n                      then\n                        (if c <= 67711\n                         then\n                           (if c <= 67679\n                            then (if c <= 67669 then 8 else 1)\n                            else if c <= 67702 then 8 else 1)\n                         else\n                           if c <= 67807\n                           then (if c <= 67742 then 8 else 1)\n                           else if c <= 67826 then 8 else 1)\n                      else\n                        if c <= 67871\n                        then\n                          (if c <= 67839\n                           then (if c <= 67829 then 8 else 1)\n                           else if c <= 67861 then 8 else 1)\n                        else\n                          if c <= 67967\n                          then (if c <= 67897 then 8 else 1)\n                          else if c <= 68023 then 8 else 1)\n                   else\n                     if c <= 68120\n                     then\n                       (if c <= 68111\n                        then\n                          (if c <= 68095\n                           then (if c <= 68031 then 8 else 1)\n                           else if c <= 68096 then 8 else 1)\n                        else\n                          if c <= 68116\n                          then (if c <= 68115 then 8 else 1)\n                          else if c <= 68119 then 8 else 1)\n                     else\n                       if c <= 68223\n                       then\n                         (if c <= 68191\n                          then (if c <= 68149 then 8 else 1)\n                          else if c <= 68220 then 8 else 1)\n                       else\n                   
      if c <= 68287\n                         then (if c <= 68252 then 8 else 1)\n                         else if c <= 68295 then 8 else 1)\n                else\n                  if c <= 68863\n                  then\n                    (if c <= 68479\n                     then\n                       (if c <= 68415\n                        then\n                          (if c <= 68351\n                           then (if c <= 68324 then 8 else 1)\n                           else if c <= 68405 then 8 else 1)\n                        else\n                          if c <= 68447\n                          then (if c <= 68437 then 8 else 1)\n                          else if c <= 68466 then 8 else 1)\n                     else\n                       if c <= 68735\n                       then\n                         (if c <= 68607\n                          then (if c <= 68497 then 8 else 1)\n                          else if c <= 68680 then 8 else 1)\n                       else\n                         if c <= 68799\n                         then (if c <= 68786 then 8 else 1)\n                         else if c <= 68850 then 8 else 1)\n                  else\n                    if c <= 69414\n                    then\n                      (if c <= 69295\n                       then\n                         (if c <= 69247\n                          then (if c <= 68899 then 8 else 1)\n                          else if c <= 69289 then 8 else 1)\n                       else\n                         if c <= 69375\n                         then (if c <= 69297 then 8 else 1)\n                         else if c <= 69404 then 8 else 1)\n                    else\n                      if c <= 69487\n                      then\n                        (if c <= 69423\n                         then (if c <= 69415 then 8 else 1)\n                         else if c <= 69445 then 8 else 1)\n                      else\n                        if c <= 69551\n             
           then (if c <= 69505 then 8 else 1)\n                        else if c <= 69572 then 8 else 1)\n       else\n         if c <= 120122\n         then\n           (if c <= 72348\n            then\n              (if c <= 70655\n               then\n                 (if c <= 70162\n                  then\n                    (if c <= 69958\n                     then\n                       (if c <= 69762\n                        then\n                          (if c <= 69744\n                           then\n                             (if c <= 69634\n                              then (if c <= 69622 then 8 else 1)\n                              else if c <= 69687 then 8 else 1)\n                           else\n                             if c <= 69748\n                             then (if c <= 69746 then 8 else 1)\n                             else if c <= 69749 then 8 else 1)\n                        else\n                          if c <= 69890\n                          then\n                            (if c <= 69839\n                             then (if c <= 69807 then 8 else 1)\n                             else if c <= 69864 then 8 else 1)\n                          else\n                            if c <= 69955\n                            then (if c <= 69926 then 8 else 1)\n                            else if c <= 69956 then 8 else 1)\n                     else\n                       if c <= 70080\n                       then\n                         (if c <= 70005\n                          then\n                            (if c <= 69967\n                             then (if c <= 69959 then 8 else 1)\n                             else if c <= 70002 then 8 else 1)\n                          else\n                            if c <= 70018\n                            then (if c <= 70006 then 8 else 1)\n                            else if c <= 70066 then 8 else 1)\n                       else\n                         if c <= 70107\n          
               then\n                           (if c <= 70105\n                            then (if c <= 70084 then 8 else 1)\n                            else if c <= 70106 then 8 else 1)\n                         else\n                           if c <= 70143\n                           then (if c <= 70108 then 8 else 1)\n                           else if c <= 70161 then 8 else 1)\n                  else\n                    if c <= 70414\n                    then\n                      (if c <= 70286\n                       then\n                         (if c <= 70279\n                          then\n                            (if c <= 70271\n                             then (if c <= 70187 then 8 else 1)\n                             else if c <= 70278 then 8 else 1)\n                          else\n                            if c <= 70281\n                            then (if c <= 70280 then 8 else 1)\n                            else if c <= 70285 then 8 else 1)\n                       else\n                         if c <= 70319\n                         then\n                           (if c <= 70302\n                            then (if c <= 70301 then 8 else 1)\n                            else if c <= 70312 then 8 else 1)\n                         else\n                           if c <= 70404\n                           then (if c <= 70366 then 8 else 1)\n                           else if c <= 70412 then 8 else 1)\n                    else\n                      if c <= 70452\n                      then\n                        (if c <= 70441\n                         then\n                           (if c <= 70418\n                            then (if c <= 70416 then 8 else 1)\n                            else if c <= 70440 then 8 else 1)\n                         else\n                           if c <= 70449\n                           then (if c <= 70448 then 8 else 1)\n                           else if c <= 70451 then 8 else 1)\n             
         else\n                        if c <= 70479\n                        then\n                          (if c <= 70460\n                           then (if c <= 70457 then 8 else 1)\n                           else if c <= 70461 then 8 else 1)\n                        else\n                          if c <= 70492\n                          then (if c <= 70480 then 8 else 1)\n                          else if c <= 70497 then 8 else 1)\n               else\n                 if c <= 71934\n                 then\n                   (if c <= 71167\n                    then\n                      (if c <= 70851\n                       then\n                         (if c <= 70750\n                          then\n                            (if c <= 70726\n                             then (if c <= 70708 then 8 else 1)\n                             else if c <= 70730 then 8 else 1)\n                          else\n                            if c <= 70783\n                            then (if c <= 70753 then 8 else 1)\n                            else if c <= 70831 then 8 else 1)\n                       else\n                         if c <= 71039\n                         then\n                           (if c <= 70854\n                            then (if c <= 70853 then 8 else 1)\n                            else if c <= 70855 then 8 else 1)\n                         else\n                           if c <= 71127\n                           then (if c <= 71086 then 8 else 1)\n                           else if c <= 71131 then 8 else 1)\n                    else\n                      if c <= 71423\n                      then\n                        (if c <= 71295\n                         then\n                           (if c <= 71235\n                            then (if c <= 71215 then 8 else 1)\n                            else if c <= 71236 then 8 else 1)\n                         else\n                           if c <= 71351\n                           
then (if c <= 71338 then 8 else 1)\n                           else if c <= 71352 then 8 else 1)\n                      else\n                        if c <= 71679\n                        then\n                          (if c <= 71487\n                           then (if c <= 71450 then 8 else 1)\n                           else if c <= 71494 then 8 else 1)\n                        else\n                          if c <= 71839\n                          then (if c <= 71723 then 8 else 1)\n                          else if c <= 71903 then 8 else 1)\n                 else\n                   if c <= 72105\n                   then\n                     (if c <= 71959\n                      then\n                        (if c <= 71947\n                         then\n                           (if c <= 71944\n                            then (if c <= 71942 then 8 else 1)\n                            else if c <= 71945 then 8 else 1)\n                         else\n                           if c <= 71956\n                           then (if c <= 71955 then 8 else 1)\n                           else if c <= 71958 then 8 else 1)\n                      else\n                        if c <= 72000\n                        then\n                          (if c <= 71998\n                           then (if c <= 71983 then 8 else 1)\n                           else if c <= 71999 then 8 else 1)\n                        else\n                          if c <= 72095\n                          then (if c <= 72001 then 8 else 1)\n                          else if c <= 72103 then 8 else 1)\n                   else\n                     if c <= 72202\n                     then\n                       (if c <= 72162\n                        then\n                          (if c <= 72160\n                           then (if c <= 72144 then 8 else 1)\n                           else if c <= 72161 then 8 else 1)\n                        else\n                          if c <= 72191\n     
                     then (if c <= 72163 then 8 else 1)\n                          else if c <= 72192 then 8 else 1)\n                     else\n                       if c <= 72271\n                       then\n                         (if c <= 72249\n                          then (if c <= 72242 then 8 else 1)\n                          else if c <= 72250 then 8 else 1)\n                       else\n                         if c <= 72283\n                         then (if c <= 72272 then 8 else 1)\n                         else if c <= 72329 then 8 else 1)\n            else\n              if c <= 94031\n              then\n                (if c <= 73727\n                 then\n                   (if c <= 72970\n                    then\n                      (if c <= 72767\n                       then\n                         (if c <= 72703\n                          then\n                            (if c <= 72367\n                             then (if c <= 72349 then 8 else 1)\n                             else if c <= 72440 then 8 else 1)\n                          else\n                            if c <= 72713\n                            then (if c <= 72712 then 8 else 1)\n                            else if c <= 72750 then 8 else 1)\n                       else\n                         if c <= 72959\n                         then\n                           (if c <= 72817\n                            then (if c <= 72768 then 8 else 1)\n                            else if c <= 72847 then 8 else 1)\n                         else\n                           if c <= 72967\n                           then (if c <= 72966 then 8 else 1)\n                           else if c <= 72969 then 8 else 1)\n                    else\n                      if c <= 73065\n                      then\n                        (if c <= 73055\n                         then\n                           (if c <= 73029\n                            then (if c <= 73008 then 8 else 
1)\n                            else if c <= 73030 then 8 else 1)\n                         else\n                           if c <= 73062\n                           then (if c <= 73061 then 8 else 1)\n                           else if c <= 73064 then 8 else 1)\n                      else\n                        if c <= 73439\n                        then\n                          (if c <= 73111\n                           then (if c <= 73097 then 8 else 1)\n                           else if c <= 73112 then 8 else 1)\n                        else\n                          if c <= 73647\n                          then (if c <= 73458 then 8 else 1)\n                          else if c <= 73648 then 8 else 1)\n                 else\n                   if c <= 92783\n                   then\n                     (if c <= 77823\n                      then\n                        (if c <= 74879\n                         then\n                           (if c <= 74751\n                            then (if c <= 74649 then 8 else 1)\n                            else if c <= 74862 then 8 else 1)\n                         else\n                           if c <= 77711\n                           then (if c <= 75075 then 8 else 1)\n                           else if c <= 77808 then 8 else 1)\n                      else\n                        if c <= 92159\n                        then\n                          (if c <= 82943\n                           then (if c <= 78894 then 8 else 1)\n                           else if c <= 83526 then 8 else 1)\n                        else\n                          if c <= 92735\n                          then (if c <= 92728 then 8 else 1)\n                          else if c <= 92766 then 8 else 1)\n                   else\n                     if c <= 93026\n                     then\n                       (if c <= 92927\n                        then\n                          (if c <= 92879\n                           then 
(if c <= 92862 then 8 else 1)\n                           else if c <= 92909 then 8 else 1)\n                        else\n                          if c <= 92991\n                          then (if c <= 92975 then 8 else 1)\n                          else if c <= 92995 then 8 else 1)\n                     else\n                       if c <= 93759\n                       then\n                         (if c <= 93052\n                          then (if c <= 93047 then 8 else 1)\n                          else if c <= 93071 then 8 else 1)\n                       else\n                         if c <= 93951\n                         then (if c <= 93823 then 8 else 1)\n                         else if c <= 94026 then 8 else 1)\n              else\n                if c <= 113791\n                then\n                  (if c <= 110580\n                   then\n                     (if c <= 94207\n                      then\n                        (if c <= 94175\n                         then\n                           (if c <= 94098\n                            then (if c <= 94032 then 8 else 1)\n                            else if c <= 94111 then 8 else 1)\n                         else\n                           if c <= 94178\n                           then (if c <= 94177 then 8 else 1)\n                           else if c <= 94179 then 8 else 1)\n                      else\n                        if c <= 101631\n                        then\n                          (if c <= 100351\n                           then (if c <= 100343 then 8 else 1)\n                           else if c <= 101589 then 8 else 1)\n                        else\n                          if c <= 110575\n                          then (if c <= 101640 then 8 else 1)\n                          else if c <= 110579 then 8 else 1)\n                   else\n                     if c <= 110947\n                     then\n                       (if c <= 110591\n                        then\n   
                       (if c <= 110588\n                           then (if c <= 110587 then 8 else 1)\n                           else if c <= 110590 then 8 else 1)\n                        else\n                          if c <= 110927\n                          then (if c <= 110882 then 8 else 1)\n                          else if c <= 110930 then 8 else 1)\n                     else\n                       if c <= 113663\n                       then\n                         (if c <= 110959\n                          then (if c <= 110951 then 8 else 1)\n                          else if c <= 111355 then 8 else 1)\n                       else\n                         if c <= 113775\n                         then (if c <= 113770 then 8 else 1)\n                         else if c <= 113788 then 8 else 1)\n                else\n                  if c <= 119981\n                  then\n                    (if c <= 119965\n                     then\n                       (if c <= 119807\n                        then\n                          (if c <= 113807\n                           then (if c <= 113800 then 8 else 1)\n                           else if c <= 113817 then 8 else 1)\n                        else\n                          if c <= 119893\n                          then (if c <= 119892 then 8 else 1)\n                          else if c <= 119964 then 8 else 1)\n                     else\n                       if c <= 119972\n                       then\n                         (if c <= 119969\n                          then (if c <= 119967 then 8 else 1)\n                          else if c <= 119970 then 8 else 1)\n                       else\n                         if c <= 119976\n                         then (if c <= 119974 then 8 else 1)\n                         else if c <= 119980 then 8 else 1)\n                  else\n                    if c <= 120070\n                    then\n                      (if c <= 119996\n                    
   then\n                         (if c <= 119994\n                          then (if c <= 119993 then 8 else 1)\n                          else if c <= 119995 then 8 else 1)\n                       else\n                         if c <= 120004\n                         then (if c <= 120003 then 8 else 1)\n                         else if c <= 120069 then 8 else 1)\n                    else\n                      if c <= 120085\n                      then\n                        (if c <= 120076\n                         then (if c <= 120074 then 8 else 1)\n                         else if c <= 120084 then 8 else 1)\n                      else\n                        if c <= 120093\n                        then (if c <= 120092 then 8 else 1)\n                        else if c <= 120121 then 8 else 1)\n         else\n           if c <= 131071\n           then\n             (if c <= 126468\n              then\n                (if c <= 122623\n                 then\n                   (if c <= 120571\n                    then\n                      (if c <= 120145\n                       then\n                         (if c <= 120133\n                          then\n                            (if c <= 120127\n                             then (if c <= 120126 then 8 else 1)\n                             else if c <= 120132 then 8 else 1)\n                          else\n                            if c <= 120137\n                            then (if c <= 120134 then 8 else 1)\n                            else if c <= 120144 then 8 else 1)\n                       else\n                         if c <= 120513\n                         then\n                           (if c <= 120487\n                            then (if c <= 120485 then 8 else 1)\n                            else if c <= 120512 then 8 else 1)\n                         else\n                           if c <= 120539\n                           then (if c <= 120538 then 8 else 1)\n                        
   else if c <= 120570 then 8 else 1)\n                    else\n                      if c <= 120687\n                      then\n                        (if c <= 120629\n                         then\n                           (if c <= 120597\n                            then (if c <= 120596 then 8 else 1)\n                            else if c <= 120628 then 8 else 1)\n                         else\n                           if c <= 120655\n                           then (if c <= 120654 then 8 else 1)\n                           else if c <= 120686 then 8 else 1)\n                      else\n                        if c <= 120745\n                        then\n                          (if c <= 120713\n                           then (if c <= 120712 then 8 else 1)\n                           else if c <= 120744 then 8 else 1)\n                        else\n                          if c <= 120771\n                          then (if c <= 120770 then 8 else 1)\n                          else if c <= 120779 then 8 else 1)\n                 else\n                   if c <= 124895\n                   then\n                     (if c <= 123190\n                      then\n                        (if c <= 122654\n                         then 8\n                         else\n                           if c <= 123135\n                           then 1\n                           else if c <= 123180 then 8 else 1)\n                      else\n                        if c <= 123535\n                        then\n                          (if c <= 123213\n                           then (if c <= 123197 then 8 else 1)\n                           else if c <= 123214 then 8 else 1)\n                        else\n                          if c <= 123583\n                          then (if c <= 123565 then 8 else 1)\n                          else if c <= 123627 then 8 else 1)\n                   else\n                     if c <= 124927\n                     then\n         
              (if c <= 124908\n                        then\n                          (if c <= 124903\n                           then (if c <= 124902 then 8 else 1)\n                           else if c <= 124907 then 8 else 1)\n                        else\n                          if c <= 124911\n                          then (if c <= 124910 then 8 else 1)\n                          else if c <= 124926 then 8 else 1)\n                     else\n                       if c <= 125258\n                       then\n                         (if c <= 125183\n                          then (if c <= 125124 then 8 else 1)\n                          else if c <= 125251 then 8 else 1)\n                       else\n                         if c <= 126463\n                         then (if c <= 125259 then 8 else 1)\n                         else if c <= 126467 then 8 else 1)\n              else\n                if c <= 126552\n                then\n                  (if c <= 126529\n                   then\n                     (if c <= 126504\n                      then\n                        (if c <= 126499\n                         then\n                           (if c <= 126496\n                            then (if c <= 126495 then 8 else 1)\n                            else if c <= 126498 then 8 else 1)\n                         else\n                           if c <= 126502\n                           then (if c <= 126500 then 8 else 1)\n                           else if c <= 126503 then 8 else 1)\n                      else\n                        if c <= 126520\n                        then\n                          (if c <= 126515\n                           then (if c <= 126514 then 8 else 1)\n                           else if c <= 126519 then 8 else 1)\n                        else\n                          if c <= 126522\n                          then (if c <= 126521 then 8 else 1)\n                          else if c <= 126523 then 8 else 1)\n      
             else\n                     if c <= 126540\n                     then\n                       (if c <= 126536\n                        then\n                          (if c <= 126534\n                           then (if c <= 126530 then 8 else 1)\n                           else if c <= 126535 then 8 else 1)\n                        else\n                          if c <= 126538\n                          then (if c <= 126537 then 8 else 1)\n                          else if c <= 126539 then 8 else 1)\n                     else\n                       if c <= 126547\n                       then\n                         (if c <= 126544\n                          then (if c <= 126543 then 8 else 1)\n                          else if c <= 126546 then 8 else 1)\n                       else\n                         if c <= 126550\n                         then (if c <= 126548 then 8 else 1)\n                         else if c <= 126551 then 8 else 1)\n                else\n                  if c <= 126579\n                  then\n                    (if c <= 126560\n                     then\n                       (if c <= 126556\n                        then\n                          (if c <= 126554\n                           then (if c <= 126553 then 8 else 1)\n                           else if c <= 126555 then 8 else 1)\n                        else\n                          if c <= 126558\n                          then (if c <= 126557 then 8 else 1)\n                          else if c <= 126559 then 8 else 1)\n                     else\n                       if c <= 126566\n                       then\n                         (if c <= 126563\n                          then (if c <= 126562 then 8 else 1)\n                          else if c <= 126564 then 8 else 1)\n                       else\n                         if c <= 126571\n                         then (if c <= 126570 then 8 else 1)\n                         else if c <= 126578 then 
8 else 1)\n                  else\n                    if c <= 126602\n                    then\n                      (if c <= 126589\n                       then\n                         (if c <= 126584\n                          then (if c <= 126583 then 8 else 1)\n                          else if c <= 126588 then 8 else 1)\n                       else\n                         if c <= 126591\n                         then (if c <= 126590 then 8 else 1)\n                         else if c <= 126601 then 8 else 1)\n                    else\n                      if c <= 126628\n                      then\n                        (if c <= 126624\n                         then (if c <= 126619 then 8 else 1)\n                         else if c <= 126627 then 8 else 1)\n                      else\n                        if c <= 126634\n                        then (if c <= 126633 then 8 else 1)\n                        else if c <= 126651 then 8 else 1)\n           else\n             if c <= 183983\n             then\n               (if c <= 177983\n                then\n                  (if c <= 173823\n                   then (if c <= 173791 then 8 else 1)\n                   else if c <= 177976 then 8 else 1)\n                else\n                  if c <= 178207\n                  then (if c <= 178205 then 8 else 1)\n                  else if c <= 183969 then 8 else 1)\n             else if c <= 191456 then 8 else 1)\n    else (-1)\nlet __sedlex_partition_58 c =\n  if c <= 45 then (-1) else if c <= 46 then 0 else (-1)\nlet __sedlex_partition_51 c =\n  if c <= 8\n  then (-1)\n  else\n    if c <= 5760\n    then (Char.code (String.unsafe_get __sedlex_table_2 (c - 9))) - 1\n    else\n      if c <= 8191\n      then (-1)\n      else\n        if c <= 65279\n        then\n          (if c <= 12288\n           then\n             (if c <= 8239\n              then (if c <= 8202 then 0 else if c <= 8238 then (-1) else 0)\n              else\n                if c <= 
8286\n                then (-1)\n                else if c <= 8287 then 0 else if c <= 12287 then (-1) else 0)\n           else if c <= 65278 then (-1) else 0)\n        else (-1)\nlet __sedlex_partition_21 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 95\n    then (Char.code (String.unsafe_get __sedlex_table_3 c)) - 1\n    else if c <= 96 then (-1) else 0\nlet __sedlex_partition_91 c =\n  if c <= 63 then (-1) else if c <= 64 then 0 else (-1)\nlet __sedlex_partition_112 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 120\n    then (Char.code (String.unsafe_get __sedlex_table_4 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_33 c =\n  if c <= 47 then (-1) else if c <= 57 then 0 else (-1)\nlet __sedlex_partition_102 c =\n  if c <= 91\n  then (-1)\n  else\n    if c <= 93\n    then (Char.code (String.unsafe_get __sedlex_table_5 (c - 92))) - 1\n    else (-1)\nlet __sedlex_partition_104 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 90\n    then (Char.code (String.unsafe_get __sedlex_table_6 c)) - 1\n    else\n      if c <= 92\n      then (-1)\n      else if c <= 8231 then 0 else if c <= 8233 then (-1) else 0\nlet __sedlex_partition_4 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 102\n    then (Char.code (String.unsafe_get __sedlex_table_7 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_18 c =\n  if c <= 92\n  then (Char.code (String.unsafe_get __sedlex_table_8 (c - (-1)))) - 1\n  else 1\nlet __sedlex_partition_42 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 110\n    then (Char.code (String.unsafe_get __sedlex_table_9 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_129 c =\n  if c <= 61 then (-1) else if c <= 62 then 0 else (-1)\nlet __sedlex_partition_130 c =\n  if c <= 123 then (-1) else if c <= 124 then 0 else (-1)\nlet __sedlex_partition_113 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 59\n    then (Char.code (String.unsafe_get __sedlex_table_10 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_115 c =\n  if 
c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_11 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_34 c =\n  if c <= 87\n  then (-1)\n  else\n    if c <= 120\n    then (Char.code (String.unsafe_get __sedlex_table_12 (c - 88))) - 1\n    else (-1)\nlet __sedlex_partition_37 c =\n  if c <= 45\n  then (-1)\n  else\n    if c <= 57\n    then (Char.code (String.unsafe_get __sedlex_table_13 (c - 46))) - 1\n    else (-1)\nlet __sedlex_partition_84 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_14 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_5 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 125\n    then (Char.code (String.unsafe_get __sedlex_table_15 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_62 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_16 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_121 c =\n  if c <= 8\n  then (-1)\n  else\n    if c <= 5760\n    then (Char.code (String.unsafe_get __sedlex_table_17 (c - 9))) - 1\n    else\n      if c <= 8191\n      then (-1)\n      else\n        if c <= 65279\n        then\n          (if c <= 12288\n           then\n             (if c <= 8239\n              then (if c <= 8202 then 0 else if c <= 8238 then (-1) else 0)\n              else\n                if c <= 8286\n                then (-1)\n                else if c <= 8287 then 0 else if c <= 12287 then (-1) else 0)\n           else if c <= 65278 then (-1) else 0)\n        else (-1)\nlet __sedlex_partition_131 c =\n  if c <= 124 then (-1) else if c <= 125 then 0 else (-1)\nlet __sedlex_partition_43 c =\n  if c <= 45\n  then (-1)\n  else\n    if c <= 101\n    then (Char.code (String.unsafe_get __sedlex_table_18 (c - 46))) - 1\n    else (-1)\nlet __sedlex_partition_56 c =\n  if c <= 42\n  then (-1)\n  else\n    if c <= 61\n    then (Char.code (String.unsafe_get 
__sedlex_table_19 (c - 43))) - 1\n    else (-1)\nlet __sedlex_partition_7 c =\n  if c <= 92\n  then (Char.code (String.unsafe_get __sedlex_table_20 (c - (-1)))) - 1\n  else 1\nlet __sedlex_partition_19 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 91\n    then (Char.code (String.unsafe_get __sedlex_table_21 c)) - 1\n    else if c <= 92 then (-1) else 0\nlet __sedlex_partition_105 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_22 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_57 c =\n  if c <= 44\n  then (-1)\n  else\n    if c <= 61\n    then (Char.code (String.unsafe_get __sedlex_table_23 (c - 45))) - 1\n    else (-1)\nlet __sedlex_partition_127 c =\n  if c <= 103 then (-1) else if c <= 104 then 0 else (-1)\nlet __sedlex_partition_28 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 95\n    then (Char.code (String.unsafe_get __sedlex_table_24 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_27 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 110\n    then (Char.code (String.unsafe_get __sedlex_table_25 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_35 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 110\n    then (Char.code (String.unsafe_get __sedlex_table_26 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_79 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_27 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_65 c =\n  if c <= 8\n  then (-1)\n  else\n    if c <= 5760\n    then (Char.code (String.unsafe_get __sedlex_table_28 (c - 9))) - 1\n    else\n      if c <= 8191\n      then (-1)\n      else\n        if c <= 65279\n        then\n          (if c <= 12288\n           then\n             (if c <= 8239\n              then (if c <= 8202 then 0 else if c <= 8238 then (-1) else 0)\n              else\n                if c <= 8286\n                then (-1)\n                else if c <= 8287 then 0 
else if c <= 12287 then (-1) else 0)\n           else if c <= 65278 then (-1) else 0)\n        else (-1)\nlet __sedlex_partition_122 c =\n  if c <= 44\n  then (-1)\n  else\n    if c <= 57\n    then (Char.code (String.unsafe_get __sedlex_table_29 (c - 45))) - 1\n    else (-1)\nlet __sedlex_partition_26 c =\n  if c <= 47 then (-1) else if c <= 49 then 0 else (-1)\nlet __sedlex_partition_31 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 95\n    then (Char.code (String.unsafe_get __sedlex_table_30 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_40 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 57\n    then (Char.code (String.unsafe_get __sedlex_table_31 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_86 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_32 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_93 c =\n  if c <= 114 then (-1) else if c <= 115 then 0 else (-1)\nlet __sedlex_partition_52 c =\n  if c <= 60 then (-1) else if c <= 61 then 0 else (-1)\nlet __sedlex_partition_110 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_33 c)) - 1\n    else\n      if c <= 123\n      then (-1)\n      else if c <= 8231 then 0 else if c <= 8233 then (-1) else 0\nlet __sedlex_partition_10 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 41\n    then (Char.code (String.unsafe_get __sedlex_table_34 c)) - 1\n    else\n      if c <= 42\n      then (-1)\n      else if c <= 8231 then 0 else if c <= 8233 then (-1) else 0\nlet __sedlex_partition_88 c =\n  if c <= 59\n  then (-1)\n  else\n    if c <= 61\n    then (Char.code (String.unsafe_get __sedlex_table_5 (c - 60))) - 1\n    else (-1)\nlet __sedlex_partition_41 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 110\n    then (Char.code (String.unsafe_get __sedlex_table_35 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_92 c =\n  if c <= 96\n  then (-1)\n  else\n    if c <= 
105\n    then (Char.code (String.unsafe_get __sedlex_table_36 (c - 97))) - 1\n    else (-1)\nlet __sedlex_partition_30 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 110\n    then (Char.code (String.unsafe_get __sedlex_table_37 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_89 c =\n  if c <= 60\n  then (-1)\n  else\n    if c <= 62\n    then (Char.code (String.unsafe_get __sedlex_table_5 (c - 61))) - 1\n    else (-1)\nlet __sedlex_partition_22 c =\n  if c <= 122 then (-1) else if c <= 123 then 0 else (-1)\nlet __sedlex_partition_25 c =\n  if c <= 65\n  then (-1)\n  else\n    if c <= 98\n    then (Char.code (String.unsafe_get __sedlex_table_12 (c - 66))) - 1\n    else (-1)\nlet __sedlex_partition_63 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_38 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_20 c =\n  if c <= 96\n  then (Char.code (String.unsafe_get __sedlex_table_39 (c - (-1)))) - 1\n  else 1\nlet __sedlex_partition_96 c =\n  if c <= 115 then (-1) else if c <= 116 then 0 else (-1)\nlet __sedlex_partition_17 c =\n  if c <= 47 then (-1) else if c <= 55 then 0 else (-1)\nlet __sedlex_partition_72 c =\n  if c <= 109 then (-1) else if c <= 110 then 0 else (-1)\nlet __sedlex_partition_99 c =\n  if c <= 60\n  then (-1)\n  else\n    if c <= 124\n    then (Char.code (String.unsafe_get __sedlex_table_40 (c - 61))) - 1\n    else (-1)\nlet __sedlex_partition_68 c =\n  if c <= 110 then (-1) else if c <= 111 then 0 else (-1)\nlet __sedlex_partition_73 c =\n  if c <= 98 then (-1) else if c <= 99 then 0 else (-1)\nlet __sedlex_partition_24 c =\n  if c <= 47 then (-1) else if c <= 48 then 0 else (-1)\nlet __sedlex_partition_123 c =\n  if c <= 8\n  then (-1)\n  else\n    if c <= 5760\n    then (Char.code (String.unsafe_get __sedlex_table_41 (c - 9))) - 1\n    else\n      if c <= 8191\n      then (-1)\n      else\n        if c <= 65279\n        then\n          (if c <= 12288\n           then\n        
     (if c <= 8239\n              then (if c <= 8202 then 0 else if c <= 8238 then (-1) else 0)\n              else\n                if c <= 8286\n                then (-1)\n                else if c <= 8287 then 0 else if c <= 12287 then (-1) else 0)\n           else if c <= 65278 then (-1) else 0)\n        else (-1)\nlet __sedlex_partition_45 c =\n  if c <= 45\n  then (-1)\n  else\n    if c <= 101\n    then (Char.code (String.unsafe_get __sedlex_table_42 (c - 46))) - 1\n    else (-1)\nlet __sedlex_partition_29 c =\n  if c <= 78\n  then (-1)\n  else\n    if c <= 111\n    then (Char.code (String.unsafe_get __sedlex_table_12 (c - 79))) - 1\n    else (-1)\nlet __sedlex_partition_23 c =\n  if c <= 41 then (-1) else if c <= 42 then 0 else (-1)\nlet __sedlex_partition_16 c =\n  if c <= 120\n  then (Char.code (String.unsafe_get __sedlex_table_43 (c - (-1)))) - 1\n  else if c <= 8233 then (if c <= 8231 then 1 else 2) else 1\nlet __sedlex_partition_53 c =\n  if c <= 32 then (-1) else if c <= 33 then 0 else (-1)\nlet __sedlex_partition_54 c =\n  if c <= 37\n  then (-1)\n  else\n    if c <= 61\n    then (Char.code (String.unsafe_get __sedlex_table_44 (c - 38))) - 1\n    else (-1)\nlet __sedlex_partition_106 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 13\n    then (Char.code (String.unsafe_get __sedlex_table_45 c)) - 1\n    else if c <= 8233 then (if c <= 8231 then 0 else 1) else 0\nlet __sedlex_partition_77 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_46 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_9 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 42\n    then (Char.code (String.unsafe_get __sedlex_table_47 c)) - 1\n    else if c <= 8233 then (if c <= 8231 then 0 else 1) else 0\nlet __sedlex_partition_44 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 101\n    then (Char.code (String.unsafe_get __sedlex_table_48 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_59 c =\n  if c 
<= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_49 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_55 c =\n  if c <= 41\n  then (-1)\n  else\n    if c <= 61\n    then (Char.code (String.unsafe_get __sedlex_table_50 (c - 42))) - 1\n    else (-1)\nlet __sedlex_partition_95 c =\n  if c <= 72 then (-1) else if c <= 73 then 0 else (-1)\nlet __sedlex_partition_120 c =\n  if c <= 44\n  then (-1)\n  else\n    if c <= 48\n    then (Char.code (String.unsafe_get __sedlex_table_51 (c - 45))) - 1\n    else (-1)\nlet __sedlex_partition_124 c =\n  if c <= 44\n  then (-1)\n  else\n    if c <= 57\n    then (Char.code (String.unsafe_get __sedlex_table_52 (c - 45))) - 1\n    else (-1)\nlet __sedlex_partition_70 c =\n  if c <= 44 then (-1) else if c <= 45 then 0 else (-1)\nlet __sedlex_partition_71 c =\n  if c <= 104 then (-1) else if c <= 105 then 0 else (-1)\nlet __sedlex_partition_67 c =\n  if c <= 107 then (-1) else if c <= 108 then 0 else (-1)\nlet __sedlex_partition_74 c =\n  if c <= 99 then (-1) else if c <= 100 then 0 else (-1)\nlet __sedlex_partition_36 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 102\n    then (Char.code (String.unsafe_get __sedlex_table_53 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_97 c =\n  if c <= 113 then (-1) else if c <= 114 then 0 else (-1)\nlet __sedlex_partition_47 c =\n  if c <= 45\n  then (-1)\n  else\n    if c <= 57\n    then (Char.code (String.unsafe_get __sedlex_table_54 (c - 46))) - 1\n    else (-1)\nlet __sedlex_partition_80 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_55 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_3 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 123\n    then (Char.code (String.unsafe_get __sedlex_table_56 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_90 c =\n  if c <= 45\n  then (-1)\n  else\n    if c <= 63\n    then (Char.code (String.unsafe_get 
__sedlex_table_57 (c - 46))) - 1\n    else (-1)\nlet __sedlex_partition_8 c =\n  if c <= (-1)\n  then (-1)\n  else if c <= 91 then 0 else if c <= 92 then (-1) else 0\nlet __sedlex_partition_15 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 12\n    then (Char.code (String.unsafe_get __sedlex_table_58 c)) - 1\n    else\n      if c <= 13\n      then (-1)\n      else if c <= 8231 then 0 else if c <= 8233 then (-1) else 0\nlet __sedlex_partition_76 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_59 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_101 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 91\n    then (Char.code (String.unsafe_get __sedlex_table_60 c)) - 1\n    else\n      if c <= 93\n      then (-1)\n      else if c <= 8231 then 0 else if c <= 8233 then (-1) else 0\nlet __sedlex_partition_107 c =\n  if c <= 8191\n  then (Char.code (String.unsafe_get __sedlex_table_61 (c - (-1)))) - 1\n  else\n    if c <= 12287\n    then\n      (if c <= 8238\n       then\n         (if c <= 8231\n          then (if c <= 8202 then 2 else 1)\n          else if c <= 8233 then 3 else 1)\n       else\n         if c <= 8286\n         then (if c <= 8239 then 2 else 1)\n         else if c <= 8287 then 2 else 1)\n    else\n      if c <= 65278\n      then (if c <= 12288 then 2 else 1)\n      else if c <= 65279 then 2 else 1\nlet __sedlex_partition_11 c =\n  if c <= 9 then (-1) else if c <= 10 then 0 else (-1)\nlet __sedlex_partition_82 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_62 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_98 c =\n  if c <= 96 then (-1) else if c <= 97 then 0 else (-1)\nlet __sedlex_partition_64 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_63 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_132 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 
8188\n    then (Char.code (String.unsafe_get __sedlex_table_64 (c - 36))) - 1\n    else\n      if c <= 8304\n      then (-1)\n      else\n        if c <= 201546\n        then\n          (if c <= 69864\n           then\n             (if c <= 43754\n              then\n                (if c <= 40981\n                 then\n                   (if c <= 11623\n                    then\n                      (if c <= 8504\n                       then\n                         (if c <= 8472\n                          then\n                            (if c <= 8450\n                             then\n                               (if c <= 8319\n                                then\n                                  (if c <= 8305\n                                   then 0\n                                   else if c <= 8318 then (-1) else 0)\n                                else\n                                  if c <= 8335\n                                  then (-1)\n                                  else\n                                    if c <= 8348\n                                    then 0\n                                    else if c <= 8449 then (-1) else 0)\n                             else\n                               if c <= 8454\n                               then (-1)\n                               else\n                                 if c <= 8467\n                                 then\n                                   (if c <= 8455\n                                    then 0\n                                    else if c <= 8457 then (-1) else 0)\n                                 else\n                                   if c <= 8468\n                                   then (-1)\n                                   else\n                                     if c <= 8469\n                                     then 0\n                                     else if c <= 8471 then (-1) else 0)\n                          else\n                         
   if c <= 8488\n                            then\n                              (if c <= 8484\n                               then\n                                 (if c <= 8477\n                                  then 0\n                                  else if c <= 8483 then (-1) else 0)\n                               else\n                                 if c <= 8485\n                                 then (-1)\n                                 else\n                                   if c <= 8486\n                                   then 0\n                                   else if c <= 8487 then (-1) else 0)\n                            else if c <= 8489 then (-1) else 0)\n                       else\n                         if c <= 11387\n                         then\n                           (if c <= 8526\n                            then\n                              (if c <= 8511\n                               then\n                                 (if c <= 8505\n                                  then 0\n                                  else if c <= 8507 then (-1) else 0)\n                               else\n                                 if c <= 8516\n                                 then (-1)\n                                 else\n                                   if c <= 8521\n                                   then 0\n                                   else if c <= 8525 then (-1) else 0)\n                            else\n                              if c <= 8543\n                              then (-1)\n                              else\n                                if c <= 8580\n                                then 0\n                                else\n                                  if c <= 8584\n                                  then 0\n                                  else if c <= 11263 then (-1) else 0)\n                         else\n                           if c <= 11507\n                           then\n             
                (if c <= 11492\n                              then 0\n                              else\n                                if c <= 11498\n                                then (-1)\n                                else\n                                  if c <= 11502\n                                  then 0\n                                  else if c <= 11505 then (-1) else 0)\n                           else\n                             if c <= 11519\n                             then (-1)\n                             else\n                               if c <= 11559\n                               then\n                                 (if c <= 11557\n                                  then 0\n                                  else if c <= 11558 then (-1) else 0)\n                               else\n                                 if c <= 11564\n                                 then (-1)\n                                 else\n                                   if c <= 11565\n                                   then 0\n                                   else if c <= 11567 then (-1) else 0)\n                    else\n                      if c <= 11630\n                      then (-1)\n                      else\n                        if c <= 12346\n                        then\n                          (if c <= 11726\n                           then\n                             (if c <= 11694\n                              then\n                                (if c <= 11670\n                                 then\n                                   (if c <= 11631\n                                    then 0\n                                    else if c <= 11647 then (-1) else 0)\n                                 else\n                                   if c <= 11679\n                                   then (-1)\n                                   else\n                                     if c <= 11686\n                                     
then 0\n                                     else if c <= 11687 then (-1) else 0)\n                              else\n                                if c <= 11695\n                                then (-1)\n                                else\n                                  if c <= 11710\n                                  then\n                                    (if c <= 11702\n                                     then 0\n                                     else if c <= 11703 then (-1) else 0)\n                                  else\n                                    if c <= 11711\n                                    then (-1)\n                                    else\n                                      if c <= 11718\n                                      then 0\n                                      else if c <= 11719 then (-1) else 0)\n                           else\n                             if c <= 11727\n                             then (-1)\n                             else\n                               if c <= 12294\n                               then\n                                 (if c <= 11742\n                                  then\n                                    (if c <= 11734\n                                     then 0\n                                     else if c <= 11735 then (-1) else 0)\n                                  else if c <= 12292 then (-1) else 0)\n                               else\n                                 if c <= 12329\n                                 then\n                                   (if c <= 12295\n                                    then 0\n                                    else if c <= 12320 then (-1) else 0)\n                                 else\n                                   if c <= 12336\n                                   then (-1)\n                                   else\n                                     if c <= 12341\n                                     then 0\n   
                                  else if c <= 12343 then (-1) else 0)\n                        else\n                          if c <= 12542\n                          then\n                            (if c <= 12444\n                             then\n                               (if c <= 12348\n                                then 0\n                                else\n                                  if c <= 12352\n                                  then (-1)\n                                  else\n                                    if c <= 12438\n                                    then 0\n                                    else if c <= 12442 then (-1) else 0)\n                             else\n                               if c <= 12447\n                               then 0\n                               else\n                                 if c <= 12448\n                                 then (-1)\n                                 else\n                                   if c <= 12538\n                                   then 0\n                                   else if c <= 12539 then (-1) else 0)\n                          else\n                            if c <= 12735\n                            then\n                              (if c <= 12591\n                               then\n                                 (if c <= 12543\n                                  then 0\n                                  else if c <= 12548 then (-1) else 0)\n                               else\n                                 if c <= 12592\n                                 then (-1)\n                                 else\n                                   if c <= 12686\n                                   then 0\n                                   else if c <= 12703 then (-1) else 0)\n                            else\n                              if c <= 12783\n                              then (-1)\n                              else\n                   
             if c <= 19903\n                                then\n                                  (if c <= 12799\n                                   then 0\n                                   else if c <= 13311 then (-1) else 0)\n                                else if c <= 19967 then (-1) else 0)\n                 else\n                   if c <= 43013\n                   then\n                     (if c <= 42863\n                      then\n                        (if c <= 42605\n                         then\n                           (if c <= 42507\n                            then\n                              (if c <= 42231\n                               then\n                                 (if c <= 42124\n                                  then 0\n                                  else if c <= 42191 then (-1) else 0)\n                               else\n                                 if c <= 42237\n                                 then 0\n                                 else if c <= 42239 then (-1) else 0)\n                            else\n                              if c <= 42527\n                              then\n                                (if c <= 42508\n                                 then 0\n                                 else if c <= 42511 then (-1) else 0)\n                              else\n                                if c <= 42537\n                                then (-1)\n                                else\n                                  if c <= 42539\n                                  then 0\n                                  else if c <= 42559 then (-1) else 0)\n                         else\n                           if c <= 42653\n                           then\n                             (if c <= 42623\n                              then\n                                (if c <= 42606\n                                 then 0\n                                 else if c <= 42622 then (-1) else 0)\n           
                   else 0)\n                           else\n                             if c <= 42655\n                             then (-1)\n                             else\n                               if c <= 42735\n                               then 0\n                               else\n                                 if c <= 42774\n                                 then (-1)\n                                 else\n                                   if c <= 42783\n                                   then 0\n                                   else if c <= 42785 then (-1) else 0)\n                      else\n                        if c <= 42963\n                        then\n                          (if c <= 42894\n                           then\n                             (if c <= 42887\n                              then 0\n                              else\n                                if c <= 42888\n                                then 0\n                                else if c <= 42890 then (-1) else 0)\n                           else\n                             if c <= 42954\n                             then 0\n                             else\n                               if c <= 42959\n                               then (-1)\n                               else\n                                 if c <= 42961\n                                 then 0\n                                 else if c <= 42962 then (-1) else 0)\n                        else\n                          if c <= 42964\n                          then (-1)\n                          else\n                            if c <= 42999\n                            then\n                              (if c <= 42996\n                               then\n                                 (if c <= 42969\n                                  then 0\n                                  else if c <= 42993 then (-1) else 0)\n                               else 0)\n                
            else\n                              if c <= 43002\n                              then 0\n                              else\n                                if c <= 43009\n                                then 0\n                                else if c <= 43010 then (-1) else 0)\n                   else\n                     if c <= 43014\n                     then (-1)\n                     else\n                       if c <= 43518\n                       then\n                         (if c <= 43301\n                          then\n                            (if c <= 43187\n                             then\n                               (if c <= 43042\n                                then\n                                  (if c <= 43018\n                                   then 0\n                                   else if c <= 43019 then (-1) else 0)\n                                else\n                                  if c <= 43071\n                                  then (-1)\n                                  else\n                                    if c <= 43123\n                                    then 0\n                                    else if c <= 43137 then (-1) else 0)\n                             else\n                               if c <= 43249\n                               then (-1)\n                               else\n                                 if c <= 43259\n                                 then\n                                   (if c <= 43255\n                                    then 0\n                                    else if c <= 43258 then (-1) else 0)\n                                 else\n                                   if c <= 43260\n                                   then (-1)\n                                   else\n                                     if c <= 43262\n                                     then 0\n                                     else if c <= 43273 then (-1) else 0)\n           
               else\n                            if c <= 43311\n                            then (-1)\n                            else\n                              if c <= 43471\n                              then\n                                (if c <= 43388\n                                 then\n                                   (if c <= 43334\n                                    then 0\n                                    else if c <= 43359 then (-1) else 0)\n                                 else\n                                   if c <= 43395\n                                   then (-1)\n                                   else\n                                     if c <= 43442\n                                     then 0\n                                     else if c <= 43470 then (-1) else 0)\n                              else\n                                if c <= 43487\n                                then (-1)\n                                else\n                                  if c <= 43494\n                                  then\n                                    (if c <= 43492\n                                     then 0\n                                     else if c <= 43493 then (-1) else 0)\n                                  else\n                                    if c <= 43503\n                                    then 0\n                                    else if c <= 43513 then (-1) else 0)\n                       else\n                         if c <= 43519\n                         then (-1)\n                         else\n                           if c <= 43695\n                           then\n                             (if c <= 43631\n                              then\n                                (if c <= 43586\n                                 then\n                                   (if c <= 43560\n                                    then 0\n                                    else if c <= 43583 then (-1) else 
0)\n                                 else\n                                   if c <= 43587\n                                   then (-1)\n                                   else\n                                     if c <= 43595\n                                     then 0\n                                     else if c <= 43615 then (-1) else 0)\n                              else\n                                if c <= 43638\n                                then 0\n                                else\n                                  if c <= 43641\n                                  then (-1)\n                                  else\n                                    if c <= 43642\n                                    then 0\n                                    else if c <= 43645 then (-1) else 0)\n                           else\n                             if c <= 43696\n                             then (-1)\n                             else\n                               if c <= 43712\n                               then\n                                 (if c <= 43702\n                                  then\n                                    (if c <= 43697\n                                     then 0\n                                     else if c <= 43700 then (-1) else 0)\n                                  else\n                                    if c <= 43704\n                                    then (-1)\n                                    else\n                                      if c <= 43709\n                                      then 0\n                                      else if c <= 43711 then (-1) else 0)\n                               else\n                                 if c <= 43713\n                                 then (-1)\n                                 else\n                                   if c <= 43740\n                                   then\n                                     (if c <= 43714\n                     
                 then 0\n                                      else if c <= 43738 then (-1) else 0)\n                                   else\n                                     if c <= 43741\n                                     then 0\n                                     else if c <= 43743 then (-1) else 0)\n              else\n                if c <= 43761\n                then (-1)\n                else\n                  if c <= 66511\n                  then\n                    (if c <= 65019\n                     then\n                       (if c <= 55291\n                        then\n                          (if c <= 43866\n                           then\n                             (if c <= 43790\n                              then\n                                (if c <= 43764\n                                 then 0\n                                 else\n                                   if c <= 43776\n                                   then (-1)\n                                   else\n                                     if c <= 43782\n                                     then 0\n                                     else if c <= 43784 then (-1) else 0)\n                              else\n                                if c <= 43792\n                                then (-1)\n                                else\n                                  if c <= 43814\n                                  then\n                                    (if c <= 43798\n                                     then 0\n                                     else if c <= 43807 then (-1) else 0)\n                                  else\n                                    if c <= 43815\n                                    then (-1)\n                                    else\n                                      if c <= 43822\n                                      then 0\n                                      else if c <= 43823 then (-1) else 0)\n                        
   else\n                             if c <= 43867\n                             then (-1)\n                             else\n                               if c <= 43967\n                               then\n                                 (if c <= 43880\n                                  then 0\n                                  else\n                                    if c <= 43881\n                                    then 0\n                                    else if c <= 43887 then (-1) else 0)\n                               else\n                                 if c <= 55203\n                                 then\n                                   (if c <= 44002\n                                    then 0\n                                    else if c <= 44031 then (-1) else 0)\n                                 else\n                                   if c <= 55215\n                                   then (-1)\n                                   else\n                                     if c <= 55238\n                                     then 0\n                                     else if c <= 55242 then (-1) else 0)\n                        else\n                          if c <= 63743\n                          then (-1)\n                          else\n                            if c <= 64316\n                            then\n                              (if c <= 64279\n                               then\n                                 (if c <= 64217\n                                  then\n                                    (if c <= 64109\n                                     then 0\n                                     else if c <= 64111 then (-1) else 0)\n                                  else\n                                    if c <= 64255\n                                    then (-1)\n                                    else\n                                      if c <= 64262\n                                      then 0\n        
                              else if c <= 64274 then (-1) else 0)\n                               else\n                                 if c <= 64284\n                                 then (-1)\n                                 else\n                                   if c <= 64296\n                                   then\n                                     (if c <= 64285\n                                      then 0\n                                      else if c <= 64286 then (-1) else 0)\n                                   else\n                                     if c <= 64297\n                                     then (-1)\n                                     else\n                                       if c <= 64310\n                                       then 0\n                                       else if c <= 64311 then (-1) else 0)\n                            else\n                              if c <= 64317\n                              then (-1)\n                              else\n                                if c <= 64433\n                                then\n                                  (if c <= 64321\n                                   then\n                                     (if c <= 64318\n                                      then 0\n                                      else if c <= 64319 then (-1) else 0)\n                                   else\n                                     if c <= 64322\n                                     then (-1)\n                                     else\n                                       if c <= 64324\n                                       then 0\n                                       else if c <= 64325 then (-1) else 0)\n                                else\n                                  if c <= 64466\n                                  then (-1)\n                                  else\n                                    if c <= 64911\n                                    then\n  
                                    (if c <= 64829\n                                       then 0\n                                       else if c <= 64847 then (-1) else 0)\n                                    else\n                                      if c <= 64913\n                                      then (-1)\n                                      else\n                                        if c <= 64967\n                                        then 0\n                                        else if c <= 65007 then (-1) else 0)\n                     else\n                       if c <= 65135\n                       then (-1)\n                       else\n                         if c <= 65594\n                         then\n                           (if c <= 65439\n                            then\n                              (if c <= 65370\n                               then\n                                 (if c <= 65276\n                                  then\n                                    (if c <= 65140\n                                     then 0\n                                     else if c <= 65141 then (-1) else 0)\n                                  else\n                                    if c <= 65312\n                                    then (-1)\n                                    else\n                                      if c <= 65338\n                                      then 0\n                                      else if c <= 65344 then (-1) else 0)\n                               else if c <= 65381 then (-1) else 0)\n                            else\n                              if c <= 65495\n                              then\n                                (if c <= 65479\n                                 then\n                                   (if c <= 65470\n                                    then 0\n                                    else if c <= 65473 then (-1) else 0)\n                                 else\n 
                                  if c <= 65481\n                                   then (-1)\n                                   else\n                                     if c <= 65487\n                                     then 0\n                                     else if c <= 65489 then (-1) else 0)\n                              else\n                                if c <= 65497\n                                then (-1)\n                                else\n                                  if c <= 65547\n                                  then\n                                    (if c <= 65500\n                                     then 0\n                                     else if c <= 65535 then (-1) else 0)\n                                  else\n                                    if c <= 65548\n                                    then (-1)\n                                    else\n                                      if c <= 65574\n                                      then 0\n                                      else if c <= 65575 then (-1) else 0)\n                         else\n                           if c <= 65595\n                           then (-1)\n                           else\n                             if c <= 66335\n                             then\n                               (if c <= 65786\n                                then\n                                  (if c <= 65613\n                                   then\n                                     (if c <= 65597\n                                      then 0\n                                      else if c <= 65598 then (-1) else 0)\n                                   else\n                                     if c <= 65615\n                                     then (-1)\n                                     else\n                                       if c <= 65629\n                                       then 0\n                                       else if c <= 
65663 then (-1) else 0)\n                                else\n                                  if c <= 65855\n                                  then (-1)\n                                  else\n                                    if c <= 66204\n                                    then\n                                      (if c <= 65908\n                                       then 0\n                                       else if c <= 66175 then (-1) else 0)\n                                    else\n                                      if c <= 66207\n                                      then (-1)\n                                      else\n                                        if c <= 66256\n                                        then 0\n                                        else if c <= 66303 then (-1) else 0)\n                             else\n                               if c <= 66348\n                               then (-1)\n                               else\n                                 if c <= 66378\n                                 then 0\n                                 else\n                                   if c <= 66383\n                                   then (-1)\n                                   else\n                                     if c <= 66461\n                                     then\n                                       (if c <= 66421\n                                        then 0\n                                        else if c <= 66431 then (-1) else 0)\n                                     else\n                                       if c <= 66463\n                                       then (-1)\n                                       else\n                                         if c <= 66499\n                                         then 0\n                                         else if c <= 66503 then (-1) else 0)\n                  else\n                    if c <= 66512\n                    then 
(-1)\n                    else\n                      if c <= 67861\n                      then\n                        (if c <= 67382\n                         then\n                           (if c <= 66938\n                            then\n                              (if c <= 66771\n                               then\n                                 (if c <= 66639\n                                  then\n                                    (if c <= 66517\n                                     then 0\n                                     else if c <= 66559 then (-1) else 0)\n                                  else\n                                    if c <= 66717\n                                    then 0\n                                    else if c <= 66735 then (-1) else 0)\n                               else\n                                 if c <= 66775\n                                 then (-1)\n                                 else\n                                   if c <= 66855\n                                   then\n                                     (if c <= 66811\n                                      then 0\n                                      else if c <= 66815 then (-1) else 0)\n                                   else\n                                     if c <= 66863\n                                     then (-1)\n                                     else\n                                       if c <= 66915\n                                       then 0\n                                       else if c <= 66927 then (-1) else 0)\n                            else\n                              if c <= 66939\n                              then (-1)\n                              else\n                                if c <= 66977\n                                then\n                                  (if c <= 66962\n                                   then\n                                     (if c <= 66954\n                     
                 then 0\n                                      else if c <= 66955 then (-1) else 0)\n                                   else\n                                     if c <= 66963\n                                     then (-1)\n                                     else\n                                       if c <= 66965\n                                       then 0\n                                       else if c <= 66966 then (-1) else 0)\n                                else\n                                  if c <= 66978\n                                  then (-1)\n                                  else\n                                    if c <= 67001\n                                    then\n                                      (if c <= 66993\n                                       then 0\n                                       else if c <= 66994 then (-1) else 0)\n                                    else\n                                      if c <= 67002\n                                      then (-1)\n                                      else\n                                        if c <= 67004\n                                        then 0\n                                        else if c <= 67071 then (-1) else 0)\n                         else\n                           if c <= 67391\n                           then (-1)\n                           else\n                             if c <= 67637\n                             then\n                               (if c <= 67504\n                                then\n                                  (if c <= 67431\n                                   then\n                                     (if c <= 67413\n                                      then 0\n                                      else if c <= 67423 then (-1) else 0)\n                                   else\n                                     if c <= 67455\n                                     then (-1)\n          
                           else\n                                       if c <= 67461\n                                       then 0\n                                       else if c <= 67462 then (-1) else 0)\n                                else\n                                  if c <= 67505\n                                  then (-1)\n                                  else\n                                    if c <= 67589\n                                    then\n                                      (if c <= 67514\n                                       then 0\n                                       else if c <= 67583 then (-1) else 0)\n                                    else\n                                      if c <= 67591\n                                      then (-1)\n                                      else\n                                        if c <= 67592\n                                        then 0\n                                        else if c <= 67593 then (-1) else 0)\n                             else\n                               if c <= 67638\n                               then (-1)\n                               else\n                                 if c <= 67702\n                                 then\n                                   (if c <= 67644\n                                    then\n                                      (if c <= 67640\n                                       then 0\n                                       else if c <= 67643 then (-1) else 0)\n                                    else\n                                      if c <= 67646\n                                      then (-1)\n                                      else\n                                        if c <= 67669\n                                        then 0\n                                        else if c <= 67679 then (-1) else 0)\n                                 else\n                                   if c <= 67711\n 
                                  then (-1)\n                                   else\n                                     if c <= 67826\n                                     then\n                                       (if c <= 67742\n                                        then 0\n                                        else if c <= 67807 then (-1) else 0)\n                                     else\n                                       if c <= 67827\n                                       then (-1)\n                                       else\n                                         if c <= 67829\n                                         then 0\n                                         else if c <= 67839 then (-1) else 0)\n                      else\n                        if c <= 67871\n                        then (-1)\n                        else\n                          if c <= 68680\n                          then\n                            (if c <= 68220\n                             then\n                               (if c <= 68096\n                                then\n                                  (if c <= 68023\n                                   then\n                                     (if c <= 67897\n                                      then 0\n                                      else if c <= 67967 then (-1) else 0)\n                                   else\n                                     if c <= 68029\n                                     then (-1)\n                                     else\n                                       if c <= 68031\n                                       then 0\n                                       else if c <= 68095 then (-1) else 0)\n                                else\n                                  if c <= 68111\n                                  then (-1)\n                                  else\n                                    if c <= 68119\n                                    
then\n                                      (if c <= 68115\n                                       then 0\n                                       else if c <= 68116 then (-1) else 0)\n                                    else\n                                      if c <= 68120\n                                      then (-1)\n                                      else\n                                        if c <= 68149\n                                        then 0\n                                        else if c <= 68191 then (-1) else 0)\n                             else\n                               if c <= 68223\n                               then (-1)\n                               else\n                                 if c <= 68405\n                                 then\n                                   (if c <= 68295\n                                    then\n                                      (if c <= 68252\n                                       then 0\n                                       else if c <= 68287 then (-1) else 0)\n                                    else\n                                      if c <= 68296\n                                      then (-1)\n                                      else\n                                        if c <= 68324\n                                        then 0\n                                        else if c <= 68351 then (-1) else 0)\n                                 else\n                                   if c <= 68415\n                                   then (-1)\n                                   else\n                                     if c <= 68466\n                                     then\n                                       (if c <= 68437\n                                        then 0\n                                        else if c <= 68447 then (-1) else 0)\n                                     else\n                                       if c <= 68479\n           
                            then (-1)\n                                       else\n                                         if c <= 68497\n                                         then 0\n                                         else if c <= 68607 then (-1) else 0)\n                          else\n                            if c <= 68735\n                            then (-1)\n                            else\n                              if c <= 69445\n                              then\n                                (if c <= 69289\n                                 then\n                                   (if c <= 68850\n                                    then\n                                      (if c <= 68786\n                                       then 0\n                                       else if c <= 68799 then (-1) else 0)\n                                    else\n                                      if c <= 68863\n                                      then (-1)\n                                      else\n                                        if c <= 68899\n                                        then 0\n                                        else if c <= 69247 then (-1) else 0)\n                                 else\n                                   if c <= 69295\n                                   then (-1)\n                                   else\n                                     if c <= 69404\n                                     then\n                                       (if c <= 69297\n                                        then 0\n                                        else if c <= 69375 then (-1) else 0)\n                                     else\n                                       if c <= 69414\n                                       then (-1)\n                                       else\n                                         if c <= 69415\n                                         then 0\n                           
              else if c <= 69423 then (-1) else 0)\n                              else\n                                if c <= 69487\n                                then (-1)\n                                else\n                                  if c <= 69687\n                                  then\n                                    (if c <= 69572\n                                     then\n                                       (if c <= 69505\n                                        then 0\n                                        else if c <= 69551 then (-1) else 0)\n                                     else\n                                       if c <= 69599\n                                       then (-1)\n                                       else\n                                         if c <= 69622\n                                         then 0\n                                         else if c <= 69634 then (-1) else 0)\n                                  else\n                                    if c <= 69744\n                                    then (-1)\n                                    else\n                                      if c <= 69749\n                                      then\n                                        (if c <= 69746\n                                         then 0\n                                         else if c <= 69748 then (-1) else 0)\n                                      else\n                                        if c <= 69762\n                                        then (-1)\n                                        else\n                                          if c <= 69807\n                                          then 0\n                                          else if c <= 69839 then (-1) else 0)\n           else\n             if c <= 69890\n             then (-1)\n             else\n               if c <= 120512\n               then\n                 (if c <= 72847\n                  then\n  
                  (if c <= 70855\n                     then\n                       (if c <= 70312\n                        then\n                          (if c <= 70106\n                           then\n                             (if c <= 70002\n                              then\n                                (if c <= 69956\n                                 then\n                                   (if c <= 69926\n                                    then 0\n                                    else if c <= 69955 then (-1) else 0)\n                                 else\n                                   if c <= 69958\n                                   then (-1)\n                                   else\n                                     if c <= 69959\n                                     then 0\n                                     else if c <= 69967 then (-1) else 0)\n                              else\n                                if c <= 70005\n                                then (-1)\n                                else\n                                  if c <= 70066\n                                  then\n                                    (if c <= 70006\n                                     then 0\n                                     else if c <= 70018 then (-1) else 0)\n                                  else\n                                    if c <= 70080\n                                    then (-1)\n                                    else\n                                      if c <= 70084\n                                      then 0\n                                      else if c <= 70105 then (-1) else 0)\n                           else\n                             if c <= 70107\n                             then (-1)\n                             else\n                               if c <= 70278\n                               then\n                                 (if c <= 70161\n                                  then\n      
                              (if c <= 70108\n                                     then 0\n                                     else if c <= 70143 then (-1) else 0)\n                                  else\n                                    if c <= 70162\n                                    then (-1)\n                                    else\n                                      if c <= 70187\n                                      then 0\n                                      else if c <= 70271 then (-1) else 0)\n                               else\n                                 if c <= 70279\n                                 then (-1)\n                                 else\n                                   if c <= 70285\n                                   then\n                                     (if c <= 70280\n                                      then 0\n                                      else if c <= 70281 then (-1) else 0)\n                                   else\n                                     if c <= 70286\n                                     then (-1)\n                                     else\n                                       if c <= 70301\n                                       then 0\n                                       else if c <= 70302 then (-1) else 0)\n                        else\n                          if c <= 70319\n                          then (-1)\n                          else\n                            if c <= 70461\n                            then\n                              (if c <= 70440\n                               then\n                                 (if c <= 70412\n                                  then\n                                    (if c <= 70366\n                                     then 0\n                                     else if c <= 70404 then (-1) else 0)\n                                  else\n                                    if c <= 70414\n                               
     then (-1)\n                                    else\n                                      if c <= 70416\n                                      then 0\n                                      else if c <= 70418 then (-1) else 0)\n                               else\n                                 if c <= 70441\n                                 then (-1)\n                                 else\n                                   if c <= 70451\n                                   then\n                                     (if c <= 70448\n                                      then 0\n                                      else if c <= 70449 then (-1) else 0)\n                                   else\n                                     if c <= 70452\n                                     then (-1)\n                                     else\n                                       if c <= 70457\n                                       then 0\n                                       else if c <= 70460 then (-1) else 0)\n                            else\n                              if c <= 70479\n                              then (-1)\n                              else\n                                if c <= 70730\n                                then\n                                  (if c <= 70497\n                                   then\n                                     (if c <= 70480\n                                      then 0\n                                      else if c <= 70492 then (-1) else 0)\n                                   else\n                                     if c <= 70655\n                                     then (-1)\n                                     else\n                                       if c <= 70708\n                                       then 0\n                                       else if c <= 70726 then (-1) else 0)\n                                else\n                                  if c <= 70750\n               
                   then (-1)\n                                  else\n                                    if c <= 70831\n                                    then\n                                      (if c <= 70753\n                                       then 0\n                                       else if c <= 70783 then (-1) else 0)\n                                    else\n                                      if c <= 70851\n                                      then (-1)\n                                      else\n                                        if c <= 70853\n                                        then 0\n                                        else if c <= 70854 then (-1) else 0)\n                     else\n                       if c <= 71039\n                       then (-1)\n                       else\n                         if c <= 71999\n                         then\n                           (if c <= 71494\n                            then\n                              (if c <= 71236\n                               then\n                                 (if c <= 71131\n                                  then\n                                    (if c <= 71086\n                                     then 0\n                                     else if c <= 71127 then (-1) else 0)\n                                  else\n                                    if c <= 71167\n                                    then (-1)\n                                    else\n                                      if c <= 71215\n                                      then 0\n                                      else if c <= 71235 then (-1) else 0)\n                               else\n                                 if c <= 71295\n                                 then (-1)\n                                 else\n                                   if c <= 71352\n                                   then\n                                     (if c <= 71338\n   
                                   then 0\n                                      else if c <= 71351 then (-1) else 0)\n                                   else\n                                     if c <= 71423\n                                     then (-1)\n                                     else\n                                       if c <= 71450\n                                       then 0\n                                       else if c <= 71487 then (-1) else 0)\n                            else\n                              if c <= 71679\n                              then (-1)\n                              else\n                                if c <= 71945\n                                then\n                                  (if c <= 71903\n                                   then\n                                     (if c <= 71723\n                                      then 0\n                                      else if c <= 71839 then (-1) else 0)\n                                   else\n                                     if c <= 71934\n                                     then (-1)\n                                     else\n                                       if c <= 71942\n                                       then 0\n                                       else if c <= 71944 then (-1) else 0)\n                                else\n                                  if c <= 71947\n                                  then (-1)\n                                  else\n                                    if c <= 71958\n                                    then\n                                      (if c <= 71955\n                                       then 0\n                                       else if c <= 71956 then (-1) else 0)\n                                    else\n                                      if c <= 71959\n                                      then (-1)\n                                      else\n                   
                     if c <= 71983\n                                        then 0\n                                        else if c <= 71998 then (-1) else 0)\n                         else\n                           if c <= 72000\n                           then (-1)\n                           else\n                             if c <= 72250\n                             then\n                               (if c <= 72161\n                                then\n                                  (if c <= 72103\n                                   then\n                                     (if c <= 72001\n                                      then 0\n                                      else if c <= 72095 then (-1) else 0)\n                                   else\n                                     if c <= 72105\n                                     then (-1)\n                                     else\n                                       if c <= 72144\n                                       then 0\n                                       else if c <= 72160 then (-1) else 0)\n                                else\n                                  if c <= 72162\n                                  then (-1)\n                                  else\n                                    if c <= 72192\n                                    then\n                                      (if c <= 72163\n                                       then 0\n                                       else if c <= 72191 then (-1) else 0)\n                                    else\n                                      if c <= 72202\n                                      then (-1)\n                                      else\n                                        if c <= 72242\n                                        then 0\n                                        else if c <= 72249 then (-1) else 0)\n                             else\n                               if c <= 72271\n         
                      then (-1)\n                               else\n                                 if c <= 72440\n                                 then\n                                   (if c <= 72329\n                                    then\n                                      (if c <= 72272\n                                       then 0\n                                       else if c <= 72283 then (-1) else 0)\n                                    else\n                                      if c <= 72348\n                                      then (-1)\n                                      else\n                                        if c <= 72349\n                                        then 0\n                                        else if c <= 72367 then (-1) else 0)\n                                 else\n                                   if c <= 72703\n                                   then (-1)\n                                   else\n                                     if c <= 72750\n                                     then\n                                       (if c <= 72712\n                                        then 0\n                                        else if c <= 72713 then (-1) else 0)\n                                     else\n                                       if c <= 72767\n                                       then (-1)\n                                       else\n                                         if c <= 72768\n                                         then 0\n                                         else if c <= 72817 then (-1) else 0)\n                  else\n                    if c <= 72959\n                    then (-1)\n                    else\n                      if c <= 101589\n                      then\n                        (if c <= 83526\n                         then\n                           (if c <= 73112\n                            then\n                              (if c <= 
73030\n                               then\n                                 (if c <= 72969\n                                  then\n                                    (if c <= 72966\n                                     then 0\n                                     else if c <= 72967 then (-1) else 0)\n                                  else\n                                    if c <= 72970\n                                    then (-1)\n                                    else\n                                      if c <= 73008\n                                      then 0\n                                      else if c <= 73029 then (-1) else 0)\n                               else\n                                 if c <= 73055\n                                 then (-1)\n                                 else\n                                   if c <= 73064\n                                   then\n                                     (if c <= 73061\n                                      then 0\n                                      else if c <= 73062 then (-1) else 0)\n                                   else\n                                     if c <= 73065\n                                     then (-1)\n                                     else\n                                       if c <= 73097\n                                       then 0\n                                       else if c <= 73111 then (-1) else 0)\n                            else\n                              if c <= 73439\n                              then (-1)\n                              else\n                                if c <= 74862\n                                then\n                                  (if c <= 73648\n                                   then\n                                     (if c <= 73458\n                                      then 0\n                                      else if c <= 73647 then (-1) else 0)\n                                   
else\n                                     if c <= 73727\n                                     then (-1)\n                                     else\n                                       if c <= 74649\n                                       then 0\n                                       else if c <= 74751 then (-1) else 0)\n                                else\n                                  if c <= 74879\n                                  then (-1)\n                                  else\n                                    if c <= 77808\n                                    then\n                                      (if c <= 75075\n                                       then 0\n                                       else if c <= 77711 then (-1) else 0)\n                                    else\n                                      if c <= 77823\n                                      then (-1)\n                                      else\n                                        if c <= 78894\n                                        then 0\n                                        else if c <= 82943 then (-1) else 0)\n                         else\n                           if c <= 92159\n                           then (-1)\n                           else\n                             if c <= 93071\n                             then\n                               (if c <= 92909\n                                then\n                                  (if c <= 92766\n                                   then\n                                     (if c <= 92728\n                                      then 0\n                                      else if c <= 92735 then (-1) else 0)\n                                   else\n                                     if c <= 92783\n                                     then (-1)\n                                     else\n                                       if c <= 92862\n                                       then 0\n  
                                     else if c <= 92879 then (-1) else 0)\n                                else\n                                  if c <= 92927\n                                  then (-1)\n                                  else\n                                    if c <= 92995\n                                    then\n                                      (if c <= 92975\n                                       then 0\n                                       else if c <= 92991 then (-1) else 0)\n                                    else\n                                      if c <= 93026\n                                      then (-1)\n                                      else\n                                        if c <= 93047\n                                        then 0\n                                        else if c <= 93052 then (-1) else 0)\n                             else\n                               if c <= 93759\n                               then (-1)\n                               else\n                                 if c <= 94111\n                                 then\n                                   (if c <= 94026\n                                    then\n                                      (if c <= 93823\n                                       then 0\n                                       else if c <= 93951 then (-1) else 0)\n                                    else\n                                      if c <= 94031\n                                      then (-1)\n                                      else\n                                        if c <= 94032\n                                        then 0\n                                        else if c <= 94098 then (-1) else 0)\n                                 else\n                                   if c <= 94175\n                                   then (-1)\n                                   else\n                                     if c <= 
94179\n                                     then\n                                       (if c <= 94177\n                                        then 0\n                                        else if c <= 94178 then (-1) else 0)\n                                     else\n                                       if c <= 94207\n                                       then (-1)\n                                       else\n                                         if c <= 100343\n                                         then 0\n                                         else if c <= 100351 then (-1) else 0)\n                      else\n                        if c <= 101631\n                        then (-1)\n                        else\n                          if c <= 119970\n                          then\n                            (if c <= 111355\n                             then\n                               (if c <= 110590\n                                then\n                                  (if c <= 110579\n                                   then\n                                     (if c <= 101640\n                                      then 0\n                                      else if c <= 110575 then (-1) else 0)\n                                   else\n                                     if c <= 110580\n                                     then (-1)\n                                     else\n                                       if c <= 110587\n                                       then 0\n                                       else if c <= 110588 then (-1) else 0)\n                                else\n                                  if c <= 110591\n                                  then (-1)\n                                  else\n                                    if c <= 110930\n                                    then\n                                      (if c <= 110882\n                                       then 0\n               
                        else if c <= 110927 then (-1) else 0)\n                                    else\n                                      if c <= 110947\n                                      then (-1)\n                                      else\n                                        if c <= 110951\n                                        then 0\n                                        else if c <= 110959 then (-1) else 0)\n                             else\n                               if c <= 113663\n                               then (-1)\n                               else\n                                 if c <= 113817\n                                 then\n                                   (if c <= 113788\n                                    then\n                                      (if c <= 113770\n                                       then 0\n                                       else if c <= 113775 then (-1) else 0)\n                                    else\n                                      if c <= 113791\n                                      then (-1)\n                                      else\n                                        if c <= 113800\n                                        then 0\n                                        else if c <= 113807 then (-1) else 0)\n                                 else\n                                   if c <= 119807\n                                   then (-1)\n                                   else\n                                     if c <= 119964\n                                     then\n                                       (if c <= 119892\n                                        then 0\n                                        else if c <= 119893 then (-1) else 0)\n                                     else\n                                       if c <= 119965\n                                       then (-1)\n                                       else\n                     
                    if c <= 119967\n                                         then 0\n                                         else if c <= 119969 then (-1) else 0)\n                          else\n                            if c <= 119972\n                            then (-1)\n                            else\n                              if c <= 120084\n                              then\n                                (if c <= 119995\n                                 then\n                                   (if c <= 119980\n                                    then\n                                      (if c <= 119974\n                                       then 0\n                                       else if c <= 119976 then (-1) else 0)\n                                    else\n                                      if c <= 119981\n                                      then (-1)\n                                      else\n                                        if c <= 119993\n                                        then 0\n                                        else if c <= 119994 then (-1) else 0)\n                                 else\n                                   if c <= 119996\n                                   then (-1)\n                                   else\n                                     if c <= 120069\n                                     then\n                                       (if c <= 120003\n                                        then 0\n                                        else if c <= 120004 then (-1) else 0)\n                                     else\n                                       if c <= 120070\n                                       then (-1)\n                                       else\n                                         if c <= 120074\n                                         then 0\n                                         else if c <= 120076 then (-1) else 0)\n                              
else\n                                if c <= 120085\n                                then (-1)\n                                else\n                                  if c <= 120132\n                                  then\n                                    (if c <= 120121\n                                     then\n                                       (if c <= 120092\n                                        then 0\n                                        else if c <= 120093 then (-1) else 0)\n                                     else\n                                       if c <= 120122\n                                       then (-1)\n                                       else\n                                         if c <= 120126\n                                         then 0\n                                         else if c <= 120127 then (-1) else 0)\n                                  else\n                                    if c <= 120133\n                                    then (-1)\n                                    else\n                                      if c <= 120144\n                                      then\n                                        (if c <= 120134\n                                         then 0\n                                         else if c <= 120137 then (-1) else 0)\n                                      else\n                                        if c <= 120145\n                                        then (-1)\n                                        else\n                                          if c <= 120485\n                                          then 0\n                                          else\n                                            if c <= 120487 then (-1) else 0)\n               else\n                 if c <= 120513\n                 then (-1)\n                 else\n                   if c <= 195101\n                   then\n                     (if c <= 126519\n                  
    then\n                        (if c <= 123214\n                         then\n                           (if c <= 120744\n                            then\n                              (if c <= 120628\n                               then\n                                 (if c <= 120570\n                                  then\n                                    (if c <= 120538\n                                     then 0\n                                     else if c <= 120539 then (-1) else 0)\n                                  else\n                                    if c <= 120571\n                                    then (-1)\n                                    else\n                                      if c <= 120596\n                                      then 0\n                                      else if c <= 120597 then (-1) else 0)\n                               else\n                                 if c <= 120629\n                                 then (-1)\n                                 else\n                                   if c <= 120686\n                                   then\n                                     (if c <= 120654\n                                      then 0\n                                      else if c <= 120655 then (-1) else 0)\n                                   else\n                                     if c <= 120687\n                                     then (-1)\n                                     else\n                                       if c <= 120712\n                                       then 0\n                                       else if c <= 120713 then (-1) else 0)\n                            else\n                              if c <= 120745\n                              then (-1)\n                              else\n                                if c <= 122634\n                                then\n                                  (if c <= 120779\n                                   
then\n                                     (if c <= 120770\n                                      then 0\n                                      else if c <= 120771 then (-1) else 0)\n                                   else if c <= 122623 then (-1) else 0)\n                                else\n                                  if c <= 123180\n                                  then\n                                    (if c <= 122654\n                                     then 0\n                                     else if c <= 123135 then (-1) else 0)\n                                  else\n                                    if c <= 123190\n                                    then (-1)\n                                    else\n                                      if c <= 123197\n                                      then 0\n                                      else if c <= 123213 then (-1) else 0)\n                         else\n                           if c <= 123535\n                           then (-1)\n                           else\n                             if c <= 125251\n                             then\n                               (if c <= 124907\n                                then\n                                  (if c <= 123627\n                                   then\n                                     (if c <= 123565\n                                      then 0\n                                      else if c <= 123583 then (-1) else 0)\n                                   else\n                                     if c <= 124895\n                                     then (-1)\n                                     else\n                                       if c <= 124902\n                                       then 0\n                                       else if c <= 124903 then (-1) else 0)\n                                else\n                                  if c <= 124908\n                                  then (-1)\n     
                             else\n                                    if c <= 124926\n                                    then\n                                      (if c <= 124910\n                                       then 0\n                                       else if c <= 124911 then (-1) else 0)\n                                    else\n                                      if c <= 124927\n                                      then (-1)\n                                      else\n                                        if c <= 125124\n                                        then 0\n                                        else if c <= 125183 then (-1) else 0)\n                             else\n                               if c <= 125258\n                               then (-1)\n                               else\n                                 if c <= 126498\n                                 then\n                                   (if c <= 126467\n                                    then\n                                      (if c <= 125259\n                                       then 0\n                                       else if c <= 126463 then (-1) else 0)\n                                    else\n                                      if c <= 126468\n                                      then (-1)\n                                      else\n                                        if c <= 126495\n                                        then 0\n                                        else if c <= 126496 then (-1) else 0)\n                                 else\n                                   if c <= 126499\n                                   then (-1)\n                                   else\n                                     if c <= 126503\n                                     then\n                                       (if c <= 126500\n                                        then 0\n                                        else 
if c <= 126502 then (-1) else 0)\n                                     else\n                                       if c <= 126504\n                                       then (-1)\n                                       else\n                                         if c <= 126514\n                                         then 0\n                                         else if c <= 126515 then (-1) else 0)\n                      else\n                        if c <= 126520\n                        then (-1)\n                        else\n                          if c <= 126564\n                          then\n                            (if c <= 126546\n                             then\n                               (if c <= 126535\n                                then\n                                  (if c <= 126523\n                                   then\n                                     (if c <= 126521\n                                      then 0\n                                      else if c <= 126522 then (-1) else 0)\n                                   else\n                                     if c <= 126529\n                                     then (-1)\n                                     else\n                                       if c <= 126530\n                                       then 0\n                                       else if c <= 126534 then (-1) else 0)\n                                else\n                                  if c <= 126536\n                                  then (-1)\n                                  else\n                                    if c <= 126539\n                                    then\n                                      (if c <= 126537\n                                       then 0\n                                       else if c <= 126538 then (-1) else 0)\n                                    else\n                                      if c <= 126540\n                                     
 then (-1)\n                                      else\n                                        if c <= 126543\n                                        then 0\n                                        else if c <= 126544 then (-1) else 0)\n                             else\n                               if c <= 126547\n                               then (-1)\n                               else\n                                 if c <= 126555\n                                 then\n                                   (if c <= 126551\n                                    then\n                                      (if c <= 126548\n                                       then 0\n                                       else if c <= 126550 then (-1) else 0)\n                                    else\n                                      if c <= 126552\n                                      then (-1)\n                                      else\n                                        if c <= 126553\n                                        then 0\n                                        else if c <= 126554 then (-1) else 0)\n                                 else\n                                   if c <= 126556\n                                   then (-1)\n                                   else\n                                     if c <= 126559\n                                     then\n                                       (if c <= 126557\n                                        then 0\n                                        else if c <= 126558 then (-1) else 0)\n                                     else\n                                       if c <= 126560\n                                       then (-1)\n                                       else\n                                         if c <= 126562\n                                         then 0\n                                         else if c <= 126563 then (-1) else 0)\n                          
else\n                            if c <= 126566\n                            then (-1)\n                            else\n                              if c <= 126627\n                              then\n                                (if c <= 126588\n                                 then\n                                   (if c <= 126578\n                                    then\n                                      (if c <= 126570\n                                       then 0\n                                       else if c <= 126571 then (-1) else 0)\n                                    else\n                                      if c <= 126579\n                                      then (-1)\n                                      else\n                                        if c <= 126583\n                                        then 0\n                                        else if c <= 126584 then (-1) else 0)\n                                 else\n                                   if c <= 126589\n                                   then (-1)\n                                   else\n                                     if c <= 126601\n                                     then\n                                       (if c <= 126590\n                                        then 0\n                                        else if c <= 126591 then (-1) else 0)\n                                     else\n                                       if c <= 126602\n                                       then (-1)\n                                       else\n                                         if c <= 126619\n                                         then 0\n                                         else if c <= 126624 then (-1) else 0)\n                              else\n                                if c <= 126628\n                                then (-1)\n                                else\n                                  if c <= 177976\n           
                       then\n                                    (if c <= 126651\n                                     then\n                                       (if c <= 126633\n                                        then 0\n                                        else if c <= 126634 then (-1) else 0)\n                                     else\n                                       if c <= 131071\n                                       then (-1)\n                                       else\n                                         if c <= 173791\n                                         then 0\n                                         else if c <= 173823 then (-1) else 0)\n                                  else\n                                    if c <= 177983\n                                    then (-1)\n                                    else\n                                      if c <= 183969\n                                      then\n                                        (if c <= 178205\n                                         then 0\n                                         else if c <= 178207 then (-1) else 0)\n                                      else\n                                        if c <= 183983\n                                        then (-1)\n                                        else\n                                          if c <= 191456\n                                          then 0\n                                          else\n                                            if c <= 194559 then (-1) else 0)\n                   else if c <= 196607 then (-1) else 0)\n        else (-1)\nlet __sedlex_partition_83 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_65 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_128 c =\n  if c <= 106 then (-1) else if c <= 107 then 0 else (-1)\nlet __sedlex_partition_14 c =\n  if c <= 13\n  then (Char.code 
(String.unsafe_get __sedlex_table_66 (c - (-1)))) - 1\n  else if c <= 8233 then (if c <= 8231 then 1 else 2) else 1\nlet __sedlex_partition_46 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 95\n    then (Char.code (String.unsafe_get __sedlex_table_67 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_87 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_68 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_103 c =\n  if c <= 92\n  then (Char.code (String.unsafe_get __sedlex_table_69 (c - (-1)))) - 1\n  else if c <= 8233 then (if c <= 8231 then 1 else 2) else 1\nlet __sedlex_partition_75 c =\n  if c <= 100 then (-1) else if c <= 101 then 0 else (-1)\nlet __sedlex_partition_116 c =\n  if c <= 58 then (-1) else if c <= 59 then 0 else (-1)\nlet __sedlex_partition_125 c =\n  if c <= 8\n  then (-1)\n  else\n    if c <= 5760\n    then (Char.code (String.unsafe_get __sedlex_table_70 (c - 9))) - 1\n    else\n      if c <= 8191\n      then (-1)\n      else\n        if c <= 65279\n        then\n          (if c <= 12288\n           then\n             (if c <= 8239\n              then (if c <= 8202 then 0 else if c <= 8238 then (-1) else 0)\n              else\n                if c <= 8286\n                then (-1)\n                else if c <= 8287 then 0 else if c <= 12287 then (-1) else 0)\n           else if c <= 65278 then (-1) else 0)\n        else (-1)\nlet __sedlex_partition_61 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_71 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_108 c =\n  if c <= 41\n  then (-1)\n  else\n    if c <= 47\n    then (Char.code (String.unsafe_get __sedlex_table_72 (c - 42))) - 1\n    else (-1)\nlet __sedlex_partition_81 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_73 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_126 c =\n  if c <= 
8191\n  then (Char.code (String.unsafe_get __sedlex_table_74 (c - (-1)))) - 1\n  else\n    if c <= 194559\n    then\n      (if c <= 69599\n       then\n         (if c <= 43711\n          then\n            (if c <= 12703\n             then\n               (if c <= 11519\n                then\n                  (if c <= 8489\n                   then\n                     (if c <= 8454\n                      then\n                        (if c <= 8304\n                         then\n                           (if c <= 8238\n                            then\n                              (if c <= 8231\n                               then (if c <= 8202 then 2 else 1)\n                               else if c <= 8233 then 3 else 1)\n                            else\n                              if c <= 8286\n                              then (if c <= 8239 then 2 else 1)\n                              else if c <= 8287 then 2 else 1)\n                         else\n                           if c <= 8335\n                           then\n                             (if c <= 8318\n                              then (if c <= 8305 then 6 else 1)\n                              else if c <= 8319 then 6 else 1)\n                           else\n                             if c <= 8449\n                             then (if c <= 8348 then 6 else 1)\n                             else if c <= 8450 then 6 else 1)\n                      else\n                        if c <= 8477\n                        then\n                          (if c <= 8468\n                           then\n                             (if c <= 8457\n                              then (if c <= 8455 then 6 else 1)\n                              else if c <= 8467 then 6 else 1)\n                           else\n                             if c <= 8471\n                             then (if c <= 8469 then 6 else 1)\n                             else 6)\n                        else\n                        
  if c <= 8485\n                          then\n                            (if c <= 8483\n                             then 1\n                             else if c <= 8484 then 6 else 1)\n                          else\n                            if c <= 8487\n                            then (if c <= 8486 then 6 else 1)\n                            else if c <= 8488 then 6 else 1)\n                   else\n                     if c <= 8543\n                     then\n                       (if c <= 8505\n                        then 6\n                        else\n                          if c <= 8516\n                          then\n                            (if c <= 8507\n                             then 1\n                             else if c <= 8511 then 6 else 1)\n                          else\n                            if c <= 8525\n                            then (if c <= 8521 then 6 else 1)\n                            else if c <= 8526 then 6 else 1)\n                     else\n                       if c <= 11389\n                       then\n                         (if c <= 8584\n                          then 6\n                          else if c <= 11263 then 1 else 6)\n                       else\n                         if c <= 11498\n                         then (if c <= 11492 then 6 else 1)\n                         else\n                           if c <= 11505\n                           then (if c <= 11502 then 6 else 1)\n                           else if c <= 11507 then 6 else 1)\n                else\n                  if c <= 12294\n                  then\n                    (if c <= 11695\n                     then\n                       (if c <= 11630\n                        then\n                          (if c <= 11564\n                           then\n                             (if c <= 11558\n                              then (if c <= 11557 then 6 else 1)\n                              else if c <= 11559 then 
6 else 1)\n                           else\n                             if c <= 11567\n                             then (if c <= 11565 then 6 else 1)\n                             else if c <= 11623 then 6 else 1)\n                        else\n                          if c <= 11679\n                          then\n                            (if c <= 11647\n                             then (if c <= 11631 then 6 else 1)\n                             else if c <= 11670 then 6 else 1)\n                          else\n                            if c <= 11687\n                            then (if c <= 11686 then 6 else 1)\n                            else if c <= 11694 then 6 else 1)\n                     else\n                       if c <= 11727\n                       then\n                         (if c <= 11711\n                          then\n                            (if c <= 11703\n                             then (if c <= 11702 then 6 else 1)\n                             else if c <= 11710 then 6 else 1)\n                          else\n                            if c <= 11719\n                            then (if c <= 11718 then 6 else 1)\n                            else if c <= 11726 then 6 else 1)\n                       else\n                         if c <= 12287\n                         then\n                           (if c <= 11735\n                            then (if c <= 11734 then 6 else 1)\n                            else if c <= 11742 then 6 else 1)\n                         else\n                           if c <= 12292\n                           then (if c <= 12288 then 2 else 1)\n                           else 6)\n                  else\n                    if c <= 12442\n                    then\n                      (if c <= 12343\n                       then\n                         (if c <= 12320\n                          then (if c <= 12295 then 6 else 1)\n                          else\n                            if c 
<= 12336\n                            then (if c <= 12329 then 6 else 1)\n                            else if c <= 12341 then 6 else 1)\n                       else\n                         if c <= 12348\n                         then 6\n                         else\n                           if c <= 12352\n                           then 1\n                           else if c <= 12438 then 6 else 1)\n                    else\n                      if c <= 12539\n                      then\n                        (if c <= 12447\n                         then 6\n                         else\n                           if c <= 12448\n                           then 1\n                           else if c <= 12538 then 6 else 1)\n                      else\n                        if c <= 12548\n                        then (if c <= 12543 then 6 else 1)\n                        else\n                          if c <= 12592\n                          then (if c <= 12591 then 6 else 1)\n                          else if c <= 12686 then 6 else 1)\n             else\n               if c <= 42999\n               then\n                 (if c <= 42653\n                  then\n                    (if c <= 42239\n                     then\n                       (if c <= 40981\n                        then\n                          (if c <= 13311\n                           then\n                             (if c <= 12783\n                              then (if c <= 12735 then 6 else 1)\n                              else if c <= 12799 then 6 else 1)\n                           else\n                             if c <= 19967\n                             then (if c <= 19903 then 6 else 1)\n                             else 6)\n                        else\n                          if c <= 42191\n                          then (if c <= 42124 then 6 else 1)\n                          else if c <= 42237 then 6 else 1)\n                     else\n                       
if c <= 42559\n                       then\n                         (if c <= 42511\n                          then (if c <= 42508 then 6 else 1)\n                          else\n                            if c <= 42537\n                            then (if c <= 42527 then 6 else 1)\n                            else if c <= 42539 then 6 else 1)\n                       else\n                         if c <= 42622\n                         then (if c <= 42606 then 6 else 1)\n                         else 6)\n                  else\n                    if c <= 42890\n                    then\n                      (if c <= 42785\n                       then\n                         (if c <= 42735\n                          then (if c <= 42655 then 1 else 6)\n                          else\n                            if c <= 42774\n                            then 1\n                            else if c <= 42783 then 6 else 1)\n                       else\n                         if c <= 42887\n                         then 6\n                         else if c <= 42888 then 6 else 1)\n                    else\n                      if c <= 42962\n                      then\n                        (if c <= 42954\n                         then 6\n                         else\n                           if c <= 42959\n                           then 1\n                           else if c <= 42961 then 6 else 1)\n                      else\n                        if c <= 42993\n                        then\n                          (if c <= 42964\n                           then (if c <= 42963 then 6 else 1)\n                           else if c <= 42969 then 6 else 1)\n                        else 6)\n               else\n                 if c <= 43470\n                 then\n                   (if c <= 43137\n                    then\n                      (if c <= 43010\n                       then\n                         (if c <= 43002\n                    
      then 6\n                          else if c <= 43009 then 6 else 1)\n                       else\n                         if c <= 43019\n                         then\n                           (if c <= 43014\n                            then (if c <= 43013 then 6 else 1)\n                            else if c <= 43018 then 6 else 1)\n                         else\n                           if c <= 43071\n                           then (if c <= 43042 then 6 else 1)\n                           else if c <= 43123 then 6 else 1)\n                    else\n                      if c <= 43273\n                      then\n                        (if c <= 43258\n                         then\n                           (if c <= 43249\n                            then (if c <= 43187 then 6 else 1)\n                            else if c <= 43255 then 6 else 1)\n                         else\n                           if c <= 43260\n                           then (if c <= 43259 then 6 else 1)\n                           else if c <= 43262 then 6 else 1)\n                      else\n                        if c <= 43359\n                        then\n                          (if c <= 43311\n                           then (if c <= 43301 then 6 else 1)\n                           else if c <= 43334 then 6 else 1)\n                        else\n                          if c <= 43395\n                          then (if c <= 43388 then 6 else 1)\n                          else if c <= 43442 then 6 else 1)\n                 else\n                   if c <= 43615\n                   then\n                     (if c <= 43513\n                      then\n                        (if c <= 43493\n                         then\n                           (if c <= 43487\n                            then (if c <= 43471 then 6 else 1)\n                            else if c <= 43492 then 6 else 1)\n                         else if c <= 43503 then 6 else 1)\n                     
 else\n                        if c <= 43583\n                        then\n                          (if c <= 43519\n                           then (if c <= 43518 then 6 else 1)\n                           else if c <= 43560 then 6 else 1)\n                        else\n                          if c <= 43587\n                          then (if c <= 43586 then 6 else 1)\n                          else if c <= 43595 then 6 else 1)\n                   else\n                     if c <= 43645\n                     then\n                       (if c <= 43638\n                        then 6\n                        else\n                          if c <= 43641\n                          then 1\n                          else if c <= 43642 then 6 else 1)\n                     else\n                       if c <= 43700\n                       then\n                         (if c <= 43696\n                          then (if c <= 43695 then 6 else 1)\n                          else if c <= 43697 then 6 else 1)\n                       else\n                         if c <= 43704\n                         then (if c <= 43702 then 6 else 1)\n                         else if c <= 43709 then 6 else 1)\n          else\n            if c <= 66377\n            then\n              (if c <= 64325\n               then\n                 (if c <= 43887\n                  then\n                    (if c <= 43784\n                     then\n                       (if c <= 43743\n                        then\n                          (if c <= 43738\n                           then\n                             (if c <= 43713\n                              then (if c <= 43712 then 6 else 1)\n                              else if c <= 43714 then 6 else 1)\n                           else if c <= 43741 then 6 else 1)\n                        else\n                          if c <= 43764\n                          then\n                            (if c <= 43761\n                             
then (if c <= 43754 then 6 else 1)\n                             else 6)\n                          else\n                            if c <= 43776\n                            then 1\n                            else if c <= 43782 then 6 else 1)\n                     else\n                       if c <= 43823\n                       then\n                         (if c <= 43807\n                          then\n                            (if c <= 43792\n                             then (if c <= 43790 then 6 else 1)\n                             else if c <= 43798 then 6 else 1)\n                          else\n                            if c <= 43815\n                            then (if c <= 43814 then 6 else 1)\n                            else if c <= 43822 then 6 else 1)\n                       else\n                         if c <= 43880\n                         then\n                           (if c <= 43867\n                            then (if c <= 43866 then 6 else 1)\n                            else 6)\n                         else if c <= 43881 then 6 else 1)\n                  else\n                    if c <= 64274\n                    then\n                      (if c <= 55242\n                       then\n                         (if c <= 44031\n                          then (if c <= 44002 then 6 else 1)\n                          else\n                            if c <= 55215\n                            then (if c <= 55203 then 6 else 1)\n                            else if c <= 55238 then 6 else 1)\n                       else\n                         if c <= 64111\n                         then\n                           (if c <= 63743\n                            then (if c <= 55291 then 6 else 1)\n                            else if c <= 64109 then 6 else 1)\n                         else\n                           if c <= 64255\n                           then (if c <= 64217 then 6 else 1)\n                           else if c <= 
64262 then 6 else 1)\n                    else\n                      if c <= 64311\n                      then\n                        (if c <= 64286\n                         then\n                           (if c <= 64284\n                            then (if c <= 64279 then 6 else 1)\n                            else if c <= 64285 then 6 else 1)\n                         else\n                           if c <= 64297\n                           then (if c <= 64296 then 6 else 1)\n                           else if c <= 64310 then 6 else 1)\n                      else\n                        if c <= 64319\n                        then\n                          (if c <= 64317\n                           then (if c <= 64316 then 6 else 1)\n                           else if c <= 64318 then 6 else 1)\n                        else\n                          if c <= 64322\n                          then (if c <= 64321 then 6 else 1)\n                          else if c <= 64324 then 6 else 1)\n               else\n                 if c <= 65481\n                 then\n                   (if c <= 65312\n                    then\n                      (if c <= 65007\n                       then\n                         (if c <= 64847\n                          then\n                            (if c <= 64466\n                             then (if c <= 64433 then 6 else 1)\n                             else if c <= 64829 then 6 else 1)\n                          else\n                            if c <= 64913\n                            then (if c <= 64911 then 6 else 1)\n                            else if c <= 64967 then 6 else 1)\n                       else\n                         if c <= 65141\n                         then\n                           (if c <= 65135\n                            then (if c <= 65019 then 6 else 1)\n                            else if c <= 65140 then 6 else 1)\n                         else\n                           if c <= 
65278\n                           then (if c <= 65276 then 6 else 1)\n                           else if c <= 65279 then 2 else 1)\n                    else\n                      if c <= 65437\n                      then\n                        (if c <= 65381\n                         then\n                           (if c <= 65344\n                            then (if c <= 65338 then 6 else 1)\n                            else if c <= 65370 then 6 else 1)\n                         else 6)\n                      else\n                        if c <= 65470\n                        then 6\n                        else\n                          if c <= 65473\n                          then 1\n                          else if c <= 65479 then 6 else 1)\n                 else\n                   if c <= 65615\n                   then\n                     (if c <= 65548\n                      then\n                        (if c <= 65497\n                         then\n                           (if c <= 65489\n                            then (if c <= 65487 then 6 else 1)\n                            else if c <= 65495 then 6 else 1)\n                         else\n                           if c <= 65535\n                           then (if c <= 65500 then 6 else 1)\n                           else if c <= 65547 then 6 else 1)\n                      else\n                        if c <= 65595\n                        then\n                          (if c <= 65575\n                           then (if c <= 65574 then 6 else 1)\n                           else if c <= 65594 then 6 else 1)\n                        else\n                          if c <= 65598\n                          then (if c <= 65597 then 6 else 1)\n                          else if c <= 65613 then 6 else 1)\n                   else\n                     if c <= 66207\n                     then\n                       (if c <= 65855\n                        then\n                          (if c <= 
65663\n                           then (if c <= 65629 then 6 else 1)\n                           else if c <= 65786 then 6 else 1)\n                        else\n                          if c <= 66175\n                          then (if c <= 65908 then 6 else 1)\n                          else if c <= 66204 then 6 else 1)\n                     else\n                       if c <= 66348\n                       then\n                         (if c <= 66303\n                          then (if c <= 66256 then 6 else 1)\n                          else if c <= 66335 then 6 else 1)\n                       else 6)\n            else\n              if c <= 67646\n              then\n                (if c <= 66963\n                 then\n                   (if c <= 66717\n                    then\n                      (if c <= 66463\n                       then\n                         (if c <= 66383\n                          then (if c <= 66378 then 6 else 1)\n                          else\n                            if c <= 66431\n                            then (if c <= 66421 then 6 else 1)\n                            else if c <= 66461 then 6 else 1)\n                       else\n                         if c <= 66512\n                         then\n                           (if c <= 66503\n                            then (if c <= 66499 then 6 else 1)\n                            else if c <= 66511 then 6 else 1)\n                         else\n                           if c <= 66559\n                           then (if c <= 66517 then 6 else 1)\n                           else 6)\n                    else\n                      if c <= 66863\n                      then\n                        (if c <= 66775\n                         then\n                           (if c <= 66735\n                            then 1\n                            else if c <= 66771 then 6 else 1)\n                         else\n                           if c <= 66815\n          
                 then (if c <= 66811 then 6 else 1)\n                           else if c <= 66855 then 6 else 1)\n                      else\n                        if c <= 66939\n                        then\n                          (if c <= 66927\n                           then (if c <= 66915 then 6 else 1)\n                           else if c <= 66938 then 6 else 1)\n                        else\n                          if c <= 66955\n                          then (if c <= 66954 then 6 else 1)\n                          else if c <= 66962 then 6 else 1)\n                 else\n                   if c <= 67455\n                   then\n                     (if c <= 67002\n                      then\n                        (if c <= 66978\n                         then\n                           (if c <= 66966\n                            then (if c <= 66965 then 6 else 1)\n                            else if c <= 66977 then 6 else 1)\n                         else\n                           if c <= 66994\n                           then (if c <= 66993 then 6 else 1)\n                           else if c <= 67001 then 6 else 1)\n                      else\n                        if c <= 67391\n                        then\n                          (if c <= 67071\n                           then (if c <= 67004 then 6 else 1)\n                           else if c <= 67382 then 6 else 1)\n                        else\n                          if c <= 67423\n                          then (if c <= 67413 then 6 else 1)\n                          else if c <= 67431 then 6 else 1)\n                   else\n                     if c <= 67591\n                     then\n                       (if c <= 67505\n                        then\n                          (if c <= 67462\n                           then (if c <= 67461 then 6 else 1)\n                           else if c <= 67504 then 6 else 1)\n                        else\n                          if 
c <= 67583\n                          then (if c <= 67514 then 6 else 1)\n                          else if c <= 67589 then 6 else 1)\n                     else\n                       if c <= 67638\n                       then\n                         (if c <= 67593\n                          then (if c <= 67592 then 6 else 1)\n                          else if c <= 67637 then 6 else 1)\n                       else\n                         if c <= 67643\n                         then (if c <= 67640 then 6 else 1)\n                         else if c <= 67644 then 6 else 1)\n              else\n                if c <= 68296\n                then\n                  (if c <= 68029\n                   then\n                     (if c <= 67827\n                      then\n                        (if c <= 67711\n                         then\n                           (if c <= 67679\n                            then (if c <= 67669 then 6 else 1)\n                            else if c <= 67702 then 6 else 1)\n                         else\n                           if c <= 67807\n                           then (if c <= 67742 then 6 else 1)\n                           else if c <= 67826 then 6 else 1)\n                      else\n                        if c <= 67871\n                        then\n                          (if c <= 67839\n                           then (if c <= 67829 then 6 else 1)\n                           else if c <= 67861 then 6 else 1)\n                        else\n                          if c <= 67967\n                          then (if c <= 67897 then 6 else 1)\n                          else if c <= 68023 then 6 else 1)\n                   else\n                     if c <= 68120\n                     then\n                       (if c <= 68111\n                        then\n                          (if c <= 68095\n                           then (if c <= 68031 then 6 else 1)\n                           else if c <= 68096 then 6 else 
1)\n                        else\n                          if c <= 68116\n                          then (if c <= 68115 then 6 else 1)\n                          else if c <= 68119 then 6 else 1)\n                     else\n                       if c <= 68223\n                       then\n                         (if c <= 68191\n                          then (if c <= 68149 then 6 else 1)\n                          else if c <= 68220 then 6 else 1)\n                       else\n                         if c <= 68287\n                         then (if c <= 68252 then 6 else 1)\n                         else if c <= 68295 then 6 else 1)\n                else\n                  if c <= 68863\n                  then\n                    (if c <= 68479\n                     then\n                       (if c <= 68415\n                        then\n                          (if c <= 68351\n                           then (if c <= 68324 then 6 else 1)\n                           else if c <= 68405 then 6 else 1)\n                        else\n                          if c <= 68447\n                          then (if c <= 68437 then 6 else 1)\n                          else if c <= 68466 then 6 else 1)\n                     else\n                       if c <= 68735\n                       then\n                         (if c <= 68607\n                          then (if c <= 68497 then 6 else 1)\n                          else if c <= 68680 then 6 else 1)\n                       else\n                         if c <= 68799\n                         then (if c <= 68786 then 6 else 1)\n                         else if c <= 68850 then 6 else 1)\n                  else\n                    if c <= 69414\n                    then\n                      (if c <= 69295\n                       then\n                         (if c <= 69247\n                          then (if c <= 68899 then 6 else 1)\n                          else if c <= 69289 then 6 else 1)\n                  
     else\n                         if c <= 69375\n                         then (if c <= 69297 then 6 else 1)\n                         else if c <= 69404 then 6 else 1)\n                    else\n                      if c <= 69487\n                      then\n                        (if c <= 69423\n                         then (if c <= 69415 then 6 else 1)\n                         else if c <= 69445 then 6 else 1)\n                      else\n                        if c <= 69551\n                        then (if c <= 69505 then 6 else 1)\n                        else if c <= 69572 then 6 else 1)\n       else\n         if c <= 120122\n         then\n           (if c <= 72348\n            then\n              (if c <= 70655\n               then\n                 (if c <= 70162\n                  then\n                    (if c <= 69958\n                     then\n                       (if c <= 69762\n                        then\n                          (if c <= 69744\n                           then\n                             (if c <= 69634\n                              then (if c <= 69622 then 6 else 1)\n                              else if c <= 69687 then 6 else 1)\n                           else\n                             if c <= 69748\n                             then (if c <= 69746 then 6 else 1)\n                             else if c <= 69749 then 6 else 1)\n                        else\n                          if c <= 69890\n                          then\n                            (if c <= 69839\n                             then (if c <= 69807 then 6 else 1)\n                             else if c <= 69864 then 6 else 1)\n                          else\n                            if c <= 69955\n                            then (if c <= 69926 then 6 else 1)\n                            else if c <= 69956 then 6 else 1)\n                     else\n                       if c <= 70080\n                       then\n                       
  (if c <= 70005\n                          then\n                            (if c <= 69967\n                             then (if c <= 69959 then 6 else 1)\n                             else if c <= 70002 then 6 else 1)\n                          else\n                            if c <= 70018\n                            then (if c <= 70006 then 6 else 1)\n                            else if c <= 70066 then 6 else 1)\n                       else\n                         if c <= 70107\n                         then\n                           (if c <= 70105\n                            then (if c <= 70084 then 6 else 1)\n                            else if c <= 70106 then 6 else 1)\n                         else\n                           if c <= 70143\n                           then (if c <= 70108 then 6 else 1)\n                           else if c <= 70161 then 6 else 1)\n                  else\n                    if c <= 70414\n                    then\n                      (if c <= 70286\n                       then\n                         (if c <= 70279\n                          then\n                            (if c <= 70271\n                             then (if c <= 70187 then 6 else 1)\n                             else if c <= 70278 then 6 else 1)\n                          else\n                            if c <= 70281\n                            then (if c <= 70280 then 6 else 1)\n                            else if c <= 70285 then 6 else 1)\n                       else\n                         if c <= 70319\n                         then\n                           (if c <= 70302\n                            then (if c <= 70301 then 6 else 1)\n                            else if c <= 70312 then 6 else 1)\n                         else\n                           if c <= 70404\n                           then (if c <= 70366 then 6 else 1)\n                           else if c <= 70412 then 6 else 1)\n                    else\n             
         if c <= 70452\n                      then\n                        (if c <= 70441\n                         then\n                           (if c <= 70418\n                            then (if c <= 70416 then 6 else 1)\n                            else if c <= 70440 then 6 else 1)\n                         else\n                           if c <= 70449\n                           then (if c <= 70448 then 6 else 1)\n                           else if c <= 70451 then 6 else 1)\n                      else\n                        if c <= 70479\n                        then\n                          (if c <= 70460\n                           then (if c <= 70457 then 6 else 1)\n                           else if c <= 70461 then 6 else 1)\n                        else\n                          if c <= 70492\n                          then (if c <= 70480 then 6 else 1)\n                          else if c <= 70497 then 6 else 1)\n               else\n                 if c <= 71934\n                 then\n                   (if c <= 71167\n                    then\n                      (if c <= 70851\n                       then\n                         (if c <= 70750\n                          then\n                            (if c <= 70726\n                             then (if c <= 70708 then 6 else 1)\n                             else if c <= 70730 then 6 else 1)\n                          else\n                            if c <= 70783\n                            then (if c <= 70753 then 6 else 1)\n                            else if c <= 70831 then 6 else 1)\n                       else\n                         if c <= 71039\n                         then\n                           (if c <= 70854\n                            then (if c <= 70853 then 6 else 1)\n                            else if c <= 70855 then 6 else 1)\n                         else\n                           if c <= 71127\n                           then (if c <= 71086 then 6 
else 1)\n                           else if c <= 71131 then 6 else 1)\n                    else\n                      if c <= 71423\n                      then\n                        (if c <= 71295\n                         then\n                           (if c <= 71235\n                            then (if c <= 71215 then 6 else 1)\n                            else if c <= 71236 then 6 else 1)\n                         else\n                           if c <= 71351\n                           then (if c <= 71338 then 6 else 1)\n                           else if c <= 71352 then 6 else 1)\n                      else\n                        if c <= 71679\n                        then\n                          (if c <= 71487\n                           then (if c <= 71450 then 6 else 1)\n                           else if c <= 71494 then 6 else 1)\n                        else\n                          if c <= 71839\n                          then (if c <= 71723 then 6 else 1)\n                          else if c <= 71903 then 6 else 1)\n                 else\n                   if c <= 72105\n                   then\n                     (if c <= 71959\n                      then\n                        (if c <= 71947\n                         then\n                           (if c <= 71944\n                            then (if c <= 71942 then 6 else 1)\n                            else if c <= 71945 then 6 else 1)\n                         else\n                           if c <= 71956\n                           then (if c <= 71955 then 6 else 1)\n                           else if c <= 71958 then 6 else 1)\n                      else\n                        if c <= 72000\n                        then\n                          (if c <= 71998\n                           then (if c <= 71983 then 6 else 1)\n                           else if c <= 71999 then 6 else 1)\n                        else\n                          if c <= 72095\n                    
      then (if c <= 72001 then 6 else 1)\n                          else if c <= 72103 then 6 else 1)\n                   else\n                     if c <= 72202\n                     then\n                       (if c <= 72162\n                        then\n                          (if c <= 72160\n                           then (if c <= 72144 then 6 else 1)\n                           else if c <= 72161 then 6 else 1)\n                        else\n                          if c <= 72191\n                          then (if c <= 72163 then 6 else 1)\n                          else if c <= 72192 then 6 else 1)\n                     else\n                       if c <= 72271\n                       then\n                         (if c <= 72249\n                          then (if c <= 72242 then 6 else 1)\n                          else if c <= 72250 then 6 else 1)\n                       else\n                         if c <= 72283\n                         then (if c <= 72272 then 6 else 1)\n                         else if c <= 72329 then 6 else 1)\n            else\n              if c <= 94031\n              then\n                (if c <= 73727\n                 then\n                   (if c <= 72970\n                    then\n                      (if c <= 72767\n                       then\n                         (if c <= 72703\n                          then\n                            (if c <= 72367\n                             then (if c <= 72349 then 6 else 1)\n                             else if c <= 72440 then 6 else 1)\n                          else\n                            if c <= 72713\n                            then (if c <= 72712 then 6 else 1)\n                            else if c <= 72750 then 6 else 1)\n                       else\n                         if c <= 72959\n                         then\n                           (if c <= 72817\n                            then (if c <= 72768 then 6 else 1)\n                          
  else if c <= 72847 then 6 else 1)\n                         else\n                           if c <= 72967\n                           then (if c <= 72966 then 6 else 1)\n                           else if c <= 72969 then 6 else 1)\n                    else\n                      if c <= 73065\n                      then\n                        (if c <= 73055\n                         then\n                           (if c <= 73029\n                            then (if c <= 73008 then 6 else 1)\n                            else if c <= 73030 then 6 else 1)\n                         else\n                           if c <= 73062\n                           then (if c <= 73061 then 6 else 1)\n                           else if c <= 73064 then 6 else 1)\n                      else\n                        if c <= 73439\n                        then\n                          (if c <= 73111\n                           then (if c <= 73097 then 6 else 1)\n                           else if c <= 73112 then 6 else 1)\n                        else\n                          if c <= 73647\n                          then (if c <= 73458 then 6 else 1)\n                          else if c <= 73648 then 6 else 1)\n                 else\n                   if c <= 92783\n                   then\n                     (if c <= 77823\n                      then\n                        (if c <= 74879\n                         then\n                           (if c <= 74751\n                            then (if c <= 74649 then 6 else 1)\n                            else if c <= 74862 then 6 else 1)\n                         else\n                           if c <= 77711\n                           then (if c <= 75075 then 6 else 1)\n                           else if c <= 77808 then 6 else 1)\n                      else\n                        if c <= 92159\n                        then\n                          (if c <= 82943\n                           then (if c <= 78894 then 
6 else 1)\n                           else if c <= 83526 then 6 else 1)\n                        else\n                          if c <= 92735\n                          then (if c <= 92728 then 6 else 1)\n                          else if c <= 92766 then 6 else 1)\n                   else\n                     if c <= 93026\n                     then\n                       (if c <= 92927\n                        then\n                          (if c <= 92879\n                           then (if c <= 92862 then 6 else 1)\n                           else if c <= 92909 then 6 else 1)\n                        else\n                          if c <= 92991\n                          then (if c <= 92975 then 6 else 1)\n                          else if c <= 92995 then 6 else 1)\n                     else\n                       if c <= 93759\n                       then\n                         (if c <= 93052\n                          then (if c <= 93047 then 6 else 1)\n                          else if c <= 93071 then 6 else 1)\n                       else\n                         if c <= 93951\n                         then (if c <= 93823 then 6 else 1)\n                         else if c <= 94026 then 6 else 1)\n              else\n                if c <= 113791\n                then\n                  (if c <= 110580\n                   then\n                     (if c <= 94207\n                      then\n                        (if c <= 94175\n                         then\n                           (if c <= 94098\n                            then (if c <= 94032 then 6 else 1)\n                            else if c <= 94111 then 6 else 1)\n                         else\n                           if c <= 94178\n                           then (if c <= 94177 then 6 else 1)\n                           else if c <= 94179 then 6 else 1)\n                      else\n                        if c <= 101631\n                        then\n                          (if 
c <= 100351\n                           then (if c <= 100343 then 6 else 1)\n                           else if c <= 101589 then 6 else 1)\n                        else\n                          if c <= 110575\n                          then (if c <= 101640 then 6 else 1)\n                          else if c <= 110579 then 6 else 1)\n                   else\n                     if c <= 110947\n                     then\n                       (if c <= 110591\n                        then\n                          (if c <= 110588\n                           then (if c <= 110587 then 6 else 1)\n                           else if c <= 110590 then 6 else 1)\n                        else\n                          if c <= 110927\n                          then (if c <= 110882 then 6 else 1)\n                          else if c <= 110930 then 6 else 1)\n                     else\n                       if c <= 113663\n                       then\n                         (if c <= 110959\n                          then (if c <= 110951 then 6 else 1)\n                          else if c <= 111355 then 6 else 1)\n                       else\n                         if c <= 113775\n                         then (if c <= 113770 then 6 else 1)\n                         else if c <= 113788 then 6 else 1)\n                else\n                  if c <= 119981\n                  then\n                    (if c <= 119965\n                     then\n                       (if c <= 119807\n                        then\n                          (if c <= 113807\n                           then (if c <= 113800 then 6 else 1)\n                           else if c <= 113817 then 6 else 1)\n                        else\n                          if c <= 119893\n                          then (if c <= 119892 then 6 else 1)\n                          else if c <= 119964 then 6 else 1)\n                     else\n                       if c <= 119972\n                       then\n      
                   (if c <= 119969\n                          then (if c <= 119967 then 6 else 1)\n                          else if c <= 119970 then 6 else 1)\n                       else\n                         if c <= 119976\n                         then (if c <= 119974 then 6 else 1)\n                         else if c <= 119980 then 6 else 1)\n                  else\n                    if c <= 120070\n                    then\n                      (if c <= 119996\n                       then\n                         (if c <= 119994\n                          then (if c <= 119993 then 6 else 1)\n                          else if c <= 119995 then 6 else 1)\n                       else\n                         if c <= 120004\n                         then (if c <= 120003 then 6 else 1)\n                         else if c <= 120069 then 6 else 1)\n                    else\n                      if c <= 120085\n                      then\n                        (if c <= 120076\n                         then (if c <= 120074 then 6 else 1)\n                         else if c <= 120084 then 6 else 1)\n                      else\n                        if c <= 120093\n                        then (if c <= 120092 then 6 else 1)\n                        else if c <= 120121 then 6 else 1)\n         else\n           if c <= 131071\n           then\n             (if c <= 126468\n              then\n                (if c <= 122623\n                 then\n                   (if c <= 120571\n                    then\n                      (if c <= 120145\n                       then\n                         (if c <= 120133\n                          then\n                            (if c <= 120127\n                             then (if c <= 120126 then 6 else 1)\n                             else if c <= 120132 then 6 else 1)\n                          else\n                            if c <= 120137\n                            then (if c <= 120134 then 6 else 1)\n 
                           else if c <= 120144 then 6 else 1)\n                       else\n                         if c <= 120513\n                         then\n                           (if c <= 120487\n                            then (if c <= 120485 then 6 else 1)\n                            else if c <= 120512 then 6 else 1)\n                         else\n                           if c <= 120539\n                           then (if c <= 120538 then 6 else 1)\n                           else if c <= 120570 then 6 else 1)\n                    else\n                      if c <= 120687\n                      then\n                        (if c <= 120629\n                         then\n                           (if c <= 120597\n                            then (if c <= 120596 then 6 else 1)\n                            else if c <= 120628 then 6 else 1)\n                         else\n                           if c <= 120655\n                           then (if c <= 120654 then 6 else 1)\n                           else if c <= 120686 then 6 else 1)\n                      else\n                        if c <= 120745\n                        then\n                          (if c <= 120713\n                           then (if c <= 120712 then 6 else 1)\n                           else if c <= 120744 then 6 else 1)\n                        else\n                          if c <= 120771\n                          then (if c <= 120770 then 6 else 1)\n                          else if c <= 120779 then 6 else 1)\n                 else\n                   if c <= 124895\n                   then\n                     (if c <= 123190\n                      then\n                        (if c <= 122654\n                         then 6\n                         else\n                           if c <= 123135\n                           then 1\n                           else if c <= 123180 then 6 else 1)\n                      else\n                        if c <= 
123535\n                        then\n                          (if c <= 123213\n                           then (if c <= 123197 then 6 else 1)\n                           else if c <= 123214 then 6 else 1)\n                        else\n                          if c <= 123583\n                          then (if c <= 123565 then 6 else 1)\n                          else if c <= 123627 then 6 else 1)\n                   else\n                     if c <= 124927\n                     then\n                       (if c <= 124908\n                        then\n                          (if c <= 124903\n                           then (if c <= 124902 then 6 else 1)\n                           else if c <= 124907 then 6 else 1)\n                        else\n                          if c <= 124911\n                          then (if c <= 124910 then 6 else 1)\n                          else if c <= 124926 then 6 else 1)\n                     else\n                       if c <= 125258\n                       then\n                         (if c <= 125183\n                          then (if c <= 125124 then 6 else 1)\n                          else if c <= 125251 then 6 else 1)\n                       else\n                         if c <= 126463\n                         then (if c <= 125259 then 6 else 1)\n                         else if c <= 126467 then 6 else 1)\n              else\n                if c <= 126552\n                then\n                  (if c <= 126529\n                   then\n                     (if c <= 126504\n                      then\n                        (if c <= 126499\n                         then\n                           (if c <= 126496\n                            then (if c <= 126495 then 6 else 1)\n                            else if c <= 126498 then 6 else 1)\n                         else\n                           if c <= 126502\n                           then (if c <= 126500 then 6 else 1)\n                           
else if c <= 126503 then 6 else 1)\n                      else\n                        if c <= 126520\n                        then\n                          (if c <= 126515\n                           then (if c <= 126514 then 6 else 1)\n                           else if c <= 126519 then 6 else 1)\n                        else\n                          if c <= 126522\n                          then (if c <= 126521 then 6 else 1)\n                          else if c <= 126523 then 6 else 1)\n                   else\n                     if c <= 126540\n                     then\n                       (if c <= 126536\n                        then\n                          (if c <= 126534\n                           then (if c <= 126530 then 6 else 1)\n                           else if c <= 126535 then 6 else 1)\n                        else\n                          if c <= 126538\n                          then (if c <= 126537 then 6 else 1)\n                          else if c <= 126539 then 6 else 1)\n                     else\n                       if c <= 126547\n                       then\n                         (if c <= 126544\n                          then (if c <= 126543 then 6 else 1)\n                          else if c <= 126546 then 6 else 1)\n                       else\n                         if c <= 126550\n                         then (if c <= 126548 then 6 else 1)\n                         else if c <= 126551 then 6 else 1)\n                else\n                  if c <= 126579\n                  then\n                    (if c <= 126560\n                     then\n                       (if c <= 126556\n                        then\n                          (if c <= 126554\n                           then (if c <= 126553 then 6 else 1)\n                           else if c <= 126555 then 6 else 1)\n                        else\n                          if c <= 126558\n                          then (if c <= 126557 then 6 else 
1)\n                          else if c <= 126559 then 6 else 1)\n                     else\n                       if c <= 126566\n                       then\n                         (if c <= 126563\n                          then (if c <= 126562 then 6 else 1)\n                          else if c <= 126564 then 6 else 1)\n                       else\n                         if c <= 126571\n                         then (if c <= 126570 then 6 else 1)\n                         else if c <= 126578 then 6 else 1)\n                  else\n                    if c <= 126602\n                    then\n                      (if c <= 126589\n                       then\n                         (if c <= 126584\n                          then (if c <= 126583 then 6 else 1)\n                          else if c <= 126588 then 6 else 1)\n                       else\n                         if c <= 126591\n                         then (if c <= 126590 then 6 else 1)\n                         else if c <= 126601 then 6 else 1)\n                    else\n                      if c <= 126628\n                      then\n                        (if c <= 126624\n                         then (if c <= 126619 then 6 else 1)\n                         else if c <= 126627 then 6 else 1)\n                      else\n                        if c <= 126634\n                        then (if c <= 126633 then 6 else 1)\n                        else if c <= 126651 then 6 else 1)\n           else\n             if c <= 183983\n             then\n               (if c <= 177983\n                then\n                  (if c <= 173823\n                   then (if c <= 173791 then 6 else 1)\n                   else if c <= 177976 then 6 else 1)\n                else\n                  if c <= 178207\n                  then (if c <= 178205 then 6 else 1)\n                  else if c <= 183969 then 6 else 1)\n             else if c <= 191456 then 6 else 1)\n    else (-1)\nlet __sedlex_partition_78 
c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_75 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_119 c =\n  if c <= (-1)\n  then (-1)\n  else\n    if c <= 8191\n    then (Char.code (String.unsafe_get __sedlex_table_76 c)) - 1\n    else\n      if c <= 12287\n      then\n        (if c <= 8238\n         then\n           (if c <= 8231\n            then (if c <= 8202 then 1 else 0)\n            else if c <= 8233 then 2 else 0)\n         else\n           if c <= 8286\n           then (if c <= 8239 then 1 else 0)\n           else if c <= 8287 then 1 else 0)\n      else\n        if c <= 65278\n        then (if c <= 12288 then 1 else 0)\n        else if c <= 65279 then 1 else 0\nlet __sedlex_partition_69 c =\n  if c <= 118 then (-1) else if c <= 119 then 0 else (-1)\nlet __sedlex_partition_85 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_77 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_100 c =\n  if c <= 93\n  then (Char.code (String.unsafe_get __sedlex_table_78 (c - (-1)))) - 1\n  else if c <= 8233 then (if c <= 8231 then 1 else 2) else 1\nlet __sedlex_partition_118 c =\n  if c <= 123\n  then (Char.code (String.unsafe_get __sedlex_table_79 (c - (-1)))) - 1\n  else if c <= 8233 then (if c <= 8231 then 1 else 2) else 1\nlet __sedlex_partition_32 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 57\n    then (Char.code (String.unsafe_get __sedlex_table_80 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_38 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 101\n    then (Char.code (String.unsafe_get __sedlex_table_81 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_39 c =\n  if c <= 42\n  then (-1)\n  else\n    if c <= 57\n    then (Char.code (String.unsafe_get __sedlex_table_82 (c - 43))) - 1\n    else (-1)\nlet __sedlex_partition_109 c =\n  if c <= 125\n  then (Char.code (String.unsafe_get __sedlex_table_83 (c - (-1)))) 
- 1\n  else if c <= 8233 then (if c <= 8231 then 1 else 2) else 1\nlet __sedlex_partition_1 c =\n  if c <= 122\n  then (Char.code (String.unsafe_get __sedlex_table_84 (c - (-1)))) - 1\n  else 1\nlet __sedlex_partition_6 c =\n  if c <= 122\n  then (Char.code (String.unsafe_get __sedlex_table_85 (c - (-1)))) - 1\n  else 1\nlet __sedlex_partition_12 c =\n  if c <= 44\n  then (-1)\n  else\n    if c <= 47\n    then (Char.code (String.unsafe_get __sedlex_table_86 (c - 45))) - 1\n    else (-1)\nlet __sedlex_partition_114 c =\n  if c <= 47\n  then (-1)\n  else\n    if c <= 102\n    then (Char.code (String.unsafe_get __sedlex_table_87 (c - 48))) - 1\n    else (-1)\nlet __sedlex_partition_49 c =\n  if c <= 62 then (-1) else if c <= 63 then 0 else (-1)\nlet __sedlex_partition_48 c =\n  if c <= 45\n  then (-1)\n  else\n    if c <= 95\n    then (Char.code (String.unsafe_get __sedlex_table_88 (c - 46))) - 1\n    else (-1)\nlet __sedlex_partition_2 c =\n  if c <= 116 then (-1) else if c <= 117 then 0 else (-1)\nlet __sedlex_partition_13 c =\n  if c <= 46 then (-1) else if c <= 47 then 0 else (-1)\nlet __sedlex_partition_66 c =\n  if c <= 57 then (-1) else if c <= 58 then 0 else (-1)\nlet __sedlex_partition_60 c =\n  if c <= 35\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_89 (c - 36))) - 1\n    else (-1)\nlet __sedlex_partition_111 c =\n  if c <= 34\n  then (-1)\n  else\n    if c <= 122\n    then (Char.code (String.unsafe_get __sedlex_table_90 (c - 35))) - 1\n    else (-1)\nlet __sedlex_partition_117 c =\n  if c <= 8191\n  then (Char.code (String.unsafe_get __sedlex_table_91 (c - (-1)))) - 1\n  else\n    if c <= 194559\n    then\n      (if c <= 69599\n       then\n         (if c <= 43711\n          then\n            (if c <= 12703\n             then\n               (if c <= 11519\n                then\n                  (if c <= 8489\n                   then\n                     (if c <= 8454\n                      then\n            
            (if c <= 8304\n                         then\n                           (if c <= 8238\n                            then\n                              (if c <= 8231\n                               then (if c <= 8202 then 2 else 1)\n                               else if c <= 8233 then 3 else 1)\n                            else\n                              if c <= 8286\n                              then (if c <= 8239 then 2 else 1)\n                              else if c <= 8287 then 2 else 1)\n                         else\n                           if c <= 8335\n                           then\n                             (if c <= 8318\n                              then (if c <= 8305 then 6 else 1)\n                              else if c <= 8319 then 6 else 1)\n                           else\n                             if c <= 8449\n                             then (if c <= 8348 then 6 else 1)\n                             else if c <= 8450 then 6 else 1)\n                      else\n                        if c <= 8477\n                        then\n                          (if c <= 8468\n                           then\n                             (if c <= 8457\n                              then (if c <= 8455 then 6 else 1)\n                              else if c <= 8467 then 6 else 1)\n                           else\n                             if c <= 8471\n                             then (if c <= 8469 then 6 else 1)\n                             else 6)\n                        else\n                          if c <= 8485\n                          then\n                            (if c <= 8483\n                             then 1\n                             else if c <= 8484 then 6 else 1)\n                          else\n                            if c <= 8487\n                            then (if c <= 8486 then 6 else 1)\n                            else if c <= 8488 then 6 else 1)\n                   else\n            
         if c <= 8543\n                     then\n                       (if c <= 8505\n                        then 6\n                        else\n                          if c <= 8516\n                          then\n                            (if c <= 8507\n                             then 1\n                             else if c <= 8511 then 6 else 1)\n                          else\n                            if c <= 8525\n                            then (if c <= 8521 then 6 else 1)\n                            else if c <= 8526 then 6 else 1)\n                     else\n                       if c <= 11389\n                       then\n                         (if c <= 8584\n                          then 6\n                          else if c <= 11263 then 1 else 6)\n                       else\n                         if c <= 11498\n                         then (if c <= 11492 then 6 else 1)\n                         else\n                           if c <= 11505\n                           then (if c <= 11502 then 6 else 1)\n                           else if c <= 11507 then 6 else 1)\n                else\n                  if c <= 12294\n                  then\n                    (if c <= 11695\n                     then\n                       (if c <= 11630\n                        then\n                          (if c <= 11564\n                           then\n                             (if c <= 11558\n                              then (if c <= 11557 then 6 else 1)\n                              else if c <= 11559 then 6 else 1)\n                           else\n                             if c <= 11567\n                             then (if c <= 11565 then 6 else 1)\n                             else if c <= 11623 then 6 else 1)\n                        else\n                          if c <= 11679\n                          then\n                            (if c <= 11647\n                             then (if c <= 11631 then 6 else 
1)\n                             else if c <= 11670 then 6 else 1)\n                          else\n                            if c <= 11687\n                            then (if c <= 11686 then 6 else 1)\n                            else if c <= 11694 then 6 else 1)\n                     else\n                       if c <= 11727\n                       then\n                         (if c <= 11711\n                          then\n                            (if c <= 11703\n                             then (if c <= 11702 then 6 else 1)\n                             else if c <= 11710 then 6 else 1)\n                          else\n                            if c <= 11719\n                            then (if c <= 11718 then 6 else 1)\n                            else if c <= 11726 then 6 else 1)\n                       else\n                         if c <= 12287\n                         then\n                           (if c <= 11735\n                            then (if c <= 11734 then 6 else 1)\n                            else if c <= 11742 then 6 else 1)\n                         else\n                           if c <= 12292\n                           then (if c <= 12288 then 2 else 1)\n                           else 6)\n                  else\n                    if c <= 12442\n                    then\n                      (if c <= 12343\n                       then\n                         (if c <= 12320\n                          then (if c <= 12295 then 6 else 1)\n                          else\n                            if c <= 12336\n                            then (if c <= 12329 then 6 else 1)\n                            else if c <= 12341 then 6 else 1)\n                       else\n                         if c <= 12348\n                         then 6\n                         else\n                           if c <= 12352\n                           then 1\n                           else if c <= 12438 then 6 else 1)\n                 
   else\n                      if c <= 12539\n                      then\n                        (if c <= 12447\n                         then 6\n                         else\n                           if c <= 12448\n                           then 1\n                           else if c <= 12538 then 6 else 1)\n                      else\n                        if c <= 12548\n                        then (if c <= 12543 then 6 else 1)\n                        else\n                          if c <= 12592\n                          then (if c <= 12591 then 6 else 1)\n                          else if c <= 12686 then 6 else 1)\n             else\n               if c <= 42999\n               then\n                 (if c <= 42653\n                  then\n                    (if c <= 42239\n                     then\n                       (if c <= 40981\n                        then\n                          (if c <= 13311\n                           then\n                             (if c <= 12783\n                              then (if c <= 12735 then 6 else 1)\n                              else if c <= 12799 then 6 else 1)\n                           else\n                             if c <= 19967\n                             then (if c <= 19903 then 6 else 1)\n                             else 6)\n                        else\n                          if c <= 42191\n                          then (if c <= 42124 then 6 else 1)\n                          else if c <= 42237 then 6 else 1)\n                     else\n                       if c <= 42559\n                       then\n                         (if c <= 42511\n                          then (if c <= 42508 then 6 else 1)\n                          else\n                            if c <= 42537\n                            then (if c <= 42527 then 6 else 1)\n                            else if c <= 42539 then 6 else 1)\n                       else\n                         if c <= 42622\n          
               then (if c <= 42606 then 6 else 1)\n                         else 6)\n                  else\n                    if c <= 42890\n                    then\n                      (if c <= 42785\n                       then\n                         (if c <= 42735\n                          then (if c <= 42655 then 1 else 6)\n                          else\n                            if c <= 42774\n                            then 1\n                            else if c <= 42783 then 6 else 1)\n                       else\n                         if c <= 42887\n                         then 6\n                         else if c <= 42888 then 6 else 1)\n                    else\n                      if c <= 42962\n                      then\n                        (if c <= 42954\n                         then 6\n                         else\n                           if c <= 42959\n                           then 1\n                           else if c <= 42961 then 6 else 1)\n                      else\n                        if c <= 42993\n                        then\n                          (if c <= 42964\n                           then (if c <= 42963 then 6 else 1)\n                           else if c <= 42969 then 6 else 1)\n                        else 6)\n               else\n                 if c <= 43470\n                 then\n                   (if c <= 43137\n                    then\n                      (if c <= 43010\n                       then\n                         (if c <= 43002\n                          then 6\n                          else if c <= 43009 then 6 else 1)\n                       else\n                         if c <= 43019\n                         then\n                           (if c <= 43014\n                            then (if c <= 43013 then 6 else 1)\n                            else if c <= 43018 then 6 else 1)\n                         else\n                           if c <= 43071\n          
                 then (if c <= 43042 then 6 else 1)\n                           else if c <= 43123 then 6 else 1)\n                    else\n                      if c <= 43273\n                      then\n                        (if c <= 43258\n                         then\n                           (if c <= 43249\n                            then (if c <= 43187 then 6 else 1)\n                            else if c <= 43255 then 6 else 1)\n                         else\n                           if c <= 43260\n                           then (if c <= 43259 then 6 else 1)\n                           else if c <= 43262 then 6 else 1)\n                      else\n                        if c <= 43359\n                        then\n                          (if c <= 43311\n                           then (if c <= 43301 then 6 else 1)\n                           else if c <= 43334 then 6 else 1)\n                        else\n                          if c <= 43395\n                          then (if c <= 43388 then 6 else 1)\n                          else if c <= 43442 then 6 else 1)\n                 else\n                   if c <= 43615\n                   then\n                     (if c <= 43513\n                      then\n                        (if c <= 43493\n                         then\n                           (if c <= 43487\n                            then (if c <= 43471 then 6 else 1)\n                            else if c <= 43492 then 6 else 1)\n                         else if c <= 43503 then 6 else 1)\n                      else\n                        if c <= 43583\n                        then\n                          (if c <= 43519\n                           then (if c <= 43518 then 6 else 1)\n                           else if c <= 43560 then 6 else 1)\n                        else\n                          if c <= 43587\n                          then (if c <= 43586 then 6 else 1)\n                          else if c <= 43595 then 6 
else 1)\n                   else\n                     if c <= 43645\n                     then\n                       (if c <= 43638\n                        then 6\n                        else\n                          if c <= 43641\n                          then 1\n                          else if c <= 43642 then 6 else 1)\n                     else\n                       if c <= 43700\n                       then\n                         (if c <= 43696\n                          then (if c <= 43695 then 6 else 1)\n                          else if c <= 43697 then 6 else 1)\n                       else\n                         if c <= 43704\n                         then (if c <= 43702 then 6 else 1)\n                         else if c <= 43709 then 6 else 1)\n          else\n            if c <= 66377\n            then\n              (if c <= 64325\n               then\n                 (if c <= 43887\n                  then\n                    (if c <= 43784\n                     then\n                       (if c <= 43743\n                        then\n                          (if c <= 43738\n                           then\n                             (if c <= 43713\n                              then (if c <= 43712 then 6 else 1)\n                              else if c <= 43714 then 6 else 1)\n                           else if c <= 43741 then 6 else 1)\n                        else\n                          if c <= 43764\n                          then\n                            (if c <= 43761\n                             then (if c <= 43754 then 6 else 1)\n                             else 6)\n                          else\n                            if c <= 43776\n                            then 1\n                            else if c <= 43782 then 6 else 1)\n                     else\n                       if c <= 43823\n                       then\n                         (if c <= 43807\n                          then\n             
               (if c <= 43792\n                             then (if c <= 43790 then 6 else 1)\n                             else if c <= 43798 then 6 else 1)\n                          else\n                            if c <= 43815\n                            then (if c <= 43814 then 6 else 1)\n                            else if c <= 43822 then 6 else 1)\n                       else\n                         if c <= 43880\n                         then\n                           (if c <= 43867\n                            then (if c <= 43866 then 6 else 1)\n                            else 6)\n                         else if c <= 43881 then 6 else 1)\n                  else\n                    if c <= 64274\n                    then\n                      (if c <= 55242\n                       then\n                         (if c <= 44031\n                          then (if c <= 44002 then 6 else 1)\n                          else\n                            if c <= 55215\n                            then (if c <= 55203 then 6 else 1)\n                            else if c <= 55238 then 6 else 1)\n                       else\n                         if c <= 64111\n                         then\n                           (if c <= 63743\n                            then (if c <= 55291 then 6 else 1)\n                            else if c <= 64109 then 6 else 1)\n                         else\n                           if c <= 64255\n                           then (if c <= 64217 then 6 else 1)\n                           else if c <= 64262 then 6 else 1)\n                    else\n                      if c <= 64311\n                      then\n                        (if c <= 64286\n                         then\n                           (if c <= 64284\n                            then (if c <= 64279 then 6 else 1)\n                            else if c <= 64285 then 6 else 1)\n                         else\n                           if c <= 64297\n    
                       then (if c <= 64296 then 6 else 1)\n                           else if c <= 64310 then 6 else 1)\n                      else\n                        if c <= 64319\n                        then\n                          (if c <= 64317\n                           then (if c <= 64316 then 6 else 1)\n                           else if c <= 64318 then 6 else 1)\n                        else\n                          if c <= 64322\n                          then (if c <= 64321 then 6 else 1)\n                          else if c <= 64324 then 6 else 1)\n               else\n                 if c <= 65481\n                 then\n                   (if c <= 65312\n                    then\n                      (if c <= 65007\n                       then\n                         (if c <= 64847\n                          then\n                            (if c <= 64466\n                             then (if c <= 64433 then 6 else 1)\n                             else if c <= 64829 then 6 else 1)\n                          else\n                            if c <= 64913\n                            then (if c <= 64911 then 6 else 1)\n                            else if c <= 64967 then 6 else 1)\n                       else\n                         if c <= 65141\n                         then\n                           (if c <= 65135\n                            then (if c <= 65019 then 6 else 1)\n                            else if c <= 65140 then 6 else 1)\n                         else\n                           if c <= 65278\n                           then (if c <= 65276 then 6 else 1)\n                           else if c <= 65279 then 2 else 1)\n                    else\n                      if c <= 65437\n                      then\n                        (if c <= 65381\n                         then\n                           (if c <= 65344\n                            then (if c <= 65338 then 6 else 1)\n                            
else if c <= 65370 then 6 else 1)\n                         else 6)\n                      else\n                        if c <= 65470\n                        then 6\n                        else\n                          if c <= 65473\n                          then 1\n                          else if c <= 65479 then 6 else 1)\n                 else\n                   if c <= 65615\n                   then\n                     (if c <= 65548\n                      then\n                        (if c <= 65497\n                         then\n                           (if c <= 65489\n                            then (if c <= 65487 then 6 else 1)\n                            else if c <= 65495 then 6 else 1)\n                         else\n                           if c <= 65535\n                           then (if c <= 65500 then 6 else 1)\n                           else if c <= 65547 then 6 else 1)\n                      else\n                        if c <= 65595\n                        then\n                          (if c <= 65575\n                           then (if c <= 65574 then 6 else 1)\n                           else if c <= 65594 then 6 else 1)\n                        else\n                          if c <= 65598\n                          then (if c <= 65597 then 6 else 1)\n                          else if c <= 65613 then 6 else 1)\n                   else\n                     if c <= 66207\n                     then\n                       (if c <= 65855\n                        then\n                          (if c <= 65663\n                           then (if c <= 65629 then 6 else 1)\n                           else if c <= 65786 then 6 else 1)\n                        else\n                          if c <= 66175\n                          then (if c <= 65908 then 6 else 1)\n                          else if c <= 66204 then 6 else 1)\n                     else\n                       if c <= 66348\n                       then\n          
               (if c <= 66303\n                          then (if c <= 66256 then 6 else 1)\n                          else if c <= 66335 then 6 else 1)\n                       else 6)\n            else\n              if c <= 67646\n              then\n                (if c <= 66963\n                 then\n                   (if c <= 66717\n                    then\n                      (if c <= 66463\n                       then\n                         (if c <= 66383\n                          then (if c <= 66378 then 6 else 1)\n                          else\n                            if c <= 66431\n                            then (if c <= 66421 then 6 else 1)\n                            else if c <= 66461 then 6 else 1)\n                       else\n                         if c <= 66512\n                         then\n                           (if c <= 66503\n                            then (if c <= 66499 then 6 else 1)\n                            else if c <= 66511 then 6 else 1)\n                         else\n                           if c <= 66559\n                           then (if c <= 66517 then 6 else 1)\n                           else 6)\n                    else\n                      if c <= 66863\n                      then\n                        (if c <= 66775\n                         then\n                           (if c <= 66735\n                            then 1\n                            else if c <= 66771 then 6 else 1)\n                         else\n                           if c <= 66815\n                           then (if c <= 66811 then 6 else 1)\n                           else if c <= 66855 then 6 else 1)\n                      else\n                        if c <= 66939\n                        then\n                          (if c <= 66927\n                           then (if c <= 66915 then 6 else 1)\n                           else if c <= 66938 then 6 else 1)\n                        else\n                     
     if c <= 66955\n                          then (if c <= 66954 then 6 else 1)\n                          else if c <= 66962 then 6 else 1)\n                 else\n                   if c <= 67455\n                   then\n                     (if c <= 67002\n                      then\n                        (if c <= 66978\n                         then\n                           (if c <= 66966\n                            then (if c <= 66965 then 6 else 1)\n                            else if c <= 66977 then 6 else 1)\n                         else\n                           if c <= 66994\n                           then (if c <= 66993 then 6 else 1)\n                           else if c <= 67001 then 6 else 1)\n                      else\n                        if c <= 67391\n                        then\n                          (if c <= 67071\n                           then (if c <= 67004 then 6 else 1)\n                           else if c <= 67382 then 6 else 1)\n                        else\n                          if c <= 67423\n                          then (if c <= 67413 then 6 else 1)\n                          else if c <= 67431 then 6 else 1)\n                   else\n                     if c <= 67591\n                     then\n                       (if c <= 67505\n                        then\n                          (if c <= 67462\n                           then (if c <= 67461 then 6 else 1)\n                           else if c <= 67504 then 6 else 1)\n                        else\n                          if c <= 67583\n                          then (if c <= 67514 then 6 else 1)\n                          else if c <= 67589 then 6 else 1)\n                     else\n                       if c <= 67638\n                       then\n                         (if c <= 67593\n                          then (if c <= 67592 then 6 else 1)\n                          else if c <= 67637 then 6 else 1)\n                       else\n        
                 if c <= 67643\n                         then (if c <= 67640 then 6 else 1)\n                         else if c <= 67644 then 6 else 1)\n              else\n                if c <= 68296\n                then\n                  (if c <= 68029\n                   then\n                     (if c <= 67827\n                      then\n                        (if c <= 67711\n                         then\n                           (if c <= 67679\n                            then (if c <= 67669 then 6 else 1)\n                            else if c <= 67702 then 6 else 1)\n                         else\n                           if c <= 67807\n                           then (if c <= 67742 then 6 else 1)\n                           else if c <= 67826 then 6 else 1)\n                      else\n                        if c <= 67871\n                        then\n                          (if c <= 67839\n                           then (if c <= 67829 then 6 else 1)\n                           else if c <= 67861 then 6 else 1)\n                        else\n                          if c <= 67967\n                          then (if c <= 67897 then 6 else 1)\n                          else if c <= 68023 then 6 else 1)\n                   else\n                     if c <= 68120\n                     then\n                       (if c <= 68111\n                        then\n                          (if c <= 68095\n                           then (if c <= 68031 then 6 else 1)\n                           else if c <= 68096 then 6 else 1)\n                        else\n                          if c <= 68116\n                          then (if c <= 68115 then 6 else 1)\n                          else if c <= 68119 then 6 else 1)\n                     else\n                       if c <= 68223\n                       then\n                         (if c <= 68191\n                          then (if c <= 68149 then 6 else 1)\n                          else if c 
<= 68220 then 6 else 1)\n                       else\n                         if c <= 68287\n                         then (if c <= 68252 then 6 else 1)\n                         else if c <= 68295 then 6 else 1)\n                else\n                  if c <= 68863\n                  then\n                    (if c <= 68479\n                     then\n                       (if c <= 68415\n                        then\n                          (if c <= 68351\n                           then (if c <= 68324 then 6 else 1)\n                           else if c <= 68405 then 6 else 1)\n                        else\n                          if c <= 68447\n                          then (if c <= 68437 then 6 else 1)\n                          else if c <= 68466 then 6 else 1)\n                     else\n                       if c <= 68735\n                       then\n                         (if c <= 68607\n                          then (if c <= 68497 then 6 else 1)\n                          else if c <= 68680 then 6 else 1)\n                       else\n                         if c <= 68799\n                         then (if c <= 68786 then 6 else 1)\n                         else if c <= 68850 then 6 else 1)\n                  else\n                    if c <= 69414\n                    then\n                      (if c <= 69295\n                       then\n                         (if c <= 69247\n                          then (if c <= 68899 then 6 else 1)\n                          else if c <= 69289 then 6 else 1)\n                       else\n                         if c <= 69375\n                         then (if c <= 69297 then 6 else 1)\n                         else if c <= 69404 then 6 else 1)\n                    else\n                      if c <= 69487\n                      then\n                        (if c <= 69423\n                         then (if c <= 69415 then 6 else 1)\n                         else if c <= 69445 then 6 else 1)\n       
               else\n                        if c <= 69551\n                        then (if c <= 69505 then 6 else 1)\n                        else if c <= 69572 then 6 else 1)\n       else\n         if c <= 120122\n         then\n           (if c <= 72348\n            then\n              (if c <= 70655\n               then\n                 (if c <= 70162\n                  then\n                    (if c <= 69958\n                     then\n                       (if c <= 69762\n                        then\n                          (if c <= 69744\n                           then\n                             (if c <= 69634\n                              then (if c <= 69622 then 6 else 1)\n                              else if c <= 69687 then 6 else 1)\n                           else\n                             if c <= 69748\n                             then (if c <= 69746 then 6 else 1)\n                             else if c <= 69749 then 6 else 1)\n                        else\n                          if c <= 69890\n                          then\n                            (if c <= 69839\n                             then (if c <= 69807 then 6 else 1)\n                             else if c <= 69864 then 6 else 1)\n                          else\n                            if c <= 69955\n                            then (if c <= 69926 then 6 else 1)\n                            else if c <= 69956 then 6 else 1)\n                     else\n                       if c <= 70080\n                       then\n                         (if c <= 70005\n                          then\n                            (if c <= 69967\n                             then (if c <= 69959 then 6 else 1)\n                             else if c <= 70002 then 6 else 1)\n                          else\n                            if c <= 70018\n                            then (if c <= 70006 then 6 else 1)\n                            else if c <= 70066 then 6 else 1)\n      
                 else\n                         if c <= 70107\n                         then\n                           (if c <= 70105\n                            then (if c <= 70084 then 6 else 1)\n                            else if c <= 70106 then 6 else 1)\n                         else\n                           if c <= 70143\n                           then (if c <= 70108 then 6 else 1)\n                           else if c <= 70161 then 6 else 1)\n                  else\n                    if c <= 70414\n                    then\n                      (if c <= 70286\n                       then\n                         (if c <= 70279\n                          then\n                            (if c <= 70271\n                             then (if c <= 70187 then 6 else 1)\n                             else if c <= 70278 then 6 else 1)\n                          else\n                            if c <= 70281\n                            then (if c <= 70280 then 6 else 1)\n                            else if c <= 70285 then 6 else 1)\n                       else\n                         if c <= 70319\n                         then\n                           (if c <= 70302\n                            then (if c <= 70301 then 6 else 1)\n                            else if c <= 70312 then 6 else 1)\n                         else\n                           if c <= 70404\n                           then (if c <= 70366 then 6 else 1)\n                           else if c <= 70412 then 6 else 1)\n                    else\n                      if c <= 70452\n                      then\n                        (if c <= 70441\n                         then\n                           (if c <= 70418\n                            then (if c <= 70416 then 6 else 1)\n                            else if c <= 70440 then 6 else 1)\n                         else\n                           if c <= 70449\n                           then (if c <= 70448 then 6 else 1)\n  
                         else if c <= 70451 then 6 else 1)\n                      else\n                        if c <= 70479\n                        then\n                          (if c <= 70460\n                           then (if c <= 70457 then 6 else 1)\n                           else if c <= 70461 then 6 else 1)\n                        else\n                          if c <= 70492\n                          then (if c <= 70480 then 6 else 1)\n                          else if c <= 70497 then 6 else 1)\n               else\n                 if c <= 71934\n                 then\n                   (if c <= 71167\n                    then\n                      (if c <= 70851\n                       then\n                         (if c <= 70750\n                          then\n                            (if c <= 70726\n                             then (if c <= 70708 then 6 else 1)\n                             else if c <= 70730 then 6 else 1)\n                          else\n                            if c <= 70783\n                            then (if c <= 70753 then 6 else 1)\n                            else if c <= 70831 then 6 else 1)\n                       else\n                         if c <= 71039\n                         then\n                           (if c <= 70854\n                            then (if c <= 70853 then 6 else 1)\n                            else if c <= 70855 then 6 else 1)\n                         else\n                           if c <= 71127\n                           then (if c <= 71086 then 6 else 1)\n                           else if c <= 71131 then 6 else 1)\n                    else\n                      if c <= 71423\n                      then\n                        (if c <= 71295\n                         then\n                           (if c <= 71235\n                            then (if c <= 71215 then 6 else 1)\n                            else if c <= 71236 then 6 else 1)\n                         
else\n                           if c <= 71351\n                           then (if c <= 71338 then 6 else 1)\n                           else if c <= 71352 then 6 else 1)\n                      else\n                        if c <= 71679\n                        then\n                          (if c <= 71487\n                           then (if c <= 71450 then 6 else 1)\n                           else if c <= 71494 then 6 else 1)\n                        else\n                          if c <= 71839\n                          then (if c <= 71723 then 6 else 1)\n                          else if c <= 71903 then 6 else 1)\n                 else\n                   if c <= 72105\n                   then\n                     (if c <= 71959\n                      then\n                        (if c <= 71947\n                         then\n                           (if c <= 71944\n                            then (if c <= 71942 then 6 else 1)\n                            else if c <= 71945 then 6 else 1)\n                         else\n                           if c <= 71956\n                           then (if c <= 71955 then 6 else 1)\n                           else if c <= 71958 then 6 else 1)\n                      else\n                        if c <= 72000\n                        then\n                          (if c <= 71998\n                           then (if c <= 71983 then 6 else 1)\n                           else if c <= 71999 then 6 else 1)\n                        else\n                          if c <= 72095\n                          then (if c <= 72001 then 6 else 1)\n                          else if c <= 72103 then 6 else 1)\n                   else\n                     if c <= 72202\n                     then\n                       (if c <= 72162\n                        then\n                          (if c <= 72160\n                           then (if c <= 72144 then 6 else 1)\n                           else if c <= 72161 then 6 else 1)\n 
                       else\n                          if c <= 72191\n                          then (if c <= 72163 then 6 else 1)\n                          else if c <= 72192 then 6 else 1)\n                     else\n                       if c <= 72271\n                       then\n                         (if c <= 72249\n                          then (if c <= 72242 then 6 else 1)\n                          else if c <= 72250 then 6 else 1)\n                       else\n                         if c <= 72283\n                         then (if c <= 72272 then 6 else 1)\n                         else if c <= 72329 then 6 else 1)\n            else\n              if c <= 94031\n              then\n                (if c <= 73727\n                 then\n                   (if c <= 72970\n                    then\n                      (if c <= 72767\n                       then\n                         (if c <= 72703\n                          then\n                            (if c <= 72367\n                             then (if c <= 72349 then 6 else 1)\n                             else if c <= 72440 then 6 else 1)\n                          else\n                            if c <= 72713\n                            then (if c <= 72712 then 6 else 1)\n                            else if c <= 72750 then 6 else 1)\n                       else\n                         if c <= 72959\n                         then\n                           (if c <= 72817\n                            then (if c <= 72768 then 6 else 1)\n                            else if c <= 72847 then 6 else 1)\n                         else\n                           if c <= 72967\n                           then (if c <= 72966 then 6 else 1)\n                           else if c <= 72969 then 6 else 1)\n                    else\n                      if c <= 73065\n                      then\n                        (if c <= 73055\n                         then\n                           (if 
c <= 73029\n                            then (if c <= 73008 then 6 else 1)\n                            else if c <= 73030 then 6 else 1)\n                         else\n                           if c <= 73062\n                           then (if c <= 73061 then 6 else 1)\n                           else if c <= 73064 then 6 else 1)\n                      else\n                        if c <= 73439\n                        then\n                          (if c <= 73111\n                           then (if c <= 73097 then 6 else 1)\n                           else if c <= 73112 then 6 else 1)\n                        else\n                          if c <= 73647\n                          then (if c <= 73458 then 6 else 1)\n                          else if c <= 73648 then 6 else 1)\n                 else\n                   if c <= 92783\n                   then\n                     (if c <= 77823\n                      then\n                        (if c <= 74879\n                         then\n                           (if c <= 74751\n                            then (if c <= 74649 then 6 else 1)\n                            else if c <= 74862 then 6 else 1)\n                         else\n                           if c <= 77711\n                           then (if c <= 75075 then 6 else 1)\n                           else if c <= 77808 then 6 else 1)\n                      else\n                        if c <= 92159\n                        then\n                          (if c <= 82943\n                           then (if c <= 78894 then 6 else 1)\n                           else if c <= 83526 then 6 else 1)\n                        else\n                          if c <= 92735\n                          then (if c <= 92728 then 6 else 1)\n                          else if c <= 92766 then 6 else 1)\n                   else\n                     if c <= 93026\n                     then\n                       (if c <= 92927\n                        then\n    
                      (if c <= 92879\n                           then (if c <= 92862 then 6 else 1)\n                           else if c <= 92909 then 6 else 1)\n                        else\n                          if c <= 92991\n                          then (if c <= 92975 then 6 else 1)\n                          else if c <= 92995 then 6 else 1)\n                     else\n                       if c <= 93759\n                       then\n                         (if c <= 93052\n                          then (if c <= 93047 then 6 else 1)\n                          else if c <= 93071 then 6 else 1)\n                       else\n                         if c <= 93951\n                         then (if c <= 93823 then 6 else 1)\n                         else if c <= 94026 then 6 else 1)\n              else\n                if c <= 113791\n                then\n                  (if c <= 110580\n                   then\n                     (if c <= 94207\n                      then\n                        (if c <= 94175\n                         then\n                           (if c <= 94098\n                            then (if c <= 94032 then 6 else 1)\n                            else if c <= 94111 then 6 else 1)\n                         else\n                           if c <= 94178\n                           then (if c <= 94177 then 6 else 1)\n                           else if c <= 94179 then 6 else 1)\n                      else\n                        if c <= 101631\n                        then\n                          (if c <= 100351\n                           then (if c <= 100343 then 6 else 1)\n                           else if c <= 101589 then 6 else 1)\n                        else\n                          if c <= 110575\n                          then (if c <= 101640 then 6 else 1)\n                          else if c <= 110579 then 6 else 1)\n                   else\n                     if c <= 110947\n                     then\n   
                    (if c <= 110591\n                        then\n                          (if c <= 110588\n                           then (if c <= 110587 then 6 else 1)\n                           else if c <= 110590 then 6 else 1)\n                        else\n                          if c <= 110927\n                          then (if c <= 110882 then 6 else 1)\n                          else if c <= 110930 then 6 else 1)\n                     else\n                       if c <= 113663\n                       then\n                         (if c <= 110959\n                          then (if c <= 110951 then 6 else 1)\n                          else if c <= 111355 then 6 else 1)\n                       else\n                         if c <= 113775\n                         then (if c <= 113770 then 6 else 1)\n                         else if c <= 113788 then 6 else 1)\n                else\n                  if c <= 119981\n                  then\n                    (if c <= 119965\n                     then\n                       (if c <= 119807\n                        then\n                          (if c <= 113807\n                           then (if c <= 113800 then 6 else 1)\n                           else if c <= 113817 then 6 else 1)\n                        else\n                          if c <= 119893\n                          then (if c <= 119892 then 6 else 1)\n                          else if c <= 119964 then 6 else 1)\n                     else\n                       if c <= 119972\n                       then\n                         (if c <= 119969\n                          then (if c <= 119967 then 6 else 1)\n                          else if c <= 119970 then 6 else 1)\n                       else\n                         if c <= 119976\n                         then (if c <= 119974 then 6 else 1)\n                         else if c <= 119980 then 6 else 1)\n                  else\n                    if c <= 120070\n               
     then\n                      (if c <= 119996\n                       then\n                         (if c <= 119994\n                          then (if c <= 119993 then 6 else 1)\n                          else if c <= 119995 then 6 else 1)\n                       else\n                         if c <= 120004\n                         then (if c <= 120003 then 6 else 1)\n                         else if c <= 120069 then 6 else 1)\n                    else\n                      if c <= 120085\n                      then\n                        (if c <= 120076\n                         then (if c <= 120074 then 6 else 1)\n                         else if c <= 120084 then 6 else 1)\n                      else\n                        if c <= 120093\n                        then (if c <= 120092 then 6 else 1)\n                        else if c <= 120121 then 6 else 1)\n         else\n           if c <= 131071\n           then\n             (if c <= 126468\n              then\n                (if c <= 122623\n                 then\n                   (if c <= 120571\n                    then\n                      (if c <= 120145\n                       then\n                         (if c <= 120133\n                          then\n                            (if c <= 120127\n                             then (if c <= 120126 then 6 else 1)\n                             else if c <= 120132 then 6 else 1)\n                          else\n                            if c <= 120137\n                            then (if c <= 120134 then 6 else 1)\n                            else if c <= 120144 then 6 else 1)\n                       else\n                         if c <= 120513\n                         then\n                           (if c <= 120487\n                            then (if c <= 120485 then 6 else 1)\n                            else if c <= 120512 then 6 else 1)\n                         else\n                           if c <= 120539\n                  
         then (if c <= 120538 then 6 else 1)\n                           else if c <= 120570 then 6 else 1)\n                    else\n                      if c <= 120687\n                      then\n                        (if c <= 120629\n                         then\n                           (if c <= 120597\n                            then (if c <= 120596 then 6 else 1)\n                            else if c <= 120628 then 6 else 1)\n                         else\n                           if c <= 120655\n                           then (if c <= 120654 then 6 else 1)\n                           else if c <= 120686 then 6 else 1)\n                      else\n                        if c <= 120745\n                        then\n                          (if c <= 120713\n                           then (if c <= 120712 then 6 else 1)\n                           else if c <= 120744 then 6 else 1)\n                        else\n                          if c <= 120771\n                          then (if c <= 120770 then 6 else 1)\n                          else if c <= 120779 then 6 else 1)\n                 else\n                   if c <= 124895\n                   then\n                     (if c <= 123190\n                      then\n                        (if c <= 122654\n                         then 6\n                         else\n                           if c <= 123135\n                           then 1\n                           else if c <= 123180 then 6 else 1)\n                      else\n                        if c <= 123535\n                        then\n                          (if c <= 123213\n                           then (if c <= 123197 then 6 else 1)\n                           else if c <= 123214 then 6 else 1)\n                        else\n                          if c <= 123583\n                          then (if c <= 123565 then 6 else 1)\n                          else if c <= 123627 then 6 else 1)\n                   else\n   
                  if c <= 124927\n                     then\n                       (if c <= 124908\n                        then\n                          (if c <= 124903\n                           then (if c <= 124902 then 6 else 1)\n                           else if c <= 124907 then 6 else 1)\n                        else\n                          if c <= 124911\n                          then (if c <= 124910 then 6 else 1)\n                          else if c <= 124926 then 6 else 1)\n                     else\n                       if c <= 125258\n                       then\n                         (if c <= 125183\n                          then (if c <= 125124 then 6 else 1)\n                          else if c <= 125251 then 6 else 1)\n                       else\n                         if c <= 126463\n                         then (if c <= 125259 then 6 else 1)\n                         else if c <= 126467 then 6 else 1)\n              else\n                if c <= 126552\n                then\n                  (if c <= 126529\n                   then\n                     (if c <= 126504\n                      then\n                        (if c <= 126499\n                         then\n                           (if c <= 126496\n                            then (if c <= 126495 then 6 else 1)\n                            else if c <= 126498 then 6 else 1)\n                         else\n                           if c <= 126502\n                           then (if c <= 126500 then 6 else 1)\n                           else if c <= 126503 then 6 else 1)\n                      else\n                        if c <= 126520\n                        then\n                          (if c <= 126515\n                           then (if c <= 126514 then 6 else 1)\n                           else if c <= 126519 then 6 else 1)\n                        else\n                          if c <= 126522\n                          then (if c <= 126521 then 6 else 
1)\n                          else if c <= 126523 then 6 else 1)\n                   else\n                     if c <= 126540\n                     then\n                       (if c <= 126536\n                        then\n                          (if c <= 126534\n                           then (if c <= 126530 then 6 else 1)\n                           else if c <= 126535 then 6 else 1)\n                        else\n                          if c <= 126538\n                          then (if c <= 126537 then 6 else 1)\n                          else if c <= 126539 then 6 else 1)\n                     else\n                       if c <= 126547\n                       then\n                         (if c <= 126544\n                          then (if c <= 126543 then 6 else 1)\n                          else if c <= 126546 then 6 else 1)\n                       else\n                         if c <= 126550\n                         then (if c <= 126548 then 6 else 1)\n                         else if c <= 126551 then 6 else 1)\n                else\n                  if c <= 126579\n                  then\n                    (if c <= 126560\n                     then\n                       (if c <= 126556\n                        then\n                          (if c <= 126554\n                           then (if c <= 126553 then 6 else 1)\n                           else if c <= 126555 then 6 else 1)\n                        else\n                          if c <= 126558\n                          then (if c <= 126557 then 6 else 1)\n                          else if c <= 126559 then 6 else 1)\n                     else\n                       if c <= 126566\n                       then\n                         (if c <= 126563\n                          then (if c <= 126562 then 6 else 1)\n                          else if c <= 126564 then 6 else 1)\n                       else\n                         if c <= 126571\n                         then (if c <= 
126570 then 6 else 1)\n                         else if c <= 126578 then 6 else 1)\n                  else\n                    if c <= 126602\n                    then\n                      (if c <= 126589\n                       then\n                         (if c <= 126584\n                          then (if c <= 126583 then 6 else 1)\n                          else if c <= 126588 then 6 else 1)\n                       else\n                         if c <= 126591\n                         then (if c <= 126590 then 6 else 1)\n                         else if c <= 126601 then 6 else 1)\n                    else\n                      if c <= 126628\n                      then\n                        (if c <= 126624\n                         then (if c <= 126619 then 6 else 1)\n                         else if c <= 126627 then 6 else 1)\n                      else\n                        if c <= 126634\n                        then (if c <= 126633 then 6 else 1)\n                        else if c <= 126651 then 6 else 1)\n           else\n             if c <= 183983\n             then\n               (if c <= 177983\n                then\n                  (if c <= 173823\n                   then (if c <= 173791 then 6 else 1)\n                   else if c <= 177976 then 6 else 1)\n                else\n                  if c <= 178207\n                  then (if c <= 178205 then 6 else 1)\n                  else if c <= 183969 then 6 else 1)\n             else if c <= 191456 then 6 else 1)\n    else (-1)\n[@@@warning \"-39\"]\nopen Token\nopen Lex_env\nmodule Sedlexing = Flow_sedlexing\nlet lexeme = Sedlexing.Utf8.lexeme\nlet lexeme_to_buffer = Sedlexing.Utf8.lexeme_to_buffer\nlet lexeme_to_buffer2 = Sedlexing.Utf8.lexeme_to_buffer2\nlet sub_lexeme = Sedlexing.Utf8.sub_lexeme\nlet is_whitespace =\n  function\n  | 0x0009 | 0x000B | 0x000C | 0x0020 | 0x00A0 | 0xfeff | 0x1680 | 0x2000\n    | 0x2001 | 0x2002 | 0x2003 | 0x2004 | 0x2005 | 0x2006 | 0x2007 | 0x2008\n 
   | 0x2009 | 0x200a | 0x202f | 0x205f | 0x3000 -> true\n  | _ -> false\nlet rec loop_id_continues lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_1 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 1\n         | 1 -> 2\n         | 2 -> 0\n         | 3 -> __sedlex_state_4 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 2;\n         (match __sedlex_partition_2 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_5 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_5 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_3 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_6 lexbuf\n         | 1 -> __sedlex_state_10 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_7 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_7 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_8 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_8 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_10 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_11 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_11 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_5 (Sedlexing.__private__next_int 
lexbuf)\n         with\n         | 0 -> __sedlex_state_11 lexbuf\n         | 1 -> 0\n         | _ -> Sedlexing.backtrack lexbuf) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 -> loop_id_continues lexbuf\n   | 1 -> true\n   | 2 ->\n       let s = Sedlexing.current_code_point lexbuf in\n       if Js_id.is_valid_unicode_id s\n       then loop_id_continues lexbuf\n       else (Sedlexing.backoff lexbuf 1; false)\n   | _ -> assert false)\nlet rec loop_jsx_id_continues lexbuf =\n  (let rec __sedlex_state_0 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_6 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 1\n          | 1 -> 2\n          | 2 -> 0\n          | 3 -> __sedlex_state_4 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_4 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 2;\n          (match __sedlex_partition_2 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_5 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_5 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_3 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_6 lexbuf\n          | 1 -> __sedlex_state_10 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_6 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_7 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_7 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_8 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_8 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n  
        with\n          | 0 -> 0\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_10 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_11 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_11 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_5 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_11 lexbuf\n          | 1 -> 0\n          | _ -> Sedlexing.backtrack lexbuf) in\n   Sedlexing.start lexbuf;\n   (match __sedlex_state_0 lexbuf with\n    | 0 -> loop_jsx_id_continues lexbuf\n    | 1 -> ()\n    | 2 ->\n        let s = Sedlexing.current_code_point lexbuf in\n        if Js_id.is_valid_unicode_id s\n        then loop_jsx_id_continues lexbuf\n        else Sedlexing.backoff lexbuf 1\n    | _ -> assert false) : unit)\nlet pos_at_offset env offset =\n  {\n    Loc.line = (Lex_env.line env);\n    column = (offset - (Lex_env.bol_offset env))\n  }\nlet loc_of_offsets env start_offset end_offset =\n  {\n    Loc.source = (Lex_env.source env);\n    start = (pos_at_offset env start_offset);\n    _end = (pos_at_offset env end_offset)\n  }\nlet start_pos_of_lexbuf env (lexbuf : Sedlexing.lexbuf) =\n  let start_offset = Sedlexing.lexeme_start lexbuf in\n  pos_at_offset env start_offset\nlet end_pos_of_lexbuf env (lexbuf : Sedlexing.lexbuf) =\n  let end_offset = Sedlexing.lexeme_end lexbuf in\n  pos_at_offset env end_offset\nlet loc_of_lexbuf env (lexbuf : Sedlexing.lexbuf) =\n  let start_offset = Sedlexing.lexeme_start lexbuf in\n  let end_offset = Sedlexing.lexeme_end lexbuf in\n  loc_of_offsets env start_offset end_offset\nlet loc_of_token env lex_token =\n  match lex_token with\n  | T_IDENTIFIER { loc;_} | T_JSX_IDENTIFIER { loc;_} | T_STRING\n    (loc, _, _, _) -> loc\n  | T_JSX_TEXT (loc, _, _) -> loc\n  | T_TEMPLATE_PART (loc, _, _) -> loc\n  | T_REGEXP (loc, _, 
_) -> loc\n  | _ -> loc_of_lexbuf env env.lex_lb\nlet lex_error (env : Lex_env.t) loc err =\n  (let lex_errors_acc = (loc, err) :: ((env.lex_state).lex_errors_acc) in\n   { env with lex_state = { lex_errors_acc } } : Lex_env.t)\nlet unexpected_error (env : Lex_env.t) (loc : Loc.t) value =\n  lex_error env loc (Parse_error.Unexpected (quote_token_value value))\nlet unexpected_error_w_suggest (env : Lex_env.t) (loc : Loc.t) value suggest\n  =\n  lex_error env loc\n    (Parse_error.UnexpectedTokenWithSuggestion (value, suggest))\nlet illegal (env : Lex_env.t) (loc : Loc.t) =\n  lex_error env loc (Parse_error.Unexpected \"token ILLEGAL\")\nlet new_line env lexbuf =\n  let offset = Sedlexing.lexeme_end lexbuf in\n  let lex_bol = { line = ((Lex_env.line env) + 1); offset } in\n  { env with Lex_env.lex_bol = lex_bol }\nlet bigint_strip_n raw =\n  let size = String.length raw in\n  let str =\n    if (size != 0) && ((raw.[size - 1]) == 'n')\n    then String.sub raw 0 (size - 1)\n    else raw in\n  str\nlet mk_comment (env : Lex_env.t) (start : Loc.position) (_end : Loc.position)\n  (buf : Buffer.t) (multiline : bool) =\n  (let open Flow_ast.Comment in\n     let loc = { Loc.source = (Lex_env.source env); start; _end } in\n     let text = Buffer.contents buf in\n     let kind = if multiline then Block else Line in\n     let on_newline =\n       let open Loc in\n         ((env.lex_last_loc)._end).Loc.line < (loc.start).Loc.line in\n     let c = { kind; text; on_newline } in (loc, c) : Loc.t\n                                                        Flow_ast.Comment.t)\nlet split_number_type =\n  let rec strip_whitespace i len lexeme =\n    if is_whitespace (lexeme.(i))\n    then ((strip_whitespace)[@tailcall ]) (i + 1) len lexeme\n    else Sedlexing.string_of_utf8 (Array.sub lexeme i (len - i)) in\n  fun (lexeme : int array) ->\n    if (lexeme.(0)) = (Char.code '-')\n    then\n      let num = strip_whitespace 1 (Array.length lexeme) lexeme in\n      let raw = 
Sedlexing.string_of_utf8 lexeme in (true, num, raw)\n    else (let raw = Sedlexing.string_of_utf8 lexeme in (false, raw, raw))\nlet mk_num_singleton number_type (lexeme : int array) =\n  let (neg, num, raw) = split_number_type lexeme in\n  let value =\n    match number_type with\n    | LEGACY_OCTAL ->\n        (try Int64.to_float (Int64.of_string (\"0o\" ^ num))\n         with | Failure _ -> failwith (\"Invalid legacy octal \" ^ num))\n    | BINARY | OCTAL ->\n        (try Int64.to_float (Int64.of_string num)\n         with | Failure _ -> failwith (\"Invalid binary/octal \" ^ num))\n    | LEGACY_NON_OCTAL | NORMAL ->\n        (try float_of_string num\n         with | Failure _ -> failwith (\"Invalid number \" ^ num)) in\n  let value = if neg then -. value else value in\n  T_NUMBER_SINGLETON_TYPE { kind = number_type; value; raw }\nlet mk_bignum_singleton kind lexeme =\n  let (neg, num, raw) = split_number_type lexeme in\n  let postraw = bigint_strip_n num in\n  let value =\n    (Int64.of_string_opt postraw) |>\n      (Option.map (fun value -> if neg then Int64.neg value else value)) in\n  T_BIGINT_SINGLETON_TYPE { kind; value; raw }\nlet assert_valid_unicode_in_identifier env loc code =\n  if Js_id.is_valid_unicode_id code\n  then env\n  else lex_error env loc Parse_error.IllegalUnicodeEscape\nlet decode_identifier =\n  let loc_and_sub_lexeme env offset lexbuf trim_start trim_end =\n    let start_offset = offset + (Sedlexing.lexeme_start lexbuf) in\n    let end_offset = offset + (Sedlexing.lexeme_end lexbuf) in\n    let loc = loc_of_offsets env start_offset end_offset in\n    (loc,\n      (sub_lexeme lexbuf trim_start\n         (((Sedlexing.lexeme_length lexbuf) - trim_start) - trim_end))) in\n  let rec id_char env offset buf lexbuf =\n    let rec __sedlex_state_0 =\n      function\n      | lexbuf ->\n          (match __sedlex_partition_7 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 2\n           | 1 -> __sedlex_state_2 lexbuf\n        
   | 2 -> __sedlex_state_3 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf)\n    and __sedlex_state_2 =\n      function\n      | lexbuf ->\n          (Sedlexing.mark lexbuf 3;\n           (match __sedlex_partition_8 (Sedlexing.__private__next_int lexbuf)\n            with\n            | 0 -> __sedlex_state_2 lexbuf\n            | _ -> Sedlexing.backtrack lexbuf))\n    and __sedlex_state_3 =\n      function\n      | lexbuf ->\n          (Sedlexing.mark lexbuf 3;\n           (match __sedlex_partition_2 (Sedlexing.__private__next_int lexbuf)\n            with\n            | 0 -> __sedlex_state_4 lexbuf\n            | _ -> Sedlexing.backtrack lexbuf))\n    and __sedlex_state_4 =\n      function\n      | lexbuf ->\n          (match __sedlex_partition_3 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_5 lexbuf\n           | 1 -> __sedlex_state_9 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf)\n    and __sedlex_state_5 =\n      function\n      | lexbuf ->\n          (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_6 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf)\n    and __sedlex_state_6 =\n      function\n      | lexbuf ->\n          (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_7 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf)\n    and __sedlex_state_7 =\n      function\n      | lexbuf ->\n          (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 0\n           | _ -> Sedlexing.backtrack lexbuf)\n    and __sedlex_state_9 =\n      function\n      | lexbuf ->\n          (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_10 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf)\n    and __sedlex_state_10 =\n      function\n      | lexbuf ->\n          
(match __sedlex_partition_5 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_10 lexbuf\n           | 1 -> 1\n           | _ -> Sedlexing.backtrack lexbuf) in\n    Sedlexing.start lexbuf;\n    (match __sedlex_state_0 lexbuf with\n     | 0 ->\n         let (loc, hex) = loc_and_sub_lexeme env offset lexbuf 2 0 in\n         let code = int_of_string (\"0x\" ^ hex) in\n         let env =\n           if not (Uchar.is_valid code)\n           then lex_error env loc Parse_error.IllegalUnicodeEscape\n           else assert_valid_unicode_in_identifier env loc code in\n         (Wtf8.add_wtf_8 buf code; id_char env offset buf lexbuf)\n     | 1 ->\n         let (loc, hex) = loc_and_sub_lexeme env offset lexbuf 3 1 in\n         let code = int_of_string (\"0x\" ^ hex) in\n         let env = assert_valid_unicode_in_identifier env loc code in\n         (Wtf8.add_wtf_8 buf code; id_char env offset buf lexbuf)\n     | 2 -> (env, (Buffer.contents buf))\n     | 3 -> (lexeme_to_buffer lexbuf buf; id_char env offset buf lexbuf)\n     | _ -> failwith \"unreachable id_char\") in\n  fun env ->\n    fun raw ->\n      let offset = Sedlexing.lexeme_start env.lex_lb in\n      let lexbuf = Sedlexing.from_int_array raw in\n      let buf = Buffer.create (Array.length raw) in\n      id_char env offset buf lexbuf\nlet recover env lexbuf ~f  =\n  let env = illegal env (loc_of_lexbuf env lexbuf) in\n  Sedlexing.rollback lexbuf; f env lexbuf\ntype jsx_text_mode =\n  | JSX_SINGLE_QUOTED_TEXT \n  | JSX_DOUBLE_QUOTED_TEXT \n  | JSX_CHILD_TEXT \ntype result =\n  | Token of Lex_env.t * Token.t \n  | Comment of Lex_env.t * Loc.t Flow_ast.Comment.t \n  | Continue of Lex_env.t \nlet rec comment env buf lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_9 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_1 lexbuf\n         | 1 -> 0\n         | 2 -> __sedlex_state_3 lexbuf\n         | 
3 -> __sedlex_state_5 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_1 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 3;\n         (match __sedlex_partition_10 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_1 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_3 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 0;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 0\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_5 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 3;\n         (match __sedlex_partition_12 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_6 lexbuf\n          | 1 -> 1\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_13 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 2\n         | _ -> Sedlexing.backtrack lexbuf) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 ->\n       let env = new_line env lexbuf in\n       (lexeme_to_buffer lexbuf buf; comment env buf lexbuf)\n   | 1 ->\n       let env =\n         if is_in_comment_syntax env\n         then\n           let loc = loc_of_lexbuf env lexbuf in\n           unexpected_error_w_suggest env loc \"*/\" \"*-/\"\n         else env in\n       (env, (end_pos_of_lexbuf env lexbuf))\n   | 2 ->\n       if is_in_comment_syntax env\n       then (env, (end_pos_of_lexbuf env lexbuf))\n       else (Buffer.add_string buf \"*-/\"; comment env buf lexbuf)\n   | 3 -> (lexeme_to_buffer lexbuf buf; comment env buf lexbuf)\n   | _ ->\n       let env = illegal env (loc_of_lexbuf env lexbuf) in\n       (env, (end_pos_of_lexbuf env lexbuf)))\nlet rec line_comment env buf lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | 
lexbuf ->\n        (match __sedlex_partition_14 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | 1 -> __sedlex_state_2 lexbuf\n         | 2 -> 1\n         | 3 -> __sedlex_state_4 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_2 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 2;\n         (match __sedlex_partition_15 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_2 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 1;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 1\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 -> (env, (end_pos_of_lexbuf env lexbuf))\n   | 1 ->\n       let { Loc.line = line; column } = end_pos_of_lexbuf env lexbuf in\n       let env = new_line env lexbuf in\n       let len = Sedlexing.lexeme_length lexbuf in\n       let end_pos = { Loc.line = line; column = (column - len) } in\n       (env, end_pos)\n   | 2 -> (lexeme_to_buffer lexbuf buf; line_comment env buf lexbuf)\n   | _ -> failwith \"unreachable line_comment\")\nlet string_escape env lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_16 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | 1 -> 16\n         | 2 -> 15\n         | 3 -> __sedlex_state_4 lexbuf\n         | 4 -> __sedlex_state_6 lexbuf\n         | 5 -> __sedlex_state_9 lexbuf\n         | 6 -> 0\n         | 7 -> 5\n         | 8 -> 6\n         | 9 -> 7\n         | 10 -> 8\n         | 11 -> 9\n         | 12 -> __sedlex_state_16 lexbuf\n         | 13 -> 10\n         | 14 -> __sedlex_state_25 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n   
     (Sedlexing.mark lexbuf 15;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 15\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 4;\n         (match __sedlex_partition_17 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_7 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_7 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 3;\n         (match __sedlex_partition_17 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 2\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_9 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 11;\n         (match __sedlex_partition_17 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_7 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_16 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 14;\n         (match __sedlex_partition_3 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_17 lexbuf\n          | 1 -> __sedlex_state_21 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_17 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_18 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_18 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_19 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_19 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 12\n         | _ -> Sedlexing.backtrack 
lexbuf)\n  and __sedlex_state_21 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_22 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_22 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_5 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_22 lexbuf\n         | 1 -> 13\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_25 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 14;\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_26 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_26 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 1\n         | _ -> Sedlexing.backtrack lexbuf) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 ->\n       let str = lexeme lexbuf in\n       let codes = Sedlexing.lexeme lexbuf in (env, str, codes, false)\n   | 1 ->\n       let str = lexeme lexbuf in\n       let code = int_of_string (\"0\" ^ str) in (env, str, [|code|], false)\n   | 2 ->\n       let str = lexeme lexbuf in\n       let code = int_of_string (\"0o\" ^ str) in\n       if code < 256\n       then (env, str, [|code|], true)\n       else\n         (let remainder = code land 7 in\n          let code = code lsr 3 in\n          (env, str, [|code;((Char.code '0') + remainder)|], true))\n   | 3 ->\n       let str = lexeme lexbuf in\n       let code = int_of_string (\"0o\" ^ str) in (env, str, [|code|], true)\n   | 4 -> (env, \"0\", [|0x0|], false)\n   | 5 -> (env, \"b\", [|0x8|], false)\n   | 6 -> (env, \"f\", [|0xC|], false)\n   | 7 -> (env, \"n\", [|0xA|], false)\n   | 8 -> (env, \"r\", [|0xD|], false)\n   | 9 -> (env, \"t\", [|0x9|], false)\n   | 10 -> 
(env, \"v\", [|0xB|], false)\n   | 11 ->\n       let str = lexeme lexbuf in\n       let code = int_of_string (\"0o\" ^ str) in (env, str, [|code|], true)\n   | 12 ->\n       let str = lexeme lexbuf in\n       let hex = String.sub str 1 ((String.length str) - 1) in\n       let code = int_of_string (\"0x\" ^ hex) in (env, str, [|code|], false)\n   | 13 ->\n       let str = lexeme lexbuf in\n       let hex = String.sub str 2 ((String.length str) - 3) in\n       let code = int_of_string (\"0x\" ^ hex) in\n       let env =\n         if code > 0x10FFFF\n         then illegal env (loc_of_lexbuf env lexbuf)\n         else env in\n       (env, str, [|code|], false)\n   | 14 ->\n       let str = lexeme lexbuf in\n       let codes = Sedlexing.lexeme lexbuf in\n       let env = illegal env (loc_of_lexbuf env lexbuf) in\n       (env, str, codes, false)\n   | 15 ->\n       let str = lexeme lexbuf in\n       let env = new_line env lexbuf in (env, str, [||], false)\n   | 16 ->\n       let str = lexeme lexbuf in\n       let codes = Sedlexing.lexeme lexbuf in (env, str, codes, false)\n   | _ -> failwith \"unreachable string_escape\")\nlet rec string_quote env q buf raw octal lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_18 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 3\n         | 1 -> __sedlex_state_2 lexbuf\n         | 2 -> 2\n         | 3 -> 0\n         | 4 -> 1\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_2 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 4;\n         (match __sedlex_partition_19 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_2 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 ->\n       let q' = lexeme lexbuf in\n       (Buffer.add_string raw q';\n        if q = q'\n        then (env, (end_pos_of_lexbuf env lexbuf), 
octal)\n        else\n          (Buffer.add_string buf q'; string_quote env q buf raw octal lexbuf))\n   | 1 ->\n       (Buffer.add_string raw \"\\\\\";\n        (let (env, str, codes, octal') = string_escape env lexbuf in\n         let octal = octal' || octal in\n         Buffer.add_string raw str;\n         Array.iter (Wtf8.add_wtf_8 buf) codes;\n         string_quote env q buf raw octal lexbuf))\n   | 2 ->\n       let x = lexeme lexbuf in\n       (Buffer.add_string raw x;\n        (let env = illegal env (loc_of_lexbuf env lexbuf) in\n         let env = new_line env lexbuf in\n         Buffer.add_string buf x;\n         (env, (end_pos_of_lexbuf env lexbuf), octal)))\n   | 3 ->\n       let x = lexeme lexbuf in\n       (Buffer.add_string raw x;\n        (let env = illegal env (loc_of_lexbuf env lexbuf) in\n         Buffer.add_string buf x;\n         (env, (end_pos_of_lexbuf env lexbuf), octal)))\n   | 4 ->\n       (lexeme_to_buffer2 lexbuf raw buf;\n        string_quote env q buf raw octal lexbuf)\n   | _ -> failwith \"unreachable string_quote\")\nlet rec template_part env cooked raw literal lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_20 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | 1 -> __sedlex_state_2 lexbuf\n         | 2 -> 5\n         | 3 -> __sedlex_state_4 lexbuf\n         | 4 -> __sedlex_state_6 lexbuf\n         | 5 -> 3\n         | 6 -> 1\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_2 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 6;\n         (match __sedlex_partition_21 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_2 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 5;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 4\n   
       | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 6;\n         (match __sedlex_partition_22 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 2\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 -> let env = illegal env (loc_of_lexbuf env lexbuf) in (env, true)\n   | 1 -> (Buffer.add_char literal '`'; (env, true))\n   | 2 -> (Buffer.add_string literal \"${\"; (env, false))\n   | 3 ->\n       (Buffer.add_char raw '\\\\';\n        Buffer.add_char literal '\\\\';\n        (let (env, str, codes, _) = string_escape env lexbuf in\n         Buffer.add_string raw str;\n         Buffer.add_string literal str;\n         Array.iter (Wtf8.add_wtf_8 cooked) codes;\n         template_part env cooked raw literal lexbuf))\n   | 4 ->\n       (Buffer.add_string raw \"\\r\\n\";\n        Buffer.add_string literal \"\\r\\n\";\n        Buffer.add_string cooked \"\\n\";\n        (let env = new_line env lexbuf in\n         template_part env cooked raw literal lexbuf))\n   | 5 ->\n       let lf = lexeme lexbuf in\n       (Buffer.add_string raw lf;\n        Buffer.add_string literal lf;\n        Buffer.add_char cooked '\\n';\n        (let env = new_line env lexbuf in\n         template_part env cooked raw literal lexbuf))\n   | 6 ->\n       let c = lexeme lexbuf in\n       (Buffer.add_string raw c;\n        Buffer.add_string literal c;\n        Buffer.add_string cooked c;\n        template_part env cooked raw literal lexbuf)\n   | _ -> failwith \"unreachable template_part\")\nlet token (env : Lex_env.t) lexbuf =\n  (let rec __sedlex_state_0 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_50 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 98\n          | 1 -> 99\n          | 2 -> __sedlex_state_3 lexbuf\n          | 3 -> 0\n          | 4 -> __sedlex_state_6 lexbuf\n     
     | 5 -> __sedlex_state_8 lexbuf\n          | 6 -> 7\n          | 7 -> __sedlex_state_12 lexbuf\n          | 8 -> 97\n          | 9 -> __sedlex_state_15 lexbuf\n          | 10 -> __sedlex_state_17 lexbuf\n          | 11 -> 38\n          | 12 -> 39\n          | 13 -> __sedlex_state_23 lexbuf\n          | 14 -> __sedlex_state_28 lexbuf\n          | 15 -> 45\n          | 16 -> __sedlex_state_32 lexbuf\n          | 17 -> __sedlex_state_35 lexbuf\n          | 18 -> __sedlex_state_58 lexbuf\n          | 19 -> __sedlex_state_76 lexbuf\n          | 20 -> __sedlex_state_129 lexbuf\n          | 21 -> 46\n          | 22 -> 44\n          | 23 -> __sedlex_state_135 lexbuf\n          | 24 -> __sedlex_state_139 lexbuf\n          | 25 -> __sedlex_state_143 lexbuf\n          | 26 -> __sedlex_state_149 lexbuf\n          | 27 -> __sedlex_state_154 lexbuf\n          | 28 -> 40\n          | 29 -> __sedlex_state_177 lexbuf\n          | 30 -> 41\n          | 31 -> __sedlex_state_186 lexbuf\n          | 32 -> 8\n          | 33 -> 36\n          | 34 -> __sedlex_state_190 lexbuf\n          | 35 -> 37\n          | 36 -> 89\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_3 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 1;\n          (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_4 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_4 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 1;\n          (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_4 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_6 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 0;\n          (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 0\n           | _ -> Sedlexing.backtrack lexbuf))\n   and 
__sedlex_state_8 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 88;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_9 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_9 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 58;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 54\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_12 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 95;\n          (match __sedlex_partition_53 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 6\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_15 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 84;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 71\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_17 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 86;\n          (match __sedlex_partition_54 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_18 lexbuf\n           | 1 -> 72\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_18 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 51;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 76\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_23 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 82;\n          (match __sedlex_partition_55 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_24 lexbuf\n           | 1 -> 4\n           | 2 -> 69\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_24 =\n    
 function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 83;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 70\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_28 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 80;\n          (match __sedlex_partition_56 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 59\n           | 1 -> 67\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_32 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 81;\n          (match __sedlex_partition_57 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 60\n           | 1 -> 68\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_35 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 43;\n          (match __sedlex_partition_47 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_36 lexbuf\n           | 1 -> __sedlex_state_38 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_36 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_58 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 42\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_38 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_59 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_38 lexbuf\n           | 2 -> __sedlex_state_40 lexbuf\n           | 3 -> __sedlex_state_54 lexbuf\n           | 4 -> __sedlex_state_56 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_39 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 33;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n     
      with\n           | 0 -> __sedlex_state_39 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_40 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 33;\n          (match __sedlex_partition_61 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_41 lexbuf\n           | 2 -> __sedlex_state_49 lexbuf\n           | 3 -> __sedlex_state_53 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_41 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_40 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_42 lexbuf\n          | 1 -> __sedlex_state_46 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_42 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 28;\n          (match __sedlex_partition_62 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_43 lexbuf\n           | 1 -> __sedlex_state_42 lexbuf\n           | 2 -> __sedlex_state_44 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_43 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 27;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_43 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_44 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 26;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_45 lexbuf\n           | 1 -> __sedlex_state_43 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_45 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 25;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n       
    | 0 -> __sedlex_state_45 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_46 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 28;\n          (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_43 lexbuf\n           | 1 -> __sedlex_state_46 lexbuf\n           | 2 -> __sedlex_state_47 lexbuf\n           | 3 -> __sedlex_state_44 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_47 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_48 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_48 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 28;\n          (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_43 lexbuf\n           | 1 -> __sedlex_state_48 lexbuf\n           | 2 -> __sedlex_state_47 lexbuf\n           | 3 -> __sedlex_state_44 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_49 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 28;\n          (match __sedlex_partition_62 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_50 lexbuf\n           | 1 -> __sedlex_state_49 lexbuf\n           | 2 -> __sedlex_state_51 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_50 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 27;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_50 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_51 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 26;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int 
lexbuf)\n           with\n           | 0 -> __sedlex_state_52 lexbuf\n           | 1 -> __sedlex_state_50 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_52 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 25;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_52 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_53 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 28;\n          (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_50 lexbuf\n           | 1 -> __sedlex_state_53 lexbuf\n           | 2 -> __sedlex_state_47 lexbuf\n           | 3 -> __sedlex_state_51 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_54 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_55 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_55 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_59 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_55 lexbuf\n           | 2 -> __sedlex_state_40 lexbuf\n           | 3 -> __sedlex_state_54 lexbuf\n           | 4 -> __sedlex_state_56 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_56 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 31;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_57 lexbuf\n           | 1 -> __sedlex_state_39 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_57 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 
29;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_57 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_58 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 93;\n          (match __sedlex_partition_55 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_59 lexbuf\n           | 1 -> 5\n           | 2 -> 92\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_59 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 2;\n          (match __sedlex_partition_65 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_60 lexbuf\n           | 1 -> __sedlex_state_61 lexbuf\n           | 2 -> __sedlex_state_63 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_60 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_65 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_60 lexbuf\n          | 1 -> __sedlex_state_61 lexbuf\n          | 2 -> __sedlex_state_63 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_61 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 3;\n          (match __sedlex_partition_66 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 3\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_63 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_67 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_64 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_64 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_68 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_65 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_65 
=\n     function\n     | lexbuf ->\n         (match __sedlex_partition_69 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_66 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_66 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_70 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_67 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_67 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_71 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_68 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_68 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_72 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_69 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_69 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_73 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_70 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_70 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_67 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_71 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_71 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_2 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_72 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_72 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_74 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_73 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_73 =\n     function\n     | lexbuf ->\n         
(match __sedlex_partition_75 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 3\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_76 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_76 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_77 lexbuf\n           | 2 -> __sedlex_state_81 lexbuf\n           | 3 -> __sedlex_state_93 lexbuf\n           | 4 -> __sedlex_state_97 lexbuf\n           | 5 -> __sedlex_state_40 lexbuf\n           | 6 -> __sedlex_state_107 lexbuf\n           | 7 -> __sedlex_state_117 lexbuf\n           | 8 -> __sedlex_state_127 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_77 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_77 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_78 lexbuf\n           | 2 -> __sedlex_state_40 lexbuf\n           | 3 -> __sedlex_state_56 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_78 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_59 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_78 lexbuf\n           | 2 -> __sedlex_state_40 lexbuf\n           | 3 -> __sedlex_state_79 lexbuf\n           | 4 -> __sedlex_state_56 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_79 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_80 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_80 =\n     function\n     | lexbuf ->\n         
(Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_59 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_80 lexbuf\n           | 2 -> __sedlex_state_40 lexbuf\n           | 3 -> __sedlex_state_79 lexbuf\n           | 4 -> __sedlex_state_56 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_81 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 20;\n          (match __sedlex_partition_78 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_82 lexbuf\n           | 1 -> __sedlex_state_83 lexbuf\n           | 2 -> __sedlex_state_81 lexbuf\n           | 3 -> __sedlex_state_87 lexbuf\n           | 4 -> __sedlex_state_91 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_82 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 19;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_82 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_83 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_62 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_84 lexbuf\n           | 2 -> __sedlex_state_56 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_84 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_84 lexbuf\n           | 2 -> __sedlex_state_85 lexbuf\n           | 3 -> __sedlex_state_56 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_85 =\n     function\n     | lexbuf ->\n       
  (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_86 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_86 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_86 lexbuf\n           | 2 -> __sedlex_state_85 lexbuf\n           | 3 -> __sedlex_state_56 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_87 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 18;\n          (match __sedlex_partition_79 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_88 lexbuf\n           | 1 -> __sedlex_state_83 lexbuf\n           | 2 -> __sedlex_state_87 lexbuf\n           | 3 -> __sedlex_state_89 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_88 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 17;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_88 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_89 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 17;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_90 lexbuf\n           | 1 -> __sedlex_state_88 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_90 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 17;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_90 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_91 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark 
lexbuf 19;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_92 lexbuf\n           | 1 -> __sedlex_state_82 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_92 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 19;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_92 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_93 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 18;\n          (match __sedlex_partition_79 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_94 lexbuf\n           | 1 -> __sedlex_state_83 lexbuf\n           | 2 -> __sedlex_state_93 lexbuf\n           | 3 -> __sedlex_state_95 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_94 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 17;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_94 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_95 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 17;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_96 lexbuf\n           | 1 -> __sedlex_state_94 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_96 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 17;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_96 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_97 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 33;\n          (match __sedlex_partition_80 
(Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_98 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_98 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 12;\n          (match __sedlex_partition_81 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_99 lexbuf\n           | 1 -> __sedlex_state_98 lexbuf\n           | 2 -> __sedlex_state_100 lexbuf\n           | 3 -> __sedlex_state_105 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_99 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 11;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_99 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_100 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_26 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_101 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_101 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 12;\n          (match __sedlex_partition_81 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_102 lexbuf\n           | 1 -> __sedlex_state_101 lexbuf\n           | 2 -> __sedlex_state_100 lexbuf\n           | 3 -> __sedlex_state_103 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_102 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 11;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_102 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_103 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 10;\n          (match __sedlex_partition_63 
(Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_104 lexbuf\n           | 1 -> __sedlex_state_102 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_104 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 9;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_104 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_105 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 10;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_106 lexbuf\n           | 1 -> __sedlex_state_99 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_106 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 9;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_106 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_107 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 33;\n          (match __sedlex_partition_82 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_108 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_108 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 16;\n          (match __sedlex_partition_83 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_109 lexbuf\n           | 1 -> __sedlex_state_108 lexbuf\n           | 2 -> __sedlex_state_110 lexbuf\n           | 3 -> __sedlex_state_115 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_109 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 15;\n          (match __sedlex_partition_60 
(Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_109 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_110 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_17 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_111 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_111 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 16;\n          (match __sedlex_partition_83 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_112 lexbuf\n           | 1 -> __sedlex_state_111 lexbuf\n           | 2 -> __sedlex_state_110 lexbuf\n           | 3 -> __sedlex_state_113 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_112 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 15;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_112 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_113 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 14;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_114 lexbuf\n           | 1 -> __sedlex_state_112 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_114 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 13;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_114 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_115 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 14;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_116 lexbuf\n           | 1 -> 
__sedlex_state_109 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_116 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 13;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_116 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_117 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 33;\n          (match __sedlex_partition_84 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_118 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_118 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 24;\n          (match __sedlex_partition_85 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_119 lexbuf\n           | 1 -> __sedlex_state_118 lexbuf\n           | 2 -> __sedlex_state_120 lexbuf\n           | 3 -> __sedlex_state_125 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_119 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 23;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_119 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_120 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_121 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_121 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 24;\n          (match __sedlex_partition_85 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_122 lexbuf\n           | 1 -> __sedlex_state_121 lexbuf\n           | 2 -> __sedlex_state_120 lexbuf\n           | 3 -> 
__sedlex_state_123 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_122 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 23;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_122 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_123 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 22;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_124 lexbuf\n           | 1 -> __sedlex_state_122 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_124 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 21;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_124 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_125 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 22;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_126 lexbuf\n           | 1 -> __sedlex_state_119 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_126 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 21;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_126 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_127 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 32;\n          (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_128 lexbuf\n           | 1 -> __sedlex_state_39 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_128 =\n     function\n     | lexbuf ->\n 
        (Sedlexing.mark lexbuf 30;\n          (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_128 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_129 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_86 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_77 lexbuf\n           | 2 -> __sedlex_state_130 lexbuf\n           | 3 -> __sedlex_state_40 lexbuf\n           | 4 -> __sedlex_state_131 lexbuf\n           | 5 -> __sedlex_state_127 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_130 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_86 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_77 lexbuf\n           | 2 -> __sedlex_state_130 lexbuf\n           | 3 -> __sedlex_state_40 lexbuf\n           | 4 -> __sedlex_state_131 lexbuf\n           | 5 -> __sedlex_state_127 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_131 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_132 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_132 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 34;\n          (match __sedlex_partition_87 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_39 lexbuf\n           | 1 -> __sedlex_state_83 lexbuf\n           | 2 -> __sedlex_state_132 lexbuf\n           | 3 -> __sedlex_state_131 lexbuf\n           | 4 -> __sedlex_state_127 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_135 =\n     
function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 78;\n          (match __sedlex_partition_88 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_136 lexbuf\n           | 1 -> 55\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_136 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 62;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 61\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_139 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 90;\n          (match __sedlex_partition_89 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_140 lexbuf\n           | 1 -> 91\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_140 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 57;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 53\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_143 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 79;\n          (match __sedlex_partition_89 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 56\n           | 1 -> __sedlex_state_145 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_145 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 66;\n          (match __sedlex_partition_89 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 63\n           | 1 -> __sedlex_state_147 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_147 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 65;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 64\n           | _ -> Sedlexing.backtrack 
lexbuf))\n   and __sedlex_state_149 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 50;\n          (match __sedlex_partition_90 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_150 lexbuf\n           | 1 -> __sedlex_state_152 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_150 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 48;\n          (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 47\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_152 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 49;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 75\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_154 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 94;\n          (match __sedlex_partition_91 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_155 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_155 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_92 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_156 lexbuf\n          | 1 -> __sedlex_state_169 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_156 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_93 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_157 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_157 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_94 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_158 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_158 =\n     function\n     | 
lexbuf ->\n         (match __sedlex_partition_72 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_159 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_159 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_73 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_160 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_160 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_95 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_161 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_161 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_96 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_162 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_162 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_75 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_163 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_163 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_97 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_164 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_164 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_98 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_165 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_165 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_96 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_166 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_166 =\n     function\n     | lexbuf ->\n         (match 
__sedlex_partition_68 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_167 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_167 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_97 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 35\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_169 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_96 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_170 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_170 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_75 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_171 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_171 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_97 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_172 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_172 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_98 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_173 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_173 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_96 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_174 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_174 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_68 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_175 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_175 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_97 (Sedlexing.__private__next_int 
lexbuf)\n          with\n          | 0 -> 35\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_177 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 96;\n          (match __sedlex_partition_2 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> __sedlex_state_178 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_178 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_3 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_179 lexbuf\n          | 1 -> __sedlex_state_183 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_179 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_180 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_180 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_181 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_181 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 97\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_183 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_184 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_184 =\n     function\n     | lexbuf ->\n         (match __sedlex_partition_5 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_184 lexbuf\n          | 1 -> 97\n          | _ -> Sedlexing.backtrack lexbuf)\n   and __sedlex_state_186 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 87;\n          
(match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 74\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_190 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 85;\n          (match __sedlex_partition_99 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 73\n           | 1 -> __sedlex_state_192 lexbuf\n           | _ -> Sedlexing.backtrack lexbuf))\n   and __sedlex_state_192 =\n     function\n     | lexbuf ->\n         (Sedlexing.mark lexbuf 52;\n          (match __sedlex_partition_52 (Sedlexing.__private__next_int lexbuf)\n           with\n           | 0 -> 77\n           | _ -> Sedlexing.backtrack lexbuf)) in\n   Sedlexing.start lexbuf;\n   (match __sedlex_state_0 lexbuf with\n    | 0 -> let env = new_line env lexbuf in Continue env\n    | 1 -> Continue env\n    | 2 ->\n        let start_pos = start_pos_of_lexbuf env lexbuf in\n        let buf = Buffer.create 127 in\n        let (env, end_pos) = comment env buf lexbuf in\n        Comment (env, (mk_comment env start_pos end_pos buf true))\n    | 3 ->\n        let pattern = lexeme lexbuf in\n        if not (is_comment_syntax_enabled env)\n        then\n          let start_pos = start_pos_of_lexbuf env lexbuf in\n          let buf = Buffer.create 127 in\n          (Buffer.add_string buf\n             (String.sub pattern 2 ((String.length pattern) - 2));\n           (let (env, end_pos) = comment env buf lexbuf in\n            Comment (env, (mk_comment env start_pos end_pos buf true))))\n        else\n          (let env =\n             if is_in_comment_syntax env\n             then\n               let loc = loc_of_lexbuf env lexbuf in\n               unexpected_error env loc pattern\n             else env in\n           let env = in_comment_syntax true env in\n           let len = Sedlexing.lexeme_length lexbuf in\n           if\n             ((Sedlexing.Utf8.sub_lexeme lexbuf (len - 1) 1) = \":\") &&\n    
           ((Sedlexing.Utf8.sub_lexeme lexbuf (len - 2) 1) <> \":\")\n           then Token (env, T_COLON)\n           else Continue env)\n    | 4 ->\n        if is_in_comment_syntax env\n        then let env = in_comment_syntax false env in Continue env\n        else\n          (Sedlexing.rollback lexbuf;\n           (let __sedlex_state_0 =\n              function\n              | lexbuf ->\n                  (match __sedlex_partition_23\n                           (Sedlexing.__private__next_int lexbuf)\n                   with\n                   | 0 -> 0\n                   | _ -> Sedlexing.backtrack lexbuf) in\n            Sedlexing.start lexbuf;\n            (match __sedlex_state_0 lexbuf with\n             | 0 -> Token (env, T_MULT)\n             | _ -> failwith \"expected *\")))\n    | 5 ->\n        let start_pos = start_pos_of_lexbuf env lexbuf in\n        let buf = Buffer.create 127 in\n        let (env, end_pos) = line_comment env buf lexbuf in\n        Comment (env, (mk_comment env start_pos end_pos buf false))\n    | 6 ->\n        if (Sedlexing.lexeme_start lexbuf) = 0\n        then\n          let (env, _) = line_comment env (Buffer.create 127) lexbuf in\n          Continue env\n        else Token (env, (T_ERROR \"#!\"))\n    | 7 ->\n        let quote = lexeme lexbuf in\n        let start = start_pos_of_lexbuf env lexbuf in\n        let buf = Buffer.create 127 in\n        let raw = Buffer.create 127 in\n        (Buffer.add_string raw quote;\n         (let octal = false in\n          let (env, _end, octal) =\n            string_quote env quote buf raw octal lexbuf in\n          let loc = { Loc.source = (Lex_env.source env); start; _end } in\n          Token\n            (env,\n              (T_STRING\n                 (loc, (Buffer.contents buf), (Buffer.contents raw), octal)))))\n    | 8 ->\n        let cooked = Buffer.create 127 in\n        let raw = Buffer.create 127 in\n        let literal = Buffer.create 127 in\n        (lexeme_to_buffer lexbuf 
literal;\n         (let start = start_pos_of_lexbuf env lexbuf in\n          let (env, is_tail) = template_part env cooked raw literal lexbuf in\n          let _end = end_pos_of_lexbuf env lexbuf in\n          let loc = { Loc.source = (Lex_env.source env); start; _end } in\n          Token\n            (env,\n              (T_TEMPLATE_PART\n                 (loc,\n                   {\n                     cooked = (Buffer.contents cooked);\n                     raw = (Buffer.contents raw);\n                     literal = (Buffer.contents literal)\n                   }, is_tail)))))\n    | 9 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_24\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_25\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_26\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_27\n      
                           (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | 1 -> __sedlex_state_4 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_26\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_27\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | 1 -> __sedlex_state_4 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_BIGINT\n                              { kind = BIG_BINARY; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token bigint\"))\n    | 10 ->\n        Token (env, (T_BIGINT { kind = BIG_BINARY; raw = (lexeme lexbuf) }))\n    | 11 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_24\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n         
                | 0 -> __sedlex_state_1 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_25\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_26\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_28\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_3 lexbuf\n                          | 1 -> __sedlex_state_4 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_26\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_28\n                                  
(Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_5 lexbuf\n                          | 1 -> __sedlex_state_4 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf)) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_NUMBER { kind = BINARY; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token bignumber\"))\n    | 12 -> Token (env, (T_NUMBER { kind = BINARY; raw = (lexeme lexbuf) }))\n    | 13 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_24\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_29\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_17\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n             
       | lexbuf ->\n                        (match __sedlex_partition_30\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | 1 -> __sedlex_state_4 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_17\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_30\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | 1 -> __sedlex_state_4 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_BIGINT\n                              { kind = BIG_OCTAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token octbigint\"))\n    | 14 ->\n        Token (env, (T_BIGINT { kind = BIG_OCTAL; raw = (lexeme lexbuf) }))\n    | 15 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_24\n                                
 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_29\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_17\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_31\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_3 lexbuf\n                          | 1 -> __sedlex_state_4 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_17\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n               
          (match __sedlex_partition_31\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_5 lexbuf\n                          | 1 -> __sedlex_state_4 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf)) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_NUMBER { kind = OCTAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token octnumber\"))\n    | 16 -> Token (env, (T_NUMBER { kind = OCTAL; raw = (lexeme lexbuf) }))\n    | 17 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_24\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_32\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | 1 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_33\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                    
      | 0 -> __sedlex_state_2 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf)) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_NUMBER\n                              {\n                                kind = LEGACY_NON_OCTAL;\n                                raw = (lexeme lexbuf)\n                              }))\n                   | _ -> failwith \"unreachable token legacynonoctnumber\"))\n    | 18 ->\n        Token\n          (env,\n            (T_NUMBER { kind = LEGACY_NON_OCTAL; raw = (lexeme lexbuf) }))\n    | 19 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_24\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_17\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_17\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_2 lexbuf\n                          | _ -> 
Sedlexing.backtrack lexbuf)) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_NUMBER\n                              { kind = LEGACY_OCTAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token legacyoctnumber\"))\n    | 20 ->\n        Token\n          (env, (T_NUMBER { kind = LEGACY_OCTAL; raw = (lexeme lexbuf) }))\n    | 21 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_24\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_34\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_4\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_35\n                                 (Sedlexing.__private__next_int lexbuf)\n      
                   with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | 1 -> __sedlex_state_4 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_4\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_35\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | 1 -> __sedlex_state_4 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_BIGINT\n                              { kind = BIG_NORMAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token hexbigint\"))\n    | 22 ->\n        Token (env, (T_BIGINT { kind = BIG_NORMAL; raw = (lexeme lexbuf) }))\n    | 23 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_24\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                       
  | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_34\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_4\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_36\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_3 lexbuf\n                          | 1 -> __sedlex_state_4 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_4\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_36\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n        
                  | 0 -> __sedlex_state_5 lexbuf\n                          | 1 -> __sedlex_state_4 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf)) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_NUMBER { kind = NORMAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token hexnumber\"))\n    | 24 -> Token (env, (T_NUMBER { kind = NORMAL; raw = (lexeme lexbuf) }))\n    | 25 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_37\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | 1 -> __sedlex_state_12 lexbuf\n                         | 2 -> __sedlex_state_17 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_38\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> __sedlex_state_10 
lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_39\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_4 lexbuf\n                         | 1 -> __sedlex_state_5 lexbuf\n                         | 2 -> __sedlex_state_7 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_40\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | 1 -> __sedlex_state_7 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_41\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | 1 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_7 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_42\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_7 lexbuf\n                         | 1 -> __sedlex_state_8 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_8 =\n                    function\n                    | lexbuf ->\n    
                    (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_9 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_9 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_42\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_9 lexbuf\n                         | 1 -> __sedlex_state_8 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_10 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_11 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_11 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_38\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_11 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> __sedlex_state_10 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_12 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_43\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_13 lexbuf\n                         | 1 -> __sedlex_state_3 
lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_13 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_44\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_14 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_14 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_38\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_14 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> __sedlex_state_15 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_15 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_16 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_16 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_38\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_16 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> __sedlex_state_15 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_17 =\n                    function\n                    | lexbuf ->\n      
                  (match __sedlex_partition_45\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_13 lexbuf\n                         | 1 -> __sedlex_state_17 lexbuf\n                         | 2 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       let loc = loc_of_lexbuf env lexbuf in\n                       let env =\n                         lex_error env loc Parse_error.InvalidSciBigInt in\n                       Token\n                         (env,\n                           (T_BIGINT\n                              { kind = BIG_NORMAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token scibigint\"))\n    | 26 ->\n        let loc = loc_of_lexbuf env lexbuf in\n        let env = lex_error env loc Parse_error.InvalidSciBigInt in\n        Token (env, (T_BIGINT { kind = BIG_NORMAL; raw = (lexeme lexbuf) }))\n    | 27 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_37\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | 1 -> __sedlex_state_11 lexbuf\n                         | 2 -> __sedlex_state_16 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         
with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_38\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> __sedlex_state_9 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_39\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_4 lexbuf\n                         | 1 -> __sedlex_state_5 lexbuf\n                         | 2 -> __sedlex_state_6 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_40\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | 1 -> __sedlex_state_6 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_33\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_5 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n          
        and __sedlex_state_6 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_46\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_6 lexbuf\n                          | 1 -> __sedlex_state_7 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_7 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_8 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_8 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_46\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_8 lexbuf\n                          | 1 -> __sedlex_state_7 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_9 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_10 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_10 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_38\n                                 (Sedlexing.__private__next_int lexbuf)\n                      
   with\n                         | 0 -> __sedlex_state_10 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> __sedlex_state_9 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_11 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_43\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_12 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_12 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_44\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_13 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_13 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_38\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_13 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> __sedlex_state_14 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_14 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_15 lexbuf\n                         | _ -> Sedlexing.backtrack 
lexbuf)\n                  and __sedlex_state_15 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_38\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_15 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> __sedlex_state_14 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_16 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_45\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_12 lexbuf\n                         | 1 -> __sedlex_state_16 lexbuf\n                         | 2 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_NUMBER { kind = NORMAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token scinumber\"))\n    | 28 -> Token (env, (T_NUMBER { kind = NORMAL; raw = (lexeme lexbuf) }))\n    | 29 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_37\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | 1 -> __sedlex_state_6 lexbuf\n                         | 2 -> __sedlex_state_8 lexbuf\n                         | _ -> 
Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_42\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_4 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_42\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_4 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_6 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_47\n                                 (Sedlexing.__private__next_int lexbuf)\n                         
with\n                         | 0 -> __sedlex_state_7 lexbuf\n                         | 1 -> __sedlex_state_6 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_7 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_41\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | 1 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_8 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_48\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_7 lexbuf\n                         | 1 -> __sedlex_state_8 lexbuf\n                         | 2 -> __sedlex_state_9 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_9 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_10 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_10 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_48\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_7 lexbuf\n                         | 1 -> __sedlex_state_10 lexbuf\n                         | 2 -> __sedlex_state_9 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf) in\n                  
Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       let loc = loc_of_lexbuf env lexbuf in\n                       let env =\n                         lex_error env loc Parse_error.InvalidFloatBigInt in\n                       Token\n                         (env,\n                           (T_BIGINT\n                              { kind = BIG_NORMAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token floatbigint\"))\n    | 30 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_40\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | 1 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_41\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | 1 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_42\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | 1 -> __sedlex_state_4 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_4 
=\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_42\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_5 lexbuf\n                         | 1 -> __sedlex_state_4 lexbuf\n                         | 2 -> 0\n                         | _ -> Sedlexing.backtrack lexbuf) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_BIGINT\n                              { kind = BIG_NORMAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token wholebigint\"))\n    | 31 ->\n        let loc = loc_of_lexbuf env lexbuf in\n        let env = lex_error env loc Parse_error.InvalidFloatBigInt in\n        Token (env, (T_BIGINT { kind = BIG_NORMAL; raw = (lexeme lexbuf) }))\n    | 32 ->\n        Token (env, (T_BIGINT { kind = BIG_NORMAL; raw = (lexeme lexbuf) }))\n    | 33 ->\n        recover env lexbuf\n          ~f:(fun env ->\n                fun lexbuf ->\n                  let rec __sedlex_state_0 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_37\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_1 lexbuf\n                         | 1 -> __sedlex_state_5 lexbuf\n                         | 
2 -> __sedlex_state_7 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_1 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_2 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_46\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_2 lexbuf\n                          | 1 -> __sedlex_state_3 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_3 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_4 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_4 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_46\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_4 lexbuf\n                          | 1 -> __sedlex_state_3 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_5 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 
0;\n                         (match __sedlex_partition_47\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_6 lexbuf\n                          | 1 -> __sedlex_state_5 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_6 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_33\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_2 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_7 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_48\n                                  (Sedlexing.__private__next_int lexbuf)\n                          with\n                          | 0 -> __sedlex_state_6 lexbuf\n                          | 1 -> __sedlex_state_7 lexbuf\n                          | 2 -> __sedlex_state_8 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf))\n                  and __sedlex_state_8 =\n                    function\n                    | lexbuf ->\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_9 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)\n                  and __sedlex_state_9 =\n                    function\n                    | lexbuf ->\n                        (Sedlexing.mark lexbuf 0;\n                         (match __sedlex_partition_48\n                                  (Sedlexing.__private__next_int 
lexbuf)\n                          with\n                          | 0 -> __sedlex_state_6 lexbuf\n                          | 1 -> __sedlex_state_9 lexbuf\n                          | 2 -> __sedlex_state_8 lexbuf\n                          | _ -> Sedlexing.backtrack lexbuf)) in\n                  Sedlexing.start lexbuf;\n                  (match __sedlex_state_0 lexbuf with\n                   | 0 ->\n                       Token\n                         (env,\n                           (T_NUMBER { kind = NORMAL; raw = (lexeme lexbuf) }))\n                   | _ -> failwith \"unreachable token wholenumber\"))\n    | 34 -> Token (env, (T_NUMBER { kind = NORMAL; raw = (lexeme lexbuf) }))\n    | 35 ->\n        let loc = loc_of_lexbuf env lexbuf in\n        let raw = lexeme lexbuf in\n        Token (env, (T_IDENTIFIER { loc; value = raw; raw }))\n    | 36 -> Token (env, T_LCURLY)\n    | 37 -> Token (env, T_RCURLY)\n    | 38 -> Token (env, T_LPAREN)\n    | 39 -> Token (env, T_RPAREN)\n    | 40 -> Token (env, T_LBRACKET)\n    | 41 -> Token (env, T_RBRACKET)\n    | 42 -> Token (env, T_ELLIPSIS)\n    | 43 -> Token (env, T_PERIOD)\n    | 44 -> Token (env, T_SEMICOLON)\n    | 45 -> Token (env, T_COMMA)\n    | 46 -> Token (env, T_COLON)\n    | 47 ->\n        (Sedlexing.rollback lexbuf;\n         (let __sedlex_state_0 =\n            function\n            | lexbuf ->\n                (match __sedlex_partition_49\n                         (Sedlexing.__private__next_int lexbuf)\n                 with\n                 | 0 -> 0\n                 | _ -> Sedlexing.backtrack lexbuf) in\n          Sedlexing.start lexbuf;\n          (match __sedlex_state_0 lexbuf with\n           | 0 -> Token (env, T_PLING)\n           | _ -> failwith \"expected ?\")))\n    | 48 -> Token (env, T_PLING_PERIOD)\n    | 49 -> Token (env, T_PLING_PLING)\n    | 50 -> Token (env, T_PLING)\n    | 51 -> Token (env, T_AND)\n    | 52 -> Token (env, T_OR)\n    | 53 -> Token (env, T_STRICT_EQUAL)\n    | 54 -> 
Token (env, T_STRICT_NOT_EQUAL)\n    | 55 -> Token (env, T_LESS_THAN_EQUAL)\n    | 56 -> Token (env, T_GREATER_THAN_EQUAL)\n    | 57 -> Token (env, T_EQUAL)\n    | 58 -> Token (env, T_NOT_EQUAL)\n    | 59 -> Token (env, T_INCR)\n    | 60 -> Token (env, T_DECR)\n    | 61 -> Token (env, T_LSHIFT_ASSIGN)\n    | 62 -> Token (env, T_LSHIFT)\n    | 63 -> Token (env, T_RSHIFT_ASSIGN)\n    | 64 -> Token (env, T_RSHIFT3_ASSIGN)\n    | 65 -> Token (env, T_RSHIFT3)\n    | 66 -> Token (env, T_RSHIFT)\n    | 67 -> Token (env, T_PLUS_ASSIGN)\n    | 68 -> Token (env, T_MINUS_ASSIGN)\n    | 69 -> Token (env, T_MULT_ASSIGN)\n    | 70 -> Token (env, T_EXP_ASSIGN)\n    | 71 -> Token (env, T_MOD_ASSIGN)\n    | 72 -> Token (env, T_BIT_AND_ASSIGN)\n    | 73 -> Token (env, T_BIT_OR_ASSIGN)\n    | 74 -> Token (env, T_BIT_XOR_ASSIGN)\n    | 75 -> Token (env, T_NULLISH_ASSIGN)\n    | 76 -> Token (env, T_AND_ASSIGN)\n    | 77 -> Token (env, T_OR_ASSIGN)\n    | 78 -> Token (env, T_LESS_THAN)\n    | 79 -> Token (env, T_GREATER_THAN)\n    | 80 -> Token (env, T_PLUS)\n    | 81 -> Token (env, T_MINUS)\n    | 82 -> Token (env, T_MULT)\n    | 83 -> Token (env, T_EXP)\n    | 84 -> Token (env, T_MOD)\n    | 85 -> Token (env, T_BIT_OR)\n    | 86 -> Token (env, T_BIT_AND)\n    | 87 -> Token (env, T_BIT_XOR)\n    | 88 -> Token (env, T_NOT)\n    | 89 -> Token (env, T_BIT_NOT)\n    | 90 -> Token (env, T_ASSIGN)\n    | 91 -> Token (env, T_ARROW)\n    | 92 -> Token (env, T_DIV_ASSIGN)\n    | 93 -> Token (env, T_DIV)\n    | 94 -> Token (env, T_AT)\n    | 95 -> Token (env, T_POUND)\n    | 96 -> let env = illegal env (loc_of_lexbuf env lexbuf) in Continue env\n    | 97 ->\n        let start_offset = Sedlexing.lexeme_start lexbuf in\n        ((loop_id_continues lexbuf) |> ignore;\n         (let end_offset = Sedlexing.lexeme_end lexbuf in\n          let loc = loc_of_offsets env start_offset end_offset in\n          Sedlexing.set_lexeme_start lexbuf start_offset;\n          (let raw = Sedlexing.lexeme lexbuf in\n 
          let (nenv, value) = decode_identifier env raw in\n           match value with\n           | \"async\" -> Token (env, T_ASYNC)\n           | \"await\" -> Token (env, T_AWAIT)\n           | \"break\" -> Token (env, T_BREAK)\n           | \"case\" -> Token (env, T_CASE)\n           | \"catch\" -> Token (env, T_CATCH)\n           | \"class\" -> Token (env, T_CLASS)\n           | \"const\" -> Token (env, T_CONST)\n           | \"continue\" -> Token (env, T_CONTINUE)\n           | \"debugger\" -> Token (env, T_DEBUGGER)\n           | \"declare\" -> Token (env, T_DECLARE)\n           | \"default\" -> Token (env, T_DEFAULT)\n           | \"delete\" -> Token (env, T_DELETE)\n           | \"do\" -> Token (env, T_DO)\n           | \"else\" -> Token (env, T_ELSE)\n           | \"enum\" -> Token (env, T_ENUM)\n           | \"export\" -> Token (env, T_EXPORT)\n           | \"extends\" -> Token (env, T_EXTENDS)\n           | \"false\" -> Token (env, T_FALSE)\n           | \"finally\" -> Token (env, T_FINALLY)\n           | \"for\" -> Token (env, T_FOR)\n           | \"function\" -> Token (env, T_FUNCTION)\n           | \"if\" -> Token (env, T_IF)\n           | \"implements\" -> Token (env, T_IMPLEMENTS)\n           | \"import\" -> Token (env, T_IMPORT)\n           | \"in\" -> Token (env, T_IN)\n           | \"instanceof\" -> Token (env, T_INSTANCEOF)\n           | \"interface\" -> Token (env, T_INTERFACE)\n           | \"let\" -> Token (env, T_LET)\n           | \"new\" -> Token (env, T_NEW)\n           | \"null\" -> Token (env, T_NULL)\n           | \"of\" -> Token (env, T_OF)\n           | \"opaque\" -> Token (env, T_OPAQUE)\n           | \"package\" -> Token (env, T_PACKAGE)\n           | \"private\" -> Token (env, T_PRIVATE)\n           | \"protected\" -> Token (env, T_PROTECTED)\n           | \"public\" -> Token (env, T_PUBLIC)\n           | \"return\" -> Token (env, T_RETURN)\n           | \"static\" -> Token (env, T_STATIC)\n           | \"super\" -> Token (env, 
T_SUPER)\n           | \"switch\" -> Token (env, T_SWITCH)\n           | \"this\" -> Token (env, T_THIS)\n           | \"throw\" -> Token (env, T_THROW)\n           | \"true\" -> Token (env, T_TRUE)\n           | \"try\" -> Token (env, T_TRY)\n           | \"type\" -> Token (env, T_TYPE)\n           | \"typeof\" -> Token (env, T_TYPEOF)\n           | \"var\" -> Token (env, T_VAR)\n           | \"void\" -> Token (env, T_VOID)\n           | \"while\" -> Token (env, T_WHILE)\n           | \"with\" -> Token (env, T_WITH)\n           | \"yield\" -> Token (env, T_YIELD)\n           | _ ->\n               Token\n                 (nenv,\n                   (T_IDENTIFIER\n                      { loc; value; raw = (Sedlexing.string_of_utf8 raw) })))))\n    | 98 ->\n        let env =\n          if is_in_comment_syntax env\n          then\n            let loc = loc_of_lexbuf env lexbuf in\n            lex_error env loc Parse_error.UnexpectedEOS\n          else env in\n        Token (env, T_EOF)\n    | 99 ->\n        let env = illegal env (loc_of_lexbuf env lexbuf) in\n        Token (env, (T_ERROR (lexeme lexbuf)))\n    | _ -> failwith \"unreachable token\") : result)\nlet rec regexp_class env buf lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_100 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | 1 -> __sedlex_state_2 lexbuf\n         | 2 -> 4\n         | 3 -> __sedlex_state_4 lexbuf\n         | 4 -> __sedlex_state_6 lexbuf\n         | 5 -> 3\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_2 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 5;\n         (match __sedlex_partition_101 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_2 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 4;\n         (match __sedlex_partition_11 
(Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 4\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 5;\n         (match __sedlex_partition_102 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 1\n          | 1 -> 2\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 -> env\n   | 1 -> (Buffer.add_string buf \"\\\\\\\\\"; regexp_class env buf lexbuf)\n   | 2 ->\n       (Buffer.add_char buf '\\\\';\n        Buffer.add_char buf ']';\n        regexp_class env buf lexbuf)\n   | 3 -> (Buffer.add_char buf ']'; env)\n   | 4 ->\n       let loc = loc_of_lexbuf env lexbuf in\n       let env = lex_error env loc Parse_error.UnterminatedRegExp in\n       let env = new_line env lexbuf in env\n   | 5 ->\n       let str = lexeme lexbuf in\n       (Buffer.add_string buf str; regexp_class env buf lexbuf)\n   | _ -> failwith \"unreachable regexp_class\")\nlet rec regexp_body env buf lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_103 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | 1 -> __sedlex_state_2 lexbuf\n         | 2 -> 6\n         | 3 -> __sedlex_state_4 lexbuf\n         | 4 -> __sedlex_state_6 lexbuf\n         | 5 -> 5\n         | 6 -> __sedlex_state_9 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_2 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 7;\n         (match __sedlex_partition_104 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_2 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 6;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 6\n       
   | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 4;\n         (match __sedlex_partition_105 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_7 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_7 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 3;\n         (match __sedlex_partition_105 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_7 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_9 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 7;\n         (match __sedlex_partition_106 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 2\n          | 1 -> 1\n          | 2 -> __sedlex_state_12 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_12 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 1;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 1\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 ->\n       let loc = loc_of_lexbuf env lexbuf in\n       let env = lex_error env loc Parse_error.UnterminatedRegExp in\n       (env, \"\")\n   | 1 ->\n       let loc = loc_of_lexbuf env lexbuf in\n       let env = lex_error env loc Parse_error.UnterminatedRegExp in\n       let env = new_line env lexbuf in (env, \"\")\n   | 2 ->\n       let s = lexeme lexbuf in\n       (Buffer.add_string buf s; regexp_body env buf lexbuf)\n   | 3 ->\n       let flags =\n         let str = lexeme lexbuf in\n         String.sub str 1 ((String.length str) - 1) in\n       (env, flags)\n   | 4 -> (env, \"\")\n   | 5 ->\n       (Buffer.add_char buf '[';\n        (let env = regexp_class env buf lexbuf in regexp_body env buf lexbuf))\n   | 6 ->\n       let loc = loc_of_lexbuf 
env lexbuf in\n       let env = lex_error env loc Parse_error.UnterminatedRegExp in\n       let env = new_line env lexbuf in (env, \"\")\n   | 7 ->\n       let str = lexeme lexbuf in\n       (Buffer.add_string buf str; regexp_body env buf lexbuf)\n   | _ -> failwith \"unreachable regexp_body\")\nlet regexp env lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_107 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | 1 -> 6\n         | 2 -> __sedlex_state_3 lexbuf\n         | 3 -> 1\n         | 4 -> __sedlex_state_6 lexbuf\n         | 5 -> __sedlex_state_8 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_3 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 2;\n         (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_4 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 2;\n         (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_4 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 1;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 1\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_8 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 5;\n         (match __sedlex_partition_108 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 4\n          | 1 -> 3\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 -> Token (env, T_EOF)\n   | 1 -> let env = new_line env lexbuf in Continue env\n   | 2 -> Continue env\n   | 3 ->\n       let start_pos = 
start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let (env, end_pos) = line_comment env buf lexbuf in\n       Comment (env, (mk_comment env start_pos end_pos buf false))\n   | 4 ->\n       let start_pos = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let (env, end_pos) = comment env buf lexbuf in\n       Comment (env, (mk_comment env start_pos end_pos buf true))\n   | 5 ->\n       let start = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let (env, flags) = regexp_body env buf lexbuf in\n       let _end = end_pos_of_lexbuf env lexbuf in\n       let loc = { Loc.source = (Lex_env.source env); start; _end } in\n       Token (env, (T_REGEXP (loc, (Buffer.contents buf), flags)))\n   | 6 ->\n       let env = illegal env (loc_of_lexbuf env lexbuf) in\n       Token (env, (T_ERROR (lexeme lexbuf)))\n   | _ -> failwith \"unreachable regexp\")\nlet rec jsx_text env mode buf raw lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_109 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 1\n         | 1 -> __sedlex_state_2 lexbuf\n         | 2 -> 2\n         | 3 -> __sedlex_state_4 lexbuf\n         | 4 -> 0\n         | 5 -> __sedlex_state_7 lexbuf\n         | 6 -> __sedlex_state_23 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_2 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 6;\n         (match __sedlex_partition_110 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_2 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 2;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 2\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_7 =\n    function\n    | lexbuf ->\n   
     (Sedlexing.mark lexbuf 6;\n         (match __sedlex_partition_111 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_8 lexbuf\n          | 1 -> __sedlex_state_14 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_8 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_112 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_9 lexbuf\n         | 1 -> __sedlex_state_11 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_9 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_113 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_9 lexbuf\n         | 1 -> 4\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_11 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_12 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_12 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_114 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_12 lexbuf\n         | 1 -> 3\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_14 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_15 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_15 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_115 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_16 lexbuf\n         | 1 -> 5\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_16 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_115 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_17 lexbuf\n         | 1 -> 5\n         
| _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_17 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_115 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_18 lexbuf\n         | 1 -> 5\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_18 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_115 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_19 lexbuf\n         | 1 -> 5\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_19 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_115 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_20 lexbuf\n         | 1 -> 5\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_20 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_115 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_21 lexbuf\n         | 1 -> 5\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_21 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_116 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 5\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_23 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 0;\n         (match __sedlex_partition_110 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_2 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 ->\n       let c = lexeme lexbuf in\n       (match (mode, c) with\n        | (JSX_SINGLE_QUOTED_TEXT, \"'\") | (JSX_DOUBLE_QUOTED_TEXT, \"\\\"\") ->\n            env\n        | (JSX_CHILD_TEXT, (\"<\" | \"{\")) -> (Sedlexing.rollback lexbuf; env)\n        | (JSX_CHILD_TEXT, \">\") ->\n            unexpected_error_w_suggest env (loc_of_lexbuf env lexbuf) \">\"\n     
         \"{'>'}\"\n        | (JSX_CHILD_TEXT, \"}\") ->\n            unexpected_error_w_suggest env (loc_of_lexbuf env lexbuf) \"}\"\n              \"{'}'}\"\n        | _ ->\n            (Buffer.add_string raw c;\n             Buffer.add_string buf c;\n             jsx_text env mode buf raw lexbuf))\n   | 1 -> let env = illegal env (loc_of_lexbuf env lexbuf) in env\n   | 2 ->\n       let lt = lexeme lexbuf in\n       (Buffer.add_string raw lt;\n        Buffer.add_string buf lt;\n        (let env = new_line env lexbuf in jsx_text env mode buf raw lexbuf))\n   | 3 ->\n       let s = lexeme lexbuf in\n       let n = String.sub s 3 ((String.length s) - 4) in\n       (Buffer.add_string raw s;\n        (let code = int_of_string (\"0x\" ^ n) in\n         Wtf8.add_wtf_8 buf code; jsx_text env mode buf raw lexbuf))\n   | 4 ->\n       let s = lexeme lexbuf in\n       let n = String.sub s 2 ((String.length s) - 3) in\n       (Buffer.add_string raw s;\n        (let code = int_of_string n in\n         Wtf8.add_wtf_8 buf code; jsx_text env mode buf raw lexbuf))\n   | 5 ->\n       let s = lexeme lexbuf in\n       let entity = String.sub s 1 ((String.length s) - 2) in\n       (Buffer.add_string raw s;\n        (let code =\n           match entity with\n           | \"quot\" -> Some 0x0022\n           | \"amp\" -> Some 0x0026\n           | \"apos\" -> Some 0x0027\n           | \"lt\" -> Some 0x003C\n           | \"gt\" -> Some 0x003E\n           | \"nbsp\" -> Some 0x00A0\n           | \"iexcl\" -> Some 0x00A1\n           | \"cent\" -> Some 0x00A2\n           | \"pound\" -> Some 0x00A3\n           | \"curren\" -> Some 0x00A4\n           | \"yen\" -> Some 0x00A5\n           | \"brvbar\" -> Some 0x00A6\n           | \"sect\" -> Some 0x00A7\n           | \"uml\" -> Some 0x00A8\n           | \"copy\" -> Some 0x00A9\n           | \"ordf\" -> Some 0x00AA\n           | \"laquo\" -> Some 0x00AB\n           | \"not\" -> Some 0x00AC\n           | \"shy\" -> Some 0x00AD\n           | \"reg\" 
-> Some 0x00AE\n           | \"macr\" -> Some 0x00AF\n           | \"deg\" -> Some 0x00B0\n           | \"plusmn\" -> Some 0x00B1\n           | \"sup2\" -> Some 0x00B2\n           | \"sup3\" -> Some 0x00B3\n           | \"acute\" -> Some 0x00B4\n           | \"micro\" -> Some 0x00B5\n           | \"para\" -> Some 0x00B6\n           | \"middot\" -> Some 0x00B7\n           | \"cedil\" -> Some 0x00B8\n           | \"sup1\" -> Some 0x00B9\n           | \"ordm\" -> Some 0x00BA\n           | \"raquo\" -> Some 0x00BB\n           | \"frac14\" -> Some 0x00BC\n           | \"frac12\" -> Some 0x00BD\n           | \"frac34\" -> Some 0x00BE\n           | \"iquest\" -> Some 0x00BF\n           | \"Agrave\" -> Some 0x00C0\n           | \"Aacute\" -> Some 0x00C1\n           | \"Acirc\" -> Some 0x00C2\n           | \"Atilde\" -> Some 0x00C3\n           | \"Auml\" -> Some 0x00C4\n           | \"Aring\" -> Some 0x00C5\n           | \"AElig\" -> Some 0x00C6\n           | \"Ccedil\" -> Some 0x00C7\n           | \"Egrave\" -> Some 0x00C8\n           | \"Eacute\" -> Some 0x00C9\n           | \"Ecirc\" -> Some 0x00CA\n           | \"Euml\" -> Some 0x00CB\n           | \"Igrave\" -> Some 0x00CC\n           | \"Iacute\" -> Some 0x00CD\n           | \"Icirc\" -> Some 0x00CE\n           | \"Iuml\" -> Some 0x00CF\n           | \"ETH\" -> Some 0x00D0\n           | \"Ntilde\" -> Some 0x00D1\n           | \"Ograve\" -> Some 0x00D2\n           | \"Oacute\" -> Some 0x00D3\n           | \"Ocirc\" -> Some 0x00D4\n           | \"Otilde\" -> Some 0x00D5\n           | \"Ouml\" -> Some 0x00D6\n           | \"times\" -> Some 0x00D7\n           | \"Oslash\" -> Some 0x00D8\n           | \"Ugrave\" -> Some 0x00D9\n           | \"Uacute\" -> Some 0x00DA\n           | \"Ucirc\" -> Some 0x00DB\n           | \"Uuml\" -> Some 0x00DC\n           | \"Yacute\" -> Some 0x00DD\n           | \"THORN\" -> Some 0x00DE\n           | \"szlig\" -> Some 0x00DF\n           | \"agrave\" -> Some 0x00E0\n           | \"aacute\" 
-> Some 0x00E1\n           | \"acirc\" -> Some 0x00E2\n           | \"atilde\" -> Some 0x00E3\n           | \"auml\" -> Some 0x00E4\n           | \"aring\" -> Some 0x00E5\n           | \"aelig\" -> Some 0x00E6\n           | \"ccedil\" -> Some 0x00E7\n           | \"egrave\" -> Some 0x00E8\n           | \"eacute\" -> Some 0x00E9\n           | \"ecirc\" -> Some 0x00EA\n           | \"euml\" -> Some 0x00EB\n           | \"igrave\" -> Some 0x00EC\n           | \"iacute\" -> Some 0x00ED\n           | \"icirc\" -> Some 0x00EE\n           | \"iuml\" -> Some 0x00EF\n           | \"eth\" -> Some 0x00F0\n           | \"ntilde\" -> Some 0x00F1\n           | \"ograve\" -> Some 0x00F2\n           | \"oacute\" -> Some 0x00F3\n           | \"ocirc\" -> Some 0x00F4\n           | \"otilde\" -> Some 0x00F5\n           | \"ouml\" -> Some 0x00F6\n           | \"divide\" -> Some 0x00F7\n           | \"oslash\" -> Some 0x00F8\n           | \"ugrave\" -> Some 0x00F9\n           | \"uacute\" -> Some 0x00FA\n           | \"ucirc\" -> Some 0x00FB\n           | \"uuml\" -> Some 0x00FC\n           | \"yacute\" -> Some 0x00FD\n           | \"thorn\" -> Some 0x00FE\n           | \"yuml\" -> Some 0x00FF\n           | \"OElig\" -> Some 0x0152\n           | \"oelig\" -> Some 0x0153\n           | \"Scaron\" -> Some 0x0160\n           | \"scaron\" -> Some 0x0161\n           | \"Yuml\" -> Some 0x0178\n           | \"fnof\" -> Some 0x0192\n           | \"circ\" -> Some 0x02C6\n           | \"tilde\" -> Some 0x02DC\n           | \"Alpha\" -> Some 0x0391\n           | \"Beta\" -> Some 0x0392\n           | \"Gamma\" -> Some 0x0393\n           | \"Delta\" -> Some 0x0394\n           | \"Epsilon\" -> Some 0x0395\n           | \"Zeta\" -> Some 0x0396\n           | \"Eta\" -> Some 0x0397\n           | \"Theta\" -> Some 0x0398\n           | \"Iota\" -> Some 0x0399\n           | \"Kappa\" -> Some 0x039A\n           | \"Lambda\" -> Some 0x039B\n           | \"Mu\" -> Some 0x039C\n           | \"Nu\" -> Some 
0x039D\n           | \"Xi\" -> Some 0x039E\n           | \"Omicron\" -> Some 0x039F\n           | \"Pi\" -> Some 0x03A0\n           | \"Rho\" -> Some 0x03A1\n           | \"Sigma\" -> Some 0x03A3\n           | \"Tau\" -> Some 0x03A4\n           | \"Upsilon\" -> Some 0x03A5\n           | \"Phi\" -> Some 0x03A6\n           | \"Chi\" -> Some 0x03A7\n           | \"Psi\" -> Some 0x03A8\n           | \"Omega\" -> Some 0x03A9\n           | \"alpha\" -> Some 0x03B1\n           | \"beta\" -> Some 0x03B2\n           | \"gamma\" -> Some 0x03B3\n           | \"delta\" -> Some 0x03B4\n           | \"epsilon\" -> Some 0x03B5\n           | \"zeta\" -> Some 0x03B6\n           | \"eta\" -> Some 0x03B7\n           | \"theta\" -> Some 0x03B8\n           | \"iota\" -> Some 0x03B9\n           | \"kappa\" -> Some 0x03BA\n           | \"lambda\" -> Some 0x03BB\n           | \"mu\" -> Some 0x03BC\n           | \"nu\" -> Some 0x03BD\n           | \"xi\" -> Some 0x03BE\n           | \"omicron\" -> Some 0x03BF\n           | \"pi\" -> Some 0x03C0\n           | \"rho\" -> Some 0x03C1\n           | \"sigmaf\" -> Some 0x03C2\n           | \"sigma\" -> Some 0x03C3\n           | \"tau\" -> Some 0x03C4\n           | \"upsilon\" -> Some 0x03C5\n           | \"phi\" -> Some 0x03C6\n           | \"chi\" -> Some 0x03C7\n           | \"psi\" -> Some 0x03C8\n           | \"omega\" -> Some 0x03C9\n           | \"thetasym\" -> Some 0x03D1\n           | \"upsih\" -> Some 0x03D2\n           | \"piv\" -> Some 0x03D6\n           | \"ensp\" -> Some 0x2002\n           | \"emsp\" -> Some 0x2003\n           | \"thinsp\" -> Some 0x2009\n           | \"zwnj\" -> Some 0x200C\n           | \"zwj\" -> Some 0x200D\n           | \"lrm\" -> Some 0x200E\n           | \"rlm\" -> Some 0x200F\n           | \"ndash\" -> Some 0x2013\n           | \"mdash\" -> Some 0x2014\n           | \"lsquo\" -> Some 0x2018\n           | \"rsquo\" -> Some 0x2019\n           | \"sbquo\" -> Some 0x201A\n           | \"ldquo\" -> Some 0x201C\n  
         | \"rdquo\" -> Some 0x201D\n           | \"bdquo\" -> Some 0x201E\n           | \"dagger\" -> Some 0x2020\n           | \"Dagger\" -> Some 0x2021\n           | \"bull\" -> Some 0x2022\n           | \"hellip\" -> Some 0x2026\n           | \"permil\" -> Some 0x2030\n           | \"prime\" -> Some 0x2032\n           | \"Prime\" -> Some 0x2033\n           | \"lsaquo\" -> Some 0x2039\n           | \"rsaquo\" -> Some 0x203A\n           | \"oline\" -> Some 0x203E\n           | \"frasl\" -> Some 0x2044\n           | \"euro\" -> Some 0x20AC\n           | \"image\" -> Some 0x2111\n           | \"weierp\" -> Some 0x2118\n           | \"real\" -> Some 0x211C\n           | \"trade\" -> Some 0x2122\n           | \"alefsym\" -> Some 0x2135\n           | \"larr\" -> Some 0x2190\n           | \"uarr\" -> Some 0x2191\n           | \"rarr\" -> Some 0x2192\n           | \"darr\" -> Some 0x2193\n           | \"harr\" -> Some 0x2194\n           | \"crarr\" -> Some 0x21B5\n           | \"lArr\" -> Some 0x21D0\n           | \"uArr\" -> Some 0x21D1\n           | \"rArr\" -> Some 0x21D2\n           | \"dArr\" -> Some 0x21D3\n           | \"hArr\" -> Some 0x21D4\n           | \"forall\" -> Some 0x2200\n           | \"part\" -> Some 0x2202\n           | \"exist\" -> Some 0x2203\n           | \"empty\" -> Some 0x2205\n           | \"nabla\" -> Some 0x2207\n           | \"isin\" -> Some 0x2208\n           | \"notin\" -> Some 0x2209\n           | \"ni\" -> Some 0x220B\n           | \"prod\" -> Some 0x220F\n           | \"sum\" -> Some 0x2211\n           | \"minus\" -> Some 0x2212\n           | \"lowast\" -> Some 0x2217\n           | \"radic\" -> Some 0x221A\n           | \"prop\" -> Some 0x221D\n           | \"infin\" -> Some 0x221E\n           | \"ang\" -> Some 0x2220\n           | \"and\" -> Some 0x2227\n           | \"or\" -> Some 0x2228\n           | \"cap\" -> Some 0x2229\n           | \"cup\" -> Some 0x222A\n           | \"'int'\" -> Some 0x222B\n           | \"there4\" -> Some 
0x2234\n           | \"sim\" -> Some 0x223C\n           | \"cong\" -> Some 0x2245\n           | \"asymp\" -> Some 0x2248\n           | \"ne\" -> Some 0x2260\n           | \"equiv\" -> Some 0x2261\n           | \"le\" -> Some 0x2264\n           | \"ge\" -> Some 0x2265\n           | \"sub\" -> Some 0x2282\n           | \"sup\" -> Some 0x2283\n           | \"nsub\" -> Some 0x2284\n           | \"sube\" -> Some 0x2286\n           | \"supe\" -> Some 0x2287\n           | \"oplus\" -> Some 0x2295\n           | \"otimes\" -> Some 0x2297\n           | \"perp\" -> Some 0x22A5\n           | \"sdot\" -> Some 0x22C5\n           | \"lceil\" -> Some 0x2308\n           | \"rceil\" -> Some 0x2309\n           | \"lfloor\" -> Some 0x230A\n           | \"rfloor\" -> Some 0x230B\n           | \"lang\" -> Some 0x27E8\n           | \"rang\" -> Some 0x27E9\n           | \"loz\" -> Some 0x25CA\n           | \"spades\" -> Some 0x2660\n           | \"clubs\" -> Some 0x2663\n           | \"hearts\" -> Some 0x2665\n           | \"diams\" -> Some 0x2666\n           | _ -> None in\n         (match code with\n          | Some code -> Wtf8.add_wtf_8 buf code\n          | None -> Buffer.add_string buf (\"&\" ^ (entity ^ \";\")));\n         jsx_text env mode buf raw lexbuf))\n   | 6 ->\n       let c = lexeme lexbuf in\n       (Buffer.add_string raw c;\n        Buffer.add_string buf c;\n        jsx_text env mode buf raw lexbuf)\n   | _ -> failwith \"unreachable jsxtext\")\nlet jsx_tag env lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_117 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | 1 -> 14\n         | 2 -> __sedlex_state_3 lexbuf\n         | 3 -> 1\n         | 4 -> __sedlex_state_6 lexbuf\n         | 5 -> 12\n         | 6 -> 13\n         | 7 -> 10\n         | 8 -> __sedlex_state_11 lexbuf\n         | 9 -> 9\n         | 10 -> 5\n         | 11 -> 11\n         | 12 -> 7\n         | 13 -> __sedlex_state_18 
lexbuf\n         | 14 -> 8\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_3 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 2;\n         (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_4 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 2;\n         (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_4 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 1;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 1\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_11 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 6;\n         (match __sedlex_partition_108 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 4\n          | 1 -> 3\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_18 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 14;\n         (match __sedlex_partition_2 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_19 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_19 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_3 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_20 lexbuf\n         | 1 -> __sedlex_state_24 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_20 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_21 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_21 =\n    function\n    | lexbuf 
->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_22 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_22 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 13\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_24 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_25 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_25 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_5 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_25 lexbuf\n         | 1 -> 13\n         | _ -> Sedlexing.backtrack lexbuf) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 -> Token (env, T_EOF)\n   | 1 -> let env = new_line env lexbuf in Continue env\n   | 2 -> Continue env\n   | 3 ->\n       let start_pos = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let (env, end_pos) = line_comment env buf lexbuf in\n       Comment (env, (mk_comment env start_pos end_pos buf false))\n   | 4 ->\n       let start_pos = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let (env, end_pos) = comment env buf lexbuf in\n       Comment (env, (mk_comment env start_pos end_pos buf true))\n   | 5 -> Token (env, T_LESS_THAN)\n   | 6 -> Token (env, T_DIV)\n   | 7 -> Token (env, T_GREATER_THAN)\n   | 8 -> Token (env, T_LCURLY)\n   | 9 -> Token (env, T_COLON)\n   | 10 -> Token (env, T_PERIOD)\n   | 11 -> Token (env, T_ASSIGN)\n   | 12 ->\n       let quote = lexeme lexbuf in\n       let start = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let raw = Buffer.create 127 in\n       (Buffer.add_string raw quote;\n        
(let mode =\n           if quote = \"'\"\n           then JSX_SINGLE_QUOTED_TEXT\n           else JSX_DOUBLE_QUOTED_TEXT in\n         let env = jsx_text env mode buf raw lexbuf in\n         let _end = end_pos_of_lexbuf env lexbuf in\n         Buffer.add_string raw quote;\n         (let value = Buffer.contents buf in\n          let raw = Buffer.contents raw in\n          let loc = { Loc.source = (Lex_env.source env); start; _end } in\n          Token (env, (T_JSX_TEXT (loc, value, raw))))))\n   | 13 ->\n       let start_offset = Sedlexing.lexeme_start lexbuf in\n       (loop_jsx_id_continues lexbuf;\n        (let end_offset = Sedlexing.lexeme_end lexbuf in\n         Sedlexing.set_lexeme_start lexbuf start_offset;\n         (let raw = Sedlexing.lexeme lexbuf in\n          let loc = loc_of_offsets env start_offset end_offset in\n          Token\n            (env,\n              (T_JSX_IDENTIFIER { raw = (Sedlexing.string_of_utf8 raw); loc })))))\n   | 14 -> Token (env, (T_ERROR (lexeme lexbuf)))\n   | _ -> failwith \"unreachable jsx_tag\")\nlet jsx_child env start buf raw lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_118 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 1\n         | 1 -> 4\n         | 2 -> 0\n         | 3 -> __sedlex_state_4 lexbuf\n         | 4 -> 2\n         | 5 -> 3\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 0;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 0\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 ->\n       let lt = lexeme lexbuf in\n       (Buffer.add_string raw lt;\n        Buffer.add_string buf lt;\n        (let env = new_line env lexbuf in\n         let env = jsx_text env JSX_CHILD_TEXT buf raw lexbuf in\n         let _end = 
end_pos_of_lexbuf env lexbuf in\n         let value = Buffer.contents buf in\n         let raw = Buffer.contents raw in\n         let loc = { Loc.source = (Lex_env.source env); start; _end } in\n         (env, (T_JSX_TEXT (loc, value, raw)))))\n   | 1 -> (env, T_EOF)\n   | 2 -> (env, T_LESS_THAN)\n   | 3 -> (env, T_LCURLY)\n   | 4 ->\n       (Sedlexing.rollback lexbuf;\n        (let env = jsx_text env JSX_CHILD_TEXT buf raw lexbuf in\n         let _end = end_pos_of_lexbuf env lexbuf in\n         let value = Buffer.contents buf in\n         let raw = Buffer.contents raw in\n         let loc = { Loc.source = (Lex_env.source env); start; _end } in\n         (env, (T_JSX_TEXT (loc, value, raw)))))\n   | _ -> failwith \"unreachable jsx_child\")\nlet template_tail env lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_119 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 5\n         | 1 -> __sedlex_state_2 lexbuf\n         | 2 -> 0\n         | 3 -> __sedlex_state_5 lexbuf\n         | 4 -> __sedlex_state_7 lexbuf\n         | 5 -> 4\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_2 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 1;\n         (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_3 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_3 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 1;\n         (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_3 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_5 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 0;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 0\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_7 =\n    
function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 5;\n         (match __sedlex_partition_108 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 3\n          | 1 -> 2\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 -> let env = new_line env lexbuf in Continue env\n   | 1 -> Continue env\n   | 2 ->\n       let start_pos = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let (env, end_pos) = line_comment env buf lexbuf in\n       Comment (env, (mk_comment env start_pos end_pos buf false))\n   | 3 ->\n       let start_pos = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let (env, end_pos) = comment env buf lexbuf in\n       Comment (env, (mk_comment env start_pos end_pos buf true))\n   | 4 ->\n       let start = start_pos_of_lexbuf env lexbuf in\n       let cooked = Buffer.create 127 in\n       let raw = Buffer.create 127 in\n       let literal = Buffer.create 127 in\n       (Buffer.add_string literal \"}\";\n        (let (env, is_tail) = template_part env cooked raw literal lexbuf in\n         let _end = end_pos_of_lexbuf env lexbuf in\n         let loc = { Loc.source = (Lex_env.source env); start; _end } in\n         Token\n           (env,\n             (T_TEMPLATE_PART\n                (loc,\n                  {\n                    cooked = (Buffer.contents cooked);\n                    raw = (Buffer.contents raw);\n                    literal = (Buffer.contents literal)\n                  }, is_tail)))))\n   | 5 ->\n       let env = illegal env (loc_of_lexbuf env lexbuf) in\n       Token\n         (env,\n           (T_TEMPLATE_PART\n              ((loc_of_lexbuf env lexbuf),\n                { cooked = \"\"; raw = \"\"; literal = \"\" }, true)))\n   | _ -> failwith \"unreachable template_tail\")\nlet type_token env lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        
(match __sedlex_partition_126 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 62\n         | 1 -> 63\n         | 2 -> __sedlex_state_3 lexbuf\n         | 3 -> 0\n         | 4 -> __sedlex_state_6 lexbuf\n         | 5 -> 6\n         | 6 -> 61\n         | 7 -> __sedlex_state_10 lexbuf\n         | 8 -> 56\n         | 9 -> 38\n         | 10 -> 39\n         | 11 -> __sedlex_state_20 lexbuf\n         | 12 -> 59\n         | 13 -> 43\n         | 14 -> __sedlex_state_24 lexbuf\n         | 15 -> __sedlex_state_97 lexbuf\n         | 16 -> __sedlex_state_100 lexbuf\n         | 17 -> __sedlex_state_117 lexbuf\n         | 18 -> __sedlex_state_118 lexbuf\n         | 19 -> 44\n         | 20 -> 42\n         | 21 -> 49\n         | 22 -> __sedlex_state_122 lexbuf\n         | 23 -> 50\n         | 24 -> __sedlex_state_125 lexbuf\n         | 25 -> 32\n         | 26 -> __sedlex_state_128 lexbuf\n         | 27 -> 33\n         | 28 -> __sedlex_state_137 lexbuf\n         | 29 -> __sedlex_state_139 lexbuf\n         | 30 -> 35\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_3 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 1;\n         (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_4 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 1;\n         (match __sedlex_partition_51 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_4 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 0;\n         (match __sedlex_partition_11 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 0\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_10 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 63;\n         (match 
__sedlex_partition_73 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_11 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_11 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_127 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_12 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_12 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_75 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_13 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_13 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_73 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_14 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_14 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_128 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_15 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_15 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_93 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 31\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_20 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 53;\n         (match __sedlex_partition_13 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 4\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_24 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 60;\n         (match __sedlex_partition_123 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_25 lexbuf\n          | 1 -> __sedlex_state_26 lexbuf\n          | 2 -> __sedlex_state_47 lexbuf\n          | 3 -> __sedlex_state_94 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  
and __sedlex_state_25 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_123 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_25 lexbuf\n         | 1 -> __sedlex_state_26 lexbuf\n         | 2 -> __sedlex_state_47 lexbuf\n         | 3 -> __sedlex_state_94 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_26 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_27 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_27 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_59 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_27 lexbuf\n          | 2 -> __sedlex_state_29 lexbuf\n          | 3 -> __sedlex_state_43 lexbuf\n          | 4 -> __sedlex_state_45 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_28 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 29;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_29 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 29;\n         (match __sedlex_partition_61 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_30 lexbuf\n          | 2 -> __sedlex_state_38 lexbuf\n          | 3 -> __sedlex_state_42 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_30 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_40 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_31 lexbuf\n         | 1 -> __sedlex_state_35 lexbuf\n         | _ -> 
Sedlexing.backtrack lexbuf)\n  and __sedlex_state_31 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 24;\n         (match __sedlex_partition_62 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_32 lexbuf\n          | 1 -> __sedlex_state_31 lexbuf\n          | 2 -> __sedlex_state_33 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_32 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 23;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_32 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_33 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 22;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_34 lexbuf\n          | 1 -> __sedlex_state_32 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_34 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 21;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_34 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_35 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 24;\n         (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_32 lexbuf\n          | 1 -> __sedlex_state_35 lexbuf\n          | 2 -> __sedlex_state_36 lexbuf\n          | 3 -> __sedlex_state_33 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_36 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_37 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_37 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark 
lexbuf 24;\n         (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_32 lexbuf\n          | 1 -> __sedlex_state_37 lexbuf\n          | 2 -> __sedlex_state_36 lexbuf\n          | 3 -> __sedlex_state_33 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_38 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 24;\n         (match __sedlex_partition_62 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_39 lexbuf\n          | 1 -> __sedlex_state_38 lexbuf\n          | 2 -> __sedlex_state_40 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_39 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 23;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_39 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_40 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 22;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_41 lexbuf\n          | 1 -> __sedlex_state_39 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_41 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 21;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_41 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_42 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 24;\n         (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_39 lexbuf\n          | 1 -> __sedlex_state_42 lexbuf\n          | 2 -> __sedlex_state_36 lexbuf\n          | 3 -> __sedlex_state_40 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_43 
=\n    function\n    | lexbuf ->\n        (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_44 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_44 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_59 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_44 lexbuf\n          | 2 -> __sedlex_state_29 lexbuf\n          | 3 -> __sedlex_state_43 lexbuf\n          | 4 -> __sedlex_state_45 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_45 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 27;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_46 lexbuf\n          | 1 -> __sedlex_state_28 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_46 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 25;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_46 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_47 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_76 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_48 lexbuf\n          | 2 -> __sedlex_state_52 lexbuf\n          | 3 -> __sedlex_state_61 lexbuf\n          | 4 -> __sedlex_state_64 lexbuf\n          | 5 -> __sedlex_state_29 lexbuf\n          | 6 -> __sedlex_state_74 lexbuf\n          | 7 -> __sedlex_state_84 lexbuf\n          | 8 -> __sedlex_state_62 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_48 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match 
__sedlex_partition_77 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_49 lexbuf\n          | 2 -> __sedlex_state_29 lexbuf\n          | 3 -> __sedlex_state_45 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_49 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_59 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_49 lexbuf\n          | 2 -> __sedlex_state_29 lexbuf\n          | 3 -> __sedlex_state_50 lexbuf\n          | 4 -> __sedlex_state_45 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_50 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_51 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_51 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_59 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_51 lexbuf\n          | 2 -> __sedlex_state_29 lexbuf\n          | 3 -> __sedlex_state_50 lexbuf\n          | 4 -> __sedlex_state_45 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_52 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 16;\n         (match __sedlex_partition_78 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_53 lexbuf\n          | 1 -> __sedlex_state_54 lexbuf\n          | 2 -> __sedlex_state_52 lexbuf\n          | 3 -> __sedlex_state_58 lexbuf\n          | 4 -> __sedlex_state_59 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_53 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 15;\n         (match 
__sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_53 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_54 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_62 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_55 lexbuf\n          | 2 -> __sedlex_state_45 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_55 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_55 lexbuf\n          | 2 -> __sedlex_state_56 lexbuf\n          | 3 -> __sedlex_state_45 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_56 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_57 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_57 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_64 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_57 lexbuf\n          | 2 -> __sedlex_state_56 lexbuf\n          | 3 -> __sedlex_state_45 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_58 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 15;\n         (match __sedlex_partition_79 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_53 lexbuf\n          | 1 -> __sedlex_state_54 lexbuf\n          | 2 -> __sedlex_state_58 lexbuf\n          | 3 -> __sedlex_state_59 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and 
__sedlex_state_59 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 15;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_60 lexbuf\n          | 1 -> __sedlex_state_53 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_60 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 15;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_60 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_61 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_79 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_54 lexbuf\n          | 2 -> __sedlex_state_61 lexbuf\n          | 3 -> __sedlex_state_62 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_62 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 28;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_63 lexbuf\n          | 1 -> __sedlex_state_28 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_63 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 26;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_63 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_64 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 29;\n         (match __sedlex_partition_80 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_65 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_65 =\n    function\n    | lexbuf ->\n        
(Sedlexing.mark lexbuf 10;\n         (match __sedlex_partition_81 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_66 lexbuf\n          | 1 -> __sedlex_state_65 lexbuf\n          | 2 -> __sedlex_state_67 lexbuf\n          | 3 -> __sedlex_state_72 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_66 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 9;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_66 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_67 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_26 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_68 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_68 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 10;\n         (match __sedlex_partition_81 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_69 lexbuf\n          | 1 -> __sedlex_state_68 lexbuf\n          | 2 -> __sedlex_state_67 lexbuf\n          | 3 -> __sedlex_state_70 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_69 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 9;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_69 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_70 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 8;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_71 lexbuf\n          | 1 -> __sedlex_state_69 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_71 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 7;\n         (match __sedlex_partition_60 
(Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_71 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_72 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 8;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_73 lexbuf\n          | 1 -> __sedlex_state_66 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_73 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 7;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_73 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_74 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 29;\n         (match __sedlex_partition_82 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_75 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_75 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 14;\n         (match __sedlex_partition_83 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_76 lexbuf\n          | 1 -> __sedlex_state_75 lexbuf\n          | 2 -> __sedlex_state_77 lexbuf\n          | 3 -> __sedlex_state_82 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_76 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 13;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_76 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_77 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_17 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_78 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and 
__sedlex_state_78 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 14;\n         (match __sedlex_partition_83 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_79 lexbuf\n          | 1 -> __sedlex_state_78 lexbuf\n          | 2 -> __sedlex_state_77 lexbuf\n          | 3 -> __sedlex_state_80 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_79 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 13;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_79 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_80 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 12;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_81 lexbuf\n          | 1 -> __sedlex_state_79 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_81 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 11;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_81 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_82 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 12;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_83 lexbuf\n          | 1 -> __sedlex_state_76 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_83 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 11;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_83 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_84 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 29;\n         (match 
__sedlex_partition_84 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_85 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_85 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 20;\n         (match __sedlex_partition_85 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_86 lexbuf\n          | 1 -> __sedlex_state_85 lexbuf\n          | 2 -> __sedlex_state_87 lexbuf\n          | 3 -> __sedlex_state_92 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_86 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 19;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_86 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_87 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_88 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_88 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 20;\n         (match __sedlex_partition_85 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_89 lexbuf\n          | 1 -> __sedlex_state_88 lexbuf\n          | 2 -> __sedlex_state_87 lexbuf\n          | 3 -> __sedlex_state_90 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_89 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 19;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_89 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_90 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 18;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          
with\n          | 0 -> __sedlex_state_91 lexbuf\n          | 1 -> __sedlex_state_89 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_91 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 17;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_91 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_92 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 18;\n         (match __sedlex_partition_63 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_93 lexbuf\n          | 1 -> __sedlex_state_86 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_93 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 17;\n         (match __sedlex_partition_60 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_93 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_94 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_86 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_48 lexbuf\n          | 2 -> __sedlex_state_94 lexbuf\n          | 3 -> __sedlex_state_29 lexbuf\n          | 4 -> __sedlex_state_95 lexbuf\n          | 5 -> __sedlex_state_62 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_95 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_33 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_96 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_96 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_87 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> 
__sedlex_state_54 lexbuf\n          | 2 -> __sedlex_state_96 lexbuf\n          | 3 -> __sedlex_state_95 lexbuf\n          | 4 -> __sedlex_state_62 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_97 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 41;\n         (match __sedlex_partition_47 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_98 lexbuf\n          | 1 -> __sedlex_state_27 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_98 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_58 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 40\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_100 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 63;\n         (match __sedlex_partition_108 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_101 lexbuf\n          | 1 -> 5\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_101 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 2;\n         (match __sedlex_partition_65 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_102 lexbuf\n          | 1 -> __sedlex_state_103 lexbuf\n          | 2 -> __sedlex_state_105 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_102 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_65 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_102 lexbuf\n         | 1 -> __sedlex_state_103 lexbuf\n         | 2 -> __sedlex_state_105 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_103 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 3;\n         (match __sedlex_partition_66 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 3\n          | _ -> Sedlexing.backtrack lexbuf))\n  and 
__sedlex_state_105 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_67 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_106 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_106 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_68 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_107 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_107 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_69 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_108 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_108 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_70 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_109 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_109 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_71 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_110 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_110 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_72 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_111 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_111 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_73 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_112 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_112 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_67 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_113 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_113 =\n    function\n    | lexbuf ->\n        (match 
__sedlex_partition_2 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_114 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_114 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_74 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_115 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_115 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_75 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 3\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_117 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_76 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_48 lexbuf\n          | 2 -> __sedlex_state_52 lexbuf\n          | 3 -> __sedlex_state_61 lexbuf\n          | 4 -> __sedlex_state_64 lexbuf\n          | 5 -> __sedlex_state_29 lexbuf\n          | 6 -> __sedlex_state_74 lexbuf\n          | 7 -> __sedlex_state_84 lexbuf\n          | 8 -> __sedlex_state_62 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_118 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 30;\n         (match __sedlex_partition_86 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_28 lexbuf\n          | 1 -> __sedlex_state_48 lexbuf\n          | 2 -> __sedlex_state_94 lexbuf\n          | 3 -> __sedlex_state_29 lexbuf\n          | 4 -> __sedlex_state_95 lexbuf\n          | 5 -> __sedlex_state_62 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_122 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 51;\n         (match __sedlex_partition_129 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 57\n          | _ -> Sedlexing.backtrack lexbuf))\n  and 
__sedlex_state_125 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 46;\n         (match __sedlex_partition_58 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 45\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_128 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 63;\n         (match __sedlex_partition_2 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> __sedlex_state_129 lexbuf\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_129 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_3 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_130 lexbuf\n         | 1 -> __sedlex_state_134 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_130 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_131 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_131 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_132 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_132 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 61\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_134 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_135 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_135 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_5 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_135 lexbuf\n         | 1 -> 61\n         | _ -> Sedlexing.backtrack lexbuf)\n  
and __sedlex_state_137 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 34;\n         (match __sedlex_partition_130 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 36\n          | _ -> Sedlexing.backtrack lexbuf))\n  and __sedlex_state_139 =\n    function\n    | lexbuf ->\n        (Sedlexing.mark lexbuf 55;\n         (match __sedlex_partition_131 (Sedlexing.__private__next_int lexbuf)\n          with\n          | 0 -> 37\n          | _ -> Sedlexing.backtrack lexbuf)) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 -> let env = new_line env lexbuf in Continue env\n   | 1 -> Continue env\n   | 2 ->\n       let start_pos = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let (env, end_pos) = comment env buf lexbuf in\n       Comment (env, (mk_comment env start_pos end_pos buf true))\n   | 3 ->\n       let pattern = lexeme lexbuf in\n       if not (is_comment_syntax_enabled env)\n       then\n         let start_pos = start_pos_of_lexbuf env lexbuf in\n         let buf = Buffer.create 127 in\n         (Buffer.add_string buf pattern;\n          (let (env, end_pos) = comment env buf lexbuf in\n           Comment (env, (mk_comment env start_pos end_pos buf true))))\n       else\n         (let env =\n            if is_in_comment_syntax env\n            then\n              let loc = loc_of_lexbuf env lexbuf in\n              unexpected_error env loc pattern\n            else env in\n          let env = in_comment_syntax true env in\n          let len = Sedlexing.lexeme_length lexbuf in\n          if\n            ((Sedlexing.Utf8.sub_lexeme lexbuf (len - 1) 1) = \":\") &&\n              ((Sedlexing.Utf8.sub_lexeme lexbuf (len - 2) 1) <> \":\")\n          then Token (env, T_COLON)\n          else Continue env)\n   | 4 ->\n       if is_in_comment_syntax env\n       then let env = in_comment_syntax false env in Continue env\n       else\n         (Sedlexing.rollback lexbuf;\n    
      (let __sedlex_state_0 =\n             function\n             | lexbuf ->\n                 (match __sedlex_partition_23\n                          (Sedlexing.__private__next_int lexbuf)\n                  with\n                  | 0 -> 0\n                  | _ -> Sedlexing.backtrack lexbuf) in\n           Sedlexing.start lexbuf;\n           (match __sedlex_state_0 lexbuf with\n            | 0 -> Token (env, T_MULT)\n            | _ -> failwith \"expected *\")))\n   | 5 ->\n       let start_pos = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let (env, end_pos) = line_comment env buf lexbuf in\n       Comment (env, (mk_comment env start_pos end_pos buf false))\n   | 6 ->\n       let quote = lexeme lexbuf in\n       let start = start_pos_of_lexbuf env lexbuf in\n       let buf = Buffer.create 127 in\n       let raw = Buffer.create 127 in\n       (Buffer.add_string raw quote;\n        (let octal = false in\n         let (env, _end, octal) = string_quote env quote buf raw octal lexbuf in\n         let loc = { Loc.source = (Lex_env.source env); start; _end } in\n         Token\n           (env,\n             (T_STRING\n                (loc, (Buffer.contents buf), (Buffer.contents raw), octal)))))\n   | 7 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_120\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_121\n                                (Sedlexing.__private__next_int 
lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_25\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_26\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_27\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | 1 -> __sedlex_state_5 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_26\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_27\n                                
(Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | 1 -> __sedlex_state_5 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, (mk_bignum_singleton BIG_BINARY num))\n                  | _ -> failwith \"unreachable type_token bigbigint\"))\n   | 8 ->\n       let num = Sedlexing.lexeme lexbuf in\n       Token (env, (mk_bignum_singleton BIG_BINARY num))\n   | 9 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_120\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_121\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_25\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n        
                | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_26\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_28\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_4 lexbuf\n                         | 1 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_26\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_28\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_6 lexbuf\n                         | 1 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, 
(mk_num_singleton BINARY num))\n                  | _ -> failwith \"unreachable type_token binnumber\"))\n   | 10 ->\n       let num = Sedlexing.lexeme lexbuf in\n       Token (env, (mk_num_singleton BINARY num))\n   | 11 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_120\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_121\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_29\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_17\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n            
       | lexbuf ->\n                       (match __sedlex_partition_30\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | 1 -> __sedlex_state_5 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_17\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_30\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | 1 -> __sedlex_state_5 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, (mk_bignum_singleton BIG_OCTAL num))\n                  | _ -> failwith \"unreachable type_token octbigint\"))\n   | 12 ->\n       let num = Sedlexing.lexeme lexbuf in\n       Token (env, (mk_bignum_singleton BIG_OCTAL num))\n   | 13 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_120\n                                (Sedlexing.__private__next_int lexbuf)\n            
            with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_121\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_29\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_17\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_31\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_4 lexbuf\n                         | 1 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match 
__sedlex_partition_17\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_31\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_6 lexbuf\n                         | 1 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, (mk_num_singleton OCTAL num))\n                  | _ -> failwith \"unreachable type_token octnumber\"))\n   | 14 ->\n       let num = Sedlexing.lexeme lexbuf in\n       Token (env, (mk_num_singleton OCTAL num))\n   | 15 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_120\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_121\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 
lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_17\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_17\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_3 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, (mk_num_singleton LEGACY_OCTAL num))\n                  | _ -> failwith \"unreachable type_token legacyoctnumber\"))\n   | 16 ->\n       let num = Sedlexing.lexeme lexbuf in\n       Token (env, (mk_num_singleton LEGACY_OCTAL num))\n   | 17 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_120\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | 
lexbuf ->\n                       (match __sedlex_partition_121\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_34\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_4\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_35\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | 1 -> __sedlex_state_5 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_4\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n           
        | lexbuf ->\n                       (match __sedlex_partition_35\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | 1 -> __sedlex_state_5 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, (mk_bignum_singleton BIG_NORMAL num))\n                  | _ -> failwith \"unreachable type_token hexbigint\"))\n   | 18 ->\n       let num = Sedlexing.lexeme lexbuf in\n       Token (env, (mk_bignum_singleton BIG_NORMAL num))\n   | 19 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_120\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_121\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_34\n                                
(Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_4\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_36\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_4 lexbuf\n                         | 1 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_4\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_36\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_6 lexbuf\n                         | 1 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf 
with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, (mk_num_singleton NORMAL num))\n                  | _ -> failwith \"unreachable type_token hexnumber\"))\n   | 20 ->\n       let num = Sedlexing.lexeme lexbuf in\n       Token (env, (mk_num_singleton NORMAL num))\n   | 21 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_122\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | 2 -> __sedlex_state_13 lexbuf\n                        | 3 -> __sedlex_state_18 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_123\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | 2 -> __sedlex_state_13 lexbuf\n                        | 3 -> __sedlex_state_18 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf 
->\n                       (match __sedlex_partition_38\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 2 -> __sedlex_state_11 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_39\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_5 lexbuf\n                        | 1 -> __sedlex_state_6 lexbuf\n                        | 2 -> __sedlex_state_8 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_40\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | 1 -> __sedlex_state_8 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_41\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | 1 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_8 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_42\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> 
__sedlex_state_8 lexbuf\n                        | 1 -> __sedlex_state_9 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_9 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_10 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_10 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_42\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_10 lexbuf\n                        | 1 -> __sedlex_state_9 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_11 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_12 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_12 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_38\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_12 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 2 -> __sedlex_state_11 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_13 =\n                   function\n                   | lexbuf ->\n                    
   (match __sedlex_partition_43\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_14 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_14 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_44\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_15 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_15 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_38\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_15 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 2 -> __sedlex_state_16 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_16 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_17 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_17 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_38\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_17 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 
2 -> __sedlex_state_16 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_18 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_45\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_14 lexbuf\n                        | 1 -> __sedlex_state_18 lexbuf\n                        | 2 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      let loc = loc_of_lexbuf env lexbuf in\n                      let env =\n                        lex_error env loc Parse_error.InvalidSciBigInt in\n                      Token (env, (mk_bignum_singleton BIG_NORMAL num))\n                  | _ -> failwith \"unreachable type_token scibigint\"))\n   | 22 ->\n       let num = Sedlexing.lexeme lexbuf in\n       let loc = loc_of_lexbuf env lexbuf in\n       let env = lex_error env loc Parse_error.InvalidSciBigInt in\n       Token (env, (mk_bignum_singleton BIG_NORMAL num))\n   | 23 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_122\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | 2 -> __sedlex_state_12 lexbuf\n                        | 3 -> __sedlex_state_17 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n        
           function\n                   | lexbuf ->\n                       (match __sedlex_partition_123\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | 2 -> __sedlex_state_12 lexbuf\n                        | 3 -> __sedlex_state_17 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_38\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 2 -> __sedlex_state_10 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_39\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_5 lexbuf\n                        | 1 -> __sedlex_state_6 lexbuf\n                        | 2 -> __sedlex_state_7 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_40\n      
                          (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | 1 -> __sedlex_state_7 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_6 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_7 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_46\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_7 lexbuf\n                         | 1 -> __sedlex_state_8 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_8 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_9 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_9 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_46\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_9 lexbuf\n                         | 1 -> __sedlex_state_8 lexbuf\n                    
     | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_10 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_11 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_11 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_38\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_11 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 2 -> __sedlex_state_10 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_12 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_43\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_13 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_13 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_44\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_14 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_14 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_38\n                                (Sedlexing.__private__next_int lexbuf)\n     
                   with\n                        | 0 -> __sedlex_state_14 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 2 -> __sedlex_state_15 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_15 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_16 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_16 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_38\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_16 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 2 -> __sedlex_state_15 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_17 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_45\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_13 lexbuf\n                        | 1 -> __sedlex_state_17 lexbuf\n                        | 2 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, (mk_num_singleton NORMAL num))\n                  | _ -> failwith \"unreachable type_token scinumber\"))\n   | 24 ->\n       let num = Sedlexing.lexeme lexbuf in\n       
Token (env, (mk_num_singleton NORMAL num))\n   | 25 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_122\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | 2 -> __sedlex_state_7 lexbuf\n                        | 3 -> __sedlex_state_9 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_123\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | 2 -> __sedlex_state_7 lexbuf\n                        | 3 -> __sedlex_state_9 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_3 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_42\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 2 -> 
0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_5 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_42\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_5 lexbuf\n                        | 1 -> __sedlex_state_4 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_7 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_47\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_8 lexbuf\n                        | 1 -> __sedlex_state_7 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_8 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_41\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_3 lexbuf\n                        | 1 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_9 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_48\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n        
                | 0 -> __sedlex_state_8 lexbuf\n                        | 1 -> __sedlex_state_9 lexbuf\n                        | 2 -> __sedlex_state_10 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_10 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_11 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_11 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_48\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_8 lexbuf\n                        | 1 -> __sedlex_state_11 lexbuf\n                        | 2 -> __sedlex_state_10 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      let loc = loc_of_lexbuf env lexbuf in\n                      let env =\n                        lex_error env loc Parse_error.InvalidFloatBigInt in\n                      Token (env, (mk_bignum_singleton BIG_NORMAL num))\n                  | _ -> failwith \"unreachable type_token floatbigint\"))\n   | 26 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_124\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 
lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | 2 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_125\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | 2 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_41\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_2 lexbuf\n                        | 1 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_42\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_4 lexbuf\n                        | 1 -> __sedlex_state_5 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n                   | 
lexbuf ->\n                       (match __sedlex_partition_42\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | 1 -> __sedlex_state_5 lexbuf\n                        | 2 -> 0\n                        | _ -> Sedlexing.backtrack lexbuf) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, (mk_bignum_singleton BIG_NORMAL num))\n                  | _ -> failwith \"unreachable type_token wholebigint\"))\n   | 27 ->\n       let num = Sedlexing.lexeme lexbuf in\n       let loc = loc_of_lexbuf env lexbuf in\n       let env = lex_error env loc Parse_error.InvalidFloatBigInt in\n       Token (env, (mk_bignum_singleton BIG_NORMAL num))\n   | 28 ->\n       let num = Sedlexing.lexeme lexbuf in\n       Token (env, (mk_bignum_singleton BIG_NORMAL num))\n   | 29 ->\n       recover env lexbuf\n         ~f:(fun env ->\n               fun lexbuf ->\n                 let rec __sedlex_state_0 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_122\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 lexbuf\n                        | 1 -> __sedlex_state_7 lexbuf\n                        | 2 -> __sedlex_state_11 lexbuf\n                        | 3 -> __sedlex_state_13 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_1 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_125\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_1 
lexbuf\n                        | 1 -> __sedlex_state_2 lexbuf\n                        | 2 -> __sedlex_state_4 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_2 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_47\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> 0\n                         | 1 -> __sedlex_state_2 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_4 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_48\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> 0\n                         | 1 -> __sedlex_state_4 lexbuf\n                         | 2 -> __sedlex_state_5 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_5 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_6 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_6 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_48\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> 0\n                         | 1 -> __sedlex_state_6 lexbuf\n                         | 2 -> __sedlex_state_5 lexbuf\n   
                      | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_7 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_8 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_8 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_46\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_8 lexbuf\n                         | 1 -> __sedlex_state_9 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_9 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_10 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_10 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_46\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_10 lexbuf\n                         | 1 -> __sedlex_state_9 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_11 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_47\n             
                    (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_12 lexbuf\n                         | 1 -> __sedlex_state_11 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_12 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_33\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_8 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_13 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_48\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_12 lexbuf\n                         | 1 -> __sedlex_state_13 lexbuf\n                         | 2 -> __sedlex_state_14 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf))\n                 and __sedlex_state_14 =\n                   function\n                   | lexbuf ->\n                       (match __sedlex_partition_33\n                                (Sedlexing.__private__next_int lexbuf)\n                        with\n                        | 0 -> __sedlex_state_15 lexbuf\n                        | _ -> Sedlexing.backtrack lexbuf)\n                 and __sedlex_state_15 =\n                   function\n                   | lexbuf ->\n                       (Sedlexing.mark lexbuf 0;\n                        (match __sedlex_partition_48\n                                 (Sedlexing.__private__next_int lexbuf)\n                         with\n                         | 0 -> __sedlex_state_12 lexbuf\n          
               | 1 -> __sedlex_state_15 lexbuf\n                         | 2 -> __sedlex_state_14 lexbuf\n                         | _ -> Sedlexing.backtrack lexbuf)) in\n                 Sedlexing.start lexbuf;\n                 (match __sedlex_state_0 lexbuf with\n                  | 0 ->\n                      let num = Sedlexing.lexeme lexbuf in\n                      Token (env, (mk_num_singleton NORMAL num))\n                  | _ -> failwith \"unreachable type_token wholenumber\"))\n   | 30 ->\n       let num = Sedlexing.lexeme lexbuf in\n       Token (env, (mk_num_singleton NORMAL num))\n   | 31 -> Token (env, T_CHECKS)\n   | 32 -> Token (env, T_LBRACKET)\n   | 33 -> Token (env, T_RBRACKET)\n   | 34 -> Token (env, T_LCURLY)\n   | 35 -> Token (env, T_RCURLY)\n   | 36 -> Token (env, T_LCURLYBAR)\n   | 37 -> Token (env, T_RCURLYBAR)\n   | 38 -> Token (env, T_LPAREN)\n   | 39 -> Token (env, T_RPAREN)\n   | 40 -> Token (env, T_ELLIPSIS)\n   | 41 -> Token (env, T_PERIOD)\n   | 42 -> Token (env, T_SEMICOLON)\n   | 43 -> Token (env, T_COMMA)\n   | 44 -> Token (env, T_COLON)\n   | 45 -> Token (env, T_PLING_PERIOD)\n   | 46 -> Token (env, T_PLING)\n   | 47 -> Token (env, T_LBRACKET)\n   | 48 -> Token (env, T_RBRACKET)\n   | 49 -> Token (env, T_LESS_THAN)\n   | 50 -> Token (env, T_GREATER_THAN)\n   | 51 -> Token (env, T_ASSIGN)\n   | 52 -> Token (env, T_PLING)\n   | 53 -> Token (env, T_MULT)\n   | 54 -> Token (env, T_COLON)\n   | 55 -> Token (env, T_BIT_OR)\n   | 56 -> Token (env, T_BIT_AND)\n   | 57 -> Token (env, T_ARROW)\n   | 58 -> Token (env, T_ASSIGN)\n   | 59 -> Token (env, T_PLUS)\n   | 60 -> Token (env, T_MINUS)\n   | 61 ->\n       let start_offset = Sedlexing.lexeme_start lexbuf in\n       ((loop_id_continues lexbuf) |> ignore;\n        (let end_offset = Sedlexing.lexeme_end lexbuf in\n         let loc = loc_of_offsets env start_offset end_offset in\n         Sedlexing.set_lexeme_start lexbuf start_offset;\n         (let raw = Sedlexing.lexeme lexbuf in\n    
      let (env, value) = decode_identifier env raw in\n          match value with\n          | \"any\" -> Token (env, T_ANY_TYPE)\n          | \"bool\" -> Token (env, (T_BOOLEAN_TYPE BOOL))\n          | \"boolean\" -> Token (env, (T_BOOLEAN_TYPE BOOLEAN))\n          | \"empty\" -> Token (env, T_EMPTY_TYPE)\n          | \"extends\" -> Token (env, T_EXTENDS)\n          | \"false\" -> Token (env, T_FALSE)\n          | \"interface\" -> Token (env, T_INTERFACE)\n          | \"mixed\" -> Token (env, T_MIXED_TYPE)\n          | \"null\" -> Token (env, T_NULL)\n          | \"number\" -> Token (env, T_NUMBER_TYPE)\n          | \"bigint\" -> Token (env, T_BIGINT_TYPE)\n          | \"static\" -> Token (env, T_STATIC)\n          | \"string\" -> Token (env, T_STRING_TYPE)\n          | \"true\" -> Token (env, T_TRUE)\n          | \"typeof\" -> Token (env, T_TYPEOF)\n          | \"void\" -> Token (env, T_VOID_TYPE)\n          | \"symbol\" -> Token (env, T_SYMBOL_TYPE)\n          | _ ->\n              Token\n                (env,\n                  (T_IDENTIFIER\n                     { loc; value; raw = (Sedlexing.string_of_utf8 raw) })))))\n   | 62 ->\n       let env =\n         if is_in_comment_syntax env\n         then\n           let loc = loc_of_lexbuf env lexbuf in\n           lex_error env loc Parse_error.UnexpectedEOS\n         else env in\n       Token (env, T_EOF)\n   | 63 -> Token (env, (T_ERROR (lexeme lexbuf)))\n   | _ -> failwith \"unreachable type_token\")\nlet jsx_child env =\n  let start = end_pos_of_lexbuf env env.lex_lb in\n  let buf = Buffer.create 127 in\n  let raw = Buffer.create 127 in\n  let (env, child) = jsx_child env start buf raw env.lex_lb in\n  let loc = loc_of_token env child in\n  let lex_errors_acc = (env.lex_state).lex_errors_acc in\n  if lex_errors_acc = []\n  then\n    (env,\n      {\n        Lex_result.lex_token = child;\n        lex_loc = loc;\n        lex_comments = [];\n        lex_errors = []\n      })\n  else\n    ({ env with lex_state = { 
lex_errors_acc = [] } },\n      {\n        Lex_result.lex_token = child;\n        lex_loc = loc;\n        lex_comments = [];\n        lex_errors = (List.rev lex_errors_acc)\n      })\nlet wrap f =\n  let rec helper comments env =\n    match f env env.lex_lb with\n    | Token (env, t) ->\n        let loc = loc_of_token env t in\n        let lex_comments = if comments = [] then [] else List.rev comments in\n        let lex_token = t in\n        let lex_errors_acc = (env.lex_state).lex_errors_acc in\n        if lex_errors_acc = []\n        then\n          ({ env with lex_last_loc = loc },\n            {\n              Lex_result.lex_token = lex_token;\n              lex_loc = loc;\n              lex_comments;\n              lex_errors = []\n            })\n        else\n          ({ env with lex_last_loc = loc; lex_state = Lex_env.empty_lex_state\n           },\n            {\n              Lex_result.lex_token = lex_token;\n              lex_loc = loc;\n              lex_comments;\n              lex_errors = (List.rev lex_errors_acc)\n            })\n    | Comment (env, ((loc, _) as comment)) ->\n        let env = { env with lex_last_loc = loc } in\n        helper (comment :: comments) env\n    | Continue env -> helper comments env in\n  fun env -> helper [] env\nlet regexp = wrap regexp\nlet jsx_tag = wrap jsx_tag\nlet template_tail = wrap template_tail\nlet type_token = wrap type_token\nlet token = wrap token\nlet is_valid_identifier_name lexbuf =\n  let rec __sedlex_state_0 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_132 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | 1 -> __sedlex_state_2 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_2 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_2 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_3 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_3 =\n    
function\n    | lexbuf ->\n        (match __sedlex_partition_3 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_4 lexbuf\n         | 1 -> __sedlex_state_7 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_4 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_5 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_5 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_6 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_6 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> 0\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_7 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_4 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_8 lexbuf\n         | _ -> Sedlexing.backtrack lexbuf)\n  and __sedlex_state_8 =\n    function\n    | lexbuf ->\n        (match __sedlex_partition_5 (Sedlexing.__private__next_int lexbuf)\n         with\n         | 0 -> __sedlex_state_8 lexbuf\n         | 1 -> 0\n         | _ -> Sedlexing.backtrack lexbuf) in\n  Sedlexing.start lexbuf;\n  (match __sedlex_state_0 lexbuf with\n   | 0 -> loop_id_continues lexbuf\n   | _ -> false)"
  },
  {
    "path": "analysis/vendor/js_parser/flow_lexer.mli",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nval jsx_child : Lex_env.t -> Lex_env.t * Lex_result.t\n\nval regexp : Lex_env.t -> Lex_env.t * Lex_result.t\n\nval jsx_tag : Lex_env.t -> Lex_env.t * Lex_result.t\n\nval template_tail : Lex_env.t -> Lex_env.t * Lex_result.t\n\nval type_token : Lex_env.t -> Lex_env.t * Lex_result.t\n\nval token : Lex_env.t -> Lex_env.t * Lex_result.t\n\nval is_valid_identifier_name : Flow_sedlexing.lexbuf -> bool\n"
  },
  {
    "path": "analysis/vendor/js_parser/flow_sedlexing.ml",
    "content": "(* The package sedlex is released under the terms of an MIT-like license. *)\n(* See the attached LICENSE file.                                         *)\n(* Copyright 2005, 2013 by Alain Frisch and LexiFi.                       *)\nexternal ( .!()<- ) : int array -> int -> int -> unit = \"%array_unsafe_set\"\nexternal ( .!() ) : int array -> int -> int = \"%array_unsafe_get\"\nexternal ( .![] ) : string -> int -> char = \"%string_unsafe_get\"\nexternal ( .![]<- ) : bytes -> int -> char -> unit = \"%bytes_unsafe_set\"\n\nexception InvalidCodepoint of int\n\nexception MalFormed\n\n(* Absolute position from the beginning of the stream *)\ntype apos = int\n\n(* critical states:\n  [pos] [curr_bol] [curr_line]\n  The state of [curr_bol] and [curr_line] only changes when we hit a newline\n  [marked_pos] [marked_bol] [marked_line]\n  [start_pos] [start_bol] [start_line]\n  get reset whenever we get a new token\n*)\ntype lexbuf = {\n  buf: int array;\n  (* Number of meaningful char in buffer *)\n  len: int;\n  (* pos is the index in the buffer *)\n  mutable pos: int;\n  (* bol is the index in the input stream but not buffer *)\n  mutable curr_bol: int;\n  (* start from 1, if it is 0, we would not track postion info for you *)\n  mutable curr_line: int;\n  (* First char we need to keep visible *)\n  mutable start_pos: int;\n  mutable start_bol: int;\n  mutable start_line: int;\n  mutable marked_pos: int;\n  mutable marked_bol: int;\n  mutable marked_line: int;\n  mutable marked_val: int;\n}\n\n\nlet lexbuf_clone (x : lexbuf) : lexbuf =\n  {\n    buf = x.buf;\n    len = x.len;\n    pos = x.pos;\n    curr_bol = x.curr_bol;\n    curr_line = x.curr_line;\n    start_pos = x.start_pos;\n    start_bol = x.start_bol;\n    start_line = x.start_line;\n    marked_pos = x.marked_pos;\n    marked_bol = x.marked_bol;\n    marked_line = x.marked_line;\n    marked_val = x.marked_val;\n  }\n\nlet empty_lexbuf =\n  {\n    buf = [||];\n    len = 0;\n    pos = 0;\n    
curr_bol = 0;\n    curr_line = 0;\n    start_pos = 0;\n    start_bol = 0;\n    start_line = 0;\n    marked_pos = 0;\n    marked_bol = 0;\n    marked_line = 0;\n    marked_val = 0;\n  }\n\nlet from_int_array a =\n  let len = Array.length a in\n  { empty_lexbuf with buf = a; len }\n\nlet from_int_sub_array a len =\n  { empty_lexbuf with buf = a; len }\n\nlet new_line lexbuf =\n  if lexbuf.curr_line != 0 then lexbuf.curr_line <- lexbuf.curr_line + 1;\n  lexbuf.curr_bol <- lexbuf.pos\n\nlet next lexbuf : Stdlib.Uchar.t option =\n  if lexbuf.pos = lexbuf.len then\n    None\n  else\n    let ret = lexbuf.buf.!(lexbuf.pos) in\n    lexbuf.pos <- lexbuf.pos + 1;\n    if ret = 10 then new_line lexbuf;\n    Some (Stdlib.Uchar.unsafe_of_int ret)\n\nlet __private__next_int lexbuf : int =\n  if lexbuf.pos = lexbuf.len then\n    -1\n  else\n    let ret = lexbuf.buf.!(lexbuf.pos) in\n    lexbuf.pos <- lexbuf.pos + 1;\n    if ret = 10 then new_line lexbuf;\n    ret\n\nlet mark lexbuf i =\n  lexbuf.marked_pos <- lexbuf.pos;\n  lexbuf.marked_bol <- lexbuf.curr_bol;\n  lexbuf.marked_line <- lexbuf.curr_line;\n  lexbuf.marked_val <- i\n\nlet start lexbuf =\n  lexbuf.start_pos <- lexbuf.pos;\n  lexbuf.start_bol <- lexbuf.curr_bol;\n  lexbuf.start_line <- lexbuf.curr_line;\n  mark lexbuf (-1)\n\nlet backtrack lexbuf =\n  lexbuf.pos <- lexbuf.marked_pos;\n  lexbuf.curr_bol <- lexbuf.marked_bol;\n  lexbuf.curr_line <- lexbuf.marked_line;\n  lexbuf.marked_val\n\nlet rollback lexbuf =\n  lexbuf.pos <- lexbuf.start_pos;\n  lexbuf.curr_bol <- lexbuf.start_bol;\n  lexbuf.curr_line <- lexbuf.start_line\n\nlet lexeme_start lexbuf = lexbuf.start_pos\nlet set_lexeme_start lexbuf pos = lexbuf.start_pos <- pos\nlet lexeme_end lexbuf = lexbuf.pos\n\nlet loc lexbuf = (lexbuf.start_pos , lexbuf.pos )\n\nlet lexeme_length lexbuf = lexbuf.pos - lexbuf.start_pos\n\nlet sub_lexeme lexbuf pos len = Array.sub lexbuf.buf (lexbuf.start_pos + pos) len\n\nlet lexeme lexbuf = Array.sub lexbuf.buf lexbuf.start_pos 
(lexbuf.pos - lexbuf.start_pos)\n\nlet current_code_point lexbuf = lexbuf.buf.(lexbuf.start_pos)\n(* Decode UTF-8 encoded [s] into codepoints in [a], returning the length of the\n * decoded string.\n *\n * To call this function safely:\n * - ensure that [slen] is not greater than the length of [s]\n * - ensure that [a] has enough capacity to hold the decoded value\n *)\nlet unsafe_utf8_of_string (s : string) slen (a : int array) : int =\n  let spos = ref 0 in\n  let apos = ref 0 in\n  while !spos < slen do\n    let spos_code = s.![!spos] in\n    (match spos_code with\n    | '\\000' .. '\\127' as c ->\n      (* U+0000 - U+007F: 0xxxxxxx *)\n      a.!(!apos) <- Char.code c;\n      incr spos\n    | '\\192' .. '\\223' as c ->\n      (* U+0080 - U+07FF: 110xxxxx 10xxxxxx *)\n      let n1 = Char.code c in\n      let n2 = Char.code s.![!spos + 1] in\n      if n2 lsr 6 != 0b10 then raise MalFormed;\n      a.!(!apos) <- ((n1 land 0x1f) lsl 6) lor (n2 land 0x3f);\n      spos := !spos + 2\n    | '\\224' .. '\\239' as c ->\n      (* U+0800 - U+FFFF: 1110xxxx 10xxxxxx 10xxxxxx\n         U+D800 - U+DFFF are reserved for surrogate halves (RFC 3629) *)\n      let n1 = Char.code c in\n      let n2 = Char.code s.![!spos + 1] in\n      let n3 = Char.code s.![!spos + 2] in\n      let p = ((n1 land 0x0f) lsl 12) lor ((n2 land 0x3f) lsl 6) lor (n3 land 0x3f) in\n      if (n2 lsr 6 != 0b10 || n3 lsr 6 != 0b10) || (p >= 0xd800 && p <= 0xdfff) then raise MalFormed;\n      a.!(!apos) <- p;\n      spos := !spos + 3\n    | '\\240' .. 
'\\247' as c ->\n      (* U+10000 - U+1FFFFF: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx\n         > U+10FFFF are invalid (RFC 3629) *)\n      let n1 = Char.code c in\n      let n2 = Char.code s.![!spos + 1] in\n      let n3 = Char.code s.![!spos + 2] in\n      let n4 = Char.code s.![!spos + 3] in\n      if n2 lsr 6 != 0b10 || n3 lsr 6 != 0b10 || n4 lsr 6 != 0b10 then raise MalFormed;\n      let p =\n        ((n1 land 0x07) lsl 18)\n        lor ((n2 land 0x3f) lsl 12)\n        lor ((n3 land 0x3f) lsl 6)\n        lor (n4 land 0x3f)\n      in\n      if p > 0x10ffff then raise MalFormed;\n      a.!(!apos) <- p;\n      spos := !spos + 4\n    | _ -> raise MalFormed);\n    incr apos\n  done;\n  !apos\n\n(* Encode the decoded codepoints in [a] as UTF-8 into [b], returning the length\n * of the encoded string.\n *\n * To call this function safely:\n * - ensure that [offset + len] is not greater than the length of [a]\n * - ensure that [b] has sufficient capacity to hold the encoded value\n *)\nlet unsafe_string_of_utf8 (a : int array) ~(offset : int) ~(len : int) (b : bytes) : int =\n  let apos = ref offset in\n  let len = ref len in\n  let i = ref 0 in\n  while !len > 0 do\n    let u = a.!(!apos) in\n    if u < 0 then\n      raise MalFormed\n    else if u <= 0x007F then begin\n      b.![!i] <- Char.unsafe_chr u;\n      incr i\n    end else if u <= 0x07FF then (\n      b.![!i] <- Char.unsafe_chr (0xC0 lor (u lsr 6));\n      b.![!i + 1] <- Char.unsafe_chr (0x80 lor (u land 0x3F));\n      i := !i + 2\n    ) else if u <= 0xFFFF then (\n      b.![!i] <- Char.unsafe_chr (0xE0 lor (u lsr 12));\n      b.![!i + 1] <- Char.unsafe_chr (0x80 lor ((u lsr 6) land 0x3F));\n      b.![!i + 2] <- Char.unsafe_chr (0x80 lor (u land 0x3F));\n      i := !i + 3\n    ) else if u <= 0x10FFFF then (\n      b.![!i] <- Char.unsafe_chr (0xF0 lor (u lsr 18));\n      b.![!i + 1] <- Char.unsafe_chr (0x80 lor ((u lsr 12) land 0x3F));\n      b.![!i + 2] <- Char.unsafe_chr (0x80 lor ((u lsr 6) land 0x3F));\n      
b.![!i + 3] <- Char.unsafe_chr (0x80 lor (u land 0x3F));\n      i := !i + 4\n    ) else\n      raise MalFormed;\n    incr apos;\n    decr len\n  done;\n  !i\n\nmodule Utf8 = struct\n  let from_string s =\n    let slen = String.length s in\n    let a = Array.make slen 0 in\n    let len = unsafe_utf8_of_string s slen a in\n    from_int_sub_array a len\n\n  let sub_lexeme lexbuf pos len : string =\n    let offset = lexbuf.start_pos + pos in\n    let b = Bytes.create (len * 4) in\n    let buf = lexbuf.buf in\n    (* Assertion needed, since we make use of unsafe API below *)\n    assert (offset + len <= Array.length buf);\n    let i = unsafe_string_of_utf8 buf ~offset ~len b in\n    Bytes.sub_string b 0 i\n\n  let lexeme lexbuf : string =\n    let offset = lexbuf.start_pos in\n    let len = lexbuf.pos - offset in\n    let b = Bytes.create (len * 4) in\n    let buf = lexbuf.buf in\n    let i = unsafe_string_of_utf8 buf ~offset ~len b in\n    Bytes.sub_string b 0 i\n\n  let lexeme_to_buffer lexbuf buffer : unit =\n    let offset = lexbuf.start_pos in\n    let len = lexbuf.pos - offset in\n    let b = Bytes.create (len * 4) in\n    let buf = lexbuf.buf in\n    let i = unsafe_string_of_utf8 buf ~offset ~len b in\n    Buffer.add_subbytes buffer b 0 i\n\n  let lexeme_to_buffer2 lexbuf buf1 buf2 : unit =\n    let offset = lexbuf.start_pos in\n    let len = lexbuf.pos - offset in\n    let b = Bytes.create (len * 4) in\n    let buf = lexbuf.buf in\n    let i = unsafe_string_of_utf8 buf ~offset ~len b in\n    Buffer.add_subbytes buf1 b 0 i;\n    Buffer.add_subbytes buf2 b 0 i\nend\n\nlet string_of_utf8 (lexbuf : int array) : string =\n  let offset = 0 in\n  let len = Array.length lexbuf in\n  let b = Bytes.create (len * 4) in\n  let i = unsafe_string_of_utf8 lexbuf ~offset ~len b in\n  Bytes.sub_string b 0 i\n\nlet backoff lexbuf npos =\n  lexbuf.pos <- lexbuf.pos - npos\n"
  },
  {
    "path": "analysis/vendor/js_parser/flow_sedlexing.mli",
    "content": "\n(** This is a module provides the minimal Sedlexing suppport\n  It is mostly a subset of Sedlexing with two functions for performance reasons:\n  - Utf8.lexeme_to_buffer\n  - Utf8.lexeme_to_buffer2\n*)\nexception InvalidCodepoint of int\nexception MalFormed\ntype apos = int\ntype lexbuf\nval lexbuf_clone : lexbuf -> lexbuf\n\nval from_int_array : int array -> lexbuf\nval new_line : lexbuf -> unit\nval next : lexbuf -> Uchar.t option\n\n(**/**)\nval __private__next_int : lexbuf -> int\n(**/**)\n\nval mark : lexbuf -> int -> unit\nval start : lexbuf -> unit\nval backtrack : lexbuf -> int\nval rollback : lexbuf -> unit\nval lexeme_start : lexbuf -> int\nval lexeme_end : lexbuf -> int\nval loc : lexbuf -> int * int\nval lexeme_length : lexbuf -> int\nval sub_lexeme : lexbuf -> int -> int -> int array\nval lexeme : lexbuf -> int array\nmodule Utf8 : sig\n  val from_string : string -> lexbuf\n  val sub_lexeme : lexbuf -> int -> int -> string\n  val lexeme : lexbuf -> string\n  (** This API avoids another allocation *)\n  val lexeme_to_buffer : lexbuf -> Buffer.t -> unit\n  val lexeme_to_buffer2 : lexbuf -> Buffer.t -> Buffer.t -> unit\nend\n\nval string_of_utf8 : int array -> string\n\n(** Two APIs used when we want to do customize lexing\n    instead of using the regex based engine\n*)\nval current_code_point : lexbuf -> int\nval backoff : lexbuf -> int -> unit\nval set_lexeme_start : lexbuf -> int -> unit\n"
  },
  {
    "path": "analysis/vendor/js_parser/js_id.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nexternal ( .!() ) : (int * int) array -> int -> int * int = \"%array_unsafe_get\"\n\nlet rec search (arr : _ array) (start : int) (finish : int) target =\n  if start > finish then\n    false\n  else\n    let mid = start + ((finish - start) / 2) in\n    let (a, b) = arr.!(mid) in\n    if target < a then\n      search arr start (mid - 1) target\n    else if target >= b then\n      search arr (mid + 1) finish target\n    else\n      true\n\nlet is_valid_unicode_id (i : int) =\n  search Js_id_unicode.id_continue 0 (Array.length Js_id_unicode.id_continue - 1) i\n"
  },
  {
    "path": "analysis/vendor/js_parser/js_id.mli",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\n(* This test is applied to non-start unicode points *)\nval is_valid_unicode_id : int -> bool\n"
  },
  {
    "path": "analysis/vendor/js_parser/js_id_unicode.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\n(* This lists two valid unicode point ranges in tuple format.\n   see more details in https://mathiasbynens.be/notes/javascript-identifiers-es6\n   TODO: store it in a flat array\n   add more docs\n*)\n[@@@ocamlformat \"disable\"]\n\n(* JS has stricter rules with start id *)\nlet id_start = [|36,37;65,91;95,96;97,123;170,171;181,182;186,187;192,215;216,247;248,706;710,722;736,741;748,749;750,751;880,885;886,888;890,894;895,896;902,903;904,907;908,909;910,930;931,1014;1015,1154;1162,1328;1329,1367;1369,1370;1376,1417;1488,1515;1519,1523;1568,1611;1646,1648;1649,1748;1749,1750;1765,1767;1774,1776;1786,1789;1791,1792;1808,1809;1810,1840;1869,1958;1969,1970;1994,2027;2036,2038;2042,2043;2048,2070;2074,2075;2084,2085;2088,2089;2112,2137;2144,2155;2208,2229;2230,2238;2308,2362;2365,2366;2384,2385;2392,2402;2417,2433;2437,2445;2447,2449;2451,2473;2474,2481;2482,2483;2486,2490;2493,2494;2510,2511;2524,2526;2527,2530;2544,2546;2556,2557;2565,2571;2575,2577;2579,2601;2602,2609;2610,2612;2613,2615;2616,2618;2649,2653;2654,2655;2674,2677;2693,2702;2703,2706;2707,2729;2730,2737;2738,2740;2741,2746;2749,2750;2768,2769;2784,2786;2809,2810;2821,2829;2831,2833;2835,2857;2858,2865;2866,2868;2869,2874;2877,2878;2908,2910;2911,2914;2929,2930;2947,2948;2949,2955;2958,2961;2962,2966;2969,2971;2972,2973;2974,2976;2979,2981;2984,2987;2990,3002;3024,3025;3077,3085;3086,3089;3090,3113;3114,3130;3133,3134;3160,3163;3168,3170;3200,3201;3205,3213;3214,3217;3218,3241;3242,3252;3253,3258;3261,3262;3294,3295;3296,3298;3313,3315;3333,3341;3342,3345;3346,3387;3389,3390;3406,3407;3412,3415;3423,3426;3450,3456;3461,3479;3482,3506;3507,3516;3517,3518;3520,3527;3585,3633;3634,3636;3648,3655;3713,3715;3716,3717;3718,3723;3724,3748;3749,3750;3751,3761;3762,3764;3773,3774;3776,3781;3782,3783;3804
,3808;3840,3841;3904,3912;3913,3949;3976,3981;4096,4139;4159,4160;4176,4182;4186,4190;4193,4194;4197,4199;4206,4209;4213,4226;4238,4239;4256,4294;4295,4296;4301,4302;4304,4347;4348,4681;4682,4686;4688,4695;4696,4697;4698,4702;4704,4745;4746,4750;4752,4785;4786,4790;4792,4799;4800,4801;4802,4806;4808,4823;4824,4881;4882,4886;4888,4955;4992,5008;5024,5110;5112,5118;5121,5741;5743,5760;5761,5787;5792,5867;5870,5881;5888,5901;5902,5906;5920,5938;5952,5970;5984,5997;5998,6001;6016,6068;6103,6104;6108,6109;6176,6265;6272,6313;6314,6315;6320,6390;6400,6431;6480,6510;6512,6517;6528,6572;6576,6602;6656,6679;6688,6741;6823,6824;6917,6964;6981,6988;7043,7073;7086,7088;7098,7142;7168,7204;7245,7248;7258,7294;7296,7305;7312,7355;7357,7360;7401,7405;7406,7412;7413,7415;7418,7419;7424,7616;7680,7958;7960,7966;7968,8006;8008,8014;8016,8024;8025,8026;8027,8028;8029,8030;8031,8062;8064,8117;8118,8125;8126,8127;8130,8133;8134,8141;8144,8148;8150,8156;8160,8173;8178,8181;8182,8189;8305,8306;8319,8320;8336,8349;8450,8451;8455,8456;8458,8468;8469,8470;8472,8478;8484,8485;8486,8487;8488,8489;8490,8506;8508,8512;8517,8522;8526,8527;8544,8585;11264,11311;11312,11359;11360,11493;11499,11503;11506,11508;11520,11558;11559,11560;11565,11566;11568,11624;11631,11632;11648,11671;11680,11687;11688,11695;11696,11703;11704,11711;11712,11719;11720,11727;11728,11735;11736,11743;12293,12296;12321,12330;12337,12342;12344,12349;12353,12439;12443,12448;12449,12539;12540,12544;12549,12592;12593,12687;12704,12731;12784,12800;13312,19894;19968,40944;40960,42125;42192,42238;42240,42509;42512,42528;42538,42540;42560,42607;42623,42654;42656,42736;42775,42784;42786,42889;42891,42944;42946,42951;42999,43010;43011,43014;43015,43019;43020,43043;43072,43124;43138,43188;43250,43256;43259,43260;43261,43263;43274,43302;43312,43335;43360,43389;43396,43443;43471,43472;43488,43493;43494,43504;43514,43519;43520,43561;43584,43587;43588,43596;43616,43639;43642,43643;43646,43696;43697,43698;43701,43703;43705,43710;43712,43713;
43714,43715;43739,43742;43744,43755;43762,43765;43777,43783;43785,43791;43793,43799;43808,43815;43816,43823;43824,43867;43868,43880;43888,44003;44032,55204;55216,55239;55243,55292;63744,64110;64112,64218;64256,64263;64275,64280;64285,64286;64287,64297;64298,64311;64312,64317;64318,64319;64320,64322;64323,64325;64326,64434;64467,64830;64848,64912;64914,64968;65008,65020;65136,65141;65142,65277;65313,65339;65345,65371;65382,65471;65474,65480;65482,65488;65490,65496;65498,65501;65536,65548;65549,65575;65576,65595;65596,65598;65599,65614;65616,65630;65664,65787;65856,65909;66176,66205;66208,66257;66304,66336;66349,66379;66384,66422;66432,66462;66464,66500;66504,66512;66513,66518;66560,66718;66736,66772;66776,66812;66816,66856;66864,66916;67072,67383;67392,67414;67424,67432;67584,67590;67592,67593;67594,67638;67639,67641;67644,67645;67647,67670;67680,67703;67712,67743;67808,67827;67828,67830;67840,67862;67872,67898;67968,68024;68030,68032;68096,68097;68112,68116;68117,68120;68121,68150;68192,68221;68224,68253;68288,68296;68297,68325;68352,68406;68416,68438;68448,68467;68480,68498;68608,68681;68736,68787;68800,68851;68864,68900;69376,69405;69415,69416;69424,69446;69600,69623;69635,69688;69763,69808;69840,69865;69891,69927;69956,69957;69968,70003;70006,70007;70019,70067;70081,70085;70106,70107;70108,70109;70144,70162;70163,70188;70272,70279;70280,70281;70282,70286;70287,70302;70303,70313;70320,70367;70405,70413;70415,70417;70419,70441;70442,70449;70450,70452;70453,70458;70461,70462;70480,70481;70493,70498;70656,70709;70727,70731;70751,70752;70784,70832;70852,70854;70855,70856;71040,71087;71128,71132;71168,71216;71236,71237;71296,71339;71352,71353;71424,71451;71680,71724;71840,71904;71935,71936;72096,72104;72106,72145;72161,72162;72163,72164;72192,72193;72203,72243;72250,72251;72272,72273;72284,72330;72349,72350;72384,72441;72704,72713;72714,72751;72768,72769;72818,72848;72960,72967;72968,72970;72971,73009;73030,73031;73056,73062;73063,73065;73066,73098;73112,73113;73440,73
459;73728,74650;74752,74863;74880,75076;77824,78895;82944,83527;92160,92729;92736,92767;92880,92910;92928,92976;92992,92996;93027,93048;93053,93072;93760,93824;93952,94027;94032,94033;94099,94112;94176,94178;94179,94180;94208,100344;100352,101107;110592,110879;110928,110931;110948,110952;110960,111356;113664,113771;113776,113789;113792,113801;113808,113818;119808,119893;119894,119965;119966,119968;119970,119971;119973,119975;119977,119981;119982,119994;119995,119996;119997,120004;120005,120070;120071,120075;120077,120085;120086,120093;120094,120122;120123,120127;120128,120133;120134,120135;120138,120145;120146,120486;120488,120513;120514,120539;120540,120571;120572,120597;120598,120629;120630,120655;120656,120687;120688,120713;120714,120745;120746,120771;120772,120780;123136,123181;123191,123198;123214,123215;123584,123628;124928,125125;125184,125252;125259,125260;126464,126468;126469,126496;126497,126499;126500,126501;126503,126504;126505,126515;126516,126520;126521,126522;126523,126524;126530,126531;126535,126536;126537,126538;126539,126540;126541,126544;126545,126547;126548,126549;126551,126552;126553,126554;126555,126556;126557,126558;126559,126560;126561,126563;126564,126565;126567,126571;126572,126579;126580,126584;126585,126589;126590,126591;126592,126602;126603,126620;126625,126628;126629,126634;126635,126652;131072,173783;173824,177973;177984,178206;178208,183970;183984,191457;194560,195102|]\n\n(* The followed ID restriction is relaxed, this one\n   is used in our customized unicode lexing.\n *)\nlet id_continue = 
[|36,37;48,58;65,91;95,96;97,123;170,171;181,182;183,184;186,187;192,215;216,247;248,706;710,722;736,741;748,749;750,751;768,885;886,888;890,894;895,896;902,907;908,909;910,930;931,1014;1015,1154;1155,1160;1162,1328;1329,1367;1369,1370;1376,1417;1425,1470;1471,1472;1473,1475;1476,1478;1479,1480;1488,1515;1519,1523;1552,1563;1568,1642;1646,1748;1749,1757;1759,1769;1770,1789;1791,1792;1808,1867;1869,1970;1984,2038;2042,2043;2045,2046;2048,2094;2112,2140;2144,2155;2208,2229;2230,2238;2259,2274;2275,2404;2406,2416;2417,2436;2437,2445;2447,2449;2451,2473;2474,2481;2482,2483;2486,2490;2492,2501;2503,2505;2507,2511;2519,2520;2524,2526;2527,2532;2534,2546;2556,2557;2558,2559;2561,2564;2565,2571;2575,2577;2579,2601;2602,2609;2610,2612;2613,2615;2616,2618;2620,2621;2622,2627;2631,2633;2635,2638;2641,2642;2649,2653;2654,2655;2662,2678;2689,2692;2693,2702;2703,2706;2707,2729;2730,2737;2738,2740;2741,2746;2748,2758;2759,2762;2763,2766;2768,2769;2784,2788;2790,2800;2809,2816;2817,2820;2821,2829;2831,2833;2835,2857;2858,2865;2866,2868;2869,2874;2876,2885;2887,2889;2891,2894;2902,2904;2908,2910;2911,2916;2918,2928;2929,2930;2946,2948;2949,2955;2958,2961;2962,2966;2969,2971;2972,2973;2974,2976;2979,2981;2984,2987;2990,3002;3006,3011;3014,3017;3018,3022;3024,3025;3031,3032;3046,3056;3072,3085;3086,3089;3090,3113;3114,3130;3133,3141;3142,3145;3146,3150;3157,3159;3160,3163;3168,3172;3174,3184;3200,3204;3205,3213;3214,3217;3218,3241;3242,3252;3253,3258;3260,3269;3270,3273;3274,3278;3285,3287;3294,3295;3296,3300;3302,3312;3313,3315;3328,3332;3333,3341;3342,3345;3346,3397;3398,3401;3402,3407;3412,3416;3423,3428;3430,3440;3450,3456;3458,3460;3461,3479;3482,3506;3507,3516;3517,3518;3520,3527;3530,3531;3535,3541;3542,3543;3544,3552;3558,3568;3570,3572;3585,3643;3648,3663;3664,3674;3713,3715;3716,3717;3718,3723;3724,3748;3749,3750;3751,3774;3776,3781;3782,3783;3784,3790;3792,3802;3804,3808;3840,3841;3864,3866;3872,3882;3893,3894;3895,3896;3897,3898;3902,3912;3913,3949;3953,3973;3974,3992;3993
,4029;4038,4039;4096,4170;4176,4254;4256,4294;4295,4296;4301,4302;4304,4347;4348,4681;4682,4686;4688,4695;4696,4697;4698,4702;4704,4745;4746,4750;4752,4785;4786,4790;4792,4799;4800,4801;4802,4806;4808,4823;4824,4881;4882,4886;4888,4955;4957,4960;4969,4978;4992,5008;5024,5110;5112,5118;5121,5741;5743,5760;5761,5787;5792,5867;5870,5881;5888,5901;5902,5909;5920,5941;5952,5972;5984,5997;5998,6001;6002,6004;6016,6100;6103,6104;6108,6110;6112,6122;6155,6158;6160,6170;6176,6265;6272,6315;6320,6390;6400,6431;6432,6444;6448,6460;6470,6510;6512,6517;6528,6572;6576,6602;6608,6619;6656,6684;6688,6751;6752,6781;6783,6794;6800,6810;6823,6824;6832,6846;6912,6988;6992,7002;7019,7028;7040,7156;7168,7224;7232,7242;7245,7294;7296,7305;7312,7355;7357,7360;7376,7379;7380,7419;7424,7674;7675,7958;7960,7966;7968,8006;8008,8014;8016,8024;8025,8026;8027,8028;8029,8030;8031,8062;8064,8117;8118,8125;8126,8127;8130,8133;8134,8141;8144,8148;8150,8156;8160,8173;8178,8181;8182,8189;8204,8206;8255,8257;8276,8277;8305,8306;8319,8320;8336,8349;8400,8413;8417,8418;8421,8433;8450,8451;8455,8456;8458,8468;8469,8470;8472,8478;8484,8485;8486,8487;8488,8489;8490,8506;8508,8512;8517,8522;8526,8527;8544,8585;11264,11311;11312,11359;11360,11493;11499,11508;11520,11558;11559,11560;11565,11566;11568,11624;11631,11632;11647,11671;11680,11687;11688,11695;11696,11703;11704,11711;11712,11719;11720,11727;11728,11735;11736,11743;11744,11776;12293,12296;12321,12336;12337,12342;12344,12349;12353,12439;12441,12448;12449,12539;12540,12544;12549,12592;12593,12687;12704,12731;12784,12800;13312,19894;19968,40944;40960,42125;42192,42238;42240,42509;42512,42540;42560,42608;42612,42622;42623,42738;42775,42784;42786,42889;42891,42944;42946,42951;42999,43048;43072,43124;43136,43206;43216,43226;43232,43256;43259,43260;43261,43310;43312,43348;43360,43389;43392,43457;43471,43482;43488,43519;43520,43575;43584,43598;43600,43610;43616,43639;43642,43715;43739,43742;43744,43760;43762,43767;43777,43783;43785,43791;43793,43799;43808,4381
5;43816,43823;43824,43867;43868,43880;43888,44011;44012,44014;44016,44026;44032,55204;55216,55239;55243,55292;63744,64110;64112,64218;64256,64263;64275,64280;64285,64297;64298,64311;64312,64317;64318,64319;64320,64322;64323,64325;64326,64434;64467,64830;64848,64912;64914,64968;65008,65020;65024,65040;65056,65072;65075,65077;65101,65104;65136,65141;65142,65277;65296,65306;65313,65339;65343,65344;65345,65371;65382,65471;65474,65480;65482,65488;65490,65496;65498,65501;65536,65548;65549,65575;65576,65595;65596,65598;65599,65614;65616,65630;65664,65787;65856,65909;66045,66046;66176,66205;66208,66257;66272,66273;66304,66336;66349,66379;66384,66427;66432,66462;66464,66500;66504,66512;66513,66518;66560,66718;66720,66730;66736,66772;66776,66812;66816,66856;66864,66916;67072,67383;67392,67414;67424,67432;67584,67590;67592,67593;67594,67638;67639,67641;67644,67645;67647,67670;67680,67703;67712,67743;67808,67827;67828,67830;67840,67862;67872,67898;67968,68024;68030,68032;68096,68100;68101,68103;68108,68116;68117,68120;68121,68150;68152,68155;68159,68160;68192,68221;68224,68253;68288,68296;68297,68327;68352,68406;68416,68438;68448,68467;68480,68498;68608,68681;68736,68787;68800,68851;68864,68904;68912,68922;69376,69405;69415,69416;69424,69457;69600,69623;69632,69703;69734,69744;69759,69819;69840,69865;69872,69882;69888,69941;69942,69952;69956,69959;69968,70004;70006,70007;70016,70085;70089,70093;70096,70107;70108,70109;70144,70162;70163,70200;70206,70207;70272,70279;70280,70281;70282,70286;70287,70302;70303,70313;70320,70379;70384,70394;70400,70404;70405,70413;70415,70417;70419,70441;70442,70449;70450,70452;70453,70458;70459,70469;70471,70473;70475,70478;70480,70481;70487,70488;70493,70500;70502,70509;70512,70517;70656,70731;70736,70746;70750,70752;70784,70854;70855,70856;70864,70874;71040,71094;71096,71105;71128,71134;71168,71233;71236,71237;71248,71258;71296,71353;71360,71370;71424,71451;71453,71468;71472,71482;71680,71739;71840,71914;71935,71936;72096,72104;72106,72152;72154,
72162;72163,72165;72192,72255;72263,72264;72272,72346;72349,72350;72384,72441;72704,72713;72714,72759;72760,72769;72784,72794;72818,72848;72850,72872;72873,72887;72960,72967;72968,72970;72971,73015;73018,73019;73020,73022;73023,73032;73040,73050;73056,73062;73063,73065;73066,73103;73104,73106;73107,73113;73120,73130;73440,73463;73728,74650;74752,74863;74880,75076;77824,78895;82944,83527;92160,92729;92736,92767;92768,92778;92880,92910;92912,92917;92928,92983;92992,92996;93008,93018;93027,93048;93053,93072;93760,93824;93952,94027;94031,94088;94095,94112;94176,94178;94179,94180;94208,100344;100352,101107;110592,110879;110928,110931;110948,110952;110960,111356;113664,113771;113776,113789;113792,113801;113808,113818;113821,113823;119141,119146;119149,119155;119163,119171;119173,119180;119210,119214;119362,119365;119808,119893;119894,119965;119966,119968;119970,119971;119973,119975;119977,119981;119982,119994;119995,119996;119997,120004;120005,120070;120071,120075;120077,120085;120086,120093;120094,120122;120123,120127;120128,120133;120134,120135;120138,120145;120146,120486;120488,120513;120514,120539;120540,120571;120572,120597;120598,120629;120630,120655;120656,120687;120688,120713;120714,120745;120746,120771;120772,120780;120782,120832;121344,121399;121403,121453;121461,121462;121476,121477;121499,121504;121505,121520;122880,122887;122888,122905;122907,122914;122915,122917;122918,122923;123136,123181;123184,123198;123200,123210;123214,123215;123584,123642;124928,125125;125136,125143;125184,125260;125264,125274;126464,126468;126469,126496;126497,126499;126500,126501;126503,126504;126505,126515;126516,126520;126521,126522;126523,126524;126530,126531;126535,126536;126537,126538;126539,126540;126541,126544;126545,126547;126548,126549;126551,126552;126553,126554;126555,126556;126557,126558;126559,126560;126561,126563;126564,126565;126567,126571;126572,126579;126580,126584;126585,126589;126590,126591;126592,126602;126603,126620;126625,126628;126629,126634;126635,126652;13107
2,173783;173824,177973;177984,178206;178208,183970;183984,191457;194560,195102;917760,918000|]\n"
  },
  {
    "path": "analysis/vendor/js_parser/jsx_parser.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Ast = Flow_ast\nopen Token\nopen Parser_common\nopen Parser_env\nopen Flow_ast\n\nmodule JSX (Parse : Parser_common.PARSER) = struct\n  (* Consumes and returns the trailing comments after the end of a JSX tag name,\n     attribute, or spread attribute.\n\n     If the component is followed by the end of the JSX tag, then all trailing\n     comments are returned. If the component is instead followed by another tag\n     component on another line, only trailing comments on the same line are\n     returned. If the component is followed by another tag component on the same\n     line, all trailing comments will instead be leading the next component. *)\n  let tag_component_trailing_comments env =\n    match Peek.token env with\n    | T_EOF\n    | T_DIV\n    | T_GREATER_THAN ->\n      Eat.trailing_comments env\n    | _ when Peek.is_line_terminator env -> Eat.comments_until_next_line env\n    | _ -> []\n\n  let spread_attribute env =\n    let leading = Peek.comments env in\n    Eat.push_lex_mode env Lex_mode.NORMAL;\n    let (loc, argument) =\n      with_loc\n        (fun env ->\n          Expect.token env T_LCURLY;\n          Expect.token env T_ELLIPSIS;\n          let argument = Parse.assignment env in\n          Expect.token env T_RCURLY;\n          argument)\n        env\n    in\n    Eat.pop_lex_mode env;\n    let trailing = tag_component_trailing_comments env in\n    ( loc,\n      {\n        JSX.SpreadAttribute.argument;\n        comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n      }\n    )\n\n  let expression_container_contents env =\n    if Peek.token env = T_RCURLY then\n      JSX.ExpressionContainer.EmptyExpression\n    else\n      JSX.ExpressionContainer.Expression (Parse.expression env)\n\n  let expression_container env =\n   
 let leading = Peek.comments env in\n    Eat.push_lex_mode env Lex_mode.NORMAL;\n    let (loc, expression) =\n      with_loc\n        (fun env ->\n          Expect.token env T_LCURLY;\n          let expression = expression_container_contents env in\n          Expect.token env T_RCURLY;\n          expression)\n        env\n    in\n    Eat.pop_lex_mode env;\n    let trailing = tag_component_trailing_comments env in\n    ( loc,\n      {\n        JSX.ExpressionContainer.expression;\n        comments = Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal:[] ();\n      }\n    )\n\n  let expression_container_or_spread_child env =\n    Eat.push_lex_mode env Lex_mode.NORMAL;\n    let (loc, result) =\n      with_loc\n        (fun env ->\n          Expect.token env T_LCURLY;\n          let result =\n            match Peek.token env with\n            | T_ELLIPSIS ->\n              let leading = Peek.comments env in\n              Expect.token env T_ELLIPSIS;\n              let expression = Parse.assignment env in\n              JSX.SpreadChild\n                {\n                  JSX.SpreadChild.expression;\n                  comments = Flow_ast_utils.mk_comments_opt ~leading ();\n                }\n            | _ ->\n              let expression = expression_container_contents env in\n              let internal =\n                match expression with\n                | JSX.ExpressionContainer.EmptyExpression -> Peek.comments env\n                | _ -> []\n              in\n              JSX.ExpressionContainer\n                {\n                  JSX.ExpressionContainer.expression;\n                  comments = Flow_ast_utils.mk_comments_with_internal_opt ~internal ();\n                }\n          in\n          Expect.token env T_RCURLY;\n          result)\n        env\n    in\n    Eat.pop_lex_mode env;\n    (loc, result)\n\n  let identifier env =\n    let loc = Peek.loc env in\n    let name =\n      match Peek.token env with\n      | 
T_JSX_IDENTIFIER { raw; _ } -> raw\n      | _ ->\n        error_unexpected ~expected:\"an identifier\" env;\n        \"\"\n    in\n    let leading = Peek.comments env in\n    Eat.token env;\n    (* Unless this identifier is the first part of a namespaced name, member\n       expression, or attribute name, it is the end of a tag component. *)\n    let trailing =\n      match Peek.token env with\n      (* Namespaced name *)\n      | T_COLON\n      (* Member expression *)\n      | T_PERIOD\n      (* Attribute name *)\n      | T_ASSIGN ->\n        Eat.trailing_comments env\n      | _ -> tag_component_trailing_comments env\n    in\n    (loc, JSX.Identifier.{ name; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () })\n\n  let name =\n    let rec member_expression env member =\n      match Peek.token env with\n      | T_PERIOD ->\n        let (start_loc, _) = member in\n        let member =\n          with_loc\n            ~start_loc\n            (fun env ->\n              Expect.token env T_PERIOD;\n              let property = identifier env in\n              {\n                JSX.MemberExpression._object = JSX.MemberExpression.MemberExpression member;\n                property;\n              })\n            env\n        in\n        member_expression env member\n      | _ -> member\n    in\n    fun env ->\n      match Peek.ith_token ~i:1 env with\n      | T_COLON ->\n        let namespaced_name =\n          with_loc\n            (fun env ->\n              let namespace = identifier env in\n              Expect.token env T_COLON;\n              let name = identifier env in\n              { JSX.NamespacedName.namespace; name })\n            env\n        in\n        JSX.NamespacedName namespaced_name\n      | T_PERIOD ->\n        let member =\n          with_loc\n            (fun env ->\n              let _object = JSX.MemberExpression.Identifier (identifier env) in\n              Expect.token env T_PERIOD;\n              let property = identifier env in\n  
            { JSX.MemberExpression._object; property })\n            env\n        in\n        JSX.MemberExpression (member_expression env member)\n      | _ ->\n        let name = identifier env in\n        JSX.Identifier name\n\n  let attribute env =\n    with_loc\n      (fun env ->\n        let name =\n          match Peek.ith_token ~i:1 env with\n          | T_COLON ->\n            let namespaced_name =\n              with_loc\n                (fun env ->\n                  let namespace = identifier env in\n                  Expect.token env T_COLON;\n                  let name = identifier env in\n                  { JSX.NamespacedName.namespace; name })\n                env\n            in\n            JSX.Attribute.NamespacedName namespaced_name\n          | _ ->\n            let name = identifier env in\n            JSX.Attribute.Identifier name\n        in\n        let value =\n          match Peek.token env with\n          | T_ASSIGN ->\n            Expect.token env T_ASSIGN;\n            let leading = Peek.comments env in\n            let tkn = Peek.token env in\n            begin\n              match tkn with\n              | T_LCURLY ->\n                let (loc, expression_container) = expression_container env in\n                JSX.ExpressionContainer.(\n                  match expression_container.expression with\n                  | EmptyExpression ->\n                    error_at env (loc, Parse_error.JSXAttributeValueEmptyExpression)\n                  | _ -> ()\n                );\n                Some (JSX.Attribute.ExpressionContainer (loc, expression_container))\n              | T_JSX_TEXT (loc, value, raw) as token ->\n                Expect.token env token;\n                let value = Ast.Literal.String value in\n                let trailing = tag_component_trailing_comments env in\n                Some\n                  (JSX.Attribute.Literal\n                     ( loc,\n                       {\n                         
Ast.Literal.value;\n                         raw;\n                         comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n                       }\n                     )\n                  )\n              | _ ->\n                error env Parse_error.InvalidJSXAttributeValue;\n                let loc = Peek.loc env in\n                let raw = \"\" in\n                let value = Ast.Literal.String \"\" in\n                Some (JSX.Attribute.Literal (loc, { Ast.Literal.value; raw; comments = None }))\n            end\n          | _ -> None\n        in\n        { JSX.Attribute.name; value })\n      env\n\n  let opening_element =\n    let rec attributes env acc =\n      match Peek.token env with\n      | T_JSX_IDENTIFIER _ ->\n        let attribute = JSX.Opening.Attribute (attribute env) in\n        attributes env (attribute :: acc)\n      | T_LCURLY ->\n        let attribute = JSX.Opening.SpreadAttribute (spread_attribute env) in\n        attributes env (attribute :: acc)\n      | _ -> List.rev acc\n    in\n    fun env ->\n      with_loc\n        (fun env ->\n          Expect.token env T_LESS_THAN;\n          match Peek.token env with\n          | T_GREATER_THAN ->\n            Eat.token env;\n            Ok `Fragment\n          | T_JSX_IDENTIFIER _ ->\n            let name = name env in\n            let attributes = attributes env [] in\n            let self_closing = Eat.maybe env T_DIV in\n            let element = `Element { JSX.Opening.name; self_closing; attributes } in\n            if Eat.maybe env T_GREATER_THAN then\n              Ok element\n            else (\n              Expect.error env T_GREATER_THAN;\n              Error element\n            )\n          | _ ->\n            (* TODO: also say that we could expect an identifier, or if we're in a JSX child\n               then suggest escaping the < as `{'<'}` *)\n            Expect.error env T_GREATER_THAN;\n            Error `Fragment)\n        env\n\n  let closing_element env 
=\n    with_loc\n      (fun env ->\n        Expect.token env T_LESS_THAN;\n        Expect.token env T_DIV;\n        match Peek.token env with\n        | T_GREATER_THAN ->\n          Eat.token env;\n          `Fragment\n        | T_JSX_IDENTIFIER _ ->\n          let name = name env in\n          Expect.token_opt env T_GREATER_THAN;\n          `Element { JSX.Closing.name }\n        | _ ->\n          Expect.error env T_GREATER_THAN;\n          `Fragment)\n      env\n\n  let rec child env =\n    match Peek.token env with\n    | T_LCURLY -> expression_container_or_spread_child env\n    | T_JSX_TEXT (loc, value, raw) as token ->\n      Expect.token env token;\n      (loc, JSX.Text { JSX.Text.value; raw })\n    | _ ->\n      (match element_or_fragment env with\n      | (loc, `Element element) -> (loc, JSX.Element element)\n      | (loc, `Fragment fragment) -> (loc, JSX.Fragment fragment))\n\n  and element =\n    let children_and_closing =\n      let rec children_and_closing env acc =\n        let previous_loc = last_loc env in\n        match Peek.token env with\n        | T_LESS_THAN ->\n          Eat.push_lex_mode env Lex_mode.JSX_TAG;\n          begin\n            match (Peek.token env, Peek.ith_token ~i:1 env) with\n            | (T_LESS_THAN, T_EOF)\n            | (T_LESS_THAN, T_DIV) ->\n              let closing =\n                match closing_element env with\n                | (loc, `Element ec) -> `Element (loc, ec)\n                | (loc, `Fragment) -> `Fragment loc\n              in\n              (* We double pop to avoid going back to childmode and re-lexing the\n               * lookahead *)\n              Eat.double_pop_lex_mode env;\n              (List.rev acc, previous_loc, closing)\n            | _ ->\n              let child =\n                match element env with\n                | (loc, `Element e) -> (loc, JSX.Element e)\n                | (loc, `Fragment f) -> (loc, JSX.Fragment f)\n              in\n              children_and_closing env 
(child :: acc)\n          end\n        | T_EOF ->\n          error_unexpected env;\n          (List.rev acc, previous_loc, `None)\n        | _ -> children_and_closing env (child env :: acc)\n      in\n      fun env ->\n        let start_loc = Peek.loc env in\n        let (children, last_child_loc, closing) = children_and_closing env [] in\n        let last_child_loc =\n          match last_child_loc with\n          | Some x -> x\n          | None -> start_loc\n        in\n        (* It's a little bit tricky to untangle the parsing of the child elements from the parsing\n         * of the closing element, so we can't easily use `with_loc` here. Instead, we'll use the\n         * same logic that `with_loc` uses, but manipulate the locations explicitly. *)\n        let children_loc = Loc.btwn start_loc last_child_loc in\n        ((children_loc, children), closing)\n    in\n    let rec normalize name =\n      JSX.(\n        match name with\n        | Identifier (_, { Identifier.name; comments = _ }) -> name\n        | NamespacedName (_, { NamespacedName.namespace; name }) ->\n          (snd namespace).Identifier.name ^ \":\" ^ (snd name).Identifier.name\n        | MemberExpression (_, { MemberExpression._object; property }) ->\n          let _object =\n            match _object with\n            | MemberExpression.Identifier (_, { Identifier.name = id; _ }) -> id\n            | MemberExpression.MemberExpression e -> normalize (JSX.MemberExpression e)\n          in\n          _object ^ \".\" ^ (snd property).Identifier.name\n      )\n    in\n    let is_self_closing = function\n      | (_, Ok (`Element e)) -> e.JSX.Opening.self_closing\n      | (_, Ok `Fragment) -> false\n      | (_, Error _) -> true\n    in\n    fun env ->\n      let leading = Peek.comments env in\n      let opening_element = opening_element env in\n      Eat.pop_lex_mode env;\n      let (children, closing_element) =\n        if is_self_closing opening_element then\n          (with_loc (fun _ -> []) 
env, `None)\n        else (\n          Eat.push_lex_mode env Lex_mode.JSX_CHILD;\n          children_and_closing env\n        )\n      in\n      let trailing = Eat.trailing_comments env in\n      let end_loc =\n        match closing_element with\n        | `Element (loc, { JSX.Closing.name }) ->\n          (match snd opening_element with\n          | Ok (`Element { JSX.Opening.name = opening_name; _ }) ->\n            let opening_name = normalize opening_name in\n            if normalize name <> opening_name then\n              error env (Parse_error.ExpectedJSXClosingTag opening_name)\n          | Ok `Fragment -> error env (Parse_error.ExpectedJSXClosingTag \"JSX fragment\")\n          | Error _ -> ());\n          loc\n        | `Fragment loc ->\n          (match snd opening_element with\n          | Ok (`Element { JSX.Opening.name = opening_name; _ }) ->\n            error env (Parse_error.ExpectedJSXClosingTag (normalize opening_name))\n          | Ok `Fragment -> ()\n          | Error _ -> ());\n          loc\n        | _ -> fst opening_element\n      in\n      let result =\n        match opening_element with\n        | (start_loc, Ok (`Element e))\n        | (start_loc, Error (`Element e)) ->\n          `Element\n            JSX.\n              {\n                opening_element = (start_loc, e);\n                closing_element =\n                  (match closing_element with\n                  | `Element e -> Some e\n                  | _ -> None);\n                children;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n              }\n            | (start_loc, Ok `Fragment)\n            | (start_loc, Error `Fragment) ->\n              `Fragment\n                {\n                  JSX.frag_opening_element = start_loc;\n                  frag_closing_element =\n                    (match closing_element with\n                    | `Fragment loc -> loc\n                    (* the following are parse erros *)\n            
        | `Element (loc, _) -> loc\n                    | _ -> end_loc);\n                  frag_children = children;\n                  frag_comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n                }\n      in\n\n      (Loc.btwn (fst opening_element) end_loc, result)\n\n  and element_or_fragment env =\n    Eat.push_lex_mode env Lex_mode.JSX_TAG;\n    element env\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/lex_env.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Sedlexing = Flow_sedlexing\n\n(* bol = Beginning Of Line *)\ntype bol = {\n  line: int;\n  offset: int;\n}\n\ntype lex_state = { lex_errors_acc: (Loc.t * Parse_error.t) list } [@@ocaml.unboxed]\n\ntype t = {\n  lex_source: File_key.t option;\n  lex_lb: Sedlexing.lexbuf;\n  lex_bol: bol;\n  lex_in_comment_syntax: bool;\n  lex_enable_comment_syntax: bool;\n  lex_state: lex_state;\n  lex_last_loc: Loc.t;\n}\n\nlet empty_lex_state = { lex_errors_acc = [] }\n\n(* The lex_last_loc should initially be set to the beginning of the first line, so that\n   comments on the first line are reported as not being on a new line. *)\nlet initial_last_loc =\n  { Loc.source = None; start = { Loc.line = 1; column = 0 }; _end = { Loc.line = 1; column = 0 } }\n\nlet new_lex_env lex_source lex_lb ~enable_types_in_comments =\n  {\n    lex_source;\n    lex_lb;\n    lex_bol = { line = 1; offset = 0 };\n    lex_in_comment_syntax = false;\n    lex_enable_comment_syntax = enable_types_in_comments;\n    lex_state = empty_lex_state;\n    lex_last_loc = initial_last_loc;\n  }\n\n(* copy all the mutable things so that we have a distinct lexing environment\n   that does not interfere with ordinary lexer operations *)\nlet clone env =\n  let lex_lb = Sedlexing.lexbuf_clone env.lex_lb in\n  { env with lex_lb }\n\nlet lexbuf env = env.lex_lb\n\nlet source env = env.lex_source\n\nlet state env = env.lex_state\n\nlet line env = env.lex_bol.line\n\nlet bol_offset env = env.lex_bol.offset\n\nlet is_in_comment_syntax env = env.lex_in_comment_syntax\n\nlet is_comment_syntax_enabled env = env.lex_enable_comment_syntax\n\nlet in_comment_syntax is_in env =\n  if is_in <> env.lex_in_comment_syntax then\n    { env with lex_in_comment_syntax = is_in }\n  else\n    env\n\n(* TODO *)\nlet 
debug_string_of_lexbuf _lb = \"\"\n\nlet debug_string_of_lex_env (env : t) =\n  let source =\n    match source env with\n    | None -> \"None\"\n    | Some x -> Printf.sprintf \"Some %S\" (File_key.to_string x)\n  in\n  Printf.sprintf\n    \"{\\n  lex_source = %s\\n  lex_lb = %s\\n  lex_in_comment_syntax = %b\\n  lex_enable_comment_syntax = %b\\n  lex_state = {errors = (count = %d)}\\n}\"\n    source\n    (debug_string_of_lexbuf env.lex_lb)\n    (is_in_comment_syntax env)\n    (is_comment_syntax_enabled env)\n    (List.length (state env).lex_errors_acc)\n"
  },
  {
    "path": "analysis/vendor/js_parser/lex_result.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\ntype t = {\n  lex_token: Token.t;\n  lex_loc: Loc.t;\n  lex_errors: (Loc.t * Parse_error.t) list;\n  lex_comments: Loc.t Flow_ast.Comment.t list;\n}\n\nlet token result = result.lex_token\n\nlet loc result = result.lex_loc\n\nlet comments result = result.lex_comments\n\nlet errors result = result.lex_errors\n\nlet debug_string_of_lex_result lex_result =\n  Printf.sprintf\n    \"{\\n  lex_token = %s\\n  lex_value = %S\\n  lex_errors = (length = %d)\\n  lex_comments = (length = %d)\\n}\"\n    (Token.token_to_string lex_result.lex_token)\n    (Token.value_of_token lex_result.lex_token)\n    (List.length lex_result.lex_errors)\n    (List.length lex_result.lex_comments)\n"
  },
  {
    "path": "analysis/vendor/js_parser/loc.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\nopen Primitive_deriving\n\n(* line numbers are 1-indexed; column numbers are 0-indexed *)\ntype position = {\n  line: int;\n  column: int;\n}\n[@@deriving_inline equal]\nlet _ = fun (_ : position) -> ()\nlet equal_position =\n  (fun a__001_ ->\n     fun b__002_ ->\n       if Ppx_compare_lib.phys_equal a__001_ b__002_\n       then true\n       else\n         Ppx_compare_lib.(&&) (equal_int a__001_.line b__002_.line)\n           (equal_int a__001_.column b__002_.column) : position ->\n                                                         position -> bool)\nlet _ = equal_position\n[@@@end]\n(* start is inclusive; end is exclusive *)\n(* If you are modifying this record, go look at ALoc.ml and make sure you understand the\n * representation there. *)\ntype t = {\n  source: File_key.t option;\n  start: position;\n  _end: position;\n}\n\nlet none = { source = None; start = { line = 0; column = 0 }; _end = { line = 0; column = 0 } }\n\nlet is_none (x : t) =\n  x == none\n  ||\n  match x with\n  | { source = None; start = { line = 0; column = 0 }; _end = { line = 0; column = 0 } } -> true\n  | _ -> false\n\nlet is_none_ignore_source (x : t) =\n  x == none\n  ||\n  match x with\n  | { source = _; start = { line = 0; column = 0 }; _end = { line = 0; column = 0 } } -> true\n  | _ -> false\n\nlet btwn loc1 loc2 = { source = loc1.source; start = loc1.start; _end = loc2._end }\n\n(* Returns the position immediately before the start of the given loc. If the\n   given loc is at the beginning of a line, return the position of the first\n   char on the same line. 
*)\nlet char_before loc =\n  let start =\n    let { line; column } = loc.start in\n    let column =\n      if column > 0 then\n        column - 1\n      else\n        column\n    in\n    { line; column }\n  in\n  let _end = loc.start in\n  { loc with start; _end }\n\n(* Returns the location of the first character in the given loc. Not accurate if the\n * first line is a newline character, but is still consistent with loc orderings. *)\nlet first_char loc =\n  let start = loc.start in\n  let _end = { start with column = start.column + 1 } in\n  { loc with _end }\n\nlet pos_cmp a b =\n  let k = a.line - b.line in\n  if k = 0 then\n    a.column - b.column\n  else\n    k\n\n(**\n * If `a` spans (completely contains) `b`, then returns 0.\n * If `b` starts before `a` (even if it ends inside), returns < 0.\n * If `b` ends after `a` (even if it starts inside), returns > 0.\n *)\nlet span_compare a b =\n  let k = File_key.compare_opt a.source b.source in\n  if k = 0 then\n    let k = pos_cmp a.start b.start in\n    if k <= 0 then\n      let k = pos_cmp a._end b._end in\n      if k >= 0 then\n        0\n      else\n        -1\n    else\n      1\n  else\n    k\n\n(** [contains loc1 loc2] returns true if [loc1] entirely overlaps [loc2] *)\nlet contains loc1 loc2 = span_compare loc1 loc2 = 0\n\n(** [intersects loc1 loc2] returns true if [loc1] intersects [loc2] at all *)\nlet intersects loc1 loc2 =\n  File_key.compare_opt loc1.source loc2.source = 0\n  && not (pos_cmp loc1._end loc2.start < 0 || pos_cmp loc1.start loc2._end > 0)\n\n(** [lines_intersect loc1 loc2] returns true if [loc1] and [loc2] cover any part of\n    the same line, even if they don't actually intersect.\n\n    For example, if [loc1] ends and then [loc2] begins later on the same line,\n    [intersects loc1 loc2] is false, but [lines_intersect loc1 loc2] is true. 
*)\nlet lines_intersect loc1 loc2 =\n  File_key.compare_opt loc1.source loc2.source = 0\n  && not (loc1._end.line < loc2.start.line || loc1.start.line > loc2._end.line)\n\nlet compare_ignore_source loc1 loc2 =\n  match pos_cmp loc1.start loc2.start with\n  | 0 -> pos_cmp loc1._end loc2._end\n  | k -> k\n\nlet compare loc1 loc2 =\n  let k = File_key.compare_opt loc1.source loc2.source in\n  if k = 0 then\n    compare_ignore_source loc1 loc2\n  else\n    k\n\nlet equal loc1 loc2 = compare loc1 loc2 = 0\n\n(**\n * This is mostly useful for debugging purposes.\n * Please don't dead-code delete this!\n *)\nlet debug_to_string ?(include_source = false) loc =\n  let source =\n    if include_source then\n      Printf.sprintf\n        \"%S: \"\n        (match loc.source with\n        | Some src -> File_key.to_string src\n        | None -> \"<NONE>\")\n    else\n      \"\"\n  in\n  let pos =\n    Printf.sprintf\n      \"(%d, %d) to (%d, %d)\"\n      loc.start.line\n      loc.start.column\n      loc._end.line\n      loc._end.column\n  in\n  source ^ pos\n\nlet to_string_no_source loc =\n  let line = loc.start.line in\n  let start = loc.start.column + 1 in\n  let end_ = loc._end.column in\n  if line <= 0 then\n    \"0:0\"\n  else if line = loc._end.line && start = end_ then\n    Printf.sprintf \"%d:%d\" line start\n  else if line != loc._end.line then\n    Printf.sprintf \"%d:%d,%d:%d\" line start loc._end.line end_\n  else\n    Printf.sprintf \"%d:%d-%d\" line start end_\n\nlet mk_loc ?source (start_line, start_column) (end_line, end_column) =\n  {\n    source;\n    start = { line = start_line; column = start_column };\n    _end = { line = end_line; column = end_column };\n  }\n\nlet source loc = loc.source\n\n(** Produces a zero-width Loc.t, where start = end *)\nlet cursor source line column = { source; start = { line; column }; _end = { line; column } }\n\nlet start_loc loc = { loc with _end = loc.start }\nlet end_loc loc = { loc with start = loc._end }\n"
  },
  {
    "path": "analysis/vendor/js_parser/loc.mli",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\ntype position = {\n  line: int;\n  column: int;\n}\n[@@deriving_inline equal]\ninclude\n  sig\n    [@@@warning \"-32\"]\n    val equal_position : position -> position -> bool\n  end[@@ocaml.doc \"@inline\"]\n[@@@end]\ntype t = {\n  source: File_key.t option;\n  start: position;\n  _end: position;\n}\n\n\nval none : t\n\nval is_none : t -> bool\n\nval is_none_ignore_source : t -> bool\n\nval btwn : t -> t -> t\n\nval char_before : t -> t\n\nval first_char : t -> t\n\n(** [contains loc1 loc2] returns true if [loc1] entirely overlaps [loc2] *)\nval contains : t -> t -> bool\n\n(** [intersects loc1 loc2] returns true if [loc1] intersects [loc2] at all *)\nval intersects : t -> t -> bool\n\n(** [lines_intersect loc1 loc2] returns true if [loc1] and [loc2] cover any part of\n    the same line, even if they don't actually intersect.\n\n    For example, if [loc1] ends and then [loc2] begins later on the same line,\n    [intersects loc1 loc2] is false, but [lines_intersect loc1 loc2] is true. 
*)\nval lines_intersect : t -> t -> bool\n\nval pos_cmp : position -> position -> int\n\nval span_compare : t -> t -> int\n\nval compare_ignore_source : t -> t -> int\n\nval compare : t -> t -> int\n\nval equal : t -> t -> bool\n\nval debug_to_string : ?include_source:bool -> t -> string\n\n(* Relatively compact; suitable for use as a unique string identifier *)\nval to_string_no_source : t -> string\n\nval mk_loc : ?source:File_key.t -> int * int -> int * int -> t\n\nval source : t -> File_key.t option\n\n(** Produces a zero-width Loc.t, where start = end *)\nval cursor : File_key.t option -> int -> int -> t\n\n(* Produces a location at the start of the input location *)\nval start_loc : t -> t\n\n(* Produces a location at the end of the input location *)\nval end_loc : t -> t\n"
  },
  {
    "path": "analysis/vendor/js_parser/object_parser.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Ast = Flow_ast\nopen Token\nopen Parser_env\nopen Flow_ast\nmodule SMap = Map.Make (String)\nopen Parser_common\nopen Comment_attachment\n\n(* A module for parsing various object related things, like object literals\n * and classes *)\n\nmodule type OBJECT = sig\n  val key : ?class_body:bool -> env -> Loc.t * (Loc.t, Loc.t) Ast.Expression.Object.Property.key\n  val _initializer : env -> Loc.t * (Loc.t, Loc.t) Ast.Expression.Object.t * pattern_errors\n\n  val class_declaration :\n    env -> (Loc.t, Loc.t) Ast.Class.Decorator.t list -> (Loc.t, Loc.t) Ast.Statement.t\n\n  val class_expression : env -> (Loc.t, Loc.t) Ast.Expression.t\n  val class_implements : env -> attach_leading:bool -> (Loc.t, Loc.t) Ast.Class.Implements.t\n  val decorator_list : env -> (Loc.t, Loc.t) Ast.Class.Decorator.t list\nend\n\nmodule Object\n    (Parse : Parser_common.PARSER)\n    (Type : Type_parser.TYPE)\n    (Declaration : Declaration_parser.DECLARATION)\n    (Expression : Expression_parser.EXPRESSION)\n    (Pattern_cover : Pattern_cover.COVER) : OBJECT = struct\n  let decorator_list =\n    let expression env =\n      let expression = Expression.left_hand_side env in\n      let { remove_trailing; _ } =\n        if Peek.is_line_terminator env then\n          trailing_and_remover_after_last_line env\n        else\n          trailing_and_remover_after_last_loc env\n      in\n      remove_trailing expression (fun remover expression -> remover#expression expression)\n    in\n    let decorator env =\n      let leading = Peek.comments env in\n      Eat.token env;\n      {\n        Ast.Class.Decorator.expression = expression env;\n        comments = Flow_ast_utils.mk_comments_opt ~leading ();\n      }\n    in\n    let rec decorator_list_helper env decorators =\n      match 
Peek.token env with\n      | T_AT -> decorator_list_helper env (with_loc decorator env :: decorators)\n      | _ -> decorators\n    in\n    fun env ->\n      if (parse_options env).esproposal_decorators then\n        List.rev (decorator_list_helper env [])\n      else\n        []\n\n  let key ?(class_body = false) env =\n    let open Ast.Expression.Object.Property in\n    let leading = Peek.comments env in\n    let tkn = Peek.token env in\n    match tkn with\n    | T_STRING (loc, value, raw, octal) ->\n      if octal then strict_error env Parse_error.StrictOctalLiteral;\n      Expect.token env (T_STRING (loc, value, raw, octal));\n      let value = Literal.String value in\n      let trailing = Eat.trailing_comments env in\n      ( loc,\n        Literal\n          ( loc,\n            { Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n          )\n      )\n    | T_NUMBER { kind; raw } ->\n      let loc = Peek.loc env in\n      let value = Expression.number env kind raw in\n      let value = Literal.Number value in\n      let trailing = Eat.trailing_comments env in\n      ( loc,\n        Literal\n          ( loc,\n            { Literal.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n          )\n      )\n    | T_LBRACKET ->\n      let (loc, key) =\n        with_loc\n          (fun env ->\n            let leading = Peek.comments env in\n            Expect.token env T_LBRACKET;\n            let expr = Parse.assignment (env |> with_no_in false) in\n            Expect.token env T_RBRACKET;\n            let trailing = Eat.trailing_comments env in\n            {\n              ComputedKey.expression = expr;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            })\n          env\n      in\n      (loc, Ast.Expression.Object.Property.Computed (loc, key))\n    | T_POUND when class_body ->\n      let ((loc, { PrivateName.name; _ }) as id) = private_identifier env in\n      
add_declared_private env name;\n      (loc, PrivateName id)\n    | T_POUND ->\n      let (loc, id) =\n        with_loc\n          (fun env ->\n            Eat.token env;\n            Identifier (identifier_name env))\n          env\n      in\n      error_at env (loc, Parse_error.PrivateNotInClass);\n      (loc, id)\n    | _ ->\n      let ((loc, _) as id) = identifier_name env in\n      (loc, Identifier id)\n\n  let getter_or_setter env ~in_class_body is_getter =\n    (* this is a getter or setter, it cannot be async *)\n    let async = false in\n    let (generator, leading) = Declaration.generator env in\n    let (key_loc, key) = key ~class_body:in_class_body env in\n    let key = object_key_remove_trailing env key in\n    let value =\n      with_loc\n        (fun env ->\n          (* #sec-function-definitions-static-semantics-early-errors *)\n          let env = env |> with_allow_super Super_prop in\n          let (sig_loc, (tparams, params, return)) =\n            with_loc\n              (fun env ->\n                (* It's not clear how type params on getters & setters would make sense\n                 * in Flow's type system. 
Since this is a Flow syntax extension, we might\n                 * as well disallow it until we need it *)\n                let tparams = None in\n                let params =\n                  let params = Declaration.function_params ~await:false ~yield:false env in\n                  if Peek.token env = T_COLON then\n                    params\n                  else\n                    function_params_remove_trailing env params\n                in\n                begin\n                  match (is_getter, params) with\n                  | (true, (_, { Ast.Function.Params.this_ = Some _; _ })) ->\n                    error_at env (key_loc, Parse_error.GetterMayNotHaveThisParam)\n                  | (false, (_, { Ast.Function.Params.this_ = Some _; _ })) ->\n                    error_at env (key_loc, Parse_error.SetterMayNotHaveThisParam)\n                  | ( true,\n                      ( _,\n                        { Ast.Function.Params.params = []; rest = None; this_ = None; comments = _ }\n                      )\n                    ) ->\n                    ()\n                  | (false, (_, { Ast.Function.Params.rest = Some _; _ })) ->\n                    (* rest params don't make sense on a setter *)\n                    error_at env (key_loc, Parse_error.SetterArity)\n                  | ( false,\n                      ( _,\n                        {\n                          Ast.Function.Params.params = [_];\n                          rest = None;\n                          this_ = None;\n                          comments = _;\n                        }\n                      )\n                    ) ->\n                    ()\n                  | (true, _) -> error_at env (key_loc, Parse_error.GetterArity)\n                  | (false, _) -> error_at env (key_loc, Parse_error.SetterArity)\n                end;\n                let return = type_annotation_hint_remove_trailing env (Type.annotation_opt env) in\n                (tparams, params, 
return))\n              env\n          in\n          let simple_params = is_simple_parameter_list params in\n          let (body, contains_use_strict) =\n            Declaration.function_body env ~async ~generator ~expression:false ~simple_params\n          in\n          Declaration.strict_post_check env ~contains_use_strict None params;\n          {\n            Function.id = None;\n            params;\n            body;\n            generator;\n            async;\n            predicate = None;\n            (* setters/getter are not predicates *)\n            return;\n            tparams;\n            sig_loc;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ();\n          })\n        env\n    in\n    (key, value)\n\n  let _initializer =\n    let parse_assignment_cover env =\n      match Expression.assignment_cover env with\n      | Cover_expr expr -> (expr, Pattern_cover.empty_errors)\n      | Cover_patt (expr, errs) -> (expr, errs)\n    in\n    let get env start_loc leading =\n      let (loc, (key, value)) =\n        with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:false true) env\n      in\n      let open Ast.Expression.Object in\n      Property\n        (loc, Property.Get { key; value; comments = Flow_ast_utils.mk_comments_opt ~leading () })\n    in\n    let set env start_loc leading =\n      let (loc, (key, value)) =\n        with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:false false) env\n      in\n      let open Ast.Expression.Object in\n      Property\n        (loc, Property.Set { key; value; comments = Flow_ast_utils.mk_comments_opt ~leading () })\n    in\n    (* #prod-PropertyDefinition *)\n    let init =\n      let open Ast.Expression.Object.Property in\n      (* #prod-IdentifierReference *)\n      let parse_shorthand env key =\n        match key with\n        | Literal (loc, lit) ->\n          error_at env (loc, Parse_error.LiteralShorthandProperty);\n          (loc, Ast.Expression.Literal lit)\n   
     | Identifier ((loc, { Identifier.name; comments = _ }) as id) ->\n          (* #sec-identifiers-static-semantics-early-errors *)\n          if is_reserved name && name <> \"yield\" && name <> \"await\" then\n            (* it is a syntax error if `name` is a reserved word other than await or yield *)\n            error_at env (loc, Parse_error.UnexpectedReserved)\n          else if is_strict_reserved name then\n            (* it is a syntax error if `name` is a strict reserved word, in strict mode *)\n            strict_error_at env (loc, Parse_error.StrictReservedWord);\n          (loc, Ast.Expression.Identifier id)\n        | PrivateName _ -> failwith \"Internal Error: private name found in object props\"\n        | Computed (_, { ComputedKey.expression = expr; comments = _ }) ->\n          error_at env (fst expr, Parse_error.ComputedShorthandProperty);\n          expr\n      in\n      (* #prod-MethodDefinition *)\n      let parse_method ~async ~generator ~leading =\n        with_loc (fun env ->\n            (* #sec-function-definitions-static-semantics-early-errors *)\n            let env = env |> with_allow_super Super_prop in\n            let (sig_loc, (tparams, params, return)) =\n              with_loc\n                (fun env ->\n                  let tparams = type_params_remove_trailing env (Type.type_params env) in\n                  let params =\n                    let (yield, await) =\n                      match (async, generator) with\n                      | (true, true) ->\n                        (true, true) (* proposal-async-iteration/#prod-AsyncGeneratorMethod *)\n                      | (true, false) -> (false, allow_await env) (* #prod-AsyncMethod *)\n                      | (false, true) -> (true, false) (* #prod-GeneratorMethod *)\n                      | (false, false) -> (false, false)\n                      (* #prod-MethodDefinition *)\n                    in\n                    let params = Declaration.function_params ~await 
~yield env in\n                    if Peek.token env = T_COLON then\n                      params\n                    else\n                      function_params_remove_trailing env params\n                  in\n                  let return = type_annotation_hint_remove_trailing env (Type.annotation_opt env) in\n                  (tparams, params, return))\n                env\n            in\n            let simple_params = is_simple_parameter_list params in\n            let (body, contains_use_strict) =\n              Declaration.function_body env ~async ~generator ~expression:false ~simple_params\n            in\n            Declaration.strict_post_check env ~contains_use_strict None params;\n            {\n              Function.id = None;\n              params;\n              body;\n              generator;\n              async;\n              (* TODO: add support for object method predicates *)\n              predicate = None;\n              return;\n              tparams;\n              sig_loc;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            }\n        )\n      in\n      (* PropertyName `:` AssignmentExpression *)\n      let parse_value env =\n        Expect.token env T_COLON;\n        parse_assignment_cover env\n      in\n      (* #prod-CoverInitializedName *)\n      let parse_assignment_pattern ~key env =\n        let open Ast.Expression.Object in\n        match key with\n        | Property.Identifier id ->\n          let assignment_loc = Peek.loc env in\n          let ast =\n            with_loc\n              ~start_loc:(fst id)\n              (fun env ->\n                let leading = Peek.comments env in\n                Expect.token env T_ASSIGN;\n                let trailing = Eat.trailing_comments env in\n                let left = Parse.pattern_from_expr env (fst id, Ast.Expression.Identifier id) in\n                let right = Parse.assignment env in\n                let comments = 
Flow_ast_utils.mk_comments_opt ~leading ~trailing () in\n                Ast.Expression.Assignment\n                  { Ast.Expression.Assignment.operator = None; left; right; comments })\n              env\n          in\n          let errs =\n            {\n              if_expr = [(assignment_loc, Parse_error.Unexpected (Token.quote_token_value \"=\"))];\n              if_patt = [];\n            }\n          in\n          (ast, errs)\n        | Property.Literal _\n        | Property.PrivateName _\n        | Property.Computed _ ->\n          parse_value env\n      in\n      let parse_init ~key ~async ~generator ~leading env =\n        if async || generator then\n          let key = object_key_remove_trailing env key in\n          (* the `async` and `*` modifiers are only valid on methods *)\n          let value = parse_method env ~async ~generator ~leading in\n          let prop = Method { key; value } in\n          (prop, Pattern_cover.empty_errors)\n        else\n          match Peek.token env with\n          | T_RCURLY\n          | T_COMMA ->\n            let value = parse_shorthand env key in\n            let prop = Init { key; value; shorthand = true } in\n            (prop, Pattern_cover.empty_errors)\n          | T_LESS_THAN\n          | T_LPAREN ->\n            let key = object_key_remove_trailing env key in\n            let value = parse_method env ~async ~generator ~leading in\n            let prop = Method { key; value } in\n            (prop, Pattern_cover.empty_errors)\n          | T_ASSIGN ->\n            let (value, errs) = parse_assignment_pattern ~key env in\n            let prop = Init { key; value; shorthand = true } in\n            (prop, errs)\n          | T_COLON ->\n            let (value, errs) = parse_value env in\n            let prop = Init { key; value; shorthand = false } in\n            (prop, errs)\n          | _ ->\n            (* error. 
we recover by treating it as a shorthand property so as to not\n               consume any more tokens and make the error worse. we don't error here\n               because we'll expect a comma before the next token. *)\n            let value = parse_shorthand env key in\n            let prop = Init { key; value; shorthand = true } in\n            (prop, Pattern_cover.empty_errors)\n      in\n      fun env start_loc key async generator leading ->\n        let (loc, (prop, errs)) =\n          with_loc ~start_loc (parse_init ~key ~async ~generator ~leading) env\n        in\n        (Ast.Expression.Object.Property (loc, prop), errs)\n    in\n    let property env =\n      let open Ast.Expression.Object in\n      if Peek.token env = T_ELLIPSIS then\n        (* Spread property *)\n        let leading = Peek.comments env in\n        let (loc, (argument, errs)) =\n          with_loc\n            (fun env ->\n              Expect.token env T_ELLIPSIS;\n              parse_assignment_cover env)\n            env\n        in\n        ( SpreadProperty\n            (loc, { SpreadProperty.argument; comments = Flow_ast_utils.mk_comments_opt ~leading () }),\n          errs\n        )\n      else\n        let start_loc = Peek.loc env in\n        let (async, leading_async) =\n          match Peek.ith_token ~i:1 env with\n          | T_ASSIGN\n          (* { async = true } (destructuring) *)\n          | T_COLON\n          (* { async: true } *)\n          | T_LESS_THAN\n          (* { async<T>() {} } *)\n          | T_LPAREN\n          (* { async() {} } *)\n          | T_COMMA\n          (* { async, other, shorthand } *)\n          | T_RCURLY (* { async } *) ->\n            (false, [])\n          | _ -> Declaration.async env\n        in\n        let (generator, leading_generator) = Declaration.generator env in\n        let leading = leading_async @ leading_generator in\n        match (async, generator, Peek.token env) with\n        | (false, false, T_IDENTIFIER { raw = \"get\"; _ }) 
->\n          let leading = Peek.comments env in\n          let (_, key) = key env in\n          begin\n            match Peek.token env with\n            | T_ASSIGN\n            | T_COLON\n            | T_LESS_THAN\n            | T_LPAREN\n            | T_COMMA\n            | T_RCURLY ->\n              init env start_loc key false false []\n            | _ ->\n              ignore (Comment_attachment.object_key_remove_trailing env key);\n              (get env start_loc leading, Pattern_cover.empty_errors)\n          end\n        | (false, false, T_IDENTIFIER { raw = \"set\"; _ }) ->\n          let leading = Peek.comments env in\n          let (_, key) = key env in\n          begin\n            match Peek.token env with\n            | T_ASSIGN\n            | T_COLON\n            | T_LESS_THAN\n            | T_LPAREN\n            | T_COMMA\n            | T_RCURLY ->\n              init env start_loc key false false []\n            | _ ->\n              ignore (Comment_attachment.object_key_remove_trailing env key);\n              (set env start_loc leading, Pattern_cover.empty_errors)\n          end\n        | (async, generator, _) ->\n          let (_, key) = key env in\n          init env start_loc key async generator leading\n    in\n    let rec properties env ~rest_trailing_comma (props, errs) =\n      match Peek.token env with\n      | T_EOF\n      | T_RCURLY ->\n        let errs =\n          match rest_trailing_comma with\n          | Some loc ->\n            { errs with if_patt = (loc, Parse_error.TrailingCommaAfterRestElement) :: errs.if_patt }\n          | None -> errs\n        in\n        (List.rev props, Pattern_cover.rev_errors errs)\n      | _ ->\n        let (prop, new_errs) = property env in\n        let rest_trailing_comma =\n          match prop with\n          | Ast.Expression.Object.SpreadProperty _ when Peek.token env = T_COMMA ->\n            Some (Peek.loc env)\n          | _ -> None\n        in\n        let errs = 
Pattern_cover.rev_append_errors new_errs errs in\n        let errs =\n          match Peek.token env with\n          | T_RCURLY\n          | T_EOF ->\n            errs\n          | T_COMMA ->\n            Eat.token env;\n            errs\n          | _ ->\n            (* we could use [Expect.error env T_COMMA], but we're in a weird\n               cover grammar situation where we're storing errors in\n               [Pattern_cover]. if we used [Expect.error], the errors would\n               end up out of order. *)\n            let err = Expect.get_error env T_COMMA in\n            (* if the unexpected token is a semicolon, consume it to aid\n               recovery. using a semicolon instead of a comma is a common\n               mistake. *)\n            let _ = Eat.maybe env T_SEMICOLON in\n            Pattern_cover.cons_error err errs\n        in\n        properties env ~rest_trailing_comma (prop :: props, errs)\n    in\n    fun env ->\n      let (loc, (expr, errs)) =\n        with_loc\n          (fun env ->\n            let leading = Peek.comments env in\n            Expect.token env T_LCURLY;\n            let (props, errs) =\n              properties env ~rest_trailing_comma:None ([], Pattern_cover.empty_errors)\n            in\n            let internal = Peek.comments env in\n            Expect.token env T_RCURLY;\n            let trailing = Eat.trailing_comments env in\n            ( {\n                Ast.Expression.Object.properties = props;\n                comments =\n                  Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n              },\n              errs\n            ))\n          env\n      in\n      (loc, expr, errs)\n\n  let check_property_name env loc name static =\n    if String.equal name \"constructor\" || (String.equal name \"prototype\" && static) then\n      error_at\n        env\n        (loc, Parse_error.InvalidClassMemberName { name; static; method_ = false; private_ = false })\n\n  let 
check_private_names\n      env seen_names private_name (kind : [ `Method | `Field | `Getter | `Setter ]) =\n    let (loc, { PrivateName.name; comments = _ }) = private_name in\n    if String.equal name \"constructor\" then\n      let () =\n        error_at\n          env\n          ( loc,\n            Parse_error.InvalidClassMemberName\n              { name; static = false; method_ = kind = `Method; private_ = true }\n          )\n      in\n      seen_names\n    else\n      match SMap.find_opt name seen_names with\n      | Some seen ->\n        begin\n          match (kind, seen) with\n          | (`Getter, `Setter)\n          | (`Setter, `Getter) ->\n            (* one getter and one setter are allowed as long as it's not used as a field *)\n            ()\n          | _ -> error_at env (loc, Parse_error.DuplicatePrivateFields name)\n        end;\n        SMap.add name `Field seen_names\n      | None -> SMap.add name kind seen_names\n\n  let class_implements env ~attach_leading =\n    let rec interfaces env acc =\n      let interface =\n        with_loc\n          (fun env ->\n            let id =\n              let id = Type.type_identifier env in\n              if Peek.token env <> T_LESS_THAN then\n                id\n              else\n                let { remove_trailing; _ } = trailing_and_remover env in\n                remove_trailing id (fun remover id -> remover#identifier id)\n            in\n            let targs = Type.type_args env in\n            { Ast.Class.Implements.Interface.id; targs })\n          env\n      in\n      let acc = interface :: acc in\n      match Peek.token env with\n      | T_COMMA ->\n        Expect.token env T_COMMA;\n        interfaces env acc\n      | _ -> List.rev acc\n    in\n    with_loc\n      (fun env ->\n        let leading =\n          if attach_leading then\n            Peek.comments env\n          else\n            []\n        in\n        Expect.token env T_IMPLEMENTS;\n        let interfaces = interfaces env [] 
in\n        { Ast.Class.Implements.interfaces; comments = Flow_ast_utils.mk_comments_opt ~leading () })\n      env\n\n  let class_extends ~leading =\n    with_loc (fun env ->\n        let expr =\n          let expr = Expression.left_hand_side (env |> with_allow_yield false) in\n          if Peek.token env <> T_LESS_THAN then\n            expr\n          else\n            let { remove_trailing; _ } = trailing_and_remover env in\n            remove_trailing expr (fun remover expr -> remover#expression expr)\n        in\n        let targs = Type.type_args env in\n        { Class.Extends.expr; targs; comments = Flow_ast_utils.mk_comments_opt ~leading () }\n    )\n\n  (* https://tc39.es/ecma262/#prod-ClassHeritage *)\n  let class_heritage env =\n    let extends =\n      let leading = Peek.comments env in\n      if Eat.maybe env T_EXTENDS then\n        let (loc, extends) = class_extends ~leading env in\n        let { remove_trailing; _ } = trailing_and_remover env in\n        Some\n          (loc, remove_trailing extends (fun remover extends -> remover#class_extends loc extends))\n      else\n        None\n    in\n    let implements =\n      if Peek.token env = T_IMPLEMENTS then (\n        if not (should_parse_types env) then error env Parse_error.UnexpectedTypeInterface;\n        Some (class_implements_remove_trailing env (class_implements env ~attach_leading:true))\n      ) else\n        None\n    in\n    (extends, implements)\n\n  (* In the ES6 draft, all elements are methods. 
No properties (though there\n   * are getter and setters allowed *)\n  let class_element =\n    let get env start_loc decorators static leading =\n      let (loc, (key, value)) =\n        with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:true true) env\n      in\n      let open Ast.Class in\n      Body.Method\n        ( loc,\n          {\n            Method.key;\n            value;\n            kind = Method.Get;\n            static;\n            decorators;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ();\n          }\n        )\n    in\n    let set env start_loc decorators static leading =\n      let (loc, (key, value)) =\n        with_loc ~start_loc (fun env -> getter_or_setter env ~in_class_body:true false) env\n      in\n      let open Ast.Class in\n      Body.Method\n        ( loc,\n          {\n            Method.key;\n            value;\n            kind = Method.Set;\n            static;\n            decorators;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ();\n          }\n        )\n    in\n    let error_unsupported_variance env = function\n      | Some (loc, _) -> error_at env (loc, Parse_error.UnexpectedVariance)\n      | None -> ()\n      (* Class property with annotation *)\n    in\n    let error_unsupported_declare env = function\n      | Some loc -> error_at env (loc, Parse_error.DeclareClassElement)\n      | None -> ()\n    in\n    let property_end_and_semicolon env key annot value =\n      match Peek.token env with\n      | T_LBRACKET\n      | T_LPAREN ->\n        error_unexpected env;\n        (key, annot, value, [])\n      | T_SEMICOLON ->\n        Eat.token env;\n        let trailing =\n          match Peek.token env with\n          | T_EOF\n          | T_RCURLY ->\n            Eat.trailing_comments env\n          | _ when Peek.is_line_terminator env -> Eat.comments_until_next_line env\n          | _ -> []\n        in\n        (key, annot, value, trailing)\n      | _ ->\n        let 
remover =\n          match Peek.token env with\n          | T_EOF\n          | T_RCURLY ->\n            { trailing = []; remove_trailing = (fun x _ -> x) }\n          | _ when Peek.is_line_terminator env ->\n            Comment_attachment.trailing_and_remover_after_last_line env\n          | _ -> Comment_attachment.trailing_and_remover_after_last_loc env\n        in\n        (* Remove trailing comments from the last node in this property *)\n        let (key, annot, value) =\n          match (annot, value) with\n          (* prop = init *)\n          | (_, Class.Property.Initialized expr) ->\n            ( key,\n              annot,\n              Class.Property.Initialized\n                (remover.remove_trailing expr (fun remover expr -> remover#expression expr))\n            )\n          (* prop: annot *)\n          | (Ast.Type.Available annot, _) ->\n            ( key,\n              Ast.Type.Available\n                (remover.remove_trailing annot (fun remover annot -> remover#type_annotation annot)),\n              value\n            )\n          (* prop *)\n          | _ ->\n            (remover.remove_trailing key (fun remover key -> remover#object_key key), annot, value)\n        in\n        (key, annot, value, [])\n    in\n    let property env start_loc key static declare variance leading =\n      let (loc, (key, annot, value, comments)) =\n        with_loc\n          ~start_loc\n          (fun env ->\n            let annot = Type.annotation_opt env in\n            let value =\n              match (declare, Peek.token env) with\n              | (None, T_ASSIGN) ->\n                Eat.token env;\n                Ast.Class.Property.Initialized\n                  (Parse.expression (env |> with_allow_super Super_prop))\n              | (Some _, T_ASSIGN) ->\n                error env Parse_error.DeclareClassFieldInitializer;\n                Eat.token env;\n                Ast.Class.Property.Declared\n              | (None, _) -> 
Ast.Class.Property.Uninitialized\n              | (Some _, _) -> Ast.Class.Property.Declared\n            in\n            let (key, annot, value, trailing) = property_end_and_semicolon env key annot value in\n            (key, annot, value, Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n          env\n      in\n      match key with\n      | Ast.Expression.Object.Property.PrivateName private_name ->\n        let open Ast.Class in\n        Body.PrivateField\n          (loc, { PrivateField.key = private_name; value; annot; static; variance; comments })\n      | _ ->\n        Ast.Class.(Body.Property (loc, { Property.key; value; annot; static; variance; comments }))\n    in\n    let is_asi env =\n      match Peek.token env with\n      | T_LESS_THAN -> false\n      | T_LPAREN -> false\n      | _ when Peek.is_implicit_semicolon env -> true\n      | _ -> false\n    in\n    let rec init env start_loc decorators key ~async ~generator ~static ~declare variance leading =\n      match Peek.token env with\n      | T_COLON\n      | T_ASSIGN\n      | T_SEMICOLON\n      | T_RCURLY\n        when (not async) && not generator ->\n        property env start_loc key static declare variance leading\n      | T_PLING ->\n        (* TODO: add support for optional class properties *)\n        error_unexpected env;\n        Eat.token env;\n        init env start_loc decorators key ~async ~generator ~static ~declare variance leading\n      | _ when is_asi env ->\n        (* an uninitialized, unannotated property *)\n        property env start_loc key static declare variance leading\n      | _ ->\n        error_unsupported_declare env declare;\n        error_unsupported_variance env variance;\n        let (kind, env) =\n          match (static, key) with\n          | ( false,\n              Ast.Expression.Object.Property.Identifier\n                (_, { Identifier.name = \"constructor\"; comments = _ })\n            )\n          | ( false,\n              
Ast.Expression.Object.Property.Literal\n                (_, { Literal.value = Literal.String \"constructor\"; _ })\n            ) ->\n            (Ast.Class.Method.Constructor, env |> with_allow_super Super_prop_or_call)\n          | _ -> (Ast.Class.Method.Method, env |> with_allow_super Super_prop)\n        in\n        let key = object_key_remove_trailing env key in\n        let value =\n          with_loc\n            (fun env ->\n              let (sig_loc, (tparams, params, return)) =\n                with_loc\n                  (fun env ->\n                    let tparams = type_params_remove_trailing env (Type.type_params env) in\n                    let params =\n                      let (yield, await) =\n                        match (async, generator) with\n                        | (true, true) ->\n                          (true, true) (* proposal-async-iteration/#prod-AsyncGeneratorMethod *)\n                        | (true, false) -> (false, allow_await env) (* #prod-AsyncMethod *)\n                        | (false, true) -> (true, false) (* #prod-GeneratorMethod *)\n                        | (false, false) -> (false, false)\n                        (* #prod-MethodDefinition *)\n                      in\n                      let params = Declaration.function_params ~await ~yield env in\n                      let params =\n                        if Peek.token env = T_COLON then\n                          params\n                        else\n                          function_params_remove_trailing env params\n                      in\n                      Ast.Function.Params.(\n                        match params with\n                        | (loc, ({ this_ = Some (this_loc, _); _ } as params))\n                          when kind = Ast.Class.Method.Constructor ->\n                          (* Disallow this param annotations for constructors *)\n                          error_at env (this_loc, Parse_error.ThisParamBannedInConstructor);\n        
                  (loc, { params with this_ = None })\n                        | params -> params\n                      )\n                    in\n                    let return =\n                      type_annotation_hint_remove_trailing env (Type.annotation_opt env)\n                    in\n                    (tparams, params, return))\n                  env\n              in\n              let simple_params = is_simple_parameter_list params in\n              let (body, contains_use_strict) =\n                Declaration.function_body env ~async ~generator ~expression:false ~simple_params\n              in\n              Declaration.strict_post_check env ~contains_use_strict None params;\n              {\n                Function.id = None;\n                params;\n                body;\n                generator;\n                async;\n                (* TODO: add support for method predicates *)\n                predicate = None;\n                return;\n                tparams;\n                sig_loc;\n                comments = None;\n              })\n            env\n        in\n        let open Ast.Class in\n        Body.Method\n          ( Loc.btwn start_loc (fst value),\n            {\n              Method.key;\n              value;\n              kind;\n              static;\n              decorators;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            }\n          )\n    in\n    let ith_implies_identifier ~i env =\n      match Peek.ith_token ~i env with\n      | T_LESS_THAN\n      | T_COLON\n      | T_ASSIGN\n      | T_SEMICOLON\n      | T_LPAREN\n      | T_RCURLY ->\n        true\n      | _ -> false\n    in\n    let implies_identifier = ith_implies_identifier ~i:0 in\n    fun env ->\n      let start_loc = Peek.loc env in\n      let decorators = decorator_list env in\n      let (declare, leading_declare) =\n        match Peek.token env with\n        | T_DECLARE when not (ith_implies_identifier ~i:1 env) ->\n      
    let ret = Some (Peek.loc env) in\n          let leading = Peek.comments env in\n          Eat.token env;\n          (ret, leading)\n        | _ -> (None, [])\n      in\n      let static =\n        Peek.ith_token ~i:1 env <> T_LPAREN\n        && Peek.ith_token ~i:1 env <> T_LESS_THAN\n        && Peek.token env = T_STATIC\n      in\n      let leading_static =\n        if static then (\n          let leading = Peek.comments env in\n          Eat.token env;\n          leading\n        ) else\n          []\n      in\n      let async =\n        Peek.token env = T_ASYNC\n        && (not (ith_implies_identifier ~i:1 env))\n        && not (Peek.ith_is_line_terminator ~i:1 env)\n      in\n      (* consume `async` *)\n      let leading_async =\n        if async then (\n          let leading = Peek.comments env in\n          Eat.token env;\n          leading\n        ) else\n          []\n      in\n      let (generator, leading_generator) = Declaration.generator env in\n      let variance = Declaration.variance env async generator in\n      let (generator, leading_generator) =\n        match (generator, variance) with\n        | (false, Some _) -> Declaration.generator env\n        | _ -> (generator, leading_generator)\n      in\n      let leading =\n        List.concat [leading_declare; leading_static; leading_async; leading_generator]\n      in\n      match (async, generator, Peek.token env) with\n      | (false, false, T_IDENTIFIER { raw = \"get\"; _ }) ->\n        let leading_get = Peek.comments env in\n        let (_, key) = key ~class_body:true env in\n        if implies_identifier env then\n          init env start_loc decorators key ~async ~generator ~static ~declare variance leading\n        else (\n          error_unsupported_declare env declare;\n          error_unsupported_variance env variance;\n          ignore (object_key_remove_trailing env key);\n          get env start_loc decorators static (leading @ leading_get)\n        )\n      | (false, false, 
T_IDENTIFIER { raw = \"set\"; _ }) ->\n        let leading_set = Peek.comments env in\n        let (_, key) = key ~class_body:true env in\n        if implies_identifier env then\n          init env start_loc decorators key ~async ~generator ~static ~declare variance leading\n        else (\n          error_unsupported_declare env declare;\n          error_unsupported_variance env variance;\n          ignore (object_key_remove_trailing env key);\n          set env start_loc decorators static (leading @ leading_set)\n        )\n      | (_, _, _) ->\n        let (_, key) = key ~class_body:true env in\n        init env start_loc decorators key ~async ~generator ~static ~declare variance leading\n\n  let class_body =\n    let rec elements env seen_constructor private_names acc =\n      match Peek.token env with\n      | T_EOF\n      | T_RCURLY ->\n        List.rev acc\n      | T_SEMICOLON ->\n        (* Skip empty elements *)\n        Expect.token env T_SEMICOLON;\n        elements env seen_constructor private_names acc\n      | _ ->\n        let element = class_element env in\n        let (seen_constructor', private_names') =\n          match element with\n          | Ast.Class.Body.Method (loc, m) ->\n            let open Ast.Class.Method in\n            (match m.kind with\n            | Constructor ->\n              if m.static then\n                (seen_constructor, private_names)\n              else (\n                if seen_constructor then error_at env (loc, Parse_error.DuplicateConstructor);\n                (true, private_names)\n              )\n            | Method ->\n              let private_names =\n                match m.key with\n                | Ast.Expression.Object.Property.PrivateName name ->\n                  check_private_names env private_names name `Method\n                | _ -> private_names\n              in\n              (seen_constructor, private_names)\n            | Get ->\n              let open Ast.Expression.Object.Property in\n  
            let private_names =\n                match m.key with\n                | PrivateName name -> check_private_names env private_names name `Getter\n                | _ -> private_names\n              in\n              (seen_constructor, private_names)\n            | Set ->\n              let open Ast.Expression.Object.Property in\n              let private_names =\n                match m.key with\n                | PrivateName name -> check_private_names env private_names name `Setter\n                | _ -> private_names\n              in\n              (seen_constructor, private_names))\n          | Ast.Class.Body.Property (_, { Ast.Class.Property.key; static; _ }) ->\n            let open Ast.Expression.Object.Property in\n            begin\n              match key with\n              | Identifier (loc, { Identifier.name; comments = _ })\n              | Literal (loc, { Literal.value = Literal.String name; _ }) ->\n                check_property_name env loc name static\n              | Literal _\n              | Computed _ ->\n                ()\n              | PrivateName _ ->\n                failwith \"unexpected PrivateName in Property, expected a PrivateField\"\n            end;\n            (seen_constructor, private_names)\n          | Ast.Class.Body.PrivateField (_, { Ast.Class.PrivateField.key; _ }) ->\n            let private_names = check_private_names env private_names key `Field in\n            (seen_constructor, private_names)\n        in\n        elements env seen_constructor' private_names' (element :: acc)\n    in\n    fun ~expression env ->\n      with_loc\n        (fun env ->\n          let leading = Peek.comments env in\n          if Eat.maybe env T_LCURLY then (\n            enter_class env;\n            let body = elements env false SMap.empty [] in\n            exit_class env;\n            Expect.token env T_RCURLY;\n            let trailing =\n              match (expression, Peek.token env) with\n              | (true, _)\n   
           | (_, (T_RCURLY | T_EOF)) ->\n                Eat.trailing_comments env\n              | _ when Peek.is_line_terminator env -> Eat.comments_until_next_line env\n              | _ -> []\n            in\n            { Ast.Class.Body.body; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n          ) else (\n            Expect.error env T_LCURLY;\n            { Ast.Class.Body.body = []; comments = None }\n          ))\n        env\n\n  let _class ?(decorators = []) env ~optional_id ~expression =\n    (* 10.2.1 says all parts of a class definition are strict *)\n    let env = env |> with_strict true in\n    let decorators = decorators @ decorator_list env in\n    let leading = Peek.comments env in\n    Expect.token env T_CLASS;\n    let id =\n      let tmp_env = env |> with_no_let true in\n      match (optional_id, Peek.token tmp_env) with\n      | (true, (T_EXTENDS | T_IMPLEMENTS | T_LESS_THAN | T_LCURLY)) -> None\n      | _ when Peek.is_identifier env ->\n        let id = Parse.identifier tmp_env in\n        let { remove_trailing; _ } = trailing_and_remover env in\n        let id = remove_trailing id (fun remover id -> remover#identifier id) in\n        Some id\n      | _ ->\n        (* error, but don't consume a token like Parse.identifier does. this helps\n           with recovery, and the parser won't get stuck because we consumed the\n           `class` token above. 
*)\n        error_nameless_declaration env \"class\";\n        Some (Peek.loc env, { Identifier.name = \"\"; comments = None })\n    in\n    let tparams =\n      match Type.type_params env with\n      | None -> None\n      | Some tparams ->\n        let { remove_trailing; _ } = trailing_and_remover env in\n        Some (remove_trailing tparams (fun remover tparams -> remover#type_params tparams))\n    in\n    let (extends, implements) = class_heritage env in\n    let body = class_body env ~expression in\n    let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n    { Class.id; body; tparams; extends; implements; class_decorators = decorators; comments }\n\n  let class_declaration env decorators =\n    with_loc\n      (fun env ->\n        let optional_id = in_export_default env in\n        Ast.Statement.ClassDeclaration (_class env ~decorators ~optional_id ~expression:false))\n      env\n\n  let class_expression =\n    with_loc (fun env -> Ast.Expression.Class (_class env ~optional_id:true ~expression:true))\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/parse_error.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\nopen Primitive_deriving\n\ntype t =\n  | EnumBooleanMemberNotInitialized of {\n      enum_name: string;\n      member_name: string;\n    }\n  | EnumDuplicateMemberName of {\n      enum_name: string;\n      member_name: string;\n    }\n  | EnumInconsistentMemberValues of { enum_name: string }\n  | EnumInvalidExplicitType of {\n      enum_name: string;\n      supplied_type: string option;\n    }\n  | EnumInvalidExport\n  | EnumInvalidInitializerSeparator of { member_name: string }\n  | EnumInvalidMemberInitializer of {\n      enum_name: string;\n      explicit_type: Enum_common.explicit_type option;\n      member_name: string;\n    }\n  | EnumInvalidMemberName of {\n      enum_name: string;\n      member_name: string;\n    }\n  | EnumInvalidMemberSeparator\n  | EnumInvalidEllipsis of { trailing_comma: bool }\n  | EnumNumberMemberNotInitialized of {\n      enum_name: string;\n      member_name: string;\n    }\n  | EnumStringMemberInconsistentlyInitailized of { enum_name: string }\n  | Unexpected of string\n  | UnexpectedWithExpected of string * string\n  | UnexpectedTokenWithSuggestion of string * string\n  | UnexpectedReserved\n  | UnexpectedReservedType\n  | UnexpectedSuper\n  | UnexpectedSuperCall\n  | UnexpectedEOS\n  | UnexpectedVariance\n  | UnexpectedStatic\n  | UnexpectedProto\n  | UnexpectedTypeAlias\n  | UnexpectedOpaqueTypeAlias\n  | UnexpectedTypeAnnotation\n  | UnexpectedTypeDeclaration\n  | UnexpectedTypeImport\n  | UnexpectedTypeExport\n  | UnexpectedTypeInterface\n  | UnexpectedSpreadType\n  | UnexpectedExplicitInexactInObject\n  | InexactInsideExact\n  | InexactInsideNonObject\n  | NewlineAfterThrow\n  | InvalidFloatBigInt\n  | InvalidSciBigInt\n  | InvalidRegExp\n  | InvalidRegExpFlags of string\n  | UnterminatedRegExp\n  | 
InvalidLHSInAssignment\n  | InvalidLHSInExponentiation\n  | InvalidLHSInForIn\n  | InvalidLHSInForOf\n  | InvalidIndexedAccess of { has_bracket: bool }\n  | InvalidOptionalIndexedAccess\n  | ExpectedPatternFoundExpression\n  | MultipleDefaultsInSwitch\n  | NoCatchOrFinally\n  | UnknownLabel of string\n  | Redeclaration of string * string\n  | IllegalContinue\n  | IllegalBreak\n  | IllegalReturn\n  | IllegalUnicodeEscape\n  | StrictModeWith\n  | StrictCatchVariable\n  | StrictVarName\n  | StrictParamName\n  | StrictParamDupe\n  | StrictParamNotSimple\n  | StrictFunctionName\n  | StrictOctalLiteral\n  | StrictNonOctalLiteral\n  | StrictDelete\n  | StrictDuplicateProperty\n  | AccessorDataProperty\n  | AccessorGetSet\n  | InvalidTypeof\n  | StrictLHSAssignment\n  | StrictLHSPostfix\n  | StrictLHSPrefix\n  | StrictReservedWord\n  | JSXAttributeValueEmptyExpression\n  | InvalidJSXAttributeValue\n  | ExpectedJSXClosingTag of string\n  | NoUninitializedConst\n  | NoUninitializedDestructuring\n  | NewlineBeforeArrow\n  | FunctionAsStatement of { in_strict_mode: bool }\n  | AsyncFunctionAsStatement\n  | GeneratorFunctionAsStatement\n  | AdjacentJSXElements\n  | ParameterAfterRestParameter\n  | ElementAfterRestElement\n  | PropertyAfterRestElement\n  | DeclareAsync\n  | DeclareClassElement\n  | DeclareClassFieldInitializer\n  | DeclareOpaqueTypeInitializer\n  | DeclareExportLet\n  | DeclareExportConst\n  | DeclareExportType\n  | DeclareExportInterface\n  | DuplicateExport of string\n  | UnsupportedDecorator\n  | MissingTypeParamDefault\n  | DuplicateDeclareModuleExports\n  | AmbiguousDeclareModuleKind\n  | GetterArity\n  | SetterArity\n  | InvalidNonTypeImportInDeclareModule\n  | ImportTypeShorthandOnlyInPureImport\n  | ImportSpecifierMissingComma\n  | ExportSpecifierMissingComma\n  | MalformedUnicode\n  | DuplicateConstructor\n  | DuplicatePrivateFields of string\n  | InvalidClassMemberName of {\n      name: string;\n      static: bool;\n      method_: bool;\n      
private_: bool;\n    }\n  | PrivateDelete\n  | UnboundPrivate of string\n  | PrivateNotInClass\n  | SuperPrivate\n  | YieldInFormalParameters\n  | AwaitAsIdentifierReference\n  | YieldAsIdentifierReference\n  | AmbiguousLetBracket\n  | LiteralShorthandProperty\n  | ComputedShorthandProperty\n  | MethodInDestructuring\n  | TrailingCommaAfterRestElement\n  | OptionalChainNew\n  | OptionalChainTemplate\n  | NullishCoalescingUnexpectedLogical of string\n  | WhitespaceInPrivateName\n  | ThisParamAnnotationRequired\n  | ThisParamMustBeFirst\n  | ThisParamMayNotBeOptional\n  | GetterMayNotHaveThisParam\n  | SetterMayNotHaveThisParam\n  | ThisParamBannedInArrowFunctions\n  | ThisParamBannedInConstructor\n[@@deriving_inline compare]\nlet _ = fun (_ : t) -> ()\nlet compare =\n  (fun a__001_ ->\n     fun b__002_ ->\n       if Ppx_compare_lib.phys_equal a__001_ b__002_\n       then 0\n       else\n         (match (a__001_, b__002_) with\n          | (EnumBooleanMemberNotInitialized _a__003_,\n             EnumBooleanMemberNotInitialized _b__004_) ->\n              (match compare_string _a__003_.enum_name _b__004_.enum_name\n               with\n               | 0 ->\n                   compare_string _a__003_.member_name _b__004_.member_name\n               | n -> n)\n          | (EnumBooleanMemberNotInitialized _, _) -> (-1)\n          | (_, EnumBooleanMemberNotInitialized _) -> 1\n          | (EnumDuplicateMemberName _a__005_, EnumDuplicateMemberName\n             _b__006_) ->\n              (match compare_string _a__005_.enum_name _b__006_.enum_name\n               with\n               | 0 ->\n                   compare_string _a__005_.member_name _b__006_.member_name\n               | n -> n)\n          | (EnumDuplicateMemberName _, _) -> (-1)\n          | (_, EnumDuplicateMemberName _) -> 1\n          | (EnumInconsistentMemberValues _a__007_,\n             EnumInconsistentMemberValues _b__008_) ->\n              compare_string _a__007_.enum_name _b__008_.enum_name\n       
   | (EnumInconsistentMemberValues _, _) -> (-1)\n          | (_, EnumInconsistentMemberValues _) -> 1\n          | (EnumInvalidExplicitType _a__009_, EnumInvalidExplicitType\n             _b__010_) ->\n              (match compare_string _a__009_.enum_name _b__010_.enum_name\n               with\n               | 0 ->\n                   compare_option compare_string _a__009_.supplied_type\n                     _b__010_.supplied_type\n               | n -> n)\n          | (EnumInvalidExplicitType _, _) -> (-1)\n          | (_, EnumInvalidExplicitType _) -> 1\n          | (EnumInvalidExport, EnumInvalidExport) -> 0\n          | (EnumInvalidExport, _) -> (-1)\n          | (_, EnumInvalidExport) -> 1\n          | (EnumInvalidInitializerSeparator _a__013_,\n             EnumInvalidInitializerSeparator _b__014_) ->\n              compare_string _a__013_.member_name _b__014_.member_name\n          | (EnumInvalidInitializerSeparator _, _) -> (-1)\n          | (_, EnumInvalidInitializerSeparator _) -> 1\n          | (EnumInvalidMemberInitializer _a__015_,\n             EnumInvalidMemberInitializer _b__016_) ->\n              (match compare_string _a__015_.enum_name _b__016_.enum_name\n               with\n               | 0 ->\n                   (match compare_option Enum_common.compare_explicit_type\n                            _a__015_.explicit_type _b__016_.explicit_type\n                    with\n                    | 0 ->\n                        compare_string _a__015_.member_name\n                          _b__016_.member_name\n                    | n -> n)\n               | n -> n)\n          | (EnumInvalidMemberInitializer _, _) -> (-1)\n          | (_, EnumInvalidMemberInitializer _) -> 1\n          | (EnumInvalidMemberName _a__019_, EnumInvalidMemberName _b__020_)\n              ->\n              (match compare_string _a__019_.enum_name _b__020_.enum_name\n               with\n               | 0 ->\n                   compare_string _a__019_.member_name 
_b__020_.member_name\n               | n -> n)\n          | (EnumInvalidMemberName _, _) -> (-1)\n          | (_, EnumInvalidMemberName _) -> 1\n          | (EnumInvalidMemberSeparator, EnumInvalidMemberSeparator) -> 0\n          | (EnumInvalidMemberSeparator, _) -> (-1)\n          | (_, EnumInvalidMemberSeparator) -> 1\n          | (EnumInvalidEllipsis _a__021_, EnumInvalidEllipsis _b__022_) ->\n              compare_bool _a__021_.trailing_comma _b__022_.trailing_comma\n          | (EnumInvalidEllipsis _, _) -> (-1)\n          | (_, EnumInvalidEllipsis _) -> 1\n          | (EnumNumberMemberNotInitialized _a__023_,\n             EnumNumberMemberNotInitialized _b__024_) ->\n              (match compare_string _a__023_.enum_name _b__024_.enum_name\n               with\n               | 0 ->\n                   compare_string _a__023_.member_name _b__024_.member_name\n               | n -> n)\n          | (EnumNumberMemberNotInitialized _, _) -> (-1)\n          | (_, EnumNumberMemberNotInitialized _) -> 1\n          | (EnumStringMemberInconsistentlyInitailized _a__025_,\n             EnumStringMemberInconsistentlyInitailized _b__026_) ->\n              compare_string _a__025_.enum_name _b__026_.enum_name\n          | (EnumStringMemberInconsistentlyInitailized _, _) -> (-1)\n          | (_, EnumStringMemberInconsistentlyInitailized _) -> 1\n          | (Unexpected _a__027_, Unexpected _b__028_) ->\n              compare_string _a__027_ _b__028_\n          | (Unexpected _, _) -> (-1)\n          | (_, Unexpected _) -> 1\n          | (UnexpectedWithExpected (_a__029_, _a__031_),\n             UnexpectedWithExpected (_b__030_, _b__032_)) ->\n              (match compare_string _a__029_ _b__030_ with\n               | 0 -> compare_string _a__031_ _b__032_\n               | n -> n)\n          | (UnexpectedWithExpected _, _) -> (-1)\n          | (_, UnexpectedWithExpected _) -> 1\n          | (UnexpectedTokenWithSuggestion (_a__033_, _a__035_),\n             
UnexpectedTokenWithSuggestion (_b__034_, _b__036_)) ->\n              (match compare_string _a__033_ _b__034_ with\n               | 0 -> compare_string _a__035_ _b__036_\n               | n -> n)\n          | (UnexpectedTokenWithSuggestion _, _) -> (-1)\n          | (_, UnexpectedTokenWithSuggestion _) -> 1\n          | (UnexpectedReserved, UnexpectedReserved) -> 0\n          | (UnexpectedReserved, _) -> (-1)\n          | (_, UnexpectedReserved) -> 1\n          | (UnexpectedReservedType, UnexpectedReservedType) -> 0\n          | (UnexpectedReservedType, _) -> (-1)\n          | (_, UnexpectedReservedType) -> 1\n          | (UnexpectedSuper, UnexpectedSuper) -> 0\n          | (UnexpectedSuper, _) -> (-1)\n          | (_, UnexpectedSuper) -> 1\n          | (UnexpectedSuperCall, UnexpectedSuperCall) -> 0\n          | (UnexpectedSuperCall, _) -> (-1)\n          | (_, UnexpectedSuperCall) -> 1\n          | (UnexpectedEOS, UnexpectedEOS) -> 0\n          | (UnexpectedEOS, _) -> (-1)\n          | (_, UnexpectedEOS) -> 1\n          | (UnexpectedVariance, UnexpectedVariance) -> 0\n          | (UnexpectedVariance, _) -> (-1)\n          | (_, UnexpectedVariance) -> 1\n          | (UnexpectedStatic, UnexpectedStatic) -> 0\n          | (UnexpectedStatic, _) -> (-1)\n          | (_, UnexpectedStatic) -> 1\n          | (UnexpectedProto, UnexpectedProto) -> 0\n          | (UnexpectedProto, _) -> (-1)\n          | (_, UnexpectedProto) -> 1\n          | (UnexpectedTypeAlias, UnexpectedTypeAlias) -> 0\n          | (UnexpectedTypeAlias, _) -> (-1)\n          | (_, UnexpectedTypeAlias) -> 1\n          | (UnexpectedOpaqueTypeAlias, UnexpectedOpaqueTypeAlias) -> 0\n          | (UnexpectedOpaqueTypeAlias, _) -> (-1)\n          | (_, UnexpectedOpaqueTypeAlias) -> 1\n          | (UnexpectedTypeAnnotation, UnexpectedTypeAnnotation) -> 0\n          | (UnexpectedTypeAnnotation, _) -> (-1)\n          | (_, UnexpectedTypeAnnotation) -> 1\n          | (UnexpectedTypeDeclaration, 
UnexpectedTypeDeclaration) -> 0\n          | (UnexpectedTypeDeclaration, _) -> (-1)\n          | (_, UnexpectedTypeDeclaration) -> 1\n          | (UnexpectedTypeImport, UnexpectedTypeImport) -> 0\n          | (UnexpectedTypeImport, _) -> (-1)\n          | (_, UnexpectedTypeImport) -> 1\n          | (UnexpectedTypeExport, UnexpectedTypeExport) -> 0\n          | (UnexpectedTypeExport, _) -> (-1)\n          | (_, UnexpectedTypeExport) -> 1\n          | (UnexpectedTypeInterface, UnexpectedTypeInterface) -> 0\n          | (UnexpectedTypeInterface, _) -> (-1)\n          | (_, UnexpectedTypeInterface) -> 1\n          | (UnexpectedSpreadType, UnexpectedSpreadType) -> 0\n          | (UnexpectedSpreadType, _) -> (-1)\n          | (_, UnexpectedSpreadType) -> 1\n          | (UnexpectedExplicitInexactInObject,\n             UnexpectedExplicitInexactInObject) -> 0\n          | (UnexpectedExplicitInexactInObject, _) -> (-1)\n          | (_, UnexpectedExplicitInexactInObject) -> 1\n          | (InexactInsideExact, InexactInsideExact) -> 0\n          | (InexactInsideExact, _) -> (-1)\n          | (_, InexactInsideExact) -> 1\n          | (InexactInsideNonObject, InexactInsideNonObject) -> 0\n          | (InexactInsideNonObject, _) -> (-1)\n          | (_, InexactInsideNonObject) -> 1\n          | (NewlineAfterThrow, NewlineAfterThrow) -> 0\n          | (NewlineAfterThrow, _) -> (-1)\n          | (_, NewlineAfterThrow) -> 1\n          | (InvalidFloatBigInt, InvalidFloatBigInt) -> 0\n          | (InvalidFloatBigInt, _) -> (-1)\n          | (_, InvalidFloatBigInt) -> 1\n          | (InvalidSciBigInt, InvalidSciBigInt) -> 0\n          | (InvalidSciBigInt, _) -> (-1)\n          | (_, InvalidSciBigInt) -> 1\n          | (InvalidRegExp, InvalidRegExp) -> 0\n          | (InvalidRegExp, _) -> (-1)\n          | (_, InvalidRegExp) -> 1\n          | (InvalidRegExpFlags _a__037_, InvalidRegExpFlags _b__038_) ->\n              compare_string _a__037_ _b__038_\n          | (InvalidRegExpFlags _, 
_) -> (-1)\n          | (_, InvalidRegExpFlags _) -> 1\n          | (UnterminatedRegExp, UnterminatedRegExp) -> 0\n          | (UnterminatedRegExp, _) -> (-1)\n          | (_, UnterminatedRegExp) -> 1\n          | (InvalidLHSInAssignment, InvalidLHSInAssignment) -> 0\n          | (InvalidLHSInAssignment, _) -> (-1)\n          | (_, InvalidLHSInAssignment) -> 1\n          | (InvalidLHSInExponentiation, InvalidLHSInExponentiation) -> 0\n          | (InvalidLHSInExponentiation, _) -> (-1)\n          | (_, InvalidLHSInExponentiation) -> 1\n          | (InvalidLHSInForIn, InvalidLHSInForIn) -> 0\n          | (InvalidLHSInForIn, _) -> (-1)\n          | (_, InvalidLHSInForIn) -> 1\n          | (InvalidLHSInForOf, InvalidLHSInForOf) -> 0\n          | (InvalidLHSInForOf, _) -> (-1)\n          | (_, InvalidLHSInForOf) -> 1\n          | (InvalidIndexedAccess _a__039_, InvalidIndexedAccess _b__040_) ->\n              compare_bool _a__039_.has_bracket _b__040_.has_bracket\n          | (InvalidIndexedAccess _, _) -> (-1)\n          | (_, InvalidIndexedAccess _) -> 1\n          | (InvalidOptionalIndexedAccess, InvalidOptionalIndexedAccess) -> 0\n          | (InvalidOptionalIndexedAccess, _) -> (-1)\n          | (_, InvalidOptionalIndexedAccess) -> 1\n          | (ExpectedPatternFoundExpression, ExpectedPatternFoundExpression)\n              -> 0\n          | (ExpectedPatternFoundExpression, _) -> (-1)\n          | (_, ExpectedPatternFoundExpression) -> 1\n          | (MultipleDefaultsInSwitch, MultipleDefaultsInSwitch) -> 0\n          | (MultipleDefaultsInSwitch, _) -> (-1)\n          | (_, MultipleDefaultsInSwitch) -> 1\n          | (NoCatchOrFinally, NoCatchOrFinally) -> 0\n          | (NoCatchOrFinally, _) -> (-1)\n          | (_, NoCatchOrFinally) -> 1\n          | (UnknownLabel _a__041_, UnknownLabel _b__042_) ->\n              compare_string _a__041_ _b__042_\n          | (UnknownLabel _, _) -> (-1)\n          | (_, UnknownLabel _) -> 1\n          | (Redeclaration 
(_a__043_, _a__045_), Redeclaration\n             (_b__044_, _b__046_)) ->\n              (match compare_string _a__043_ _b__044_ with\n               | 0 -> compare_string _a__045_ _b__046_\n               | n -> n)\n          | (Redeclaration _, _) -> (-1)\n          | (_, Redeclaration _) -> 1\n          | (IllegalContinue, IllegalContinue) -> 0\n          | (IllegalContinue, _) -> (-1)\n          | (_, IllegalContinue) -> 1\n          | (IllegalBreak, IllegalBreak) -> 0\n          | (IllegalBreak, _) -> (-1)\n          | (_, IllegalBreak) -> 1\n          | (IllegalReturn, IllegalReturn) -> 0\n          | (IllegalReturn, _) -> (-1)\n          | (_, IllegalReturn) -> 1\n          | (IllegalUnicodeEscape, IllegalUnicodeEscape) -> 0\n          | (IllegalUnicodeEscape, _) -> (-1)\n          | (_, IllegalUnicodeEscape) -> 1\n          | (StrictModeWith, StrictModeWith) -> 0\n          | (StrictModeWith, _) -> (-1)\n          | (_, StrictModeWith) -> 1\n          | (StrictCatchVariable, StrictCatchVariable) -> 0\n          | (StrictCatchVariable, _) -> (-1)\n          | (_, StrictCatchVariable) -> 1\n          | (StrictVarName, StrictVarName) -> 0\n          | (StrictVarName, _) -> (-1)\n          | (_, StrictVarName) -> 1\n          | (StrictParamName, StrictParamName) -> 0\n          | (StrictParamName, _) -> (-1)\n          | (_, StrictParamName) -> 1\n          | (StrictParamDupe, StrictParamDupe) -> 0\n          | (StrictParamDupe, _) -> (-1)\n          | (_, StrictParamDupe) -> 1\n          | (StrictParamNotSimple, StrictParamNotSimple) -> 0\n          | (StrictParamNotSimple, _) -> (-1)\n          | (_, StrictParamNotSimple) -> 1\n          | (StrictFunctionName, StrictFunctionName) -> 0\n          | (StrictFunctionName, _) -> (-1)\n          | (_, StrictFunctionName) -> 1\n          | (StrictOctalLiteral, StrictOctalLiteral) -> 0\n          | (StrictOctalLiteral, _) -> (-1)\n          | (_, StrictOctalLiteral) -> 1\n          | (StrictNonOctalLiteral, 
StrictNonOctalLiteral) -> 0\n          | (StrictNonOctalLiteral, _) -> (-1)\n          | (_, StrictNonOctalLiteral) -> 1\n          | (StrictDelete, StrictDelete) -> 0\n          | (StrictDelete, _) -> (-1)\n          | (_, StrictDelete) -> 1\n          | (StrictDuplicateProperty, StrictDuplicateProperty) -> 0\n          | (StrictDuplicateProperty, _) -> (-1)\n          | (_, StrictDuplicateProperty) -> 1\n          | (AccessorDataProperty, AccessorDataProperty) -> 0\n          | (AccessorDataProperty, _) -> (-1)\n          | (_, AccessorDataProperty) -> 1\n          | (AccessorGetSet, AccessorGetSet) -> 0\n          | (AccessorGetSet, _) -> (-1)\n          | (_, AccessorGetSet) -> 1\n          | (InvalidTypeof, InvalidTypeof) -> 0\n          | (InvalidTypeof, _) -> (-1)\n          | (_, InvalidTypeof) -> 1\n          | (StrictLHSAssignment, StrictLHSAssignment) -> 0\n          | (StrictLHSAssignment, _) -> (-1)\n          | (_, StrictLHSAssignment) -> 1\n          | (StrictLHSPostfix, StrictLHSPostfix) -> 0\n          | (StrictLHSPostfix, _) -> (-1)\n          | (_, StrictLHSPostfix) -> 1\n          | (StrictLHSPrefix, StrictLHSPrefix) -> 0\n          | (StrictLHSPrefix, _) -> (-1)\n          | (_, StrictLHSPrefix) -> 1\n          | (StrictReservedWord, StrictReservedWord) -> 0\n          | (StrictReservedWord, _) -> (-1)\n          | (_, StrictReservedWord) -> 1\n          | (JSXAttributeValueEmptyExpression,\n             JSXAttributeValueEmptyExpression) -> 0\n          | (JSXAttributeValueEmptyExpression, _) -> (-1)\n          | (_, JSXAttributeValueEmptyExpression) -> 1\n          | (InvalidJSXAttributeValue, InvalidJSXAttributeValue) -> 0\n          | (InvalidJSXAttributeValue, _) -> (-1)\n          | (_, InvalidJSXAttributeValue) -> 1\n          | (ExpectedJSXClosingTag _a__047_, ExpectedJSXClosingTag _b__048_)\n              -> compare_string _a__047_ _b__048_\n          | (ExpectedJSXClosingTag _, _) -> (-1)\n          | (_, ExpectedJSXClosingTag _) -> 
1\n          | (NoUninitializedConst, NoUninitializedConst) -> 0\n          | (NoUninitializedConst, _) -> (-1)\n          | (_, NoUninitializedConst) -> 1\n          | (NoUninitializedDestructuring, NoUninitializedDestructuring) -> 0\n          | (NoUninitializedDestructuring, _) -> (-1)\n          | (_, NoUninitializedDestructuring) -> 1\n          | (NewlineBeforeArrow, NewlineBeforeArrow) -> 0\n          | (NewlineBeforeArrow, _) -> (-1)\n          | (_, NewlineBeforeArrow) -> 1\n          | (FunctionAsStatement _a__049_, FunctionAsStatement _b__050_) ->\n              compare_bool _a__049_.in_strict_mode _b__050_.in_strict_mode\n          | (FunctionAsStatement _, _) -> (-1)\n          | (_, FunctionAsStatement _) -> 1\n          | (AsyncFunctionAsStatement, AsyncFunctionAsStatement) -> 0\n          | (AsyncFunctionAsStatement, _) -> (-1)\n          | (_, AsyncFunctionAsStatement) -> 1\n          | (GeneratorFunctionAsStatement, GeneratorFunctionAsStatement) -> 0\n          | (GeneratorFunctionAsStatement, _) -> (-1)\n          | (_, GeneratorFunctionAsStatement) -> 1\n          | (AdjacentJSXElements, AdjacentJSXElements) -> 0\n          | (AdjacentJSXElements, _) -> (-1)\n          | (_, AdjacentJSXElements) -> 1\n          | (ParameterAfterRestParameter, ParameterAfterRestParameter) -> 0\n          | (ParameterAfterRestParameter, _) -> (-1)\n          | (_, ParameterAfterRestParameter) -> 1\n          | (ElementAfterRestElement, ElementAfterRestElement) -> 0\n          | (ElementAfterRestElement, _) -> (-1)\n          | (_, ElementAfterRestElement) -> 1\n          | (PropertyAfterRestElement, PropertyAfterRestElement) -> 0\n          | (PropertyAfterRestElement, _) -> (-1)\n          | (_, PropertyAfterRestElement) -> 1\n          | (DeclareAsync, DeclareAsync) -> 0\n          | (DeclareAsync, _) -> (-1)\n          | (_, DeclareAsync) -> 1\n          | (DeclareClassElement, DeclareClassElement) -> 0\n          | (DeclareClassElement, _) -> (-1)\n          | 
(_, DeclareClassElement) -> 1\n          | (DeclareClassFieldInitializer, DeclareClassFieldInitializer) -> 0\n          | (DeclareClassFieldInitializer, _) -> (-1)\n          | (_, DeclareClassFieldInitializer) -> 1\n          | (DeclareOpaqueTypeInitializer, DeclareOpaqueTypeInitializer) -> 0\n          | (DeclareOpaqueTypeInitializer, _) -> (-1)\n          | (_, DeclareOpaqueTypeInitializer) -> 1\n          | (DeclareExportLet, DeclareExportLet) -> 0\n          | (DeclareExportLet, _) -> (-1)\n          | (_, DeclareExportLet) -> 1\n          | (DeclareExportConst, DeclareExportConst) -> 0\n          | (DeclareExportConst, _) -> (-1)\n          | (_, DeclareExportConst) -> 1\n          | (DeclareExportType, DeclareExportType) -> 0\n          | (DeclareExportType, _) -> (-1)\n          | (_, DeclareExportType) -> 1\n          | (DeclareExportInterface, DeclareExportInterface) -> 0\n          | (DeclareExportInterface, _) -> (-1)\n          | (_, DeclareExportInterface) -> 1\n          | (DuplicateExport _a__051_, DuplicateExport _b__052_) ->\n              compare_string _a__051_ _b__052_\n          | (DuplicateExport _, _) -> (-1)\n          | (_, DuplicateExport _) -> 1\n          | (UnsupportedDecorator, UnsupportedDecorator) -> 0\n          | (UnsupportedDecorator, _) -> (-1)\n          | (_, UnsupportedDecorator) -> 1\n          | (MissingTypeParamDefault, MissingTypeParamDefault) -> 0\n          | (MissingTypeParamDefault, _) -> (-1)\n          | (_, MissingTypeParamDefault) -> 1\n          | (DuplicateDeclareModuleExports, DuplicateDeclareModuleExports) ->\n              0\n          | (DuplicateDeclareModuleExports, _) -> (-1)\n          | (_, DuplicateDeclareModuleExports) -> 1\n          | (AmbiguousDeclareModuleKind, AmbiguousDeclareModuleKind) -> 0\n          | (AmbiguousDeclareModuleKind, _) -> (-1)\n          | (_, AmbiguousDeclareModuleKind) -> 1\n          | (GetterArity, GetterArity) -> 0\n          | (GetterArity, _) -> (-1)\n          | (_, 
GetterArity) -> 1\n          | (SetterArity, SetterArity) -> 0\n          | (SetterArity, _) -> (-1)\n          | (_, SetterArity) -> 1\n          | (InvalidNonTypeImportInDeclareModule,\n             InvalidNonTypeImportInDeclareModule) -> 0\n          | (InvalidNonTypeImportInDeclareModule, _) -> (-1)\n          | (_, InvalidNonTypeImportInDeclareModule) -> 1\n          | (ImportTypeShorthandOnlyInPureImport,\n             ImportTypeShorthandOnlyInPureImport) -> 0\n          | (ImportTypeShorthandOnlyInPureImport, _) -> (-1)\n          | (_, ImportTypeShorthandOnlyInPureImport) -> 1\n          | (ImportSpecifierMissingComma, ImportSpecifierMissingComma) -> 0\n          | (ImportSpecifierMissingComma, _) -> (-1)\n          | (_, ImportSpecifierMissingComma) -> 1\n          | (ExportSpecifierMissingComma, ExportSpecifierMissingComma) -> 0\n          | (ExportSpecifierMissingComma, _) -> (-1)\n          | (_, ExportSpecifierMissingComma) -> 1\n          | (MalformedUnicode, MalformedUnicode) -> 0\n          | (MalformedUnicode, _) -> (-1)\n          | (_, MalformedUnicode) -> 1\n          | (DuplicateConstructor, DuplicateConstructor) -> 0\n          | (DuplicateConstructor, _) -> (-1)\n          | (_, DuplicateConstructor) -> 1\n          | (DuplicatePrivateFields _a__053_, DuplicatePrivateFields\n             _b__054_) -> compare_string _a__053_ _b__054_\n          | (DuplicatePrivateFields _, _) -> (-1)\n          | (_, DuplicatePrivateFields _) -> 1\n          | (InvalidClassMemberName _a__055_, InvalidClassMemberName\n             _b__056_) ->\n              (match compare_string _a__055_.name _b__056_.name with\n               | 0 ->\n                   (match compare_bool _a__055_.static _b__056_.static with\n                    | 0 ->\n                        (match compare_bool _a__055_.method_ _b__056_.method_\n                         with\n                         | 0 ->\n                             compare_bool _a__055_.private_ _b__056_.private_\n     
                    | n -> n)\n                    | n -> n)\n               | n -> n)\n          | (InvalidClassMemberName _, _) -> (-1)\n          | (_, InvalidClassMemberName _) -> 1\n          | (PrivateDelete, PrivateDelete) -> 0\n          | (PrivateDelete, _) -> (-1)\n          | (_, PrivateDelete) -> 1\n          | (UnboundPrivate _a__057_, UnboundPrivate _b__058_) ->\n              compare_string _a__057_ _b__058_\n          | (UnboundPrivate _, _) -> (-1)\n          | (_, UnboundPrivate _) -> 1\n          | (PrivateNotInClass, PrivateNotInClass) -> 0\n          | (PrivateNotInClass, _) -> (-1)\n          | (_, PrivateNotInClass) -> 1\n          | (SuperPrivate, SuperPrivate) -> 0\n          | (SuperPrivate, _) -> (-1)\n          | (_, SuperPrivate) -> 1\n          | (YieldInFormalParameters, YieldInFormalParameters) -> 0\n          | (YieldInFormalParameters, _) -> (-1)\n          | (_, YieldInFormalParameters) -> 1\n          | (AwaitAsIdentifierReference, AwaitAsIdentifierReference) -> 0\n          | (AwaitAsIdentifierReference, _) -> (-1)\n          | (_, AwaitAsIdentifierReference) -> 1\n          | (YieldAsIdentifierReference, YieldAsIdentifierReference) -> 0\n          | (YieldAsIdentifierReference, _) -> (-1)\n          | (_, YieldAsIdentifierReference) -> 1\n          | (AmbiguousLetBracket, AmbiguousLetBracket) -> 0\n          | (AmbiguousLetBracket, _) -> (-1)\n          | (_, AmbiguousLetBracket) -> 1\n          | (LiteralShorthandProperty, LiteralShorthandProperty) -> 0\n          | (LiteralShorthandProperty, _) -> (-1)\n          | (_, LiteralShorthandProperty) -> 1\n          | (ComputedShorthandProperty, ComputedShorthandProperty) -> 0\n          | (ComputedShorthandProperty, _) -> (-1)\n          | (_, ComputedShorthandProperty) -> 1\n          | (MethodInDestructuring, MethodInDestructuring) -> 0\n          | (MethodInDestructuring, _) -> (-1)\n          | (_, MethodInDestructuring) -> 1\n          | (TrailingCommaAfterRestElement, 
TrailingCommaAfterRestElement) ->\n              0\n          | (TrailingCommaAfterRestElement, _) -> (-1)\n          | (_, TrailingCommaAfterRestElement) -> 1\n          | (OptionalChainNew, OptionalChainNew) -> 0\n          | (OptionalChainNew, _) -> (-1)\n          | (_, OptionalChainNew) -> 1\n          | (OptionalChainTemplate, OptionalChainTemplate) -> 0\n          | (OptionalChainTemplate, _) -> (-1)\n          | (_, OptionalChainTemplate) -> 1\n          | (NullishCoalescingUnexpectedLogical _a__059_,\n             NullishCoalescingUnexpectedLogical _b__060_) ->\n              compare_string _a__059_ _b__060_\n          | (NullishCoalescingUnexpectedLogical _, _) -> (-1)\n          | (_, NullishCoalescingUnexpectedLogical _) -> 1\n          | (WhitespaceInPrivateName, WhitespaceInPrivateName) -> 0\n          | (WhitespaceInPrivateName, _) -> (-1)\n          | (_, WhitespaceInPrivateName) -> 1\n          | (ThisParamAnnotationRequired, ThisParamAnnotationRequired) -> 0\n          | (ThisParamAnnotationRequired, _) -> (-1)\n          | (_, ThisParamAnnotationRequired) -> 1\n          | (ThisParamMustBeFirst, ThisParamMustBeFirst) -> 0\n          | (ThisParamMustBeFirst, _) -> (-1)\n          | (_, ThisParamMustBeFirst) -> 1\n          | (ThisParamMayNotBeOptional, ThisParamMayNotBeOptional) -> 0\n          | (ThisParamMayNotBeOptional, _) -> (-1)\n          | (_, ThisParamMayNotBeOptional) -> 1\n          | (GetterMayNotHaveThisParam, GetterMayNotHaveThisParam) -> 0\n          | (GetterMayNotHaveThisParam, _) -> (-1)\n          | (_, GetterMayNotHaveThisParam) -> 1\n          | (SetterMayNotHaveThisParam, SetterMayNotHaveThisParam) -> 0\n          | (SetterMayNotHaveThisParam, _) -> (-1)\n          | (_, SetterMayNotHaveThisParam) -> 1\n          | (ThisParamBannedInArrowFunctions,\n             ThisParamBannedInArrowFunctions) -> 0\n          | (ThisParamBannedInArrowFunctions, _) -> (-1)\n          | (_, ThisParamBannedInArrowFunctions) -> 1\n          | 
(ThisParamBannedInConstructor, ThisParamBannedInConstructor) -> 0) :\n  t -> t -> int)\nlet _ = compare\n[@@@end]\nexception Error of (Loc.t * t) * (Loc.t * t) list\n\nlet error loc e = raise (Error ((loc, e), []))\n\nmodule PP = struct\n  let error = function\n    | EnumBooleanMemberNotInitialized { enum_name; member_name } ->\n      Printf.sprintf\n        \"Boolean enum members need to be initialized. Use either `%s = true,` or `%s = false,` in enum `%s`.\"\n        member_name\n        member_name\n        enum_name\n    | EnumDuplicateMemberName { enum_name; member_name } ->\n      Printf.sprintf\n        \"Enum member names need to be unique, but the name `%s` has already been used before in enum `%s`.\"\n        member_name\n        enum_name\n    | EnumInconsistentMemberValues { enum_name } ->\n      Printf.sprintf\n        \"Enum `%s` has inconsistent member initializers. Either use no initializers, or consistently use literals (either booleans, numbers, or strings) for all member initializers.\"\n        enum_name\n    | EnumInvalidExplicitType { enum_name; supplied_type } ->\n      let suggestion =\n        Printf.sprintf\n          \"Use one of `boolean`, `number`, `string`, or `symbol` in enum `%s`.\"\n          enum_name\n      in\n      begin\n        match supplied_type with\n        | Some supplied_type ->\n          Printf.sprintf \"Enum type `%s` is not valid. %s\" supplied_type suggestion\n        | None -> Printf.sprintf \"Supplied enum type is not valid. %s\" suggestion\n      end\n    | EnumInvalidExport ->\n      \"Cannot export an enum with `export type`, try `export enum E {}` or `module.exports = E;` instead.\"\n    | EnumInvalidInitializerSeparator { member_name } ->\n      Printf.sprintf\n        \"Enum member names and initializers are separated with `=`. 
Replace `%s:` with `%s =`.\"\n        member_name\n        member_name\n    | EnumInvalidMemberInitializer { enum_name; explicit_type; member_name } -> begin\n      match explicit_type with\n      | Some (Enum_common.Boolean as explicit_type)\n      | Some (Enum_common.Number as explicit_type)\n      | Some (Enum_common.String as explicit_type) ->\n        let explicit_type_str = Enum_common.string_of_explicit_type explicit_type in\n        Printf.sprintf\n          \"Enum `%s` has type `%s`, so the initializer of `%s` needs to be a %s literal.\"\n          enum_name\n          explicit_type_str\n          member_name\n          explicit_type_str\n      | Some Enum_common.Symbol ->\n        Printf.sprintf\n          \"Symbol enum members cannot be initialized. Use `%s,` in enum `%s`.\"\n          member_name\n          enum_name\n      | None ->\n        Printf.sprintf\n          \"The enum member initializer for `%s` needs to be a literal (either a boolean, number, or string) in enum `%s`.\"\n          member_name\n          enum_name\n    end\n    | EnumInvalidMemberName { enum_name; member_name } ->\n      (* Based on the error condition, we will only receive member names starting with [a-z] *)\n      let suggestion = String.capitalize_ascii member_name in\n      Printf.sprintf\n        \"Enum member names cannot start with lowercase 'a' through 'z'. Instead of using `%s`, consider using `%s`, in enum `%s`.\"\n        member_name\n        suggestion\n        enum_name\n    | EnumInvalidMemberSeparator -> \"Enum members are separated with `,`. Replace `;` with `,`.\"\n    | EnumInvalidEllipsis { trailing_comma } ->\n      if trailing_comma then\n        \"The `...` must come at the end of the enum body. Remove the trailing comma.\"\n      else\n        \"The `...` must come after all enum members. 
Move it to the end of the enum body.\"\n    | EnumNumberMemberNotInitialized { enum_name; member_name } ->\n      Printf.sprintf\n        \"Number enum members need to be initialized, e.g. `%s = 1,` in enum `%s`.\"\n        member_name\n        enum_name\n    | EnumStringMemberInconsistentlyInitailized { enum_name } ->\n      Printf.sprintf\n        \"String enum members need to consistently either all use initializers, or use no initializers, in enum %s.\"\n        enum_name\n    | Unexpected unexpected -> Printf.sprintf \"Unexpected %s\" unexpected\n    | UnexpectedWithExpected (unexpected, expected) ->\n      Printf.sprintf \"Unexpected %s, expected %s\" unexpected expected\n    | UnexpectedTokenWithSuggestion (token, suggestion) ->\n      Printf.sprintf \"Unexpected token `%s`. Did you mean `%s`?\" token suggestion\n    | UnexpectedReserved -> \"Unexpected reserved word\"\n    | UnexpectedReservedType -> \"Unexpected reserved type\"\n    | UnexpectedSuper -> \"Unexpected `super` outside of a class method\"\n    | UnexpectedSuperCall -> \"`super()` is only valid in a class constructor\"\n    | UnexpectedEOS -> \"Unexpected end of input\"\n    | UnexpectedVariance -> \"Unexpected variance sigil\"\n    | UnexpectedStatic -> \"Unexpected static modifier\"\n    | UnexpectedProto -> \"Unexpected proto modifier\"\n    | UnexpectedTypeAlias -> \"Type aliases are not allowed in untyped mode\"\n    | UnexpectedOpaqueTypeAlias -> \"Opaque type aliases are not allowed in untyped mode\"\n    | UnexpectedTypeAnnotation -> \"Type annotations are not allowed in untyped mode\"\n    | UnexpectedTypeDeclaration -> \"Type declarations are not allowed in untyped mode\"\n    | UnexpectedTypeImport -> \"Type imports are not allowed in untyped mode\"\n    | UnexpectedTypeExport -> \"Type exports are not allowed in untyped mode\"\n    | UnexpectedTypeInterface -> \"Interfaces are not allowed in untyped mode\"\n    | UnexpectedSpreadType -> \"Spreading a type is only allowed inside an 
object type\"\n    | UnexpectedExplicitInexactInObject ->\n      \"Explicit inexact syntax must come at the end of an object type\"\n    | InexactInsideExact ->\n      \"Explicit inexact syntax cannot appear inside an explicit exact object type\"\n    | InexactInsideNonObject -> \"Explicit inexact syntax can only appear inside an object type\"\n    | NewlineAfterThrow -> \"Illegal newline after throw\"\n    | InvalidFloatBigInt -> \"A bigint literal must be an integer\"\n    | InvalidSciBigInt -> \"A bigint literal cannot use exponential notation\"\n    | InvalidRegExp -> \"Invalid regular expression\"\n    | InvalidRegExpFlags flags -> \"Invalid flags supplied to RegExp constructor '\" ^ flags ^ \"'\"\n    | UnterminatedRegExp -> \"Invalid regular expression: missing /\"\n    | InvalidLHSInAssignment -> \"Invalid left-hand side in assignment\"\n    | InvalidLHSInExponentiation -> \"Invalid left-hand side in exponentiation expression\"\n    | InvalidLHSInForIn -> \"Invalid left-hand side in for-in\"\n    | InvalidLHSInForOf -> \"Invalid left-hand side in for-of\"\n    | InvalidIndexedAccess { has_bracket } ->\n      let msg =\n        if has_bracket then\n          \"Remove the period.\"\n        else\n          \"Indexed access uses bracket notation.\"\n      in\n      Printf.sprintf \"Invalid indexed access. %s Use the format `T[K]`.\" msg\n    | InvalidOptionalIndexedAccess ->\n      \"Invalid optional indexed access. Indexed access uses bracket notation. 
Use the format `T?.[K]`.\"\n    | ExpectedPatternFoundExpression ->\n      \"Expected an object pattern, array pattern, or an identifier but \"\n      ^ \"found an expression instead\"\n    | MultipleDefaultsInSwitch -> \"More than one default clause in switch statement\"\n    | NoCatchOrFinally -> \"Missing catch or finally after try\"\n    | UnknownLabel label -> \"Undefined label '\" ^ label ^ \"'\"\n    | Redeclaration (what, name) -> what ^ \" '\" ^ name ^ \"' has already been declared\"\n    | IllegalContinue -> \"Illegal continue statement\"\n    | IllegalBreak -> \"Illegal break statement\"\n    | IllegalReturn -> \"Illegal return statement\"\n    | IllegalUnicodeEscape -> \"Illegal Unicode escape\"\n    | StrictModeWith -> \"Strict mode code may not include a with statement\"\n    | StrictCatchVariable -> \"Catch variable may not be eval or arguments in strict mode\"\n    | StrictVarName -> \"Variable name may not be eval or arguments in strict mode\"\n    | StrictParamName -> \"Parameter name eval or arguments is not allowed in strict mode\"\n    | StrictParamDupe -> \"Strict mode function may not have duplicate parameter names\"\n    | StrictParamNotSimple ->\n      \"Illegal \\\"use strict\\\" directive in function with non-simple parameter list\"\n    | StrictFunctionName -> \"Function name may not be eval or arguments in strict mode\"\n    | StrictOctalLiteral -> \"Octal literals are not allowed in strict mode.\"\n    | StrictNonOctalLiteral -> \"Number literals with leading zeros are not allowed in strict mode.\"\n    | StrictDelete -> \"Delete of an unqualified identifier in strict mode.\"\n    | StrictDuplicateProperty ->\n      \"Duplicate data property in object literal not allowed in strict mode\"\n    | AccessorDataProperty ->\n      \"Object literal may not have data and accessor property with the same name\"\n    | AccessorGetSet -> \"Object literal may not have multiple get/set accessors with the same name\"\n    | StrictLHSAssignment -> 
\"Assignment to eval or arguments is not allowed in strict mode\"\n    | StrictLHSPostfix ->\n      \"Postfix increment/decrement may not have eval or arguments operand in strict mode\"\n    | StrictLHSPrefix ->\n      \"Prefix increment/decrement may not have eval or arguments operand in strict mode\"\n    | StrictReservedWord -> \"Use of future reserved word in strict mode\"\n    | JSXAttributeValueEmptyExpression ->\n      \"JSX attributes must only be assigned a non-empty expression\"\n    | InvalidJSXAttributeValue -> \"JSX value should be either an expression or a quoted JSX text\"\n    | ExpectedJSXClosingTag name -> \"Expected corresponding JSX closing tag for \" ^ name\n    | NoUninitializedConst -> \"Const must be initialized\"\n    | NoUninitializedDestructuring -> \"Destructuring assignment must be initialized\"\n    | NewlineBeforeArrow -> \"Illegal newline before arrow\"\n    | FunctionAsStatement { in_strict_mode } ->\n      if in_strict_mode then\n        \"In strict mode code, functions can only be declared at top level or \"\n        ^ \"immediately within another function.\"\n      else\n        \"In non-strict mode code, functions can only be declared at top level, \"\n        ^ \"inside a block, or as the body of an if statement.\"\n    | AsyncFunctionAsStatement ->\n      \"Async functions can only be declared at top level or \"\n      ^ \"immediately within another function.\"\n    | GeneratorFunctionAsStatement ->\n      \"Generators can only be declared at top level or \" ^ \"immediately within another function.\"\n    | AdjacentJSXElements ->\n      \"Unexpected token <. 
Remember, adjacent JSX \"\n      ^ \"elements must be wrapped in an enclosing parent tag\"\n    | ParameterAfterRestParameter -> \"Rest parameter must be final parameter of an argument list\"\n    | ElementAfterRestElement -> \"Rest element must be final element of an array pattern\"\n    | PropertyAfterRestElement -> \"Rest property must be final property of an object pattern\"\n    | DeclareAsync ->\n      \"async is an implementation detail and isn't necessary for your declare function statement. It is sufficient for your declare function to just have a Promise return type.\"\n    | DeclareClassElement -> \"`declare` modifier can only appear on class fields.\"\n    | DeclareClassFieldInitializer ->\n      \"Unexpected token `=`. Initializers are not allowed in a `declare`.\"\n    | DeclareOpaqueTypeInitializer ->\n      \"Unexpected token `=`. Initializers are not allowed in a `declare opaque type`.\"\n    | DeclareExportLet -> \"`declare export let` is not supported. Use `declare export var` instead.\"\n    | DeclareExportConst ->\n      \"`declare export const` is not supported. Use `declare export var` instead.\"\n    | DeclareExportType -> \"`declare export type` is not supported. Use `export type` instead.\"\n    | DeclareExportInterface ->\n      \"`declare export interface` is not supported. Use `export interface` instead.\"\n    | DuplicateExport export -> Printf.sprintf \"Duplicate export for `%s`\" export\n    | UnsupportedDecorator -> \"Found a decorator in an unsupported position.\"\n    | MissingTypeParamDefault ->\n      \"Type parameter declaration needs a default, since a preceding type parameter declaration has a default.\"\n    | DuplicateDeclareModuleExports -> \"Duplicate `declare module.exports` statement!\"\n    | AmbiguousDeclareModuleKind ->\n      \"Found both `declare module.exports` and `declare export` in the same module. 
Modules can only have 1 since they are either an ES module xor they are a CommonJS module.\"\n    | GetterArity -> \"Getter should have zero parameters\"\n    | SetterArity -> \"Setter should have exactly one parameter\"\n    | InvalidNonTypeImportInDeclareModule ->\n      \"Imports within a `declare module` body must always be \" ^ \"`import type` or `import typeof`!\"\n    | ImportTypeShorthandOnlyInPureImport ->\n      \"The `type` and `typeof` keywords on named imports can only be used on regular `import` statements. It cannot be used with `import type` or `import typeof` statements\"\n    | ImportSpecifierMissingComma -> \"Missing comma between import specifiers\"\n    | ExportSpecifierMissingComma -> \"Missing comma between export specifiers\"\n    | MalformedUnicode -> \"Malformed unicode\"\n    | DuplicateConstructor -> \"Classes may only have one constructor\"\n    | DuplicatePrivateFields name ->\n      \"Private fields may only be declared once. `#\" ^ name ^ \"` is declared more than once.\"\n    | InvalidClassMemberName { name; static; method_; private_ } ->\n      let static_modifier =\n        if static then\n          \"static \"\n        else\n          \"\"\n      in\n      let name =\n        if private_ then\n          \"#\" ^ name\n        else\n          name\n      in\n      let category =\n        if method_ then\n          \"methods\"\n        else\n          \"fields\"\n      in\n      \"Classes may not have \" ^ static_modifier ^ category ^ \" named `\" ^ name ^ \"`.\"\n    | PrivateDelete -> \"Private fields may not be deleted.\"\n    | UnboundPrivate name ->\n      \"Private fields must be declared before they can be referenced. 
`#\"\n      ^ name\n      ^ \"` has not been declared.\"\n    | PrivateNotInClass -> \"Private fields can only be referenced from within a class.\"\n    | SuperPrivate -> \"You may not access a private field through the `super` keyword.\"\n    | YieldInFormalParameters -> \"Yield expression not allowed in formal parameter\"\n    | AwaitAsIdentifierReference -> \"`await` is an invalid identifier in async functions\"\n    | YieldAsIdentifierReference -> \"`yield` is an invalid identifier in generators\"\n    | AmbiguousLetBracket ->\n      \"`let [` is ambiguous in this position because it is \"\n      ^ \"either a `let` binding pattern, or a member expression.\"\n    | LiteralShorthandProperty -> \"Literals cannot be used as shorthand properties.\"\n    | ComputedShorthandProperty -> \"Computed properties must have a value.\"\n    | MethodInDestructuring -> \"Object pattern can't contain methods\"\n    | TrailingCommaAfterRestElement -> \"A trailing comma is not permitted after the rest element\"\n    | OptionalChainNew -> \"An optional chain may not be used in a `new` expression.\"\n    | OptionalChainTemplate -> \"Template literals may not be used in an optional chain.\"\n    | NullishCoalescingUnexpectedLogical operator ->\n      Printf.sprintf\n        \"Unexpected token `%s`. 
Parentheses are required to combine `??` with `&&` or `||` expressions.\"\n        operator\n    | WhitespaceInPrivateName -> \"Unexpected whitespace between `#` and identifier\"\n    | ThisParamAnnotationRequired -> \"A type annotation is required for the `this` parameter.\"\n    | ThisParamMustBeFirst -> \"The `this` parameter must be the first function parameter.\"\n    | ThisParamMayNotBeOptional -> \"The `this` parameter cannot be optional.\"\n    | GetterMayNotHaveThisParam -> \"A getter cannot have a `this` parameter.\"\n    | SetterMayNotHaveThisParam -> \"A setter cannot have a `this` parameter.\"\n    | ThisParamBannedInArrowFunctions ->\n      \"Arrow functions cannot have a `this` parameter; arrow functions automatically bind `this` when declared.\"\n    | ThisParamBannedInConstructor ->\n      \"Constructors cannot have a `this` parameter; constructors don't bind `this` like other functions.\"\n    | InvalidTypeof -> \"`typeof` can only be used to get the type of variables.\"\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/parser_common.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nopen Parser_env\nopen Flow_ast\n\ntype pattern_errors = {\n  if_expr: (Loc.t * Parse_error.t) list;\n  if_patt: (Loc.t * Parse_error.t) list;\n}\n\ntype pattern_cover =\n  | Cover_expr of (Loc.t, Loc.t) Expression.t\n  | Cover_patt of (Loc.t, Loc.t) Expression.t * pattern_errors\n\nmodule type PARSER = sig\n  val program : env -> (Loc.t, Loc.t) Program.t\n\n  val statement : env -> (Loc.t, Loc.t) Statement.t\n\n  val statement_list_item :\n    ?decorators:(Loc.t, Loc.t) Class.Decorator.t list -> env -> (Loc.t, Loc.t) Statement.t\n\n  val statement_list : term_fn:(Token.t -> bool) -> env -> (Loc.t, Loc.t) Statement.t list\n\n  val statement_list_with_directives :\n    term_fn:(Token.t -> bool) -> env -> (Loc.t, Loc.t) Statement.t list * bool\n\n  val module_body : term_fn:(Token.t -> bool) -> env -> (Loc.t, Loc.t) Statement.t list\n\n  val expression : env -> (Loc.t, Loc.t) Expression.t\n\n  val expression_or_pattern : env -> pattern_cover\n\n  val conditional : env -> (Loc.t, Loc.t) Expression.t\n\n  val assignment : env -> (Loc.t, Loc.t) Expression.t\n\n  val left_hand_side : env -> (Loc.t, Loc.t) Expression.t\n\n  val object_initializer : env -> Loc.t * (Loc.t, Loc.t) Expression.Object.t * pattern_errors\n\n  val identifier : ?restricted_error:Parse_error.t -> env -> (Loc.t, Loc.t) Identifier.t\n\n  val identifier_with_type :\n    env -> ?no_optional:bool -> Parse_error.t -> Loc.t * (Loc.t, Loc.t) Pattern.Identifier.t\n\n  val block_body : env -> Loc.t * (Loc.t, Loc.t) Statement.Block.t\n\n  val function_block_body :\n    expression:bool -> env -> (Loc.t * (Loc.t, Loc.t) Statement.Block.t) * bool\n\n  val jsx_element_or_fragment :\n    env ->\n    Loc.t * [ `Element of (Loc.t, Loc.t) JSX.element | `Fragment of (Loc.t, Loc.t) JSX.fragment ]\n\n  val 
pattern : env -> Parse_error.t -> (Loc.t, Loc.t) Pattern.t\n\n  val pattern_from_expr : env -> (Loc.t, Loc.t) Expression.t -> (Loc.t, Loc.t) Pattern.t\n\n  val object_key : ?class_body:bool -> env -> Loc.t * (Loc.t, Loc.t) Expression.Object.Property.key\n\n  val class_declaration : env -> (Loc.t, Loc.t) Class.Decorator.t list -> (Loc.t, Loc.t) Statement.t\n\n  val class_expression : env -> (Loc.t, Loc.t) Expression.t\n\n  val is_assignable_lhs : (Loc.t, Loc.t) Expression.t -> bool\n\n  val number : env -> Token.number_type -> string -> float\n\n  val annot : env -> (Loc.t, Loc.t) Type.annotation\nend\n\nlet identifier_name_raw env =\n  let open Token in\n  let name =\n    match Peek.token env with\n    (* obviously, Identifier is a valid IdentifierName *)\n    | T_IDENTIFIER { value; _ } -> value\n    (* keywords are also IdentifierNames *)\n    | T_AWAIT -> \"await\"\n    | T_BREAK -> \"break\"\n    | T_CASE -> \"case\"\n    | T_CATCH -> \"catch\"\n    | T_CLASS -> \"class\"\n    | T_CONST -> \"const\"\n    | T_CONTINUE -> \"continue\"\n    | T_DEBUGGER -> \"debugger\"\n    | T_DEFAULT -> \"default\"\n    | T_DELETE -> \"delete\"\n    | T_DO -> \"do\"\n    | T_ELSE -> \"else\"\n    | T_EXPORT -> \"export\"\n    | T_EXTENDS -> \"extends\"\n    | T_FINALLY -> \"finally\"\n    | T_FOR -> \"for\"\n    | T_FUNCTION -> \"function\"\n    | T_IF -> \"if\"\n    | T_IMPORT -> \"import\"\n    | T_IN -> \"in\"\n    | T_INSTANCEOF -> \"instanceof\"\n    | T_NEW -> \"new\"\n    | T_RETURN -> \"return\"\n    | T_SUPER -> \"super\"\n    | T_SWITCH -> \"switch\"\n    | T_THIS -> \"this\"\n    | T_THROW -> \"throw\"\n    | T_TRY -> \"try\"\n    | T_TYPEOF -> \"typeof\"\n    | T_VAR -> \"var\"\n    | T_VOID -> \"void\"\n    | T_WHILE -> \"while\"\n    | T_WITH -> \"with\"\n    | T_YIELD -> \"yield\"\n    (* FutureReservedWord *)\n    | T_ENUM -> \"enum\"\n    | T_LET -> \"let\"\n    | T_STATIC -> \"static\"\n    | T_INTERFACE -> \"interface\"\n    | T_IMPLEMENTS -> \"implements\"\n  
  | T_PACKAGE -> \"package\"\n    | T_PRIVATE -> \"private\"\n    | T_PROTECTED -> \"protected\"\n    | T_PUBLIC -> \"public\"\n    (* NullLiteral *)\n    | T_NULL -> \"null\"\n    (* BooleanLiteral *)\n    | T_TRUE -> \"true\"\n    | T_FALSE -> \"false\"\n    (* Flow-specific stuff *)\n    | T_DECLARE -> \"declare\"\n    | T_TYPE -> \"type\"\n    | T_OPAQUE -> \"opaque\"\n    | T_ANY_TYPE -> \"any\"\n    | T_MIXED_TYPE -> \"mixed\"\n    | T_EMPTY_TYPE -> \"empty\"\n    | T_BOOLEAN_TYPE BOOL -> \"bool\"\n    | T_BOOLEAN_TYPE BOOLEAN -> \"boolean\"\n    | T_NUMBER_TYPE -> \"number\"\n    | T_BIGINT_TYPE -> \"bigint\"\n    | T_STRING_TYPE -> \"string\"\n    | T_VOID_TYPE -> \"void\"\n    | T_SYMBOL_TYPE -> \"symbol\"\n    (* Contextual stuff *)\n    | T_OF -> \"of\"\n    | T_ASYNC -> \"async\"\n    (* punctuators, types, literals, etc are not identifiers *)\n    | _ ->\n      error_unexpected ~expected:\"an identifier\" env;\n      \"\"\n  in\n  Eat.token env;\n  name\n\n(* IdentifierName - https://tc39.github.io/ecma262/#prod-IdentifierName *)\nlet identifier_name env =\n  let loc = Peek.loc env in\n  let leading = Peek.comments env in\n  let name = identifier_name_raw env in\n  let trailing = Eat.trailing_comments env in\n  let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in\n  (loc, { Identifier.name; comments })\n\n(** PrivateIdentifier - https://tc39.es/ecma262/#prod-PrivateIdentifier\n\n    N.B.: whitespace, line terminators, and comments are not allowed\n    between the # and IdentifierName because PrivateIdentifier is a\n    CommonToken which is considered a single token. 
See also\n    https://tc39.es/ecma262/#prod-InputElementDiv *)\nlet private_identifier env =\n  let start_loc = Peek.loc env in\n  let leading = Peek.comments env in\n  Expect.token env Token.T_POUND;\n  let name_loc = Peek.loc env in\n  let name = identifier_name_raw env in\n  let trailing = Eat.trailing_comments env in\n  let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in\n  let loc = Loc.btwn start_loc name_loc in\n  if not (Loc.equal_position start_loc.Loc._end name_loc.Loc.start) then\n    error_at env (loc, Parse_error.WhitespaceInPrivateName);\n  (loc, { PrivateName.name; comments })\n\n(** The operation IsSimpleParamterList\n    https://tc39.es/ecma262/#sec-static-semantics-issimpleparameterlist *)\nlet is_simple_parameter_list =\n  let is_simple_param = function\n    | (_, { Flow_ast.Function.Param.argument = (_, Pattern.Identifier _); default = None }) -> true\n    | _ -> false\n  in\n  fun (_, { Flow_ast.Function.Params.params; rest; comments = _; this_ = _ }) ->\n    rest = None && List.for_all is_simple_param params\n\n(**\n * The abstract operation IsLabelledFunction\n *\n * https://tc39.github.io/ecma262/#sec-islabelledfunction\n *)\nlet rec is_labelled_function = function\n  | (_, Flow_ast.Statement.Labeled { Flow_ast.Statement.Labeled.body; _ }) ->\n    begin\n      match body with\n      | (_, Flow_ast.Statement.FunctionDeclaration _) -> true\n      | _ -> is_labelled_function body\n    end\n  | _ -> false\n\nlet with_loc ?start_loc fn env =\n  let start_loc =\n    match start_loc with\n    | Some x -> x\n    | None -> Peek.loc env\n  in\n  let result = fn env in\n  let loc =\n    match last_loc env with\n    | Some end_loc -> Loc.btwn start_loc end_loc\n    | None -> start_loc\n  in\n  (loc, result)\n\nlet with_loc_opt ?start_loc fn env =\n  match with_loc ?start_loc fn env with\n  | (loc, Some x) -> Some (loc, x)\n  | (_, None) -> None\n\nlet with_loc_extra ?start_loc fn env =\n  let (loc, (x, extra)) = with_loc ?start_loc fn 
env in\n  ((loc, x), extra)\n"
  },
  {
    "path": "analysis/vendor/js_parser/parser_env.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Sedlexing = Flow_sedlexing\nopen Flow_ast\nmodule SSet = Set.Make (String)\n\nmodule Lex_mode = struct\n  type t =\n    | NORMAL\n    | TYPE\n    | JSX_TAG\n    | JSX_CHILD\n    | TEMPLATE\n    | REGEXP\n\n  let debug_string_of_lex_mode (mode : t) =\n    match mode with\n    | NORMAL -> \"NORMAL\"\n    | TYPE -> \"TYPE\"\n    | JSX_TAG -> \"JSX_TAG\"\n    | JSX_CHILD -> \"JSX_CHILD\"\n    | TEMPLATE -> \"TEMPLATE\"\n    | REGEXP -> \"REGEXP\"\nend\n\n(* READ THIS BEFORE YOU MODIFY:\n *\n * The current implementation for lookahead beyond a single token is\n * inefficient. If you believe you need to increase this constant, do one of the\n * following:\n * - Find another way\n * - Benchmark your change and provide convincing evidence that it doesn't\n *   actually have a significant perf impact.\n * - Refactor this to memoize all requested lookahead, so we aren't lexing the\n *   same token multiple times.\n *)\n\nmodule Lookahead : sig\n  type t\n\n  val create : Lex_env.t -> Lex_mode.t -> t\n  val peek_0 : t -> Lex_result.t\n  val peek_1 : t -> Lex_result.t\n  val lex_env_0 : t -> Lex_env.t\n  val junk : t -> unit\nend = struct\n  type la_result = (Lex_env.t * Lex_result.t) option\n\n  type t = {\n    mutable la_results_0: la_result;\n    mutable la_results_1: la_result;\n    la_lex_mode: Lex_mode.t;\n    mutable la_lex_env: Lex_env.t;\n  }\n\n  let create lex_env mode =\n    let lex_env = Lex_env.clone lex_env in\n    { la_results_0 = None; la_results_1 = None; la_lex_mode = mode; la_lex_env = lex_env }\n\n  (* precondition: there is enough room in t.la_results for the result *)\n  let lex t =\n    let lex_env = t.la_lex_env in\n    let (lex_env, lex_result) =\n      match t.la_lex_mode with\n      | Lex_mode.NORMAL -> Flow_lexer.token lex_env\n    
  | Lex_mode.TYPE -> Flow_lexer.type_token lex_env\n      | Lex_mode.JSX_TAG -> Flow_lexer.jsx_tag lex_env\n      | Lex_mode.JSX_CHILD -> Flow_lexer.jsx_child lex_env\n      | Lex_mode.TEMPLATE -> Flow_lexer.template_tail lex_env\n      | Lex_mode.REGEXP -> Flow_lexer.regexp lex_env\n    in\n    let cloned_env = Lex_env.clone lex_env in\n    let result = (cloned_env, lex_result) in\n    t.la_lex_env <- lex_env;\n    begin\n      match t.la_results_0 with\n      | None -> t.la_results_0 <- Some result\n      | Some _ -> t.la_results_1 <- Some result\n    end;\n    result\n\n  let peek_0 t =\n    match t.la_results_0 with\n    | Some (_, result) -> result\n    | None -> snd (lex t)\n\n  let peek_1 t =\n    (match t.la_results_0 with\n    | None -> ignore (lex t)\n    | Some _ -> ());\n    match t.la_results_1 with\n    | None -> snd (lex t)\n    | Some (_, result) -> result\n\n  let lex_env_0 t =\n    match t.la_results_0 with\n    | Some (lex_env, _) -> lex_env\n    | None -> fst (lex t)\n\n  (* Throws away the first peeked-at token, shifting any subsequent tokens up *)\n  let junk t =\n    match t.la_results_1 with\n    | None ->\n      ignore (peek_0 t);\n      t.la_results_0 <- None\n    | Some _ ->\n      t.la_results_0 <- t.la_results_1;\n      t.la_results_1 <- None\nend\n\ntype token_sink_result = {\n  token_loc: Loc.t;\n  token: Token.t;\n  token_context: Lex_mode.t;\n}\n\ntype parse_options = {\n  enums: bool;  (** enable parsing of Flow enums *)\n  esproposal_decorators: bool;  (** enable parsing of decorators *)\n  types: bool;  (** enable parsing of Flow types *)\n  use_strict: bool;  (** treat the file as strict, without needing a \"use strict\" directive *)\n}\n\nlet default_parse_options =\n  { enums = false; esproposal_decorators = false; types = true; use_strict = false }\n\ntype allowed_super =\n  | No_super\n  | Super_prop\n  | Super_prop_or_call\n\ntype env = {\n  errors: (Loc.t * Parse_error.t) list ref;\n  comments: Loc.t Comment.t list ref;\n  
labels: SSet.t;\n  last_lex_result: Lex_result.t option ref;\n  in_strict_mode: bool;\n  in_export: bool;\n  in_export_default: bool;\n  in_loop: bool;\n  in_switch: bool;\n  in_formal_parameters: bool;\n  in_function: bool;\n  no_in: bool;\n  no_call: bool;\n  no_let: bool;\n  no_anon_function_type: bool;\n  no_new: bool;\n  allow_yield: bool;\n  allow_await: bool;\n  allow_directive: bool;\n  has_simple_parameters: bool;\n  allow_super: allowed_super;\n  error_callback: (env -> Parse_error.t -> unit) option;\n  lex_mode_stack: Lex_mode.t list ref;\n  (* lex_env is the lex_env after the single lookahead has been lexed *)\n  lex_env: Lex_env.t ref;\n  (* This needs to be cleared whenever we advance. *)\n  lookahead: Lookahead.t ref;\n  token_sink: (token_sink_result -> unit) option ref;\n  parse_options: parse_options;\n  source: File_key.t option;\n  (* It is a syntax error to reference private fields not in scope. In order to enforce this,\n   * we keep track of the privates we've seen declared and used. *)\n  privates: (SSet.t * (string * Loc.t) list) list ref;\n  (* The position up to which comments have been consumed, exclusive. 
*)\n  consumed_comments_pos: Loc.position ref;\n}\n\n(* constructor *)\nlet init_env ?(token_sink = None) ?(parse_options = None) source content =\n  (* let lb = Sedlexing.Utf16.from_string\n     content (Some Sedlexing.Utf16.Little_endian) in *)\n  let (lb, errors) =\n    try (Sedlexing.Utf8.from_string content, []) with\n    | Sedlexing.MalFormed ->\n      (Sedlexing.Utf8.from_string \"\", [({ Loc.none with Loc.source }, Parse_error.MalformedUnicode)])\n  in\n  let parse_options =\n    match parse_options with\n    | Some opts -> opts\n    | None -> default_parse_options\n  in\n  let enable_types_in_comments = parse_options.types in\n  let lex_env = Lex_env.new_lex_env source lb ~enable_types_in_comments in\n  {\n    errors = ref errors;\n    comments = ref [];\n    labels = SSet.empty;\n    last_lex_result = ref None;\n    has_simple_parameters = true;\n    in_strict_mode = parse_options.use_strict;\n    in_export = false;\n    in_export_default = false;\n    in_loop = false;\n    in_switch = false;\n    in_formal_parameters = false;\n    in_function = false;\n    no_in = false;\n    no_call = false;\n    no_let = false;\n    no_anon_function_type = false;\n    no_new = false;\n    allow_yield = false;\n    allow_await = false;\n    allow_directive = false;\n    allow_super = No_super;\n    error_callback = None;\n    lex_mode_stack = ref [Lex_mode.NORMAL];\n    lex_env = ref lex_env;\n    lookahead = ref (Lookahead.create lex_env Lex_mode.NORMAL);\n    token_sink = ref token_sink;\n    parse_options;\n    source;\n    privates = ref [];\n    consumed_comments_pos = ref { Loc.line = 0; column = 0 };\n  }\n\n(* getters: *)\nlet in_strict_mode env = env.in_strict_mode\nlet lex_mode env = List.hd !(env.lex_mode_stack)\nlet in_export env = env.in_export\nlet in_export_default env = env.in_export_default\nlet comments env = !(env.comments)\nlet labels env = env.labels\nlet in_loop env = env.in_loop\nlet in_switch env = env.in_switch\nlet in_formal_parameters env = 
env.in_formal_parameters\nlet in_function env = env.in_function\nlet allow_yield env = env.allow_yield\nlet allow_await env = env.allow_await\nlet allow_directive env = env.allow_directive\nlet allow_super env = env.allow_super\nlet has_simple_parameters env = env.has_simple_parameters\nlet no_in env = env.no_in\nlet no_call env = env.no_call\nlet no_let env = env.no_let\nlet no_anon_function_type env = env.no_anon_function_type\nlet no_new env = env.no_new\nlet errors env = !(env.errors)\nlet parse_options env = env.parse_options\nlet source env = env.source\nlet should_parse_types env = env.parse_options.types\n\n(* mutators: *)\nlet error_at env (loc, e) =\n  env.errors := (loc, e) :: !(env.errors);\n  match env.error_callback with\n  | None -> ()\n  | Some callback -> callback env e\n\n(* Since private fields out of scope are a parse error, we keep track of the declared and used\n * private fields.\n *\n * Whenever we enter a class, we push new empty lists of declared and used privates.\n * When we encounter a new declared private, we add it to the top of the declared_privates list\n * via add_declared_private. We do the same with used_privates via add_used_private.\n *\n * When we exit a class, we look for all the unbound private variables. Since class fields\n * are hoisted to the scope of the class, we may need to look further before we conclude that\n * a field is out of scope. To do that, we add all of the unbound private fields to the\n * next used_private list. Once we run out of declared private lists, any leftover used_privates\n * are unbound private variables. 
*)\nlet enter_class env = env.privates := (SSet.empty, []) :: !(env.privates)\n\nlet exit_class env =\n  let get_unbound_privates declared_privates used_privates =\n    List.filter (fun x -> not (SSet.mem (fst x) declared_privates)) used_privates\n  in\n  match !(env.privates) with\n  | [(declared_privates, used_privates)] ->\n    let unbound_privates = get_unbound_privates declared_privates used_privates in\n    List.iter\n      (fun (name, loc) -> error_at env (loc, Parse_error.UnboundPrivate name))\n      unbound_privates;\n    env.privates := []\n  | (loc_declared_privates, loc_used_privates) :: privates ->\n    let unbound_privates = get_unbound_privates loc_declared_privates loc_used_privates in\n    let (decl_head, used_head) = List.hd privates in\n    env.privates := (decl_head, used_head @ unbound_privates) :: List.tl privates\n  | _ -> failwith \"Internal Error: `exit_class` called before a matching `enter_class`\"\n\nlet add_declared_private env name =\n  match !(env.privates) with\n  | [] -> failwith \"Internal Error: Tried to add_declared_private with outside of class scope.\"\n  | (declared, used) :: xs -> env.privates := (SSet.add name declared, used) :: xs\n\nlet add_used_private env name loc =\n  match !(env.privates) with\n  | [] -> error_at env (loc, Parse_error.PrivateNotInClass)\n  | (declared, used) :: xs -> env.privates := (declared, (name, loc) :: used) :: xs\n\nlet consume_comments_until env pos = env.consumed_comments_pos := pos\n\n(* lookahead: *)\nlet lookahead_0 env = Lookahead.peek_0 !(env.lookahead)\nlet lookahead_1 env = Lookahead.peek_1 !(env.lookahead)\n\nlet lookahead ~i env =\n  match i with\n  | 0 -> lookahead_0 env\n  | 1 -> lookahead_1 env\n  | _ -> assert false\n\n(* functional operations: *)\nlet with_strict in_strict_mode env =\n  if in_strict_mode = env.in_strict_mode then\n    env\n  else\n    { env with in_strict_mode }\n\nlet with_in_formal_parameters in_formal_parameters env =\n  if in_formal_parameters = 
env.in_formal_parameters then\n    env\n  else\n    { env with in_formal_parameters }\n\nlet with_in_function in_function env =\n  if in_function = env.in_function then\n    env\n  else\n    { env with in_function }\n\nlet with_allow_yield allow_yield env =\n  if allow_yield = env.allow_yield then\n    env\n  else\n    { env with allow_yield }\n\nlet with_allow_await allow_await env =\n  if allow_await = env.allow_await then\n    env\n  else\n    { env with allow_await }\n\nlet with_allow_directive allow_directive env =\n  if allow_directive = env.allow_directive then\n    env\n  else\n    { env with allow_directive }\n\nlet with_allow_super allow_super env =\n  if allow_super = env.allow_super then\n    env\n  else\n    { env with allow_super }\n\nlet with_no_let no_let env =\n  if no_let = env.no_let then\n    env\n  else\n    { env with no_let }\n\nlet with_in_loop in_loop env =\n  if in_loop = env.in_loop then\n    env\n  else\n    { env with in_loop }\n\nlet with_no_in no_in env =\n  if no_in = env.no_in then\n    env\n  else\n    { env with no_in }\n\nlet with_no_anon_function_type no_anon_function_type env =\n  if no_anon_function_type = env.no_anon_function_type then\n    env\n  else\n    { env with no_anon_function_type }\n\nlet with_no_new no_new env =\n  if no_new = env.no_new then\n    env\n  else\n    { env with no_new }\n\nlet with_in_switch in_switch env =\n  if in_switch = env.in_switch then\n    env\n  else\n    { env with in_switch }\n\nlet with_in_export in_export env =\n  if in_export = env.in_export then\n    env\n  else\n    { env with in_export }\n\nlet with_in_export_default in_export_default env =\n  if in_export_default = env.in_export_default then\n    env\n  else\n    { env with in_export_default }\n\nlet with_no_call no_call env =\n  if no_call = env.no_call then\n    env\n  else\n    { env with no_call }\n\nlet with_error_callback error_callback env = { env with error_callback = Some error_callback }\n\n(* other helper functions: 
*)\nlet error_list env = List.iter (error_at env)\n\nlet last_loc env =\n  match !(env.last_lex_result) with\n  | Some lex_result -> Some (Lex_result.loc lex_result)\n  | None -> None\n\nlet last_token env =\n  match !(env.last_lex_result) with\n  | Some lex_result -> Some (Lex_result.token lex_result)\n  | None -> None\n\nlet without_error_callback env = { env with error_callback = None }\nlet add_label env label = { env with labels = SSet.add label env.labels }\n\nlet enter_function env ~async ~generator ~simple_params =\n  {\n    env with\n    in_formal_parameters = false;\n    has_simple_parameters = simple_params;\n    in_function = true;\n    in_loop = false;\n    in_switch = false;\n    in_export = false;\n    in_export_default = false;\n    labels = SSet.empty;\n    allow_await = async;\n    allow_yield = generator;\n  }\n\n(* #sec-keywords *)\nlet is_keyword = function\n  | \"await\"\n  | \"break\"\n  | \"case\"\n  | \"catch\"\n  | \"class\"\n  | \"const\"\n  | \"continue\"\n  | \"debugger\"\n  | \"default\"\n  | \"delete\"\n  | \"do\"\n  | \"else\"\n  | \"export\"\n  | \"extends\"\n  | \"finally\"\n  | \"for\"\n  | \"function\"\n  | \"if\"\n  | \"import\"\n  | \"in\"\n  | \"instanceof\"\n  | \"new\"\n  | \"return\"\n  | \"super\"\n  | \"switch\"\n  | \"this\"\n  | \"throw\"\n  | \"try\"\n  | \"typeof\"\n  | \"var\"\n  | \"void\"\n  | \"while\"\n  | \"with\"\n  | \"yield\" ->\n    true\n  | _ -> false\n\nlet token_is_keyword =\n  Token.(\n    function\n    | T_IDENTIFIER { raw; _ } when is_keyword raw -> true\n    | T_AWAIT\n    | T_BREAK\n    | T_CASE\n    | T_CATCH\n    | T_CLASS\n    | T_CONST\n    | T_CONTINUE\n    | T_DEBUGGER\n    | T_DEFAULT\n    | T_DELETE\n    | T_DO\n    | T_ELSE\n    | T_EXPORT\n    | T_EXTENDS\n    | T_FINALLY\n    | T_FOR\n    | T_FUNCTION\n    | T_IF\n    | T_IMPORT\n    | T_IN\n    | T_INSTANCEOF\n    | T_NEW\n    | T_RETURN\n    | T_SUPER\n    | T_SWITCH\n    | T_THIS\n    | T_THROW\n    | T_TRY\n    | T_TYPEOF\n    | 
T_VAR\n    | T_VOID\n    | T_WHILE\n    | T_WITH\n    | T_YIELD ->\n      true\n    | _ -> false\n  )\n\n(* #sec-future-reserved-words *)\nlet is_future_reserved = function\n  | \"enum\" -> true\n  | _ -> false\n\nlet token_is_future_reserved =\n  Token.(\n    function\n    | T_IDENTIFIER { raw; _ } when is_future_reserved raw -> true\n    | T_ENUM -> true\n    | _ -> false\n  )\n\n(* #sec-strict-mode-of-ecmascript *)\nlet is_strict_reserved = function\n  | \"interface\"\n  | \"implements\"\n  | \"package\"\n  | \"private\"\n  | \"protected\"\n  | \"public\"\n  | \"static\"\n  | \"yield\" ->\n    true\n  | _ -> false\n\nlet token_is_strict_reserved =\n  Token.(\n    function\n    | T_IDENTIFIER { raw; _ } when is_strict_reserved raw -> true\n    | T_INTERFACE\n    | T_IMPLEMENTS\n    | T_PACKAGE\n    | T_PRIVATE\n    | T_PROTECTED\n    | T_PUBLIC\n    | T_STATIC\n    | T_YIELD ->\n      true\n    | _ -> false\n  )\n\n(* #sec-strict-mode-of-ecmascript *)\nlet is_restricted = function\n  | \"eval\"\n  | \"arguments\" ->\n    true\n  | _ -> false\n\nlet token_is_restricted =\n  Token.(\n    function\n    | T_IDENTIFIER { raw; _ } when is_restricted raw -> true\n    | _ -> false\n  )\n\n(* #sec-reserved-words *)\nlet is_reserved str_val =\n  is_keyword str_val\n  || is_future_reserved str_val\n  ||\n  match str_val with\n  | \"null\"\n  | \"true\"\n  | \"false\" ->\n    true\n  | _ -> false\n\nlet token_is_reserved t =\n  token_is_keyword t\n  || token_is_future_reserved t\n  ||\n  match t with\n  | Token.T_IDENTIFIER { raw = \"null\" | \"true\" | \"false\"; _ }\n  | Token.T_NULL\n  | Token.T_TRUE\n  | Token.T_FALSE ->\n    true\n  | _ -> false\n\nlet is_reserved_type str_val =\n  match str_val with\n  | \"any\"\n  | \"bool\"\n  | \"boolean\"\n  | \"empty\"\n  | \"false\"\n  | \"mixed\"\n  | \"null\"\n  | \"number\"\n  | \"bigint\"\n  | \"static\"\n  | \"string\"\n  | \"true\"\n  | \"typeof\"\n  | \"void\"\n  | \"interface\"\n  | \"extends\"\n  | \"_\" ->\n    true\n  
| _ -> false\n\n(* Answer questions about what comes next *)\nmodule Peek = struct\n  open Loc\n  open Token\n\n  let ith_token ~i env = Lex_result.token (lookahead ~i env)\n  let ith_loc ~i env = Lex_result.loc (lookahead ~i env)\n  let ith_errors ~i env = Lex_result.errors (lookahead ~i env)\n\n  let ith_comments ~i env =\n    let comments = Lex_result.comments (lookahead ~i env) in\n    match comments with\n    | [] -> []\n    | _ ->\n      List.filter\n        (fun ({ Loc.start; _ }, _) -> Loc.pos_cmp !(env.consumed_comments_pos) start <= 0)\n        comments\n\n  let token env = ith_token ~i:0 env\n  let loc env = ith_loc ~i:0 env\n\n  (* loc_skip_lookahead is used to give a loc hint to optional tokens such as type annotations *)\n  let loc_skip_lookahead env =\n    let loc =\n      match last_loc env with\n      | Some loc -> loc\n      | None -> failwith \"Peeking current location when not available\"\n    in\n    Loc.{ loc with start = loc._end }\n\n  let errors env = ith_errors ~i:0 env\n  let comments env = ith_comments ~i:0 env\n\n  let has_eaten_comments env =\n    let comments = Lex_result.comments (lookahead ~i:0 env) in\n    List.exists\n      (fun ({ Loc.start; _ }, _) -> Loc.pos_cmp start !(env.consumed_comments_pos) < 0)\n      comments\n\n  let lex_env env = Lookahead.lex_env_0 !(env.lookahead)\n\n  (* True if there is a line terminator before the next token *)\n  let ith_is_line_terminator ~i env =\n    let loc =\n      if i > 0 then\n        Some (ith_loc ~i:(i - 1) env)\n      else\n        last_loc env\n    in\n    match loc with\n    | None -> false\n    | Some loc' -> (ith_loc ~i env).start.line > loc'.start.line\n\n  let is_line_terminator env = ith_is_line_terminator ~i:0 env\n\n  let ith_is_implicit_semicolon ~i env =\n    match ith_token ~i env with\n    | T_EOF\n    | T_RCURLY ->\n      true\n    | T_SEMICOLON -> false\n    | _ -> ith_is_line_terminator ~i env\n\n  let is_implicit_semicolon env = ith_is_implicit_semicolon ~i:0 env\n\n  
let ith_is_identifier ~i env =\n    match ith_token ~i env with\n    | t when token_is_strict_reserved t -> true\n    | t when token_is_future_reserved t -> true\n    | t when token_is_restricted t -> true\n    | T_LET\n    | T_TYPE\n    | T_OPAQUE\n    | T_OF\n    | T_DECLARE\n    | T_ASYNC\n    | T_AWAIT\n    | T_POUND\n    | T_IDENTIFIER _ ->\n      true\n    | _ -> false\n\n  let ith_is_type_identifier ~i env =\n    match lex_mode env with\n    | Lex_mode.TYPE -> begin\n      match ith_token ~i env with\n      | T_IDENTIFIER _ -> true\n      | _ -> false\n    end\n    | Lex_mode.NORMAL -> begin\n      (* Sometimes we peek at type identifiers while in normal lex mode. For\n         example, when deciding whether a `type` token is an identifier or the\n         start of a type declaration, based on whether the following token\n         `is_type_identifier`. *)\n      match ith_token ~i env with\n      | T_IDENTIFIER { raw; _ } when is_reserved_type raw -> false\n      (* reserved type identifiers, but these don't appear in NORMAL mode *)\n      | T_ANY_TYPE\n      | T_MIXED_TYPE\n      | T_EMPTY_TYPE\n      | T_NUMBER_TYPE\n      | T_BIGINT_TYPE\n      | T_STRING_TYPE\n      | T_VOID_TYPE\n      | T_SYMBOL_TYPE\n      | T_BOOLEAN_TYPE _\n      | T_NUMBER_SINGLETON_TYPE _\n      | T_BIGINT_SINGLETON_TYPE _\n      (* identifier-ish *)\n      | T_ASYNC\n      | T_AWAIT\n      | T_BREAK\n      | T_CASE\n      | T_CATCH\n      | T_CLASS\n      | T_CONST\n      | T_CONTINUE\n      | T_DEBUGGER\n      | T_DECLARE\n      | T_DEFAULT\n      | T_DELETE\n      | T_DO\n      | T_ELSE\n      | T_ENUM\n      | T_EXPORT\n      | T_EXTENDS\n      | T_FALSE\n      | T_FINALLY\n      | T_FOR\n      | T_FUNCTION\n      | T_IDENTIFIER _\n      | T_IF\n      | T_IMPLEMENTS\n      | T_IMPORT\n      | T_IN\n      | T_INSTANCEOF\n      | T_INTERFACE\n      | T_LET\n      | T_NEW\n      | T_NULL\n      | T_OF\n      | T_OPAQUE\n      | T_PACKAGE\n      | T_PRIVATE\n      | T_PROTECTED\n  
    | T_PUBLIC\n      | T_RETURN\n      | T_SUPER\n      | T_SWITCH\n      | T_THIS\n      | T_THROW\n      | T_TRUE\n      | T_TRY\n      | T_TYPE\n      | T_VAR\n      | T_WHILE\n      | T_WITH\n      | T_YIELD ->\n        true\n      (* identifier-ish, but not valid types *)\n      | T_STATIC\n      | T_TYPEOF\n      | T_VOID ->\n        false\n      (* syntax *)\n      | T_LCURLY\n      | T_RCURLY\n      | T_LCURLYBAR\n      | T_RCURLYBAR\n      | T_LPAREN\n      | T_RPAREN\n      | T_LBRACKET\n      | T_RBRACKET\n      | T_SEMICOLON\n      | T_COMMA\n      | T_PERIOD\n      | T_ARROW\n      | T_ELLIPSIS\n      | T_AT\n      | T_POUND\n      | T_CHECKS\n      | T_RSHIFT3_ASSIGN\n      | T_RSHIFT_ASSIGN\n      | T_LSHIFT_ASSIGN\n      | T_BIT_XOR_ASSIGN\n      | T_BIT_OR_ASSIGN\n      | T_BIT_AND_ASSIGN\n      | T_MOD_ASSIGN\n      | T_DIV_ASSIGN\n      | T_MULT_ASSIGN\n      | T_EXP_ASSIGN\n      | T_MINUS_ASSIGN\n      | T_PLUS_ASSIGN\n      | T_NULLISH_ASSIGN\n      | T_AND_ASSIGN\n      | T_OR_ASSIGN\n      | T_ASSIGN\n      | T_PLING_PERIOD\n      | T_PLING_PLING\n      | T_PLING\n      | T_COLON\n      | T_OR\n      | T_AND\n      | T_BIT_OR\n      | T_BIT_XOR\n      | T_BIT_AND\n      | T_EQUAL\n      | T_NOT_EQUAL\n      | T_STRICT_EQUAL\n      | T_STRICT_NOT_EQUAL\n      | T_LESS_THAN_EQUAL\n      | T_GREATER_THAN_EQUAL\n      | T_LESS_THAN\n      | T_GREATER_THAN\n      | T_LSHIFT\n      | T_RSHIFT\n      | T_RSHIFT3\n      | T_PLUS\n      | T_MINUS\n      | T_DIV\n      | T_MULT\n      | T_EXP\n      | T_MOD\n      | T_NOT\n      | T_BIT_NOT\n      | T_INCR\n      | T_DECR\n      | T_EOF ->\n        false\n      (* literals *)\n      | T_NUMBER _\n      | T_BIGINT _\n      | T_STRING _\n      | T_TEMPLATE_PART _\n      | T_REGEXP _\n      (* misc that shouldn't appear in NORMAL mode *)\n      | T_JSX_IDENTIFIER _\n      | T_JSX_TEXT _\n      | T_ERROR _ ->\n        false\n    end\n    | Lex_mode.JSX_TAG\n    | Lex_mode.JSX_CHILD\n    | 
Lex_mode.TEMPLATE\n    | Lex_mode.REGEXP ->\n      false\n\n  let ith_is_identifier_name ~i env = ith_is_identifier ~i env || ith_is_type_identifier ~i env\n\n  (* This returns true if the next token is identifier-ish (even if it is an\n     error) *)\n  let is_identifier env = ith_is_identifier ~i:0 env\n  let is_identifier_name env = ith_is_identifier_name ~i:0 env\n  let is_type_identifier env = ith_is_type_identifier ~i:0 env\n\n  let is_function env =\n    token env = T_FUNCTION\n    || token env = T_ASYNC\n       && ith_token ~i:1 env = T_FUNCTION\n       && (loc env)._end.line = (ith_loc ~i:1 env).start.line\n\n  let is_class env =\n    match token env with\n    | T_CLASS\n    | T_AT ->\n      true\n    | _ -> false\nend\n\n(*****************************************************************************)\n(* Errors *)\n(*****************************************************************************)\n\n(* Complains about an error at the location of the lookahead *)\nlet error env e =\n  let loc = Peek.loc env in\n  error_at env (loc, e)\n\nlet get_unexpected_error ?expected token =\n  if token_is_future_reserved token then\n    Parse_error.UnexpectedReserved\n  else if token_is_strict_reserved token then\n    Parse_error.StrictReservedWord\n  else\n    let unexpected = Token.explanation_of_token token in\n    match expected with\n    | Some expected_msg -> Parse_error.UnexpectedWithExpected (unexpected, expected_msg)\n    | None -> Parse_error.Unexpected unexpected\n\nlet error_unexpected ?expected env =\n  (* So normally we consume the lookahead lex result when Eat.token calls\n   * Parser_env.advance, which will add any lexing errors to our list of errors.\n   * However, raising an unexpected error for a lookahead is kind of like\n   * consuming that token, so we should process any lexing errors before\n   * complaining about the unexpected token *)\n  error_list env (Peek.errors env);\n  error env (get_unexpected_error ?expected (Peek.token env))\n\nlet 
error_on_decorators env =\n  List.iter (fun decorator -> error_at env (fst decorator, Parse_error.UnsupportedDecorator))\n\nlet error_nameless_declaration env kind =\n  let expected =\n    if in_export env then\n      Printf.sprintf\n        \"an identifier. When exporting a %s as a named export, you must specify a %s name. Did you mean `export default %s ...`?\"\n        kind\n        kind\n        kind\n    else\n      \"an identifier\"\n  in\n  error_unexpected ~expected env\n\nlet strict_error env e = if in_strict_mode env then error env e\nlet strict_error_at env (loc, e) = if in_strict_mode env then error_at env (loc, e)\n\nlet function_as_statement_error_at env loc =\n  error_at env (loc, Parse_error.FunctionAsStatement { in_strict_mode = in_strict_mode env })\n\n(* Consume zero or more tokens *)\nmodule Eat = struct\n  (* Consume a single token *)\n  let token env =\n    (* If there's a token_sink, emit the lexed token before moving forward *)\n    (match !(env.token_sink) with\n    | None -> ()\n    | Some token_sink ->\n      token_sink\n        {\n          token_loc = Peek.loc env;\n          token = Peek.token env;\n          (*\n           * The lex mode is useful because it gives context to some\n           * context-sensitive tokens.\n           *\n           * Some examples of such tokens include:\n           *\n           * `=>` - Part of an arrow function? or part of a type annotation?\n           * `<`  - A less-than? 
Or an opening to a JSX element?\n           * ...etc...\n           *)\n          token_context = lex_mode env;\n        });\n\n    env.lex_env := Peek.lex_env env;\n\n    error_list env (Peek.errors env);\n    env.comments := List.rev_append (Lex_result.comments (lookahead ~i:0 env)) !(env.comments);\n    env.last_lex_result := Some (lookahead ~i:0 env);\n\n    Lookahead.junk !(env.lookahead)\n\n  (** [maybe env t] eats the next token and returns [true] if it is [t], else return [false] *)\n  let maybe env t =\n    let is_t = Token.equal (Peek.token env) t in\n    if is_t then token env;\n    is_t\n\n  let push_lex_mode env mode =\n    env.lex_mode_stack := mode :: !(env.lex_mode_stack);\n    env.lookahead := Lookahead.create !(env.lex_env) (lex_mode env)\n\n  let pop_lex_mode env =\n    let new_stack =\n      match !(env.lex_mode_stack) with\n      | _mode :: stack -> stack\n      | _ -> failwith \"Popping lex mode from empty stack\"\n    in\n    env.lex_mode_stack := new_stack;\n    env.lookahead := Lookahead.create !(env.lex_env) (lex_mode env)\n\n  let double_pop_lex_mode env =\n    let new_stack =\n      match !(env.lex_mode_stack) with\n      | _ :: _ :: stack -> stack\n      | _ -> failwith \"Popping lex mode from empty stack\"\n    in\n    env.lex_mode_stack := new_stack;\n    env.lookahead := Lookahead.create !(env.lex_env) (lex_mode env)\n\n  let trailing_comments env =\n    let open Loc in\n    let loc = Peek.loc env in\n    if Peek.token env = Token.T_COMMA && Peek.ith_is_line_terminator ~i:1 env then (\n      let trailing_before_comma = Peek.comments env in\n      let trailing_after_comma =\n        List.filter\n          (fun (comment_loc, _) -> comment_loc.start.line <= loc._end.line)\n          (Lex_result.comments (lookahead ~i:1 env))\n      in\n      let trailing = trailing_before_comma @ trailing_after_comma in\n      consume_comments_until env { Loc.line = loc._end.line + 1; column = 0 };\n      trailing\n    ) else\n      let trailing = 
Peek.comments env in\n      consume_comments_until env loc._end;\n      trailing\n\n  let comments_until_next_line env =\n    let open Loc in\n    match !(env.last_lex_result) with\n    | None -> []\n    | Some { Lex_result.lex_loc = last_loc; _ } ->\n      let comments = Peek.comments env in\n      let comments = List.filter (fun (loc, _) -> loc.start.line <= last_loc._end.line) comments in\n      consume_comments_until env { line = last_loc._end.line + 1; column = 0 };\n      comments\n\n  let program_comments env =\n    let open Flow_ast.Comment in\n    let comments = Peek.comments env in\n    let flow_directive = \"@flow\" in\n    let flow_directive_length = String.length flow_directive in\n    let contains_flow_directive { text; _ } =\n      let text_length = String.length text in\n      let rec contains_flow_directive_after_offset off =\n        if off + flow_directive_length > text_length then\n          false\n        else\n          String.sub text off flow_directive_length = flow_directive\n          || contains_flow_directive_after_offset (off + 1)\n      in\n      contains_flow_directive_after_offset 0\n    in\n    (* Comments up through the last comment with an @flow directive are considered program comments *)\n    let rec flow_directive_comments comments =\n      match comments with\n      | [] -> []\n      | (loc, comment) :: rest ->\n        if contains_flow_directive comment then (\n          (env.consumed_comments_pos := Loc.(loc._end));\n          List.rev ((loc, comment) :: rest)\n        ) else\n          flow_directive_comments rest\n    in\n    let program_comments = flow_directive_comments (List.rev comments) in\n    let program_comments =\n      if program_comments <> [] then\n        program_comments\n      else\n        (* If there is no @flow directive, consider the first block comment a program comment if\n           it starts with \"/**\" *)\n        match comments with\n        | ((loc, { kind = Block; text; _ }) as first_comment) :: 
_\n          when String.length text >= 1 && text.[0] = '*' ->\n          (env.consumed_comments_pos := Loc.(loc._end));\n          [first_comment]\n        | _ -> []\n    in\n    program_comments\nend\n\nmodule Expect = struct\n  let get_error env t =\n    let expected = Token.explanation_of_token ~use_article:true t in\n    (Peek.loc env, get_unexpected_error ~expected (Peek.token env))\n\n  let error env t =\n    let expected = Token.explanation_of_token ~use_article:true t in\n    error_unexpected ~expected env\n\n  let token env t =\n    if not (Token.equal (Peek.token env) t) then error env t;\n    Eat.token env\n\n  (** [token_maybe env T_FOO] eats a token if it is [T_FOO], and errors without consuming if\n      not. Returns whether it consumed a token, like [Eat.maybe]. *)\n  let token_maybe env t =\n    let ate = Eat.maybe env t in\n    if not ate then error env t;\n    ate\n\n  (** [token_opt env T_FOO] eats a token if it is [T_FOO], and errors without consuming if not.\n      This differs from [token], which always consumes. Only use [token_opt] when it's ok for\n      the parser to not advance, like if you are guaranteed that something else has eaten a\n      token. *)\n  let token_opt env t = ignore (token_maybe env t)\n\n  let identifier env name =\n    let t = Peek.token env in\n    begin\n      match t with\n      | Token.T_IDENTIFIER { raw; _ } when raw = name -> ()\n      | _ ->\n        let expected = Printf.sprintf \"the identifier `%s`\" name in\n        error_unexpected ~expected env\n    end;\n    Eat.token env\nend\n\n(* This module allows you to try parsing and rollback if you need. 
This is not\n * cheap and its usage is strongly discouraged *)\nmodule Try = struct\n  type 'a parse_result =\n    | ParsedSuccessfully of 'a\n    | FailedToParse\n\n  exception Rollback\n\n  type saved_state = {\n    saved_errors: (Loc.t * Parse_error.t) list;\n    saved_comments: Loc.t Flow_ast.Comment.t list;\n    saved_last_lex_result: Lex_result.t option;\n    saved_lex_mode_stack: Lex_mode.t list;\n    saved_lex_env: Lex_env.t;\n    saved_consumed_comments_pos: Loc.position;\n    token_buffer: ((token_sink_result -> unit) * token_sink_result Queue.t) option;\n  }\n\n  let save_state env =\n    let token_buffer =\n      match !(env.token_sink) with\n      | None -> None\n      | Some orig_token_sink ->\n        let buffer = Queue.create () in\n        env.token_sink := Some (fun token_data -> Queue.add token_data buffer);\n        Some (orig_token_sink, buffer)\n    in\n    {\n      saved_errors = !(env.errors);\n      saved_comments = !(env.comments);\n      saved_last_lex_result = !(env.last_lex_result);\n      saved_lex_mode_stack = !(env.lex_mode_stack);\n      saved_lex_env = !(env.lex_env);\n      saved_consumed_comments_pos = !(env.consumed_comments_pos);\n      token_buffer;\n    }\n\n  let reset_token_sink ~flush env token_buffer_info =\n    match token_buffer_info with\n    | None -> ()\n    | Some (orig_token_sink, token_buffer) ->\n      env.token_sink := Some orig_token_sink;\n      if flush then Queue.iter orig_token_sink token_buffer\n\n  let rollback_state env saved_state =\n    reset_token_sink ~flush:false env saved_state.token_buffer;\n    env.errors := saved_state.saved_errors;\n    env.comments := saved_state.saved_comments;\n    env.last_lex_result := saved_state.saved_last_lex_result;\n    env.lex_mode_stack := saved_state.saved_lex_mode_stack;\n    env.lex_env := saved_state.saved_lex_env;\n    env.consumed_comments_pos := saved_state.saved_consumed_comments_pos;\n    env.lookahead := Lookahead.create !(env.lex_env) (lex_mode env);\n\n  
  FailedToParse\n\n  let success env saved_state result =\n    reset_token_sink ~flush:true env saved_state.token_buffer;\n    ParsedSuccessfully result\n\n  let to_parse env parse =\n    let saved_state = save_state env in\n    try success env saved_state (parse env) with\n    | Rollback -> rollback_state env saved_state\n\n  let or_else env ~fallback parse =\n    match to_parse env parse with\n    | ParsedSuccessfully result -> result\n    | FailedToParse -> fallback\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/parser_env.mli",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\n(* This module provides a layer between the lexer and the parser which includes\n * some parser state and some lexer state *)\n\nmodule SSet : Set.S with type elt = string\n\nmodule Lex_mode : sig\n  type t =\n    | NORMAL\n    | TYPE\n    | JSX_TAG\n    | JSX_CHILD\n    | TEMPLATE\n    | REGEXP\n\n  val debug_string_of_lex_mode : t -> string\nend\n\ntype token_sink_result = {\n  token_loc: Loc.t;\n  token: Token.t;\n  token_context: Lex_mode.t;\n}\n\ntype parse_options = {\n  enums: bool;  (** enable parsing of Flow enums *)\n  esproposal_decorators: bool;  (** enable parsing of decorators *)\n  types: bool;  (** enable parsing of Flow types *)\n  use_strict: bool;  (** treat the file as strict, without needing a \"use strict\" directive *)\n}\n\nval default_parse_options : parse_options\n\ntype env\n\ntype allowed_super =\n  | No_super\n  | Super_prop\n  | Super_prop_or_call\n\n(* constructor: *)\nval init_env :\n  ?token_sink:(token_sink_result -> unit) option ->\n  ?parse_options:parse_options option ->\n  File_key.t option ->\n  string ->\n  env\n\n(* getters: *)\nval in_strict_mode : env -> bool\n\nval last_loc : env -> Loc.t option\n\nval last_token : env -> Token.t option\n\nval in_export : env -> bool\n\nval in_export_default : env -> bool\n\nval labels : env -> SSet.t\n\nval comments : env -> Loc.t Flow_ast.Comment.t list\n\nval in_loop : env -> bool\n\nval in_switch : env -> bool\n\nval in_formal_parameters : env -> bool\n\nval in_function : env -> bool\n\nval allow_yield : env -> bool\n\nval allow_await : env -> bool\n\nval allow_directive : env -> bool\n\nval allow_super : env -> allowed_super\n\nval has_simple_parameters : env -> bool\n\nval no_in : env -> bool\n\nval no_call : env -> bool\n\nval no_let : env -> bool\n\nval 
no_anon_function_type : env -> bool\n\nval no_new : env -> bool\n\nval errors : env -> (Loc.t * Parse_error.t) list\n\nval parse_options : env -> parse_options\n\nval source : env -> File_key.t option\n\nval should_parse_types : env -> bool\n\nval get_unexpected_error : ?expected:string -> Token.t -> Parse_error.t\n\n(* mutators: *)\nval error_at : env -> Loc.t * Parse_error.t -> unit\n\nval error : env -> Parse_error.t -> unit\n\nval error_unexpected : ?expected:string -> env -> unit\n\nval error_on_decorators : env -> (Loc.t * 'a) list -> unit\n\nval error_nameless_declaration : env -> string -> unit\n\nval strict_error : env -> Parse_error.t -> unit\n\nval strict_error_at : env -> Loc.t * Parse_error.t -> unit\n\nval function_as_statement_error_at : env -> Loc.t -> unit\n\nval error_list : env -> (Loc.t * Parse_error.t) list -> unit\n\nval enter_class : env -> unit\n\nval exit_class : env -> unit\n\nval add_declared_private : env -> string -> unit\n\nval add_used_private : env -> string -> Loc.t -> unit\n\nval consume_comments_until : env -> Loc.position -> unit\n\n(* functional operations -- these return shallow copies, so future mutations to\n * the returned env will also affect the original: *)\nval with_strict : bool -> env -> env\n\nval with_in_formal_parameters : bool -> env -> env\n\nval with_in_function : bool -> env -> env\n\nval with_allow_yield : bool -> env -> env\n\nval with_allow_await : bool -> env -> env\n\nval with_allow_directive : bool -> env -> env\n\nval with_allow_super : allowed_super -> env -> env\n\nval with_no_let : bool -> env -> env\n\nval with_in_loop : bool -> env -> env\n\nval with_no_in : bool -> env -> env\n\nval with_no_anon_function_type : bool -> env -> env\n\nval with_no_new : bool -> env -> env\n\nval with_in_switch : bool -> env -> env\n\nval with_in_export : bool -> env -> env\n\nval with_in_export_default : bool -> env -> env\n\nval with_no_call : bool -> env -> env\n\nval with_error_callback : (env -> Parse_error.t -> 
unit) -> env -> env\n\nval without_error_callback : env -> env\n\nval add_label : env -> string -> env\n\nval enter_function : env -> async:bool -> generator:bool -> simple_params:bool -> env\n\nval is_reserved : string -> bool\n\nval token_is_reserved : Token.t -> bool\n\nval is_future_reserved : string -> bool\n\nval is_strict_reserved : string -> bool\n\nval token_is_strict_reserved : Token.t -> bool\n\nval is_restricted : string -> bool\n\nval is_reserved_type : string -> bool\n\nval token_is_restricted : Token.t -> bool\n\nmodule Peek : sig\n  val token : env -> Token.t\n\n  val loc : env -> Loc.t\n\n  val loc_skip_lookahead : env -> Loc.t\n\n  val errors : env -> (Loc.t * Parse_error.t) list\n\n  val comments : env -> Loc.t Flow_ast.Comment.t list\n\n  val has_eaten_comments : env -> bool\n\n  val is_line_terminator : env -> bool\n\n  val is_implicit_semicolon : env -> bool\n\n  val is_identifier : env -> bool\n\n  val is_type_identifier : env -> bool\n\n  val is_identifier_name : env -> bool\n\n  val is_function : env -> bool\n\n  val is_class : env -> bool\n\n  val ith_token : i:int -> env -> Token.t\n\n  val ith_loc : i:int -> env -> Loc.t\n\n  val ith_errors : i:int -> env -> (Loc.t * Parse_error.t) list\n\n  val ith_comments : i:int -> env -> Loc.t Flow_ast.Comment.t list\n\n  val ith_is_line_terminator : i:int -> env -> bool\n\n  val ith_is_implicit_semicolon : i:int -> env -> bool\n\n  val ith_is_identifier : i:int -> env -> bool\n\n  val ith_is_identifier_name : i:int -> env -> bool\n\n  val ith_is_type_identifier : i:int -> env -> bool\nend\n\nmodule Eat : sig\n  val token : env -> unit\n\n  val maybe : env -> Token.t -> bool\n\n  val push_lex_mode : env -> Lex_mode.t -> unit\n\n  val pop_lex_mode : env -> unit\n\n  val double_pop_lex_mode : env -> unit\n\n  val trailing_comments : env -> Loc.t Flow_ast.Comment.t list\n\n  val comments_until_next_line : env -> Loc.t Flow_ast.Comment.t list\n\n  val program_comments : env -> Loc.t Flow_ast.Comment.t 
list\nend\n\nmodule Expect : sig\n  val get_error : env -> Token.t -> Loc.t * Parse_error.t\n\n  val error : env -> Token.t -> unit\n\n  val token : env -> Token.t -> unit\n\n  val token_opt : env -> Token.t -> unit\n\n  val token_maybe : env -> Token.t -> bool\n\n  val identifier : env -> string -> unit\nend\n\nmodule Try : sig\n  type 'a parse_result =\n    | ParsedSuccessfully of 'a\n    | FailedToParse\n\n  exception Rollback\n\n  val to_parse : env -> (env -> 'a) -> 'a parse_result\n\n  val or_else : env -> fallback:'a -> (env -> 'a) -> 'a\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/parser_flow.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Sedlexing = Flow_sedlexing\nmodule Ast = Flow_ast\nopen Token\nopen Parser_env\nopen Parser_common\n\n(* Sometimes we add the same error for multiple different reasons. This is hard\n   to avoid, so instead we just filter the duplicates out. This function takes\n   a reversed list of errors and returns the list in forward order with dupes\n   removed. This differs from a set because the original order is preserved. *)\nlet filter_duplicate_errors =\n  let module PrintableErrorSet = Set.Make (struct\n    type t = Loc.t * Parse_error.t\n\n    let compare (a_loc, a_error) (b_loc, b_error) =\n      let loc = Loc.compare a_loc b_loc in\n      if loc = 0 then\n        Parse_error.compare a_error b_error\n      else\n        loc\n  end) in\n  fun errs ->\n    let errs = List.rev errs in\n    let (_, deduped) =\n      List.fold_left\n        (fun (set, deduped) err ->\n          if PrintableErrorSet.mem err set then\n            (set, deduped)\n          else\n            (PrintableErrorSet.add err set, err :: deduped))\n        (PrintableErrorSet.empty, [])\n        errs\n    in\n    List.rev deduped\n\nlet check_for_duplicate_exports =\n  let open Ast in\n  let record_export env seen (loc, { Identifier.name = export_name; comments = _ }) =\n    if export_name = \"\" then\n      (* empty identifiers signify an error, don't export it *)\n      seen\n    else if SSet.mem export_name seen then (\n      error_at env (loc, Parse_error.DuplicateExport export_name);\n      seen\n    ) else\n      SSet.add export_name seen\n  in\n  let extract_pattern_binding_names =\n    let rec fold acc =\n      let open Pattern in\n      function\n      | (_, Object { Object.properties; _ }) ->\n        List.fold_left\n          (fun acc prop ->\n            match prop with\n  
          | Object.Property (_, { Object.Property.pattern; _ })\n            | Object.RestElement (_, { RestElement.argument = pattern; comments = _ }) ->\n              fold acc pattern)\n          acc\n          properties\n      | (_, Array { Array.elements; _ }) ->\n        List.fold_left\n          (fun acc elem ->\n            match elem with\n            | Array.Element (_, { Array.Element.argument = pattern; default = _ })\n            | Array.RestElement (_, { RestElement.argument = pattern; comments = _ }) ->\n              fold acc pattern\n            | Array.Hole _ -> acc)\n          acc\n          elements\n      | (_, Identifier { Pattern.Identifier.name; _ }) -> name :: acc\n      | (_, Expression _) -> failwith \"Parser error: No such thing as an expression pattern!\"\n    in\n    List.fold_left fold\n  in\n  let record_export_of_statement env seen decl =\n    match decl with\n    | (_, Statement.ExportDefaultDeclaration { Statement.ExportDefaultDeclaration.default; _ }) ->\n      record_export env seen (Flow_ast_utils.ident_of_source (default, \"default\"))\n    | ( _,\n        Statement.ExportNamedDeclaration\n          { Statement.ExportNamedDeclaration.specifiers = Some specifiers; declaration = None; _ }\n      ) ->\n      let open Statement.ExportNamedDeclaration in\n      (match specifiers with\n      | ExportSpecifiers specifiers ->\n        List.fold_left\n          (fun seen (_, { Statement.ExportNamedDeclaration.ExportSpecifier.local; exported }) ->\n            match exported with\n            | Some exported -> record_export env seen exported\n            | None -> record_export env seen local)\n          seen\n          specifiers\n      | ExportBatchSpecifier _ ->\n        (* doesn't export specific names *)\n        seen)\n    | ( _,\n        Statement.ExportNamedDeclaration\n          { Statement.ExportNamedDeclaration.specifiers = None; declaration = Some declaration; _ }\n      ) ->\n      (match declaration with\n      | ( 
loc,\n          ( Statement.TypeAlias { Statement.TypeAlias.id; _ }\n          | Statement.OpaqueType { Statement.OpaqueType.id; _ }\n          | Statement.InterfaceDeclaration { Statement.Interface.id; _ }\n          | Statement.ClassDeclaration { Class.id = Some id; _ }\n          | Statement.FunctionDeclaration { Function.id = Some id; _ }\n          | Statement.EnumDeclaration { Statement.EnumDeclaration.id; _ } )\n        ) ->\n        record_export\n          env\n          seen\n          (Flow_ast_utils.ident_of_source (loc, Flow_ast_utils.name_of_ident id))\n      | (_, Statement.VariableDeclaration { Statement.VariableDeclaration.declarations; _ }) ->\n        declarations\n        |> List.fold_left\n             (fun names (_, { Statement.VariableDeclaration.Declarator.id; _ }) ->\n               extract_pattern_binding_names names [id])\n             []\n        |> List.fold_left (record_export env) seen\n      | ( _,\n          Statement.(\n            ( Block _ | Break _\n            | ClassDeclaration { Class.id = None; _ }\n            | Continue _ | Debugger _ | DeclareClass _ | DeclareExportDeclaration _\n            | DeclareFunction _ | DeclareInterface _ | DeclareModule _ | DeclareModuleExports _\n            | DeclareTypeAlias _ | DeclareOpaqueType _ | DeclareVariable _ | DoWhile _ | Empty _\n            | ExportDefaultDeclaration _ | ExportNamedDeclaration _ | Expression _ | For _ | ForIn _\n            | ForOf _\n            | FunctionDeclaration { Function.id = None; _ }\n            | If _ | ImportDeclaration _ | Labeled _ | Return _ | Switch _ | Throw _ | Try _\n            | While _ | With _ ))\n        ) ->\n        (* these don't export names -- some are invalid, but the AST allows them *)\n        seen)\n    | ( _,\n        Statement.ExportNamedDeclaration\n          { Statement.ExportNamedDeclaration.declaration = None; specifiers = None; _ }\n      )\n    | ( _,\n        Statement.ExportNamedDeclaration\n          { 
Statement.ExportNamedDeclaration.declaration = Some _; specifiers = Some _; _ }\n      ) ->\n      (* impossible *)\n      seen\n    | ( _,\n        Statement.(\n          ( Block _ | Break _ | ClassDeclaration _ | Continue _ | Debugger _ | DeclareClass _\n          | DeclareExportDeclaration _ | DeclareFunction _ | DeclareInterface _ | DeclareModule _\n          | DeclareModuleExports _ | DeclareTypeAlias _ | DeclareOpaqueType _ | DeclareVariable _\n          | DoWhile _ | Empty _ | EnumDeclaration _ | Expression _ | For _ | ForIn _ | ForOf _\n          | FunctionDeclaration _ | If _ | ImportDeclaration _ | InterfaceDeclaration _ | Labeled _\n          | Return _ | Switch _ | Throw _ | Try _ | TypeAlias _ | OpaqueType _\n          | VariableDeclaration _ | While _ | With _ ))\n      ) ->\n      seen\n  in\n  (fun env stmts -> ignore (List.fold_left (record_export_of_statement env) SSet.empty stmts))\n\nmodule rec Parse : PARSER = struct\n  module Type = Type_parser.Type (Parse)\n  module Declaration = Declaration_parser.Declaration (Parse) (Type)\n  module Pattern_cover = Pattern_cover.Cover (Parse)\n  module Expression = Expression_parser.Expression (Parse) (Type) (Declaration) (Pattern_cover)\n  module Object = Object_parser.Object (Parse) (Type) (Declaration) (Expression) (Pattern_cover)\n\n  module Statement =\n    Statement_parser.Statement (Parse) (Type) (Declaration) (Object) (Pattern_cover)\n\n  module Pattern = Pattern_parser.Pattern (Parse) (Type)\n  module JSX = Jsx_parser.JSX (Parse)\n\n  let annot = Type.annotation\n\n  let identifier ?restricted_error env =\n    (match Peek.token env with\n    (* \"let\" is disallowed as an identifier in a few situations. 11.6.2.1\n       lists them out. 
It is always disallowed in strict mode *)\n    | T_LET when in_strict_mode env -> error env Parse_error.StrictReservedWord\n    | T_LET when no_let env -> error_unexpected env\n    | T_LET -> ()\n    (* `allow_await` means that `await` is allowed to be a keyword,\n       which makes it illegal to use as an identifier.\n       https://tc39.github.io/ecma262/#sec-identifiers-static-semantics-early-errors *)\n    | T_AWAIT when allow_await env -> error env Parse_error.UnexpectedReserved\n    | T_AWAIT -> ()\n    (* `allow_yield` means that `yield` is allowed to be a keyword,\n       which makes it illegal to use as an identifier.\n       https://tc39.github.io/ecma262/#sec-identifiers-static-semantics-early-errors *)\n    | T_YIELD when allow_yield env -> error env Parse_error.UnexpectedReserved\n    | T_YIELD when in_strict_mode env -> error env Parse_error.StrictReservedWord\n    | T_YIELD -> ()\n    | t when token_is_strict_reserved t -> strict_error env Parse_error.StrictReservedWord\n    | t when token_is_reserved t -> error_unexpected env\n    | t ->\n      (match restricted_error with\n      | Some err when token_is_restricted t -> strict_error env err\n      | _ -> ()));\n    identifier_name env\n\n  let rec program env =\n    let leading = Eat.program_comments env in\n    let stmts = module_body_with_directives env (fun _ -> false) in\n    let end_loc = Peek.loc env in\n    Expect.token env T_EOF;\n    check_for_duplicate_exports env stmts;\n    let loc =\n      match stmts with\n      | [] -> end_loc\n      | _ -> Loc.btwn (fst (List.hd stmts)) (fst (List.hd (List.rev stmts)))\n    in\n    let all_comments = List.rev (comments env) in\n    ( loc,\n      {\n        Ast.Program.statements = stmts;\n        comments = Flow_ast_utils.mk_comments_opt ~leading ();\n        all_comments;\n      }\n    )\n\n  and directives =\n    let check env token =\n      match token with\n      | T_STRING (loc, _, _, octal) ->\n        if octal then strict_error_at env (loc, 
Parse_error.StrictOctalLiteral)\n      | _ -> failwith (\"Nooo: \" ^ token_to_string token ^ \"\\n\")\n    in\n    let rec statement_list env term_fn item_fn (string_tokens, stmts, contains_use_strict) =\n      match Peek.token env with\n      | T_EOF -> (env, string_tokens, stmts, contains_use_strict)\n      | t when term_fn t -> (env, string_tokens, stmts, contains_use_strict)\n      | T_STRING _ as string_token ->\n        let possible_directive = item_fn env in\n        let stmts = possible_directive :: stmts in\n        (match possible_directive with\n        | (loc, Ast.Statement.Expression { Ast.Statement.Expression.directive = Some raw; _ }) ->\n          (* 14.1.1 says that it has to be \"use strict\" without any\n             escapes, so \"use\\x20strict\" is disallowed. *)\n          let strict = raw = \"use strict\" in\n          if strict && not (has_simple_parameters env) then\n            error_at env (loc, Parse_error.StrictParamNotSimple);\n          let env =\n            if strict then\n              with_strict true env\n            else\n              env\n          in\n          let string_tokens = string_token :: string_tokens in\n          statement_list env term_fn item_fn (string_tokens, stmts, contains_use_strict || strict)\n        | _ -> (env, string_tokens, stmts, contains_use_strict))\n      | _ -> (env, string_tokens, stmts, contains_use_strict)\n    in\n    fun env term_fn item_fn ->\n      let env = with_allow_directive true env in\n      let (env, string_tokens, stmts, contains_use_strict) =\n        statement_list env term_fn item_fn ([], [], false)\n      in\n      let env = with_allow_directive false env in\n      List.iter (check env) (List.rev string_tokens);\n      (env, stmts, contains_use_strict)\n\n  (* 15.2 *)\n  and module_item env =\n    let decorators = Object.decorator_list env in\n    match Peek.token env with\n    | T_EXPORT -> Statement.export_declaration ~decorators env\n    | T_IMPORT ->\n      
error_on_decorators env decorators;\n      let statement =\n        match Peek.ith_token ~i:1 env with\n        | T_LPAREN (* import(...) *)\n        | T_PERIOD (* import.meta *) ->\n          Statement.expression env\n        | _ -> Statement.import_declaration env\n      in\n      statement\n    | T_DECLARE when Peek.ith_token ~i:1 env = T_EXPORT ->\n      error_on_decorators env decorators;\n      Statement.declare_export_declaration env\n    | _ -> statement_list_item env ~decorators\n\n  and module_body_with_directives env term_fn =\n    let (env, directives, _contains_use_strict) = directives env term_fn module_item in\n    let stmts = module_body ~term_fn env in\n    (* Prepend the directives *)\n    List.fold_left (fun acc stmt -> stmt :: acc) stmts directives\n\n  and module_body =\n    let rec module_item_list env term_fn acc =\n      match Peek.token env with\n      | T_EOF -> List.rev acc\n      | t when term_fn t -> List.rev acc\n      | _ -> module_item_list env term_fn (module_item env :: acc)\n    in\n    (fun ~term_fn env -> module_item_list env term_fn [])\n\n  and statement_list_with_directives ~term_fn env =\n    let (env, directives, contains_use_strict) = directives env term_fn statement_list_item in\n    let stmts = statement_list ~term_fn env in\n    (* Prepend the directives *)\n    let stmts = List.fold_left (fun acc stmt -> stmt :: acc) stmts directives in\n    (stmts, contains_use_strict)\n\n  and statement_list =\n    let rec statements env term_fn acc =\n      match Peek.token env with\n      | T_EOF -> List.rev acc\n      | t when term_fn t -> List.rev acc\n      | _ -> statements env term_fn (statement_list_item env :: acc)\n    in\n    (fun ~term_fn env -> statements env term_fn [])\n\n  and statement_list_item ?(decorators = []) env =\n    if not (Peek.is_class env) then error_on_decorators env decorators;\n    let open Statement in\n    match Peek.token env with\n    (* Remember kids, these look like statements but they're not\n   
    * statements... (see section 13) *)\n    | T_LET -> let_ env\n    | T_CONST -> const env\n    | _ when Peek.is_function env -> Declaration._function env\n    | _ when Peek.is_class env -> class_declaration env decorators\n    | T_INTERFACE -> interface env\n    | T_DECLARE -> declare env\n    | T_TYPE -> type_alias env\n    | T_OPAQUE -> opaque_type env\n    | T_ENUM when (parse_options env).enums -> Declaration.enum_declaration env\n    | _ -> statement env\n\n  and statement env =\n    let open Statement in\n    match Peek.token env with\n    | T_EOF ->\n      error_unexpected ~expected:\"the start of a statement\" env;\n      (Peek.loc env, Ast.Statement.Empty { Ast.Statement.Empty.comments = None })\n    | T_SEMICOLON -> empty env\n    | T_LCURLY -> block env\n    | T_VAR -> var env\n    | T_BREAK -> break env\n    | T_CONTINUE -> continue env\n    | T_DEBUGGER -> debugger env\n    | T_DO -> do_while env\n    | T_FOR -> for_ env\n    | T_IF -> if_ env\n    | T_RETURN -> return env\n    | T_SWITCH -> switch env\n    | T_THROW -> throw env\n    | T_TRY -> try_ env\n    | T_WHILE -> while_ env\n    | T_WITH -> with_ env\n    (* If we see an else then it's definitely an error, but we can probably\n     * assume that this is a malformed if statement that is missing the if *)\n    | T_ELSE -> if_ env\n    (* There are a bunch of tokens that aren't the start of any valid\n     * statement. 
We list them here in order to skip over them, rather than\n     * getting stuck *)\n    | T_COLON\n    | T_RPAREN\n    | T_RCURLY\n    | T_RBRACKET\n    | T_COMMA\n    | T_PERIOD\n    | T_PLING_PERIOD\n    | T_ARROW\n    | T_IN\n    | T_INSTANCEOF\n    | T_CATCH\n    | T_FINALLY\n    | T_CASE\n    | T_DEFAULT\n    | T_EXTENDS\n    | T_STATIC\n    | T_EXPORT\n    (* TODO *)\n    | T_ELLIPSIS ->\n      error_unexpected ~expected:\"the start of a statement\" env;\n      Eat.token env;\n      statement env\n    (* The rest of these patterns handle ExpressionStatement and its negative\n       lookaheads, which prevent ambiguities.\n       See https://tc39.github.io/ecma262/#sec-expression-statement *)\n    | _ when Peek.is_function env ->\n      let func = Declaration._function env in\n      function_as_statement_error_at env (fst func);\n      func\n    | T_LET when Peek.ith_token ~i:1 env = T_LBRACKET ->\n      (* `let [foo]` is ambiguous: either a let binding pattern, or a\n         member expression, so it is banned. 
*)\n      let loc = Loc.btwn (Peek.loc env) (Peek.ith_loc ~i:1 env) in\n      error_at env (loc, Parse_error.AmbiguousLetBracket);\n      Statement.expression env\n    (* recover as a member expression *)\n    | _ when Peek.is_identifier env -> maybe_labeled env\n    | _ when Peek.is_class env ->\n      error_unexpected env;\n      Eat.token env;\n      Statement.expression env\n    | _ -> Statement.expression env\n\n  and expression env =\n    let start_loc = Peek.loc env in\n    let expr = Expression.assignment env in\n    match Peek.token env with\n    | T_COMMA -> Expression.sequence env ~start_loc [expr]\n    | _ -> expr\n\n  and expression_or_pattern env =\n    let start_loc = Peek.loc env in\n    let expr_or_pattern = Expression.assignment_cover env in\n    match Peek.token env with\n    | T_COMMA ->\n      let expr = Pattern_cover.as_expression env expr_or_pattern in\n      let seq = Expression.sequence env ~start_loc [expr] in\n      Cover_expr seq\n    | _ -> expr_or_pattern\n\n  and conditional = Expression.conditional\n  and assignment = Expression.assignment\n  and left_hand_side = Expression.left_hand_side\n  and object_initializer = Object._initializer\n  and object_key = Object.key\n  and class_declaration = Object.class_declaration\n  and class_expression = Object.class_expression\n  and is_assignable_lhs = Expression.is_assignable_lhs\n  and number = Expression.number\n\n  and identifier_with_type =\n    let with_loc_helper no_optional restricted_error env =\n      let name = identifier ~restricted_error env in\n      let optional = (not no_optional) && Peek.token env = T_PLING in\n      if optional then (\n        if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation;\n        Expect.token env T_PLING\n      );\n      let annot = Type.annotation_opt env in\n      Ast.Pattern.Identifier.{ name; optional; annot }\n    in\n    fun env ?(no_optional = false) restricted_error ->\n      with_loc (with_loc_helper 
no_optional restricted_error) env\n\n  and block_body env =\n    let start_loc = Peek.loc env in\n    let leading = Peek.comments env in\n    Expect.token env T_LCURLY;\n    let term_fn t = t = T_RCURLY in\n    let body = statement_list ~term_fn env in\n    let end_loc = Peek.loc env in\n    let internal =\n      if body = [] then\n        Peek.comments env\n      else\n        []\n    in\n    Expect.token env T_RCURLY;\n    let trailing = Eat.trailing_comments env in\n    ( Loc.btwn start_loc end_loc,\n      {\n        Ast.Statement.Block.body;\n        comments = Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n      }\n    )\n\n  and function_block_body ~expression =\n    with_loc_extra (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_LCURLY;\n        let term_fn t = t = T_RCURLY in\n        let (body, contains_use_strict) = statement_list_with_directives ~term_fn env in\n        let internal =\n          if body = [] then\n            Peek.comments env\n          else\n            []\n        in\n        Expect.token env T_RCURLY;\n        let trailing =\n          match (expression, Peek.token env) with\n          | (true, _)\n          | (_, (T_RCURLY | T_EOF)) ->\n            Eat.trailing_comments env\n          | _ when Peek.is_line_terminator env -> Eat.comments_until_next_line env\n          | _ -> []\n        in\n        let comments =\n          Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ()\n        in\n        ({ Ast.Statement.Block.body; comments }, contains_use_strict)\n    )\n\n  and jsx_element_or_fragment = JSX.element_or_fragment\n  and pattern = Pattern.pattern\n  and pattern_from_expr = Pattern.from_expr\nend\n\n(*****************************************************************************)\n(* Entry points *)\n(*****************************************************************************)\nlet do_parse env parser fail =\n  let ast = parser env in\n  
let error_list = filter_duplicate_errors (errors env) in\n  match error_list with\n  | e :: es when fail -> raise (Parse_error.Error (e, es))\n  | _ -> (ast, error_list)\n\n(* Makes the input parser expect EOF at the end. Use this to error on trailing\n * junk when parsing non-Program nodes. *)\nlet with_eof parser env =\n  let ast = parser env in\n  Expect.token env T_EOF;\n  ast\n\nlet parse_statement env fail = do_parse env (with_eof Parse.statement_list_item) fail\nlet parse_expression env fail = do_parse env (with_eof Parse.expression) fail\n\nlet parse_program fail ?(token_sink = None) ?(parse_options = None) filename content =\n  let env = init_env ~token_sink ~parse_options filename content in\n  do_parse env Parse.program fail\n\nlet program ?(fail = true) ?(token_sink = None) ?(parse_options = None) content =\n  parse_program fail ~token_sink ~parse_options None content\n\nlet program_file ?(fail = true) ?(token_sink = None) ?(parse_options = None) content filename =\n  parse_program fail ~token_sink ~parse_options filename content\n\nlet parse_annot ?(parse_options = None) filename content =\n  let env = init_env ~token_sink:None ~parse_options filename content in\n  do_parse env Parse.annot false\n\nlet package_json_file =\n  let parser env =\n    let (loc, obj, { if_expr; _ }) = Parse.object_initializer env in\n    List.iter (error_at env) if_expr;\n    (loc, obj)\n  in\n  fun ?(fail = true) ?(token_sink = None) ?(parse_options = None) content filename ->\n    let env = init_env ~token_sink ~parse_options filename content in\n    do_parse env parser fail\n\n(* even if fail=false, still raises an error on a totally invalid token, since\n   there's no legitimate fallback. 
*)\nlet json_file =\n  let null_fallback _env =\n    Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.Null; raw = \"null\"; comments = None }\n  in\n  let parser env =\n    match Peek.token env with\n    | T_LBRACKET\n    | T_LCURLY\n    | T_STRING _\n    | T_NUMBER _\n    | T_TRUE\n    | T_FALSE\n    | T_NULL ->\n      Parse.expression env\n    | T_MINUS ->\n      (match Peek.ith_token ~i:1 env with\n      | T_NUMBER _ -> Parse.expression env\n      | _ ->\n        error_unexpected ~expected:\"a number\" env;\n        with_loc null_fallback env)\n    | _ ->\n      error_unexpected ~expected:\"a valid JSON value\" env;\n      with_loc null_fallback env\n  in\n  fun ?(fail = true) ?(token_sink = None) ?(parse_options = None) content filename ->\n    let env = init_env ~token_sink ~parse_options filename content in\n    do_parse env parser fail\n\nlet jsx_pragma_expression =\n  let left_hand_side env =\n    let ast = Parse.left_hand_side (with_no_new true env) in\n    Expect.token env T_EOF;\n    ast\n  in\n  fun content filename ->\n    let env = init_env ~token_sink:None ~parse_options:None filename content in\n    do_parse env left_hand_side true\n\nlet string_is_valid_identifier_name str =\n  let lexbuf = Sedlexing.Utf8.from_string str in\n  Flow_lexer.is_valid_identifier_name lexbuf\n"
  },
  {
    "path": "analysis/vendor/js_parser/pattern_cover.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nopen Flow_ast\nopen Parser_common\nopen Parser_env\n\nmodule type COVER = sig\n  val as_expression : env -> pattern_cover -> (Loc.t, Loc.t) Expression.t\n\n  val as_pattern : ?err:Parse_error.t -> env -> pattern_cover -> (Loc.t, Loc.t) Pattern.t\n\n  val empty_errors : pattern_errors\n\n  val cons_error : Loc.t * Parse_error.t -> pattern_errors -> pattern_errors\n\n  val rev_append_errors : pattern_errors -> pattern_errors -> pattern_errors\n\n  val rev_errors : pattern_errors -> pattern_errors\nend\n\nmodule Cover (Parse : PARSER) : COVER = struct\n  let as_expression env = function\n    | Cover_expr expr -> expr\n    | Cover_patt (expr, { if_expr; if_patt = _ }) ->\n      List.iter (error_at env) if_expr;\n      expr\n\n  let as_pattern ?(err = Parse_error.InvalidLHSInAssignment) env cover =\n    let expr =\n      match cover with\n      | Cover_expr expr -> expr\n      | Cover_patt (expr, { if_expr = _; if_patt }) ->\n        List.iter (error_at env) if_patt;\n        expr\n    in\n    if not (Parse.is_assignable_lhs expr) then error_at env (fst expr, err);\n\n    (match expr with\n    | (loc, Flow_ast.Expression.Identifier (_, { Flow_ast.Identifier.name; comments = _ }))\n      when is_restricted name ->\n      strict_error_at env (loc, Parse_error.StrictLHSAssignment)\n    | _ -> ());\n\n    Parse.pattern_from_expr env expr\n\n  let empty_errors = { if_patt = []; if_expr = [] }\n\n  let cons_error err { if_patt; if_expr } = { if_patt = err :: if_patt; if_expr = err :: if_expr }\n\n  let rev_append_errors a b =\n    { if_patt = List.rev_append a.if_patt b.if_patt; if_expr = List.rev_append a.if_expr b.if_expr }\n\n  let rev_errors a = { if_patt = List.rev a.if_patt; if_expr = List.rev a.if_expr }\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/pattern_parser.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Ast = Flow_ast\nopen Token\nopen Parser_common\nopen Parser_env\nopen Flow_ast\n\nlet missing_annot env = Ast.Type.Missing (Peek.loc_skip_lookahead env)\n\nmodule Pattern (Parse : Parser_common.PARSER) (Type : Type_parser.TYPE) = struct\n  (* Reinterpret various expressions as patterns.\n   * This is not the correct thing to do and is only used for assignment\n   * expressions. This should be removed and replaced ASAP.\n   *)\n  let rec object_from_expr =\n    let rec properties env acc =\n      let open Ast.Expression.Object in\n      function\n      | [] -> List.rev acc\n      | Property (loc, prop) :: remaining ->\n        let acc =\n          match prop with\n          | Property.Init { key; value; shorthand } ->\n            let open Ast.Expression in\n            let key =\n              match key with\n              | Property.Literal lit -> Pattern.Object.Property.Literal lit\n              | Property.Identifier id -> Pattern.Object.Property.Identifier id\n              | Property.PrivateName _ -> failwith \"Internal Error: Found object private prop\"\n              | Property.Computed key -> Pattern.Object.Property.Computed key\n            in\n            let (pattern, default) =\n              match value with\n              | (_loc, Assignment { Assignment.operator = None; left; right; comments = _ }) ->\n                (left, Some right)\n              | _ -> (from_expr env value, None)\n            in\n            Pattern.Object.Property\n              (loc, { Pattern.Object.Property.key; pattern; default; shorthand })\n            :: acc\n          | Property.Method { key = _; value = (loc, _) } ->\n            error_at env (loc, Parse_error.MethodInDestructuring);\n            acc\n          | Property.Get { key = _; value = (loc, 
_); comments = _ }\n          | Property.Set { key = _; value = (loc, _); comments = _ } ->\n            (* these should never happen *)\n            error_at env (loc, Parse_error.Unexpected \"identifier\");\n            acc\n        in\n        properties env acc remaining\n      | [SpreadProperty (loc, { SpreadProperty.argument; comments })] ->\n        let acc =\n          Pattern.Object.RestElement\n            (loc, { Pattern.RestElement.argument = from_expr env argument; comments })\n          :: acc\n        in\n        properties env acc []\n      | SpreadProperty (loc, _) :: remaining ->\n        error_at env (loc, Parse_error.PropertyAfterRestElement);\n        properties env acc remaining\n    in\n    fun env (loc, { Ast.Expression.Object.properties = props; comments }) ->\n      ( loc,\n        Pattern.(\n          Object\n            { Object.properties = properties env [] props; annot = missing_annot env; comments }\n        )\n      )\n\n  and array_from_expr =\n    (* Convert an Expression to a Pattern if it is a valid\n       DestructuringAssignmentTarget, which must be an Object, Array or\n       IsValidSimpleAssignmentTarget.\n       #sec-destructuring-assignment-static-semantics-early-errors *)\n    let assignment_target env ((loc, _) as expr) =\n      if Parse.is_assignable_lhs expr then\n        Some (from_expr env expr)\n      else (\n        error_at env (loc, Parse_error.InvalidLHSInAssignment);\n        None\n      )\n    in\n    let rec elements env acc =\n      let open Ast.Expression in\n      function\n      | [] -> List.rev acc\n      | [Array.Spread (loc, { SpreadElement.argument; comments })] ->\n        (* AssignmentRestElement is a DestructuringAssignmentTarget, see\n           #prod-AssignmentRestElement *)\n        let acc =\n          match assignment_target env argument with\n          | Some argument ->\n            Pattern.Array.RestElement (loc, { Pattern.RestElement.argument; comments }) :: acc\n          | None -> acc\n  
      in\n        elements env acc []\n      | Array.Spread (loc, _) :: remaining ->\n        error_at env (loc, Parse_error.ElementAfterRestElement);\n        elements env acc remaining\n      | Array.Expression (loc, Assignment { Assignment.operator = None; left; right; comments = _ })\n        :: remaining ->\n        (* AssignmentElement is a `DestructuringAssignmentTarget Initializer`, see\n           #prod-AssignmentElement *)\n        let acc =\n          Pattern.Array.Element\n            (loc, { Pattern.Array.Element.argument = left; default = Some right })\n          :: acc\n        in\n        elements env acc remaining\n      | Array.Expression expr :: remaining ->\n        (* AssignmentElement is a DestructuringAssignmentTarget, see\n           #prod-AssignmentElement *)\n        let acc =\n          match assignment_target env expr with\n          | Some ((loc, _) as expr) ->\n            let element =\n              Pattern.Array.Element (loc, { Pattern.Array.Element.argument = expr; default = None })\n            in\n            element :: acc\n          | None -> acc\n        in\n        elements env acc remaining\n      | Array.Hole loc :: remaining -> elements env (Pattern.Array.Hole loc :: acc) remaining\n    in\n    fun env (loc, { Ast.Expression.Array.elements = elems; comments }) ->\n      ( loc,\n        Pattern.Array\n          { Pattern.Array.elements = elements env [] elems; annot = missing_annot env; comments }\n      )\n\n  and from_expr env (loc, expr) =\n    let open Ast.Expression in\n    match expr with\n    | Object obj -> object_from_expr env (loc, obj)\n    | Array arr -> array_from_expr env (loc, arr)\n    | Identifier ((id_loc, { Identifier.name = string_val; comments = _ }) as name) ->\n      (* per #sec-destructuring-assignment-static-semantics-early-errors,\n         it is a syntax error if IsValidSimpleAssignmentTarget of this\n         IdentifierReference is false. 
That happens when `string_val` is\n         \"eval\" or \"arguments\" in strict mode. *)\n      if in_strict_mode env && is_restricted string_val then\n        error_at env (id_loc, Parse_error.StrictLHSAssignment)\n      (* per #prod-IdentifierReference, yield is only a valid\n         IdentifierReference when [~Yield], and await is only valid\n         when [~Await]. but per #sec-identifiers-static-semantics-early-errors,\n         they are already invalid in strict mode, which we should have\n         already errored about when parsing the expression that we're now\n         converting into a pattern. *)\n      else if not (in_strict_mode env) then\n        if allow_yield env && string_val = \"yield\" then\n          error_at env (id_loc, Parse_error.YieldAsIdentifierReference)\n        else if allow_await env && string_val = \"await\" then\n          error_at env (id_loc, Parse_error.AwaitAsIdentifierReference);\n      ( loc,\n        Pattern.Identifier { Pattern.Identifier.name; annot = missing_annot env; optional = false }\n      )\n    | expr -> (loc, Pattern.Expression (loc, expr))\n\n  (* Parse object destructuring pattern *)\n  let rec object_ restricted_error =\n    let rest_property env =\n      let leading = Peek.comments env in\n      let (loc, argument) =\n        with_loc\n          (fun env ->\n            Expect.token env T_ELLIPSIS;\n            pattern env restricted_error)\n          env\n      in\n      Pattern.Object.RestElement\n        ( loc,\n          { Pattern.RestElement.argument; comments = Flow_ast_utils.mk_comments_opt ~leading () }\n        )\n    in\n    let property_default env =\n      match Peek.token env with\n      | T_ASSIGN ->\n        Expect.token env T_ASSIGN;\n        Some (Parse.assignment env)\n      | _ -> None\n    in\n    let rec property env =\n      if Peek.token env = T_ELLIPSIS then\n        Some (rest_property env)\n      else\n        let start_loc = Peek.loc env in\n        let raw_key = Parse.object_key env 
in\n        match Peek.token env with\n        | T_COLON ->\n          Expect.token env T_COLON;\n          let (loc, (pattern, default)) =\n            with_loc\n              ~start_loc\n              (fun env ->\n                let pattern = pattern env restricted_error in\n                let default = property_default env in\n                (pattern, default))\n              env\n          in\n          let key =\n            let open Ast.Expression.Object.Property in\n            match raw_key with\n            | (_, Literal lit) -> Pattern.Object.Property.Literal lit\n            | (_, Identifier id) -> Pattern.Object.Property.Identifier id\n            | (_, PrivateName _) -> failwith \"Internal Error: Found object private prop\"\n            | (_, Computed key) -> Pattern.Object.Property.Computed key\n          in\n          Some\n            Pattern.Object.(Property (loc, Property.{ key; pattern; default; shorthand = false }))\n        | _ ->\n          (match raw_key with\n          | ( _,\n              Ast.Expression.Object.Property.Identifier\n                ((id_loc, { Identifier.name = string_val; comments = _ }) as name)\n            ) ->\n            (* #sec-identifiers-static-semantics-early-errors *)\n            if is_reserved string_val && string_val <> \"yield\" && string_val <> \"await\" then\n              (* it is a syntax error if `name` is a reserved word other than await or yield *)\n              error_at env (id_loc, Parse_error.UnexpectedReserved)\n            else if is_strict_reserved string_val then\n              (* it is a syntax error if `name` is a strict reserved word, in strict mode *)\n              strict_error_at env (id_loc, Parse_error.StrictReservedWord);\n            let (loc, (pattern, default)) =\n              with_loc\n                ~start_loc\n                (fun env ->\n                  let pattern =\n                    ( id_loc,\n                      Pattern.Identifier\n                        { 
Pattern.Identifier.name; annot = missing_annot env; optional = false }\n                    )\n                  in\n                  let default = property_default env in\n                  (pattern, default))\n                env\n            in\n            Some\n              Pattern.Object.(\n                Property\n                  ( loc,\n                    { Property.key = Property.Identifier name; pattern; default; shorthand = true }\n                  )\n              )\n          | _ ->\n            error_unexpected ~expected:\"an identifier\" env;\n\n            (* invalid shorthand destructuring *)\n            None)\n    (* seen_rest is true when we've seen a rest element. rest_trailing_comma is the location of\n     * the rest element's trailing command\n     * Trailing comma: `let { ...rest, } = obj`\n     * Still invalid, but not a trailing comma: `let { ...rest, x } = obj` *)\n    and properties env ~seen_rest ~rest_trailing_comma acc =\n      match Peek.token env with\n      | T_EOF\n      | T_RCURLY ->\n        begin\n          match rest_trailing_comma with\n          | Some loc -> error_at env (loc, Parse_error.TrailingCommaAfterRestElement)\n          | None -> ()\n        end;\n        List.rev acc\n      | _ ->\n        (match property env with\n        | Some ((Pattern.Object.Property (loc, _) | Pattern.Object.RestElement (loc, _)) as prop) ->\n          let rest_trailing_comma =\n            if seen_rest then (\n              error_at env (loc, Parse_error.PropertyAfterRestElement);\n              None\n            ) else\n              rest_trailing_comma\n          in\n          let (seen_rest, rest_trailing_comma) =\n            match prop with\n            | Pattern.Object.RestElement _ ->\n              ( true,\n                if Peek.token env = T_COMMA then\n                  Some (Peek.loc env)\n                else\n                  None\n              )\n            | _ -> (seen_rest, rest_trailing_comma)\n          in\n  
        if Peek.token env <> T_RCURLY then Expect.token env T_COMMA;\n          properties env ~seen_rest ~rest_trailing_comma (prop :: acc)\n        | None -> properties env ~seen_rest ~rest_trailing_comma acc)\n    in\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_LCURLY;\n        let properties = properties env ~seen_rest:false ~rest_trailing_comma:None [] in\n        let internal = Peek.comments env in\n        Expect.token env T_RCURLY;\n        let trailing = Eat.trailing_comments env in\n        let annot =\n          if Peek.token env = T_COLON then\n            Ast.Type.Available (Type.annotation env)\n          else\n            missing_annot env\n        in\n        Pattern.Object\n          {\n            Pattern.Object.properties;\n            annot;\n            comments = Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n          }\n    )\n\n  (* Parse array destructuring pattern *)\n  and array_ restricted_error =\n    let rec elements env acc =\n      match Peek.token env with\n      | T_EOF\n      | T_RBRACKET ->\n        List.rev acc\n      | T_COMMA ->\n        let loc = Peek.loc env in\n        Expect.token env T_COMMA;\n        elements env (Pattern.Array.Hole loc :: acc)\n      | T_ELLIPSIS ->\n        let leading = Peek.comments env in\n        let (loc, argument) =\n          with_loc\n            (fun env ->\n              Expect.token env T_ELLIPSIS;\n              pattern env restricted_error)\n            env\n        in\n        let element =\n          Pattern.Array.RestElement\n            ( loc,\n              {\n                Pattern.RestElement.argument;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ();\n              }\n            )\n        in\n        (* rest elements are always last, the closing ] should be next. 
but if not,\n           error and keep going so we recover gracefully by parsing the rest of the\n           elements. *)\n        if Peek.token env <> T_RBRACKET then (\n          error_at env (loc, Parse_error.ElementAfterRestElement);\n          if Peek.token env = T_COMMA then Eat.token env\n        );\n        elements env (element :: acc)\n      | _ ->\n        let (loc, (pattern, default)) =\n          with_loc\n            (fun env ->\n              let pattern = pattern env restricted_error in\n              let default =\n                match Peek.token env with\n                | T_ASSIGN ->\n                  Expect.token env T_ASSIGN;\n                  Some (Parse.assignment env)\n                | _ -> None\n              in\n              (pattern, default))\n            env\n        in\n        let element = Pattern.Array.(Element (loc, { Element.argument = pattern; default })) in\n        if Peek.token env <> T_RBRACKET then Expect.token env T_COMMA;\n        elements env (element :: acc)\n    in\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_LBRACKET;\n        let elements = elements env [] in\n        let internal = Peek.comments env in\n        Expect.token env T_RBRACKET;\n        let annot =\n          if Peek.token env = T_COLON then\n            Ast.Type.Available (Type.annotation env)\n          else\n            missing_annot env\n        in\n        let trailing = Eat.trailing_comments env in\n        let comments =\n          Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ()\n        in\n        Pattern.Array { Pattern.Array.elements; annot; comments }\n    )\n\n  and pattern env restricted_error =\n    match Peek.token env with\n    | T_LCURLY -> object_ restricted_error env\n    | T_LBRACKET -> array_ restricted_error env\n    | _ ->\n      let (loc, id) = Parse.identifier_with_type env restricted_error in\n      (loc, Pattern.Identifier id)\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/primitive_deriving.ml",
    "content": "let equal_int (x : int) y = x = y\nlet equal_string (x : string) y = x = y\nlet equal_bool (x : bool) y = x = y\nlet equal_float (x : float) y = x = y\nlet equal_int64 (x : int64) y = x = y\n\nlet equal_option f x y =\n  match x with\n  | None -> y = None\n  | Some x -> begin\n    match y with\n    | None -> false\n    | Some y -> f x y\n  end\n\nlet compare_string (x : string) y = compare x y\n\nlet compare_option cmp x y =\n  match x with\n  | None ->\n    (match y with\n    | None -> 0\n    | Some _ -> -1)\n  | Some x ->\n    (match y with\n    | None -> 1\n    | Some y -> cmp x y)\n\nlet compare_bool (x : bool) (y : bool) = compare x y\n(* TODO : turn it into externals *)\nmodule Ppx_compare_lib = struct \n  external polymorphic_compare : 'a -> 'a -> int = \"%compare\"\n  external phys_equal : 'a -> 'a -> bool = \"%eq\"\n  \n  external ( && ) : bool -> bool -> bool = \"%sequand\"\n\n  external   polymorphic_equal : 'a -> 'a -> bool = \"%equal\"\nend  \n\n"
  },
  {
    "path": "analysis/vendor/js_parser/sedlex_LICENSE",
    "content": "The MIT License (MIT)\n\nCopyright 2005, 2014 by Alain Frisch and LexiFi.\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n\"Software\"), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\nNONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE\nLIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION\nWITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
  },
  {
    "path": "analysis/vendor/js_parser/statement_parser.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Ast = Flow_ast\nopen Token\nopen Parser_env\nopen Flow_ast\nopen Parser_common\nopen Comment_attachment\n\nmodule type STATEMENT = sig\n  val for_ : env -> (Loc.t, Loc.t) Statement.t\n\n  val if_ : env -> (Loc.t, Loc.t) Statement.t\n\n  val let_ : env -> (Loc.t, Loc.t) Statement.t\n\n  val try_ : env -> (Loc.t, Loc.t) Statement.t\n\n  val while_ : env -> (Loc.t, Loc.t) Statement.t\n\n  val with_ : env -> (Loc.t, Loc.t) Statement.t\n\n  val block : env -> (Loc.t, Loc.t) Statement.t\n\n  val break : env -> (Loc.t, Loc.t) Statement.t\n\n  val continue : env -> (Loc.t, Loc.t) Statement.t\n\n  val debugger : env -> (Loc.t, Loc.t) Statement.t\n\n  val declare : ?in_module:bool -> env -> (Loc.t, Loc.t) Statement.t\n\n  val declare_export_declaration : ?allow_export_type:bool -> env -> (Loc.t, Loc.t) Statement.t\n\n  val declare_opaque_type : env -> (Loc.t, Loc.t) Statement.t\n\n  val do_while : env -> (Loc.t, Loc.t) Statement.t\n\n  val empty : env -> (Loc.t, Loc.t) Statement.t\n\n  val export_declaration :\n    decorators:(Loc.t, Loc.t) Class.Decorator.t list -> env -> (Loc.t, Loc.t) Statement.t\n\n  val expression : env -> (Loc.t, Loc.t) Statement.t\n\n  val import_declaration : env -> (Loc.t, Loc.t) Statement.t\n\n  val interface : env -> (Loc.t, Loc.t) Statement.t\n\n  val maybe_labeled : env -> (Loc.t, Loc.t) Statement.t\n\n  val opaque_type : env -> (Loc.t, Loc.t) Statement.t\n\n  val return : env -> (Loc.t, Loc.t) Statement.t\n\n  val switch : env -> (Loc.t, Loc.t) Statement.t\n\n  val throw : env -> (Loc.t, Loc.t) Statement.t\n\n  val type_alias : env -> (Loc.t, Loc.t) Statement.t\n\n  val var : env -> (Loc.t, Loc.t) Statement.t\n\n  val const : env -> (Loc.t, Loc.t) Statement.t\nend\n\nmodule Statement\n    (Parse : PARSER)\n    (Type : 
Type_parser.TYPE)\n    (Declaration : Declaration_parser.DECLARATION)\n    (Object : Object_parser.OBJECT)\n    (Pattern_cover : Pattern_cover.COVER) : STATEMENT = struct\n  type for_lhs =\n    | For_expression of pattern_cover\n    | For_declaration of (Loc.t * (Loc.t, Loc.t) Ast.Statement.VariableDeclaration.t)\n\n  type semicolon_type =\n    | Explicit of Loc.t Comment.t list\n    | Implicit of Comment_attachment.trailing_and_remover_result\n\n  (* FunctionDeclaration is not a valid Statement, but Annex B sometimes allows it.\n     However, AsyncFunctionDeclaration and GeneratorFunctionDeclaration are never\n     allowed as statements. We still parse them as statements (and raise an error) to\n     recover gracefully. *)\n  let function_as_statement env =\n    let func = Declaration._function env in\n    ( if in_strict_mode env then\n      function_as_statement_error_at env (fst func)\n    else\n      let open Ast.Statement in\n      match func with\n      | (loc, FunctionDeclaration { Ast.Function.async = true; _ }) ->\n        error_at env (loc, Parse_error.AsyncFunctionAsStatement)\n      | (loc, FunctionDeclaration { Ast.Function.generator = true; _ }) ->\n        error_at env (loc, Parse_error.GeneratorFunctionAsStatement)\n      | _ -> ()\n    );\n    func\n\n  (* https://tc39.es/ecma262/#sec-exports-static-semantics-early-errors *)\n  let assert_identifier_name_is_identifier\n      ?restricted_error env (loc, { Ast.Identifier.name; comments = _ }) =\n    match name with\n    | \"let\" ->\n      (* \"let\" is disallowed as an identifier in a few situations. 11.6.2.1\n         lists them out. 
It is always disallowed in strict mode *)\n      if in_strict_mode env then\n        strict_error_at env (loc, Parse_error.StrictReservedWord)\n      else if no_let env then\n        error_at env (loc, Parse_error.Unexpected (Token.quote_token_value name))\n    | \"await\" ->\n      (* `allow_await` means that `await` is allowed to be a keyword,\n         which makes it illegal to use as an identifier.\n         https://tc39.github.io/ecma262/#sec-identifiers-static-semantics-early-errors *)\n      if allow_await env then error_at env (loc, Parse_error.UnexpectedReserved)\n    | \"yield\" ->\n      (* `allow_yield` means that `yield` is allowed to be a keyword,\n         which makes it illegal to use as an identifier.\n         https://tc39.github.io/ecma262/#sec-identifiers-static-semantics-early-errors *)\n      if allow_yield env then\n        error_at env (loc, Parse_error.UnexpectedReserved)\n      else\n        strict_error_at env (loc, Parse_error.StrictReservedWord)\n    | _ when is_strict_reserved name -> strict_error_at env (loc, Parse_error.StrictReservedWord)\n    | _ when is_reserved name ->\n      error_at env (loc, Parse_error.Unexpected (Token.quote_token_value name))\n    | _ ->\n      begin\n        match restricted_error with\n        | Some err when is_restricted name -> strict_error_at env (loc, err)\n        | _ -> ()\n      end\n\n  let string_literal env (loc, value, raw, octal) =\n    if octal then strict_error env Parse_error.StrictOctalLiteral;\n    let leading = Peek.comments env in\n    Expect.token env (T_STRING (loc, value, raw, octal));\n    let trailing = Eat.trailing_comments env in\n    ( loc,\n      { StringLiteral.value; raw; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n    )\n\n  (* Semicolon insertion is handled here :(. There seem to be 2 cases where\n   * semicolons are inserted. First, if we reach the EOF. 
Second, if the next\n   * token is } or is separated by a LineTerminator.\n   *)\n  let semicolon ?(expected = \"the token `;`\") ?(required = true) env =\n    match Peek.token env with\n    | T_EOF\n    | T_RCURLY ->\n      Implicit { trailing = Eat.trailing_comments env; remove_trailing = (fun x _ -> x) }\n    | T_SEMICOLON ->\n      Eat.token env;\n      (match Peek.token env with\n      | T_EOF\n      | T_RCURLY ->\n        Explicit (Eat.trailing_comments env)\n      | _ when Peek.is_line_terminator env -> Explicit (Eat.comments_until_next_line env)\n      | _ -> Explicit [])\n    | _ when Peek.is_line_terminator env ->\n      Implicit (Comment_attachment.trailing_and_remover_after_last_line env)\n    | _ ->\n      if required then error_unexpected ~expected env;\n      Explicit []\n\n  (* Consumes and returns the trailing comments after the end of a statement. Also returns\n     a remover that can remove all comments that are not trailing the previous token.\n\n     If a statement is the end of a block or file, all comments are trailing.\n     Otherwise, if a statement is followed by a new line, only comments on the current\n     line are trailing. If a statement is not followed by a new line, it does not have\n     trailing comments as they are instead leading comments for the next statement. 
*)\n  let statement_end_trailing_comments env =\n    match Peek.token env with\n    | T_EOF\n    | T_RCURLY ->\n      { trailing = Eat.trailing_comments env; remove_trailing = (fun x _ -> x) }\n    | _ when Peek.is_line_terminator env ->\n      Comment_attachment.trailing_and_remover_after_last_line env\n    | _ -> Comment_attachment.trailing_and_remover_after_last_loc env\n\n  let variable_declaration_end ~kind env declarations =\n    match semicolon env with\n    | Explicit comments -> (comments, declarations)\n    | Implicit { remove_trailing; _ } ->\n      (* Remove trailing comments from the last declarator *)\n      let declarations =\n        match List.rev declarations with\n        | [] -> []\n        | decl :: decls ->\n          let decl' =\n            remove_trailing decl (fun remover decl -> remover#variable_declarator ~kind decl)\n          in\n          List.rev (decl' :: decls)\n      in\n      ([], declarations)\n\n  let rec empty env =\n    let loc = Peek.loc env in\n    let leading = Peek.comments env in\n    Expect.token env T_SEMICOLON;\n    let { trailing; _ } = statement_end_trailing_comments env in\n    ( loc,\n      Statement.Empty\n        { Statement.Empty.comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n    )\n\n  and break env =\n    let leading = Peek.comments env in\n    let (loc, (label, trailing)) =\n      with_loc\n        (fun env ->\n          Expect.token env T_BREAK;\n          let label =\n            if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then\n              None\n            else\n              let ((_, { Identifier.name; comments = _ }) as label) = Parse.identifier env in\n              if not (SSet.mem name (labels env)) then error env (Parse_error.UnknownLabel name);\n              Some label\n          in\n          let (trailing, label) =\n            match (semicolon env, label) with\n            | (Explicit trailing, _)\n            | (Implicit { trailing; _ }, None) ->\n  
            (trailing, label)\n            | (Implicit { remove_trailing; _ }, Some label) ->\n              ([], Some (remove_trailing label (fun remover label -> remover#identifier label)))\n          in\n          (label, trailing))\n        env\n    in\n    if label = None && not (in_loop env || in_switch env) then\n      error_at env (loc, Parse_error.IllegalBreak);\n    let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in\n    (loc, Statement.Break { Statement.Break.label; comments })\n\n  and continue env =\n    let leading = Peek.comments env in\n    let (loc, (label, trailing)) =\n      with_loc\n        (fun env ->\n          Expect.token env T_CONTINUE;\n          let label =\n            if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then\n              None\n            else\n              let ((_, { Identifier.name; comments = _ }) as label) = Parse.identifier env in\n              if not (SSet.mem name (labels env)) then error env (Parse_error.UnknownLabel name);\n              Some label\n          in\n          let (trailing, label) =\n            match (semicolon env, label) with\n            | (Explicit trailing, _)\n            | (Implicit { trailing; _ }, None) ->\n              (trailing, label)\n            | (Implicit { remove_trailing; _ }, Some label) ->\n              ([], Some (remove_trailing label (fun remover label -> remover#identifier label)))\n          in\n          (label, trailing))\n        env\n    in\n    if not (in_loop env) then error_at env (loc, Parse_error.IllegalContinue);\n    ( loc,\n      Statement.Continue\n        {\n          Statement.Continue.label;\n          comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n        }\n    )\n\n  and debugger =\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_DEBUGGER;\n        let pre_semicolon_trailing =\n          if Peek.token env = T_SEMICOLON then\n            
Eat.trailing_comments env\n          else\n            []\n        in\n        let trailing =\n          match semicolon env with\n          | Explicit trailing\n          | Implicit { trailing; _ } ->\n            pre_semicolon_trailing @ trailing\n        in\n        Statement.Debugger\n          { Statement.Debugger.comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n    )\n\n  and do_while =\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_DO;\n        let body = Parse.statement (env |> with_in_loop true) in\n        (* Annex B allows labelled FunctionDeclarations (see\n           sec-labelled-function-declarations), but not in IterationStatement\n           (see sec-semantics-static-semantics-early-errors). *)\n        if (not (in_strict_mode env)) && is_labelled_function body then\n          function_as_statement_error_at env (fst body);\n        let pre_keyword_trailing = Eat.trailing_comments env in\n        Expect.token env T_WHILE;\n        let pre_cond_trailing = Eat.trailing_comments env in\n        Expect.token env T_LPAREN;\n        let test = Parse.expression env in\n        Expect.token env T_RPAREN;\n        let past_cond_trailing =\n          if Peek.token env = T_SEMICOLON then\n            Eat.trailing_comments env\n          else\n            []\n        in\n        (* The rules of automatic semicolon insertion in ES5 don't mention this,\n         * but the semicolon after a do-while loop is optional. 
This is properly\n         * specified in ES6 *)\n        let past_cond_trailing =\n          match semicolon ~required:false env with\n          | Explicit trailing -> past_cond_trailing @ trailing\n          | Implicit { trailing; _ } -> trailing\n        in\n        let trailing = pre_keyword_trailing @ pre_cond_trailing @ past_cond_trailing in\n        Statement.DoWhile\n          {\n            Statement.DoWhile.body;\n            test;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n    )\n\n  and for_ =\n    let assert_can_be_forin_or_forof env err = function\n      | (loc, { Statement.VariableDeclaration.declarations; _ }) ->\n        (* Only a single declarator is allowed, without an init. So\n         * something like\n         *\n         * for (var x in y) {}\n         *\n         * is allowed, but we disallow\n         *\n         * for (var x, y in z) {}\n         * for (var x = 42 in y) {}\n         *)\n        (match declarations with\n        | [(_, { Statement.VariableDeclaration.Declarator.init = None; _ })] -> ()\n        | _ -> error_at env (loc, err))\n    in\n    (* Annex B allows labelled FunctionDeclarations (see\n       sec-labelled-function-declarations), but not in IterationStatement\n       (see sec-semantics-static-semantics-early-errors). 
*)\n    let assert_not_labelled_function env body =\n      if (not (in_strict_mode env)) && is_labelled_function body then\n        function_as_statement_error_at env (fst body)\n      else\n        ()\n    in\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_FOR;\n        let async = allow_await env && Eat.maybe env T_AWAIT in\n        let leading = leading @ Peek.comments env in\n        Expect.token env T_LPAREN;\n        let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n        let (init, errs) =\n          let env = env |> with_no_in true in\n          match Peek.token env with\n          | T_SEMICOLON -> (None, [])\n          | T_LET ->\n            let (loc, (declarations, leading, errs)) = with_loc Declaration.let_ env in\n            ( Some\n                (For_declaration\n                   ( loc,\n                     {\n                       Statement.VariableDeclaration.kind = Statement.VariableDeclaration.Let;\n                       declarations;\n                       comments = Flow_ast_utils.mk_comments_opt ~leading ();\n                     }\n                   )\n                ),\n              errs\n            )\n          | T_CONST ->\n            let (loc, (declarations, leading, errs)) = with_loc Declaration.const env in\n            ( Some\n                (For_declaration\n                   ( loc,\n                     {\n                       Statement.VariableDeclaration.kind = Statement.VariableDeclaration.Const;\n                       declarations;\n                       comments = Flow_ast_utils.mk_comments_opt ~leading ();\n                     }\n                   )\n                ),\n              errs\n            )\n          | T_VAR ->\n            let (loc, (declarations, leading, errs)) = with_loc Declaration.var env in\n            ( Some\n                (For_declaration\n                   ( loc,\n                     {\n                       
Statement.VariableDeclaration.kind = Statement.VariableDeclaration.Var;\n                       declarations;\n                       comments = Flow_ast_utils.mk_comments_opt ~leading ();\n                     }\n                   )\n                ),\n              errs\n            )\n          | _ ->\n            let expr = Parse.expression_or_pattern (env |> with_no_let true) in\n            (Some (For_expression expr), [])\n        in\n        match Peek.token env with\n        | T_OF ->\n          (* This is a for of loop *)\n          let left =\n            match init with\n            | Some (For_declaration decl) ->\n              assert_can_be_forin_or_forof env Parse_error.InvalidLHSInForOf decl;\n              Statement.ForOf.LeftDeclaration decl\n            | Some (For_expression expr) ->\n              (* #sec-for-in-and-for-of-statements-static-semantics-early-errors *)\n              let patt = Pattern_cover.as_pattern ~err:Parse_error.InvalidLHSInForOf env expr in\n              Statement.ForOf.LeftPattern patt\n            | None -> assert false\n          in\n          Expect.token env T_OF;\n          let right = Parse.assignment env in\n          Expect.token env T_RPAREN;\n          let body = Parse.statement (env |> with_in_loop true) in\n          assert_not_labelled_function env body;\n          Statement.ForOf { Statement.ForOf.left; right; body; await = async; comments }\n        | T_IN ->\n          (* This is a for in loop *)\n          let left =\n            match init with\n            | Some (For_declaration decl) ->\n              assert_can_be_forin_or_forof env Parse_error.InvalidLHSInForIn decl;\n              Statement.ForIn.LeftDeclaration decl\n            | Some (For_expression expr) ->\n              (* #sec-for-in-and-for-of-statements-static-semantics-early-errors *)\n              let patt = Pattern_cover.as_pattern ~err:Parse_error.InvalidLHSInForIn env expr in\n              Statement.ForIn.LeftPattern patt\n      
      | None -> assert false\n          in\n          if async then\n            (* If `async` is true, this should have been a for-await-of loop, but we\n               recover by trying to parse like a for-in loop. *)\n            Expect.token env T_OF\n          else\n            Expect.token env T_IN;\n          let right = Parse.expression env in\n          Expect.token env T_RPAREN;\n          let body = Parse.statement (env |> with_in_loop true) in\n          assert_not_labelled_function env body;\n          Statement.ForIn { Statement.ForIn.left; right; body; each = false; comments }\n        | _ ->\n          (* This is a for loop *)\n          errs |> List.iter (error_at env);\n          if async then\n            (* If `async` is true, this should have been a for-await-of loop, but we\n               recover by trying to parse like a normal loop. *)\n            Expect.token env T_OF\n          else\n            Expect.token env T_SEMICOLON;\n          let init =\n            match init with\n            | Some (For_declaration decl) -> Some (Statement.For.InitDeclaration decl)\n            | Some (For_expression expr) ->\n              Some (Statement.For.InitExpression (Pattern_cover.as_expression env expr))\n            | None -> None\n          in\n          let test =\n            match Peek.token env with\n            | T_SEMICOLON -> None\n            | _ -> Some (Parse.expression env)\n          in\n          Expect.token env T_SEMICOLON;\n          let update =\n            match Peek.token env with\n            | T_RPAREN -> None\n            | _ -> Some (Parse.expression env)\n          in\n          Expect.token env T_RPAREN;\n          let body = Parse.statement (env |> with_in_loop true) in\n          assert_not_labelled_function env body;\n          Statement.For { Statement.For.init; test; update; body; comments }\n    )\n\n  and if_ =\n    (*\n     * Either the consequent or alternate of an if statement\n     *)\n    let if_branch env 
=\n      (* Normally this would just be a Statement, but Annex B allows\n         FunctionDeclarations in non-strict mode. See\n         sec-functiondeclarations-in-ifstatement-statement-clauses *)\n      let stmt =\n        if Peek.is_function env then\n          function_as_statement env\n        else\n          Parse.statement env\n      in\n      (* Annex B allows labelled FunctionDeclarations in non-strict mode\n         (see sec-labelled-function-declarations), but not in IfStatement\n         (see sec-if-statement-static-semantics-early-errors). *)\n      if (not (in_strict_mode env)) && is_labelled_function stmt then\n        function_as_statement_error_at env (fst stmt);\n\n      stmt\n    in\n    let alternate env =\n      let leading = Peek.comments env in\n      Expect.token env T_ELSE;\n      let body = if_branch env in\n      { Statement.If.Alternate.body; comments = Flow_ast_utils.mk_comments_opt ~leading () }\n    in\n    with_loc (fun env ->\n        let pre_if_leading = Peek.comments env in\n        Expect.token env T_IF;\n        let pre_cond_leading = Peek.comments env in\n        let leading = pre_if_leading @ pre_cond_leading in\n        Expect.token env T_LPAREN;\n        let test = Parse.expression env in\n        Expect.token env T_RPAREN;\n        let consequent = if_branch env in\n        let alternate =\n          if Peek.token env = T_ELSE then\n            Some (with_loc alternate env)\n          else\n            None\n        in\n        Statement.If\n          {\n            Statement.If.test;\n            consequent;\n            alternate;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ();\n          }\n    )\n\n  and return =\n    with_loc (fun env ->\n        if not (in_function env) then error env Parse_error.IllegalReturn;\n        let leading = Peek.comments env in\n        let start_loc = Peek.loc env in\n        Expect.token env T_RETURN;\n        let trailing =\n          if Peek.token env = T_SEMICOLON 
then\n            Eat.trailing_comments env\n          else\n            []\n        in\n        let argument =\n          if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then\n            None\n          else\n            Some (Parse.expression env)\n        in\n        let return_out = Loc.btwn start_loc (Peek.loc env) in\n        let (trailing, argument) =\n          match (semicolon env, argument) with\n          | (Explicit comments, _)\n          | (Implicit { trailing = comments; _ }, None) ->\n            (trailing @ comments, argument)\n          | (Implicit { remove_trailing; _ }, Some arg) ->\n            (trailing, Some (remove_trailing arg (fun remover arg -> remover#expression arg)))\n        in\n        Statement.Return\n          {\n            Statement.Return.argument;\n            return_out;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n    )\n\n  and switch =\n    let case ~seen_default env =\n      let leading = Peek.comments env in\n      let (test, trailing) =\n        match Peek.token env with\n        | T_DEFAULT ->\n          if seen_default then error env Parse_error.MultipleDefaultsInSwitch;\n          Expect.token env T_DEFAULT;\n          (None, Eat.trailing_comments env)\n        | _ ->\n          Expect.token env T_CASE;\n          (Some (Parse.expression env), [])\n      in\n      let seen_default = seen_default || test = None in\n      Expect.token env T_COLON;\n      let { trailing = line_end_trailing; _ } = statement_end_trailing_comments env in\n      let trailing = trailing @ line_end_trailing in\n      let term_fn = function\n        | T_RCURLY\n        | T_DEFAULT\n        | T_CASE ->\n          true\n        | _ -> false\n      in\n      let consequent = Parse.statement_list ~term_fn (env |> with_in_switch true) in\n      let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in\n      let case = { Statement.Switch.Case.test; consequent; comments } 
in\n      (case, seen_default)\n    in\n    let rec case_list env (seen_default, acc) =\n      match Peek.token env with\n      | T_EOF\n      | T_RCURLY ->\n        List.rev acc\n      | _ ->\n        let (case_, seen_default) = with_loc_extra (case ~seen_default) env in\n        let acc = case_ :: acc in\n        case_list env (seen_default, acc)\n    in\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_SWITCH;\n        Expect.token env T_LPAREN;\n        let discriminant = Parse.expression env in\n        Expect.token env T_RPAREN;\n        Expect.token env T_LCURLY;\n        let cases = case_list env (false, []) in\n        Expect.token env T_RCURLY;\n        let { trailing; _ } = statement_end_trailing_comments env in\n        Statement.Switch\n          {\n            Statement.Switch.discriminant;\n            cases;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            exhaustive_out = fst discriminant;\n          }\n    )\n\n  and throw =\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        let start_loc = Peek.loc env in\n        Expect.token env T_THROW;\n        if Peek.is_line_terminator env then error_at env (start_loc, Parse_error.NewlineAfterThrow);\n        let argument = Parse.expression env in\n        let (trailing, argument) =\n          match semicolon env with\n          | Explicit trailing -> (trailing, argument)\n          | Implicit { remove_trailing; _ } ->\n            ([], remove_trailing argument (fun remover arg -> remover#expression arg))\n        in\n        let open Statement in\n        Throw { Throw.argument; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n    )\n\n  and try_ =\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_TRY;\n        let block =\n          let block = Parse.block_body env in\n          if Peek.token env = T_CATCH then\n            
block_remove_trailing env block\n          else\n            block\n        in\n        let handler =\n          match Peek.token env with\n          | T_CATCH ->\n            let catch =\n              with_loc\n                (fun env ->\n                  let leading = Peek.comments env in\n                  Expect.token env T_CATCH;\n                  let trailing = Eat.trailing_comments env in\n                  let param =\n                    if Peek.token env = T_LPAREN then (\n                      Expect.token env T_LPAREN;\n                      let p = Some (Parse.pattern env Parse_error.StrictCatchVariable) in\n                      Expect.token env T_RPAREN;\n                      p\n                    ) else\n                      None\n                  in\n                  let body = Parse.block_body env in\n                  (* Fix trailing comment attachment if catch block is end of statement *)\n                  let body =\n                    if Peek.token env <> T_FINALLY then\n                      let { remove_trailing; _ } = statement_end_trailing_comments env in\n                      remove_trailing body (fun remover (loc, body) -> (loc, remover#block loc body))\n                    else\n                      body\n                  in\n                  {\n                    Ast.Statement.Try.CatchClause.param;\n                    body;\n                    comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n                  })\n                env\n            in\n            Some catch\n          | _ -> None\n        in\n        let finalizer =\n          match Peek.token env with\n          | T_FINALLY ->\n            Expect.token env T_FINALLY;\n            let (loc, body) = Parse.block_body env in\n            let { remove_trailing; _ } = statement_end_trailing_comments env in\n            let body = remove_trailing body (fun remover body -> remover#block loc body) in\n            Some (loc, body)\n          | 
_ -> None\n        in\n        (* No catch or finally? That's an error! *)\n        if handler = None && finalizer = None then\n          error_at env (fst block, Parse_error.NoCatchOrFinally);\n\n        Statement.Try\n          {\n            Statement.Try.block;\n            handler;\n            finalizer;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ();\n          }\n    )\n\n  and var =\n    with_loc (fun env ->\n        let kind = Statement.VariableDeclaration.Var in\n        let (declarations, leading, errs) = Declaration.var env in\n        let (trailing, declarations) = variable_declaration_end ~kind env declarations in\n        errs |> List.iter (error_at env);\n        Statement.VariableDeclaration\n          {\n            Statement.VariableDeclaration.kind;\n            declarations;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n    )\n\n  and const =\n    with_loc (fun env ->\n        let kind = Statement.VariableDeclaration.Const in\n        let (declarations, leading, errs) = Declaration.const env in\n        let (trailing, declarations) = variable_declaration_end ~kind env declarations in\n        errs |> List.iter (error_at env);\n        Statement.VariableDeclaration\n          {\n            Statement.VariableDeclaration.kind;\n            declarations;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n    )\n\n  and let_ =\n    with_loc (fun env ->\n        let kind = Statement.VariableDeclaration.Let in\n        let (declarations, leading, errs) = Declaration.let_ env in\n        let (trailing, declarations) = variable_declaration_end ~kind env declarations in\n        errs |> List.iter (error_at env);\n        Statement.VariableDeclaration\n          {\n            Statement.VariableDeclaration.kind;\n            declarations;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n    )\n\n  and 
while_ =\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_WHILE;\n        let leading = leading @ Peek.comments env in\n        Expect.token env T_LPAREN;\n        let test = Parse.expression env in\n        Expect.token env T_RPAREN;\n        let body = Parse.statement (env |> with_in_loop true) in\n        (* Annex B allows labelled FunctionDeclarations in non-strict mode\n           (see sec-labelled-function-declarations), but not in IterationStatement\n           (see sec-semantics-static-semantics-early-errors). *)\n        if (not (in_strict_mode env)) && is_labelled_function body then\n          function_as_statement_error_at env (fst body);\n        Statement.While\n          { Statement.While.test; body; comments = Flow_ast_utils.mk_comments_opt ~leading () }\n    )\n\n  and with_ env =\n    let (loc, stmt) =\n      with_loc\n        (fun env ->\n          let leading = Peek.comments env in\n          Expect.token env T_WITH;\n          let leading = leading @ Peek.comments env in\n          Expect.token env T_LPAREN;\n          let _object = Parse.expression env in\n          Expect.token env T_RPAREN;\n          let body = Parse.statement env in\n          (* Annex B allows labelled FunctionDeclarations in non-strict mode\n             (see sec-labelled-function-declarations), but not in WithStatement\n             (see sec-with-statement-static-semantics-early-errors). 
*)\n          if (not (in_strict_mode env)) && is_labelled_function body then\n            function_as_statement_error_at env (fst body);\n          Statement.With\n            { Statement.With._object; body; comments = Flow_ast_utils.mk_comments_opt ~leading () })\n        env\n    in\n    strict_error_at env (loc, Parse_error.StrictModeWith);\n    (loc, stmt)\n\n  and block env =\n    let (loc, block) = Parse.block_body env in\n    let { remove_trailing; _ } = statement_end_trailing_comments env in\n    let block = remove_trailing block (fun remover block -> remover#block loc block) in\n    (loc, Statement.Block block)\n\n  and maybe_labeled =\n    with_loc (fun env ->\n        let leading = Peek.comments env in\n        match (Parse.expression env, Peek.token env) with\n        | ((loc, Ast.Expression.Identifier label), T_COLON) ->\n          let (_, { Identifier.name; comments = _ }) = label in\n          Expect.token env T_COLON;\n          if SSet.mem name (labels env) then\n            error_at env (loc, Parse_error.Redeclaration (\"Label\", name));\n          let env = add_label env name in\n          let body =\n            (* labelled FunctionDeclarations are allowed in non-strict mode\n               (see #sec-labelled-function-declarations) *)\n            if Peek.is_function env then\n              function_as_statement env\n            else\n              Parse.statement env\n          in\n          Statement.Labeled\n            { Statement.Labeled.label; body; comments = Flow_ast_utils.mk_comments_opt ~leading () }\n        | (expression, _) ->\n          let (trailing, expression) =\n            match semicolon ~expected:\"the end of an expression statement (`;`)\" env with\n            | Explicit comments -> (comments, expression)\n            | Implicit { remove_trailing; _ } ->\n              ([], remove_trailing expression (fun remover expr -> remover#expression expr))\n          in\n          let open Statement in\n          Expression\n       
     {\n              Expression.expression;\n              directive = None;\n              comments = Flow_ast_utils.mk_comments_opt ~trailing ();\n            }\n    )\n\n  and expression =\n    with_loc (fun env ->\n        let expression = Parse.expression env in\n        let (trailing, expression) =\n          match semicolon ~expected:\"the end of an expression statement (`;`)\" env with\n          | Explicit comments -> (comments, expression)\n          | Implicit { remove_trailing; _ } ->\n            ([], remove_trailing expression (fun remover expr -> remover#expression expr))\n        in\n        let directive =\n          if allow_directive env then\n            match expression with\n            | (_, Ast.Expression.Literal { Ast.Literal.value = Ast.Literal.String _; raw; _ }) ->\n              (* the parser may recover from errors and generate unclosed strings, where\n                 the opening quote should be reliable but the closing one might not exist.\n                 be defensive. 
*)\n              if String.length raw > 1 && raw.[0] = raw.[String.length raw - 1] then\n                Some (String.sub raw 1 (String.length raw - 2))\n              else\n                None\n            | _ -> None\n          else\n            None\n        in\n        Statement.Expression\n          {\n            Statement.Expression.expression;\n            directive;\n            comments = Flow_ast_utils.mk_comments_opt ~trailing ();\n          }\n    )\n\n  and type_alias_helper ~leading env =\n    if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAlias;\n    let leading = leading @ Peek.comments env in\n    Expect.token env T_TYPE;\n    Eat.push_lex_mode env Lex_mode.TYPE;\n    let id =\n      let id = Type.type_identifier env in\n      if Peek.token env = T_LESS_THAN then\n        id_remove_trailing env id\n      else\n        id\n    in\n    let tparams = Type.type_params env in\n    Expect.token env T_ASSIGN;\n    let right = Type._type env in\n    Eat.pop_lex_mode env;\n    let (trailing, right) =\n      match semicolon env with\n      | Explicit comments -> (comments, right)\n      | Implicit { remove_trailing; _ } ->\n        ([], remove_trailing right (fun remover right -> remover#type_ right))\n    in\n\n    {\n      Statement.TypeAlias.id;\n      tparams;\n      right;\n      comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n    }\n\n  and declare_type_alias env =\n    with_loc\n      (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_DECLARE;\n        let type_alias = type_alias_helper ~leading env in\n        Statement.DeclareTypeAlias type_alias)\n      env\n\n  (** Type aliases squeeze into an unambiguous unused portion of the grammar: `type` is not a\n      reserved word, so `type T` is otherwise two identifiers in a row and that's never valid JS.\n      However, if there's a line separator between the two, ASI makes it valid JS, so line\n      separators are 
disallowed. *)\n  and type_alias env =\n    if Peek.ith_is_identifier ~i:1 env && not (Peek.ith_is_implicit_semicolon ~i:1 env) then\n      let (loc, type_alias) = with_loc (type_alias_helper ~leading:[]) env in\n      (loc, Statement.TypeAlias type_alias)\n    else\n      Parse.statement env\n\n  and opaque_type_helper ?(declare = false) ~leading env =\n    if not (should_parse_types env) then error env Parse_error.UnexpectedOpaqueTypeAlias;\n    let leading_opaque = leading @ Peek.comments env in\n    Expect.token env T_OPAQUE;\n    let leading_type = Peek.comments env in\n    Expect.token env T_TYPE;\n    let leading = leading_opaque @ leading_type in\n    Eat.push_lex_mode env Lex_mode.TYPE;\n    let id =\n      let id = Type.type_identifier env in\n      if Peek.token env = T_LESS_THAN then\n        id_remove_trailing env id\n      else\n        id\n    in\n    let tparams = Type.type_params env in\n    let supertype =\n      match Peek.token env with\n      | T_COLON ->\n        Expect.token env T_COLON;\n        Some (Type._type env)\n      | _ -> None\n    in\n    let impltype =\n      if declare then\n        match Peek.token env with\n        | T_ASSIGN ->\n          error env Parse_error.DeclareOpaqueTypeInitializer;\n          Eat.token env;\n          if Peek.token env = T_SEMICOLON || Peek.is_implicit_semicolon env then\n            None\n          else\n            Some (Type._type env)\n        | _ -> None\n      else (\n        Expect.token env T_ASSIGN;\n        Some (Type._type env)\n      )\n    in\n    Eat.pop_lex_mode env;\n    let (trailing, id, tparams, supertype, impltype) =\n      match (semicolon env, tparams, supertype, impltype) with\n      (* opaque type Foo = Bar; *)\n      | (Explicit comments, _, _, _) -> (comments, id, tparams, supertype, impltype)\n      (* opaque type Foo = Bar *)\n      | (Implicit { remove_trailing; _ }, _, _, Some impl) ->\n        ( [],\n          id,\n          tparams,\n          supertype,\n          Some 
(remove_trailing impl (fun remover impl -> remover#type_ impl))\n        )\n      (* opaque type Foo: Super *)\n      | (Implicit { remove_trailing; _ }, _, Some super, None) ->\n        ( [],\n          id,\n          tparams,\n          Some (remove_trailing super (fun remover super -> remover#type_ super)),\n          None\n        )\n      (* opaque type Foo<T> *)\n      | (Implicit { remove_trailing; _ }, Some tparams, None, None) ->\n        ( [],\n          id,\n          Some (remove_trailing tparams (fun remover tparams -> remover#type_params tparams)),\n          None,\n          None\n        )\n      (* declare opaque type Foo *)\n      | (Implicit { remove_trailing; _ }, None, None, None) ->\n        ([], remove_trailing id (fun remover id -> remover#identifier id), None, None, None)\n    in\n\n    {\n      Statement.OpaqueType.id;\n      tparams;\n      impltype;\n      supertype;\n      comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n    }\n\n  and declare_opaque_type env =\n    with_loc\n      (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_DECLARE;\n        let opaque_t = opaque_type_helper ~declare:true ~leading env in\n        Statement.DeclareOpaqueType opaque_t)\n      env\n\n  and opaque_type env =\n    match Peek.ith_token ~i:1 env with\n    | T_TYPE ->\n      let (loc, opaque_t) = with_loc (opaque_type_helper ~declare:false ~leading:[]) env in\n      (loc, Statement.OpaqueType opaque_t)\n    | _ -> Parse.statement env\n\n  and interface_helper ~leading env =\n    if not (should_parse_types env) then error env Parse_error.UnexpectedTypeInterface;\n    let leading = leading @ Peek.comments env in\n    Expect.token env T_INTERFACE;\n    let id =\n      let id = Type.type_identifier env in\n      if Peek.token env = T_EXTENDS then\n        id\n      else\n        id_remove_trailing env id\n    in\n    let tparams =\n      let tparams = Type.type_params env in\n      if Peek.token env = 
T_EXTENDS then\n        tparams\n      else\n        type_params_remove_trailing env tparams\n    in\n    let (extends, body) = Type.interface_helper env in\n    let { remove_trailing; _ } = statement_end_trailing_comments env in\n    let body =\n      remove_trailing body (fun remover (loc, body) -> (loc, remover#object_type loc body))\n    in\n\n    {\n      Statement.Interface.id;\n      tparams;\n      body;\n      extends;\n      comments = Flow_ast_utils.mk_comments_opt ~leading ();\n    }\n\n  and declare_interface env =\n    with_loc\n      (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_DECLARE;\n        let iface = interface_helper ~leading env in\n        Statement.DeclareInterface iface)\n      env\n\n  and interface env =\n    (* disambiguate between a value named `interface`, like `var interface = 1; interface++`,\n       and an interface declaration like `interface Foo {}`.` *)\n    if Peek.ith_is_identifier_name ~i:1 env then\n      let (loc, iface) = with_loc (interface_helper ~leading:[]) env in\n      (loc, Statement.InterfaceDeclaration iface)\n    else\n      expression env\n\n  and declare_class =\n    let rec mixins env acc =\n      let super = Type.generic env in\n      let acc = super :: acc in\n      match Peek.token env with\n      | T_COMMA ->\n        Expect.token env T_COMMA;\n        mixins env acc\n      | _ -> List.rev acc\n      (* This is identical to `interface`, except that mixins are allowed *)\n    in\n    fun ~leading env ->\n      let env = env |> with_strict true in\n      let leading = leading @ Peek.comments env in\n      Expect.token env T_CLASS;\n      let id =\n        let id = Parse.identifier env in\n        match Peek.token env with\n        | T_LESS_THAN\n        | T_LCURLY ->\n          id_remove_trailing env id\n        | _ -> id\n      in\n      let tparams =\n        let tparams = Type.type_params env in\n        match Peek.token env with\n        | T_LCURLY -> 
type_params_remove_trailing env tparams\n        | _ -> tparams\n      in\n      let extends =\n        if Eat.maybe env T_EXTENDS then\n          let extends = Type.generic env in\n          match Peek.token env with\n          | T_LCURLY -> Some (generic_type_remove_trailing env extends)\n          | _ -> Some extends\n        else\n          None\n      in\n      let mixins =\n        match Peek.token env with\n        | T_IDENTIFIER { raw = \"mixins\"; _ } ->\n          Eat.token env;\n          let mixins = mixins env [] in\n          (match Peek.token env with\n          | T_LCURLY -> generic_type_list_remove_trailing env mixins\n          | _ -> mixins)\n        | _ -> []\n      in\n      let implements =\n        match Peek.token env with\n        | T_IMPLEMENTS ->\n          let implements = Object.class_implements env ~attach_leading:false in\n          (match Peek.token env with\n          | T_LCURLY -> Some (class_implements_remove_trailing env implements)\n          | _ -> Some implements)\n        | _ -> None\n      in\n      let body = Type._object ~is_class:true env in\n      let { remove_trailing; _ } = statement_end_trailing_comments env in\n      let body =\n        remove_trailing body (fun remover (loc, body) -> (loc, remover#object_type loc body))\n      in\n      let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n      Statement.DeclareClass.{ id; tparams; body; extends; mixins; implements; comments }\n\n  and declare_class_statement env =\n    with_loc\n      (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_DECLARE;\n        let fn = declare_class ~leading env in\n        Statement.DeclareClass fn)\n      env\n\n  and declare_function ?(leading = []) env =\n    let leading = leading @ Peek.comments env in\n    Expect.token env T_FUNCTION;\n    let id = id_remove_trailing env (Parse.identifier env) in\n    let annot =\n      with_loc\n        (fun env ->\n          let tparams = 
type_params_remove_trailing env (Type.type_params env) in\n          let params = Type.function_param_list env in\n          Expect.token env T_COLON;\n          let return =\n            let return = Type._type env in\n            let has_predicate =\n              Eat.push_lex_mode env Lex_mode.TYPE;\n              let type_token = Peek.token env in\n              Eat.pop_lex_mode env;\n              type_token = T_CHECKS\n            in\n            if has_predicate then\n              type_remove_trailing env return\n            else\n              return\n          in\n          Ast.Type.(Function { Function.params; return; tparams; comments = None }))\n        env\n    in\n    let predicate = Type.predicate_opt env in\n    let (trailing, annot, predicate) =\n      match (semicolon env, predicate) with\n      | (Explicit comments, _) -> (comments, annot, predicate)\n      | (Implicit { remove_trailing; _ }, None) ->\n        ([], remove_trailing annot (fun remover annot -> remover#type_ annot), None)\n      | (Implicit { remove_trailing; _ }, Some pred) ->\n        ([], annot, Some (remove_trailing pred (fun remover pred -> remover#predicate pred)))\n    in\n    let annot = (fst annot, annot) in\n\n    {\n      Statement.DeclareFunction.id;\n      annot;\n      predicate;\n      comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n    }\n\n  and declare_function_statement env =\n    with_loc\n      (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_DECLARE;\n        begin\n          match Peek.token env with\n          | T_ASYNC ->\n            error env Parse_error.DeclareAsync;\n            Expect.token env T_ASYNC\n          | _ -> ()\n        end;\n        let fn = declare_function ~leading env in\n        Statement.DeclareFunction fn)\n      env\n\n  and declare_var env leading =\n    let leading = leading @ Peek.comments env in\n    Expect.token env T_VAR;\n    let name = Parse.identifier 
~restricted_error:Parse_error.StrictVarName env in\n    let annot = Type.annotation env in\n    let (trailing, name, annot) =\n      match semicolon env with\n      (* declare var x; *)\n      | Explicit trailing -> (trailing, name, annot)\n      (* declare var x *)\n      | Implicit { remove_trailing; _ } ->\n        ([], name, remove_trailing annot (fun remover annot -> remover#type_annotation annot))\n    in\n\n    {\n      Statement.DeclareVariable.id = name;\n      annot;\n      comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n    }\n\n  and declare_var_statement env =\n    with_loc\n      (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_DECLARE;\n        let var = declare_var env leading in\n        Statement.DeclareVariable var)\n      env\n\n  and declare_module =\n    let rec module_items env ~module_kind acc =\n      match Peek.token env with\n      | T_EOF\n      | T_RCURLY ->\n        (module_kind, List.rev acc)\n      | _ ->\n        let stmt = declare ~in_module:true env in\n        (* TODO: This is a semantic analysis and shouldn't be in the parser *)\n        let module_kind =\n          let open Statement in\n          let (_loc, stmt) = stmt in\n          match (module_kind, stmt) with\n          (*\n           * The first time we see either a `declare export` or a\n           * `declare module.exports`, we lock in the kind of the module.\n           *\n           * `declare export type` and `declare export interface` are the two\n           * exceptions to this rule because they are valid in both CommonJS\n           * and ES modules (and thus do not indicate an intent for either).\n           *)\n          | (None, DeclareModuleExports _) -> Some DeclareModule.CommonJS\n          | (None, DeclareExportDeclaration { DeclareExportDeclaration.declaration; _ }) ->\n            (match declaration with\n            | Some (DeclareExportDeclaration.NamedType _)\n            | Some 
(DeclareExportDeclaration.Interface _) ->\n              module_kind\n            | _ -> Some DeclareModule.ES)\n          (*\n           * There should never be more than one `declare module.exports`\n           * statement *)\n          | (Some DeclareModule.CommonJS, DeclareModuleExports _) ->\n            error env Parse_error.DuplicateDeclareModuleExports;\n            module_kind\n          (*\n           * It's never ok to mix and match `declare export` and\n           * `declare module.exports` in the same module because it leaves the\n           * kind of the module (CommonJS vs ES) ambiguous.\n           *\n           * The 1 exception to this rule is that `export type/interface` are\n           * both ok in CommonJS modules.\n           *)\n          | (Some DeclareModule.ES, DeclareModuleExports _) ->\n            error env Parse_error.AmbiguousDeclareModuleKind;\n            module_kind\n          | ( Some DeclareModule.CommonJS,\n              DeclareExportDeclaration { DeclareExportDeclaration.declaration; _ }\n            ) ->\n            (match declaration with\n            | Some (DeclareExportDeclaration.NamedType _)\n            | Some (DeclareExportDeclaration.Interface _) ->\n              ()\n            | _ -> error env Parse_error.AmbiguousDeclareModuleKind);\n            module_kind\n          | _ -> module_kind\n        in\n        module_items env ~module_kind (stmt :: acc)\n    in\n    let declare_module_ ~leading env =\n      let id =\n        match Peek.token env with\n        | T_STRING str ->\n          Statement.DeclareModule.Literal\n            (string_literal_remove_trailing env (string_literal env str))\n        | _ -> Statement.DeclareModule.Identifier (id_remove_trailing env (Parse.identifier env))\n      in\n      let (body, module_kind) =\n        with_loc_extra\n          (fun env ->\n            let leading = Peek.comments env in\n            Expect.token env T_LCURLY;\n            let (module_kind, body) = module_items 
env ~module_kind:None [] in\n            let internal =\n              if body = [] then\n                Peek.comments env\n              else\n                []\n            in\n            Expect.token env T_RCURLY;\n            let { trailing; _ } = statement_end_trailing_comments env in\n            let comments =\n              Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ()\n            in\n            let body = { Statement.Block.body; comments } in\n            (body, module_kind))\n          env\n      in\n      let kind =\n        match module_kind with\n        | Some k -> k\n        | None -> Statement.DeclareModule.CommonJS\n      in\n      let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n      Statement.(DeclareModule DeclareModule.{ id; body; kind; comments })\n    in\n    fun ~in_module env ->\n      let start_loc = Peek.loc env in\n      let leading = Peek.comments env in\n      Expect.token env T_DECLARE;\n      let leading = leading @ Peek.comments env in\n      Expect.identifier env \"module\";\n      if in_module || Peek.token env = T_PERIOD then\n        with_loc ~start_loc (declare_module_exports ~leading) env\n      else\n        with_loc ~start_loc (declare_module_ ~leading) env\n\n  and declare_module_exports ~leading env =\n    let leading_period = Peek.comments env in\n    Expect.token env T_PERIOD;\n    let leading_exports = Peek.comments env in\n    Expect.identifier env \"exports\";\n    let leading_annot = Peek.comments env in\n    let leading = List.concat [leading; leading_period; leading_exports; leading_annot] in\n    let annot = Type.annotation env in\n    let (annot, trailing) =\n      match semicolon env with\n      | Explicit trailing -> (annot, trailing)\n      | Implicit { remove_trailing; _ } ->\n        (remove_trailing annot (fun remover annot -> remover#type_annotation annot), [])\n    in\n    let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in\n    
Statement.DeclareModuleExports { Statement.DeclareModuleExports.annot; comments }\n\n  and declare ?(in_module = false) env =\n    if not (should_parse_types env) then error env Parse_error.UnexpectedTypeDeclaration;\n\n    (* eventually, just emit a wrapper AST node *)\n    match Peek.ith_token ~i:1 env with\n    | T_CLASS -> declare_class_statement env\n    | T_INTERFACE -> declare_interface env\n    | T_TYPE ->\n      (match Peek.token env with\n      | T_IMPORT when in_module -> import_declaration env\n      | _ -> declare_type_alias env)\n    | T_OPAQUE -> declare_opaque_type env\n    | T_TYPEOF when Peek.token env = T_IMPORT -> import_declaration env\n    | T_FUNCTION\n    | T_ASYNC ->\n      declare_function_statement env\n    | T_VAR -> declare_var_statement env\n    | T_EXPORT when in_module -> declare_export_declaration ~allow_export_type:in_module env\n    | T_IDENTIFIER { raw = \"module\"; _ } -> declare_module ~in_module env\n    | _ when in_module ->\n      (match Peek.token env with\n      | T_IMPORT ->\n        error env Parse_error.InvalidNonTypeImportInDeclareModule;\n        Parse.statement env\n      | _ ->\n        (* Oh boy, found some bad stuff in a declare module. 
Let's just\n         * pretend it's a declare var (arbitrary choice) *)\n        declare_var_statement env)\n    | _ -> Parse.statement env\n\n  and export_source env =\n    Expect.identifier env \"from\";\n    match Peek.token env with\n    | T_STRING str -> string_literal env str\n    | _ ->\n      (* Just make up a string for the error case *)\n      let ret = (Peek.loc env, { StringLiteral.value = \"\"; raw = \"\"; comments = None }) in\n      error_unexpected ~expected:\"a string\" env;\n      ret\n\n  and export_source_and_semicolon env =\n    let (source_loc, source) = export_source env in\n    match semicolon env with\n    | Explicit trailing -> ((source_loc, source), trailing)\n    | Implicit { remove_trailing; _ } ->\n      ( ( source_loc,\n          remove_trailing source (fun remover source ->\n              remover#string_literal_type source_loc source\n          )\n        ),\n        []\n      )\n\n  and export_specifiers ?(preceding_comma = true) env specifiers =\n    match Peek.token env with\n    | T_EOF\n    | T_RCURLY ->\n      List.rev specifiers\n    | _ ->\n      if not preceding_comma then error env Parse_error.ExportSpecifierMissingComma;\n      let specifier =\n        with_loc\n          (fun env ->\n            let local = identifier_name env in\n            let exported =\n              match Peek.token env with\n              | T_IDENTIFIER { raw = \"as\"; _ } ->\n                Eat.token env;\n                Some (identifier_name env)\n              | _ -> None\n            in\n            { Statement.ExportNamedDeclaration.ExportSpecifier.local; exported })\n          env\n      in\n      let preceding_comma = Eat.maybe env T_COMMA in\n      export_specifiers ~preceding_comma env (specifier :: specifiers)\n\n  and assert_export_specifier_identifiers env specifiers =\n    Statement.ExportNamedDeclaration.ExportSpecifier.(\n      List.iter\n        (function\n          | (_, { local = id; exported = None }) ->\n            
assert_identifier_name_is_identifier ~restricted_error:Parse_error.StrictVarName env id\n          | _ -> ())\n        specifiers\n    )\n\n  and export_declaration ~decorators env =\n    let env = env |> with_strict true |> with_in_export true in\n    let leading = Peek.comments env in\n    let start_loc = Peek.loc env in\n    Expect.token env T_EXPORT;\n    match Peek.token env with\n    | T_DEFAULT ->\n      (* export default ... *)\n      with_loc\n        ~start_loc\n        (fun env ->\n          let open Statement.ExportDefaultDeclaration in\n          let leading = leading @ Peek.comments env in\n          let (default, ()) = with_loc (fun env -> Expect.token env T_DEFAULT) env in\n          let env = with_in_export_default true env in\n          let (declaration, trailing) =\n            if Peek.is_function env then\n              (* export default [async] function [foo] (...) { ... } *)\n              let fn = Declaration._function env in\n              (Declaration fn, [])\n            else if Peek.is_class env then\n              (* export default class foo { ... } *)\n              let _class = Object.class_declaration env decorators in\n              (Declaration _class, [])\n            else if Peek.token env = T_ENUM then\n              (* export default enum foo { ... 
} *)\n              (Declaration (Declaration.enum_declaration env), [])\n            else\n              (* export default [assignment expression]; *)\n              let expr = Parse.assignment env in\n              let (expr, trailing) =\n                match semicolon env with\n                | Explicit trailing -> (expr, trailing)\n                | Implicit { remove_trailing; _ } ->\n                  (remove_trailing expr (fun remover expr -> remover#expression expr), [])\n              in\n              (Expression expr, trailing)\n          in\n          Statement.ExportDefaultDeclaration\n            {\n              default;\n              declaration;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            })\n        env\n    | T_TYPE when Peek.ith_token ~i:1 env <> T_LCURLY ->\n      (* export type ... *)\n      with_loc\n        ~start_loc\n        (fun env ->\n          let open Statement.ExportNamedDeclaration in\n          if not (should_parse_types env) then error env Parse_error.UnexpectedTypeExport;\n          match Peek.ith_token ~i:1 env with\n          | T_MULT ->\n            Expect.token env T_TYPE;\n            let specifier_loc = Peek.loc env in\n            Expect.token env T_MULT;\n            let (source, trailing) = export_source_and_semicolon env in\n            Statement.ExportNamedDeclaration\n              {\n                declaration = None;\n                specifiers = Some (ExportBatchSpecifier (specifier_loc, None));\n                source = Some source;\n                export_kind = Statement.ExportType;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n              }\n          | T_ENUM ->\n            error env Parse_error.EnumInvalidExport;\n            Expect.token env T_TYPE;\n            Statement.ExportNamedDeclaration\n              {\n                declaration = None;\n                specifiers = None;\n                source = None;\n   
             export_kind = Statement.ExportType;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ();\n              }\n          | _ ->\n            let (loc, type_alias) = with_loc (type_alias_helper ~leading:[]) env in\n            let type_alias = (loc, Statement.TypeAlias type_alias) in\n            Statement.ExportNamedDeclaration\n              {\n                declaration = Some type_alias;\n                specifiers = None;\n                source = None;\n                export_kind = Statement.ExportType;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ();\n              })\n        env\n    | T_OPAQUE ->\n      (* export opaque type ... *)\n      with_loc\n        ~start_loc\n        (fun env ->\n          let open Statement.ExportNamedDeclaration in\n          let (loc, opaque_t) = with_loc (opaque_type_helper ~leading:[]) env in\n          let opaque_t = (loc, Statement.OpaqueType opaque_t) in\n          Statement.ExportNamedDeclaration\n            {\n              declaration = Some opaque_t;\n              specifiers = None;\n              source = None;\n              export_kind = Statement.ExportType;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            })\n        env\n    | T_INTERFACE ->\n      (* export interface I { ... 
} *)\n      with_loc\n        ~start_loc\n        (fun env ->\n          let open Statement.ExportNamedDeclaration in\n          if not (should_parse_types env) then error env Parse_error.UnexpectedTypeExport;\n          let interface =\n            let (loc, iface) = with_loc (interface_helper ~leading:[]) env in\n            (loc, Statement.InterfaceDeclaration iface)\n          in\n          Statement.ExportNamedDeclaration\n            {\n              declaration = Some interface;\n              specifiers = None;\n              source = None;\n              export_kind = Statement.ExportType;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            })\n        env\n    | _ when Peek.is_class env ->\n      with_loc\n        ~start_loc\n        (fun env ->\n          let stmt = Object.class_declaration env decorators in\n          Statement.ExportNamedDeclaration\n            {\n              Statement.ExportNamedDeclaration.declaration = Some stmt;\n              specifiers = None;\n              source = None;\n              export_kind = Statement.ExportValue;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            })\n        env\n    | _ when Peek.is_function env ->\n      with_loc\n        ~start_loc\n        (fun env ->\n          error_on_decorators env decorators;\n          let stmt = Declaration._function env in\n          Statement.ExportNamedDeclaration\n            {\n              Statement.ExportNamedDeclaration.declaration = Some stmt;\n              specifiers = None;\n              source = None;\n              export_kind = Statement.ExportValue;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            })\n        env\n    | T_LET\n    | T_CONST\n    | T_VAR ->\n      with_loc\n        ~start_loc\n        (fun env ->\n          let stmt = Parse.statement_list_item env ~decorators in\n          Statement.ExportNamedDeclaration\n            {\n              
Statement.ExportNamedDeclaration.declaration = Some stmt;\n              specifiers = None;\n              source = None;\n              export_kind = Statement.ExportValue;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            })\n        env\n    | T_ENUM when (parse_options env).enums ->\n      with_loc\n        ~start_loc\n        (fun env ->\n          let stmt = Parse.statement_list_item env ~decorators in\n          Statement.ExportNamedDeclaration\n            {\n              Statement.ExportNamedDeclaration.declaration = Some stmt;\n              specifiers = None;\n              source = None;\n              export_kind = Statement.ExportValue;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            })\n        env\n    | T_MULT ->\n      with_loc\n        ~start_loc\n        (fun env ->\n          let open Statement.ExportNamedDeclaration in\n          let loc = Peek.loc env in\n          Expect.token env T_MULT;\n          let local_name =\n            match Peek.token env with\n            | T_IDENTIFIER { raw = \"as\"; _ } ->\n              Eat.token env;\n              Some (Parse.identifier env)\n            | _ -> None\n          in\n          let specifiers = Some (ExportBatchSpecifier (loc, local_name)) in\n          let (source, trailing) = export_source_and_semicolon env in\n          Statement.ExportNamedDeclaration\n            {\n              declaration = None;\n              specifiers;\n              source = Some source;\n              export_kind = Statement.ExportValue;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            })\n        env\n    | _ ->\n      let open Statement.ExportNamedDeclaration in\n      let export_kind =\n        if Eat.maybe env T_TYPE then\n          Statement.ExportType\n        else\n          Statement.ExportValue\n      in\n      if Eat.maybe env T_LCURLY then\n        with_loc\n          ~start_loc\n          
(fun env ->\n            let specifiers = export_specifiers env [] in\n            Expect.token env T_RCURLY;\n            let (source, trailing) =\n              match Peek.token env with\n              | T_IDENTIFIER { raw = \"from\"; _ } ->\n                let (source, trailing) = export_source_and_semicolon env in\n                (Some source, trailing)\n              | _ ->\n                assert_export_specifier_identifiers env specifiers;\n                let trailing =\n                  match semicolon env with\n                  | Explicit trailing -> trailing\n                  | Implicit { trailing; _ } -> trailing\n                in\n                (None, trailing)\n            in\n            Statement.ExportNamedDeclaration\n              {\n                declaration = None;\n                specifiers = Some (ExportSpecifiers specifiers);\n                source;\n                export_kind;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n              })\n          env\n      else (\n        (* error. recover by ignoring the `export` *)\n        error_unexpected ~expected:\"a declaration, statement or export specifiers\" env;\n        Parse.statement_list_item env ~decorators\n      )\n\n  and declare_export_declaration ?(allow_export_type = false) =\n    with_loc (fun env ->\n        if not (should_parse_types env) then error env Parse_error.UnexpectedTypeDeclaration;\n        let leading = Peek.comments env in\n        Expect.token env T_DECLARE;\n        let env = env |> with_strict true |> with_in_export true in\n        let leading = leading @ Peek.comments env in\n        Expect.token env T_EXPORT;\n        Statement.DeclareExportDeclaration.(\n          match Peek.token env with\n          | T_DEFAULT ->\n            (* declare export default ... 
*)\n            let leading = leading @ Peek.comments env in\n            let (default, ()) = with_loc (fun env -> Expect.token env T_DEFAULT) env in\n            let env = with_in_export_default true env in\n            let (declaration, trailing) =\n              match Peek.token env with\n              | T_FUNCTION ->\n                (* declare export default function foo (...): ...  *)\n                let fn = with_loc declare_function env in\n                (Some (Function fn), [])\n              | T_CLASS ->\n                (* declare export default class foo { ... } *)\n                let class_ = with_loc (declare_class ~leading:[]) env in\n                (Some (Class class_), [])\n              | _ ->\n                (* declare export default [type]; *)\n                let type_ = Type._type env in\n                let (type_, trailing) =\n                  match semicolon env with\n                  | Explicit trailing -> (type_, trailing)\n                  | Implicit { remove_trailing; _ } ->\n                    (remove_trailing type_ (fun remover type_ -> remover#type_ type_), [])\n                in\n                (Some (DefaultType type_), trailing)\n            in\n            let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in\n            Statement.DeclareExportDeclaration\n              { default = Some default; declaration; specifiers = None; source = None; comments }\n          | T_LET\n          | T_CONST\n          | T_VAR\n          | T_CLASS\n          | T_FUNCTION ->\n            let declaration =\n              match Peek.token env with\n              | T_FUNCTION ->\n                (* declare export function foo (...): ...  *)\n                let fn = with_loc declare_function env in\n                Some (Function fn)\n              | T_CLASS ->\n                (* declare export class foo { ... 
} *)\n                let class_ = with_loc (declare_class ~leading:[]) env in\n                Some (Class class_)\n              | (T_LET | T_CONST | T_VAR) as token ->\n                (match token with\n                | T_LET -> error env Parse_error.DeclareExportLet\n                | T_CONST -> error env Parse_error.DeclareExportConst\n                | _ -> ());\n\n                (* declare export var foo: ... *)\n                let var = with_loc (fun env -> declare_var env []) env in\n                Some (Variable var)\n              | _ -> assert false\n            in\n            let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n            Statement.DeclareExportDeclaration\n              { default = None; declaration; specifiers = None; source = None; comments }\n          | T_MULT ->\n            (* declare export * from 'foo' *)\n            let loc = Peek.loc env in\n            Expect.token env T_MULT;\n            let local_name =\n              match Peek.token env with\n              | T_IDENTIFIER { raw = \"as\"; _ } ->\n                Eat.token env;\n                Some (Parse.identifier env)\n              | _ -> None\n            in\n            let specifiers =\n              Statement.ExportNamedDeclaration.(Some (ExportBatchSpecifier (loc, local_name)))\n            in\n            let (source, trailing) = export_source_and_semicolon env in\n            let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in\n            Statement.DeclareExportDeclaration\n              { default = None; declaration = None; specifiers; source = Some source; comments }\n          | T_TYPE when allow_export_type ->\n            (* declare export type = ... 
*)\n            let alias = with_loc (type_alias_helper ~leading:[]) env in\n            let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n            Statement.DeclareExportDeclaration\n              {\n                default = None;\n                declaration = Some (NamedType alias);\n                specifiers = None;\n                source = None;\n                comments;\n              }\n          | T_OPAQUE ->\n            (* declare export opaque type = ... *)\n            let opaque = with_loc (opaque_type_helper ~declare:true ~leading:[]) env in\n            let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n            Statement.DeclareExportDeclaration\n              {\n                default = None;\n                declaration = Some (NamedOpaqueType opaque);\n                specifiers = None;\n                source = None;\n                comments;\n              }\n          | T_INTERFACE when allow_export_type ->\n            (* declare export interface ... 
*)\n            let iface = with_loc (interface_helper ~leading:[]) env in\n            let comments = Flow_ast_utils.mk_comments_opt ~leading () in\n            Statement.DeclareExportDeclaration\n              {\n                default = None;\n                declaration = Some (Interface iface);\n                specifiers = None;\n                source = None;\n                comments;\n              }\n          | _ ->\n            (match Peek.token env with\n            | T_TYPE -> error env Parse_error.DeclareExportType\n            | T_INTERFACE -> error env Parse_error.DeclareExportInterface\n            | _ -> ());\n            Expect.token env T_LCURLY;\n            let specifiers = export_specifiers env [] in\n            Expect.token env T_RCURLY;\n            let (source, trailing) =\n              match Peek.token env with\n              | T_IDENTIFIER { raw = \"from\"; _ } ->\n                let (source, trailing) = export_source_and_semicolon env in\n                (Some source, trailing)\n              | _ ->\n                assert_export_specifier_identifiers env specifiers;\n                let trailing =\n                  match semicolon env with\n                  | Explicit trailing -> trailing\n                  | Implicit { trailing; _ } -> trailing\n                in\n                (None, trailing)\n            in\n            let comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () in\n            Statement.DeclareExportDeclaration\n              {\n                default = None;\n                declaration = None;\n                specifiers = Some (Statement.ExportNamedDeclaration.ExportSpecifiers specifiers);\n                source;\n                comments;\n              }\n        )\n    )\n\n  and import_declaration =\n    Statement.ImportDeclaration.(\n      let missing_source env =\n        (* Just make up a string for the error case *)\n        let loc = Peek.loc_skip_lookahead env in\n        (loc, { 
StringLiteral.value = \"\"; raw = \"\"; comments = None })\n      in\n      let source env =\n        match Peek.token env with\n        | T_IDENTIFIER { raw = \"from\"; _ } ->\n          Eat.token env;\n          (match Peek.token env with\n          | T_STRING str -> string_literal env str\n          | _ ->\n            error_unexpected ~expected:\"a string\" env;\n            missing_source env)\n        | _ ->\n          error_unexpected ~expected:\"the keyword `from`\" env;\n          missing_source env\n      in\n      let is_type_import = function\n        | T_TYPE\n        | T_TYPEOF ->\n          true\n        | _ -> false\n        (* `x` or `x as y` in a specifier *)\n      in\n      let with_maybe_as ~for_type ?error_if_type env =\n        let identifier env =\n          if for_type then\n            Type.type_identifier env\n          else\n            Parse.identifier env\n        in\n        match Peek.ith_token ~i:1 env with\n        | T_IDENTIFIER { raw = \"as\"; _ } ->\n          let remote = identifier_name env in\n          Eat.token env;\n\n          (* as *)\n          let local = Some (identifier env) in\n          (remote, local)\n        | T_EOF\n        | T_COMMA\n        | T_RCURLY ->\n          (identifier env, None)\n        | _ ->\n          begin\n            match (error_if_type, Peek.token env) with\n            | (Some error_if_type, T_TYPE)\n            | (Some error_if_type, T_TYPEOF) ->\n              error env error_if_type;\n              Eat.token env;\n\n              (* consume `type` or `typeof` *)\n              (Type.type_identifier env, None)\n            | _ -> (identifier env, None)\n          end\n        (*\n           ImportSpecifier[Type]:\n             [~Type] ImportedBinding\n             [~Type] IdentifierName ImportedTypeBinding\n             [~Type] IdentifierName IdentifierName ImportedBinding\n             [~Type] IdentifierName IdentifierName IdentifierName ImportedTypeBinding\n             [+Type] 
ImportedTypeBinding\n             [+Type] IdentifierName IdentifierName ImportedTypeBinding\n\n           Static Semantics:\n\n           `IdentifierName ImportedTypeBinding`:\n           - It is a Syntax Error if IdentifierName's StringValue is not \"type\" or \"typeof\"\n\n           `IdentifierName IdentifierName ImportedBinding`:\n           - It is a Syntax Error if the second IdentifierName's StringValue is not \"as\"\n\n           `IdentifierName IdentifierName IdentifierName  ImportedTypeBinding`:\n           - It is a Syntax Error if the first IdentifierName's StringValue is not \"type\"\n             or \"typeof\", and the third IdentifierName's StringValue is not \"as\"\n        *)\n      in\n\n      let specifier env =\n        let kind =\n          match Peek.token env with\n          | T_TYPE -> Some ImportType\n          | T_TYPEOF -> Some ImportTypeof\n          | _ -> None\n        in\n        if is_type_import (Peek.token env) then\n          (* consume `type`, but we don't know yet whether this is `type foo` or\n             `type as foo`. 
*)\n          let type_keyword_or_remote = identifier_name env in\n          match Peek.token env with\n          (* `type` (a value) *)\n          | T_EOF\n          | T_RCURLY\n          | T_COMMA ->\n            let remote = type_keyword_or_remote in\n            (* `type` becomes a value *)\n            assert_identifier_name_is_identifier env remote;\n            { remote; local = None; kind = None }\n          (* `type as foo` (value named `type`) or `type as,` (type named `as`) *)\n          | T_IDENTIFIER { raw = \"as\"; _ } ->\n            begin\n              match Peek.ith_token ~i:1 env with\n              | T_EOF\n              | T_RCURLY\n              | T_COMMA ->\n                (* `type as` *)\n                { remote = Type.type_identifier env; local = None; kind }\n              | T_IDENTIFIER { raw = \"as\"; _ } ->\n                (* `type as as foo` *)\n                let remote = identifier_name env in\n                (* first `as` *)\n                Eat.token env;\n\n                (* second `as` *)\n                let local = Some (Type.type_identifier env) in\n                (* `foo` *)\n                { remote; local; kind }\n              | _ ->\n                (* `type as foo` *)\n                let remote = type_keyword_or_remote in\n                (* `type` becomes a value *)\n                assert_identifier_name_is_identifier env remote;\n                Eat.token env;\n\n                (* `as` *)\n                let local = Some (Parse.identifier env) in\n                { remote; local; kind = None }\n            end\n          (* `type x`, or `type x as y` *)\n          | _ ->\n            let (remote, local) = with_maybe_as ~for_type:true env in\n            { remote; local; kind }\n        else\n          (* standard `x` or `x as y` *)\n          let (remote, local) = with_maybe_as ~for_type:false env in\n          { remote; local; kind = None }\n        (* specifier in an `import type { ... 
}` *)\n      in\n      let type_specifier env =\n        let (remote, local) =\n          with_maybe_as\n            env\n            ~for_type:true\n            ~error_if_type:Parse_error.ImportTypeShorthandOnlyInPureImport\n        in\n        { remote; local; kind = None }\n        (* specifier in an `import typeof { ... }` *)\n      in\n      let typeof_specifier env =\n        let (remote, local) =\n          with_maybe_as\n            env\n            ~for_type:true\n            ~error_if_type:Parse_error.ImportTypeShorthandOnlyInPureImport\n        in\n        { remote; local; kind = None }\n      in\n      let rec specifier_list ?(preceding_comma = true) env statement_kind acc =\n        match Peek.token env with\n        | T_EOF\n        | T_RCURLY ->\n          List.rev acc\n        | _ ->\n          if not preceding_comma then error env Parse_error.ImportSpecifierMissingComma;\n          let specifier =\n            match statement_kind with\n            | ImportType -> type_specifier env\n            | ImportTypeof -> typeof_specifier env\n            | ImportValue -> specifier env\n          in\n          let preceding_comma = Eat.maybe env T_COMMA in\n          specifier_list ~preceding_comma env statement_kind (specifier :: acc)\n      in\n      let named_or_namespace_specifier env import_kind =\n        match Peek.token env with\n        | T_MULT ->\n          let id =\n            with_loc_opt\n              (fun env ->\n                (* consume T_MULT *)\n                Eat.token env;\n                match Peek.token env with\n                | T_IDENTIFIER { raw = \"as\"; _ } ->\n                  (* consume \"as\" *)\n                  Eat.token env;\n                  (match import_kind with\n                  | ImportType\n                  | ImportTypeof ->\n                    Some (Type.type_identifier env)\n                  | ImportValue -> Some (Parse.identifier env))\n                | _ ->\n                  error_unexpected 
~expected:\"the keyword `as`\" env;\n                  None)\n              env\n          in\n          (match id with\n          | Some id -> Some (ImportNamespaceSpecifier id)\n          | None -> None)\n        | _ ->\n          Expect.token env T_LCURLY;\n          let specifiers = specifier_list env import_kind [] in\n          Expect.token env T_RCURLY;\n          Some (ImportNamedSpecifiers specifiers)\n      in\n      let semicolon_and_trailing env source =\n        match semicolon env with\n        | Explicit trailing -> (trailing, source)\n        | Implicit { remove_trailing; _ } ->\n          ( [],\n            remove_trailing source (fun remover (loc, source) ->\n                (loc, remover#string_literal_type loc source)\n            )\n          )\n      in\n      let with_specifiers import_kind env leading =\n        let specifiers = named_or_namespace_specifier env import_kind in\n        let source = source env in\n        let (trailing, source) = semicolon_and_trailing env source in\n        Statement.ImportDeclaration\n          {\n            import_kind;\n            source;\n            specifiers;\n            default = None;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n      in\n      let with_default import_kind env leading =\n        let default_specifier =\n          match import_kind with\n          | ImportType\n          | ImportTypeof ->\n            Type.type_identifier env\n          | ImportValue -> Parse.identifier env\n        in\n        let additional_specifiers =\n          match Peek.token env with\n          | T_COMMA ->\n            (* `import Foo, ...` *)\n            Expect.token env T_COMMA;\n            named_or_namespace_specifier env import_kind\n          | _ -> None\n        in\n        let source = source env in\n        let (trailing, source) = semicolon_and_trailing env source in\n        Statement.ImportDeclaration\n          {\n            import_kind;\n        
    source;\n            specifiers = additional_specifiers;\n            default = Some default_specifier;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n      in\n      with_loc (fun env ->\n          let env = env |> with_strict true in\n          let leading = Peek.comments env in\n          Expect.token env T_IMPORT;\n\n          match Peek.token env with\n          (* `import * as ns from \"ModuleName\";` *)\n          | T_MULT -> with_specifiers ImportValue env leading\n          (* `import { ... } from \"ModuleName\";` *)\n          | T_LCURLY -> with_specifiers ImportValue env leading\n          (* `import \"ModuleName\";` *)\n          | T_STRING str ->\n            let source = string_literal env str in\n            let (trailing, source) = semicolon_and_trailing env source in\n            Statement.ImportDeclaration\n              {\n                import_kind = ImportValue;\n                source;\n                specifiers = None;\n                default = None;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n              }\n          (* `import type [...] from \"ModuleName\";`\n             note that if [...] is missing, we're importing a value named `type`! *)\n          | T_TYPE when should_parse_types env ->\n            begin\n              match Peek.ith_token ~i:1 env with\n              (* `import type, { other, names } from \"ModuleName\";` *)\n              | T_COMMA\n              (* `import type from \"ModuleName\";` *)\n              | T_IDENTIFIER { raw = \"from\"; _ } ->\n                (* Importing the exported value named \"type\". 
This is not a type-import.*)\n                with_default ImportValue env leading\n              (* `import type *` is invalid, since the namespace can't be a type *)\n              | T_MULT ->\n                (* consume `type` *)\n                Eat.token env;\n\n                (* unexpected `*` *)\n                error_unexpected env;\n\n                with_specifiers ImportType env leading\n              | T_LCURLY ->\n                (* consume `type` *)\n                Eat.token env;\n\n                with_specifiers ImportType env leading\n              | _ ->\n                (* consume `type` *)\n                Eat.token env;\n\n                with_default ImportType env leading\n            end\n          (* `import typeof ... from \"ModuleName\";` *)\n          | T_TYPEOF when should_parse_types env ->\n            Expect.token env T_TYPEOF;\n            begin\n              match Peek.token env with\n              | T_MULT\n              | T_LCURLY ->\n                with_specifiers ImportTypeof env leading\n              | _ -> with_default ImportTypeof env leading\n            end\n          (* import Foo from \"ModuleName\"; *)\n          | _ -> with_default ImportValue env leading\n      )\n    )\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/token.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\nopen Primitive_deriving\n\ntype t =\n  | T_NUMBER of {\n      kind: number_type;\n      raw: string;\n    }\n  | T_BIGINT of {\n      kind: bigint_type;\n      raw: string;\n    }\n  | T_STRING of (Loc.t * string * string * bool) (* loc, value, raw, octal *)\n  | T_TEMPLATE_PART of (Loc.t * template_part * bool) (* loc, value, is_tail *)\n  | T_IDENTIFIER of {\n      loc: Loc.t;\n      value: string;\n      raw: string;\n    }\n  | T_REGEXP of Loc.t * string * string (* /pattern/flags *)\n  (* Syntax *)\n  | T_LCURLY\n  | T_RCURLY\n  | T_LCURLYBAR\n  | T_RCURLYBAR\n  | T_LPAREN\n  | T_RPAREN\n  | T_LBRACKET\n  | T_RBRACKET\n  | T_SEMICOLON\n  | T_COMMA\n  | T_PERIOD\n  | T_ARROW\n  | T_ELLIPSIS\n  | T_AT\n  | T_POUND\n  (* Keywords *)\n  | T_FUNCTION\n  | T_IF\n  | T_IN\n  | T_INSTANCEOF\n  | T_RETURN\n  | T_SWITCH\n  | T_THIS\n  | T_THROW\n  | T_TRY\n  | T_VAR\n  | T_WHILE\n  | T_WITH\n  | T_CONST\n  | T_LET\n  | T_NULL\n  | T_FALSE\n  | T_TRUE\n  | T_BREAK\n  | T_CASE\n  | T_CATCH\n  | T_CONTINUE\n  | T_DEFAULT\n  | T_DO\n  | T_FINALLY\n  | T_FOR\n  | T_CLASS\n  | T_EXTENDS\n  | T_STATIC\n  | T_ELSE\n  | T_NEW\n  | T_DELETE\n  | T_TYPEOF\n  | T_VOID\n  | T_ENUM\n  | T_EXPORT\n  | T_IMPORT\n  | T_SUPER\n  | T_IMPLEMENTS\n  | T_INTERFACE\n  | T_PACKAGE\n  | T_PRIVATE\n  | T_PROTECTED\n  | T_PUBLIC\n  | T_YIELD\n  | T_DEBUGGER\n  | T_DECLARE\n  | T_TYPE\n  | T_OPAQUE\n  | T_OF\n  | T_ASYNC\n  | T_AWAIT\n  | T_CHECKS\n  (* Operators *)\n  | T_RSHIFT3_ASSIGN\n  | T_RSHIFT_ASSIGN\n  | T_LSHIFT_ASSIGN\n  | T_BIT_XOR_ASSIGN\n  | T_BIT_OR_ASSIGN\n  | T_BIT_AND_ASSIGN\n  | T_MOD_ASSIGN\n  | T_DIV_ASSIGN\n  | T_MULT_ASSIGN\n  | T_EXP_ASSIGN\n  | T_MINUS_ASSIGN\n  | T_PLUS_ASSIGN\n  | T_NULLISH_ASSIGN\n  | T_AND_ASSIGN\n  | T_OR_ASSIGN\n  | T_ASSIGN\n  | 
T_PLING_PERIOD\n  | T_PLING_PLING\n  | T_PLING\n  | T_COLON\n  | T_OR\n  | T_AND\n  | T_BIT_OR\n  | T_BIT_XOR\n  | T_BIT_AND\n  | T_EQUAL\n  | T_NOT_EQUAL\n  | T_STRICT_EQUAL\n  | T_STRICT_NOT_EQUAL\n  | T_LESS_THAN_EQUAL\n  | T_GREATER_THAN_EQUAL\n  | T_LESS_THAN\n  | T_GREATER_THAN\n  | T_LSHIFT\n  | T_RSHIFT\n  | T_RSHIFT3\n  | T_PLUS\n  | T_MINUS\n  | T_DIV\n  | T_MULT\n  | T_EXP\n  | T_MOD\n  | T_NOT\n  | T_BIT_NOT\n  | T_INCR\n  | T_DECR\n  (* Extra tokens *)\n  | T_ERROR of string\n  | T_EOF\n  (* JSX *)\n  | T_JSX_IDENTIFIER of {\n      raw: string;\n      loc: Loc.t;\n    }\n  | T_JSX_TEXT of Loc.t * string * string (* loc, value, raw *)\n  (* Type primitives *)\n  | T_ANY_TYPE\n  | T_MIXED_TYPE\n  | T_EMPTY_TYPE\n  | T_BOOLEAN_TYPE of bool_or_boolean\n  | T_NUMBER_TYPE\n  | T_BIGINT_TYPE\n  | T_NUMBER_SINGLETON_TYPE of {\n      kind: number_type;\n      value: float;\n      raw: string;\n    }\n  | T_BIGINT_SINGLETON_TYPE of {\n      kind: bigint_type;\n      value: int64 option;\n      raw: string;\n    }\n  | T_STRING_TYPE\n  | T_VOID_TYPE\n  | T_SYMBOL_TYPE\n\n(* `bool` and `boolean` are equivalent annotations, but we need to track\n   which one was used for when it might be an identifier, as in\n   `(bool: boolean) => void`. It's lexed as two T_BOOLEAN_TYPEs, then the\n   first one is converted into an identifier. 
*)\nand bool_or_boolean =\n  | BOOL\n  | BOOLEAN\n\nand number_type =\n  | BINARY\n  | LEGACY_OCTAL\n  | LEGACY_NON_OCTAL (* NonOctalDecimalIntegerLiteral in Annex B *)\n  | OCTAL\n  | NORMAL\n\nand bigint_type =\n  | BIG_BINARY\n  | BIG_OCTAL\n  | BIG_NORMAL\n\nand template_part = {\n  cooked: string;\n  (* string after processing special chars *)\n  raw: string;\n  (* string as specified in source *)\n  literal: string; (* same as raw, plus characters like ` and ${ *)\n}\n[@@deriving_inline equal]\nlet _ = fun (_ : t) -> ()\nlet _ = fun (_ : bool_or_boolean) -> ()\nlet _ = fun (_ : number_type) -> ()\nlet _ = fun (_ : bigint_type) -> ()\nlet _ = fun (_ : template_part) -> ()\nlet rec equal =\n  (fun a__001_ ->\n     fun b__002_ ->\n       if Ppx_compare_lib.phys_equal a__001_ b__002_\n       then true\n       else\n         (match (a__001_, b__002_) with\n          | (T_NUMBER _a__003_, T_NUMBER _b__004_) ->\n              Ppx_compare_lib.(&&)\n                (equal_number_type _a__003_.kind _b__004_.kind)\n                (equal_string _a__003_.raw _b__004_.raw)\n          | (T_NUMBER _, _) -> false\n          | (_, T_NUMBER _) -> false\n          | (T_BIGINT _a__005_, T_BIGINT _b__006_) ->\n              Ppx_compare_lib.(&&)\n                (equal_bigint_type _a__005_.kind _b__006_.kind)\n                (equal_string _a__005_.raw _b__006_.raw)\n          | (T_BIGINT _, _) -> false\n          | (_, T_BIGINT _) -> false\n          | (T_STRING _a__007_, T_STRING _b__008_) ->\n              let (t__009_, t__010_, t__011_, t__012_) = _a__007_ in\n              let (t__013_, t__014_, t__015_, t__016_) = _b__008_ in\n              Ppx_compare_lib.(&&) (Loc.equal t__009_ t__013_)\n                (Ppx_compare_lib.(&&) (equal_string t__010_ t__014_)\n                   (Ppx_compare_lib.(&&) (equal_string t__011_ t__015_)\n                      (equal_bool t__012_ t__016_)))\n          | (T_STRING _, _) -> false\n          | (_, T_STRING _) -> false\n          | 
(T_TEMPLATE_PART _a__017_, T_TEMPLATE_PART _b__018_) ->\n              let (t__019_, t__020_, t__021_) = _a__017_ in\n              let (t__022_, t__023_, t__024_) = _b__018_ in\n              Ppx_compare_lib.(&&) (Loc.equal t__019_ t__022_)\n                (Ppx_compare_lib.(&&) (equal_template_part t__020_ t__023_)\n                   (equal_bool t__021_ t__024_))\n          | (T_TEMPLATE_PART _, _) -> false\n          | (_, T_TEMPLATE_PART _) -> false\n          | (T_IDENTIFIER _a__025_, T_IDENTIFIER _b__026_) ->\n              Ppx_compare_lib.(&&) (Loc.equal _a__025_.loc _b__026_.loc)\n                (Ppx_compare_lib.(&&)\n                   (equal_string _a__025_.value _b__026_.value)\n                   (equal_string _a__025_.raw _b__026_.raw))\n          | (T_IDENTIFIER _, _) -> false\n          | (_, T_IDENTIFIER _) -> false\n          | (T_REGEXP (_a__027_, _a__029_, _a__031_), T_REGEXP\n             (_b__028_, _b__030_, _b__032_)) ->\n              Ppx_compare_lib.(&&) (Loc.equal _a__027_ _b__028_)\n                (Ppx_compare_lib.(&&) (equal_string _a__029_ _b__030_)\n                   (equal_string _a__031_ _b__032_))\n          | (T_REGEXP _, _) -> false\n          | (_, T_REGEXP _) -> false\n          | (T_LCURLY, T_LCURLY) -> true\n          | (T_LCURLY, _) -> false\n          | (_, T_LCURLY) -> false\n          | (T_RCURLY, T_RCURLY) -> true\n          | (T_RCURLY, _) -> false\n          | (_, T_RCURLY) -> false\n          | (T_LCURLYBAR, T_LCURLYBAR) -> true\n          | (T_LCURLYBAR, _) -> false\n          | (_, T_LCURLYBAR) -> false\n          | (T_RCURLYBAR, T_RCURLYBAR) -> true\n          | (T_RCURLYBAR, _) -> false\n          | (_, T_RCURLYBAR) -> false\n          | (T_LPAREN, T_LPAREN) -> true\n          | (T_LPAREN, _) -> false\n          | (_, T_LPAREN) -> false\n          | (T_RPAREN, T_RPAREN) -> true\n          | (T_RPAREN, _) -> false\n          | (_, T_RPAREN) -> false\n          | (T_LBRACKET, T_LBRACKET) -> true\n          | 
(T_LBRACKET, _) -> false\n          | (_, T_LBRACKET) -> false\n          | (T_RBRACKET, T_RBRACKET) -> true\n          | (T_RBRACKET, _) -> false\n          | (_, T_RBRACKET) -> false\n          | (T_SEMICOLON, T_SEMICOLON) -> true\n          | (T_SEMICOLON, _) -> false\n          | (_, T_SEMICOLON) -> false\n          | (T_COMMA, T_COMMA) -> true\n          | (T_COMMA, _) -> false\n          | (_, T_COMMA) -> false\n          | (T_PERIOD, T_PERIOD) -> true\n          | (T_PERIOD, _) -> false\n          | (_, T_PERIOD) -> false\n          | (T_ARROW, T_ARROW) -> true\n          | (T_ARROW, _) -> false\n          | (_, T_ARROW) -> false\n          | (T_ELLIPSIS, T_ELLIPSIS) -> true\n          | (T_ELLIPSIS, _) -> false\n          | (_, T_ELLIPSIS) -> false\n          | (T_AT, T_AT) -> true\n          | (T_AT, _) -> false\n          | (_, T_AT) -> false\n          | (T_POUND, T_POUND) -> true\n          | (T_POUND, _) -> false\n          | (_, T_POUND) -> false\n          | (T_FUNCTION, T_FUNCTION) -> true\n          | (T_FUNCTION, _) -> false\n          | (_, T_FUNCTION) -> false\n          | (T_IF, T_IF) -> true\n          | (T_IF, _) -> false\n          | (_, T_IF) -> false\n          | (T_IN, T_IN) -> true\n          | (T_IN, _) -> false\n          | (_, T_IN) -> false\n          | (T_INSTANCEOF, T_INSTANCEOF) -> true\n          | (T_INSTANCEOF, _) -> false\n          | (_, T_INSTANCEOF) -> false\n          | (T_RETURN, T_RETURN) -> true\n          | (T_RETURN, _) -> false\n          | (_, T_RETURN) -> false\n          | (T_SWITCH, T_SWITCH) -> true\n          | (T_SWITCH, _) -> false\n          | (_, T_SWITCH) -> false\n          | (T_THIS, T_THIS) -> true\n          | (T_THIS, _) -> false\n          | (_, T_THIS) -> false\n          | (T_THROW, T_THROW) -> true\n          | (T_THROW, _) -> false\n          | (_, T_THROW) -> false\n          | (T_TRY, T_TRY) -> true\n          | (T_TRY, _) -> false\n          | (_, T_TRY) -> false\n          | (T_VAR, T_VAR) -> 
true\n          | (T_VAR, _) -> false\n          | (_, T_VAR) -> false\n          | (T_WHILE, T_WHILE) -> true\n          | (T_WHILE, _) -> false\n          | (_, T_WHILE) -> false\n          | (T_WITH, T_WITH) -> true\n          | (T_WITH, _) -> false\n          | (_, T_WITH) -> false\n          | (T_CONST, T_CONST) -> true\n          | (T_CONST, _) -> false\n          | (_, T_CONST) -> false\n          | (T_LET, T_LET) -> true\n          | (T_LET, _) -> false\n          | (_, T_LET) -> false\n          | (T_NULL, T_NULL) -> true\n          | (T_NULL, _) -> false\n          | (_, T_NULL) -> false\n          | (T_FALSE, T_FALSE) -> true\n          | (T_FALSE, _) -> false\n          | (_, T_FALSE) -> false\n          | (T_TRUE, T_TRUE) -> true\n          | (T_TRUE, _) -> false\n          | (_, T_TRUE) -> false\n          | (T_BREAK, T_BREAK) -> true\n          | (T_BREAK, _) -> false\n          | (_, T_BREAK) -> false\n          | (T_CASE, T_CASE) -> true\n          | (T_CASE, _) -> false\n          | (_, T_CASE) -> false\n          | (T_CATCH, T_CATCH) -> true\n          | (T_CATCH, _) -> false\n          | (_, T_CATCH) -> false\n          | (T_CONTINUE, T_CONTINUE) -> true\n          | (T_CONTINUE, _) -> false\n          | (_, T_CONTINUE) -> false\n          | (T_DEFAULT, T_DEFAULT) -> true\n          | (T_DEFAULT, _) -> false\n          | (_, T_DEFAULT) -> false\n          | (T_DO, T_DO) -> true\n          | (T_DO, _) -> false\n          | (_, T_DO) -> false\n          | (T_FINALLY, T_FINALLY) -> true\n          | (T_FINALLY, _) -> false\n          | (_, T_FINALLY) -> false\n          | (T_FOR, T_FOR) -> true\n          | (T_FOR, _) -> false\n          | (_, T_FOR) -> false\n          | (T_CLASS, T_CLASS) -> true\n          | (T_CLASS, _) -> false\n          | (_, T_CLASS) -> false\n          | (T_EXTENDS, T_EXTENDS) -> true\n          | (T_EXTENDS, _) -> false\n          | (_, T_EXTENDS) -> false\n          | (T_STATIC, T_STATIC) -> true\n          | (T_STATIC, 
_) -> false\n          | (_, T_STATIC) -> false\n          | (T_ELSE, T_ELSE) -> true\n          | (T_ELSE, _) -> false\n          | (_, T_ELSE) -> false\n          | (T_NEW, T_NEW) -> true\n          | (T_NEW, _) -> false\n          | (_, T_NEW) -> false\n          | (T_DELETE, T_DELETE) -> true\n          | (T_DELETE, _) -> false\n          | (_, T_DELETE) -> false\n          | (T_TYPEOF, T_TYPEOF) -> true\n          | (T_TYPEOF, _) -> false\n          | (_, T_TYPEOF) -> false\n          | (T_VOID, T_VOID) -> true\n          | (T_VOID, _) -> false\n          | (_, T_VOID) -> false\n          | (T_ENUM, T_ENUM) -> true\n          | (T_ENUM, _) -> false\n          | (_, T_ENUM) -> false\n          | (T_EXPORT, T_EXPORT) -> true\n          | (T_EXPORT, _) -> false\n          | (_, T_EXPORT) -> false\n          | (T_IMPORT, T_IMPORT) -> true\n          | (T_IMPORT, _) -> false\n          | (_, T_IMPORT) -> false\n          | (T_SUPER, T_SUPER) -> true\n          | (T_SUPER, _) -> false\n          | (_, T_SUPER) -> false\n          | (T_IMPLEMENTS, T_IMPLEMENTS) -> true\n          | (T_IMPLEMENTS, _) -> false\n          | (_, T_IMPLEMENTS) -> false\n          | (T_INTERFACE, T_INTERFACE) -> true\n          | (T_INTERFACE, _) -> false\n          | (_, T_INTERFACE) -> false\n          | (T_PACKAGE, T_PACKAGE) -> true\n          | (T_PACKAGE, _) -> false\n          | (_, T_PACKAGE) -> false\n          | (T_PRIVATE, T_PRIVATE) -> true\n          | (T_PRIVATE, _) -> false\n          | (_, T_PRIVATE) -> false\n          | (T_PROTECTED, T_PROTECTED) -> true\n          | (T_PROTECTED, _) -> false\n          | (_, T_PROTECTED) -> false\n          | (T_PUBLIC, T_PUBLIC) -> true\n          | (T_PUBLIC, _) -> false\n          | (_, T_PUBLIC) -> false\n          | (T_YIELD, T_YIELD) -> true\n          | (T_YIELD, _) -> false\n          | (_, T_YIELD) -> false\n          | (T_DEBUGGER, T_DEBUGGER) -> true\n          | (T_DEBUGGER, _) -> false\n          | (_, T_DEBUGGER) -> false\n 
         | (T_DECLARE, T_DECLARE) -> true\n          | (T_DECLARE, _) -> false\n          | (_, T_DECLARE) -> false\n          | (T_TYPE, T_TYPE) -> true\n          | (T_TYPE, _) -> false\n          | (_, T_TYPE) -> false\n          | (T_OPAQUE, T_OPAQUE) -> true\n          | (T_OPAQUE, _) -> false\n          | (_, T_OPAQUE) -> false\n          | (T_OF, T_OF) -> true\n          | (T_OF, _) -> false\n          | (_, T_OF) -> false\n          | (T_ASYNC, T_ASYNC) -> true\n          | (T_ASYNC, _) -> false\n          | (_, T_ASYNC) -> false\n          | (T_AWAIT, T_AWAIT) -> true\n          | (T_AWAIT, _) -> false\n          | (_, T_AWAIT) -> false\n          | (T_CHECKS, T_CHECKS) -> true\n          | (T_CHECKS, _) -> false\n          | (_, T_CHECKS) -> false\n          | (T_RSHIFT3_ASSIGN, T_RSHIFT3_ASSIGN) -> true\n          | (T_RSHIFT3_ASSIGN, _) -> false\n          | (_, T_RSHIFT3_ASSIGN) -> false\n          | (T_RSHIFT_ASSIGN, T_RSHIFT_ASSIGN) -> true\n          | (T_RSHIFT_ASSIGN, _) -> false\n          | (_, T_RSHIFT_ASSIGN) -> false\n          | (T_LSHIFT_ASSIGN, T_LSHIFT_ASSIGN) -> true\n          | (T_LSHIFT_ASSIGN, _) -> false\n          | (_, T_LSHIFT_ASSIGN) -> false\n          | (T_BIT_XOR_ASSIGN, T_BIT_XOR_ASSIGN) -> true\n          | (T_BIT_XOR_ASSIGN, _) -> false\n          | (_, T_BIT_XOR_ASSIGN) -> false\n          | (T_BIT_OR_ASSIGN, T_BIT_OR_ASSIGN) -> true\n          | (T_BIT_OR_ASSIGN, _) -> false\n          | (_, T_BIT_OR_ASSIGN) -> false\n          | (T_BIT_AND_ASSIGN, T_BIT_AND_ASSIGN) -> true\n          | (T_BIT_AND_ASSIGN, _) -> false\n          | (_, T_BIT_AND_ASSIGN) -> false\n          | (T_MOD_ASSIGN, T_MOD_ASSIGN) -> true\n          | (T_MOD_ASSIGN, _) -> false\n          | (_, T_MOD_ASSIGN) -> false\n          | (T_DIV_ASSIGN, T_DIV_ASSIGN) -> true\n          | (T_DIV_ASSIGN, _) -> false\n          | (_, T_DIV_ASSIGN) -> false\n          | (T_MULT_ASSIGN, T_MULT_ASSIGN) -> true\n          | (T_MULT_ASSIGN, _) -> false\n          | 
(_, T_MULT_ASSIGN) -> false\n          | (T_EXP_ASSIGN, T_EXP_ASSIGN) -> true\n          | (T_EXP_ASSIGN, _) -> false\n          | (_, T_EXP_ASSIGN) -> false\n          | (T_MINUS_ASSIGN, T_MINUS_ASSIGN) -> true\n          | (T_MINUS_ASSIGN, _) -> false\n          | (_, T_MINUS_ASSIGN) -> false\n          | (T_PLUS_ASSIGN, T_PLUS_ASSIGN) -> true\n          | (T_PLUS_ASSIGN, _) -> false\n          | (_, T_PLUS_ASSIGN) -> false\n          | (T_NULLISH_ASSIGN, T_NULLISH_ASSIGN) -> true\n          | (T_NULLISH_ASSIGN, _) -> false\n          | (_, T_NULLISH_ASSIGN) -> false\n          | (T_AND_ASSIGN, T_AND_ASSIGN) -> true\n          | (T_AND_ASSIGN, _) -> false\n          | (_, T_AND_ASSIGN) -> false\n          | (T_OR_ASSIGN, T_OR_ASSIGN) -> true\n          | (T_OR_ASSIGN, _) -> false\n          | (_, T_OR_ASSIGN) -> false\n          | (T_ASSIGN, T_ASSIGN) -> true\n          | (T_ASSIGN, _) -> false\n          | (_, T_ASSIGN) -> false\n          | (T_PLING_PERIOD, T_PLING_PERIOD) -> true\n          | (T_PLING_PERIOD, _) -> false\n          | (_, T_PLING_PERIOD) -> false\n          | (T_PLING_PLING, T_PLING_PLING) -> true\n          | (T_PLING_PLING, _) -> false\n          | (_, T_PLING_PLING) -> false\n          | (T_PLING, T_PLING) -> true\n          | (T_PLING, _) -> false\n          | (_, T_PLING) -> false\n          | (T_COLON, T_COLON) -> true\n          | (T_COLON, _) -> false\n          | (_, T_COLON) -> false\n          | (T_OR, T_OR) -> true\n          | (T_OR, _) -> false\n          | (_, T_OR) -> false\n          | (T_AND, T_AND) -> true\n          | (T_AND, _) -> false\n          | (_, T_AND) -> false\n          | (T_BIT_OR, T_BIT_OR) -> true\n          | (T_BIT_OR, _) -> false\n          | (_, T_BIT_OR) -> false\n          | (T_BIT_XOR, T_BIT_XOR) -> true\n          | (T_BIT_XOR, _) -> false\n          | (_, T_BIT_XOR) -> false\n          | (T_BIT_AND, T_BIT_AND) -> true\n          | (T_BIT_AND, _) -> false\n          | (_, T_BIT_AND) -> false\n          
| (T_EQUAL, T_EQUAL) -> true\n          | (T_EQUAL, _) -> false\n          | (_, T_EQUAL) -> false\n          | (T_NOT_EQUAL, T_NOT_EQUAL) -> true\n          | (T_NOT_EQUAL, _) -> false\n          | (_, T_NOT_EQUAL) -> false\n          | (T_STRICT_EQUAL, T_STRICT_EQUAL) -> true\n          | (T_STRICT_EQUAL, _) -> false\n          | (_, T_STRICT_EQUAL) -> false\n          | (T_STRICT_NOT_EQUAL, T_STRICT_NOT_EQUAL) -> true\n          | (T_STRICT_NOT_EQUAL, _) -> false\n          | (_, T_STRICT_NOT_EQUAL) -> false\n          | (T_LESS_THAN_EQUAL, T_LESS_THAN_EQUAL) -> true\n          | (T_LESS_THAN_EQUAL, _) -> false\n          | (_, T_LESS_THAN_EQUAL) -> false\n          | (T_GREATER_THAN_EQUAL, T_GREATER_THAN_EQUAL) -> true\n          | (T_GREATER_THAN_EQUAL, _) -> false\n          | (_, T_GREATER_THAN_EQUAL) -> false\n          | (T_LESS_THAN, T_LESS_THAN) -> true\n          | (T_LESS_THAN, _) -> false\n          | (_, T_LESS_THAN) -> false\n          | (T_GREATER_THAN, T_GREATER_THAN) -> true\n          | (T_GREATER_THAN, _) -> false\n          | (_, T_GREATER_THAN) -> false\n          | (T_LSHIFT, T_LSHIFT) -> true\n          | (T_LSHIFT, _) -> false\n          | (_, T_LSHIFT) -> false\n          | (T_RSHIFT, T_RSHIFT) -> true\n          | (T_RSHIFT, _) -> false\n          | (_, T_RSHIFT) -> false\n          | (T_RSHIFT3, T_RSHIFT3) -> true\n          | (T_RSHIFT3, _) -> false\n          | (_, T_RSHIFT3) -> false\n          | (T_PLUS, T_PLUS) -> true\n          | (T_PLUS, _) -> false\n          | (_, T_PLUS) -> false\n          | (T_MINUS, T_MINUS) -> true\n          | (T_MINUS, _) -> false\n          | (_, T_MINUS) -> false\n          | (T_DIV, T_DIV) -> true\n          | (T_DIV, _) -> false\n          | (_, T_DIV) -> false\n          | (T_MULT, T_MULT) -> true\n          | (T_MULT, _) -> false\n          | (_, T_MULT) -> false\n          | (T_EXP, T_EXP) -> true\n          | (T_EXP, _) -> false\n          | (_, T_EXP) -> false\n          | (T_MOD, T_MOD) -> 
true\n          | (T_MOD, _) -> false\n          | (_, T_MOD) -> false\n          | (T_NOT, T_NOT) -> true\n          | (T_NOT, _) -> false\n          | (_, T_NOT) -> false\n          | (T_BIT_NOT, T_BIT_NOT) -> true\n          | (T_BIT_NOT, _) -> false\n          | (_, T_BIT_NOT) -> false\n          | (T_INCR, T_INCR) -> true\n          | (T_INCR, _) -> false\n          | (_, T_INCR) -> false\n          | (T_DECR, T_DECR) -> true\n          | (T_DECR, _) -> false\n          | (_, T_DECR) -> false\n          | (T_ERROR _a__033_, T_ERROR _b__034_) ->\n              equal_string _a__033_ _b__034_\n          | (T_ERROR _, _) -> false\n          | (_, T_ERROR _) -> false\n          | (T_EOF, T_EOF) -> true\n          | (T_EOF, _) -> false\n          | (_, T_EOF) -> false\n          | (T_JSX_IDENTIFIER _a__035_, T_JSX_IDENTIFIER _b__036_) ->\n              Ppx_compare_lib.(&&) (equal_string _a__035_.raw _b__036_.raw)\n                (Loc.equal _a__035_.loc _b__036_.loc)\n          | (T_JSX_IDENTIFIER _, _) -> false\n          | (_, T_JSX_IDENTIFIER _) -> false\n          | (T_JSX_TEXT (_a__037_, _a__039_, _a__041_), T_JSX_TEXT\n             (_b__038_, _b__040_, _b__042_)) ->\n              Ppx_compare_lib.(&&) (Loc.equal _a__037_ _b__038_)\n                (Ppx_compare_lib.(&&) (equal_string _a__039_ _b__040_)\n                   (equal_string _a__041_ _b__042_))\n          | (T_JSX_TEXT _, _) -> false\n          | (_, T_JSX_TEXT _) -> false\n          | (T_ANY_TYPE, T_ANY_TYPE) -> true\n          | (T_ANY_TYPE, _) -> false\n          | (_, T_ANY_TYPE) -> false\n          | (T_MIXED_TYPE, T_MIXED_TYPE) -> true\n          | (T_MIXED_TYPE, _) -> false\n          | (_, T_MIXED_TYPE) -> false\n          | (T_EMPTY_TYPE, T_EMPTY_TYPE) -> true\n          | (T_EMPTY_TYPE, _) -> false\n          | (_, T_EMPTY_TYPE) -> false\n          | (T_BOOLEAN_TYPE _a__043_, T_BOOLEAN_TYPE _b__044_) ->\n              equal_bool_or_boolean _a__043_ _b__044_\n          | (T_BOOLEAN_TYPE _, 
_) -> false\n          | (_, T_BOOLEAN_TYPE _) -> false\n          | (T_NUMBER_TYPE, T_NUMBER_TYPE) -> true\n          | (T_NUMBER_TYPE, _) -> false\n          | (_, T_NUMBER_TYPE) -> false\n          | (T_BIGINT_TYPE, T_BIGINT_TYPE) -> true\n          | (T_BIGINT_TYPE, _) -> false\n          | (_, T_BIGINT_TYPE) -> false\n          | (T_NUMBER_SINGLETON_TYPE _a__045_, T_NUMBER_SINGLETON_TYPE\n             _b__046_) ->\n              Ppx_compare_lib.(&&)\n                (equal_number_type _a__045_.kind _b__046_.kind)\n                (Ppx_compare_lib.(&&)\n                   (equal_float _a__045_.value _b__046_.value)\n                   (equal_string _a__045_.raw _b__046_.raw))\n          | (T_NUMBER_SINGLETON_TYPE _, _) -> false\n          | (_, T_NUMBER_SINGLETON_TYPE _) -> false\n          | (T_BIGINT_SINGLETON_TYPE _a__047_, T_BIGINT_SINGLETON_TYPE\n             _b__048_) ->\n              Ppx_compare_lib.(&&)\n                (equal_bigint_type _a__047_.kind _b__048_.kind)\n                (Ppx_compare_lib.(&&)\n                   (equal_option equal_int64 _a__047_.value _b__048_.value)\n                   (equal_string _a__047_.raw _b__048_.raw))\n          | (T_BIGINT_SINGLETON_TYPE _, _) -> false\n          | (_, T_BIGINT_SINGLETON_TYPE _) -> false\n          | (T_STRING_TYPE, T_STRING_TYPE) -> true\n          | (T_STRING_TYPE, _) -> false\n          | (_, T_STRING_TYPE) -> false\n          | (T_VOID_TYPE, T_VOID_TYPE) -> true\n          | (T_VOID_TYPE, _) -> false\n          | (_, T_VOID_TYPE) -> false\n          | (T_SYMBOL_TYPE, T_SYMBOL_TYPE) -> true) : t -> t -> bool)\nand equal_bool_or_boolean =\n  (fun a__051_ ->\n     fun b__052_ -> Ppx_compare_lib.polymorphic_equal a__051_ b__052_ :\n  bool_or_boolean -> bool_or_boolean -> bool)\nand equal_number_type =\n  (fun a__053_ ->\n     fun b__054_ -> Ppx_compare_lib.polymorphic_equal a__053_ b__054_ :\n  number_type -> number_type -> bool)\nand equal_bigint_type =\n  (fun a__055_ ->\n     fun b__056_ -> 
Ppx_compare_lib.polymorphic_equal a__055_ b__056_ :\n  bigint_type -> bigint_type -> bool)\nand equal_template_part =\n  (fun a__057_ ->\n     fun b__058_ ->\n       if Ppx_compare_lib.phys_equal a__057_ b__058_\n       then true\n       else\n         Ppx_compare_lib.(&&) (equal_string a__057_.cooked b__058_.cooked)\n           (Ppx_compare_lib.(&&) (equal_string a__057_.raw b__058_.raw)\n              (equal_string a__057_.literal b__058_.literal)) : template_part\n                                                                  ->\n                                                                  template_part\n                                                                    ->\n                                                                    bool)\nlet _ = equal\nand _ = equal_bool_or_boolean\nand _ = equal_number_type\nand _ = equal_bigint_type\nand _ = equal_template_part\n[@@@end]\n(*****************************************************************************)\n(* Pretty printer (pretty?) 
*)\n(*****************************************************************************)\nlet token_to_string = function\n  | T_NUMBER _ -> \"T_NUMBER\"\n  | T_BIGINT _ -> \"T_BIGINT\"\n  | T_STRING _ -> \"T_STRING\"\n  | T_TEMPLATE_PART _ -> \"T_TEMPLATE_PART\"\n  | T_IDENTIFIER _ -> \"T_IDENTIFIER\"\n  | T_REGEXP _ -> \"T_REGEXP\"\n  | T_FUNCTION -> \"T_FUNCTION\"\n  | T_IF -> \"T_IF\"\n  | T_IN -> \"T_IN\"\n  | T_INSTANCEOF -> \"T_INSTANCEOF\"\n  | T_RETURN -> \"T_RETURN\"\n  | T_SWITCH -> \"T_SWITCH\"\n  | T_THIS -> \"T_THIS\"\n  | T_THROW -> \"T_THROW\"\n  | T_TRY -> \"T_TRY\"\n  | T_VAR -> \"T_VAR\"\n  | T_WHILE -> \"T_WHILE\"\n  | T_WITH -> \"T_WITH\"\n  | T_CONST -> \"T_CONST\"\n  | T_LET -> \"T_LET\"\n  | T_NULL -> \"T_NULL\"\n  | T_FALSE -> \"T_FALSE\"\n  | T_TRUE -> \"T_TRUE\"\n  | T_BREAK -> \"T_BREAK\"\n  | T_CASE -> \"T_CASE\"\n  | T_CATCH -> \"T_CATCH\"\n  | T_CONTINUE -> \"T_CONTINUE\"\n  | T_DEFAULT -> \"T_DEFAULT\"\n  | T_DO -> \"T_DO\"\n  | T_FINALLY -> \"T_FINALLY\"\n  | T_FOR -> \"T_FOR\"\n  | T_CLASS -> \"T_CLASS\"\n  | T_EXTENDS -> \"T_EXTENDS\"\n  | T_STATIC -> \"T_STATIC\"\n  | T_ELSE -> \"T_ELSE\"\n  | T_NEW -> \"T_NEW\"\n  | T_DELETE -> \"T_DELETE\"\n  | T_TYPEOF -> \"T_TYPEOF\"\n  | T_VOID -> \"T_VOID\"\n  | T_ENUM -> \"T_ENUM\"\n  | T_EXPORT -> \"T_EXPORT\"\n  | T_IMPORT -> \"T_IMPORT\"\n  | T_SUPER -> \"T_SUPER\"\n  | T_IMPLEMENTS -> \"T_IMPLEMENTS\"\n  | T_INTERFACE -> \"T_INTERFACE\"\n  | T_PACKAGE -> \"T_PACKAGE\"\n  | T_PRIVATE -> \"T_PRIVATE\"\n  | T_PROTECTED -> \"T_PROTECTED\"\n  | T_PUBLIC -> \"T_PUBLIC\"\n  | T_YIELD -> \"T_YIELD\"\n  | T_DEBUGGER -> \"T_DEBUGGER\"\n  | T_DECLARE -> \"T_DECLARE\"\n  | T_TYPE -> \"T_TYPE\"\n  | T_OPAQUE -> \"T_OPAQUE\"\n  | T_OF -> \"T_OF\"\n  | T_ASYNC -> \"T_ASYNC\"\n  | T_AWAIT -> \"T_AWAIT\"\n  | T_CHECKS -> \"T_CHECKS\"\n  | T_LCURLY -> \"T_LCURLY\"\n  | T_RCURLY -> \"T_RCURLY\"\n  | T_LCURLYBAR -> \"T_LCURLYBAR\"\n  | T_RCURLYBAR -> \"T_RCURLYBAR\"\n  | T_LPAREN -> \"T_LPAREN\"\n  | T_RPAREN 
-> \"T_RPAREN\"\n  | T_LBRACKET -> \"T_LBRACKET\"\n  | T_RBRACKET -> \"T_RBRACKET\"\n  | T_SEMICOLON -> \"T_SEMICOLON\"\n  | T_COMMA -> \"T_COMMA\"\n  | T_PERIOD -> \"T_PERIOD\"\n  | T_ARROW -> \"T_ARROW\"\n  | T_ELLIPSIS -> \"T_ELLIPSIS\"\n  | T_AT -> \"T_AT\"\n  | T_POUND -> \"T_POUND\"\n  | T_RSHIFT3_ASSIGN -> \"T_RSHIFT3_ASSIGN\"\n  | T_RSHIFT_ASSIGN -> \"T_RSHIFT_ASSIGN\"\n  | T_LSHIFT_ASSIGN -> \"T_LSHIFT_ASSIGN\"\n  | T_BIT_XOR_ASSIGN -> \"T_BIT_XOR_ASSIGN\"\n  | T_BIT_OR_ASSIGN -> \"T_BIT_OR_ASSIGN\"\n  | T_BIT_AND_ASSIGN -> \"T_BIT_AND_ASSIGN\"\n  | T_MOD_ASSIGN -> \"T_MOD_ASSIGN\"\n  | T_DIV_ASSIGN -> \"T_DIV_ASSIGN\"\n  | T_MULT_ASSIGN -> \"T_MULT_ASSIGN\"\n  | T_EXP_ASSIGN -> \"T_EXP_ASSIGN\"\n  | T_MINUS_ASSIGN -> \"T_MINUS_ASSIGN\"\n  | T_PLUS_ASSIGN -> \"T_PLUS_ASSIGN\"\n  | T_NULLISH_ASSIGN -> \"T_NULLISH_ASSIGN\"\n  | T_AND_ASSIGN -> \"T_AND_ASSIGN\"\n  | T_OR_ASSIGN -> \"T_OR_ASSIGN\"\n  | T_ASSIGN -> \"T_ASSIGN\"\n  | T_PLING_PERIOD -> \"T_PLING_PERIOD\"\n  | T_PLING_PLING -> \"T_PLING_PLING\"\n  | T_PLING -> \"T_PLING\"\n  | T_COLON -> \"T_COLON\"\n  | T_OR -> \"T_OR\"\n  | T_AND -> \"T_AND\"\n  | T_BIT_OR -> \"T_BIT_OR\"\n  | T_BIT_XOR -> \"T_BIT_XOR\"\n  | T_BIT_AND -> \"T_BIT_AND\"\n  | T_EQUAL -> \"T_EQUAL\"\n  | T_NOT_EQUAL -> \"T_NOT_EQUAL\"\n  | T_STRICT_EQUAL -> \"T_STRICT_EQUAL\"\n  | T_STRICT_NOT_EQUAL -> \"T_STRICT_NOT_EQUAL\"\n  | T_LESS_THAN_EQUAL -> \"T_LESS_THAN_EQUAL\"\n  | T_GREATER_THAN_EQUAL -> \"T_GREATER_THAN_EQUAL\"\n  | T_LESS_THAN -> \"T_LESS_THAN\"\n  | T_GREATER_THAN -> \"T_GREATER_THAN\"\n  | T_LSHIFT -> \"T_LSHIFT\"\n  | T_RSHIFT -> \"T_RSHIFT\"\n  | T_RSHIFT3 -> \"T_RSHIFT3\"\n  | T_PLUS -> \"T_PLUS\"\n  | T_MINUS -> \"T_MINUS\"\n  | T_DIV -> \"T_DIV\"\n  | T_MULT -> \"T_MULT\"\n  | T_EXP -> \"T_EXP\"\n  | T_MOD -> \"T_MOD\"\n  | T_NOT -> \"T_NOT\"\n  | T_BIT_NOT -> \"T_BIT_NOT\"\n  | T_INCR -> \"T_INCR\"\n  | T_DECR -> \"T_DECR\"\n  (* Extra tokens *)\n  | T_ERROR _ -> \"T_ERROR\"\n  | T_EOF -> \"T_EOF\"\n  | 
T_JSX_IDENTIFIER _ -> \"T_JSX_IDENTIFIER\"\n  | T_JSX_TEXT _ -> \"T_JSX_TEXT\"\n  (* Type primitives *)\n  | T_ANY_TYPE -> \"T_ANY_TYPE\"\n  | T_MIXED_TYPE -> \"T_MIXED_TYPE\"\n  | T_EMPTY_TYPE -> \"T_EMPTY_TYPE\"\n  | T_BOOLEAN_TYPE _ -> \"T_BOOLEAN_TYPE\"\n  | T_NUMBER_TYPE -> \"T_NUMBER_TYPE\"\n  | T_BIGINT_TYPE -> \"T_BIGINT_TYPE\"\n  | T_NUMBER_SINGLETON_TYPE _ -> \"T_NUMBER_SINGLETON_TYPE\"\n  | T_BIGINT_SINGLETON_TYPE _ -> \"T_BIGINT_SINGLETON_TYPE\"\n  | T_STRING_TYPE -> \"T_STRING_TYPE\"\n  | T_VOID_TYPE -> \"T_VOID_TYPE\"\n  | T_SYMBOL_TYPE -> \"T_SYMBOL_TYPE\"\n\nlet value_of_token = function\n  | T_NUMBER { raw; _ } -> raw\n  | T_BIGINT { raw; _ } -> raw\n  | T_STRING (_, _, raw, _) -> raw\n  | T_TEMPLATE_PART (_, { literal; _ }, _) -> literal\n  | T_IDENTIFIER { raw; _ } -> raw\n  | T_REGEXP (_, pattern, flags) -> \"/\" ^ pattern ^ \"/\" ^ flags\n  | T_LCURLY -> \"{\"\n  | T_RCURLY -> \"}\"\n  | T_LCURLYBAR -> \"{|\"\n  | T_RCURLYBAR -> \"|}\"\n  | T_LPAREN -> \"(\"\n  | T_RPAREN -> \")\"\n  | T_LBRACKET -> \"[\"\n  | T_RBRACKET -> \"]\"\n  | T_SEMICOLON -> \";\"\n  | T_COMMA -> \",\"\n  | T_PERIOD -> \".\"\n  | T_ARROW -> \"=>\"\n  | T_ELLIPSIS -> \"...\"\n  | T_AT -> \"@\"\n  | T_POUND -> \"#\"\n  | T_FUNCTION -> \"function\"\n  | T_IF -> \"if\"\n  | T_IN -> \"in\"\n  | T_INSTANCEOF -> \"instanceof\"\n  | T_RETURN -> \"return\"\n  | T_SWITCH -> \"switch\"\n  | T_THIS -> \"this\"\n  | T_THROW -> \"throw\"\n  | T_TRY -> \"try\"\n  | T_VAR -> \"var\"\n  | T_WHILE -> \"while\"\n  | T_WITH -> \"with\"\n  | T_CONST -> \"const\"\n  | T_LET -> \"let\"\n  | T_NULL -> \"null\"\n  | T_FALSE -> \"false\"\n  | T_TRUE -> \"true\"\n  | T_BREAK -> \"break\"\n  | T_CASE -> \"case\"\n  | T_CATCH -> \"catch\"\n  | T_CONTINUE -> \"continue\"\n  | T_DEFAULT -> \"default\"\n  | T_DO -> \"do\"\n  | T_FINALLY -> \"finally\"\n  | T_FOR -> \"for\"\n  | T_CLASS -> \"class\"\n  | T_EXTENDS -> \"extends\"\n  | T_STATIC -> \"static\"\n  | T_ELSE -> \"else\"\n  | T_NEW -> 
\"new\"\n  | T_DELETE -> \"delete\"\n  | T_TYPEOF -> \"typeof\"\n  | T_VOID -> \"void\"\n  | T_ENUM -> \"enum\"\n  | T_EXPORT -> \"export\"\n  | T_IMPORT -> \"import\"\n  | T_SUPER -> \"super\"\n  | T_IMPLEMENTS -> \"implements\"\n  | T_INTERFACE -> \"interface\"\n  | T_PACKAGE -> \"package\"\n  | T_PRIVATE -> \"private\"\n  | T_PROTECTED -> \"protected\"\n  | T_PUBLIC -> \"public\"\n  | T_YIELD -> \"yield\"\n  | T_DEBUGGER -> \"debugger\"\n  | T_DECLARE -> \"declare\"\n  | T_TYPE -> \"type\"\n  | T_OPAQUE -> \"opaque\"\n  | T_OF -> \"of\"\n  | T_ASYNC -> \"async\"\n  | T_AWAIT -> \"await\"\n  | T_CHECKS -> \"%checks\"\n  | T_RSHIFT3_ASSIGN -> \">>>=\"\n  | T_RSHIFT_ASSIGN -> \">>=\"\n  | T_LSHIFT_ASSIGN -> \"<<=\"\n  | T_BIT_XOR_ASSIGN -> \"^=\"\n  | T_BIT_OR_ASSIGN -> \"|=\"\n  | T_BIT_AND_ASSIGN -> \"&=\"\n  | T_MOD_ASSIGN -> \"%=\"\n  | T_DIV_ASSIGN -> \"/=\"\n  | T_MULT_ASSIGN -> \"*=\"\n  | T_EXP_ASSIGN -> \"**=\"\n  | T_MINUS_ASSIGN -> \"-=\"\n  | T_PLUS_ASSIGN -> \"+=\"\n  | T_NULLISH_ASSIGN -> \"??=\"\n  | T_AND_ASSIGN -> \"&&=\"\n  | T_OR_ASSIGN -> \"||=\"\n  | T_ASSIGN -> \"=\"\n  | T_PLING_PERIOD -> \"?.\"\n  | T_PLING_PLING -> \"??\"\n  | T_PLING -> \"?\"\n  | T_COLON -> \":\"\n  | T_OR -> \"||\"\n  | T_AND -> \"&&\"\n  | T_BIT_OR -> \"|\"\n  | T_BIT_XOR -> \"^\"\n  | T_BIT_AND -> \"&\"\n  | T_EQUAL -> \"==\"\n  | T_NOT_EQUAL -> \"!=\"\n  | T_STRICT_EQUAL -> \"===\"\n  | T_STRICT_NOT_EQUAL -> \"!==\"\n  | T_LESS_THAN_EQUAL -> \"<=\"\n  | T_GREATER_THAN_EQUAL -> \">=\"\n  | T_LESS_THAN -> \"<\"\n  | T_GREATER_THAN -> \">\"\n  | T_LSHIFT -> \"<<\"\n  | T_RSHIFT -> \">>\"\n  | T_RSHIFT3 -> \">>>\"\n  | T_PLUS -> \"+\"\n  | T_MINUS -> \"-\"\n  | T_DIV -> \"/\"\n  | T_MULT -> \"*\"\n  | T_EXP -> \"**\"\n  | T_MOD -> \"%\"\n  | T_NOT -> \"!\"\n  | T_BIT_NOT -> \"~\"\n  | T_INCR -> \"++\"\n  | T_DECR -> \"--\"\n  (* Extra tokens *)\n  | T_ERROR raw -> raw\n  | T_EOF -> \"\"\n  | T_JSX_IDENTIFIER { raw; _ } -> raw\n  | T_JSX_TEXT (_, _, raw) -> raw\n  (* Type 
primitives *)\n  | T_ANY_TYPE -> \"any\"\n  | T_MIXED_TYPE -> \"mixed\"\n  | T_EMPTY_TYPE -> \"empty\"\n  | T_BOOLEAN_TYPE kind -> begin\n    match kind with\n    | BOOL -> \"bool\"\n    | BOOLEAN -> \"boolean\"\n  end\n  | T_NUMBER_TYPE -> \"number\"\n  | T_BIGINT_TYPE -> \"bigint\"\n  | T_NUMBER_SINGLETON_TYPE { raw; _ } -> raw\n  | T_BIGINT_SINGLETON_TYPE { raw; _ } -> raw\n  | T_STRING_TYPE -> \"string\"\n  | T_VOID_TYPE -> \"void\"\n  | T_SYMBOL_TYPE -> \"symbol\"\n\nlet quote_token_value value = Printf.sprintf \"token `%s`\" value\n\nlet explanation_of_token ?(use_article = false) token =\n  let (value, article) =\n    match token with\n    | T_NUMBER_SINGLETON_TYPE _\n    | T_NUMBER _ ->\n      (\"number\", \"a\")\n    | T_BIGINT_SINGLETON_TYPE _\n    | T_BIGINT _ ->\n      (\"bigint\", \"a\")\n    | T_JSX_TEXT _\n    | T_STRING _ ->\n      (\"string\", \"a\")\n    | T_TEMPLATE_PART _ -> (\"template literal part\", \"a\")\n    | T_JSX_IDENTIFIER _\n    | T_IDENTIFIER _ ->\n      (\"identifier\", \"an\")\n    | T_REGEXP _ -> (\"regexp\", \"a\")\n    | T_EOF -> (\"end of input\", \"the\")\n    | _ -> (quote_token_value (value_of_token token), \"the\")\n  in\n  if use_article then\n    article ^ \" \" ^ value\n  else\n    value\n"
  },
  {
    "path": "analysis/vendor/js_parser/type_parser.ml",
    "content": "(*\n * Copyright (c) Meta Platforms, Inc. and affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\nmodule Ast = Flow_ast\nopen Token\nopen Parser_env\nopen Flow_ast\nopen Parser_common\nopen Comment_attachment\n\nmodule type TYPE = sig\n  val _type : env -> (Loc.t, Loc.t) Ast.Type.t\n\n  val type_identifier : env -> (Loc.t, Loc.t) Ast.Identifier.t\n\n  val type_params : env -> (Loc.t, Loc.t) Ast.Type.TypeParams.t option\n\n  val type_args : env -> (Loc.t, Loc.t) Ast.Type.TypeArgs.t option\n\n  val generic : env -> Loc.t * (Loc.t, Loc.t) Ast.Type.Generic.t\n\n  val _object : is_class:bool -> env -> Loc.t * (Loc.t, Loc.t) Type.Object.t\n\n  val interface_helper :\n    env ->\n    (Loc.t * (Loc.t, Loc.t) Ast.Type.Generic.t) list * (Loc.t * (Loc.t, Loc.t) Ast.Type.Object.t)\n\n  val function_param_list : env -> (Loc.t, Loc.t) Type.Function.Params.t\n\n  val annotation : env -> (Loc.t, Loc.t) Ast.Type.annotation\n\n  val annotation_opt : env -> (Loc.t, Loc.t) Ast.Type.annotation_or_hint\n\n  val predicate_opt : env -> (Loc.t, Loc.t) Ast.Type.Predicate.t option\n\n  val annotation_and_predicate_opt :\n    env -> (Loc.t, Loc.t) Ast.Type.annotation_or_hint * (Loc.t, Loc.t) Ast.Type.Predicate.t option\nend\n\nmodule Type (Parse : Parser_common.PARSER) : TYPE = struct\n  type param_list_or_type =\n    | ParamList of (Loc.t, Loc.t) Type.Function.Params.t'\n    | Type of (Loc.t, Loc.t) Type.t\n\n  let maybe_variance env =\n    let loc = Peek.loc env in\n    match Peek.token env with\n    | T_PLUS ->\n      let leading = Peek.comments env in\n      Eat.token env;\n      Some\n        ( loc,\n          { Variance.kind = Variance.Plus; comments = Flow_ast_utils.mk_comments_opt ~leading () }\n        )\n    | T_MINUS ->\n      let leading = Peek.comments env in\n      Eat.token env;\n      Some\n        ( loc,\n          { Variance.kind = Variance.Minus; comments = 
Flow_ast_utils.mk_comments_opt ~leading () }\n        )\n    | _ -> None\n\n  let rec _type env = union env\n\n  and annotation env =\n    if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation;\n    with_loc\n      (fun env ->\n        Expect.token env T_COLON;\n        _type env)\n      env\n\n  and union env =\n    let leading =\n      if Peek.token env = T_BIT_OR then (\n        let leading = Peek.comments env in\n        Eat.token env;\n        leading\n      ) else\n        []\n    in\n    let left = intersection env in\n    union_with env ~leading left\n\n  and union_with =\n    let rec unions leading acc env =\n      match Peek.token env with\n      | T_BIT_OR ->\n        Expect.token env T_BIT_OR;\n        unions leading (intersection env :: acc) env\n      | _ ->\n        (match List.rev acc with\n        | t0 :: t1 :: ts ->\n          Type.Union\n            {\n              Type.Union.types = (t0, t1, ts);\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            }\n        | _ -> assert false)\n    in\n    fun env ?(leading = []) left ->\n      if Peek.token env = T_BIT_OR then\n        with_loc ~start_loc:(fst left) (unions leading [left]) env\n      else\n        left\n\n  and intersection env =\n    let leading =\n      if Peek.token env = T_BIT_AND then (\n        let leading = Peek.comments env in\n        Eat.token env;\n        leading\n      ) else\n        []\n    in\n    let left = anon_function_without_parens env in\n    intersection_with env ~leading left\n\n  and intersection_with =\n    let rec intersections leading acc env =\n      match Peek.token env with\n      | T_BIT_AND ->\n        Expect.token env T_BIT_AND;\n        intersections leading (anon_function_without_parens env :: acc) env\n      | _ ->\n        (match List.rev acc with\n        | t0 :: t1 :: ts ->\n          Type.Intersection\n            {\n              Type.Intersection.types = (t0, t1, ts);\n              
comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            }\n        | _ -> assert false)\n    in\n    fun env ?(leading = []) left ->\n      if Peek.token env = T_BIT_AND then\n        with_loc ~start_loc:(fst left) (intersections leading [left]) env\n      else\n        left\n\n  and anon_function_without_parens env =\n    let param = prefix env in\n    anon_function_without_parens_with env param\n\n  and anon_function_without_parens_with env param =\n    match Peek.token env with\n    | T_ARROW when not (no_anon_function_type env) ->\n      let (start_loc, tparams, params) =\n        let param = anonymous_function_param env param in\n        ( fst param,\n          None,\n          ( fst param,\n            {\n              Ast.Type.Function.Params.params = [param];\n              this_ = None;\n              rest = None;\n              comments = None;\n            }\n          )\n        )\n      in\n      function_with_params env start_loc tparams params\n    | _ -> param\n\n  and prefix env =\n    match Peek.token env with\n    | T_PLING ->\n      with_loc\n        (fun env ->\n          let leading = Peek.comments env in\n          Expect.token env T_PLING;\n          Type.Nullable\n            {\n              Type.Nullable.argument = prefix env;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            })\n        env\n    | _ -> postfix env\n\n  and postfix env =\n    let t = primary env in\n    postfix_with env t\n\n  and postfix_with ?(in_optional_indexed_access = false) env t =\n    if Peek.is_line_terminator env then\n      t\n    else\n      match Peek.token env with\n      | T_PLING_PERIOD ->\n        Eat.token env;\n        if Peek.token env <> T_LBRACKET then error env Parse_error.InvalidOptionalIndexedAccess;\n        Expect.token env T_LBRACKET;\n        postfix_brackets ~in_optional_indexed_access:true ~optional_indexed_access:true env t\n      | T_LBRACKET ->\n        Eat.token env;\n        postfix_brackets 
~in_optional_indexed_access ~optional_indexed_access:false env t\n      | T_PERIOD ->\n        (match Peek.ith_token ~i:1 env with\n        | T_LBRACKET ->\n          error env (Parse_error.InvalidIndexedAccess { has_bracket = true });\n          Expect.token env T_PERIOD;\n          Expect.token env T_LBRACKET;\n          postfix_brackets ~in_optional_indexed_access ~optional_indexed_access:false env t\n        | _ ->\n          error env (Parse_error.InvalidIndexedAccess { has_bracket = false });\n          t)\n      | _ -> t\n\n  and postfix_brackets ~in_optional_indexed_access ~optional_indexed_access env t =\n    let t =\n      with_loc\n        ~start_loc:(fst t)\n        (fun env ->\n          (* Legacy Array syntax `Foo[]` *)\n          if (not optional_indexed_access) && Eat.maybe env T_RBRACKET then\n            let trailing = Eat.trailing_comments env in\n            Type.Array\n              { Type.Array.argument = t; comments = Flow_ast_utils.mk_comments_opt ~trailing () }\n          else\n            let index = _type env in\n            Expect.token env T_RBRACKET;\n            let trailing = Eat.trailing_comments env in\n            let indexed_access =\n              {\n                Type.IndexedAccess._object = t;\n                index;\n                comments = Flow_ast_utils.mk_comments_opt ~trailing ();\n              }\n            in\n            if in_optional_indexed_access then\n              Type.OptionalIndexedAccess\n                { Type.OptionalIndexedAccess.indexed_access; optional = optional_indexed_access }\n            else\n              Type.IndexedAccess indexed_access)\n        env\n    in\n    postfix_with env ~in_optional_indexed_access t\n\n  and typeof_expr env = raw_typeof_expr_with_identifier env (Parse.identifier env)\n\n  and raw_typeof_expr_with_identifier =\n    let rec identifier env (q_loc, qualification) =\n      if Peek.token env = T_PERIOD && Peek.ith_is_identifier ~i:1 env then\n        let (loc, q) =\n   
       with_loc\n            ~start_loc:q_loc\n            (fun env ->\n              Expect.token env T_PERIOD;\n              let id = identifier_name env in\n              { Type.Typeof.Target.qualification; id })\n            env\n        in\n        let qualification = Type.Typeof.Target.Qualified (loc, q) in\n        identifier env (loc, qualification)\n      else\n        qualification\n    in\n    fun env ((loc, _) as id) ->\n      let id = Type.Typeof.Target.Unqualified id in\n      identifier env (loc, id)\n\n  and typeof_arg env =\n    match Peek.token env with\n    | T_LPAREN ->\n      Eat.token env;\n      let typeof = typeof_arg env in\n      Expect.token env T_RPAREN;\n      typeof\n    | T_IDENTIFIER _ (* `static` is reserved in strict mode, but still an identifier *) ->\n      Some (typeof_expr env)\n    | _ ->\n      error env Parse_error.InvalidTypeof;\n      None\n\n  and typeof env =\n    with_loc\n      (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_TYPEOF;\n        match typeof_arg env with\n        | None -> Type.Any None\n        | Some argument ->\n          Type.Typeof\n            { Type.Typeof.argument; comments = Flow_ast_utils.mk_comments_opt ~leading () })\n      env\n\n  and primary env =\n    let loc = Peek.loc env in\n    match Peek.token env with\n    | T_MULT ->\n      let leading = Peek.comments env in\n      Expect.token env T_MULT;\n      let trailing = Eat.trailing_comments env in\n      (loc, Type.Exists (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_LESS_THAN -> _function env\n    | T_LPAREN -> function_or_group env\n    | T_LCURLY\n    | T_LCURLYBAR ->\n      let (loc, o) = _object env ~is_class:false ~allow_exact:true ~allow_spread:true in\n      (loc, Type.Object o)\n    | T_INTERFACE ->\n      with_loc\n        (fun env ->\n          let leading = Peek.comments env in\n          Expect.token env T_INTERFACE;\n          let (extends, body) = interface_helper env 
in\n          Type.Interface\n            { Type.Interface.extends; body; comments = Flow_ast_utils.mk_comments_opt ~leading () })\n        env\n    | T_TYPEOF -> typeof env\n    | T_LBRACKET -> tuple env\n    | T_IDENTIFIER _\n    | T_STATIC (* `static` is reserved in strict mode, but still an identifier *) ->\n      let (loc, g) = generic env in\n      (loc, Type.Generic g)\n    | T_STRING (loc, value, raw, octal) ->\n      if octal then strict_error env Parse_error.StrictOctalLiteral;\n      let leading = Peek.comments env in\n      Expect.token env (T_STRING (loc, value, raw, octal));\n      let trailing = Eat.trailing_comments env in\n      ( loc,\n        Type.StringLiteral\n          {\n            Ast.StringLiteral.value;\n            raw;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n      )\n    | T_NUMBER_SINGLETON_TYPE { kind; value; raw } ->\n      if kind = LEGACY_OCTAL then strict_error env Parse_error.StrictOctalLiteral;\n      let leading = Peek.comments env in\n      Expect.token env (T_NUMBER_SINGLETON_TYPE { kind; value; raw });\n      let trailing = Eat.trailing_comments env in\n      ( loc,\n        Type.NumberLiteral\n          {\n            Ast.NumberLiteral.value;\n            raw;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n      )\n    | T_BIGINT_SINGLETON_TYPE { kind; value; raw } ->\n      let leading = Peek.comments env in\n      Expect.token env (T_BIGINT_SINGLETON_TYPE { kind; value; raw });\n      let trailing = Eat.trailing_comments env in\n      ( loc,\n        Type.BigIntLiteral\n          {\n            Ast.BigIntLiteral.value;\n            raw;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n      )\n    | (T_TRUE | T_FALSE) as token ->\n      let leading = Peek.comments env in\n      Expect.token env token;\n      let trailing = Eat.trailing_comments env in\n      let value = token = T_TRUE 
in\n      ( loc,\n        Type.BooleanLiteral\n          { BooleanLiteral.value; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n      )\n    | _ ->\n      (match primitive env with\n      | Some t -> (loc, t)\n      | None ->\n        error_unexpected ~expected:\"a type\" env;\n        (loc, Type.Any None))\n\n  and is_primitive = function\n    | T_ANY_TYPE\n    | T_MIXED_TYPE\n    | T_EMPTY_TYPE\n    | T_BOOLEAN_TYPE _\n    | T_NUMBER_TYPE\n    | T_BIGINT_TYPE\n    | T_STRING_TYPE\n    | T_SYMBOL_TYPE\n    | T_VOID_TYPE\n    | T_NULL ->\n      true\n    | _ -> false\n\n  and primitive env =\n    let leading = Peek.comments env in\n    let token = Peek.token env in\n    match token with\n    | T_ANY_TYPE ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.Any (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_MIXED_TYPE ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.Mixed (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_EMPTY_TYPE ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.Empty (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_BOOLEAN_TYPE _ ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.Boolean (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_NUMBER_TYPE ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.Number (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_BIGINT_TYPE ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.BigInt (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_STRING_TYPE ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.String (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_SYMBOL_TYPE ->\n      Eat.token 
env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.Symbol (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_VOID_TYPE ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.Void (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | T_NULL ->\n      Eat.token env;\n      let trailing = Eat.trailing_comments env in\n      Some (Type.Null (Flow_ast_utils.mk_comments_opt ~leading ~trailing ()))\n    | _ -> None\n\n  and tuple =\n    let rec types env acc =\n      match Peek.token env with\n      | T_EOF\n      | T_RBRACKET ->\n        List.rev acc\n      | _ ->\n        let acc = _type env :: acc in\n        (* Trailing comma support (like [number, string,]) *)\n        if Peek.token env <> T_RBRACKET then Expect.token env T_COMMA;\n        types env acc\n    in\n    fun env ->\n      with_loc\n        (fun env ->\n          let leading = Peek.comments env in\n          Expect.token env T_LBRACKET;\n          let tl = types (with_no_anon_function_type false env) [] in\n          Expect.token env T_RBRACKET;\n          let trailing = Eat.trailing_comments env in\n          Type.Tuple\n            {\n              Type.Tuple.types = tl;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            })\n        env\n\n  and anonymous_function_param _env annot =\n    (fst annot, Type.Function.Param.{ name = None; annot; optional = false })\n\n  and function_param_with_id env =\n    with_loc\n      (fun env ->\n        Eat.push_lex_mode env Lex_mode.NORMAL;\n        let name = Parse.identifier env in\n        Eat.pop_lex_mode env;\n        if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation;\n        let optional = Eat.maybe env T_PLING in\n        Expect.token env T_COLON;\n        let annot = _type env in\n        { Type.Function.Param.name = Some name; annot; optional })\n      env\n\n  and function_param_list_without_parens 
=\n    let param env =\n      match Peek.ith_token ~i:1 env with\n      | T_COLON\n      | T_PLING ->\n        function_param_with_id env\n      | _ ->\n        let annot = _type env in\n        anonymous_function_param env annot\n    in\n    let rec param_list env this_ acc =\n      match Peek.token env with\n      | (T_EOF | T_ELLIPSIS | T_RPAREN) as t ->\n        let rest =\n          if t = T_ELLIPSIS then\n            let rest =\n              with_loc\n                (fun env ->\n                  let leading = Peek.comments env in\n                  Expect.token env T_ELLIPSIS;\n                  {\n                    Type.Function.RestParam.argument = param env;\n                    comments = Flow_ast_utils.mk_comments_opt ~leading ();\n                  })\n                env\n            in\n            Some rest\n          else\n            None\n        in\n        { Ast.Type.Function.Params.params = List.rev acc; rest; this_; comments = None }\n      | T_IDENTIFIER { raw = \"this\"; _ }\n        when Peek.ith_token ~i:1 env == T_COLON || Peek.ith_token ~i:1 env == T_PLING ->\n        if this_ <> None || acc <> [] then error env Parse_error.ThisParamMustBeFirst;\n        let this_ =\n          with_loc\n            (fun env ->\n              let leading = Peek.comments env in\n              Eat.token env;\n              if Peek.token env == T_PLING then error env Parse_error.ThisParamMayNotBeOptional;\n              {\n                Type.Function.ThisParam.annot = annotation env;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ();\n              })\n            env\n        in\n        if Peek.token env <> T_RPAREN then Expect.token env T_COMMA;\n        param_list env (Some this_) acc\n      | _ ->\n        let acc = param env :: acc in\n        if Peek.token env <> T_RPAREN then Expect.token env T_COMMA;\n        param_list env this_ acc\n    in\n    (fun env -> param_list env None)\n\n  and function_param_list env =\n    
with_loc\n      (fun env ->\n        let leading = Peek.comments env in\n        Expect.token env T_LPAREN;\n        let params = function_param_list_without_parens env [] in\n        let internal = Peek.comments env in\n        Expect.token env T_RPAREN;\n        let trailing = Eat.trailing_comments env in\n        {\n          params with\n          Ast.Type.Function.Params.comments =\n            Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n        })\n      env\n\n  and param_list_or_type env =\n    let leading = Peek.comments env in\n    Expect.token env T_LPAREN;\n    let ret =\n      let env = with_no_anon_function_type false env in\n      match Peek.token env with\n      | T_EOF\n      | T_ELLIPSIS ->\n        (* (... is definitely the beginning of a param list *)\n        ParamList (function_param_list_without_parens env [])\n      | T_RPAREN ->\n        (* () or is definitely a param list *)\n        ParamList\n          { Ast.Type.Function.Params.this_ = None; params = []; rest = None; comments = None }\n      | T_IDENTIFIER _\n      | T_STATIC (* `static` is reserved in strict mode, but still an identifier *) ->\n        (* This could be a function parameter or a generic type *)\n        function_param_or_generic_type env\n      | token when is_primitive token ->\n        (* Don't know if this is (number) or (number: number). The first\n         * is a type, the second is a param. 
*)\n        (match Peek.ith_token ~i:1 env with\n        | T_PLING\n        | T_COLON ->\n          (* Ok this is definitely a parameter *)\n          ParamList (function_param_list_without_parens env [])\n        | _ -> Type (_type env))\n      | _ ->\n        (* All params start with an identifier or `...` *)\n        Type (_type env)\n    in\n    (* Now that we allow anonymous parameters in function types, we need to\n     * disambiguate a little bit more *)\n    let ret =\n      match ret with\n      | ParamList _ -> ret\n      | Type _ when no_anon_function_type env -> ret\n      | Type t ->\n        (match Peek.token env with\n        | T_RPAREN ->\n          (* Reinterpret `(type) =>` as a ParamList *)\n          if Peek.ith_token ~i:1 env = T_ARROW then\n            let param = anonymous_function_param env t in\n            ParamList (function_param_list_without_parens env [param])\n          else\n            Type t\n        | T_COMMA ->\n          (* Reinterpret `(type,` as a ParamList *)\n          Expect.token env T_COMMA;\n          let param = anonymous_function_param env t in\n          ParamList (function_param_list_without_parens env [param])\n        | _ -> ret)\n    in\n    let internal = Peek.comments env in\n    Expect.token env T_RPAREN;\n    let trailing = Eat.trailing_comments env in\n    let ret =\n      match ret with\n      | ParamList params ->\n        ParamList\n          {\n            params with\n            Ast.Type.Function.Params.comments =\n              Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n          }\n      | Type t -> Type (add_comments t leading trailing)\n    in\n    ret\n\n  and function_param_or_generic_type env =\n    match Peek.ith_token ~i:1 env with\n    | T_PLING\n    (* optional param *)\n    | T_COLON ->\n      ParamList (function_param_list_without_parens env [])\n    | _ ->\n      let id = type_identifier env in\n      Type\n        (generic_type_with_identifier env id\n  
      |> postfix_with env\n        |> anon_function_without_parens_with env\n        |> intersection_with env\n        |> union_with env\n        )\n\n  and function_or_group env =\n    let start_loc = Peek.loc env in\n    match with_loc param_list_or_type env with\n    | (loc, ParamList params) -> function_with_params env start_loc None (loc, params)\n    | (_, Type _type) -> _type\n\n  and _function env =\n    let start_loc = Peek.loc env in\n    let tparams = type_params_remove_trailing env (type_params env) in\n    let params = function_param_list env in\n    function_with_params env start_loc tparams params\n\n  and function_with_params env start_loc tparams (params : (Loc.t, Loc.t) Ast.Type.Function.Params.t)\n      =\n    with_loc\n      ~start_loc\n      (fun env ->\n        Expect.token env T_ARROW;\n        let return = _type env in\n        Type.(Function { Function.params; return; tparams; comments = None }))\n      env\n\n  and _object =\n    let methodish env start_loc tparams =\n      with_loc\n        ~start_loc\n        (fun env ->\n          let params = function_param_list env in\n          Expect.token env T_COLON;\n          let return = _type env in\n          { Type.Function.params; return; tparams; comments = None })\n        env\n    in\n    let method_property env start_loc static key ~leading =\n      let key = object_key_remove_trailing env key in\n      let tparams = type_params_remove_trailing env (type_params env) in\n      let value = methodish env start_loc tparams in\n      let value = (fst value, Type.Function (snd value)) in\n      Type.Object.(\n        Property\n          ( fst value,\n            {\n              Property.key;\n              value = Property.Init value;\n              optional = false;\n              static = static <> None;\n              proto = false;\n              _method = true;\n              variance = None;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            }\n          
)\n      )\n    in\n    let call_property env start_loc static ~leading =\n      let prop =\n        with_loc\n          ~start_loc\n          (fun env ->\n            let start_loc = Peek.loc env in\n            let tparams = type_params_remove_trailing env (type_params env) in\n            let value = methodish env start_loc tparams in\n            Type.Object.CallProperty.\n              {\n                value;\n                static = static <> None;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ();\n              }\n            )\n          env\n      in\n      Type.Object.CallProperty prop\n    in\n    let init_property env start_loc ~variance ~static ~proto ~leading (key_loc, key) =\n      ignore proto;\n      if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation;\n      let prop =\n        with_loc\n          ~start_loc\n          (fun env ->\n            let optional = Eat.maybe env T_PLING in\n            let value =\n              if Expect.token_maybe env T_COLON then\n                _type env\n              else\n                (key_loc, Type.Any None)\n            in\n            Type.Object.Property.\n              {\n                key;\n                value = Init value;\n                optional;\n                static = static <> None;\n                proto = proto <> None;\n                _method = false;\n                variance;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ();\n              }\n            )\n          env\n      in\n      Type.Object.Property prop\n    in\n    let getter_or_setter ~is_getter ~leading env start_loc static key =\n      let prop =\n        with_loc\n          ~start_loc\n          (fun env ->\n            let (key_loc, key) = key in\n            let key = object_key_remove_trailing env key in\n            let value = methodish env start_loc None in\n            let (_, { Type.Function.params; _ }) = value in\n            
begin\n              match (is_getter, params) with\n              | (true, (_, { Type.Function.Params.this_ = Some _; _ })) ->\n                error_at env (key_loc, Parse_error.GetterMayNotHaveThisParam)\n              | (false, (_, { Type.Function.Params.this_ = Some _; _ })) ->\n                error_at env (key_loc, Parse_error.SetterMayNotHaveThisParam)\n              | ( true,\n                  (_, { Type.Function.Params.params = []; rest = None; this_ = None; comments = _ })\n                ) ->\n                ()\n              | (false, (_, { Type.Function.Params.rest = Some _; _ })) ->\n                (* rest params don't make sense on a setter *)\n                error_at env (key_loc, Parse_error.SetterArity)\n              | (false, (_, { Type.Function.Params.params = [_]; _ })) -> ()\n              | (true, _) -> error_at env (key_loc, Parse_error.GetterArity)\n              | (false, _) -> error_at env (key_loc, Parse_error.SetterArity)\n            end;\n            Type.Object.Property.\n              {\n                key;\n                value =\n                  ( if is_getter then\n                    Get value\n                  else\n                    Set value\n                  );\n                optional = false;\n                static = static <> None;\n                proto = false;\n                _method = false;\n                variance = None;\n                comments = Flow_ast_utils.mk_comments_opt ~leading ();\n              }\n            )\n          env\n      in\n      Type.Object.Property prop\n    in\n    let indexer_property env start_loc static variance ~leading =\n      let indexer =\n        with_loc\n          ~start_loc\n          (fun env ->\n            let leading = leading @ Peek.comments env in\n            Expect.token env T_LBRACKET;\n            let id =\n              if Peek.ith_token ~i:1 env = T_COLON then (\n                let id = identifier_name env in\n                Expect.token env 
T_COLON;\n                Some id\n              ) else\n                None\n            in\n            let key = _type env in\n            Expect.token env T_RBRACKET;\n            let trailing = Eat.trailing_comments env in\n            Expect.token env T_COLON;\n            let value = _type env in\n            {\n              Type.Object.Indexer.id;\n              key;\n              value;\n              static = static <> None;\n              variance;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            })\n          env\n      in\n      Type.Object.Indexer indexer\n    in\n    let internal_slot env start_loc static ~leading =\n      let islot =\n        with_loc\n          ~start_loc\n          (fun env ->\n            let leading = leading @ Peek.comments env in\n            Expect.token env T_LBRACKET;\n            Expect.token env T_LBRACKET;\n            let id = identifier_name env in\n            Expect.token env T_RBRACKET;\n            Expect.token env T_RBRACKET;\n            let (optional, _method, value, trailing) =\n              match Peek.token env with\n              | T_LESS_THAN\n              | T_LPAREN ->\n                let tparams = type_params_remove_trailing env (type_params env) in\n                let value =\n                  let (fn_loc, fn) = methodish env start_loc tparams in\n                  (fn_loc, Type.Function fn)\n                in\n                (false, true, value, [])\n              | _ ->\n                let optional = Eat.maybe env T_PLING in\n                let trailing = Eat.trailing_comments env in\n                Expect.token env T_COLON;\n                let value = _type env in\n                (optional, false, value, trailing)\n            in\n            {\n              Type.Object.InternalSlot.id;\n              value;\n              optional;\n              static = static <> None;\n              _method;\n              comments = 
Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n            })\n          env\n      in\n      Type.Object.InternalSlot islot\n      (* Expects the T_ELLIPSIS has already been eaten *)\n    in\n    let spread_property env start_loc ~leading =\n      let spread =\n        with_loc\n          ~start_loc\n          (fun env ->\n            {\n              Type.Object.SpreadProperty.argument = _type env;\n              comments = Flow_ast_utils.mk_comments_opt ~leading ();\n            })\n          env\n      in\n      Type.Object.SpreadProperty spread\n    in\n    let semicolon exact env =\n      match Peek.token env with\n      | T_COMMA\n      | T_SEMICOLON ->\n        Eat.token env\n      | T_RCURLYBAR when exact -> ()\n      | T_RCURLY when not exact -> ()\n      | _ -> Expect.error env T_COMMA\n    in\n    let error_unexpected_variance env = function\n      | Some (loc, _) -> error_at env (loc, Parse_error.UnexpectedVariance)\n      | None -> ()\n    in\n    let error_unexpected_proto env = function\n      | Some loc -> error_at env (loc, Parse_error.UnexpectedProto)\n      | None -> ()\n    in\n    let error_invalid_property_name env is_class static key =\n      let is_static = static <> None in\n      let is_constructor = String.equal \"constructor\" in\n      let is_prototype = String.equal \"prototype\" in\n      match key with\n      | Expression.Object.Property.Identifier (loc, { Identifier.name; comments = _ })\n        when is_class && (is_constructor name || (is_static && is_prototype name)) ->\n        error_at\n          env\n          ( loc,\n            Parse_error.InvalidClassMemberName\n              { name; static = is_static; method_ = false; private_ = false }\n          )\n      | _ -> ()\n    in\n    let rec properties\n        ~is_class ~allow_inexact ~allow_spread ~exact env ((props, inexact, internal) as acc) =\n      (* no `static ...A` *)\n      assert (not (is_class && allow_spread));\n\n      (* allow_inexact implies 
allow_spread *)\n      assert ((not allow_inexact) || allow_spread);\n\n      let start_loc = Peek.loc env in\n      match Peek.token env with\n      | T_EOF -> (List.rev props, inexact, internal)\n      | T_RCURLYBAR when exact -> (List.rev props, inexact, internal)\n      | T_RCURLY when not exact -> (List.rev props, inexact, internal)\n      | T_ELLIPSIS when allow_spread ->\n        let leading = Peek.comments env in\n        Eat.token env;\n        begin\n          match Peek.token env with\n          | T_COMMA\n          | T_SEMICOLON\n          | T_RCURLY\n          | T_RCURLYBAR ->\n            semicolon exact env;\n            begin\n              match Peek.token env with\n              | T_RCURLY when allow_inexact -> (List.rev props, true, leading)\n              | T_RCURLYBAR ->\n                error_at env (start_loc, Parse_error.InexactInsideExact);\n                (List.rev props, inexact, internal)\n              | _ ->\n                error_at env (start_loc, Parse_error.UnexpectedExplicitInexactInObject);\n                properties ~is_class ~allow_inexact ~allow_spread ~exact env acc\n            end\n          | _ ->\n            let prop = spread_property env start_loc ~leading in\n            semicolon exact env;\n            properties\n              ~is_class\n              ~allow_inexact\n              ~allow_spread\n              ~exact\n              env\n              (prop :: props, inexact, internal)\n        end\n      (* In this case, allow_spread is false, so we may assume allow_inexact is false based on our\n       * assertion at the top of this function. 
Thus, any T_ELLIPSIS here is not allowed.\n       *)\n      | T_ELLIPSIS ->\n        Eat.token env;\n        begin\n          match Peek.token env with\n          | T_COMMA\n          | T_SEMICOLON\n          | T_RCURLY\n          | T_RCURLYBAR ->\n            error_at env (start_loc, Parse_error.InexactInsideNonObject);\n            semicolon exact env;\n            properties ~is_class ~allow_inexact ~allow_spread ~exact env acc\n          | _ ->\n            error_list env (Peek.errors env);\n            error_at env (start_loc, Parse_error.UnexpectedSpreadType);\n\n            (* It's likely the user is trying to spread something here, so we can\n             * eat what they try to spread to try to continue parsing the remaining\n             * properties.\n             *)\n            Eat.token env;\n            semicolon exact env;\n            properties ~is_class ~allow_inexact ~allow_spread ~exact env acc\n        end\n      | _ ->\n        let prop =\n          property\n            env\n            start_loc\n            ~is_class\n            ~allow_static:is_class\n            ~allow_proto:is_class\n            ~variance:None\n            ~static:None\n            ~proto:None\n            ~leading:[]\n        in\n        semicolon exact env;\n        properties\n          ~is_class\n          ~allow_inexact\n          ~allow_spread\n          ~exact\n          env\n          (prop :: props, inexact, internal)\n    and property\n        env ~is_class ~allow_static ~allow_proto ~variance ~static ~proto ~leading start_loc =\n      match Peek.token env with\n      | T_PLUS\n      | T_MINUS\n        when variance = None ->\n        let variance = maybe_variance env in\n        property\n          env\n          ~is_class\n          ~allow_static:false\n          ~allow_proto:false\n          ~variance\n          ~static\n          ~proto\n          ~leading\n          start_loc\n      | T_STATIC when allow_static ->\n        assert (variance = None);\n\n    
    (* if we parsed variance, allow_static = false *)\n        let static = Some (Peek.loc env) in\n        let leading = leading @ Peek.comments env in\n        Eat.token env;\n        property\n          env\n          ~is_class\n          ~allow_static:false\n          ~allow_proto:false\n          ~variance\n          ~static\n          ~proto\n          ~leading\n          start_loc\n      | T_IDENTIFIER { raw = \"proto\"; _ } when allow_proto ->\n        assert (variance = None);\n\n        (* if we parsed variance, allow_proto = false *)\n        let proto = Some (Peek.loc env) in\n        let leading = leading @ Peek.comments env in\n        Eat.token env;\n        property\n          env\n          ~is_class\n          ~allow_static:false\n          ~allow_proto:false\n          ~variance\n          ~static\n          ~proto\n          ~leading\n          start_loc\n      | T_LBRACKET ->\n        error_unexpected_proto env proto;\n        (match Peek.ith_token ~i:1 env with\n        | T_LBRACKET ->\n          error_unexpected_variance env variance;\n          internal_slot env start_loc static ~leading\n        | _ -> indexer_property env start_loc static variance ~leading)\n      | T_LESS_THAN\n      | T_LPAREN ->\n        (* Note that `static(): void` is a static callable property if we\n           successfully parsed the static modifier above. *)\n        error_unexpected_proto env proto;\n        error_unexpected_variance env variance;\n        call_property env start_loc static ~leading\n      | token ->\n        (match (static, proto, token) with\n        | (Some _, Some _, _) -> failwith \"Can not have both `static` and `proto`\"\n        | (Some static_loc, None, (T_PLING | T_COLON)) ->\n          (* We speculatively parsed `static` as a static modifier, but now\n             that we've parsed the next token, we changed our minds and want\n             to parse `static` as the key of a named property. 
*)\n          let key =\n            Expression.Object.Property.Identifier\n              (Flow_ast_utils.ident_of_source\n                 (static_loc, \"static\")\n                 ?comments:(Flow_ast_utils.mk_comments_opt ~leading ())\n              )\n          in\n          let static = None in\n          init_property env start_loc ~variance ~static ~proto ~leading:[] (static_loc, key)\n        | (None, Some proto_loc, (T_PLING | T_COLON)) ->\n          (* We speculatively parsed `proto` as a proto modifier, but now\n             that we've parsed the next token, we changed our minds and want\n             to parse `proto` as the key of a named property. *)\n          let key =\n            Expression.Object.Property.Identifier\n              (Flow_ast_utils.ident_of_source\n                 (proto_loc, \"proto\")\n                 ?comments:(Flow_ast_utils.mk_comments_opt ~leading ())\n              )\n          in\n          let proto = None in\n          init_property env start_loc ~variance ~static ~proto ~leading:[] (proto_loc, key)\n        | _ ->\n          let object_key env =\n            Eat.push_lex_mode env Lex_mode.NORMAL;\n            let result = Parse.object_key env in\n            Eat.pop_lex_mode env;\n            result\n          in\n          let leading_key = Peek.comments env in\n          (match object_key env with\n          | ( key_loc,\n              ( Expression.Object.Property.Identifier\n                  (_, { Identifier.name = (\"get\" | \"set\") as name; comments = _ }) as key\n              )\n            ) ->\n            begin\n              match Peek.token env with\n              | T_LESS_THAN\n              | T_LPAREN ->\n                error_unexpected_proto env proto;\n                error_unexpected_variance env variance;\n                method_property env start_loc static key ~leading\n              | T_COLON\n              | T_PLING ->\n                init_property env start_loc ~variance ~static ~proto 
~leading (key_loc, key)\n              | _ ->\n                ignore (object_key_remove_trailing env key);\n                let key = object_key env in\n                let is_getter = name = \"get\" in\n                let leading = leading @ leading_key in\n                error_unexpected_proto env proto;\n                error_unexpected_variance env variance;\n                getter_or_setter ~is_getter ~leading env start_loc static key\n            end\n          | (key_loc, key) ->\n            begin\n              match Peek.token env with\n              | T_LESS_THAN\n              | T_LPAREN ->\n                error_unexpected_proto env proto;\n                error_unexpected_variance env variance;\n                method_property env start_loc static key ~leading\n              | _ ->\n                error_invalid_property_name env is_class static key;\n                init_property env start_loc ~variance ~static ~proto ~leading (key_loc, key)\n            end))\n    in\n    fun ~is_class ~allow_exact ~allow_spread env ->\n      let exact = allow_exact && Peek.token env = T_LCURLYBAR in\n      let allow_inexact = allow_exact && not exact in\n      with_loc\n        (fun env ->\n          let leading = Peek.comments env in\n          Expect.token\n            env\n            ( if exact then\n              T_LCURLYBAR\n            else\n              T_LCURLY\n            );\n          let (properties, inexact, internal) =\n            let env = with_no_anon_function_type false env in\n            properties ~is_class ~allow_inexact ~exact ~allow_spread env ([], false, [])\n          in\n          let internal = internal @ Peek.comments env in\n          Expect.token\n            env\n            ( if exact then\n              T_RCURLYBAR\n            else\n              T_RCURLY\n            );\n          let trailing = Eat.trailing_comments env in\n\n          (* inexact = true iff `...` was used to indicate inexactnes *)\n          {\n            
Type.Object.exact;\n            properties;\n            inexact;\n            comments = Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n          })\n        env\n\n  and interface_helper =\n    let rec supers env acc =\n      let super = generic env in\n      let acc = super :: acc in\n      match Peek.token env with\n      | T_COMMA ->\n        Expect.token env T_COMMA;\n        supers env acc\n      | _ -> List.rev acc\n    in\n    fun env ->\n      let extends =\n        if Peek.token env = T_EXTENDS then (\n          Expect.token env T_EXTENDS;\n          let extends = supers env [] in\n          generic_type_list_remove_trailing env extends\n        ) else\n          []\n      in\n      let body = _object env ~allow_exact:false ~allow_spread:false ~is_class:false in\n      (extends, body)\n\n  and type_identifier env =\n    let (loc, { Identifier.name; comments }) = identifier_name env in\n    if is_reserved_type name then error_at env (loc, Parse_error.UnexpectedReservedType);\n    (loc, { Identifier.name; comments })\n\n  and bounded_type env =\n    with_loc\n      (fun env ->\n        let name = type_identifier env in\n        let bound =\n          if Peek.token env = T_COLON then\n            Ast.Type.Available (annotation env)\n          else\n            Ast.Type.Missing (Peek.loc_skip_lookahead env)\n        in\n        (name, bound))\n      env\n\n  and type_params =\n    let rec params env ~require_default acc =\n      Type.TypeParam.(\n        let (loc, (variance, name, bound, default, require_default)) =\n          with_loc\n            (fun env ->\n              let variance = maybe_variance env in\n              let (loc, (name, bound)) = bounded_type env in\n              let (default, require_default) =\n                match Peek.token env with\n                | T_ASSIGN ->\n                  Eat.token env;\n                  (Some (_type env), true)\n                | _ ->\n                  if 
require_default then error_at env (loc, Parse_error.MissingTypeParamDefault);\n                  (None, require_default)\n              in\n              (variance, name, bound, default, require_default))\n            env\n        in\n        let param = (loc, { name; bound; variance; default }) in\n        let acc = param :: acc in\n        match Peek.token env with\n        | T_EOF\n        | T_GREATER_THAN ->\n          List.rev acc\n        | _ ->\n          Expect.token env T_COMMA;\n          if Peek.token env = T_GREATER_THAN then\n            List.rev acc\n          else\n            params env ~require_default acc\n      )\n    in\n    fun env ->\n      if Peek.token env = T_LESS_THAN then (\n        if not (should_parse_types env) then error env Parse_error.UnexpectedTypeAnnotation;\n        Some\n          (with_loc\n             (fun env ->\n               let leading = Peek.comments env in\n               Expect.token env T_LESS_THAN;\n               let params = params env ~require_default:false [] in\n               let internal = Peek.comments env in\n               Expect.token env T_GREATER_THAN;\n               let trailing = Eat.trailing_comments env in\n               {\n                 Type.TypeParams.params;\n                 comments =\n                   Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n               })\n             env\n          )\n      ) else\n        None\n\n  and type_args =\n    let rec args env acc =\n      match Peek.token env with\n      | T_EOF\n      | T_GREATER_THAN ->\n        List.rev acc\n      | _ ->\n        let acc = _type env :: acc in\n        if Peek.token env <> T_GREATER_THAN then Expect.token env T_COMMA;\n        args env acc\n    in\n    fun env ->\n      if Peek.token env = T_LESS_THAN then\n        Some\n          (with_loc\n             (fun env ->\n               let leading = Peek.comments env in\n               Expect.token env T_LESS_THAN;\n               let 
env = with_no_anon_function_type false env in\n               let arguments = args env [] in\n               let internal = Peek.comments env in\n               Expect.token env T_GREATER_THAN;\n               let trailing = Eat.trailing_comments env in\n               {\n                 Type.TypeArgs.arguments;\n                 comments =\n                   Flow_ast_utils.mk_comments_with_internal_opt ~leading ~trailing ~internal ();\n               })\n             env\n          )\n      else\n        None\n\n  and generic env = raw_generic_with_identifier env (type_identifier env)\n\n  and raw_generic_with_identifier =\n    let rec identifier env (q_loc, qualification) =\n      if Peek.token env = T_PERIOD && Peek.ith_is_type_identifier ~i:1 env then\n        let (loc, q) =\n          with_loc\n            ~start_loc:q_loc\n            (fun env ->\n              Expect.token env T_PERIOD;\n              let id = type_identifier env in\n              { Type.Generic.Identifier.qualification; id })\n            env\n        in\n        let qualification = Type.Generic.Identifier.Qualified (loc, q) in\n        identifier env (loc, qualification)\n      else\n        (q_loc, qualification)\n    in\n    fun env id ->\n      with_loc\n        ~start_loc:(fst id)\n        (fun env ->\n          let id = (fst id, Type.Generic.Identifier.Unqualified id) in\n          let id =\n            let (_id_loc, id) = identifier env id in\n            if Peek.token env <> T_LESS_THAN then\n              id\n            else\n              let { remove_trailing; _ } = trailing_and_remover env in\n              remove_trailing id (fun remover id -> remover#generic_identifier_type id)\n          in\n          let targs = type_args env in\n          { Type.Generic.id; targs; comments = None })\n        env\n\n  and generic_type_with_identifier env id =\n    let (loc, generic) = raw_generic_with_identifier env id in\n    (loc, Type.Generic generic)\n\n  and annotation_opt env =\n    
match Peek.token env with\n    | T_COLON -> Type.Available (annotation env)\n    | _ -> Type.Missing (Peek.loc_skip_lookahead env)\n\n  and add_comments (loc, t) leading trailing =\n    let merge_comments inner =\n      Flow_ast_utils.merge_comments\n        ~inner\n        ~outer:(Flow_ast_utils.mk_comments_opt ~leading ~trailing ())\n    in\n    let merge_comments_with_internal inner =\n      Flow_ast_utils.merge_comments_with_internal\n        ~inner\n        ~outer:(Flow_ast_utils.mk_comments_opt ~leading ~trailing ())\n    in\n    let open Ast.Type in\n    ( loc,\n      match t with\n      | Any comments -> Any (merge_comments comments)\n      | Mixed comments -> Mixed (merge_comments comments)\n      | Empty comments -> Empty (merge_comments comments)\n      | Void comments -> Void (merge_comments comments)\n      | Null comments -> Null (merge_comments comments)\n      | Number comments -> Number (merge_comments comments)\n      | BigInt comments -> BigInt (merge_comments comments)\n      | String comments -> String (merge_comments comments)\n      | Boolean comments -> Boolean (merge_comments comments)\n      | Symbol comments -> Symbol (merge_comments comments)\n      | Exists comments -> Exists (merge_comments comments)\n      | Nullable ({ Nullable.comments; _ } as t) ->\n        Nullable { t with Nullable.comments = merge_comments comments }\n      | Function ({ Function.comments; _ } as t) ->\n        Function { t with Function.comments = merge_comments comments }\n      | Object ({ Object.comments; _ } as t) ->\n        Object { t with Object.comments = merge_comments_with_internal comments }\n      | Interface ({ Interface.comments; _ } as t) ->\n        Interface { t with Interface.comments = merge_comments comments }\n      | Array ({ Array.comments; _ } as t) ->\n        Array { t with Array.comments = merge_comments comments }\n      | Generic ({ Generic.comments; _ } as t) ->\n        Generic { t with Generic.comments = merge_comments comments 
}\n      | IndexedAccess ({ IndexedAccess.comments; _ } as t) ->\n        IndexedAccess { t with IndexedAccess.comments = merge_comments comments }\n      | OptionalIndexedAccess\n          {\n            OptionalIndexedAccess.indexed_access = { IndexedAccess.comments; _ } as indexed_access;\n            optional;\n          } ->\n        OptionalIndexedAccess\n          {\n            OptionalIndexedAccess.indexed_access =\n              { indexed_access with IndexedAccess.comments = merge_comments comments };\n            optional;\n          }\n      | Union ({ Union.comments; _ } as t) ->\n        Union { t with Union.comments = merge_comments comments }\n      | Intersection ({ Intersection.comments; _ } as t) ->\n        Intersection { t with Intersection.comments = merge_comments comments }\n      | Typeof ({ Typeof.comments; _ } as t) ->\n        Typeof { t with Typeof.comments = merge_comments comments }\n      | Tuple ({ Tuple.comments; _ } as t) ->\n        Tuple { t with Tuple.comments = merge_comments comments }\n      | StringLiteral ({ StringLiteral.comments; _ } as t) ->\n        StringLiteral { t with StringLiteral.comments = merge_comments comments }\n      | NumberLiteral ({ NumberLiteral.comments; _ } as t) ->\n        NumberLiteral { t with NumberLiteral.comments = merge_comments comments }\n      | BigIntLiteral ({ BigIntLiteral.comments; _ } as t) ->\n        BigIntLiteral { t with BigIntLiteral.comments = merge_comments comments }\n      | BooleanLiteral ({ BooleanLiteral.comments; _ } as t) ->\n        BooleanLiteral { t with BooleanLiteral.comments = merge_comments comments }\n    )\n\n  let predicate =\n    with_loc (fun env ->\n        let open Ast.Type.Predicate in\n        let leading = Peek.comments env in\n        Expect.token env T_CHECKS;\n        if Peek.token env = T_LPAREN then (\n          let leading = leading @ Peek.comments env in\n          Expect.token env T_LPAREN;\n          Eat.push_lex_mode env Lex_mode.NORMAL;\n       
   let exp = Parse.conditional env in\n          Eat.pop_lex_mode env;\n          Expect.token env T_RPAREN;\n          let trailing = Eat.trailing_comments env in\n          { kind = Declared exp; comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing () }\n        ) else\n          let trailing = Eat.trailing_comments env in\n          {\n            kind = Ast.Type.Predicate.Inferred;\n            comments = Flow_ast_utils.mk_comments_opt ~leading ~trailing ();\n          }\n    )\n\n  let predicate_opt env =\n    let env = with_no_anon_function_type false env in\n    match Peek.token env with\n    | T_CHECKS -> Some (predicate env)\n    | _ -> None\n\n  let annotation_and_predicate_opt env =\n    let open Ast.Type in\n    match (Peek.token env, Peek.ith_token ~i:1 env) with\n    | (T_COLON, T_CHECKS) ->\n      Expect.token env T_COLON;\n      (Missing (Peek.loc_skip_lookahead env), predicate_opt env)\n    | (T_COLON, _) ->\n      let annotation =\n        let annotation = annotation_opt env in\n        if Peek.token env = T_CHECKS then\n          type_annotation_hint_remove_trailing env annotation\n        else\n          annotation\n      in\n      let predicate = predicate_opt env in\n      (annotation, predicate)\n    | _ -> (Missing (Peek.loc_skip_lookahead env), None)\n\n  let wrap f env =\n    let env = env |> with_strict true in\n    Eat.push_lex_mode env Lex_mode.TYPE;\n    let ret = f env in\n    Eat.pop_lex_mode env;\n    ret\n\n  let _type = wrap _type\n\n  let type_identifier = wrap type_identifier\n\n  let type_params = wrap type_params\n\n  let type_args = wrap type_args\n\n  let _object ~is_class env = wrap (_object ~is_class ~allow_exact:false ~allow_spread:false) env\n\n  let interface_helper = wrap interface_helper\n\n  let function_param_list = wrap function_param_list\n\n  let annotation = wrap annotation\n\n  let annotation_opt = wrap annotation_opt\n\n  let predicate_opt = wrap predicate_opt\n\n  let annotation_and_predicate_opt = 
wrap annotation_and_predicate_opt\n\n  let generic = wrap generic\nend\n"
  },
  {
    "path": "analysis/vendor/js_parser/wtf8.ml",
    "content": "(**\n * Copyright (c) 2017-present, Facebook, Inc.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\n(*\n * WTF-8 is a superset of UTF-8 that allows unpaired surrogates.\n *\n * From ES6 6.1.4, \"The String Type\":\n *\n *   Where ECMAScript operations interpret String values, each element is\n *   interpreted as a single UTF-16 code unit. However, ECMAScript does not\n *   place any restrictions or requirements on the sequence of code units in\n *   a String value, so they may be ill-formed when interpreted as UTF-16 code\n *   unit sequences. Operations that do not interpret String contents treat\n *   them as sequences of undifferentiated 16-bit unsigned integers.\n *\n * If we try to encode these ill-formed code units into UTF-8, we similarly\n * get ill-formed UTF-8. WTF-8 is a fun name for that encoding.\n *\n * https://simonsapin.github.io/wtf-8/\n *)\n\ntype codepoint =\n  | Point of int\n  | Malformed\n\ntype 'a folder = 'a -> int -> codepoint -> 'a\n\n(* WTF-8 is a variable length encoding. The first byte in each codepoint\n   determines how many other bytes follow. 
*)\nlet needed_bytes c =\n  if 0x00 <= c && c <= 0x7F then 1 else\n  if 0xC2 <= c && c <= 0xDF then 2 else\n  if 0xE0 <= c && c <= 0xEF then 3 else\n  if 0xF0 <= c && c <= 0xF4 then 4 else\n  0\n\nlet unsafe_char s i = Char.code (Bytes.unsafe_get s i)\n\nlet codepoint s i = function\n  | 1 -> unsafe_char s i\n  | 2 ->\n    let b0 = unsafe_char s i in\n    let b1 = unsafe_char s (i + 1) in\n    ((b0 land 0x1F) lsl 6) lor (b1 land 0x3F)\n  | 3 ->\n    let b0 = unsafe_char s (i) in\n    let b1 = unsafe_char s (i + 1) in\n    let b2 = unsafe_char s (i + 2) in\n    ((b0 land 0x0F) lsl 12) lor\n    ((b1 land 0x3F) lsl 6) lor\n    (b2 land 0x3F)\n  | 4 ->\n    let b0 = unsafe_char s (i) in\n    let b1 = unsafe_char s (i + 1) in\n    let b2 = unsafe_char s (i + 2) in\n    let b3 = unsafe_char s (i + 3) in\n    ((b0 land 0x07) lsl 18) lor\n    ((b1 land 0x3F) lsl 12) lor\n    ((b2 land 0x3F) lsl 6) lor\n    (b3 land 0x3F)\n  | _ -> assert false\n\n(* Fold over the WTF-8 code units in a string *)\nlet fold_wtf_8 ?(pos = 0) ?len f acc s =\n  let rec loop acc f s i l =\n    if i = l then acc else\n    let need = needed_bytes (unsafe_char s i) in\n    if need = 0 then (loop [@tailcall]) (f acc i Malformed) f s (i + 1) l else\n    let rem = l - i in\n    if rem < need then f acc i Malformed else\n    (loop [@tailcall]) (f acc i (Point (codepoint s i need))) f s (i + need) l\n  in\n  let len = match len with\n  | None -> String.length s - pos\n  | Some l -> l\n  in\n  loop acc f (Bytes.unsafe_of_string s) pos len\n\n(* Add a UTF-16 code unit to a buffer, encoded in WTF-8. 
*)\nlet add_wtf_8 buf code =\n  let[@inline] w byte = Buffer.add_char buf (Char.unsafe_chr byte) in\n  if code >= 0x10000 then begin\n  (* 4 bytes *)\n    w (0xf0 lor (code lsr 18));\n    w (0x80 lor ((code lsr 12) land 0x3F));\n    w (0x80 lor ((code lsr 6) land 0x3F));\n    w (0x80 lor (code land 0x3F))\n  end else if code >= 0x800 then begin\n  (* 3 bytes *)\n    w (0xe0 lor (code lsr 12));\n    w (0x80 lor ((code lsr 6) land 0x3F));\n    w (0x80 lor (code land 0x3F))\n  end else if code >= 0x80 then begin\n  (* 2 bytes *)\n    w (0xc0 lor (code lsr 6));\n    w (0x80 lor (code land 0x3F))\n  end else\n  (* 1 byte *)\n    w code\n"
  },
  {
    "path": "analysis/vendor/js_parser/wtf8.mli",
    "content": "(*\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE file in the root directory of this source tree.\n *)\n\n[@@@ocaml.text\n\"\\n * Copyright (c) 2017-present, Facebook, Inc.\\n *\\n * This source code is licensed under the MIT license found in the\\n * LICENSE file in the root directory of this source tree.\\n \"]\n\ntype codepoint =\n  | Point of int\n  | Malformed\n\ntype 'a folder = 'a -> int -> codepoint -> 'a\n\nval fold_wtf_8 : ?pos:int -> ?len:int -> 'a folder -> 'a -> string -> 'a\n\nval add_wtf_8 : Buffer.t -> int -> unit\n"
  },
  {
    "path": "analysis/vendor/json/Json.ml",
    "content": "(** # Json parser\n *\n * Works with bucklescript and bsb-native\n *\n * ## Basics\n *\n * ```\n * open Json.Infix; /* for the nice infix operators */\n * let raw = {|{\"hello\": \"folks\"}|};\n * let who = Json.parse(raw) |> Json.get(\"hello\") |?> Json.string;\n * Js.log(who);\n * ```\n *\n * ## Parse & stringify\n *\n * @doc parse, stringify\n *\n * ## Accessing descendents\n *\n * @doc get, nth, getPath\n *\n * ## Coercing to types\n *\n * @doc string, number, array, obj, bool, null\n *\n * ## The JSON type\n *\n * @doc t\n *\n * ## Infix operators for easier working\n *\n * @doc Infix\n *)\n\ntype t =\n  | String of string\n  | Number of float\n  | Array of t list\n  | Object of (string * t) list\n  | True\n  | False\n  | Null\n\nlet string_of_number f =\n  let s = string_of_float f in\n  if s.[String.length s - 1] = '.' then String.sub s 0 (String.length s - 1)\n  else s\n\n(** * This module is provided for easier working with optional values. *)\nmodule Infix = struct\n  (** The \"force unwrap\" operator * * If you're sure there's a value, you can\n      force it. * ``` * open Json.Infix; * let x: int = Some(10) |! \"Expected\n      this to be present\"; * Js.log(x); * ``` * * But you gotta be sure,\n      otherwise it will throw. * ```reason;raises * open Json.Infix; * let x:\n      int = None |! \"This will throw\"; * ``` *)\n  let ( |! ) o d =\n    match o with\n    | None -> failwith d\n    | Some v -> v\n\n  (** The \"upwrap with default\" operator * ``` * open Json.Infix; * let x: int =\n      Some(10) |? 4; * let y: int = None |? 5; * Js.log2(x, y); * ``` *)\n  let ( |? ) o d =\n    match o with\n    | None -> d\n    | Some v -> v\n\n  (** The \"transform contents into new optional\" operator * ``` * open\n      Json.Infix; * let maybeInc = x => x > 5 ? 
Some(x + 1) : None; * let x:\n      option(int) = Some(14) |?> maybeInc; * let y: option(int) = None |?>\n      maybeInc; * ``` *)\n  let ( |?> ) o fn =\n    match o with\n    | None -> None\n    | Some v -> fn v\n\n  (** The \"transform contents into new value & then re-wrap\" operator * ``` *\n      open Json.Infix; * let inc = x => x + 1; * let x: option(int) = Some(7)\n      |?>> inc; * let y: option(int) = None |?>> inc; * Js.log2(x, y); * ``` *)\n  let ( |?>> ) o fn =\n    match o with\n    | None -> None\n    | Some v -> Some (fn v)\n\n  (** \"handle the value if present, otherwise here's the default\" * * It's\n      called fold because that's what people call it :?. It's the same as\n      \"transform contents to new value\" + \"unwrap with default\". * * ``` * open\n      Json.Infix; * let inc = x => x + 1; * let x: int = fold(Some(4), 10, inc);\n      * let y: int = fold(None, 2, inc); * Js.log2(x, y); * ``` *)\n  let fold o d f =\n    match o with\n    | None -> d\n    | Some v -> f v\nend\n\nlet escape text =\n  let ln = String.length text in\n  let buf = Buffer.create ln in\n  let rec loop i =\n    if i < ln then (\n      (match text.[i] with\n      | '\\012' -> Buffer.add_string buf \"\\\\f\"\n      | '\\\\' -> Buffer.add_string buf \"\\\\\\\\\"\n      | '\"' -> Buffer.add_string buf \"\\\\\\\"\"\n      | '\\n' -> Buffer.add_string buf \"\\\\n\"\n      | '\\b' -> Buffer.add_string buf \"\\\\b\"\n      | '\\r' -> Buffer.add_string buf \"\\\\r\"\n      | '\\t' -> Buffer.add_string buf \"\\\\t\"\n      | c ->\n        let code = Char.code c in\n        if code < 0x20 then Printf.bprintf buf \"\\\\u%04x\" code\n        else Buffer.add_char buf c);\n      loop (i + 1))\n  in\n  loop 0;\n  Buffer.contents buf\n\n(**\n * ```\n * let text = {|{\"hello\": \"folks\", \"aa\": [2, 3, \"four\"]}|};\n * let result = Json.stringify(Json.parse(text));\n * Js.log(result);\n * assert(text == result);\n * ```\n *)\nlet rec stringify t =\n  match t with\n  | String 
value -> \"\\\"\" ^ escape value ^ \"\\\"\"\n  | Number num -> string_of_number num\n  | Array items -> \"[\" ^ String.concat \", \" (List.map stringify items) ^ \"]\"\n  | Object items ->\n    \"{\"\n    ^ String.concat \", \"\n        (List.map\n           (fun (k, v) -> \"\\\"\" ^ String.escaped k ^ \"\\\": \" ^ stringify v)\n           items)\n    ^ \"}\"\n  | True -> \"true\"\n  | False -> \"false\"\n  | Null -> \"null\"\n\nlet white n =\n  let buffer = Buffer.create n in\n  for i = 0 to n - 1 do\n    Buffer.add_char buffer ' '\n  done;\n  Buffer.contents buffer\n\nlet rec stringifyPretty ?(indent = 0) t =\n  match t with\n  | String value -> \"\\\"\" ^ escape value ^ \"\\\"\"\n  | Number num -> string_of_number num\n  | Array [] -> \"[]\"\n  | Array [(String _ as contents)] -> \"[\" ^ stringifyPretty contents ^ \"]\"\n  | Array items ->\n    \"[\\n\" ^ white indent\n    ^ String.concat\n        (\",\\n\" ^ white indent)\n        (List.map (stringifyPretty ~indent:(indent + 2)) items)\n    ^ \"\\n\"\n    ^ white (indent - 2)\n    ^ \"]\"\n  | Object [] -> \"{}\"\n  | Object items ->\n    \"{\\n\" ^ white indent\n    ^ String.concat\n        (\",\\n\" ^ white indent)\n        (List.map\n           (fun (k, v) ->\n             \"\\\"\" ^ String.escaped k ^ \"\\\": \"\n             ^ stringifyPretty ~indent:(indent + 2) v)\n           items)\n    ^ \"\\n\"\n    ^ white (indent - 2)\n    ^ \"}\"\n  | True -> \"true\"\n  | False -> \"false\"\n  | Null -> \"null\"\n\nlet unwrap message t =\n  match t with\n  | Some v -> v\n  | None -> failwith message\n\nmodule Parser = struct\n  let split_by ?(keep_empty = false) is_delim str =\n    let len = String.length str in\n    let rec loop acc last_pos pos =\n      if pos = -1 then\n        if last_pos = 0 && not keep_empty then acc\n        else String.sub str 0 last_pos :: acc\n      else if is_delim str.[pos] then\n        let new_len = last_pos - pos - 1 in\n        if new_len <> 0 || keep_empty then\n          let v = 
String.sub str (pos + 1) new_len in\n          loop (v :: acc) pos (pos - 1)\n        else loop acc pos (pos - 1)\n      else loop acc last_pos (pos - 1)\n    in\n    loop [] len (len - 1)\n\n  let fail text pos message =\n    let pre = String.sub text 0 pos in\n    let lines = split_by (fun c -> c = '\\n') pre in\n    let count = List.length lines in\n    let last =\n      match count > 0 with\n      | true -> List.nth lines (count - 1)\n      | false -> \"\"\n    in\n    let col = String.length last + 1 in\n    let line = List.length lines in\n    let string =\n      Printf.sprintf \"Error \\\"%s\\\" at %d:%d -> %s\\n\" message line col last\n    in\n    failwith string\n\n  let rec skipToNewline text pos =\n    if pos >= String.length text then pos\n    else if text.[pos] = '\\n' then pos + 1\n    else skipToNewline text (pos + 1)\n\n  let stringTail text =\n    let len = String.length text in\n    if len > 1 then String.sub text 1 (len - 1) else \"\"\n\n  let rec skipToCloseMultilineComment text pos =\n    if pos + 1 >= String.length text then failwith \"Unterminated comment\"\n    else if text.[pos] = '*' && text.[pos + 1] = '/' then pos + 2\n    else skipToCloseMultilineComment text (pos + 1)\n\n  let rec skipWhite text pos =\n    if\n      pos < String.length text\n      && (text.[pos] = ' '\n         || text.[pos] = '\\t'\n         || text.[pos] = '\\n'\n         || text.[pos] = '\\r')\n    then skipWhite text (pos + 1)\n    else pos\n\n  (* from https://stackoverflow.com/a/42431362 *)\n  let utf8encode s =\n    let prefs = [|0; 192; 224|] in\n    let s1 n = String.make 1 (Char.chr n) in\n    let rec ienc k sofar resid =\n      let bct = if k = 0 then 7 else 6 - k in\n      if resid < 1 lsl bct then s1 (prefs.(k) + resid) ^ sofar\n      else ienc (k + 1) (s1 (128 + (resid mod 64)) ^ sofar) (resid / 64)\n    in\n    ienc 0 \"\" (int_of_string (\"0x\" ^ s))\n\n  let parseString text pos =\n    (* let i = ref(pos); *)\n    let buffer = Buffer.create 
(String.length text) in\n    let ln = String.length text in\n    let rec loop i =\n      match i >= ln with\n      | true -> fail text i \"Unterminated string\"\n      | false -> (\n        match text.[i] with\n        | '\"' -> i + 1\n        | '\\\\' -> (\n          match i + 1 >= ln with\n          | true -> fail text i \"Unterminated string\"\n          | false -> (\n            match text.[i + 1] with\n            | '/' ->\n              Buffer.add_char buffer '/';\n              loop (i + 2)\n            | 'f' ->\n              Buffer.add_char buffer '\\012';\n              loop (i + 2)\n            | 'u' when i + 6 < ln ->\n              Buffer.add_string buffer (utf8encode (String.sub text (i + 2) 4));\n              loop (i + 7)\n            | _ ->\n              Buffer.add_string buffer (Scanf.unescaped (String.sub text i 2));\n              loop (i + 2)))\n        | c ->\n          Buffer.add_char buffer c;\n          loop (i + 1))\n    in\n    let final = loop pos in\n    (Buffer.contents buffer, final)\n\n  let parseDigits text pos =\n    let len = String.length text in\n    let rec loop i =\n      if i >= len then i\n      else\n        match text.[i] with\n        | '0' .. '9' -> loop (i + 1)\n        | _ -> i\n    in\n    loop (pos + 1)\n\n  let parseWithDecimal text pos =\n    let pos = parseDigits text pos in\n    if pos < String.length text && text.[pos] = '.' 
then\n      let pos = parseDigits text (pos + 1) in\n      pos\n    else pos\n\n  let parseNumber text pos =\n    let pos = parseWithDecimal text pos in\n    let ln = String.length text in\n    if pos < ln - 1 && (text.[pos] = 'E' || text.[pos] = 'e') then\n      let pos =\n        match text.[pos + 1] with\n        | '-' | '+' -> pos + 2\n        | _ -> pos + 1\n      in\n      parseDigits text pos\n    else pos\n\n  let parseNegativeNumber text pos =\n    let final =\n      if text.[pos] = '-' then parseNumber text (pos + 1)\n      else parseNumber text pos\n    in\n    (Number (float_of_string (String.sub text pos (final - pos))), final)\n\n  let expect char text pos message =\n    if text.[pos] <> char then fail text pos (\"Expected: \" ^ message)\n    else pos + 1\n\n  let parseComment : 'a. string -> int -> (string -> int -> 'a) -> 'a =\n   fun text pos next ->\n    if text.[pos] <> '/' then\n      if text.[pos] = '*' then\n        next text (skipToCloseMultilineComment text (pos + 1))\n      else failwith \"Invalid syntax\"\n    else next text (skipToNewline text (pos + 1))\n\n  let maybeSkipComment text pos =\n    if pos < String.length text && text.[pos] = '/' then\n      if pos + 1 < String.length text && text.[pos + 1] = '/' then\n        skipToNewline text (pos + 1)\n      else if pos + 1 < String.length text && text.[pos + 1] = '*' then\n        skipToCloseMultilineComment text (pos + 1)\n      else fail text pos \"Invalid synatx\"\n    else pos\n\n  let rec skip text pos =\n    if pos = String.length text then pos\n    else\n      let n = skipWhite text pos |> maybeSkipComment text in\n      if n > pos then skip text n else n\n\n  let rec parse text pos =\n    if pos >= String.length text then\n      fail text pos \"Reached end of file without being done parsing\"\n    else\n      match text.[pos] with\n      | '/' -> parseComment text (pos + 1) parse\n      | '[' -> parseArray text (pos + 1)\n      | '{' -> parseObject text (pos + 1)\n      | 'n' 
->\n        if String.sub text pos 4 = \"null\" then (Null, pos + 4)\n        else fail text pos \"unexpected character\"\n      | 't' ->\n        if String.sub text pos 4 = \"true\" then (True, pos + 4)\n        else fail text pos \"unexpected character\"\n      | 'f' ->\n        if String.sub text pos 5 = \"false\" then (False, pos + 5)\n        else fail text pos \"unexpected character\"\n      | '\\n' | '\\t' | ' ' | '\\r' -> parse text (skipWhite text pos)\n      | '\"' ->\n        let s, pos = parseString text (pos + 1) in\n        (String s, pos)\n      | '-' | '0' .. '9' -> parseNegativeNumber text pos\n      | _ -> fail text pos \"unexpected character\"\n\n  and parseArrayValue text pos =\n    let pos = skip text pos in\n    let value, pos = parse text pos in\n    let pos = skip text pos in\n    match text.[pos] with\n    | ',' ->\n      let pos = skip text (pos + 1) in\n      if text.[pos] = ']' then ([value], pos + 1)\n      else\n        let rest, pos = parseArrayValue text pos in\n        (value :: rest, pos)\n    | ']' -> ([value], pos + 1)\n    | _ -> fail text pos \"unexpected character\"\n\n  and parseArray text pos =\n    let pos = skip text pos in\n    match text.[pos] with\n    | ']' -> (Array [], pos + 1)\n    | _ ->\n      let items, pos = parseArrayValue text pos in\n      (Array items, pos)\n\n  and parseObjectValue text pos =\n    let pos = skip text pos in\n    if text.[pos] <> '\"' then fail text pos \"Expected string\"\n    else\n      let key, pos = parseString text (pos + 1) in\n      let pos = skip text pos in\n      let pos = expect ':' text pos \"Colon\" in\n      let value, pos = parse text pos in\n      let pos = skip text pos in\n      match text.[pos] with\n      | ',' ->\n        let pos = skip text (pos + 1) in\n        if text.[pos] = '}' then ([(key, value)], pos + 1)\n        else\n          let rest, pos = parseObjectValue text pos in\n          ((key, value) :: rest, pos)\n      | '}' -> ([(key, value)], pos + 1)\n      | 
_ ->\n        let rest, pos = parseObjectValue text pos in\n        ((key, value) :: rest, pos)\n\n  and parseObject text pos =\n    let pos = skip text pos in\n    if text.[pos] = '}' then (Object [], pos + 1)\n    else\n      let pairs, pos = parseObjectValue text pos in\n      (Object pairs, pos)\nend\n[@@nodoc]\n\n(** Turns some text into a json object. throws on failure *)\nlet parse text =\n  try\n    let item, pos = Parser.parse text 0 in\n    let pos = Parser.skip text pos in\n    if pos < String.length text then\n      (* failwith\n         (\"Extra data after parse finished: \"\n         ^ String.sub text pos (String.length text - pos)) *)\n      None\n    else Some item\n  with Invalid_argument _ | Failure _ -> None\n\n(* Accessor helpers *)\nlet bind v fn =\n  match v with\n  | None -> None\n  | Some v -> fn v\n\n(** If `t` is an object, get the value associated with the given string key *)\nlet get key t =\n  match t with\n  | Object items -> ( try Some (List.assoc key items) with Not_found -> None)\n  | _ -> None\n\n(** If `t` is an array, get the value associated with the given index *)\nlet nth n t =\n  match t with\n  | Array items ->\n    if n < List.length items then Some (List.nth items n) else None\n  | _ -> None\n\nlet string t =\n  match t with\n  | String s -> Some s\n  | _ -> None\nlet number t =\n  match t with\n  | Number s -> Some s\n  | _ -> None\nlet array t =\n  match t with\n  | Array s -> Some s\n  | _ -> None\nlet obj t =\n  match t with\n  | Object s -> Some s\n  | _ -> None\nlet bool t =\n  match t with\n  | True -> Some true\n  | False -> Some false\n  | _ -> None\nlet null t =\n  match t with\n  | Null -> Some ()\n  | _ -> None\n\nlet rec parsePath keyList t =\n  match keyList with\n  | [] -> Some t\n  | head :: rest -> (\n    match get head t with\n    | None -> None\n    | Some value -> parsePath rest value)\n\n(** Get a deeply nested value from an object `t`.\n * ```\n * open Json.Infix;\n * let json = Json.parse({|{\"a\": 
{\"b\": {\"c\": 2}}}|});\n * let num = Json.getPath(\"a.b.c\", json) |?> Json.number;\n * assert(num == Some(2.))\n * ```\n *)\nlet getPath path t =\n  let keys = Parser.split_by (fun c -> c = '.') path in\n  parsePath keys t\n"
  },
  {
    "path": "analysis/vendor/json/dune",
    "content": "(library\n (name jsonlib)\n (wrapped false)\n (flags -w \"-9\")\n (libraries))\n"
  },
  {
    "path": "analysis/vendor/ml/annot.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*           Damien Doligez, projet Gallium, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 2007 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Data types for annotations (Stypes.ml) *)\n\ntype call = Tail | Stack | Inline\n\ntype ident =\n  | Iref_internal of Location.t (* defining occurrence *)\n  | Iref_external\n  | Idef of Location.t (* scope *)\n"
  },
  {
    "path": "analysis/vendor/ml/ast_async.ml",
    "content": "let is_async : Parsetree.attribute -> bool =\n fun ({txt}, _) -> txt = \"async\" || txt = \"res.async\"\n\nlet add_promise_type ?(loc = Location.none) ~async\n    (result : Parsetree.expression) =\n  if async then\n    let unsafe_async =\n      Ast_helper.Exp.ident ~loc\n        {txt = Ldot (Ldot (Lident \"Js\", \"Promise\"), \"unsafe_async\"); loc}\n    in\n    Ast_helper.Exp.apply ~loc unsafe_async [(Nolabel, result)]\n  else result\n\nlet add_async_attribute ~async (body : Parsetree.expression) =\n  if async then\n    match body.pexp_desc with\n    | Pexp_construct (x, Some e) when Ast_uncurried.expr_is_uncurried_fun body\n      ->\n      {\n        body with\n        pexp_desc =\n          Pexp_construct\n            ( x,\n              Some\n                {\n                  e with\n                  pexp_attributes =\n                    ({txt = \"res.async\"; loc = Location.none}, PStr [])\n                    :: e.pexp_attributes;\n                } );\n      }\n    | _ ->\n      {\n        body with\n        pexp_attributes =\n          ({txt = \"res.async\"; loc = Location.none}, PStr [])\n          :: body.pexp_attributes;\n      }\n  else body\n\nlet rec add_promise_to_result ~loc (e : Parsetree.expression) =\n  match e.pexp_desc with\n  | Pexp_fun (label, eo, pat, body) ->\n    let body = add_promise_to_result ~loc body in\n    {e with pexp_desc = Pexp_fun (label, eo, pat, body)}\n  | _ -> add_promise_type ~loc ~async:true e\n\nlet make_function_async ~async (e : Parsetree.expression) =\n  if async then\n    match e.pexp_desc with\n    | Pexp_fun (_, _, {ppat_loc}, _) -> add_promise_to_result ~loc:ppat_loc e\n    | _ -> assert false\n  else e\n"
  },
  {
    "path": "analysis/vendor/ml/ast_await.ml",
    "content": "let is_await : Parsetree.attribute -> bool =\n fun ({txt}, _) -> txt = \"await\" || txt = \"res.await\"\n\nlet create_await_expression (e : Parsetree.expression) =\n  let loc = {e.pexp_loc with loc_ghost = true} in\n  let unsafe_await =\n    Ast_helper.Exp.ident ~loc\n      {txt = Ldot (Ldot (Lident \"Js\", \"Promise\"), \"unsafe_await\"); loc}\n  in\n  Ast_helper.Exp.apply ~loc unsafe_await [(Nolabel, e)]\n\n(* Transform `@res.await M` to unpack(@res.await Js.import(module(M: __M0__))) *)\nlet create_await_module_expression ~module_type_lid (e : Parsetree.module_expr)\n    =\n  let open Ast_helper in\n  let remove_await_attribute =\n    List.filter (fun ((loc, _) : Parsetree.attribute) -> loc.txt != \"res.await\")\n  in\n  {\n    e with\n    pmod_desc =\n      Pmod_unpack\n        (create_await_expression\n           (Exp.apply ~loc:e.pmod_loc\n              (Exp.ident ~loc:e.pmod_loc\n                 {\n                   txt = Longident.Ldot (Lident \"Js\", \"import\");\n                   loc = e.pmod_loc;\n                 })\n              [\n                ( Nolabel,\n                  Exp.constraint_ ~loc:e.pmod_loc\n                    (Exp.pack ~loc:e.pmod_loc\n                       {\n                         e with\n                         pmod_attributes =\n                           remove_await_attribute e.pmod_attributes;\n                       })\n                    (Typ.package ~loc:e.pmod_loc module_type_lid []) );\n              ]));\n  }\n"
  },
  {
    "path": "analysis/vendor/ml/ast_helper.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                         Alain Frisch, LexiFi                           *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Helpers to produce Parsetree fragments *)\n\nopen Asttypes\nopen Parsetree\nopen Docstrings\n\ntype lid = Longident.t loc\ntype str = string loc\ntype loc = Location.t\ntype attrs = attribute list\n\nlet default_loc = ref Location.none\n\nlet with_default_loc l f =\n  let old = !default_loc in\n  default_loc := l;\n  try\n    let r = f () in\n    default_loc := old;\n    r\n  with exn ->\n    default_loc := old;\n    raise exn\n\nmodule Const = struct\n  let integer ?suffix i = Pconst_integer (i, suffix)\n  let int ?suffix i = integer ?suffix (string_of_int i)\n  let int32 ?(suffix = 'l') i = integer ~suffix (Int32.to_string i)\n  let int64 ?(suffix = 'L') i = integer ~suffix (Int64.to_string i)\n  let nativeint ?(suffix = 'n') i = integer ~suffix (Nativeint.to_string i)\n  let float ?suffix f = Pconst_float (f, suffix)\n  let char c = Pconst_char (Char.code c)\n  let 
string ?quotation_delimiter s = Pconst_string (s, quotation_delimiter)\nend\n\nmodule Typ = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) d =\n    {ptyp_desc = d; ptyp_loc = loc; ptyp_attributes = attrs}\n  let attr d a = {d with ptyp_attributes = d.ptyp_attributes @ [a]}\n\n  let any ?loc ?attrs () = mk ?loc ?attrs Ptyp_any\n  let var ?loc ?attrs a = mk ?loc ?attrs (Ptyp_var a)\n  let arrow ?loc ?attrs a b c = mk ?loc ?attrs (Ptyp_arrow (a, b, c))\n  let tuple ?loc ?attrs a = mk ?loc ?attrs (Ptyp_tuple a)\n  let constr ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_constr (a, b))\n  let object_ ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_object (a, b))\n  let class_ ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_class (a, b))\n  let alias ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_alias (a, b))\n  let variant ?loc ?attrs a b c = mk ?loc ?attrs (Ptyp_variant (a, b, c))\n  let poly ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_poly (a, b))\n  let package ?loc ?attrs a b = mk ?loc ?attrs (Ptyp_package (a, b))\n  let extension ?loc ?attrs a = mk ?loc ?attrs (Ptyp_extension a)\n\n  let force_poly t =\n    match t.ptyp_desc with\n    | Ptyp_poly _ -> t\n    | _ -> poly ~loc:t.ptyp_loc [] t (* -> ghost? 
*)\n\n  let varify_constructors var_names t =\n    let check_variable vl loc v =\n      if List.mem v vl then raise Syntaxerr.(Error (Variable_in_scope (loc, v)))\n    in\n    let var_names = List.map (fun v -> v.txt) var_names in\n    let rec loop t =\n      let desc =\n        match t.ptyp_desc with\n        | Ptyp_any -> Ptyp_any\n        | Ptyp_var x ->\n          check_variable var_names t.ptyp_loc x;\n          Ptyp_var x\n        | Ptyp_arrow (label, core_type, core_type') ->\n          Ptyp_arrow (label, loop core_type, loop core_type')\n        | Ptyp_tuple lst -> Ptyp_tuple (List.map loop lst)\n        | Ptyp_constr ({txt = Longident.Lident s}, []) when List.mem s var_names\n          ->\n          Ptyp_var s\n        | Ptyp_constr (longident, lst) ->\n          Ptyp_constr (longident, List.map loop lst)\n        | Ptyp_object (lst, o) -> Ptyp_object (List.map loop_object_field lst, o)\n        | Ptyp_class (longident, lst) ->\n          Ptyp_class (longident, List.map loop lst)\n        | Ptyp_alias (core_type, string) ->\n          check_variable var_names t.ptyp_loc string;\n          Ptyp_alias (loop core_type, string)\n        | Ptyp_variant (row_field_list, flag, lbl_lst_option) ->\n          Ptyp_variant\n            (List.map loop_row_field row_field_list, flag, lbl_lst_option)\n        | Ptyp_poly (string_lst, core_type) ->\n          List.iter\n            (fun v -> check_variable var_names t.ptyp_loc v.txt)\n            string_lst;\n          Ptyp_poly (string_lst, loop core_type)\n        | Ptyp_package (longident, lst) ->\n          Ptyp_package (longident, List.map (fun (n, typ) -> (n, loop typ)) lst)\n        | Ptyp_extension (s, arg) -> Ptyp_extension (s, arg)\n      in\n      {t with ptyp_desc = desc}\n    and loop_row_field = function\n      | Rtag (label, attrs, flag, lst) ->\n        Rtag (label, attrs, flag, List.map loop lst)\n      | Rinherit t -> Rinherit (loop t)\n    and loop_object_field = function\n      | Otag (label, attrs, 
t) -> Otag (label, attrs, loop t)\n      | Oinherit t -> Oinherit (loop t)\n    in\n    loop t\nend\n\nmodule Pat = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) d =\n    {ppat_desc = d; ppat_loc = loc; ppat_attributes = attrs}\n  let attr d a = {d with ppat_attributes = d.ppat_attributes @ [a]}\n\n  let any ?loc ?attrs () = mk ?loc ?attrs Ppat_any\n  let var ?loc ?attrs a = mk ?loc ?attrs (Ppat_var a)\n  let alias ?loc ?attrs a b = mk ?loc ?attrs (Ppat_alias (a, b))\n  let constant ?loc ?attrs a = mk ?loc ?attrs (Ppat_constant a)\n  let interval ?loc ?attrs a b = mk ?loc ?attrs (Ppat_interval (a, b))\n  let tuple ?loc ?attrs a = mk ?loc ?attrs (Ppat_tuple a)\n  let construct ?loc ?attrs a b = mk ?loc ?attrs (Ppat_construct (a, b))\n  let variant ?loc ?attrs a b = mk ?loc ?attrs (Ppat_variant (a, b))\n  let record ?loc ?attrs a b = mk ?loc ?attrs (Ppat_record (a, b))\n  let array ?loc ?attrs a = mk ?loc ?attrs (Ppat_array a)\n  let or_ ?loc ?attrs a b = mk ?loc ?attrs (Ppat_or (a, b))\n  let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Ppat_constraint (a, b))\n  let type_ ?loc ?attrs a = mk ?loc ?attrs (Ppat_type a)\n  let lazy_ ?loc ?attrs a = mk ?loc ?attrs (Ppat_lazy a)\n  let unpack ?loc ?attrs a = mk ?loc ?attrs (Ppat_unpack a)\n  let open_ ?loc ?attrs a b = mk ?loc ?attrs (Ppat_open (a, b))\n  let exception_ ?loc ?attrs a = mk ?loc ?attrs (Ppat_exception a)\n  let extension ?loc ?attrs a = mk ?loc ?attrs (Ppat_extension a)\nend\n\nmodule Exp = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) d =\n    {pexp_desc = d; pexp_loc = loc; pexp_attributes = attrs}\n  let attr d a = {d with pexp_attributes = d.pexp_attributes @ [a]}\n\n  let ident ?loc ?attrs a = mk ?loc ?attrs (Pexp_ident a)\n  let constant ?loc ?attrs a = mk ?loc ?attrs (Pexp_constant a)\n  let let_ ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_let (a, b, c))\n  let fun_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pexp_fun (a, b, c, d))\n  let function_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_function 
a)\n  let apply ?loc ?attrs a b = mk ?loc ?attrs (Pexp_apply (a, b))\n  let match_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_match (a, b))\n  let try_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_try (a, b))\n  let tuple ?loc ?attrs a = mk ?loc ?attrs (Pexp_tuple a)\n  let construct ?loc ?attrs a b = mk ?loc ?attrs (Pexp_construct (a, b))\n  let variant ?loc ?attrs a b = mk ?loc ?attrs (Pexp_variant (a, b))\n  let record ?loc ?attrs a b = mk ?loc ?attrs (Pexp_record (a, b))\n  let field ?loc ?attrs a b = mk ?loc ?attrs (Pexp_field (a, b))\n  let setfield ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_setfield (a, b, c))\n  let array ?loc ?attrs a = mk ?loc ?attrs (Pexp_array a)\n  let ifthenelse ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_ifthenelse (a, b, c))\n  let sequence ?loc ?attrs a b = mk ?loc ?attrs (Pexp_sequence (a, b))\n  let while_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_while (a, b))\n  let for_ ?loc ?attrs a b c d e = mk ?loc ?attrs (Pexp_for (a, b, c, d, e))\n  let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pexp_constraint (a, b))\n  let coerce ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_coerce (a, b, c))\n  let send ?loc ?attrs a b = mk ?loc ?attrs (Pexp_send (a, b))\n  let new_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_new a)\n  let setinstvar ?loc ?attrs a b = mk ?loc ?attrs (Pexp_setinstvar (a, b))\n  let override ?loc ?attrs a = mk ?loc ?attrs (Pexp_override a)\n  let letmodule ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_letmodule (a, b, c))\n  let letexception ?loc ?attrs a b = mk ?loc ?attrs (Pexp_letexception (a, b))\n  let assert_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_assert a)\n  let lazy_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_lazy a)\n  let poly ?loc ?attrs a b = mk ?loc ?attrs (Pexp_poly (a, b))\n  let object_ ?loc ?attrs a = mk ?loc ?attrs (Pexp_object a)\n  let newtype ?loc ?attrs a b = mk ?loc ?attrs (Pexp_newtype (a, b))\n  let pack ?loc ?attrs a = mk ?loc ?attrs (Pexp_pack a)\n  let open_ ?loc ?attrs a b c = mk ?loc ?attrs (Pexp_open (a, b, c))\n  let extension 
?loc ?attrs a = mk ?loc ?attrs (Pexp_extension a)\n  let unreachable ?loc ?attrs () = mk ?loc ?attrs Pexp_unreachable\n\n  let case lhs ?guard rhs = {pc_lhs = lhs; pc_guard = guard; pc_rhs = rhs}\nend\n\nmodule Mty = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) d =\n    {pmty_desc = d; pmty_loc = loc; pmty_attributes = attrs}\n  let attr d a = {d with pmty_attributes = d.pmty_attributes @ [a]}\n\n  let ident ?loc ?attrs a = mk ?loc ?attrs (Pmty_ident a)\n  let alias ?loc ?attrs a = mk ?loc ?attrs (Pmty_alias a)\n  let signature ?loc ?attrs a = mk ?loc ?attrs (Pmty_signature a)\n  let functor_ ?loc ?attrs a b c = mk ?loc ?attrs (Pmty_functor (a, b, c))\n  let with_ ?loc ?attrs a b = mk ?loc ?attrs (Pmty_with (a, b))\n  let typeof_ ?loc ?attrs a = mk ?loc ?attrs (Pmty_typeof a)\n  let extension ?loc ?attrs a = mk ?loc ?attrs (Pmty_extension a)\nend\n\nmodule Mod = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) d =\n    {pmod_desc = d; pmod_loc = loc; pmod_attributes = attrs}\n  let attr d a = {d with pmod_attributes = d.pmod_attributes @ [a]}\n\n  let ident ?loc ?attrs x = mk ?loc ?attrs (Pmod_ident x)\n  let structure ?loc ?attrs x = mk ?loc ?attrs (Pmod_structure x)\n  let functor_ ?loc ?attrs arg arg_ty body =\n    mk ?loc ?attrs (Pmod_functor (arg, arg_ty, body))\n  let apply ?loc ?attrs m1 m2 = mk ?loc ?attrs (Pmod_apply (m1, m2))\n  let constraint_ ?loc ?attrs m mty = mk ?loc ?attrs (Pmod_constraint (m, mty))\n  let unpack ?loc ?attrs e = mk ?loc ?attrs (Pmod_unpack e)\n  let extension ?loc ?attrs a = mk ?loc ?attrs (Pmod_extension a)\nend\n\nmodule Sig = struct\n  let mk ?(loc = !default_loc) d = {psig_desc = d; psig_loc = loc}\n\n  let value ?loc a = mk ?loc (Psig_value a)\n  let type_ ?loc rec_flag a = mk ?loc (Psig_type (rec_flag, a))\n  let type_extension ?loc a = mk ?loc (Psig_typext a)\n  let exception_ ?loc a = mk ?loc (Psig_exception a)\n  let module_ ?loc a = mk ?loc (Psig_module a)\n  let rec_module ?loc a = mk ?loc (Psig_recmodule a)\n 
 let modtype ?loc a = mk ?loc (Psig_modtype a)\n  let open_ ?loc a = mk ?loc (Psig_open a)\n  let include_ ?loc a = mk ?loc (Psig_include a)\n\n  let class_type ?loc a = mk ?loc (Psig_class_type a)\n  let extension ?loc ?(attrs = []) a = mk ?loc (Psig_extension (a, attrs))\n  let attribute ?loc a = mk ?loc (Psig_attribute a)\n  let text txt =\n    let f_txt = Ext_list.filter txt (fun ds -> docstring_body ds <> \"\") in\n    List.map (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) f_txt\nend\n\nmodule Str = struct\n  let mk ?(loc = !default_loc) d = {pstr_desc = d; pstr_loc = loc}\n\n  let eval ?loc ?(attrs = []) a = mk ?loc (Pstr_eval (a, attrs))\n  let value ?loc a b = mk ?loc (Pstr_value (a, b))\n  let primitive ?loc a = mk ?loc (Pstr_primitive a)\n  let type_ ?loc rec_flag a = mk ?loc (Pstr_type (rec_flag, a))\n  let type_extension ?loc a = mk ?loc (Pstr_typext a)\n  let exception_ ?loc a = mk ?loc (Pstr_exception a)\n  let module_ ?loc a = mk ?loc (Pstr_module a)\n  let rec_module ?loc a = mk ?loc (Pstr_recmodule a)\n  let modtype ?loc a = mk ?loc (Pstr_modtype a)\n  let open_ ?loc a = mk ?loc (Pstr_open a)\n  let class_type ?loc a = mk ?loc (Pstr_class_type a)\n  let include_ ?loc a = mk ?loc (Pstr_include a)\n  let extension ?loc ?(attrs = []) a = mk ?loc (Pstr_extension (a, attrs))\n  let attribute ?loc a = mk ?loc (Pstr_attribute a)\n  let text txt =\n    let f_txt = Ext_list.filter txt (fun ds -> docstring_body ds <> \"\") in\n    List.map (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) f_txt\nend\n\nmodule Cl = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) d =\n    {pcl_desc = d; pcl_loc = loc; pcl_attributes = attrs}\n  let attr d a = {d with pcl_attributes = d.pcl_attributes @ [a]}\n\n  let constr ?loc ?attrs a b = mk ?loc ?attrs (Pcl_constr (a, b))\n  let structure ?loc ?attrs a = mk ?loc ?attrs (Pcl_structure a)\n  let fun_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pcl_fun (a, b, c, d))\n  let apply ?loc ?attrs a b = mk 
?loc ?attrs (Pcl_apply (a, b))\n  let let_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcl_let (a, b, c))\n  let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pcl_constraint (a, b))\n  let extension ?loc ?attrs a = mk ?loc ?attrs (Pcl_extension a)\n  let open_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcl_open (a, b, c))\nend\n\nmodule Cty = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) d =\n    {pcty_desc = d; pcty_loc = loc; pcty_attributes = attrs}\n  let attr d a = {d with pcty_attributes = d.pcty_attributes @ [a]}\n\n  let constr ?loc ?attrs a b = mk ?loc ?attrs (Pcty_constr (a, b))\n  let signature ?loc ?attrs a = mk ?loc ?attrs (Pcty_signature a)\n  let arrow ?loc ?attrs a b c = mk ?loc ?attrs (Pcty_arrow (a, b, c))\n  let extension ?loc ?attrs a = mk ?loc ?attrs (Pcty_extension a)\n  let open_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcty_open (a, b, c))\nend\n\nmodule Ctf = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) d =\n    {pctf_desc = d; pctf_loc = loc; pctf_attributes = add_docs_attrs docs attrs}\n\n  let inherit_ ?loc ?attrs a = mk ?loc ?attrs (Pctf_inherit a)\n  let val_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pctf_val (a, b, c, d))\n  let method_ ?loc ?attrs a b c d = mk ?loc ?attrs (Pctf_method (a, b, c, d))\n  let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pctf_constraint (a, b))\n  let extension ?loc ?attrs a = mk ?loc ?attrs (Pctf_extension a)\n  let attribute ?loc a = mk ?loc (Pctf_attribute a)\n  let text txt =\n    let f_txt = Ext_list.filter txt (fun ds -> docstring_body ds <> \"\") in\n    List.map (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) f_txt\n\n  let attr d a = {d with pctf_attributes = d.pctf_attributes @ [a]}\nend\n\nmodule Cf = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) d =\n    {pcf_desc = d; pcf_loc = loc; pcf_attributes = add_docs_attrs docs attrs}\n\n  let val_ ?loc ?attrs a b c = mk ?loc ?attrs (Pcf_val (a, b, c))\n  let method_ ?loc ?attrs a b c = mk ?loc 
?attrs (Pcf_method (a, b, c))\n  let constraint_ ?loc ?attrs a b = mk ?loc ?attrs (Pcf_constraint (a, b))\n  let initializer_ ?loc ?attrs a = mk ?loc ?attrs (Pcf_initializer a)\n  let extension ?loc ?attrs a = mk ?loc ?attrs (Pcf_extension a)\n  let attribute ?loc a = mk ?loc (Pcf_attribute a)\n  let text txt =\n    let f_txt = Ext_list.filter txt (fun ds -> docstring_body ds <> \"\") in\n    List.map (fun ds -> attribute ~loc:(docstring_loc ds) (text_attr ds)) f_txt\n\n  let virtual_ ct = Cfk_virtual ct\n  let concrete o e = Cfk_concrete (o, e)\n\n  let attr d a = {d with pcf_attributes = d.pcf_attributes @ [a]}\nend\n\nmodule Val = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) ?(prim = [])\n      name typ =\n    {\n      pval_name = name;\n      pval_type = typ;\n      pval_attributes = add_docs_attrs docs attrs;\n      pval_loc = loc;\n      pval_prim = prim;\n    }\nend\n\nmodule Md = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) ?(text = [])\n      name typ =\n    {\n      pmd_name = name;\n      pmd_type = typ;\n      pmd_attributes = add_text_attrs text (add_docs_attrs docs attrs);\n      pmd_loc = loc;\n    }\nend\n\nmodule Mtd = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) ?(text = [])\n      ?typ name =\n    {\n      pmtd_name = name;\n      pmtd_type = typ;\n      pmtd_attributes = add_text_attrs text (add_docs_attrs docs attrs);\n      pmtd_loc = loc;\n    }\nend\n\nmodule Mb = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) ?(text = [])\n      name expr =\n    {\n      pmb_name = name;\n      pmb_expr = expr;\n      pmb_attributes = add_text_attrs text (add_docs_attrs docs attrs);\n      pmb_loc = loc;\n    }\nend\n\nmodule Opn = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs)\n      ?(override = Fresh) lid =\n    {\n      popen_lid = lid;\n      popen_override = override;\n      popen_loc = loc;\n      
popen_attributes = add_docs_attrs docs attrs;\n    }\nend\n\nmodule Incl = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) mexpr =\n    {\n      pincl_mod = mexpr;\n      pincl_loc = loc;\n      pincl_attributes = add_docs_attrs docs attrs;\n    }\nend\n\nmodule Vb = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) ?(text = [])\n      pat expr =\n    {\n      pvb_pat = pat;\n      pvb_expr = expr;\n      pvb_attributes = add_text_attrs text (add_docs_attrs docs attrs);\n      pvb_loc = loc;\n    }\nend\n\nmodule Ci = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) ?(text = [])\n      ?(virt = Concrete) ?(params = []) name expr =\n    {\n      pci_virt = virt;\n      pci_params = params;\n      pci_name = name;\n      pci_expr = expr;\n      pci_attributes = add_text_attrs text (add_docs_attrs docs attrs);\n      pci_loc = loc;\n    }\nend\n\nmodule Type = struct\n  let mk ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs) ?(text = [])\n      ?(params = []) ?(cstrs = []) ?(kind = Ptype_abstract) ?(priv = Public)\n      ?manifest name =\n    {\n      ptype_name = name;\n      ptype_params = params;\n      ptype_cstrs = cstrs;\n      ptype_kind = kind;\n      ptype_private = priv;\n      ptype_manifest = manifest;\n      ptype_attributes = add_text_attrs text (add_docs_attrs docs attrs);\n      ptype_loc = loc;\n    }\n\n  let constructor ?(loc = !default_loc) ?(attrs = []) ?(info = empty_info)\n      ?(args = Pcstr_tuple []) ?res name =\n    {\n      pcd_name = name;\n      pcd_args = args;\n      pcd_res = res;\n      pcd_loc = loc;\n      pcd_attributes = add_info_attrs info attrs;\n    }\n\n  let field ?(loc = !default_loc) ?(attrs = []) ?(info = empty_info)\n      ?(mut = Immutable) name typ =\n    {\n      pld_name = name;\n      pld_mutable = mut;\n      pld_type = typ;\n      pld_loc = loc;\n      pld_attributes = add_info_attrs info attrs;\n    }\nend\n\n(** Type extensions 
*)\nmodule Te = struct\n  let mk ?(attrs = []) ?(docs = empty_docs) ?(params = []) ?(priv = Public) path\n      constructors =\n    {\n      ptyext_path = path;\n      ptyext_params = params;\n      ptyext_constructors = constructors;\n      ptyext_private = priv;\n      ptyext_attributes = add_docs_attrs docs attrs;\n    }\n\n  let constructor ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs)\n      ?(info = empty_info) name kind =\n    {\n      pext_name = name;\n      pext_kind = kind;\n      pext_loc = loc;\n      pext_attributes = add_docs_attrs docs (add_info_attrs info attrs);\n    }\n\n  let decl ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs)\n      ?(info = empty_info) ?(args = Pcstr_tuple []) ?res name =\n    {\n      pext_name = name;\n      pext_kind = Pext_decl (args, res);\n      pext_loc = loc;\n      pext_attributes = add_docs_attrs docs (add_info_attrs info attrs);\n    }\n\n  let rebind ?(loc = !default_loc) ?(attrs = []) ?(docs = empty_docs)\n      ?(info = empty_info) name lid =\n    {\n      pext_name = name;\n      pext_kind = Pext_rebind lid;\n      pext_loc = loc;\n      pext_attributes = add_docs_attrs docs (add_info_attrs info attrs);\n    }\nend\n\nmodule Csig = struct\n  let mk self fields = {pcsig_self = self; pcsig_fields = fields}\nend\n\nmodule Cstr = struct\n  let mk self fields = {pcstr_self = self; pcstr_fields = fields}\nend\n"
  },
  {
    "path": "analysis/vendor/ml/ast_helper.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                         Alain Frisch, LexiFi                           *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Helpers to produce Parsetree fragments *)\n\nopen Asttypes\nopen Docstrings\nopen Parsetree\n\ntype lid = Longident.t loc\ntype str = string loc\ntype loc = Location.t\ntype attrs = attribute list\n\n(** {1 Default locations} *)\n\nval default_loc : loc ref\n(** Default value for all optional location arguments. *)\n\nval with_default_loc : loc -> (unit -> 'a) -> 'a\n(** Set the [default_loc] within the scope of the execution of the provided\n    function. 
*)\n\n(** {1 Constants} *)\n\nmodule Const : sig\n  val char : char -> constant\n  val string : ?quotation_delimiter:string -> string -> constant\n  val integer : ?suffix:char -> string -> constant\n  val int : ?suffix:char -> int -> constant\n  val int32 : ?suffix:char -> int32 -> constant\n  val int64 : ?suffix:char -> int64 -> constant\n  val nativeint : ?suffix:char -> nativeint -> constant\n  val float : ?suffix:char -> string -> constant\nend\n\n(** {1 Core language} *)\n\n(** Type expressions *)\nmodule Typ : sig\n  val mk : ?loc:loc -> ?attrs:attrs -> core_type_desc -> core_type\n  val attr : core_type -> attribute -> core_type\n\n  val any : ?loc:loc -> ?attrs:attrs -> unit -> core_type\n  val var : ?loc:loc -> ?attrs:attrs -> string -> core_type\n  val arrow :\n    ?loc:loc -> ?attrs:attrs -> arg_label -> core_type -> core_type -> core_type\n  val tuple : ?loc:loc -> ?attrs:attrs -> core_type list -> core_type\n  val constr : ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type\n  val object_ :\n    ?loc:loc -> ?attrs:attrs -> object_field list -> closed_flag -> core_type\n  val class_ : ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type\n  val alias : ?loc:loc -> ?attrs:attrs -> core_type -> string -> core_type\n  val variant :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    row_field list ->\n    closed_flag ->\n    label list option ->\n    core_type\n  val poly : ?loc:loc -> ?attrs:attrs -> str list -> core_type -> core_type\n  val package :\n    ?loc:loc -> ?attrs:attrs -> lid -> (lid * core_type) list -> core_type\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> core_type\n\n  val force_poly : core_type -> core_type\n\n  val varify_constructors : str list -> core_type -> core_type\n  (** [varify_constructors newtypes te] is type expression [te], of which any of\n      nullary type constructor [tc] is replaced by type variable of the same\n      name, if [tc]'s name appears in [newtypes]. 
Raise\n      [Syntaxerr.Variable_in_scope] if any type variable inside [te] appears in\n      [newtypes].\n      @since 4.05 *)\nend\n\n(** Patterns *)\nmodule Pat : sig\n  val mk : ?loc:loc -> ?attrs:attrs -> pattern_desc -> pattern\n  val attr : pattern -> attribute -> pattern\n\n  val any : ?loc:loc -> ?attrs:attrs -> unit -> pattern\n  val var : ?loc:loc -> ?attrs:attrs -> str -> pattern\n  val alias : ?loc:loc -> ?attrs:attrs -> pattern -> str -> pattern\n  val constant : ?loc:loc -> ?attrs:attrs -> constant -> pattern\n  val interval : ?loc:loc -> ?attrs:attrs -> constant -> constant -> pattern\n  val tuple : ?loc:loc -> ?attrs:attrs -> pattern list -> pattern\n  val construct : ?loc:loc -> ?attrs:attrs -> lid -> pattern option -> pattern\n  val variant : ?loc:loc -> ?attrs:attrs -> label -> pattern option -> pattern\n  val record :\n    ?loc:loc -> ?attrs:attrs -> (lid * pattern) list -> closed_flag -> pattern\n  val array : ?loc:loc -> ?attrs:attrs -> pattern list -> pattern\n  val or_ : ?loc:loc -> ?attrs:attrs -> pattern -> pattern -> pattern\n  val constraint_ : ?loc:loc -> ?attrs:attrs -> pattern -> core_type -> pattern\n  val type_ : ?loc:loc -> ?attrs:attrs -> lid -> pattern\n  val lazy_ : ?loc:loc -> ?attrs:attrs -> pattern -> pattern\n  val unpack : ?loc:loc -> ?attrs:attrs -> str -> pattern\n  val open_ : ?loc:loc -> ?attrs:attrs -> lid -> pattern -> pattern\n  val exception_ : ?loc:loc -> ?attrs:attrs -> pattern -> pattern\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> pattern\nend\n\n(** Expressions *)\nmodule Exp : sig\n  val mk : ?loc:loc -> ?attrs:attrs -> expression_desc -> expression\n  val attr : expression -> attribute -> expression\n\n  val ident : ?loc:loc -> ?attrs:attrs -> lid -> expression\n  val constant : ?loc:loc -> ?attrs:attrs -> constant -> expression\n  val let_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    rec_flag ->\n    value_binding list ->\n    expression ->\n    expression\n  val fun_ :\n    ?loc:loc ->\n    
?attrs:attrs ->\n    arg_label ->\n    expression option ->\n    pattern ->\n    expression ->\n    expression\n  val function_ : ?loc:loc -> ?attrs:attrs -> case list -> expression\n  val apply :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    expression ->\n    (arg_label * expression) list ->\n    expression\n  val match_ : ?loc:loc -> ?attrs:attrs -> expression -> case list -> expression\n  val try_ : ?loc:loc -> ?attrs:attrs -> expression -> case list -> expression\n  val tuple : ?loc:loc -> ?attrs:attrs -> expression list -> expression\n  val construct :\n    ?loc:loc -> ?attrs:attrs -> lid -> expression option -> expression\n  val variant :\n    ?loc:loc -> ?attrs:attrs -> label -> expression option -> expression\n  val record :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    (lid * expression) list ->\n    expression option ->\n    expression\n  val field : ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression\n  val setfield :\n    ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression -> expression\n  val array : ?loc:loc -> ?attrs:attrs -> expression list -> expression\n  val ifthenelse :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    expression ->\n    expression ->\n    expression option ->\n    expression\n  val sequence :\n    ?loc:loc -> ?attrs:attrs -> expression -> expression -> expression\n  val while_ :\n    ?loc:loc -> ?attrs:attrs -> expression -> expression -> expression\n  val for_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    pattern ->\n    expression ->\n    expression ->\n    direction_flag ->\n    expression ->\n    expression\n  val coerce :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    expression ->\n    core_type option ->\n    core_type ->\n    expression\n  val constraint_ :\n    ?loc:loc -> ?attrs:attrs -> expression -> core_type -> expression\n  val send : ?loc:loc -> ?attrs:attrs -> expression -> str -> expression\n  val new_ : ?loc:loc -> ?attrs:attrs -> lid -> expression\n  val setinstvar : ?loc:loc -> ?attrs:attrs -> str -> 
expression -> expression\n  val override :\n    ?loc:loc -> ?attrs:attrs -> (str * expression) list -> expression\n  val letmodule :\n    ?loc:loc -> ?attrs:attrs -> str -> module_expr -> expression -> expression\n  val letexception :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    extension_constructor ->\n    expression ->\n    expression\n  val assert_ : ?loc:loc -> ?attrs:attrs -> expression -> expression\n  val lazy_ : ?loc:loc -> ?attrs:attrs -> expression -> expression\n  val poly :\n    ?loc:loc -> ?attrs:attrs -> expression -> core_type option -> expression\n  val object_ : ?loc:loc -> ?attrs:attrs -> class_structure -> expression\n  val newtype : ?loc:loc -> ?attrs:attrs -> str -> expression -> expression\n  val pack : ?loc:loc -> ?attrs:attrs -> module_expr -> expression\n  val open_ :\n    ?loc:loc -> ?attrs:attrs -> override_flag -> lid -> expression -> expression\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> expression\n  val unreachable : ?loc:loc -> ?attrs:attrs -> unit -> expression\n\n  val case : pattern -> ?guard:expression -> expression -> case\nend\n\n(** Value declarations *)\nmodule Val : sig\n  val mk :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?prim:string list ->\n    str ->\n    core_type ->\n    value_description\nend\n\n(** Type declarations *)\nmodule Type : sig\n  val mk :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?text:text ->\n    ?params:(core_type * variance) list ->\n    ?cstrs:(core_type * core_type * loc) list ->\n    ?kind:type_kind ->\n    ?priv:private_flag ->\n    ?manifest:core_type ->\n    str ->\n    type_declaration\n\n  val constructor :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?info:info ->\n    ?args:constructor_arguments ->\n    ?res:core_type ->\n    str ->\n    constructor_declaration\n  val field :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?info:info ->\n    ?mut:mutable_flag ->\n    str ->\n    core_type ->\n    label_declaration\nend\n\n(** Type extensions 
*)\nmodule Te : sig\n  val mk :\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?params:(core_type * variance) list ->\n    ?priv:private_flag ->\n    lid ->\n    extension_constructor list ->\n    type_extension\n\n  val constructor :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?info:info ->\n    str ->\n    extension_constructor_kind ->\n    extension_constructor\n\n  val decl :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?info:info ->\n    ?args:constructor_arguments ->\n    ?res:core_type ->\n    str ->\n    extension_constructor\n  val rebind :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?info:info ->\n    str ->\n    lid ->\n    extension_constructor\nend\n\n(** {1 Module language} *)\n\n(** Module type expressions *)\nmodule Mty : sig\n  val mk : ?loc:loc -> ?attrs:attrs -> module_type_desc -> module_type\n  val attr : module_type -> attribute -> module_type\n\n  val ident : ?loc:loc -> ?attrs:attrs -> lid -> module_type\n  val alias : ?loc:loc -> ?attrs:attrs -> lid -> module_type\n  val signature : ?loc:loc -> ?attrs:attrs -> signature -> module_type\n  val functor_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    str ->\n    module_type option ->\n    module_type ->\n    module_type\n  val with_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    module_type ->\n    with_constraint list ->\n    module_type\n  val typeof_ : ?loc:loc -> ?attrs:attrs -> module_expr -> module_type\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> module_type\nend\n\n(** Module expressions *)\nmodule Mod : sig\n  val mk : ?loc:loc -> ?attrs:attrs -> module_expr_desc -> module_expr\n  val attr : module_expr -> attribute -> module_expr\n\n  val ident : ?loc:loc -> ?attrs:attrs -> lid -> module_expr\n  val structure : ?loc:loc -> ?attrs:attrs -> structure -> module_expr\n  val functor_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    str ->\n    module_type option ->\n    module_expr ->\n    module_expr\n  val apply :\n    ?loc:loc -> 
?attrs:attrs -> module_expr -> module_expr -> module_expr\n  val constraint_ :\n    ?loc:loc -> ?attrs:attrs -> module_expr -> module_type -> module_expr\n  val unpack : ?loc:loc -> ?attrs:attrs -> expression -> module_expr\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> module_expr\nend\n\n(** Signature items *)\nmodule Sig : sig\n  val mk : ?loc:loc -> signature_item_desc -> signature_item\n\n  val value : ?loc:loc -> value_description -> signature_item\n  val type_ : ?loc:loc -> rec_flag -> type_declaration list -> signature_item\n  val type_extension : ?loc:loc -> type_extension -> signature_item\n  val exception_ : ?loc:loc -> extension_constructor -> signature_item\n  val module_ : ?loc:loc -> module_declaration -> signature_item\n  val rec_module : ?loc:loc -> module_declaration list -> signature_item\n  val modtype : ?loc:loc -> module_type_declaration -> signature_item\n  val open_ : ?loc:loc -> open_description -> signature_item\n  val include_ : ?loc:loc -> include_description -> signature_item\n  val class_type : ?loc:loc -> class_type_declaration list -> signature_item\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> signature_item\n  val attribute : ?loc:loc -> attribute -> signature_item\n  val text : text -> signature_item list\nend\n\n(** Structure items *)\nmodule Str : sig\n  val mk : ?loc:loc -> structure_item_desc -> structure_item\n\n  val eval : ?loc:loc -> ?attrs:attributes -> expression -> structure_item\n  val value : ?loc:loc -> rec_flag -> value_binding list -> structure_item\n  val primitive : ?loc:loc -> value_description -> structure_item\n  val type_ : ?loc:loc -> rec_flag -> type_declaration list -> structure_item\n  val type_extension : ?loc:loc -> type_extension -> structure_item\n  val exception_ : ?loc:loc -> extension_constructor -> structure_item\n  val module_ : ?loc:loc -> module_binding -> structure_item\n  val rec_module : ?loc:loc -> module_binding list -> structure_item\n  val modtype : ?loc:loc -> 
module_type_declaration -> structure_item\n  val open_ : ?loc:loc -> open_description -> structure_item\n  val class_type : ?loc:loc -> class_type_declaration list -> structure_item\n  val include_ : ?loc:loc -> include_declaration -> structure_item\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> structure_item\n  val attribute : ?loc:loc -> attribute -> structure_item\n  val text : text -> structure_item list\nend\n\n(** Module declarations *)\nmodule Md : sig\n  val mk :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?text:text ->\n    str ->\n    module_type ->\n    module_declaration\nend\n\n(** Module type declarations *)\nmodule Mtd : sig\n  val mk :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?text:text ->\n    ?typ:module_type ->\n    str ->\n    module_type_declaration\nend\n\n(** Module bindings *)\nmodule Mb : sig\n  val mk :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?text:text ->\n    str ->\n    module_expr ->\n    module_binding\nend\n\n(** Opens *)\nmodule Opn : sig\n  val mk :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?override:override_flag ->\n    lid ->\n    open_description\nend\n\n(** Includes *)\nmodule Incl : sig\n  val mk : ?loc:loc -> ?attrs:attrs -> ?docs:docs -> 'a -> 'a include_infos\nend\n\n(** Value bindings *)\nmodule Vb : sig\n  val mk :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?text:text ->\n    pattern ->\n    expression ->\n    value_binding\nend\n\n(** {1 Class language} *)\n\n(** Class type expressions *)\nmodule Cty : sig\n  val mk : ?loc:loc -> ?attrs:attrs -> class_type_desc -> class_type\n  val attr : class_type -> attribute -> class_type\n\n  val constr : ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_type\n  val signature : ?loc:loc -> ?attrs:attrs -> class_signature -> class_type\n  val arrow :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    arg_label ->\n    core_type ->\n    class_type ->\n    class_type\n  
val extension : ?loc:loc -> ?attrs:attrs -> extension -> class_type\n  val open_ :\n    ?loc:loc -> ?attrs:attrs -> override_flag -> lid -> class_type -> class_type\nend\n\n(** Class type fields *)\nmodule Ctf : sig\n  val mk :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    class_type_field_desc ->\n    class_type_field\n  val attr : class_type_field -> attribute -> class_type_field\n\n  val inherit_ : ?loc:loc -> ?attrs:attrs -> class_type -> class_type_field\n  val val_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    str ->\n    mutable_flag ->\n    virtual_flag ->\n    core_type ->\n    class_type_field\n  val method_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    str ->\n    private_flag ->\n    virtual_flag ->\n    core_type ->\n    class_type_field\n  val constraint_ :\n    ?loc:loc -> ?attrs:attrs -> core_type -> core_type -> class_type_field\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> class_type_field\n  val attribute : ?loc:loc -> attribute -> class_type_field\n  val text : text -> class_type_field list\nend\n\n(** Class expressions *)\nmodule Cl : sig\n  val mk : ?loc:loc -> ?attrs:attrs -> class_expr_desc -> class_expr\n  val attr : class_expr -> attribute -> class_expr\n\n  val constr : ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_expr\n  val structure : ?loc:loc -> ?attrs:attrs -> class_structure -> class_expr\n  val fun_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    arg_label ->\n    expression option ->\n    pattern ->\n    class_expr ->\n    class_expr\n  val apply :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    class_expr ->\n    (arg_label * expression) list ->\n    class_expr\n  val let_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    rec_flag ->\n    value_binding list ->\n    class_expr ->\n    class_expr\n  val constraint_ :\n    ?loc:loc -> ?attrs:attrs -> class_expr -> class_type -> class_expr\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> class_expr\n  val open_ :\n    ?loc:loc -> ?attrs:attrs 
-> override_flag -> lid -> class_expr -> class_expr\nend\n\n(** Class fields *)\nmodule Cf : sig\n  val mk :\n    ?loc:loc -> ?attrs:attrs -> ?docs:docs -> class_field_desc -> class_field\n  val attr : class_field -> attribute -> class_field\n\n  val val_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    str ->\n    mutable_flag ->\n    class_field_kind ->\n    class_field\n  val method_ :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    str ->\n    private_flag ->\n    class_field_kind ->\n    class_field\n  val constraint_ :\n    ?loc:loc -> ?attrs:attrs -> core_type -> core_type -> class_field\n  val initializer_ : ?loc:loc -> ?attrs:attrs -> expression -> class_field\n  val extension : ?loc:loc -> ?attrs:attrs -> extension -> class_field\n  val attribute : ?loc:loc -> attribute -> class_field\n  val text : text -> class_field list\n\n  val virtual_ : core_type -> class_field_kind\n  val concrete : override_flag -> expression -> class_field_kind\nend\n\n(** Classes *)\nmodule Ci : sig\n  val mk :\n    ?loc:loc ->\n    ?attrs:attrs ->\n    ?docs:docs ->\n    ?text:text ->\n    ?virt:virtual_flag ->\n    ?params:(core_type * variance) list ->\n    str ->\n    'a ->\n    'a class_infos\nend\n\n(** Class signatures *)\nmodule Csig : sig\n  val mk : core_type -> class_type_field list -> class_signature\nend\n\n(** Class structures *)\nmodule Cstr : sig\n  val mk : pattern -> class_field list -> class_structure\nend\n"
  },
  {
    "path": "analysis/vendor/ml/ast_invariants.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Jeremie Dimino, Jane Street Europe                   *)\n(*                                                                        *)\n(*   Copyright 2015 Jane Street Group LLC                                 *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Parsetree\nopen Ast_iterator\n\nlet err = Syntaxerr.ill_formed_ast\n\nlet empty_record loc = err loc \"Records cannot be empty.\"\nlet empty_variant loc = err loc \"Variant types cannot be empty.\"\nlet invalid_tuple loc = err loc \"Tuples must have at least 2 components.\"\nlet no_args loc = err loc \"Function application with no argument.\"\nlet empty_let loc = err loc \"Let with no bindings.\"\nlet empty_type loc = err loc \"Type declarations cannot be empty.\"\nlet complex_id loc = err loc \"Functor application not allowed here.\"\n\nlet simple_longident id =\n  let rec is_simple = function\n    | Longident.Lident _ -> true\n    | Longident.Ldot (id, _) -> is_simple id\n    | Longident.Lapply _ -> false\n  in\n  if not (is_simple id.txt) then complex_id id.loc\n\nlet iterator =\n  let super = Ast_iterator.default_iterator in\n  let type_declaration self td =\n    super.type_declaration self td;\n    let loc = td.ptype_loc in\n    
match td.ptype_kind with\n    | Ptype_record [] -> empty_record loc\n    | Ptype_variant [] -> empty_variant loc\n    | _ -> ()\n  in\n  let typ self ty =\n    super.typ self ty;\n    let loc = ty.ptyp_loc in\n    match ty.ptyp_desc with\n    | Ptyp_tuple ([] | [_]) -> invalid_tuple loc\n    | Ptyp_class (id, _) -> simple_longident id\n    | Ptyp_package (_, cstrs) ->\n      List.iter (fun (id, _) -> simple_longident id) cstrs\n    | _ -> ()\n  in\n  let pat self pat =\n    (match pat.ppat_desc with\n    | Ppat_construct (_, Some ({ppat_desc = Ppat_tuple _} as p))\n      when Builtin_attributes.explicit_arity pat.ppat_attributes ->\n      super.pat self p (* allow unary tuple, see GPR#523. *)\n    | _ -> super.pat self pat);\n    let loc = pat.ppat_loc in\n    match pat.ppat_desc with\n    | Ppat_tuple ([] | [_]) -> invalid_tuple loc\n    | Ppat_record ([], _) -> empty_record loc\n    | Ppat_construct (id, _) -> simple_longident id\n    | Ppat_record (fields, _) ->\n      List.iter (fun (id, _) -> simple_longident id) fields\n    | _ -> ()\n  in\n  let expr self exp =\n    (match exp.pexp_desc with\n    | Pexp_construct (_, Some ({pexp_desc = Pexp_tuple _} as e))\n      when Builtin_attributes.explicit_arity exp.pexp_attributes ->\n      super.expr self e (* allow unary tuple, see GPR#523. 
*)\n    | _ -> super.expr self exp);\n    let loc = exp.pexp_loc in\n    match exp.pexp_desc with\n    | Pexp_tuple ([] | [_]) -> invalid_tuple loc\n    | Pexp_record ([], _) -> empty_record loc\n    | Pexp_apply (_, []) -> no_args loc\n    | Pexp_let (_, [], _) -> empty_let loc\n    | Pexp_ident id\n    | Pexp_construct (id, _)\n    | Pexp_field (_, id)\n    | Pexp_setfield (_, id, _)\n    | Pexp_new id\n    | Pexp_open (_, id, _) ->\n      simple_longident id\n    | Pexp_record (fields, _) ->\n      List.iter (fun (id, _) -> simple_longident id) fields\n    | _ -> ()\n  in\n  let extension_constructor self ec =\n    super.extension_constructor self ec;\n    match ec.pext_kind with\n    | Pext_rebind id -> simple_longident id\n    | _ -> ()\n  in\n  let class_expr self ce =\n    super.class_expr self ce;\n    let loc = ce.pcl_loc in\n    match ce.pcl_desc with\n    | Pcl_apply (_, []) -> no_args loc\n    | Pcl_constr (id, _) -> simple_longident id\n    | _ -> ()\n  in\n  let module_type self mty =\n    super.module_type self mty;\n    match mty.pmty_desc with\n    | Pmty_alias id -> simple_longident id\n    | _ -> ()\n  in\n  let open_description self opn =\n    super.open_description self opn;\n    simple_longident opn.popen_lid\n  in\n  let with_constraint self wc =\n    super.with_constraint self wc;\n    match wc with\n    | Pwith_type (id, _) | Pwith_module (id, _) -> simple_longident id\n    | _ -> ()\n  in\n  let module_expr self me =\n    super.module_expr self me;\n    match me.pmod_desc with\n    | Pmod_ident id -> simple_longident id\n    | _ -> ()\n  in\n  let structure_item self st =\n    super.structure_item self st;\n    let loc = st.pstr_loc in\n    match st.pstr_desc with\n    | Pstr_type (_, []) -> empty_type loc\n    | Pstr_value (_, []) -> empty_let loc\n    | _ -> ()\n  in\n  let signature_item self sg =\n    super.signature_item self sg;\n    let loc = sg.psig_loc in\n    match sg.psig_desc with\n    | Psig_type (_, []) -> empty_type loc\n    
| _ -> ()\n  in\n  {\n    super with\n    type_declaration;\n    typ;\n    pat;\n    expr;\n    extension_constructor;\n    class_expr;\n    module_expr;\n    module_type;\n    open_description;\n    with_constraint;\n    structure_item;\n    signature_item;\n  }\n\nlet structure st = iterator.structure iterator st\nlet signature sg = iterator.signature iterator sg\n"
  },
  {
    "path": "analysis/vendor/ml/ast_invariants.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Jeremie Dimino, Jane Street Europe                   *)\n(*                                                                        *)\n(*   Copyright 2015 Jane Street Group LLC                                 *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Check AST invariants *)\n\nval structure : Parsetree.structure -> unit\nval signature : Parsetree.signature -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/ast_iterator.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                      Nicolas Ojeda Bar, LexiFi                         *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* A generic Parsetree mapping class *)\n\n(*\n[@@@warning \"+9\"]\n  (* Ensure that record patterns don't miss any field. 
*)\n*)\n\nopen Parsetree\nopen Location\n\ntype iterator = {\n  attribute: iterator -> attribute -> unit;\n  attributes: iterator -> attribute list -> unit;\n  case: iterator -> case -> unit;\n  cases: iterator -> case list -> unit;\n  class_expr: iterator -> class_expr -> unit;\n  class_field: iterator -> class_field -> unit;\n  class_signature: iterator -> class_signature -> unit;\n  class_structure: iterator -> class_structure -> unit;\n  class_type: iterator -> class_type -> unit;\n  class_type_declaration: iterator -> class_type_declaration -> unit;\n  class_type_field: iterator -> class_type_field -> unit;\n  constructor_declaration: iterator -> constructor_declaration -> unit;\n  expr: iterator -> expression -> unit;\n  extension: iterator -> extension -> unit;\n  extension_constructor: iterator -> extension_constructor -> unit;\n  include_declaration: iterator -> include_declaration -> unit;\n  include_description: iterator -> include_description -> unit;\n  label_declaration: iterator -> label_declaration -> unit;\n  location: iterator -> Location.t -> unit;\n  module_binding: iterator -> module_binding -> unit;\n  module_declaration: iterator -> module_declaration -> unit;\n  module_expr: iterator -> module_expr -> unit;\n  module_type: iterator -> module_type -> unit;\n  module_type_declaration: iterator -> module_type_declaration -> unit;\n  open_description: iterator -> open_description -> unit;\n  pat: iterator -> pattern -> unit;\n  payload: iterator -> payload -> unit;\n  signature: iterator -> signature -> unit;\n  signature_item: iterator -> signature_item -> unit;\n  structure: iterator -> structure -> unit;\n  structure_item: iterator -> structure_item -> unit;\n  typ: iterator -> core_type -> unit;\n  type_declaration: iterator -> type_declaration -> unit;\n  type_extension: iterator -> type_extension -> unit;\n  type_kind: iterator -> type_kind -> unit;\n  value_binding: iterator -> value_binding -> unit;\n  value_description: iterator -> 
value_description -> unit;\n  with_constraint: iterator -> with_constraint -> unit;\n}\n(** A [iterator] record implements one \"method\" per syntactic category, using an\n    open recursion style: each method takes as its first argument the iterator\n    to be applied to children in the syntax tree. *)\n\nlet iter_fst f (x, _) = f x\nlet iter_snd f (_, y) = f y\nlet iter_tuple f1 f2 (x, y) =\n  f1 x;\n  f2 y\nlet iter_tuple3 f1 f2 f3 (x, y, z) =\n  f1 x;\n  f2 y;\n  f3 z\nlet iter_opt f = function\n  | None -> ()\n  | Some x -> f x\n\nlet iter_loc sub {loc; txt = _} = sub.location sub loc\n\nmodule T = struct\n  (* Type expressions for the core language *)\n\n  let row_field sub = function\n    | Rtag (_, attrs, _, tl) ->\n      sub.attributes sub attrs;\n      List.iter (sub.typ sub) tl\n    | Rinherit t -> sub.typ sub t\n\n  let object_field sub = function\n    | Otag (_, attrs, t) ->\n      sub.attributes sub attrs;\n      sub.typ sub t\n    | Oinherit t -> sub.typ sub t\n\n  let iter sub {ptyp_desc = desc; ptyp_loc = loc; ptyp_attributes = attrs} =\n    sub.location sub loc;\n    sub.attributes sub attrs;\n    match desc with\n    | Ptyp_any | Ptyp_var _ -> ()\n    | Ptyp_arrow (_lab, t1, t2) ->\n      sub.typ sub t1;\n      sub.typ sub t2\n    | Ptyp_tuple tyl -> List.iter (sub.typ sub) tyl\n    | Ptyp_constr (lid, tl) ->\n      iter_loc sub lid;\n      List.iter (sub.typ sub) tl\n    | Ptyp_object (ol, _o) -> List.iter (object_field sub) ol\n    | Ptyp_class (lid, tl) ->\n      iter_loc sub lid;\n      List.iter (sub.typ sub) tl\n    | Ptyp_alias (t, _) -> sub.typ sub t\n    | Ptyp_variant (rl, _b, _ll) -> List.iter (row_field sub) rl\n    | Ptyp_poly (_, t) -> sub.typ sub t\n    | Ptyp_package (lid, l) ->\n      iter_loc sub lid;\n      List.iter (iter_tuple (iter_loc sub) (sub.typ sub)) l\n    | Ptyp_extension x -> sub.extension sub x\n\n  let iter_type_declaration sub\n      {\n        ptype_name;\n        ptype_params;\n        ptype_cstrs;\n        
ptype_kind;\n        ptype_private = _;\n        ptype_manifest;\n        ptype_attributes;\n        ptype_loc;\n      } =\n    iter_loc sub ptype_name;\n    List.iter (iter_fst (sub.typ sub)) ptype_params;\n    List.iter\n      (iter_tuple3 (sub.typ sub) (sub.typ sub) (sub.location sub))\n      ptype_cstrs;\n    sub.type_kind sub ptype_kind;\n    iter_opt (sub.typ sub) ptype_manifest;\n    sub.location sub ptype_loc;\n    sub.attributes sub ptype_attributes\n\n  let iter_type_kind sub = function\n    | Ptype_abstract -> ()\n    | Ptype_variant l -> List.iter (sub.constructor_declaration sub) l\n    | Ptype_record l -> List.iter (sub.label_declaration sub) l\n    | Ptype_open -> ()\n\n  let iter_constructor_arguments sub = function\n    | Pcstr_tuple l -> List.iter (sub.typ sub) l\n    | Pcstr_record l -> List.iter (sub.label_declaration sub) l\n\n  let iter_type_extension sub\n      {\n        ptyext_path;\n        ptyext_params;\n        ptyext_constructors;\n        ptyext_private = _;\n        ptyext_attributes;\n      } =\n    iter_loc sub ptyext_path;\n    List.iter (sub.extension_constructor sub) ptyext_constructors;\n    List.iter (iter_fst (sub.typ sub)) ptyext_params;\n    sub.attributes sub ptyext_attributes\n\n  let iter_extension_constructor_kind sub = function\n    | Pext_decl (ctl, cto) ->\n      iter_constructor_arguments sub ctl;\n      iter_opt (sub.typ sub) cto\n    | Pext_rebind li -> iter_loc sub li\n\n  let iter_extension_constructor sub\n      {pext_name; pext_kind; pext_loc; pext_attributes} =\n    iter_loc sub pext_name;\n    iter_extension_constructor_kind sub pext_kind;\n    sub.location sub pext_loc;\n    sub.attributes sub pext_attributes\nend\n\nmodule CT = struct\n  (* Type expressions for the class language *)\n\n  let iter sub {pcty_loc = loc; pcty_desc = desc; pcty_attributes = attrs} =\n    sub.location sub loc;\n    sub.attributes sub attrs;\n    match desc with\n    | Pcty_constr (lid, tys) ->\n      iter_loc sub lid;\n      
List.iter (sub.typ sub) tys\n    | Pcty_signature x -> sub.class_signature sub x\n    | Pcty_arrow (_lab, t, ct) ->\n      sub.typ sub t;\n      sub.class_type sub ct\n    | Pcty_extension x -> sub.extension sub x\n    | Pcty_open (_ovf, lid, e) ->\n      iter_loc sub lid;\n      sub.class_type sub e\n\n  let iter_field sub {pctf_desc = desc; pctf_loc = loc; pctf_attributes = attrs}\n      =\n    sub.location sub loc;\n    sub.attributes sub attrs;\n    match desc with\n    | Pctf_inherit ct -> sub.class_type sub ct\n    | Pctf_val (_s, _m, _v, t) -> sub.typ sub t\n    | Pctf_method (_s, _p, _v, t) -> sub.typ sub t\n    | Pctf_constraint (t1, t2) ->\n      sub.typ sub t1;\n      sub.typ sub t2\n    | Pctf_attribute x -> sub.attribute sub x\n    | Pctf_extension x -> sub.extension sub x\n\n  let iter_signature sub {pcsig_self; pcsig_fields} =\n    sub.typ sub pcsig_self;\n    List.iter (sub.class_type_field sub) pcsig_fields\nend\n\nmodule MT = struct\n  (* Type expressions for the module language *)\n\n  let iter sub {pmty_desc = desc; pmty_loc = loc; pmty_attributes = attrs} =\n    sub.location sub loc;\n    sub.attributes sub attrs;\n    match desc with\n    | Pmty_ident s -> iter_loc sub s\n    | Pmty_alias s -> iter_loc sub s\n    | Pmty_signature sg -> sub.signature sub sg\n    | Pmty_functor (s, mt1, mt2) ->\n      iter_loc sub s;\n      iter_opt (sub.module_type sub) mt1;\n      sub.module_type sub mt2\n    | Pmty_with (mt, l) ->\n      sub.module_type sub mt;\n      List.iter (sub.with_constraint sub) l\n    | Pmty_typeof me -> sub.module_expr sub me\n    | Pmty_extension x -> sub.extension sub x\n\n  let iter_with_constraint sub = function\n    | Pwith_type (lid, d) ->\n      iter_loc sub lid;\n      sub.type_declaration sub d\n    | Pwith_module (lid, lid2) ->\n      iter_loc sub lid;\n      iter_loc sub lid2\n    | Pwith_typesubst (lid, d) ->\n      iter_loc sub lid;\n      sub.type_declaration sub d\n    | Pwith_modsubst (s, lid) ->\n      iter_loc sub 
s;\n      iter_loc sub lid\n\n  let iter_signature_item sub {psig_desc = desc; psig_loc = loc} =\n    sub.location sub loc;\n    match desc with\n    | Psig_value vd -> sub.value_description sub vd\n    | Psig_type (_rf, l) -> List.iter (sub.type_declaration sub) l\n    | Psig_typext te -> sub.type_extension sub te\n    | Psig_exception ed -> sub.extension_constructor sub ed\n    | Psig_module x -> sub.module_declaration sub x\n    | Psig_recmodule l -> List.iter (sub.module_declaration sub) l\n    | Psig_modtype x -> sub.module_type_declaration sub x\n    | Psig_open x -> sub.open_description sub x\n    | Psig_include x -> sub.include_description sub x\n    | Psig_class () -> ()\n    | Psig_class_type l -> List.iter (sub.class_type_declaration sub) l\n    | Psig_extension (x, attrs) ->\n      sub.extension sub x;\n      sub.attributes sub attrs\n    | Psig_attribute x -> sub.attribute sub x\nend\n\nmodule M = struct\n  (* Value expressions for the module language *)\n\n  let iter sub {pmod_loc = loc; pmod_desc = desc; pmod_attributes = attrs} =\n    sub.location sub loc;\n    sub.attributes sub attrs;\n    match desc with\n    | Pmod_ident x -> iter_loc sub x\n    | Pmod_structure str -> sub.structure sub str\n    | Pmod_functor (arg, arg_ty, body) ->\n      iter_loc sub arg;\n      iter_opt (sub.module_type sub) arg_ty;\n      sub.module_expr sub body\n    | Pmod_apply (m1, m2) ->\n      sub.module_expr sub m1;\n      sub.module_expr sub m2\n    | Pmod_constraint (m, mty) ->\n      sub.module_expr sub m;\n      sub.module_type sub mty\n    | Pmod_unpack e -> sub.expr sub e\n    | Pmod_extension x -> sub.extension sub x\n\n  let iter_structure_item sub {pstr_loc = loc; pstr_desc = desc} =\n    sub.location sub loc;\n    match desc with\n    | Pstr_eval (x, attrs) ->\n      sub.expr sub x;\n      sub.attributes sub attrs\n    | Pstr_value (_r, vbs) -> List.iter (sub.value_binding sub) vbs\n    | Pstr_primitive vd -> sub.value_description sub vd\n    | Pstr_type 
(_rf, l) -> List.iter (sub.type_declaration sub) l\n    | Pstr_typext te -> sub.type_extension sub te\n    | Pstr_exception ed -> sub.extension_constructor sub ed\n    | Pstr_module x -> sub.module_binding sub x\n    | Pstr_recmodule l -> List.iter (sub.module_binding sub) l\n    | Pstr_modtype x -> sub.module_type_declaration sub x\n    | Pstr_open x -> sub.open_description sub x\n    | Pstr_class () -> ()\n    | Pstr_class_type l -> List.iter (sub.class_type_declaration sub) l\n    | Pstr_include x -> sub.include_declaration sub x\n    | Pstr_extension (x, attrs) ->\n      sub.extension sub x;\n      sub.attributes sub attrs\n    | Pstr_attribute x -> sub.attribute sub x\nend\n\nmodule E = struct\n  (* Value expressions for the core language *)\n\n  let iter sub {pexp_loc = loc; pexp_desc = desc; pexp_attributes = attrs} =\n    sub.location sub loc;\n    sub.attributes sub attrs;\n    match desc with\n    | Pexp_ident x -> iter_loc sub x\n    | Pexp_constant _ -> ()\n    | Pexp_let (_r, vbs, e) ->\n      List.iter (sub.value_binding sub) vbs;\n      sub.expr sub e\n    | Pexp_fun (_lab, def, p, e) ->\n      iter_opt (sub.expr sub) def;\n      sub.pat sub p;\n      sub.expr sub e\n    | Pexp_function pel -> sub.cases sub pel\n    | Pexp_apply (e, l) ->\n      sub.expr sub e;\n      List.iter (iter_snd (sub.expr sub)) l\n    | Pexp_match (e, pel) ->\n      sub.expr sub e;\n      sub.cases sub pel\n    | Pexp_try (e, pel) ->\n      sub.expr sub e;\n      sub.cases sub pel\n    | Pexp_tuple el -> List.iter (sub.expr sub) el\n    | Pexp_construct (lid, arg) ->\n      iter_loc sub lid;\n      iter_opt (sub.expr sub) arg\n    | Pexp_variant (_lab, eo) -> iter_opt (sub.expr sub) eo\n    | Pexp_record (l, eo) ->\n      List.iter (iter_tuple (iter_loc sub) (sub.expr sub)) l;\n      iter_opt (sub.expr sub) eo\n    | Pexp_field (e, lid) ->\n      sub.expr sub e;\n      iter_loc sub lid\n    | Pexp_setfield (e1, lid, e2) ->\n      sub.expr sub e1;\n      iter_loc sub lid;\n   
   sub.expr sub e2\n    | Pexp_array el -> List.iter (sub.expr sub) el\n    | Pexp_ifthenelse (e1, e2, e3) ->\n      sub.expr sub e1;\n      sub.expr sub e2;\n      iter_opt (sub.expr sub) e3\n    | Pexp_sequence (e1, e2) ->\n      sub.expr sub e1;\n      sub.expr sub e2\n    | Pexp_while (e1, e2) ->\n      sub.expr sub e1;\n      sub.expr sub e2\n    | Pexp_for (p, e1, e2, _d, e3) ->\n      sub.pat sub p;\n      sub.expr sub e1;\n      sub.expr sub e2;\n      sub.expr sub e3\n    | Pexp_coerce (e, t1, t2) ->\n      sub.expr sub e;\n      iter_opt (sub.typ sub) t1;\n      sub.typ sub t2\n    | Pexp_constraint (e, t) ->\n      sub.expr sub e;\n      sub.typ sub t\n    | Pexp_send (e, _s) -> sub.expr sub e\n    | Pexp_new lid -> iter_loc sub lid\n    | Pexp_setinstvar (s, e) ->\n      iter_loc sub s;\n      sub.expr sub e\n    | Pexp_override sel ->\n      List.iter (iter_tuple (iter_loc sub) (sub.expr sub)) sel\n    | Pexp_letmodule (s, me, e) ->\n      iter_loc sub s;\n      sub.module_expr sub me;\n      sub.expr sub e\n    | Pexp_letexception (cd, e) ->\n      sub.extension_constructor sub cd;\n      sub.expr sub e\n    | Pexp_assert e -> sub.expr sub e\n    | Pexp_lazy e -> sub.expr sub e\n    | Pexp_poly (e, t) ->\n      sub.expr sub e;\n      iter_opt (sub.typ sub) t\n    | Pexp_object cls -> sub.class_structure sub cls\n    | Pexp_newtype (_s, e) -> sub.expr sub e\n    | Pexp_pack me -> sub.module_expr sub me\n    | Pexp_open (_ovf, lid, e) ->\n      iter_loc sub lid;\n      sub.expr sub e\n    | Pexp_extension x -> sub.extension sub x\n    | Pexp_unreachable -> ()\nend\n\nmodule P = struct\n  (* Patterns *)\n\n  let iter sub {ppat_desc = desc; ppat_loc = loc; ppat_attributes = attrs} =\n    sub.location sub loc;\n    sub.attributes sub attrs;\n    match desc with\n    | Ppat_any -> ()\n    | Ppat_var s -> iter_loc sub s\n    | Ppat_alias (p, s) ->\n      sub.pat sub p;\n      iter_loc sub s\n    | Ppat_constant _ -> ()\n    | Ppat_interval _ -> ()\n    | 
Ppat_tuple pl -> List.iter (sub.pat sub) pl\n    | Ppat_construct (l, p) ->\n      iter_loc sub l;\n      iter_opt (sub.pat sub) p\n    | Ppat_variant (_l, p) -> iter_opt (sub.pat sub) p\n    | Ppat_record (lpl, _cf) ->\n      List.iter (iter_tuple (iter_loc sub) (sub.pat sub)) lpl\n    | Ppat_array pl -> List.iter (sub.pat sub) pl\n    | Ppat_or (p1, p2) ->\n      sub.pat sub p1;\n      sub.pat sub p2\n    | Ppat_constraint (p, t) ->\n      sub.pat sub p;\n      sub.typ sub t\n    | Ppat_type s -> iter_loc sub s\n    | Ppat_lazy p -> sub.pat sub p\n    | Ppat_unpack s -> iter_loc sub s\n    | Ppat_exception p -> sub.pat sub p\n    | Ppat_extension x -> sub.extension sub x\n    | Ppat_open (lid, p) ->\n      iter_loc sub lid;\n      sub.pat sub p\nend\n\nmodule CE = struct\n  (* Value expressions for the class language *)\n\n  let iter sub {pcl_loc = loc; pcl_desc = desc; pcl_attributes = attrs} =\n    sub.location sub loc;\n    sub.attributes sub attrs;\n    match desc with\n    | Pcl_constr (lid, tys) ->\n      iter_loc sub lid;\n      List.iter (sub.typ sub) tys\n    | Pcl_structure s -> sub.class_structure sub s\n    | Pcl_fun (_lab, e, p, ce) ->\n      iter_opt (sub.expr sub) e;\n      sub.pat sub p;\n      sub.class_expr sub ce\n    | Pcl_apply (ce, l) ->\n      sub.class_expr sub ce;\n      List.iter (iter_snd (sub.expr sub)) l\n    | Pcl_let (_r, vbs, ce) ->\n      List.iter (sub.value_binding sub) vbs;\n      sub.class_expr sub ce\n    | Pcl_constraint (ce, ct) ->\n      sub.class_expr sub ce;\n      sub.class_type sub ct\n    | Pcl_extension x -> sub.extension sub x\n    | Pcl_open (_ovf, lid, e) ->\n      iter_loc sub lid;\n      sub.class_expr sub e\n\n  let iter_kind sub = function\n    | Cfk_concrete (_o, e) -> sub.expr sub e\n    | Cfk_virtual t -> sub.typ sub t\n\n  let iter_field sub {pcf_desc = desc; pcf_loc = loc; pcf_attributes = attrs} =\n    sub.location sub loc;\n    sub.attributes sub attrs;\n    match desc with\n    | Pcf_inherit () -> ()\n 
   | Pcf_val (s, _m, k) ->\n      iter_loc sub s;\n      iter_kind sub k\n    | Pcf_method (s, _p, k) ->\n      iter_loc sub s;\n      iter_kind sub k\n    | Pcf_constraint (t1, t2) ->\n      sub.typ sub t1;\n      sub.typ sub t2\n    | Pcf_initializer e -> sub.expr sub e\n    | Pcf_attribute x -> sub.attribute sub x\n    | Pcf_extension x -> sub.extension sub x\n\n  let iter_structure sub {pcstr_self; pcstr_fields} =\n    sub.pat sub pcstr_self;\n    List.iter (sub.class_field sub) pcstr_fields\n\n  let class_infos sub f\n      {\n        pci_virt = _;\n        pci_params = pl;\n        pci_name;\n        pci_expr;\n        pci_loc;\n        pci_attributes;\n      } =\n    List.iter (iter_fst (sub.typ sub)) pl;\n    iter_loc sub pci_name;\n    f pci_expr;\n    sub.location sub pci_loc;\n    sub.attributes sub pci_attributes\nend\n\n(* Now, a generic AST mapper, to be extended to cover all kinds and\n   cases of the OCaml grammar.  The default behavior of the mapper is\n   the identity. 
*)\n\nlet default_iterator =\n  {\n    structure = (fun this l -> List.iter (this.structure_item this) l);\n    structure_item = M.iter_structure_item;\n    module_expr = M.iter;\n    signature = (fun this l -> List.iter (this.signature_item this) l);\n    signature_item = MT.iter_signature_item;\n    module_type = MT.iter;\n    with_constraint = MT.iter_with_constraint;\n    class_expr = CE.iter;\n    class_field = CE.iter_field;\n    class_structure = CE.iter_structure;\n    class_type = CT.iter;\n    class_type_field = CT.iter_field;\n    class_signature = CT.iter_signature;\n    class_type_declaration =\n      (fun this -> CE.class_infos this (this.class_type this));\n    type_declaration = T.iter_type_declaration;\n    type_kind = T.iter_type_kind;\n    typ = T.iter;\n    type_extension = T.iter_type_extension;\n    extension_constructor = T.iter_extension_constructor;\n    value_description =\n      (fun this\n        {pval_name; pval_type; pval_prim = _; pval_loc; pval_attributes}\n      ->\n        iter_loc this pval_name;\n        this.typ this pval_type;\n        this.attributes this pval_attributes;\n        this.location this pval_loc);\n    pat = P.iter;\n    expr = E.iter;\n    module_declaration =\n      (fun this {pmd_name; pmd_type; pmd_attributes; pmd_loc} ->\n        iter_loc this pmd_name;\n        this.module_type this pmd_type;\n        this.attributes this pmd_attributes;\n        this.location this pmd_loc);\n    module_type_declaration =\n      (fun this {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} ->\n        iter_loc this pmtd_name;\n        iter_opt (this.module_type this) pmtd_type;\n        this.attributes this pmtd_attributes;\n        this.location this pmtd_loc);\n    module_binding =\n      (fun this {pmb_name; pmb_expr; pmb_attributes; pmb_loc} ->\n        iter_loc this pmb_name;\n        this.module_expr this pmb_expr;\n        this.attributes this pmb_attributes;\n        this.location this pmb_loc);\n    open_description 
=\n      (fun this {popen_lid; popen_override = _; popen_attributes; popen_loc} ->\n        iter_loc this popen_lid;\n        this.location this popen_loc;\n        this.attributes this popen_attributes);\n    include_description =\n      (fun this {pincl_mod; pincl_attributes; pincl_loc} ->\n        this.module_type this pincl_mod;\n        this.location this pincl_loc;\n        this.attributes this pincl_attributes);\n    include_declaration =\n      (fun this {pincl_mod; pincl_attributes; pincl_loc} ->\n        this.module_expr this pincl_mod;\n        this.location this pincl_loc;\n        this.attributes this pincl_attributes);\n    value_binding =\n      (fun this {pvb_pat; pvb_expr; pvb_attributes; pvb_loc} ->\n        this.pat this pvb_pat;\n        this.expr this pvb_expr;\n        this.location this pvb_loc;\n        this.attributes this pvb_attributes);\n    constructor_declaration =\n      (fun this {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} ->\n        iter_loc this pcd_name;\n        T.iter_constructor_arguments this pcd_args;\n        iter_opt (this.typ this) pcd_res;\n        this.location this pcd_loc;\n        this.attributes this pcd_attributes);\n    label_declaration =\n      (fun this\n        {pld_name; pld_type; pld_loc; pld_mutable = _; pld_attributes}\n      ->\n        iter_loc this pld_name;\n        this.typ this pld_type;\n        this.location this pld_loc;\n        this.attributes this pld_attributes);\n    cases = (fun this l -> List.iter (this.case this) l);\n    case =\n      (fun this {pc_lhs; pc_guard; pc_rhs} ->\n        this.pat this pc_lhs;\n        iter_opt (this.expr this) pc_guard;\n        this.expr this pc_rhs);\n    location = (fun _this _l -> ());\n    extension =\n      (fun this (s, e) ->\n        iter_loc this s;\n        this.payload this e);\n    attribute =\n      (fun this (s, e) ->\n        iter_loc this s;\n        this.payload this e);\n    attributes = (fun this l -> List.iter (this.attribute 
this) l);\n    payload =\n      (fun this -> function\n        | PStr x -> this.structure this x\n        | PSig x -> this.signature this x\n        | PTyp x -> this.typ this x\n        | PPat (x, g) ->\n          this.pat this x;\n          iter_opt (this.expr this) g);\n  }\n"
  },
  {
    "path": "analysis/vendor/ml/ast_iterator.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                      Nicolas Ojeda Bar, LexiFi                         *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** {!iterator} allows to implement AST inspection using open recursion. A\n    typical mapper would be based on {!default_iterator}, a trivial iterator,\n    and will fall back on it for handling the syntax it does not modify. 
*)\n\nopen Parsetree\n\n(** {1 A generic Parsetree iterator} *)\n\ntype iterator = {\n  attribute: iterator -> attribute -> unit;\n  attributes: iterator -> attribute list -> unit;\n  case: iterator -> case -> unit;\n  cases: iterator -> case list -> unit;\n  class_expr: iterator -> class_expr -> unit;\n  class_field: iterator -> class_field -> unit;\n  class_signature: iterator -> class_signature -> unit;\n  class_structure: iterator -> class_structure -> unit;\n  class_type: iterator -> class_type -> unit;\n  class_type_declaration: iterator -> class_type_declaration -> unit;\n  class_type_field: iterator -> class_type_field -> unit;\n  constructor_declaration: iterator -> constructor_declaration -> unit;\n  expr: iterator -> expression -> unit;\n  extension: iterator -> extension -> unit;\n  extension_constructor: iterator -> extension_constructor -> unit;\n  include_declaration: iterator -> include_declaration -> unit;\n  include_description: iterator -> include_description -> unit;\n  label_declaration: iterator -> label_declaration -> unit;\n  location: iterator -> Location.t -> unit;\n  module_binding: iterator -> module_binding -> unit;\n  module_declaration: iterator -> module_declaration -> unit;\n  module_expr: iterator -> module_expr -> unit;\n  module_type: iterator -> module_type -> unit;\n  module_type_declaration: iterator -> module_type_declaration -> unit;\n  open_description: iterator -> open_description -> unit;\n  pat: iterator -> pattern -> unit;\n  payload: iterator -> payload -> unit;\n  signature: iterator -> signature -> unit;\n  signature_item: iterator -> signature_item -> unit;\n  structure: iterator -> structure -> unit;\n  structure_item: iterator -> structure_item -> unit;\n  typ: iterator -> core_type -> unit;\n  type_declaration: iterator -> type_declaration -> unit;\n  type_extension: iterator -> type_extension -> unit;\n  type_kind: iterator -> type_kind -> unit;\n  value_binding: iterator -> value_binding -> unit;\n  
value_description: iterator -> value_description -> unit;\n  with_constraint: iterator -> with_constraint -> unit;\n}\n(** A [iterator] record implements one \"method\" per syntactic category, using an\n    open recursion style: each method takes as its first argument the iterator\n    to be applied to children in the syntax tree. *)\n\nval default_iterator : iterator\n(** A default iterator, which implements a \"do not do anything\" mapping. *)\n"
  },
  {
    "path": "analysis/vendor/ml/ast_mapper.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                         Alain Frisch, LexiFi                           *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* A generic Parsetree mapping class *)\n\n(*\n[@@@warning \"+9\"]\n  (* Ensure that record patterns don't miss any field. 
*)\n*)\n\nopen Parsetree\nopen Ast_helper\nopen Location\n\ntype mapper = {\n  attribute: mapper -> attribute -> attribute;\n  attributes: mapper -> attribute list -> attribute list;\n  case: mapper -> case -> case;\n  cases: mapper -> case list -> case list;\n  class_expr: mapper -> class_expr -> class_expr;\n  class_field: mapper -> class_field -> class_field;\n  class_signature: mapper -> class_signature -> class_signature;\n  class_structure: mapper -> class_structure -> class_structure;\n  class_type: mapper -> class_type -> class_type;\n  class_type_declaration:\n    mapper -> class_type_declaration -> class_type_declaration;\n  class_type_field: mapper -> class_type_field -> class_type_field;\n  constructor_declaration:\n    mapper -> constructor_declaration -> constructor_declaration;\n  expr: mapper -> expression -> expression;\n  extension: mapper -> extension -> extension;\n  extension_constructor:\n    mapper -> extension_constructor -> extension_constructor;\n  include_declaration: mapper -> include_declaration -> include_declaration;\n  include_description: mapper -> include_description -> include_description;\n  label_declaration: mapper -> label_declaration -> label_declaration;\n  location: mapper -> Location.t -> Location.t;\n  module_binding: mapper -> module_binding -> module_binding;\n  module_declaration: mapper -> module_declaration -> module_declaration;\n  module_expr: mapper -> module_expr -> module_expr;\n  module_type: mapper -> module_type -> module_type;\n  module_type_declaration:\n    mapper -> module_type_declaration -> module_type_declaration;\n  open_description: mapper -> open_description -> open_description;\n  pat: mapper -> pattern -> pattern;\n  payload: mapper -> payload -> payload;\n  signature: mapper -> signature -> signature;\n  signature_item: mapper -> signature_item -> signature_item;\n  structure: mapper -> structure -> structure;\n  structure_item: mapper -> structure_item -> structure_item;\n  typ: mapper -> 
core_type -> core_type;\n  type_declaration: mapper -> type_declaration -> type_declaration;\n  type_extension: mapper -> type_extension -> type_extension;\n  type_kind: mapper -> type_kind -> type_kind;\n  value_binding: mapper -> value_binding -> value_binding;\n  value_description: mapper -> value_description -> value_description;\n  with_constraint: mapper -> with_constraint -> with_constraint;\n}\n\nlet map_fst f (x, y) = (f x, y)\nlet map_snd f (x, y) = (x, f y)\nlet map_tuple f1 f2 (x, y) = (f1 x, f2 y)\nlet map_tuple3 f1 f2 f3 (x, y, z) = (f1 x, f2 y, f3 z)\nlet map_opt f = function\n  | None -> None\n  | Some x -> Some (f x)\n\nlet map_loc sub {loc; txt} = {loc = sub.location sub loc; txt}\n\nmodule T = struct\n  (* Type expressions for the core language *)\n\n  let row_field sub = function\n    | Rtag (l, attrs, b, tl) ->\n      Rtag\n        (map_loc sub l, sub.attributes sub attrs, b, List.map (sub.typ sub) tl)\n    | Rinherit t -> Rinherit (sub.typ sub t)\n\n  let object_field sub = function\n    | Otag (l, attrs, t) ->\n      Otag (map_loc sub l, sub.attributes sub attrs, sub.typ sub t)\n    | Oinherit t -> Oinherit (sub.typ sub t)\n\n  let map sub {ptyp_desc = desc; ptyp_loc = loc; ptyp_attributes = attrs} =\n    let open Typ in\n    let loc = sub.location sub loc in\n    let attrs = sub.attributes sub attrs in\n    match desc with\n    | Ptyp_any -> any ~loc ~attrs ()\n    | Ptyp_var s -> var ~loc ~attrs s\n    | Ptyp_arrow (lab, t1, t2) ->\n      arrow ~loc ~attrs lab (sub.typ sub t1) (sub.typ sub t2)\n    | Ptyp_tuple tyl -> tuple ~loc ~attrs (List.map (sub.typ sub) tyl)\n    | Ptyp_constr (lid, tl) ->\n      constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tl)\n    | Ptyp_object (l, o) ->\n      object_ ~loc ~attrs (List.map (object_field sub) l) o\n    | Ptyp_class (lid, tl) ->\n      class_ ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tl)\n    | Ptyp_alias (t, s) -> alias ~loc ~attrs (sub.typ sub t) s\n    | Ptyp_variant 
(rl, b, ll) ->\n      variant ~loc ~attrs (List.map (row_field sub) rl) b ll\n    | Ptyp_poly (sl, t) ->\n      poly ~loc ~attrs (List.map (map_loc sub) sl) (sub.typ sub t)\n    | Ptyp_package (lid, l) ->\n      package ~loc ~attrs (map_loc sub lid)\n        (List.map (map_tuple (map_loc sub) (sub.typ sub)) l)\n    | Ptyp_extension x -> extension ~loc ~attrs (sub.extension sub x)\n\n  let map_type_declaration sub\n      {\n        ptype_name;\n        ptype_params;\n        ptype_cstrs;\n        ptype_kind;\n        ptype_private;\n        ptype_manifest;\n        ptype_attributes;\n        ptype_loc;\n      } =\n    Type.mk (map_loc sub ptype_name)\n      ~params:(List.map (map_fst (sub.typ sub)) ptype_params)\n      ~priv:ptype_private\n      ~cstrs:\n        (List.map\n           (map_tuple3 (sub.typ sub) (sub.typ sub) (sub.location sub))\n           ptype_cstrs)\n      ~kind:(sub.type_kind sub ptype_kind)\n      ?manifest:(map_opt (sub.typ sub) ptype_manifest)\n      ~loc:(sub.location sub ptype_loc)\n      ~attrs:(sub.attributes sub ptype_attributes)\n\n  let map_type_kind sub = function\n    | Ptype_abstract -> Ptype_abstract\n    | Ptype_variant l ->\n      Ptype_variant (List.map (sub.constructor_declaration sub) l)\n    | Ptype_record l -> Ptype_record (List.map (sub.label_declaration sub) l)\n    | Ptype_open -> Ptype_open\n\n  let map_constructor_arguments sub = function\n    | Pcstr_tuple l -> Pcstr_tuple (List.map (sub.typ sub) l)\n    | Pcstr_record l -> Pcstr_record (List.map (sub.label_declaration sub) l)\n\n  let map_type_extension sub\n      {\n        ptyext_path;\n        ptyext_params;\n        ptyext_constructors;\n        ptyext_private;\n        ptyext_attributes;\n      } =\n    Te.mk (map_loc sub ptyext_path)\n      (List.map (sub.extension_constructor sub) ptyext_constructors)\n      ~params:(List.map (map_fst (sub.typ sub)) ptyext_params)\n      ~priv:ptyext_private\n      ~attrs:(sub.attributes sub ptyext_attributes)\n\n  let 
map_extension_constructor_kind sub = function\n    | Pext_decl (ctl, cto) ->\n      Pext_decl (map_constructor_arguments sub ctl, map_opt (sub.typ sub) cto)\n    | Pext_rebind li -> Pext_rebind (map_loc sub li)\n\n  let map_extension_constructor sub\n      {pext_name; pext_kind; pext_loc; pext_attributes} =\n    Te.constructor (map_loc sub pext_name)\n      (map_extension_constructor_kind sub pext_kind)\n      ~loc:(sub.location sub pext_loc)\n      ~attrs:(sub.attributes sub pext_attributes)\nend\n\nmodule CT = struct\n  (* Type expressions for the class language *)\n\n  let map sub {pcty_loc = loc; pcty_desc = desc; pcty_attributes = attrs} =\n    let open Cty in\n    let loc = sub.location sub loc in\n    let attrs = sub.attributes sub attrs in\n    match desc with\n    | Pcty_constr (lid, tys) ->\n      constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tys)\n    | Pcty_signature x -> signature ~loc ~attrs (sub.class_signature sub x)\n    | Pcty_arrow (lab, t, ct) ->\n      arrow ~loc ~attrs lab (sub.typ sub t) (sub.class_type sub ct)\n    | Pcty_extension x -> extension ~loc ~attrs (sub.extension sub x)\n    | Pcty_open (ovf, lid, ct) ->\n      open_ ~loc ~attrs ovf (map_loc sub lid) (sub.class_type sub ct)\n\n  let map_field sub {pctf_desc = desc; pctf_loc = loc; pctf_attributes = attrs}\n      =\n    let open Ctf in\n    let loc = sub.location sub loc in\n    let attrs = sub.attributes sub attrs in\n    match desc with\n    | Pctf_inherit ct -> inherit_ ~loc ~attrs (sub.class_type sub ct)\n    | Pctf_val (s, m, v, t) ->\n      val_ ~loc ~attrs (map_loc sub s) m v (sub.typ sub t)\n    | Pctf_method (s, p, v, t) ->\n      method_ ~loc ~attrs (map_loc sub s) p v (sub.typ sub t)\n    | Pctf_constraint (t1, t2) ->\n      constraint_ ~loc ~attrs (sub.typ sub t1) (sub.typ sub t2)\n    | Pctf_attribute x -> attribute ~loc (sub.attribute sub x)\n    | Pctf_extension x -> extension ~loc ~attrs (sub.extension sub x)\n\n  let map_signature sub {pcsig_self; 
pcsig_fields} =\n    Csig.mk (sub.typ sub pcsig_self)\n      (List.map (sub.class_type_field sub) pcsig_fields)\nend\n\nmodule MT = struct\n  (* Type expressions for the module language *)\n\n  let map sub {pmty_desc = desc; pmty_loc = loc; pmty_attributes = attrs} =\n    let open Mty in\n    let loc = sub.location sub loc in\n    let attrs = sub.attributes sub attrs in\n    match desc with\n    | Pmty_ident s -> ident ~loc ~attrs (map_loc sub s)\n    | Pmty_alias s -> alias ~loc ~attrs (map_loc sub s)\n    | Pmty_signature sg -> signature ~loc ~attrs (sub.signature sub sg)\n    | Pmty_functor (s, mt1, mt2) ->\n      functor_ ~loc ~attrs (map_loc sub s)\n        (Misc.may_map (sub.module_type sub) mt1)\n        (sub.module_type sub mt2)\n    | Pmty_with (mt, l) ->\n      with_ ~loc ~attrs (sub.module_type sub mt)\n        (List.map (sub.with_constraint sub) l)\n    | Pmty_typeof me -> typeof_ ~loc ~attrs (sub.module_expr sub me)\n    | Pmty_extension x -> extension ~loc ~attrs (sub.extension sub x)\n\n  let map_with_constraint sub = function\n    | Pwith_type (lid, d) ->\n      Pwith_type (map_loc sub lid, sub.type_declaration sub d)\n    | Pwith_module (lid, lid2) ->\n      Pwith_module (map_loc sub lid, map_loc sub lid2)\n    | Pwith_typesubst (lid, d) ->\n      Pwith_typesubst (map_loc sub lid, sub.type_declaration sub d)\n    | Pwith_modsubst (s, lid) -> Pwith_modsubst (map_loc sub s, map_loc sub lid)\n\n  let map_signature_item sub {psig_desc = desc; psig_loc = loc} =\n    let open Sig in\n    let loc = sub.location sub loc in\n    match desc with\n    | Psig_value vd -> value ~loc (sub.value_description sub vd)\n    | Psig_type (rf, l) -> type_ ~loc rf (List.map (sub.type_declaration sub) l)\n    | Psig_typext te -> type_extension ~loc (sub.type_extension sub te)\n    | Psig_exception ed -> exception_ ~loc (sub.extension_constructor sub ed)\n    | Psig_module x -> module_ ~loc (sub.module_declaration sub x)\n    | Psig_recmodule l ->\n      rec_module ~loc 
(List.map (sub.module_declaration sub) l)\n    | Psig_modtype x -> modtype ~loc (sub.module_type_declaration sub x)\n    | Psig_open x -> open_ ~loc (sub.open_description sub x)\n    | Psig_include x -> include_ ~loc (sub.include_description sub x)\n    | Psig_class _ -> assert false\n    | Psig_class_type l ->\n      class_type ~loc (List.map (sub.class_type_declaration sub) l)\n    | Psig_extension (x, attrs) ->\n      extension ~loc (sub.extension sub x) ~attrs:(sub.attributes sub attrs)\n    | Psig_attribute x -> attribute ~loc (sub.attribute sub x)\nend\n\nmodule M = struct\n  (* Value expressions for the module language *)\n\n  let map sub {pmod_loc = loc; pmod_desc = desc; pmod_attributes = attrs} =\n    let open Mod in\n    let loc = sub.location sub loc in\n    let attrs = sub.attributes sub attrs in\n    match desc with\n    | Pmod_ident x -> ident ~loc ~attrs (map_loc sub x)\n    | Pmod_structure str -> structure ~loc ~attrs (sub.structure sub str)\n    | Pmod_functor (arg, arg_ty, body) ->\n      functor_ ~loc ~attrs (map_loc sub arg)\n        (Misc.may_map (sub.module_type sub) arg_ty)\n        (sub.module_expr sub body)\n    | Pmod_apply (m1, m2) ->\n      apply ~loc ~attrs (sub.module_expr sub m1) (sub.module_expr sub m2)\n    | Pmod_constraint (m, mty) ->\n      constraint_ ~loc ~attrs (sub.module_expr sub m) (sub.module_type sub mty)\n    | Pmod_unpack e -> unpack ~loc ~attrs (sub.expr sub e)\n    | Pmod_extension x -> extension ~loc ~attrs (sub.extension sub x)\n\n  let map_structure_item sub {pstr_loc = loc; pstr_desc = desc} =\n    let open Str in\n    let loc = sub.location sub loc in\n    match desc with\n    | Pstr_eval (x, attrs) ->\n      eval ~loc ~attrs:(sub.attributes sub attrs) (sub.expr sub x)\n    | Pstr_value (r, vbs) -> value ~loc r (List.map (sub.value_binding sub) vbs)\n    | Pstr_primitive vd -> primitive ~loc (sub.value_description sub vd)\n    | Pstr_type (rf, l) -> type_ ~loc rf (List.map (sub.type_declaration sub) l)\n    | 
Pstr_typext te -> type_extension ~loc (sub.type_extension sub te)\n    | Pstr_exception ed -> exception_ ~loc (sub.extension_constructor sub ed)\n    | Pstr_module x -> module_ ~loc (sub.module_binding sub x)\n    | Pstr_recmodule l -> rec_module ~loc (List.map (sub.module_binding sub) l)\n    | Pstr_modtype x -> modtype ~loc (sub.module_type_declaration sub x)\n    | Pstr_open x -> open_ ~loc (sub.open_description sub x)\n    | Pstr_class () -> {pstr_loc = loc; pstr_desc = Pstr_class ()}\n    | Pstr_class_type l ->\n      class_type ~loc (List.map (sub.class_type_declaration sub) l)\n    | Pstr_include x -> include_ ~loc (sub.include_declaration sub x)\n    | Pstr_extension (x, attrs) ->\n      extension ~loc (sub.extension sub x) ~attrs:(sub.attributes sub attrs)\n    | Pstr_attribute x -> attribute ~loc (sub.attribute sub x)\nend\n\nmodule E = struct\n  (* Value expressions for the core language *)\n\n  let map sub {pexp_loc = loc; pexp_desc = desc; pexp_attributes = attrs} =\n    let open Exp in\n    let loc = sub.location sub loc in\n    let attrs = sub.attributes sub attrs in\n    match desc with\n    | Pexp_ident x -> ident ~loc ~attrs (map_loc sub x)\n    | Pexp_constant x -> constant ~loc ~attrs x\n    | Pexp_let (r, vbs, e) ->\n      let_ ~loc ~attrs r (List.map (sub.value_binding sub) vbs) (sub.expr sub e)\n    | Pexp_fun (lab, def, p, e) ->\n      fun_ ~loc ~attrs lab\n        (map_opt (sub.expr sub) def)\n        (sub.pat sub p) (sub.expr sub e)\n    | Pexp_function pel -> function_ ~loc ~attrs (sub.cases sub pel)\n    | Pexp_apply (e, l) ->\n      apply ~loc ~attrs (sub.expr sub e) (List.map (map_snd (sub.expr sub)) l)\n    | Pexp_match (e, pel) ->\n      match_ ~loc ~attrs (sub.expr sub e) (sub.cases sub pel)\n    | Pexp_try (e, pel) -> try_ ~loc ~attrs (sub.expr sub e) (sub.cases sub pel)\n    | Pexp_tuple el -> tuple ~loc ~attrs (List.map (sub.expr sub) el)\n    | Pexp_construct (lid, arg) ->\n      construct ~loc ~attrs (map_loc sub lid) (map_opt 
(sub.expr sub) arg)\n    | Pexp_variant (lab, eo) ->\n      variant ~loc ~attrs lab (map_opt (sub.expr sub) eo)\n    | Pexp_record (l, eo) ->\n      record ~loc ~attrs\n        (List.map (map_tuple (map_loc sub) (sub.expr sub)) l)\n        (map_opt (sub.expr sub) eo)\n    | Pexp_field (e, lid) ->\n      field ~loc ~attrs (sub.expr sub e) (map_loc sub lid)\n    | Pexp_setfield (e1, lid, e2) ->\n      setfield ~loc ~attrs (sub.expr sub e1) (map_loc sub lid) (sub.expr sub e2)\n    | Pexp_array el -> array ~loc ~attrs (List.map (sub.expr sub) el)\n    | Pexp_ifthenelse (e1, e2, e3) ->\n      ifthenelse ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2)\n        (map_opt (sub.expr sub) e3)\n    | Pexp_sequence (e1, e2) ->\n      sequence ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2)\n    | Pexp_while (e1, e2) ->\n      while_ ~loc ~attrs (sub.expr sub e1) (sub.expr sub e2)\n    | Pexp_for (p, e1, e2, d, e3) ->\n      for_ ~loc ~attrs (sub.pat sub p) (sub.expr sub e1) (sub.expr sub e2) d\n        (sub.expr sub e3)\n    | Pexp_coerce (e, t1, t2) ->\n      coerce ~loc ~attrs (sub.expr sub e)\n        (map_opt (sub.typ sub) t1)\n        (sub.typ sub t2)\n    | Pexp_constraint (e, t) ->\n      constraint_ ~loc ~attrs (sub.expr sub e) (sub.typ sub t)\n    | Pexp_send (e, s) -> send ~loc ~attrs (sub.expr sub e) (map_loc sub s)\n    | Pexp_new lid -> new_ ~loc ~attrs (map_loc sub lid)\n    | Pexp_setinstvar (s, e) ->\n      setinstvar ~loc ~attrs (map_loc sub s) (sub.expr sub e)\n    | Pexp_override sel ->\n      override ~loc ~attrs\n        (List.map (map_tuple (map_loc sub) (sub.expr sub)) sel)\n    | Pexp_letmodule (s, me, e) ->\n      letmodule ~loc ~attrs (map_loc sub s) (sub.module_expr sub me)\n        (sub.expr sub e)\n    | Pexp_letexception (cd, e) ->\n      letexception ~loc ~attrs\n        (sub.extension_constructor sub cd)\n        (sub.expr sub e)\n    | Pexp_assert e -> assert_ ~loc ~attrs (sub.expr sub e)\n    | Pexp_lazy e -> lazy_ ~loc ~attrs (sub.expr sub 
e)\n    | Pexp_poly (e, t) ->\n      poly ~loc ~attrs (sub.expr sub e) (map_opt (sub.typ sub) t)\n    | Pexp_object cls -> object_ ~loc ~attrs (sub.class_structure sub cls)\n    | Pexp_newtype (s, e) ->\n      newtype ~loc ~attrs (map_loc sub s) (sub.expr sub e)\n    | Pexp_pack me -> pack ~loc ~attrs (sub.module_expr sub me)\n    | Pexp_open (ovf, lid, e) ->\n      open_ ~loc ~attrs ovf (map_loc sub lid) (sub.expr sub e)\n    | Pexp_extension x -> extension ~loc ~attrs (sub.extension sub x)\n    | Pexp_unreachable -> unreachable ~loc ~attrs ()\nend\n\nmodule P = struct\n  (* Patterns *)\n\n  let map sub {ppat_desc = desc; ppat_loc = loc; ppat_attributes = attrs} =\n    let open Pat in\n    let loc = sub.location sub loc in\n    let attrs = sub.attributes sub attrs in\n    match desc with\n    | Ppat_any -> any ~loc ~attrs ()\n    | Ppat_var s -> var ~loc ~attrs (map_loc sub s)\n    | Ppat_alias (p, s) -> alias ~loc ~attrs (sub.pat sub p) (map_loc sub s)\n    | Ppat_constant c -> constant ~loc ~attrs c\n    | Ppat_interval (c1, c2) -> interval ~loc ~attrs c1 c2\n    | Ppat_tuple pl -> tuple ~loc ~attrs (List.map (sub.pat sub) pl)\n    | Ppat_construct (l, p) ->\n      construct ~loc ~attrs (map_loc sub l) (map_opt (sub.pat sub) p)\n    | Ppat_variant (l, p) -> variant ~loc ~attrs l (map_opt (sub.pat sub) p)\n    | Ppat_record (lpl, cf) ->\n      record ~loc ~attrs\n        (List.map (map_tuple (map_loc sub) (sub.pat sub)) lpl)\n        cf\n    | Ppat_array pl -> array ~loc ~attrs (List.map (sub.pat sub) pl)\n    | Ppat_or (p1, p2) -> or_ ~loc ~attrs (sub.pat sub p1) (sub.pat sub p2)\n    | Ppat_constraint (p, t) ->\n      constraint_ ~loc ~attrs (sub.pat sub p) (sub.typ sub t)\n    | Ppat_type s -> type_ ~loc ~attrs (map_loc sub s)\n    | Ppat_lazy p -> lazy_ ~loc ~attrs (sub.pat sub p)\n    | Ppat_unpack s -> unpack ~loc ~attrs (map_loc sub s)\n    | Ppat_open (lid, p) -> open_ ~loc ~attrs (map_loc sub lid) (sub.pat sub p)\n    | Ppat_exception p -> exception_ 
~loc ~attrs (sub.pat sub p)\n    | Ppat_extension x -> extension ~loc ~attrs (sub.extension sub x)\nend\n\nmodule CE = struct\n  (* Value expressions for the class language *)\n\n  let map sub {pcl_loc = loc; pcl_desc = desc; pcl_attributes = attrs} =\n    let open Cl in\n    let loc = sub.location sub loc in\n    let attrs = sub.attributes sub attrs in\n    match desc with\n    | Pcl_constr (lid, tys) ->\n      constr ~loc ~attrs (map_loc sub lid) (List.map (sub.typ sub) tys)\n    | Pcl_structure s -> structure ~loc ~attrs (sub.class_structure sub s)\n    | Pcl_fun (lab, e, p, ce) ->\n      fun_ ~loc ~attrs lab\n        (map_opt (sub.expr sub) e)\n        (sub.pat sub p) (sub.class_expr sub ce)\n    | Pcl_apply (ce, l) ->\n      apply ~loc ~attrs (sub.class_expr sub ce)\n        (List.map (map_snd (sub.expr sub)) l)\n    | Pcl_let (r, vbs, ce) ->\n      let_ ~loc ~attrs r\n        (List.map (sub.value_binding sub) vbs)\n        (sub.class_expr sub ce)\n    | Pcl_constraint (ce, ct) ->\n      constraint_ ~loc ~attrs (sub.class_expr sub ce) (sub.class_type sub ct)\n    | Pcl_extension x -> extension ~loc ~attrs (sub.extension sub x)\n    | Pcl_open (ovf, lid, ce) ->\n      open_ ~loc ~attrs ovf (map_loc sub lid) (sub.class_expr sub ce)\n\n  let map_kind sub = function\n    | Cfk_concrete (o, e) -> Cfk_concrete (o, sub.expr sub e)\n    | Cfk_virtual t -> Cfk_virtual (sub.typ sub t)\n\n  let map_field sub {pcf_desc = desc; pcf_loc = loc; pcf_attributes = attrs} =\n    let open Cf in\n    let loc = sub.location sub loc in\n    let attrs = sub.attributes sub attrs in\n    match desc with\n    | Pcf_inherit () -> {pcf_desc = desc; pcf_loc = loc; pcf_attributes = attrs}\n    | Pcf_val (s, m, k) -> val_ ~loc ~attrs (map_loc sub s) m (map_kind sub k)\n    | Pcf_method (s, p, k) ->\n      method_ ~loc ~attrs (map_loc sub s) p (map_kind sub k)\n    | Pcf_constraint (t1, t2) ->\n      constraint_ ~loc ~attrs (sub.typ sub t1) (sub.typ sub t2)\n    | Pcf_initializer e -> 
initializer_ ~loc ~attrs (sub.expr sub e)\n    | Pcf_attribute x -> attribute ~loc (sub.attribute sub x)\n    | Pcf_extension x -> extension ~loc ~attrs (sub.extension sub x)\n\n  let map_structure sub {pcstr_self; pcstr_fields} =\n    {\n      pcstr_self = sub.pat sub pcstr_self;\n      pcstr_fields = List.map (sub.class_field sub) pcstr_fields;\n    }\n\n  let class_infos sub f\n      {pci_virt; pci_params = pl; pci_name; pci_expr; pci_loc; pci_attributes} =\n    Ci.mk ~virt:pci_virt\n      ~params:(List.map (map_fst (sub.typ sub)) pl)\n      (map_loc sub pci_name) (f pci_expr) ~loc:(sub.location sub pci_loc)\n      ~attrs:(sub.attributes sub pci_attributes)\nend\n\n(* Now, a generic AST mapper, to be extended to cover all kinds and\n   cases of the OCaml grammar.  The default behavior of the mapper is\n   the identity. *)\n\nlet default_mapper =\n  {\n    structure = (fun this l -> List.map (this.structure_item this) l);\n    structure_item = M.map_structure_item;\n    module_expr = M.map;\n    signature = (fun this l -> List.map (this.signature_item this) l);\n    signature_item = MT.map_signature_item;\n    module_type = MT.map;\n    with_constraint = MT.map_with_constraint;\n    class_expr = CE.map;\n    class_field = CE.map_field;\n    class_structure = CE.map_structure;\n    class_type = CT.map;\n    class_type_field = CT.map_field;\n    class_signature = CT.map_signature;\n    class_type_declaration =\n      (fun this -> CE.class_infos this (this.class_type this));\n    type_declaration = T.map_type_declaration;\n    type_kind = T.map_type_kind;\n    typ = T.map;\n    type_extension = T.map_type_extension;\n    extension_constructor = T.map_extension_constructor;\n    value_description =\n      (fun this {pval_name; pval_type; pval_prim; pval_loc; pval_attributes} ->\n        Val.mk (map_loc this pval_name) (this.typ this pval_type)\n          ~attrs:(this.attributes this pval_attributes)\n          ~loc:(this.location this pval_loc)\n          
~prim:pval_prim);\n    pat = P.map;\n    expr = E.map;\n    module_declaration =\n      (fun this {pmd_name; pmd_type; pmd_attributes; pmd_loc} ->\n        Md.mk (map_loc this pmd_name)\n          (this.module_type this pmd_type)\n          ~attrs:(this.attributes this pmd_attributes)\n          ~loc:(this.location this pmd_loc));\n    module_type_declaration =\n      (fun this {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} ->\n        Mtd.mk (map_loc this pmtd_name)\n          ?typ:(map_opt (this.module_type this) pmtd_type)\n          ~attrs:(this.attributes this pmtd_attributes)\n          ~loc:(this.location this pmtd_loc));\n    module_binding =\n      (fun this {pmb_name; pmb_expr; pmb_attributes; pmb_loc} ->\n        Mb.mk (map_loc this pmb_name)\n          (this.module_expr this pmb_expr)\n          ~attrs:(this.attributes this pmb_attributes)\n          ~loc:(this.location this pmb_loc));\n    open_description =\n      (fun this {popen_lid; popen_override; popen_attributes; popen_loc} ->\n        Opn.mk (map_loc this popen_lid) ~override:popen_override\n          ~loc:(this.location this popen_loc)\n          ~attrs:(this.attributes this popen_attributes));\n    include_description =\n      (fun this {pincl_mod; pincl_attributes; pincl_loc} ->\n        Incl.mk\n          (this.module_type this pincl_mod)\n          ~loc:(this.location this pincl_loc)\n          ~attrs:(this.attributes this pincl_attributes));\n    include_declaration =\n      (fun this {pincl_mod; pincl_attributes; pincl_loc} ->\n        Incl.mk\n          (this.module_expr this pincl_mod)\n          ~loc:(this.location this pincl_loc)\n          ~attrs:(this.attributes this pincl_attributes));\n    value_binding =\n      (fun this {pvb_pat; pvb_expr; pvb_attributes; pvb_loc} ->\n        Vb.mk (this.pat this pvb_pat) (this.expr this pvb_expr)\n          ~loc:(this.location this pvb_loc)\n          ~attrs:(this.attributes this pvb_attributes));\n    constructor_declaration =\n      (fun 
this {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} ->\n        Type.constructor (map_loc this pcd_name)\n          ~args:(T.map_constructor_arguments this pcd_args)\n          ?res:(map_opt (this.typ this) pcd_res)\n          ~loc:(this.location this pcd_loc)\n          ~attrs:(this.attributes this pcd_attributes));\n    label_declaration =\n      (fun this {pld_name; pld_type; pld_loc; pld_mutable; pld_attributes} ->\n        Type.field (map_loc this pld_name) (this.typ this pld_type)\n          ~mut:pld_mutable\n          ~loc:(this.location this pld_loc)\n          ~attrs:(this.attributes this pld_attributes));\n    cases = (fun this l -> List.map (this.case this) l);\n    case =\n      (fun this {pc_lhs; pc_guard; pc_rhs} ->\n        {\n          pc_lhs = this.pat this pc_lhs;\n          pc_guard = map_opt (this.expr this) pc_guard;\n          pc_rhs = this.expr this pc_rhs;\n        });\n    location = (fun _this l -> l);\n    extension = (fun this (s, e) -> (map_loc this s, this.payload this e));\n    attribute = (fun this (s, e) -> (map_loc this s, this.payload this e));\n    attributes = (fun this l -> List.map (this.attribute this) l);\n    payload =\n      (fun this -> function\n        | PStr x -> PStr (this.structure this x)\n        | PSig x -> PSig (this.signature this x)\n        | PTyp x -> PTyp (this.typ this x)\n        | PPat (x, g) -> PPat (this.pat this x, map_opt (this.expr this) g));\n  }\n\nlet rec extension_of_error {loc; msg; if_highlight; sub} =\n  ( {loc; txt = \"ocaml.error\"},\n    PStr\n      ([\n         Str.eval (Exp.constant (Pconst_string (msg, None)));\n         Str.eval (Exp.constant (Pconst_string (if_highlight, None)));\n       ]\n      @ List.map (fun ext -> Str.extension (extension_of_error ext)) sub) )\n\nlet attribute_of_warning loc s =\n  ( {loc; txt = \"ocaml.ppwarning\"},\n    PStr [Str.eval ~loc (Exp.constant (Pconst_string (s, None)))] )\n\nmodule StringMap = Map.Make (struct\n  type t = string\n  let compare 
= compare\nend)\n\nlet cookies = ref StringMap.empty\n\nlet get_cookie k = try Some (StringMap.find k !cookies) with Not_found -> None\n\nlet set_cookie k v = cookies := StringMap.add k v !cookies\n\nlet tool_name_ref = ref \"_none_\"\n\nlet tool_name () = !tool_name_ref\n\nmodule PpxContext = struct\n  open Longident\n  open Asttypes\n  open Ast_helper\n\n  let lid name = {txt = Lident name; loc = Location.none}\n\n  let make_string x = Exp.constant (Pconst_string (x, None))\n\n  let make_bool x =\n    if x then Exp.construct (lid \"true\") None\n    else Exp.construct (lid \"false\") None\n\n  let rec make_list f lst =\n    match lst with\n    | x :: rest ->\n      Exp.construct (lid \"::\") (Some (Exp.tuple [f x; make_list f rest]))\n    | [] -> Exp.construct (lid \"[]\") None\n\n  let make_pair f1 f2 (x1, x2) = Exp.tuple [f1 x1; f2 x2]\n\n  let get_cookies () =\n    ( lid \"cookies\",\n      make_list\n        (make_pair make_string (fun x -> x))\n        (StringMap.bindings !cookies) )\n\n  let mk fields =\n    ( {txt = \"ocaml.ppx.context\"; loc = Location.none},\n      Parsetree.PStr [Str.eval (Exp.record fields None)] )\n\n  let make ~tool_name () =\n    let fields =\n      [\n        (lid \"tool_name\", make_string tool_name);\n        (lid \"include_dirs\", make_list make_string !Clflags.include_dirs);\n        (lid \"load_path\", make_list make_string !Config.load_path);\n        (lid \"open_modules\", make_list make_string !Clflags.open_modules);\n        (lid \"debug\", make_bool !Clflags.debug);\n        get_cookies ();\n      ]\n    in\n    mk fields\n\n  let get_fields = function\n    | PStr\n        [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_record (fields, None)}, [])}]\n      ->\n      fields\n    | _ -> raise_errorf \"Internal error: invalid [@@@ocaml.ppx.context] syntax\"\n\n  let restore fields =\n    let field name payload =\n      let rec get_string = function\n        | {pexp_desc = Pexp_constant (Pconst_string (str, None))} -> str\n        
| _ ->\n          raise_errorf\n            \"Internal error: invalid [@@@ocaml.ppx.context { %s }] string \\\n             syntax\"\n            name\n      and get_bool pexp =\n        match pexp with\n        | {pexp_desc = Pexp_construct ({txt = Longident.Lident \"true\"}, None)}\n          ->\n          true\n        | {pexp_desc = Pexp_construct ({txt = Longident.Lident \"false\"}, None)}\n          ->\n          false\n        | _ ->\n          raise_errorf\n            \"Internal error: invalid [@@@ocaml.ppx.context { %s }] bool syntax\"\n            name\n      and get_list elem = function\n        | {\n            pexp_desc =\n              Pexp_construct\n                ( {txt = Longident.Lident \"::\"},\n                  Some {pexp_desc = Pexp_tuple [exp; rest]} );\n          } ->\n          elem exp :: get_list elem rest\n        | {pexp_desc = Pexp_construct ({txt = Longident.Lident \"[]\"}, None)} ->\n          []\n        | _ ->\n          raise_errorf\n            \"Internal error: invalid [@@@ocaml.ppx.context { %s }] list syntax\"\n            name\n      and get_pair f1 f2 = function\n        | {pexp_desc = Pexp_tuple [e1; e2]} -> (f1 e1, f2 e2)\n        | _ ->\n          raise_errorf\n            \"Internal error: invalid [@@@ocaml.ppx.context { %s }] pair syntax\"\n            name\n      in\n      match name with\n      | \"tool_name\" -> tool_name_ref := get_string payload\n      | \"include_dirs\" -> Clflags.include_dirs := get_list get_string payload\n      | \"load_path\" -> Config.load_path := get_list get_string payload\n      | \"open_modules\" -> Clflags.open_modules := get_list get_string payload\n      | \"debug\" -> Clflags.debug := get_bool payload\n      | \"cookies\" ->\n        let l = get_list (get_pair get_string (fun x -> x)) payload in\n        cookies :=\n          List.fold_left (fun s (k, v) -> StringMap.add k v s) StringMap.empty l\n      | _ -> ()\n    in\n    List.iter\n      (function\n        | {txt = Lident 
name}, x -> field name x\n        | _ -> ())\n      fields\n\n  let update_cookies fields =\n    let fields =\n      Ext_list.filter fields (function\n        | {txt = Lident \"cookies\"}, _ -> false\n        | _ -> true)\n    in\n    fields @ [get_cookies ()]\nend\n\nlet ppx_context = PpxContext.make\n\nlet extension_of_exn exn =\n  match error_of_exn exn with\n  | Some (`Ok error) -> extension_of_error error\n  | Some `Already_displayed ->\n    ({loc = Location.none; txt = \"ocaml.error\"}, PStr [])\n  | None -> raise exn\n\nlet apply_lazy ~source ~target mapper =\n  let implem ast =\n    let fields, ast =\n      match ast with\n      | {pstr_desc = Pstr_attribute ({txt = \"ocaml.ppx.context\"}, x)} :: l ->\n        (PpxContext.get_fields x, l)\n      | _ -> ([], ast)\n    in\n    PpxContext.restore fields;\n    let ast =\n      try\n        let mapper = mapper () in\n        mapper.structure mapper ast\n      with exn ->\n        [\n          {\n            pstr_desc = Pstr_extension (extension_of_exn exn, []);\n            pstr_loc = Location.none;\n          };\n        ]\n    in\n    let fields = PpxContext.update_cookies fields in\n    Str.attribute (PpxContext.mk fields) :: ast\n  in\n  let iface ast =\n    let fields, ast =\n      match ast with\n      | {psig_desc = Psig_attribute ({txt = \"ocaml.ppx.context\"}, x)} :: l ->\n        (PpxContext.get_fields x, l)\n      | _ -> ([], ast)\n    in\n    PpxContext.restore fields;\n    let ast =\n      try\n        let mapper = mapper () in\n        mapper.signature mapper ast\n      with exn ->\n        [\n          {\n            psig_desc = Psig_extension (extension_of_exn exn, []);\n            psig_loc = Location.none;\n          };\n        ]\n    in\n    let fields = PpxContext.update_cookies fields in\n    Sig.attribute (PpxContext.mk fields) :: ast\n  in\n\n  let ic = open_in_bin source in\n  let magic =\n    really_input_string ic (String.length Config.ast_impl_magic_number)\n  in\n\n  let rewrite 
transform =\n    Location.set_input_name @@ input_value ic;\n    let ast = input_value ic in\n    close_in ic;\n    let ast = transform ast in\n    let oc = open_out_bin target in\n    output_string oc magic;\n    output_value oc !Location.input_name;\n    output_value oc ast;\n    close_out oc\n  and fail () =\n    close_in ic;\n    failwith \"Ast_mapper: OCaml version mismatch or malformed input\"\n  in\n\n  if magic = Config.ast_impl_magic_number then\n    rewrite (implem : structure -> structure)\n  else if magic = Config.ast_intf_magic_number then\n    rewrite (iface : signature -> signature)\n  else fail ()\n\nlet drop_ppx_context_str ~restore = function\n  | {pstr_desc = Pstr_attribute ({Location.txt = \"ocaml.ppx.context\"}, a)}\n    :: items ->\n    if restore then PpxContext.restore (PpxContext.get_fields a);\n    items\n  | items -> items\n\nlet drop_ppx_context_sig ~restore = function\n  | {psig_desc = Psig_attribute ({Location.txt = \"ocaml.ppx.context\"}, a)}\n    :: items ->\n    if restore then PpxContext.restore (PpxContext.get_fields a);\n    items\n  | items -> items\n\nlet add_ppx_context_str ~tool_name ast =\n  Ast_helper.Str.attribute (ppx_context ~tool_name ()) :: ast\n\nlet add_ppx_context_sig ~tool_name ast =\n  Ast_helper.Sig.attribute (ppx_context ~tool_name ()) :: ast\n\nlet apply ~source ~target mapper = apply_lazy ~source ~target (fun () -> mapper)\n\nlet run_main mapper =\n  try\n    let a = Sys.argv in\n    let n = Array.length a in\n    if n > 2 then\n      let mapper () =\n        try mapper (Array.to_list (Array.sub a 1 (n - 3)))\n        with exn ->\n          (* PR#6463 *)\n          let f _ _ = raise exn in\n          {default_mapper with structure = f; signature = f}\n      in\n      apply_lazy ~source:a.(n - 2) ~target:a.(n - 1) mapper\n    else (\n      Printf.eprintf \"Usage: %s [extra_args] <infile> <outfile>\\n%!\"\n        Sys.executable_name;\n      exit 2)\n  with exn ->\n    prerr_endline (Printexc.to_string exn);\n   
 exit 2\n\nlet register_function = ref (fun _name f -> run_main f)\nlet register name f = !register_function name f\n"
  },
  {
    "path": "analysis/vendor/ml/ast_mapper.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                         Alain Frisch, LexiFi                           *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** The interface of a -ppx rewriter\n\n    A -ppx rewriter is a program that accepts a serialized abstract syntax tree\n    and outputs another, possibly modified, abstract syntax tree. This module\n    encapsulates the interface between the compiler and the -ppx rewriters,\n    handling such details as the serialization format, forwarding of\n    command-line flags, and storing state.\n\n    {!mapper} allows to implement AST rewriting using open recursion. A typical\n    mapper would be based on {!default_mapper}, a deep identity mapper, and will\n    fall back on it for handling the syntax it does not modify. 
For example:\n\n    {[\n      open Asttypes\n      open Parsetree\n      open Ast_mapper\n\n      let test_mapper argv =\n        {\n          default_mapper with\n          expr =\n            (fun mapper expr ->\n              match expr with\n              | {pexp_desc = Pexp_extension ({txt = \"test\"}, PStr [])} ->\n                Ast_helper.Exp.constant (Const_int 42)\n              | other -> default_mapper.expr mapper other);\n        }\n\n      let () = register \"ppx_test\" test_mapper\n    ]}\n\n    This -ppx rewriter, which replaces [[%test]] in expressions with the\n    constant [42], can be compiled using\n    [ocamlc -o ppx_test -I +compiler-libs ocamlcommon.cma ppx_test.ml]. *)\n\nopen Parsetree\n\n(** {1 A generic Parsetree mapper} *)\n\ntype mapper = {\n  attribute: mapper -> attribute -> attribute;\n  attributes: mapper -> attribute list -> attribute list;\n  case: mapper -> case -> case;\n  cases: mapper -> case list -> case list;\n  class_expr: mapper -> class_expr -> class_expr;\n  class_field: mapper -> class_field -> class_field;\n  class_signature: mapper -> class_signature -> class_signature;\n  class_structure: mapper -> class_structure -> class_structure;\n  class_type: mapper -> class_type -> class_type;\n  class_type_declaration:\n    mapper -> class_type_declaration -> class_type_declaration;\n  class_type_field: mapper -> class_type_field -> class_type_field;\n  constructor_declaration:\n    mapper -> constructor_declaration -> constructor_declaration;\n  expr: mapper -> expression -> expression;\n  extension: mapper -> extension -> extension;\n  extension_constructor:\n    mapper -> extension_constructor -> extension_constructor;\n  include_declaration: mapper -> include_declaration -> include_declaration;\n  include_description: mapper -> include_description -> include_description;\n  label_declaration: mapper -> label_declaration -> label_declaration;\n  location: mapper -> Location.t -> Location.t;\n  module_binding: mapper -> 
module_binding -> module_binding;\n  module_declaration: mapper -> module_declaration -> module_declaration;\n  module_expr: mapper -> module_expr -> module_expr;\n  module_type: mapper -> module_type -> module_type;\n  module_type_declaration:\n    mapper -> module_type_declaration -> module_type_declaration;\n  open_description: mapper -> open_description -> open_description;\n  pat: mapper -> pattern -> pattern;\n  payload: mapper -> payload -> payload;\n  signature: mapper -> signature -> signature;\n  signature_item: mapper -> signature_item -> signature_item;\n  structure: mapper -> structure -> structure;\n  structure_item: mapper -> structure_item -> structure_item;\n  typ: mapper -> core_type -> core_type;\n  type_declaration: mapper -> type_declaration -> type_declaration;\n  type_extension: mapper -> type_extension -> type_extension;\n  type_kind: mapper -> type_kind -> type_kind;\n  value_binding: mapper -> value_binding -> value_binding;\n  value_description: mapper -> value_description -> value_description;\n  with_constraint: mapper -> with_constraint -> with_constraint;\n}\n(** A mapper record implements one \"method\" per syntactic category, using an\n    open recursion style: each method takes as its first argument the mapper to\n    be applied to children in the syntax tree. *)\n\nval default_mapper : mapper\n(** A default mapper, which implements a \"deep identity\" mapping. *)\n\n(** {1 Apply mappers to compilation units} *)\n\nval tool_name : unit -> string\n(** Can be used within a ppx preprocessor to know which tool is calling it\n    [\"ocamlc\"], [\"ocamlopt\"], [\"ocamldoc\"], [\"ocamldep\"], [\"ocaml\"], ... Some\n    global variables that reflect command-line options are automatically\n    synchronized between the calling tool and the ppx preprocessor:\n    {!Clflags.include_dirs}, {!Config.load_path}, {!Clflags.open_modules},\n    {!Clflags.for_package}, {!Clflags.debug}. 
*)\n\nval apply : source:string -> target:string -> mapper -> unit\n(** Apply a mapper (parametrized by the unit name) to a dumped parsetree found\n    in the [source] file and put the result in the [target] file. The\n    [structure] or [signature] field of the mapper is applied to the\n    implementation or interface. *)\n\nval run_main : (string list -> mapper) -> unit\n(** Entry point to call to implement a standalone -ppx rewriter from a mapper,\n    parametrized by the command line arguments. The current unit name can be\n    obtained from {!Location.input_name}. This function implements proper error\n    reporting for uncaught exceptions. *)\n\n(** {1 Registration API} *)\n\nval register_function : (string -> (string list -> mapper) -> unit) ref\n\nval register : string -> (string list -> mapper) -> unit\n(** Apply the [register_function]. The default behavior is to run the mapper\n    immediately, taking arguments from the process command line. This is to\n    support a scenario where a mapper is linked as a stand-alone executable.\n\n    It is possible to overwrite the [register_function] to define \"-ppx\n    drivers\", which combine several mappers in a single process. Typically, a\n    driver starts by defining [register_function] to a custom implementation,\n    then lets ppx rewriters (linked statically or dynamically) register\n    themselves, and then run all or some of them. It is also possible to have\n    -ppx drivers apply rewriters to only specific parts of an AST.\n\n    The first argument to [register] is a symbolic name to be used by the ppx\n    driver. *)\n\n(** {1 Convenience functions to write mappers} *)\n\nval map_opt : ('a -> 'b) -> 'a option -> 'b option\n\nval extension_of_error : Location.error -> extension\n(** Encode an error into an 'ocaml.error' extension node which can be inserted\n    in a generated Parsetree. The compiler will be responsible for reporting the\n    error. 
*)\n\nval attribute_of_warning : Location.t -> string -> attribute\n(** Encode a warning message into an 'ocaml.ppwarning' attribute which can be\n    inserted in a generated Parsetree. The compiler will be responsible for\n    reporting the warning. *)\n\n(** {1 Helper functions to call external mappers} *)\n\nval add_ppx_context_str :\n  tool_name:string -> Parsetree.structure -> Parsetree.structure\n(** Extract information from the current environment and encode it into an\n    attribute which is prepended to the list of structure items in order to pass\n    the information to an external processor. *)\n\nval add_ppx_context_sig :\n  tool_name:string -> Parsetree.signature -> Parsetree.signature\n(** Same as [add_ppx_context_str], but for signatures. *)\n\nval drop_ppx_context_str :\n  restore:bool -> Parsetree.structure -> Parsetree.structure\n(** Drop the ocaml.ppx.context attribute from a structure. If [restore] is true,\n    also restore the associated data in the current process. *)\n\nval drop_ppx_context_sig :\n  restore:bool -> Parsetree.signature -> Parsetree.signature\n(** Same as [drop_ppx_context_str], but for signatures. *)\n\n(** {1 Cookies} *)\n\n(** Cookies are used to pass information from a ppx processor to a further\n    invocation of itself, when called from the OCaml toplevel (or other tools\n    that support cookies). *)\n\nval set_cookie : string -> Parsetree.expression -> unit\nval get_cookie : string -> Parsetree.expression option\n"
  },
  {
    "path": "analysis/vendor/ml/ast_payload.ml",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype t = Parsetree.payload\n\nlet is_single_string (x : t) =\n  match x with\n  (* TODO also need detect empty phrase case *)\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              ({pexp_desc = Pexp_constant (Pconst_string (name, dec)); _}, _);\n          _;\n        };\n      ] ->\n    Some (name, dec)\n  | _ -> None\n\nlet is_single_string_as_ast (x : t) : Parsetree.expression option =\n  match x with\n  (*TODO also need detect empty phrase case *)\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              (({pexp_desc = Pexp_constant (Pconst_string (_, _)); _} as e), _);\n          _;\n        };\n      ] ->\n    Some e\n  | _ -> None\n\nlet is_single_int (x : t) : int option =\n  match x with\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              ({pexp_desc = Pexp_constant (Pconst_integer (name, char)); _}, _);\n          _;\n        };\n      ]\n    when match char with\n         | Some n when n = 'n' -> false\n         | _ -> true ->\n    Some (int_of_string name)\n  | _ -> None\n\nlet is_single_float (x : t) : string option =\n  match x with\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              ({pexp_desc = Pexp_constant (Pconst_float (name, _)); _}, _);\n          _;\n        };\n      ] ->\n    Some name\n  | _ -> None\n\nlet is_single_bigint (x : t) : string option =\n  match x with\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              ( {pexp_desc = Pexp_constant (Pconst_integer (name, Some 'n')); _},\n                _ );\n          _;\n        };\n      ] ->\n    Some name\n  | _ -> None\n\nlet is_single_bool (x : t) : bool option =\n  match x with\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              ( {\n                  pexp_desc =\n                    Pexp_construct ({txt = Lident ((\"true\" | \"false\") as b)}, _);\n           
       _;\n                },\n                _ );\n          _;\n        };\n      ] ->\n    Some (b = \"true\")\n  | _ -> None\n\nlet is_single_ident (x : t) =\n  match x with\n  | PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_ident lid}, _); _}] ->\n    Some lid.txt\n  | _ -> None\n\nlet raw_as_string_exp_exn ~(kind : Js_raw_info.raw_kind) ?is_function (x : t) :\n    Parsetree.expression option =\n  match x with\n  (* TODO also need detect empty phrase case *)\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              ( ({\n                   pexp_desc = Pexp_constant (Pconst_string (str, deli));\n                   pexp_loc = loc;\n                 } as e),\n                _ );\n          _;\n        };\n      ] ->\n    Bs_flow_ast_utils.check_flow_errors ~loc\n      ~offset:(Bs_flow_ast_utils.flow_deli_offset deli)\n      (match kind with\n      | Raw_re | Raw_exp ->\n        let ((_loc, e) as prog), errors =\n          Parser_flow.parse_expression (Parser_env.init_env None str) false\n        in\n        (if kind = Raw_re then\n           match e with\n           | Literal {value = RegExp _} -> ()\n           | _ ->\n             Location.raise_errorf ~loc\n               \"Syntax error: a valid JS regex literal expected\");\n        (match is_function with\n        | Some is_function -> (\n          match Classify_function.classify_exp prog with\n          | Js_function {arity; _} -> is_function := Some arity\n          | _ -> ())\n        | None -> ());\n        errors\n      | Raw_program -> snd (Parser_flow.parse_program false None str));\n    Some {e with pexp_desc = Pexp_constant (Pconst_string (str, None))}\n  | _ -> None\n\nlet as_core_type loc (x : t) =\n  match x with\n  | PTyp x -> x\n  | _ -> Location.raise_errorf ~loc \"except a core type\"\n\nlet as_ident (x : t) =\n  match x with\n  | PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_ident ident}, _)}] ->\n    Some ident\n  | _ -> None\n\ntype lid = string 
Asttypes.loc\n\ntype label_expr = lid * Parsetree.expression\n\ntype action = lid * Parsetree.expression option\n(** None means punning is hit\n    {[\n      {x}\n    ]}\n    otherwise it comes with a payload\n    {[\n      {x = exp}\n    ]} *)\n\nlet unrecognized_config_record loc text =\n  Location.prerr_warning loc (Warnings.Bs_derive_warning text)\n\nlet ident_or_record_as_config loc (x : t) :\n    (string Location.loc * Parsetree.expression option) list =\n  match x with\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              ( {pexp_desc = Pexp_record (label_exprs, with_obj); pexp_loc = loc},\n                _ );\n          _;\n        };\n      ] -> (\n    match with_obj with\n    | None ->\n      Ext_list.map label_exprs (fun u ->\n          match u with\n          | ( {txt = Lident name; loc},\n              {Parsetree.pexp_desc = Pexp_ident {txt = Lident name2}} )\n            when name2 = name ->\n            ({Asttypes.txt = name; loc}, None)\n          | {txt = Lident name; loc}, y -> ({Asttypes.txt = name; loc}, Some y)\n          | _ -> Location.raise_errorf ~loc \"Qualified label is not allowed\")\n    | Some _ ->\n      unrecognized_config_record loc \"`with` is not supported, discarding\";\n      [])\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              ({pexp_desc = Pexp_ident {loc = lloc; txt = Lident txt}}, _);\n        };\n      ] ->\n    [({Asttypes.txt; loc = lloc}, None)]\n  | PStr [] -> []\n  | _ ->\n    unrecognized_config_record loc \"invalid attribute config-record, ignoring\";\n    []\n\nlet assert_strings loc (x : t) : string list =\n  let exception Not_str in\n  match x with\n  | PStr\n      [\n        {\n          pstr_desc = Pstr_eval ({pexp_desc = Pexp_tuple strs; _}, _);\n          pstr_loc = loc;\n          _;\n        };\n      ] -> (\n    try\n      Ext_list.map strs (fun e ->\n          match (e : Parsetree.expression) with\n          | {pexp_desc = 
Pexp_constant (Pconst_string (name, _)); _} -> name\n          | _ -> raise Not_str)\n    with Not_str -> Location.raise_errorf ~loc \"expect string tuple list\")\n  | PStr\n      [\n        {\n          pstr_desc =\n            Pstr_eval\n              ({pexp_desc = Pexp_constant (Pconst_string (name, _)); _}, _);\n          _;\n        };\n      ] ->\n    [name]\n  | PStr [] -> []\n  | PSig _ | PStr _ | PTyp _ | PPat _ ->\n    Location.raise_errorf ~loc \"expect string tuple list\"\n\nlet assert_bool_lit (e : Parsetree.expression) =\n  match e.pexp_desc with\n  | Pexp_construct ({txt = Lident \"true\"}, None) -> true\n  | Pexp_construct ({txt = Lident \"false\"}, None) -> false\n  | _ ->\n    Location.raise_errorf ~loc:e.pexp_loc\n      \"expect `true` or `false` in this field\"\n\nlet empty : t = Parsetree.PStr []\n\nlet table_dispatch table (action : action) =\n  match action with\n  | {txt = name; loc}, y -> (\n    match Map_string.find_exn table name with\n    | fn -> fn y\n    | exception _ -> Location.raise_errorf ~loc \"%s is not supported\" name)\n"
  },
  {
    "path": "analysis/vendor/ml/ast_payload.mli",
    "content": "(* Copyright (C) 2015-2016 Bloomberg Finance L.P.\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\n(** A utility module used when destructuring parsetree attributes, used for\n    compiling FFI attributes and built-in ppx *)\n\ntype t = Parsetree.payload\n\ntype lid = string Asttypes.loc\n\ntype label_expr = lid * Parsetree.expression\n\ntype action = lid * Parsetree.expression option\n\nval is_single_string : t -> (string * string option) option\n\nval is_single_string_as_ast : t -> Parsetree.expression option\n\nval is_single_int : t -> int option\n\nval is_single_float : t -> string option\n\nval is_single_bigint : t -> string option\n\nval is_single_bool : t -> bool option\n\nval is_single_ident : t -> Longident.t option\n\nval raw_as_string_exp_exn :\n  kind:Js_raw_info.raw_kind ->\n  ?is_function:int option ref ->\n  t ->\n  Parsetree.expression option\n(** Convert %raw into expression *)\n\nval as_core_type : Location.t -> t -> Parsetree.core_type\n\n(* val as_empty_structure :  t -> bool  *)\nval as_ident : t -> Longident.t Asttypes.loc option\n\n(* val raw_string_payload : Location.t -> string -> t  *)\nval assert_strings : Location.t -> t -> string list\n\n(** as a record or empty it will accept\n\n    {[\n      [@@@config]\n    ]}\n    or\n    {[\n      [@@@config no_export]\n    ]}\n    or\n    {[\n      [@@@config { property  .. } ]\n    ]}\n    Note that we only\n    {[\n      {flat_property}\n    ]}\n    below is not allowed\n    {[\n      {M.flat_property}\n    ]} *)\n\nval ident_or_record_as_config : Location.t -> t -> action list\n\nval assert_bool_lit : Parsetree.expression -> bool\n\nval empty : t\n\nval table_dispatch :\n  (Parsetree.expression option -> 'a) Map_string.t -> action -> 'a\n\nval unrecognized_config_record : Location.t -> string -> unit\n(** Report to the user, as a warning, that the bs-attribute parser is bailing\n    out. (This is to allow external ppx, like ppx_deriving, to pick up where the\n    builtin ppx leave off.) *)\n"
  },
  {
    "path": "analysis/vendor/ml/ast_uncurried.ml",
    "content": "(* Uncurried AST *)\n\nlet encode_arity_string arity = \"Has_arity\" ^ string_of_int arity\nlet decode_arity_string arity_s =\n  int_of_string\n    ((String.sub [@doesNotRaise]) arity_s 9 (String.length arity_s - 9))\n\nlet arity_type ~loc arity =\n  Ast_helper.Typ.variant ~loc\n    [Rtag ({txt = encode_arity_string arity; loc}, [], true, [])]\n    Closed None\n\nlet arity_from_type (typ : Parsetree.core_type) =\n  match typ.ptyp_desc with\n  | Ptyp_variant ([Rtag ({txt}, _, _, _)], _, _) -> decode_arity_string txt\n  | _ -> assert false\n\nlet uncurried_type ~loc ~arity t_arg =\n  let t_arity = arity_type ~loc arity in\n  Ast_helper.Typ.constr ~loc {txt = Lident \"function$\"; loc} [t_arg; t_arity]\n\nlet arity_to_attributes arity =\n  [\n    ( Location.mknoloc \"res.arity\",\n      Parsetree.PStr\n        [\n          Ast_helper.Str.eval\n            (Ast_helper.Exp.constant\n               (Pconst_integer (string_of_int arity, None)));\n        ] );\n  ]\n\nlet rec attributes_to_arity (attrs : Parsetree.attributes) =\n  match attrs with\n  | ( {txt = \"res.arity\"},\n      PStr\n        [\n          {\n            pstr_desc =\n              Pstr_eval\n                ({pexp_desc = Pexp_constant (Pconst_integer (arity, _))}, _);\n          };\n        ] )\n    :: _ ->\n    int_of_string arity\n  | _ :: rest -> attributes_to_arity rest\n  | _ -> assert false\n\nlet uncurried_fun ~loc ~arity fun_expr =\n  Ast_helper.Exp.construct ~loc\n    ~attrs:(arity_to_attributes arity)\n    (Location.mknoloc (Longident.Lident \"Function$\"))\n    (Some fun_expr)\n\nlet expr_is_uncurried_fun (expr : Parsetree.expression) =\n  match expr.pexp_desc with\n  | Pexp_construct ({txt = Lident \"Function$\"}, Some _) -> true\n  | _ -> false\n\nlet expr_extract_uncurried_fun (expr : Parsetree.expression) =\n  match expr.pexp_desc with\n  | Pexp_construct ({txt = Lident \"Function$\"}, Some e) -> e\n  | _ -> assert false\n\nlet core_type_is_uncurried_fun (typ : 
Parsetree.core_type) =\n  match typ.ptyp_desc with\n  | Ptyp_constr ({txt = Lident \"function$\"}, [{ptyp_desc = Ptyp_arrow _}; _]) ->\n    true\n  | _ -> false\n\nlet core_type_extract_uncurried_fun (typ : Parsetree.core_type) =\n  match typ.ptyp_desc with\n  | Ptyp_constr ({txt = Lident \"function$\"}, [t_arg; t_arity]) ->\n    (arity_from_type t_arity, t_arg)\n  | _ -> assert false\n\nlet type_is_uncurried_fun = Ast_uncurried_utils.type_is_uncurried_fun\n\nlet type_extract_uncurried_fun (typ : Types.type_expr) =\n  match typ.desc with\n  | Tconstr (Pident {name = \"function$\"}, [t_arg; _], _) -> t_arg\n  | _ -> assert false\n\n(* Typed AST *)\n\nlet arity_to_type arity =\n  let arity_s = encode_arity_string arity in\n  Ctype.newty\n    (Tvariant\n       {\n         row_fields = [(arity_s, Rpresent None)];\n         row_more = Ctype.newty Tnil;\n         row_bound = ();\n         row_closed = true;\n         row_fixed = false;\n         row_name = None;\n       })\n\nlet type_to_arity (t_arity : Types.type_expr) =\n  match (Ctype.repr t_arity).desc with\n  | Tvariant {row_fields = [(label, _)]} -> decode_arity_string label\n  | _ -> assert false\n\nlet make_uncurried_type ~env ~arity t =\n  let typ_arity = arity_to_type arity in\n  let lid : Longident.t = Lident \"function$\" in\n  let path = Env.lookup_type lid env in\n  Ctype.newconstr path [t; typ_arity]\n\nlet uncurried_type_get_arity ~env typ =\n  match (Ctype.expand_head env typ).desc with\n  | Tconstr (Pident {name = \"function$\"}, [_t; t_arity], _) ->\n    type_to_arity t_arity\n  | _ -> assert false\n\nlet uncurried_type_get_arity_opt ~env typ =\n  match (Ctype.expand_head env typ).desc with\n  | Tconstr (Pident {name = \"function$\"}, [_t; t_arity], _) ->\n    Some (type_to_arity t_arity)\n  | _ -> None\n"
  },
  {
    "path": "analysis/vendor/ml/ast_uncurried_utils.ml",
    "content": "let type_is_uncurried_fun (typ : Types.type_expr) =\n  match typ.desc with\n  | Tconstr (Pident {name = \"function$\"}, [{desc = Tarrow _}; _], _) -> true\n  | _ -> false\n"
  },
  {
    "path": "analysis/vendor/ml/ast_untagged_variants.ml",
    "content": "module Instance = struct\n  type t = Array | Blob | Date | File | Promise | RegExp\n  let to_string = function\n    | Array -> \"Array\"\n    | Blob -> \"Blob\"\n    | Date -> \"Date\"\n    | File -> \"File\"\n    | Promise -> \"Promise\"\n    | RegExp -> \"RegExp\"\nend\n\ntype untagged_error =\n  | OnlyOneUnknown of string\n  | AtMostOneObject\n  | AtMostOneInstance of Instance.t\n  | AtMostOneFunction\n  | AtMostOneString\n  | AtMostOneNumber\n  | AtMostOneBigint\n  | AtMostOneBoolean\n  | DuplicateLiteral of string\n  | ConstructorMoreThanOneArg of string\ntype error =\n  | InvalidVariantAsAnnotation\n  | Duplicated_bs_as\n  | InvalidVariantTagAnnotation\n  | InvalidUntaggedVariantDefinition of untagged_error\nexception Error of Location.t * error\n\nlet report_error ppf =\n  let open Format in\n  function\n  | InvalidVariantAsAnnotation ->\n    fprintf ppf\n      \"A variant case annotation @as(...) must be a string or integer, \\\n       boolean, null, undefined\"\n  | Duplicated_bs_as -> fprintf ppf \"duplicate @as \"\n  | InvalidVariantTagAnnotation ->\n    fprintf ppf \"A variant tag annotation @tag(...) must be a string\"\n  | InvalidUntaggedVariantDefinition untagged_variant ->\n    fprintf ppf \"This untagged variant definition is invalid: %s\"\n      (match untagged_variant with\n      | OnlyOneUnknown name ->\n        \"Case \" ^ name\n        ^ \" has a payload that is not of one of the recognized shapes (object, \\\n           array, etc). 
Then it must be the only case with payloads.\"\n      | AtMostOneObject -> \"At most one case can be an object type.\"\n      | AtMostOneInstance Array ->\n        \"At most one case can be an array or tuple type.\"\n      | AtMostOneInstance i ->\n        \"At most one case can be a \" ^ Instance.to_string i ^ \" type.\"\n      | AtMostOneFunction -> \"At most one case can be a function type.\"\n      | AtMostOneString -> \"At most one case can be a string type.\"\n      | AtMostOneBoolean -> \"At most one case can be a boolean type.\"\n      | AtMostOneNumber ->\n        \"At most one case can be a number type (int or float).\"\n      | AtMostOneBigint -> \"At most one case can be a bigint type.\"\n      | DuplicateLiteral s -> \"Duplicate literal \" ^ s ^ \".\"\n      | ConstructorMoreThanOneArg name ->\n        \"Constructor \" ^ name ^ \" has more than one argument.\")\n\n(* Type of the runtime representation of an untagged block (case with payoad) *)\ntype block_type =\n  | IntType\n  | StringType\n  | FloatType\n  | BigintType\n  | BooleanType\n  | InstanceType of Instance.t\n  | FunctionType\n  | ObjectType\n  | UnknownType\n\n(*\n  Type of the runtime representation of a tag.\n  Can be a literal (case with no payload), or a block (case with payload).\n  In the case of block it can be tagged or untagged.\n*)\ntype tag_type =\n  | String of string\n  | Int of int\n  | Float of string\n  | BigInt of string\n  | Bool of bool\n  | Null\n  | Undefined (* literal or tagged block *)\n  | Untagged of block_type (* untagged block *)\ntype tag = {name: string; tag_type: tag_type option}\ntype block = {tag: tag; tag_name: string option; block_type: block_type option}\ntype switch_names = {consts: tag array; blocks: block array}\n\nlet untagged = \"unboxed\"\n\nlet has_untagged (attrs : Parsetree.attributes) =\n  Ext_list.exists attrs (function {txt}, _ -> txt = untagged)\n\nlet process_untagged (attrs : Parsetree.attributes) =\n  let st = ref false in\n  Ext_list.iter 
attrs (fun ({txt}, _) ->\n      match txt with\n      | \"unboxed\" -> st := true\n      | _ -> ());\n  !st\n\nlet extract_concrete_typedecl :\n    (Env.t -> Types.type_expr -> Path.t * Path.t * Types.type_declaration) ref =\n  ref (Obj.magic ())\n\nlet expand_head : (Env.t -> Types.type_expr -> Types.type_expr) ref =\n  ref (Obj.magic ())\n\nlet process_tag_type (attrs : Parsetree.attributes) =\n  let st : tag_type option ref = ref None in\n  Ext_list.iter attrs (fun ({txt; loc}, payload) ->\n      match txt with\n      | \"as\" ->\n        if !st = None then (\n          (match Ast_payload.is_single_string payload with\n          | None -> ()\n          | Some (s, _dec) -> st := Some (String s));\n          (match Ast_payload.is_single_int payload with\n          | None -> ()\n          | Some i -> st := Some (Int i));\n          (match Ast_payload.is_single_float payload with\n          | None -> ()\n          | Some f -> st := Some (Float f));\n          (match Ast_payload.is_single_bigint payload with\n          | None -> ()\n          | Some i -> st := Some (BigInt i));\n          (match Ast_payload.is_single_bool payload with\n          | None -> ()\n          | Some b -> st := Some (Bool b));\n          (match Ast_payload.is_single_ident payload with\n          | None -> ()\n          | Some (Lident \"null\") -> st := Some Null\n          | Some (Lident \"undefined\") -> st := Some Undefined\n          | Some _ -> raise (Error (loc, InvalidVariantAsAnnotation)));\n          if !st = None then raise (Error (loc, InvalidVariantAsAnnotation)))\n        else raise (Error (loc, Duplicated_bs_as))\n      | _ -> ());\n  !st\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, err) -> Some (Location.error_of_printer loc report_error err)\n    | _ -> None)\n\nlet report_constructor_more_than_one_arg ~loc ~name =\n  raise\n    (Error\n       (loc, InvalidUntaggedVariantDefinition (ConstructorMoreThanOneArg name)))\n\nlet type_is_builtin_object 
(t : Types.type_expr) =\n  match t.desc with\n  | Tconstr (Path.Pident ident, [_], _) when Ident.name ident = \"dict\" -> true\n  | Tconstr (path, _, _) ->\n    let name = Path.name path in\n    name = \"Js.Dict.t\" || name = \"Js_dict.t\"\n  | _ -> false\n\nlet type_to_instanceof_backed_obj (t : Types.type_expr) =\n  match t.desc with\n  | Tconstr (path, _, _) when Path.same path Predef.path_promise ->\n    Some Instance.Promise\n  | Tconstr (path, _, _) when Path.same path Predef.path_array -> Some Array\n  | Tconstr (path, _, _) -> (\n    match Path.name path with\n    | \"Js_date.t\" -> Some Date\n    | \"Js_re.t\" -> Some RegExp\n    | \"Js_file.t\" -> Some File\n    | \"Js_blob.t\" -> Some Blob\n    | _ -> None)\n  | _ -> None\n\nlet get_block_type_from_typ ~env (t : Types.type_expr) : block_type option =\n  let t = !expand_head env t in\n  match t with\n  | {desc = Tconstr (path, _, _)} when Path.same path Predef.path_string ->\n    Some StringType\n  | {desc = Tconstr (path, _, _)} when Path.same path Predef.path_int ->\n    Some IntType\n  | {desc = Tconstr (path, _, _)} when Path.same path Predef.path_float ->\n    Some FloatType\n  | {desc = Tconstr (path, _, _)} when Path.same path Predef.path_bigint ->\n    Some BigintType\n  | {desc = Tconstr (path, _, _)} when Path.same path Predef.path_bool ->\n    Some BooleanType\n  | {desc = Tconstr _} as t when Ast_uncurried_utils.type_is_uncurried_fun t ->\n    Some FunctionType\n  | {desc = Tarrow _} -> Some FunctionType\n  | {desc = Tconstr (path, _, _)} when Path.same path Predef.path_string ->\n    Some StringType\n  | {desc = Tconstr _} as t when type_is_builtin_object t -> Some ObjectType\n  | {desc = Tconstr _} as t\n    when type_to_instanceof_backed_obj t |> Option.is_some -> (\n    match type_to_instanceof_backed_obj t with\n    | None -> None\n    | Some instance_type -> Some (InstanceType instance_type))\n  | {desc = Ttuple _} -> Some (InstanceType Array)\n  | _ -> None\n\nlet get_block_type ~env 
(cstr : Types.constructor_declaration) :\n    block_type option =\n  match (process_untagged cstr.cd_attributes, cstr.cd_args) with\n  | false, _ -> None\n  | true, Cstr_tuple [t] when get_block_type_from_typ ~env t |> Option.is_some\n    ->\n    get_block_type_from_typ ~env t\n  | true, Cstr_tuple [ty] -> (\n    let default = Some UnknownType in\n    match !extract_concrete_typedecl env ty with\n    | _, _, {type_kind = Type_record (_, Record_unboxed _)} -> default\n    | _, _, {type_kind = Type_record (_, _)} -> Some ObjectType\n    | _ -> default\n    | exception _ -> default)\n  | true, Cstr_tuple (_ :: _ :: _) ->\n    (* C(_, _) with at least 2 args is an object *)\n    Some ObjectType\n  | true, Cstr_record _ ->\n    (* inline record is an object *)\n    Some ObjectType\n  | true, _ -> None (* TODO: add restrictions here *)\n\nlet process_tag_name (attrs : Parsetree.attributes) =\n  let st = ref None in\n  Ext_list.iter attrs (fun ({txt; loc}, payload) ->\n      match txt with\n      | \"tag\" ->\n        if !st = None then (\n          (match Ast_payload.is_single_string payload with\n          | None -> ()\n          | Some (s, _dec) -> st := Some s);\n          if !st = None then raise (Error (loc, InvalidVariantTagAnnotation)))\n        else raise (Error (loc, Duplicated_bs_as))\n      | _ -> ());\n  !st\n\nlet get_tag_name (cstr : Types.constructor_declaration) =\n  process_tag_name cstr.cd_attributes\n\nlet is_nullary_variant (x : Types.constructor_arguments) =\n  match x with\n  | Types.Cstr_tuple [] -> true\n  | _ -> false\n\nlet check_invariant ~is_untagged_def ~(consts : (Location.t * tag) list)\n    ~(blocks : (Location.t * block) list) =\n  let module StringSet = Set.Make (String) in\n  let string_literals = ref StringSet.empty in\n  let nonstring_literals = ref StringSet.empty in\n  let instance_types = Hashtbl.create 1 in\n  let function_types = ref 0 in\n  let object_types = ref 0 in\n  let string_types = ref 0 in\n  let number_types = ref 0 
in\n  let bigint_types = ref 0 in\n  let boolean_types = ref 0 in\n  let unknown_types = ref 0 in\n  let add_string_literal ~loc s =\n    if StringSet.mem s !string_literals then\n      raise (Error (loc, InvalidUntaggedVariantDefinition (DuplicateLiteral s)));\n    string_literals := StringSet.add s !string_literals\n  in\n  let add_nonstring_literal ~loc s =\n    if StringSet.mem s !nonstring_literals then\n      raise (Error (loc, InvalidUntaggedVariantDefinition (DuplicateLiteral s)));\n    nonstring_literals := StringSet.add s !nonstring_literals\n  in\n  let invariant loc name =\n    if !unknown_types <> 0 && List.length blocks <> 1 then\n      raise\n        (Error (loc, InvalidUntaggedVariantDefinition (OnlyOneUnknown name)));\n    if !object_types > 1 then\n      raise (Error (loc, InvalidUntaggedVariantDefinition AtMostOneObject));\n    Hashtbl.iter\n      (fun i count ->\n        if count > 1 then\n          raise\n            (Error (loc, InvalidUntaggedVariantDefinition (AtMostOneInstance i))))\n      instance_types;\n    if !function_types > 1 then\n      raise (Error (loc, InvalidUntaggedVariantDefinition AtMostOneFunction));\n    if !string_types > 1 then\n      raise (Error (loc, InvalidUntaggedVariantDefinition AtMostOneString));\n    if !number_types > 1 then\n      raise (Error (loc, InvalidUntaggedVariantDefinition AtMostOneNumber));\n    if !bigint_types > 1 then\n      raise (Error (loc, InvalidUntaggedVariantDefinition AtMostOneBigint));\n    if !boolean_types > 1 then\n      raise (Error (loc, InvalidUntaggedVariantDefinition AtMostOneBoolean));\n    if\n      !boolean_types > 0\n      && (StringSet.mem \"true\" !nonstring_literals\n         || StringSet.mem \"false\" !nonstring_literals)\n    then raise (Error (loc, InvalidUntaggedVariantDefinition AtMostOneBoolean));\n    ()\n  in\n  Ext_list.rev_iter consts (fun (loc, literal) ->\n      match literal.tag_type with\n      | Some (String s) -> add_string_literal ~loc s\n      | Some (Int 
i) -> add_nonstring_literal ~loc (string_of_int i)\n      | Some (Float f) -> add_nonstring_literal ~loc f\n      | Some (BigInt i) -> add_nonstring_literal ~loc i\n      | Some Null -> add_nonstring_literal ~loc \"null\"\n      | Some Undefined -> add_nonstring_literal ~loc \"undefined\"\n      | Some (Bool b) ->\n        add_nonstring_literal ~loc (if b then \"true\" else \"false\")\n      | Some (Untagged _) -> ()\n      | None -> add_string_literal ~loc literal.name);\n  if is_untagged_def then\n    Ext_list.rev_iter blocks (fun (loc, block) ->\n        match block.block_type with\n        | Some block_type ->\n          (match block_type with\n          | UnknownType -> incr unknown_types\n          | ObjectType -> incr object_types\n          | InstanceType i ->\n            let count =\n              Hashtbl.find_opt instance_types i |> Option.value ~default:0\n            in\n            Hashtbl.replace instance_types i (count + 1)\n          | FunctionType -> incr function_types\n          | IntType | FloatType -> incr number_types\n          | BigintType -> incr bigint_types\n          | BooleanType -> incr boolean_types\n          | StringType -> incr string_types);\n          invariant loc block.tag.name\n        | None -> ())\n\nlet names_from_type_variant ?(is_untagged_def = false) ~env\n    (cstrs : Types.constructor_declaration list) =\n  let get_cstr_name (cstr : Types.constructor_declaration) =\n    ( cstr.cd_loc,\n      {\n        name = Ident.name cstr.cd_id;\n        tag_type = process_tag_type cstr.cd_attributes;\n      } )\n  in\n  let get_block (cstr : Types.constructor_declaration) : block =\n    let tag = snd (get_cstr_name cstr) in\n    {tag; tag_name = get_tag_name cstr; block_type = get_block_type ~env cstr}\n  in\n  let consts, blocks =\n    Ext_list.fold_left cstrs ([], []) (fun (consts, blocks) cstr ->\n        if is_nullary_variant cstr.cd_args then\n          (get_cstr_name cstr :: consts, blocks)\n        else (consts, 
(cstr.cd_loc, get_block cstr) :: blocks))\n  in\n  check_invariant ~is_untagged_def ~consts ~blocks;\n  let blocks = blocks |> List.map snd in\n  let consts = consts |> List.map snd in\n  let consts = Ext_array.reverse_of_list consts in\n  let blocks = Ext_array.reverse_of_list blocks in\n  Some {consts; blocks}\n\nlet check_well_formed ~env ~is_untagged_def\n    (cstrs : Types.constructor_declaration list) =\n  ignore (names_from_type_variant ~env ~is_untagged_def cstrs)\n\nlet has_undefined_literal attrs = process_tag_type attrs = Some Undefined\n\nlet block_is_object ~env attrs = get_block_type ~env attrs = Some ObjectType\n\nmodule DynamicChecks = struct\n  type op = EqEqEq | NotEqEq | Or | And\n  type 'a t =\n    | BinOp of op * 'a t * 'a t\n    | TagType of tag_type\n    | TypeOf of 'a t\n    | IsInstanceOf of Instance.t * 'a t\n    | Not of 'a t\n    | Expr of 'a\n\n  let bin op x y = BinOp (op, x, y)\n  let tag_type t = TagType t\n  let typeof x = TypeOf x\n  let str s = String s |> tag_type\n  let is_instance i x = IsInstanceOf (i, x)\n  let not x = Not x\n  let nil = Null |> tag_type\n  let undefined = Undefined |> tag_type\n  let object_ = Untagged ObjectType |> tag_type\n\n  let function_ = Untagged FunctionType |> tag_type\n  let string = Untagged StringType |> tag_type\n  let number = Untagged IntType |> tag_type\n\n  let bigint = Untagged BigintType |> tag_type\n\n  let boolean = Untagged BooleanType |> tag_type\n\n  let ( == ) x y = bin EqEqEq x y\n  let ( != ) x y = bin NotEqEq x y\n  let ( ||| ) x y = bin Or x y\n  let ( &&& ) x y = bin And x y\n\n  let rec is_a_literal_case ~(literal_cases : tag_type list) ~block_cases\n      (e : _ t) =\n    let literals_overlaps_with_string () =\n      Ext_list.exists literal_cases (function\n        | String _ -> true\n        | _ -> false)\n    in\n    let literals_overlaps_with_number () =\n      Ext_list.exists literal_cases (function\n        | Int _ | Float _ -> true\n        | _ -> false)\n    in\n    
let literals_overlaps_with_bigint () =\n      Ext_list.exists literal_cases (function\n        | BigInt _ -> true\n        | _ -> false)\n    in\n    let literals_overlaps_with_boolean () =\n      Ext_list.exists literal_cases (function\n        | Bool _ -> true\n        | _ -> false)\n    in\n    let literals_overlaps_with_object () =\n      Ext_list.exists literal_cases (function\n        | Null -> true\n        | _ -> false)\n    in\n    let is_literal_case (t : tag_type) : _ t = e == tag_type t in\n    let is_not_block_case (c : block_type) : _ t =\n      match c with\n      | StringType\n        when literals_overlaps_with_string () = false (* No overlap *) ->\n        typeof e != string\n      | IntType when literals_overlaps_with_number () = false ->\n        typeof e != number\n      | FloatType when literals_overlaps_with_number () = false ->\n        typeof e != number\n      | BigintType when literals_overlaps_with_bigint () = false ->\n        typeof e != bigint\n      | BooleanType when literals_overlaps_with_boolean () = false ->\n        typeof e != boolean\n      | InstanceType i -> not (is_instance i e)\n      | FunctionType -> typeof e != function_\n      | ObjectType when literals_overlaps_with_object () = false ->\n        typeof e != object_\n      | ObjectType (* overlap *) -> e == nil ||| (typeof e != object_)\n      | StringType (* overlap *)\n      | IntType (* overlap *)\n      | FloatType (* overlap *)\n      | BigintType (* overlap *)\n      | BooleanType (* overlap *)\n      | UnknownType -> (\n        (* We don't know the type of unknown, so we need to express:\n           this is not one of the literals *)\n        match literal_cases with\n        | [] ->\n          (* this should not happen *)\n          assert false\n        | l1 :: others ->\n          let is_literal_1 = is_literal_case l1 in\n          Ext_list.fold_right others is_literal_1 (fun literal_n acc ->\n              is_literal_case literal_n ||| acc))\n    in\n    
match block_cases with\n    | [c] -> is_not_block_case c\n    | c1 :: (_ :: _ as rest) ->\n      is_not_block_case c1\n      &&& is_a_literal_case ~literal_cases ~block_cases:rest e\n    | [] -> assert false\n\n  let is_int_tag ?(has_null_undefined_other = (false, false, false)) (e : _ t) :\n      _ t =\n    let has_null, has_undefined, has_other = has_null_undefined_other in\n    if has_null && has_undefined = false && has_other = false then\n      (* null *)\n      bin EqEqEq e nil\n    else if has_null && has_undefined && has_other = false then\n      (* null + undefined *)\n      e == nil ||| e == undefined\n    else if has_null = false && has_undefined && has_other = false then\n      (* undefined *)\n      e == undefined\n    else if has_null then\n      (* (null + undefined + other) || (null + other) *)\n      e == nil ||| typeof e != object_\n    else (* (undefiled + other) || other *)\n      typeof e != object_\n\n  let add_runtime_type_check ~tag_type ~(block_cases : block_type list) x y =\n    let instances =\n      Ext_list.filter_map block_cases (function\n        | InstanceType i -> Some i\n        | _ -> None)\n    in\n    match tag_type with\n    | Untagged\n        ( IntType | StringType | FloatType | BigintType | BooleanType\n        | FunctionType ) ->\n      typeof y == x\n    | Untagged ObjectType ->\n      if instances <> [] then\n        let not_one_of_the_instances =\n          Ext_list.fold_right instances\n            (typeof y == x)\n            (fun i x -> x &&& not (is_instance i y))\n        in\n        not_one_of_the_instances\n      else typeof y == x\n    | Untagged (InstanceType i) -> is_instance i y\n    | Untagged UnknownType ->\n      (* This should not happen because unknown must be the only non-literal case *)\n      assert false\n    | Bool _ | Float _ | Int _ | BigInt _ | String _ | Null | Undefined -> x\nend\n"
  },
  {
    "path": "analysis/vendor/ml/asttypes.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Auxiliary AST types used by parsetree and typedtree. *)\n\ntype constant =\n  | Const_int of int\n  | Const_char of int\n  | Const_string of string * string option\n  | Const_float of string\n  | Const_int32 of int32\n  | Const_int64 of int64\n  | Const_bigint of bool * string\n\ntype rec_flag = Nonrecursive | Recursive\n\ntype direction_flag = Upto | Downto\n\n(* Order matters, used in polymorphic comparison *)\ntype private_flag = Private | Public\n\ntype mutable_flag = Immutable | Mutable\n\ntype virtual_flag = Virtual | Concrete\n\ntype override_flag = Override | Fresh\n\ntype closed_flag = Closed | Open\n\ntype label = string\n\ntype arg_label =\n  | Nolabel\n  | Labelled of string (*  label:T -> ... *)\n  | Optional of string (* ?label:T -> ... 
*)\n\ntype 'a loc = 'a Location.loc = {txt: 'a; loc: Location.t}\n\ntype variance = Covariant | Contravariant | Invariant\n\nlet same_arg_label (x : arg_label) y =\n  match x with\n  | Nolabel -> y = Nolabel\n  | Labelled s -> (\n    match y with\n    | Labelled s0 -> s = s0\n    | _ -> false)\n  | Optional s -> (\n    match y with\n    | Optional s0 -> s = s0\n    | _ -> false)\n"
  },
  {
    "path": "analysis/vendor/ml/bigint_utils.ml",
    "content": "let is_neg s = String.length s > 0 && s.[0] = '-'\nlet is_pos s = String.length s > 0 && s.[0] = '+'\n\nlet to_string sign s = (if sign then \"\" else \"-\") ^ s\n\nlet remove_leading_sign str : bool * string =\n  let len = String.length str in\n  if len = 0 then (false, str)\n  else if is_neg str || is_pos str then\n    (not (is_neg str), String.sub str 1 (len - 1))\n  else (true, str)\n\n(*\n   Removes leading zeros from the string only if the first non-zero character\n   encountered is a digit. Unlike int and float, bigint cannot be of_string, so\n   This function removes only leading 0s. Instead, values like 00x1 are not converted\n   and are intended to be syntax errors.\n\n   000n -> 0n\n   001n -> 1n\n   01_000_000n -> 1000000n\n   -00100n -> -100n\n\n   The following values are syntax errors\n\n   00o1n -> 00o1n\n   00x1_000_000n -> 00x1000000n\n*)\nlet remove_leading_zeros str =\n  let aux str =\n    let len = String.length str in\n    if len = 0 then \"\"\n    else\n      let is_digit c = c >= '0' && c <= '9' in\n      let idx = ref 0 in\n      while !idx < len && str.[!idx] = '0' do\n        incr idx\n      done;\n      if !idx >= len then \"0\"\n        (* If the string contains only '0's, return '0'. *)\n      else if is_digit str.[!idx] then String.sub str !idx (len - !idx)\n        (* Remove leading zeros and return the rest of the string. 
*)\n      else str\n  in\n  (* Replace the delimiters '_' inside number *)\n  let str = String.concat \"\" (String.split_on_char '_' str) in\n  (* Check if negative *)\n  let starts_with_minus = str <> \"\" && str.[0] = '-' in\n  let str =\n    if is_neg str || is_pos str then String.sub str 1 (String.length str - 1)\n    else str\n  in\n  let processed_str = aux str in\n  if starts_with_minus then \"-\" ^ processed_str else processed_str\n\nlet parse_bigint s =\n  let sign, i = remove_leading_sign s in\n  (sign, remove_leading_zeros i)\n\nlet is_valid s =\n  let len = String.length s in\n  if len = 0 then false\n  else\n    let is_digit c = (c >= '0' && c <= '9') || c = '_' in\n    let first_char = s.[0] in\n    if first_char <> '-' && first_char <> '+' && not (is_digit first_char) then\n      false\n    else\n      let rec check idx =\n        if idx >= len then true\n        else\n          let c = s.[idx] in\n          if is_digit c then check (idx + 1) else false\n      in\n      check 1\n\nlet compare (p0, s0) (p1, s1) =\n  match (p0, p1) with\n  | false, true -> -1 (* If only s1 is positive, s0 is smaller. *)\n  | true, false -> 1 (* If only s0 is positive, s0 is larger. *)\n  | _ ->\n    (* If both numbers are either negative or positive, compare their lengths. *)\n    let len0, len1 = (String.length s0, String.length s1) in\n    if len0 = len1 then\n      if p0 then String.compare s0 s1\n      else\n        String.compare s1\n          s0 (* If lengths are equal, compare the strings directly. *)\n    else if len0 > len1 then\n      if p0 then 1\n      else -1 (* A longer s0 means it's larger unless it's negative. *)\n    else if\n      (* len0 < len1 *)\n      p0\n    then -1\n    else 1 (* A longer s1 means s0 is smaller unless s1 is negative. *)\n"
  },
  {
    "path": "analysis/vendor/ml/bigint_utils.mli",
    "content": "val is_neg : string -> bool\nval is_pos : string -> bool\nval to_string : bool -> string -> string\nval remove_leading_sign : string -> bool * string\nval remove_leading_zeros : string -> string\nval parse_bigint : string -> bool * string\nval is_valid : string -> bool\nval compare : bool * string -> bool * string -> int\n"
  },
  {
    "path": "analysis/vendor/ml/bs_flow_ast_utils.ml",
    "content": "(* Copyright (C) 2020 - Hongbo Zhang, Authors of ReScript \n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nlet offset_pos ({pos_lnum; pos_bol; pos_cnum} as loc : Lexing.position)\n    ({line; column} : Loc.position) first_line_offset : Lexing.position =\n  if line = 1 then {loc with pos_cnum = pos_cnum + column + first_line_offset}\n  else {loc with pos_lnum = pos_lnum + line - 1; pos_cnum = pos_bol + column}\n\nlet flow_deli_offset deli =\n  match deli with\n  | None -> 1 (* length of '\"'*)\n  | Some deli -> String.length deli + 2\n(* length of \"{|\"*)\n\n(* Here the loc is  the payload loc *)\nlet check_flow_errors ~(loc : Location.t) ~offset\n    (errors : (Loc.t * Parse_error.t) list) : unit =\n  match errors with\n  | [] -> ()\n  | ({start; _end}, first_error) :: _ ->\n    let loc_start = loc.loc_start in\n    Location.prerr_warning\n      {\n        loc with\n        loc_start = offset_pos loc_start start offset;\n        loc_end = offset_pos loc_start _end offset;\n      }\n      (Bs_ffi_warning (Parse_error.PP.error first_error))\n"
  },
  {
    "path": "analysis/vendor/ml/bs_flow_ast_utils.mli",
    "content": "(* Copyright (C) 2020 - Authors of ReScript \n *\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval flow_deli_offset : string option -> int\n\nval check_flow_errors :\n  loc:Location.t -> offset:int -> (Loc.t * Parse_error.t) list -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/btype.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*  Xavier Leroy and Jerome Vouillon, projet Cristal, INRIA Rocquencourt  *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Basic operations on core types *)\n\nopen Misc\nopen Asttypes\nopen Types\n\n(**** Sets, maps and hashtables of types ****)\n\nmodule TypeSet = Set.Make (TypeOps)\nmodule TypeMap = Map.Make (TypeOps)\nmodule TypeHash = Hashtbl.Make (TypeOps)\n\n(**** Forward declarations ****)\n\nlet print_raw =\n  ref (fun _ -> assert false : Format.formatter -> type_expr -> unit)\n\n(**** Type level management ****)\n\nlet generic_level = 100000000\n\n(* Used to mark a type during a traversal. 
*)\nlet lowest_level = 0\nlet pivot_level = (2 * lowest_level) - 1\n(* pivot_level - lowest_level < lowest_level *)\n\n(**** Some type creators ****)\n\nlet new_id = ref (-1)\n\nlet newty2 level desc =\n  incr new_id;\n  {desc; level; id = !new_id}\nlet newgenty desc = newty2 generic_level desc\nlet newgenvar ?name () = newgenty (Tvar name)\n(*\nlet newmarkedvar level =\n  incr new_id; { desc = Tvar; level = pivot_level - level; id = !new_id }\nlet newmarkedgenvar () =\n  incr new_id;\n  { desc = Tvar; level = pivot_level - generic_level; id = !new_id }\n*)\n\n(**** Check some types ****)\n\nlet is_Tvar = function\n  | {desc = Tvar _} -> true\n  | _ -> false\nlet is_Tunivar = function\n  | {desc = Tunivar _} -> true\n  | _ -> false\nlet is_Tconstr = function\n  | {desc = Tconstr _} -> true\n  | _ -> false\n\nlet dummy_method = \"*dummy method*\"\nlet default_mty = function\n  | Some mty -> mty\n  | None -> Mty_signature []\n\n(**** Definitions for backtracking ****)\n\ntype change =\n  | Ctype of type_expr * type_desc\n  | Ccompress of type_expr * type_desc * type_desc\n  | Clevel of type_expr * int\n  | Cname of\n      (Path.t * type_expr list) option ref * (Path.t * type_expr list) option\n  | Crow of row_field option ref * row_field option\n  | Ckind of field_kind option ref * field_kind option\n  | Ccommu of commutable ref * commutable\n  | Cuniv of type_expr option ref * type_expr option\n  | Ctypeset of TypeSet.t ref * TypeSet.t\n\ntype changes = Change of change * changes ref | Unchanged | Invalid\n\nlet trail = Weak.create 1\n\nlet log_change ch =\n  match Weak.get trail 0 with\n  | None -> ()\n  | Some r ->\n    let r' = ref Unchanged in\n    r := Change (ch, r');\n    Weak.set trail 0 (Some r')\n\n(**** Representative of a type ****)\n\nlet rec field_kind_repr = function\n  | Fvar {contents = Some kind} -> field_kind_repr kind\n  | kind -> kind\n\nlet rec repr_link compress t d = function\n  | {desc = Tlink t' as d'} -> repr_link true t d' t'\n  | {desc = 
Tfield (_, k, _, t') as d'} when field_kind_repr k = Fabsent ->\n    repr_link true t d' t'\n  | t' ->\n    if compress then (\n      log_change (Ccompress (t, t.desc, d));\n      t.desc <- d);\n    t'\n\nlet repr t =\n  match t.desc with\n  | Tlink t' as d -> repr_link false t d t'\n  | Tfield (_, k, _, t') as d when field_kind_repr k = Fabsent ->\n    repr_link false t d t'\n  | _ -> t\n\nlet rec commu_repr = function\n  | Clink r when !r <> Cunknown -> commu_repr !r\n  | c -> c\n\nlet rec row_field_repr_aux tl = function\n  | Reither (_, tl', _, {contents = Some fi}) ->\n    row_field_repr_aux (tl @ tl') fi\n  | Reither (c, tl', m, r) -> Reither (c, tl @ tl', m, r)\n  | Rpresent (Some _) when tl <> [] -> Rpresent (Some (List.hd tl))\n  | fi -> fi\n\nlet row_field_repr fi = row_field_repr_aux [] fi\n\nlet rec rev_concat l ll =\n  match ll with\n  | [] -> l\n  | l' :: ll -> rev_concat (l' @ l) ll\n\nlet rec row_repr_aux ll row =\n  match (repr row.row_more).desc with\n  | Tvariant row' ->\n    let f = row.row_fields in\n    row_repr_aux (if f = [] then ll else f :: ll) row'\n  | _ ->\n    if ll = [] then row\n    else {row with row_fields = rev_concat row.row_fields ll}\n\nlet row_repr row = row_repr_aux [] row\n\nlet rec row_field tag row =\n  let rec find = function\n    | (tag', f) :: fields ->\n      if tag = tag' then row_field_repr f else find fields\n    | [] -> (\n      match repr row.row_more with\n      | {desc = Tvariant row'} -> row_field tag row'\n      | _ -> Rabsent)\n  in\n  find row.row_fields\n\nlet rec row_more row =\n  match repr row.row_more with\n  | {desc = Tvariant row'} -> row_more row'\n  | ty -> ty\n\nlet row_fixed row =\n  let row = row_repr row in\n  row.row_fixed\n  ||\n  match (repr row.row_more).desc with\n  | Tvar _ | Tnil -> false\n  | Tunivar _ | Tconstr _ -> true\n  | _ -> assert false\n\nlet static_row row =\n  let row = row_repr row in\n  row.row_closed\n  && List.for_all\n       (fun (_, f) ->\n         match row_field_repr f 
with\n         | Reither _ -> false\n         | _ -> true)\n       row.row_fields\n\nlet hash_variant s =\n  let accu = ref 0 in\n  for i = 0 to String.length s - 1 do\n    accu := (223 * !accu) + Char.code s.[i]\n  done;\n  (* reduce to 31 bits *)\n  accu := !accu land ((1 lsl 31) - 1);\n  (* make it signed for 64 bits architectures *)\n  if !accu > 0x3FFFFFFF then !accu - (1 lsl 31) else !accu\n\nlet proxy ty =\n  let ty0 = repr ty in\n  match ty0.desc with\n  | Tvariant row when not (static_row row) -> row_more row\n  | Tobject (ty, _) ->\n    let rec proxy_obj ty =\n      match ty.desc with\n      | Tfield (_, _, _, ty) | Tlink ty -> proxy_obj ty\n      | Tvar _ | Tunivar _ | Tconstr _ -> ty\n      | Tnil -> ty0\n      | _ -> assert false\n    in\n    proxy_obj ty\n  | _ -> ty0\n\n(**** Utilities for fixed row private types ****)\n\nlet row_of_type t =\n  match (repr t).desc with\n  | Tobject (t, _) ->\n    let rec get_row t =\n      let t = repr t in\n      match t.desc with\n      | Tfield (_, _, _, t) -> get_row t\n      | _ -> t\n    in\n    get_row t\n  | Tvariant row -> row_more row\n  | _ -> t\n\nlet has_constr_row t = (not (is_Tconstr t)) && is_Tconstr (row_of_type t)\n\nlet is_row_name s =\n  let l = String.length s in\n  if l < 4 then false else String.sub s (l - 4) 4 = \"#row\"\n\nlet is_constr_row ~allow_ident t =\n  match t.desc with\n  | Tconstr (Path.Pident id, _, _) when allow_ident ->\n    is_row_name (Ident.name id)\n  | Tconstr (Path.Pdot (_, s, _), _, _) -> is_row_name s\n  | _ -> false\n\n(**********************************)\n(*  Utilities for type traversal  *)\n(**********************************)\n\nlet rec iter_row f row =\n  List.iter\n    (fun (_, fi) ->\n      match row_field_repr fi with\n      | Rpresent (Some ty) -> f ty\n      | Reither (_, tl, _, _) -> List.iter f tl\n      | _ -> ())\n    row.row_fields;\n  match (repr row.row_more).desc with\n  | Tvariant row -> iter_row f row\n  | Tvar _ | Tunivar _ | Tsubst _ | Tconstr _ | 
Tnil ->\n    Misc.may (fun (_, l) -> List.iter f l) row.row_name\n  | _ -> assert false\n\nlet iter_type_expr f ty =\n  match ty.desc with\n  | Tvar _ -> ()\n  | Tarrow (_, ty1, ty2, _) ->\n    f ty1;\n    f ty2\n  | Ttuple l -> List.iter f l\n  | Tconstr (_, l, _) -> List.iter f l\n  | Tobject (ty, {contents = Some (_, p)}) ->\n    f ty;\n    List.iter f p\n  | Tobject (ty, _) -> f ty\n  | Tvariant row ->\n    iter_row f row;\n    f (row_more row)\n  | Tfield (_, _, ty1, ty2) ->\n    f ty1;\n    f ty2\n  | Tnil -> ()\n  | Tlink ty -> f ty\n  | Tsubst ty -> f ty\n  | Tunivar _ -> ()\n  | Tpoly (ty, tyl) ->\n    f ty;\n    List.iter f tyl\n  | Tpackage (_, _, l) -> List.iter f l\n\nlet rec iter_abbrev f = function\n  | Mnil -> ()\n  | Mcons (_, _, ty, ty', rem) ->\n    f ty;\n    f ty';\n    iter_abbrev f rem\n  | Mlink rem -> iter_abbrev f !rem\n\ntype type_iterators = {\n  it_signature: type_iterators -> signature -> unit;\n  it_signature_item: type_iterators -> signature_item -> unit;\n  it_value_description: type_iterators -> value_description -> unit;\n  it_type_declaration: type_iterators -> type_declaration -> unit;\n  it_extension_constructor: type_iterators -> extension_constructor -> unit;\n  it_module_declaration: type_iterators -> module_declaration -> unit;\n  it_modtype_declaration: type_iterators -> modtype_declaration -> unit;\n  it_class_declaration: type_iterators -> class_declaration -> unit;\n  it_class_type_declaration: type_iterators -> class_type_declaration -> unit;\n  it_module_type: type_iterators -> module_type -> unit;\n  it_class_type: type_iterators -> class_type -> unit;\n  it_type_kind: type_iterators -> type_kind -> unit;\n  it_do_type_expr: type_iterators -> type_expr -> unit;\n  it_type_expr: type_iterators -> type_expr -> unit;\n  it_path: Path.t -> unit;\n}\n\nlet iter_type_expr_cstr_args f = function\n  | Cstr_tuple tl -> List.iter f tl\n  | Cstr_record lbls -> List.iter (fun d -> f d.ld_type) lbls\n\nlet map_type_expr_cstr_args 
f = function\n  | Cstr_tuple tl -> Cstr_tuple (List.map f tl)\n  | Cstr_record lbls ->\n    Cstr_record (List.map (fun d -> {d with ld_type = f d.ld_type}) lbls)\n\nlet iter_type_expr_kind f = function\n  | Type_abstract -> ()\n  | Type_variant cstrs ->\n    List.iter\n      (fun cd ->\n        iter_type_expr_cstr_args f cd.cd_args;\n        Misc.may f cd.cd_res)\n      cstrs\n  | Type_record (lbls, _) -> List.iter (fun d -> f d.ld_type) lbls\n  | Type_open -> ()\n\nlet type_iterators =\n  let it_signature it = List.iter (it.it_signature_item it)\n  and it_signature_item it = function\n    | Sig_value (_, vd) -> it.it_value_description it vd\n    | Sig_type (_, td, _) -> it.it_type_declaration it td\n    | Sig_typext (_, td, _) -> it.it_extension_constructor it td\n    | Sig_module (_, md, _) -> it.it_module_declaration it md\n    | Sig_modtype (_, mtd) -> it.it_modtype_declaration it mtd\n    | Sig_class () -> assert false\n    | Sig_class_type (_, ctd, _) -> it.it_class_type_declaration it ctd\n  and it_value_description it vd = it.it_type_expr it vd.val_type\n  and it_type_declaration it td =\n    List.iter (it.it_type_expr it) td.type_params;\n    may (it.it_type_expr it) td.type_manifest;\n    it.it_type_kind it td.type_kind\n  and it_extension_constructor it td =\n    it.it_path td.ext_type_path;\n    List.iter (it.it_type_expr it) td.ext_type_params;\n    iter_type_expr_cstr_args (it.it_type_expr it) td.ext_args;\n    may (it.it_type_expr it) td.ext_ret_type\n  and it_module_declaration it md = it.it_module_type it md.md_type\n  and it_modtype_declaration it mtd = may (it.it_module_type it) mtd.mtd_type\n  and it_class_declaration it cd =\n    List.iter (it.it_type_expr it) cd.cty_params;\n    it.it_class_type it cd.cty_type;\n    may (it.it_type_expr it) cd.cty_new;\n    it.it_path cd.cty_path\n  and it_class_type_declaration it ctd =\n    List.iter (it.it_type_expr it) ctd.clty_params;\n    it.it_class_type it ctd.clty_type;\n    it.it_path ctd.clty_path\n 
 and it_module_type it = function\n    | Mty_ident p | Mty_alias (_, p) -> it.it_path p\n    | Mty_signature sg -> it.it_signature it sg\n    | Mty_functor (_, mto, mt) ->\n      may (it.it_module_type it) mto;\n      it.it_module_type it mt\n  and it_class_type it = function\n    | Cty_constr (p, tyl, cty) ->\n      it.it_path p;\n      List.iter (it.it_type_expr it) tyl;\n      it.it_class_type it cty\n    | Cty_signature cs ->\n      it.it_type_expr it cs.csig_self;\n      Vars.iter (fun _ (_, _, ty) -> it.it_type_expr it ty) cs.csig_vars;\n      List.iter\n        (fun (p, tl) ->\n          it.it_path p;\n          List.iter (it.it_type_expr it) tl)\n        cs.csig_inher\n    | Cty_arrow (_, ty, cty) ->\n      it.it_type_expr it ty;\n      it.it_class_type it cty\n  and it_type_kind it kind = iter_type_expr_kind (it.it_type_expr it) kind\n  and it_do_type_expr it ty =\n    iter_type_expr (it.it_type_expr it) ty;\n    match ty.desc with\n    | Tconstr (p, _, _)\n    | Tobject (_, {contents = Some (p, _)})\n    | Tpackage (p, _, _) ->\n      it.it_path p\n    | Tvariant row -> may (fun (p, _) -> it.it_path p) (row_repr row).row_name\n    | _ -> ()\n  and it_path _p = () in\n  {\n    it_path;\n    it_type_expr = it_do_type_expr;\n    it_do_type_expr;\n    it_type_kind;\n    it_class_type;\n    it_module_type;\n    it_signature;\n    it_class_type_declaration;\n    it_class_declaration;\n    it_modtype_declaration;\n    it_module_declaration;\n    it_extension_constructor;\n    it_type_declaration;\n    it_value_description;\n    it_signature_item;\n  }\n\nlet copy_row f fixed row keep more =\n  let fields =\n    List.map\n      (fun (l, fi) ->\n        ( l,\n          match row_field_repr fi with\n          | Rpresent (Some ty) -> Rpresent (Some (f ty))\n          | Reither (c, tl, m, e) ->\n            let e = if keep then e else ref None in\n            let m = if row.row_fixed then fixed else m in\n            let tl = List.map f tl in\n            Reither (c, 
tl, m, e)\n          | _ -> fi ))\n      row.row_fields\n  in\n  let name =\n    match row.row_name with\n    | None -> None\n    | Some (path, tl) -> Some (path, List.map f tl)\n  in\n  {\n    row_fields = fields;\n    row_more = more;\n    row_bound = ();\n    row_fixed = row.row_fixed && fixed;\n    row_closed = row.row_closed;\n    row_name = name;\n  }\n\nlet rec copy_kind = function\n  | Fvar {contents = Some k} -> copy_kind k\n  | Fvar _ -> Fvar (ref None)\n  | Fpresent -> Fpresent\n  | Fabsent -> assert false\n\nlet copy_commu c = if commu_repr c = Cok then Cok else Clink (ref Cunknown)\n\n(* Since univars may be used as row variables, we need to do some\n   encoding during substitution *)\nlet rec norm_univar ty =\n  match ty.desc with\n  | Tunivar _ | Tsubst _ -> ty\n  | Tlink ty -> norm_univar ty\n  | Ttuple (ty :: _) -> norm_univar ty\n  | _ -> assert false\n\nlet rec copy_type_desc ?(keep_names = false) f = function\n  | Tvar _ as ty -> if keep_names then ty else Tvar None\n  | Tarrow (p, ty1, ty2, c) -> Tarrow (p, f ty1, f ty2, copy_commu c)\n  | Ttuple l -> Ttuple (List.map f l)\n  | Tconstr (p, l, _) -> Tconstr (p, List.map f l, ref Mnil)\n  | Tobject (ty, {contents = Some (p, tl)}) ->\n    Tobject (f ty, ref (Some (p, List.map f tl)))\n  | Tobject (ty, _) -> Tobject (f ty, ref None)\n  | Tvariant _ -> assert false (* too ambiguous *)\n  | Tfield (p, k, ty1, ty2) ->\n    (* the kind is kept shared *)\n    Tfield (p, field_kind_repr k, f ty1, f ty2)\n  | Tnil -> Tnil\n  | Tlink ty -> copy_type_desc f ty.desc\n  | Tsubst _ -> assert false\n  | Tunivar _ as ty -> ty (* always keep the name *)\n  | Tpoly (ty, tyl) ->\n    let tyl = List.map (fun x -> norm_univar (f x)) tyl in\n    Tpoly (f ty, tyl)\n  | Tpackage (p, n, l) -> Tpackage (p, n, List.map f l)\n\n(* Utilities for copying *)\n\nlet saved_desc = ref []\n(* Saved association of generic nodes with their description. 
*)\n\nlet save_desc ty desc = saved_desc := (ty, desc) :: !saved_desc\n\nlet saved_kinds = ref [] (* duplicated kind variables *)\nlet new_kinds = ref [] (* new kind variables *)\nlet dup_kind r =\n  (match !r with\n  | None -> ()\n  | Some _ -> assert false);\n  if not (List.memq r !new_kinds) then (\n    saved_kinds := r :: !saved_kinds;\n    let r' = ref None in\n    new_kinds := r' :: !new_kinds;\n    r := Some (Fvar r'))\n\n(* Restored type descriptions. *)\nlet cleanup_types () =\n  List.iter (fun (ty, desc) -> ty.desc <- desc) !saved_desc;\n  List.iter (fun r -> r := None) !saved_kinds;\n  saved_desc := [];\n  saved_kinds := [];\n  new_kinds := []\n\n(* Mark a type. *)\nlet rec mark_type ty =\n  let ty = repr ty in\n  if ty.level >= lowest_level then (\n    ty.level <- pivot_level - ty.level;\n    iter_type_expr mark_type ty)\n\nlet mark_type_node ty =\n  let ty = repr ty in\n  if ty.level >= lowest_level then ty.level <- pivot_level - ty.level\n\nlet mark_type_params ty = iter_type_expr mark_type ty\n\nlet type_iterators =\n  let it_type_expr it ty =\n    let ty = repr ty in\n    if ty.level >= lowest_level then (\n      mark_type_node ty;\n      it.it_do_type_expr it ty)\n  in\n  {type_iterators with it_type_expr}\n\n(* Remove marks from a type. 
*)\nlet rec unmark_type ty =\n  let ty = repr ty in\n  if ty.level < lowest_level then (\n    ty.level <- pivot_level - ty.level;\n    iter_type_expr unmark_type ty)\n\nlet unmark_iterators =\n  let it_type_expr _it ty = unmark_type ty in\n  {type_iterators with it_type_expr}\n\nlet unmark_type_decl decl =\n  unmark_iterators.it_type_declaration unmark_iterators decl\n\nlet unmark_extension_constructor ext =\n  List.iter unmark_type ext.ext_type_params;\n  iter_type_expr_cstr_args unmark_type ext.ext_args;\n  Misc.may unmark_type ext.ext_ret_type\n\nlet unmark_class_signature sign =\n  unmark_type sign.csig_self;\n  Vars.iter (fun _l (_m, _v, t) -> unmark_type t) sign.csig_vars\n\nlet unmark_class_type cty = unmark_iterators.it_class_type unmark_iterators cty\n\n(*******************************************)\n(*  Memorization of abbreviation expansion *)\n(*******************************************)\n\n(* Search whether the expansion has been memorized. *)\n\nlet lte_public p1 p2 =\n  (* Private <= Public *)\n  match (p1, p2) with\n  | Private, _ | _, Public -> true\n  | Public, Private -> false\n\nlet rec find_expans priv p1 = function\n  | Mnil -> None\n  | Mcons (priv', p2, _ty0, ty, _) when lte_public priv priv' && Path.same p1 p2\n    ->\n    Some ty\n  | Mcons (_, _, _, _, rem) -> find_expans priv p1 rem\n  | Mlink {contents = rem} -> find_expans priv p1 rem\n\n(* debug: check for cycles in abbreviation. only works with -principal\n   let rec check_expans visited ty =\n     let ty = repr ty in\n     assert (not (List.memq ty visited));\n     match ty.desc with\n       Tconstr (path, args, abbrev) ->\n         begin match find_expans path !abbrev with\n           Some ty' -> check_expans (ty :: visited) ty'\n         | None -> ()\n         end\n     | _ -> ()\n*)\n\nlet memo = ref []\n(* Contains the list of saved abbreviation expansions. *)\n\nlet cleanup_abbrev () =\n  (* Remove all memorized abbreviation expansions. 
*)\n  List.iter (fun abbr -> abbr := Mnil) !memo;\n  memo := []\n\nlet memorize_abbrev mem priv path v v' =\n  (* Memorize the expansion of an abbreviation. *)\n  mem := Mcons (priv, path, v, v', !mem);\n  (* check_expans [] v; *)\n  memo := mem :: !memo\n\nlet rec forget_abbrev_rec mem path =\n  match mem with\n  | Mnil -> assert false\n  | Mcons (_, path', _, _, rem) when Path.same path path' -> rem\n  | Mcons (priv, path', v, v', rem) ->\n    Mcons (priv, path', v, v', forget_abbrev_rec rem path)\n  | Mlink mem' ->\n    mem' := forget_abbrev_rec !mem' path;\n    raise Exit\n\nlet forget_abbrev mem path =\n  try mem := forget_abbrev_rec !mem path with Exit -> ()\n\n(* debug: check for invalid abbreviations\n   let rec check_abbrev_rec = function\n       Mnil -> true\n     | Mcons (_, ty1, ty2, rem) ->\n         repr ty1 != repr ty2\n     | Mlink mem' ->\n         check_abbrev_rec !mem'\n\n   let check_memorized_abbrevs () =\n     List.for_all (fun mem -> check_abbrev_rec !mem) !memo\n*)\n\n(**********************************)\n(*  Utilities for labels          *)\n(**********************************)\n\nlet is_optional = function\n  | Optional _ -> true\n  | _ -> false\n\nlet label_name = function\n  | Nolabel -> \"\"\n  | Labelled s | Optional s -> s\n\nlet prefixed_label_name = function\n  | Nolabel -> \"\"\n  | Labelled s -> \"~\" ^ s\n  | Optional s -> \"?\" ^ s\n\ntype sargs = (Asttypes.arg_label * Parsetree.expression) list\n\nlet rec extract_label_aux hd l = function\n  | [] -> None\n  | ((l', t) as p) :: ls ->\n    if label_name l' = l then Some (l', t, List.rev_append hd ls)\n    else extract_label_aux (p :: hd) l ls\n\nlet extract_label l (ls : sargs) :\n    (arg_label * Parsetree.expression * sargs) option =\n  extract_label_aux [] l ls\n\nlet rec label_assoc x (args : sargs) =\n  match args with\n  | [] -> false\n  | (a, _) :: l -> Asttypes.same_arg_label a x || label_assoc x l\n\n(**********************************)\n(*  Utilities for backtracking    
*)\n(**********************************)\n\nlet undo_change = function\n  | Ctype (ty, desc) -> ty.desc <- desc\n  | Ccompress (ty, desc, _) -> ty.desc <- desc\n  | Clevel (ty, level) -> ty.level <- level\n  | Cname (r, v) -> r := v\n  | Crow (r, v) -> r := v\n  | Ckind (r, v) -> r := v\n  | Ccommu (r, v) -> r := v\n  | Cuniv (r, v) -> r := v\n  | Ctypeset (r, v) -> r := v\n\ntype snapshot = changes ref * int\nlet last_snapshot = ref 0\n\nlet log_type ty =\n  if ty.id <= !last_snapshot then log_change (Ctype (ty, ty.desc))\nlet link_type ty ty' =\n  log_type ty;\n  let desc = ty.desc in\n  ty.desc <- Tlink ty';\n  (* Name is a user-supplied name for this unification variable (obtained\n   * through a type annotation for instance). *)\n  match (desc, ty'.desc) with\n  | Tvar name, Tvar name' -> (\n    match (name, name') with\n    | Some _, None ->\n      log_type ty';\n      ty'.desc <- Tvar name\n    | None, Some _ -> ()\n    | Some _, Some _ ->\n      if ty.level < ty'.level then (\n        log_type ty';\n        ty'.desc <- Tvar name)\n    | None, None -> ())\n  | _ -> ()\n\n(* ; assert (check_memorized_abbrevs ()) *)\n(*  ; check_expans [] ty' *)\nlet set_level ty level =\n  if ty.id <= !last_snapshot then log_change (Clevel (ty, ty.level));\n  ty.level <- level\nlet set_univar rty ty =\n  log_change (Cuniv (rty, !rty));\n  rty := Some ty\nlet set_name nm v =\n  log_change (Cname (nm, !nm));\n  nm := v\nlet set_row_field e v =\n  log_change (Crow (e, !e));\n  e := Some v\nlet set_kind rk k =\n  log_change (Ckind (rk, !rk));\n  rk := Some k\nlet set_commu rc c =\n  log_change (Ccommu (rc, !rc));\n  rc := c\nlet set_typeset rs s =\n  log_change (Ctypeset (rs, !rs));\n  rs := s\n\nlet snapshot () =\n  let old = !last_snapshot in\n  last_snapshot := !new_id;\n  match Weak.get trail 0 with\n  | Some r -> (r, old)\n  | None ->\n    let r = ref Unchanged in\n    Weak.set trail 0 (Some r);\n    (r, old)\n\nlet rec rev_log accu = function\n  | Unchanged -> accu\n  | 
Invalid -> assert false\n  | Change (ch, next) ->\n    let d = !next in\n    next := Invalid;\n    rev_log (ch :: accu) d\n\nlet backtrack (changes, old) =\n  match !changes with\n  | Unchanged -> last_snapshot := old\n  | Invalid -> failwith \"Btype.backtrack\"\n  | Change _ as change ->\n    cleanup_abbrev ();\n    let backlog = rev_log [] change in\n    List.iter undo_change backlog;\n    changes := Unchanged;\n    last_snapshot := old;\n    Weak.set trail 0 (Some changes)\n\nlet rec rev_compress_log log r =\n  match !r with\n  | Unchanged | Invalid -> log\n  | Change (Ccompress _, next) -> rev_compress_log (r :: log) next\n  | Change (_, next) -> rev_compress_log log next\n\nlet undo_compress (changes, _old) =\n  match !changes with\n  | Unchanged | Invalid -> ()\n  | Change _ ->\n    let log = rev_compress_log [] changes in\n    List.iter\n      (fun r ->\n        match !r with\n        | Change (Ccompress (ty, desc, d), next) when ty.desc == d ->\n          ty.desc <- desc;\n          r := !next\n        | _ -> ())\n      log\n"
  },
  {
    "path": "analysis/vendor/ml/btype.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Basic operations on core types *)\n\nopen Asttypes\nopen Types\n\n(**** Sets, maps and hashtables of types ****)\n\nmodule TypeSet : Set.S with type elt = type_expr\nmodule TypeMap : Map.S with type key = type_expr\nmodule TypeHash : Hashtbl.S with type key = type_expr\n\n(**** Levels ****)\n\nval generic_level : int\n\nval newty2 : int -> type_desc -> type_expr\n(* Create a type *)\n\nval newgenty : type_desc -> type_expr\n(* Create a generic type *)\n\nval newgenvar : ?name:string -> unit -> type_expr\n(* Return a fresh generic variable *)\n\n(* Use Tsubst instead\n   val newmarkedvar: int -> type_expr\n           (* Return a fresh marked variable *)\n   val newmarkedgenvar: unit -> type_expr\n           (* Return a fresh marked generic variable *)\n*)\n\n(**** Types ****)\n\nval is_Tvar : type_expr -> bool\nval is_Tunivar : type_expr -> bool\nval is_Tconstr : type_expr 
-> bool\nval dummy_method : label\nval default_mty : module_type option -> module_type\n\nval repr : type_expr -> type_expr\n(* Return the canonical representative of a type. *)\n\nval field_kind_repr : field_kind -> field_kind\n(* Return the canonical representative of an object field\n   kind. *)\n\nval commu_repr : commutable -> commutable\n(* Return the canonical representative of a commutation lock *)\n\n(**** polymorphic variants ****)\n\nval row_repr : row_desc -> row_desc\n(* Return the canonical representative of a row description *)\n\nval row_field_repr : row_field -> row_field\nval row_field : label -> row_desc -> row_field\n(* Return the canonical representative of a row field *)\n\nval row_more : row_desc -> type_expr\n(* Return the extension variable of the row *)\n\nval row_fixed : row_desc -> bool\n(* Return whether the row should be treated as fixed or not *)\n\nval static_row : row_desc -> bool\n(* Return whether the row is static or not *)\n\nval hash_variant : label -> int\n(* Hash function for variant tags *)\n\nval proxy : type_expr -> type_expr\n(* Return the proxy representative of the type: either itself\n   or a row variable *)\n\n(**** Utilities for private abbreviations with fixed rows ****)\nval row_of_type : type_expr -> type_expr\nval has_constr_row : type_expr -> bool\nval is_row_name : string -> bool\nval is_constr_row : allow_ident:bool -> type_expr -> bool\n\n(**** Utilities for type traversal ****)\n\nval iter_type_expr : (type_expr -> unit) -> type_expr -> unit\n(* Iteration on types *)\n\nval iter_row : (type_expr -> unit) -> row_desc -> unit\n(* Iteration on types in a row *)\n\nval iter_abbrev : (type_expr -> unit) -> abbrev_memo -> unit\n(* Iteration on types in an abbreviation list *)\n\ntype type_iterators = {\n  it_signature: type_iterators -> signature -> unit;\n  it_signature_item: type_iterators -> signature_item -> unit;\n  it_value_description: type_iterators -> value_description -> unit;\n  it_type_declaration: 
type_iterators -> type_declaration -> unit;\n  it_extension_constructor: type_iterators -> extension_constructor -> unit;\n  it_module_declaration: type_iterators -> module_declaration -> unit;\n  it_modtype_declaration: type_iterators -> modtype_declaration -> unit;\n  it_class_declaration: type_iterators -> class_declaration -> unit;\n  it_class_type_declaration: type_iterators -> class_type_declaration -> unit;\n  it_module_type: type_iterators -> module_type -> unit;\n  it_class_type: type_iterators -> class_type -> unit;\n  it_type_kind: type_iterators -> type_kind -> unit;\n  it_do_type_expr: type_iterators -> type_expr -> unit;\n  it_type_expr: type_iterators -> type_expr -> unit;\n  it_path: Path.t -> unit;\n}\nval type_iterators : type_iterators\n(* Iteration on arbitrary type information.\n   [it_type_expr] calls [mark_type_node] to avoid loops. *)\n\nval unmark_iterators : type_iterators\n(* Unmark any structure containing types. See [unmark_type] below. *)\n\nval copy_type_desc :\n  ?keep_names:bool -> (type_expr -> type_expr) -> type_desc -> type_desc\n(* Copy on types *)\n\nval copy_row :\n  (type_expr -> type_expr) -> bool -> row_desc -> bool -> type_expr -> row_desc\nval copy_kind : field_kind -> field_kind\n\nval save_desc : type_expr -> type_desc -> unit\n(* Save a type description *)\n\nval dup_kind : field_kind option ref -> unit\n(* Save a None field_kind, and make it point to a fresh Fvar *)\n\nval cleanup_types : unit -> unit\n(* Restore type descriptions *)\n\nval lowest_level : int\n(* Marked type: ty.level < lowest_level *)\n\nval pivot_level : int\n(* Type marking: ty.level <- pivot_level - ty.level *)\n\nval mark_type : type_expr -> unit\n(* Mark a type *)\n\nval mark_type_node : type_expr -> unit\n(* Mark a type node (but not its sons) *)\n\nval mark_type_params : type_expr -> unit\n(* Mark the sons of a type node *)\n\nval unmark_type : type_expr -> unit\nval unmark_type_decl : type_declaration -> unit\nval unmark_extension_constructor 
: extension_constructor -> unit\nval unmark_class_type : class_type -> unit\nval unmark_class_signature : class_signature -> unit\n(* Remove marks from a type *)\n\n(**** Memorization of abbreviation expansion ****)\n\nval find_expans : private_flag -> Path.t -> abbrev_memo -> type_expr option\n(* Look up a memorized abbreviation *)\n\nval cleanup_abbrev : unit -> unit\n(* Flush the cache of abbreviation expansions.\n   When some types are saved (using [output_value]), this\n   function MUST be called just before. *)\n\nval memorize_abbrev :\n  abbrev_memo ref -> private_flag -> Path.t -> type_expr -> type_expr -> unit\n(* Add an expansion in the cache *)\n\nval forget_abbrev : abbrev_memo ref -> Path.t -> unit\n(* Remove an abbreviation from the cache *)\n\n(**** Utilities for labels ****)\n\nval is_optional : arg_label -> bool\nval label_name : arg_label -> label\n\n(* Returns the label name with first character '?' or '~' as appropriate. *)\nval prefixed_label_name : arg_label -> label\n\ntype sargs = (arg_label * Parsetree.expression) list\n\nval extract_label :\n  label -> sargs -> (arg_label * Parsetree.expression * sargs) option\n(* actual label, value, new list with the same order *)\n\nval label_assoc : arg_label -> sargs -> bool\n(**** Utilities for backtracking ****)\n\ntype snapshot\n(* A snapshot for backtracking *)\n\nval snapshot : unit -> snapshot\n(* Make a snapshot for later backtracking. Costs nothing *)\n\nval backtrack : snapshot -> unit\n(* Backtrack to a given snapshot. Only possible if you have\n   not already backtracked to a previous snapshot.\n   Calls [cleanup_abbrev] internally *)\n\nval undo_compress : snapshot -> unit\n(* Backtrack only path compression. Only meaningful if you have\n   not already backtracked to a previous snapshot.\n   Does not call [cleanup_abbrev] *)\n\n(* Functions to use when modifying a type (only Ctype?) 
*)\nval link_type : type_expr -> type_expr -> unit\n(* Set the desc field of [t1] to [Tlink t2], logging the old\n   value if there is an active snapshot *)\n\nval set_level : type_expr -> int -> unit\nval set_name :\n  (Path.t * type_expr list) option ref ->\n  (Path.t * type_expr list) option ->\n  unit\nval set_row_field : row_field option ref -> row_field -> unit\nval set_univar : type_expr option ref -> type_expr -> unit\nval set_kind : field_kind option ref -> field_kind -> unit\nval set_commu : commutable ref -> commutable -> unit\nval set_typeset : TypeSet.t ref -> TypeSet.t -> unit\n(* Set references, logging the old value *)\n\nval log_type : type_expr -> unit\n(* Log the old value of a type, before modifying it by hand *)\n\n(**** Forward declarations ****)\nval print_raw : (Format.formatter -> type_expr -> unit) ref\n\nval iter_type_expr_kind : (type_expr -> unit) -> type_kind -> unit\n\nval iter_type_expr_cstr_args :\n  (type_expr -> unit) -> constructor_arguments -> unit\nval map_type_expr_cstr_args :\n  (type_expr -> type_expr) -> constructor_arguments -> constructor_arguments\n"
  },
  {
    "path": "analysis/vendor/ml/builtin_attributes.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                         Alain Frisch, LexiFi                           *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Parsetree\n\nlet string_of_cst = function\n  | Pconst_string (s, _) -> Some s\n  | _ -> None\n\nlet string_of_payload = function\n  | PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_constant c}, _)}] ->\n    string_of_cst c\n  | _ -> None\n\nlet string_of_opt_payload p =\n  match string_of_payload p with\n  | Some s -> s\n  | None -> \"\"\n\nlet rec error_of_extension ext =\n  match ext with\n  | {txt = (\"ocaml.error\" | \"error\") as txt; loc}, p -> (\n    let rec sub_from inner =\n      match inner with\n      | {pstr_desc = Pstr_extension (ext, _)} :: rest ->\n        error_of_extension ext :: sub_from rest\n      | _ :: rest ->\n        Location.errorf ~loc \"Invalid syntax for sub-error of extension '%s'.\"\n          txt\n        :: sub_from rest\n      | [] -> []\n    in\n    match p with\n    | PStr [] -> raise Location.Already_displayed_error\n    
| PStr\n        ({\n           pstr_desc =\n             Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (msg, _))}, _);\n         }\n        :: {\n             pstr_desc =\n               Pstr_eval\n                 ( {pexp_desc = Pexp_constant (Pconst_string (if_highlight, _))},\n                   _ );\n           }\n        :: inner) ->\n      Location.error ~loc ~if_highlight ~sub:(sub_from inner) msg\n    | PStr\n        ({\n           pstr_desc =\n             Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (msg, _))}, _);\n         }\n        :: inner) ->\n      Location.error ~loc ~sub:(sub_from inner) msg\n    | _ -> Location.errorf ~loc \"Invalid syntax for extension '%s'.\" txt)\n  | {txt; loc}, _ -> Location.errorf ~loc \"Uninterpreted extension '%s'.\" txt\n\nlet cat s1 s2 =\n  if s2 = \"\" then s1\n  else (* 2 spaces indentation for the next line *)\n    s1 ^ \"\\n  \" ^ s2\n\nlet rec deprecated_of_attrs = function\n  | [] -> None\n  | ({txt = \"ocaml.deprecated\" | \"deprecated\"; _}, p) :: _ ->\n    Some (string_of_opt_payload p)\n  | _ :: tl -> deprecated_of_attrs tl\n\nlet check_deprecated loc attrs s =\n  match deprecated_of_attrs attrs with\n  | None -> ()\n  | Some txt -> Location.deprecated loc (cat s txt)\n\nlet check_deprecated_inclusion ~def ~use loc attrs1 attrs2 s =\n  match (deprecated_of_attrs attrs1, deprecated_of_attrs attrs2) with\n  | None, _ | Some _, Some _ -> ()\n  | Some txt, None -> Location.deprecated ~def ~use loc (cat s txt)\n\nlet rec deprecated_mutable_of_attrs = function\n  | [] -> None\n  | ({txt = \"ocaml.deprecated_mutable\" | \"deprecated_mutable\"; _}, p) :: _ ->\n    Some (string_of_opt_payload p)\n  | _ :: tl -> deprecated_mutable_of_attrs tl\n\nlet check_deprecated_mutable loc attrs s =\n  match deprecated_mutable_of_attrs attrs with\n  | None -> ()\n  | Some txt ->\n    Location.deprecated loc (Printf.sprintf \"mutating field %s\" (cat s txt))\n\nlet check_deprecated_mutable_inclusion ~def ~use loc 
attrs1 attrs2 s =\n  match\n    (deprecated_mutable_of_attrs attrs1, deprecated_mutable_of_attrs attrs2)\n  with\n  | None, _ | Some _, Some _ -> ()\n  | Some txt, None ->\n    Location.deprecated ~def ~use loc\n      (Printf.sprintf \"mutating field %s\" (cat s txt))\n\nlet check_bs_attributes_inclusion = ref (fun _attrs1 _attrs2 _s -> None)\n\nlet check_duplicated_labels : (_ -> _ option) ref = ref (fun _lbls -> None)\n\nlet rec deprecated_of_sig = function\n  | {psig_desc = Psig_attribute a} :: tl -> (\n    match deprecated_of_attrs [a] with\n    | None -> deprecated_of_sig tl\n    | Some _ as r -> r)\n  | _ -> None\n\nlet rec deprecated_of_str = function\n  | {pstr_desc = Pstr_attribute a} :: tl -> (\n    match deprecated_of_attrs [a] with\n    | None -> deprecated_of_str tl\n    | Some _ as r -> r)\n  | _ -> None\n\nlet warning_attribute ?(ppwarning = true) =\n  let process loc txt errflag payload =\n    match string_of_payload payload with\n    | Some s -> (\n      try Warnings.parse_options errflag s\n      with Arg.Bad _ ->\n        Location.prerr_warning loc\n          (Warnings.Attribute_payload (txt, \"Ill-formed list of warnings\")))\n    | None ->\n      Location.prerr_warning loc\n        (Warnings.Attribute_payload (txt, \"A single string literal is expected\"))\n  in\n  function\n  | {txt = (\"ocaml.warning\" | \"warning\") as txt; loc}, payload ->\n    process loc txt false payload\n  | {txt = (\"ocaml.warnerror\" | \"warnerror\") as txt; loc}, payload ->\n    process loc txt true payload\n  | ( {txt = \"ocaml.ppwarning\" | \"ppwarning\"},\n      PStr\n        [\n          {\n            pstr_desc =\n              Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (s, _))}, _);\n            pstr_loc;\n          };\n        ] )\n    when ppwarning ->\n    Location.prerr_warning pstr_loc (Warnings.Preprocessor s)\n  | _ -> ()\n\nlet warning_scope ?ppwarning attrs f =\n  let prev = Warnings.backup () in\n  try\n    List.iter (warning_attribute 
?ppwarning) (List.rev attrs);\n    let ret = f () in\n    Warnings.restore prev;\n    ret\n  with exn ->\n    Warnings.restore prev;\n    raise exn\n\nlet warn_on_literal_pattern =\n  List.exists (function\n    | {txt = \"ocaml.warn_on_literal_pattern\" | \"warn_on_literal_pattern\"; _}, _\n      ->\n      true\n    | _ -> false)\n\nlet explicit_arity =\n  List.exists (function\n    | {txt = \"ocaml.explicit_arity\" | \"explicit_arity\"; _}, _ -> true\n    | _ -> false)\n\nlet immediate =\n  List.exists (function\n    | {txt = \"ocaml.immediate\" | \"immediate\"; _}, _ -> true\n    | _ -> false)\n\n(* The \"ocaml.boxed (default)\" and \"ocaml.unboxed (default)\"\n   attributes cannot be input by the user, they are added by the\n   compiler when applying the default setting. This is done to record\n   in the .cmi the default used by the compiler when compiling the\n   source file because the default can change between compiler\n   invocations. *)\n\nlet check l (x, _) = List.mem x.txt l\n\nlet has_unboxed attr = List.exists (check [\"ocaml.unboxed\"; \"unboxed\"]) attr\n\nlet has_boxed attr = List.exists (check [\"ocaml.boxed\"; \"boxed\"]) attr\n"
  },
  {
    "path": "analysis/vendor/ml/builtin_attributes.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                         Alain Frisch, LexiFi                           *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Support for some of the builtin attributes:\n\n   ocaml.deprecated\n   ocaml.error\n   ocaml.ppwarning\n   ocaml.warning\n   ocaml.warnerror\n   ocaml.explicit_arity (for camlp4/camlp5)\n   ocaml.warn_on_literal_pattern\n   ocaml.deprecated_mutable\n   ocaml.immediate\n   ocaml.boxed / ocaml.unboxed\n*)\n\nval check_deprecated : Location.t -> Parsetree.attributes -> string -> unit\nval check_deprecated_inclusion :\n  def:Location.t ->\n  use:Location.t ->\n  Location.t ->\n  Parsetree.attributes ->\n  Parsetree.attributes ->\n  string ->\n  unit\nval deprecated_of_attrs : Parsetree.attributes -> string option\nval deprecated_of_sig : Parsetree.signature -> string option\nval deprecated_of_str : Parsetree.structure -> string option\n\nval check_deprecated_mutable :\n  Location.t -> Parsetree.attributes -> string -> unit\nval check_deprecated_mutable_inclusion :\n  
def:Location.t ->\n  use:Location.t ->\n  Location.t ->\n  Parsetree.attributes ->\n  Parsetree.attributes ->\n  string ->\n  unit\n\nval check_bs_attributes_inclusion :\n  (Parsetree.attributes ->\n  Parsetree.attributes ->\n  string ->\n  (string * string) option)\n  ref\n\nval check_duplicated_labels :\n  (Parsetree.label_declaration list -> string Asttypes.loc option) ref\nval error_of_extension : Parsetree.extension -> Location.error\n\nval warning_attribute : ?ppwarning:bool -> Parsetree.attribute -> unit\n(** Apply warning settings from the specified attribute.\n    \"ocaml.warning\"/\"ocaml.warnerror\" (and variants without the prefix) are\n    processed and other attributes are ignored.\n\n    Also implement ocaml.ppwarning (unless ~ppwarning:false is passed). *)\n\nval warning_scope :\n  ?ppwarning:bool -> Parsetree.attributes -> (unit -> 'a) -> 'a\n(** Execute a function in a new scope for warning settings. This means that the\n    effect of any call to [warning_attribute] during the execution of this\n    function will be discarded after execution.\n\n    The function also takes a list of attributes which are processed with\n    [warning_attribute] in the fresh scope before the function is executed. *)\n\nval warn_on_literal_pattern : Parsetree.attributes -> bool\nval explicit_arity : Parsetree.attributes -> bool\n\nval immediate : Parsetree.attributes -> bool\n\nval has_unboxed : Parsetree.attributes -> bool\nval has_boxed : Parsetree.attributes -> bool\n"
  },
  {
    "path": "analysis/vendor/ml/ccomp.ml",
    "content": "let command cmdline =\n  if !Clflags.verbose then (\n    prerr_string \"+ \";\n    prerr_string cmdline;\n    prerr_newline ());\n  Sys.command cmdline\n"
  },
  {
    "path": "analysis/vendor/ml/ccomp.mli",
    "content": "val command : string -> int\n"
  },
  {
    "path": "analysis/vendor/ml/classify_function.ml",
    "content": "(* Copyright (C) 2020- Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nlet rec is_obj_literal (x : _ Flow_ast.Expression.t) : bool =\n  match snd x with\n  | Identifier (_, {name = \"undefined\"}) | Literal _ -> true\n  | Unary {operator = Minus; argument} -> is_obj_literal argument\n  | Object {properties} -> Ext_list.for_all properties is_literal_kv\n  | Array {elements} ->\n    Ext_list.for_all elements (fun x ->\n        match x with\n        | Expression x -> is_obj_literal x\n        | _ -> false)\n  | _ -> false\n\nand is_literal_kv (x : _ Flow_ast.Expression.Object.property) =\n  match x with\n  | Property (_, Init {value}) -> is_obj_literal value\n  | _ -> false\n\nlet classify_exp (prog : _ Flow_ast.Expression.t) : Js_raw_info.exp =\n  match prog with\n  | ( _,\n      Function\n        {\n          id = _;\n          params = _, {params};\n          async = false;\n          generator = false;\n          predicate = None;\n        } ) ->\n    Js_function {arity = List.length params; arrow = false}\n  | ( _,\n      ArrowFunction\n        {\n          id = None;\n          params = _, {params};\n          async = false;\n          generator = false;\n          predicate = None;\n        } ) ->\n    Js_function {arity = List.length params; arrow = true}\n  | _, Literal {comments} ->\n    let comment =\n      match comments with\n      | None -> None\n      | Some {leading = [(_, {kind = Block; text = comment})]} ->\n        Some (\"/*\" ^ comment ^ \"*/\")\n      | Some {leading = [(_, {kind = Line; text = comment})]} ->\n        Some (\"//\" ^ comment)\n      | Some _ -> None\n    in\n    Js_literal {comment}\n  | _, Identifier (_, {name = \"undefined\"}) -> Js_literal {comment = None}\n  | _, _ ->\n    if is_obj_literal prog then Js_literal {comment = None} else Js_exp_unknown\n  | exception _ -> Js_exp_unknown\n\n(** It seems we do the parse twice\n    - in parsing\n    - in code generation *)\nlet classify ?(check : (Location.t * int) option) (prog : string) :\n    Js_raw_info.exp =\n  let prog, errors =\n    
Parser_flow.parse_expression (Parser_env.init_env None prog) false\n  in\n  match (check, errors) with\n  | Some (loc, offset), _ :: _ ->\n    Bs_flow_ast_utils.check_flow_errors ~loc ~offset errors;\n    Js_exp_unknown\n  | Some _, [] | None, [] -> classify_exp prog\n  | None, _ :: _ -> Js_exp_unknown\n\nlet classify_stmt (prog : string) : Js_raw_info.stmt =\n  let result = Parser_flow.parse_program false None prog in\n  match fst result with\n  | _loc, {statements = []} -> Js_stmt_comment\n  | _ -> Js_stmt_unknown\n(* we can also analyze throw\n   x.x pure access\n*)\n"
  },
  {
    "path": "analysis/vendor/ml/classify_function.mli",
    "content": "(* Copyright (C) 2020- Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval classify : ?check:Location.t * int -> string -> Js_raw_info.exp\n\nval classify_exp : (Loc.t, Loc.t) Flow_ast.Expression.t -> Js_raw_info.exp\n\nval classify_stmt : string -> Js_raw_info.stmt\n"
  },
  {
    "path": "analysis/vendor/ml/clflags.ml",
    "content": "let output_name = ref (None : string option) (* -o *)\n\nand include_dirs = ref ([] : string list) (* -I *)\n\nand debug = ref false (* -g *)\n\nand fast = ref false (* -unsafe *)\n\nand nopervasives = ref false (* -nopervasives *)\n\nand preprocessor = ref (None : string option) (* -pp *)\n\nand all_ppx = ref ([] : string list)\n\n(* -ppx *)\nlet annotations = ref false (* -annot *)\nlet binary_annotations = ref false (* -annot *)\n\nand noassert = ref false (* -noassert *)\n\nand verbose = ref false (* -verbose *)\n\nand open_modules = ref [] (* -open *)\n\nand real_paths = ref true (* -short-paths *)\n\nand applicative_functors = ref true (* -no-app-funct *)\n\nand error_size = ref 500 (* -error-size *)\n\nand transparent_modules = ref false (* -trans-mod *)\nlet dump_source = ref false (* -dsource *)\nlet dump_parsetree = ref false (* -dparsetree *)\n\nand dump_typedtree = ref false (* -dtypedtree *)\n\nand dump_rawlambda = ref false (* -drawlambda *)\n\nand dump_lambda = ref false (* -dlambda *)\n\nand only_parse = ref false (* -only-parse *)\n\nand ignore_parse_errors = ref false (* -ignore-parse-errors *)\n\nlet dont_write_files = ref false (* set to true under ocamldoc *)\n\nlet reset_dump_state () =\n  dump_source := false;\n  dump_parsetree := false;\n  dump_typedtree := false;\n  dump_rawlambda := false\n\nlet keep_docs = ref false (* -keep-docs *)\nlet keep_locs = ref true (* -keep-locs *)\n\nlet parse_color_setting = function\n  | \"auto\" -> Some Misc.Color.Auto\n  | \"always\" -> Some Misc.Color.Always\n  | \"never\" -> Some Misc.Color.Never\n  | _ -> None\nlet color = ref None\n\n(* -color *)\n\nlet unboxed_types = ref false\n\ntype mli_status = Mli_exists | Mli_non_exists\nlet assume_no_mli = ref Mli_non_exists\nlet dont_record_crc_unit : string option ref = ref None\nlet bs_gentype = ref false\nlet no_assert_false = ref false\nlet dump_location = ref true\n"
  },
  {
    "path": "analysis/vendor/ml/clflags.mli",
    "content": "val output_name : string option ref\nval include_dirs : string list ref\n\nval debug : bool ref\nval fast : bool ref\n\nval nopervasives : bool ref\nval open_modules : string list ref\nval preprocessor : string option ref\nval all_ppx : string list ref\nval annotations : bool ref\nval binary_annotations : bool ref\nval noassert : bool ref\nval verbose : bool ref\nval real_paths : bool ref\nval applicative_functors : bool ref\nval error_size : int ref\nval transparent_modules : bool ref\nval dump_source : bool ref\nval dump_parsetree : bool ref\nval dump_typedtree : bool ref\nval dump_rawlambda : bool ref\nval dump_lambda : bool ref\nval dont_write_files : bool ref\nval keep_docs : bool ref\nval keep_locs : bool ref\nval only_parse : bool ref\nval ignore_parse_errors : bool ref\n\nval parse_color_setting : string -> Misc.Color.setting option\nval color : Misc.Color.setting option ref\n\nval unboxed_types : bool ref\n\nval reset_dump_state : unit -> unit\n\ntype mli_status = Mli_exists | Mli_non_exists\nval assume_no_mli : mli_status ref\nval dont_record_crc_unit : string option ref\nval bs_gentype : bool ref\nval no_assert_false : bool ref\nval dump_location : bool ref\n"
  },
  {
    "path": "analysis/vendor/ml/cmi_format.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Fabrice Le Fessant, INRIA Saclay                     *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype pers_flags = Deprecated of string\n\ntype error =\n  | Not_an_interface of string\n  | Wrong_version_interface of string * string\n  | Corrupted_interface of string\n\nexception Error of error\n\ntype cmi_infos = {\n  cmi_name: string;\n  cmi_sign: Types.signature_item list;\n  cmi_crcs: (string * Digest.t option) list;\n  cmi_flags: pers_flags list;\n}\n\nlet input_cmi ic =\n  let name, sign = input_value ic in\n  let crcs = input_value ic in\n  let flags = input_value ic in\n  {cmi_name = name; cmi_sign = sign; cmi_crcs = crcs; cmi_flags = flags}\n\nlet read_cmi filename =\n  let ic = open_in_bin filename in\n  try\n    let buffer =\n      really_input_string ic (String.length Config.cmi_magic_number)\n    in\n    if buffer <> Config.cmi_magic_number then (\n      close_in ic;\n      let pre_len = String.length Config.cmi_magic_number - 3 in\n      if\n        
String.sub buffer 0 pre_len\n        = String.sub Config.cmi_magic_number 0 pre_len\n      then\n        let msg =\n          if buffer < Config.cmi_magic_number then \"an older\" else \"a newer\"\n        in\n        raise (Error (Wrong_version_interface (filename, msg)))\n      else raise (Error (Not_an_interface filename)));\n    let cmi = input_cmi ic in\n    close_in ic;\n    cmi\n  with\n  | End_of_file | Failure _ ->\n    close_in ic;\n    raise (Error (Corrupted_interface filename))\n  | Error e ->\n    close_in ic;\n    raise (Error e)\n\nlet output_cmi filename oc cmi =\n  (* beware: the provided signature must have been substituted for saving *)\n  output_string oc Config.cmi_magic_number;\n  output_value oc (cmi.cmi_name, cmi.cmi_sign);\n  flush oc;\n  let crc = Digest.file filename in\n  let crcs = (cmi.cmi_name, Some crc) :: cmi.cmi_crcs in\n  output_value oc crcs;\n  output_value oc cmi.cmi_flags;\n  crc\n\n(* This function is also called by [save_cmt] as cmi_format is subset of\n       cmt_format, so don't close the channel yet\n*)\nlet create_cmi ?check_exists filename (cmi : cmi_infos) =\n  (* beware: the provided signature must have been substituted for saving *)\n  let content =\n    Config.cmi_magic_number ^ Marshal.to_string (cmi.cmi_name, cmi.cmi_sign) []\n    (* checkout [output_value] in {!Pervasives} module *)\n  in\n  let crc = Digest.string content in\n  let cmi_infos =\n    if check_exists <> None && Sys.file_exists filename then\n      Some (read_cmi filename)\n    else None\n  in\n  match cmi_infos with\n  | Some\n      {\n        cmi_name = _;\n        cmi_sign = _;\n        cmi_crcs = (old_name, Some old_crc) :: rest;\n        cmi_flags;\n      }\n  (* TODO: design the cmi format so that we don't need to read the whole cmi *)\n    when cmi.cmi_name = old_name && crc = old_crc && cmi.cmi_crcs = rest\n         && cmi_flags = cmi.cmi_flags ->\n    crc\n  | _ ->\n    let crcs = (cmi.cmi_name, Some crc) :: cmi.cmi_crcs in\n    let oc = 
open_out_bin filename in\n    output_string oc content;\n    output_value oc crcs;\n    output_value oc cmi.cmi_flags;\n    close_out oc;\n    crc\n\n(* Error report *)\n\nopen Format\n\nlet report_error ppf = function\n  | Not_an_interface filename ->\n    fprintf ppf \"%a@ is not a compiled interface\" Location.print_filename\n      filename\n  | Wrong_version_interface (filename, older_newer) ->\n    fprintf ppf\n      \"%a@ is not a compiled interface for this version of OCaml.@.It seems to \\\n       be for %s version of OCaml.\"\n      Location.print_filename filename older_newer\n  | Corrupted_interface filename ->\n    fprintf ppf \"Corrupted compiled interface@ %a\" Location.print_filename\n      filename\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error err -> Some (Location.error_of_printer_file report_error err)\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/cmi_format.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Fabrice Le Fessant, INRIA Saclay                     *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype pers_flags = Deprecated of string\n\ntype cmi_infos = {\n  cmi_name: string;\n  cmi_sign: Types.signature_item list;\n  cmi_crcs: (string * Digest.t option) list;\n  cmi_flags: pers_flags list;\n}\n\n(* write the magic + the cmi information *)\nval output_cmi : string -> out_channel -> cmi_infos -> Digest.t\n\nval create_cmi : ?check_exists:unit -> string -> cmi_infos -> Digest.t\n\n(* read the cmi information (the magic is supposed to have already been read) *)\nval input_cmi : in_channel -> cmi_infos\n\n(* read a cmi from a filename, checking the magic *)\nval read_cmi : string -> cmi_infos\n\n(* Error report *)\n\ntype error =\n  | Not_an_interface of string\n  | Wrong_version_interface of string * string\n  | Corrupted_interface of string\n\nexception Error of error\n\nopen Format\n\nval report_error : formatter -> error -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/cmt_format.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Fabrice Le Fessant, INRIA Saclay                     *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n#ifdef BROWSER\n[@@@warning \"-32\"]\n#endif\n\nopen Typedtree\n\n(* Note that in Typerex, there is an awful hack to save a cmt file\n   together with the interface file that was generated by ocaml (this\n   is because the installed version of ocaml might differ from the one\n   integrated in Typerex).\n*)\n\n\n\nlet read_magic_number ic =\n  let len_magic_number = String.length Config.cmt_magic_number in\n  really_input_string ic len_magic_number\n\ntype binary_annots =\n  | Packed of Types.signature * string list\n  | Implementation of structure\n  | Interface of signature\n  | Partial_implementation of binary_part array\n  | Partial_interface of binary_part array\n\nand binary_part =\n| Partial_structure of structure\n| Partial_structure_item of structure_item\n| Partial_expression of expression\n| Partial_pattern of pattern\n| Partial_class_expr of unit\n| 
Partial_signature of signature\n| Partial_signature_item of signature_item\n| Partial_module_type of module_type\n\ntype cmt_infos = {\n  cmt_modname : string;\n  cmt_annots : binary_annots;\n  cmt_value_dependencies :\n    (Types.value_description * Types.value_description) list;\n  cmt_comments : (string * Location.t) list;\n  cmt_args : string array;\n  cmt_sourcefile : string option;\n  cmt_builddir : string;\n  cmt_loadpath : string list;\n  cmt_source_digest : Digest.t option;\n  cmt_initial_env : Env.t;\n  cmt_imports : (string * Digest.t option) list;\n  cmt_interface_digest : Digest.t option;\n  cmt_use_summaries : bool;\n}\n\ntype error =\n    Not_a_typedtree of string\n\nlet need_to_clear_env =\n  try ignore (Sys.getenv \"OCAML_BINANNOT_WITHENV\"); false\n  with Not_found -> true\n\nlet keep_only_summary = Env.keep_only_summary\n\nopen Tast_mapper\n\nlet cenv =\n  {Tast_mapper.default with env = fun _sub env -> keep_only_summary env}\n\nlet clear_part = function\n  | Partial_structure s -> Partial_structure (cenv.structure cenv s)\n  | Partial_structure_item s ->\n      Partial_structure_item (cenv.structure_item cenv s)\n  | Partial_expression e -> Partial_expression (cenv.expr cenv e)\n  | Partial_pattern p -> Partial_pattern (cenv.pat cenv p)\n  | Partial_class_expr () -> assert false\n  | Partial_signature s -> Partial_signature (cenv.signature cenv s)\n  | Partial_signature_item s ->\n      Partial_signature_item (cenv.signature_item cenv s)\n  | Partial_module_type s -> Partial_module_type (cenv.module_type cenv s)\n\nlet clear_env binary_annots =\n  if need_to_clear_env then\n    match binary_annots with\n    | Implementation s -> Implementation (cenv.structure cenv s)\n    | Interface s -> Interface (cenv.signature cenv s)\n    | Packed _ -> binary_annots\n    | Partial_implementation array ->\n        Partial_implementation (Array.map clear_part array)\n    | Partial_interface array ->\n        Partial_interface (Array.map clear_part array)\n\n  
else binary_annots\n\nexception Error of error\n\nlet input_cmt ic = (input_value ic : cmt_infos)\n\nlet output_cmt oc cmt =\n  output_string oc Config.cmt_magic_number;\n  output_value oc (cmt : cmt_infos)\n\nlet read filename =\n(*  Printf.fprintf stderr \"Cmt_format.read %s\\n%!\" filename; *)\n  let ic = open_in_bin filename in\n  try\n    let magic_number = read_magic_number ic in\n    let cmi, cmt =\n      if magic_number = Config.cmt_magic_number then\n        None, Some (input_cmt ic)\n      else if magic_number = Config.cmi_magic_number then\n        let cmi = Cmi_format.input_cmi ic in\n        let cmt = try\n                    let magic_number = read_magic_number ic in\n                    if magic_number = Config.cmt_magic_number then\n                      let cmt = input_cmt ic in\n                      Some cmt\n                    else None\n          with _ -> None\n        in\n        Some cmi, cmt\n      else\n        raise(Cmi_format.Error(Cmi_format.Not_an_interface filename))\n    in\n    close_in ic;\n(*    Printf.fprintf stderr \"Cmt_format.read done\\n%!\"; *)\n    cmi, cmt\n  with e ->\n    close_in ic;\n    raise e\n\nlet read_cmt filename =\n  match read filename with\n      _, None -> raise (Error (Not_a_typedtree filename))\n    | _, Some cmt -> cmt\n\nlet read_cmi filename =\n  match read filename with\n      None, _ ->\n        raise (Cmi_format.Error (Cmi_format.Not_an_interface filename))\n    | Some cmi, _ -> cmi\n\nlet saved_types = ref []\nlet value_deps = ref []\n\nlet clear () =\n  saved_types := [];\n  value_deps := []\n\nlet add_saved_type b = saved_types := b :: !saved_types\nlet get_saved_types () = !saved_types\nlet set_saved_types l = saved_types := l\n\nlet record_value_dependency vd1 vd2 =\n  if vd1.Types.val_loc <> vd2.Types.val_loc then\n    value_deps := (vd1, vd2) :: !value_deps\n\n#ifdef BROWSER\nlet save_cmt _filename _modname _binary_annots _sourcefile _initial_env _cmi = ()  \n#else\nopen Cmi_format\n\nlet 
save_cmt filename modname binary_annots sourcefile initial_env cmi =\n  if !Clflags.binary_annotations then begin\n    (if !Config.bs_only then Misc.output_to_bin_file_directly else \n    Misc.output_to_file_via_temporary\n       ~mode:[Open_binary] ) filename\n       (fun temp_file_name oc ->\n         let this_crc =\n           match cmi with\n           | None -> None\n           | Some cmi -> Some (output_cmi temp_file_name oc cmi)\n         in\n         let source_digest = Misc.may_map Digest.file sourcefile in\n         let cmt = {\n           cmt_modname = modname;\n           cmt_annots = clear_env binary_annots;\n           cmt_value_dependencies = !value_deps;\n           cmt_comments = Lexer.comments ();\n           cmt_args = Sys.argv;\n           cmt_sourcefile = sourcefile;\n           cmt_builddir =  Sys.getcwd ();\n           cmt_loadpath = !Config.load_path;\n           cmt_source_digest = source_digest;\n           cmt_initial_env = if need_to_clear_env then\n               keep_only_summary initial_env else initial_env;\n           cmt_imports = List.sort compare (Env.imports ());\n           cmt_interface_digest = this_crc;\n           cmt_use_summaries = need_to_clear_env;\n         } in\n         output_cmt oc cmt)\n  end;\n  clear ()\n#endif"
  },
  {
    "path": "analysis/vendor/ml/cmt_format.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Fabrice Le Fessant, INRIA Saclay                     *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** cmt and cmti files format. *)\n\n(** The layout of a cmt file is as follows: <cmt> := \\{<cmi>\\} <cmt magic> \\{cmt\n    infos\\} \\{<source info>\\} where <cmi> is the cmi file format: <cmi> := <cmi\n    magic> <cmi info>. More precisely, the optional <cmi> part must be present\n    if and only if the file is:\n    - a cmti, or\n    - a cmt, for a ml file which has no corresponding mli (hence no\n      corresponding cmti).\n\n    Thus, we provide a common reading function for cmi and cmt(i) files which\n    returns an option for each of the three parts: cmi info, cmt info, source\n    info. 
*)\n\nopen Typedtree\n\ntype binary_annots =\n  | Packed of Types.signature * string list\n  | Implementation of structure\n  | Interface of signature\n  | Partial_implementation of binary_part array\n  | Partial_interface of binary_part array\n\nand binary_part =\n  | Partial_structure of structure\n  | Partial_structure_item of structure_item\n  | Partial_expression of expression\n  | Partial_pattern of pattern\n  | Partial_class_expr of unit\n  | Partial_signature of signature\n  | Partial_signature_item of signature_item\n  | Partial_module_type of module_type\n\ntype cmt_infos = {\n  cmt_modname: string;\n  cmt_annots: binary_annots;\n  cmt_value_dependencies:\n    (Types.value_description * Types.value_description) list;\n  cmt_comments: (string * Location.t) list;\n  cmt_args: string array;\n  cmt_sourcefile: string option;\n  cmt_builddir: string;\n  cmt_loadpath: string list;\n  cmt_source_digest: string option;\n  cmt_initial_env: Env.t;\n  cmt_imports: (string * Digest.t option) list;\n  cmt_interface_digest: Digest.t option;\n  cmt_use_summaries: bool;\n}\n\ntype error = Not_a_typedtree of string\n\nexception Error of error\n\nval read : string -> Cmi_format.cmi_infos option * cmt_infos option\n(** [read filename] opens filename, and extract both the cmi_infos, if it\n    exists, and the cmt_infos, if it exists. Thus, it can be used with .cmi,\n    .cmt and .cmti files.\n\n    .cmti files always contain a cmi_infos at the beginning. 
.cmt files only\n    contain a cmi_infos at the beginning if there is no associated .cmti file.\n*)\n\nval read_cmt : string -> cmt_infos\nval read_cmi : string -> Cmi_format.cmi_infos\n\nval save_cmt :\n  string ->\n  (* filename.cmt to generate *)\n  string ->\n  (* module name *)\n  binary_annots ->\n  string option ->\n  (* source file *)\n  Env.t ->\n  (* initial env *)\n  Cmi_format.cmi_infos option ->\n  (* if a .cmi was generated *)\n  unit\n(** [save_cmt filename modname binary_annots sourcefile initial_env cmi] writes\n    a cmt(i) file. *)\n\n(* Miscellaneous functions *)\n\nval read_magic_number : in_channel -> string\n\nval clear : unit -> unit\n\nval add_saved_type : binary_part -> unit\nval get_saved_types : unit -> binary_part list\nval set_saved_types : binary_part list -> unit\n\nval record_value_dependency :\n  Types.value_description -> Types.value_description -> unit\n\n(*\n\n  val is_magic_number : string -> bool\n  val read : in_channel -> Env.cmi_infos option * t\n  val write_magic_number : out_channel -> unit\n  val write : out_channel -> t -> unit\n\n  val find : string list -> string -> string\n  val read_signature : 'a -> string -> Types.signature * 'b list * 'c list\n\n*)\n"
  },
  {
    "path": "analysis/vendor/ml/code_frame.ml",
    "content": "let digits_count n =\n  let rec loop n base count =\n    if n >= base then loop n (base * 10) (count + 1) else count\n  in\n  loop (abs n) 1 0\n\nlet seek_2_lines_before src (pos : Lexing.position) =\n  let original_line = pos.pos_lnum in\n  let rec loop current_line current_char =\n    if current_line + 2 >= original_line then (current_char, current_line)\n    else\n      loop\n        (if src.[current_char] = '\\n' then current_line + 1 else current_line)\n        (current_char + 1)\n  in\n  loop 1 0\n\nlet seek_2_lines_after src (pos : Lexing.position) =\n  let original_line = pos.pos_lnum in\n  let rec loop current_line current_char =\n    if current_char = String.length src then (current_char, current_line)\n    else\n      match src.[current_char] with\n      | '\\n' when current_line = original_line + 2 ->\n        (current_char, current_line)\n      | '\\n' -> loop (current_line + 1) (current_char + 1)\n      | _ -> loop current_line (current_char + 1)\n  in\n  loop original_line pos.pos_cnum\n\nlet leading_space_count str =\n  let rec loop i count =\n    if i = String.length str then count\n    else if str.[i] != ' ' then count\n    else loop (i + 1) (count + 1)\n  in\n  loop 0 0\n\nlet break_long_line max_width line =\n  let rec loop pos accum =\n    if pos = String.length line then accum\n    else\n      let chunk_length = min max_width (String.length line - pos) in\n      let chunk = String.sub line pos chunk_length in\n      loop (pos + chunk_length) (chunk :: accum)\n  in\n  loop 0 [] |> List.rev\n\nlet filter_mapi f l =\n  let rec loop f l i accum =\n    match l with\n    | [] -> accum\n    | head :: rest ->\n      let accum =\n        match f i head with\n        | None -> accum\n        | Some result -> result :: accum\n      in\n      loop f rest (i + 1) accum\n  in\n  loop f l 0 [] |> List.rev\n\n(* Spiritual equivalent of\n   
https://github.com/ocaml/ocaml/blob/414bdec9ae387129b8102cc6bf3c0b6ae173eeb9/utils/misc.ml#L601\n*)\nmodule Color = struct\n  type color =\n    | Dim\n    (* | Filename *)\n    | Err\n    | Warn\n    | NoColor\n\n  let dim = \"\\x1b[2m\"\n\n  (* let filename = \"\\x1b[46m\" *)\n  let err = \"\\x1b[1;31m\"\n  let warn = \"\\x1b[1;33m\"\n  let reset = \"\\x1b[0m\"\n\n  external isatty : out_channel -> bool = \"caml_sys_isatty\"\n\n  (* reasonable heuristic on whether colors should be enabled *)\n  let should_enable_color () =\n    let term = try Sys.getenv \"TERM\" with Not_found -> \"\" in\n    term <> \"dumb\" && term <> \"\" && isatty stderr\n\n  let color_enabled = ref true\n\n  let setup =\n    let first = ref true in\n    (* initialize only once *)\n    fun o ->\n      if !first then (\n        first := false;\n        color_enabled :=\n          match o with\n          | Some Misc.Color.Always -> true\n          | Some Auto -> should_enable_color ()\n          | Some Never -> false\n          | None -> should_enable_color ());\n      ()\nend\n\nlet setup = Color.setup\n\ntype gutter = Number of int | Elided\ntype highlighted_string = {s: string; start: int; end_: int}\ntype line = {gutter: gutter; content: highlighted_string list}\n\n(*\n  Features:\n  - display a line gutter\n  - break long line into multiple for terminal display\n  - peek 2 lines before & after for context\n  - center snippet when it's heavily indented\n  - ellide intermediate lines when the reported range is huge\n*)\nlet print ~is_warning ~src ~(start_pos : Lexing.position)\n    ~(end_pos : Lexing.position) =\n  let indent = 2 in\n  let highlight_line_start_line = start_pos.pos_lnum in\n  let highlight_line_end_line = end_pos.pos_lnum in\n  let start_line_line_offset, first_shown_line =\n    seek_2_lines_before src start_pos\n  in\n  let end_line_line_end_offset, last_shown_line =\n    seek_2_lines_after src end_pos\n  in\n\n  let more_than_5_highlighted_lines =\n    
highlight_line_end_line - highlight_line_start_line + 1 > 5\n  in\n  let max_line_digits_count = digits_count last_shown_line in\n  (* TODO: change this back to a fixed 100? *)\n  (* 3 for separator + the 2 spaces around it *)\n  let line_width = 78 - max_line_digits_count - indent - 3 in\n  let lines =\n    String.sub src start_line_line_offset\n      (end_line_line_end_offset - start_line_line_offset)\n    |> String.split_on_char '\\n'\n    |> filter_mapi (fun i line ->\n           let line_number = i + first_shown_line in\n           if more_than_5_highlighted_lines then\n             if line_number = highlight_line_start_line + 2 then\n               Some (Elided, line)\n             else if\n               line_number > highlight_line_start_line + 2\n               && line_number < highlight_line_end_line - 1\n             then None\n             else Some (Number line_number, line)\n           else Some (Number line_number, line))\n  in\n  let leading_space_to_cut =\n    lines\n    |> List.fold_left\n         (fun current_max (_, line) ->\n           let leading_spaces = leading_space_count line in\n           if String.length line = leading_spaces then\n             (* the line's nothing but spaces. 
Doesn't count *)\n             current_max\n           else min leading_spaces current_max)\n         99999\n  in\n  let separator = if leading_space_to_cut = 0 then \"│\" else \"┆\" in\n  let stripped_lines =\n    lines\n    |> List.map (fun (gutter, line) ->\n           let new_content =\n             if String.length line <= leading_space_to_cut then\n               [{s = \"\"; start = 0; end_ = 0}]\n             else\n               String.sub line leading_space_to_cut\n                 (String.length line - leading_space_to_cut)\n               |> break_long_line line_width\n               |> List.mapi (fun i line ->\n                      match gutter with\n                      | Elided -> {s = line; start = 0; end_ = 0}\n                      | Number line_number ->\n                        let highlight_line_start_offset =\n                          start_pos.pos_cnum - start_pos.pos_bol\n                        in\n                        let highlight_line_end_offset =\n                          end_pos.pos_cnum - end_pos.pos_bol\n                        in\n                        let start =\n                          if i = 0 && line_number = highlight_line_start_line\n                          then\n                            highlight_line_start_offset - leading_space_to_cut\n                          else 0\n                        in\n                        let end_ =\n                          if line_number < highlight_line_start_line then 0\n                          else if\n                            line_number = highlight_line_start_line\n                            && line_number = highlight_line_end_line\n                          then highlight_line_end_offset - leading_space_to_cut\n                          else if line_number = highlight_line_start_line then\n                            String.length line\n                          else if\n                            line_number > highlight_line_start_line\n                        
    && line_number < highlight_line_end_line\n                          then String.length line\n                          else if line_number = highlight_line_end_line then\n                            highlight_line_end_offset - leading_space_to_cut\n                          else 0\n                        in\n                        {s = line; start; end_})\n           in\n           {gutter; content = new_content})\n  in\n  let buf = Buffer.create 100 in\n  let open Color in\n  let add_ch =\n    let last_color = ref NoColor in\n    fun color ch ->\n      if (not !Color.color_enabled) || !last_color = color then\n        Buffer.add_char buf ch\n      else\n        let ansi =\n          match (!last_color, color) with\n          | NoColor, Dim -> dim\n          (* | NoColor, Filename -> filename *)\n          | NoColor, Err -> err\n          | NoColor, Warn -> warn\n          | _, NoColor -> reset\n          | _, Dim -> reset ^ dim\n          (* | _, Filename -> reset ^ filename *)\n          | _, Err -> reset ^ err\n          | _, Warn -> reset ^ warn\n        in\n        Buffer.add_string buf ansi;\n        Buffer.add_char buf ch;\n        last_color := color\n  in\n  let draw_gutter color s =\n    for _i = 1 to max_line_digits_count + indent - String.length s do\n      add_ch NoColor ' '\n    done;\n    s |> String.iter (add_ch color);\n    add_ch NoColor ' ';\n    separator |> String.iter (add_ch Dim);\n    add_ch NoColor ' '\n  in\n  stripped_lines\n  |> List.iter (fun {gutter; content} ->\n         match gutter with\n         | Elided ->\n           draw_gutter Dim \".\";\n           add_ch Dim '.';\n           add_ch Dim '.';\n           add_ch Dim '.';\n           add_ch NoColor '\\n'\n         | Number line_number ->\n           content\n           |> List.iteri (fun i line ->\n                  let gutter_content =\n                    if i = 0 then string_of_int line_number else \"\"\n                  in\n                  let gutter_color =\n        
            if\n                      i = 0\n                      && line_number >= highlight_line_start_line\n                      && line_number <= highlight_line_end_line\n                    then if is_warning then Warn else Err\n                    else NoColor\n                  in\n                  draw_gutter gutter_color gutter_content;\n\n                  line.s\n                  |> String.iteri (fun ii ch ->\n                         let c =\n                           if ii >= line.start && ii < line.end_ then\n                             if is_warning then Warn else Err\n                           else NoColor\n                         in\n                         add_ch c ch);\n                  add_ch NoColor '\\n'));\n  Buffer.contents buf\n"
  },
  {
    "path": "analysis/vendor/ml/consistbl.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 2002 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Consistency tables: for checking consistency of module CRCs *)\n\ntype t = (string, Digest.t * string) Hashtbl.t\n\nlet create () = Hashtbl.create 13\n\nlet clear = Hashtbl.clear\n\nexception Inconsistency of string * string * string\n\nexception Not_available of string\n\nlet check tbl name crc source =\n  try\n    let old_crc, old_source = Hashtbl.find tbl name in\n    if crc <> old_crc then raise (Inconsistency (name, source, old_source))\n  with Not_found -> Hashtbl.add tbl name (crc, source)\n\nlet check_noadd tbl name crc source =\n  try\n    let old_crc, old_source = Hashtbl.find tbl name in\n    if crc <> old_crc then raise (Inconsistency (name, source, old_source))\n  with Not_found -> raise (Not_available name)\n\nlet set tbl name crc source = Hashtbl.add tbl name (crc, source)\n\nlet source tbl name = snd (Hashtbl.find tbl name)\n\nlet extract l tbl =\n  let l = 
List.sort_uniq String.compare l in\n  List.fold_left\n    (fun assc name ->\n      try\n        let crc, _ = Hashtbl.find tbl name in\n        (name, Some crc) :: assc\n      with Not_found -> (name, None) :: assc)\n    [] l\n\nlet filter p tbl =\n  let to_remove = ref [] in\n  Hashtbl.iter\n    (fun name _ -> if not (p name) then to_remove := name :: !to_remove)\n    tbl;\n  List.iter\n    (fun name ->\n      while Hashtbl.mem tbl name do\n        Hashtbl.remove tbl name\n      done)\n    !to_remove\n"
  },
  {
    "path": "analysis/vendor/ml/consistbl.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 2002 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Consistency tables: for checking consistency of module CRCs *)\n\ntype t\n\nval create : unit -> t\n\nval clear : t -> unit\n\nval check : t -> string -> Digest.t -> string -> unit\n(* [check tbl name crc source]\n     checks consistency of ([name], [crc]) with infos previously\n     stored in [tbl].  If no CRC was previously associated with\n     [name], record ([name], [crc]) in [tbl].\n     [source] is the name of the file from which the information\n     comes from.  This is used for error reporting. *)\n\nval check_noadd : t -> string -> Digest.t -> string -> unit\n(* Same as [check], but raise [Not_available] if no CRC was previously\n     associated with [name]. 
*)\n\nval set : t -> string -> Digest.t -> string -> unit\n(* [set tbl name crc source] forcefully associates [name] with\n   [crc] in [tbl], even if [name] already had a different CRC\n   associated with [name] in [tbl]. *)\n\nval source : t -> string -> string\n(* [source tbl name] returns the file name associated with [name]\n   if the latter has an associated CRC in [tbl].\n   Raise [Not_found] otherwise. *)\n\nval extract : string list -> t -> (string * Digest.t option) list\n(* [extract tbl names] returns an associative list mapping each string\n   in [names] to the CRC associated with it in [tbl]. If no CRC is\n   associated with a name then it is mapped to [None]. *)\n\nval filter : (string -> bool) -> t -> unit\n(* [filter pred tbl] removes from [tbl] table all (name, CRC) pairs\n   such that [pred name] is [false]. *)\n\nexception Inconsistency of string * string * string\n(* Raised by [check] when a CRC mismatch is detected.\n   First string is the name of the compilation unit.\n   Second string is the source that caused the inconsistency.\n   Third string is the source that set the CRC. *)\n\nexception Not_available of string\n(* Raised by [check_noadd] when a name doesn't have an associated CRC. *)\n"
  },
  {
    "path": "analysis/vendor/ml/ctype.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*  Xavier Leroy and Jerome Vouillon, projet Cristal, INRIA Rocquencourt  *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Operations on core types *)\n\nopen Misc\nopen Asttypes\nopen Types\nopen Btype\n\n(*\n   Type manipulation after type inference\n   ======================================\n   If one wants to manipulate a type after type inference (for\n   instance, during code generation or in the debugger), one must\n   first make sure that the type levels are correct, using the\n   function [correct_levels]. Then, this type can be correctly\n   manipulated by [apply], [expand_head] and [moregeneral].\n*)\n\n(*\n   General notes\n   =============\n   - As much sharing as possible should be kept : it makes types\n     smaller and better abbreviated.\n     When necessary, some sharing can be lost. Types will still be\n     printed correctly (+++ TO DO...), and abbreviations defined by a\n     class do not depend on sharing thanks to constrained\n     abbreviations. 
(Of course, even if some sharing is lost, typing\n     will still be correct.)\n   - All nodes of a type have a level : that way, one know whether a\n     node need to be duplicated or not when instantiating a type.\n   - Levels of a type are decreasing (generic level being considered\n     as greatest).\n   - The level of a type constructor is superior to the binding\n     time of its path.\n   - Recursive types without limitation should be handled (even if\n     there is still an occur check). This avoid treating specially the\n     case for objects, for instance. Furthermore, the occur check\n     policy can then be easily changed.\n*)\n\n(**** Errors ****)\n\nexception Unify of (type_expr * type_expr) list\n\nexception Tags of label * label\n\nlet () =\n  Location.register_error_of_exn (function\n    | Tags (l, l') ->\n      Some\n        Location.(\n          errorf ~loc:(in_file !input_name)\n            \"In this program,@ variant constructors@ #%s and #%s@ have the \\\n             same hash value.@ Change one of them.\"\n            l l')\n    | _ -> None)\n\nexception Subtype of (type_expr * type_expr) list * (type_expr * type_expr) list\n\nexception Cannot_expand\n\nexception Cannot_apply\n\nexception Recursive_abbrev\n\n(* GADT: recursive abbrevs can appear as a result of local constraints *)\nexception Unification_recursive_abbrev of (type_expr * type_expr) list\n\n(**** Type level management ****)\n\nlet current_level = ref 0\nlet nongen_level = ref 0\nlet global_level = ref 1\nlet saved_level = ref []\n\ntype levels = {\n  current_level: int;\n  nongen_level: int;\n  global_level: int;\n  saved_level: (int * int) list;\n}\nlet save_levels () =\n  {\n    current_level = !current_level;\n    nongen_level = !nongen_level;\n    global_level = !global_level;\n    saved_level = !saved_level;\n  }\nlet set_levels l =\n  current_level := l.current_level;\n  nongen_level := l.nongen_level;\n  global_level := l.global_level;\n  saved_level := 
l.saved_level\n\nlet get_current_level () = !current_level\nlet init_def level =\n  current_level := level;\n  nongen_level := level\nlet begin_def () =\n  saved_level := (!current_level, !nongen_level) :: !saved_level;\n  incr current_level;\n  nongen_level := !current_level\nlet begin_class_def () =\n  saved_level := (!current_level, !nongen_level) :: !saved_level;\n  incr current_level\nlet raise_nongen_level () =\n  saved_level := (!current_level, !nongen_level) :: !saved_level;\n  nongen_level := !current_level\nlet end_def () =\n  let cl, nl = List.hd !saved_level in\n  saved_level := List.tl !saved_level;\n  current_level := cl;\n  nongen_level := nl\n\nlet reset_global_level () = global_level := !current_level + 1\nlet increase_global_level () =\n  let gl = !global_level in\n  global_level := !current_level;\n  gl\nlet restore_global_level gl = global_level := gl\n\n(**** Whether a path points to an object type (with hidden row variable) ****)\nlet is_object_type path =\n  let name =\n    match path with\n    | Path.Pident id -> Ident.name id\n    | Path.Pdot (_, s, _) -> s\n    | Path.Papply _ -> assert false\n  in\n  name.[0] = '#'\n\n(**** Control tracing of GADT instances *)\n\nlet trace_gadt_instances = ref false\nlet check_trace_gadt_instances env =\n  (not !trace_gadt_instances)\n  && Env.has_local_constraints env\n  &&\n  (trace_gadt_instances := true;\n   cleanup_abbrev ();\n   true)\n\nlet reset_trace_gadt_instances b = if b then trace_gadt_instances := false\n\nlet wrap_trace_gadt_instances env f x =\n  let b = check_trace_gadt_instances env in\n  let y = f x in\n  reset_trace_gadt_instances b;\n  y\n\n(**** Abbreviations without parameters ****)\n(* Shall reset after generalizing *)\n\nlet simple_abbrevs = ref Mnil\n\nlet proper_abbrevs path tl abbrev =\n  if tl <> [] || !trace_gadt_instances || is_object_type path then abbrev\n  else simple_abbrevs\n\n(**** Some type creators ****)\n\n(* Re-export generic type creators *)\n\nlet newty2 = 
Btype.newty2\nlet newty desc = newty2 !current_level desc\n\nlet newvar ?name () = newty2 !current_level (Tvar name)\nlet newvar2 ?name level = newty2 level (Tvar name)\nlet new_global_var ?name () = newty2 !global_level (Tvar name)\n\nlet newobj fields = newty (Tobject (fields, ref None))\n\nlet newconstr path tyl = newty (Tconstr (path, tyl, ref Mnil))\n\nlet none = newty (Ttuple []) (* Clearly ill-formed type *)\n\n(**** Representative of a type ****)\n\n(* Re-export repr *)\nlet repr = repr\n\n(**** Type maps ****)\n\nmodule TypePairs = Hashtbl.Make (struct\n  type t = type_expr * type_expr\n  let equal (t1, t1') (t2, t2') = t1 == t2 && t1' == t2'\n  let hash (t, t') = t.id + (93 * t'.id)\nend)\n\n(**** unification mode ****)\n\ntype unification_mode =\n  | Expression (* unification in expression *)\n  | Pattern (* unification in pattern which may add local constraints *)\n\nlet umode = ref Expression\nlet generate_equations = ref false\nlet assume_injective = ref false\nlet variant_is_subtype = ref (fun _env _row _p1 -> false)\nlet set_mode_pattern ~generate ~injective f =\n  let old_unification_mode = !umode\n  and old_gen = !generate_equations\n  and old_inj = !assume_injective in\n  try\n    umode := Pattern;\n    generate_equations := generate;\n    assume_injective := injective;\n    let ret = f () in\n    umode := old_unification_mode;\n    generate_equations := old_gen;\n    assume_injective := old_inj;\n    ret\n  with e ->\n    umode := old_unification_mode;\n    generate_equations := old_gen;\n    assume_injective := old_inj;\n    raise e\n\n(*** Checks for type definitions ***)\n\nlet in_current_module = function\n  | Path.Pident _ -> true\n  | Path.Pdot _ | Path.Papply _ -> false\n\nlet in_pervasives p =\n  in_current_module p\n  &&\n  try\n    ignore (Env.find_type p Env.initial_safe_string);\n    true\n  with Not_found -> false\n\nlet is_datatype decl =\n  match decl.type_kind with\n  | Type_record _ | Type_variant _ | Type_open -> true\n  | 
Type_abstract -> false\n\n(**********************************************)\n(*  Miscellaneous operations on object types  *)\n(**********************************************)\n\n(* Note:\n   We need to maintain some invariants:\n   * cty_self must be a Tobject\n   * ...\n*)\ntype fields = (string * Types.field_kind * Types.type_expr) list\n(**** Object field manipulation. ****)\n\nlet object_fields ty =\n  match (repr ty).desc with\n  | Tobject (fields, _) -> fields\n  | _ -> assert false\n\nlet flatten_fields (ty : Types.type_expr) : fields * _ =\n  let rec flatten (l : fields) ty =\n    let ty = repr ty in\n    match ty.desc with\n    | Tfield (s, k, ty1, ty2) -> flatten ((s, k, ty1) :: l) ty2\n    | _ -> (l, ty)\n  in\n  let l, r = flatten [] ty in\n  (List.sort (fun (n, _, _) (n', _, _) -> compare n n') l, r)\n\nlet build_fields level =\n  List.fold_right (fun (s, k, ty1) ty2 ->\n      newty2 level (Tfield (s, k, ty1, ty2)))\n\nlet associate_fields (fields1 : fields) (fields2 : fields) : _ * fields * fields\n    =\n  let rec associate p s s' : fields * fields -> _ = function\n    | l, [] -> (List.rev p, List.rev s @ l, List.rev s')\n    | [], l' -> (List.rev p, List.rev s, List.rev s' @ l')\n    | (n, k, t) :: r, (n', k', t') :: r' when n = n' ->\n      associate ((n, k, t, k', t') :: p) s s' (r, r')\n    | (n, k, t) :: r, ((n', _k', _t') :: _ as l') when n < n' ->\n      associate p ((n, k, t) :: s) s' (r, l')\n    | ((_n, _k, _t) :: _ as l), (n', k', t') :: r' (* when n > n' *) ->\n      associate p s ((n', k', t') :: s') (l, r')\n  in\n  associate [] [] [] (fields1, fields2)\n\n(**** Check whether an object is open ****)\n\n(* +++ The abbreviation should eventually be expanded *)\nlet rec object_row ty =\n  let ty = repr ty in\n  match ty.desc with\n  | Tobject (t, _) -> object_row t\n  | Tfield (_, _, _, t) -> object_row t\n  | _ -> ty\n\nlet opened_object ty =\n  match (object_row ty).desc with\n  | Tvar _ | Tunivar _ | Tconstr _ -> true\n  | _ -> 
false\n\nlet concrete_object ty =\n  match (object_row ty).desc with\n  | Tvar _ -> false\n  | _ -> true\n\n(**** Close an object ****)\n\nlet close_object ty =\n  let rec close ty =\n    let ty = repr ty in\n    match ty.desc with\n    | Tvar _ -> link_type ty (newty2 ty.level Tnil)\n    | Tfield (_, _, _, ty') -> close ty'\n    | _ -> assert false\n  in\n  match (repr ty).desc with\n  | Tobject (ty, _) -> close ty\n  | _ -> assert false\n\n(**** Row variable of an object type ****)\n\nlet row_variable ty =\n  let rec find ty =\n    let ty = repr ty in\n    match ty.desc with\n    | Tfield (_, _, _, ty) -> find ty\n    | Tvar _ -> ty\n    | _ -> assert false\n  in\n  match (repr ty).desc with\n  | Tobject (fi, _) -> find fi\n  | _ -> assert false\n\n(**** Object name manipulation ****)\n(* +++ Bientot obsolete *)\n\nlet set_object_name id rv params ty =\n  match (repr ty).desc with\n  | Tobject (_fi, nm) -> set_name nm (Some (Path.Pident id, rv :: params))\n  | _ -> assert false\n\nlet remove_object_name ty =\n  match (repr ty).desc with\n  | Tobject (_, nm) -> set_name nm None\n  | Tconstr (_, _, _) -> ()\n  | _ -> fatal_error \"Ctype.remove_object_name\"\n\n(**** Hiding of private methods ****)\n\nlet hide_private_methods ty =\n  match (repr ty).desc with\n  | Tobject (fi, nm) ->\n    nm := None;\n    let fl, _ = flatten_fields fi in\n    List.iter\n      (function\n        | _, k, _ -> (\n          match field_kind_repr k with\n          | Fvar r -> set_kind r Fabsent\n          | _ -> ()))\n      fl\n  | _ -> assert false\n\n(*******************************)\n(*  Operations on class types  *)\n(*******************************)\n\nlet rec signature_of_class_type = function\n  | Cty_constr (_, _, cty) -> signature_of_class_type cty\n  | Cty_signature sign -> sign\n  | Cty_arrow (_, _, cty) -> signature_of_class_type cty\n\nlet self_type cty = repr (signature_of_class_type cty).csig_self\n\nlet rec class_type_arity = function\n  | Cty_constr (_, _, cty) -> 
class_type_arity cty\n  | Cty_signature _ -> 0\n  | Cty_arrow (_, _, cty) -> 1 + class_type_arity cty\n\n(*******************************************)\n(*  Miscellaneous operations on row types  *)\n(*******************************************)\ntype row_fields = (Asttypes.label * Types.row_field) list\ntype row_pairs = (Asttypes.label * Types.row_field * Types.row_field) list\nlet sort_row_fields : row_fields -> row_fields =\n  List.sort (fun (p, _) (q, _) -> compare (p : string) q)\n\nlet rec merge_rf (r1 : row_fields) (r2 : row_fields) (pairs : row_pairs)\n    (fi1 : row_fields) (fi2 : row_fields) =\n  match (fi1, fi2) with\n  | ((l1, f1) as p1) :: fi1', ((l2, f2) as p2) :: fi2' ->\n    if l1 = l2 then merge_rf r1 r2 ((l1, f1, f2) :: pairs) fi1' fi2'\n    else if l1 < l2 then merge_rf (p1 :: r1) r2 pairs fi1' fi2\n    else merge_rf r1 (p2 :: r2) pairs fi1 fi2'\n  | [], _ -> (List.rev r1, List.rev_append r2 fi2, pairs)\n  | _, [] -> (List.rev_append r1 fi1, List.rev r2, pairs)\n\nlet merge_row_fields (fi1 : row_fields) (fi2 : row_fields) :\n    row_fields * row_fields * row_pairs =\n  match (fi1, fi2) with\n  | [], _ | _, [] -> (fi1, fi2, [])\n  | [p1], _ when not (List.mem_assoc (fst p1) fi2) -> (fi1, fi2, [])\n  | _, [p2] when not (List.mem_assoc (fst p2) fi1) -> (fi1, fi2, [])\n  | _ -> merge_rf [] [] [] (sort_row_fields fi1) (sort_row_fields fi2)\n\nlet rec filter_row_fields erase = function\n  | [] -> []\n  | ((_l, f) as p) :: fi -> (\n    let fi = filter_row_fields erase fi in\n    match row_field_repr f with\n    | Rabsent -> fi\n    | Reither (_, _, false, e) when erase ->\n      set_row_field e Rabsent;\n      fi\n    | _ -> p :: fi)\n\n(**************************************)\n(*  Check genericity of type schemes  *)\n(**************************************)\n\nexception Non_closed of type_expr * bool\n\nlet free_variables = ref []\nlet really_closed = ref None\n\nlet rec free_vars_rec real ty =\n  let ty = repr ty in\n  if ty.level >= lowest_level then 
(\n    ty.level <- pivot_level - ty.level;\n    match (ty.desc, !really_closed) with\n    | Tvar _, _ -> free_variables := (ty, real) :: !free_variables\n    | Tconstr (path, tl, _), Some env ->\n      (try\n         let _, body, _ = Env.find_type_expansion path env in\n         if (repr body).level <> generic_level then\n           free_variables := (ty, real) :: !free_variables\n       with Not_found -> ());\n      List.iter (free_vars_rec true) tl\n    (* Do not count \"virtual\" free variables\n        | Tobject(ty, {contents = Some (_, p)}) ->\n            free_vars_rec false ty; List.iter (free_vars_rec true) p\n    *)\n    | Tobject (ty, _), _ -> free_vars_rec false ty\n    | Tfield (_, _, ty1, ty2), _ ->\n      free_vars_rec true ty1;\n      free_vars_rec false ty2\n    | Tvariant row, _ ->\n      let row = row_repr row in\n      iter_row (free_vars_rec true) row;\n      if not (static_row row) then free_vars_rec false row.row_more\n    | _ -> iter_type_expr (free_vars_rec true) ty)\n\nlet free_vars ?env ty =\n  free_variables := [];\n  really_closed := env;\n  free_vars_rec true ty;\n  let res = !free_variables in\n  free_variables := [];\n  really_closed := None;\n  res\n\nlet free_variables ?env ty =\n  let tl = List.map fst (free_vars ?env ty) in\n  unmark_type ty;\n  tl\n\nlet closed_type ty =\n  match free_vars ty with\n  | [] -> ()\n  | (v, real) :: _ -> raise (Non_closed (v, real))\n\nlet closed_parameterized_type params ty =\n  List.iter mark_type params;\n  let ok =\n    try\n      closed_type ty;\n      true\n    with Non_closed _ -> false\n  in\n  List.iter unmark_type params;\n  unmark_type ty;\n  ok\n\nlet closed_type_decl decl =\n  try\n    List.iter mark_type decl.type_params;\n    (match decl.type_kind with\n    | Type_abstract -> ()\n    | Type_variant v ->\n      List.iter\n        (fun {cd_args; cd_res; _} ->\n          match cd_res with\n          | Some _ -> ()\n          | None -> (\n            match cd_args with\n            | 
Cstr_tuple l -> List.iter closed_type l\n            | Cstr_record l -> List.iter (fun l -> closed_type l.ld_type) l))\n        v\n    | Type_record (r, _rep) -> List.iter (fun l -> closed_type l.ld_type) r\n    | Type_open -> ());\n    (match decl.type_manifest with\n    | None -> ()\n    | Some ty -> closed_type ty);\n    unmark_type_decl decl;\n    None\n  with Non_closed (ty, _) ->\n    unmark_type_decl decl;\n    Some ty\n\nlet closed_extension_constructor ext =\n  try\n    List.iter mark_type ext.ext_type_params;\n    (match ext.ext_ret_type with\n    | Some _ -> ()\n    | None -> iter_type_expr_cstr_args closed_type ext.ext_args);\n    unmark_extension_constructor ext;\n    None\n  with Non_closed (ty, _) ->\n    unmark_extension_constructor ext;\n    Some ty\n\ntype closed_class_failure =\n  | CC_Method of type_expr * bool * string * type_expr\n  | CC_Value of type_expr * bool * string * type_expr\n\nexception CCFailure of closed_class_failure\n\nlet closed_class params sign =\n  let ty = object_fields (repr sign.csig_self) in\n  let fields, rest = flatten_fields ty in\n  List.iter mark_type params;\n  mark_type rest;\n  List.iter (fun (lab, _, ty) -> if lab = dummy_method then mark_type ty) fields;\n  try\n    mark_type_node (repr sign.csig_self);\n    List.iter\n      (fun (lab, kind, ty) ->\n        if field_kind_repr kind = Fpresent then\n          try closed_type ty\n          with Non_closed (ty0, real) ->\n            raise (CCFailure (CC_Method (ty0, real, lab, ty))))\n      fields;\n    mark_type_params (repr sign.csig_self);\n    List.iter unmark_type params;\n    unmark_class_signature sign;\n    None\n  with CCFailure reason ->\n    mark_type_params (repr sign.csig_self);\n    List.iter unmark_type params;\n    unmark_class_signature sign;\n    Some reason\n\n(**********************)\n(*  Type duplication  *)\n(**********************)\n\n(* Duplicate a type, preserving only type variables *)\nlet duplicate_type ty = Subst.type_expr 
Subst.identity ty\n\n(* Same, for class types *)\nlet duplicate_class_type ty = Subst.class_type Subst.identity ty\n\n(*****************************)\n(*  Type level manipulation  *)\n(*****************************)\n\n(*\n   It would be a bit more efficient to remove abbreviation expansions\n   rather than generalizing them: these expansions will usually not be\n   used anymore. However, this is not possible in the general case, as\n   [expand_abbrev] (via [subst]) requires these expansions to be\n   preserved. Does it worth duplicating this code ?\n*)\nlet rec generalize ty =\n  let ty = repr ty in\n  if ty.level > !current_level && ty.level <> generic_level then (\n    set_level ty generic_level;\n    (match ty.desc with\n    | Tconstr (_, _, abbrev) -> iter_abbrev generalize !abbrev\n    | _ -> ());\n    iter_type_expr generalize ty)\n\nlet generalize ty =\n  simple_abbrevs := Mnil;\n  generalize ty\n\n(* Generalize the structure and lower the variables *)\n\nlet rec generalize_structure var_level ty =\n  let ty = repr ty in\n  if ty.level <> generic_level then\n    if is_Tvar ty && ty.level > var_level then set_level ty var_level\n    else if\n      ty.level > !current_level\n      &&\n      match ty.desc with\n      | Tconstr (p, _, abbrev) ->\n        (not (is_object_type p))\n        &&\n        (abbrev := Mnil;\n         true)\n      | _ -> true\n    then (\n      set_level ty generic_level;\n      iter_type_expr (generalize_structure var_level) ty)\n\nlet generalize_structure var_level ty =\n  simple_abbrevs := Mnil;\n  generalize_structure var_level ty\n\nlet forward_try_expand_once =\n  (* Forward declaration *)\n  ref (fun _env _ty -> raise Cannot_expand)\n\n(*\n   Lower the levels of a type (assume [level] is not\n   [generic_level]).\n*)\n(*\n    The level of a type constructor must be greater than its binding\n    time. 
That way, a type constructor cannot escape the scope of its\n    definition, as would be the case in\n      let x = ref []\n      module M = struct type t let _ = (x : t list ref) end\n    (without this constraint, the type system would actually be unsound.)\n*)\nlet get_level env p =\n  try\n    match (Env.find_type p env).type_newtype_level with\n    | None -> Path.binding_time p\n    | Some (x, _) -> x\n  with Not_found ->\n    (* no newtypes in predef *)\n    Path.binding_time p\n\nlet rec normalize_package_path env p =\n  let t = try (Env.find_modtype p env).mtd_type with Not_found -> None in\n  match t with\n  | Some (Mty_ident p) -> normalize_package_path env p\n  | Some (Mty_signature _ | Mty_functor _ | Mty_alias _) | None -> (\n    match p with\n    | Path.Pdot (p1, s, n) ->\n      (* For module aliases *)\n      let p1' = Env.normalize_path None env p1 in\n      if Path.same p1 p1' then p\n      else normalize_package_path env (Path.Pdot (p1', s, n))\n    | _ -> p)\n\nlet rec update_level env level expand ty =\n  let ty = repr ty in\n  if ty.level > level then (\n    (match Env.gadt_instance_level env ty with\n    | Some lv -> if level < lv then raise (Unify [(ty, newvar2 level)])\n    | None -> ());\n    match ty.desc with\n    | Tconstr (p, _tl, _abbrev) when level < get_level env p -> (\n      (* Try first to replace an abbreviation by its expansion. *)\n      try\n        (* if is_newtype env p then raise Cannot_expand; *)\n        link_type ty (!forward_try_expand_once env ty);\n        update_level env level expand ty\n      with Cannot_expand ->\n        (* +++ Levels should be restored... 
*)\n        (* Format.printf \"update_level: %i < %i@.\" level (get_level env p); *)\n        if level < get_level env p then raise (Unify [(ty, newvar2 level)]);\n        iter_type_expr (update_level env level expand) ty)\n    | Tconstr (_, _ :: _, _) when expand -> (\n      try\n        link_type ty (!forward_try_expand_once env ty);\n        update_level env level expand ty\n      with Cannot_expand ->\n        set_level ty level;\n        iter_type_expr (update_level env level expand) ty)\n    | Tpackage (p, nl, tl) when level < Path.binding_time p ->\n      let p' = normalize_package_path env p in\n      if Path.same p p' then raise (Unify [(ty, newvar2 level)]);\n      log_type ty;\n      ty.desc <- Tpackage (p', nl, tl);\n      update_level env level expand ty\n    | Tobject (_, ({contents = Some (p, _tl)} as nm))\n      when level < get_level env p ->\n      set_name nm None;\n      update_level env level expand ty\n    | Tvariant row ->\n      let row = row_repr row in\n      (match row.row_name with\n      | Some (p, _tl) when level < get_level env p ->\n        log_type ty;\n        ty.desc <- Tvariant {row with row_name = None}\n      | _ -> ());\n      set_level ty level;\n      iter_type_expr (update_level env level expand) ty\n    | Tfield (lab, _, ty1, _)\n      when lab = dummy_method && (repr ty1).level > level ->\n      raise (Unify [(ty1, newvar2 level)])\n    | _ ->\n      set_level ty level;\n      (* XXX what about abbreviations in Tconstr ? 
*)\n      iter_type_expr (update_level env level expand) ty)\n\n(* First try without expanding, then expand everything,\n   to avoid combinatorial blow-up *)\nlet update_level env level ty =\n  let ty = repr ty in\n  if ty.level > level then (\n    let snap = snapshot () in\n    try update_level env level false ty\n    with Unify _ ->\n      backtrack snap;\n      update_level env level true ty)\n\n(* Generalize and lower levels of contravariant branches simultaneously *)\n\nlet rec generalize_expansive env var_level visited ty =\n  let ty = repr ty in\n  if ty.level = generic_level || ty.level <= var_level then ()\n  else if not (Hashtbl.mem visited ty.id) then (\n    Hashtbl.add visited ty.id ();\n    match ty.desc with\n    | Tconstr (path, tyl, abbrev) ->\n      let variance =\n        try (Env.find_type path env).type_variance\n        with Not_found -> List.map (fun _ -> Variance.may_inv) tyl\n      in\n      abbrev := Mnil;\n      List.iter2\n        (fun v t ->\n          if Variance.(mem May_weak v) then generalize_structure var_level t\n          else generalize_expansive env var_level visited t)\n        variance tyl\n    | Tpackage (_, _, tyl) -> List.iter (generalize_structure var_level) tyl\n    | Tarrow (_, t1, t2, _) ->\n      generalize_structure var_level t1;\n      generalize_expansive env var_level visited t2\n    | _ -> iter_type_expr (generalize_expansive env var_level visited) ty)\n\nlet generalize_expansive env ty =\n  simple_abbrevs := Mnil;\n  try generalize_expansive env !nongen_level (Hashtbl.create 7) ty\n  with Unify ([(_, ty')] as tr) -> raise (Unify ((ty, ty') :: tr))\n\nlet generalize_global ty = generalize_structure !global_level ty\nlet generalize_structure ty = generalize_structure !current_level ty\n\n(* Correct the levels of type [ty]. 
*)\nlet correct_levels ty = duplicate_type ty\n\n(* Only generalize the type ty0 in ty *)\nlet limited_generalize ty0 ty =\n  let ty0 = repr ty0 in\n\n  let graph = Hashtbl.create 17 in\n  let idx = ref lowest_level in\n  let roots = ref [] in\n\n  let rec inverse pty ty =\n    let ty = repr ty in\n    if ty.level > !current_level || ty.level = generic_level then (\n      decr idx;\n      Hashtbl.add graph !idx (ty, ref pty);\n      if ty.level = generic_level || ty == ty0 then roots := ty :: !roots;\n      set_level ty !idx;\n      iter_type_expr (inverse [ty]) ty)\n    else if ty.level < lowest_level then\n      let _, parents = Hashtbl.find graph ty.level in\n      parents := pty @ !parents\n  and generalize_parents ty =\n    let idx = ty.level in\n    if idx <> generic_level then (\n      set_level ty generic_level;\n      List.iter generalize_parents !(snd (Hashtbl.find graph idx));\n      (* Special case for rows: must generalize the row variable *)\n      match ty.desc with\n      | Tvariant row ->\n        let more = row_more row in\n        let lv = more.level in\n        if (lv < lowest_level || lv > !current_level) && lv <> generic_level\n        then set_level more generic_level\n      | _ -> ())\n  in\n\n  inverse [] ty;\n  if ty0.level < lowest_level then iter_type_expr (inverse []) ty0;\n  List.iter generalize_parents !roots;\n  Hashtbl.iter\n    (fun _ (ty, _) ->\n      if ty.level <> generic_level then set_level ty !current_level)\n    graph\n\n(* Compute statically the free univars of all nodes in a type *)\n(* This avoids doing it repeatedly during instantiation *)\n\ntype inv_type_expr = {\n  inv_type: type_expr;\n  mutable inv_parents: inv_type_expr list;\n}\n\nlet rec inv_type hash pty ty =\n  let ty = repr ty in\n  try\n    let inv = TypeHash.find hash ty in\n    inv.inv_parents <- pty @ inv.inv_parents\n  with Not_found ->\n    let inv = {inv_type = ty; inv_parents = pty} in\n    TypeHash.add hash ty inv;\n    iter_type_expr (inv_type hash 
[inv]) ty\n\nlet compute_univars ty =\n  let inverted = TypeHash.create 17 in\n  inv_type inverted [] ty;\n  let node_univars = TypeHash.create 17 in\n  let rec add_univar univ inv =\n    match inv.inv_type.desc with\n    | Tpoly (_ty, tl) when List.memq univ (List.map repr tl) -> ()\n    | _ -> (\n      try\n        let univs = TypeHash.find node_univars inv.inv_type in\n        if not (TypeSet.mem univ !univs) then (\n          univs := TypeSet.add univ !univs;\n          List.iter (add_univar univ) inv.inv_parents)\n      with Not_found ->\n        TypeHash.add node_univars inv.inv_type (ref (TypeSet.singleton univ));\n        List.iter (add_univar univ) inv.inv_parents)\n  in\n  TypeHash.iter (fun ty inv -> if is_Tunivar ty then add_univar ty inv) inverted;\n  fun ty ->\n    try !(TypeHash.find node_univars ty) with Not_found -> TypeSet.empty\n\n(*******************)\n(*  Instantiation  *)\n(*******************)\n\nlet rec find_repr p1 = function\n  | Mnil -> None\n  | Mcons (Public, p2, ty, _, _) when Path.same p1 p2 -> Some ty\n  | Mcons (_, _, _, _, rem) -> find_repr p1 rem\n  | Mlink {contents = rem} -> find_repr p1 rem\n\n(*\n   Generic nodes are duplicated, while non-generic nodes are left\n   as-is.\n   During instantiation, the description of a generic node is first\n   replaced by a link to a stub ([Tsubst (newvar ())]). Once the\n   copy is made, it replaces the stub.\n   After instantiation, the description of generic node, which was\n   stored by [save_desc], must be put back, using [cleanup_types].\n*)\n\nlet abbreviations = ref (ref Mnil)\n(* Abbreviation memorized. 
*)\n\n(* partial: we may not wish to copy the non generic types\n   before we call type_pat *)\nlet rec copy ?env ?partial ?keep_names ty =\n  let copy = copy ?env ?partial ?keep_names in\n  let ty = repr ty in\n  match ty.desc with\n  | Tsubst ty -> ty\n  | _ ->\n    if ty.level <> generic_level && partial = None then ty\n    else\n      (* We only forget types that are non generic and do not contain\n         free univars *)\n      let forget =\n        if ty.level = generic_level then generic_level\n        else\n          match partial with\n          | None -> assert false\n          | Some (free_univars, keep) ->\n            if TypeSet.is_empty (free_univars ty) then\n              if keep then ty.level else !current_level\n            else generic_level\n      in\n      if forget <> generic_level then newty2 forget (Tvar None)\n      else\n        let desc = ty.desc in\n        save_desc ty desc;\n        let t = newvar () in\n        (* Stub *)\n        (match env with\n        | Some env when Env.has_local_constraints env -> (\n          match Env.gadt_instance_level env ty with\n          | Some lv -> Env.add_gadt_instances env lv [t]\n          | None -> ())\n        | _ -> ());\n        ty.desc <- Tsubst t;\n        t.desc <-\n          (match desc with\n          | Tconstr (p, tl, _) -> (\n            let abbrevs = proper_abbrevs p tl !abbreviations in\n            match find_repr p !abbrevs with\n            | Some ty when repr ty != t -> Tlink ty\n            | _ ->\n              (*\n             One must allocate a new reference, so that abbrevia-\n             tions belonging to different branches of a type are\n             independent.\n             Moreover, a reference containing a [Mcons] must be\n             shared, so that the memorized expansion of an abbrevi-\n             ation can be released by changing the content of just\n             one reference.\n          *)\n              Tconstr\n                ( p,\n                  
List.map copy tl,\n                  ref\n                    (match !(!abbreviations) with\n                    | Mcons _ -> Mlink !abbreviations\n                    | abbrev -> abbrev) ))\n          | Tvariant row0 -> (\n            let row = row_repr row0 in\n            let more = repr row.row_more in\n            (* We must substitute in a subtle way *)\n            (* Tsubst takes a tuple containing the row var and the variant *)\n            match more.desc with\n            | Tsubst {desc = Ttuple [_; ty2]} ->\n              (* This variant type has been already copied *)\n              ty.desc <- Tsubst ty2;\n              (* avoid Tlink in the new type *)\n              Tlink ty2\n            | _ ->\n              (* If the row variable is not generic, we must keep it *)\n              let keep = more.level <> generic_level in\n              let more' =\n                match more.desc with\n                | Tsubst ty -> ty\n                | Tconstr _ | Tnil ->\n                  if keep then save_desc more more.desc;\n                  copy more\n                | Tvar _ | Tunivar _ ->\n                  save_desc more more.desc;\n                  if keep then more else newty more.desc\n                | _ -> assert false\n              in\n              let row =\n                match repr more' with\n                (* PR#6163 *)\n                | {desc = Tconstr _} when not row.row_fixed ->\n                  {row with row_fixed = true}\n                | _ -> row\n              in\n              (* Open row if partial for pattern and contains Reither *)\n              let more', row =\n                match partial with\n                | Some (free_univars, false) ->\n                  let more' =\n                    if more.id != more'.id then more'\n                    else\n                      let lv = if keep then more.level else !current_level in\n                      newty2 lv (Tvar None)\n                  in\n                  let 
not_reither (_, f) =\n                    match row_field_repr f with\n                    | Reither _ -> false\n                    | _ -> true\n                  in\n                  if\n                    row.row_closed && (not row.row_fixed)\n                    && TypeSet.is_empty (free_univars ty)\n                    && not (List.for_all not_reither row.row_fields)\n                  then\n                    ( more',\n                      {\n                        row_fields = Ext_list.filter row.row_fields not_reither;\n                        row_more = more';\n                        row_bound = ();\n                        row_closed = false;\n                        row_fixed = false;\n                        row_name = None;\n                      } )\n                  else (more', row)\n                | _ -> (more', row)\n              in\n              (* Register new type first for recursion *)\n              more.desc <- Tsubst (newgenty (Ttuple [more'; t]));\n              (* Return a new copy *)\n              Tvariant (copy_row copy true row keep more'))\n          | Tfield (_p, k, _ty1, ty2) -> (\n            match field_kind_repr k with\n            | Fabsent -> Tlink (copy ty2)\n            | Fpresent -> copy_type_desc copy desc\n            | Fvar r ->\n              dup_kind r;\n              copy_type_desc copy desc)\n          | Tobject (ty1, _) when partial <> None -> Tobject (copy ty1, ref None)\n          | _ -> copy_type_desc ?keep_names copy desc);\n        t\n\nlet simple_copy t = copy t\n\n(**** Variants of instantiations ****)\n\nlet gadt_env env = if Env.has_local_constraints env then Some env else None\n\nlet instance ?partial env sch =\n  let env = gadt_env env in\n  let partial =\n    match partial with\n    | None -> None\n    | Some keep -> Some (compute_univars sch, keep)\n  in\n  let ty = copy ?env ?partial sch in\n  cleanup_types ();\n  ty\n\nlet instance_def sch =\n  let ty = copy sch in\n  cleanup_types ();\n  
ty\n\nlet generic_instance env sch =\n  let old = !current_level in\n  current_level := generic_level;\n  let ty = instance env sch in\n  current_level := old;\n  ty\n\nlet instance_list env schl =\n  let env = gadt_env env in\n  let tyl = List.map (fun t -> copy ?env t) schl in\n  cleanup_types ();\n  tyl\n\nlet reified_var_counter = ref Vars.empty\nlet reset_reified_var_counter () = reified_var_counter := Vars.empty\n\n(* names given to new type constructors.\n   Used for existential types and\n   local constraints *)\nlet get_new_abstract_name s =\n  let index = try Vars.find s !reified_var_counter + 1 with Not_found -> 0 in\n  reified_var_counter := Vars.add s index !reified_var_counter;\n  if index = 0 && s <> \"\" && s.[String.length s - 1] <> '$' then s\n  else Printf.sprintf \"%s%d\" s index\n\nlet new_declaration newtype manifest =\n  {\n    type_params = [];\n    type_arity = 0;\n    type_kind = Type_abstract;\n    type_private = Public;\n    type_manifest = manifest;\n    type_variance = [];\n    type_newtype_level = newtype;\n    type_loc = Location.none;\n    type_attributes = [];\n    type_immediate = false;\n    type_unboxed = unboxed_false_default_false;\n  }\n\nlet instance_constructor ?in_pattern cstr =\n  (match in_pattern with\n  | None -> ()\n  | Some (env, newtype_lev) ->\n    let process existential =\n      let decl = new_declaration (Some (newtype_lev, newtype_lev)) None in\n      let name =\n        match repr existential with\n        | {desc = Tvar (Some name)} -> \"$\" ^ cstr.cstr_name ^ \"_'\" ^ name\n        | _ -> \"$\" ^ cstr.cstr_name\n      in\n      let path = Path.Pident (Ident.create (get_new_abstract_name name)) in\n      let new_env = Env.add_local_type path decl !env in\n      env := new_env;\n      let to_unify = newty (Tconstr (path, [], ref Mnil)) in\n      let tv = copy existential in\n      assert (is_Tvar tv);\n      link_type tv to_unify\n    in\n    List.iter process cstr.cstr_existentials);\n  let ty_res = copy 
cstr.cstr_res in\n  let ty_args = List.map simple_copy cstr.cstr_args in\n  cleanup_types ();\n  (ty_args, ty_res)\n\nlet instance_parameterized_type ?keep_names sch_args sch =\n  let ty_args = List.map (fun t -> copy ?keep_names t) sch_args in\n  let ty = copy sch in\n  cleanup_types ();\n  (ty_args, ty)\n\nlet instance_parameterized_type_2 sch_args sch_lst sch =\n  let ty_args = List.map simple_copy sch_args in\n  let ty_lst = List.map simple_copy sch_lst in\n  let ty = copy sch in\n  cleanup_types ();\n  (ty_args, ty_lst, ty)\n\nlet map_kind f = function\n  | Type_abstract -> Type_abstract\n  | Type_open -> Type_open\n  | Type_variant cl ->\n    Type_variant\n      (List.map\n         (fun c ->\n           {\n             c with\n             cd_args = map_type_expr_cstr_args f c.cd_args;\n             cd_res = may_map f c.cd_res;\n           })\n         cl)\n  | Type_record (fl, rr) ->\n    Type_record (List.map (fun l -> {l with ld_type = f l.ld_type}) fl, rr)\n\nlet instance_declaration decl =\n  let decl =\n    {\n      decl with\n      type_params = List.map simple_copy decl.type_params;\n      type_manifest = may_map simple_copy decl.type_manifest;\n      type_kind = map_kind simple_copy decl.type_kind;\n    }\n  in\n  cleanup_types ();\n  decl\n\nlet instance_class params cty =\n  let rec copy_class_type = function\n    | Cty_constr (path, tyl, cty) ->\n      Cty_constr (path, List.map simple_copy tyl, copy_class_type cty)\n    | Cty_signature sign ->\n      Cty_signature\n        {\n          csig_self = copy sign.csig_self;\n          csig_vars =\n            Vars.map\n              (function\n                | m, v, ty -> (m, v, copy ty))\n              sign.csig_vars;\n          csig_concr = sign.csig_concr;\n          csig_inher =\n            List.map\n              (fun (p, tl) -> (p, List.map simple_copy tl))\n              sign.csig_inher;\n        }\n    | Cty_arrow (l, ty, cty) -> Cty_arrow (l, copy ty, copy_class_type cty)\n  in\n  let 
params' = List.map simple_copy params in\n  let cty' = copy_class_type cty in\n  cleanup_types ();\n  (params', cty')\n\n(**** Instantiation for types with free universal variables ****)\n\nlet rec diff_list l1 l2 =\n  if l1 == l2 then []\n  else\n    match l1 with\n    | [] -> invalid_arg \"Ctype.diff_list\"\n    | a :: l1 -> a :: diff_list l1 l2\n\nlet conflicts free bound =\n  let bound = List.map repr bound in\n  TypeSet.exists (fun t -> List.memq (repr t) bound) free\n\nlet delayed_copy = ref []\n(* copying to do later *)\n\n(* Copy without sharing until there are no free univars left *)\n(* all free univars must be included in [visited]            *)\nlet rec copy_sep fixed free bound visited ty =\n  let ty = repr ty in\n  let univars = free ty in\n  if TypeSet.is_empty univars then (\n    if ty.level <> generic_level then ty\n    else\n      let t = newvar () in\n      delayed_copy := lazy (t.desc <- Tlink (copy ty)) :: !delayed_copy;\n      t)\n  else\n    try\n      let t, bound_t = List.assq ty visited in\n      let dl = if is_Tunivar ty then [] else diff_list bound bound_t in\n      if dl <> [] && conflicts univars dl then raise Not_found;\n      t\n    with Not_found ->\n      let t = newvar () in\n      (* Stub *)\n      let visited =\n        match ty.desc with\n        | Tarrow _ | Ttuple _ | Tvariant _ | Tconstr _ | Tobject _ | Tpackage _\n          ->\n          (ty, (t, bound)) :: visited\n        | _ -> visited\n      in\n      let copy_rec = copy_sep fixed free bound visited in\n      t.desc <-\n        (match ty.desc with\n        | Tvariant row0 ->\n          let row = row_repr row0 in\n          let more = repr row.row_more in\n          (* We shall really check the level on the row variable *)\n          let keep = is_Tvar more && more.level <> generic_level in\n          let more' = copy_rec more in\n          let fixed' = fixed && is_Tvar (repr more') in\n          let row = copy_row copy_rec fixed' row keep more' in\n          Tvariant 
row\n        | Tpoly (t1, tl) ->\n          let tl = List.map repr tl in\n          let tl' = List.map (fun t -> newty t.desc) tl in\n          let bound = tl @ bound in\n          let visited =\n            List.map2 (fun ty t -> (ty, (t, bound))) tl tl' @ visited\n          in\n          Tpoly (copy_sep fixed free bound visited t1, tl')\n        | _ -> copy_type_desc copy_rec ty.desc);\n      t\n\nlet instance_poly ?(keep_names = false) fixed univars sch =\n  let univars = List.map repr univars in\n  let copy_var ty =\n    match ty.desc with\n    | Tunivar name -> if keep_names then newty (Tvar name) else newvar ()\n    | _ -> assert false\n  in\n  let vars = List.map copy_var univars in\n  let pairs = List.map2 (fun u v -> (u, (v, []))) univars vars in\n  delayed_copy := [];\n  let ty = copy_sep fixed (compute_univars sch) [] pairs sch in\n  List.iter Lazy.force !delayed_copy;\n  delayed_copy := [];\n  cleanup_types ();\n  (vars, ty)\n\nlet instance_label fixed lbl =\n  let ty_res = copy lbl.lbl_res in\n  let vars, ty_arg =\n    match repr lbl.lbl_arg with\n    | {desc = Tpoly (ty, tl)} -> instance_poly fixed tl ty\n    | _ -> ([], copy lbl.lbl_arg)\n  in\n  cleanup_types ();\n  (vars, ty_arg, ty_res)\n\n(**** Instantiation with parameter substitution ****)\n\nlet unify' =\n  (* Forward declaration *)\n  ref (fun _env _ty1 _ty2 -> raise (Unify []))\n\nlet subst env level priv abbrev ty params args body =\n  if List.length params <> List.length args then raise (Unify []);\n  let old_level = !current_level in\n  current_level := level;\n  try\n    let body0 = newvar () in\n    (* Stub *)\n    (match ty with\n    | None -> ()\n    | Some ({desc = Tconstr (path, tl, _)} as ty) ->\n      let abbrev = proper_abbrevs path tl abbrev in\n      memorize_abbrev abbrev priv path ty body0\n    | _ -> assert false);\n    abbreviations := abbrev;\n    let params', body' = instance_parameterized_type params body in\n    abbreviations := ref Mnil;\n    !unify' env body0 body';\n 
   List.iter2 (!unify' env) params' args;\n    current_level := old_level;\n    body'\n  with Unify _ as exn ->\n    current_level := old_level;\n    raise exn\n\n(*\n   Only the shape of the type matters, not whether it is generic or\n   not. [generic_level] might be somewhat slower, but it ensures\n   invariants on types are enforced (decreasing levels), and we don't\n   care about efficiency here.\n*)\nlet apply env params body args =\n  try subst env generic_level Public (ref Mnil) None params args body\n  with Unify _ -> raise Cannot_apply\n\nlet () = Subst.ctype_apply_env_empty := apply Env.empty\n\n(****************************)\n(*  Abbreviation expansion  *)\n(****************************)\n\n(*\n   If the environment has changed, memorized expansions might not\n   be correct anymore, and so we flush the cache. This is safe but\n   quite pessimistic: it would be enough to flush the cache when a\n   type or module definition is overridden in the environment.\n*)\nlet previous_env = ref Env.empty\n\n(*let string_of_kind = function Public -> \"public\" | Private -> \"private\"*)\nlet check_abbrev_env env =\n  if env != !previous_env then (\n    (* prerr_endline \"cleanup expansion cache\"; *)\n    cleanup_abbrev ();\n    previous_env := env)\n\n(* Expand an abbreviation. The expansion is memorized. *)\n(*\n   Assume the level is greater than the path binding time of the\n   expanded abbreviation.\n*)\n(*\n   An abbreviation expansion will fail in either of these cases:\n   1. The type constructor does not correspond to a manifest type.\n   2. The type constructor is defined in an external file, and this\n      file is not in the path (missing -I options).\n   3. The type constructor is not in the \"local\" environment. This can\n      happens when a non-generic type variable has been instantiated\n      afterwards to the not yet defined type constructor. 
(Actually,\n      this cannot happen at the moment due to the strong constraints\n      between type levels and constructor binding time.)\n   4. The expansion requires the expansion of another abbreviation,\n      and this other expansion fails.\n*)\nlet expand_abbrev_gen kind find_type_expansion env ty =\n  check_abbrev_env env;\n  match ty with\n  | {desc = Tconstr (path, args, abbrev); level} -> (\n    let lookup_abbrev = proper_abbrevs path args abbrev in\n    match find_expans kind path !lookup_abbrev with\n    | Some ty' ->\n      (* prerr_endline\n         (\"found a \"^string_of_kind kind^\" expansion for \"^Path.name path);*)\n      (if level <> generic_level then\n         try update_level env level ty'\n         with Unify _ ->\n           (* XXX This should not happen.\n              However, levels are not correctly restored after a\n              typing error *)\n           ());\n      let ty' = repr ty' in\n      (* assert (ty != ty'); *)\n      (* PR#7324 *)\n      ty'\n    | None -> (\n      match find_type_expansion path env with\n      | exception Not_found ->\n        (* another way to expand is to normalize the path itself *)\n        let path' = Env.normalize_path None env path in\n        if Path.same path path' then raise Cannot_expand\n        else newty2 level (Tconstr (path', args, abbrev))\n      | params, body, lv ->\n        (* prerr_endline\n           (\"add a \"^string_of_kind kind^\" expansion for \"^Path.name path);*)\n        let ty' = subst env level kind abbrev (Some ty) params args body in\n        (* For gadts, remember type as non exportable *)\n        (* The ambiguous level registered for ty' should be the highest *)\n        (if !trace_gadt_instances then\n           match\n             Ext_pervasives.max_int_option lv (Env.gadt_instance_level env ty)\n           with\n           | None -> ()\n           | Some lv ->\n             if level < lv then raise (Unify [(ty, newvar2 level)]);\n             
Env.add_gadt_instances env lv [ty; ty']);\n        ty'))\n  | _ -> assert false\n\n(* Expand respecting privacy *)\nlet expand_abbrev env ty =\n  expand_abbrev_gen Public Env.find_type_expansion env ty\n\n(* Expand once the head of a type *)\nlet expand_head_once env ty =\n  try expand_abbrev env (repr ty) with Cannot_expand -> assert false\n\n(* Check whether a type can be expanded *)\nlet safe_abbrev env ty =\n  let snap = Btype.snapshot () in\n  try\n    ignore (expand_abbrev env ty);\n    true\n  with Cannot_expand | Unify _ ->\n    Btype.backtrack snap;\n    false\n\n(* Expand the head of a type once.\n   Raise Cannot_expand if the type cannot be expanded.\n   May raise Unify, if a recursion was hidden in the type. *)\nlet try_expand_once env ty =\n  let ty = repr ty in\n  match ty.desc with\n  | Tconstr _ -> repr (expand_abbrev env ty)\n  | _ -> raise Cannot_expand\n\n(* This one only raises Cannot_expand *)\nlet try_expand_safe env ty =\n  let snap = Btype.snapshot () in\n  try try_expand_once env ty\n  with Unify _ ->\n    Btype.backtrack snap;\n    raise Cannot_expand\n\n(* Fully expand the head of a type. *)\nlet rec try_expand_head try_once env ty =\n  let ty' = try_once env ty in\n  try try_expand_head try_once env ty' with Cannot_expand -> ty'\n\nlet try_expand_head try_once env ty =\n  let ty' = try_expand_head try_once env ty in\n  (match Env.gadt_instance_level env ty' with\n  | None -> ()\n  | Some lv -> Env.add_gadt_instance_chain env lv ty);\n  ty'\n\n(* Unsafe full expansion, may raise Unify. 
*)\nlet expand_head_unif env ty =\n  try try_expand_head try_expand_once env ty with Cannot_expand -> repr ty\n\n(* Safe version of expand_head, never fails *)\nlet expand_head env ty =\n  try try_expand_head try_expand_safe env ty with Cannot_expand -> repr ty\n\nlet _ = forward_try_expand_once := try_expand_safe\n\n(* Expand until we find a non-abstract type declaration *)\n\nlet rec extract_concrete_typedecl env ty =\n  let ty = repr ty in\n  match ty.desc with\n  | Tconstr (p, _, _) ->\n    let decl = Env.find_type p env in\n    if decl.type_kind <> Type_abstract then (p, p, decl)\n    else\n      let ty =\n        try try_expand_once env ty with Cannot_expand -> raise Not_found\n      in\n      let _, p', decl = extract_concrete_typedecl env ty in\n      (p, p', decl)\n  | _ -> raise Not_found\n\n(* Implementing function [expand_head_opt], the compiler's own version of\n   [expand_head] used for type-based optimisations.\n   [expand_head_opt] uses [Env.find_type_expansion_opt] to access the\n   manifest type information of private abstract data types which is\n   normally hidden to the type-checker out of the implementation module of\n   the private abbreviation. 
*)\n\nlet expand_abbrev_opt = expand_abbrev_gen Private Env.find_type_expansion_opt\n\nlet try_expand_once_opt env ty =\n  let ty = repr ty in\n  match ty.desc with\n  | Tconstr _ -> repr (expand_abbrev_opt env ty)\n  | _ -> raise Cannot_expand\n\nlet rec try_expand_head_opt env ty =\n  let ty' = try_expand_once_opt env ty in\n  try try_expand_head_opt env ty' with Cannot_expand -> ty'\n\nlet expand_head_opt env ty =\n  let snap = Btype.snapshot () in\n  try try_expand_head_opt env ty\n  with Cannot_expand | Unify _ ->\n    (* expand_head shall never fail *)\n    Btype.backtrack snap;\n    repr ty\n\n(* Make sure that the type parameters of the type constructor [ty]\n   respect the type constraints *)\nlet enforce_constraints env ty =\n  match ty with\n  | {desc = Tconstr (path, args, _abbrev); level} -> (\n    try\n      let decl = Env.find_type path env in\n      ignore\n        (subst env level Public (ref Mnil) None decl.type_params args\n           (newvar2 level))\n    with Not_found -> ())\n  | _ -> assert false\n\n(* Recursively expand the head of a type.\n   Also expand #-types. 
*)\nlet full_expand env ty =\n  let ty = repr (expand_head env ty) in\n  match ty.desc with\n  | Tobject (fi, {contents = Some (_, v :: _)}) when is_Tvar (repr v) ->\n    newty2 ty.level (Tobject (fi, ref None))\n  | _ -> ty\n\n(*\n   Check whether the abbreviation expands to a well-defined type.\n   During the typing of a class, abbreviations for correspondings\n   types expand to non-generic types.\n*)\nlet generic_abbrev env path =\n  try\n    let _, body, _ = Env.find_type_expansion path env in\n    (repr body).level = generic_level\n  with Not_found -> false\n\nlet generic_private_abbrev env path =\n  try\n    match Env.find_type path env with\n    | {\n     type_kind = Type_abstract;\n     type_private = Private;\n     type_manifest = Some body;\n    } ->\n      (repr body).level = generic_level\n    | _ -> false\n  with Not_found -> false\n\nlet is_contractive env p =\n  try\n    let decl = Env.find_type p env in\n    (in_pervasives p && decl.type_manifest = None) || is_datatype decl\n  with Not_found -> false\n\n(*****************)\n(*  Occur check  *)\n(*****************)\n\nexception Occur\n\nlet rec occur_rec env allow_recursive visited ty0 = function\n  | {desc = Tlink ty} -> occur_rec env allow_recursive visited ty0 ty\n  | ty -> (\n    if ty == ty0 then raise Occur;\n    match ty.desc with\n    | Tconstr (p, _tl, _abbrev) -> (\n      if allow_recursive && is_contractive env p then ()\n      else\n        try\n          if TypeSet.mem ty visited then raise Occur;\n          let visited = TypeSet.add ty visited in\n          iter_type_expr (occur_rec env allow_recursive visited ty0) ty\n        with Occur -> (\n          try\n            let ty' = try_expand_head try_expand_once env ty in\n            (* This call used to be inlined, but there seems no reason for it.\n               Message was referring to change in rev. 1.58 of the CVS repo. 
*)\n            occur_rec env allow_recursive visited ty0 ty'\n          with Cannot_expand -> raise Occur))\n    | Tobject _ | Tvariant _ -> ()\n    | _ ->\n      if allow_recursive || TypeSet.mem ty visited then ()\n      else\n        let visited = TypeSet.add ty visited in\n        iter_type_expr (occur_rec env allow_recursive visited ty0) ty)\n\nlet type_changed = ref false (* trace possible changes to the studied type *)\n\nlet merge r b = if b then r := true\n\nlet occur env ty0 ty =\n  let allow_recursive = (*!Clflags.recursive_types ||*) !umode = Pattern in\n  let old = !type_changed in\n  try\n    while\n      type_changed := false;\n      occur_rec env allow_recursive TypeSet.empty ty0 ty;\n      !type_changed\n    do\n      () (* prerr_endline \"changed\" *)\n    done;\n    merge type_changed old\n  with exn ->\n    merge type_changed old;\n    raise\n      (match exn with\n      | Occur -> Unify []\n      | _ -> exn)\n\nlet occur_in env ty0 t =\n  try\n    occur env ty0 t;\n    false\n  with Unify _ -> true\n\n(* Check that a local constraint is well-founded *)\n(* PR#6405: not needed since we allow recursion and work on normalized types *)\n(* PR#6992: we actually need it for contractiveness *)\n(* This is a simplified version of occur, only for the rectypes case *)\n\nlet rec local_non_recursive_abbrev strict visited env p ty =\n  (*Format.eprintf \"@[Check %s =@ %a@]@.\" (Path.name p) !Btype.print_raw ty;*)\n  let ty = repr ty in\n  if not (List.memq ty visited) then\n    match ty.desc with\n    | Tconstr (p', args, _abbrev) -> (\n      if Path.same p p' then raise Occur;\n      if (not strict) && is_contractive env p' then ()\n      else\n        let visited = ty :: visited in\n        try\n          (* try expanding, since [p] could be hidden *)\n          local_non_recursive_abbrev strict visited env p\n            (try_expand_head try_expand_once env ty)\n        with Cannot_expand ->\n          let params =\n            try (Env.find_type p' 
env).type_params with Not_found -> args\n          in\n          List.iter2\n            (fun tv ty ->\n              let strict = strict || not (is_Tvar (repr tv)) in\n              local_non_recursive_abbrev strict visited env p ty)\n            params args)\n    | _ ->\n      if strict then\n        (* PR#7374 *)\n        let visited = ty :: visited in\n        iter_type_expr (local_non_recursive_abbrev true visited env p) ty\n\nlet local_non_recursive_abbrev env p ty =\n  try\n    (* PR#7397: need to check trace_gadt_instances *)\n    wrap_trace_gadt_instances env (local_non_recursive_abbrev false [] env p) ty;\n    true\n  with Occur -> false\n\n(*****************************)\n(*  Polymorphic Unification  *)\n(*****************************)\n\n(* Since we cannot duplicate universal variables, unification must\n   be done at meta-level, using bindings in univar_pairs *)\nlet rec unify_univar t1 t2 = function\n  | (cl1, cl2) :: rem -> (\n    let find_univ t cl =\n      try\n        let _, r = List.find (fun (t', _) -> t == repr t') cl in\n        Some r\n      with Not_found -> None\n    in\n    match (find_univ t1 cl1, find_univ t2 cl2) with\n    | Some {contents = Some t'2}, Some _ when t2 == repr t'2 -> ()\n    | Some ({contents = None} as r1), Some ({contents = None} as r2) ->\n      set_univar r1 t2;\n      set_univar r2 t1\n    | None, None -> unify_univar t1 t2 rem\n    | _ -> raise (Unify []))\n  | [] -> raise (Unify [])\n\n(* Test the occurrence of free univars in a type *)\n(* that's way too expensive. 
Must do some kind of caching *)\nlet occur_univar env ty =\n  let visited = ref TypeMap.empty in\n  let rec occur_rec bound ty =\n    let ty = repr ty in\n    if\n      ty.level >= lowest_level\n      &&\n      if TypeSet.is_empty bound then (\n        ty.level <- pivot_level - ty.level;\n        true)\n      else\n        try\n          let bound' = TypeMap.find ty !visited in\n          if TypeSet.exists (fun x -> not (TypeSet.mem x bound)) bound' then (\n            visited := TypeMap.add ty (TypeSet.inter bound bound') !visited;\n            true)\n          else false\n        with Not_found ->\n          visited := TypeMap.add ty bound !visited;\n          true\n    then\n      match ty.desc with\n      | Tunivar _ ->\n        if not (TypeSet.mem ty bound) then raise (Unify [(ty, newgenvar ())])\n      | Tpoly (ty, tyl) ->\n        let bound = List.fold_right TypeSet.add (List.map repr tyl) bound in\n        occur_rec bound ty\n      | Tconstr (_, [], _) -> ()\n      | Tconstr (p, tl, _) -> (\n        try\n          let td = Env.find_type p env in\n          List.iter2\n            (fun t v ->\n              if Variance.(mem May_pos v || mem May_neg v) then\n                occur_rec bound t)\n            tl td.type_variance\n        with Not_found -> List.iter (occur_rec bound) tl)\n      | _ -> iter_type_expr (occur_rec bound) ty\n  in\n  try\n    occur_rec TypeSet.empty ty;\n    unmark_type ty\n  with exn ->\n    unmark_type ty;\n    raise exn\n\n(* Grouping univars by families according to their binders *)\nlet add_univars = List.fold_left (fun s (t, _) -> TypeSet.add (repr t) s)\n\nlet get_univar_family univar_pairs univars =\n  if univars = [] then TypeSet.empty\n  else\n    let insert s = function\n      | cl1, (_ :: _ as cl2) ->\n        if List.exists (fun (t1, _) -> TypeSet.mem (repr t1) s) cl1 then\n          add_univars s cl2\n        else s\n      | _ -> s\n    in\n    let s = List.fold_right TypeSet.add univars TypeSet.empty in\n    
List.fold_left insert s univar_pairs\n\n(* Whether a family of univars escapes from a type *)\nlet univars_escape env univar_pairs vl ty =\n  let family = get_univar_family univar_pairs vl in\n  let visited = ref TypeSet.empty in\n  let rec occur t =\n    let t = repr t in\n    if TypeSet.mem t !visited then ()\n    else (\n      visited := TypeSet.add t !visited;\n      match t.desc with\n      | Tpoly (t, tl) ->\n        if List.exists (fun t -> TypeSet.mem (repr t) family) tl then ()\n        else occur t\n      | Tunivar _ -> if TypeSet.mem t family then raise Occur\n      | Tconstr (_, [], _) -> ()\n      | Tconstr (p, tl, _) -> (\n        try\n          let td = Env.find_type p env in\n          List.iter2\n            (fun t v ->\n              if Variance.(mem May_pos v || mem May_neg v) then occur t)\n            tl td.type_variance\n        with Not_found -> List.iter occur tl)\n      | _ -> iter_type_expr occur t)\n  in\n  try\n    occur ty;\n    false\n  with Occur -> true\n\n(* Wrapper checking that no variable escapes and updating univar_pairs *)\nlet enter_poly env univar_pairs t1 tl1 t2 tl2 f =\n  let old_univars = !univar_pairs in\n  let known_univars =\n    List.fold_left (fun s (cl, _) -> add_univars s cl) TypeSet.empty old_univars\n  in\n  let tl1 = List.map repr tl1 and tl2 = List.map repr tl2 in\n  if\n    List.exists (fun t -> TypeSet.mem t known_univars) tl1\n    && univars_escape env old_univars tl1 (newty (Tpoly (t2, tl2)))\n    || List.exists (fun t -> TypeSet.mem t known_univars) tl2\n       && univars_escape env old_univars tl2 (newty (Tpoly (t1, tl1)))\n  then raise (Unify []);\n  let cl1 = List.map (fun t -> (t, ref None)) tl1\n  and cl2 = List.map (fun t -> (t, ref None)) tl2 in\n  univar_pairs := (cl1, cl2) :: (cl2, cl1) :: old_univars;\n  try\n    let res = f t1 t2 in\n    univar_pairs := old_univars;\n    res\n  with exn ->\n    univar_pairs := old_univars;\n    raise exn\n\nlet univar_pairs = ref []\n\n(*****************)\n(*  
Unification  *)\n(*****************)\n\nlet rec has_cached_expansion p abbrev =\n  match abbrev with\n  | Mnil -> false\n  | Mcons (_, p', _, _, rem) -> Path.same p p' || has_cached_expansion p rem\n  | Mlink rem -> has_cached_expansion p !rem\n\n(**** Transform error trace ****)\n(* +++ Move it to some other place ? *)\n\nlet expand_trace env trace =\n  List.fold_right\n    (fun (t1, t2) rem ->\n      (repr t1, full_expand env t1) :: (repr t2, full_expand env t2) :: rem)\n    trace []\n\n(* build a dummy variant type *)\nlet mkvariant fields closed =\n  newgenty\n    (Tvariant\n       {\n         row_fields = fields;\n         row_closed = closed;\n         row_more = newvar ();\n         row_bound = ();\n         row_fixed = false;\n         row_name = None;\n       })\n\n(**** Unification ****)\n\n(* Return whether [t0] occurs in [ty]. Objects are also traversed. *)\nlet deep_occur t0 ty =\n  let rec occur_rec ty =\n    let ty = repr ty in\n    if ty.level >= lowest_level then (\n      if ty == t0 then raise Occur;\n      ty.level <- pivot_level - ty.level;\n      iter_type_expr occur_rec ty)\n  in\n  try\n    occur_rec ty;\n    unmark_type ty;\n    false\n  with Occur ->\n    unmark_type ty;\n    true\n\n(*\n   1. When unifying two non-abbreviated types, one type is made a link\n      to the other. When unifying an abbreviated type with a\n      non-abbreviated type, the non-abbreviated type is made a link to\n      the other one. When unifying to abbreviated types, these two\n      types are kept distincts, but they are made to (temporally)\n      expand to the same type.\n   2. Abbreviations with at least one parameter are systematically\n      expanded. The overhead does not seem too high, and that way\n      abbreviations where some parameters does not appear in the\n      expansion, such as ['a t = int], are correctly handled. In\n      particular, for this example, unifying ['a t] with ['b t] keeps\n      ['a] and ['b] distincts. 
(Is it really important ?)\n   3. Unifying an abbreviation ['a t = 'a] with ['a] should not yield\n      ['a t as 'a]. Indeed, the type variable would otherwise be lost.\n      This problem occurs for abbreviations expanding to a type\n      variable, but also to many other constrained abbreviations (for\n      instance, [(< x : 'a > -> unit) t = <x : 'a>]). The solution is\n      that, if an abbreviation is unified with some subpart of its\n      parameters, then the parameter actually does not get\n      abbreviated.  It would be possible to check whether some\n      information is indeed lost, but it probably does not worth it.\n*)\n\nlet newtype_level = ref None\n\nlet get_newtype_level () =\n  match !newtype_level with\n  | None -> assert false\n  | Some x -> x\n\n(* a local constraint can be added only if the rhs\n   of the constraint does not contain any Tvars.\n   They need to be removed using this function *)\nlet reify env t =\n  let newtype_level = get_newtype_level () in\n  let create_fresh_constr lev name =\n    let decl = new_declaration (Some (newtype_level, newtype_level)) None in\n    let name =\n      match name with\n      | Some s -> \"$'\" ^ s\n      | _ -> \"$\"\n    in\n    let path = Path.Pident (Ident.create (get_new_abstract_name name)) in\n    let new_env = Env.add_local_type path decl !env in\n    let t = newty2 lev (Tconstr (path, [], ref Mnil)) in\n    env := new_env;\n    t\n  in\n  let visited = ref TypeSet.empty in\n  let rec iterator ty =\n    let ty = repr ty in\n    if TypeSet.mem ty !visited then ()\n    else (\n      visited := TypeSet.add ty !visited;\n      match ty.desc with\n      | Tvar o ->\n        let t = create_fresh_constr ty.level o in\n        link_type ty t;\n        if ty.level < newtype_level then raise (Unify [(t, newvar2 ty.level)])\n      | Tvariant r ->\n        let r = row_repr r in\n        (if not (static_row r) then\n           if r.row_fixed then iterator (row_more r)\n           else\n             let m 
= r.row_more in\n             match m.desc with\n             | Tvar o ->\n               let t = create_fresh_constr m.level o in\n               let row =\n                 {r with row_fields = []; row_fixed = true; row_more = t}\n               in\n               link_type m (newty2 m.level (Tvariant row));\n               if m.level < newtype_level then\n                 raise (Unify [(t, newvar2 m.level)])\n             | _ -> assert false);\n        iter_row iterator r\n      | Tconstr (p, _, _) when is_object_type p ->\n        iter_type_expr iterator (full_expand !env ty)\n      | _ -> iter_type_expr iterator ty)\n  in\n  iterator t\n\nlet is_newtype env p =\n  try\n    let decl = Env.find_type p env in\n    decl.type_newtype_level <> None\n    && decl.type_kind = Type_abstract\n    && decl.type_private = Public\n  with Not_found -> false\n\nlet non_aliasable p decl =\n  (* in_pervasives p ||  (subsumed by in_current_module) *)\n  in_current_module p && decl.type_newtype_level = None\n\nlet is_instantiable env p =\n  try\n    let decl = Env.find_type p env in\n    decl.type_kind = Type_abstract\n    && decl.type_private = Public && decl.type_arity = 0\n    && decl.type_manifest = None\n    && not (non_aliasable p decl)\n  with Not_found -> false\n\n(* PR#7113: -safe-string should be a global property *)\nlet compatible_paths p1 p2 =\n  let open Predef in\n  Path.same p1 p2\n  || (Path.same p1 path_bytes && Path.same p2 path_string)\n  || (Path.same p1 path_string && Path.same p2 path_bytes)\n\n(* Check for datatypes carefully; see PR#6348 *)\nlet rec expands_to_datatype env ty =\n  let ty = repr ty in\n  match ty.desc with\n  | Tconstr (p, _, _) -> (\n    try\n      is_datatype (Env.find_type p env)\n      || expands_to_datatype env (try_expand_once env ty)\n    with Not_found | Cannot_expand -> false)\n  | _ -> false\n\n(* mcomp type_pairs subst env t1 t2 does not raise an\n   exception if it is possible that t1 and t2 are actually\n   equal, assuming the 
types in type_pairs are equal and\n   that the mapping subst holds.\n   Assumes that both t1 and t2 do not contain any tvars\n   and that both their objects and variants are closed\n*)\n\nlet rec mcomp type_pairs env t1 t2 =\n  if t1 == t2 then ()\n  else\n    let t1 = repr t1 in\n    let t2 = repr t2 in\n    if t1 == t2 then ()\n    else\n      match (t1.desc, t2.desc) with\n      | Tvar _, _ | _, Tvar _ -> ()\n      | Tconstr (p1, [], _), Tconstr (p2, [], _) when Path.same p1 p2 -> ()\n      | _ -> (\n        let t1' = expand_head_opt env t1 in\n        let t2' = expand_head_opt env t2 in\n        (* Expansion may have changed the representative of the types... *)\n        let t1' = repr t1' and t2' = repr t2' in\n        if t1' == t2' then ()\n        else\n          try TypePairs.find type_pairs (t1', t2')\n          with Not_found -> (\n            TypePairs.add type_pairs (t1', t2') ();\n            match (t1'.desc, t2'.desc) with\n            | Tvar _, Tvar _ -> assert false\n            | Tarrow (l1, t1, u1, _), Tarrow (l2, t2, u2, _)\n              when Asttypes.same_arg_label l1 l2\n                   || not (is_optional l1 || is_optional l2) ->\n              mcomp type_pairs env t1 t2;\n              mcomp type_pairs env u1 u2\n            | Ttuple tl1, Ttuple tl2 -> mcomp_list type_pairs env tl1 tl2\n            | Tconstr (p1, tl1, _), Tconstr (p2, tl2, _) ->\n              mcomp_type_decl type_pairs env p1 p2 tl1 tl2\n            | Tconstr (p, _, _), _ | _, Tconstr (p, _, _) -> (\n              try\n                let decl = Env.find_type p env in\n                if non_aliasable p decl || is_datatype decl then\n                  raise (Unify [])\n              with Not_found -> ())\n            (*\n        | (Tpackage (p1, n1, tl1), Tpackage (p2, n2, tl2)) when n1 = n2 ->\n            mcomp_list type_pairs env tl1 tl2\n        *)\n            | Tpackage _, Tpackage _ -> ()\n            | Tvariant row1, Tvariant row2 -> mcomp_row type_pairs env row1 
row2\n            | Tobject (fi1, _), Tobject (fi2, _) ->\n              mcomp_fields type_pairs env fi1 fi2\n            | Tfield _, Tfield _ ->\n              (* Actually unused *)\n              mcomp_fields type_pairs env t1' t2'\n            | Tnil, Tnil -> ()\n            | Tpoly (t1, []), Tpoly (t2, []) -> mcomp type_pairs env t1 t2\n            | Tpoly (t1, tl1), Tpoly (t2, tl2) ->\n              enter_poly env univar_pairs t1 tl1 t2 tl2 (mcomp type_pairs env)\n            | Tunivar _, Tunivar _ -> unify_univar t1' t2' !univar_pairs\n            | _, _ -> raise (Unify [])))\n\nand mcomp_list type_pairs env tl1 tl2 =\n  if List.length tl1 <> List.length tl2 then raise (Unify []);\n  List.iter2 (mcomp type_pairs env) tl1 tl2\n\nand mcomp_fields type_pairs env ty1 ty2 =\n  if not (concrete_object ty1 && concrete_object ty2) then assert false;\n  let fields2, rest2 = flatten_fields ty2 in\n  let fields1, rest1 = flatten_fields ty1 in\n  let pairs, miss1, miss2 = associate_fields fields1 fields2 in\n  let has_present =\n    List.exists (fun (_, k, _) -> field_kind_repr k = Fpresent)\n  in\n  mcomp type_pairs env rest1 rest2;\n  if\n    (has_present miss1 && (object_row ty2).desc = Tnil)\n    || (has_present miss2 && (object_row ty1).desc = Tnil)\n  then raise (Unify []);\n  List.iter\n    (function\n      | _n, k1, t1, k2, t2 ->\n        mcomp_kind k1 k2;\n        mcomp type_pairs env t1 t2)\n    pairs\n\nand mcomp_kind k1 k2 =\n  let k1 = field_kind_repr k1 in\n  let k2 = field_kind_repr k2 in\n  match (k1, k2) with\n  | Fpresent, Fabsent | Fabsent, Fpresent -> raise (Unify [])\n  | _ -> ()\n\nand mcomp_row type_pairs env row1 row2 =\n  let row1 = row_repr row1 and row2 = row_repr row2 in\n  let r1, r2, pairs = merge_row_fields row1.row_fields row2.row_fields in\n  let cannot_erase (_, f) =\n    match row_field_repr f with\n    | Rpresent _ -> true\n    | Rabsent | Reither _ -> false\n  in\n  if\n    (row1.row_closed && List.exists cannot_erase r2)\n    || 
(row2.row_closed && List.exists cannot_erase r1)\n  then raise (Unify []);\n  List.iter\n    (fun (_, f1, f2) ->\n      match (row_field_repr f1, row_field_repr f2) with\n      | Rpresent None, (Rpresent (Some _) | Reither (_, _ :: _, _, _) | Rabsent)\n      | Rpresent (Some _), (Rpresent None | Reither (true, _, _, _) | Rabsent)\n      | (Reither (_, _ :: _, _, _) | Rabsent), Rpresent None\n      | (Reither (true, _, _, _) | Rabsent), Rpresent (Some _) ->\n        raise (Unify [])\n      | Rpresent (Some t1), Rpresent (Some t2) -> mcomp type_pairs env t1 t2\n      | Rpresent (Some t1), Reither (false, tl2, _, _) ->\n        List.iter (mcomp type_pairs env t1) tl2\n      | Reither (false, tl1, _, _), Rpresent (Some t2) ->\n        List.iter (mcomp type_pairs env t2) tl1\n      | _ -> ())\n    pairs\n\nand mcomp_type_decl type_pairs env p1 p2 tl1 tl2 =\n  try\n    let decl = Env.find_type p1 env in\n    let decl' = Env.find_type p2 env in\n    if compatible_paths p1 p2 then\n      let inj =\n        try List.map Variance.(mem Inj) (Env.find_type p1 env).type_variance\n        with Not_found -> List.map (fun _ -> false) tl1\n      in\n      List.iter2\n        (fun i (t1, t2) -> if i then mcomp type_pairs env t1 t2)\n        inj (List.combine tl1 tl2)\n    else if non_aliasable p1 decl && non_aliasable p2 decl' then\n      raise (Unify [])\n    else\n      match (decl.type_kind, decl'.type_kind) with\n      | Type_record (lst, r), Type_record (lst', r')\n        when Types.same_record_representation r r' ->\n        mcomp_list type_pairs env tl1 tl2;\n        mcomp_record_description type_pairs env lst lst'\n      | Type_variant v1, Type_variant v2 ->\n        mcomp_list type_pairs env tl1 tl2;\n        mcomp_variant_description type_pairs env v1 v2\n      | Type_open, Type_open -> mcomp_list type_pairs env tl1 tl2\n      | Type_abstract, Type_abstract -> ()\n      | Type_abstract, _ when not (non_aliasable p1 decl) -> ()\n      | _, Type_abstract when not 
(non_aliasable p2 decl') -> ()\n      | _ -> raise (Unify [])\n  with Not_found -> ()\n\nand mcomp_type_option type_pairs env t t' =\n  match (t, t') with\n  | None, None -> ()\n  | Some t, Some t' -> mcomp type_pairs env t t'\n  | _ -> raise (Unify [])\n\nand mcomp_variant_description type_pairs env xs ys =\n  let rec iter x y =\n    match (x, y) with\n    | c1 :: xs, c2 :: ys ->\n      mcomp_type_option type_pairs env c1.cd_res c2.cd_res;\n      (match (c1.cd_args, c2.cd_args) with\n      | Cstr_tuple l1, Cstr_tuple l2 -> mcomp_list type_pairs env l1 l2\n      | Cstr_record l1, Cstr_record l2 ->\n        mcomp_record_description type_pairs env l1 l2\n      | _ -> raise (Unify []));\n      if Ident.name c1.cd_id = Ident.name c2.cd_id then iter xs ys\n      else raise (Unify [])\n    | [], [] -> ()\n    | _ -> raise (Unify [])\n  in\n  iter xs ys\n\nand mcomp_record_description type_pairs env =\n  let rec iter x y =\n    match (x, y) with\n    | l1 :: xs, l2 :: ys ->\n      mcomp type_pairs env l1.ld_type l2.ld_type;\n      if\n        Ident.name l1.ld_id = Ident.name l2.ld_id\n        && l1.ld_mutable = l2.ld_mutable\n      then iter xs ys\n      else raise (Unify [])\n    | [], [] -> ()\n    | _ -> raise (Unify [])\n  in\n  iter\n\nlet mcomp env t1 t2 = mcomp (TypePairs.create 4) env t1 t2\n\n(* Real unification *)\n\nlet find_newtype_level env path =\n  try\n    match (Env.find_type path env).type_newtype_level with\n    | Some x -> x\n    | None -> raise Not_found\n  with Not_found ->\n    let lev = Path.binding_time path in\n    (lev, lev)\n\nlet add_gadt_equation env source destination =\n  if local_non_recursive_abbrev !env source destination then (\n    let destination = duplicate_type destination in\n    let source_lev = find_newtype_level !env source in\n    let decl = new_declaration (Some source_lev) (Some destination) in\n    let newtype_level = get_newtype_level () in\n    env := Env.add_local_constraint source decl newtype_level !env;\n    
cleanup_abbrev ())\n\nlet unify_eq_set = TypePairs.create 11\n\nlet order_type_pair t1 t2 = if t1.id <= t2.id then (t1, t2) else (t2, t1)\n\nlet add_type_equality t1 t2 =\n  TypePairs.add unify_eq_set (order_type_pair t1 t2) ()\n\nlet eq_package_path env p1 p2 =\n  Path.same p1 p2\n  || Path.same (normalize_package_path env p1) (normalize_package_path env p2)\n\nlet nondep_type' = ref (fun _ _ _ -> assert false)\nlet package_subtype = ref (fun _ _ _ _ _ _ _ -> assert false)\n\nlet rec concat_longident lid1 =\n  let open Longident in\n  function\n  | Lident s -> Ldot (lid1, s)\n  | Ldot (lid2, s) -> Ldot (concat_longident lid1 lid2, s)\n  | Lapply (lid2, lid) -> Lapply (concat_longident lid1 lid2, lid)\n\nlet nondep_instance env level id ty =\n  let ty = !nondep_type' env id ty in\n  if level = generic_level then duplicate_type ty\n  else\n    let old = !current_level in\n    current_level := level;\n    let ty = instance env ty in\n    current_level := old;\n    ty\n\n(* Find the type paths nl1 in the module type mty2, and add them to the\n   list (nl2, tl2). 
raise Not_found if impossible *)\nlet complete_type_list ?(allow_absent = false) env nl1 lv2 mty2 nl2 tl2 =\n  let id2 = Ident.create \"Pkg\" in\n  let env' = Env.add_module id2 mty2 env in\n  let rec complete nl1 ntl2 =\n    match (nl1, ntl2) with\n    | [], _ -> ntl2\n    | n :: nl, ((n2, _) as nt2) :: ntl' when Longident.cmp n n2 >= 0 ->\n      nt2 :: complete (if Longident.cmp n n2 = 0 then nl else nl1) ntl'\n    | n :: nl, _ -> (\n      try\n        let path =\n          Env.lookup_type (concat_longident (Longident.Lident \"Pkg\") n) env'\n        in\n        match Env.find_type path env' with\n        | {\n         type_arity = 0;\n         type_kind = Type_abstract;\n         type_private = Public;\n         type_manifest = Some t2;\n        } ->\n          (n, nondep_instance env' lv2 id2 t2) :: complete nl ntl2\n        | {\n         type_arity = 0;\n         type_kind = Type_abstract;\n         type_private = Public;\n         type_manifest = None;\n        }\n          when allow_absent ->\n          complete nl ntl2\n        | _ -> raise Exit\n      with\n      | Not_found when allow_absent -> complete nl ntl2\n      | Exit -> raise Not_found)\n  in\n  complete nl1 (List.combine nl2 tl2)\n\n(* raise Not_found rather than Unify if the module types are incompatible *)\nlet unify_package env unify_list lv1 p1 n1 tl1 lv2 p2 n2 tl2 =\n  let ntl2 = complete_type_list env n1 lv2 (Mty_ident p2) n2 tl2\n  and ntl1 = complete_type_list env n2 lv1 (Mty_ident p1) n1 tl1 in\n  unify_list (List.map snd ntl1) (List.map snd ntl2);\n  if\n    eq_package_path env p1 p2\n    || !package_subtype env p1 n1 tl1 p2 n2 tl2\n       && !package_subtype env p2 n2 tl2 p1 n1 tl1\n  then ()\n  else raise Not_found\n\n(* force unification in Reither when one side has a non-conjunctive type *)\nlet rigid_variants = ref false\n\n(* drop not force unification in Reither, even in fixed case\n   (not sound, only use it when checking exhaustiveness) *)\nlet passive_variants = ref 
false\nlet with_passive_variants f x =\n  if !passive_variants then f x\n  else\n    match\n      passive_variants := true;\n      f x\n    with\n    | r ->\n      passive_variants := false;\n      r\n    | exception e ->\n      passive_variants := false;\n      raise e\n\nlet unify_eq t1 t2 =\n  t1 == t2\n  ||\n  match !umode with\n  | Expression -> false\n  | Pattern -> (\n    try\n      TypePairs.find unify_eq_set (order_type_pair t1 t2);\n      true\n    with Not_found -> false)\n\nlet unify1_var env t1 t2 =\n  assert (is_Tvar t1);\n  occur env t1 t2;\n  occur_univar env t2;\n  let d1 = t1.desc in\n  link_type t1 t2;\n  try update_level env t1.level t2\n  with Unify _ as e ->\n    t1.desc <- d1;\n    raise e\n\nlet rec unify (env : Env.t ref) t1 t2 =\n  (* First step: special cases (optimizations) *)\n  if t1 == t2 then ()\n  else\n    let t1 = repr t1 in\n    let t2 = repr t2 in\n    if unify_eq t1 t2 then ()\n    else\n      let reset_tracing = check_trace_gadt_instances !env in\n\n      try\n        type_changed := true;\n        (match (t1.desc, t2.desc) with\n        | Tvar _, Tconstr _ when deep_occur t1 t2 -> unify2 env t1 t2\n        | Tconstr _, Tvar _ when deep_occur t2 t1 -> unify2 env t1 t2\n        | Tvar _, _ -> unify1_var !env t1 t2\n        | _, Tvar _ -> unify1_var !env t2 t1\n        | Tunivar _, Tunivar _ ->\n          unify_univar t1 t2 !univar_pairs;\n          update_level !env t1.level t2;\n          link_type t1 t2\n        | Tconstr (p1, [], a1), Tconstr (p2, [], a2)\n          when Path.same p1 p2 (* && actual_mode !env = Old *)\n               (* This optimization assumes that t1 does not expand to t2\n                  (and conversely), so we fall back to the general case\n                  when any of the types has a cached expansion. 
*)\n               && not\n                    (has_cached_expansion p1 !a1 || has_cached_expansion p2 !a2)\n          ->\n          update_level !env t1.level t2;\n          link_type t1 t2\n        | Tconstr (p1, [], _), Tconstr (p2, [], _)\n          when Env.has_local_constraints !env\n               && is_newtype !env p1 && is_newtype !env p2 -> (\n          (* Do not use local constraints more than necessary *)\n          try\n            let[@local] ( < ) ((a : int), (b : int)) (c, d) =\n              a < c || (a = c && b < d)\n            in\n            if find_newtype_level !env p1 < find_newtype_level !env p2 then\n              unify env t1 (try_expand_once !env t2)\n            else unify env (try_expand_once !env t1) t2\n          with Cannot_expand -> unify2 env t1 t2)\n        | _ -> unify2 env t1 t2);\n        reset_trace_gadt_instances reset_tracing\n      with Unify trace ->\n        reset_trace_gadt_instances reset_tracing;\n        raise (Unify ((t1, t2) :: trace))\n\nand unify2 env t1 t2 =\n  (* Second step: expansion of abbreviations *)\n  (* Expansion may change the representative of the types. 
*)\n  ignore (expand_head_unif !env t1);\n  ignore (expand_head_unif !env t2);\n  let t1' = expand_head_unif !env t1 in\n  let t2' = expand_head_unif !env t2 in\n  let lv = Ext_pervasives.min_int t1'.level t2'.level in\n  update_level !env lv t2;\n  update_level !env lv t1;\n  if unify_eq t1' t2' then ()\n  else\n    let t1 = repr t1 and t2 = repr t2 in\n    (if !trace_gadt_instances then\n       (* All types in chains already have the same ambiguity levels *)\n       let ilevel t =\n         match Env.gadt_instance_level !env t with\n         | None -> 0\n         | Some lv -> lv\n       in\n       let lv1 = ilevel t1 and lv2 = ilevel t2 in\n       if lv1 > lv2 then Env.add_gadt_instance_chain !env lv1 t2\n       else if lv2 > lv1 then Env.add_gadt_instance_chain !env lv2 t1);\n    if unify_eq t1 t1' || not (unify_eq t2 t2') then unify3 env t1 t1' t2 t2'\n    else\n      try unify3 env t2 t2' t1 t1'\n      with Unify trace ->\n        raise (Unify (List.map (fun (x, y) -> (y, x)) trace))\n\nand unify3 env t1 t1' t2 t2' =\n  (* Third step: truly unification *)\n  (* Assumes either [t1 == t1'] or [t2 != t2'] *)\n  let d1 = t1'.desc and d2 = t2'.desc in\n  let create_recursion = t2 != t2' && deep_occur t1' t2 in\n\n  match (d1, d2) with\n  (* handle vars and univars specially *)\n  | Tunivar _, Tunivar _ ->\n    unify_univar t1' t2' !univar_pairs;\n    link_type t1' t2'\n  | Tvar _, _ ->\n    occur !env t1' t2;\n    occur_univar !env t2;\n    link_type t1' t2\n  | _, Tvar _ ->\n    occur !env t2' t1;\n    occur_univar !env t1;\n    link_type t2' t1\n  | Tfield _, Tfield _ ->\n    (* special case for GADTs *)\n    unify_fields env t1' t2'\n  | Tconstr (Pident {name = \"function$\"}, [t_fun; _], _), Tarrow _\n    when !Config.uncurried = Uncurried ->\n    (* subtype: an uncurried function is cast to a curried one *)\n    unify2 env t_fun t2\n  | _ -> (\n    (match !umode with\n    | Expression ->\n      occur !env t1' t2';\n      link_type t1' t2\n    | Pattern -> 
add_type_equality t1' t2');\n    try\n      (match (d1, d2) with\n      | Tarrow (l1, t1, u1, c1), Tarrow (l2, t2, u2, c2)\n        when Asttypes.same_arg_label l1 l2\n             || (!umode = Pattern && not (is_optional l1 || is_optional l2))\n        -> (\n        unify env t1 t2;\n        unify env u1 u2;\n        match (commu_repr c1, commu_repr c2) with\n        | Clink r, c2 -> set_commu r c2\n        | c1, Clink r -> set_commu r c1\n        | _ -> ())\n      | Ttuple tl1, Ttuple tl2 -> unify_list env tl1 tl2\n      | Tconstr (p1, tl1, _), Tconstr (p2, tl2, _) when Path.same p1 p2 ->\n        if !umode = Expression || not !generate_equations then\n          unify_list env tl1 tl2\n        else if !assume_injective then\n          set_mode_pattern ~generate:true ~injective:false (fun () ->\n              unify_list env tl1 tl2)\n        else if\n          in_current_module p1 (* || in_pervasives p1 *)\n          || List.exists (expands_to_datatype !env) [t1'; t1; t2]\n        then unify_list env tl1 tl2\n        else\n          let inj =\n            try\n              List.map Variance.(mem Inj) (Env.find_type p1 !env).type_variance\n            with Not_found -> List.map (fun _ -> false) tl1\n          in\n          List.iter2\n            (fun i (t1, t2) ->\n              if i then unify env t1 t2\n              else\n                set_mode_pattern ~generate:false ~injective:false (fun () ->\n                    let snap = snapshot () in\n                    try unify env t1 t2\n                    with Unify _ ->\n                      backtrack snap;\n                      reify env t1;\n                      reify env t2))\n            inj (List.combine tl1 tl2)\n      | Tconstr (path, [], _), Tconstr (path', [], _)\n        when is_instantiable !env path && is_instantiable !env path'\n             && !generate_equations ->\n        let[@local] ( > ) ((a : int), (b : int)) (c, d) =\n          a > c || (a = c && b > d)\n        in\n        let source, 
destination =\n          if find_newtype_level !env path > find_newtype_level !env path' then\n            (path, t2')\n          else (path', t1')\n        in\n        add_gadt_equation env source destination\n      | Tconstr (path, [], _), _\n        when is_instantiable !env path && !generate_equations ->\n        reify env t2';\n        add_gadt_equation env path t2'\n      | _, Tconstr (path, [], _)\n        when is_instantiable !env path && !generate_equations ->\n        reify env t1';\n        add_gadt_equation env path t1'\n      | (Tconstr (_, _, _), _ | _, Tconstr (_, _, _)) when !umode = Pattern ->\n        reify env t1';\n        reify env t2';\n        if !generate_equations then mcomp !env t1' t2'\n      | Tobject (fi1, nm1), Tobject (fi2, _) -> (\n        unify_fields env fi1 fi2;\n        (* Type [t2'] may have been instantiated by [unify_fields] *)\n        (* XXX One should do some kind of unification... *)\n        match (repr t2').desc with\n        | Tobject (_, {contents = Some (_, va :: _)})\n          when match (repr va).desc with\n               | Tvar _ | Tunivar _ | Tnil -> true\n               | _ -> false ->\n          ()\n        | Tobject (_, nm2) -> set_name nm2 !nm1\n        | _ -> ())\n      | Tvariant row1, Tvariant row2 -> (\n        if !umode = Expression then unify_row env row1 row2\n        else\n          let snap = snapshot () in\n          try unify_row env row1 row2\n          with Unify _ ->\n            backtrack snap;\n            reify env t1';\n            reify env t2';\n            if !generate_equations then mcomp !env t1' t2')\n      | Tfield (f, kind, _, rem), Tnil | Tnil, Tfield (f, kind, _, rem) -> (\n        match field_kind_repr kind with\n        | Fvar r when f <> dummy_method ->\n          set_kind r Fabsent;\n          if d2 = Tnil then unify env rem t2'\n          else unify env (newty2 rem.level Tnil) rem\n        | _ -> raise (Unify []))\n      | Tnil, Tnil -> ()\n      | Tpoly (t1, []), Tpoly (t2, 
[]) -> unify env t1 t2\n      | Tpoly (t1, tl1), Tpoly (t2, tl2) ->\n        enter_poly !env univar_pairs t1 tl1 t2 tl2 (unify env)\n      | Tpackage (p1, n1, tl1), Tpackage (p2, n2, tl2) -> (\n        try\n          unify_package !env (unify_list env) t1.level p1 n1 tl1 t2.level p2 n2\n            tl2\n        with Not_found ->\n          if !umode = Expression then raise (Unify []);\n          List.iter (reify env) (tl1 @ tl2)\n          (* if !generate_equations then List.iter2 (mcomp !env) tl1 tl2 *))\n      | _, _ -> raise (Unify []));\n      (* XXX Commentaires + changer \"create_recursion\"\n         ||| Comments + change \"create_recursion\" *)\n      if create_recursion then\n        match t2.desc with\n        | Tconstr (p, tl, abbrev) ->\n          forget_abbrev abbrev p;\n          let t2'' = expand_head_unif !env t2 in\n          if not (closed_parameterized_type tl t2'') then\n            link_type (repr t2) (repr t2')\n        | _ -> () (* t2 has already been expanded by update_level *)\n    with Unify trace ->\n      t1'.desc <- d1;\n      raise (Unify trace))\n\nand unify_list env tl1 tl2 =\n  if List.length tl1 <> List.length tl2 then raise (Unify []);\n  List.iter2 (unify env) tl1 tl2\n\n(* Build a fresh row variable for unification *)\nand make_rowvar level use1 rest1 use2 rest2 =\n  let set_name ty name =\n    match ty.desc with\n    | Tvar None ->\n      log_type ty;\n      ty.desc <- Tvar name\n    | _ -> ()\n  in\n  let name =\n    match (rest1.desc, rest2.desc) with\n    | Tvar (Some _ as name1), Tvar (Some _ as name2) ->\n      if rest1.level <= rest2.level then name1 else name2\n    | Tvar (Some _ as name), _ ->\n      if use2 then set_name rest2 name;\n      name\n    | _, Tvar (Some _ as name) ->\n      if use1 then set_name rest2 name;\n      name\n    | _ -> None\n  in\n  if use1 then rest1 else if use2 then rest2 else newvar2 ?name level\n\nand unify_fields env (ty1 : Types.type_expr) (ty2 : Types.type_expr) =\n  (* Optimization *)\n 
 let fields1, rest1 = flatten_fields ty1\n  and fields2, rest2 = flatten_fields ty2 in\n  let pairs, miss1, miss2 = associate_fields fields1 fields2 in\n  let l1 = (repr ty1).level and l2 = (repr ty2).level in\n  let va =\n    make_rowvar\n      (Ext_pervasives.min_int l1 l2)\n      (miss2 = []) rest1 (miss1 = []) rest2\n  in\n  let d1 = rest1.desc and d2 = rest2.desc in\n  try\n    unify env (build_fields l1 miss1 va) rest2;\n    unify env rest1 (build_fields l2 miss2 va);\n    List.iter\n      (fun (n, k1, t1, k2, t2) ->\n        unify_kind k1 k2;\n        try\n          if !trace_gadt_instances then update_level !env va.level t1;\n          unify env t1 t2\n        with Unify trace ->\n          raise\n            (Unify\n               (( newty (Tfield (n, k1, t1, newty Tnil)),\n                  newty (Tfield (n, k2, t2, newty Tnil)) )\n               :: trace)))\n      pairs\n  with exn ->\n    log_type rest1;\n    rest1.desc <- d1;\n    log_type rest2;\n    rest2.desc <- d2;\n    raise exn\n\nand unify_kind k1 k2 =\n  let k1 = field_kind_repr k1 in\n  let k2 = field_kind_repr k2 in\n  if k1 == k2 then ()\n  else\n    match (k1, k2) with\n    | Fvar r, (Fvar _ | Fpresent) -> set_kind r k2\n    | Fpresent, Fvar r -> set_kind r k1\n    | Fpresent, Fpresent -> ()\n    | _ -> assert false\n\nand unify_row env row1 row2 =\n  let row1 = row_repr row1 and row2 = row_repr row2 in\n  let rm1 = row_more row1 and rm2 = row_more row2 in\n  if unify_eq rm1 rm2 then ()\n  else\n    let r1, r2, pairs = merge_row_fields row1.row_fields row2.row_fields in\n    if (not !Config.bs_only) && r1 <> [] && r2 <> [] then (\n      (* pairs are the intersection, r1 , r2 should be disjoint *)\n      let ht = Hashtbl.create (List.length r1) in\n      List.iter (fun (l, _) -> Hashtbl.add ht (hash_variant l) l) r1;\n      List.iter\n        (fun (l, _) ->\n          try raise (Tags (l, Hashtbl.find ht (hash_variant l)))\n          with Not_found -> ())\n        r2);\n    let fixed1 = 
row_fixed row1 and fixed2 = row_fixed row2 in\n    let more =\n      if fixed1 then rm1\n      else if fixed2 then rm2\n      else newty2 (Ext_pervasives.min_int rm1.level rm2.level) (Tvar None)\n    in\n    let fixed = fixed1 || fixed2\n    and closed = row1.row_closed || row2.row_closed in\n    let keep switch =\n      List.for_all\n        (fun (_, f1, f2) ->\n          let f1, f2 = switch f1 f2 in\n          row_field_repr f1 = Rabsent || row_field_repr f2 <> Rabsent)\n        pairs\n    in\n    let empty fields =\n      List.for_all (fun (_, f) -> row_field_repr f = Rabsent) fields\n    in\n    (* Check whether we are going to build an empty type *)\n    if\n      closed\n      && (empty r1 || row2.row_closed)\n      && (empty r2 || row1.row_closed)\n      && List.for_all\n           (fun (_, f1, f2) ->\n             row_field_repr f1 = Rabsent || row_field_repr f2 = Rabsent)\n           pairs\n    then raise (Unify [(mkvariant [] true, mkvariant [] true)]);\n    let name =\n      if\n        row1.row_name <> None\n        && (row1.row_closed || empty r2)\n        && ((not row2.row_closed) || (keep (fun f1 f2 -> (f1, f2)) && empty r1))\n      then row1.row_name\n      else if\n        row2.row_name <> None\n        && (row2.row_closed || empty r1)\n        && ((not row1.row_closed) || (keep (fun f1 f2 -> (f2, f1)) && empty r2))\n      then row2.row_name\n      else None\n    in\n    let row0 =\n      {\n        row_fields = [];\n        row_more = more;\n        row_bound = ();\n        row_closed = closed;\n        row_fixed = fixed;\n        row_name = name;\n      }\n    in\n    let set_more row rest =\n      let rest =\n        if closed then filter_row_fields row.row_closed rest else rest\n      in\n      (if\n         (rest <> [] && (row.row_closed || row_fixed row))\n         || (closed && row_fixed row && not row.row_closed)\n       then\n         let t1 = mkvariant [] true and t2 = mkvariant rest false in\n         raise (Unify [(if row == row1 then 
(t1, t2) else (t2, t1))]));\n      (* The following test is not principal... should rather use Tnil *)\n      let rm = row_more row in\n      (*if !trace_gadt_instances && rm.desc = Tnil then () else*)\n      if !trace_gadt_instances then\n        update_level !env rm.level (newgenty (Tvariant row));\n      if row_fixed row then\n        if more == rm then ()\n        else if is_Tvar rm then link_type rm more\n        else unify env rm more\n      else\n        let ty = newgenty (Tvariant {row0 with row_fields = rest}) in\n        update_level !env rm.level ty;\n        link_type rm ty\n    in\n    let md1 = rm1.desc and md2 = rm2.desc in\n    try\n      set_more row2 r1;\n      set_more row1 r2;\n      List.iter\n        (fun (l, f1, f2) ->\n          try unify_row_field env fixed1 fixed2 more l f1 f2\n          with Unify trace ->\n            raise\n              (Unify\n                 ((mkvariant [(l, f1)] true, mkvariant [(l, f2)] true) :: trace)))\n        pairs;\n      if static_row row1 then\n        let rm = row_more row1 in\n        if is_Tvar rm then link_type rm (newty2 rm.level Tnil)\n    with exn ->\n      log_type rm1;\n      rm1.desc <- md1;\n      log_type rm2;\n      rm2.desc <- md2;\n      raise exn\n\nand unify_row_field env fixed1 fixed2 more l f1 f2 =\n  let f1 = row_field_repr f1 and f2 = row_field_repr f2 in\n  if f1 == f2 then ()\n  else\n    match (f1, f2) with\n    | Rpresent (Some t1), Rpresent (Some t2) -> unify env t1 t2\n    | Rpresent None, Rpresent None -> ()\n    | Reither (c1, tl1, m1, e1), Reither (c2, tl2, m2, e2) ->\n      if e1 == e2 then ()\n      else if\n        (fixed1 || fixed2)\n        && (not (c1 || c2))\n        && List.length tl1 = List.length tl2\n      then (\n        (* PR#7496 *)\n        let f = Reither (c1 || c2, [], m1 || m2, ref None) in\n        set_row_field e1 f;\n        set_row_field e2 f;\n        List.iter2 (unify env) tl1 tl2)\n      else\n        let redo =\n          (not !passive_variants)\n      
    && (m1 || m2 || fixed1 || fixed2\n             || (!rigid_variants && (List.length tl1 = 1 || List.length tl2 = 1))\n             )\n          &&\n          match tl1 @ tl2 with\n          | [] -> false\n          | t1 :: tl ->\n            if c1 || c2 then raise (Unify []);\n            List.iter (unify env t1) tl;\n            !e1 <> None || !e2 <> None\n        in\n        if redo then unify_row_field env fixed1 fixed2 more l f1 f2\n        else\n          let tl1 = List.map repr tl1 and tl2 = List.map repr tl2 in\n          let rec remq tl = function\n            | [] -> []\n            | ty :: tl' ->\n              if List.memq ty tl then remq tl tl' else ty :: remq tl tl'\n          in\n          let tl2' = remq tl2 tl1 and tl1' = remq tl1 tl2 in\n          (* PR#6744 *)\n          let split_univars =\n            List.partition (fun ty ->\n                try\n                  occur_univar !env ty;\n                  true\n                with Unify _ -> false)\n          in\n          let tl1', tlu1 = split_univars tl1'\n          and tl2', tlu2 = split_univars tl2' in\n          (match (tlu1, tlu2) with\n          | [], [] -> ()\n          | tu1 :: tlu1, _ :: _ ->\n            (* Attempt to merge all the types containing univars *)\n            if not !passive_variants then List.iter (unify env tu1) (tlu1 @ tlu2)\n          | tu :: _, [] | [], tu :: _ -> occur_univar !env tu);\n          (* Is this handling of levels really principal? 
*)\n          List.iter (update_level !env (repr more).level) (tl1' @ tl2');\n          let e = ref None in\n          let f1' = Reither (c1 || c2, tl1', m1 || m2, e)\n          and f2' = Reither (c1 || c2, tl2', m1 || m2, e) in\n          set_row_field e1 f1';\n          set_row_field e2 f2'\n    | Reither (_, _, false, e1), Rabsent when not fixed1 -> set_row_field e1 f2\n    | Rabsent, Reither (_, _, false, e2) when not fixed2 -> set_row_field e2 f1\n    | Rabsent, Rabsent -> ()\n    | Reither (false, tl, _, e1), Rpresent (Some t2) when not fixed1 -> (\n      set_row_field e1 f2;\n      update_level !env (repr more).level t2;\n      try List.iter (fun t1 -> unify env t1 t2) tl\n      with exn ->\n        e1 := None;\n        raise exn)\n    | Rpresent (Some t1), Reither (false, tl, _, e2) when not fixed2 -> (\n      set_row_field e2 f1;\n      update_level !env (repr more).level t1;\n      try List.iter (unify env t1) tl\n      with exn ->\n        e2 := None;\n        raise exn)\n    | Reither (true, [], _, e1), Rpresent None when not fixed1 ->\n      set_row_field e1 f2\n    | Rpresent None, Reither (true, [], _, e2) when not fixed2 ->\n      set_row_field e2 f1\n    | _ -> raise (Unify [])\n\nlet unify env ty1 ty2 =\n  let snap = Btype.snapshot () in\n  try unify env ty1 ty2 with\n  | Unify trace ->\n    undo_compress snap;\n    raise (Unify (expand_trace !env trace))\n  | Recursive_abbrev ->\n    undo_compress snap;\n    raise (Unification_recursive_abbrev (expand_trace !env [(ty1, ty2)]))\n\nlet unify_gadt ~newtype_level:lev (env : Env.t ref) ty1 ty2 =\n  try\n    univar_pairs := [];\n    newtype_level := Some lev;\n    set_mode_pattern ~generate:true ~injective:true (fun () ->\n        unify env ty1 ty2);\n    newtype_level := None;\n    TypePairs.clear unify_eq_set\n  with e ->\n    newtype_level := None;\n    TypePairs.clear unify_eq_set;\n    raise e\n\nlet unify_var env t1 t2 =\n  let t1 = repr t1 and t2 = repr t2 in\n  if t1 == t2 then ()\n  else\n    
match (t1.desc, t2.desc) with\n    | Tvar _, Tconstr _ when deep_occur t1 t2 -> unify (ref env) t1 t2\n    | Tvar _, _ -> (\n      let reset_tracing = check_trace_gadt_instances env in\n      try\n        occur env t1 t2;\n        update_level env t1.level t2;\n        link_type t1 t2;\n        reset_trace_gadt_instances reset_tracing\n      with Unify trace ->\n        reset_trace_gadt_instances reset_tracing;\n        let expanded_trace = expand_trace env ((t1, t2) :: trace) in\n        raise (Unify expanded_trace))\n    | _ -> unify (ref env) t1 t2\n\nlet _ = unify' := unify_var\n\nlet unify_pairs env ty1 ty2 pairs =\n  univar_pairs := pairs;\n  unify env ty1 ty2\n\nlet unify env ty1 ty2 = unify_pairs (ref env) ty1 ty2 []\n\n(**** Special cases of unification ****)\n\nlet expand_head_trace env t =\n  let reset_tracing = check_trace_gadt_instances env in\n  let t = expand_head_unif env t in\n  reset_trace_gadt_instances reset_tracing;\n  t\n\n(*\n   Unify [t] and [l:'a -> 'b]. Return ['a] and ['b].\n   In label mode, label mismatch is accepted when\n   (1) the requested label is \"\"\n   (2) the original label is not optional\n*)\n\nlet filter_arrow env t l =\n  let t = expand_head_trace env t in\n  match t.desc with\n  | Tvar _ ->\n    let lv = t.level in\n    let t1 = newvar2 lv and t2 = newvar2 lv in\n    let t' = newty2 lv (Tarrow (l, t1, t2, Cok)) in\n    link_type t t';\n    (t1, t2)\n  | Tarrow (l', t1, t2, _) when Asttypes.same_arg_label l l' -> (t1, t2)\n  | _ -> raise (Unify [])\n\n(* Used by [filter_method]. 
*)\nlet rec filter_method_field env name priv ty =\n  let ty = expand_head_trace env ty in\n  match ty.desc with\n  | Tvar _ ->\n    let level = ty.level in\n    let ty1 = newvar2 level and ty2 = newvar2 level in\n    let ty' =\n      newty2 level\n        (Tfield\n           ( name,\n             (match priv with\n             | Private -> Fvar (ref None)\n             | Public -> Fpresent),\n             ty1,\n             ty2 ))\n    in\n    link_type ty ty';\n    ty1\n  | Tfield (n, kind, ty1, ty2) ->\n    let kind = field_kind_repr kind in\n    if n = name && kind <> Fabsent then (\n      if priv = Public then unify_kind kind Fpresent;\n      ty1)\n    else filter_method_field env name priv ty2\n  | _ -> raise (Unify [])\n\n(* Unify [ty] and [< name : 'a; .. >]. Return ['a]. *)\nlet filter_method env name priv ty =\n  let ty = expand_head_trace env ty in\n  match ty.desc with\n  | Tvar _ ->\n    let ty1 = newvar () in\n    let ty' = newobj ty1 in\n    update_level env ty.level ty';\n    link_type ty ty';\n    filter_method_field env name priv ty1\n  | Tobject (f, _) -> filter_method_field env name priv f\n  | _ -> raise (Unify [])\n\nlet check_filter_method env name priv ty =\n  ignore (filter_method env name priv ty)\n\nlet filter_self_method env lab priv meths ty =\n  let ty' = filter_method env lab priv ty in\n  try Meths.find lab !meths\n  with Not_found ->\n    let pair = (Ident.create lab, ty') in\n    meths := Meths.add lab pair !meths;\n    pair\n\n(***********************************)\n(*  Matching between type schemes  *)\n(***********************************)\n\n(*\n   Update the level of [ty]. 
First check that the levels of generic\n   variables from the subject are not lowered.\n*)\nlet moregen_occur env level ty =\n  let rec occur ty =\n    let ty = repr ty in\n    if ty.level > level then (\n      if is_Tvar ty && ty.level >= generic_level - 1 then raise Occur;\n      ty.level <- pivot_level - ty.level;\n      match ty.desc with\n      | Tvariant row when static_row row -> iter_row occur row\n      | _ -> iter_type_expr occur ty)\n  in\n  (try\n     occur ty;\n     unmark_type ty\n   with Occur ->\n     unmark_type ty;\n     raise (Unify []));\n  (* also check for free univars *)\n  occur_univar env ty;\n  update_level env level ty\n\nlet may_instantiate inst_nongen t1 =\n  if inst_nongen then t1.level <> generic_level - 1\n  else t1.level = generic_level\n\nlet rec moregen inst_nongen type_pairs env t1 t2 =\n  if t1 == t2 then ()\n  else\n    let t1 = repr t1 in\n    let t2 = repr t2 in\n    if t1 == t2 then ()\n    else\n      try\n        match (t1.desc, t2.desc) with\n        | Tvar _, _ when may_instantiate inst_nongen t1 ->\n          moregen_occur env t1.level t2;\n          occur env t1 t2;\n          link_type t1 t2\n        | Tconstr (p1, [], _), Tconstr (p2, [], _) when Path.same p1 p2 -> ()\n        | _ -> (\n          let t1' = expand_head env t1 in\n          let t2' = expand_head env t2 in\n          (* Expansion may have changed the representative of the types... 
*)\n          let t1' = repr t1' and t2' = repr t2' in\n          if t1' == t2' then ()\n          else\n            try TypePairs.find type_pairs (t1', t2')\n            with Not_found -> (\n              TypePairs.add type_pairs (t1', t2') ();\n              match (t1'.desc, t2'.desc) with\n              | Tvar _, _ when may_instantiate inst_nongen t1' ->\n                moregen_occur env t1'.level t2;\n                link_type t1' t2\n              | Tarrow (l1, t1, u1, _), Tarrow (l2, t2, u2, _)\n                when Asttypes.same_arg_label l1 l2 ->\n                moregen inst_nongen type_pairs env t1 t2;\n                moregen inst_nongen type_pairs env u1 u2\n              | Ttuple tl1, Ttuple tl2 ->\n                moregen_list inst_nongen type_pairs env tl1 tl2\n              | Tconstr (p1, tl1, _), Tconstr (p2, tl2, _) when Path.same p1 p2\n                ->\n                moregen_list inst_nongen type_pairs env tl1 tl2\n              | Tpackage (p1, n1, tl1), Tpackage (p2, n2, tl2) -> (\n                try\n                  unify_package env\n                    (moregen_list inst_nongen type_pairs env)\n                    t1'.level p1 n1 tl1 t2'.level p2 n2 tl2\n                with Not_found -> raise (Unify []))\n              | Tvariant row1, Tvariant row2 ->\n                moregen_row inst_nongen type_pairs env row1 row2\n              | Tobject (fi1, _nm1), Tobject (fi2, _nm2) ->\n                moregen_fields inst_nongen type_pairs env fi1 fi2\n              | Tfield _, Tfield _ ->\n                (* Actually unused *)\n                moregen_fields inst_nongen type_pairs env t1' t2'\n              | Tnil, Tnil -> ()\n              | Tpoly (t1, []), Tpoly (t2, []) ->\n                moregen inst_nongen type_pairs env t1 t2\n              | Tpoly (t1, tl1), Tpoly (t2, tl2) ->\n                enter_poly env univar_pairs t1 tl1 t2 tl2\n                  (moregen inst_nongen type_pairs env)\n              | Tunivar _, Tunivar _ -> 
unify_univar t1' t2' !univar_pairs\n              | _, _ -> raise (Unify [])))\n      with Unify trace -> raise (Unify ((t1, t2) :: trace))\n\nand moregen_list inst_nongen type_pairs env tl1 tl2 =\n  if List.length tl1 <> List.length tl2 then raise (Unify []);\n  List.iter2 (moregen inst_nongen type_pairs env) tl1 tl2\n\nand moregen_fields inst_nongen type_pairs env ty1 ty2 =\n  let fields1, rest1 = flatten_fields ty1\n  and fields2, rest2 = flatten_fields ty2 in\n  let pairs, miss1, miss2 = associate_fields fields1 fields2 in\n  if miss1 <> [] then raise (Unify []);\n  moregen inst_nongen type_pairs env rest1\n    (build_fields (repr ty2).level miss2 rest2);\n  List.iter\n    (fun (n, k1, t1, k2, t2) ->\n      moregen_kind k1 k2;\n      try moregen inst_nongen type_pairs env t1 t2\n      with Unify trace ->\n        raise\n          (Unify\n             (( newty (Tfield (n, k1, t1, rest2)),\n                newty (Tfield (n, k2, t2, rest2)) )\n             :: trace)))\n    pairs\n\nand moregen_kind k1 k2 =\n  let k1 = field_kind_repr k1 in\n  let k2 = field_kind_repr k2 in\n  if k1 == k2 then ()\n  else\n    match (k1, k2) with\n    | Fvar r, (Fvar _ | Fpresent) -> set_kind r k2\n    | Fpresent, Fpresent -> ()\n    | _ -> raise (Unify [])\n\nand moregen_row inst_nongen type_pairs env row1 row2 =\n  let row1 = row_repr row1 and row2 = row_repr row2 in\n  let rm1 = repr row1.row_more and rm2 = repr row2.row_more in\n  if rm1 == rm2 then ()\n  else\n    let may_inst =\n      (is_Tvar rm1 && may_instantiate inst_nongen rm1) || rm1.desc = Tnil\n    in\n    let r1, r2, pairs = merge_row_fields row1.row_fields row2.row_fields in\n    let r1, r2 =\n      if row2.row_closed then\n        (filter_row_fields may_inst r1, filter_row_fields false r2)\n      else (r1, r2)\n    in\n    if r1 <> [] || (row1.row_closed && ((not row2.row_closed) || r2 <> [])) then\n      raise (Unify []);\n    (match (rm1.desc, rm2.desc) with\n    | Tunivar _, Tunivar _ -> unify_univar rm1 rm2 
!univar_pairs\n    | Tunivar _, _ | _, Tunivar _ -> raise (Unify [])\n    | _ when static_row row1 -> ()\n    | _ when may_inst ->\n      let ext =\n        newgenty (Tvariant {row2 with row_fields = r2; row_name = None})\n      in\n      moregen_occur env rm1.level ext;\n      link_type rm1 ext\n    | Tconstr _, Tconstr _ -> moregen inst_nongen type_pairs env rm1 rm2\n    | _ -> raise (Unify []));\n    List.iter\n      (fun (_l, f1, f2) ->\n        let f1 = row_field_repr f1 and f2 = row_field_repr f2 in\n        if f1 == f2 then ()\n        else\n          match (f1, f2) with\n          | Rpresent (Some t1), Rpresent (Some t2) ->\n            moregen inst_nongen type_pairs env t1 t2\n          | Rpresent None, Rpresent None -> ()\n          | Reither (false, tl1, _, e1), Rpresent (Some t2) when may_inst ->\n            set_row_field e1 f2;\n            List.iter (fun t1 -> moregen inst_nongen type_pairs env t1 t2) tl1\n          | Reither (c1, tl1, _, e1), Reither (c2, tl2, m2, e2) ->\n            if e1 != e2 then (\n              if c1 && not c2 then raise (Unify []);\n              set_row_field e1 (Reither (c2, [], m2, e2));\n              if List.length tl1 = List.length tl2 then\n                List.iter2 (moregen inst_nongen type_pairs env) tl1 tl2\n              else\n                match tl2 with\n                | t2 :: _ ->\n                  List.iter\n                    (fun t1 -> moregen inst_nongen type_pairs env t1 t2)\n                    tl1\n                | [] -> if tl1 <> [] then raise (Unify []))\n          | Reither (true, [], _, e1), Rpresent None when may_inst ->\n            set_row_field e1 f2\n          | Reither (_, _, _, e1), Rabsent when may_inst -> set_row_field e1 f2\n          | Rabsent, Rabsent -> ()\n          | _ -> raise (Unify []))\n      pairs\n\n(* Must empty univar_pairs first *)\nlet moregen inst_nongen type_pairs env patt subj =\n  univar_pairs := [];\n  moregen inst_nongen type_pairs env patt subj\n\n(*\n   
Non-generic variable can be instantiated only if [inst_nongen] is\n   true. So, [inst_nongen] should be set to false if the subject might\n   contain non-generic variables (and we do not want them to be\n   instantiated).\n   Usually, the subject is given by the user, and the pattern\n   is unimportant.  So, no need to propagate abbreviations.\n*)\nlet moregeneral env inst_nongen pat_sch subj_sch =\n  let old_level = !current_level in\n  current_level := generic_level - 1;\n  (*\n     Generic variables are first duplicated with [instance].  So,\n     their levels are lowered to [generic_level - 1].  The subject is\n     then copied with [duplicate_type].  That way, its levels won't be\n     changed.\n  *)\n  let subj = duplicate_type (instance env subj_sch) in\n  current_level := generic_level;\n  (* Duplicate generic variables *)\n  let patt = instance env pat_sch in\n  let res =\n    try\n      moregen inst_nongen (TypePairs.create 13) env patt subj;\n      true\n    with Unify _ -> false\n  in\n  current_level := old_level;\n  res\n\n(* Alternative approach: \"rigidify\" a type scheme,\n   and check validity after unification *)\n(* Simpler, no? 
*)\n\nlet rec rigidify_rec vars ty =\n  let ty = repr ty in\n  if ty.level >= lowest_level then (\n    ty.level <- pivot_level - ty.level;\n    match ty.desc with\n    | Tvar _ -> if not (List.memq ty !vars) then vars := ty :: !vars\n    | Tvariant row ->\n      let row = row_repr row in\n      let more = repr row.row_more in\n      (if is_Tvar more && not (row_fixed row) then\n         let more' = newty2 more.level more.desc in\n         let row' =\n           {row with row_fixed = true; row_fields = []; row_more = more'}\n         in\n         link_type more (newty2 ty.level (Tvariant row')));\n      iter_row (rigidify_rec vars) row;\n      (* only consider the row variable if the variant is not static *)\n      if not (static_row row) then rigidify_rec vars (row_more row)\n    | _ -> iter_type_expr (rigidify_rec vars) ty)\n\nlet rigidify ty =\n  let vars = ref [] in\n  rigidify_rec vars ty;\n  unmark_type ty;\n  !vars\n\nlet all_distinct_vars env vars =\n  let tyl = ref [] in\n  List.for_all\n    (fun ty ->\n      let ty = expand_head env ty in\n      if List.memq ty !tyl then false\n      else (\n        tyl := ty :: !tyl;\n        is_Tvar ty))\n    vars\n\nlet matches env ty ty' =\n  let snap = snapshot () in\n  let vars = rigidify ty in\n  cleanup_abbrev ();\n  let ok =\n    try\n      unify env ty ty';\n      all_distinct_vars env vars\n    with Unify _ -> false\n  in\n  backtrack snap;\n  ok\n\n(*********************************************)\n(*  Equivalence between parameterized types  *)\n(*********************************************)\n\nlet expand_head_rigid env ty =\n  let old = !rigid_variants in\n  rigid_variants := true;\n  let ty' = expand_head env ty in\n  rigid_variants := old;\n  ty'\n\nlet normalize_subst subst =\n  if\n    List.exists\n      (function\n        | {desc = Tlink _}, _ | _, {desc = Tlink _} -> true\n        | _ -> false)\n      !subst\n  then subst := List.map (fun (t1, t2) -> (repr t1, repr t2)) !subst\n\nlet rec eqtype rename 
type_pairs subst env t1 t2 =\n  if t1 == t2 then ()\n  else\n    let t1 = repr t1 in\n    let t2 = repr t2 in\n    if t1 == t2 then ()\n    else\n      try\n        match (t1.desc, t2.desc) with\n        | Tvar _, Tvar _ when rename -> (\n          try\n            normalize_subst subst;\n            if List.assq t1 !subst != t2 then raise (Unify [])\n          with Not_found ->\n            if List.exists (fun (_, t) -> t == t2) !subst then raise (Unify []);\n            subst := (t1, t2) :: !subst)\n        | Tconstr (p1, [], _), Tconstr (p2, [], _) when Path.same p1 p2 -> ()\n        | _ -> (\n          let t1' = expand_head_rigid env t1 in\n          let t2' = expand_head_rigid env t2 in\n          (* Expansion may have changed the representative of the types... *)\n          let t1' = repr t1' and t2' = repr t2' in\n          if t1' == t2' then ()\n          else\n            try TypePairs.find type_pairs (t1', t2')\n            with Not_found -> (\n              TypePairs.add type_pairs (t1', t2') ();\n              match (t1'.desc, t2'.desc) with\n              | Tvar _, Tvar _ when rename -> (\n                try\n                  normalize_subst subst;\n                  if List.assq t1' !subst != t2' then raise (Unify [])\n                with Not_found ->\n                  if List.exists (fun (_, t) -> t == t2') !subst then\n                    raise (Unify []);\n                  subst := (t1', t2') :: !subst)\n              | Tarrow (l1, t1, u1, _), Tarrow (l2, t2, u2, _)\n                when Asttypes.same_arg_label l1 l2 ->\n                eqtype rename type_pairs subst env t1 t2;\n                eqtype rename type_pairs subst env u1 u2\n              | Ttuple tl1, Ttuple tl2 ->\n                eqtype_list rename type_pairs subst env tl1 tl2\n              | Tconstr (p1, tl1, _), Tconstr (p2, tl2, _) when Path.same p1 p2\n                ->\n                eqtype_list rename type_pairs subst env tl1 tl2\n              | Tpackage (p1, n1, tl1), 
Tpackage (p2, n2, tl2) -> (\n                try\n                  unify_package env\n                    (eqtype_list rename type_pairs subst env)\n                    t1'.level p1 n1 tl1 t2'.level p2 n2 tl2\n                with Not_found -> raise (Unify []))\n              | Tvariant row1, Tvariant row2 ->\n                eqtype_row rename type_pairs subst env row1 row2\n              | Tobject (fi1, _nm1), Tobject (fi2, _nm2) ->\n                eqtype_fields rename type_pairs subst env fi1 fi2\n              | Tfield _, Tfield _ ->\n                (* Actually unused *)\n                eqtype_fields rename type_pairs subst env t1' t2'\n              | Tnil, Tnil -> ()\n              | Tpoly (t1, []), Tpoly (t2, []) ->\n                eqtype rename type_pairs subst env t1 t2\n              | Tpoly (t1, tl1), Tpoly (t2, tl2) ->\n                enter_poly env univar_pairs t1 tl1 t2 tl2\n                  (eqtype rename type_pairs subst env)\n              | Tunivar _, Tunivar _ -> unify_univar t1' t2' !univar_pairs\n              | _, _ -> raise (Unify [])))\n      with Unify trace -> raise (Unify ((t1, t2) :: trace))\n\nand eqtype_list rename type_pairs subst env tl1 tl2 =\n  if List.length tl1 <> List.length tl2 then raise (Unify []);\n  List.iter2 (eqtype rename type_pairs subst env) tl1 tl2\n\nand eqtype_fields rename type_pairs subst env ty1 ty2 : unit =\n  let fields1, rest1 = flatten_fields ty1 in\n  let fields2, rest2 = flatten_fields ty2 in\n  (* First check if same row => already equal *)\n  let same_row =\n    rest1 == rest2\n    || TypePairs.mem type_pairs (rest1, rest2)\n    || (rename && List.mem (rest1, rest2) !subst)\n  in\n  if same_row then ()\n  else\n    (* Try expansion, needed when called from Includecore.type_manifest *)\n    match expand_head_rigid env rest2 with\n    | {desc = Tobject (ty2, _)} ->\n      eqtype_fields rename type_pairs subst env ty1 ty2\n    | _ ->\n      let pairs, miss1, miss2 = associate_fields fields1 fields2 
in\n      eqtype rename type_pairs subst env rest1 rest2;\n      if miss1 <> [] || miss2 <> [] then raise (Unify []);\n      List.iter\n        (function\n          | n, k1, t1, k2, t2 -> (\n            eqtype_kind k1 k2;\n            try eqtype rename type_pairs subst env t1 t2\n            with Unify trace ->\n              raise\n                (Unify\n                   (( newty (Tfield (n, k1, t1, rest2)),\n                      newty (Tfield (n, k2, t2, rest2)) )\n                   :: trace))))\n        pairs\n\nand eqtype_kind k1 k2 =\n  let k1 = field_kind_repr k1 in\n  let k2 = field_kind_repr k2 in\n  match (k1, k2) with\n  | Fvar _, Fvar _ | Fpresent, Fpresent -> ()\n  | _ -> raise (Unify [])\n\nand eqtype_row rename type_pairs subst env row1 row2 =\n  (* Try expansion, needed when called from Includecore.type_manifest *)\n  match expand_head_rigid env (row_more row2) with\n  | {desc = Tvariant row2} -> eqtype_row rename type_pairs subst env row1 row2\n  | _ ->\n    let row1 = row_repr row1 and row2 = row_repr row2 in\n    let r1, r2, pairs = merge_row_fields row1.row_fields row2.row_fields in\n    if\n      row1.row_closed <> row2.row_closed\n      || ((not row1.row_closed) && (r1 <> [] || r2 <> []))\n      || filter_row_fields false (r1 @ r2) <> []\n    then raise (Unify []);\n    if not (static_row row1) then\n      eqtype rename type_pairs subst env row1.row_more row2.row_more;\n    List.iter\n      (fun (_, f1, f2) ->\n        match (row_field_repr f1, row_field_repr f2) with\n        | Rpresent (Some t1), Rpresent (Some t2) ->\n          eqtype rename type_pairs subst env t1 t2\n        | Reither (c1, [], _, _), Reither (c2, [], _, _) when c1 = c2 -> ()\n        | Reither (c1, t1 :: tl1, _, _), Reither (c2, t2 :: tl2, _, _)\n          when c1 = c2 ->\n          eqtype rename type_pairs subst env t1 t2;\n          if List.length tl1 = List.length tl2 then\n            (* if same length allow different types (meaning?) 
*)\n            List.iter2 (eqtype rename type_pairs subst env) tl1 tl2\n          else (\n            (* otherwise everything must be equal *)\n            List.iter (eqtype rename type_pairs subst env t1) tl2;\n            List.iter (fun t1 -> eqtype rename type_pairs subst env t1 t2) tl1)\n        | Rpresent None, Rpresent None -> ()\n        | Rabsent, Rabsent -> ()\n        | _ -> raise (Unify []))\n      pairs\n\n(* Must empty univar_pairs first *)\nlet eqtype_list rename type_pairs subst env tl1 tl2 =\n  univar_pairs := [];\n  let snap = Btype.snapshot () in\n  try\n    eqtype_list rename type_pairs subst env tl1 tl2;\n    backtrack snap\n  with exn ->\n    backtrack snap;\n    raise exn\n\nlet eqtype rename type_pairs subst env t1 t2 =\n  eqtype_list rename type_pairs subst env [t1] [t2]\n\n(* Two modes: with or without renaming of variables *)\nlet equal env rename tyl1 tyl2 =\n  try\n    eqtype_list rename (TypePairs.create 11) (ref []) env tyl1 tyl2;\n    true\n  with Unify _ -> false\n\n(*************************)\n(*  Class type matching  *)\n(*************************)\n\ntype class_match_failure =\n  | CM_Virtual_class\n  | CM_Parameter_arity_mismatch of int * int\n  | CM_Type_parameter_mismatch of Env.t * (type_expr * type_expr) list\n  | CM_Class_type_mismatch of Env.t * class_type * class_type\n  | CM_Parameter_mismatch of Env.t * (type_expr * type_expr) list\n  | CM_Val_type_mismatch of string * Env.t * (type_expr * type_expr) list\n  | CM_Meth_type_mismatch of string * Env.t * (type_expr * type_expr) list\n  | CM_Non_mutable_value of string\n  | CM_Non_concrete_value of string\n  | CM_Missing_value of string\n  | CM_Missing_method of string\n  | CM_Hide_public of string\n  | CM_Hide_virtual of string * string\n  | CM_Public_method of string\n  | CM_Private_method of string\n  | CM_Virtual_method of string\n\nexception Failure of class_match_failure list\n\nlet rec moregen_clty trace type_pairs env cty1 cty2 =\n  try\n    match (cty1, cty2) 
with\n    | Cty_constr (_, _, cty1), _ -> moregen_clty true type_pairs env cty1 cty2\n    | _, Cty_constr (_, _, cty2) -> moregen_clty true type_pairs env cty1 cty2\n    | Cty_arrow (l1, ty1, cty1'), Cty_arrow (l2, ty2, cty2')\n      when Asttypes.same_arg_label l1 l2 ->\n      (try moregen true type_pairs env ty1 ty2\n       with Unify trace ->\n         raise (Failure [CM_Parameter_mismatch (env, expand_trace env trace)]));\n      moregen_clty false type_pairs env cty1' cty2'\n    | Cty_signature sign1, Cty_signature sign2 ->\n      let ty1 = object_fields (repr sign1.csig_self) in\n      let ty2 = object_fields (repr sign2.csig_self) in\n      let fields1, _rest1 = flatten_fields ty1\n      and fields2, _rest2 = flatten_fields ty2 in\n      let pairs, _miss1, _miss2 = associate_fields fields1 fields2 in\n      List.iter\n        (fun (lab, _k1, t1, _k2, t2) ->\n          try moregen true type_pairs env t1 t2\n          with Unify trace ->\n            raise\n              (Failure\n                 [CM_Meth_type_mismatch (lab, env, expand_trace env trace)]))\n        pairs;\n      Vars.iter\n        (fun lab (_mut, _v, ty) ->\n          let _mut', _v', ty' = Vars.find lab sign1.csig_vars in\n          try moregen true type_pairs env ty' ty\n          with Unify trace ->\n            raise\n              (Failure [CM_Val_type_mismatch (lab, env, expand_trace env trace)]))\n        sign2.csig_vars\n    | _ -> raise (Failure [])\n  with Failure error when trace || error = [] ->\n    raise (Failure (CM_Class_type_mismatch (env, cty1, cty2) :: error))\n\nlet match_class_types ?(trace = true) env pat_sch subj_sch =\n  let type_pairs = TypePairs.create 53 in\n  let old_level = !current_level in\n  current_level := generic_level - 1;\n  (*\n     Generic variables are first duplicated with [instance].  So,\n     their levels are lowered to [generic_level - 1].  The subject is\n     then copied with [duplicate_type].  
That way, its levels won't be\n     changed.\n  *)\n  let _, subj_inst = instance_class [] subj_sch in\n  let subj = duplicate_class_type subj_inst in\n  current_level := generic_level;\n  (* Duplicate generic variables *)\n  let _, patt = instance_class [] pat_sch in\n  let res =\n    let sign1 = signature_of_class_type patt in\n    let sign2 = signature_of_class_type subj in\n    let t1 = repr sign1.csig_self in\n    let t2 = repr sign2.csig_self in\n    TypePairs.add type_pairs (t1, t2) ();\n    let fields1, rest1 = flatten_fields (object_fields t1)\n    and fields2, rest2 = flatten_fields (object_fields t2) in\n    let pairs, miss1, miss2 = associate_fields fields1 fields2 in\n    let error =\n      List.fold_right\n        (fun (lab, k, _) err ->\n          let err =\n            let k = field_kind_repr k in\n            match k with\n            | Fvar r ->\n              set_kind r Fabsent;\n              err\n            | _ -> CM_Hide_public lab :: err\n          in\n          if Concr.mem lab sign1.csig_concr then err\n          else CM_Hide_virtual (\"method\", lab) :: err)\n        miss1 []\n    in\n    let missing_method = List.map (fun (m, _, _) -> m) miss2 in\n    let error =\n      List.map (fun m -> CM_Missing_method m) missing_method @ error\n    in\n    (* Always succeeds *)\n    moregen true type_pairs env rest1 rest2;\n    let error =\n      List.fold_right\n        (fun (lab, k1, _t1, k2, _t2) err ->\n          try\n            moregen_kind k1 k2;\n            err\n          with Unify _ -> CM_Public_method lab :: err)\n        pairs error\n    in\n    let error =\n      Vars.fold\n        (fun lab (mut, vr, _ty) err ->\n          try\n            let mut', vr', _ty' = Vars.find lab sign1.csig_vars in\n            if mut = Mutable && mut' <> Mutable then\n              CM_Non_mutable_value lab :: err\n            else if vr = Concrete && vr' <> Concrete then\n              CM_Non_concrete_value lab :: err\n            else err\n          with 
Not_found -> CM_Missing_value lab :: err)\n        sign2.csig_vars error\n    in\n    let error =\n      Vars.fold\n        (fun lab (_, vr, _) err ->\n          if vr = Virtual && not (Vars.mem lab sign2.csig_vars) then\n            CM_Hide_virtual (\"instance variable\", lab) :: err\n          else err)\n        sign1.csig_vars error\n    in\n    let error =\n      List.fold_right\n        (fun e l ->\n          if List.mem e missing_method then l else CM_Virtual_method e :: l)\n        (Concr.elements (Concr.diff sign2.csig_concr sign1.csig_concr))\n        error\n    in\n    match error with\n    | [] -> (\n      try\n        moregen_clty trace type_pairs env patt subj;\n        []\n      with Failure r -> r)\n    | error -> CM_Class_type_mismatch (env, patt, subj) :: error\n  in\n  current_level := old_level;\n  res\n\nlet rec equal_clty trace type_pairs subst env cty1 cty2 =\n  try\n    match (cty1, cty2) with\n    | Cty_constr (_, _, cty1), Cty_constr (_, _, cty2) ->\n      equal_clty true type_pairs subst env cty1 cty2\n    | Cty_constr (_, _, cty1), _ ->\n      equal_clty true type_pairs subst env cty1 cty2\n    | _, Cty_constr (_, _, cty2) ->\n      equal_clty true type_pairs subst env cty1 cty2\n    | Cty_arrow (l1, ty1, cty1'), Cty_arrow (l2, ty2, cty2')\n      when Asttypes.same_arg_label l1 l2 ->\n      (try eqtype true type_pairs subst env ty1 ty2\n       with Unify trace ->\n         raise (Failure [CM_Parameter_mismatch (env, expand_trace env trace)]));\n      equal_clty false type_pairs subst env cty1' cty2'\n    | Cty_signature sign1, Cty_signature sign2 ->\n      let ty1 = object_fields (repr sign1.csig_self) in\n      let ty2 = object_fields (repr sign2.csig_self) in\n      let fields1, _rest1 = flatten_fields ty1\n      and fields2, _rest2 = flatten_fields ty2 in\n      let pairs, _miss1, _miss2 = associate_fields fields1 fields2 in\n      List.iter\n        (fun (lab, _k1, t1, _k2, t2) ->\n          try eqtype true type_pairs subst env t1 
t2\n          with Unify trace ->\n            raise\n              (Failure\n                 [CM_Meth_type_mismatch (lab, env, expand_trace env trace)]))\n        pairs;\n      Vars.iter\n        (fun lab (_, _, ty) ->\n          let _, _, ty' = Vars.find lab sign1.csig_vars in\n          try eqtype true type_pairs subst env ty' ty\n          with Unify trace ->\n            raise\n              (Failure [CM_Val_type_mismatch (lab, env, expand_trace env trace)]))\n        sign2.csig_vars\n    | _ ->\n      raise\n        (Failure\n           (if trace then [] else [CM_Class_type_mismatch (env, cty1, cty2)]))\n  with Failure error when trace ->\n    raise (Failure (CM_Class_type_mismatch (env, cty1, cty2) :: error))\n\nlet match_class_declarations env patt_params patt_type subj_params subj_type =\n  let type_pairs = TypePairs.create 53 in\n  let subst = ref [] in\n  let sign1 = signature_of_class_type patt_type in\n  let sign2 = signature_of_class_type subj_type in\n  let t1 = repr sign1.csig_self in\n  let t2 = repr sign2.csig_self in\n  TypePairs.add type_pairs (t1, t2) ();\n  let fields1, rest1 = flatten_fields (object_fields t1)\n  and fields2, rest2 = flatten_fields (object_fields t2) in\n  let pairs, miss1, miss2 = associate_fields fields1 fields2 in\n  let error =\n    List.fold_right\n      (fun (lab, k, _) err ->\n        let err =\n          let k = field_kind_repr k in\n          match k with\n          | Fvar _ -> err\n          | _ -> CM_Hide_public lab :: err\n        in\n        if Concr.mem lab sign1.csig_concr then err\n        else CM_Hide_virtual (\"method\", lab) :: err)\n      miss1 []\n  in\n  let missing_method = List.map (fun (m, _, _) -> m) miss2 in\n  let error = List.map (fun m -> CM_Missing_method m) missing_method @ error in\n  (* Always succeeds *)\n  eqtype true type_pairs subst env rest1 rest2;\n  let error =\n    List.fold_right\n      (fun (lab, k1, _t1, k2, _t2) err ->\n        let k1 = field_kind_repr k1 in\n        let k2 = 
field_kind_repr k2 in\n        match (k1, k2) with\n        | Fvar _, Fvar _ | Fpresent, Fpresent -> err\n        | Fvar _, Fpresent -> CM_Private_method lab :: err\n        | Fpresent, Fvar _ -> CM_Public_method lab :: err\n        | _ -> assert false)\n      pairs error\n  in\n  let error =\n    Vars.fold\n      (fun lab (mut, vr, _ty) err ->\n        try\n          let mut', vr', _ty' = Vars.find lab sign1.csig_vars in\n          if mut = Mutable && mut' <> Mutable then\n            CM_Non_mutable_value lab :: err\n          else if vr = Concrete && vr' <> Concrete then\n            CM_Non_concrete_value lab :: err\n          else err\n        with Not_found -> CM_Missing_value lab :: err)\n      sign2.csig_vars error\n  in\n  let error =\n    Vars.fold\n      (fun lab (_, vr, _) err ->\n        if vr = Virtual && not (Vars.mem lab sign2.csig_vars) then\n          CM_Hide_virtual (\"instance variable\", lab) :: err\n        else err)\n      sign1.csig_vars error\n  in\n  let error =\n    List.fold_right\n      (fun e l ->\n        if List.mem e missing_method then l else CM_Virtual_method e :: l)\n      (Concr.elements (Concr.diff sign2.csig_concr sign1.csig_concr))\n      error\n  in\n  match error with\n  | [] -> (\n    try\n      let lp = List.length patt_params in\n      let ls = List.length subj_params in\n      if lp <> ls then raise (Failure [CM_Parameter_arity_mismatch (lp, ls)]);\n      List.iter2\n        (fun p s ->\n          try eqtype true type_pairs subst env p s\n          with Unify trace ->\n            raise\n              (Failure\n                 [CM_Type_parameter_mismatch (env, expand_trace env trace)]))\n        patt_params subj_params;\n      (* old code: equal_clty false type_pairs subst env patt_type subj_type; *)\n      equal_clty false type_pairs subst env (Cty_signature sign1)\n        (Cty_signature sign2);\n      (* Use moregeneral for class parameters, need to recheck everything to\n         keeps relationships (PR#4824) *)\n    
  let clty_params =\n        List.fold_right (fun ty cty -> Cty_arrow (Labelled \"*\", ty, cty))\n      in\n      match_class_types ~trace:false env\n        (clty_params patt_params patt_type)\n        (clty_params subj_params subj_type)\n    with Failure r -> r)\n  | error -> error\n\n(***************)\n(*  Subtyping  *)\n(***************)\n\n(**** Build a subtype of a given type. ****)\n\n(* build_subtype:\n   [visited] traces traversed object and variant types\n   [loops] is a mapping from variables to variables, to reproduce\n     positive loops in a class type\n   [posi] true if the current variance is positive\n   [level] number of expansions/enlargement allowed on this branch *)\n\nlet warn = ref false (* whether double coercion might do better *)\nlet pred_expand n = if n mod 2 = 0 && n > 0 then pred n else n\nlet pred_enlarge n = if n mod 2 = 1 then pred n else n\n\ntype change = Unchanged | Equiv | Changed [@@immediate]\n\nlet[@inline] max (c1 : change) (c2 : change) : change =\n  Obj.magic (Ext_pervasives.max_int (Obj.magic c1 : int) (Obj.magic c2 : int))\nlet collect l = List.fold_left (fun c1 (_, c2) -> max c1 c2) Unchanged l\n\nlet rec filter_visited = function\n  | [] -> []\n  | {desc = Tobject _ | Tvariant _} :: _ as l -> l\n  | _ :: l -> filter_visited l\n\nlet memq_warn t visited =\n  if List.memq t visited then (\n    warn := true;\n    true)\n  else false\n\nlet rec lid_of_path ?(hash = \"\") = function\n  | Path.Pident id -> Longident.Lident (hash ^ Ident.name id)\n  | Path.Pdot (p1, s, _) -> Longident.Ldot (lid_of_path p1, hash ^ s)\n  | Path.Papply (p1, p2) ->\n    Longident.Lapply (lid_of_path ~hash p1, lid_of_path p2)\n\nlet find_cltype_for_path env p =\n  let cl_path = Env.lookup_type (lid_of_path ~hash:\"#\" p) env in\n  let cl_abbr = Env.find_type cl_path env in\n\n  match cl_abbr.type_manifest with\n  | Some ty -> (\n    match (repr ty).desc with\n    | Tobject (_, {contents = Some (p', _)}) when Path.same p p' -> (cl_abbr, ty)\n    | 
_ -> raise Not_found)\n  | None -> assert false\n\nlet has_constr_row' env t = has_constr_row (expand_abbrev env t)\n\nlet rec build_subtype env visited loops posi level t =\n  let t = repr t in\n  match t.desc with\n  | Tvar _ ->\n    if posi then\n      try\n        let t' = List.assq t loops in\n        warn := true;\n        (t', Equiv)\n      with Not_found -> (t, Unchanged)\n    else (t, Unchanged)\n  | Tarrow (l, t1, t2, _) ->\n    if memq_warn t visited then (t, Unchanged)\n    else\n      let visited = t :: visited in\n      let t1', c1 = build_subtype env visited loops (not posi) level t1 in\n      let t2', c2 = build_subtype env visited loops posi level t2 in\n      let c = max c1 c2 in\n      if c > Unchanged then (newty (Tarrow (l, t1', t2', Cok)), c)\n      else (t, Unchanged)\n  | Ttuple tlist ->\n    if memq_warn t visited then (t, Unchanged)\n    else\n      let visited = t :: visited in\n      let tlist' =\n        List.map (build_subtype env visited loops posi level) tlist\n      in\n      let c = collect tlist' in\n      if c > Unchanged then (newty (Ttuple (List.map fst tlist')), c)\n      else (t, Unchanged)\n  | Tconstr (p, tl, abbrev)\n    when level > 0 && generic_abbrev env p && safe_abbrev env t\n         && not (has_constr_row' env t) -> (\n    let t' = repr (expand_abbrev env t) in\n    let level' = pred_expand level in\n    try\n      match t'.desc with\n      | Tobject _ when posi && not (opened_object t') ->\n        let cl_abbr, body = find_cltype_for_path env p in\n        let ty =\n          subst env !current_level Public abbrev None cl_abbr.type_params tl\n            body\n        in\n        let ty = repr ty in\n        let ty1, tl1 =\n          match ty.desc with\n          | Tobject (ty1, {contents = Some (p', tl1)}) when Path.same p p' ->\n            (ty1, tl1)\n          | _ -> raise Not_found\n        in\n        (* Fix PR#4505: do not set ty to Tvar when it appears in tl1,\n           as this occurrence might break the 
occur check.\n           XXX not clear whether this correct anyway... *)\n        if List.exists (deep_occur ty) tl1 then raise Not_found;\n        ty.desc <- Tvar None;\n        let t'' = newvar () in\n        let loops = (ty, t'') :: loops in\n        (* May discard [visited] as level is going down *)\n        let ty1', c =\n          build_subtype env [t'] loops posi (pred_enlarge level') ty1\n        in\n        assert (is_Tvar t'');\n        let nm =\n          if c > Equiv || deep_occur ty ty1' then None else Some (p, tl1)\n        in\n        t''.desc <- Tobject (ty1', ref nm);\n        (try unify_var env ty t with Unify _ -> assert false);\n        (t'', Changed)\n      | _ -> raise Not_found\n    with Not_found ->\n      let t'', c = build_subtype env visited loops posi level' t' in\n      if c > Unchanged then (t'', c) else (t, Unchanged))\n  | Tconstr (p, tl, _abbrev) -> (\n    if\n      (* Must check recursion on constructors, since we do not always\n         expand them *)\n      memq_warn t visited\n    then (t, Unchanged)\n    else\n      let visited = t :: visited in\n      try\n        let decl = Env.find_type p env in\n        if\n          level = 0 && generic_abbrev env p && safe_abbrev env t\n          && not (has_constr_row' env t)\n        then warn := true;\n        let tl' =\n          List.map2\n            (fun v t ->\n              let co, cn = Variance.get_upper v in\n              if cn then\n                if co then (t, Unchanged)\n                else build_subtype env visited loops (not posi) level t\n              else if co then build_subtype env visited loops posi level t\n              else (newvar (), Changed))\n            decl.type_variance tl\n        in\n        let c = collect tl' in\n        if c > Unchanged then (newconstr p (List.map fst tl'), c)\n        else (t, Unchanged)\n      with Not_found -> (t, Unchanged))\n  | Tvariant row ->\n    let row = row_repr row in\n    if memq_warn t visited || not (static_row row) 
then (t, Unchanged)\n    else\n      let level' = pred_enlarge level in\n      let visited =\n        t :: (if level' < level then [] else filter_visited visited)\n      in\n      let fields = filter_row_fields false row.row_fields in\n      let fields =\n        List.map\n          (fun ((l, f) as orig) ->\n            match row_field_repr f with\n            | Rpresent None ->\n              if posi then ((l, Reither (true, [], false, ref None)), Unchanged)\n              else (orig, Unchanged)\n            | Rpresent (Some t) ->\n              let t', c = build_subtype env visited loops posi level' t in\n              let f =\n                if posi && level > 0 then Reither (false, [t'], false, ref None)\n                else Rpresent (Some t')\n              in\n              ((l, f), c)\n            | _ -> assert false)\n          fields\n      in\n      let c = collect fields in\n      let row =\n        {\n          row_fields = List.map fst fields;\n          row_more = newvar ();\n          row_bound = ();\n          row_closed = posi;\n          row_fixed = false;\n          row_name = (if c > Unchanged then None else row.row_name);\n        }\n      in\n      (newty (Tvariant row), Changed)\n  | Tobject (t1, _) ->\n    if memq_warn t visited || opened_object t1 then (t, Unchanged)\n    else\n      let level' = pred_enlarge level in\n      let visited =\n        t :: (if level' < level then [] else filter_visited visited)\n      in\n      let t1', c = build_subtype env visited loops posi level' t1 in\n      if c > Unchanged then (newty (Tobject (t1', ref None)), c)\n      else (t, Unchanged)\n  | Tfield (s, _, t1, t2) (* Always present *) ->\n    let t1', c1 = build_subtype env visited loops posi level t1 in\n    let t2', c2 = build_subtype env visited loops posi level t2 in\n    let c = max c1 c2 in\n    if c > Unchanged then (newty (Tfield (s, Fpresent, t1', t2')), c)\n    else (t, Unchanged)\n  | Tnil ->\n    if posi then\n      let v = newvar () 
in\n      (v, Changed)\n    else (\n      warn := true;\n      (t, Unchanged))\n  | Tsubst _ | Tlink _ -> assert false\n  | Tpoly (t1, tl) ->\n    let t1', c = build_subtype env visited loops posi level t1 in\n    if c > Unchanged then (newty (Tpoly (t1', tl)), c) else (t, Unchanged)\n  | Tunivar _ | Tpackage _ -> (t, Unchanged)\n\nlet enlarge_type env ty =\n  warn := false;\n  (* [level = 4] allows 2 expansions involving objects/variants *)\n  let ty', _ = build_subtype env [] [] true 4 ty in\n  (ty', !warn)\n\n(**** Check whether a type is a subtype of another type. ****)\n\n(*\n    During the traversal, a trace of visited types is maintained. It\n    is printed in case of error.\n    Constraints (pairs of types that must be equals) are accumulated\n    rather than being enforced straight. Indeed, the result would\n    otherwise depend on the order in which these constraints are\n    enforced.\n    A function enforcing these constraints is returned. That way, type\n    variables can be bound to their actual values before this function\n    is called (see Typecore).\n    Only well-defined abbreviations are expanded (hence the tests\n    [generic_abbrev ...]).\n*)\n\nlet subtypes = TypePairs.create 17\n\nlet subtype_error env trace =\n  raise (Subtype (expand_trace env (List.rev trace), []))\n\nlet extract_concrete_typedecl_opt env t =\n  match extract_concrete_typedecl env t with\n  | v -> Some v\n  | exception Not_found -> None\n\nlet rec subtype_rec env trace t1 t2 cstrs =\n  let t1 = repr t1 in\n  let t2 = repr t2 in\n  if t1 == t2 then cstrs\n  else\n    try\n      TypePairs.find subtypes (t1, t2);\n      cstrs\n    with Not_found -> (\n      TypePairs.add subtypes (t1, t2) ();\n      match (t1.desc, t2.desc) with\n      | Tvar _, _ | _, Tvar _ -> (trace, t1, t2, !univar_pairs) :: cstrs\n      | Tarrow (l1, t1, u1, _), Tarrow (l2, t2, u2, _)\n        when Asttypes.same_arg_label l1 l2 ->\n        let cstrs = subtype_rec env ((t2, t1) :: trace) t2 t1 cstrs in\n 
       subtype_rec env ((u1, u2) :: trace) u1 u2 cstrs\n      | Ttuple tl1, Ttuple tl2 -> subtype_list env trace tl1 tl2 cstrs\n      | Tconstr (p1, [], _), Tconstr (p2, [], _) when Path.same p1 p2 -> cstrs\n      | Tconstr (p1, _tl1, _abbrev1), _\n        when generic_abbrev env p1 && safe_abbrev env t1 ->\n        subtype_rec env trace (expand_abbrev env t1) t2 cstrs\n      | _, Tconstr (p2, _tl2, _abbrev2)\n        when generic_abbrev env p2 && safe_abbrev env t2 ->\n        subtype_rec env trace t1 (expand_abbrev env t2) cstrs\n      | Tconstr (p1, tl1, _), Tconstr (p2, tl2, _) when Path.same p1 p2 -> (\n        try\n          let decl = Env.find_type p1 env in\n          List.fold_left2\n            (fun cstrs v (t1, t2) ->\n              let co, cn = Variance.get_upper v in\n              if co then\n                if cn then\n                  (* Invariant type argument: check both ways *)\n                  if\n                    subtype_rec env ((t1, t2) :: trace) t1 t2 [] = []\n                    && subtype_rec env ((t2, t1) :: trace) t2 t1 [] = []\n                  then cstrs\n                  else\n                    ( trace,\n                      newty2 t1.level (Ttuple [t1]),\n                      newty2 t2.level (Ttuple [t2]),\n                      !univar_pairs )\n                    :: cstrs\n                else subtype_rec env ((t1, t2) :: trace) t1 t2 cstrs\n              else if cn then subtype_rec env ((t2, t1) :: trace) t2 t1 cstrs\n              else cstrs)\n            cstrs decl.type_variance (List.combine tl1 tl2)\n        with Not_found -> (trace, t1, t2, !univar_pairs) :: cstrs)\n      | Tconstr (p1, _, _), _ when generic_private_abbrev env p1 ->\n        subtype_rec env trace (expand_abbrev_opt env t1) t2 cstrs\n      | Tconstr (p1, [], _), Tconstr (p2, [], _)\n        when Path.same p1 Predef.path_int && Path.same p2 Predef.path_float ->\n        cstrs\n      | Tconstr (path, [], _), Tconstr (_, [], _)\n        when 
Variant_coercion.can_coerce_primitive path\n             && extract_concrete_typedecl_opt env t2\n                |> Variant_coercion.can_try_coerce_variant_to_primitive_opt\n                |> Option.is_some -> (\n        (* type coercion for primitives (int/float/string) to elgible unboxed variants:\n           - must be unboxed\n           - must have a constructor case with a supported and matching primitive payload *)\n        match\n          Variant_coercion.can_try_coerce_variant_to_primitive_opt\n            (extract_concrete_typedecl_opt env t2)\n        with\n        | Some (constructors, true) ->\n          if\n            Variant_coercion.variant_has_catch_all_case constructors (fun p ->\n                Path.same p path)\n          then cstrs\n          else (trace, t1, t2, !univar_pairs) :: cstrs\n        | _ -> (trace, t1, t2, !univar_pairs) :: cstrs)\n      | Tconstr (_, [], _), Tconstr (path, [], _)\n        when Variant_coercion.can_coerce_primitive path\n             && extract_concrete_typedecl_opt env t1\n                |> Variant_coercion.can_try_coerce_variant_to_primitive_opt\n                |> Option.is_some -> (\n        (* type coercion for variants to primitives *)\n        match\n          Variant_coercion.can_try_coerce_variant_to_primitive_opt\n            (extract_concrete_typedecl_opt env t1)\n        with\n        | Some (constructors, unboxed) ->\n          if\n            constructors\n            |> Variant_coercion\n               .variant_has_same_runtime_representation_as_target\n                 ~target_path:path ~unboxed\n          then cstrs\n          else (trace, t1, t2, !univar_pairs) :: cstrs\n        | None -> (trace, t1, t2, !univar_pairs) :: cstrs)\n      | Tconstr (_, [], _), Tconstr (_, [], _) -> (\n        (* type coercion for variants and records *)\n        match\n          (extract_concrete_typedecl env t1, extract_concrete_typedecl env t2)\n        with\n        | ( (_, _, {type_kind = Type_variant c1; 
type_attributes = t1attrs}),\n            (_, _, {type_kind = Type_variant c2; type_attributes = t2attrs}) )\n          ->\n          if\n            Variant_coercion.variant_configuration_can_be_coerced t1attrs\n              t2attrs\n            = false\n          then (trace, t1, t2, !univar_pairs) :: cstrs\n          else\n            let c1_len = List.length c1 in\n            if c1_len > List.length c2 then\n              (trace, t1, t2, !univar_pairs) :: cstrs\n            else\n              let constructor_map = Hashtbl.create c1_len in\n              c2\n              |> List.iter (fun (c : Types.constructor_declaration) ->\n                     Hashtbl.add constructor_map (Ident.name c.cd_id) c);\n              if\n                c1\n                |> List.for_all (fun (c : Types.constructor_declaration) ->\n                       match\n                         ( c,\n                           Hashtbl.find_opt constructor_map (Ident.name c.cd_id)\n                         )\n                       with\n                       | ( {\n                             Types.cd_args = Cstr_record fields1;\n                             cd_attributes = c1_attributes;\n                           },\n                           Some\n                             {\n                               Types.cd_args = Cstr_record fields2;\n                               cd_attributes = c2_attributes;\n                             } ) ->\n                         if\n                           Variant_coercion.variant_representation_matches\n                             c1_attributes c2_attributes\n                         then\n                           let violation, tl1, tl2 =\n                             Record_coercion.check_record_fields fields1 fields2\n                           in\n                           if violation then false\n                           else\n                             try\n                               let lst = subtype_list env trace 
tl1 tl2 cstrs in\n                               List.length lst = List.length cstrs\n                             with _ -> false\n                         else false\n                       | ( {\n                             Types.cd_args = Cstr_tuple tl1;\n                             cd_attributes = c1_attributes;\n                           },\n                           Some\n                             {\n                               Types.cd_args = Cstr_tuple tl2;\n                               cd_attributes = c2_attributes;\n                             } ) ->\n                         if\n                           Variant_coercion.variant_representation_matches\n                             c1_attributes c2_attributes\n                         then\n                           try\n                             let lst = subtype_list env trace tl1 tl2 cstrs in\n                             List.length lst = List.length cstrs\n                           with _ -> false\n                         else false\n                       | _ -> false)\n              then cstrs\n              else (trace, t1, t2, !univar_pairs) :: cstrs\n        | ( (_, _, {type_kind = Type_record (fields1, repr1)}),\n            (_, _, {type_kind = Type_record (fields2, repr2)}) ) ->\n          let same_repr =\n            match (repr1, repr2) with\n            | ( (Record_regular | Record_optional_labels _),\n                (Record_regular | Record_optional_labels _) ) ->\n              true (* handled in the fields checks *)\n            | Record_unboxed b1, Record_unboxed b2 -> b1 = b2\n            | Record_inlined _, Record_inlined _ -> repr1 = repr2\n            | Record_extension, Record_extension -> true\n            | _ -> false\n          in\n          if same_repr then\n            let violation, tl1, tl2 =\n              Record_coercion.check_record_fields ~repr1 ~repr2 fields1 fields2\n            in\n            if violation then (trace, t1, t2, !univar_pairs) :: 
cstrs\n            else subtype_list env trace tl1 tl2 cstrs\n          else (trace, t1, t2, !univar_pairs) :: cstrs\n        | _ -> (trace, t1, t2, !univar_pairs) :: cstrs\n        | exception Not_found -> (trace, t1, t2, !univar_pairs) :: cstrs)\n      (* | (_, Tconstr(p2, _, _)) when generic_private_abbrev false env p2 ->\n         subtype_rec env trace t1 (expand_abbrev_opt env t2) cstrs *)\n      | Tobject (f1, _), Tobject (f2, _)\n        when is_Tvar (object_row f1) && is_Tvar (object_row f2) ->\n        (* Same row variable implies same object. *)\n        (trace, t1, t2, !univar_pairs) :: cstrs\n      | Tobject (f1, _), Tobject (f2, _) -> subtype_fields env trace f1 f2 cstrs\n      | Tvariant row1, Tvariant row2 -> (\n        try subtype_row env trace row1 row2 cstrs\n        with Exit -> (trace, t1, t2, !univar_pairs) :: cstrs)\n      | Tvariant v, _\n        when !Config.bs_only && !variant_is_subtype env (row_repr v) t2 ->\n        cstrs\n      | Tpoly (u1, []), Tpoly (u2, []) -> subtype_rec env trace u1 u2 cstrs\n      | Tpoly (u1, tl1), Tpoly (u2, []) ->\n        let _, u1' = instance_poly false tl1 u1 in\n        subtype_rec env trace u1' u2 cstrs\n      | Tpoly (u1, tl1), Tpoly (u2, tl2) -> (\n        try\n          enter_poly env univar_pairs u1 tl1 u2 tl2 (fun t1 t2 ->\n              subtype_rec env trace t1 t2 cstrs)\n        with Unify _ -> (trace, t1, t2, !univar_pairs) :: cstrs)\n      | Tpackage (p1, nl1, tl1), Tpackage (p2, nl2, tl2) -> (\n        try\n          let ntl1 = complete_type_list env nl2 t1.level (Mty_ident p1) nl1 tl1\n          and ntl2 =\n            complete_type_list env nl1 t2.level (Mty_ident p2) nl2 tl2\n              ~allow_absent:true\n          in\n          let cstrs' =\n            List.map\n              (fun (n2, t2) -> (trace, List.assoc n2 ntl1, t2, !univar_pairs))\n              ntl2\n          in\n          if eq_package_path env p1 p2 then cstrs' @ cstrs\n          else\n            (* need to check module 
subtyping *)\n            let snap = Btype.snapshot () in\n            try\n              List.iter (fun (_, t1, t2, _) -> unify env t1 t2) cstrs';\n              if !package_subtype env p1 nl1 tl1 p2 nl2 tl2 then (\n                Btype.backtrack snap;\n                cstrs' @ cstrs)\n              else raise (Unify [])\n            with Unify _ ->\n              Btype.backtrack snap;\n              raise Not_found\n        with Not_found -> (trace, t1, t2, !univar_pairs) :: cstrs)\n      | _, _ -> (trace, t1, t2, !univar_pairs) :: cstrs)\n\nand subtype_list env trace tl1 tl2 cstrs =\n  if List.length tl1 <> List.length tl2 then subtype_error env trace;\n  List.fold_left2\n    (fun cstrs t1 t2 -> subtype_rec env ((t1, t2) :: trace) t1 t2 cstrs)\n    cstrs tl1 tl2\n\nand subtype_fields env trace ty1 ty2 cstrs =\n  (* Assume that either rest1 or rest2 is not Tvar *)\n  let fields1, rest1 = flatten_fields ty1 in\n  let fields2, rest2 = flatten_fields ty2 in\n  let pairs, miss1, miss2 = associate_fields fields1 fields2 in\n  let cstrs =\n    if rest2.desc = Tnil then cstrs\n    else if miss1 = [] then\n      subtype_rec env ((rest1, rest2) :: trace) rest1 rest2 cstrs\n    else\n      (trace, build_fields (repr ty1).level miss1 rest1, rest2, !univar_pairs)\n      :: cstrs\n  in\n  let cstrs =\n    if miss2 = [] then cstrs\n    else\n      ( trace,\n        rest1,\n        build_fields (repr ty2).level miss2 (newvar ()),\n        !univar_pairs )\n      :: cstrs\n  in\n  List.fold_left\n    (fun cstrs (_, _k1, t1, _k2, t2) ->\n      (* These fields are always present *)\n      subtype_rec env ((t1, t2) :: trace) t1 t2 cstrs)\n    cstrs pairs\n\nand subtype_row env trace row1 row2 cstrs =\n  let row1 = row_repr row1 and row2 = row_repr row2 in\n  let r1, r2, pairs = merge_row_fields row1.row_fields row2.row_fields in\n  let more1 = repr row1.row_more and more2 = repr row2.row_more in\n  match (more1.desc, more2.desc) with\n  | Tconstr (p1, _, _), Tconstr (p2, _, _) when 
Path.same p1 p2 ->\n    subtype_rec env ((more1, more2) :: trace) more1 more2 cstrs\n  | (Tvar _ | Tconstr _ | Tnil), (Tvar _ | Tconstr _ | Tnil)\n    when row1.row_closed && r1 = [] ->\n    List.fold_left\n      (fun cstrs (_, f1, f2) ->\n        match (row_field_repr f1, row_field_repr f2) with\n        | (Rpresent None | Reither (true, _, _, _)), Rpresent None -> cstrs\n        | Rpresent (Some t1), Rpresent (Some t2) ->\n          subtype_rec env ((t1, t2) :: trace) t1 t2 cstrs\n        | Reither (false, t1 :: _, _, _), Rpresent (Some t2) ->\n          subtype_rec env ((t1, t2) :: trace) t1 t2 cstrs\n        | Rabsent, _ -> cstrs\n        | _ -> raise Exit)\n      cstrs pairs\n  | Tunivar _, Tunivar _\n    when row1.row_closed = row2.row_closed && r1 = [] && r2 = [] ->\n    let cstrs = subtype_rec env ((more1, more2) :: trace) more1 more2 cstrs in\n    List.fold_left\n      (fun cstrs (_, f1, f2) ->\n        match (row_field_repr f1, row_field_repr f2) with\n        | Rpresent None, Rpresent None\n        | Reither (true, [], _, _), Reither (true, [], _, _)\n        | Rabsent, Rabsent ->\n          cstrs\n        | Rpresent (Some t1), Rpresent (Some t2)\n        | Reither (false, [t1], _, _), Reither (false, [t2], _, _) ->\n          subtype_rec env ((t1, t2) :: trace) t1 t2 cstrs\n        | _ -> raise Exit)\n      cstrs pairs\n  | _ -> raise Exit\n\nlet subtype env ty1 ty2 =\n  TypePairs.clear subtypes;\n  univar_pairs := [];\n  (* Build constraint set. *)\n  let cstrs = subtype_rec env [(ty1, ty2)] ty1 ty2 [] in\n  TypePairs.clear subtypes;\n  (* Enforce constraints. 
*)\n  function\n  | () ->\n    List.iter\n      (function\n        | trace0, t1, t2, pairs -> (\n          try unify_pairs (ref env) t1 t2 pairs\n          with Unify trace ->\n            raise\n              (Subtype\n                 (expand_trace env (List.rev trace0), List.tl (List.tl trace)))))\n      (List.rev cstrs)\n\n(*******************)\n(*  Miscellaneous  *)\n(*******************)\n\n(* Utility for printing. The resulting type is not used in computation. *)\nlet rec unalias_object ty =\n  let ty = repr ty in\n  match ty.desc with\n  | Tfield (s, k, t1, t2) ->\n    newty2 ty.level (Tfield (s, k, t1, unalias_object t2))\n  | Tvar _ | Tnil -> newty2 ty.level ty.desc\n  | Tunivar _ -> ty\n  | Tconstr _ -> newvar2 ty.level\n  | _ -> assert false\n\nlet unalias ty =\n  let ty = repr ty in\n  match ty.desc with\n  | Tvar _ | Tunivar _ -> ty\n  | Tvariant row ->\n    let row = row_repr row in\n    let more = row.row_more in\n    newty2 ty.level (Tvariant {row with row_more = newty2 more.level more.desc})\n  | Tobject (ty, nm) -> newty2 ty.level (Tobject (unalias_object ty, nm))\n  | _ -> newty2 ty.level ty.desc\n\n(* Return the arity (as for curried functions) of the given type. *)\nlet rec arity ty =\n  match (repr ty).desc with\n  | Tarrow (_, _t1, t2, _) -> 1 + arity t2\n  | _ -> 0\n\n(* Check whether an abbreviation expands to itself. 
*)\nlet cyclic_abbrev env id ty =\n  let rec check_cycle seen ty =\n    let ty = repr ty in\n    match ty.desc with\n    | Tconstr (p, _tl, _abbrev) -> (\n      (match p with\n      | Path.Pident p -> Ident.same p id\n      | _ -> false)\n      || List.memq ty seen\n      ||\n      try check_cycle (ty :: seen) (expand_abbrev_opt env ty) with\n      | Cannot_expand -> false\n      | Unify _ -> true)\n    | _ -> false\n  in\n  check_cycle [] ty\n\n(* Check for non-generalizable type variables *)\nexception Non_closed0\nlet visited = ref TypeSet.empty\n\nlet rec closed_schema_rec env ty =\n  let ty = repr ty in\n  if TypeSet.mem ty !visited then ()\n  else (\n    visited := TypeSet.add ty !visited;\n    match ty.desc with\n    | Tvar _ when ty.level <> generic_level -> raise Non_closed0\n    | Tconstr _ -> (\n      let old = !visited in\n      try iter_type_expr (closed_schema_rec env) ty\n      with Non_closed0 -> (\n        try\n          visited := old;\n          closed_schema_rec env (try_expand_head try_expand_safe env ty)\n        with Cannot_expand -> raise Non_closed0))\n    | Tfield (_, kind, t1, t2) ->\n      if field_kind_repr kind = Fpresent then closed_schema_rec env t1;\n      closed_schema_rec env t2\n    | Tvariant row ->\n      let row = row_repr row in\n      iter_row (closed_schema_rec env) row;\n      if not (static_row row) then closed_schema_rec env row.row_more\n    | _ -> iter_type_expr (closed_schema_rec env) ty)\n\n(* Return whether all variables of type [ty] are generic. *)\nlet closed_schema env ty =\n  visited := TypeSet.empty;\n  try\n    closed_schema_rec env ty;\n    visited := TypeSet.empty;\n    true\n  with Non_closed0 ->\n    visited := TypeSet.empty;\n    false\n\n(* Normalize a type before printing, saving... 
*)\n(* Cannot use mark_type because deep_occur uses it too *)\nlet rec normalize_type_rec env visited ty =\n  let ty = repr ty in\n  if not (TypeSet.mem ty !visited) then (\n    visited := TypeSet.add ty !visited;\n    let tm = row_of_type ty in\n    (if (not (is_Tconstr ty)) && is_constr_row ~allow_ident:false tm then\n       match tm.desc with\n       (* PR#7348 *)\n       | Tconstr (Path.Pdot (m, i, pos), tl, _abbrev) ->\n         let i' = String.sub i 0 (String.length i - 4) in\n         log_type ty;\n         ty.desc <- Tconstr (Path.Pdot (m, i', pos), tl, ref Mnil)\n       | _ -> assert false\n     else\n       match ty.desc with\n       | Tvariant row ->\n         let row = row_repr row in\n         let fields =\n           List.map\n             (fun (l, f0) ->\n               let f = row_field_repr f0 in\n               ( l,\n                 match f with\n                 | Reither (b, ty :: (_ :: _ as tyl), m, e) ->\n                   let tyl' =\n                     List.fold_left\n                       (fun tyl ty ->\n                         if\n                           List.exists\n                             (fun ty' -> equal env false [ty] [ty'])\n                             tyl\n                         then tyl\n                         else ty :: tyl)\n                       [ty] tyl\n                   in\n                   if f != f0 || List.length tyl' < List.length tyl then\n                     Reither (b, List.rev tyl', m, e)\n                   else f\n                 | _ -> f ))\n             row.row_fields\n         in\n         let fields =\n           List.sort\n             (fun (p, _) (q, _) -> compare p q)\n             (Ext_list.filter fields (fun (_, fi) -> fi <> Rabsent))\n         in\n         log_type ty;\n         ty.desc <- Tvariant {row with row_fields = fields}\n       | Tobject (fi, nm) ->\n         (match !nm with\n         | None -> ()\n         | Some (n, v :: l) -> (\n           if deep_occur ty (newgenty 
(Ttuple l)) then\n             (* The abbreviation may be hiding something, so remove it *)\n             set_name nm None\n           else\n             let v' = repr v in\n             match v'.desc with\n             | Tvar _ | Tunivar _ ->\n               if v' != v then set_name nm (Some (n, v' :: l))\n             | Tnil ->\n               log_type ty;\n               ty.desc <- Tconstr (n, l, ref Mnil)\n             | _ -> set_name nm None)\n         | _ -> fatal_error \"Ctype.normalize_type_rec\");\n         let fi = repr fi in\n         if fi.level < lowest_level then ()\n         else\n           let fields, row = flatten_fields fi in\n           let fi' = build_fields fi.level fields row in\n           log_type ty;\n           fi.desc <- fi'.desc\n       | _ -> ());\n    iter_type_expr (normalize_type_rec env visited) ty)\n\nlet normalize_type env ty = normalize_type_rec env (ref TypeSet.empty) ty\n\n(*************************)\n(*  Remove dependencies  *)\n(*************************)\n\n(*\n   Variables are left unchanged. 
Other type nodes are duplicated, with\n   levels set to generic level.\n   We cannot use Tsubst here, because unification may be called by\n   expand_abbrev.\n*)\n\nlet nondep_hash = TypeHash.create 47\nlet nondep_variants = TypeHash.create 17\nlet clear_hash () =\n  TypeHash.clear nondep_hash;\n  TypeHash.clear nondep_variants\n\nlet rec nondep_type_rec env id ty =\n  match ty.desc with\n  | Tvar _ | Tunivar _ -> ty\n  | Tlink ty -> nondep_type_rec env id ty\n  | _ -> (\n    try TypeHash.find nondep_hash ty\n    with Not_found ->\n      let ty' = newgenvar () in\n      (* Stub *)\n      TypeHash.add nondep_hash ty ty';\n      ty'.desc <-\n        (match ty.desc with\n        | Tconstr (p, tl, _abbrev) ->\n          if Path.isfree id p then\n            try\n              Tlink\n                (nondep_type_rec env id\n                   (expand_abbrev env (newty2 ty.level ty.desc)))\n              (*\n                 The [Tlink] is important. The expanded type may be a\n                 variable, or may not be completely copied yet\n                 (recursive type), so one cannot just take its\n                 description.\n               *)\n            with Cannot_expand | Unify _ -> raise Not_found\n          else Tconstr (p, List.map (nondep_type_rec env id) tl, ref Mnil)\n        | Tpackage (p, nl, tl) when Path.isfree id p ->\n          let p' = normalize_package_path env p in\n          if Path.isfree id p' then raise Not_found;\n          Tpackage (p', nl, List.map (nondep_type_rec env id) tl)\n        | Tobject (t1, name) ->\n          Tobject\n            ( nondep_type_rec env id t1,\n              ref\n                (match !name with\n                | None -> None\n                | Some (p, tl) ->\n                  if Path.isfree id p then None\n                  else Some (p, List.map (nondep_type_rec env id) tl)) )\n        | Tvariant row -> (\n          let row = row_repr row in\n          let more = repr row.row_more in\n          (* We must 
keep sharing according to the row variable *)\n          try\n            let ty2 = TypeHash.find nondep_variants more in\n            (* This variant type has been already copied *)\n            TypeHash.add nondep_hash ty ty2;\n            Tlink ty2\n          with Not_found -> (\n            (* Register new type first for recursion *)\n            TypeHash.add nondep_variants more ty';\n            let static = static_row row in\n            let more' = if static then newgenty Tnil else more in\n            (* Return a new copy *)\n            let row = copy_row (nondep_type_rec env id) true row true more' in\n            match row.row_name with\n            | Some (p, _tl) when Path.isfree id p ->\n              Tvariant {row with row_name = None}\n            | _ -> Tvariant row))\n        | _ -> copy_type_desc (nondep_type_rec env id) ty.desc);\n      ty')\n\nlet nondep_type env id ty =\n  try\n    let ty' = nondep_type_rec env id ty in\n    clear_hash ();\n    ty'\n  with Not_found ->\n    clear_hash ();\n    raise Not_found\n\nlet () = nondep_type' := nondep_type\n\nlet unroll_abbrev id tl ty =\n  let ty = repr ty and path = Path.Pident id in\n  if is_Tvar ty || List.exists (deep_occur ty) tl || is_object_type path then ty\n  else\n    let ty' = newty2 ty.level ty.desc in\n    link_type ty (newty2 ty.level (Tconstr (path, tl, ref Mnil)));\n    ty'\n\n(* Preserve sharing inside type declarations. 
*)\nlet nondep_type_decl env mid id is_covariant decl =\n  try\n    let params = List.map (nondep_type_rec env mid) decl.type_params in\n    let tk =\n      try map_kind (nondep_type_rec env mid) decl.type_kind\n      with Not_found when is_covariant -> Type_abstract\n    and tm =\n      try\n        match decl.type_manifest with\n        | None -> None\n        | Some ty -> Some (unroll_abbrev id params (nondep_type_rec env mid ty))\n      with Not_found when is_covariant -> None\n    in\n    clear_hash ();\n    let priv =\n      match tm with\n      | Some ty when Btype.has_constr_row ty -> Private\n      | _ -> decl.type_private\n    in\n    {\n      type_params = params;\n      type_arity = decl.type_arity;\n      type_kind = tk;\n      type_manifest = tm;\n      type_private = priv;\n      type_variance = decl.type_variance;\n      type_newtype_level = None;\n      type_loc = decl.type_loc;\n      type_attributes = decl.type_attributes;\n      type_immediate = decl.type_immediate;\n      type_unboxed = decl.type_unboxed;\n    }\n  with Not_found ->\n    clear_hash ();\n    raise Not_found\n\n(* Preserve sharing inside extension constructors. 
*)\nlet nondep_extension_constructor env mid ext =\n  try\n    let type_path, type_params =\n      if Path.isfree mid ext.ext_type_path then\n        let ty =\n          newgenty (Tconstr (ext.ext_type_path, ext.ext_type_params, ref Mnil))\n        in\n        let ty' = nondep_type_rec env mid ty in\n        match (repr ty').desc with\n        | Tconstr (p, tl, _) -> (p, tl)\n        | _ -> raise Not_found\n      else\n        let type_params =\n          List.map (nondep_type_rec env mid) ext.ext_type_params\n        in\n        (ext.ext_type_path, type_params)\n    in\n    let args = map_type_expr_cstr_args (nondep_type_rec env mid) ext.ext_args in\n    let ret_type = may_map (nondep_type_rec env mid) ext.ext_ret_type in\n    clear_hash ();\n    {\n      ext_type_path = type_path;\n      ext_type_params = type_params;\n      ext_args = args;\n      ext_ret_type = ret_type;\n      ext_private = ext.ext_private;\n      ext_attributes = ext.ext_attributes;\n      ext_loc = ext.ext_loc;\n    }\n  with Not_found ->\n    clear_hash ();\n    raise Not_found\n\n(* Preserve sharing inside class types. 
*)\nlet nondep_class_signature env id sign =\n  {\n    csig_self = nondep_type_rec env id sign.csig_self;\n    csig_vars =\n      Vars.map\n        (function\n          | m, v, t -> (m, v, nondep_type_rec env id t))\n        sign.csig_vars;\n    csig_concr = sign.csig_concr;\n    csig_inher =\n      List.map\n        (fun (p, tl) -> (p, List.map (nondep_type_rec env id) tl))\n        sign.csig_inher;\n  }\n\nlet rec nondep_class_type env id = function\n  | Cty_constr (p, _, cty) when Path.isfree id p -> nondep_class_type env id cty\n  | Cty_constr (p, tyl, cty) ->\n    Cty_constr\n      (p, List.map (nondep_type_rec env id) tyl, nondep_class_type env id cty)\n  | Cty_signature sign -> Cty_signature (nondep_class_signature env id sign)\n  | Cty_arrow (l, ty, cty) ->\n    Cty_arrow (l, nondep_type_rec env id ty, nondep_class_type env id cty)\n\nlet nondep_class_declaration env id decl =\n  assert (not (Path.isfree id decl.cty_path));\n  let decl =\n    {\n      cty_params = List.map (nondep_type_rec env id) decl.cty_params;\n      cty_variance = decl.cty_variance;\n      cty_type = nondep_class_type env id decl.cty_type;\n      cty_path = decl.cty_path;\n      cty_new =\n        (match decl.cty_new with\n        | None -> None\n        | Some ty -> Some (nondep_type_rec env id ty));\n      cty_loc = decl.cty_loc;\n      cty_attributes = decl.cty_attributes;\n    }\n  in\n  clear_hash ();\n  decl\n\nlet nondep_cltype_declaration env id decl =\n  assert (not (Path.isfree id decl.clty_path));\n  let decl =\n    {\n      clty_params = List.map (nondep_type_rec env id) decl.clty_params;\n      clty_variance = decl.clty_variance;\n      clty_type = nondep_class_type env id decl.clty_type;\n      clty_path = decl.clty_path;\n      clty_loc = decl.clty_loc;\n      clty_attributes = decl.clty_attributes;\n    }\n  in\n  clear_hash ();\n  decl\n\n(* collapse conjunctive types in class parameters *)\nlet rec collapse_conj env visited ty =\n  let ty = repr ty in\n  if List.memq 
ty visited then ()\n  else\n    let visited = ty :: visited in\n    match ty.desc with\n    | Tvariant row ->\n      let row = row_repr row in\n      List.iter\n        (fun (_l, fi) ->\n          match row_field_repr fi with\n          | Reither (c, t1 :: (_ :: _ as tl), m, e) ->\n            List.iter (unify env t1) tl;\n            set_row_field e (Reither (c, [t1], m, ref None))\n          | _ -> ())\n        row.row_fields;\n      iter_row (collapse_conj env visited) row\n    | _ -> iter_type_expr (collapse_conj env visited) ty\n\nlet collapse_conj_params env params = List.iter (collapse_conj env []) params\n\nlet same_constr env t1 t2 =\n  let t1 = expand_head env t1 in\n  let t2 = expand_head env t2 in\n  match (t1.desc, t2.desc) with\n  | Tconstr (p1, _, _), Tconstr (p2, _, _) -> Path.same p1 p2\n  | _ -> false\n\nlet () = Env.same_constr := same_constr\n\nlet maybe_pointer_type env typ =\n  match (repr typ).desc with\n  | Tconstr (p, _args, _abbrev) -> (\n    try\n      let type_decl = Env.find_type p env in\n      not type_decl.type_immediate\n    with Not_found ->\n      true\n      (* This can happen due to e.g. missing -I options,\n         causing some .cmi files to be unavailable.\n         Maybe we should emit a warning. *)\n    )\n  | Tvariant row ->\n    let row = Btype.row_repr row in\n    (* if all labels are devoid of arguments, not a pointer *)\n    (not row.row_closed)\n    || List.exists\n         (function\n           | _, (Rpresent (Some _) | Reither (false, _, _, _)) -> true\n           | _ -> false)\n         row.row_fields\n  | _ -> true\n"
  },
  {
    "path": "analysis/vendor/ml/ctype.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Operations on core types *)\n\nopen Asttypes\nopen Types\n\nexception Unify of (type_expr * type_expr) list\nexception Tags of label * label\nexception Subtype of (type_expr * type_expr) list * (type_expr * type_expr) list\nexception Cannot_expand\nexception Cannot_apply\nexception Recursive_abbrev\nexception Unification_recursive_abbrev of (type_expr * type_expr) list\n\nval init_def : int -> unit\n(* Set the initial variable level *)\n\nval begin_def : unit -> unit\n(* Raise the variable level by one at the beginning of a definition. 
*)\n\nval end_def : unit -> unit\n(* Lower the variable level by one at the end of a definition *)\n\nval begin_class_def : unit -> unit\nval raise_nongen_level : unit -> unit\nval reset_global_level : unit -> unit\n(* Reset the global level before typing an expression *)\n\nval increase_global_level : unit -> int\nval restore_global_level : int -> unit\n(* This pair of functions is only used in Typetexp *)\n\ntype levels = {\n  current_level: int;\n  nongen_level: int;\n  global_level: int;\n  saved_level: (int * int) list;\n}\nval save_levels : unit -> levels\nval set_levels : levels -> unit\n\nval newty : type_desc -> type_expr\nval newvar : ?name:string -> unit -> type_expr\nval newvar2 : ?name:string -> int -> type_expr\n(* Return a fresh variable *)\n\nval new_global_var : ?name:string -> unit -> type_expr\n(* Return a fresh variable, bound at toplevel\n   (as type variables ['a] in type constraints). *)\n\nval newobj : type_expr -> type_expr\nval newconstr : Path.t -> type_expr list -> type_expr\nval none : type_expr\n(* A dummy type expression *)\n\nval repr : type_expr -> type_expr\n(* Return the canonical representative of a type. 
*)\n\nval object_fields : type_expr -> type_expr\nval flatten_fields :\n  type_expr -> (string * field_kind * type_expr) list * type_expr\n\n(* Transform a field type into a list of pairs label-type *)\n(* The fields are sorted *)\nval associate_fields :\n  (string * field_kind * type_expr) list ->\n  (string * field_kind * type_expr) list ->\n  (string * field_kind * type_expr * field_kind * type_expr) list\n  * (string * field_kind * type_expr) list\n  * (string * field_kind * type_expr) list\nval opened_object : type_expr -> bool\nval close_object : type_expr -> unit\nval row_variable : type_expr -> type_expr\n(* Return the row variable of an open object type *)\n\nval set_object_name :\n  Ident.t -> type_expr -> type_expr list -> type_expr -> unit\nval remove_object_name : type_expr -> unit\nval hide_private_methods : type_expr -> unit\nval find_cltype_for_path : Env.t -> Path.t -> type_declaration * type_expr\nval lid_of_path : ?hash:string -> Path.t -> Longident.t\n\nval sort_row_fields : (label * row_field) list -> (label * row_field) list\nval merge_row_fields :\n  (label * row_field) list ->\n  (label * row_field) list ->\n  (label * row_field) list\n  * (label * row_field) list\n  * (label * row_field * row_field) list\nval filter_row_fields :\n  bool -> (label * row_field) list -> (label * row_field) list\n\nval generalize : type_expr -> unit\n(* Generalize in-place the given type *)\n\nval generalize_expansive : Env.t -> type_expr -> unit\n(* Generalize the covariant part of a type, making\n   contravariant branches non-generalizable *)\n\nval generalize_global : type_expr -> unit\n(* Generalize the structure of a type, lowering variables\n   to !global_level *)\n\nval generalize_structure : type_expr -> unit\n(* Same, but variables are only lowered to !current_level *)\n\nval correct_levels : type_expr -> type_expr\n(* Returns a copy with decreasing levels *)\n\nval limited_generalize : type_expr -> type_expr -> unit\n(* Only generalize some part of 
the type\n   Make the remaining of the type non-generalizable *)\n\nval instance : ?partial:bool -> Env.t -> type_expr -> type_expr\n\n(* Take an instance of a type scheme *)\n(* partial=None  -> normal\n   partial=false -> newvar() for non generic subterms\n   partial=true  -> newty2 ty.level Tvar for non generic subterms *)\nval instance_def : type_expr -> type_expr\n(* use defaults *)\n\nval generic_instance : Env.t -> type_expr -> type_expr\n(* Same as instance, but new nodes at generic_level *)\n\nval instance_list : Env.t -> type_expr list -> type_expr list\n(* Take an instance of a list of type schemes *)\n\nval instance_constructor :\n  ?in_pattern:Env.t ref * int ->\n  constructor_description ->\n  type_expr list * type_expr\n(* Same, for a constructor *)\n\nval instance_parameterized_type :\n  ?keep_names:bool -> type_expr list -> type_expr -> type_expr list * type_expr\nval instance_parameterized_type_2 :\n  type_expr list ->\n  type_expr list ->\n  type_expr ->\n  type_expr list * type_expr list * type_expr\nval instance_declaration : type_declaration -> type_declaration\nval instance_class : type_expr list -> class_type -> type_expr list * class_type\nval instance_poly :\n  ?keep_names:bool ->\n  bool ->\n  type_expr list ->\n  type_expr ->\n  type_expr list * type_expr\n(* Take an instance of a type scheme containing free univars *)\n\nval instance_label :\n  bool -> label_description -> type_expr list * type_expr * type_expr\n(* Same, for a label *)\n\nval apply : Env.t -> type_expr list -> type_expr -> type_expr list -> type_expr\n(* [apply [p1...pN] t [a1...aN]] match the arguments [ai] to\n   the parameters [pi] and returns the corresponding instance of\n   [t]. Exception [Cannot_apply] is raised in case of failure. 
*)\n\nval expand_head_once : Env.t -> type_expr -> type_expr\nval expand_head : Env.t -> type_expr -> type_expr\nval try_expand_once_opt : Env.t -> type_expr -> type_expr\n\nval expand_head_opt : Env.t -> type_expr -> type_expr\n(** The compiler's own version of [expand_head] necessary for type-based\n    optimisations. *)\n\nval full_expand : Env.t -> type_expr -> type_expr\nval extract_concrete_typedecl :\n  Env.t -> type_expr -> Path.t * Path.t * type_declaration\n(* Return the original path of the types, and the first concrete\n   type declaration found expanding it.\n   Raise [Not_found] if none appears or not a type constructor. *)\n\nval enforce_constraints : Env.t -> type_expr -> unit\n\nval unify : Env.t -> type_expr -> type_expr -> unit\n(* Unify the two types given. Raise [Unify] if not possible. *)\n\nval unify_gadt :\n  newtype_level:int -> Env.t ref -> type_expr -> type_expr -> unit\n(* Unify the two types given and update the environment with the\n   local constraints. Raise [Unify] if not possible. *)\n\nval unify_var : Env.t -> type_expr -> type_expr -> unit\n(* Same as [unify], but allow free univars when first type\n   is a variable. *)\n\nval with_passive_variants : ('a -> 'b) -> 'a -> 'b\n(* Call [f] in passive_variants mode, for exhaustiveness check. *)\n\nval filter_arrow : Env.t -> type_expr -> arg_label -> type_expr * type_expr\n(* A special case of unification (with l:'a -> 'b). *)\n\nval filter_method : Env.t -> string -> private_flag -> type_expr -> type_expr\n(* A special case of unification (with {m : 'a; 'b}). *)\n\nval check_filter_method : Env.t -> string -> private_flag -> type_expr -> unit\n(* A special case of unification (with {m : 'a; 'b}), returning unit. 
*)\n\nval occur_in : Env.t -> type_expr -> type_expr -> bool\nval deep_occur : type_expr -> type_expr -> bool\nval filter_self_method :\n  Env.t ->\n  string ->\n  private_flag ->\n  (Ident.t * type_expr) Meths.t ref ->\n  type_expr ->\n  Ident.t * type_expr\nval moregeneral : Env.t -> bool -> type_expr -> type_expr -> bool\n(* Check if the first type scheme is more general than the second. *)\n\nval rigidify : type_expr -> type_expr list\n(* \"Rigidify\" a type and return its type variable *)\n\nval all_distinct_vars : Env.t -> type_expr list -> bool\n(* Check those types are all distinct type variables *)\n\nval matches : Env.t -> type_expr -> type_expr -> bool\n(* Same as [moregeneral false], implemented using the two above\n   functions and backtracking. Ignore levels *)\n\ntype class_match_failure =\n  | CM_Virtual_class\n  | CM_Parameter_arity_mismatch of int * int\n  | CM_Type_parameter_mismatch of Env.t * (type_expr * type_expr) list\n  | CM_Class_type_mismatch of Env.t * class_type * class_type\n  | CM_Parameter_mismatch of Env.t * (type_expr * type_expr) list\n  | CM_Val_type_mismatch of string * Env.t * (type_expr * type_expr) list\n  | CM_Meth_type_mismatch of string * Env.t * (type_expr * type_expr) list\n  | CM_Non_mutable_value of string\n  | CM_Non_concrete_value of string\n  | CM_Missing_value of string\n  | CM_Missing_method of string\n  | CM_Hide_public of string\n  | CM_Hide_virtual of string * string\n  | CM_Public_method of string\n  | CM_Private_method of string\n  | CM_Virtual_method of string\nval match_class_types :\n  ?trace:bool -> Env.t -> class_type -> class_type -> class_match_failure list\n(* Check if the first class type is more general than the second. *)\n\nval equal : Env.t -> bool -> type_expr list -> type_expr list -> bool\n(* [equal env [x1...xn] tau [y1...yn] sigma]\n   checks whether the parameterized types\n   [/\\x1.../\\xn.tau] and [/\\y1.../\\yn.sigma] are equivalent. 
*)\n\nval match_class_declarations :\n  Env.t ->\n  type_expr list ->\n  class_type ->\n  type_expr list ->\n  class_type ->\n  class_match_failure list\n(* Check if the first class type is more general than the second. *)\n\nval enlarge_type : Env.t -> type_expr -> type_expr * bool\n(* Make a type larger, flag is true if some pruning had to be done *)\n\nval subtype : Env.t -> type_expr -> type_expr -> unit -> unit\n(* [subtype env t1 t2] checks that [t1] is a subtype of [t2].\n   It accumulates the constraints the type variables must\n   enforce and returns a function that enforces this\n   constraints. *)\n\nval nondep_type : Env.t -> Ident.t -> type_expr -> type_expr\n(* Return a type equivalent to the given type but without\n   references to the given module identifier. Raise [Not_found]\n   if no such type exists. *)\n\nval nondep_type_decl :\n  Env.t -> Ident.t -> Ident.t -> bool -> type_declaration -> type_declaration\n(* Same for type declarations. *)\n\nval nondep_extension_constructor :\n  Env.t -> Ident.t -> extension_constructor -> extension_constructor\n(* Same for extension constructor *)\n\nval nondep_class_declaration :\n  Env.t -> Ident.t -> class_declaration -> class_declaration\n(* Same for class declarations. *)\n\nval nondep_cltype_declaration :\n  Env.t -> Ident.t -> class_type_declaration -> class_type_declaration\n\n(* Same for class type declarations. 
*)\n(*val correct_abbrev: Env.t -> Path.t -> type_expr list -> type_expr -> unit*)\nval cyclic_abbrev : Env.t -> Ident.t -> type_expr -> bool\nval is_contractive : Env.t -> Path.t -> bool\nval normalize_type : Env.t -> type_expr -> unit\n\nval closed_schema : Env.t -> type_expr -> bool\n(* Check whether the given type scheme contains no non-generic\n   type variables *)\n\nval free_variables : ?env:Env.t -> type_expr -> type_expr list\n(* If env present, then check for incomplete definitions too *)\n\nval closed_type_decl : type_declaration -> type_expr option\nval closed_extension_constructor : extension_constructor -> type_expr option\ntype closed_class_failure =\n  | CC_Method of type_expr * bool * string * type_expr\n  | CC_Value of type_expr * bool * string * type_expr\nval closed_class :\n  type_expr list -> class_signature -> closed_class_failure option\n(* Check whether all type variables are bound *)\n\nval unalias : type_expr -> type_expr\nval signature_of_class_type : class_type -> class_signature\nval self_type : class_type -> type_expr\nval class_type_arity : class_type -> int\nval arity : type_expr -> int\n(* Return the arity (as for curried functions) of the given type. *)\n\nval collapse_conj_params : Env.t -> type_expr list -> unit\n(* Collapse conjunctive types in class parameters *)\n\nval get_current_level : unit -> int\nval wrap_trace_gadt_instances : Env.t -> ('a -> 'b) -> 'a -> 'b\nval reset_reified_var_counter : unit -> unit\n\nval maybe_pointer_type : Env.t -> type_expr -> bool\n(* True if type is possibly pointer, false if definitely not a pointer *)\n\n(* Stubs *)\nval package_subtype :\n  (Env.t ->\n  Path.t ->\n  Longident.t list ->\n  type_expr list ->\n  Path.t ->\n  Longident.t list ->\n  type_expr list ->\n  bool)\n  ref\n\nval variant_is_subtype :\n  (Env.t -> Types.row_desc -> Types.type_expr -> bool) ref\n"
  },
  {
    "path": "analysis/vendor/ml/datarepr.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Compute constructor and label descriptions from type declarations,\n   determining their representation. *)\n\nopen Asttypes\nopen Types\nopen Btype\n\n(* Simplified version of Ctype.free_vars *)\nlet free_vars ?(param = false) ty =\n  let ret = ref TypeSet.empty in\n  let rec loop ty =\n    let ty = repr ty in\n    if ty.level >= lowest_level then (\n      ty.level <- pivot_level - ty.level;\n      match ty.desc with\n      | Tvar _ -> ret := TypeSet.add ty !ret\n      | Tvariant row -> (\n        let row = row_repr row in\n        iter_row loop row;\n        if not (static_row row) then\n          match row.row_more.desc with\n          | Tvar _ when param -> ret := TypeSet.add ty !ret\n          | _ -> loop row.row_more)\n      (* XXX: What about Tobject ? 
*)\n      | _ -> iter_type_expr loop ty)\n  in\n  loop ty;\n  unmark_type ty;\n  !ret\n\nlet newgenconstr path tyl = newgenty (Tconstr (path, tyl, ref Mnil))\n\nlet constructor_existentials cd_args cd_res =\n  let tyl =\n    match cd_args with\n    | Cstr_tuple l -> l\n    | Cstr_record l -> List.map (fun l -> l.ld_type) l\n  in\n  let existentials =\n    match cd_res with\n    | None -> []\n    | Some type_ret ->\n      let arg_vars_set = free_vars (newgenty (Ttuple tyl)) in\n      let res_vars = free_vars type_ret in\n      TypeSet.elements (TypeSet.diff arg_vars_set res_vars)\n  in\n  (tyl, existentials)\n\nlet constructor_args priv cd_args cd_res path rep =\n  let tyl, existentials = constructor_existentials cd_args cd_res in\n  match cd_args with\n  | Cstr_tuple l -> (existentials, l, None)\n  | Cstr_record lbls ->\n    let arg_vars_set = free_vars ~param:true (newgenty (Ttuple tyl)) in\n    let type_params = TypeSet.elements arg_vars_set in\n    let type_unboxed =\n      match rep with\n      | Record_unboxed _ -> unboxed_true_default_false\n      | _ -> unboxed_false_default_false\n    in\n    let tdecl =\n      {\n        type_params;\n        type_arity = List.length type_params;\n        type_kind = Type_record (lbls, rep);\n        type_private = priv;\n        type_manifest = None;\n        type_variance = List.map (fun _ -> Variance.full) type_params;\n        type_newtype_level = None;\n        type_loc = Location.none;\n        type_attributes = [];\n        type_immediate = false;\n        type_unboxed;\n      }\n    in\n    (existentials, [newgenconstr path type_params], Some tdecl)\n\nlet internal_optional = \"internal.optional\"\n\nlet optional_shape : Parsetree.attribute =\n  ({txt = internal_optional; loc = Location.none}, Parsetree.PStr [])\n\nlet constructor_has_optional_shape\n    ({cstr_attributes = attrs} : constructor_description) =\n  List.exists (fun (x, _) -> x.txt = internal_optional) attrs\n\nlet constructor_descrs ty_path decl cstrs 
=\n  let ty_res = newgenconstr ty_path decl.type_params in\n  let num_consts = ref 0 and num_nonconsts = ref 0 and num_normal = ref 0 in\n  List.iter\n    (fun {cd_args; cd_res; _} ->\n      if cd_args = Cstr_tuple [] then incr num_consts else incr num_nonconsts;\n      if cd_res = None then incr num_normal)\n    cstrs;\n  let has_optional attrs =\n    Ext_list.exists attrs (fun ({txt}, _) -> txt = \"res.optional\")\n  in\n  let rec describe_constructors idx_const idx_nonconst = function\n    | [] -> []\n    | {cd_id; cd_args; cd_res; cd_loc; cd_attributes} :: rem ->\n      let ty_res =\n        match cd_res with\n        | Some ty_res' -> ty_res'\n        | None -> ty_res\n      in\n      let tag, descr_rem =\n        match cd_args with\n        | _ when decl.type_unboxed.unboxed ->\n          assert (rem = []);\n          (Cstr_unboxed, [])\n        | Cstr_tuple [] ->\n          ( Cstr_constant idx_const,\n            describe_constructors (idx_const + 1) idx_nonconst rem )\n        | _ ->\n          ( Cstr_block idx_nonconst,\n            describe_constructors idx_const (idx_nonconst + 1) rem )\n      in\n      let cstr_name = Ident.name cd_id in\n      let optional_labels =\n        match cd_args with\n        | Cstr_tuple _ -> []\n        | Cstr_record lbls ->\n          Ext_list.filter_map lbls (fun {ld_id; ld_attributes; _} ->\n              if has_optional ld_attributes then Some ld_id.name else None)\n      in\n      let existentials, cstr_args, cstr_inlined =\n        let representation =\n          if decl.type_unboxed.unboxed then Record_unboxed true\n          else\n            Record_inlined\n              {\n                tag = idx_nonconst;\n                name = cstr_name;\n                num_nonconsts = !num_nonconsts;\n                optional_labels;\n                attrs = cd_attributes;\n              }\n        in\n        constructor_args decl.type_private cd_args cd_res\n          (Path.Pdot (ty_path, cstr_name, Path.nopos))\n          
representation\n      in\n      let cstr =\n        {\n          cstr_name;\n          cstr_res = ty_res;\n          cstr_existentials = existentials;\n          cstr_args;\n          cstr_arity = List.length cstr_args;\n          cstr_tag = tag;\n          cstr_consts = !num_consts;\n          cstr_nonconsts = !num_nonconsts;\n          cstr_normal = !num_normal;\n          cstr_private = decl.type_private;\n          cstr_generalized = cd_res <> None;\n          cstr_loc = cd_loc;\n          cstr_attributes = cd_attributes;\n          cstr_inlined;\n        }\n      in\n      (cd_id, cstr) :: descr_rem\n  in\n  let result = describe_constructors 0 0 cstrs in\n  match result with\n  | [\n      (({Ident.name = \"None\"} as a_id), ({cstr_args = []} as a_descr));\n      (({Ident.name = \"Some\"} as b_id), ({cstr_args = [_]} as b_descr));\n    ]\n  | [\n      (({Ident.name = \"Some\"} as a_id), ({cstr_args = [_]} as a_descr));\n      (({Ident.name = \"None\"} as b_id), ({cstr_args = []} as b_descr));\n    ] ->\n    [\n      ( a_id,\n        {\n          a_descr with\n          cstr_attributes = optional_shape :: a_descr.cstr_attributes;\n        } );\n      ( b_id,\n        {\n          b_descr with\n          cstr_attributes = optional_shape :: b_descr.cstr_attributes;\n        } );\n    ]\n  | _ -> result\n\nlet extension_descr path_ext ext =\n  let ty_res =\n    match ext.ext_ret_type with\n    | Some type_ret -> type_ret\n    | None -> newgenconstr ext.ext_type_path ext.ext_type_params\n  in\n  let existentials, cstr_args, cstr_inlined =\n    constructor_args ext.ext_private ext.ext_args ext.ext_ret_type path_ext\n      Record_extension\n  in\n  {\n    cstr_name = Path.last path_ext;\n    cstr_res = ty_res;\n    cstr_existentials = existentials;\n    cstr_args;\n    cstr_arity = List.length cstr_args;\n    cstr_tag = Cstr_extension (path_ext, cstr_args = []);\n    cstr_consts = -1;\n    cstr_nonconsts = -1;\n    cstr_private = ext.ext_private;\n    cstr_normal = 
-1;\n    cstr_generalized = ext.ext_ret_type <> None;\n    cstr_loc = ext.ext_loc;\n    cstr_attributes = ext.ext_attributes;\n    cstr_inlined;\n  }\n\nlet none = {desc = Ttuple []; level = -1; id = -1}\n(* Clearly ill-formed type *)\n\nlet dummy_label =\n  {\n    lbl_name = \"\";\n    lbl_res = none;\n    lbl_arg = none;\n    lbl_mut = Immutable;\n    lbl_pos = -1;\n    lbl_all = [||];\n    lbl_repres = Record_regular;\n    lbl_private = Public;\n    lbl_loc = Location.none;\n    lbl_attributes = [];\n  }\n\nlet label_descrs ty_res lbls repres priv =\n  let all_labels = Array.make (List.length lbls) dummy_label in\n  let rec describe_labels num = function\n    | [] -> []\n    | l :: rest ->\n      let lbl =\n        {\n          lbl_name = Ident.name l.ld_id;\n          lbl_res = ty_res;\n          lbl_arg = l.ld_type;\n          lbl_mut = l.ld_mutable;\n          lbl_pos = num;\n          lbl_all = all_labels;\n          lbl_repres = repres;\n          lbl_private = priv;\n          lbl_loc = l.ld_loc;\n          lbl_attributes = l.ld_attributes;\n        }\n      in\n      all_labels.(num) <- lbl;\n      (l.ld_id, lbl) :: describe_labels (num + 1) rest\n  in\n  describe_labels 0 lbls\n\nexception Constr_not_found\n\nlet rec find_constr tag num_const num_nonconst = function\n  | [] -> raise Constr_not_found\n  | ({cd_args = Cstr_tuple []; _} as c) :: rem ->\n    if Types.equal_tag tag (Cstr_constant num_const) then c\n    else find_constr tag (num_const + 1) num_nonconst rem\n  | c :: rem ->\n    if Types.equal_tag tag (Cstr_block num_nonconst) || tag = Cstr_unboxed then\n      c\n    else find_constr tag num_const (num_nonconst + 1) rem\n\nlet find_constr_by_tag tag cstrlist = find_constr tag 0 0 cstrlist\n\nlet constructors_of_type ty_path decl =\n  match decl.type_kind with\n  | Type_variant cstrs -> constructor_descrs ty_path decl cstrs\n  | Type_record _ | Type_abstract | Type_open -> []\n\nlet labels_of_type ty_path decl =\n  match decl.type_kind with\n  | 
Type_record (labels, rep) ->\n    label_descrs\n      (newgenconstr ty_path decl.type_params)\n      labels rep decl.type_private\n  | Type_variant _ | Type_abstract | Type_open -> []\n\n(* Set row_name in Env, cf. GPR#1204/1329 *)\nlet set_row_name decl path =\n  match decl.type_manifest with\n  | None -> ()\n  | Some ty -> (\n    let ty = repr ty in\n    match ty.desc with\n    | Tvariant row when static_row row ->\n      let row =\n        {(row_repr row) with row_name = Some (path, decl.type_params)}\n      in\n      ty.desc <- Tvariant row\n    | _ -> ())\n"
  },
  {
    "path": "analysis/vendor/ml/datarepr.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Compute constructor and label descriptions from type declarations,\n   determining their representation. 
*)\n\nopen Types\n\nval constructor_has_optional_shape : Types.constructor_description -> bool\n\nval extension_descr : Path.t -> extension_constructor -> constructor_description\n\nval labels_of_type :\n  Path.t -> type_declaration -> (Ident.t * label_description) list\nval constructors_of_type :\n  Path.t -> type_declaration -> (Ident.t * constructor_description) list\n\nexception Constr_not_found\n\nval find_constr_by_tag :\n  constructor_tag -> constructor_declaration list -> constructor_declaration\n\nval constructor_existentials :\n  constructor_arguments -> type_expr option -> type_expr list * type_expr list\n(** Takes [cd_args] and [cd_res] from a [constructor_declaration] and returns:\n    - the types of the constructor's arguments\n    - the existential variables introduced by the constructor *)\n\n(* Set the polymorphic variant row_name field *)\nval set_row_name : type_declaration -> Path.t -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/delayed_checks.ml",
    "content": "let delayed_checks = ref []\nlet reset_delayed_checks () = delayed_checks := []\nlet add_delayed_check f =\n  delayed_checks := (f, Warnings.backup ()) :: !delayed_checks\n\nlet force_delayed_checks () =\n  (* checks may change type levels *)\n  let snap = Btype.snapshot () in\n  let w_old = Warnings.backup () in\n  List.iter\n    (fun (f, w) ->\n      Warnings.restore w;\n      f ())\n    (List.rev !delayed_checks);\n  Warnings.restore w_old;\n  reset_delayed_checks ();\n  Btype.backtrack snap\n"
  },
  {
    "path": "analysis/vendor/ml/delayed_checks.mli",
    "content": "val reset_delayed_checks : unit -> unit\nval add_delayed_check : (unit -> unit) -> unit\nval force_delayed_checks : unit -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/depend.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1999 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Location\nopen Longident\nopen Parsetree\n\nlet pp_deps = ref []\n\nmodule StringSet = Set.Make (struct\n  type t = string\n  let compare = compare\nend)\nmodule StringMap = Map.Make (String)\n\n(* Module resolution map *)\n(* Node (set of imports for this path, map for submodules) *)\ntype map_tree = Node of StringSet.t * bound_map\nand bound_map = map_tree StringMap.t\nlet bound = Node (StringSet.empty, StringMap.empty)\n\n(*let get_free (Node (s, _m)) = s*)\nlet get_map (Node (_s, m)) = m\nlet make_leaf s = Node (StringSet.singleton s, StringMap.empty)\nlet make_node m = Node (StringSet.empty, m)\nlet rec weaken_map s (Node (s0, m0)) =\n  Node (StringSet.union s s0, StringMap.map (weaken_map s) m0)\nlet rec collect_free (Node (s, m)) =\n  StringMap.fold (fun _ n -> StringSet.union (collect_free n)) m s\n\n(* Returns the imports required to access the 
structure at path p *)\n(* Only raises Not_found if the head of p is not in the toplevel map *)\nlet rec lookup_free p m =\n  match p with\n  | [] -> raise Not_found\n  | s :: p -> (\n    let (Node (f, m')) = StringMap.find s m in\n    try lookup_free p m' with Not_found -> f)\n\n(* Returns the node corresponding to the structure at path p *)\nlet rec lookup_map lid m =\n  match lid with\n  | Lident s -> StringMap.find s m\n  | Ldot (l, s) -> StringMap.find s (get_map (lookup_map l m))\n  | Lapply _ -> raise Not_found\n\n(* Collect free module identifiers in the a.s.t. *)\n\nlet free_structure_names = ref StringSet.empty\n\nlet add_names s =\n  free_structure_names := StringSet.union s !free_structure_names\n\nlet rec add_path bv ?(p = []) = function\n  | Lident s ->\n    let free =\n      try lookup_free (s :: p) bv with Not_found -> StringSet.singleton s\n    in\n    (*StringSet.iter (fun s -> Printf.eprintf \"%s \" s) free;\n      prerr_endline \"\";*)\n    add_names free\n  | Ldot (l, s) -> add_path bv ~p:(s :: p) l\n  | Lapply (l1, l2) ->\n    add_path bv l1;\n    add_path bv l2\n\nlet open_module bv lid =\n  match lookup_map lid bv with\n  | Node (s, m) ->\n    add_names s;\n    StringMap.fold StringMap.add m bv\n  | exception Not_found ->\n    add_path bv lid;\n    bv\n\nlet add_parent bv lid =\n  match lid.txt with\n  | Ldot (l, _s) -> add_path bv l\n  | _ -> ()\n\nlet add = add_parent\n\nlet addmodule bv lid = add_path bv lid.txt\n\nlet handle_extension ext =\n  match (fst ext).txt with\n  | \"error\" | \"ocaml.error\" ->\n    raise (Location.Error (Builtin_attributes.error_of_extension ext))\n  | _ -> ()\n\nlet rec add_type bv ty =\n  match ty.ptyp_desc with\n  | Ptyp_any -> ()\n  | Ptyp_var _ -> ()\n  | Ptyp_arrow (_, t1, t2) ->\n    add_type bv t1;\n    add_type bv t2\n  | Ptyp_tuple tl -> List.iter (add_type bv) tl\n  | Ptyp_constr (c, tl) ->\n    add bv c;\n    List.iter (add_type bv) tl\n  | Ptyp_object (fl, _) ->\n    List.iter\n      (function\n    
    | Otag (_, _, t) -> add_type bv t\n        | Oinherit t -> add_type bv t)\n      fl\n  | Ptyp_class (c, tl) ->\n    add bv c;\n    List.iter (add_type bv) tl\n  | Ptyp_alias (t, _) -> add_type bv t\n  | Ptyp_variant (fl, _, _) ->\n    List.iter\n      (function\n        | Rtag (_, _, _, stl) -> List.iter (add_type bv) stl\n        | Rinherit sty -> add_type bv sty)\n      fl\n  | Ptyp_poly (_, t) -> add_type bv t\n  | Ptyp_package pt -> add_package_type bv pt\n  | Ptyp_extension e -> handle_extension e\n\nand add_package_type bv (lid, l) =\n  add bv lid;\n  List.iter (add_type bv) (List.map (fun (_, e) -> e) l)\n\nlet add_opt add_fn bv = function\n  | None -> ()\n  | Some x -> add_fn bv x\n\nlet add_constructor_arguments bv = function\n  | Pcstr_tuple l -> List.iter (add_type bv) l\n  | Pcstr_record l -> List.iter (fun l -> add_type bv l.pld_type) l\n\nlet add_constructor_decl bv pcd =\n  add_constructor_arguments bv pcd.pcd_args;\n  Misc.may (add_type bv) pcd.pcd_res\n\nlet add_type_declaration bv td =\n  List.iter\n    (fun (ty1, ty2, _) ->\n      add_type bv ty1;\n      add_type bv ty2)\n    td.ptype_cstrs;\n  add_opt add_type bv td.ptype_manifest;\n  let add_tkind = function\n    | Ptype_abstract -> ()\n    | Ptype_variant cstrs -> List.iter (add_constructor_decl bv) cstrs\n    | Ptype_record lbls -> List.iter (fun pld -> add_type bv pld.pld_type) lbls\n    | Ptype_open -> ()\n  in\n  add_tkind td.ptype_kind\n\nlet add_extension_constructor bv ext =\n  match ext.pext_kind with\n  | Pext_decl (args, rty) ->\n    add_constructor_arguments bv args;\n    Misc.may (add_type bv) rty\n  | Pext_rebind lid -> add bv lid\n\nlet add_type_extension bv te =\n  add bv te.ptyext_path;\n  List.iter (add_extension_constructor bv) te.ptyext_constructors\n\nlet rec add_class_type bv cty =\n  match cty.pcty_desc with\n  | Pcty_constr (l, tyl) ->\n    add bv l;\n    List.iter (add_type bv) tyl\n  | Pcty_signature {pcsig_self = ty; pcsig_fields = fieldl} ->\n    add_type bv 
ty;\n    List.iter (add_class_type_field bv) fieldl\n  | Pcty_arrow (_, ty1, cty2) ->\n    add_type bv ty1;\n    add_class_type bv cty2\n  | Pcty_extension e -> handle_extension e\n  | Pcty_open (_ovf, m, e) ->\n    let bv = open_module bv m.txt in\n    add_class_type bv e\n\nand add_class_type_field bv pctf =\n  match pctf.pctf_desc with\n  | Pctf_inherit cty -> add_class_type bv cty\n  | Pctf_val (_, _, _, ty) -> add_type bv ty\n  | Pctf_method (_, _, _, ty) -> add_type bv ty\n  | Pctf_constraint (ty1, ty2) ->\n    add_type bv ty1;\n    add_type bv ty2\n  | Pctf_attribute _ -> ()\n  | Pctf_extension e -> handle_extension e\n\nlet add_class_description bv infos = add_class_type bv infos.pci_expr\n\nlet add_class_type_declaration = add_class_description\n\nlet pattern_bv = ref StringMap.empty\n\nlet rec add_pattern bv pat =\n  match pat.ppat_desc with\n  | Ppat_any -> ()\n  | Ppat_var _ -> ()\n  | Ppat_alias (p, _) -> add_pattern bv p\n  | Ppat_interval _ | Ppat_constant _ -> ()\n  | Ppat_tuple pl -> List.iter (add_pattern bv) pl\n  | Ppat_construct (c, op) ->\n    add bv c;\n    add_opt add_pattern bv op\n  | Ppat_record (pl, _) ->\n    List.iter\n      (fun (lbl, p) ->\n        add bv lbl;\n        add_pattern bv p)\n      pl\n  | Ppat_array pl -> List.iter (add_pattern bv) pl\n  | Ppat_or (p1, p2) ->\n    add_pattern bv p1;\n    add_pattern bv p2\n  | Ppat_constraint (p, ty) ->\n    add_pattern bv p;\n    add_type bv ty\n  | Ppat_variant (_, op) -> add_opt add_pattern bv op\n  | Ppat_type li -> add bv li\n  | Ppat_lazy p -> add_pattern bv p\n  | Ppat_unpack id -> pattern_bv := StringMap.add id.txt bound !pattern_bv\n  | Ppat_open (m, p) ->\n    let bv = open_module bv m.txt in\n    add_pattern bv p\n  | Ppat_exception p -> add_pattern bv p\n  | Ppat_extension e -> handle_extension e\n\nlet add_pattern bv pat =\n  pattern_bv := bv;\n  add_pattern bv pat;\n  !pattern_bv\n\nlet rec add_expr bv exp =\n  match exp.pexp_desc with\n  | Pexp_ident l -> add bv l\n  | 
Pexp_constant _ -> ()\n  | Pexp_let (rf, pel, e) ->\n    let bv = add_bindings rf bv pel in\n    add_expr bv e\n  | Pexp_fun (_, opte, p, e) ->\n    add_opt add_expr bv opte;\n    add_expr (add_pattern bv p) e\n  | Pexp_function pel -> add_cases bv pel\n  | Pexp_apply (e, el) ->\n    add_expr bv e;\n    List.iter (fun (_, e) -> add_expr bv e) el\n  | Pexp_match (e, pel) ->\n    add_expr bv e;\n    add_cases bv pel\n  | Pexp_try (e, pel) ->\n    add_expr bv e;\n    add_cases bv pel\n  | Pexp_tuple el -> List.iter (add_expr bv) el\n  | Pexp_construct (c, opte) ->\n    add bv c;\n    add_opt add_expr bv opte\n  | Pexp_variant (_, opte) -> add_opt add_expr bv opte\n  | Pexp_record (lblel, opte) ->\n    List.iter\n      (fun (lbl, e) ->\n        add bv lbl;\n        add_expr bv e)\n      lblel;\n    add_opt add_expr bv opte\n  | Pexp_field (e, fld) ->\n    add_expr bv e;\n    add bv fld\n  | Pexp_setfield (e1, fld, e2) ->\n    add_expr bv e1;\n    add bv fld;\n    add_expr bv e2\n  | Pexp_array el -> List.iter (add_expr bv) el\n  | Pexp_ifthenelse (e1, e2, opte3) ->\n    add_expr bv e1;\n    add_expr bv e2;\n    add_opt add_expr bv opte3\n  | Pexp_sequence (e1, e2) ->\n    add_expr bv e1;\n    add_expr bv e2\n  | Pexp_while (e1, e2) ->\n    add_expr bv e1;\n    add_expr bv e2\n  | Pexp_for (_, e1, e2, _, e3) ->\n    add_expr bv e1;\n    add_expr bv e2;\n    add_expr bv e3\n  | Pexp_coerce (e1, oty2, ty3) ->\n    add_expr bv e1;\n    add_opt add_type bv oty2;\n    add_type bv ty3\n  | Pexp_constraint (e1, ty2) ->\n    add_expr bv e1;\n    add_type bv ty2\n  | Pexp_send (e, _m) -> add_expr bv e\n  | Pexp_new li -> add bv li\n  | Pexp_setinstvar (_v, e) -> add_expr bv e\n  | Pexp_override sel -> List.iter (fun (_s, e) -> add_expr bv e) sel\n  | Pexp_letmodule (id, m, e) ->\n    let b = add_module_binding bv m in\n    add_expr (StringMap.add id.txt b bv) e\n  | Pexp_letexception (_, e) -> add_expr bv e\n  | Pexp_assert e -> add_expr bv e\n  | Pexp_lazy e -> add_expr bv e\n  
| Pexp_poly (e, t) ->\n    add_expr bv e;\n    add_opt add_type bv t\n  | Pexp_object {pcstr_self = pat; pcstr_fields = fieldl} ->\n    let bv = add_pattern bv pat in\n    List.iter (add_class_field bv) fieldl\n  | Pexp_newtype (_, e) -> add_expr bv e\n  | Pexp_pack m -> add_module bv m\n  | Pexp_open (_ovf, m, e) ->\n    let bv = open_module bv m.txt in\n    add_expr bv e\n  | Pexp_extension\n      (( {txt = \"ocaml.extension_constructor\" | \"extension_constructor\"; _},\n         PStr [item] ) as e) -> (\n    match item.pstr_desc with\n    | Pstr_eval ({pexp_desc = Pexp_construct (c, None)}, _) -> add bv c\n    | _ -> handle_extension e)\n  | Pexp_extension e -> handle_extension e\n  | Pexp_unreachable -> ()\n\nand add_cases bv cases = List.iter (add_case bv) cases\n\nand add_case bv {pc_lhs; pc_guard; pc_rhs} =\n  let bv = add_pattern bv pc_lhs in\n  add_opt add_expr bv pc_guard;\n  add_expr bv pc_rhs\n\nand add_bindings recf bv pel =\n  let bv' = List.fold_left (fun bv x -> add_pattern bv x.pvb_pat) bv pel in\n  let bv = if recf = Recursive then bv' else bv in\n  List.iter (fun x -> add_expr bv x.pvb_expr) pel;\n  bv'\n\nand add_modtype bv mty =\n  match mty.pmty_desc with\n  | Pmty_ident l -> add bv l\n  | Pmty_alias l -> addmodule bv l\n  | Pmty_signature s -> add_signature bv s\n  | Pmty_functor (id, mty1, mty2) ->\n    Misc.may (add_modtype bv) mty1;\n    add_modtype (StringMap.add id.txt bound bv) mty2\n  | Pmty_with (mty, cstrl) ->\n    add_modtype bv mty;\n    List.iter\n      (function\n        | Pwith_type (_, td) -> add_type_declaration bv td\n        | Pwith_module (_, lid) -> addmodule bv lid\n        | Pwith_typesubst (_, td) -> add_type_declaration bv td\n        | Pwith_modsubst (_, lid) -> addmodule bv lid)\n      cstrl\n  | Pmty_typeof m -> add_module bv m\n  | Pmty_extension e -> handle_extension e\n\nand add_module_alias bv l =\n  try\n    add_parent bv l;\n    lookup_map l.txt bv\n  with Not_found -> (\n    match l.txt with\n    | Lident s 
-> make_leaf s\n    | _ ->\n      addmodule bv l;\n      bound (* cannot delay *))\n\nand add_modtype_binding bv mty =\n  if not !Clflags.transparent_modules then add_modtype bv mty;\n  match mty.pmty_desc with\n  | Pmty_alias l -> add_module_alias bv l\n  | Pmty_signature s -> make_node (add_signature_binding bv s)\n  | Pmty_typeof modl -> add_module_binding bv modl\n  | _ ->\n    if !Clflags.transparent_modules then add_modtype bv mty;\n    bound\n\nand add_signature bv sg = ignore (add_signature_binding bv sg)\n\nand add_signature_binding bv sg =\n  snd (List.fold_left add_sig_item (bv, StringMap.empty) sg)\n\nand add_sig_item (bv, m) item =\n  match item.psig_desc with\n  | Psig_value vd ->\n    add_type bv vd.pval_type;\n    (bv, m)\n  | Psig_type (_, dcls) ->\n    List.iter (add_type_declaration bv) dcls;\n    (bv, m)\n  | Psig_typext te ->\n    add_type_extension bv te;\n    (bv, m)\n  | Psig_exception pext ->\n    add_extension_constructor bv pext;\n    (bv, m)\n  | Psig_module pmd ->\n    let m' = add_modtype_binding bv pmd.pmd_type in\n    let add = StringMap.add pmd.pmd_name.txt m' in\n    (add bv, add m)\n  | Psig_recmodule decls ->\n    let add =\n      List.fold_right (fun pmd -> StringMap.add pmd.pmd_name.txt bound) decls\n    in\n    let bv' = add bv and m' = add m in\n    List.iter (fun pmd -> add_modtype bv' pmd.pmd_type) decls;\n    (bv', m')\n  | Psig_modtype x ->\n    (match x.pmtd_type with\n    | None -> ()\n    | Some mty -> add_modtype bv mty);\n    (bv, m)\n  | Psig_open od -> (open_module bv od.popen_lid.txt, m)\n  | Psig_include incl ->\n    let (Node (s, m')) = add_modtype_binding bv incl.pincl_mod in\n    add_names s;\n    let add = StringMap.fold StringMap.add m' in\n    (add bv, add m)\n  | Psig_class () -> (bv, m)\n  | Psig_class_type cdtl ->\n    List.iter (add_class_type_declaration bv) cdtl;\n    (bv, m)\n  | Psig_attribute _ -> (bv, m)\n  | Psig_extension (e, _) ->\n    handle_extension e;\n    (bv, m)\n\nand add_module_binding 
bv modl =\n  if not !Clflags.transparent_modules then add_module bv modl;\n  match modl.pmod_desc with\n  | Pmod_ident l -> (\n    try\n      add_parent bv l;\n      lookup_map l.txt bv\n    with Not_found -> (\n      match l.txt with\n      | Lident s -> make_leaf s\n      | _ ->\n        addmodule bv l;\n        bound))\n  | Pmod_structure s -> make_node (snd (add_structure_binding bv s))\n  | _ ->\n    if !Clflags.transparent_modules then add_module bv modl;\n    bound\n\nand add_module bv modl =\n  match modl.pmod_desc with\n  | Pmod_ident l -> addmodule bv l\n  | Pmod_structure s -> ignore (add_structure bv s)\n  | Pmod_functor (id, mty, modl) ->\n    Misc.may (add_modtype bv) mty;\n    add_module (StringMap.add id.txt bound bv) modl\n  | Pmod_apply (mod1, mod2) ->\n    add_module bv mod1;\n    add_module bv mod2\n  | Pmod_constraint (modl, mty) ->\n    add_module bv modl;\n    add_modtype bv mty\n  | Pmod_unpack e -> add_expr bv e\n  | Pmod_extension e -> handle_extension e\n\nand add_structure bv item_list =\n  let bv, m = add_structure_binding bv item_list in\n  add_names (collect_free (make_node m));\n  bv\n\nand add_structure_binding bv item_list =\n  List.fold_left add_struct_item (bv, StringMap.empty) item_list\n\nand add_struct_item (bv, m) item : _ StringMap.t * _ StringMap.t =\n  match item.pstr_desc with\n  | Pstr_eval (e, _attrs) ->\n    add_expr bv e;\n    (bv, m)\n  | Pstr_value (rf, pel) ->\n    let bv = add_bindings rf bv pel in\n    (bv, m)\n  | Pstr_primitive vd ->\n    add_type bv vd.pval_type;\n    (bv, m)\n  | Pstr_type (_, dcls) ->\n    List.iter (add_type_declaration bv) dcls;\n    (bv, m)\n  | Pstr_typext te ->\n    add_type_extension bv te;\n    (bv, m)\n  | Pstr_exception pext ->\n    add_extension_constructor bv pext;\n    (bv, m)\n  | Pstr_module x ->\n    let b = add_module_binding bv x.pmb_expr in\n    let add = StringMap.add x.pmb_name.txt b in\n    (add bv, add m)\n  | Pstr_recmodule bindings ->\n    let add =\n      
List.fold_right (fun x -> StringMap.add x.pmb_name.txt bound) bindings\n    in\n    let bv' = add bv and m = add m in\n    List.iter (fun x -> add_module bv' x.pmb_expr) bindings;\n    (bv', m)\n  | Pstr_modtype x ->\n    (match x.pmtd_type with\n    | None -> ()\n    | Some mty -> add_modtype bv mty);\n    (bv, m)\n  | Pstr_open od -> (open_module bv od.popen_lid.txt, m)\n  | Pstr_class () -> (bv, m)\n  | Pstr_class_type cdtl ->\n    List.iter (add_class_type_declaration bv) cdtl;\n    (bv, m)\n  | Pstr_include incl ->\n    let (Node (s, m')) = add_module_binding bv incl.pincl_mod in\n    add_names s;\n    let add = StringMap.fold StringMap.add m' in\n    (add bv, add m)\n  | Pstr_attribute _ -> (bv, m)\n  | Pstr_extension (e, _) ->\n    handle_extension e;\n    (bv, m)\n\nand add_implementation bv l =\n  if !Clflags.transparent_modules then ignore (add_structure_binding bv l)\n  else ignore (add_structure bv l)\n\nand add_implementation_binding bv l = snd (add_structure_binding bv l)\n\nand add_class_field bv pcf =\n  match pcf.pcf_desc with\n  | Pcf_inherit () -> ()\n  | Pcf_val (_, _, Cfk_concrete (_, e)) | Pcf_method (_, _, Cfk_concrete (_, e))\n    ->\n    add_expr bv e\n  | Pcf_val (_, _, Cfk_virtual ty) | Pcf_method (_, _, Cfk_virtual ty) ->\n    add_type bv ty\n  | Pcf_constraint (ty1, ty2) ->\n    add_type bv ty1;\n    add_type bv ty2\n  | Pcf_initializer e -> add_expr bv e\n  | Pcf_attribute _ -> ()\n  | Pcf_extension e -> handle_extension e\n"
  },
  {
    "path": "analysis/vendor/ml/depend.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1999 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Module dependencies. *)\n\nmodule StringSet : Set.S with type elt = string\nmodule StringMap : Map.S with type key = string\n\ntype map_tree = Node of StringSet.t * bound_map\nand bound_map = map_tree StringMap.t\nval make_leaf : string -> map_tree\nval make_node : bound_map -> map_tree\nval weaken_map : StringSet.t -> map_tree -> map_tree\n\nval free_structure_names : StringSet.t ref\n\n(* dependencies found by preprocessing tools (plugins) *)\nval pp_deps : string list ref\n\nval open_module : bound_map -> Longident.t -> bound_map\n\nval add_signature : bound_map -> Parsetree.signature -> unit\n\nval add_implementation : bound_map -> Parsetree.structure -> unit\n\nval add_implementation_binding : bound_map -> Parsetree.structure -> bound_map\nval add_signature_binding : bound_map -> Parsetree.signature -> bound_map\n"
  },
  {
    "path": "analysis/vendor/ml/docstrings.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                               Leo White                                *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Location\n\n(* Docstrings *)\n\n(* A docstring is \"attached\" if it has been inserted in the AST. This\n   is used for generating unexpected docstring warnings. *)\ntype ds_attached =\n  | Unattached (* Not yet attached anything.*)\n  | Info (* Attached to a field or constructor. *)\n  | Docs (* Attached to an item or as floating text. *)\n\n(* A docstring is \"associated\" with an item if there are no blank lines between\n   them. This is used for generating docstring ambiguity warnings. 
*)\ntype ds_associated =\n  | Zero (* Not associated with an item *)\n  | One (* Associated with one item *)\n  | Many (* Associated with multiple items (ambiguity) *)\n\ntype docstring = {\n  ds_body: string;\n  ds_loc: Location.t;\n  mutable ds_attached: ds_attached;\n  mutable ds_associated: ds_associated;\n}\n\n(* List of docstrings *)\n\nlet docstrings : docstring list ref = ref []\n\n(* Warn for unused and ambiguous docstrings *)\n\nlet warn_bad_docstrings () =\n  if Warnings.is_active (Warnings.Bad_docstring true) then\n    List.iter\n      (fun ds ->\n        match ds.ds_attached with\n        | Info -> ()\n        | Unattached -> prerr_warning ds.ds_loc (Warnings.Bad_docstring true)\n        | Docs -> (\n          match ds.ds_associated with\n          | Zero | One -> ()\n          | Many -> prerr_warning ds.ds_loc (Warnings.Bad_docstring false)))\n      (List.rev !docstrings)\n\n(* Docstring constructors and destructors *)\n\nlet docstring body loc =\n  let ds =\n    {\n      ds_body = body;\n      ds_loc = loc;\n      ds_attached = Unattached;\n      ds_associated = Zero;\n    }\n  in\n  ds\n\nlet register ds = docstrings := ds :: !docstrings\n\nlet docstring_body ds = ds.ds_body\n\nlet docstring_loc ds = ds.ds_loc\n\n(* Docstrings attached to items *)\n\ntype docs = {docs_pre: docstring option; docs_post: docstring option}\n\nlet empty_docs = {docs_pre = None; docs_post = None}\n\nlet doc_loc = {txt = \"ocaml.doc\"; loc = Location.none}\n\nlet docs_attr ds =\n  let open Parsetree in\n  let exp =\n    {\n      pexp_desc = Pexp_constant (Pconst_string (ds.ds_body, None));\n      pexp_loc = ds.ds_loc;\n      pexp_attributes = [];\n    }\n  in\n  let item = {pstr_desc = Pstr_eval (exp, []); pstr_loc = exp.pexp_loc} in\n  (doc_loc, PStr [item])\n\nlet add_docs_attrs docs attrs =\n  let attrs =\n    match docs.docs_pre with\n    | None | Some {ds_body = \"\"; _} -> attrs\n    | Some ds -> docs_attr ds :: attrs\n  in\n  let attrs =\n    match docs.docs_post 
with\n    | None | Some {ds_body = \"\"; _} -> attrs\n    | Some ds -> attrs @ [docs_attr ds]\n  in\n  attrs\n\n(* Docstrings attached to constructors or fields *)\n\ntype info = docstring option\n\nlet empty_info = None\n\nlet info_attr = docs_attr\n\nlet add_info_attrs info attrs =\n  match info with\n  | None | Some {ds_body = \"\"; _} -> attrs\n  | Some ds -> attrs @ [info_attr ds]\n\n(* Docstrings not attached to a specific item *)\n\ntype text = docstring list\n\nlet empty_text = []\nlet empty_text_lazy = lazy []\n\nlet text_loc = {txt = \"ocaml.text\"; loc = Location.none}\n\nlet text_attr ds =\n  let open Parsetree in\n  let exp =\n    {\n      pexp_desc = Pexp_constant (Pconst_string (ds.ds_body, None));\n      pexp_loc = ds.ds_loc;\n      pexp_attributes = [];\n    }\n  in\n  let item = {pstr_desc = Pstr_eval (exp, []); pstr_loc = exp.pexp_loc} in\n  (text_loc, PStr [item])\n\nlet add_text_attrs dsl attrs =\n  let fdsl =\n    Ext_list.filter dsl (function\n      | {ds_body = \"\"} -> false\n      | _ -> true)\n  in\n  List.map text_attr fdsl @ attrs\n\n(* Find the first non-info docstring in a list, attach it and return it *)\nlet get_docstring ~info dsl =\n  let rec loop = function\n    | [] -> None\n    | {ds_attached = Info; _} :: rest -> loop rest\n    | ds :: _ ->\n      ds.ds_attached <- (if info then Info else Docs);\n      Some ds\n  in\n  loop dsl\n\n(* Find all the non-info docstrings in a list, attach them and return them *)\nlet get_docstrings dsl =\n  let rec loop acc = function\n    | [] -> List.rev acc\n    | {ds_attached = Info; _} :: rest -> loop acc rest\n    | ds :: rest ->\n      ds.ds_attached <- Docs;\n      loop (ds :: acc) rest\n  in\n  loop [] dsl\n\n(* \"Associate\" all the docstrings in a list *)\nlet associate_docstrings dsl =\n  List.iter\n    (fun ds ->\n      match ds.ds_associated with\n      | Zero -> ds.ds_associated <- One\n      | One | Many -> ds.ds_associated <- Many)\n    dsl\n\n(* Map from positions to pre 
docstrings *)\n\nlet pre_table : (Lexing.position, docstring list) Hashtbl.t = Hashtbl.create 50\n\nlet set_pre_docstrings pos dsl = if dsl <> [] then Hashtbl.add pre_table pos dsl\n\nlet get_pre_docs pos =\n  try\n    let dsl = Hashtbl.find pre_table pos in\n    associate_docstrings dsl;\n    get_docstring ~info:false dsl\n  with Not_found -> None\n\nlet mark_pre_docs pos =\n  try\n    let dsl = Hashtbl.find pre_table pos in\n    associate_docstrings dsl\n  with Not_found -> ()\n\n(* Map from positions to post docstrings *)\n\nlet post_table : (Lexing.position, docstring list) Hashtbl.t = Hashtbl.create 50\n\nlet set_post_docstrings pos dsl =\n  if dsl <> [] then Hashtbl.add post_table pos dsl\n\nlet get_post_docs pos =\n  try\n    let dsl = Hashtbl.find post_table pos in\n    associate_docstrings dsl;\n    get_docstring ~info:false dsl\n  with Not_found -> None\n\nlet mark_post_docs pos =\n  try\n    let dsl = Hashtbl.find post_table pos in\n    associate_docstrings dsl\n  with Not_found -> ()\n\nlet get_info pos =\n  try\n    let dsl = Hashtbl.find post_table pos in\n    get_docstring ~info:true dsl\n  with Not_found -> None\n\n(* Map from positions to floating docstrings *)\n\nlet floating_table : (Lexing.position, docstring list) Hashtbl.t =\n  Hashtbl.create 50\n\nlet set_floating_docstrings pos dsl =\n  if dsl <> [] then Hashtbl.add floating_table pos dsl\n\nlet get_text pos =\n  try\n    let dsl = Hashtbl.find floating_table pos in\n    get_docstrings dsl\n  with Not_found -> []\n\n(* Maps from positions to extra docstrings *)\n\nlet pre_extra_table : (Lexing.position, docstring list) Hashtbl.t =\n  Hashtbl.create 50\n\nlet set_pre_extra_docstrings pos dsl =\n  if dsl <> [] then Hashtbl.add pre_extra_table pos dsl\n\nlet get_pre_extra_text pos =\n  try\n    let dsl = Hashtbl.find pre_extra_table pos in\n    get_docstrings dsl\n  with Not_found -> []\n\nlet post_extra_table : (Lexing.position, docstring list) Hashtbl.t =\n  Hashtbl.create 50\n\nlet 
set_post_extra_docstrings pos dsl =\n  if dsl <> [] then Hashtbl.add post_extra_table pos dsl\n\nlet get_post_extra_text pos =\n  try\n    let dsl = Hashtbl.find post_extra_table pos in\n    get_docstrings dsl\n  with Not_found -> []\n\n(* Docstrings from parser actions *)\n\nlet symbol_docs () =\n  {\n    docs_pre = get_pre_docs (Parsing.symbol_start_pos ());\n    docs_post = get_post_docs (Parsing.symbol_end_pos ());\n  }\n\nlet symbol_docs_lazy () =\n  let p1 = Parsing.symbol_start_pos () in\n  let p2 = Parsing.symbol_end_pos () in\n  lazy {docs_pre = get_pre_docs p1; docs_post = get_post_docs p2}\n\nlet rhs_docs pos1 pos2 =\n  {\n    docs_pre = get_pre_docs (Parsing.rhs_start_pos pos1);\n    docs_post = get_post_docs (Parsing.rhs_end_pos pos2);\n  }\n\nlet rhs_docs_lazy pos1 pos2 =\n  let p1 = Parsing.rhs_start_pos pos1 in\n  let p2 = Parsing.rhs_end_pos pos2 in\n  lazy {docs_pre = get_pre_docs p1; docs_post = get_post_docs p2}\n\nlet mark_symbol_docs () =\n  mark_pre_docs (Parsing.symbol_start_pos ());\n  mark_post_docs (Parsing.symbol_end_pos ())\n\nlet mark_rhs_docs pos1 pos2 =\n  mark_pre_docs (Parsing.rhs_start_pos pos1);\n  mark_post_docs (Parsing.rhs_end_pos pos2)\n\nlet symbol_info () = get_info (Parsing.symbol_end_pos ())\n\nlet rhs_info pos = get_info (Parsing.rhs_end_pos pos)\n\nlet symbol_text () = get_text (Parsing.symbol_start_pos ())\n\nlet symbol_text_lazy () =\n  let pos = Parsing.symbol_start_pos () in\n  lazy (get_text pos)\n\nlet rhs_text pos = get_text (Parsing.rhs_start_pos pos)\n\nlet rhs_text_lazy pos =\n  let pos = Parsing.rhs_start_pos pos in\n  lazy (get_text pos)\n\nlet symbol_pre_extra_text () = get_pre_extra_text (Parsing.symbol_start_pos ())\n\nlet symbol_post_extra_text () = get_post_extra_text (Parsing.symbol_end_pos ())\n\nlet rhs_pre_extra_text pos = get_pre_extra_text (Parsing.rhs_start_pos pos)\n\nlet rhs_post_extra_text pos = get_post_extra_text (Parsing.rhs_end_pos pos)\n\n(* (Re)Initialise all comment state *)\n\nlet init 
() =\n  docstrings := [];\n  Hashtbl.reset pre_table;\n  Hashtbl.reset post_table;\n  Hashtbl.reset floating_table;\n  Hashtbl.reset pre_extra_table;\n  Hashtbl.reset post_extra_table\n"
  },
  {
    "path": "analysis/vendor/ml/docstrings.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                               Leo White                                *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Documentation comments *)\n\nval init : unit -> unit\n(** (Re)Initialise all docstring state *)\n\nval warn_bad_docstrings : unit -> unit\n(** Emit warnings for unattached and ambiguous docstrings *)\n\n(** {2 Docstrings} *)\n\ntype docstring\n(** Documentation comments *)\n\nval docstring : string -> Location.t -> docstring\n(** Create a docstring *)\n\nval register : docstring -> unit\n(** Register a docstring *)\n\nval docstring_body : docstring -> string\n(** Get the text of a docstring *)\n\nval docstring_loc : docstring -> Location.t\n(** Get the location of a docstring *)\n\n(** {2 Set functions}\n\n    These functions are used by the lexer to associate docstrings to the\n    locations of tokens. 
*)\n\nval set_pre_docstrings : Lexing.position -> docstring list -> unit\n(** Docstrings immediately preceding a token *)\n\nval set_post_docstrings : Lexing.position -> docstring list -> unit\n(** Docstrings immediately following a token *)\n\nval set_floating_docstrings : Lexing.position -> docstring list -> unit\n(** Docstrings not immediately adjacent to a token *)\n\nval set_pre_extra_docstrings : Lexing.position -> docstring list -> unit\n(** Docstrings immediately following the token which precedes this one *)\n\nval set_post_extra_docstrings : Lexing.position -> docstring list -> unit\n(** Docstrings immediately preceding the token which follows this one *)\n\n(** {2 Items}\n\n    The {!docs} type represents documentation attached to an item. *)\n\ntype docs = {docs_pre: docstring option; docs_post: docstring option}\n\nval empty_docs : docs\n\nval docs_attr : docstring -> Parsetree.attribute\n\nval add_docs_attrs : docs -> Parsetree.attributes -> Parsetree.attributes\n(** Convert item documentation to attributes and add them to an attribute list\n*)\n\nval symbol_docs : unit -> docs\n(** Fetch the item documentation for the current symbol. This also marks this\n    documentation (for ambiguity warnings). *)\n\nval symbol_docs_lazy : unit -> docs Lazy.t\n\nval rhs_docs : int -> int -> docs\n(** Fetch the item documentation for the symbols between two positions. This\n    also marks this documentation (for ambiguity warnings). *)\n\nval rhs_docs_lazy : int -> int -> docs Lazy.t\n\nval mark_symbol_docs : unit -> unit\n(** Mark the item documentation for the current symbol (for ambiguity warnings).\n*)\n\nval mark_rhs_docs : int -> int -> unit\n(** Mark as associated the item documentation for the symbols between two\n    positions (for ambiguity warnings) *)\n\n(** {2 Fields and constructors}\n\n    The {!info} type represents documentation attached to a field or\n    constructor. 
*)\n\ntype info = docstring option\n\nval empty_info : info\n\nval info_attr : docstring -> Parsetree.attribute\n\nval add_info_attrs : info -> Parsetree.attributes -> Parsetree.attributes\n(** Convert field info to attributes and add them to an attribute list *)\n\nval symbol_info : unit -> info\n(** Fetch the field info for the current symbol. *)\n\nval rhs_info : int -> info\n(** Fetch the field info following the symbol at a given position. *)\n\n(** {2 Unattached comments}\n\n    The {!text} type represents documentation which is not attached to anything.\n*)\n\ntype text = docstring list\n\nval empty_text : text\nval empty_text_lazy : text Lazy.t\n\nval text_attr : docstring -> Parsetree.attribute\n\nval add_text_attrs : text -> Parsetree.attributes -> Parsetree.attributes\n(** Convert text to attributes and add them to an attribute list *)\n\nval symbol_text : unit -> text\n(** Fetch the text preceding the current symbol. *)\n\nval symbol_text_lazy : unit -> text Lazy.t\n\nval rhs_text : int -> text\n(** Fetch the text preceding the symbol at the given position. *)\n\nval rhs_text_lazy : int -> text Lazy.t\n\n(** {2 Extra text}\n\n    There may be additional text attached to the delimiters of a block (e.g.\n    [struct] and [end]). This is fetched by the following functions, which are\n    applied to the contents of the block rather than the delimiters. *)\n\nval symbol_pre_extra_text : unit -> text\n(** Fetch additional text preceding the current symbol *)\n\nval symbol_post_extra_text : unit -> text\n(** Fetch additional text following the current symbol *)\n\nval rhs_pre_extra_text : int -> text\n(** Fetch additional text preceding the symbol at the given position *)\n\nval rhs_post_extra_text : int -> text\n(** Fetch additional text following the symbol at the given position *)\n"
  },
  {
    "path": "analysis/vendor/ml/dune",
    "content": "(library\n (name ml)\n (wrapped false)\n (preprocess\n  (action\n   (run %{bin:cppo} %{env:CPPO_FLAGS=} %{input-file})))\n (flags\n  (:standard -w +a-4-42-40-41-44-45-9-48-67-70))\n (libraries ext js_parser))\n\n(ocamllex lexer)\n"
  },
  {
    "path": "analysis/vendor/ml/env.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Environment handling *)\n\nopen Cmi_format\nopen Config\nopen Misc\nopen Asttypes\nopen Longident\nopen Path\nopen Types\nopen Btype\n\nlet value_declarations : (string * Location.t, unit -> unit) Hashtbl.t =\n  Hashtbl.create 16\n(* This table is used to usage of value declarations.  A declaration is\n   identified with its name and location.  The callback attached to a\n   declaration is called whenever the value is used explicitly\n   (lookup_value) or implicitly (inclusion test between signatures,\n   cf Includemod.value_descriptions). 
*)\n\nlet type_declarations = Hashtbl.create 16\nlet module_declarations = Hashtbl.create 16\n\ntype constructor_usage = Positive | Pattern | Privatize\ntype constructor_usages = {\n  mutable cu_positive: bool;\n  mutable cu_pattern: bool;\n  mutable cu_privatize: bool;\n}\nlet add_constructor_usage cu = function\n  | Positive -> cu.cu_positive <- true\n  | Pattern -> cu.cu_pattern <- true\n  | Privatize -> cu.cu_privatize <- true\nlet constructor_usages () =\n  {cu_positive = false; cu_pattern = false; cu_privatize = false}\n\nlet used_constructors :\n    (string * Location.t * string, constructor_usage -> unit) Hashtbl.t =\n  Hashtbl.create 16\n\nlet prefixed_sg = Hashtbl.create 113\n\ntype error =\n  | Illegal_renaming of string * string * string\n  | Inconsistent_import of string * string * string\n  | Need_recursive_types of string * string\n  | Missing_module of Location.t * Path.t * Path.t\n  | Illegal_value_name of Location.t * string\n\nexception Error of error\n\nlet error err = raise (Error err)\n\nmodule EnvLazy : sig\n  type ('a, 'b) t\n\n  type log\n\n  val force : ('a -> 'b) -> ('a, 'b) t -> 'b\n  val create : 'a -> ('a, 'b) t\n  val get_arg : ('a, 'b) t -> 'a option\n\n  (* [force_logged log f t] is equivalent to [force f t] but if [f] returns [None] then\n     [t] is recorded in [log]. [backtrack log] will then reset all the recorded [t]s back\n     to their original state. 
*)\n  val log : unit -> log\n  val force_logged : log -> ('a -> 'b option) -> ('a, 'b option) t -> 'b option\n  val backtrack : log -> unit\nend = struct\n  type ('a, 'b) t = ('a, 'b) eval ref\n\n  and ('a, 'b) eval = Done of 'b | Raise of exn | Thunk of 'a\n\n  type undo = Nil | Cons : ('a, 'b) t * 'a * undo -> undo\n\n  type log = undo ref\n\n  let force f x =\n    match !x with\n    | Done x -> x\n    | Raise e -> raise e\n    | Thunk e -> (\n      match f e with\n      | y ->\n        x := Done y;\n        y\n      | exception e ->\n        x := Raise e;\n        raise e)\n\n  let get_arg x =\n    match !x with\n    | Thunk a -> Some a\n    | _ -> None\n\n  let create x = ref (Thunk x)\n\n  let log () = ref Nil\n\n  let force_logged log f x =\n    match !x with\n    | Done x -> x\n    | Raise e -> raise e\n    | Thunk e -> (\n      match f e with\n      | None ->\n        x := Done None;\n        log := Cons (x, e, !log);\n        None\n      | Some _ as y ->\n        x := Done y;\n        y\n      | exception e ->\n        x := Raise e;\n        raise e)\n\n  let backtrack log =\n    let rec loop = function\n      | Nil -> ()\n      | Cons (x, e, rest) ->\n        x := Thunk e;\n        loop rest\n    in\n    loop !log\nend\n\nmodule PathMap = Map.Make (Path)\n\ntype summary =\n  | Env_empty\n  | Env_value of summary * Ident.t * value_description\n  | Env_type of summary * Ident.t * type_declaration\n  | Env_extension of summary * Ident.t * extension_constructor\n  | Env_module of summary * Ident.t * module_declaration\n  | Env_modtype of summary * Ident.t * modtype_declaration\n  | Env_class of unit\n  | Env_cltype of summary * Ident.t * class_type_declaration\n  | Env_open of summary * Path.t\n  | Env_functor_arg of summary * Ident.t\n  | Env_constraints of summary * type_declaration PathMap.t\n  | Env_copy_types of summary * string list\n\nmodule TycompTbl = struct\n  (** This module is used to store components of types (i.e. 
labels and\n      constructors). We keep a representation of each nested \"open\" and the set\n      of local bindings between each of them. *)\n\n  type 'a t = {\n    current: 'a Ident.tbl;  (** Local bindings since the last open. *)\n    opened: 'a opened option;\n        (** Symbolic representation of the last (innermost) open, if any. *)\n  }\n\n  and 'a opened = {\n    components: (string, 'a list) Tbl.t;\n        (** Components from the opened module. We keep a list of bindings for\n            each name, as in comp_labels and comp_constrs. *)\n    using: (string -> ('a * 'a) option -> unit) option;\n        (** A callback to be applied when a component is used from this \"open\".\n            This is used to detect unused \"opens\". The arguments are used to\n            detect shadowing. *)\n    next: 'a t;  (** The table before opening the module. *)\n  }\n\n  let empty = {current = Ident.empty; opened = None}\n\n  let add id x tbl = {tbl with current = Ident.add id x tbl.current}\n\n  let add_open slot wrap components next =\n    let using =\n      match slot with\n      | None -> None\n      | Some f -> Some (fun s x -> f s (wrap x))\n    in\n    {current = Ident.empty; opened = Some {using; components; next}}\n\n  let rec find_same id tbl =\n    try Ident.find_same id tbl.current\n    with Not_found as exn -> (\n      match tbl.opened with\n      | Some {next; _} -> find_same id next\n      | None -> raise exn)\n\n  let nothing () = ()\n\n  let mk_callback rest name desc = function\n    | None -> nothing\n    | Some f -> (\n      fun () ->\n        match rest with\n        | [] -> f name None\n        | (hidden, _) :: _ -> f name (Some (desc, hidden)))\n\n  let rec find_all name tbl =\n    List.map\n      (fun (_id, desc) -> (desc, nothing))\n      (Ident.find_all name tbl.current)\n    @\n    match tbl.opened with\n    | None -> []\n    | Some {using; next; components} -> (\n      let rest = find_all name next in\n      match Tbl.find_str name 
components with\n      | exception Not_found -> rest\n      | opened ->\n        List.map (fun desc -> (desc, mk_callback rest name desc using)) opened\n        @ rest)\n\n  let rec fold_name f tbl acc =\n    let acc = Ident.fold_name (fun _id d -> f d) tbl.current acc in\n    match tbl.opened with\n    | Some {using = _; next; components} ->\n      acc\n      |> Tbl.fold (fun _name -> List.fold_right (fun desc -> f desc)) components\n      |> fold_name f next\n    | None -> acc\n\n  let rec local_keys tbl acc =\n    let acc = Ident.fold_all (fun k _ accu -> k :: accu) tbl.current acc in\n    match tbl.opened with\n    | Some o -> local_keys o.next acc\n    | None -> acc\n\n  let diff_keys is_local tbl1 tbl2 =\n    let keys2 = local_keys tbl2 [] in\n    Ext_list.filter keys2 (fun id ->\n        is_local (find_same id tbl2)\n        &&\n        try\n          ignore (find_same id tbl1);\n          false\n        with Not_found -> true)\nend\n\nmodule IdTbl = struct\n  (** This module is used to store all kinds of components except (labels and\n      constructors) in environments. We keep a representation of each nested\n      \"open\" and the set of local bindings between each of them. *)\n\n  type 'a t = {\n    current: 'a Ident.tbl;  (** Local bindings since the last open *)\n    opened: 'a opened option;\n        (** Symbolic representation of the last (innermost) open, if any. *)\n  }\n\n  and 'a opened = {\n    root: Path.t;\n        (** The path of the opened module, to be prefixed in front of its local\n            names to produce a valid path in the current environment. *)\n    components: (string, 'a * int) Tbl.t;\n        (** Components from the opened module. *)\n    using: (string -> ('a * 'a) option -> unit) option;\n        (** A callback to be applied when a component is used from this \"open\".\n            This is used to detect unused \"opens\". The arguments are used to\n            detect shadowing. 
*)\n    next: 'a t;  (** The table before opening the module. *)\n  }\n\n  let empty = {current = Ident.empty; opened = None}\n\n  let add id x tbl = {tbl with current = Ident.add id x tbl.current}\n\n  let add_open slot wrap root components next =\n    let using =\n      match slot with\n      | None -> None\n      | Some f -> Some (fun s x -> f s (wrap x))\n    in\n    {current = Ident.empty; opened = Some {using; root; components; next}}\n\n  let rec find_same id tbl =\n    try Ident.find_same id tbl.current\n    with Not_found as exn -> (\n      match tbl.opened with\n      | Some {next; _} -> find_same id next\n      | None -> raise exn)\n\n  let rec find_name mark name tbl =\n    try\n      let id, desc = Ident.find_name name tbl.current in\n      (Pident id, desc)\n    with Not_found as exn -> (\n      match tbl.opened with\n      | Some {using; root; next; components} -> (\n        try\n          let descr, pos = Tbl.find_str name components in\n          let res = (Pdot (root, name, pos), descr) in\n          (if mark then\n             match using with\n             | None -> ()\n             | Some f -> (\n               try f name (Some (snd (find_name false name next), snd res))\n               with Not_found -> f name None));\n          res\n        with Not_found -> find_name mark name next)\n      | None -> raise exn)\n\n  let find_name name tbl = find_name true name tbl\n\n  let rec update name f tbl =\n    try\n      let id, desc = Ident.find_name name tbl.current in\n      let new_desc = f desc in\n      {tbl with current = Ident.add id new_desc tbl.current}\n    with Not_found -> (\n      match tbl.opened with\n      | Some {root; using; next; components} -> (\n        try\n          let desc, pos = Tbl.find_str name components in\n          let new_desc = f desc in\n          let components = Tbl.add name (new_desc, pos) components in\n          {tbl with opened = Some {root; using; next; components}}\n        with Not_found ->\n          let 
next = update name f next in\n          {tbl with opened = Some {root; using; next; components}})\n      | None -> tbl)\n\n  let rec find_all name tbl =\n    List.map\n      (fun (id, desc) -> (Pident id, desc))\n      (Ident.find_all name tbl.current)\n    @\n    match tbl.opened with\n    | None -> []\n    | Some {root; using = _; next; components} -> (\n      try\n        let desc, pos = Tbl.find_str name components in\n        (Pdot (root, name, pos), desc) :: find_all name next\n      with Not_found -> find_all name next)\n\n  let rec fold_name f tbl acc =\n    let acc =\n      Ident.fold_name\n        (fun id d -> f (Ident.name id) (Pident id, d))\n        tbl.current acc\n    in\n    match tbl.opened with\n    | Some {root; using = _; next; components} ->\n      acc\n      |> Tbl.fold\n           (fun name (desc, pos) -> f name (Pdot (root, name, pos), desc))\n           components\n      |> fold_name f next\n    | None -> acc\n\n  let rec local_keys tbl acc =\n    let acc = Ident.fold_all (fun k _ accu -> k :: accu) tbl.current acc in\n    match tbl.opened with\n    | Some o -> local_keys o.next acc\n    | None -> acc\n\n  let rec iter f tbl =\n    Ident.iter (fun id desc -> f id (Pident id, desc)) tbl.current;\n    match tbl.opened with\n    | Some {root; using = _; next; components} ->\n      Tbl.iter\n        (fun s (x, pos) ->\n          f (Ident.hide (Ident.create s) (* ??? 
*)) (Pdot (root, s, pos), x))\n        components;\n      iter f next\n    | None -> ()\n\n  let diff_keys tbl1 tbl2 =\n    let keys2 = local_keys tbl2 [] in\n    Ext_list.filter keys2 (fun id ->\n        try\n          ignore (find_same id tbl1);\n          false\n        with Not_found -> true)\nend\n\ntype type_descriptions = constructor_description list * label_description list\n\nlet in_signature_flag = 0x01\nlet implicit_coercion_flag = 0x02\n\ntype t = {\n  values: value_description IdTbl.t;\n  constrs: constructor_description TycompTbl.t;\n  labels: label_description TycompTbl.t;\n  types: (type_declaration * type_descriptions) IdTbl.t;\n  modules: (Subst.t * module_declaration, module_declaration) EnvLazy.t IdTbl.t;\n  modtypes: modtype_declaration IdTbl.t;\n  components: module_components IdTbl.t;\n  classes: class_declaration IdTbl.t;\n  cltypes: class_type_declaration IdTbl.t;\n  functor_args: unit Ident.tbl;\n  summary: summary;\n  local_constraints: type_declaration PathMap.t;\n  gadt_instances: (int * TypeSet.t ref) list;\n  flags: int;\n}\n\nand module_components = {\n  deprecated: string option;\n  loc: Location.t;\n  comps:\n    ( t * Subst.t * Path.t * Types.module_type,\n      module_components_repr option )\n    EnvLazy.t;\n}\n\nand module_components_repr =\n  | Structure_comps of structure_components\n  | Functor_comps of functor_components\n\nand 'a comp_tbl = (string, 'a * int) Tbl.t\n\nand structure_components = {\n  mutable comp_values: value_description comp_tbl;\n  mutable comp_constrs: (string, constructor_description list) Tbl.t;\n  mutable comp_labels: (string, label_description list) Tbl.t;\n  mutable comp_types: (type_declaration * type_descriptions) comp_tbl;\n  mutable comp_modules:\n    (Subst.t * module_declaration, module_declaration) EnvLazy.t comp_tbl;\n  mutable comp_modtypes: modtype_declaration comp_tbl;\n  mutable comp_components: module_components comp_tbl;\n  comp_classes: class_declaration comp_tbl; (* warning -69*)\n  
mutable comp_cltypes: class_type_declaration comp_tbl;\n}\n\nand functor_components = {\n  fcomp_param: Ident.t; (* Formal parameter *)\n  fcomp_arg: module_type option; (* Argument signature *)\n  fcomp_res: module_type; (* Result signature *)\n  fcomp_cache: (Path.t, module_components) Hashtbl.t; (* For memoization *)\n  fcomp_subst_cache: (Path.t, module_type) Hashtbl.t;\n}\n\nlet copy_local ~from env =\n  {\n    env with\n    local_constraints = from.local_constraints;\n    gadt_instances = from.gadt_instances;\n    flags = from.flags;\n  }\n\nlet same_constr = ref (fun _ _ _ -> assert false)\n\n(* Helper to decide whether to report an identifier shadowing\n   by some 'open'. For labels and constructors, we do not report\n   if the two elements are from the same re-exported declaration.\n\n   Later, one could also interpret some attributes on value and\n   type declarations to silence the shadowing warnings. *)\n\nlet check_shadowing env = function\n  | `Constructor (Some (c1, c2))\n    when not (!same_constr env c1.cstr_res c2.cstr_res) ->\n    Some \"constructor\"\n  | `Label (Some (l1, l2)) when not (!same_constr env l1.lbl_res l2.lbl_res) ->\n    Some \"label\"\n  | `Value (Some _) -> Some \"value\"\n  | `Type (Some _) -> Some \"type\"\n  | `Module (Some _) | `Component (Some _) -> Some \"module\"\n  | `Module_type (Some _) -> Some \"module type\"\n  | `Class (Some _) -> Some \"class\"\n  | `Class_type (Some _) -> Some \"class type\"\n  | `Constructor _ | `Label _\n  | `Value None\n  | `Type None\n  | `Module None\n  | `Module_type None\n  | `Class None\n  | `Class_type None\n  | `Component None ->\n    None\n\nlet subst_modtype_maker (subst, md) =\n  if subst == Subst.identity then md\n  else {md with md_type = Subst.modtype subst md.md_type}\n\nlet empty =\n  {\n    values = IdTbl.empty;\n    constrs = TycompTbl.empty;\n    labels = TycompTbl.empty;\n    types = IdTbl.empty;\n    modules = IdTbl.empty;\n    modtypes = IdTbl.empty;\n    components = 
IdTbl.empty;\n    classes = IdTbl.empty;\n    cltypes = IdTbl.empty;\n    summary = Env_empty;\n    local_constraints = PathMap.empty;\n    gadt_instances = [];\n    flags = 0;\n    functor_args = Ident.empty;\n  }\n\nlet in_signature b env =\n  let flags =\n    if b then env.flags lor in_signature_flag\n    else env.flags land lnot in_signature_flag\n  in\n  {env with flags}\n\nlet implicit_coercion env =\n  {env with flags = env.flags lor implicit_coercion_flag}\n\nlet is_in_signature env = env.flags land in_signature_flag <> 0\nlet is_implicit_coercion env = env.flags land implicit_coercion_flag <> 0\n\nlet is_ident = function\n  | Pident _ -> true\n  | Pdot _ | Papply _ -> false\n\nlet is_local_ext = function\n  | {cstr_tag = Cstr_extension (p, _)} -> is_ident p\n  | _ -> false\n\nlet diff env1 env2 =\n  IdTbl.diff_keys env1.values env2.values\n  @ TycompTbl.diff_keys is_local_ext env1.constrs env2.constrs\n  @ IdTbl.diff_keys env1.modules env2.modules\n  @ IdTbl.diff_keys env1.classes env2.classes\n\ntype can_load_cmis = Can_load_cmis | Cannot_load_cmis of EnvLazy.log\n\nlet can_load_cmis = ref Can_load_cmis\n\nlet without_cmis f x =\n  let log = EnvLazy.log () in\n  let res =\n    Misc.(\n      protect_refs [R (can_load_cmis, Cannot_load_cmis log)] (fun () -> f x))\n  in\n  EnvLazy.backtrack log;\n  res\n\n(* Forward declarations *)\n\nlet components_of_module' =\n  ref\n    (fun ~deprecated:_ ~loc:_ _env _sub _path _mty -> assert false\n      : deprecated:string option ->\n        loc:Location.t ->\n        t ->\n        Subst.t ->\n        Path.t ->\n        module_type ->\n        module_components)\nlet components_of_module_maker' =\n  ref\n    (fun (_env, _sub, _path, _mty) -> assert false\n      : t * Subst.t * Path.t * module_type -> module_components_repr option)\nlet components_of_functor_appl' =\n  ref\n    (fun _f _env _p1 _p2 -> assert false\n      : functor_components -> t -> Path.t -> Path.t -> module_components)\nlet check_modtype_inclusion =\n 
 (* to be filled with Includemod.check_modtype_inclusion *)\n  ref\n    (fun ~loc:_ _env _mty1 _path1 _mty2 -> assert false\n      : loc:Location.t -> t -> module_type -> Path.t -> module_type -> unit)\nlet strengthen =\n  (* to be filled with Mtype.strengthen *)\n  ref\n    (fun ~aliasable:_ _env _mty _path -> assert false\n      : aliasable:bool -> t -> module_type -> Path.t -> module_type)\n\nlet md md_type = {md_type; md_attributes = []; md_loc = Location.none}\n\nlet get_components_opt c =\n  match !can_load_cmis with\n  | Can_load_cmis -> EnvLazy.force !components_of_module_maker' c.comps\n  | Cannot_load_cmis log ->\n    EnvLazy.force_logged log !components_of_module_maker' c.comps\n\nlet empty_structure =\n  Structure_comps\n    {\n      comp_values = Tbl.empty;\n      comp_constrs = Tbl.empty;\n      comp_labels = Tbl.empty;\n      comp_types = Tbl.empty;\n      comp_modules = Tbl.empty;\n      comp_modtypes = Tbl.empty;\n      comp_components = Tbl.empty;\n      comp_classes = Tbl.empty;\n      comp_cltypes = Tbl.empty;\n    }\n\nlet get_components c =\n  match get_components_opt c with\n  | None -> empty_structure\n  | Some c -> c\n\n(* The name of the compilation unit currently compiled.\n   \"\" if outside a compilation unit. 
*)\n\nlet current_unit = ref \"\"\n\n(* Persistent structure descriptions *)\n\ntype pers_struct = {\n  ps_name: string;\n  ps_sig: signature Lazy.t;\n  ps_comps: module_components;\n  ps_crcs: (string * Digest.t option) list;\n  ps_filename: string;\n  ps_flags: pers_flags list;\n}\n[@@warning \"-69\"]\n\nlet persistent_structures =\n  (Hashtbl.create 17 : (string, pers_struct option) Hashtbl.t)\n\n(* Consistency between persistent structures *)\n\nlet crc_units = Consistbl.create ()\n\nmodule StringSet = Set.Make (struct\n  type t = string\n  let compare = String.compare\nend)\n\nlet imported_units = ref StringSet.empty\n\nlet add_import s = imported_units := StringSet.add s !imported_units\n\nlet clear_imports () =\n  Consistbl.clear crc_units;\n  imported_units := StringSet.empty\n\nlet check_consistency ps =\n  try\n    List.iter\n      (fun (name, crco) ->\n        match crco with\n        | None -> ()\n        | Some crc ->\n          add_import name;\n          Consistbl.check crc_units name crc ps.ps_filename)\n      ps.ps_crcs\n  with Consistbl.Inconsistency (name, source, auth) ->\n    error (Inconsistent_import (name, auth, source))\n\n(* Reading persistent structures from .cmi files *)\n\nlet save_pers_struct crc ps =\n  let modname = ps.ps_name in\n  Hashtbl.add persistent_structures modname (Some ps);\n  Consistbl.set crc_units modname crc ps.ps_filename;\n  add_import modname\n\nmodule Persistent_signature = struct\n  type t = {filename: string; cmi: Cmi_format.cmi_infos}\n\n  let load =\n    ref (fun ~unit_name ->\n        match find_in_path_uncap !load_path (unit_name ^ \".cmi\") with\n        | filename -> Some {filename; cmi = read_cmi filename}\n        | exception Not_found -> None)\nend\n\nlet acknowledge_pers_struct check modname {Persistent_signature.filename; cmi} =\n  let name = cmi.cmi_name in\n  let sign = cmi.cmi_sign in\n  let crcs = cmi.cmi_crcs in\n  let flags = cmi.cmi_flags in\n  let deprecated =\n    List.fold_left\n      (fun _ 
-> function\n        | Deprecated s -> Some s)\n      None flags\n  in\n  let comps =\n    !components_of_module' ~deprecated ~loc:Location.none empty Subst.identity\n      (Pident (Ident.create_persistent name))\n      (Mty_signature sign)\n  in\n  let ps =\n    {\n      ps_name = name;\n      ps_sig = lazy (Subst.signature Subst.identity sign);\n      ps_comps = comps;\n      ps_crcs = crcs;\n      ps_filename = filename;\n      ps_flags = flags;\n    }\n  in\n  if ps.ps_name <> modname then\n    error (Illegal_renaming (modname, ps.ps_name, filename));\n  if check then check_consistency ps;\n  Hashtbl.add persistent_structures modname (Some ps);\n  ps\n\nlet read_pers_struct check modname filename =\n  add_import modname;\n  let cmi = read_cmi filename in\n  acknowledge_pers_struct check modname {Persistent_signature.filename; cmi}\n\nlet find_pers_struct check name =\n  if name = \"*predef*\" then raise Not_found;\n  match Hashtbl.find persistent_structures name with\n  | Some ps -> ps\n  | None -> raise Not_found\n  | exception Not_found -> (\n    match !can_load_cmis with\n    | Cannot_load_cmis _ -> raise Not_found\n    | Can_load_cmis ->\n      let ps =\n        match !Persistent_signature.load ~unit_name:name with\n        | Some ps -> ps\n        | None ->\n          Hashtbl.add persistent_structures name None;\n          raise Not_found\n      in\n      add_import name;\n      acknowledge_pers_struct check name ps)\n\n(* Emits a warning if there is no valid cmi for name *)\nlet check_pers_struct name =\n  try ignore (find_pers_struct false name) with\n  | Not_found ->\n    let warn = Warnings.No_cmi_file (name, None) in\n    Location.prerr_warning Location.none warn\n  | Cmi_format.Error err ->\n    let msg = Format.asprintf \"%a\" Cmi_format.report_error err in\n    let warn = Warnings.No_cmi_file (name, Some msg) in\n    Location.prerr_warning Location.none warn\n  | Error err ->\n    let msg =\n      match err with\n      | Illegal_renaming (name, 
ps_name, filename) ->\n        Format.asprintf\n          \" %a@ contains the compiled interface for @ %s when %s was expected\"\n          Location.print_filename filename ps_name name\n      | Inconsistent_import _ -> assert false\n      | Need_recursive_types (name, _) ->\n        Format.sprintf \"%s uses recursive types\" name\n      | Missing_module _ -> assert false\n      | Illegal_value_name _ -> assert false\n    in\n    let warn = Warnings.No_cmi_file (name, Some msg) in\n    Location.prerr_warning Location.none warn\n\nlet read_pers_struct modname filename = read_pers_struct true modname filename\n\nlet find_pers_struct name = find_pers_struct true name\n\nlet check_pers_struct name =\n  if not (Hashtbl.mem persistent_structures name) then (\n    (* PR#6843: record the weak dependency ([add_import]) regardless of\n       whether the check succeeds, to help make builds more\n       deterministic. *)\n    add_import name;\n    if Warnings.is_active (Warnings.No_cmi_file (\"\", None)) then\n      Delayed_checks.add_delayed_check (fun () -> check_pers_struct name))\n\nlet reset_cache () =\n  current_unit := \"\";\n  Hashtbl.clear persistent_structures;\n  clear_imports ();\n  Hashtbl.clear value_declarations;\n  Hashtbl.clear type_declarations;\n  Hashtbl.clear module_declarations;\n  Hashtbl.clear used_constructors;\n  Hashtbl.clear prefixed_sg\n\nlet reset_cache_toplevel () =\n  (* Delete 'missing cmi' entries from the cache. 
*)\n  let l =\n    Hashtbl.fold\n      (fun name r acc -> if r = None then name :: acc else acc)\n      persistent_structures []\n  in\n  List.iter (Hashtbl.remove persistent_structures) l;\n  Hashtbl.clear value_declarations;\n  Hashtbl.clear type_declarations;\n  Hashtbl.clear module_declarations;\n  Hashtbl.clear used_constructors;\n  Hashtbl.clear prefixed_sg\n\nlet set_unit_name name = current_unit := name\n\nlet get_unit_name () = !current_unit\n\n(* Lookup by identifier *)\n\nlet rec find_module_descr path env =\n  match path with\n  | Pident id -> (\n    try IdTbl.find_same id env.components\n    with Not_found ->\n      if Ident.persistent id && not (Ident.name id = !current_unit) then\n        (find_pers_struct (Ident.name id)).ps_comps\n      else raise Not_found)\n  | Pdot (p, s, _pos) -> (\n    match get_components (find_module_descr p env) with\n    | Structure_comps c ->\n      let descr, _pos = Tbl.find_str s c.comp_components in\n      descr\n    | Functor_comps _ -> raise Not_found)\n  | Papply (p1, p2) -> (\n    match get_components (find_module_descr p1 env) with\n    | Functor_comps f -> !components_of_functor_appl' f env p1 p2\n    | Structure_comps _ -> raise Not_found)\n\nlet find proj1 proj2 path env =\n  match path with\n  | Pident id -> IdTbl.find_same id (proj1 env)\n  | Pdot (p, s, _pos) -> (\n    match get_components (find_module_descr p env) with\n    | Structure_comps c ->\n      let data, _pos = Tbl.find_str s (proj2 c) in\n      data\n    | Functor_comps _ -> raise Not_found)\n  | Papply _ -> raise Not_found\n\nlet find_value = find (fun env -> env.values) (fun sc -> sc.comp_values)\n\nand find_type_full = find (fun env -> env.types) (fun sc -> sc.comp_types)\n\nand find_modtype = find (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes)\n\nand find_class = find (fun env -> env.classes) (fun sc -> sc.comp_classes)\n\nand find_cltype = find (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes)\n\nlet type_of_cstr path = function\n  
| {cstr_inlined = Some d; _} ->\n    (d, ([], List.map snd (Datarepr.labels_of_type path d)))\n  | _ -> assert false\n\nlet find_type_full path env =\n  match Path.constructor_typath path with\n  | Regular p -> (\n    try (PathMap.find p env.local_constraints, ([], []))\n    with Not_found -> find_type_full p env)\n  | Cstr (ty_path, s) ->\n    let _, (cstrs, _) =\n      try find_type_full ty_path env with Not_found -> assert false\n    in\n    let cstr =\n      try List.find (fun cstr -> cstr.cstr_name = s) cstrs\n      with Not_found -> assert false\n    in\n    type_of_cstr path cstr\n  | LocalExt id ->\n    let cstr =\n      try TycompTbl.find_same id env.constrs with Not_found -> assert false\n    in\n    type_of_cstr path cstr\n  | Ext (mod_path, s) -> (\n    let comps =\n      try find_module_descr mod_path env with Not_found -> assert false\n    in\n    let comps =\n      match get_components comps with\n      | Structure_comps c -> c\n      | Functor_comps _ -> assert false\n    in\n    let exts =\n      Ext_list.filter\n        (try Tbl.find_str s comps.comp_constrs with Not_found -> assert false)\n        (function\n          | {cstr_tag = Cstr_extension _} -> true\n          | _ -> false)\n    in\n\n    match exts with\n    | [cstr] -> type_of_cstr path cstr\n    | _ -> assert false)\n\nlet find_type p env = fst (find_type_full p env)\nlet find_type_descrs p env = snd (find_type_full p env)\n\nlet find_module ~alias path env =\n  match path with\n  | Pident id -> (\n    try\n      let data = IdTbl.find_same id env.modules in\n      EnvLazy.force subst_modtype_maker data\n    with Not_found ->\n      if Ident.persistent id && not (Ident.name id = !current_unit) then\n        let ps = find_pers_struct (Ident.name id) in\n        md (Mty_signature (Lazy.force ps.ps_sig))\n      else raise Not_found)\n  | Pdot (p, s, _pos) -> (\n    match get_components (find_module_descr p env) with\n    | Structure_comps c ->\n      let data, _pos = Tbl.find_str s 
c.comp_modules in\n      EnvLazy.force subst_modtype_maker data\n    | Functor_comps _ -> raise Not_found)\n  | Papply (p1, p2) -> (\n    let desc1 = find_module_descr p1 env in\n    match get_components desc1 with\n    | Functor_comps f ->\n      md\n        (match f.fcomp_res with\n        | Mty_alias _ as mty -> mty\n        | mty -> (\n          if alias then mty\n          else\n            try Hashtbl.find f.fcomp_subst_cache p2\n            with Not_found ->\n              let mty =\n                Subst.modtype\n                  (Subst.add_module f.fcomp_param p2 Subst.identity)\n                  f.fcomp_res\n              in\n              Hashtbl.add f.fcomp_subst_cache p2 mty;\n              mty))\n    | Structure_comps _ -> raise Not_found)\n\nlet rec normalize_path lax env path =\n  let path =\n    match path with\n    | Pdot (p, s, pos) -> Pdot (normalize_path lax env p, s, pos)\n    | Papply (p1, p2) ->\n      Papply (normalize_path lax env p1, normalize_path true env p2)\n    | _ -> path\n  in\n  try\n    match find_module ~alias:true path env with\n    | {md_type = Mty_alias (_, path1)} -> normalize_path lax env path1\n    | _ -> path\n  with\n  | Not_found\n  when lax\n       ||\n       match path with\n       | Pident id -> not (Ident.persistent id)\n       | _ -> true\n  ->\n    path\n\nlet normalize_path oloc env path =\n  try normalize_path (oloc = None) env path\n  with Not_found -> (\n    match oloc with\n    | None -> assert false\n    | Some loc ->\n      raise (Error (Missing_module (loc, path, normalize_path true env path))))\n\nlet normalize_path_prefix oloc env path =\n  match path with\n  | Pdot (p, s, pos) -> Pdot (normalize_path oloc env p, s, pos)\n  | Pident _ -> path\n  | Papply _ -> assert false\n\nlet find_module = find_module ~alias:false\n\n(* Find the manifest type associated to a type when appropriate:\n   - the type should be public or should have a private row,\n   - the type should have an associated manifest type. 
*)\nlet find_type_expansion path env =\n  let decl = find_type path env in\n  match decl.type_manifest with\n  | Some body\n    when decl.type_private = Public\n         || decl.type_kind <> Type_abstract\n         || Btype.has_constr_row body ->\n    (decl.type_params, body, may_map snd decl.type_newtype_level)\n  (* The manifest type of Private abstract data types without\n     private row are still considered unknown to the type system.\n     Hence, this case is caught by the following clause that also handles\n     purely abstract data types without manifest type definition. *)\n  | _ -> raise Not_found\n\n(* Find the manifest type information associated to a type, i.e.\n   the necessary information for the compiler's type-based optimisations.\n   In particular, the manifest type associated to a private abstract type\n   is revealed for the sake of compiler's type-based optimisations. *)\nlet find_type_expansion_opt path env =\n  let decl = find_type path env in\n  match decl.type_manifest with\n  (* The manifest type of Private abstract data types can still get\n     an approximation using their manifest type. 
*)\n  | Some body -> (decl.type_params, body, may_map snd decl.type_newtype_level)\n  | _ -> raise Not_found\n\nlet find_modtype_expansion path env =\n  match (find_modtype path env).mtd_type with\n  | None -> raise Not_found\n  | Some mty -> mty\n\nlet rec is_functor_arg path env =\n  match path with\n  | Pident id -> (\n    try\n      Ident.find_same id env.functor_args;\n      true\n    with Not_found -> false)\n  | Pdot (p, _s, _) -> is_functor_arg p env\n  | Papply _ -> true\n\n(* Lookup by name *)\n\nexception Recmodule\n\nlet report_deprecated ?loc p deprecated =\n  match (loc, deprecated) with\n  | Some loc, Some txt ->\n    let txt = if txt = \"\" then \"\" else \"\\n\" ^ txt in\n    Location.deprecated loc (Printf.sprintf \"module %s%s\" (Path.name p) txt)\n  | _ -> ()\n\nlet mark_module_used env name loc =\n  if not (is_implicit_coercion env) then\n    try Hashtbl.find module_declarations (name, loc) () with Not_found -> ()\n\nlet rec lookup_module_descr_aux ?loc lid env =\n  match lid with\n  | Lident s -> (\n    try IdTbl.find_name s env.components\n    with Not_found ->\n      if s = !current_unit then raise Not_found;\n      let ps = find_pers_struct s in\n      (Pident (Ident.create_persistent s), ps.ps_comps))\n  | Ldot (l, s) -> (\n    let p, descr = lookup_module_descr ?loc l env in\n    match get_components descr with\n    | Structure_comps c ->\n      let descr, pos = Tbl.find_str s c.comp_components in\n      (Pdot (p, s, pos), descr)\n    | Functor_comps _ -> raise Not_found)\n  | Lapply (l1, l2) -> (\n    let p1, desc1 = lookup_module_descr ?loc l1 env in\n    let p2 = lookup_module ~load:true ?loc l2 env in\n    let {md_type = mty2} = find_module p2 env in\n    match get_components desc1 with\n    | Functor_comps f ->\n      let loc =\n        match loc with\n        | Some l -> l\n        | None -> Location.none\n      in\n      Misc.may (!check_modtype_inclusion ~loc env mty2 p2) f.fcomp_arg;\n      (Papply (p1, p2), 
!components_of_functor_appl' f env p1 p2)\n    | Structure_comps _ -> raise Not_found)\n\nand lookup_module_descr ?loc lid env =\n  let ((p, comps) as res) = lookup_module_descr_aux ?loc lid env in\n  mark_module_used env (Path.last p) comps.loc;\n  (*\n  Format.printf \"USE module %s at %a@.\" (Path.last p)\n    Location.print comps.loc;\n*)\n  report_deprecated ?loc p comps.deprecated;\n  res\n\nand lookup_module ~load ?loc lid env : Path.t =\n  match lid with\n  | Lident s -> (\n    try\n      let p, data = IdTbl.find_name s env.modules in\n      let {md_loc; md_attributes; md_type} =\n        EnvLazy.force subst_modtype_maker data\n      in\n      mark_module_used env s md_loc;\n      (match md_type with\n      | Mty_ident (Path.Pident id) when Ident.name id = \"#recmod#\" ->\n        (* see #5965 *)\n        raise Recmodule\n      | Mty_alias (_, Path.Pident id) ->\n        if\n          !Config.bs_only\n          && (not !Clflags.transparent_modules)\n          && Ident.persistent id\n        then find_pers_struct (Ident.name id) |> ignore\n      | _ -> ());\n      report_deprecated ?loc p\n        (Builtin_attributes.deprecated_of_attrs md_attributes);\n      p\n    with Not_found ->\n      if s = !current_unit then raise Not_found;\n      let p = Pident (Ident.create_persistent s) in\n      (if !Clflags.transparent_modules && not load then check_pers_struct s\n       else\n         let ps = find_pers_struct s in\n         report_deprecated ?loc p ps.ps_comps.deprecated);\n      p)\n  | Ldot (l, s) -> (\n    let p, descr = lookup_module_descr ?loc l env in\n    match get_components descr with\n    | Structure_comps c ->\n      let _data, pos = Tbl.find_str s c.comp_modules in\n      let comps, _ = Tbl.find_str s c.comp_components in\n      mark_module_used env s comps.loc;\n      let p = Pdot (p, s, pos) in\n      report_deprecated ?loc p comps.deprecated;\n      p\n    | Functor_comps _ -> raise Not_found)\n  | Lapply (l1, l2) -> (\n    let p1, desc1 = 
lookup_module_descr ?loc l1 env in\n    let p2 = lookup_module ~load:true ?loc l2 env in\n    let {md_type = mty2} = find_module p2 env in\n    let p = Papply (p1, p2) in\n    match get_components desc1 with\n    | Functor_comps f ->\n      let loc =\n        match loc with\n        | Some l -> l\n        | None -> Location.none\n      in\n      Misc.may (!check_modtype_inclusion ~loc env mty2 p2) f.fcomp_arg;\n      p\n    | Structure_comps _ -> raise Not_found)\n\nlet lookup proj1 proj2 ?loc lid env =\n  match lid with\n  | Lident s -> IdTbl.find_name s (proj1 env)\n  | Ldot (l, s) -> (\n    let p, desc = lookup_module_descr ?loc l env in\n    match get_components desc with\n    | Structure_comps c ->\n      let data, pos = Tbl.find_str s (proj2 c) in\n      (Pdot (p, s, pos), data)\n    | Functor_comps _ -> raise Not_found)\n  | Lapply _ -> raise Not_found\n\nlet lookup_all_simple proj1 proj2 shadow ?loc lid env =\n  match lid with\n  | Lident s ->\n    let xl = TycompTbl.find_all s (proj1 env) in\n    let rec do_shadow = function\n      | [] -> []\n      | (x, f) :: xs ->\n        (x, f)\n        :: do_shadow (Ext_list.filter xs (fun (y, _) -> not (shadow x y)))\n    in\n    do_shadow xl\n  | Ldot (l, s) -> (\n    let _p, desc = lookup_module_descr ?loc l env in\n    match get_components desc with\n    | Structure_comps c ->\n      let comps = try Tbl.find_str s (proj2 c) with Not_found -> [] in\n      List.map (fun data -> (data, fun () -> ())) comps\n    | Functor_comps _ -> raise Not_found)\n  | Lapply _ -> raise Not_found\n\nlet has_local_constraints env = not (PathMap.is_empty env.local_constraints)\n\nlet cstr_shadow cstr1 cstr2 =\n  match (cstr1.cstr_tag, cstr2.cstr_tag) with\n  | Cstr_extension _, Cstr_extension _ -> true\n  | _ -> false\n\nlet lbl_shadow _lbl1 _lbl2 = false\n\nlet lookup_value = lookup (fun env -> env.values) (fun sc -> sc.comp_values)\nlet lookup_all_constructors =\n  lookup_all_simple\n    (fun env -> env.constrs)\n    (fun sc -> 
sc.comp_constrs)\n    cstr_shadow\nlet lookup_all_labels =\n  lookup_all_simple\n    (fun env -> env.labels)\n    (fun sc -> sc.comp_labels)\n    lbl_shadow\nlet lookup_type = lookup (fun env -> env.types) (fun sc -> sc.comp_types)\nlet lookup_modtype =\n  lookup (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes)\nlet lookup_class = lookup (fun env -> env.classes) (fun sc -> sc.comp_classes)\nlet lookup_cltype = lookup (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes)\n\nlet copy_types l env =\n  let f desc =\n    {desc with val_type = Subst.type_expr Subst.identity desc.val_type}\n  in\n  let values =\n    List.fold_left (fun env s -> IdTbl.update s f env) env.values l\n  in\n  {env with values; summary = Env_copy_types (env.summary, l)}\n\nlet mark_value_used env name vd =\n  if not (is_implicit_coercion env) then\n    try Hashtbl.find value_declarations (name, vd.val_loc) ()\n    with Not_found -> ()\n\nlet mark_type_used env name vd =\n  if not (is_implicit_coercion env) then\n    try Hashtbl.find type_declarations (name, vd.type_loc) ()\n    with Not_found -> ()\n\nlet mark_constructor_used usage env name vd constr =\n  if not (is_implicit_coercion env) then\n    try Hashtbl.find used_constructors (name, vd.type_loc, constr) usage\n    with Not_found -> ()\n\nlet mark_extension_used usage env ext name =\n  if not (is_implicit_coercion env) then\n    let ty_name = Path.last ext.ext_type_path in\n    try Hashtbl.find used_constructors (ty_name, ext.ext_loc, name) usage\n    with Not_found -> ()\n\nlet set_value_used_callback name vd callback =\n  let key = (name, vd.val_loc) in\n  try\n    let old = Hashtbl.find value_declarations key in\n    Hashtbl.replace value_declarations key (fun () ->\n        old ();\n        callback ())\n    (* this is to support cases like:\n             let x = let x = 1 in x in x\n       where the two declarations have the same location\n       (e.g. 
resulting from Camlp4 expansion of grammar entries) *)\n  with Not_found -> Hashtbl.add value_declarations key callback\n\nlet set_type_used_callback name td callback =\n  let loc = td.type_loc in\n  if loc.Location.loc_ghost then ()\n  else\n    let key = (name, loc) in\n    let old =\n      try Hashtbl.find type_declarations key with Not_found -> assert false\n    in\n    Hashtbl.replace type_declarations key (fun () -> callback old)\n\nlet lookup_value ?loc lid env =\n  let ((_, desc) as r) = lookup_value ?loc lid env in\n  mark_value_used env (Longident.last lid) desc;\n  r\n\nlet lookup_type ?loc lid env =\n  let path, (decl, _) = lookup_type ?loc lid env in\n  mark_type_used env (Longident.last lid) decl;\n  path\n\nlet mark_type_path env path =\n  try\n    let decl = find_type path env in\n    mark_type_used env (Path.last path) decl\n  with Not_found -> ()\n\nlet ty_path t =\n  match repr t with\n  | {desc = Tconstr (path, _, _)} -> path\n  | _ -> assert false\n\nlet lookup_constructor ?loc lid env =\n  match lookup_all_constructors ?loc lid env with\n  | [] -> raise Not_found\n  | (desc, use) :: _ ->\n    mark_type_path env (ty_path desc.cstr_res);\n    use ();\n    desc\n\nlet is_lident = function\n  | Lident _ -> true\n  | _ -> false\n\nlet lookup_all_constructors ?loc lid env =\n  try\n    let cstrs = lookup_all_constructors ?loc lid env in\n    let wrap_use desc use () =\n      mark_type_path env (ty_path desc.cstr_res);\n      use ()\n    in\n    List.map (fun (cstr, use) -> (cstr, wrap_use cstr use)) cstrs\n  with Not_found when is_lident lid -> []\n\nlet mark_constructor usage env name desc =\n  if not (is_implicit_coercion env) then\n    match desc.cstr_tag with\n    | Cstr_extension _ -> (\n      let ty_path = ty_path desc.cstr_res in\n      let ty_name = Path.last ty_path in\n      try Hashtbl.find used_constructors (ty_name, desc.cstr_loc, name) usage\n      with Not_found -> ())\n    | _ ->\n      let ty_path = ty_path desc.cstr_res in\n      
let ty_decl =\n        try find_type ty_path env with Not_found -> assert false\n      in\n      let ty_name = Path.last ty_path in\n      mark_constructor_used usage env ty_name ty_decl name\n\nlet lookup_label ?loc lid env =\n  match lookup_all_labels ?loc lid env with\n  | [] -> raise Not_found\n  | (desc, use) :: _ ->\n    mark_type_path env (ty_path desc.lbl_res);\n    use ();\n    desc\n\nlet lookup_all_labels ?loc lid env =\n  try\n    let lbls = lookup_all_labels ?loc lid env in\n    let wrap_use desc use () =\n      mark_type_path env (ty_path desc.lbl_res);\n      use ()\n    in\n    List.map (fun (lbl, use) -> (lbl, wrap_use lbl use)) lbls\n  with Not_found when is_lident lid -> []\n\nlet lookup_class ?loc lid env =\n  let ((_, desc) as r) = lookup_class ?loc lid env in\n  (* special support for Typeclass.unbound_class *)\n  if Path.name desc.cty_path = \"\" then ignore (lookup_type ?loc lid env)\n  else mark_type_path env desc.cty_path;\n  r\n\nlet lookup_cltype ?loc lid env =\n  let ((_, desc) as r) = lookup_cltype ?loc lid env in\n  if Path.name desc.clty_path = \"\" then ignore (lookup_type ?loc lid env)\n  else mark_type_path env desc.clty_path;\n  mark_type_path env desc.clty_path;\n  r\n\n(* Iter on an environment (ignoring the body of functors and\n   not yet evaluated structures) *)\n\ntype iter_cont = unit -> unit\nlet iter_env_cont = ref []\n\nlet rec scrape_alias_for_visit env mty =\n  match mty with\n  | Mty_alias (_, Pident id)\n    when Ident.persistent id\n         && not (Hashtbl.mem persistent_structures (Ident.name id)) ->\n    false\n  | Mty_alias (_, path) -> (\n    (* PR#6600: find_module may raise Not_found *)\n    try scrape_alias_for_visit env (find_module path env).md_type\n    with Not_found -> false)\n  | _ -> true\n\nlet iter_env proj1 proj2 f env () =\n  IdTbl.iter (fun id x -> f (Pident id) x) (proj1 env);\n  let rec iter_components path path' mcomps =\n    let cont () =\n      let visit =\n        match EnvLazy.get_arg 
mcomps.comps with\n        | None -> true\n        | Some (env, _sub, _path, mty) -> scrape_alias_for_visit env mty\n      in\n      if not visit then ()\n      else\n        match get_components mcomps with\n        | Structure_comps comps ->\n          Tbl.iter\n            (fun s (d, n) -> f (Pdot (path, s, n)) (Pdot (path', s, n), d))\n            (proj2 comps);\n          Tbl.iter\n            (fun s (c, n) ->\n              iter_components (Pdot (path, s, n)) (Pdot (path', s, n)) c)\n            comps.comp_components\n        | Functor_comps _ -> ()\n    in\n    iter_env_cont := (path, cont) :: !iter_env_cont\n  in\n  Hashtbl.iter\n    (fun s pso ->\n      match pso with\n      | None -> ()\n      | Some ps ->\n        let id = Pident (Ident.create_persistent s) in\n        iter_components id id ps.ps_comps)\n    persistent_structures;\n  IdTbl.iter\n    (fun id (path, comps) -> iter_components (Pident id) path comps)\n    env.components\n\nlet run_iter_cont l =\n  iter_env_cont := [];\n  List.iter (fun c -> c ()) l;\n  let cont = List.rev !iter_env_cont in\n  iter_env_cont := [];\n  cont\n\nlet iter_types f = iter_env (fun env -> env.types) (fun sc -> sc.comp_types) f\n\nlet same_types env1 env2 =\n  env1.types == env2.types && env1.components == env2.components\n\nlet used_persistent () =\n  let r = ref Concr.empty in\n  Hashtbl.iter\n    (fun s pso -> if pso != None then r := Concr.add s !r)\n    persistent_structures;\n  !r\n\nlet find_all_comps proj s (p, mcomps) =\n  match get_components mcomps with\n  | Functor_comps _ -> []\n  | Structure_comps comps -> (\n    try\n      let c, n = Tbl.find_str s (proj comps) in\n      [(Pdot (p, s, n), c)]\n    with Not_found -> [])\n\nlet rec find_shadowed_comps path env =\n  match path with\n  | Pident id -> IdTbl.find_all (Ident.name id) env.components\n  | Pdot (p, s, _) ->\n    let l = find_shadowed_comps p env in\n    let l' =\n      List.map (find_all_comps (fun comps -> comps.comp_components) s) l\n    in\n   
 List.flatten l'\n  | Papply _ -> []\n\nlet find_shadowed proj1 proj2 path env =\n  match path with\n  | Pident id -> IdTbl.find_all (Ident.name id) (proj1 env)\n  | Pdot (p, s, _) ->\n    let l = find_shadowed_comps p env in\n    let l' = List.map (find_all_comps proj2 s) l in\n    List.flatten l'\n  | Papply _ -> []\n\nlet find_shadowed_types path env =\n  List.map fst\n    (find_shadowed\n       (fun env -> env.types)\n       (fun comps -> comps.comp_types)\n       path env)\n\n(* GADT instance tracking *)\n\nlet add_gadt_instance_level lv env =\n  {env with gadt_instances = (lv, ref TypeSet.empty) :: env.gadt_instances}\n\nlet is_Tlink = function\n  | {desc = Tlink _} -> true\n  | _ -> false\n\nlet gadt_instance_level env t =\n  let rec find_instance = function\n    | [] -> None\n    | (lv, r) :: rem ->\n      if TypeSet.exists is_Tlink !r then\n        (* Should we use set_typeset ? *)\n        r := TypeSet.fold (fun ty -> TypeSet.add (repr ty)) !r TypeSet.empty;\n      if TypeSet.mem t !r then Some lv else find_instance rem\n  in\n  find_instance env.gadt_instances\n\nlet add_gadt_instances env lv tl =\n  let r =\n    try List.assoc lv env.gadt_instances with Not_found -> assert false\n  in\n  (* Format.eprintf \"Added\";\n     List.iter (fun ty -> Format.eprintf \"@ %a\" !Btype.print_raw ty) tl;\n     Format.eprintf \"@.\"; *)\n  set_typeset r (List.fold_right TypeSet.add tl !r)\n\n(* Only use this after expand_head! 
*)\nlet add_gadt_instance_chain env lv t =\n  let r =\n    try List.assoc lv env.gadt_instances with Not_found -> assert false\n  in\n  let rec add_instance t =\n    let t = repr t in\n    if not (TypeSet.mem t !r) then (\n      (* Format.eprintf \"@ %a\" !Btype.print_raw t; *)\n      set_typeset r (TypeSet.add t !r);\n      match t.desc with\n      | Tconstr (p, _, memo) -> may add_instance (find_expans Private p !memo)\n      | _ -> ())\n  in\n  (* Format.eprintf \"Added chain\"; *)\n  add_instance t\n(* Format.eprintf \"@.\" *)\n\n(* Expand manifest module type names at the top of the given module type *)\n\nlet rec scrape_alias env ?path mty =\n  match (mty, path) with\n  | Mty_ident p, _ -> (\n    try scrape_alias env (find_modtype_expansion p env) ?path\n    with Not_found -> mty)\n  | Mty_alias (_, path), _ -> (\n    try scrape_alias env (find_module path env).md_type ~path\n    with Not_found ->\n      (*Location.prerr_warning Location.none\n        (Warnings.No_cmi_file (Path.name path));*)\n      mty)\n  | mty, Some path -> !strengthen ~aliasable:true env mty path\n  | _ -> mty\n\nlet scrape_alias env mty = scrape_alias env mty\n\n(* Given a signature and a root path, prefix all idents in the signature\n   by the root path and build the corresponding substitution. 
*)\n\nlet rec prefix_idents root pos sub = function\n  | [] -> ([], sub)\n  | Sig_value (id, decl) :: rem ->\n    let p = Pdot (root, Ident.name id, pos) in\n    let nextpos =\n      match decl.val_kind with\n      | Val_prim _ -> pos\n      | _ -> pos + 1\n    in\n    let pl, final_sub = prefix_idents root nextpos sub rem in\n    (p :: pl, final_sub)\n  | Sig_type (id, _, _) :: rem ->\n    let p = Pdot (root, Ident.name id, nopos) in\n    let pl, final_sub = prefix_idents root pos (Subst.add_type id p sub) rem in\n    (p :: pl, final_sub)\n  | Sig_typext (id, _, _) :: rem ->\n    let p = Pdot (root, Ident.name id, pos) in\n    (* we extend the substitution in case of an inlined record *)\n    let pl, final_sub =\n      prefix_idents root (pos + 1) (Subst.add_type id p sub) rem\n    in\n    (p :: pl, final_sub)\n  | Sig_module (id, _, _) :: rem ->\n    let p = Pdot (root, Ident.name id, pos) in\n    let pl, final_sub =\n      prefix_idents root (pos + 1) (Subst.add_module id p sub) rem\n    in\n    (p :: pl, final_sub)\n  | Sig_modtype (id, _) :: rem ->\n    let p = Pdot (root, Ident.name id, nopos) in\n    let pl, final_sub =\n      prefix_idents root pos (Subst.add_modtype id (Mty_ident p) sub) rem\n    in\n    (p :: pl, final_sub)\n  | Sig_class _ :: _ -> assert false\n  | Sig_class_type (id, _, _) :: rem ->\n    let p = Pdot (root, Ident.name id, nopos) in\n    let pl, final_sub = prefix_idents root pos (Subst.add_type id p sub) rem in\n    (p :: pl, final_sub)\n\nlet prefix_idents root sub sg =\n  if sub = Subst.identity then (\n    let sgs =\n      try Hashtbl.find prefixed_sg root\n      with Not_found ->\n        let sgs = ref [] in\n        Hashtbl.add prefixed_sg root sgs;\n        sgs\n    in\n    try List.assq sg !sgs\n    with Not_found ->\n      let r = prefix_idents root 0 sub sg in\n      sgs := (sg, r) :: !sgs;\n      r)\n  else prefix_idents root 0 sub sg\n\n(* Compute structure descriptions *)\n\nlet add_to_tbl id decl tbl =\n  let decls = try 
Tbl.find_str id tbl with Not_found -> [] in\n  Tbl.add id (decl :: decls) tbl\n\nlet rec components_of_module ~deprecated ~loc env sub path mty =\n  {deprecated; loc; comps = EnvLazy.create (env, sub, path, mty)}\n\nand components_of_module_maker (env, sub, path, mty) =\n  match scrape_alias env mty with\n  | Mty_signature sg ->\n    let c =\n      {\n        comp_values = Tbl.empty;\n        comp_constrs = Tbl.empty;\n        comp_labels = Tbl.empty;\n        comp_types = Tbl.empty;\n        comp_modules = Tbl.empty;\n        comp_modtypes = Tbl.empty;\n        comp_components = Tbl.empty;\n        comp_classes = Tbl.empty;\n        comp_cltypes = Tbl.empty;\n      }\n    in\n    let pl, sub = prefix_idents path sub sg in\n    let env = ref env in\n    let pos = ref 0 in\n    List.iter2\n      (fun item path ->\n        match item with\n        | Sig_value (id, decl) -> (\n          let decl' = Subst.value_description sub decl in\n          c.comp_values <- Tbl.add (Ident.name id) (decl', !pos) c.comp_values;\n          match decl.val_kind with\n          | Val_prim _ -> ()\n          | _ -> incr pos)\n        | Sig_type (id, decl, _) ->\n          let decl' = Subst.type_declaration sub decl in\n          Datarepr.set_row_name decl' (Subst.type_path sub (Path.Pident id));\n          let constructors =\n            List.map snd (Datarepr.constructors_of_type path decl')\n          in\n          let labels = List.map snd (Datarepr.labels_of_type path decl') in\n          c.comp_types <-\n            Tbl.add (Ident.name id)\n              ((decl', (constructors, labels)), nopos)\n              c.comp_types;\n          List.iter\n            (fun descr ->\n              c.comp_constrs <- add_to_tbl descr.cstr_name descr c.comp_constrs)\n            constructors;\n          List.iter\n            (fun descr ->\n              c.comp_labels <- add_to_tbl descr.lbl_name descr c.comp_labels)\n            labels;\n          env := store_type_infos id decl !env\n        | 
Sig_typext (id, ext, _) ->\n          let ext' = Subst.extension_constructor sub ext in\n          let descr = Datarepr.extension_descr path ext' in\n          c.comp_constrs <- add_to_tbl (Ident.name id) descr c.comp_constrs;\n          incr pos\n        | Sig_module (id, md, _) ->\n          let md' = EnvLazy.create (sub, md) in\n          c.comp_modules <- Tbl.add (Ident.name id) (md', !pos) c.comp_modules;\n          let deprecated =\n            Builtin_attributes.deprecated_of_attrs md.md_attributes\n          in\n          let comps =\n            components_of_module ~deprecated ~loc:md.md_loc !env sub path\n              md.md_type\n          in\n          c.comp_components <-\n            Tbl.add (Ident.name id) (comps, !pos) c.comp_components;\n          env := store_module ~check:false id md !env;\n          incr pos\n        | Sig_modtype (id, decl) ->\n          let decl' = Subst.modtype_declaration sub decl in\n          c.comp_modtypes <-\n            Tbl.add (Ident.name id) (decl', nopos) c.comp_modtypes;\n          env := store_modtype id decl !env\n        | Sig_class () -> assert false\n        | Sig_class_type (id, decl, _) ->\n          let decl' = Subst.cltype_declaration sub decl in\n          c.comp_cltypes <- Tbl.add (Ident.name id) (decl', !pos) c.comp_cltypes)\n      sg pl;\n    Some (Structure_comps c)\n  | Mty_functor (param, ty_arg, ty_res) ->\n    Some\n      (Functor_comps\n         {\n           fcomp_param = param;\n           (* fcomp_arg and fcomp_res must be prefixed eagerly, because\n              they are interpreted in the outer environment *)\n           fcomp_arg = may_map (Subst.modtype sub) ty_arg;\n           fcomp_res = Subst.modtype sub ty_res;\n           fcomp_cache = Hashtbl.create 17;\n           fcomp_subst_cache = Hashtbl.create 17;\n         })\n  | Mty_ident _ | Mty_alias _ -> None\n\n(* Insertion of bindings by identifier + path *)\n\nand check_usage loc id warn tbl =\n  if (not loc.Location.loc_ghost) && 
Warnings.is_active (warn \"\") then (\n    let name = Ident.name id in\n    let key = (name, loc) in\n    if Hashtbl.mem tbl key then ()\n    else\n      let used = ref false in\n      Hashtbl.add tbl key (fun () -> used := true);\n      if not (name = \"\" || name.[0] = '_' || name.[0] = '#') then\n        Delayed_checks.add_delayed_check (fun () ->\n            if not !used then Location.prerr_warning loc (warn name)))\n\nand check_value_name name loc =\n  (* Note: we could also check here general validity of the\n     identifier, to protect against bad identifiers forged by -pp or\n     -ppx preprocessors. *)\n  if name = \"|.\" then raise (Error (Illegal_value_name (loc, name)))\n  else if String.length name > 0 && name.[0] = '#' then\n    for i = 1 to String.length name - 1 do\n      if name.[i] = '#' then raise (Error (Illegal_value_name (loc, name)))\n    done\n\nand store_value ?check id decl env =\n  check_value_name (Ident.name id) decl.val_loc;\n  may (fun f -> check_usage decl.val_loc id f value_declarations) check;\n  {\n    env with\n    values = IdTbl.add id decl env.values;\n    summary = Env_value (env.summary, id, decl);\n  }\n\nand store_type ~check id info env =\n  let loc = info.type_loc in\n  if check then\n    check_usage loc id\n      (fun s -> Warnings.Unused_type_declaration s)\n      type_declarations;\n  let path = Pident id in\n  let constructors = Datarepr.constructors_of_type path info in\n  let labels = Datarepr.labels_of_type path info in\n  let descrs = (List.map snd constructors, List.map snd labels) in\n\n  (if\n     check\n     && (not loc.Location.loc_ghost)\n     && Warnings.is_active (Warnings.Unused_constructor (\"\", false, false))\n   then\n     let ty = Ident.name id in\n     List.iter\n       (fun (_, {cstr_name = c; _}) ->\n         let k = (ty, loc, c) in\n         if not (Hashtbl.mem used_constructors k) then (\n           let used = constructor_usages () in\n           Hashtbl.add used_constructors k 
(add_constructor_usage used);\n           if not (ty = \"\" || ty.[0] = '_') then\n             Delayed_checks.add_delayed_check (fun () ->\n                 if (not (is_in_signature env)) && not used.cu_positive then\n                   Location.prerr_warning loc\n                     (Warnings.Unused_constructor\n                        (c, used.cu_pattern, used.cu_privatize)))))\n       constructors);\n  {\n    env with\n    constrs =\n      List.fold_right\n        (fun (id, descr) constrs -> TycompTbl.add id descr constrs)\n        constructors env.constrs;\n    labels =\n      List.fold_right\n        (fun (id, descr) labels -> TycompTbl.add id descr labels)\n        labels env.labels;\n    types = IdTbl.add id (info, descrs) env.types;\n    summary = Env_type (env.summary, id, info);\n  }\n\nand store_type_infos id info env =\n  (* Simplified version of store_type that doesn't compute and store\n     constructor and label infos, but simply record the arity and\n     manifest-ness of the type.  Used in components_of_module to\n     keep track of type abbreviations (e.g. type t = float) in the\n     computation of label representations. 
*)\n  {\n    env with\n    types = IdTbl.add id (info, ([], [])) env.types;\n    summary = Env_type (env.summary, id, info);\n  }\n\nand store_extension ~check id ext env =\n  let loc = ext.ext_loc in\n  (if\n     check\n     && (not loc.Location.loc_ghost)\n     && Warnings.is_active (Warnings.Unused_extension (\"\", false, false, false))\n   then\n     let is_exception = Path.same ext.ext_type_path Predef.path_exn in\n     let ty = Path.last ext.ext_type_path in\n     let n = Ident.name id in\n     let k = (ty, loc, n) in\n     if not (Hashtbl.mem used_constructors k) then (\n       let used = constructor_usages () in\n       Hashtbl.add used_constructors k (add_constructor_usage used);\n       Delayed_checks.add_delayed_check (fun () ->\n           if (not (is_in_signature env)) && not used.cu_positive then\n             Location.prerr_warning loc\n               (Warnings.Unused_extension\n                  (n, is_exception, used.cu_pattern, used.cu_privatize)))));\n  {\n    env with\n    constrs =\n      TycompTbl.add id (Datarepr.extension_descr (Pident id) ext) env.constrs;\n    summary = Env_extension (env.summary, id, ext);\n  }\n\nand store_module ~check id md env =\n  let loc = md.md_loc in\n  if check then\n    check_usage loc id (fun s -> Warnings.Unused_module s) module_declarations;\n\n  let deprecated = Builtin_attributes.deprecated_of_attrs md.md_attributes in\n  {\n    env with\n    modules = IdTbl.add id (EnvLazy.create (Subst.identity, md)) env.modules;\n    components =\n      IdTbl.add id\n        (components_of_module ~deprecated ~loc:md.md_loc env Subst.identity\n           (Pident id) md.md_type)\n        env.components;\n    summary = Env_module (env.summary, id, md);\n  }\n\nand store_modtype id info env =\n  {\n    env with\n    modtypes = IdTbl.add id info env.modtypes;\n    summary = Env_modtype (env.summary, id, info);\n  }\n\nand store_cltype id desc env =\n  {\n    env with\n    cltypes = IdTbl.add id desc env.cltypes;\n    summary 
= Env_cltype (env.summary, id, desc);\n  }\n\n(* Compute the components of a functor application in a path. *)\n\nlet components_of_functor_appl f env p1 p2 =\n  try Hashtbl.find f.fcomp_cache p2\n  with Not_found ->\n    let p = Papply (p1, p2) in\n    let sub = Subst.add_module f.fcomp_param p2 Subst.identity in\n    let mty = Subst.modtype sub f.fcomp_res in\n    let comps =\n      components_of_module ~deprecated:None ~loc:Location.none (*???*)\n        env Subst.identity p mty\n    in\n    Hashtbl.add f.fcomp_cache p2 comps;\n    comps\n\n(* Define forward functions *)\n\nlet _ =\n  components_of_module' := components_of_module;\n  components_of_functor_appl' := components_of_functor_appl;\n  components_of_module_maker' := components_of_module_maker\n\n(* Insertion of bindings by identifier *)\n\nlet add_functor_arg id env =\n  {\n    env with\n    functor_args = Ident.add id () env.functor_args;\n    summary = Env_functor_arg (env.summary, id);\n  }\n\nlet add_value ?check id desc env = store_value ?check id desc env\n\nlet add_type ~check id info env = store_type ~check id info env\n\nand add_extension ~check id ext env = store_extension ~check id ext env\n\nand add_module_declaration ?(arg = false) ~check id md env =\n  let env = store_module ~check id md env in\n  if arg then add_functor_arg id env else env\n\nand add_modtype id info env = store_modtype id info env\n\nand add_cltype id ty env = store_cltype id ty env\n\nlet add_module ?arg id mty env =\n  add_module_declaration ~check:false ?arg id (md mty) env\n\nlet add_local_type path info env =\n  {env with local_constraints = PathMap.add path info env.local_constraints}\n\nlet add_local_constraint path info elv env =\n  match info with\n  | {type_manifest = Some _; type_newtype_level = Some (lv, _)} ->\n    (* elv is the expansion level, lv is the definition level *)\n    let info = {info with type_newtype_level = Some (lv, elv)} in\n    add_local_type path info env\n  | _ -> assert false\n\n(* 
Insertion of bindings by name *)\n\nlet enter store_fun name data env =\n  let id = Ident.create name in\n  (id, store_fun id data env)\n\nlet enter_value ?check = enter (store_value ?check)\n\nand enter_type = enter (store_type ~check:true)\n\nand enter_extension = enter (store_extension ~check:true)\n\nand enter_module_declaration ?arg id md env =\n  add_module_declaration ?arg ~check:true id md env\n(* let (id, env) = enter store_module name md env in\n   (id, add_functor_arg ?arg id env) *)\n\nand enter_modtype = enter store_modtype\n\nand enter_cltype = enter store_cltype\n\nlet enter_module ?arg s mty env =\n  let id = Ident.create s in\n  (id, enter_module_declaration ?arg id (md mty) env)\n\n(* Insertion of all components of a signature *)\n\nlet add_item comp env =\n  match comp with\n  | Sig_value (id, decl) -> add_value id decl env\n  | Sig_type (id, decl, _) -> add_type ~check:false id decl env\n  | Sig_typext (id, ext, _) -> add_extension ~check:false id ext env\n  | Sig_module (id, md, _) -> add_module_declaration ~check:false id md env\n  | Sig_modtype (id, decl) -> add_modtype id decl env\n  | Sig_class () -> env\n  | Sig_class_type (id, decl, _) -> add_cltype id decl env\n\nlet rec add_signature sg env =\n  match sg with\n  | [] -> env\n  | comp :: rem -> add_signature rem (add_item comp env)\n\n(* Open a signature path *)\n\nlet add_components slot root env0 comps =\n  let add_l w comps env0 = TycompTbl.add_open slot w comps env0 in\n\n  let add w comps env0 = IdTbl.add_open slot w root comps env0 in\n\n  let constrs =\n    add_l (fun x -> `Constructor x) comps.comp_constrs env0.constrs\n  in\n  let labels = add_l (fun x -> `Label x) comps.comp_labels env0.labels in\n\n  let values = add (fun x -> `Value x) comps.comp_values env0.values in\n  let types = add (fun x -> `Type x) comps.comp_types env0.types in\n  let modtypes =\n    add (fun x -> `Module_type x) comps.comp_modtypes env0.modtypes\n  in\n  let classes = add (fun x -> `Class x) 
comps.comp_classes env0.classes in\n  let cltypes = add (fun x -> `Class_type x) comps.comp_cltypes env0.cltypes in\n  let components =\n    add (fun x -> `Component x) comps.comp_components env0.components\n  in\n\n  let modules = add (fun x -> `Module x) comps.comp_modules env0.modules in\n\n  {\n    env0 with\n    summary = Env_open (env0.summary, root);\n    constrs;\n    labels;\n    values;\n    types;\n    modtypes;\n    classes;\n    cltypes;\n    components;\n    modules;\n  }\n\nlet open_signature slot root env0 =\n  match get_components (find_module_descr root env0) with\n  | Functor_comps _ -> None\n  | Structure_comps comps -> Some (add_components slot root env0 comps)\n\n(* Open a signature from a file *)\n\nlet open_pers_signature name env =\n  match open_signature None (Pident (Ident.create_persistent name)) env with\n  | Some env -> env\n  | None -> assert false (* a compilation unit cannot refer to a functor *)\n\nlet open_signature ?(used_slot = ref false) ?(loc = Location.none)\n    ?(toplevel = false) ovf root env =\n  if\n    (not toplevel) && ovf = Asttypes.Fresh\n    && (not loc.Location.loc_ghost)\n    && (Warnings.is_active (Warnings.Unused_open \"\")\n       || Warnings.is_active (Warnings.Open_shadow_identifier (\"\", \"\"))\n       || Warnings.is_active (Warnings.Open_shadow_label_constructor (\"\", \"\")))\n  then (\n    let used = used_slot in\n    Delayed_checks.add_delayed_check (fun () ->\n        if not !used then (\n          used := true;\n          Location.prerr_warning loc (Warnings.Unused_open (Path.name root))));\n    let shadowed = ref [] in\n    let slot s b =\n      (match check_shadowing env b with\n      | Some kind when not (List.mem (kind, s) !shadowed) ->\n        shadowed := (kind, s) :: !shadowed;\n        let w =\n          match kind with\n          | \"label\" | \"constructor\" ->\n            Warnings.Open_shadow_label_constructor (kind, s)\n          | _ -> Warnings.Open_shadow_identifier (kind, s)\n        
in\n        Location.prerr_warning loc w\n      | _ -> ());\n      used := true\n    in\n    open_signature (Some slot) root env)\n  else open_signature None root env\n\n(* Read a signature from a file *)\n\nlet read_signature modname filename =\n  let ps = read_pers_struct modname filename in\n  Lazy.force ps.ps_sig\n\n(* Return the CRC of the interface of the given compilation unit *)\n\nlet crc_of_unit name =\n  let ps = find_pers_struct name in\n  let crco = try List.assoc name ps.ps_crcs with Not_found -> assert false in\n  match crco with\n  | None -> assert false\n  | Some crc -> crc\n\n(* Return the list of imported interfaces with their CRCs *)\n\nlet imports () =\n  let dont_record_crc_unit = !Clflags.dont_record_crc_unit in\n  match dont_record_crc_unit with\n  | None -> Consistbl.extract (StringSet.elements !imported_units) crc_units\n  | Some x ->\n    Consistbl.extract\n      (StringSet.fold\n         (fun m acc -> if m = x then acc else m :: acc)\n         !imported_units [])\n      crc_units\n\n(* Save a signature to a file *)\n\nlet save_signature_with_imports ?check_exists ~deprecated sg modname filename\n    imports =\n  (*prerr_endline filename;\n    List.iter (fun (name, crc) -> prerr_endline name) imports;*)\n  Btype.cleanup_abbrev ();\n  Subst.reset_for_saving ();\n  let sg = Subst.signature (Subst.for_saving Subst.identity) sg in\n  let flags =\n    match deprecated with\n    | Some s -> [Deprecated s]\n    | None -> []\n  in\n  try\n    let cmi =\n      {cmi_name = modname; cmi_sign = sg; cmi_crcs = imports; cmi_flags = flags}\n    in\n    let crc = create_cmi ?check_exists filename cmi in\n    (* Enter signature in persistent table so that imported_unit()\n       will also return its crc *)\n    let comps =\n      components_of_module ~deprecated ~loc:Location.none empty Subst.identity\n        (Pident (Ident.create_persistent modname))\n        (Mty_signature sg)\n    in\n    let ps =\n      {\n        ps_name = modname;\n        ps_sig = 
lazy (Subst.signature Subst.identity sg);\n        ps_comps = comps;\n        ps_crcs = (cmi.cmi_name, Some crc) :: imports;\n        ps_filename = filename;\n        ps_flags = cmi.cmi_flags;\n      }\n    in\n    save_pers_struct crc ps;\n    cmi\n  with exn ->\n    remove_file filename;\n    raise exn\n\nlet save_signature ?check_exists ~deprecated sg modname filename =\n  save_signature_with_imports ?check_exists ~deprecated sg modname filename\n    (imports ())\n\n(* Folding on environments *)\n\nlet find_all proj1 proj2 f lid env acc =\n  match lid with\n  | None ->\n    IdTbl.fold_name\n      (fun name (p, data) acc -> f name p data acc)\n      (proj1 env) acc\n  | Some l -> (\n    let p, desc = lookup_module_descr l env in\n    match get_components desc with\n    | Structure_comps c ->\n      Tbl.fold\n        (fun s (data, pos) acc -> f s (Pdot (p, s, pos)) data acc)\n        (proj2 c) acc\n    | Functor_comps _ -> acc)\n\nlet find_all_simple_list proj1 proj2 f lid env acc =\n  match lid with\n  | None -> TycompTbl.fold_name (fun data acc -> f data acc) (proj1 env) acc\n  | Some l -> (\n    let _p, desc = lookup_module_descr l env in\n    match get_components desc with\n    | Structure_comps c ->\n      Tbl.fold\n        (fun _s comps acc ->\n          match comps with\n          | [] -> acc\n          | data :: _ -> f data acc)\n        (proj2 c) acc\n    | Functor_comps _ -> acc)\n\nlet fold_modules f lid env acc =\n  match lid with\n  | None ->\n    let acc =\n      IdTbl.fold_name\n        (fun name (p, data) acc ->\n          let data = EnvLazy.force subst_modtype_maker data in\n          f name p data acc)\n        env.modules acc\n    in\n    Hashtbl.fold\n      (fun name ps acc ->\n        match ps with\n        | None -> acc\n        | Some ps ->\n          f name\n            (Pident (Ident.create_persistent name))\n            (md (Mty_signature (Lazy.force ps.ps_sig)))\n            acc)\n      persistent_structures acc\n  | Some l -> (\n    let 
p, desc = lookup_module_descr l env in\n    match get_components desc with\n    | Structure_comps c ->\n      Tbl.fold\n        (fun s (data, pos) acc ->\n          f s (Pdot (p, s, pos)) (EnvLazy.force subst_modtype_maker data) acc)\n        c.comp_modules acc\n    | Functor_comps _ -> acc)\n\nlet fold_values f =\n  find_all (fun env -> env.values) (fun sc -> sc.comp_values) f\n\nand fold_constructors f =\n  find_all_simple_list (fun env -> env.constrs) (fun sc -> sc.comp_constrs) f\n\nand fold_labels f =\n  find_all_simple_list (fun env -> env.labels) (fun sc -> sc.comp_labels) f\n\nand fold_types f = find_all (fun env -> env.types) (fun sc -> sc.comp_types) f\n\nand fold_modtypes f =\n  find_all (fun env -> env.modtypes) (fun sc -> sc.comp_modtypes) f\n\nand fold_classs f =\n  find_all (fun env -> env.classes) (fun sc -> sc.comp_classes) f\n\nand fold_cltypes f =\n  find_all (fun env -> env.cltypes) (fun sc -> sc.comp_cltypes) f\n\n(* Make the initial environment *)\nlet initial_safe_string =\n  Predef.build_initial_env (add_type ~check:false)\n    (add_extension ~check:false)\n    empty\n\n(* Return the environment summary *)\n\nlet summary env =\n  if PathMap.is_empty env.local_constraints then env.summary\n  else Env_constraints (env.summary, env.local_constraints)\n\nlet last_env = ref empty\nlet last_reduced_env = ref empty\n\nlet keep_only_summary env =\n  if !last_env == env then !last_reduced_env\n  else\n    let new_env =\n      {\n        empty with\n        summary = env.summary;\n        local_constraints = env.local_constraints;\n        flags = env.flags;\n      }\n    in\n    last_env := env;\n    last_reduced_env := new_env;\n    new_env\n\nlet env_of_only_summary env_from_summary env =\n  let new_env = env_from_summary env.summary Subst.identity in\n  {new_env with local_constraints = env.local_constraints; flags = env.flags}\n\n(* Error report *)\n\nopen Format\n\n(* taken from 
https://github.com/rescript-lang/ocaml/blob/d4144647d1bf9bc7dc3aadc24c25a7efa3a67915/typing/env.ml#L1842 *)\n(* modified branches are commented *)\nlet report_error ppf = function\n  | Illegal_renaming (name, modname, _filename) ->\n    (* modified *)\n    fprintf ppf\n      \"@[You referred to the module %s, but we've found one called %s \\\n       instead.@ Is the name's casing right?@]\"\n      name modname\n  | Inconsistent_import (name, source1, source2) ->\n    (* modified *)\n    fprintf ppf\n      \"@[<v>@[@{<info>It's possible that your build is stale.@}@ Try to clean \\\n       the artifacts and build again?@]@,\\\n       @,\\\n       @[@{<info>Here's the original error message@}@]@,\\\n       @]\";\n    fprintf ppf\n      \"@[<hov>The files %a@ and %a@ make inconsistent assumptions@ over \\\n       interface %s@]\"\n      Location.print_filename source1 Location.print_filename source2 name\n  | Need_recursive_types (import, export) ->\n    fprintf ppf\n      \"@[<hov>Unit %s imports from %s, which uses recursive types.@ %s@]\" export\n      import \"The compilation flag -rectypes is required\"\n  | Missing_module (_, path1, path2) ->\n    fprintf ppf \"@[@[<hov>\";\n    if Path.same path1 path2 then\n      fprintf ppf \"Internal path@ %s@ is dangling.\" (Path.name path1)\n    else\n      fprintf ppf \"Internal path@ %s@ expands to@ %s@ which is dangling.\"\n        (Path.name path1) (Path.name path2);\n    fprintf ppf \"@]@ @[%s@ %s@ %s.@]@]\" \"The compiled interface for module\"\n      (Ident.name (Path.head path2))\n      \"was not found\"\n  | Illegal_value_name (_loc, name) ->\n    fprintf ppf \"'%s' is not a valid value identifier.\" name\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error ((Missing_module (loc, _, _) | Illegal_value_name (loc, _)) as err)\n      when loc <> Location.none ->\n      Some (Location.error_of_printer loc report_error err)\n    | Error err -> Some (Location.error_of_printer_file report_error err)\n    | 
_ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/env.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Environment handling *)\n\nopen Types\n\nmodule PathMap :\n  Map.S with type key = Path.t and type 'a t = 'a Map.Make(Path).t\n\ntype summary =\n  | Env_empty\n  | Env_value of summary * Ident.t * value_description\n  | Env_type of summary * Ident.t * type_declaration\n  | Env_extension of summary * Ident.t * extension_constructor\n  | Env_module of summary * Ident.t * module_declaration\n  | Env_modtype of summary * Ident.t * modtype_declaration\n  | Env_class of unit\n  | Env_cltype of summary * Ident.t * class_type_declaration\n  | Env_open of summary * Path.t\n  | Env_functor_arg of summary * Ident.t\n  | Env_constraints of summary * type_declaration PathMap.t\n  | Env_copy_types of summary * string list\n\ntype t\n\nval empty : t\nval initial_safe_string : t\n\nval diff : t -> t -> Ident.t list\nval copy_local : from:t -> t -> t\n\ntype type_descriptions = 
constructor_description list * label_description list\n\n(* For short-paths *)\ntype iter_cont\nval iter_types :\n  (Path.t -> Path.t * (type_declaration * type_descriptions) -> unit) ->\n  t ->\n  iter_cont\nval run_iter_cont : iter_cont list -> (Path.t * iter_cont) list\nval same_types : t -> t -> bool\nval used_persistent : unit -> Concr.t\nval find_shadowed_types : Path.t -> t -> Path.t list\nval without_cmis : ('a -> 'b) -> 'a -> 'b\n(* [without_cmis f arg] applies [f] to [arg], but does not\n   allow opening cmis during its execution *)\n\n(* Lookup by paths *)\n\nval find_value : Path.t -> t -> value_description\nval find_type : Path.t -> t -> type_declaration\nval find_type_descrs : Path.t -> t -> type_descriptions\nval find_module : Path.t -> t -> module_declaration\nval find_modtype : Path.t -> t -> modtype_declaration\nval find_class : Path.t -> t -> class_declaration\nval find_cltype : Path.t -> t -> class_type_declaration\n\nval find_type_expansion : Path.t -> t -> type_expr list * type_expr * int option\nval find_type_expansion_opt :\n  Path.t -> t -> type_expr list * type_expr * int option\n\n(* Find the manifest type information associated to a type for the sake\n   of the compiler's type-based optimisations. *)\nval find_modtype_expansion : Path.t -> t -> module_type\nval add_functor_arg : Ident.t -> t -> t\nval is_functor_arg : Path.t -> t -> bool\nval normalize_path : Location.t option -> t -> Path.t -> Path.t\n\n(* Normalize the path to a concrete value or module.\n   If the option is None, allow returning dangling paths.\n   Otherwise raise a Missing_module error, and may add forgotten\n   head as required global. 
*)\nval normalize_path_prefix : Location.t option -> t -> Path.t -> Path.t\n(* Only normalize the prefix part of the path *)\n\nval has_local_constraints : t -> bool\nval add_gadt_instance_level : int -> t -> t\nval gadt_instance_level : t -> type_expr -> int option\nval add_gadt_instances : t -> int -> type_expr list -> unit\nval add_gadt_instance_chain : t -> int -> type_expr -> unit\n\n(* Lookup by long identifiers *)\n\n(* ?loc is used to report 'deprecated module' warnings *)\n\nval lookup_value :\n  ?loc:Location.t -> Longident.t -> t -> Path.t * value_description\nval lookup_constructor :\n  ?loc:Location.t -> Longident.t -> t -> constructor_description\nval lookup_all_constructors :\n  ?loc:Location.t ->\n  Longident.t ->\n  t ->\n  (constructor_description * (unit -> unit)) list\nval lookup_label : ?loc:Location.t -> Longident.t -> t -> label_description\nval lookup_all_labels :\n  ?loc:Location.t ->\n  Longident.t ->\n  t ->\n  (label_description * (unit -> unit)) list\nval lookup_type : ?loc:Location.t -> Longident.t -> t -> Path.t\n(* Since 4.04, this function no longer returns [type_description].\n   To obtain it, you should either call [Env.find_type], or replace\n   it by [Typetexp.find_type] *)\n\nval lookup_module : load:bool -> ?loc:Location.t -> Longident.t -> t -> Path.t\nval lookup_modtype :\n  ?loc:Location.t -> Longident.t -> t -> Path.t * modtype_declaration\nval lookup_class :\n  ?loc:Location.t -> Longident.t -> t -> Path.t * class_declaration\nval lookup_cltype :\n  ?loc:Location.t -> Longident.t -> t -> Path.t * class_type_declaration\n\nval copy_types : string list -> t -> t\n(* Used only in Typecore.duplicate_ident_types. *)\n\nexception Recmodule\n(* Raise by lookup_module when the identifier refers\n   to one of the modules of a recursive definition\n   during the computation of its approximation (see #5965). 
*)\n\n(* Insertion by identifier *)\n\nval add_value :\n  ?check:(string -> Warnings.t) -> Ident.t -> value_description -> t -> t\nval add_type : check:bool -> Ident.t -> type_declaration -> t -> t\nval add_extension : check:bool -> Ident.t -> extension_constructor -> t -> t\nval add_module : ?arg:bool -> Ident.t -> module_type -> t -> t\nval add_module_declaration :\n  ?arg:bool -> check:bool -> Ident.t -> module_declaration -> t -> t\nval add_modtype : Ident.t -> modtype_declaration -> t -> t\n\nval add_cltype : Ident.t -> class_type_declaration -> t -> t\nval add_local_constraint : Path.t -> type_declaration -> int -> t -> t\nval add_local_type : Path.t -> type_declaration -> t -> t\n\n(* Insertion of all fields of a signature. *)\n\nval add_item : signature_item -> t -> t\nval add_signature : signature -> t -> t\n\n(* Insertion of all fields of a signature, relative to the given path.\n   Used to implement open. Returns None if the path refers to a functor,\n   not a structure. *)\nval open_signature :\n  ?used_slot:bool ref ->\n  ?loc:Location.t ->\n  ?toplevel:bool ->\n  Asttypes.override_flag ->\n  Path.t ->\n  t ->\n  t option\n\nval open_pers_signature : string -> t -> t\n\n(* Insertion by name *)\n\nval enter_value :\n  ?check:(string -> Warnings.t) ->\n  string ->\n  value_description ->\n  t ->\n  Ident.t * t\nval enter_type : string -> type_declaration -> t -> Ident.t * t\nval enter_extension : string -> extension_constructor -> t -> Ident.t * t\nval enter_module : ?arg:bool -> string -> module_type -> t -> Ident.t * t\nval enter_module_declaration :\n  ?arg:bool -> Ident.t -> module_declaration -> t -> t\nval enter_modtype : string -> modtype_declaration -> t -> Ident.t * t\n\nval enter_cltype : string -> class_type_declaration -> t -> Ident.t * t\n\n(* Initialize the cache of in-core module interfaces. *)\nval reset_cache : unit -> unit\n\n(* To be called before each toplevel phrase. 
*)\nval reset_cache_toplevel : unit -> unit\n\n(* Remember the name of the current compilation unit. *)\nval set_unit_name : string -> unit\nval get_unit_name : unit -> string\n\n(* Read, save a signature to/from a file *)\n\nval read_signature : string -> string -> signature\n(* Arguments: module name, file name. Results: signature. *)\n\nval save_signature :\n  ?check_exists:unit ->\n  deprecated:string option ->\n  signature ->\n  string ->\n  string ->\n  Cmi_format.cmi_infos\n(* Arguments: signature, module name, file name. *)\n\nval save_signature_with_imports :\n  ?check_exists:unit ->\n  deprecated:string option ->\n  signature ->\n  string ->\n  string ->\n  (string * Digest.t option) list ->\n  Cmi_format.cmi_infos\n(* Arguments: signature, module name, file name,\n   imported units with their CRCs. *)\n\n(* Return the CRC of the interface of the given compilation unit *)\n\nval crc_of_unit : string -> Digest.t\n\n(* Return the set of compilation units imported, with their CRC *)\n\nval imports : unit -> (string * Digest.t option) list\n\n(* Direct access to the table of imported compilation units with their CRC *)\n\nval crc_units : Consistbl.t\nval add_import : string -> unit\n\n(* Summaries -- compact representation of an environment, to be\n   exported in debugging information. *)\n\nval summary : t -> summary\n\n(* Return an equivalent environment where all fields have been reset,\n   except the summary. The initial environment can be rebuilt from the\n   summary, using Envaux.env_of_only_summary. 
*)\n\nval keep_only_summary : t -> t\nval env_of_only_summary : (summary -> Subst.t -> t) -> t -> t\n\n(* Error report *)\n\ntype error =\n  | Illegal_renaming of string * string * string\n  | Inconsistent_import of string * string * string\n  | Need_recursive_types of string * string\n  | Missing_module of Location.t * Path.t * Path.t\n  | Illegal_value_name of Location.t * string\n\nexception Error of error\n\nopen Format\n\nval report_error : formatter -> error -> unit\n\nval mark_value_used : t -> string -> value_description -> unit\nval mark_module_used : t -> string -> Location.t -> unit\nval mark_type_used : t -> string -> type_declaration -> unit\n\ntype constructor_usage = Positive | Pattern | Privatize\nval mark_constructor_used :\n  constructor_usage -> t -> string -> type_declaration -> string -> unit\nval mark_constructor :\n  constructor_usage -> t -> string -> constructor_description -> unit\nval mark_extension_used :\n  constructor_usage -> t -> extension_constructor -> string -> unit\n\nval in_signature : bool -> t -> t\nval implicit_coercion : t -> t\n\nval is_in_signature : t -> bool\n\nval set_value_used_callback :\n  string -> value_description -> (unit -> unit) -> unit\nval set_type_used_callback :\n  string -> type_declaration -> ((unit -> unit) -> unit) -> unit\n\n(* Forward declaration to break mutual recursion with Includemod. *)\nval check_modtype_inclusion :\n  (loc:Location.t -> t -> module_type -> Path.t -> module_type -> unit) ref\n\n(* Forward declaration to break mutual recursion with Mtype. *)\nval strengthen :\n  (aliasable:bool -> t -> module_type -> Path.t -> module_type) ref\n\n(* Forward declaration to break mutual recursion with Ctype. 
*)\nval same_constr : (t -> type_expr -> type_expr -> bool) ref\n\n(** Folding over all identifiers (for analysis purpose) *)\n\nval fold_values :\n  (string -> Path.t -> value_description -> 'a -> 'a) ->\n  Longident.t option ->\n  t ->\n  'a ->\n  'a\nval fold_types :\n  (string -> Path.t -> type_declaration * type_descriptions -> 'a -> 'a) ->\n  Longident.t option ->\n  t ->\n  'a ->\n  'a\nval fold_constructors :\n  (constructor_description -> 'a -> 'a) -> Longident.t option -> t -> 'a -> 'a\nval fold_labels :\n  (label_description -> 'a -> 'a) -> Longident.t option -> t -> 'a -> 'a\n\nval fold_modules :\n  (string -> Path.t -> module_declaration -> 'a -> 'a) ->\n  Longident.t option ->\n  t ->\n  'a ->\n  'a\n(** Persistent structures are only traversed if they are already loaded. *)\n\nval fold_modtypes :\n  (string -> Path.t -> modtype_declaration -> 'a -> 'a) ->\n  Longident.t option ->\n  t ->\n  'a ->\n  'a\nval fold_classs :\n  (string -> Path.t -> class_declaration -> 'a -> 'a) ->\n  Longident.t option ->\n  t ->\n  'a ->\n  'a\nval fold_cltypes :\n  (string -> Path.t -> class_type_declaration -> 'a -> 'a) ->\n  Longident.t option ->\n  t ->\n  'a ->\n  'a\n\nval scrape_alias : t -> module_type -> module_type\n(** Utilities *)\n\nval check_value_name : string -> Location.t -> unit\n\nmodule Persistent_signature : sig\n  type t = {\n    filename: string;  (** Name of the file containing the signature. *)\n    cmi: Cmi_format.cmi_infos;\n  }\n\n  val load : (unit_name:string -> t option) ref\n  (** Function used to load a persistent signature. The default is to look for\n      the .cmi file in the load path. This function can be overridden to load it\n      from memory, for instance to build a self-contained toplevel. *)\nend\n"
  },
  {
    "path": "analysis/vendor/ml/envaux.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*           Jerome Vouillon, projet Cristal, INRIA Rocquencourt          *)\n(*           OCaml port by John Malecki and Xavier Leroy                  *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          
*)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Env\n\ntype error = Module_not_found of Path.t\n\nexception Error of error\n\nlet env_cache = (Hashtbl.create 59 : (Env.summary * Subst.t, Env.t) Hashtbl.t)\n\nlet reset_cache () =\n  Hashtbl.clear env_cache;\n  Env.reset_cache ()\n\nlet rec env_from_summary sum subst =\n  try Hashtbl.find env_cache (sum, subst)\n  with Not_found ->\n    let env =\n      match sum with\n      | Env_empty -> Env.empty\n      | Env_value (s, id, desc) ->\n        Env.add_value id\n          (Subst.value_description subst desc)\n          (env_from_summary s subst)\n      | Env_type (s, id, desc) ->\n        Env.add_type ~check:false id\n          (Subst.type_declaration subst desc)\n          (env_from_summary s subst)\n      | Env_extension (s, id, desc) ->\n        Env.add_extension ~check:false id\n          (Subst.extension_constructor subst desc)\n          (env_from_summary s subst)\n      | Env_module (s, id, desc) ->\n        Env.add_module_declaration ~check:false id\n          (Subst.module_declaration subst desc)\n          (env_from_summary s subst)\n      | Env_modtype (s, id, desc) ->\n        Env.add_modtype id\n          (Subst.modtype_declaration subst desc)\n          (env_from_summary s subst)\n      | Env_cltype (s, id, desc) ->\n        Env.add_cltype id\n          (Subst.cltype_declaration subst desc)\n          (env_from_summary s subst)\n      | Env_open (s, path) -> (\n        let env = env_from_summary s subst in\n        let path' = Subst.module_path subst path in\n        match Env.open_signature Asttypes.Override path' env with\n        | Some env -> env\n        | None -> assert false)\n      | Env_functor_arg (Env_module (s, id, desc), id') when Ident.same id id'\n        ->\n        Env.add_module_declaration ~check:false id\n          (Subst.module_declaration subst desc)\n          
~arg:true (env_from_summary s subst)\n      | Env_class _ | Env_functor_arg _ -> assert false\n      | Env_constraints (s, map) ->\n        PathMap.fold\n          (fun path info ->\n            Env.add_local_type\n              (Subst.type_path subst path)\n              (Subst.type_declaration subst info))\n          map (env_from_summary s subst)\n      | Env_copy_types (s, sl) -> Env.copy_types sl (env_from_summary s subst)\n    in\n    Hashtbl.add env_cache (sum, subst) env;\n    env\n\nlet env_of_only_summary env = Env.env_of_only_summary env_from_summary env\n\n(* Error report *)\n\nopen Format\n\nlet report_error ppf = function\n  | Module_not_found p ->\n    fprintf ppf \"@[Cannot find module %a@].@.\" Printtyp.path p\n"
  },
  {
    "path": "analysis/vendor/ml/envaux.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*           Jerome Vouillon, projet Cristal, INRIA Rocquencourt          *)\n(*           OCaml port by John Malecki and Xavier Leroy                  *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Format\n\n(* Convert environment summaries to environments *)\n\nval env_from_summary : Env.summary -> Subst.t -> Env.t\n\n(* Empty the environment caches. To be called when load_path changes. *)\n\nval reset_cache : unit -> unit\n\nval env_of_only_summary : Env.t -> Env.t\n\n(* Error report *)\n\ntype error = Module_not_found of Path.t\n\nexception Error of error\n\nval report_error : formatter -> error -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/error_message_utils.ml",
    "content": "type type_clash_statement = FunctionCall\ntype type_clash_context =\n  | SetRecordField\n  | ArrayValue\n  | FunctionReturn\n  | MaybeUnwrapOption\n  | IfCondition\n  | IfReturn\n  | Switch\n  | StringConcat\n  | ComparisonOperator\n  | MathOperator of {\n      for_float: bool;\n      operator: string;\n      is_constant: string option;\n    }\n  | FunctionArgument\n  | Statement of type_clash_statement\n\nlet fprintf = Format.fprintf\n\nlet error_type_text ppf type_clash_context =\n  let text =\n    match type_clash_context with\n    | Some (Statement FunctionCall) -> \"This function call returns:\"\n    | Some (MathOperator {is_constant = Some _}) -> \"This value has type:\"\n    | Some ArrayValue -> \"This array item has type:\"\n    | Some SetRecordField ->\n      \"You're assigning something to this field that has type:\"\n    | _ -> \"This has type:\"\n  in\n  fprintf ppf \"%s\" text\n\nlet error_expected_type_text ppf type_clash_context =\n  match type_clash_context with\n  | Some FunctionArgument ->\n    fprintf ppf \"But this function argument is expecting:\"\n  | Some ComparisonOperator ->\n    fprintf ppf \"But it's being compared to something of type:\"\n  | Some Switch -> fprintf ppf \"But this switch is expected to return:\"\n  | Some IfCondition ->\n    fprintf ppf \"But @{<info>if@} conditions must always be of type:\"\n  | Some IfReturn ->\n    fprintf ppf \"But this @{<info>if@} statement is expected to return:\"\n  | Some ArrayValue ->\n    fprintf ppf \"But this array is expected to have items of type:\"\n  | Some SetRecordField -> fprintf ppf \"But this record field is of type:\"\n  | Some (Statement FunctionCall) -> fprintf ppf \"But it's expected to return:\"\n  | Some (MathOperator {operator}) ->\n    fprintf ppf\n      \"But it's being used with the @{<info>%s@} operator, which works on:\"\n      operator\n  | Some FunctionReturn ->\n    fprintf ppf \"But this function is expecting you to return:\"\n  | _ -> fprintf ppf 
\"But it's expected to have type:\"\n\nlet print_extra_type_clash_help ppf trace type_clash_context =\n  match (type_clash_context, trace) with\n  | Some (MathOperator {for_float; operator; is_constant}), _ -> (\n    let operator_for_other_type =\n      match operator with\n      | \"+\" -> \"+.\"\n      | \"+.\" -> \"+\"\n      | \"/\" -> \"/.\"\n      | \"/.\" -> \"/\"\n      | \"-\" -> \"-.\"\n      | \"*\" -> \"*.\"\n      | \"*.\" -> \"*\"\n      | v -> v\n    in\n    let operator_text =\n      match operator.[0] with\n      | '+' -> \"add\"\n      | '-' -> \"subtract\"\n      | '/' -> \"divide\"\n      | '*' -> \"multiply\"\n      | _ -> \"compute\"\n    in\n    (* TODO check int vs float explicitly before showing this *)\n    (match (operator, trace) with\n    | ( \"+\",\n        [\n          ({Types.desc = Tconstr (p1, _, _)}, _);\n          ({desc = Tconstr (p2, _, _)}, _);\n        ] )\n      when Path.same Predef.path_string p1 || Path.same Predef.path_string p2 ->\n      fprintf ppf\n        \"\\n\\n\\\n        \\  Are you looking to concatenate strings? Use the operator \\\n         @{<info>++@}, which concatenates strings.\\n\\n\\\n        \\  Possible solutions:\\n\\\n        \\  - Change the @{<info>+@} operator to @{<info>++@} to concatenate \\\n         strings instead.\"\n    | _ ->\n      fprintf ppf\n        \"\\n\\n\\\n        \\  Floats and ints have their own mathematical operators. This means \\\n         you cannot %s a float and an int without converting between the two.\\n\\n\\\n        \\  Possible solutions:\\n\\\n        \\  - Ensure all values in this calculation has the type @{<info>%s@}. 
\\\n         You can convert between floats and ints via \\\n         @{<info>Belt.Float.toInt@} and @{<info>Belt.Int.fromFloat@}.\"\n        operator_text\n        (if for_float then \"float\" else \"int\"));\n    match (is_constant, trace) with\n    | Some constant, _ ->\n      if for_float then\n        fprintf ppf\n          \"\\n\\\n          \\  - Make @{<info>%s@} a @{<info>float@} by adding a trailing dot: \\\n           @{<info>%s.@}\"\n          constant constant\n      else\n        fprintf ppf\n          \"\\n\\\n          \\  - Make @{<info>%s@} an @{<info>int@} by removing the dot or \\\n           explicitly converting to int\"\n          constant\n    | ( _,\n        [\n          ({Types.desc = Tconstr (p1, _, _)}, _);\n          ({desc = Tconstr (p2, _, _)}, _);\n        ] ) -> (\n      match (Path.name p1, Path.name p2) with\n      | \"float\", \"int\" | \"int\", \"float\" ->\n        fprintf ppf\n          \"\\n\\\n          \\  - Change the operator to @{<info>%s@}, which works on @{<info>%s@}\"\n          operator_for_other_type\n          (if for_float then \"int\" else \"float\")\n      | _ -> ())\n    | _ -> ())\n  | Some Switch, _ ->\n    fprintf ppf\n      \"\\n\\n\\\n      \\  All branches in a @{<info>switch@} must return the same type. 
To fix \\\n       this, change your branch to return the expected type.\"\n  | Some IfCondition, _ ->\n    fprintf ppf\n      \"\\n\\n\\\n      \\  To fix this, change the highlighted code so it evaluates to a \\\n       @{<info>bool@}.\"\n  | Some IfReturn, _ ->\n    fprintf ppf\n      \"\\n\\n\\\n      \\  @{<info>if@} expressions must return the same type in all branches \\\n       (@{<info>if@}, @{<info>else if@}, @{<info>else@}).\"\n  | Some MaybeUnwrapOption, _ ->\n    fprintf ppf\n      \"\\n\\n\\\n      \\  Possible solutions:\\n\\\n      \\  - Unwrap the option to its underlying value using \\\n       `yourValue->Belt.Option.getWithDefault(someDefaultValue)`\"\n  | Some ComparisonOperator, _ ->\n    fprintf ppf \"\\n\\n  You can only compare things of the same type.\"\n  | Some ArrayValue, _ ->\n    fprintf ppf\n      \"\\n\\n\\\n      \\  Arrays can only contain items of the same type.\\n\\n\\\n      \\  Possible solutions:\\n\\\n      \\  - Convert all values in the array to the same type.\\n\\\n      \\  - Use a tuple, if your array is of fixed length. Tuples can mix types \\\n       freely, and compiles to a JavaScript array. 
Example of a tuple: `let \\\n       myTuple = (10, \\\"hello\\\", 15.5, true)\"\n  | _ -> ()\n\nlet type_clash_context_from_function sexp sfunct =\n  let is_constant =\n    match sexp.Parsetree.pexp_desc with\n    | Pexp_constant (Pconst_integer (txt, _) | Pconst_float (txt, _)) ->\n      Some txt\n    | _ -> None\n  in\n  match sfunct.Parsetree.pexp_desc with\n  | Pexp_ident\n      {txt = Lident (\"=\" | \"==\" | \"<>\" | \"!=\" | \">\" | \">=\" | \"<\" | \"<=\")} ->\n    Some ComparisonOperator\n  | Pexp_ident {txt = Lident \"++\"} -> Some StringConcat\n  | Pexp_ident {txt = Lident ((\"/.\" | \"*.\" | \"+.\" | \"-.\") as operator)} ->\n    Some (MathOperator {for_float = true; operator; is_constant})\n  | Pexp_ident {txt = Lident ((\"/\" | \"*\" | \"+\" | \"-\") as operator)} ->\n    Some (MathOperator {for_float = false; operator; is_constant})\n  | _ -> Some FunctionArgument\n\nlet type_clash_context_for_function_argument type_clash_context sarg0 =\n  match type_clash_context with\n  | Some (MathOperator {for_float; operator}) ->\n    Some\n      (MathOperator\n         {\n           for_float;\n           operator;\n           is_constant =\n             (match sarg0.Parsetree.pexp_desc with\n             | Pexp_constant (Pconst_integer (txt, _) | Pconst_float (txt, _))\n               ->\n               Some txt\n             | _ -> None);\n         })\n  | type_clash_context -> type_clash_context\n\nlet type_clash_context_maybe_option ty_expected ty_res =\n  match (ty_expected, ty_res) with\n  | ( {Types.desc = Tconstr (expected_path, _, _)},\n      {Types.desc = Tconstr (type_path, _, _)} )\n    when Path.same Predef.path_option type_path\n         && Path.same expected_path Predef.path_option = false\n         && Path.same expected_path Predef.path_uncurried = false ->\n    Some MaybeUnwrapOption\n  | _ -> None\n\nlet type_clash_context_in_statement sexp =\n  match sexp.Parsetree.pexp_desc with\n  | Pexp_apply _ -> Some (Statement FunctionCall)\n  | _ -> 
None\n"
  },
  {
    "path": "analysis/vendor/ml/includeclass.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*           Jerome Vouillon, projet Cristal, INRIA Rocquencourt          *)\n(*                                                                        *)\n(*   Copyright 1997 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Inclusion checks for the class language *)\n\nopen Types\n\nlet class_types env cty1 cty2 = Ctype.match_class_types env cty1 cty2\n\nlet class_type_declarations ~loc env cty1 cty2 =\n  Builtin_attributes.check_deprecated_inclusion ~def:cty1.clty_loc\n    ~use:cty2.clty_loc loc cty1.clty_attributes cty2.clty_attributes\n    (Path.last cty1.clty_path);\n  Ctype.match_class_declarations env cty1.clty_params cty1.clty_type\n    cty2.clty_params cty2.clty_type\n\nlet class_declarations env cty1 cty2 =\n  match (cty1.cty_new, cty2.cty_new) with\n  | None, Some _ -> [Ctype.CM_Virtual_class]\n  | _ ->\n    Ctype.match_class_declarations env cty1.cty_params cty1.cty_type\n      cty2.cty_params cty2.cty_type\n\nopen Format\nopen Ctype\n\n(*\nlet rec hide_params = function\n    Tcty_arrow (\"*\", _, cty) -> hide_params cty\n  | cty -> cty\n*)\n\nlet include_err ppf = function\n  | 
CM_Virtual_class ->\n    fprintf ppf \"A class cannot be changed from virtual to concrete\"\n  | CM_Parameter_arity_mismatch _ ->\n    fprintf ppf \"The classes do not have the same number of type parameters\"\n  | CM_Type_parameter_mismatch (env, trace) ->\n    Printtyp.report_unification_error ppf env ~unif:false trace\n      (function\n        | ppf -> fprintf ppf \"A type parameter has type\")\n      (function ppf -> fprintf ppf \"but is expected to have type\")\n  | CM_Class_type_mismatch (env, cty1, cty2) ->\n    Printtyp.wrap_printing_env env (fun () ->\n        fprintf ppf \"@[The class type@;<1 2>%a@ %s@;<1 2>%a@]\"\n          Printtyp.class_type cty1 \"is not matched by the class type\"\n          Printtyp.class_type cty2)\n  | CM_Parameter_mismatch (env, trace) ->\n    Printtyp.report_unification_error ppf env ~unif:false trace\n      (function\n        | ppf -> fprintf ppf \"A parameter has type\")\n      (function ppf -> fprintf ppf \"but is expected to have type\")\n  | CM_Val_type_mismatch (lab, env, trace) ->\n    Printtyp.report_unification_error ppf env ~unif:false trace\n      (function\n        | ppf -> fprintf ppf \"The instance variable %s@ has type\" lab)\n      (function ppf -> fprintf ppf \"but is expected to have type\")\n  | CM_Meth_type_mismatch (lab, env, trace) ->\n    Printtyp.report_unification_error ppf env ~unif:false trace\n      (function\n        | ppf -> fprintf ppf \"The method %s@ has type\" lab)\n      (function ppf -> fprintf ppf \"but is expected to have type\")\n  | CM_Non_mutable_value lab ->\n    fprintf ppf \"@[The non-mutable instance variable %s cannot become mutable@]\"\n      lab\n  | CM_Non_concrete_value lab ->\n    fprintf ppf \"@[The virtual instance variable %s cannot become concrete@]\"\n      lab\n  | CM_Missing_value lab ->\n    fprintf ppf \"@[The first class type has no instance variable %s@]\" lab\n  | CM_Missing_method lab ->\n    fprintf ppf \"@[The first class type has no field %s@]\" lab\n  | 
CM_Hide_public lab ->\n    fprintf ppf \"@[The public method %s cannot be hidden@]\" lab\n  | CM_Hide_virtual (k, lab) ->\n    fprintf ppf \"@[The virtual %s %s cannot be hidden@]\" k lab\n  | CM_Public_method lab ->\n    fprintf ppf \"@[The public method %s cannot become private\" lab\n  | CM_Virtual_method lab ->\n    fprintf ppf \"@[The virtual method %s cannot become concrete\" lab\n  | CM_Private_method lab ->\n    fprintf ppf \"The private method %s cannot become public\" lab\n\nlet report_error ppf = function\n  | [] -> ()\n  | err :: errs ->\n    let print_errs ppf errs =\n      List.iter (fun err -> fprintf ppf \"@ %a\" include_err err) errs\n    in\n    fprintf ppf \"@[<v>%a%a@]\" include_err err print_errs errs\n"
  },
  {
    "path": "analysis/vendor/ml/includeclass.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*           Jerome Vouillon, projet Cristal, INRIA Rocquencourt          *)\n(*                                                                        *)\n(*   Copyright 1997 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Inclusion checks for the class language *)\n\nopen Types\nopen Ctype\nopen Format\n\nval class_types : Env.t -> class_type -> class_type -> class_match_failure list\nval class_type_declarations :\n  loc:Location.t ->\n  Env.t ->\n  class_type_declaration ->\n  class_type_declaration ->\n  class_match_failure list\nval class_declarations :\n  Env.t -> class_declaration -> class_declaration -> class_match_failure list\n\nval report_error : formatter -> class_match_failure list -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/includecore.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Inclusion checks for the core language *)\n\nopen Asttypes\nopen Path\nopen Types\nopen Typedtree\n\n(* Inclusion between value descriptions *)\n\nexception Dont_match\n\nlet value_descriptions ~loc env name (vd1 : Types.value_description)\n    (vd2 : Types.value_description) =\n  Builtin_attributes.check_deprecated_inclusion ~def:vd1.val_loc\n    ~use:vd2.val_loc loc vd1.val_attributes vd2.val_attributes (Ident.name name);\n  if Ctype.moregeneral env true vd1.val_type vd2.val_type then\n    match (vd1.val_kind, vd2.val_kind) with\n    | Val_prim p1, Val_prim p2 ->\n      if !Primitive.coerce p1 p2 then Tcoerce_none else raise Dont_match\n    | Val_prim p, _ ->\n      let pc =\n        {\n          pc_desc = p;\n          pc_type = vd2.Types.val_type;\n          pc_env = env;\n          pc_loc = vd1.Types.val_loc;\n          pc_id = name;\n        }\n      in\n      
Tcoerce_primitive pc\n    | _, Val_prim _ -> raise Dont_match\n    | _, _ -> Tcoerce_none\n  else raise Dont_match\n\n(* Inclusion between \"private\" annotations *)\n\nlet private_flags decl1 decl2 =\n  match (decl1.type_private, decl2.type_private) with\n  | Private, Public ->\n    decl2.type_kind = Type_abstract\n    && (decl2.type_manifest = None || decl1.type_kind <> Type_abstract)\n  | _, _ -> true\n\n(* Inclusion between manifest types (particularly for private row types) *)\n\nlet is_absrow env ty =\n  match ty.desc with\n  | Tconstr (Pident _, _, _) -> (\n    match Ctype.expand_head env ty with\n    | {desc = Tobject _ | Tvariant _} -> true\n    | _ -> false)\n  | _ -> false\n\nlet type_manifest env ty1 params1 ty2 params2 priv2 =\n  let ty1' = Ctype.expand_head env ty1 and ty2' = Ctype.expand_head env ty2 in\n  match (ty1'.desc, ty2'.desc) with\n  | Tvariant row1, Tvariant row2 when is_absrow env (Btype.row_more row2) ->\n    let row1 = Btype.row_repr row1 and row2 = Btype.row_repr row2 in\n    Ctype.equal env true (ty1 :: params1) (row2.row_more :: params2)\n    && (match row1.row_more with\n       | {desc = Tvar _ | Tconstr _ | Tnil} -> true\n       | _ -> false)\n    &&\n    let r1, r2, pairs =\n      Ctype.merge_row_fields row1.row_fields row2.row_fields\n    in\n    ((not row2.row_closed)\n    || (row1.row_closed && Ctype.filter_row_fields false r1 = []))\n    && List.for_all\n         (fun (_, f) ->\n           match Btype.row_field_repr f with\n           | Rabsent | Reither _ -> true\n           | Rpresent _ -> false)\n         r2\n    &&\n    let to_equal = ref (List.combine params1 params2) in\n    List.for_all\n      (fun (_, f1, f2) ->\n        match (Btype.row_field_repr f1, Btype.row_field_repr f2) with\n        | Rpresent (Some t1), (Rpresent (Some t2) | Reither (false, [t2], _, _))\n          ->\n          to_equal := (t1, t2) :: !to_equal;\n          true\n        | Rpresent None, (Rpresent None | Reither (true, [], _, _)) -> true\n       
 | Reither (c1, tl1, _, _), Reither (c2, tl2, _, _)\n          when List.length tl1 = List.length tl2 && c1 = c2 ->\n          to_equal := List.combine tl1 tl2 @ !to_equal;\n          true\n        | Rabsent, (Reither _ | Rabsent) -> true\n        | _ -> false)\n      pairs\n    &&\n    let tl1, tl2 = List.split !to_equal in\n    Ctype.equal env true tl1 tl2\n  | Tobject (fi1, _), Tobject (fi2, _)\n    when is_absrow env (snd (Ctype.flatten_fields fi2)) ->\n    let fields2, rest2 = Ctype.flatten_fields fi2 in\n    Ctype.equal env true (ty1 :: params1) (rest2 :: params2)\n    &&\n    let fields1, rest1 = Ctype.flatten_fields fi1 in\n    (match rest1 with\n    | {desc = Tnil | Tvar _ | Tconstr _} -> true\n    | _ -> false)\n    &&\n    let pairs, _miss1, miss2 = Ctype.associate_fields fields1 fields2 in\n    miss2 = []\n    &&\n    let tl1, tl2 =\n      List.split (List.map (fun (_, _, t1, _, t2) -> (t1, t2)) pairs)\n    in\n    Ctype.equal env true (params1 @ tl1) (params2 @ tl2)\n  | _ ->\n    let rec check_super ty1 =\n      Ctype.equal env true (ty1 :: params1) (ty2 :: params2)\n      || priv2 = Private\n         &&\n         try\n           check_super\n             (Ctype.try_expand_once_opt env (Ctype.expand_head env ty1))\n         with Ctype.Cannot_expand -> false\n    in\n    check_super ty1\n\n(* Inclusion between type declarations *)\n\ntype type_mismatch =\n  | Arity\n  | Privacy\n  | Kind\n  | Constraint\n  | Manifest\n  | Variance\n  | Field_type of Ident.t\n  | Field_mutable of Ident.t\n  | Field_arity of Ident.t\n  | Field_names of int * string * string\n  | Field_missing of bool * Ident.t\n  | Record_representation of record_representation * record_representation\n  | Unboxed_representation of bool (* true means second one is unboxed *)\n  | Immediate\n  | Tag_name\n  | Variant_representation of Ident.t\n\nlet report_type_mismatch0 first second decl ppf err =\n  let pr fmt = Format.fprintf ppf fmt in\n  match err with\n  | Arity -> pr \"They have 
different arities\"\n  | Privacy -> pr \"A private type would be revealed\"\n  | Kind -> pr \"Their kinds differ\"\n  | Constraint -> pr \"Their constraints differ\"\n  | Manifest -> ()\n  | Variance -> pr \"Their variances do not agree\"\n  | Field_type s -> pr \"The types for field %s are not equal\" (Ident.name s)\n  | Field_mutable s ->\n    pr \"The mutability of field %s is different\" (Ident.name s)\n  | Field_arity s -> pr \"The arities for field %s differ\" (Ident.name s)\n  | Field_names (n, name1, name2) ->\n    pr \"Fields number %i have different names, %s and %s\" n name1 name2\n  | Field_missing (b, s) ->\n    pr \"The field %s is only present in %s %s\" (Ident.name s)\n      (if b then second else first)\n      decl\n  | Record_representation (rep1, rep2) -> (\n    let default () = pr \"Their internal representations differ\" in\n    match (rep1, rep2) with\n    | Record_optional_labels lbls1, Record_optional_labels lbls2 -> (\n      let only_in_lhs =\n        Ext_list.find_first lbls1 (fun l -> not (Ext_list.mem_string lbls2 l))\n      in\n      let only_in_rhs =\n        Ext_list.find_first lbls2 (fun l -> not (Ext_list.mem_string lbls1 l))\n      in\n      match (only_in_lhs, only_in_rhs) with\n      | Some l, _ -> pr \"@optional label %s only in %s\" l second\n      | _, Some l -> pr \"@optional label %s only in %s\" l first\n      | None, None -> default ())\n    | _ -> default ())\n  | Unboxed_representation b ->\n    pr \"Their internal representations differ:@ %s %s %s\"\n      (if b then second else first)\n      decl \"uses unboxed representation\"\n  | Immediate -> pr \"%s is not an immediate type\" first\n  | Tag_name -> pr \"Their @tag annotations differ\"\n  | Variant_representation s ->\n    pr \"The internal representations for case %s are not equal\" (Ident.name s)\n\nlet report_type_mismatch first second decl ppf =\n  List.iter (fun err ->\n      if err = Manifest then ()\n      else\n        Format.fprintf ppf \"@ %a.\" 
(report_type_mismatch0 first second decl) err)\n\nlet rec compare_constructor_arguments ~loc env cstr params1 params2 arg1 arg2 =\n  match (arg1, arg2) with\n  | Types.Cstr_tuple arg1, Types.Cstr_tuple arg2 ->\n    if List.length arg1 <> List.length arg2 then [Field_arity cstr]\n    else if\n      (* Ctype.equal must be called on all arguments at once, cf. PR#7378 *)\n      Ctype.equal env true (params1 @ arg1) (params2 @ arg2)\n    then []\n    else [Field_type cstr]\n  | Types.Cstr_record l1, Types.Cstr_record l2 ->\n    compare_records env ~loc params1 params2 0 l1 l2\n  | _ -> [Field_type cstr]\n\nand compare_variants ~loc env params1 params2 n\n    (cstrs1 : Types.constructor_declaration list)\n    (cstrs2 : Types.constructor_declaration list) =\n  match (cstrs1, cstrs2) with\n  | [], [] -> []\n  | [], c :: _ -> [Field_missing (true, c.Types.cd_id)]\n  | c :: _, [] -> [Field_missing (false, c.Types.cd_id)]\n  | cd1 :: rem1, cd2 :: rem2 ->\n    if Ident.name cd1.cd_id <> Ident.name cd2.cd_id then\n      [Field_names (n, cd1.cd_id.name, cd2.cd_id.name)]\n    else (\n      Builtin_attributes.check_deprecated_inclusion ~def:cd1.cd_loc\n        ~use:cd2.cd_loc loc cd1.cd_attributes cd2.cd_attributes\n        (Ident.name cd1.cd_id);\n      let r =\n        match (cd1.cd_res, cd2.cd_res) with\n        | Some r1, Some r2 ->\n          if Ctype.equal env true [r1] [r2] then\n            compare_constructor_arguments ~loc env cd1.cd_id [r1] [r2]\n              cd1.cd_args cd2.cd_args\n          else [Field_type cd1.cd_id]\n        | Some _, None | None, Some _ -> [Field_type cd1.cd_id]\n        | _ ->\n          compare_constructor_arguments ~loc env cd1.cd_id params1 params2\n            cd1.cd_args cd2.cd_args\n      in\n      let r =\n        if r <> [] then r\n        else\n          match Ast_untagged_variants.is_nullary_variant cd1.cd_args with\n          | true ->\n            let tag_type1 =\n              Ast_untagged_variants.process_tag_type 
cd1.cd_attributes\n            in\n            let tag_type2 =\n              Ast_untagged_variants.process_tag_type cd2.cd_attributes\n            in\n            if tag_type1 <> tag_type2 then [Variant_representation cd1.cd_id]\n            else []\n          | false -> r\n      in\n      if r <> [] then r\n      else compare_variants ~loc env params1 params2 (n + 1) rem1 rem2)\n\nand compare_records ~loc env params1_ params2_ n_\n    (labels1_ : Types.label_declaration list)\n    (labels2_ : Types.label_declaration list) =\n  (* First try a fast path that checks if all the fields at once are consistent.\n     When that fails, try a slow path that blames the first inconsistent field *)\n  let rec aux ~fast params1 params2 n labels1 labels2 =\n    match (labels1, labels2) with\n    | [], [] ->\n      if fast then\n        if Ctype.equal env true params1 params2 then []\n        else aux ~fast:false params1_ params2_ n_ labels1_ labels2_\n      else []\n    | [], l :: _ -> [Field_missing (true, l.Types.ld_id)]\n    | l :: _, [] -> [Field_missing (false, l.Types.ld_id)]\n    | ld1 :: rem1, ld2 :: rem2 ->\n      if Ident.name ld1.ld_id <> Ident.name ld2.ld_id then\n        [Field_names (n, ld1.ld_id.name, ld2.ld_id.name)]\n      else if ld1.ld_mutable <> ld2.ld_mutable then [Field_mutable ld1.ld_id]\n      else (\n        Builtin_attributes.check_deprecated_mutable_inclusion ~def:ld1.ld_loc\n          ~use:ld2.ld_loc loc ld1.ld_attributes ld2.ld_attributes\n          (Ident.name ld1.ld_id);\n        let field_mismatch =\n          !Builtin_attributes.check_bs_attributes_inclusion\n            ld1.ld_attributes ld2.ld_attributes (Ident.name ld1.ld_id)\n        in\n        match field_mismatch with\n        | Some (a, b) -> [Field_names (n, a, b)]\n        | None ->\n          let current_field_consistent =\n            if fast then true\n            else\n              Ctype.equal env true (ld1.ld_type :: params1)\n                (ld2.ld_type :: params2)\n          
in\n          if current_field_consistent then\n            (* add arguments to the parameters, cf. PR#7378 *)\n            aux ~fast (ld1.ld_type :: params1) (ld2.ld_type :: params2) (n + 1)\n              rem1 rem2\n          else [Field_type ld1.ld_id])\n  in\n  aux ~fast:true params1_ params2_ n_ labels1_ labels2_\n\nlet type_declarations ?(equality = false) ~loc env name decl1 id decl2 =\n  Builtin_attributes.check_deprecated_inclusion ~def:decl1.type_loc\n    ~use:decl2.type_loc loc decl1.type_attributes decl2.type_attributes name;\n  if decl1.type_arity <> decl2.type_arity then [Arity]\n  else if not (private_flags decl1 decl2) then [Privacy]\n  else\n    let err =\n      match (decl1.type_manifest, decl2.type_manifest) with\n      | _, None ->\n        if Ctype.equal env true decl1.type_params decl2.type_params then []\n        else [Constraint]\n      | Some ty1, Some ty2 ->\n        if\n          type_manifest env ty1 decl1.type_params ty2 decl2.type_params\n            decl2.type_private\n        then []\n        else [Manifest]\n      | None, Some ty2 ->\n        let ty1 =\n          Btype.newgenty (Tconstr (Pident id, decl2.type_params, ref Mnil))\n        in\n        if Ctype.equal env true decl1.type_params decl2.type_params then\n          if Ctype.equal env false [ty1] [ty2] then [] else [Manifest]\n        else [Constraint]\n    in\n    if err <> [] then err\n    else\n      let err =\n        match\n          ( decl2.type_kind,\n            decl1.type_unboxed.unboxed,\n            decl2.type_unboxed.unboxed )\n        with\n        | Type_abstract, _, _ -> []\n        | _, true, false -> [Unboxed_representation false]\n        | _, false, true -> [Unboxed_representation true]\n        | _ -> []\n      in\n      if err <> [] then err\n      else\n        let err =\n          let tag1 =\n            Ast_untagged_variants.process_tag_name decl1.type_attributes\n          in\n          let tag2 =\n            Ast_untagged_variants.process_tag_name 
decl2.type_attributes\n          in\n          if tag1 <> tag2 then [Tag_name] else err\n        in\n        if err <> [] then err\n        else\n          let err =\n            match (decl1.type_kind, decl2.type_kind) with\n            | _, Type_abstract -> []\n            | Type_variant cstrs1, Type_variant cstrs2 ->\n              let mark cstrs usage name decl =\n                List.iter\n                  (fun c ->\n                    Env.mark_constructor_used usage env name decl\n                      (Ident.name c.Types.cd_id))\n                  cstrs\n              in\n              let usage =\n                if decl1.type_private = Private || decl2.type_private = Public\n                then Env.Positive\n                else Env.Privatize\n              in\n              mark cstrs1 usage name decl1;\n              if equality then mark cstrs2 Env.Positive (Ident.name id) decl2;\n              compare_variants ~loc env decl1.type_params decl2.type_params 1\n                cstrs1 cstrs2\n            | Type_record (labels1, rep1), Type_record (labels2, rep2) ->\n              let err =\n                compare_records ~loc env decl1.type_params decl2.type_params 1\n                  labels1 labels2\n              in\n              if err <> [] || rep1 = rep2 then err\n              else [Record_representation (rep1, rep2)]\n            | Type_open, Type_open -> []\n            | _, _ -> [Kind]\n          in\n          if err <> [] then err\n          else\n            let abstr =\n              decl2.type_kind = Type_abstract && decl2.type_manifest = None\n            in\n            (* If attempt to assign a non-immediate type (e.g. 
string) to a type that\n             * must be immediate, then we error *)\n            let err =\n              if abstr && (not decl1.type_immediate) && decl2.type_immediate\n              then [Immediate]\n              else []\n            in\n            if err <> [] then err\n            else\n              let need_variance =\n                abstr\n                || decl1.type_private = Private\n                || decl1.type_kind = Type_open\n              in\n              if not need_variance then []\n              else\n                let abstr = abstr || decl2.type_private = Private in\n                let opn =\n                  decl2.type_kind = Type_open && decl2.type_manifest = None\n                in\n                let constrained ty = not Btype.(is_Tvar (repr ty)) in\n                if\n                  List.for_all2\n                    (fun ty (v1, v2) ->\n                      let open Variance in\n                      let imp a b = (not a) || b in\n                      let co1, cn1 = get_upper v1 and co2, cn2 = get_upper v2 in\n                      (if abstr then imp co1 co2 && imp cn1 cn2\n                       else if opn || constrained ty then co1 = co2 && cn1 = cn2\n                       else true)\n                      &&\n                      let p1, n1, i1, j1 = get_lower v1\n                      and p2, n2, i2, j2 = get_lower v2 in\n                      imp abstr\n                        (imp p2 p1 && imp n2 n1 && imp i2 i1 && imp j2 j1))\n                    decl2.type_params\n                    (List.combine decl1.type_variance decl2.type_variance)\n                then []\n                else [Variance]\n\n(* Inclusion between extension constructors *)\n\nlet extension_constructors ~loc env id ext1 ext2 =\n  let usage =\n    if ext1.ext_private = Private || ext2.ext_private = Public then Env.Positive\n    else Env.Privatize\n  in\n  Env.mark_extension_used usage env ext1 (Ident.name id);\n  let ty1 =\n    
Btype.newgenty\n      (Tconstr (ext1.ext_type_path, ext1.ext_type_params, ref Mnil))\n  in\n  let ty2 =\n    Btype.newgenty\n      (Tconstr (ext2.ext_type_path, ext2.ext_type_params, ref Mnil))\n  in\n  if\n    Ctype.equal env true\n      (ty1 :: ext1.ext_type_params)\n      (ty2 :: ext2.ext_type_params)\n  then\n    if\n      compare_constructor_arguments ~loc env (Ident.create \"\")\n        ext1.ext_type_params ext2.ext_type_params ext1.ext_args ext2.ext_args\n      = []\n    then\n      if\n        match (ext1.ext_ret_type, ext2.ext_ret_type) with\n        | Some r1, Some r2 when not (Ctype.equal env true [r1] [r2]) -> false\n        | Some _, None | None, Some _ -> false\n        | _ -> true\n      then\n        match (ext1.ext_private, ext2.ext_private) with\n        | Private, Public -> false\n        | _, _ -> true\n      else false\n    else false\n  else false\n"
  },
  {
    "path": "analysis/vendor/ml/includecore.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Inclusion checks for the core language *)\n\nopen Typedtree\nopen Types\n\nexception Dont_match\n\ntype type_mismatch =\n  | Arity\n  | Privacy\n  | Kind\n  | Constraint\n  | Manifest\n  | Variance\n  | Field_type of Ident.t\n  | Field_mutable of Ident.t\n  | Field_arity of Ident.t\n  | Field_names of int * string * string\n  | Field_missing of bool * Ident.t\n  | Record_representation of record_representation * record_representation\n  | Unboxed_representation of bool\n  | Immediate\n  | Tag_name\n  | Variant_representation of Ident.t\n\nval value_descriptions :\n  loc:Location.t ->\n  Env.t ->\n  Ident.t ->\n  value_description ->\n  value_description ->\n  module_coercion\n\nval type_declarations :\n  ?equality:bool ->\n  loc:Location.t ->\n  Env.t ->\n  string ->\n  type_declaration ->\n  Ident.t ->\n  type_declaration ->\n  type_mismatch list\n\nval 
extension_constructors :\n  loc:Location.t ->\n  Env.t ->\n  Ident.t ->\n  extension_constructor ->\n  extension_constructor ->\n  bool\n(*\nval class_types:\n        Env.t -> class_type -> class_type -> bool\n*)\n\nval report_type_mismatch :\n  string -> string -> string -> Format.formatter -> type_mismatch list -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/includemod.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Inclusion checks for the module language *)\n\nopen Misc\nopen Path\nopen Typedtree\nopen Types\n\ntype symptom =\n  | Missing_field of Ident.t * Location.t * string (* kind *)\n  | Value_descriptions of Ident.t * value_description * value_description\n  | Type_declarations of\n      Ident.t\n      * type_declaration\n      * type_declaration\n      * Includecore.type_mismatch list\n  | Extension_constructors of\n      Ident.t * extension_constructor * extension_constructor\n  | Module_types of module_type * module_type\n  | Modtype_infos of Ident.t * modtype_declaration * modtype_declaration\n  | Modtype_permutation\n  | Interface_mismatch of string * string\n  | Class_type_declarations of\n      Ident.t\n      * class_type_declaration\n      * class_type_declaration\n      * Ctype.class_match_failure list\n  | Unbound_modtype_path of Path.t\n  | Unbound_module_path of 
Path.t\n  | Invalid_module_alias of Path.t\n\ntype pos =\n  | Module of Ident.t\n  | Modtype of Ident.t\n  | Arg of Ident.t\n  | Body of Ident.t\ntype error = pos list * Env.t * symptom\n\nexception Error of error list\n\n(* All functions \"blah env x1 x2\" check that x1 is included in x2,\n   i.e. that x1 is the type of an implementation that fulfills the\n   specification x2. If not, Error is raised with a backtrace of the error. *)\n\n(* Inclusion between value descriptions *)\n\nlet value_descriptions ~loc env cxt subst id vd1 vd2 =\n  Cmt_format.record_value_dependency vd1 vd2;\n  Env.mark_value_used env (Ident.name id) vd1;\n  let vd2 = Subst.value_description subst vd2 in\n  try Includecore.value_descriptions ~loc env id vd1 vd2\n  with Includecore.Dont_match ->\n    raise (Error [(cxt, env, Value_descriptions (id, vd1, vd2))])\n\n(* Inclusion between type declarations *)\n\nlet type_declarations ~loc env ?(old_env = env) cxt subst id decl1 decl2 =\n  Env.mark_type_used env (Ident.name id) decl1;\n  let decl2 = Subst.type_declaration subst decl2 in\n  let err =\n    Includecore.type_declarations ~loc env (Ident.name id) decl1 id decl2\n  in\n  if err <> [] then\n    raise (Error [(cxt, old_env, Type_declarations (id, decl1, decl2, err))])\n\n(* Inclusion between extension constructors *)\n\nlet extension_constructors ~loc env cxt subst id ext1 ext2 =\n  let ext2 = Subst.extension_constructor subst ext2 in\n  if Includecore.extension_constructors ~loc env id ext1 ext2 then ()\n  else raise (Error [(cxt, env, Extension_constructors (id, ext1, ext2))])\n\n(* Inclusion between class declarations *)\n\nlet class_type_declarations ~loc ~old_env env cxt subst id decl1 decl2 =\n  let decl2 = Subst.cltype_declaration subst decl2 in\n  match Includeclass.class_type_declarations ~loc env decl1 decl2 with\n  | [] -> ()\n  | reason ->\n    raise\n      (Error\n         [(cxt, old_env, Class_type_declarations (id, decl1, decl2, reason))])\n\n(* Expand a module type 
identifier when possible *)\n\nexception Dont_match\n\nlet may_expand_module_path env path =\n  try\n    ignore (Env.find_modtype_expansion path env);\n    true\n  with Not_found -> false\n\nlet expand_module_path env cxt path =\n  try Env.find_modtype_expansion path env\n  with Not_found -> raise (Error [(cxt, env, Unbound_modtype_path path)])\n\nlet expand_module_alias env cxt path =\n  try (Env.find_module path env).md_type\n  with Not_found -> raise (Error [(cxt, env, Unbound_module_path path)])\n\n(*\nlet rec normalize_module_path env cxt path =\n  match expand_module_alias env cxt path with\n    Mty_alias path' -> normalize_module_path env cxt path'\n  | _ -> path\n*)\n\n(* Extract name, kind and ident from a signature item *)\n\ntype field_desc =\n  | Field_value of string\n  | Field_type of string\n  | Field_typext of string\n  | Field_module of string\n  | Field_modtype of string\n  | Field_classtype of string\n\nlet kind_of_field_desc = function\n  | Field_value _ -> \"value\"\n  | Field_type _ -> \"type\"\n  | Field_typext _ -> \"extension constructor\"\n  | Field_module _ -> \"module\"\n  | Field_modtype _ -> \"module type\"\n  | Field_classtype _ -> \"class type\"\n\nlet item_ident_name = function\n  | Sig_value (id, d) -> (id, d.val_loc, Field_value (Ident.name id))\n  | Sig_type (id, d, _) -> (id, d.type_loc, Field_type (Ident.name id))\n  | Sig_typext (id, d, _) -> (id, d.ext_loc, Field_typext (Ident.name id))\n  | Sig_module (id, d, _) -> (id, d.md_loc, Field_module (Ident.name id))\n  | Sig_modtype (id, d) -> (id, d.mtd_loc, Field_modtype (Ident.name id))\n  | Sig_class () -> assert false\n  | Sig_class_type (id, d, _) ->\n    (id, d.clty_loc, Field_classtype (Ident.name id))\n\nlet is_runtime_component = function\n  | Sig_value (_, {val_kind = Val_prim _})\n  | Sig_type (_, _, _)\n  | Sig_modtype (_, _)\n  | Sig_class_type (_, _, _) ->\n    false\n  | Sig_value (_, _)\n  | Sig_typext (_, _, _)\n  | Sig_module (_, _, _)\n  | Sig_class () ->\n    
true\n\n(* Print a coercion *)\n\nlet rec print_list pr ppf = function\n  | [] -> ()\n  | [a] -> pr ppf a\n  | a :: l ->\n    pr ppf a;\n    Format.fprintf ppf \";@ \";\n    print_list pr ppf l\nlet print_list pr ppf l = Format.fprintf ppf \"[@[%a@]]\" (print_list pr) l\n\nlet rec print_coercion ppf c =\n  let pr fmt = Format.fprintf ppf fmt in\n  match c with\n  | Tcoerce_none -> pr \"id\"\n  | Tcoerce_structure (fl, nl, _) ->\n    pr \"@[<2>struct@ %a@ %a@]\"\n      (print_list print_coercion2)\n      fl\n      (print_list print_coercion3)\n      nl\n  | Tcoerce_functor (inp, out) ->\n    pr \"@[<2>functor@ (%a)@ (%a)@]\" print_coercion inp print_coercion out\n  | Tcoerce_primitive {pc_desc; pc_env = _; pc_type} ->\n    pr \"prim %s@ (%a)\" pc_desc.Primitive.prim_name Printtyp.raw_type_expr\n      pc_type\n  | Tcoerce_alias (p, c) ->\n    pr \"@[<2>alias %a@ (%a)@]\" Printtyp.path p print_coercion c\n\nand print_coercion2 ppf (n, c) =\n  Format.fprintf ppf \"@[%d,@ %a@]\" n print_coercion c\n\nand print_coercion3 ppf (i, n, c) =\n  Format.fprintf ppf \"@[%s, %d,@ %a@]\" (Ident.unique_name i) n print_coercion c\n\n(* Simplify a structure coercion *)\n\nlet simplify_structure_coercion cc id_pos_list runtime_fields =\n  let rec is_identity_coercion pos = function\n    | [] -> true\n    | (n, c) :: rem ->\n      n = pos && c = Tcoerce_none && is_identity_coercion (pos + 1) rem\n  in\n  if is_identity_coercion 0 cc then Tcoerce_none\n  else Tcoerce_structure (cc, id_pos_list, runtime_fields)\n\n(* Inclusion between module types.\n   Return the restriction that transforms a value of the smaller type\n   into a value of the bigger type. 
*)\n\nlet rec modtypes ~loc env cxt subst mty1 mty2 =\n  try try_modtypes ~loc env cxt subst mty1 mty2 with\n  | Dont_match ->\n    raise (Error [(cxt, env, Module_types (mty1, Subst.modtype subst mty2))])\n  | Error reasons as err -> (\n    match (mty1, mty2) with\n    | Mty_alias _, _ | _, Mty_alias _ -> raise err\n    | _ ->\n      raise\n        (Error\n           ((cxt, env, Module_types (mty1, Subst.modtype subst mty2)) :: reasons))\n    )\n\nand try_modtypes ~loc env cxt subst mty1 mty2 =\n  match (mty1, mty2) with\n  | Mty_alias (pres1, p1), Mty_alias (pres2, p2) -> (\n    if Env.is_functor_arg p2 env then\n      raise (Error [(cxt, env, Invalid_module_alias p2)]);\n    (if not (Path.same p1 p2) then\n       let p1 = Env.normalize_path None env p1\n       and p2 = Env.normalize_path None env (Subst.module_path subst p2) in\n       if not (Path.same p1 p2) then raise Dont_match);\n    match (pres1, pres2) with\n    | Mta_present, Mta_present ->\n      Tcoerce_none (* Should really be Tcoerce_ignore if it existed *)\n    | Mta_absent, Mta_absent ->\n      Tcoerce_none (* Should really be Tcoerce_empty if it existed *)\n    | Mta_present, Mta_absent -> Tcoerce_none\n    | Mta_absent, Mta_present ->\n      let p1 =\n        try Env.normalize_path (Some Location.none) env p1\n        with Env.Error (Env.Missing_module (_, _, path)) ->\n          raise (Error [(cxt, env, Unbound_module_path path)])\n      in\n      Tcoerce_alias (p1, Tcoerce_none))\n  | Mty_alias (pres1, p1), _ -> (\n    let p1 =\n      try Env.normalize_path (Some Location.none) env p1\n      with Env.Error (Env.Missing_module (_, _, path)) ->\n        raise (Error [(cxt, env, Unbound_module_path path)])\n    in\n    let mty1 =\n      Mtype.strengthen ~aliasable:true env (expand_module_alias env cxt p1) p1\n    in\n    let cc = modtypes ~loc env cxt subst mty1 mty2 in\n    match pres1 with\n    | Mta_present -> cc\n    | Mta_absent -> Tcoerce_alias (p1, cc))\n  | Mty_ident p1, _ when 
may_expand_module_path env p1 ->\n    try_modtypes ~loc env cxt subst (expand_module_path env cxt p1) mty2\n  | _, Mty_ident _ -> try_modtypes2 ~loc env cxt mty1 (Subst.modtype subst mty2)\n  | Mty_signature sig1, Mty_signature sig2 ->\n    signatures ~loc env cxt subst sig1 sig2\n  | Mty_functor (param1, None, res1), Mty_functor (_param2, None, res2) -> (\n    match modtypes ~loc env (Body param1 :: cxt) subst res1 res2 with\n    | Tcoerce_none -> Tcoerce_none\n    | cc -> Tcoerce_functor (Tcoerce_none, cc))\n  | Mty_functor (param1, Some arg1, res1), Mty_functor (param2, Some arg2, res2)\n    -> (\n    let arg2' = Subst.modtype subst arg2 in\n    let cc_arg =\n      modtypes ~loc env (Arg param1 :: cxt) Subst.identity arg2' arg1\n    in\n    let cc_res =\n      modtypes ~loc\n        (Env.add_module param1 arg2' env)\n        (Body param1 :: cxt)\n        (Subst.add_module param2 (Pident param1) subst)\n        res1 res2\n    in\n    match (cc_arg, cc_res) with\n    | Tcoerce_none, Tcoerce_none -> Tcoerce_none\n    | _ -> Tcoerce_functor (cc_arg, cc_res))\n  | _, _ -> raise Dont_match\n\nand try_modtypes2 ~loc env cxt mty1 mty2 =\n  (* mty2 is an identifier *)\n  match (mty1, mty2) with\n  | Mty_ident p1, Mty_ident p2\n    when Path.same\n           (Env.normalize_path_prefix None env p1)\n           (Env.normalize_path_prefix None env p2) ->\n    Tcoerce_none\n  | _, Mty_ident p2 when may_expand_module_path env p2 ->\n    try_modtypes ~loc env cxt Subst.identity mty1\n      (expand_module_path env cxt p2)\n  | _, _ -> raise Dont_match\n\n(* Inclusion between signatures *)\n\nand signatures ~loc env cxt subst sig1 sig2 =\n  (* Environment used to check inclusion of components *)\n  let new_env = Env.add_signature sig1 (Env.in_signature true env) in\n  (* Keep ids for module aliases *)\n  let id_pos_list, _ =\n    List.fold_left\n      (fun ((l, pos) as id_pos) -> function\n        | Sig_module (id, _, _) -> ((id, pos, Tcoerce_none) :: l, pos + 1)\n        | item 
-> if is_runtime_component item then (l, pos + 1) else id_pos)\n      ([], 0) sig1\n  in\n\n  let runtime_fields =\n    let get_id = function\n      | Sig_value (i, _)\n      | Sig_module (i, _, _)\n      | Sig_typext (i, _, _)\n      | Sig_modtype (i, _)\n      | Sig_class_type (i, _, _)\n      | Sig_type (i, _, _) ->\n        Ident.name i\n      | Sig_class () -> assert false\n    in\n    List.fold_right\n      (fun item fields ->\n        if is_runtime_component item then get_id item :: fields else fields)\n      sig2 []\n  in\n\n  (* Build a table of the components of sig1, along with their positions.\n     The table is indexed by kind and name of component *)\n  let rec build_component_table pos tbl = function\n    | [] -> (pos, tbl)\n    | item :: rem ->\n      let id, _loc, name = item_ident_name item in\n      let nextpos = if is_runtime_component item then pos + 1 else pos in\n      build_component_table nextpos (Tbl.add name (id, item, pos) tbl) rem\n  in\n  let len1, comps1 = build_component_table 0 Tbl.empty sig1 in\n  let len2 =\n    List.fold_left\n      (fun n i -> if is_runtime_component i then n + 1 else n)\n      0 sig2\n  in\n  (* Pair each component of sig2 with a component of sig1,\n     identifying the names along the way.\n     Return a coercion list indicating, for all run-time components\n     of sig2, the position of the matching run-time components of sig1\n     and the coercion to be applied to it. 
*)\n  let rec pair_components subst paired unpaired = function\n    | [] -> (\n      match unpaired with\n      | [] ->\n        let cc =\n          signature_components ~loc env new_env cxt subst (List.rev paired)\n        in\n        if len1 = len2 then\n          (* see PR#5098 *)\n          simplify_structure_coercion cc id_pos_list runtime_fields\n        else Tcoerce_structure (cc, id_pos_list, runtime_fields)\n      | _ -> raise (Error unpaired))\n    | item2 :: rem -> (\n      let id2, loc, name2 = item_ident_name item2 in\n      let name2, report =\n        match (item2, name2) with\n        | Sig_type (_, {type_manifest = None}, _), Field_type s\n          when Btype.is_row_name s ->\n          (* Do not report in case of failure,\n             as the main type will generate an error *)\n          (Field_type (String.sub s 0 (String.length s - 4)), false)\n        | _ -> (name2, true)\n      in\n      match Tbl.find name2 comps1 with\n      | id1, item1, pos1 ->\n        let new_subst =\n          match item2 with\n          | Sig_type _ -> Subst.add_type id2 (Pident id1) subst\n          | Sig_module _ -> Subst.add_module id2 (Pident id1) subst\n          | Sig_modtype _ ->\n            Subst.add_modtype id2 (Mty_ident (Pident id1)) subst\n          | Sig_value _ | Sig_typext _ | Sig_class _ | Sig_class_type _ -> subst\n        in\n        pair_components new_subst ((item1, item2, pos1) :: paired) unpaired rem\n      | exception Not_found ->\n        let unpaired =\n          if report then\n            (cxt, env, Missing_field (id2, loc, kind_of_field_desc name2))\n            :: unpaired\n          else unpaired\n        in\n        pair_components subst paired unpaired rem)\n  in\n  (* Do the pairing and checking, and return the final coercion *)\n  pair_components subst [] [] sig2\n\n(* Inclusion between signature components *)\n\nand signature_components ~loc old_env env cxt subst paired =\n  let comps_rec rem = signature_components ~loc old_env env 
cxt subst rem in\n  match paired with\n  | [] -> []\n  | (Sig_value (id1, valdecl1), Sig_value (_id2, valdecl2), pos) :: rem -> (\n    let cc = value_descriptions ~loc env cxt subst id1 valdecl1 valdecl2 in\n    match valdecl2.val_kind with\n    | Val_prim _ -> comps_rec rem\n    | _ -> (pos, cc) :: comps_rec rem)\n  | (Sig_type (id1, tydecl1, _), Sig_type (_id2, tydecl2, _), _pos) :: rem ->\n    type_declarations ~loc ~old_env env cxt subst id1 tydecl1 tydecl2;\n    comps_rec rem\n  | (Sig_typext (id1, ext1, _), Sig_typext (_id2, ext2, _), pos) :: rem ->\n    extension_constructors ~loc env cxt subst id1 ext1 ext2;\n    (pos, Tcoerce_none) :: comps_rec rem\n  | (Sig_module (id1, mty1, _), Sig_module (_id2, mty2, _), pos) :: rem ->\n    let cc = module_declarations ~loc env cxt subst id1 mty1 mty2 in\n    (pos, cc) :: comps_rec rem\n  | (Sig_modtype (id1, info1), Sig_modtype (_id2, info2), _pos) :: rem ->\n    modtype_infos ~loc env cxt subst id1 info1 info2;\n    comps_rec rem\n  | (Sig_class _, Sig_class _, _) :: _ -> assert false\n  | (Sig_class_type (id1, info1, _), Sig_class_type (_id2, info2, _), _pos)\n    :: rem ->\n    class_type_declarations ~loc ~old_env env cxt subst id1 info1 info2;\n    comps_rec rem\n  | _ -> assert false\n\nand module_declarations ~loc env cxt subst id1 md1 md2 =\n  Builtin_attributes.check_deprecated_inclusion ~def:md1.md_loc ~use:md2.md_loc\n    loc md1.md_attributes md2.md_attributes (Ident.name id1);\n  let p1 = Pident id1 in\n  Env.mark_module_used env (Ident.name id1) md1.md_loc;\n  modtypes ~loc env (Module id1 :: cxt) subst\n    (Mtype.strengthen ~aliasable:true env md1.md_type p1)\n    md2.md_type\n\n(* Inclusion between module type specifications *)\n\nand modtype_infos ~loc env cxt subst id info1 info2 =\n  Builtin_attributes.check_deprecated_inclusion ~def:info1.mtd_loc\n    ~use:info2.mtd_loc loc info1.mtd_attributes info2.mtd_attributes\n    (Ident.name id);\n  let info2 = Subst.modtype_declaration subst info2 in\n  
let cxt' = Modtype id :: cxt in\n  try\n    match (info1.mtd_type, info2.mtd_type) with\n    | None, None -> ()\n    | Some _, None -> ()\n    | Some mty1, Some mty2 -> check_modtype_equiv ~loc env cxt' mty1 mty2\n    | None, Some mty2 ->\n      check_modtype_equiv ~loc env cxt' (Mty_ident (Pident id)) mty2\n  with Error reasons ->\n    raise (Error ((cxt, env, Modtype_infos (id, info1, info2)) :: reasons))\n\nand check_modtype_equiv ~loc env cxt mty1 mty2 =\n  match\n    ( modtypes ~loc env cxt Subst.identity mty1 mty2,\n      modtypes ~loc env cxt Subst.identity mty2 mty1 )\n  with\n  | Tcoerce_none, Tcoerce_none -> ()\n  | _c1, _c2 ->\n    (* Format.eprintf \"@[c1 = %a@ c2 = %a@]@.\"\n       print_coercion _c1 print_coercion _c2; *)\n    raise (Error [(cxt, env, Modtype_permutation)])\n\n(* Simplified inclusion check between module types (for Env) *)\n\nlet can_alias env path =\n  let rec no_apply = function\n    | Pident _ -> true\n    | Pdot (p, _, _) -> no_apply p\n    | Papply _ -> false\n  in\n  no_apply path && not (Env.is_functor_arg path env)\n\nlet check_modtype_inclusion ~loc env mty1 path1 mty2 =\n  try\n    let aliasable = can_alias env path1 in\n    ignore\n      (modtypes ~loc env [] Subst.identity\n         (Mtype.strengthen ~aliasable env mty1 path1)\n         mty2)\n  with Error _ -> raise Not_found\n\nlet _ = Env.check_modtype_inclusion := check_modtype_inclusion\n\n(* Check that an implementation of a compilation unit meets its\n   interface. 
*)\n\nlet compunit env impl_name impl_sig intf_name intf_sig =\n  try\n    signatures\n      ~loc:(Location.in_file impl_name)\n      env [] Subst.identity impl_sig intf_sig\n  with Error reasons ->\n    raise\n      (Error\n         (([], Env.empty, Interface_mismatch (impl_name, intf_name)) :: reasons))\n\n(* Hide the context and substitution parameters to the outside world *)\n\nlet modtypes ~loc env mty1 mty2 = modtypes ~loc env [] Subst.identity mty1 mty2\nlet signatures env sig1 sig2 =\n  signatures ~loc:Location.none env [] Subst.identity sig1 sig2\nlet type_declarations ~loc env id decl1 decl2 =\n  type_declarations ~loc env [] Subst.identity id decl1 decl2\n\n(*\nlet modtypes env m1 m2 =\n  let c = modtypes env m1 m2 in\n  Format.eprintf \"@[<2>modtypes@ %a@ %a =@ %a@]@.\"\n    Printtyp.modtype m1 Printtyp.modtype m2\n    print_coercion c;\n  c\n*)\n\n(* Error report *)\n\nopen Format\nopen Printtyp\n\nlet show_loc msg ppf loc =\n  fprintf ppf \"@\\n@[<2>%a:@ %s@]\" Location.print_loc loc msg\n\nlet show_locs ppf (loc1, loc2) =\n  show_loc \"Expected declaration\" ppf loc2;\n  show_loc \"Actual declaration\" ppf loc1\n\nlet include_err ~env ppf = function\n  | Missing_field (id, loc, kind) ->\n    fprintf ppf \"The %s `%a' is required but not provided\" kind ident id;\n    show_loc \"Expected declaration\" ppf loc\n  | Value_descriptions (id, d1, d2) ->\n    let curry_kind_1, curry_kind_2 =\n      match\n        (Ctype.expand_head env d1.val_type, Ctype.expand_head env d2.val_type)\n      with\n      | {desc = Tarrow _}, {desc = Tconstr (Pident {name = \"function$\"}, _, _)}\n        ->\n        (\" (curried)\", \" (uncurried)\")\n      | {desc = Tconstr (Pident {name = \"function$\"}, _, _)}, {desc = Tarrow _}\n        ->\n        (\" (uncurried)\", \" (curried)\")\n      | _ -> (\"\", \"\")\n    in\n    fprintf ppf\n      \"@[<hv 2>Values do not match:@ %a%s@;<1 -2>is not included in@ %a%s@]\"\n      (value_description id) d1 curry_kind_1 
(value_description id) d2\n      curry_kind_2;\n    show_locs ppf (d1.val_loc, d2.val_loc)\n  | Type_declarations (id, d1, d2, errs) ->\n    fprintf ppf \"@[<v>@[<hv>%s:@;<1 2>%a@ %s@;<1 2>%a@]%a%a@]\"\n      \"Type declarations do not match\" (type_declaration id) d1\n      \"is not included in\" (type_declaration id) d2 show_locs\n      (d1.type_loc, d2.type_loc)\n      (Includecore.report_type_mismatch \"the first\" \"the second\" \"declaration\")\n      errs\n  | Extension_constructors (id, x1, x2) ->\n    fprintf ppf\n      \"@[<hv 2>Extension declarations do not match:@ %a@;\\\n       <1 -2>is not included in@ %a@]\"\n      (extension_constructor id) x1 (extension_constructor id) x2;\n    show_locs ppf (x1.ext_loc, x2.ext_loc)\n  | Module_types (mty1, mty2) ->\n    fprintf ppf\n      \"@[<hv 2>Modules do not match:@ %a@;<1 -2>is not included in@ %a@]\"\n      modtype mty1 modtype mty2\n  | Modtype_infos (id, d1, d2) ->\n    fprintf ppf\n      \"@[<hv 2>Module type declarations do not match:@ %a@;\\\n       <1 -2>does not match@ %a@]\"\n      (modtype_declaration id) d1 (modtype_declaration id) d2\n  | Modtype_permutation -> fprintf ppf \"Illegal permutation of structure fields\"\n  | Interface_mismatch (impl_name, intf_name) ->\n    fprintf ppf \"@[The implementation %s@ does not match the interface %s:\"\n      impl_name intf_name\n  | Class_type_declarations (id, d1, d2, reason) ->\n    fprintf ppf\n      \"@[<hv 2>Class type declarations do not match:@ %a@;\\\n       <1 -2>does not match@ %a@]@ %a\"\n      (Printtyp.cltype_declaration id)\n      d1\n      (Printtyp.cltype_declaration id)\n      d2 Includeclass.report_error reason\n  | Unbound_modtype_path path ->\n    fprintf ppf \"Unbound module type %a\" Printtyp.path path\n  | Unbound_module_path path ->\n    fprintf ppf \"Unbound module %a\" Printtyp.path path\n  | Invalid_module_alias path ->\n    fprintf ppf \"Module %a cannot be aliased\" Printtyp.path path\n\nlet rec context ppf = function\n  | 
Module id :: rem -> fprintf ppf \"@[<2>module %a%a@]\" ident id args rem\n  | Modtype id :: rem ->\n    fprintf ppf \"@[<2>module type %a =@ %a@]\" ident id context_mty rem\n  | Body x :: rem ->\n    fprintf ppf \"functor (%s) ->@ %a\" (argname x) context_mty rem\n  | Arg x :: rem ->\n    fprintf ppf \"functor (%a : %a) -> ...\" ident x context_mty rem\n  | [] -> fprintf ppf \"<here>\"\n\nand context_mty ppf = function\n  | (Module _ | Modtype _) :: _ as rem ->\n    fprintf ppf \"@[<2>sig@ %a@;<1 -2>end@]\" context rem\n  | cxt -> context ppf cxt\n\nand args ppf = function\n  | Body x :: rem -> fprintf ppf \"(%s)%a\" (argname x) args rem\n  | Arg x :: rem -> fprintf ppf \"(%a :@ %a) : ...\" ident x context_mty rem\n  | cxt -> fprintf ppf \" :@ %a\" context_mty cxt\n\nand argname x =\n  let s = Ident.name x in\n  if s = \"*\" then \"\" else s\n\nlet path_of_context = function\n  | Module id :: rem ->\n    let rec subm path = function\n      | [] -> path\n      | Module id :: rem -> subm (Pdot (path, Ident.name id, -1)) rem\n      | _ -> assert false\n    in\n    subm (Pident id) rem\n  | _ -> assert false\n\nlet context ppf cxt =\n  if cxt = [] then ()\n  else if\n    List.for_all\n      (function\n        | Module _ -> true\n        | _ -> false)\n      cxt\n  then fprintf ppf \"In module %a:@ \" path (path_of_context cxt)\n  else fprintf ppf \"@[<hv 2>At position@ %a@]@ \" context cxt\n\nlet include_err ppf (cxt, env, err) =\n  Printtyp.wrap_printing_env env (fun () ->\n      fprintf ppf \"@[<v>%a%a@]\" context (List.rev cxt) (include_err ~env) err)\n\nlet buffer = ref Bytes.empty\nlet is_big obj =\n  let size = !Clflags.error_size in\n  size > 0\n  &&\n  (if Bytes.length !buffer < size then buffer := Bytes.create size;\n   try\n     ignore (Marshal.to_buffer !buffer 0 size obj []);\n     false\n   with _ -> true)\n\nlet report_error ppf errs =\n  if errs = [] then ()\n  else\n    let errs, err = split_last errs in\n    let pe = ref true in\n    let include_err' 
ppf ((_, _, obj) as err) =\n      if not (is_big obj) then fprintf ppf \"%a@ \" include_err err\n      else if !pe then (\n        fprintf ppf \"...@ \";\n        pe := false)\n    in\n    let print_errs ppf = List.iter (include_err' ppf) in\n    fprintf ppf \"@[<v>%a%a@]\" print_errs errs include_err err\n\nlet better_candidate_loc (x : error list) =\n  match x with\n  | [(_, _, Interface_mismatch _); (_, _, descr)] -> (\n    match descr with\n    | Value_descriptions (_, d1, _) -> Some d1.val_loc\n    | Type_declarations (_, tdcl1, _, _) -> Some tdcl1.type_loc\n    | Missing_field (_, loc, _) -> Some loc\n    | _ -> None)\n  | _ -> None\n\n(* We could do a better job to split the individual error items\n   as sub-messages of the main interface mismatch on the whole unit. *)\nlet () =\n  Location.register_error_of_exn (function\n    | Error err -> (\n      match better_candidate_loc err with\n      | None -> Some (Location.error_of_printer_file report_error err)\n      | Some loc -> Some (Location.error_of_printer loc report_error err))\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/includemod.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Inclusion checks for the module language *)\n\nopen Typedtree\nopen Types\nopen Format\n\nval modtypes :\n  loc:Location.t -> Env.t -> module_type -> module_type -> module_coercion\n\nval signatures : Env.t -> signature -> signature -> module_coercion\n\nval compunit :\n  Env.t -> string -> signature -> string -> signature -> module_coercion\n\nval type_declarations :\n  loc:Location.t ->\n  Env.t ->\n  Ident.t ->\n  type_declaration ->\n  type_declaration ->\n  unit\n\nval print_coercion : formatter -> module_coercion -> unit\n\ntype symptom =\n  | Missing_field of Ident.t * Location.t * string (* kind *)\n  | Value_descriptions of Ident.t * value_description * value_description\n  | Type_declarations of\n      Ident.t\n      * type_declaration\n      * type_declaration\n      * Includecore.type_mismatch list\n  | Extension_constructors of\n      Ident.t * 
extension_constructor * extension_constructor\n  | Module_types of module_type * module_type\n  | Modtype_infos of Ident.t * modtype_declaration * modtype_declaration\n  | Modtype_permutation\n  | Interface_mismatch of string * string\n  | Class_type_declarations of\n      Ident.t\n      * class_type_declaration\n      * class_type_declaration\n      * Ctype.class_match_failure list\n  | Unbound_modtype_path of Path.t\n  | Unbound_module_path of Path.t\n  | Invalid_module_alias of Path.t\n\ntype pos =\n  | Module of Ident.t\n  | Modtype of Ident.t\n  | Arg of Ident.t\n  | Body of Ident.t\ntype error = pos list * Env.t * symptom\n\nexception Error of error list\n\nval report_error : formatter -> error list -> unit\nval expand_module_alias : Env.t -> pos list -> Path.t -> Types.module_type\n"
  },
  {
    "path": "analysis/vendor/ml/js_raw_info.ml",
    "content": "(* Copyright (C) 2020 Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\ntype exp =\n  | Js_function of {arity: int; arrow: bool}\n  | Js_literal of {comment: string option}\n  (* A special handling of\n     [%raw \"/*lint*/ 0\"]\n  *)\n  (* Flow ast module\n     {[\n       and value =\n           | String of string\n         | Boolean of bool\n         | Null\n         | Number of float\n         | BigInt of float\n         | RegExp of RegExp.t\n     ]}\n  *)\n  | Js_exp_unknown\n\ntype raw_kind = Raw_re | Raw_exp | Raw_program\n\ntype stmt = Js_stmt_comment | Js_stmt_unknown\n\ntype code_info = Exp of exp | Stmt of stmt\n\ntype t = {code: string; code_info: code_info}\n"
  },
  {
    "path": "analysis/vendor/ml/lambda.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype compile_time_constant =\n  | Big_endian\n  | Word_size\n  | Int_size\n  | Max_wosize\n  | Ostype_unix\n  | Ostype_win32\n  | Ostype_cygwin\n  | Backend_type\n\ntype loc_kind = Loc_FILE | Loc_LINE | Loc_MODULE | Loc_LOC | Loc_POS\n\ntype record_repr = Record_regular | Record_optional\n\ntype tag_info =\n  | Blk_constructor of {\n      name: string;\n      num_nonconst: int;\n      tag: int;\n      attrs: Parsetree.attributes;\n    }\n  | Blk_record_inlined of {\n      name: string;\n      num_nonconst: int;\n      tag: int;\n      optional_labels: string list;\n      fields: string array;\n      mutable_flag: Asttypes.mutable_flag;\n      attrs: Parsetree.attributes;\n    }\n  | Blk_tuple\n  | Blk_poly_var of string\n  | Blk_record of {\n      fields: string array;\n      mutable_flag: Asttypes.mutable_flag;\n      record_repr: record_repr;\n    }\n  | Blk_module of string 
list\n  | Blk_module_export of Ident.t list\n  | Blk_extension\n  | Blk_some\n  | Blk_some_not_nested\n    (* ['a option] where ['a] can not inhabit a non-like value *)\n  | Blk_record_ext of {\n      fields: string array;\n      mutable_flag: Asttypes.mutable_flag;\n    }\n  | Blk_lazy_general\n\nlet tag_of_tag_info (tag : tag_info) =\n  match tag with\n  | Blk_constructor {tag} | Blk_record_inlined {tag} -> tag\n  | Blk_tuple | Blk_poly_var _ | Blk_record _ | Blk_module _\n  | Blk_module_export _ | Blk_extension | Blk_some (* tag not make sense *)\n  | Blk_some_not_nested (* tag not make sense *)\n  | Blk_lazy_general (* tag not make sense 248 *)\n  | Blk_record_ext _ (* similar to Blk_extension*) ->\n    0\n\nlet mutable_flag_of_tag_info (tag : tag_info) =\n  match tag with\n  | Blk_record_inlined {mutable_flag}\n  | Blk_record {mutable_flag}\n  | Blk_record_ext {mutable_flag} ->\n    mutable_flag\n  | Blk_lazy_general -> Mutable\n  | Blk_tuple | Blk_constructor _ | Blk_poly_var _ | Blk_module _\n  | Blk_module_export _ | Blk_extension | Blk_some_not_nested | Blk_some ->\n    Immutable\n\ntype label = Types.label_description\n\nlet find_name (attr : Parsetree.attribute) =\n  match attr with\n  | ( {txt = \"as\"},\n      PStr\n        [\n          {\n            pstr_desc =\n              Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (s, _))}, _);\n          };\n        ] ) ->\n    Some s\n  | _ -> None\n\nlet blk_record (fields : (label * _) array) mut record_repr =\n  let all_labels_info =\n    Ext_array.map fields (fun (lbl, _) ->\n        Ext_list.find_def lbl.lbl_attributes find_name lbl.lbl_name)\n  in\n  Blk_record {fields = all_labels_info; mutable_flag = mut; record_repr}\n\nlet blk_record_ext fields mutable_flag =\n  let all_labels_info =\n    Array.map\n      (fun ((lbl : label), _) ->\n        Ext_list.find_def lbl.Types.lbl_attributes find_name lbl.lbl_name)\n      fields\n  in\n  Blk_record_ext {fields = all_labels_info; mutable_flag}\n\nlet 
blk_record_inlined fields name num_nonconst optional_labels ~tag ~attrs\n    mutable_flag =\n  let fields =\n    Array.map\n      (fun ((lbl : label), _) ->\n        Ext_list.find_def lbl.lbl_attributes find_name lbl.lbl_name)\n      fields\n  in\n  Blk_record_inlined\n    {fields; name; num_nonconst; tag; mutable_flag; optional_labels; attrs}\n\nlet ref_tag_info : tag_info =\n  Blk_record\n    {\n      fields = [|\"contents\"|];\n      mutable_flag = Mutable;\n      record_repr = Record_regular;\n    }\n\ntype field_dbg_info =\n  | Fld_record of {name: string; mutable_flag: Asttypes.mutable_flag}\n  | Fld_module of {name: string}\n  | Fld_record_inline of {name: string}\n  | Fld_record_extension of {name: string}\n  | Fld_tuple\n  | Fld_poly_var_tag\n  | Fld_poly_var_content\n  | Fld_extension\n  | Fld_variant\n  | Fld_cons\n  | Fld_array\n\nlet fld_record (lbl : label) =\n  Fld_record\n    {\n      name = Ext_list.find_def lbl.lbl_attributes find_name lbl.lbl_name;\n      mutable_flag = lbl.lbl_mut;\n    }\n\nlet fld_record_extension (lbl : label) =\n  Fld_record_extension\n    {name = Ext_list.find_def lbl.lbl_attributes find_name lbl.lbl_name}\n\nlet ref_field_info : field_dbg_info =\n  Fld_record {name = \"contents\"; mutable_flag = Mutable}\n\ntype set_field_dbg_info =\n  | Fld_record_set of string\n  | Fld_record_inline_set of string\n  | Fld_record_extension_set of string\n\nlet ref_field_set_info : set_field_dbg_info = Fld_record_set \"contents\"\nlet fld_record_set (lbl : label) =\n  Fld_record_set (Ext_list.find_def lbl.lbl_attributes find_name lbl.lbl_name)\n\nlet fld_record_inline (lbl : label) =\n  Fld_record_inline\n    {name = Ext_list.find_def lbl.lbl_attributes find_name lbl.lbl_name}\n\nlet fld_record_inline_set (lbl : label) =\n  Fld_record_inline_set\n    (Ext_list.find_def lbl.lbl_attributes find_name lbl.lbl_name)\n\nlet fld_record_extension_set (lbl : label) =\n  Fld_record_extension_set\n    (Ext_list.find_def lbl.lbl_attributes find_name 
lbl.lbl_name)\n\ntype immediate_or_pointer = Immediate | Pointer\n\ntype is_safe = Safe | Unsafe\n\ntype primitive =\n  | Pidentity\n  | Pbytes_to_string\n  | Pignore\n  | Prevapply\n  | Pdirapply\n  | Ploc of loc_kind (* Globals *)\n  | Pgetglobal of Ident.t\n  (* Operations on heap blocks *)\n  | Pmakeblock of tag_info\n  | Pfield of int * field_dbg_info\n  | Psetfield of int * set_field_dbg_info\n  | Pduprecord\n  (* Force lazy values *)\n  | Plazyforce\n  (* External call *)\n  | Pccall of Primitive.description\n  (* Exceptions *)\n  | Praise of raise_kind\n  (* Boolean operations *)\n  | Psequand\n  | Psequor\n  | Pnot\n  (* Integer operations *)\n  | Pnegint\n  | Paddint\n  | Psubint\n  | Pmulint\n  | Pdivint of is_safe\n  | Pmodint of is_safe\n  | Pandint\n  | Porint\n  | Pxorint\n  | Plslint\n  | Plsrint\n  | Pasrint\n  | Pintcomp of comparison\n  | Poffsetint of int\n  | Poffsetref of int\n  (* Float operations *)\n  | Pintoffloat\n  | Pfloatofint\n  | Pnegfloat\n  | Pabsfloat\n  | Paddfloat\n  | Psubfloat\n  | Pmulfloat\n  | Pdivfloat\n  | Pfloatcomp of comparison\n  (* BigInt operations *)\n  | Pnegbigint\n  | Paddbigint\n  | Psubbigint\n  | Ppowbigint\n  | Pmulbigint\n  | Pdivbigint\n  | Pmodbigint\n  | Pandbigint\n  | Porbigint\n  | Pxorbigint\n  | Plslbigint\n  | Pasrbigint\n  | Pbigintcomp of comparison\n  (* String operations *)\n  | Pstringlength\n  | Pstringrefu\n  | Pstringrefs\n  | Pbyteslength\n  | Pbytesrefu\n  | Pbytessetu\n  | Pbytesrefs\n  | Pbytessets\n  (* Array operations *)\n  | Pmakearray of Asttypes.mutable_flag\n  | Parraylength\n  | Parrayrefu\n  | Parraysetu\n  | Parrayrefs\n  | Parraysets\n  (* Test if the argument is a block or an immediate integer *)\n  | Pisint\n  (* Test if the (integer) argument is outside an interval *)\n  | Pisout\n  | Pbintofint of boxed_integer\n  | Pintofbint of boxed_integer\n  | Pcvtbint of boxed_integer (*source*) * boxed_integer (*destination*)\n  | Pnegbint of boxed_integer\n  | Paddbint of 
boxed_integer\n  | Psubbint of boxed_integer\n  | Pmulbint of boxed_integer\n  | Pdivbint of {size: boxed_integer; is_safe: is_safe}\n  | Pmodbint of {size: boxed_integer; is_safe: is_safe}\n  | Pandbint of boxed_integer\n  | Porbint of boxed_integer\n  | Pxorbint of boxed_integer\n  | Plslbint of boxed_integer\n  | Plsrbint of boxed_integer\n  | Pasrbint of boxed_integer\n  | Pbintcomp of boxed_integer * comparison\n  | Pctconst of compile_time_constant\n  (* Inhibition of optimisation *)\n  | Popaque\n  | Puncurried_apply\n  | Pcreate_extension of string\nand comparison = Ceq | Cneq | Clt | Cgt | Cle | Cge\n\nand value_kind = Pgenval\n\nand boxed_integer = Primitive.boxed_integer = Pbigint | Pint32 | Pint64\n\nand raise_kind = Raise_regular | Raise_reraise | Raise_notrace\n\ntype pointer_info =\n  | Pt_constructor of {\n      name: string;\n      const: int;\n      non_const: int;\n      attrs: Parsetree.attributes;\n    }\n  | Pt_variant of {name: string}\n  | Pt_module_alias\n  | Pt_shape_none\n  | Pt_assertfalse\n\ntype structured_constant =\n  | Const_base of Asttypes.constant\n  | Const_pointer of int * pointer_info\n  | Const_block of tag_info * structured_constant list\n  | Const_float_array of string list\n  | Const_immstring of string\n  | Const_false\n  | Const_true\ntype inline_attribute =\n  | Always_inline (* [@inline] or [@inline always] *)\n  | Never_inline (* [@inline never] *)\n  | Default_inline (* no [@inline] attribute *)\n\ntype let_kind = Strict | Alias | StrictOpt | Variable\n\ntype function_attribute = {\n  inline: inline_attribute;\n  is_a_functor: bool;\n  return_unit: bool;\n  async: bool;\n  directive: string option;\n  one_unit_arg: bool;\n}\n\ntype lambda =\n  | Lvar of Ident.t\n  | Lconst of structured_constant\n  | Lapply of lambda_apply\n  | Lfunction of lfunction\n  | Llet of let_kind * value_kind * Ident.t * lambda * lambda\n  | Lletrec of (Ident.t * lambda) list * lambda\n  | Lprim of primitive * lambda list * Location.t\n  | 
Lswitch of lambda * lambda_switch * Location.t\n  | Lstringswitch of\n      lambda * (string * lambda) list * lambda option * Location.t\n  | Lstaticraise of int * lambda list\n  | Lstaticcatch of lambda * (int * Ident.t list) * lambda\n  | Ltrywith of lambda * Ident.t * lambda\n  | Lifthenelse of lambda * lambda * lambda\n  | Lsequence of lambda * lambda\n  | Lwhile of lambda * lambda\n  | Lfor of Ident.t * lambda * lambda * Asttypes.direction_flag * lambda\n  | Lassign of Ident.t * lambda\n  | Lsend of string * lambda * Location.t\n\nand lfunction = {\n  params: Ident.t list;\n  body: lambda;\n  attr: function_attribute; (* specified with [@inline] attribute *)\n  loc: Location.t;\n}\n\nand lambda_apply = {\n  ap_func: lambda;\n  ap_args: lambda list;\n  ap_loc: Location.t;\n  ap_inlined: inline_attribute;\n}\n\nand lambda_switch = {\n  sw_numconsts: int;\n  sw_consts: (int * lambda) list;\n  sw_numblocks: int;\n  sw_blocks: (int * lambda) list;\n  sw_failaction: lambda option;\n  sw_names: Ast_untagged_variants.switch_names option;\n}\n\n(* This is actually a dummy value\n    not necessary \"()\", it can be used as a place holder for module\n    alias etc.\n*)\nlet const_unit =\n  Const_pointer\n    (0, Pt_constructor {name = \"()\"; const = 1; non_const = 0; attrs = []})\n\nlet lambda_assert_false = Lconst (Const_pointer (0, Pt_assertfalse))\n\nlet lambda_module_alias = Lconst (Const_pointer (0, Pt_module_alias))\n\nlet lambda_unit = Lconst const_unit\n\nlet default_function_attribute =\n  {\n    inline = Default_inline;\n    is_a_functor = false;\n    return_unit = false;\n    async = false;\n    one_unit_arg = false;\n    directive = None;\n  }\n\n(* Build sharing keys *)\n(*\n   Those keys are later compared with Pervasives.compare.\n   For that reason, they should not include cycles.\n*)\n\nexception Not_simple\n\nlet max_raw = 32\n\nlet make_key e =\n  let count = ref 0 (* Used for controling size *)\n  and make_key = Ident.make_key_generator () in\n  (* 
make_key is used for normalizing let-bound variables *)\n  let rec tr_rec env e =\n    incr count;\n    if !count > max_raw then raise_notrace Not_simple;\n    (* Too big ! *)\n    match e with\n    | Lvar id -> ( try Ident.find_same id env with Not_found -> e)\n    | Lconst (Const_base (Const_string _)) ->\n      (* Mutable constants are not shared *)\n      raise_notrace Not_simple\n    | Lconst _ -> e\n    | Lapply ap ->\n      Lapply\n        {\n          ap with\n          ap_func = tr_rec env ap.ap_func;\n          ap_args = tr_recs env ap.ap_args;\n          ap_loc = Location.none;\n        }\n    | Llet (Alias, _k, x, ex, e) ->\n      (* Ignore aliases -> substitute *)\n      let ex = tr_rec env ex in\n      tr_rec (Ident.add x ex env) e\n    | Llet ((Strict | StrictOpt), _k, x, ex, Lvar v) when Ident.same v x ->\n      tr_rec env ex\n    | Llet (str, k, x, ex, e) ->\n      (* Because of side effects, keep other lets with normalized names *)\n      let ex = tr_rec env ex in\n      let y = make_key x in\n      Llet (str, k, y, ex, tr_rec (Ident.add x (Lvar y) env) e)\n    | Lprim (p, es, _) -> Lprim (p, tr_recs env es, Location.none)\n    | Lswitch (e, sw, loc) -> Lswitch (tr_rec env e, tr_sw env sw, loc)\n    | Lstringswitch (e, sw, d, _) ->\n      Lstringswitch\n        ( tr_rec env e,\n          List.map (fun (s, e) -> (s, tr_rec env e)) sw,\n          tr_opt env d,\n          Location.none )\n    | Lstaticraise (i, es) -> Lstaticraise (i, tr_recs env es)\n    | Lstaticcatch (e1, xs, e2) ->\n      Lstaticcatch (tr_rec env e1, xs, tr_rec env e2)\n    | Ltrywith (e1, x, e2) -> Ltrywith (tr_rec env e1, x, tr_rec env e2)\n    | Lifthenelse (cond, ifso, ifnot) ->\n      Lifthenelse (tr_rec env cond, tr_rec env ifso, tr_rec env ifnot)\n    | Lsequence (e1, e2) -> Lsequence (tr_rec env e1, tr_rec env e2)\n    | Lassign (x, e) -> Lassign (x, tr_rec env e)\n    | Lsend (m, e1, _loc) -> Lsend (m, tr_rec env e1, Location.none)\n    | Lletrec _ | Lfunction _ | Lfor _ 
| Lwhile _ -> raise_notrace Not_simple\n  and tr_recs env es = List.map (tr_rec env) es\n  and tr_sw env sw =\n    {\n      sw with\n      sw_consts = List.map (fun (i, e) -> (i, tr_rec env e)) sw.sw_consts;\n      sw_blocks = List.map (fun (i, e) -> (i, tr_rec env e)) sw.sw_blocks;\n      sw_failaction = tr_opt env sw.sw_failaction;\n    }\n  and tr_opt env = function\n    | None -> None\n    | Some e -> Some (tr_rec env e)\n  in\n\n  try Some (tr_rec Ident.empty e) with Not_simple -> None\n\n(***************)\n\nlet name_lambda strict arg fn =\n  match arg with\n  | Lvar id -> fn id\n  | _ ->\n    let id = Ident.create \"let\" in\n    Llet (strict, Pgenval, id, arg, fn id)\n\nlet name_lambda_list args fn =\n  let rec name_list names = function\n    | [] -> fn (List.rev names)\n    | (Lvar _ as arg) :: rem -> name_list (arg :: names) rem\n    | arg :: rem ->\n      let id = Ident.create \"let\" in\n      Llet (Strict, Pgenval, id, arg, name_list (Lvar id :: names) rem)\n  in\n  name_list [] args\n\nlet iter_opt f = function\n  | None -> ()\n  | Some e -> f e\n\nlet iter f = function\n  | Lvar _ | Lconst _ -> ()\n  | Lapply {ap_func = fn; ap_args = args} ->\n    f fn;\n    List.iter f args\n  | Lfunction {body} -> f body\n  | Llet (_str, _k, _id, arg, body) ->\n    f arg;\n    f body\n  | Lletrec (decl, body) ->\n    f body;\n    List.iter (fun (_id, exp) -> f exp) decl\n  | Lprim (_p, args, _loc) -> List.iter f args\n  | Lswitch (arg, sw, _) ->\n    f arg;\n    List.iter (fun (_key, case) -> f case) sw.sw_consts;\n    List.iter (fun (_key, case) -> f case) sw.sw_blocks;\n    iter_opt f sw.sw_failaction\n  | Lstringswitch (arg, cases, default, _) ->\n    f arg;\n    List.iter (fun (_, act) -> f act) cases;\n    iter_opt f default\n  | Lstaticraise (_, args) -> List.iter f args\n  | Lstaticcatch (e1, _, e2) ->\n    f e1;\n    f e2\n  | Ltrywith (e1, _, e2) ->\n    f e1;\n    f e2\n  | Lifthenelse (e1, e2, e3) ->\n    f e1;\n    f e2;\n    f e3\n  | Lsequence (e1, 
e2) ->\n    f e1;\n    f e2\n  | Lwhile (e1, e2) ->\n    f e1;\n    f e2\n  | Lfor (_v, e1, e2, _dir, e3) ->\n    f e1;\n    f e2;\n    f e3\n  | Lassign (_, e) -> f e\n  | Lsend (_k, obj, _) -> f obj\n\nmodule IdentSet = Set.Make (Ident)\n\nlet free_ids get l =\n  let fv = ref IdentSet.empty in\n  let rec free l =\n    iter free l;\n    fv := List.fold_right IdentSet.add (get l) !fv;\n    match l with\n    | Lfunction {params} ->\n      List.iter (fun param -> fv := IdentSet.remove param !fv) params\n    | Llet (_str, _k, id, _arg, _body) -> fv := IdentSet.remove id !fv\n    | Lletrec (decl, _body) ->\n      List.iter (fun (id, _exp) -> fv := IdentSet.remove id !fv) decl\n    | Lstaticcatch (_e1, (_, vars), _e2) ->\n      List.iter (fun id -> fv := IdentSet.remove id !fv) vars\n    | Ltrywith (_e1, exn, _e2) -> fv := IdentSet.remove exn !fv\n    | Lfor (v, _e1, _e2, _dir, _e3) -> fv := IdentSet.remove v !fv\n    | Lassign (id, _e) -> fv := IdentSet.add id !fv\n    | Lvar _ | Lconst _ | Lapply _ | Lprim _ | Lswitch _ | Lstringswitch _\n    | Lstaticraise _ | Lifthenelse _ | Lsequence _ | Lwhile _ | Lsend _ ->\n      ()\n  in\n  free l;\n  !fv\n\nlet free_variables l =\n  free_ids\n    (function\n      | Lvar id -> [id]\n      | _ -> [])\n    l\n\n(* Check if an action has a \"when\" guard *)\nlet raise_count = ref 0\n\nlet next_raise_count () =\n  incr raise_count;\n  !raise_count\n\nlet negative_raise_count = ref 0\n\nlet next_negative_raise_count () =\n  decr negative_raise_count;\n  !negative_raise_count\n\n(* Anticipated staticraise, for guards *)\nlet staticfail = Lstaticraise (0, [])\n\nlet rec is_guarded = function\n  | Lifthenelse (_cond, _body, Lstaticraise (0, [])) -> true\n  | Llet (_str, _k, _id, _lam, body) -> is_guarded body\n  | _ -> false\n\nlet rec patch_guarded patch = function\n  | Lifthenelse (cond, body, Lstaticraise (0, [])) ->\n    Lifthenelse (cond, body, patch)\n  | Llet (str, k, id, lam, body) ->\n    Llet (str, k, id, lam, patch_guarded 
patch body)\n  | _ -> assert false\n\n(* Translate an access path *)\n\nlet rec transl_normal_path = function\n  | Path.Pident id ->\n    if Ident.global id then Lprim (Pgetglobal id, [], Location.none)\n    else Lvar id\n  | Pdot (p, s, pos) ->\n    Lprim\n      ( Pfield (pos, Fld_module {name = s}),\n        [transl_normal_path p],\n        Location.none )\n  | Papply _ -> assert false\n\n(* Translation of identifiers *)\n\nlet transl_module_path ?(loc = Location.none) env path =\n  transl_normal_path (Env.normalize_path (Some loc) env path)\n\nlet transl_value_path ?(loc = Location.none) env path =\n  transl_normal_path (Env.normalize_path_prefix (Some loc) env path)\n\nlet transl_extension_path = transl_value_path\n\n(* compatibility alias, deprecated in the .mli *)\n(* Compile a sequence of expressions *)\n\nlet rec make_sequence fn = function\n  | [] -> lambda_unit\n  | [x] -> fn x\n  | x :: rem ->\n    let lam = fn x in\n    Lsequence (lam, make_sequence fn rem)\n\n(* Apply a substitution to a lambda-term.\n   Assumes that the bound variables of the lambda-term do not\n   belong to the domain of the substitution.\n   Assumes that the image of the substitution is out of reach\n   of the bound variables of the lambda-term (no capture). 
*)\n\nlet subst_lambda s lam =\n  let rec subst = function\n    | Lvar id as l -> ( try Ident.find_same id s with Not_found -> l)\n    | Lconst _ as l -> l\n    | Lapply ap ->\n      Lapply\n        {\n          ap with\n          ap_func = subst ap.ap_func;\n          ap_args = List.map subst ap.ap_args;\n        }\n    | Lfunction {params; body; attr; loc} ->\n      Lfunction {params; body = subst body; attr; loc}\n    | Llet (str, k, id, arg, body) -> Llet (str, k, id, subst arg, subst body)\n    | Lletrec (decl, body) -> Lletrec (List.map subst_decl decl, subst body)\n    | Lprim (p, args, loc) -> Lprim (p, List.map subst args, loc)\n    | Lswitch (arg, sw, loc) ->\n      Lswitch\n        ( subst arg,\n          {\n            sw with\n            sw_consts = List.map subst_case sw.sw_consts;\n            sw_blocks = List.map subst_case sw.sw_blocks;\n            sw_failaction = subst_opt sw.sw_failaction;\n          },\n          loc )\n    | Lstringswitch (arg, cases, default, loc) ->\n      Lstringswitch\n        (subst arg, List.map subst_strcase cases, subst_opt default, loc)\n    | Lstaticraise (i, args) -> Lstaticraise (i, List.map subst args)\n    | Lstaticcatch (e1, io, e2) -> Lstaticcatch (subst e1, io, subst e2)\n    | Ltrywith (e1, exn, e2) -> Ltrywith (subst e1, exn, subst e2)\n    | Lifthenelse (e1, e2, e3) -> Lifthenelse (subst e1, subst e2, subst e3)\n    | Lsequence (e1, e2) -> Lsequence (subst e1, subst e2)\n    | Lwhile (e1, e2) -> Lwhile (subst e1, subst e2)\n    | Lfor (v, e1, e2, dir, e3) -> Lfor (v, subst e1, subst e2, dir, subst e3)\n    | Lassign (id, e) -> Lassign (id, subst e)\n    | Lsend (k, obj, loc) -> Lsend (k, subst obj, loc)\n  and subst_decl (id, exp) = (id, subst exp)\n  and subst_case (key, case) = (key, subst case)\n  and subst_strcase (key, case) = (key, subst case)\n  and subst_opt = function\n    | None -> None\n    | Some e -> Some (subst e)\n  in\n  subst lam\n\nlet rec map f lam =\n  let lam =\n    match lam with\n    
| Lvar _ -> lam\n    | Lconst _ -> lam\n    | Lapply {ap_func; ap_args; ap_loc; ap_inlined} ->\n      Lapply\n        {\n          ap_func = map f ap_func;\n          ap_args = List.map (map f) ap_args;\n          ap_loc;\n          ap_inlined;\n        }\n    | Lfunction {params; body; attr; loc} ->\n      Lfunction {params; body = map f body; attr; loc}\n    | Llet (str, k, v, e1, e2) -> Llet (str, k, v, map f e1, map f e2)\n    | Lletrec (idel, e2) ->\n      Lletrec (List.map (fun (v, e) -> (v, map f e)) idel, map f e2)\n    | Lprim (p, el, loc) -> Lprim (p, List.map (map f) el, loc)\n    | Lswitch (e, sw, loc) ->\n      Lswitch\n        ( map f e,\n          {\n            sw_numconsts = sw.sw_numconsts;\n            sw_consts = List.map (fun (n, e) -> (n, map f e)) sw.sw_consts;\n            sw_numblocks = sw.sw_numblocks;\n            sw_blocks = List.map (fun (n, e) -> (n, map f e)) sw.sw_blocks;\n            sw_failaction = Misc.may_map (map f) sw.sw_failaction;\n            sw_names = sw.sw_names;\n          },\n          loc )\n    | Lstringswitch (e, sw, default, loc) ->\n      Lstringswitch\n        ( map f e,\n          List.map (fun (s, e) -> (s, map f e)) sw,\n          Misc.may_map (map f) default,\n          loc )\n    | Lstaticraise (i, args) -> Lstaticraise (i, List.map (map f) args)\n    | Lstaticcatch (body, id, handler) ->\n      Lstaticcatch (map f body, id, map f handler)\n    | Ltrywith (e1, v, e2) -> Ltrywith (map f e1, v, map f e2)\n    | Lifthenelse (e1, e2, e3) -> Lifthenelse (map f e1, map f e2, map f e3)\n    | Lsequence (e1, e2) -> Lsequence (map f e1, map f e2)\n    | Lwhile (e1, e2) -> Lwhile (map f e1, map f e2)\n    | Lfor (v, e1, e2, dir, e3) -> Lfor (v, map f e1, map f e2, dir, map f e3)\n    | Lassign (v, e) -> Lassign (v, map f e)\n    | Lsend (k, o, loc) -> Lsend (k, map f o, loc)\n  in\n  f lam\n\n(* To let-bind expressions to variables *)\n\nlet bind str var exp body =\n  match exp with\n  | Lvar var' when Ident.same var 
var' -> body\n  | _ -> Llet (str, Pgenval, var, exp, body)\n\nand commute_comparison = function\n  | Ceq -> Ceq\n  | Cneq -> Cneq\n  | Clt -> Cgt\n  | Cle -> Cge\n  | Cgt -> Clt\n  | Cge -> Cle\n\nand negate_comparison = function\n  | Ceq -> Cneq\n  | Cneq -> Ceq\n  | Clt -> Cge\n  | Cle -> Cgt\n  | Cgt -> Cle\n  | Cge -> Clt\n\nlet raise_kind = function\n  | Raise_regular -> \"raise\"\n  | Raise_reraise -> \"reraise\"\n  | Raise_notrace -> \"raise_notrace\"\n\nlet lam_of_loc kind loc =\n  let loc_start = loc.Location.loc_start in\n  let file, lnum, cnum = Location.get_pos_info loc_start in\n  let file = Filename.basename file in\n  let enum =\n    loc.Location.loc_end.Lexing.pos_cnum - loc_start.Lexing.pos_cnum + cnum\n  in\n  match kind with\n  | Loc_POS ->\n    Lconst\n      (Const_block\n         ( Blk_tuple,\n           [\n             Const_immstring file;\n             Const_base (Const_int lnum);\n             Const_base (Const_int cnum);\n             Const_base (Const_int enum);\n           ] ))\n  | Loc_FILE -> Lconst (Const_immstring file)\n  | Loc_MODULE ->\n    let filename = Filename.basename file in\n    let name = Env.get_unit_name () in\n    let module_name = if name = \"\" then \"//\" ^ filename ^ \"//\" else name in\n    Lconst (Const_immstring module_name)\n  | Loc_LOC ->\n    let loc =\n      Printf.sprintf \"File %S, line %d, characters %d-%d\" file lnum cnum enum\n    in\n    Lconst (Const_immstring loc)\n  | Loc_LINE -> Lconst (Const_base (Const_int lnum))\n\nlet reset () = raise_count := 0\n"
  },
  {
    "path": "analysis/vendor/ml/lambda.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* The \"lambda\" intermediate code *)\n\nopen Asttypes\n\ntype compile_time_constant =\n  | Big_endian\n  | Word_size\n  | Int_size\n  | Max_wosize\n  | Ostype_unix\n  | Ostype_win32\n  | Ostype_cygwin\n  | Backend_type\n\ntype loc_kind = Loc_FILE | Loc_LINE | Loc_MODULE | Loc_LOC | Loc_POS\n\ntype record_repr = Record_regular | Record_optional\n\ntype tag_info =\n  | Blk_constructor of {\n      name: string;\n      num_nonconst: int;\n      tag: int;\n      attrs: Parsetree.attributes;\n    }\n  | Blk_record_inlined of {\n      name: string;\n      num_nonconst: int;\n      tag: int;\n      optional_labels: string list;\n      fields: string array;\n      mutable_flag: mutable_flag;\n      attrs: Parsetree.attributes;\n    }\n  | Blk_tuple\n  | Blk_poly_var of string\n  | Blk_record of {\n      fields: string array;\n      mutable_flag: mutable_flag;\n      record_repr: 
record_repr;\n    }\n  | Blk_module of string list\n  | Blk_module_export of Ident.t list\n  | Blk_extension\n    (* underlying is the same as tuple, immutable block\n       {[\n          exception A of int * int\n       ]}\n       is translated into\n       {[\n         [A, x, y]\n       ]}\n    *)\n  | Blk_some\n  | Blk_some_not_nested\n    (* ['a option] where ['a] can not inhabit a non-like value *)\n  | Blk_record_ext of {fields: string array; mutable_flag: mutable_flag}\n  | Blk_lazy_general\n\nval find_name : Parsetree.attribute -> Asttypes.label option\n\nval tag_of_tag_info : tag_info -> int\nval mutable_flag_of_tag_info : tag_info -> mutable_flag\nval blk_record :\n  (Types.label_description * Typedtree.record_label_definition) array ->\n  mutable_flag ->\n  record_repr ->\n  tag_info\n\nval blk_record_ext :\n  (Types.label_description * Typedtree.record_label_definition) array ->\n  mutable_flag ->\n  tag_info\n\nval blk_record_inlined :\n  (Types.label_description * Typedtree.record_label_definition) array ->\n  string ->\n  int ->\n  string list ->\n  tag:int ->\n  attrs:Parsetree.attributes ->\n  mutable_flag ->\n  tag_info\n\nval ref_tag_info : tag_info\n\ntype field_dbg_info =\n  | Fld_record of {name: string; mutable_flag: Asttypes.mutable_flag}\n  | Fld_module of {name: string}\n  | Fld_record_inline of {name: string}\n  | Fld_record_extension of {name: string}\n  | Fld_tuple\n  | Fld_poly_var_tag\n  | Fld_poly_var_content\n  | Fld_extension\n  | Fld_variant\n  | Fld_cons\n  | Fld_array\n\nval fld_record : Types.label_description -> field_dbg_info\n\nval fld_record_inline : Types.label_description -> field_dbg_info\n\nval fld_record_extension : Types.label_description -> field_dbg_info\n\nval ref_field_info : field_dbg_info\n\ntype set_field_dbg_info =\n  | Fld_record_set of string\n  | Fld_record_inline_set of string\n  | Fld_record_extension_set of string\n\nval ref_field_set_info : set_field_dbg_info\n\nval fld_record_set : 
Types.label_description -> set_field_dbg_info\n\nval fld_record_inline_set : Types.label_description -> set_field_dbg_info\n\nval fld_record_extension_set : Types.label_description -> set_field_dbg_info\n\ntype immediate_or_pointer = Immediate | Pointer\ntype is_safe = Safe | Unsafe\n\ntype pointer_info =\n  | Pt_constructor of {\n      name: string;\n      const: int;\n      non_const: int;\n      attrs: Parsetree.attributes;\n    }\n  | Pt_variant of {name: string}\n  | Pt_module_alias\n  | Pt_shape_none\n  | Pt_assertfalse\n\ntype primitive =\n  | Pidentity\n  | Pbytes_to_string\n  | Pignore\n  | Prevapply\n  | Pdirapply\n  | Ploc of loc_kind (* Globals *)\n  | Pgetglobal of Ident.t\n  (* Operations on heap blocks *)\n  | Pmakeblock of tag_info\n  | Pfield of int * field_dbg_info\n  | Psetfield of int * set_field_dbg_info\n  | Pduprecord\n  (* Force lazy values *)\n  | Plazyforce\n  (* External call *)\n  | Pccall of Primitive.description\n  (* Exceptions *)\n  | Praise of raise_kind\n  (* Boolean operations *)\n  | Psequand\n  | Psequor\n  | Pnot\n  (* Integer operations *)\n  | Pnegint\n  | Paddint\n  | Psubint\n  | Pmulint\n  | Pdivint of is_safe\n  | Pmodint of is_safe\n  | Pandint\n  | Porint\n  | Pxorint\n  | Plslint\n  | Plsrint\n  | Pasrint\n  | Pintcomp of comparison\n  | Poffsetint of int\n  | Poffsetref of int\n  (* Float operations *)\n  | Pintoffloat\n  | Pfloatofint\n  | Pnegfloat\n  | Pabsfloat\n  | Paddfloat\n  | Psubfloat\n  | Pmulfloat\n  | Pdivfloat\n  | Pfloatcomp of comparison\n  (* BigInt operations *)\n  | Pnegbigint\n  | Paddbigint\n  | Psubbigint\n  | Ppowbigint\n  | Pmulbigint\n  | Pdivbigint\n  | Pmodbigint\n  | Pandbigint\n  | Porbigint\n  | Pxorbigint\n  | Plslbigint\n  | Pasrbigint\n  | Pbigintcomp of comparison\n  (* String operations *)\n  | Pstringlength\n  | Pstringrefu\n  | Pstringrefs\n  | Pbyteslength\n  | Pbytesrefu\n  | Pbytessetu\n  | Pbytesrefs\n  | Pbytessets\n  (* Array operations *)\n  | Pmakearray of mutable_flag\n  | 
Parraylength\n  | Parrayrefu\n  | Parraysetu\n  | Parrayrefs\n  | Parraysets\n  (* Test if the argument is a block or an immediate integer *)\n  | Pisint\n  (* Test if the (integer) argument is outside an interval *)\n  | Pisout\n  (* Operations on boxed integers (Nativeint.t, Int32.t, Int64.t) *)\n  | Pbintofint of boxed_integer\n  | Pintofbint of boxed_integer\n  | Pcvtbint of boxed_integer (*source*) * boxed_integer (*destination*)\n  | Pnegbint of boxed_integer\n  | Paddbint of boxed_integer\n  | Psubbint of boxed_integer\n  | Pmulbint of boxed_integer\n  | Pdivbint of {size: boxed_integer; is_safe: is_safe}\n  | Pmodbint of {size: boxed_integer; is_safe: is_safe}\n  | Pandbint of boxed_integer\n  | Porbint of boxed_integer\n  | Pxorbint of boxed_integer\n  | Plslbint of boxed_integer\n  | Plsrbint of boxed_integer\n  | Pasrbint of boxed_integer\n  | Pbintcomp of boxed_integer * comparison\n  | Pctconst of compile_time_constant\n  (* Inhibition of optimisation *)\n  | Popaque\n  | Puncurried_apply\n  | Pcreate_extension of string\nand comparison = Ceq | Cneq | Clt | Cgt | Cle | Cge\n\nand value_kind = Pgenval\n\nand boxed_integer = Primitive.boxed_integer = Pbigint | Pint32 | Pint64\n\nand raise_kind = Raise_regular | Raise_reraise | Raise_notrace\n\ntype structured_constant =\n  | Const_base of constant\n  | Const_pointer of int * pointer_info\n  | Const_block of tag_info * structured_constant list\n  | Const_float_array of string list\n  | Const_immstring of string\n  | Const_false\n  | Const_true\n\ntype inline_attribute =\n  | Always_inline (* [@inline] or [@inline always] *)\n  | Never_inline (* [@inline never] *)\n  | Default_inline (* no [@inline] attribute *)\n\ntype let_kind = Strict | Alias | StrictOpt | Variable\n(* Meaning of kinds for let x = e in e':\n    Strict: e may have side-effects; always evaluate e first\n      (If e is a simple expression, e.g. 
a variable or constant,\n       we may still substitute e'[x/e].)\n    Alias: e is pure, we can substitute e'[x/e] if x has 0 or 1 occurrences\n      in e'\n    StrictOpt: e does not have side-effects, but depend on the store;\n      we can discard e if x does not appear in e'\n    Variable: the variable x is assigned later in e'\n*)\n\n(* [true] means yes, [false] may mean unknown *)\ntype function_attribute = {\n  inline: inline_attribute;\n  is_a_functor: bool;\n  return_unit: bool;\n  async: bool;\n  directive: string option;\n  one_unit_arg: bool;\n}\n\ntype lambda =\n  | Lvar of Ident.t\n  | Lconst of structured_constant\n  | Lapply of lambda_apply\n  | Lfunction of lfunction\n  | Llet of let_kind * value_kind * Ident.t * lambda * lambda\n  | Lletrec of (Ident.t * lambda) list * lambda\n  | Lprim of primitive * lambda list * Location.t\n  | Lswitch of lambda * lambda_switch * Location.t\n  (* switch on strings, clauses are sorted by string order,\n     strings are pairwise distinct *)\n  | Lstringswitch of\n      lambda * (string * lambda) list * lambda option * Location.t\n  | Lstaticraise of int * lambda list\n  | Lstaticcatch of lambda * (int * Ident.t list) * lambda\n  | Ltrywith of lambda * Ident.t * lambda\n  | Lifthenelse of lambda * lambda * lambda\n  | Lsequence of lambda * lambda\n  | Lwhile of lambda * lambda\n  | Lfor of Ident.t * lambda * lambda * direction_flag * lambda\n  | Lassign of Ident.t * lambda\n  | Lsend of string * lambda * Location.t\n\nand lfunction = {\n  params: Ident.t list;\n  body: lambda;\n  attr: function_attribute; (* specified with [@inline] attribute *)\n  loc: Location.t;\n}\n\nand lambda_apply = {\n  ap_func: lambda;\n  ap_args: lambda list;\n  ap_loc: Location.t;\n  ap_inlined: inline_attribute; (* specified with the [@inlined] attribute *)\n}\n\nand lambda_switch = {\n  sw_numconsts: int; (* Number of integer cases *)\n  sw_consts: (int * lambda) list; (* Integer cases *)\n  sw_numblocks: int; (* Number of tag block 
cases *)\n  sw_blocks: (int * lambda) list; (* Tag block cases *)\n  sw_failaction: lambda option; (* Action to take if failure *)\n  sw_names: Ast_untagged_variants.switch_names option;\n}\n\n(* Lambda code for the middle-end.\n   * In the closure case the code is a sequence of assignments to a\n     preallocated block of size [main_module_block_size] using\n     (Setfield(Getglobal(module_ident))). The size is used to preallocate\n     the block.\n   * In the flambda case the code is an expression returning a block\n     value of size [main_module_block_size]. The size is used to build\n     the module root as an initialize_symbol\n     Initialize_symbol(module_name, 0,\n       [getfield 0; ...; getfield (main_module_block_size - 1)])\n*)\n\n(* Sharing key *)\nval make_key : lambda -> lambda option\n\nval const_unit : structured_constant\nval lambda_assert_false : lambda\nval lambda_unit : lambda\nval lambda_module_alias : lambda\nval name_lambda : let_kind -> lambda -> (Ident.t -> lambda) -> lambda\nval name_lambda_list : lambda list -> (lambda list -> lambda) -> lambda\n\nval iter : (lambda -> unit) -> lambda -> unit\nmodule IdentSet : Set.S with type elt = Ident.t\nval free_variables : lambda -> IdentSet.t\n\nval transl_normal_path : Path.t -> lambda (* Path.t is already normal *)\n\nval transl_module_path : ?loc:Location.t -> Env.t -> Path.t -> lambda\nval transl_value_path : ?loc:Location.t -> Env.t -> Path.t -> lambda\nval transl_extension_path : ?loc:Location.t -> Env.t -> Path.t -> lambda\n\nval make_sequence : ('a -> lambda) -> 'a list -> lambda\n\nval subst_lambda : lambda Ident.tbl -> lambda -> lambda\nval map : (lambda -> lambda) -> lambda -> lambda\nval bind : let_kind -> Ident.t -> lambda -> lambda -> lambda\n\nval commute_comparison : comparison -> comparison\nval negate_comparison : comparison -> comparison\n\nval default_function_attribute : function_attribute\n\n(***********************)\n(* For static failures 
*)\n(***********************)\n\n(* Get a new static failure ident *)\nval next_raise_count : unit -> int\nval next_negative_raise_count : unit -> int\n(* Negative raise counts are used to compile 'match ... with\n   exception x -> ...'.  This disabled some simplifications\n   performed by the Simplif module that assume that static raises\n   are in tail position in their handler. *)\n\nval staticfail : lambda (* Anticipated static failure *)\n\n(* Check anticipated failure, substitute its final value *)\nval is_guarded : lambda -> bool\nval patch_guarded : lambda -> lambda -> lambda\n\nval raise_kind : raise_kind -> string\nval lam_of_loc : loc_kind -> Location.t -> lambda\n\nval reset : unit -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/lexer.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* The lexical analyzer *)\n\nval init : unit -> unit\nval token : Lexing.lexbuf -> Parser.token\nval skip_hash_bang : Lexing.lexbuf -> unit\n\ntype error =\n  | Illegal_character of char\n  | Illegal_escape of string\n  | Unterminated_comment of Location.t\n  | Unterminated_string\n  | Unterminated_string_in_comment of Location.t * Location.t\n  | Keyword_as_label of string\n  | Invalid_literal of string\n  | Invalid_directive of string * string option\n\nexception Error of error * Location.t\n\nval in_comment : unit -> bool\nval in_string : unit -> bool\n\nval print_warnings : bool ref\nval handle_docstrings : bool ref\nval comments : unit -> (string * Location.t) list\nval token_with_comments : Lexing.lexbuf -> Parser.token\n\n(*\n  [set_preprocessor init preprocessor] registers [init] as the function\nto call to initialize the preprocessor when the lexer is 
initialized,\nand [preprocessor] a function that is called when a new token is needed\nby the parser, as [preprocessor lexer lexbuf] where [lexer] is the\nlexing function.\n\nWhen a preprocessor is configured by calling [set_preprocessor], the lexer\nchanges its behavior to accept backslash-newline as a token-separating blank.\n*)\n\nval set_preprocessor :\n  (unit -> unit) ->\n  ((Lexing.lexbuf -> Parser.token) -> Lexing.lexbuf -> Parser.token) ->\n  unit\n"
  },
  {
    "path": "analysis/vendor/ml/lexer.mll",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* The lexer definition *)\n\n{\nopen Lexing\nopen Misc\nopen Parser\n\ntype error =\n  | Illegal_character of char\n  | Illegal_escape of string\n  | Unterminated_comment of Location.t\n  | Unterminated_string\n  | Unterminated_string_in_comment of Location.t * Location.t\n  | Keyword_as_label of string\n  | Invalid_literal of string\n  | Invalid_directive of string * string option\n;;\n\nexception Error of error * Location.t;;\n\n(* The table of keywords *)\n\nlet keyword_table =\n  create_hashtable 149 [\n    \"and\", AND;\n    \"as\", AS;\n    \"assert\", ASSERT;\n    \"begin\", BEGIN;\n    \"class\", CLASS;\n    \"constraint\", CONSTRAINT;\n    \"do\", DO;\n    \"done\", DONE;\n    \"downto\", DOWNTO;\n    \"else\", ELSE;\n    \"end\", END;\n    \"exception\", EXCEPTION;\n    \"external\", EXTERNAL;\n    \"false\", FALSE;\n    \"for\", FOR;\n    \"fun\", FUN;\n    
\"function\", FUNCTION;\n    \"functor\", FUNCTOR;\n    \"if\", IF;\n    \"in\", IN;\n    \"include\", INCLUDE;\n    \"inherit\", INHERIT;\n    \"initializer\", INITIALIZER;\n    \"lazy\", LAZY;\n    \"let\", LET;\n    \"match\", MATCH;\n    \"method\", METHOD;\n    \"module\", MODULE;\n    \"mutable\", MUTABLE;\n    \"new\", NEW;\n    \"nonrec\", NONREC;\n    \"object\", OBJECT;\n    \"of\", OF;\n    \"open\", OPEN;\n    \"or\", OR;\n(*  \"parser\", PARSER; *)\n    \"private\", PRIVATE;\n    \"rec\", REC;\n    \"sig\", SIG;\n    \"struct\", STRUCT;\n    \"then\", THEN;\n    \"to\", TO;\n    \"true\", TRUE;\n    \"try\", TRY;\n    \"type\", TYPE;\n    \"val\", VAL;\n    \"virtual\", VIRTUAL;\n    \"when\", WHEN;\n    \"while\", WHILE;\n    \"with\", WITH;\n\n    \"lor\", INFIXOP3(\"lor\"); (* Should be INFIXOP2 *)\n    \"lxor\", INFIXOP3(\"lxor\"); (* Should be INFIXOP2 *)\n    \"mod\", INFIXOP3(\"mod\");\n    \"land\", INFIXOP3(\"land\");\n    \"lsl\", INFIXOP4(\"lsl\");\n    \"lsr\", INFIXOP4(\"lsr\");\n    \"asr\", INFIXOP4(\"asr\")\n]\n\n(* To buffer string literals *)\n\nlet string_buffer = Buffer.create 256\nlet reset_string_buffer () = Buffer.reset string_buffer\nlet get_stored_string () = Buffer.contents string_buffer\n\nlet store_string_char c = Buffer.add_char string_buffer c\nlet store_string_utf_8_uchar u = Buffer.add_utf_8_uchar string_buffer u\nlet store_string s = Buffer.add_string string_buffer s\nlet store_lexeme lexbuf = store_string (Lexing.lexeme lexbuf)\n\n(* To store the position of the beginning of a string and comment *)\nlet string_start_loc = ref Location.none;;\nlet comment_start_loc = ref [];;\nlet in_comment () = !comment_start_loc <> [];;\nlet is_in_string = ref false\nlet in_string () = !is_in_string\nlet print_warnings = ref true\n\n(* Escaped chars are interpreted in strings unless they are in comments. 
*)\nlet store_escaped_char lexbuf c =\n  if in_comment () then store_lexeme lexbuf else store_string_char c\n\nlet store_escaped_uchar lexbuf u =\n  if in_comment () then store_lexeme lexbuf else store_string_utf_8_uchar u\n\nlet with_comment_buffer comment lexbuf =\n  let start_loc = Location.curr lexbuf  in\n  comment_start_loc := [start_loc];\n  reset_string_buffer ();\n  let end_loc = comment lexbuf in\n  let s = get_stored_string () in\n  reset_string_buffer ();\n  let loc = { start_loc with Location.loc_end = end_loc.Location.loc_end } in\n  s, loc\n\n(* To translate escape sequences *)\n\nlet hex_digit_value d = (* assert (d in '0'..'9' 'a'..'f' 'A'..'F') *)\n  let d = Char.code d in\n  if d >= 97 then d - 87 else\n  if d >= 65 then d - 55 else\n  d - 48\n\nlet hex_num_value lexbuf ~first ~last =\n  let rec loop acc i = match i > last with\n  | true -> acc\n  | false ->\n      let value = hex_digit_value (Lexing.lexeme_char lexbuf i) in\n      loop (16 * acc + value) (i + 1)\n  in\n  loop 0 first\n\nlet char_for_backslash = function\n  | 'n' -> '\\010'\n  | 'r' -> '\\013'\n  | 'b' -> '\\008'\n  | 't' -> '\\009'\n  | c   -> c\n\nlet char_for_decimal_code lexbuf i =\n  let c = 100 * (Char.code(Lexing.lexeme_char lexbuf i) - 48) +\n           10 * (Char.code(Lexing.lexeme_char lexbuf (i+1)) - 48) +\n                (Char.code(Lexing.lexeme_char lexbuf (i+2)) - 48) in\n  if not (Uchar.is_valid c ) then\n    if in_comment ()\n    then 'x'\n    else raise (Error(Illegal_escape (Lexing.lexeme lexbuf),\n                      Location.curr lexbuf))\n  else (Obj.magic (c : int) : char)\n\nlet char_for_octal_code lexbuf i =\n  let c = 64 * (Char.code(Lexing.lexeme_char lexbuf i) - 48) +\n           8 * (Char.code(Lexing.lexeme_char lexbuf (i+1)) - 48) +\n               (Char.code(Lexing.lexeme_char lexbuf (i+2)) - 48) in\n  Char.chr c\n\nlet char_for_hexadecimal_code lexbuf i =\n  let byte = hex_num_value lexbuf ~first:i ~last:(i+1) in\n  Char.chr byte\n\nlet 
uchar_for_uchar_escape lexbuf =\n  let err e =\n    raise\n      (Error (Illegal_escape (Lexing.lexeme lexbuf ^ e), Location.curr lexbuf))\n  in\n  let len = Lexing.lexeme_end lexbuf - Lexing.lexeme_start lexbuf in\n  let first = 3 (* skip opening \\u{ *) in\n  let last = len - 2 (* skip closing } *) in\n  let digit_count = last - first + 1 in\n  match digit_count > 6 with\n  | true -> err \", too many digits, expected 1 to 6 hexadecimal digits\"\n  | false ->\n      let cp = hex_num_value lexbuf ~first ~last in\n      if Uchar.is_valid cp then Uchar.unsafe_of_int cp else\n      err (\", \" ^ Printf.sprintf \"%X\" cp ^ \" is not a Unicode scalar value\")\n\n(* recover the name from a LABEL or OPTLABEL token *)\n\nlet get_label_name lexbuf =\n  let s = Lexing.lexeme lexbuf in\n  let name = String.sub s 1 (String.length s - 2) in\n  if Hashtbl.mem keyword_table name then\n    raise (Error(Keyword_as_label name, Location.curr lexbuf));\n  name\n;;\n\n(* Update the current location with file name and line number. 
*)\n\nlet update_loc lexbuf file line absolute chars =\n  let pos = lexbuf.lex_curr_p in\n  let new_file = match file with\n                 | None -> pos.pos_fname\n                 | Some s -> s\n  in\n  lexbuf.lex_curr_p <- { pos with\n    pos_fname = new_file;\n    pos_lnum = if absolute then line else pos.pos_lnum + line;\n    pos_bol = pos.pos_cnum - chars;\n  }\n;;\n\nlet preprocessor = ref None\n\nlet escaped_newlines = ref false\n\n\nlet handle_docstrings = ref true\nlet comment_list = ref []\n\nlet add_comment com =\n  comment_list := com :: !comment_list\n\nlet add_docstring_comment ds =\n  let com =\n    (\"*\" ^ Docstrings.docstring_body ds, Docstrings.docstring_loc ds)\n  in\n    add_comment com\n\nlet comments () = List.rev !comment_list\n\n(* Error report *)\n\nopen Format\n\nlet report_error ppf = function\n  | Illegal_character c ->\n      fprintf ppf \"Illegal character (%s)\" (Char.escaped c)\n  | Illegal_escape s ->\n      fprintf ppf \"Illegal backslash escape in string or character (%s)\" s\n  | Unterminated_comment _ ->\n      fprintf ppf \"Comment not terminated\"\n  | Unterminated_string ->\n      fprintf ppf \"String literal not terminated\"\n  | Unterminated_string_in_comment (_, loc) ->\n      fprintf ppf \"This comment contains an unterminated string literal@.\\\n                   %aString literal begins here\"\n              (Location.print_error \"\") loc\n  | Keyword_as_label kwd ->\n      fprintf ppf \"`%s' is a keyword, it cannot be used as label name\" kwd\n  | Invalid_literal s ->\n      fprintf ppf \"Invalid literal %s\" s\n  | Invalid_directive (dir, explanation) ->\n      fprintf ppf \"Invalid lexer directive %S\" dir;\n      begin match explanation with\n        | None -> ()\n        | Some expl -> fprintf ppf \": %s\" expl\n      end\n\nlet () =\n  Location.register_error_of_exn\n    (function\n      | Error (err, loc) ->\n          Some (Location.error_of_printer loc report_error err)\n      | _ ->\n          None\n    
)\n\n}\n\nlet newline = ('\\013'* '\\010')\nlet blank = [' ' '\\009' '\\012']\nlet lowercase = ['a'-'z' '_']\nlet uppercase = ['A'-'Z']\nlet identchar = ['A'-'Z' 'a'-'z' '_' '\\'' '0'-'9']\nlet symbolchar =\n  ['!' '$' '%' '&' '*' '+' '-' '.' '/' ':' '<' '=' '>' '?' '@' '^' '|' '~']\nlet dotsymbolchar =\n  ['!' '$' '%' '&' '*' '+' '-' '/' ':' '=' '>' '?' '@' '^' '|' '~']\nlet decimal_literal =\n  ['0'-'9'] ['0'-'9' '_']*\nlet hex_digit =\n  ['0'-'9' 'A'-'F' 'a'-'f']\nlet hex_literal =\n  '0' ['x' 'X'] ['0'-'9' 'A'-'F' 'a'-'f']['0'-'9' 'A'-'F' 'a'-'f' '_']*\nlet oct_literal =\n  '0' ['o' 'O'] ['0'-'7'] ['0'-'7' '_']*\nlet bin_literal =\n  '0' ['b' 'B'] ['0'-'1'] ['0'-'1' '_']*\nlet int_literal =\n  decimal_literal | hex_literal | oct_literal | bin_literal\nlet float_literal =\n  ['0'-'9'] ['0'-'9' '_']*\n  ('.' ['0'-'9' '_']* )?\n  (['e' 'E'] ['+' '-']? ['0'-'9'] ['0'-'9' '_']* )?\nlet hex_float_literal =\n  '0' ['x' 'X']\n  ['0'-'9' 'A'-'F' 'a'-'f'] ['0'-'9' 'A'-'F' 'a'-'f' '_']*\n  ('.' ['0'-'9' 'A'-'F' 'a'-'f' '_']* )?\n  (['p' 'P'] ['+' '-']? 
['0'-'9'] ['0'-'9' '_']* )?\nlet literal_modifier = ['G'-'Z' 'g'-'z']\n\nrule token = parse\n  | \"\\\\\" newline {\n      if not !escaped_newlines then\n        raise (Error(Illegal_character (Lexing.lexeme_char lexbuf 0),\n                     Location.curr lexbuf));\n      update_loc lexbuf None 1 false 0;\n      token lexbuf }\n  | newline\n      { update_loc lexbuf None 1 false 0;\n        EOL }\n  | blank +\n      { token lexbuf }\n  | \"_\"\n      { UNDERSCORE }\n  | \"~\"\n      { TILDE }\n  | \"~\" lowercase identchar * ':'\n      { LABEL (get_label_name lexbuf) }\n  | \"?\"\n      { QUESTION }\n  | \"?\" lowercase identchar * ':'\n      { OPTLABEL (get_label_name lexbuf) }\n  | lowercase identchar *\n      { let s = Lexing.lexeme lexbuf in\n        try Hashtbl.find keyword_table s\n        with Not_found -> LIDENT s }\n  | uppercase identchar *\n      { UIDENT(Lexing.lexeme lexbuf) }       (* No capitalized keywords *)\n  | int_literal { INT (Lexing.lexeme lexbuf, None) }\n  | (int_literal as lit) (literal_modifier as modif)\n      { INT (lit, Some modif) }\n  | float_literal | hex_float_literal\n      { FLOAT (Lexing.lexeme lexbuf, None) }\n  | ((float_literal | hex_float_literal) as lit) (literal_modifier as modif)\n      { FLOAT (lit, Some modif) }\n  | (float_literal | hex_float_literal | int_literal) identchar+\n      { raise (Error(Invalid_literal (Lexing.lexeme lexbuf),\n                     Location.curr lexbuf)) }\n  | \"\\\"\"\n      { reset_string_buffer();\n        is_in_string := true;\n        let string_start = lexbuf.lex_start_p in\n        string_start_loc := Location.curr lexbuf;\n        string lexbuf;\n        is_in_string := false;\n        lexbuf.lex_start_p <- string_start;\n        STRING (get_stored_string(), None) }\n  | \"{\" lowercase* \"|\"\n      { reset_string_buffer();\n        let delim = Lexing.lexeme lexbuf in\n        let delim = String.sub delim 1 (String.length delim - 2) in\n        is_in_string := true;\n        let 
string_start = lexbuf.lex_start_p in\n        string_start_loc := Location.curr lexbuf;\n        quoted_string delim lexbuf;\n        is_in_string := false;\n        lexbuf.lex_start_p <- string_start;\n        STRING (get_stored_string(), Some delim) }\n  | \"\\'\" newline \"\\'\"\n      { update_loc lexbuf None 1 false 1;\n        CHAR (Lexing.lexeme_char lexbuf 1) }\n  | \"\\'\" [^ '\\\\' '\\'' '\\010' '\\013'] \"\\'\"\n      { CHAR(Lexing.lexeme_char lexbuf 1) }\n  | \"\\'\\\\\" ['\\\\' '\\'' '\\\"' 'n' 't' 'b' 'r' ' '] \"\\'\"\n      { CHAR(char_for_backslash (Lexing.lexeme_char lexbuf 2)) }\n  | \"\\'\\\\\" ['0'-'9'] ['0'-'9'] ['0'-'9'] \"\\'\"\n      { CHAR(char_for_decimal_code lexbuf 2) }\n  | \"\\'\\\\\" 'o' ['0'-'3'] ['0'-'7'] ['0'-'7'] \"\\'\"\n      { CHAR(char_for_octal_code lexbuf 3) }\n  | \"\\'\\\\\" 'x' ['0'-'9' 'a'-'f' 'A'-'F'] ['0'-'9' 'a'-'f' 'A'-'F'] \"\\'\"\n      { CHAR(char_for_hexadecimal_code lexbuf 3) }\n  | \"\\'\\\\\" _\n      { let l = Lexing.lexeme lexbuf in\n        let esc = String.sub l 1 (String.length l - 1) in\n        raise (Error(Illegal_escape esc, Location.curr lexbuf))\n      }\n  | \"(*\"\n      { let s, loc = with_comment_buffer comment lexbuf in\n        COMMENT (s, loc) }\n  | \"(**\"\n      { let s, loc = with_comment_buffer comment lexbuf in\n        if !handle_docstrings then\n          DOCSTRING (Docstrings.docstring s loc)\n        else\n          COMMENT (\"*\" ^ s, loc)\n      }\n  | \"(**\" (('*'+) as stars)\n      { let s, loc =\n          with_comment_buffer\n            (fun lexbuf ->\n               store_string (\"*\" ^ stars);\n               comment lexbuf)\n            lexbuf\n        in\n        COMMENT (s, loc) }\n  | \"(*)\"\n      { if !print_warnings then\n          Location.prerr_warning (Location.curr lexbuf) Warnings.Comment_start;\n        let s, loc = with_comment_buffer comment lexbuf in\n        COMMENT (s, loc) }\n  | \"(*\" (('*'*) as stars) \"*)\"\n      { if !handle_docstrings && 
stars=\"\" then\n         (* (**) is an empty docstring *)\n          DOCSTRING(Docstrings.docstring \"\" (Location.curr lexbuf))\n        else\n          COMMENT (stars, Location.curr lexbuf) }\n  | \"*)\"\n      { let loc = Location.curr lexbuf in\n        Location.prerr_warning loc Warnings.Comment_not_end;\n        lexbuf.Lexing.lex_curr_pos <- lexbuf.Lexing.lex_curr_pos - 1;\n        let curpos = lexbuf.lex_curr_p in\n        lexbuf.lex_curr_p <- { curpos with pos_cnum = curpos.pos_cnum - 1 };\n        STAR\n      }\n  | (\"#\" [' ' '\\t']* (['0'-'9']+ as num) [' ' '\\t']*\n        (\"\\\"\" ([^ '\\010' '\\013' '\\\"' ] * as name) \"\\\"\")?) as directive\n        [^ '\\010' '\\013'] * newline\n      {\n        match int_of_string num with\n        | exception _ ->\n            (* PR#7165 *)\n            let loc = Location.curr lexbuf in\n            let explanation = \"line number out of range\" in\n            let error = Invalid_directive (directive, Some explanation) in\n            raise (Error (error, loc))\n        | line_num ->\n           (* Documentation says that the line number should be\n              positive, but we have never guarded against this and it\n              might have useful hackish uses. 
*)\n            update_loc lexbuf name line_num true 0;\n            token lexbuf\n      }\n  | \"#\"  { HASH }\n  | \"&\"  { AMPERSAND }\n  | \"&&\" { AMPERAMPER }\n  | \"`\"  { BACKQUOTE }\n  | \"\\'\" { QUOTE }\n  | \"(\"  { LPAREN }\n  | \")\"  { RPAREN }\n  | \"*\"  { STAR }\n  | \",\"  { COMMA }\n  | \"->\" { MINUSGREATER }\n  | \".\"  { DOT }\n  | \"..\" { DOTDOT }\n  | \".\" (dotsymbolchar symbolchar* as s) { DOTOP s }\n  | \":\"  { COLON }\n  | \"::\" { COLONCOLON }\n  | \":=\" { COLONEQUAL }\n  | \":>\" { COLONGREATER }\n  | \";\"  { SEMI }\n  | \";;\" { SEMISEMI }\n  | \"<\"  { LESS }\n  | \"<-\" { LESSMINUS }\n  | \"=\"  { EQUAL }\n  | \"[\"  { LBRACKET }\n  | \"[|\" { LBRACKETBAR }\n  | \"[<\" { LBRACKETLESS }\n  | \"[>\" { LBRACKETGREATER }\n  | \"]\"  { RBRACKET }\n  | \"{\"  { LBRACE }\n  | \"{<\" { LBRACELESS }\n  | \"|\"  { BAR }\n  | \"||\" { BARBAR }\n  | \"|]\" { BARRBRACKET }\n  | \">\"  { GREATER }\n  | \">]\" { GREATERRBRACKET }\n  | \"}\"  { RBRACE }\n  | \">}\" { GREATERRBRACE }\n  | \"[@\" { LBRACKETAT }\n  | \"[@@\"  { LBRACKETATAT }\n  | \"[@@@\" { LBRACKETATATAT }\n  | \"[%\"   { LBRACKETPERCENT }\n  | \"[%%\"  { LBRACKETPERCENTPERCENT }\n  | \"!\"  { BANG }\n  | \"!=\" { INFIXOP0 \"!=\" }\n  | \"+\"  { PLUS }\n  | \"+.\" { PLUSDOT }\n  | \"+=\" { PLUSEQ }\n  | \"-\"  { MINUS }\n  | \"-.\" { MINUSDOT }\n\n  | \"!\" symbolchar +\n            { PREFIXOP(Lexing.lexeme lexbuf) }\n  | ['~' '?'] symbolchar +\n            { PREFIXOP(Lexing.lexeme lexbuf) }\n  | ['=' '<' '>' '|' '&' '$'] symbolchar *\n            { INFIXOP0(Lexing.lexeme lexbuf) }\n  | ['@' '^'] symbolchar *\n            { INFIXOP1(Lexing.lexeme lexbuf) }\n  | ['+' '-'] symbolchar *\n            { INFIXOP2(Lexing.lexeme lexbuf) }\n  | \"**\" symbolchar *\n            { INFIXOP4(Lexing.lexeme lexbuf) }\n  | '%'     { PERCENT }\n  | ['*' '/' '%'] symbolchar *\n            { INFIXOP3(Lexing.lexeme lexbuf) }\n  | '#' (symbolchar | '#') +\n            { HASHOP(Lexing.lexeme lexbuf) 
}\n  | eof { Rescript_cpp.eof_check lexbuf; EOF}\n  | _\n      { raise (Error(Illegal_character (Lexing.lexeme_char lexbuf 0),\n                     Location.curr lexbuf))\n      }\n\nand comment = parse\n    \"(*\"\n      { comment_start_loc := (Location.curr lexbuf) :: !comment_start_loc;\n        store_lexeme lexbuf;\n        comment lexbuf\n      }\n  | \"*)\"\n      { match !comment_start_loc with\n        | [] -> assert false\n        | [_] -> comment_start_loc := []; Location.curr lexbuf\n        | _ :: l -> comment_start_loc := l;\n                  store_lexeme lexbuf;\n                  comment lexbuf\n       }\n  | \"\\\"\"\n      {\n        string_start_loc := Location.curr lexbuf;\n        store_string_char '\\\"';\n        is_in_string := true;\n        begin try string lexbuf\n        with Error (Unterminated_string, str_start) ->\n          match !comment_start_loc with\n          | [] -> assert false\n          | loc :: _ ->\n            let start = List.hd (List.rev !comment_start_loc) in\n            comment_start_loc := [];\n            raise (Error (Unterminated_string_in_comment (start, str_start),\n                          loc))\n        end;\n        is_in_string := false;\n        store_string_char '\\\"';\n        comment lexbuf }\n  | \"{\" lowercase* \"|\"\n      {\n        let delim = Lexing.lexeme lexbuf in\n        let delim = String.sub delim 1 (String.length delim - 2) in\n        string_start_loc := Location.curr lexbuf;\n        store_lexeme lexbuf;\n        is_in_string := true;\n        begin try quoted_string delim lexbuf\n        with Error (Unterminated_string, str_start) ->\n          match !comment_start_loc with\n          | [] -> assert false\n          | loc :: _ ->\n            let start = List.hd (List.rev !comment_start_loc) in\n            comment_start_loc := [];\n            raise (Error (Unterminated_string_in_comment (start, str_start),\n                          loc))\n        end;\n        is_in_string := 
false;\n        store_string_char '|';\n        store_string delim;\n        store_string_char '}';\n        comment lexbuf }\n\n  | \"\\'\\'\"\n      { store_lexeme lexbuf; comment lexbuf }\n  | \"\\'\" newline \"\\'\"\n      { update_loc lexbuf None 1 false 1;\n        store_lexeme lexbuf;\n        comment lexbuf\n      }\n  | \"\\'\" [^ '\\\\' '\\'' '\\010' '\\013' ] \"\\'\"\n      { store_lexeme lexbuf; comment lexbuf }\n  | \"\\'\\\\\" ['\\\\' '\\\"' '\\'' 'n' 't' 'b' 'r' ' '] \"\\'\"\n      { store_lexeme lexbuf; comment lexbuf }\n  | \"\\'\\\\\" ['0'-'9'] ['0'-'9'] ['0'-'9'] \"\\'\"\n      { store_lexeme lexbuf; comment lexbuf }\n  | \"\\'\\\\\" 'x' ['0'-'9' 'a'-'f' 'A'-'F'] ['0'-'9' 'a'-'f' 'A'-'F'] \"\\'\"\n      { store_lexeme lexbuf; comment lexbuf }\n  | eof\n      { match !comment_start_loc with\n        | [] -> assert false\n        | loc :: _ ->\n          let start = List.hd (List.rev !comment_start_loc) in\n          comment_start_loc := [];\n          raise (Error (Unterminated_comment start, loc))\n      }\n  | newline\n      { update_loc lexbuf None 1 false 0;\n        store_lexeme lexbuf;\n        comment lexbuf\n      }\n  | _\n      { store_lexeme lexbuf; comment lexbuf }\n\nand string = parse\n    '\\\"'\n      { () }\n  | '\\\\' newline ([' ' '\\t'] * as space)\n      { update_loc lexbuf None 1 false (String.length space);\n        if in_comment () then store_lexeme lexbuf;\n        string lexbuf\n      }\n  | '\\\\' ['\\\\' '\\'' '\\\"' 'n' 't' 'b' 'r' ' ']\n      { store_escaped_char lexbuf\n                           (char_for_backslash(Lexing.lexeme_char lexbuf 1));\n        string lexbuf }\n  | '\\\\' ['0'-'9'] ['0'-'9'] ['0'-'9']\n      { store_escaped_char lexbuf (char_for_decimal_code lexbuf 1);\n         string lexbuf }\n  | '\\\\' 'o' ['0'-'3'] ['0'-'7'] ['0'-'7']\n      { store_escaped_char lexbuf (char_for_octal_code lexbuf 2);\n         string lexbuf }\n  | '\\\\' 'x' ['0'-'9' 'a'-'f' 'A'-'F'] ['0'-'9' 'a'-'f' 'A'-'F']\n      { 
store_escaped_char lexbuf (char_for_hexadecimal_code lexbuf 2);\n         string lexbuf }\n  | '\\\\' 'u' '{' hex_digit+ '}'\n        { store_escaped_uchar lexbuf (uchar_for_uchar_escape lexbuf);\n          string lexbuf }\n  | '\\\\' _\n      { if not (in_comment ()) then begin\n(*  Should be an error, but we are very lax.\n          raise (Error (Illegal_escape (Lexing.lexeme lexbuf),\n                        Location.curr lexbuf))\n*)\n          let loc = Location.curr lexbuf in\n          Location.prerr_warning loc Warnings.Illegal_backslash;\n        end;\n        store_lexeme lexbuf;\n        string lexbuf\n      }\n  | newline\n      { if not (in_comment ()) then\n          Location.prerr_warning (Location.curr lexbuf) Warnings.Eol_in_string;\n        update_loc lexbuf None 1 false 0;\n        store_lexeme lexbuf;\n        string lexbuf\n      }\n  | eof\n      { is_in_string := false;\n        raise (Error (Unterminated_string, !string_start_loc)) }\n  | _\n      { store_string_char(Lexing.lexeme_char lexbuf 0);\n        string lexbuf }\n\nand quoted_string delim = parse\n  | newline\n      { update_loc lexbuf None 1 false 0;\n        store_lexeme lexbuf;\n        quoted_string delim lexbuf\n      }\n  | eof\n      { is_in_string := false;\n        raise (Error (Unterminated_string, !string_start_loc)) }\n  | \"|\" lowercase* \"}\"\n      {\n        let edelim = Lexing.lexeme lexbuf in\n        let edelim = String.sub edelim 1 (String.length edelim - 2) in\n        if delim = edelim then ()\n        else (store_lexeme lexbuf; quoted_string delim lexbuf)\n      }\n  | _\n      { store_string_char(Lexing.lexeme_char lexbuf 0);\n        quoted_string delim lexbuf }\n\nand skip_hash_bang = parse\n  | \"#!\" [^ '\\n']* '\\n' [^ '\\n']* \"\\n!#\\n\"\n       { update_loc lexbuf None 3 false 0 }\n  | \"#!\" [^ '\\n']* '\\n'\n       { update_loc lexbuf None 1 false 0 }\n  | \"\" { () }\n\n{\n  let token_with_comments lexbuf =\n    match !preprocessor with\n    | 
None -> token lexbuf\n    | Some (_init, preprocess) -> preprocess token lexbuf\n\n  type newline_state =\n    | NoLine (* There have been no blank lines yet. *)\n    | NewLine\n        (* There have been no blank lines, and the previous\n           token was a newline. *)\n    | BlankLine (* There have been blank lines. *)\n\n  type doc_state =\n    | Initial  (* There have been no docstrings yet *)\n    | After of docstring list\n        (* There have been docstrings, none of which were\n           preceded by a blank line *)\n    | Before of docstring list * docstring list * docstring list\n        (* There have been docstrings, some of which were\n           preceded by a blank line *)\n\n  and docstring = Docstrings.docstring\n\n  let token lexbuf =\n    let post_pos = lexeme_end_p lexbuf in\n    let attach lines docs pre_pos =\n      let open Docstrings in\n        match docs, lines with\n        | Initial, _ -> ()\n        | After a, (NoLine | NewLine) ->\n            set_post_docstrings post_pos (List.rev a);\n            set_pre_docstrings pre_pos a;\n        | After a, BlankLine ->\n            set_post_docstrings post_pos (List.rev a);\n            set_pre_extra_docstrings pre_pos (List.rev a)\n        | Before(a, f, b), (NoLine | NewLine) ->\n            set_post_docstrings post_pos (List.rev a);\n            set_post_extra_docstrings post_pos\n              (List.rev_append f (List.rev b));\n            set_floating_docstrings pre_pos (List.rev f);\n            set_pre_extra_docstrings pre_pos (List.rev a);\n            set_pre_docstrings pre_pos b\n        | Before(a, f, b), BlankLine ->\n            set_post_docstrings post_pos (List.rev a);\n            set_post_extra_docstrings post_pos\n              (List.rev_append f (List.rev b));\n            set_floating_docstrings pre_pos\n              (List.rev_append f (List.rev b));\n            set_pre_extra_docstrings pre_pos (List.rev a)\n    in\n    let rec loop lines docs lexbuf =\n      match 
token_with_comments lexbuf with\n      | COMMENT (s, loc) ->\n          add_comment (s, loc);\n          let lines' =\n            match lines with\n            | NoLine -> NoLine\n            | NewLine -> NoLine\n            | BlankLine -> BlankLine\n          in\n          loop lines' docs lexbuf\n      | EOL ->\n          let lines' =\n            match lines with\n            | NoLine -> NewLine\n            | NewLine -> BlankLine\n            | BlankLine -> BlankLine\n          in\n          loop lines' docs lexbuf\n      | HASH when Rescript_cpp.at_bol lexbuf -> \n          Rescript_cpp.interpret_directive lexbuf \n            ~cont:(fun lexbuf -> loop lines docs lexbuf)\n            ~token_with_comments\n      | DOCSTRING doc ->\n          Docstrings.register doc;\n          add_docstring_comment doc;\n          let docs' =\n            if Docstrings.docstring_body doc = \"/*\" then\n              match docs with\n              | Initial -> Before([], [doc], [])\n              | After a -> Before (a, [doc], [])\n              | Before(a, f, b) -> Before(a, doc :: b @ f, [])\n            else\n              match docs, lines with\n              | Initial, (NoLine | NewLine) -> After [doc]\n              | Initial, BlankLine -> Before([], [], [doc])\n              | After a, (NoLine | NewLine) -> After (doc :: a)\n              | After a, BlankLine -> Before (a, [], [doc])\n              | Before(a, f, b), (NoLine | NewLine) -> Before(a, f, doc :: b)\n              | Before(a, f, b), BlankLine -> Before(a, b @ f, [doc])\n          in\n          loop NoLine docs' lexbuf\n      | tok ->\n          attach lines docs (lexeme_start_p lexbuf);\n          tok\n    in\n    Rescript_cpp.check_sharp_look_ahead (fun _ -> loop NoLine Initial lexbuf)\n\n  let init () =\n    Rescript_cpp.init ();\n    is_in_string := false;\n    comment_start_loc := [];\n    comment_list := [];\n    match !preprocessor with\n    | None -> ()\n    | Some (init, _preprocess) -> init ()\n\n\n  
let set_preprocessor init preprocess =\n    escaped_newlines := true;\n    preprocessor := Some (init, preprocess)\n\n}\n"
  },
  {
    "path": "analysis/vendor/ml/location.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Lexing\n\nlet absname = ref false\n(* This reference should be in Clflags, but it would create an additional\n   dependency and make bootstrapping Camlp4 more difficult. 
*)\n\ntype t = Warnings.loc = {\n  loc_start: position;\n  loc_end: position;\n  loc_ghost: bool;\n}\n\nlet in_file name =\n  let loc = {pos_fname = name; pos_lnum = 1; pos_bol = 0; pos_cnum = -1} in\n  {loc_start = loc; loc_end = loc; loc_ghost = true}\n\nlet none = in_file \"_none_\"\n\nlet curr lexbuf =\n  {\n    loc_start = lexbuf.lex_start_p;\n    loc_end = lexbuf.lex_curr_p;\n    loc_ghost = false;\n  }\n\nlet init lexbuf fname =\n  lexbuf.lex_curr_p <-\n    {pos_fname = fname; pos_lnum = 1; pos_bol = 0; pos_cnum = 0}\n\nlet symbol_rloc () =\n  {\n    loc_start = Parsing.symbol_start_pos ();\n    loc_end = Parsing.symbol_end_pos ();\n    loc_ghost = false;\n  }\n\nlet symbol_gloc () =\n  {\n    loc_start = Parsing.symbol_start_pos ();\n    loc_end = Parsing.symbol_end_pos ();\n    loc_ghost = true;\n  }\n\nlet rhs_loc n =\n  {\n    loc_start = Parsing.rhs_start_pos n;\n    loc_end = Parsing.rhs_end_pos n;\n    loc_ghost = false;\n  }\n\nlet input_name = ref \"_none_\"\nlet input_lexbuf = ref (None : lexbuf option)\nlet set_input_name name = if name <> \"\" then input_name := name\n(* Terminal info *)\n\nlet num_loc_lines = ref 0 (* number of lines already printed after input *)\n\n(* Print the location in some way or another *)\n\nopen Format\n\nlet absolute_path s =\n  (* This function could go into Filename *)\n  let open Filename in\n  let s = if is_relative s then concat (Sys.getcwd ()) s else s in\n  (* Now simplify . and .. 
components *)\n  let rec aux s =\n    let base = basename s in\n    let dir = dirname s in\n    if dir = s then dir\n    else if base = current_dir_name then aux dir\n    else if base = parent_dir_name then dirname (aux dir)\n    else concat (aux dir) base\n  in\n  aux s\n\nlet show_filename file =\n  let file = if file = \"_none_\" then !input_name else file in\n  if !absname then absolute_path file else file\n\nlet print_filename ppf file = Format.fprintf ppf \"%s\" (show_filename file)\n\nlet reset () = num_loc_lines := 0\n\n(* return file, line, char from the given position *)\nlet get_pos_info pos = (pos.pos_fname, pos.pos_lnum, pos.pos_cnum - pos.pos_bol)\n\nlet setup_colors () =\n  Misc.Color.setup !Clflags.color;\n  Code_frame.setup !Clflags.color\n\n(* ocaml's reported line/col numbering is horrible and super error-prone\n   when being handled programmatically (or humanly for that matter. If you're\n   an ocaml contributor reading this: who the heck reads the character count\n   starting from the first erroring character?) *)\nlet normalize_range loc =\n  (* TODO: lots of the handlings here aren't needed anymore because the new\n     rescript syntax has much stronger invariants regarding positions, e.g.\n     no -1 *)\n  let _, start_line, start_char = get_pos_info loc.loc_start in\n  let _, end_line, end_char = get_pos_info loc.loc_end in\n  (* line is 1-indexed, column is 0-indexed. We convert all of them to 1-indexed to avoid confusion *)\n  (* start_char is inclusive, end_char is exclusive *)\n  if start_char == -1 || end_char == -1 then\n    (* happens sometimes. Syntax error for example *)\n    None\n  else if start_line = end_line && start_char >= end_char then\n    (* in some errors, starting char and ending char can be the same. 
But\n       since ending char was supposed to be exclusive, here it might end up\n       smaller than the starting char if we naively did start_char + 1 to\n       just the starting char and forget ending char *)\n    let same_char = start_char + 1 in\n    Some ((start_line, same_char), (end_line, same_char))\n  else\n    (* again: end_char is exclusive, so +1-1=0 *)\n    Some ((start_line, start_char + 1), (end_line, end_char))\n\nlet print_loc ppf (loc : t) =\n  setup_colors ();\n  let normalized_range = normalize_range loc in\n  let dim_loc ppf = function\n    | None -> ()\n    | Some ((start_line, start_line_start_char), (end_line, end_line_end_char))\n      ->\n      if start_line = end_line then\n        if start_line_start_char = end_line_end_char then\n          fprintf ppf \":@{<dim>%i:%i@}\" start_line start_line_start_char\n        else\n          fprintf ppf \":@{<dim>%i:%i-%i@}\" start_line start_line_start_char\n            end_line_end_char\n      else\n        fprintf ppf \":@{<dim>%i:%i-%i:%i@}\" start_line start_line_start_char\n          end_line end_line_end_char\n  in\n  fprintf ppf \"@{<filename>%a@}%a\" print_filename loc.loc_start.pos_fname\n    dim_loc normalized_range\n\nlet print ?(src = None) ~message_kind intro ppf (loc : t) =\n  (match message_kind with\n  | `warning -> fprintf ppf \"@[@{<info>%s@}@]@,\" intro\n  | `warning_as_error ->\n    fprintf ppf \"@[@{<error>%s@} (configured as error) @]@,\" intro\n  | `error -> fprintf ppf \"@[@{<error>%s@}@]@,\" intro);\n  (* ocaml's reported line/col numbering is horrible and super error-prone\n     when being handled programmatically (or humanly for that matter. If you're\n     an ocaml contributor reading this: who the heck reads the character count\n     starting from the first erroring character?) *)\n  let file, start_line, start_char = get_pos_info loc.loc_start in\n  let _, end_line, end_char = get_pos_info loc.loc_end in\n  (* line is 1-indexed, column is 0-indexed. 
We convert all of them to 1-indexed to avoid confusion *)\n  (* start_char is inclusive, end_char is exclusive *)\n  let normalized_range =\n    (* TODO: lots of the handlings here aren't needed anymore because the new\n       rescript syntax has much stronger invariants regarding positions, e.g.\n       no -1 *)\n    if start_char == -1 || end_char == -1 then\n      (* happens sometimes. Syntax error for example *)\n      None\n    else if start_line = end_line && start_char >= end_char then\n      (* in some errors, starting char and ending char can be the same. But\n         since ending char was supposed to be exclusive, here it might end up\n         smaller than the starting char if we naively did start_char + 1 to\n         just the starting char and forget ending char *)\n      let same_char = start_char + 1 in\n      Some ((start_line, same_char), (end_line, same_char))\n    else\n      (* again: end_char is exclusive, so +1-1=0 *)\n      Some ((start_line, start_char + 1), (end_line, end_char))\n  in\n  fprintf ppf \"  @[%a@]@,\" print_loc loc;\n  match normalized_range with\n  | None -> ()\n  | Some _ -> (\n    try\n      (* Print a syntax error that is a list of Res_diagnostics.t.\n         Instead of reading file for every error, it uses the source that the parser already has. *)\n      let src =\n        match src with\n        | Some src -> src\n        | None -> Ext_io.load_file file\n      in\n      (* we're putting the line break `@,` here rather than above, because this\n         branch might not be reached (aka no inline file content display) so\n         we don't wanna end up with two line breaks in the the consequent *)\n      fprintf ppf \"@,%s\"\n        (Code_frame.print ~is_warning:(message_kind = `warning) ~src\n           ~start_pos:loc.loc_start ~end_pos:loc.loc_end)\n    with\n    (* this might happen if the file is e.g. 
\"\", \"_none_\" or any of the fake file name placeholders.\n       we've already printed the location above, so nothing more to do here. *)\n    | Sys_error _ ->\n      ())\n\nlet error_prefix = \"Error\"\n\nlet print_error_prefix ppf =\n  setup_colors ();\n  fprintf ppf \"@{<error>%s@}\" error_prefix\n\nlet print_compact ppf loc =\n  let file, line, startchar = get_pos_info loc.loc_start in\n  let endchar = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum + startchar in\n  fprintf ppf \"%a:%i\" print_filename file line;\n  if startchar >= 0 then fprintf ppf \",%i--%i\" startchar endchar\n\nlet print_error intro ppf loc =\n  fprintf ppf \"%a%t:\" (print ~message_kind:`error intro) loc print_error_prefix\n\nlet default_warning_printer loc ppf w =\n  match Warnings.report w with\n  | `Inactive -> ()\n  | `Active {Warnings.number = _; message = _; is_error; sub_locs = _} ->\n    setup_colors ();\n    let message_kind = if is_error then `warning_as_error else `warning in\n    Format.fprintf ppf \"@[<v>@,  %a@,  %s@,@]@.\"\n      (print ~message_kind\n         (\"Warning number \" ^ (Warnings.number w |> string_of_int)))\n      loc (Warnings.message w)\n(* at this point, you can display sub_locs too, from e.g. 
https://github.com/ocaml/ocaml/commit/f6d53cc38f87c67fbf49109f5fb79a0334bab17a\n   but we won't bother for now *)\n\nlet warning_printer = ref default_warning_printer\n\nlet print_warning loc ppf w = !warning_printer loc ppf w\n\nlet formatter_for_warnings = ref err_formatter\nlet prerr_warning loc w = print_warning loc !formatter_for_warnings w\n\nlet echo_eof () =\n  print_newline ();\n  incr num_loc_lines\n\ntype 'a loc = {txt: 'a; loc: t}\n\nlet mkloc txt loc = {txt; loc}\nlet mknoloc txt = mkloc txt none\n\ntype error = {\n  loc: t;\n  msg: string;\n  sub: error list;\n  if_highlight: string; (* alternative message if locations are highlighted *)\n}\n\nlet pp_ksprintf ?before k fmt =\n  let buf = Buffer.create 64 in\n  let ppf = Format.formatter_of_buffer buf in\n  Misc.Color.set_color_tag_handling ppf;\n  (match before with\n  | None -> ()\n  | Some f -> f ppf);\n  kfprintf\n    (fun _ ->\n      pp_print_flush ppf ();\n      let msg = Buffer.contents buf in\n      k msg)\n    ppf fmt\n\n(* taken from https://github.com/rescript-lang/ocaml/blob/d4144647d1bf9bc7dc3aadc24c25a7efa3a67915/parsing/location.ml#L354 *)\n(* Shift the formatter's offset by the length of the error prefix, which\n   is always added by the compiler after the message has been formatted *)\nlet print_phanton_error_prefix ppf =\n  (* modified from the original. 
We use only 2 indentations for error report\n     (see super_error_reporter above) *)\n  Format.pp_print_as ppf 2 \"\"\n\nlet errorf ?(loc = none) ?(sub = []) ?(if_highlight = \"\") fmt =\n  pp_ksprintf ~before:print_phanton_error_prefix\n    (fun msg -> {loc; msg; sub; if_highlight})\n    fmt\n\nlet error ?(loc = none) ?(sub = []) ?(if_highlight = \"\") msg =\n  {loc; msg; sub; if_highlight}\n\nlet error_of_exn : (exn -> error option) list ref = ref []\n\nlet register_error_of_exn f = error_of_exn := f :: !error_of_exn\n\nexception Already_displayed_error = Warnings.Errors\n\nlet error_of_exn exn =\n  match exn with\n  | Already_displayed_error -> Some `Already_displayed\n  | _ ->\n    let rec loop = function\n      | [] -> None\n      | f :: rest -> (\n        match f exn with\n        | Some error -> Some (`Ok error)\n        | None -> loop rest)\n    in\n    loop !error_of_exn\n\n(* taken from https://github.com/rescript-lang/ocaml/blob/d4144647d1bf9bc7dc3aadc24c25a7efa3a67915/parsing/location.ml#L380 *)\n(* This is the error report entry point. We'll replace the default reporter with this one. *)\nlet rec default_error_reporter ?(src = None) ppf {loc; msg; sub} =\n  setup_colors ();\n  (* open a vertical box. Everything in our message is indented 2 spaces *)\n  (* If src is given, it will display a syntax error after parsing. 
*)\n  let intro =\n    match src with\n    | Some _ -> \"Syntax error!\"\n    | None -> \"We've found a bug for you!\"\n  in\n  Format.fprintf ppf \"@[<v>@,  %a@,  %s@,@]\"\n    (print ~src ~message_kind:`error intro)\n    loc msg;\n  List.iter (Format.fprintf ppf \"@,@[%a@]\" (default_error_reporter ~src)) sub\n(* no need to flush here; location's report_exception (which uses this ultimately) flushes *)\n\nlet error_reporter = ref default_error_reporter\n\nlet report_error ?(src = None) ppf err = !error_reporter ~src ppf err\n\nlet error_of_printer loc print x = errorf ~loc \"%a@?\" print x\n\nlet error_of_printer_file print x =\n  error_of_printer (in_file !input_name) print x\n\nlet () =\n  register_error_of_exn (function\n    | Sys_error msg ->\n      Some (errorf ~loc:(in_file !input_name) \"I/O error: %s\" msg)\n    | Misc.HookExnWrapper {error = e; hook_name; hook_info = {Misc.sourcefile}}\n      ->\n      let sub =\n        match error_of_exn e with\n        | None | Some `Already_displayed -> error (Printexc.to_string e)\n        | Some (`Ok err) -> err\n      in\n      Some (errorf ~loc:(in_file sourcefile) \"In hook %S:\" hook_name ~sub:[sub])\n    | _ -> None)\n\nexternal reraise : exn -> 'a = \"%reraise\"\n\nlet rec report_exception_rec n ppf exn =\n  try\n    match error_of_exn exn with\n    | None -> reraise exn\n    | Some `Already_displayed -> ()\n    | Some (`Ok err) -> fprintf ppf \"@[%a@]@.\" (report_error ~src:None) err\n  with exn when n > 0 -> report_exception_rec (n - 1) ppf exn\n\nlet report_exception ppf exn = report_exception_rec 5 ppf exn\n\nexception Error of error\n\nlet () =\n  register_error_of_exn (function\n    | Error e -> Some e\n    | _ -> None)\n\nlet raise_errorf ?(loc = none) ?(sub = []) ?(if_highlight = \"\") =\n  pp_ksprintf ~before:print_phanton_error_prefix (fun msg ->\n      raise (Error {loc; msg; sub; if_highlight}))\n\nlet deprecated ?(def = none) ?(use = none) loc msg =\n  prerr_warning loc (Warnings.Deprecated (msg, 
def, use))\n"
  },
  {
    "path": "analysis/vendor/ml/location.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Source code locations (ranges of positions), used in parsetree. *)\n\nopen Format\n\ntype t = Warnings.loc = {\n  loc_start: Lexing.position;\n  loc_end: Lexing.position;\n  loc_ghost: bool;\n}\n\n(** Note on the use of Lexing.position in this module. If [pos_fname = \"\"], then\n    use [!input_name] instead. If [pos_lnum = -1], then [pos_bol = 0]. Use\n    [pos_cnum] and re-parse the file to get the line and character numbers. Else\n    all fields are correct. *)\n\nval none : t\n(** An arbitrary value of type [t]; describes an empty ghost range. *)\n\nval in_file : string -> t\n(** Return an empty ghost range located in a given file. *)\n\nval init : Lexing.lexbuf -> string -> unit\n(** Set the file name and line number of the [lexbuf] to be the start of the\n    named file. 
*)\n\nval curr : Lexing.lexbuf -> t\n(** Get the location of the current token from the [lexbuf]. *)\n\nval symbol_rloc : unit -> t\nval symbol_gloc : unit -> t\n\nval rhs_loc : int -> t\n(** [rhs_loc n] returns the location of the symbol at position [n], starting at\n    1, in the current parser rule. *)\n\nval input_name : string ref\nval set_input_name : string -> unit\nval input_lexbuf : Lexing.lexbuf option ref\n\nval get_pos_info : Lexing.position -> string * int * int (* file, line, char *)\nval print_loc : formatter -> t -> unit\nval print_error : tag -> formatter -> t -> unit\n\nval prerr_warning : t -> Warnings.t -> unit\nval echo_eof : unit -> unit\nval reset : unit -> unit\n\nval warning_printer : (t -> formatter -> Warnings.t -> unit) ref\n(** Hook for intercepting warnings. *)\n\nval formatter_for_warnings : formatter ref\n\nval default_warning_printer : t -> formatter -> Warnings.t -> unit\n(** Original warning printer for use in hooks. *)\n\ntype 'a loc = {txt: 'a; loc: t}\n\nval mknoloc : 'a -> 'a loc\nval mkloc : 'a -> t -> 'a loc\n\nval print :\n  ?src:string option ->\n  message_kind:[< `error | `warning | `warning_as_error > `warning] ->\n  string ->\n  formatter ->\n  t ->\n  unit\nval print_compact : formatter -> t -> unit\nval print_filename : formatter -> string -> unit\n\nval absolute_path : string -> string\n\nval show_filename : string -> string\n(** In -absname mode, return the absolute path for this filename. Otherwise,\n    returns the filename unchanged. 
*)\n\nval absname : bool ref\n\n(** Support for located errors *)\n\ntype error = {\n  loc: t;\n  msg: string;\n  sub: error list;\n  if_highlight: string; (* alternative message if locations are highlighted *)\n}\n\nexception Already_displayed_error\nexception Error of error\n\nval error : ?loc:t -> ?sub:error list -> ?if_highlight:string -> string -> error\n\nval print_error_prefix : Format.formatter -> unit\nval pp_ksprintf :\n  ?before:(formatter -> unit) ->\n  (string -> 'a) ->\n  ('b, formatter, unit, 'a) format4 ->\n  'b\n\nval errorf :\n  ?loc:t ->\n  ?sub:error list ->\n  ?if_highlight:string ->\n  ('a, Format.formatter, unit, error) format4 ->\n  'a\n\nval raise_errorf :\n  ?loc:t ->\n  ?sub:error list ->\n  ?if_highlight:string ->\n  ('a, Format.formatter, unit, 'b) format4 ->\n  'a\n\nval error_of_printer : t -> (formatter -> 'a -> unit) -> 'a -> error\n\nval error_of_printer_file : (formatter -> 'a -> unit) -> 'a -> error\n\nval error_of_exn : exn -> [`Ok of error | `Already_displayed] option\n\nval register_error_of_exn : (exn -> error option) -> unit\n(** Each compiler module which defines a custom type of exception which can\n    surface as a user-visible error should register a \"printer\" for this\n    exception using [register_error_of_exn]. The result of the printer is an\n    [error] value containing a location, a message, and optionally sub-messages\n    (each of them being located as well). *)\n\nval report_error : ?src:string option -> formatter -> error -> unit\n\nval error_reporter : (?src:string option -> formatter -> error -> unit) ref\n(** Hook for intercepting error reports. *)\n\nval default_error_reporter : ?src:string option -> formatter -> error -> unit\n(** Original error reporter for use in hooks. *)\n\nval report_exception : formatter -> exn -> unit\n(** Reraise the exception if it is unknown. *)\n\nval deprecated : ?def:t -> ?use:t -> t -> string -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/longident.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype t = Lident of string | Ldot of t * string | Lapply of t * t\nlet rec cmp : t -> t -> int =\n fun a b ->\n  if a == b then 0\n  else\n    match (a, b) with\n    | Lident a, Lident b -> compare a b\n    | Lident _, _ -> -1\n    | _, Lident _ -> 1\n    | Ldot (a, b), Ldot (c, d) -> (\n      match cmp a c with\n      | 0 -> compare b d\n      | n -> n)\n    | Ldot _, _ -> -1\n    | _, Ldot _ -> 1\n    | Lapply (a, b), Lapply (c, d) -> (\n      match cmp a c with\n      | 0 -> cmp b d\n      | n -> n)\n\nlet rec flat accu = function\n  | Lident s -> s :: accu\n  | Ldot (lid, s) -> flat (s :: accu) lid\n  | Lapply (_, _) -> Misc.fatal_error \"Longident.flat\"\n\nlet flatten lid = flat [] lid\n\nlet last = function\n  | Lident s -> s\n  | Ldot (_, s) -> s\n  | Lapply (_, _) -> Misc.fatal_error \"Longident.last\"\n\nlet rec split_at_dots s pos =\n  try\n    let dot = 
String.index_from s pos '.' in\n    String.sub s pos (dot - pos) :: split_at_dots s (dot + 1)\n  with Not_found -> [String.sub s pos (String.length s - pos)]\n\nlet unflatten l =\n  match l with\n  | [] -> None\n  | hd :: tl -> Some (List.fold_left (fun p s -> Ldot (p, s)) (Lident hd) tl)\n\nlet parse s =\n  match unflatten (split_at_dots s 0) with\n  | None ->\n    Lident \"\"\n    (* should not happen, but don't put assert false\n       so as not to crash the toplevel (see Genprintval) *)\n  | Some v -> v\n"
  },
  {
    "path": "analysis/vendor/ml/longident.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Long identifiers, used in parsetree. *)\n\ntype t = Lident of string | Ldot of t * string | Lapply of t * t\n\nval cmp : t -> t -> int\nval flatten : t -> string list\nval unflatten : string list -> t option\nval last : t -> string\nval parse : string -> t\n"
  },
  {
    "path": "analysis/vendor/ml/matching.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Compilation of pattern matching *)\n\nopen Misc\nopen Asttypes\nopen Types\nopen Typedtree\nopen Lambda\nopen Parmatch\nopen Printf\n\nlet dbg = false\n\n(* See Peyton-Jones, ``The Implementation of functional programming\n   languages'', chapter 5. 
*)\n(*\n  Well, it was true at the beginning of the world.\n  Now, see Lefessant-Maranget ``Optimizing Pattern-Matching'' ICFP'2001\n*)\n\n(*\n   Compatibility predicate that considers potential rebindings of constructors\n   of an extension type.\n\n   \"may_compat p q\" returns false when p and q never admit a common instance;\n   returns true when they may have a common instance.\n*)\n\nmodule MayCompat = Parmatch.Compat (struct\n  let equal = Types.may_equal_constr\nend)\nlet may_compat = MayCompat.compat\n\nand may_compats = MayCompat.compats\n\n(*\n   Many functions on the various data structures of the algorithm :\n     - Pattern matrices.\n     - Default environments: mapping from matrices to exit numbers.\n     - Contexts:  matrices whose column are partitioned into\n       left and right.\n     - Jump summaries: mapping from exit numbers to contexts\n*)\n\nlet string_of_lam lam =\n  Printlambda.lambda Format.str_formatter lam;\n  Format.flush_str_formatter ()\n\ntype matrix = pattern list list\n\nlet add_omega_column pss = List.map (fun ps -> omega :: ps) pss\n\ntype ctx = {left: pattern list; right: pattern list}\n\nlet pretty_ctx ctx =\n  List.iter\n    (fun {left; right} ->\n      prerr_string \"LEFT:\";\n      pretty_line left;\n      prerr_string \" RIGHT:\";\n      pretty_line right;\n      prerr_endline \"\")\n    ctx\n\nlet le_ctx c1 c2 = le_pats c1.left c2.left && le_pats c1.right c2.right\n\nlet lshift {left; right} =\n  match right with\n  | x :: xs -> {left = x :: left; right = xs}\n  | _ -> assert false\n\nlet lforget {left; right} =\n  match right with\n  | _ :: xs -> {left = omega :: left; right = xs}\n  | _ -> assert false\n\nlet rec small_enough n = function\n  | [] -> true\n  | _ :: rem -> if n <= 0 then false else small_enough (n - 1) rem\n\nlet ctx_lshift ctx =\n  if small_enough 31 ctx then List.map lshift ctx\n  else (* Context pruning *) get_mins le_ctx (List.map lforget ctx)\n\nlet rshift {left; right} =\n  match left with\n  | p 
:: ps -> {left = ps; right = p :: right}\n  | _ -> assert false\n\nlet ctx_rshift ctx = List.map rshift ctx\n\nlet rec nchars n ps =\n  if n <= 0 then ([], ps)\n  else\n    match ps with\n    | p :: rem ->\n      let chars, cdrs = nchars (n - 1) rem in\n      (p :: chars, cdrs)\n    | _ -> assert false\n\nlet rshift_num n {left; right} =\n  let shifted, left = nchars n left in\n  {left; right = shifted @ right}\n\nlet ctx_rshift_num n ctx = List.map (rshift_num n) ctx\n\n(* Recombination of contexts (eg: (_,_)::p1::p2::rem ->  (p1,p2)::rem)\n   All mutable fields are replaced by '_', since side-effects in\n   guards can alter these fields *)\n\nlet combine {left; right} =\n  match left with\n  | p :: ps -> {left = ps; right = set_args_erase_mutable p right}\n  | _ -> assert false\n\nlet ctx_combine ctx = List.map combine ctx\n\nlet ncols = function\n  | [] -> 0\n  | ps :: _ -> List.length ps\n\nexception NoMatch\nexception OrPat\n\nlet filter_matrix matcher pss =\n  let rec filter_rec = function\n    | (p :: ps) :: rem -> (\n      match p.pat_desc with\n      | Tpat_alias (p, _, _) -> filter_rec ((p :: ps) :: rem)\n      | Tpat_var _ -> filter_rec ((omega :: ps) :: rem)\n      | _ -> (\n        let rem = filter_rec rem in\n        try matcher p ps :: rem with\n        | NoMatch -> rem\n        | OrPat -> (\n          match p.pat_desc with\n          | Tpat_or (p1, p2, _) -> filter_rec [p1 :: ps; p2 :: ps] @ rem\n          | _ -> assert false)))\n    | [] -> []\n    | _ ->\n      pretty_matrix pss;\n      fatal_error \"Matching.filter_matrix\"\n  in\n  filter_rec pss\n\nlet make_default matcher env =\n  let rec make_rec = function\n    | [] -> []\n    | ([[]], i) :: _ -> [([[]], i)]\n    | (pss, i) :: rem -> (\n      let rem = make_rec rem in\n      match filter_matrix matcher pss with\n      | [] -> rem\n      | [] :: _ -> ([[]], i) :: rem\n      | pss -> (pss, i) :: rem)\n  in\n  make_rec env\n\nlet ctx_matcher p =\n  let p = normalize_pat p in\n  match p.pat_desc 
with\n  | Tpat_construct (_, cstr, omegas) -> (\n    fun q rem ->\n      match q.pat_desc with\n      | Tpat_construct (_, cstr', args)\n      (* NB:  may_constr_equal considers (potential) constructor rebinding *)\n        when Types.may_equal_constr cstr cstr' ->\n        (p, args @ rem)\n      | Tpat_any -> (p, omegas @ rem)\n      | _ -> raise NoMatch)\n  | Tpat_constant cst -> (\n    fun q rem ->\n      match q.pat_desc with\n      | Tpat_constant cst' when const_compare cst cst' = 0 -> (p, rem)\n      | Tpat_any -> (p, rem)\n      | _ -> raise NoMatch)\n  | Tpat_variant (lab, Some omega, _) -> (\n    fun q rem ->\n      match q.pat_desc with\n      | Tpat_variant (lab', Some arg, _) when lab = lab' -> (p, arg :: rem)\n      | Tpat_any -> (p, omega :: rem)\n      | _ -> raise NoMatch)\n  | Tpat_variant (lab, None, _) -> (\n    fun q rem ->\n      match q.pat_desc with\n      | Tpat_variant (lab', None, _) when lab = lab' -> (p, rem)\n      | Tpat_any -> (p, rem)\n      | _ -> raise NoMatch)\n  | Tpat_array omegas -> (\n    let len = List.length omegas in\n    fun q rem ->\n      match q.pat_desc with\n      | Tpat_array args when List.length args = len -> (p, args @ rem)\n      | Tpat_any -> (p, omegas @ rem)\n      | _ -> raise NoMatch)\n  | Tpat_tuple omegas -> (\n    let len = List.length omegas in\n    fun q rem ->\n      match q.pat_desc with\n      | Tpat_tuple args when List.length args = len -> (p, args @ rem)\n      | Tpat_any -> (p, omegas @ rem)\n      | _ -> raise NoMatch)\n  | Tpat_record (((_, lbl, _) :: _ as l), _) -> (\n    (* Records are normalized *)\n    let len = Array.length lbl.lbl_all in\n    fun q rem ->\n      match q.pat_desc with\n      | Tpat_record (((_, lbl', _) :: _ as l'), _)\n        when Array.length lbl'.lbl_all = len ->\n        let l' = all_record_args l' in\n        (p, List.fold_right (fun (_, _, p) r -> p :: r) l' rem)\n      | Tpat_any -> (p, List.fold_right (fun (_, _, p) r -> p :: r) l rem)\n      | _ -> raise 
NoMatch)\n  | Tpat_lazy omega -> (\n    fun q rem ->\n      match q.pat_desc with\n      | Tpat_lazy arg -> (p, arg :: rem)\n      | Tpat_any -> (p, omega :: rem)\n      | _ -> raise NoMatch)\n  | _ -> fatal_error \"Matching.ctx_matcher\"\n\nlet filter_ctx q ctx =\n  let matcher = ctx_matcher q in\n\n  let rec filter_rec = function\n    | ({right = p :: ps} as l) :: rem -> (\n      match p.pat_desc with\n      | Tpat_or (p1, p2, _) ->\n        filter_rec\n          ({l with right = p1 :: ps} :: {l with right = p2 :: ps} :: rem)\n      | Tpat_alias (p, _, _) -> filter_rec ({l with right = p :: ps} :: rem)\n      | Tpat_var _ -> filter_rec ({l with right = omega :: ps} :: rem)\n      | _ -> (\n        let rem = filter_rec rem in\n        try\n          let to_left, right = matcher p ps in\n          {left = to_left :: l.left; right} :: rem\n        with NoMatch -> rem))\n    | [] -> []\n    | _ -> fatal_error \"Matching.filter_ctx\"\n  in\n\n  filter_rec ctx\n\nlet select_columns pss ctx =\n  let n = ncols pss in\n  List.fold_right\n    (fun ps r ->\n      List.fold_right\n        (fun {left; right} r ->\n          let transfert, right = nchars n right in\n          try {left = lubs transfert ps @ left; right} :: r with Empty -> r)\n        ctx r)\n    pss []\n\nlet ctx_lub p ctx =\n  List.fold_right\n    (fun {left; right} r ->\n      match right with\n      | q :: rem -> ( try {left; right = lub p q :: rem} :: r with Empty -> r)\n      | _ -> fatal_error \"Matching.ctx_lub\")\n    ctx []\n\nlet ctx_match ctx pss =\n  List.exists\n    (fun {right = qs} -> List.exists (fun ps -> may_compats qs ps) pss)\n    ctx\n\ntype jumps = (int * ctx list) list\n\nlet pretty_jumps (env : jumps) =\n  match env with\n  | [] -> ()\n  | _ ->\n    List.iter\n      (fun (i, ctx) ->\n        Printf.fprintf stderr \"jump for %d\\n\" i;\n        pretty_ctx ctx)\n      env\n\nlet rec jumps_extract (i : int) = function\n  | [] -> ([], [])\n  | ((j, pss) as x) :: rem as all ->\n    if i = j 
then (pss, rem)\n    else if j < i then ([], all)\n    else\n      let r, rem = jumps_extract i rem in\n      (r, x :: rem)\n\nlet rec jumps_remove (i : int) = function\n  | [] -> []\n  | (j, _) :: rem when i = j -> rem\n  | x :: rem -> x :: jumps_remove i rem\n\nlet jumps_empty = []\n\nand jumps_is_empty = function\n  | [] -> true\n  | _ -> false\n\nlet jumps_singleton i = function\n  | [] -> []\n  | ctx -> [(i, ctx)]\n\nlet jumps_add i pss jumps =\n  match pss with\n  | [] -> jumps\n  | _ ->\n    let rec add = function\n      | [] -> [(i, pss)]\n      | ((j, qss) as x) :: rem as all ->\n        if (j : int) > i then x :: add rem\n        else if j < i then (i, pss) :: all\n        else (i, get_mins le_ctx (pss @ qss)) :: rem\n    in\n    add jumps\n\nlet rec jumps_union (env1 : (int * ctx list) list) env2 =\n  match (env1, env2) with\n  | [], _ -> env2\n  | _, [] -> env1\n  | ((i1, pss1) as x1) :: rem1, ((i2, pss2) as x2) :: rem2 ->\n    if i1 = i2 then (i1, get_mins le_ctx (pss1 @ pss2)) :: jumps_union rem1 rem2\n    else if i1 > i2 then x1 :: jumps_union rem1 env2\n    else x2 :: jumps_union env1 rem2\n\nlet rec merge = function\n  | env1 :: env2 :: rem -> jumps_union env1 env2 :: merge rem\n  | envs -> envs\n\nlet rec jumps_unions envs =\n  match envs with\n  | [] -> []\n  | [env] -> env\n  | _ -> jumps_unions (merge envs)\n\nlet jumps_map f env = List.map (fun (i, pss) -> (i, f pss)) env\n\n(* Pattern matching before any compilation *)\n\ntype pattern_matching = {\n  mutable cases: (pattern list * lambda) list;\n  args: (lambda * let_kind) list;\n  default: (matrix * int) list;\n}\n\n(* Pattern matching after application of both the or-pat rule and the\n   mixture rule *)\n\ntype pm_or_compiled = {\n  body: pattern_matching;\n  handlers: (matrix * int * Ident.t list * pattern_matching) list;\n  or_matrix: matrix;\n}\n\ntype pm_half_compiled =\n  | PmOr of pm_or_compiled\n  | PmVar of pm_var_compiled\n  | Pm of pattern_matching\n\nand pm_var_compiled = 
{inside: pm_half_compiled; var_arg: lambda}\n\ntype pm_half_compiled_info = {\n  me: pm_half_compiled;\n  matrix: matrix;\n  top_default: (matrix * int) list;\n}\n\nlet pretty_cases cases =\n  List.iter\n    (fun (ps, _l) ->\n      List.iter\n        (fun p ->\n          Parmatch.top_pretty Format.str_formatter p;\n          prerr_string \" \";\n          prerr_string (Format.flush_str_formatter ()))\n        ps;\n      (*\n      prerr_string \" -> \" ;\n      Printlambda.lambda Format.str_formatter l ;\n      prerr_string (Format.flush_str_formatter ()) ;\n*)\n      prerr_endline \"\")\n    cases\n\nlet pretty_def def =\n  prerr_endline \"+++++ Defaults +++++\";\n  List.iter\n    (fun (pss, i) ->\n      Printf.fprintf stderr \"Matrix for %d\\n\" i;\n      pretty_matrix pss)\n    def;\n  prerr_endline \"+++++++++++++++++++++\"\n\nlet pretty_pm pm =\n  pretty_cases pm.cases;\n  if pm.default <> [] then pretty_def pm.default\n\nlet rec pretty_precompiled = function\n  | Pm pm ->\n    prerr_endline \"++++ PM ++++\";\n    pretty_pm pm\n  | PmVar x ->\n    prerr_endline \"++++ VAR ++++\";\n    pretty_precompiled x.inside\n  | PmOr x ->\n    prerr_endline \"++++ OR ++++\";\n    pretty_pm x.body;\n    pretty_matrix x.or_matrix;\n    List.iter\n      (fun (_, i, _, pm) ->\n        eprintf \"++ Handler %d ++\\n\" i;\n        pretty_pm pm)\n      x.handlers\n\nlet pretty_precompiled_res first nexts =\n  pretty_precompiled first;\n  List.iter\n    (fun (e, pmh) ->\n      eprintf \"** DEFAULT %d **\\n\" e;\n      pretty_precompiled pmh)\n    nexts\n\n(* Identifying some semantically equivalent lambda-expressions,\n   Our goal here is also to\n   find alpha-equivalent (simple) terms *)\n\n(* However, as shown by PR#6359 such sharing may hinders the\n   lambda-code invariant that all bound idents are unique,\n   when switches are compiled to test sequences.\n   The definitive fix is the systematic introduction of exit/catch\n   in case action sharing is present.\n*)\n\nmodule 
StoreExp = Switch.Store (struct\n  type t = lambda\n  type key = lambda\n  let compare_key = compare\n  let make_key = Lambda.make_key\nend)\n\nlet make_exit i = Lstaticraise (i, [])\n\n(* Introduce a catch, if worth it *)\nlet make_catch d k =\n  match d with\n  | Lstaticraise (_, []) -> k d\n  | _ ->\n    let e = next_raise_count () in\n    Lstaticcatch (k (make_exit e), (e, []), d)\n\n(* Introduce a catch, if worth it, delayed version *)\nlet rec as_simple_exit = function\n  | Lstaticraise (i, []) -> Some i\n  | Llet (Alias, _k, _, _, e) -> as_simple_exit e\n  | _ -> None\n\nlet make_catch_delayed handler =\n  match as_simple_exit handler with\n  | Some i -> (i, fun act -> act)\n  | None -> (\n    let i = next_raise_count () in\n    (*\n    Printf.eprintf \"SHARE LAMBDA: %i\\n%s\\n\" i (string_of_lam handler);\n*)\n    ( i,\n      fun body ->\n        match body with\n        | Lstaticraise (j, _) -> if i = j then handler else body\n        | _ -> Lstaticcatch (body, (i, []), handler) ))\n\nlet raw_action l =\n  match make_key l with\n  | Some l -> l\n  | None -> l\n\nlet tr_raw act =\n  match make_key act with\n  | Some act -> act\n  | None -> raise Exit\n\nlet same_actions = function\n  | [] -> None\n  | [(_, act)] -> Some act\n  | (_, act0) :: rem -> (\n    try\n      let raw_act0 = tr_raw act0 in\n      let rec s_rec = function\n        | [] -> Some act0\n        | (_, act) :: rem -> if raw_act0 = tr_raw act then s_rec rem else None\n      in\n      s_rec rem\n    with Exit -> None)\n\n(* Test for swapping two clauses *)\n\nlet up_ok_action act1 act2 =\n  try\n    let raw1 = tr_raw act1 and raw2 = tr_raw act2 in\n    raw1 = raw2\n  with Exit -> false\n\nlet up_ok (ps, act_p) l =\n  List.for_all\n    (fun (qs, act_q) -> up_ok_action act_p act_q || not (may_compats ps qs))\n    l\n\n(*\n   The simplify function normalizes the first column of the match\n     - records are expanded so that they possess all fields\n     - aliases are removed and replaced by 
bindings in actions.\n   However or-patterns are simplified differently,\n     - aliases are not removed\n     - or-patterns (_|p) are changed into _\n*)\n\nexception Var of pattern\n\nlet simplify_or p =\n  let rec simpl_rec p =\n    match p with\n    | {pat_desc = Tpat_any | Tpat_var _} -> raise (Var p)\n    | {pat_desc = Tpat_alias (q, id, s)} -> (\n      try {p with pat_desc = Tpat_alias (simpl_rec q, id, s)}\n      with Var q -> raise (Var {p with pat_desc = Tpat_alias (q, id, s)}))\n    | {pat_desc = Tpat_or (p1, p2, o)} -> (\n      let q1 = simpl_rec p1 in\n      try\n        let q2 = simpl_rec p2 in\n        {p with pat_desc = Tpat_or (q1, q2, o)}\n      with Var q2 -> raise (Var {p with pat_desc = Tpat_or (q1, q2, o)}))\n    | {pat_desc = Tpat_record (lbls, closed)} ->\n      let all_lbls = all_record_args lbls in\n      {p with pat_desc = Tpat_record (all_lbls, closed)}\n    | _ -> p\n  in\n  try simpl_rec p with Var p -> p\n\nlet simplify_cases args cls =\n  match args with\n  | [] -> assert false\n  | (arg, _) :: _ ->\n    let rec simplify = function\n      | [] -> []\n      | ((pat :: patl, action) as cl) :: rem -> (\n        match pat.pat_desc with\n        | Tpat_var (id, _) ->\n          (omega :: patl, bind Alias id arg action) :: simplify rem\n        | Tpat_any -> cl :: simplify rem\n        | Tpat_alias (p, id, _) ->\n          simplify ((p :: patl, bind Alias id arg action) :: rem)\n        | Tpat_record ([], _) -> (omega :: patl, action) :: simplify rem\n        | Tpat_record (lbls, closed) ->\n          let all_lbls = all_record_args lbls in\n          let full_pat = {pat with pat_desc = Tpat_record (all_lbls, closed)} in\n          (full_pat :: patl, action) :: simplify rem\n        | Tpat_or _ -> (\n          let pat_simple = simplify_or pat in\n          match pat_simple.pat_desc with\n          | Tpat_or _ -> (pat_simple :: patl, action) :: simplify rem\n          | _ -> simplify ((pat_simple :: patl, action) :: rem))\n        | _ -> cl 
:: simplify rem)\n      | _ -> assert false\n    in\n\n    simplify cls\n\n(* Once matchings are simplified one can easily find\n   their nature *)\n\nlet rec what_is_cases cases =\n  match cases with\n  | ({pat_desc = Tpat_any} :: _, _) :: rem -> what_is_cases rem\n  | ({pat_desc = Tpat_var _ | Tpat_or (_, _, _) | Tpat_alias (_, _, _)} :: _, _)\n    :: _ ->\n    assert false (* applies to simplified matchings only *)\n  | (p :: _, _) :: _ -> p\n  | [] -> omega\n  | _ -> assert false\n\n(* A few operations on default environments *)\nlet as_matrix cases = get_mins le_pats (List.map (fun (ps, _) -> ps) cases)\n\nlet cons_default matrix raise_num default =\n  match matrix with\n  | [] -> default\n  | _ -> (matrix, raise_num) :: default\n\nlet default_compat p def =\n  List.fold_right\n    (fun (pss, i) r ->\n      let qss =\n        List.fold_right\n          (fun qs r ->\n            match qs with\n            | q :: rem when may_compat p q -> rem :: r\n            | _ -> r)\n          pss []\n      in\n      match qss with\n      | [] -> r\n      | _ -> (qss, i) :: r)\n    def []\n\n(* Or-pattern expansion, variables are a complication w.r.t. 
the article *)\nlet rec extract_vars r p =\n  match p.pat_desc with\n  | Tpat_var (id, _) -> IdentSet.add id r\n  | Tpat_alias (p, id, _) -> extract_vars (IdentSet.add id r) p\n  | Tpat_tuple pats -> List.fold_left extract_vars r pats\n  | Tpat_record (lpats, _) ->\n    List.fold_left (fun r (_, _, p) -> extract_vars r p) r lpats\n  | Tpat_construct (_, _, pats) -> List.fold_left extract_vars r pats\n  | Tpat_array pats -> List.fold_left extract_vars r pats\n  | Tpat_variant (_, Some p, _) -> extract_vars r p\n  | Tpat_lazy p -> extract_vars r p\n  | Tpat_or (p, _, _) -> extract_vars r p\n  | Tpat_constant _ | Tpat_any | Tpat_variant (_, None, _) -> r\n\nexception Cannot_flatten\n\nlet mk_alpha_env arg aliases ids =\n  List.map\n    (fun id ->\n      ( id,\n        if List.mem id aliases then\n          match arg with\n          | Some v -> v\n          | _ -> raise Cannot_flatten\n        else Ident.create (Ident.name id) ))\n    ids\n\nlet rec explode_or_pat arg patl mk_action rem vars aliases = function\n  | {pat_desc = Tpat_or (p1, p2, _)} ->\n    explode_or_pat arg patl mk_action\n      (explode_or_pat arg patl mk_action rem vars aliases p2)\n      vars aliases p1\n  | {pat_desc = Tpat_alias (p, id, _)} ->\n    explode_or_pat arg patl mk_action rem vars (id :: aliases) p\n  | {pat_desc = Tpat_var (x, _)} ->\n    let env = mk_alpha_env arg (x :: aliases) vars in\n    (omega :: patl, mk_action (List.map snd env)) :: rem\n  | p ->\n    let env = mk_alpha_env arg aliases vars in\n    (alpha_pat env p :: patl, mk_action (List.map snd env)) :: rem\n\nlet pm_free_variables {cases} =\n  List.fold_right\n    (fun (_, act) r -> IdentSet.union (free_variables act) r)\n    cases IdentSet.empty\n\n(* Basic grouping predicates *)\nlet pat_as_constr = function\n  | {pat_desc = Tpat_construct (_, cstr, _)} -> cstr\n  | _ -> fatal_error \"Matching.pat_as_constr\"\n\nlet group_constant = function\n  | {pat_desc = Tpat_constant _} -> true\n  | _ -> false\n\nand group_constructor 
= function\n  | {pat_desc = Tpat_construct (_, _, _)} -> true\n  | _ -> false\n\nand group_variant = function\n  | {pat_desc = Tpat_variant (_, _, _)} -> true\n  | _ -> false\n\nand group_var = function\n  | {pat_desc = Tpat_any} -> true\n  | _ -> false\n\nand group_tuple = function\n  | {pat_desc = Tpat_tuple _ | Tpat_any} -> true\n  | _ -> false\n\nand group_record = function\n  | {pat_desc = Tpat_record _ | Tpat_any} -> true\n  | _ -> false\n\nand group_array = function\n  | {pat_desc = Tpat_array _} -> true\n  | _ -> false\n\nand group_lazy = function\n  | {pat_desc = Tpat_lazy _} -> true\n  | _ -> false\n\nlet get_group p =\n  match p.pat_desc with\n  | Tpat_any -> group_var\n  | Tpat_constant _ -> group_constant\n  | Tpat_construct _ -> group_constructor\n  | Tpat_tuple _ -> group_tuple\n  | Tpat_record _ -> group_record\n  | Tpat_array _ -> group_array\n  | Tpat_variant (_, _, _) -> group_variant\n  | Tpat_lazy _ -> group_lazy\n  | _ -> fatal_error \"Matching.get_group\"\n\nlet is_or p =\n  match p.pat_desc with\n  | Tpat_or _ -> true\n  | _ -> false\n\n(* Conditions for appending to the Or matrix *)\nlet conda p q = not (may_compat p q)\n\nand condb act ps qs = (not (is_guarded act)) && Parmatch.le_pats qs ps\n\nlet or_ok p ps l =\n  List.for_all\n    (function\n      | ({pat_desc = Tpat_or _} as q) :: qs, act -> conda p q || condb act ps qs\n      | _ -> true)\n    l\n\n(* Insert or append a pattern in the Or matrix *)\n\nlet equiv_pat p q = le_pat p q && le_pat q p\n\nlet rec get_equiv p l =\n  match l with\n  | ((q :: _, _) as cl) :: rem ->\n    if equiv_pat p q then\n      let others, rem = get_equiv p rem in\n      (cl :: others, rem)\n    else ([], l)\n  | _ -> ([], l)\n\nlet insert_or_append p ps act ors no =\n  let rec attempt seen = function\n    | ((q :: qs, act_q) as cl) :: rem ->\n      if is_or q then\n        if may_compat p q then\n          if\n            IdentSet.is_empty (extract_vars IdentSet.empty p)\n            && IdentSet.is_empty 
(extract_vars IdentSet.empty q)\n            && equiv_pat p q\n          then\n            (* attempt insert, for equivalent orpats with no variables *)\n            let _, not_e = get_equiv q rem in\n            if\n              or_ok p ps not_e\n              &&\n              (* check append condition for head of O *)\n              List.for_all (* check insert condition for tail of O *)\n                (fun cl ->\n                  match cl with\n                  | q :: _, _ -> not (may_compat p q)\n                  | _ -> assert false)\n                seen\n            then\n              (* insert *)\n              (List.rev_append seen ((p :: ps, act) :: cl :: rem), no)\n            else (* fail to insert or append *)\n              (ors, (p :: ps, act) :: no)\n          else if condb act_q ps qs then\n            (* check condition (b) for append *)\n            attempt (cl :: seen) rem\n          else (ors, (p :: ps, act) :: no)\n        else (* p # q, go on with append/insert *)\n          attempt (cl :: seen) rem\n      else\n        (* q is not an or-pat, go on with append/insert *)\n        attempt (cl :: seen) rem\n    | _ ->\n      (* [] in fact *)\n      ((p :: ps, act) :: ors, no)\n  in\n  (* success in appending *)\n  attempt [] ors\n\n(* Reconstruct default information from half_compiled  pm list *)\n\nlet rec rebuild_matrix pmh =\n  match pmh with\n  | Pm pm -> as_matrix pm.cases\n  | PmOr {or_matrix = m} -> m\n  | PmVar x -> add_omega_column (rebuild_matrix x.inside)\n\nlet rec rebuild_default nexts def =\n  match nexts with\n  | [] -> def\n  | (e, pmh) :: rem ->\n    (add_omega_column (rebuild_matrix pmh), e) :: rebuild_default rem def\n\nlet rebuild_nexts arg nexts k =\n  List.fold_right\n    (fun (e, pm) k -> (e, PmVar {inside = pm; var_arg = arg}) :: k)\n    nexts k\n\n(*\n  Split a matching.\n    Splitting is first directed by or-patterns, then by\n    tests (e.g. 
constructors)/variable transitions.\n\n    The approach is greedy, every split function attempts to\n    raise rows as much as possible in the top matrix,\n    then splitting applies again to the remaining rows.\n\n    Some precompilation of or-patterns and\n    variable pattern occurs. Mostly this means that bindings\n    are performed now,  being replaced by let-bindings\n    in actions (cf. simplify_cases).\n\n    Additionally, if the match argument is a variable, matchings whose\n    first column is made of variables only are splitted further\n    (cf. precompile_var).\n\n*)\n\nlet rec split_or argo cls args def =\n  let cls = simplify_cases args cls in\n\n  let rec do_split before ors no = function\n    | [] -> cons_next (List.rev before) (List.rev ors) (List.rev no)\n    | ((p :: ps, act) as cl) :: rem ->\n      if up_ok cl no then\n        if is_or p then\n          let ors, no = insert_or_append p ps act ors no in\n          do_split before ors no rem\n        else if up_ok cl ors then do_split (cl :: before) ors no rem\n        else if or_ok p ps ors then do_split before (cl :: ors) no rem\n        else do_split before ors (cl :: no) rem\n      else do_split before ors (cl :: no) rem\n    | _ -> assert false\n  and cons_next yes yesor = function\n    | [] -> precompile_or argo yes yesor args def []\n    | rem ->\n      let {me = next; matrix; top_default = def}, nexts =\n        do_split [] [] [] rem\n      in\n      let idef = next_raise_count () in\n      precompile_or argo yes yesor args\n        (cons_default matrix idef def)\n        ((idef, next) :: nexts)\n  in\n\n  do_split [] [] [] cls\n\n(* Ultra-naive splitting, close to semantics, used for extension,\n   as potential rebind prevents any kind of optimisation *)\n\nand split_naive cls args def k =\n  let rec split_exc cstr0 yes = function\n    | [] ->\n      let yes = List.rev yes in\n      ( {\n          me = Pm {cases = yes; args; default = def};\n          matrix = as_matrix yes;\n          
top_default = def;\n        },\n        k )\n    | ((p :: _, _) as cl) :: rem ->\n      if group_constructor p then\n        let cstr = pat_as_constr p in\n        if cstr = cstr0 then split_exc cstr0 (cl :: yes) rem\n        else\n          let yes = List.rev yes in\n          let {me = next; matrix; top_default = def}, nexts =\n            split_exc cstr [cl] rem\n          in\n          let idef = next_raise_count () in\n          let def = cons_default matrix idef def in\n          ( {\n              me = Pm {cases = yes; args; default = def};\n              matrix = as_matrix yes;\n              top_default = def;\n            },\n            (idef, next) :: nexts )\n      else\n        let yes = List.rev yes in\n        let {me = next; matrix; top_default = def}, nexts =\n          split_noexc [cl] rem\n        in\n        let idef = next_raise_count () in\n        let def = cons_default matrix idef def in\n        ( {\n            me = Pm {cases = yes; args; default = def};\n            matrix = as_matrix yes;\n            top_default = def;\n          },\n          (idef, next) :: nexts )\n    | _ -> assert false\n  and split_noexc yes = function\n    | [] -> precompile_var args (List.rev yes) def k\n    | ((p :: _, _) as cl) :: rem ->\n      if group_constructor p then\n        let yes = List.rev yes in\n        let {me = next; matrix; top_default = def}, nexts =\n          split_exc (pat_as_constr p) [cl] rem\n        in\n        let idef = next_raise_count () in\n        precompile_var args yes\n          (cons_default matrix idef def)\n          ((idef, next) :: nexts)\n      else split_noexc (cl :: yes) rem\n    | _ -> assert false\n  in\n\n  match cls with\n  | [] -> assert false\n  | ((p :: _, _) as cl) :: rem ->\n    if group_constructor p then split_exc (pat_as_constr p) [cl] rem\n    else split_noexc [cl] rem\n  | _ -> assert false\n\nand split_constr cls args def k =\n  let ex_pat = what_is_cases cls in\n  match ex_pat.pat_desc with\n  | Tpat_any 
-> precompile_var args cls def k\n  | Tpat_construct (_, {cstr_tag = Cstr_extension _}, _) ->\n    split_naive cls args def k\n  | _ -> (\n    let group = get_group ex_pat in\n\n    let rec split_ex yes no = function\n      | [] -> (\n        let yes = List.rev yes and no = List.rev no in\n        match no with\n        | [] ->\n          ( {\n              me = Pm {cases = yes; args; default = def};\n              matrix = as_matrix yes;\n              top_default = def;\n            },\n            k )\n        | cl :: rem -> (\n          match yes with\n          | [] ->\n            (* Could not success in raising up a constr matching up *)\n            split_noex [cl] [] rem\n          | _ ->\n            let {me = next; matrix; top_default = def}, nexts =\n              split_noex [cl] [] rem\n            in\n            let idef = next_raise_count () in\n            let def = cons_default matrix idef def in\n            ( {\n                me = Pm {cases = yes; args; default = def};\n                matrix = as_matrix yes;\n                top_default = def;\n              },\n              (idef, next) :: nexts )))\n      | ((p :: _, _) as cl) :: rem ->\n        if group p && up_ok cl no then split_ex (cl :: yes) no rem\n        else split_ex yes (cl :: no) rem\n      | _ -> assert false\n    and split_noex yes no = function\n      | [] -> (\n        let yes = List.rev yes and no = List.rev no in\n        match no with\n        | [] -> precompile_var args yes def k\n        | cl :: rem ->\n          let {me = next; matrix; top_default = def}, nexts =\n            split_ex [cl] [] rem\n          in\n          let idef = next_raise_count () in\n          precompile_var args yes\n            (cons_default matrix idef def)\n            ((idef, next) :: nexts))\n      | [((ps, _) as cl)] when List.for_all group_var ps && yes <> [] ->\n        (* This enables an extra division in some frequent cases :\n           last row is made of variables only *)\n        
split_noex yes (cl :: no) []\n      | ((p :: _, _) as cl) :: rem ->\n        if (not (group p)) && up_ok cl no then split_noex (cl :: yes) no rem\n        else split_noex yes (cl :: no) rem\n      | _ -> assert false\n    in\n\n    match cls with\n    | ((p :: _, _) as cl) :: rem ->\n      if group p then split_ex [cl] [] rem else split_noex [cl] [] rem\n    | _ -> assert false)\n\nand precompile_var args cls def k =\n  match args with\n  | [] -> assert false\n  | _ :: (((Lvar v as av), _) as arg) :: rargs -> (\n    match cls with\n    | [_] ->\n      (* as splitted as it can *)\n      dont_precompile_var args cls def k\n    | _ -> (\n      (* Precompile *)\n      let var_cls =\n        List.map\n          (fun (ps, act) ->\n            match ps with\n            | _ :: ps -> (ps, act)\n            | _ -> assert false)\n          cls\n      and var_def = make_default (fun _ rem -> rem) def in\n      let {me = first; matrix}, nexts =\n        split_or (Some v) var_cls (arg :: rargs) var_def\n      in\n\n      (* Compute top information *)\n      match nexts with\n      | [] ->\n        (* If you need *)\n        dont_precompile_var args cls def k\n      | _ ->\n        let rfirst =\n          {\n            me = PmVar {inside = first; var_arg = av};\n            matrix = add_omega_column matrix;\n            top_default = rebuild_default nexts def;\n          }\n        and rnexts = rebuild_nexts av nexts k in\n        (rfirst, rnexts)))\n  | _ -> dont_precompile_var args cls def k\n\nand dont_precompile_var args cls def k =\n  ( {\n      me = Pm {cases = cls; args; default = def};\n      matrix = as_matrix cls;\n      top_default = def;\n    },\n    k )\n\nand precompile_or argo cls ors args def k =\n  match ors with\n  | [] -> split_constr cls args def k\n  | _ ->\n    let rec do_cases = function\n      | (({pat_desc = Tpat_or _} as orp) :: patl, action) :: rem ->\n        let others, rem = get_equiv orp rem in\n        let orpm =\n          {\n            cases 
=\n              (patl, action)\n              :: List.map\n                   (function\n                     | _ :: ps, action -> (ps, action)\n                     | _ -> assert false)\n                   others;\n            args =\n              (match args with\n              | _ :: r -> r\n              | _ -> assert false);\n            default = default_compat orp def;\n          }\n        in\n        let vars =\n          IdentSet.elements\n            (IdentSet.inter\n               (extract_vars IdentSet.empty orp)\n               (pm_free_variables orpm))\n        in\n        let or_num = next_raise_count () in\n        let new_patl = Parmatch.omega_list patl in\n\n        let mk_new_action vs =\n          Lstaticraise (or_num, List.map (fun v -> Lvar v) vs)\n        in\n\n        let body, handlers = do_cases rem in\n        ( explode_or_pat argo new_patl mk_new_action body vars [] orp,\n          let mat = [[orp]] in\n          (mat, or_num, vars, orpm) :: handlers )\n      | cl :: rem ->\n        let new_ord, new_to_catch = do_cases rem in\n        (cl :: new_ord, new_to_catch)\n      | [] -> ([], [])\n    in\n\n    let end_body, handlers = do_cases ors in\n    let matrix = as_matrix (cls @ ors)\n    and body = {cases = cls @ end_body; args; default = def} in\n    ( {me = PmOr {body; handlers; or_matrix = matrix}; matrix; top_default = def},\n      k )\n\nlet split_precompile argo pm =\n  let {me = next}, nexts = split_or argo pm.cases pm.args pm.default in\n  if\n    dbg\n    && (nexts <> []\n       ||\n       match next with\n       | PmOr _ -> true\n       | _ -> false)\n  then (\n    prerr_endline \"** SPLIT **\";\n    pretty_pm pm;\n    pretty_precompiled_res next nexts);\n  (next, nexts)\n\n(* General divide functions *)\n\nlet add_line patl_action pm =\n  pm.cases <- patl_action :: pm.cases;\n  pm\n\ntype cell = {pm: pattern_matching; ctx: ctx list; pat: pattern}\n\nlet add make_matching_fun division eq_key key patl_action args =\n  try\n    
let _, cell = List.find (fun (k, _) -> eq_key key k) division in\n    cell.pm.cases <- patl_action :: cell.pm.cases;\n    division\n  with Not_found ->\n    let cell = make_matching_fun args in\n    cell.pm.cases <- [patl_action];\n    (key, cell) :: division\n\nlet divide make eq_key get_key get_args ctx pm =\n  let rec divide_rec = function\n    | (p :: patl, action) :: rem ->\n      let this_match = divide_rec rem in\n      add (make p pm.default ctx) this_match eq_key (get_key p)\n        (get_args p patl, action)\n        pm.args\n    | _ -> []\n  in\n\n  divide_rec pm.cases\n\nlet divide_line make_ctx make get_args pat ctx pm =\n  let rec divide_rec = function\n    | (p :: patl, action) :: rem ->\n      let this_match = divide_rec rem in\n      add_line (get_args p patl, action) this_match\n    | _ -> make pm.default pm.args\n  in\n\n  {pm = divide_rec pm.cases; ctx = make_ctx ctx; pat}\n\n(* Then come various functions,\n   There is one set of functions per matching style\n   (constants, constructors etc.)\n\n   - matcher functions are arguments to make_default (for default handlers)\n   They may raise NoMatch or OrPat and perform the full\n   matching (selection + arguments).\n\n\n   - get_args and get_key are for the compiled matrices, note that\n   selection and getting arguments are separated.\n\n   - make_ _matching combines the previous functions for producing\n   new  ``pattern_matching'' records.\n*)\n\nlet rec matcher_const cst p rem =\n  match p.pat_desc with\n  | Tpat_or (p1, p2, _) -> (\n    try matcher_const cst p1 rem with NoMatch -> matcher_const cst p2 rem)\n  | Tpat_constant c1 when const_compare c1 cst = 0 -> rem\n  | Tpat_any -> rem\n  | _ -> raise NoMatch\n\nlet get_key_constant caller = function\n  | {pat_desc = Tpat_constant cst} -> cst\n  | p ->\n    prerr_endline (\"BAD: \" ^ caller);\n    pretty_pat p;\n    assert false\n\nlet get_args_constant _ rem = rem\n\nlet make_constant_matching p def ctx = function\n  | [] -> fatal_error 
\"Matching.make_constant_matching\"\n  | _ :: argl ->\n    let def = make_default (matcher_const (get_key_constant \"make\" p)) def\n    and ctx = filter_ctx p ctx in\n    {pm = {cases = []; args = argl; default = def}; ctx; pat = normalize_pat p}\n\nlet divide_constant ctx m =\n  divide make_constant_matching\n    (fun c d -> const_compare c d = 0)\n    (get_key_constant \"divide\")\n    get_args_constant ctx m\n\n(* Matching against a constructor *)\n\nlet make_field_args ~fld_info loc binding_kind arg first_pos last_pos argl =\n  let rec make_args pos =\n    if pos > last_pos then argl\n    else\n      (Lprim (Pfield (pos, fld_info), [arg], loc), binding_kind)\n      :: make_args (pos + 1)\n  in\n  make_args first_pos\n\nlet get_key_constr = function\n  | {pat_desc = Tpat_construct (_, cstr, _)} -> cstr.cstr_tag\n  | _ -> assert false\n\nlet get_args_constr p rem =\n  match p with\n  | {pat_desc = Tpat_construct (_, _, args)} -> args @ rem\n  | _ -> assert false\n\n(* NB: matcher_constr applies to default matrices.\n\n       In that context, matching by constructors of extensible\n       types degrades to arity checking, due to potential rebinding.\n       This comparison is performed by Types.may_equal_constr.\n*)\n\nlet matcher_constr cstr =\n  match cstr.cstr_arity with\n  | 0 ->\n    let rec matcher_rec q rem =\n      match q.pat_desc with\n      | Tpat_or (p1, p2, _) -> (\n        try matcher_rec p1 rem with NoMatch -> matcher_rec p2 rem)\n      | Tpat_construct (_, cstr', []) when Types.may_equal_constr cstr cstr' ->\n        rem\n      | Tpat_any -> rem\n      | _ -> raise NoMatch\n    in\n    matcher_rec\n  | 1 ->\n    let rec matcher_rec q rem =\n      match q.pat_desc with\n      | Tpat_or (p1, p2, _) -> (\n        let r1 = try Some (matcher_rec p1 rem) with NoMatch -> None\n        and r2 = try Some (matcher_rec p2 rem) with NoMatch -> None in\n        match (r1, r2) with\n        | None, None -> raise NoMatch\n        | Some r1, None -> r1\n        | 
None, Some r2 -> r2\n        | Some (a1 :: _), Some (a2 :: _) ->\n          {a1 with pat_loc = Location.none; pat_desc = Tpat_or (a1, a2, None)}\n          :: rem\n        | _, _ -> assert false)\n      | Tpat_construct (_, cstr', [arg]) when Types.may_equal_constr cstr cstr'\n        ->\n        arg :: rem\n      | Tpat_any -> omega :: rem\n      | _ -> raise NoMatch\n    in\n    matcher_rec\n  | _ -> (\n    fun q rem ->\n      match q.pat_desc with\n      | Tpat_or (_, _, _) -> raise OrPat\n      | Tpat_construct (_, cstr', args) when Types.may_equal_constr cstr cstr'\n        ->\n        args @ rem\n      | Tpat_any -> Parmatch.omegas cstr.cstr_arity @ rem\n      | _ -> raise NoMatch)\n\nlet is_not_none_bs_primitve : Lambda.primitive =\n  Pccall (Primitive.simple ~name:\"#is_not_none\" ~arity:1 ~alloc:false)\n\nlet val_from_option_bs_primitive : Lambda.primitive =\n  Pccall (Primitive.simple ~name:\"#val_from_option\" ~arity:1 ~alloc:true)\n\nlet val_from_unnest_option_bs_primitive : Lambda.primitive =\n  Pccall (Primitive.simple ~name:\"#val_from_unnest_option\" ~arity:1 ~alloc:true)\n\nlet make_constr_matching p def ctx = function\n  | [] -> fatal_error \"Matching.make_constr_matching\"\n  | (arg, _mut) :: argl ->\n    let cstr = pat_as_constr p in\n    let untagged = Ast_untagged_variants.has_untagged cstr.cstr_attributes in\n    let newargs =\n      if cstr.cstr_inlined <> None || (untagged && cstr.cstr_args <> []) then\n        (arg, Alias) :: argl\n      else\n        match cstr.cstr_tag with\n        | Cstr_block _\n          when !Config.bs_only && Datarepr.constructor_has_optional_shape cstr\n          ->\n          let from_option =\n            match p.pat_desc with\n            | Tpat_construct (_, _, [{pat_type; pat_env}])\n              when Typeopt.type_cannot_contain_undefined pat_type pat_env ->\n              val_from_unnest_option_bs_primitive\n            | _ -> val_from_option_bs_primitive\n          in\n          (Lprim (from_option, [arg], 
p.pat_loc), Alias) :: argl\n        | Cstr_constant _ | Cstr_block _ ->\n          make_field_args p.pat_loc Alias arg 0 (cstr.cstr_arity - 1) argl\n            ~fld_info:(if cstr.cstr_name = \"::\" then Fld_cons else Fld_variant)\n        | Cstr_unboxed -> (arg, Alias) :: argl\n        | Cstr_extension _ ->\n          make_field_args p.pat_loc Alias arg 1 cstr.cstr_arity argl\n            ~fld_info:Fld_extension\n    in\n    {\n      pm =\n        {\n          cases = [];\n          args = newargs;\n          default = make_default (matcher_constr cstr) def;\n        };\n      ctx = filter_ctx p ctx;\n      pat = normalize_pat p;\n    }\n\nlet divide_constructor ctx pm =\n  divide make_constr_matching Types.equal_tag get_key_constr get_args_constr ctx\n    pm\n\n(* Matching against a variant *)\n\nlet rec matcher_variant_const lab p rem =\n  match p.pat_desc with\n  | Tpat_or (p1, p2, _) -> (\n    try matcher_variant_const lab p1 rem\n    with NoMatch -> matcher_variant_const lab p2 rem)\n  | Tpat_variant (lab1, _, _) when lab1 = lab -> rem\n  | Tpat_any -> rem\n  | _ -> raise NoMatch\n\nlet make_variant_matching_constant p lab def ctx = function\n  | [] -> fatal_error \"Matching.make_variant_matching_constant\"\n  | _ :: argl ->\n    let def = make_default (matcher_variant_const lab) def\n    and ctx = filter_ctx p ctx in\n    {pm = {cases = []; args = argl; default = def}; ctx; pat = normalize_pat p}\n\nlet matcher_variant_nonconst lab p rem =\n  match p.pat_desc with\n  | Tpat_or (_, _, _) -> raise OrPat\n  | Tpat_variant (lab1, Some arg, _) when lab1 = lab -> arg :: rem\n  | Tpat_any -> omega :: rem\n  | _ -> raise NoMatch\n\nlet make_variant_matching_nonconst p lab def ctx = function\n  | [] -> fatal_error \"Matching.make_variant_matching_nonconst\"\n  | (arg, _mut) :: argl ->\n    let def = make_default (matcher_variant_nonconst lab) def\n    and ctx = filter_ctx p ctx in\n    {\n      pm =\n        {\n          cases = [];\n          args =\n            
(Lprim (Pfield (1, Fld_poly_var_content), [arg], p.pat_loc), Alias)\n            :: argl;\n          default = def;\n        };\n      ctx;\n      pat = normalize_pat p;\n    }\n\nlet divide_variant row ctx {cases = cl; args = al; default = def} =\n  let row = Btype.row_repr row in\n  let rec divide = function\n    | (({pat_desc = Tpat_variant (lab, pato, _)} as p) :: patl, action) :: rem\n      -> (\n      let variants = divide rem in\n      if\n        try Btype.row_field_repr (List.assoc lab row.row_fields) = Rabsent\n        with Not_found -> true\n      then variants\n      else\n        let tag = Btype.hash_variant lab in\n        let ( = ) ((a : string), (b : Types.constructor_tag)) (c, d) =\n          a = c && Types.equal_tag b d\n        in\n        match pato with\n        | None ->\n          add\n            (make_variant_matching_constant p lab def ctx)\n            variants ( = ) (lab, Cstr_constant tag) (patl, action) al\n        | Some pat ->\n          add\n            (make_variant_matching_nonconst p lab def ctx)\n            variants ( = ) (lab, Cstr_block tag)\n            (pat :: patl, action)\n            al)\n    | _ -> []\n  in\n  divide cl\n\n(*\n  Three ``no-test'' cases\n  *)\n\n(* Matching against a variable *)\n\nlet get_args_var _ rem = rem\n\nlet make_var_matching def = function\n  | [] -> fatal_error \"Matching.make_var_matching\"\n  | _ :: argl ->\n    {cases = []; args = argl; default = make_default get_args_var def}\n\nlet divide_var ctx pm =\n  divide_line ctx_lshift make_var_matching get_args_var omega ctx pm\n\n(* Matching and forcing a lazy value *)\n\nlet get_arg_lazy p rem =\n  match p with\n  | {pat_desc = Tpat_any} -> omega :: rem\n  | {pat_desc = Tpat_lazy arg} -> arg :: rem\n  | _ -> assert false\n\nlet matcher_lazy p rem =\n  match p.pat_desc with\n  | Tpat_or (_, _, _) -> raise OrPat\n  | Tpat_any | Tpat_var _ -> omega :: rem\n  | Tpat_lazy arg -> arg :: rem\n  | _ -> raise NoMatch\n\n(* Inlining the tag tests before 
calling the primitive that works on\n   lazy blocks. This is also used in translcore.ml.\n   No other call than Obj.tag when the value has been forced before.\n*)\n\nlet get_mod_field modname field =\n  lazy\n    (try\n       let mod_ident = Ident.create_persistent modname in\n       let env = Env.open_pers_signature modname Env.initial_safe_string in\n       let p =\n         try\n           match Env.lookup_value (Longident.Lident field) env with\n           | Path.Pdot (_, _, i), _ -> i\n           | _ ->\n             fatal_error (\"Primitive \" ^ modname ^ \".\" ^ field ^ \" not found.\")\n         with Not_found ->\n           fatal_error (\"Primitive \" ^ modname ^ \".\" ^ field ^ \" not found.\")\n       in\n       Lprim\n         ( Pfield (p, Fld_module {name = field}),\n           [Lprim (Pgetglobal mod_ident, [], Location.none)],\n           Location.none )\n     with Not_found -> fatal_error (\"Module \" ^ modname ^ \" unavailable.\"))\n\nlet code_force = get_mod_field \"CamlinternalLazy\" \"force\"\n\n(* inline_lazy_force inlines the beginning of the code of Lazy.force. 
When\n   the value argument is tagged as:\n   - forward, take field 0\n   - lazy, call the primitive that forces (without testing again the tag)\n   - anything else, return it\n\n   Using Lswitch below relies on the fact that the GC does not shortcut\n   Forward(val_out_of_heap).\n*)\n\nlet inline_lazy_force arg loc =\n  Lapply\n    {\n      ap_func = Lazy.force code_force;\n      ap_inlined = Default_inline;\n      ap_args = [arg];\n      ap_loc = loc;\n    }\nlet make_lazy_matching def = function\n  | [] -> fatal_error \"Matching.make_lazy_matching\"\n  | (arg, _mut) :: argl ->\n    {\n      cases = [];\n      args = (inline_lazy_force arg Location.none, Strict) :: argl;\n      default = make_default matcher_lazy def;\n    }\n\nlet divide_lazy p ctx pm =\n  divide_line (filter_ctx p) make_lazy_matching get_arg_lazy p ctx pm\n\n(* Matching against a tuple pattern *)\n\nlet get_args_tuple arity p rem =\n  match p with\n  | {pat_desc = Tpat_any} -> omegas arity @ rem\n  | {pat_desc = Tpat_tuple args} -> args @ rem\n  | _ -> assert false\n\nlet matcher_tuple arity p rem =\n  match p.pat_desc with\n  | Tpat_or (_, _, _) -> raise OrPat\n  | Tpat_any | Tpat_var _ -> omegas arity @ rem\n  | Tpat_tuple args when List.length args = arity -> args @ rem\n  | _ -> raise NoMatch\n\nlet make_tuple_matching loc arity def = function\n  | [] -> fatal_error \"Matching.make_tuple_matching\"\n  | (arg, _mut) :: argl ->\n    let rec make_args pos =\n      if pos >= arity then argl\n      else\n        (Lprim (Pfield (pos, Fld_tuple), [arg], loc), Alias)\n        :: make_args (pos + 1)\n    in\n    {\n      cases = [];\n      args = make_args 0;\n      default = make_default (matcher_tuple arity) def;\n    }\n\nlet divide_tuple arity p ctx pm =\n  divide_line (filter_ctx p)\n    (make_tuple_matching p.pat_loc arity)\n    (get_args_tuple arity) p ctx pm\n\n(* Matching against a record pattern *)\n\nlet record_matching_line num_fields lbl_pat_list =\n  let patv = Array.make num_fields 
omega in\n  List.iter (fun (_, lbl, pat) -> patv.(lbl.lbl_pos) <- pat) lbl_pat_list;\n  Array.to_list patv\n\nlet get_args_record num_fields p rem =\n  match p with\n  | {pat_desc = Tpat_any} -> record_matching_line num_fields [] @ rem\n  | {pat_desc = Tpat_record (lbl_pat_list, _)} ->\n    record_matching_line num_fields lbl_pat_list @ rem\n  | _ -> assert false\n\nlet matcher_record num_fields p rem =\n  match p.pat_desc with\n  | Tpat_or (_, _, _) -> raise OrPat\n  | Tpat_any | Tpat_var _ -> record_matching_line num_fields [] @ rem\n  | Tpat_record ([], _) when num_fields = 0 -> rem\n  | Tpat_record (((_, lbl, _) :: _ as lbl_pat_list), _)\n    when Array.length lbl.lbl_all = num_fields ->\n    record_matching_line num_fields lbl_pat_list @ rem\n  | _ -> raise NoMatch\n\nlet make_record_matching loc all_labels def = function\n  | [] -> fatal_error \"Matching.make_record_matching\"\n  | (arg, _mut) :: argl ->\n    let rec make_args pos =\n      if pos >= Array.length all_labels then argl\n      else\n        let lbl = all_labels.(pos) in\n        let access =\n          match lbl.lbl_repres with\n          | Record_float_unused -> assert false\n          | Record_regular | Record_optional_labels _ ->\n            Lprim (Pfield (lbl.lbl_pos, Lambda.fld_record lbl), [arg], loc)\n          | Record_inlined _ ->\n            Lprim\n              (Pfield (lbl.lbl_pos, Lambda.fld_record_inline lbl), [arg], loc)\n          | Record_unboxed _ -> arg\n          | Record_extension ->\n            Lprim\n              ( Pfield (lbl.lbl_pos + 1, Lambda.fld_record_extension lbl),\n                [arg],\n                loc )\n        in\n        let str =\n          match lbl.lbl_mut with\n          | Immutable -> Alias\n          | Mutable -> StrictOpt\n        in\n        (access, str) :: make_args (pos + 1)\n    in\n    let nfields = Array.length all_labels in\n    let def = make_default (matcher_record nfields) def in\n    {cases = []; args = make_args 0; default = 
def}\n\nlet divide_record all_labels p ctx pm =\n  let get_args = get_args_record (Array.length all_labels) in\n  divide_line (filter_ctx p)\n    (make_record_matching p.pat_loc all_labels)\n    get_args p ctx pm\n\n(* Matching against an array pattern *)\n\nlet get_key_array = function\n  | {pat_desc = Tpat_array patl} -> List.length patl\n  | _ -> assert false\n\nlet get_args_array p rem =\n  match p with\n  | {pat_desc = Tpat_array patl} -> patl @ rem\n  | _ -> assert false\n\nlet matcher_array len p rem =\n  match p.pat_desc with\n  | Tpat_or (_, _, _) -> raise OrPat\n  | Tpat_array args when List.length args = len -> args @ rem\n  | Tpat_any -> Parmatch.omegas len @ rem\n  | _ -> raise NoMatch\n\nlet make_array_matching p def ctx = function\n  | [] -> fatal_error \"Matching.make_array_matching\"\n  | (arg, _mut) :: argl ->\n    let len = get_key_array p in\n    let rec make_args pos =\n      if pos >= len then argl\n      else\n        ( Lprim\n            (Parrayrefu, [arg; Lconst (Const_base (Const_int pos))], p.pat_loc),\n          StrictOpt )\n        :: make_args (pos + 1)\n    in\n    let def = make_default (matcher_array len) def and ctx = filter_ctx p ctx in\n    {\n      pm = {cases = []; args = make_args 0; default = def};\n      ctx;\n      pat = normalize_pat p;\n    }\n\nlet divide_array ctx pm =\n  divide make_array_matching ( = ) get_key_array get_args_array ctx pm\n\n(*\n   Specific string test sequence\n   Will be called by the bytecode compiler, from bytegen.ml.\n   The strategy is first dichotomic search (we perform 3-way tests\n   with compare_string), then sequence of equality tests\n   when there are less then T=strings_test_threshold static strings to match.\n\n  Increasing T entails (slightly) less code, decreasing T\n  (slightly) favors runtime speed.\n  T=8 looks a decent tradeoff.\n*)\n\n(* Utilities *)\n\nlet strings_test_threshold = 8\n\nlet prim_string_notequal =\n  Pccall (Primitive.simple ~name:\"caml_string_notequal\" ~arity:2 
~alloc:false)\n\nlet prim_string_compare =\n  Pccall (Primitive.simple ~name:\"caml_string_compare\" ~arity:2 ~alloc:false)\n\nlet bind_sw arg k =\n  match arg with\n  | Lvar _ -> k arg\n  | _ ->\n    let id = Ident.create \"switch\" in\n    Llet (Strict, Pgenval, id, arg, k (Lvar id))\n\n(* Sequential equality tests *)\n\nlet make_string_test_sequence loc arg sw d =\n  let d, sw =\n    match d with\n    | None -> (\n      match sw with\n      | (_, d) :: sw -> (d, sw)\n      | [] -> assert false)\n    | Some d -> (d, sw)\n  in\n  bind_sw arg (fun arg ->\n      List.fold_right\n        (fun (s, lam) k ->\n          Lifthenelse\n            ( Lprim\n                (prim_string_notequal, [arg; Lconst (Const_immstring s)], loc),\n              k,\n              lam ))\n        sw d)\n\nlet rec split k xs =\n  match xs with\n  | [] -> assert false\n  | x0 :: xs ->\n    if k <= 1 then ([], x0, xs)\n    else\n      let xs, y0, ys = split (k - 2) xs in\n      (x0 :: xs, y0, ys)\n\nlet zero_lam = Lconst (Const_base (Const_int 0))\n\nlet tree_way_test loc arg lt eq gt =\n  Lifthenelse\n    ( Lprim (Pintcomp Clt, [arg; zero_lam], loc),\n      lt,\n      Lifthenelse (Lprim (Pintcomp Clt, [zero_lam; arg], loc), gt, eq) )\n\n(* Dichotomic tree *)\n\nlet rec do_make_string_test_tree loc arg sw delta d =\n  let len = List.length sw in\n  if len <= strings_test_threshold + delta then\n    make_string_test_sequence loc arg sw d\n  else\n    let lt, (s, act), gt = split len sw in\n    bind_sw\n      (Lprim (prim_string_compare, [arg; Lconst (Const_immstring s)], loc))\n      (fun r ->\n        tree_way_test loc r\n          (do_make_string_test_tree loc arg lt delta d)\n          act\n          (do_make_string_test_tree loc arg gt delta d))\n\n(* Entry point *)\nlet expand_stringswitch loc arg sw d =\n  match d with\n  | None -> bind_sw arg (fun arg -> do_make_string_test_tree loc arg sw 0 None)\n  | Some e ->\n    bind_sw arg (fun arg ->\n        make_catch e (fun d -> 
do_make_string_test_tree loc arg sw 1 (Some d)))\n\n(**********************)\n(* Generic test trees *)\n(**********************)\n\n(* Sharing *)\n\n(* Add handler, if shared *)\nlet handle_shared () =\n  let hs = ref (fun x -> x) in\n  let handle_shared act =\n    match act with\n    | Switch.Single act -> act\n    | Switch.Shared act ->\n      let i, h = make_catch_delayed act in\n      let ohs = !hs in\n      (hs := fun act -> h (ohs act));\n      make_exit i\n  in\n  (hs, handle_shared)\n\nlet share_actions_tree sw d =\n  let store = StoreExp.mk_store () in\n  (* Default action is always shared *)\n  let d =\n    match d with\n    | None -> None\n    | Some d -> Some (store.Switch.act_store_shared d)\n  in\n  (* Store all other actions *)\n  let sw = List.map (fun (cst, act) -> (cst, store.Switch.act_store act)) sw in\n\n  (* Retrieve all actions, including potential default *)\n  let acts = store.Switch.act_get_shared () in\n\n  (* Array of actual actions *)\n  let hs, handle_shared = handle_shared () in\n  let acts = Array.map handle_shared acts in\n\n  (* Reconstruct default and switch list *)\n  let d =\n    match d with\n    | None -> None\n    | Some d -> Some acts.(d)\n  in\n  let sw = List.map (fun (cst, j) -> (cst, acts.(j))) sw in\n  (!hs, sw, d)\n\n(* Note: dichotomic search requires sorted input with no duplicates *)\nlet rec uniq_lambda_list sw =\n  match sw with\n  | [] | [_] -> sw\n  | ((c1, _) as p1) :: ((c2, _) :: sw2 as sw1) ->\n    if const_compare c1 c2 = 0 then uniq_lambda_list (p1 :: sw2)\n    else p1 :: uniq_lambda_list sw1\n\nlet sort_lambda_list l =\n  let l = List.stable_sort (fun (x, _) (y, _) -> const_compare x y) l in\n  uniq_lambda_list l\n\nlet rec cut n l =\n  if n = 0 then ([], l)\n  else\n    match l with\n    | [] -> raise (Invalid_argument \"cut\")\n    | a :: l ->\n      let l1, l2 = cut (n - 1) l in\n      (a :: l1, l2)\n\nlet rec do_tests_fail loc fail tst arg = function\n  | [] -> fail\n  | (c, act) :: rem ->\n    
Lifthenelse\n      ( Lprim (tst, [arg; Lconst (Const_base c)], loc),\n        do_tests_fail loc fail tst arg rem,\n        act )\n\nlet rec do_tests_nofail loc tst arg = function\n  | [] -> fatal_error \"Matching.do_tests_nofail\"\n  | [(_, act)] -> act\n  | (c, act) :: rem ->\n    Lifthenelse\n      ( Lprim (tst, [arg; Lconst (Const_base c)], loc),\n        do_tests_nofail loc tst arg rem,\n        act )\n\nlet make_test_sequence loc fail tst lt_tst arg const_lambda_list =\n  let const_lambda_list = sort_lambda_list const_lambda_list in\n  let hs, const_lambda_list, fail = share_actions_tree const_lambda_list fail in\n\n  let rec make_test_sequence const_lambda_list =\n    if List.length const_lambda_list >= 4 && lt_tst <> Pignore then\n      split_sequence const_lambda_list\n    else\n      match fail with\n      | None -> do_tests_nofail loc tst arg const_lambda_list\n      | Some fail -> do_tests_fail loc fail tst arg const_lambda_list\n  and split_sequence const_lambda_list =\n    let list1, list2 =\n      cut (List.length const_lambda_list / 2) const_lambda_list\n    in\n    Lifthenelse\n      ( Lprim (lt_tst, [arg; Lconst (Const_base (fst (List.hd list2)))], loc),\n        make_test_sequence list1,\n        make_test_sequence list2 )\n  in\n  hs (make_test_sequence const_lambda_list)\n\nmodule SArg = struct\n  type primitive = Lambda.primitive\n\n  let eqint = Pintcomp Ceq\n  let neint = Pintcomp Cneq\n  let leint = Pintcomp Cle\n  let ltint = Pintcomp Clt\n  let geint = Pintcomp Cge\n  let gtint = Pintcomp Cgt\n\n  type act = Lambda.lambda\n\n  let make_prim p args = Lprim (p, args, Location.none)\n  let make_offset arg n =\n    match n with\n    | 0 -> arg\n    | _ -> Lprim (Poffsetint n, [arg], Location.none)\n\n  let bind arg body =\n    let newvar, newarg =\n      match arg with\n      | Lvar v -> (v, arg)\n      | _ ->\n        let newvar = Ident.create \"switcher\" in\n        (newvar, Lvar newvar)\n    in\n    bind Alias newvar arg (body newarg)\n  
let make_const i = Lconst (Const_base (Const_int i))\n  let make_isout h arg = Lprim (Pisout, [h; arg], Location.none)\n  let make_isin h arg = Lprim (Pnot, [make_isout h arg], Location.none)\n  let make_if cond ifso ifnot = Lifthenelse (cond, ifso, ifnot)\n  let make_switch loc arg cases acts ~offset sw_names =\n    let l = ref [] in\n    for i = Array.length cases - 1 downto 0 do\n      l := (offset + i, acts.(cases.(i))) :: !l\n    done;\n    Lswitch\n      ( arg,\n        {\n          sw_numconsts = Array.length cases;\n          sw_consts = !l;\n          sw_numblocks = 0;\n          sw_blocks = [];\n          sw_failaction = None;\n          sw_names;\n        },\n        loc )\n  let make_catch = make_catch_delayed\n  let make_exit = make_exit\nend\n\n(* Action sharing for Lswitch argument *)\nlet share_actions_sw sw =\n  (* Attempt sharing on all actions *)\n  let store = StoreExp.mk_store () in\n  let fail =\n    match sw.sw_failaction with\n    | None -> None\n    | Some fail ->\n      (* Fail is translated to exit, whatever happens *)\n      Some (store.Switch.act_store_shared fail)\n  in\n  let consts =\n    List.map (fun (i, e) -> (i, store.Switch.act_store e)) sw.sw_consts\n  and blocks =\n    List.map (fun (i, e) -> (i, store.Switch.act_store e)) sw.sw_blocks\n  in\n  let acts = store.Switch.act_get_shared () in\n  let hs, handle_shared = handle_shared () in\n  let acts = Array.map handle_shared acts in\n  let fail =\n    match fail with\n    | None -> None\n    | Some fail -> Some acts.(fail)\n  in\n  ( !hs,\n    {\n      sw with\n      sw_consts = List.map (fun (i, j) -> (i, acts.(j))) consts;\n      sw_blocks = List.map (fun (i, j) -> (i, acts.(j))) blocks;\n      sw_failaction = fail;\n    } )\n\n(* Reintroduce fail action in switch argument,\n   for the sake of avoiding carrying over huge switches *)\n\nlet reintroduce_fail sw =\n  match sw.sw_failaction with\n  | None ->\n    let t = Hashtbl.create 17 in\n    let seen (_, l) =\n      match 
as_simple_exit l with\n      | Some i ->\n        let old = try Hashtbl.find t i with Not_found -> 0 in\n        Hashtbl.replace t i (old + 1)\n      | None -> ()\n    in\n    List.iter seen sw.sw_consts;\n    List.iter seen sw.sw_blocks;\n    let i_max = ref (-1) and max = ref (-1) in\n    Hashtbl.iter\n      (fun i c ->\n        if c > !max then (\n          i_max := i;\n          max := c))\n      t;\n    if !max >= 3 then\n      let default = !i_max in\n      let remove ls =\n        Ext_list.filter ls (fun (_, lam) ->\n            match as_simple_exit lam with\n            | Some j -> j <> default\n            | None -> true)\n      in\n      {\n        sw with\n        sw_consts = remove sw.sw_consts;\n        sw_blocks = remove sw.sw_blocks;\n        sw_failaction = Some (make_exit default);\n      }\n    else sw\n  | Some _ -> sw\n\nmodule Switcher = Switch.Make (SArg)\nopen Switch\n\nlet rec last def = function\n  | [] -> def\n  | [(x, _)] -> x\n  | _ :: rem -> last def rem\n\nlet get_edges low high l =\n  match l with\n  | [] -> (low, high)\n  | (x, _) :: _ -> (x, last high l)\n\nlet as_interval_canfail fail low high l =\n  let store = StoreExp.mk_store () in\n\n  let do_store _tag act =\n    let i = store.act_store act in\n    (*\n    eprintf \"STORE [%s] %i %s\\n\" tag i (string_of_lam act) ;\n*)\n    i\n  in\n\n  let rec nofail_rec cur_low cur_high cur_act = function\n    | [] ->\n      if cur_high = high then [(cur_low, cur_high, cur_act)]\n      else [(cur_low, cur_high, cur_act); (cur_high + 1, high, 0)]\n    | (i, act_i) :: rem as all ->\n      let act_index = do_store \"NO\" act_i in\n      if cur_high + 1 = i then\n        if act_index = cur_act then nofail_rec cur_low i cur_act rem\n        else if act_index = 0 then (cur_low, i - 1, cur_act) :: fail_rec i i rem\n        else (cur_low, i - 1, cur_act) :: nofail_rec i i act_index rem\n      else if act_index = 0 then\n        (cur_low, cur_high, cur_act)\n        :: fail_rec (cur_high + 1) 
(cur_high + 1) all\n      else\n        (cur_low, cur_high, cur_act)\n        :: (cur_high + 1, i - 1, 0)\n        :: nofail_rec i i act_index rem\n  and fail_rec cur_low cur_high = function\n    | [] -> [(cur_low, cur_high, 0)]\n    | (i, act_i) :: rem ->\n      let index = do_store \"YES\" act_i in\n      if index = 0 then fail_rec cur_low i rem\n      else (cur_low, i - 1, 0) :: nofail_rec i i index rem\n  in\n\n  let init_rec = function\n    | [] -> [(low, high, 0)]\n    | (i, act_i) :: rem ->\n      let index = do_store \"INIT\" act_i in\n      if index = 0 then fail_rec low i rem\n      else if low < i then (low, i - 1, 0) :: nofail_rec i i index rem\n      else nofail_rec i i index rem\n  in\n\n  assert (do_store \"FAIL\" fail = 0);\n  (* fail has action index 0 *)\n  let r = init_rec l in\n  (Array.of_list r, store)\n\nlet as_interval_nofail l =\n  let store = StoreExp.mk_store () in\n  let rec some_hole = function\n    | [] | [_] -> false\n    | (i, _) :: ((j, _) :: _ as rem) -> j > i + 1 || some_hole rem\n  in\n  let rec i_rec cur_low cur_high cur_act = function\n    | [] -> [(cur_low, cur_high, cur_act)]\n    | (i, act) :: rem ->\n      let act_index = store.act_store act in\n      if act_index = cur_act then i_rec cur_low i cur_act rem\n      else (cur_low, cur_high, cur_act) :: i_rec i i act_index rem\n  in\n  let inters =\n    match l with\n    | (i, act) :: rem ->\n      let act_index =\n        (* In case there is some hole and that a switch is emitted,\n           action 0 will be used as the action of unreachable\n           cases (cf. 
switch.ml, make_switch).\n           Hence, this action will be shared *)\n        if some_hole rem then store.act_store_shared act\n        else store.act_store act\n      in\n      assert (act_index = 0);\n      i_rec i i act_index rem\n    | _ -> assert false\n  in\n\n  (Array.of_list inters, store)\n\nlet sort_int_lambda_list l =\n  List.sort\n    (fun (i1, _) (i2, _) -> if i1 < i2 then -1 else if i2 < i1 then 1 else 0)\n    l\n\nlet as_interval fail low high l =\n  let l = sort_int_lambda_list l in\n  ( get_edges low high l,\n    match fail with\n    | None -> as_interval_nofail l\n    | Some act -> as_interval_canfail act low high l )\n\nlet call_switcher loc fail arg low high int_lambda_list sw_names =\n  let edges, (cases, actions) = as_interval fail low high int_lambda_list in\n  Switcher.zyva loc edges arg cases actions sw_names\n\nlet rec list_as_pat = function\n  | [] -> fatal_error \"Matching.list_as_pat\"\n  | [pat] -> pat\n  | pat :: rem -> {pat with pat_desc = Tpat_or (pat, list_as_pat rem, None)}\n\nlet complete_pats_constrs = function\n  | p :: _ as pats ->\n    List.map (pat_of_constr p)\n      (complete_constrs p (List.map get_key_constr pats))\n  | _ -> assert false\n\n(*\n     Following two ``failaction'' function compute n, the trap handler\n    to jump to in case of failure of elementary tests\n*)\n\nlet mk_failaction_neg partial ctx def =\n  match partial with\n  | Partial -> (\n    match def with\n    | (_, idef) :: _ ->\n      (Some (Lstaticraise (idef, [])), jumps_singleton idef ctx)\n    | [] ->\n      (* Act as Total, this means\n         If no appropriate default matrix exists,\n         then this switch cannot fail *)\n      (None, jumps_empty))\n  | Total -> (None, jumps_empty)\n\n(* In line with the article and simpler than before *)\nlet mk_failaction_pos partial seen ctx defs =\n  if dbg then (\n    prerr_endline \"**POS**\";\n    pretty_def defs;\n    ());\n  let rec scan_def env to_test defs =\n    match (to_test, defs) with\n  
  | [], _ | _, [] ->\n      List.fold_left\n        (fun (klist, jumps) (pats, i) ->\n          let action = Lstaticraise (i, []) in\n          let klist =\n            List.fold_right\n              (fun pat r -> (get_key_constr pat, action) :: r)\n              pats klist\n          and jumps = jumps_add i (ctx_lub (list_as_pat pats) ctx) jumps in\n          (klist, jumps))\n        ([], jumps_empty) env\n    | _, (pss, idef) :: rem -> (\n      let now, later =\n        List.partition (fun (_p, p_ctx) -> ctx_match p_ctx pss) to_test\n      in\n      match now with\n      | [] -> scan_def env to_test rem\n      | _ -> scan_def ((List.map fst now, idef) :: env) later rem)\n  in\n\n  let fail_pats = complete_pats_constrs seen in\n  if List.length fail_pats < 32 then (\n    let fail, jmps =\n      scan_def [] (List.map (fun pat -> (pat, ctx_lub pat ctx)) fail_pats) defs\n    in\n    if dbg then (\n      eprintf \"POSITIVE JUMPS [%i]:\\n\" (List.length fail_pats);\n      pretty_jumps jmps);\n    (None, fail, jmps))\n  else (\n    (* Too many non-matched constructors -> reduced information *)\n    if dbg then eprintf \"POS->NEG!!!\\n%!\";\n    let fail, jumps = mk_failaction_neg partial ctx defs in\n    if dbg then\n      eprintf \"FAIL: %s\\n\"\n        (match fail with\n        | None -> \"<none>\"\n        | Some lam -> string_of_lam lam);\n    (fail, [], jumps))\n\nlet combine_constant names loc arg cst partial ctx def\n    (const_lambda_list, total, _pats) =\n  let fail, local_jumps = mk_failaction_neg partial ctx def in\n  let lambda1 =\n    match cst with\n    | Const_int _ ->\n      let int_lambda_list =\n        List.map\n          (function\n            | Const_int n, l -> (n, l)\n            | _ -> assert false)\n          const_lambda_list\n      in\n      call_switcher loc fail arg min_int max_int int_lambda_list names\n    | Const_char _ ->\n      let int_lambda_list =\n        List.map\n          (function\n            | Const_char c, l -> (c, l)\n       
     | _ -> assert false)\n          const_lambda_list\n      in\n      call_switcher loc fail arg 0 max_int int_lambda_list names\n    | Const_string _ ->\n      (* Note as the bytecode compiler may resort to dichotomic search,\n         the clauses of stringswitch  are sorted with duplicates removed.\n         This partly applies to the native code compiler, which requires\n         no duplicates *)\n      let const_lambda_list = sort_lambda_list const_lambda_list in\n      let sw =\n        List.map\n          (fun (c, act) ->\n            match c with\n            | Const_string (s, _) -> (s, act)\n            | _ -> assert false)\n          const_lambda_list\n      in\n      let hs, sw, fail = share_actions_tree sw fail in\n      hs (Lstringswitch (arg, sw, fail, loc))\n    | Const_float _ ->\n      make_test_sequence loc fail (Pfloatcomp Cneq) (Pfloatcomp Clt) arg\n        const_lambda_list\n    | Const_int32 _ ->\n      make_test_sequence loc fail\n        (Pbintcomp (Pint32, Cneq))\n        (Pbintcomp (Pint32, Clt))\n        arg const_lambda_list\n    | Const_int64 _ ->\n      make_test_sequence loc fail\n        (Pbintcomp (Pint64, Cneq))\n        (Pbintcomp (Pint64, Clt))\n        arg const_lambda_list\n    | Const_bigint _ ->\n      make_test_sequence loc fail (Pbigintcomp Cneq) (Pbigintcomp Clt) arg\n        const_lambda_list\n  in\n  (lambda1, jumps_union local_jumps total)\n\nlet split_cases tag_lambda_list =\n  let rec split_rec = function\n    | [] -> ([], [])\n    | (cstr, act) :: rem -> (\n      let consts, nonconsts = split_rec rem in\n      match cstr with\n      | Cstr_constant n -> ((n, act) :: consts, nonconsts)\n      | Cstr_block n -> (consts, (n, act) :: nonconsts)\n      | Cstr_unboxed -> (consts, (0, act) :: nonconsts)\n      | Cstr_extension _ -> assert false)\n  in\n  let const, nonconst = split_rec tag_lambda_list in\n  (sort_int_lambda_list const, sort_int_lambda_list nonconst)\n\n(* refine [split_cases] and [split_variant_cases] 
*)\nlet split_variant_cases tag_lambda_list =\n  let rec split_rec = function\n    | [] -> ([], [])\n    | ((name, cstr), act) :: rem -> (\n      let consts, nonconsts = split_rec rem in\n      match cstr with\n      | Cstr_constant n -> ((n, (name, act)) :: consts, nonconsts)\n      | Cstr_block n -> (consts, (n, (name, act)) :: nonconsts)\n      | Cstr_unboxed -> assert false\n      | Cstr_extension _ -> assert false)\n  in\n  let const, nonconst = split_rec tag_lambda_list in\n  (sort_int_lambda_list const, sort_int_lambda_list nonconst)\n\nlet split_extension_cases tag_lambda_list =\n  let rec split_rec = function\n    | [] -> ([], [])\n    | (cstr, act) :: rem -> (\n      let consts, nonconsts = split_rec rem in\n      match cstr with\n      | Cstr_extension (path, true) when not !Config.bs_only ->\n        ((path, act) :: consts, nonconsts)\n      | Cstr_extension (path, _) -> (consts, (path, act) :: nonconsts)\n      | _ -> assert false)\n  in\n  split_rec tag_lambda_list\n\nlet extension_slot_eq =\n  Pccall (Primitive.simple ~name:\"#extension_slot_eq\" ~arity:2 ~alloc:false)\nlet combine_constructor sw_names loc arg ex_pat cstr partial ctx def\n    (tag_lambda_list, total1, pats) =\n  if cstr.cstr_consts < 0 then\n    (* Special cases for extensions *)\n    let fail, local_jumps = mk_failaction_neg partial ctx def in\n    let lambda1 =\n      let consts, nonconsts = split_extension_cases tag_lambda_list in\n      let default, consts, nonconsts =\n        match fail with\n        | None -> (\n          match (consts, nonconsts) with\n          | _, (_, act) :: rem -> (act, consts, rem)\n          | (_, act) :: rem, _ -> (act, rem, nonconsts)\n          | _ -> assert false)\n        | Some fail -> (fail, consts, nonconsts)\n      in\n      let nonconst_lambda =\n        match nonconsts with\n        | [] -> default\n        | _ ->\n          let tag = Ident.create \"tag\" in\n          let tests =\n            List.fold_right\n              (fun (path, act) 
rem ->\n                let ext = transl_extension_path ex_pat.pat_env path in\n                Lifthenelse\n                  (Lprim (extension_slot_eq, [Lvar tag; ext], loc), act, rem))\n              nonconsts default\n          in\n          Llet (Alias, Pgenval, tag, arg, tests)\n      in\n      List.fold_right\n        (fun (path, act) rem ->\n          let ext = transl_extension_path ex_pat.pat_env path in\n          Lifthenelse (Lprim (extension_slot_eq, [arg; ext], loc), act, rem))\n        consts nonconst_lambda\n    in\n    (lambda1, jumps_union local_jumps total1)\n  else\n    (* Regular concrete type *)\n    let ncases = List.length tag_lambda_list\n    and nconstrs = cstr.cstr_consts + cstr.cstr_nonconsts in\n    let sig_complete = ncases = nconstrs in\n    let fail_opt, fails, local_jumps =\n      if sig_complete then (None, [], jumps_empty)\n      else mk_failaction_pos partial pats ctx def\n    in\n\n    let tag_lambda_list = fails @ tag_lambda_list in\n    let consts, nonconsts = split_cases tag_lambda_list in\n    let lambda1 =\n      match (fail_opt, same_actions tag_lambda_list) with\n      | None, Some act -> act (* Identical actions, no failure *)\n      | _ -> (\n        match (cstr.cstr_consts, cstr.cstr_nonconsts, consts, nonconsts) with\n        | 1, 1, [(0, act1)], [(0, act2)]\n          when cstr.cstr_name = \"::\" || cstr.cstr_name = \"[]\"\n               || Datarepr.constructor_has_optional_shape cstr ->\n          (* Typically, match on lists, will avoid isint primitive in that\n             case *)\n          let arg =\n            if !Config.bs_only && Datarepr.constructor_has_optional_shape cstr\n            then Lprim (is_not_none_bs_primitve, [arg], loc)\n            else arg\n          in\n          Lifthenelse (arg, act2, act1)\n        | 2, 0, [(i1, act1); (_, act2)], []\n          when cstr.cstr_name = \"true\" || cstr.cstr_name = \"false\" ->\n          if i1 = 0 then Lifthenelse (arg, act2, act1)\n          else 
Lifthenelse (arg, act1, act2)\n        | n, 0, _, [] when false (* relies on tag being an int *) ->\n          (* The type defines constant constructors only *)\n          call_switcher loc fail_opt arg 0 (n - 1) consts sw_names\n        | n, _, _, _ -> (\n          let act0 =\n            (* = Some act when all non-const constructors match to act *)\n            match (fail_opt, nonconsts) with\n            | Some a, [] -> Some a\n            | Some _, _ ->\n              if List.length nonconsts = cstr.cstr_nonconsts then\n                same_actions nonconsts\n              else None\n            | None, _ -> same_actions nonconsts\n          in\n          match act0 with\n          | Some act when false (* relies on tag being an int *) ->\n            Lifthenelse\n              ( Lprim (Pisint, [arg], loc),\n                call_switcher loc fail_opt arg 0 (n - 1) consts sw_names,\n                act )\n          (* Emit a switch, as bytecode implements this sophisticated instruction *)\n          | _ ->\n            let sw =\n              {\n                sw_numconsts = cstr.cstr_consts;\n                sw_consts = consts;\n                sw_numblocks = cstr.cstr_nonconsts;\n                sw_blocks = nonconsts;\n                sw_failaction = fail_opt;\n                sw_names;\n              }\n            in\n            let hs, sw = share_actions_sw sw in\n            let sw = reintroduce_fail sw in\n            hs (Lswitch (arg, sw, loc))))\n    in\n    (lambda1, jumps_union local_jumps total1)\n\nlet make_test_sequence_variant_constant fail arg int_lambda_list =\n  let _, (cases, actions) =\n    as_interval fail min_int max_int\n      (List.map (fun (a, (_, c)) -> (a, c)) int_lambda_list)\n  in\n  Switcher.test_sequence arg cases actions\n\nlet call_switcher_variant_constant loc fail arg int_lambda_list names =\n  call_switcher loc fail arg min_int max_int\n    (List.map (fun (a, (_, c)) -> (a, c)) int_lambda_list)\n    names\n\nlet 
call_switcher_variant_constr loc fail arg int_lambda_list names =\n  let v = Ident.create \"variant\" in\n  Llet\n    ( Alias,\n      Pgenval,\n      v,\n      Lprim (Pfield (0, Fld_poly_var_tag), [arg], loc),\n      call_switcher loc fail (Lvar v) min_int max_int\n        (List.map (fun (a, (_, c)) -> (a, c)) int_lambda_list)\n        names )\n\nlet call_switcher_variant_constant :\n    (Location.t ->\n    Lambda.lambda option ->\n    Lambda.lambda ->\n    (int * (string * Lambda.lambda)) list ->\n    Ast_untagged_variants.switch_names option ->\n    Lambda.lambda)\n    ref =\n  ref call_switcher_variant_constant\n\nlet call_switcher_variant_constr :\n    (Location.t ->\n    Lambda.lambda option ->\n    Lambda.lambda ->\n    (int * (string * Lambda.lambda)) list ->\n    Ast_untagged_variants.switch_names option ->\n    Lambda.lambda)\n    ref =\n  ref call_switcher_variant_constr\n\nlet make_test_sequence_variant_constant :\n    (Lambda.lambda option ->\n    Lambda.lambda ->\n    (int * (string * Lambda.lambda)) list ->\n    Lambda.lambda)\n    ref =\n  ref make_test_sequence_variant_constant\n\nlet combine_variant names loc row arg partial ctx def\n    (tag_lambda_list, total1, _pats) =\n  let row = Btype.row_repr row in\n  let num_constr = ref 0 in\n  if row.row_closed then\n    List.iter\n      (fun (_, f) ->\n        match Btype.row_field_repr f with\n        | Rabsent | Reither (true, _ :: _, _, _) -> ()\n        | _ -> incr num_constr)\n      row.row_fields\n  else num_constr := max_int;\n  let test_int_or_block arg if_int if_block =\n    if !Config.bs_only then\n      Lifthenelse\n        ( Lprim\n            ( Pccall\n                (Primitive.simple ~name:\"#is_poly_var_block\" ~arity:1\n                   ~alloc:false),\n              [arg],\n              loc ),\n          if_block,\n          if_int )\n    else Lifthenelse (Lprim (Pisint, [arg], loc), if_int, if_block)\n  in\n  let sig_complete = List.length tag_lambda_list = !num_constr\n  and 
one_action = same_actions tag_lambda_list in\n  (* reduandant work under bs context *)\n  let fail, local_jumps =\n    if\n      sig_complete\n      ||\n      match partial with\n      | Total -> true\n      | _ -> false\n    then (None, jumps_empty)\n    else mk_failaction_neg partial ctx def\n  in\n  let consts, nonconsts = split_variant_cases tag_lambda_list in\n  let lambda1 =\n    match (fail, one_action) with\n    | None, Some act -> act\n    | _, _ -> (\n      match (consts, nonconsts) with\n      | [(_, (_, act1))], [(_, (_, act2))] when fail = None ->\n        test_int_or_block arg act1 act2\n      | _, [] ->\n        (* One can compare integers and pointers *)\n        !make_test_sequence_variant_constant fail arg consts\n      | [], _ -> (\n        let lam = !call_switcher_variant_constr loc fail arg nonconsts names in\n        (* One must not dereference integers *)\n        match fail with\n        | None -> lam\n        | Some fail -> test_int_or_block arg fail lam)\n      | _, _ ->\n        let lam_const =\n          !call_switcher_variant_constant loc fail arg consts names\n        and lam_nonconst =\n          !call_switcher_variant_constr loc fail arg nonconsts names\n        in\n        test_int_or_block arg lam_const lam_nonconst)\n  in\n  (lambda1, jumps_union local_jumps total1)\n\nlet combine_array names loc arg partial ctx def (len_lambda_list, total1, _pats)\n    =\n  let fail, local_jumps = mk_failaction_neg partial ctx def in\n  let lambda1 =\n    let newvar = Ident.create \"len\" in\n    let switch =\n      call_switcher loc fail (Lvar newvar) 0 max_int len_lambda_list names\n    in\n    bind Alias newvar (Lprim (Parraylength, [arg], loc)) switch\n  in\n  (lambda1, jumps_union local_jumps total1)\n\n(* Insertion of debugging events *)\n\nlet[@inline] event_branch _repr lam = lam\n\n(*\n   This exception is raised when the compiler cannot produce code\n   because control cannot reach the compiled clause,\n\n   Unused is raised initially 
in compile_test.\n\n   compile_list (for compiling switch results) catch Unused\n\n   comp_match_handlers (for compiling splitted matches)\n   may reraise Unused\n\n\n*)\n\nexception Unused\n\nlet compile_list compile_fun division =\n  let rec c_rec totals = function\n    | [] -> ([], jumps_unions totals, [])\n    | (key, cell) :: rem -> (\n      match cell.ctx with\n      | [] -> c_rec totals rem\n      | _ -> (\n        try\n          let lambda1, total1 = compile_fun cell.ctx cell.pm in\n          let c_rem, total, new_pats =\n            c_rec (jumps_map ctx_combine total1 :: totals) rem\n          in\n          ((key, lambda1) :: c_rem, total, cell.pat :: new_pats)\n        with Unused -> c_rec totals rem))\n  in\n  c_rec [] division\n\nlet compile_orhandlers compile_fun lambda1 total1 ctx to_catch =\n  let rec do_rec r total_r = function\n    | [] -> (r, total_r)\n    | (mat, i, vars, pm) :: rem -> (\n      try\n        let ctx = select_columns mat ctx in\n        let handler_i, total_i = compile_fun ctx pm in\n        match raw_action r with\n        | Lstaticraise (j, args) ->\n          if i = j then\n            ( List.fold_right2 (bind Alias) vars args handler_i,\n              jumps_map (ctx_rshift_num (ncols mat)) total_i )\n          else do_rec r total_r rem\n        | _ ->\n          do_rec\n            (Lstaticcatch (r, (i, vars), handler_i))\n            (jumps_union (jumps_remove i total_r)\n               (jumps_map (ctx_rshift_num (ncols mat)) total_i))\n            rem\n      with Unused ->\n        do_rec (Lstaticcatch (r, (i, vars), lambda_unit)) total_r rem)\n  in\n  do_rec lambda1 total1 to_catch\n\nlet compile_test compile_fun partial divide combine ctx to_match =\n  let division = divide ctx to_match in\n  let c_div = compile_list compile_fun division in\n  match c_div with\n  | [], _, _ -> (\n    match mk_failaction_neg partial ctx to_match.default with\n    | None, _ -> raise Unused\n    | Some l, total -> (l, total))\n  | _ -> combine 
ctx to_match.default c_div\n\n(* Attempt to avoid some useless bindings by lowering them *)\n\n(* Approximation of v present in lam *)\nlet rec approx_present v = function\n  | Lconst _ -> false\n  | Lstaticraise (_, args) -> List.exists (fun lam -> approx_present v lam) args\n  | Lprim (_, args, _) -> List.exists (fun lam -> approx_present v lam) args\n  | Llet (Alias, _k, _, l1, l2) -> approx_present v l1 || approx_present v l2\n  | Lvar vv -> Ident.same v vv\n  | _ -> true\n\nlet rec lower_bind v arg lam =\n  match lam with\n  | Lifthenelse (cond, ifso, ifnot) -> (\n    let pcond = approx_present v cond\n    and pso = approx_present v ifso\n    and pnot = approx_present v ifnot in\n    match (pcond, pso, pnot) with\n    | false, false, false -> lam\n    | false, true, false -> Lifthenelse (cond, lower_bind v arg ifso, ifnot)\n    | false, false, true -> Lifthenelse (cond, ifso, lower_bind v arg ifnot)\n    | _, _, _ -> bind Alias v arg lam)\n  | Lswitch (ls, ({sw_consts = [(i, act)]; sw_blocks = []} as sw), loc)\n    when not (approx_present v ls) ->\n    Lswitch (ls, {sw with sw_consts = [(i, lower_bind v arg act)]}, loc)\n  | Lswitch (ls, ({sw_consts = []; sw_blocks = [(i, act)]} as sw), loc)\n    when not (approx_present v ls) ->\n    Lswitch (ls, {sw with sw_blocks = [(i, lower_bind v arg act)]}, loc)\n  | Llet (Alias, k, vv, lv, l) ->\n    if approx_present v lv then bind Alias v arg lam\n    else Llet (Alias, k, vv, lv, lower_bind v arg l)\n  | Lvar u when Ident.same u v && Ident.name u = \"*sth*\" ->\n    arg (* eliminate let *sth* = from_option x in *sth* *)\n  | _ -> bind Alias v arg lam\n\nlet bind_check str v arg lam =\n  match (str, arg) with\n  | _, Lvar _ -> bind str v arg lam\n  | Alias, _ -> lower_bind v arg lam\n  | _, _ -> bind str v arg lam\n\nlet comp_exit ctx m =\n  match m.default with\n  | (_, i) :: _ -> (Lstaticraise (i, []), jumps_singleton i ctx)\n  | _ -> fatal_error \"Matching.comp_exit\"\n\nlet rec comp_match_handlers comp_fun 
partial ctx arg first_match next_matchs =\n  match next_matchs with\n  | [] -> comp_fun partial ctx arg first_match\n  | rem -> (\n    let rec c_rec body total_body = function\n      | [] -> (body, total_body)\n      (* Hum, -1 means never taken\n         | (-1,pm)::rem -> c_rec body total_body rem *)\n      | (i, pm) :: rem -> (\n        let ctx_i, total_rem = jumps_extract i total_body in\n        match ctx_i with\n        | [] -> c_rec body total_body rem\n        | _ -> (\n          try\n            let li, total_i =\n              comp_fun\n                (match rem with\n                | [] -> partial\n                | _ -> Partial)\n                ctx_i arg pm\n            in\n            c_rec\n              (Lstaticcatch (body, (i, []), li))\n              (jumps_union total_i total_rem)\n              rem\n          with Unused ->\n            c_rec (Lstaticcatch (body, (i, []), lambda_unit)) total_rem rem))\n    in\n    try\n      let first_lam, total = comp_fun Partial ctx arg first_match in\n      c_rec first_lam total rem\n    with Unused -> (\n      match next_matchs with\n      | [] -> raise Unused\n      | (_, x) :: xs -> comp_match_handlers comp_fun partial ctx arg x xs))\n\n(* To find reasonable names for variables *)\n\nlet rec name_pattern default = function\n  | (pat :: _, _) :: rem -> (\n    match Typecore.id_of_pattern pat with\n    | Some id -> id\n    | None -> name_pattern default rem)\n  | _ -> Ident.create default\n\nlet arg_to_var arg cls =\n  match arg with\n  | Lvar v -> (v, arg)\n  | _ ->\n    let v = name_pattern \"match\" cls in\n    (v, Lvar v)\n\n(* To be set by Lam_compile *)\nlet names_from_construct_pattern :\n    (pattern -> Ast_untagged_variants.switch_names option) ref =\n  ref (fun _ -> None)\n\n(*\n  The main compilation function.\n   Input:\n      repr=used for inserting debug events\n      partial=exhaustiveness information from Parmatch\n      ctx=a context\n      m=a pattern matching\n\n   Output: a lambda term, 
a jump summary {..., exit number -> context, .. }\n*)\n\nlet rec compile_match repr partial ctx m =\n  match m with\n  | {cases = []; args = []} -> comp_exit ctx m\n  | {cases = ([], action) :: rem} ->\n    if is_guarded action then\n      let lambda, total = compile_match None partial ctx {m with cases = rem} in\n      (event_branch repr (patch_guarded lambda action), total)\n    else (event_branch repr action, jumps_empty)\n  | {args = (arg, str) :: argl} ->\n    let v, newarg = arg_to_var arg m.cases in\n    let first_match, rem =\n      split_precompile (Some v) {m with args = (newarg, Alias) :: argl}\n    in\n    let lam, total =\n      comp_match_handlers\n        ((if dbg then do_compile_matching_pr else do_compile_matching) repr)\n        partial ctx newarg first_match rem\n    in\n    (bind_check str v arg lam, total)\n  | _ -> assert false\n\n(* verbose version of do_compile_matching, for debug *)\n\nand do_compile_matching_pr repr partial ctx arg x =\n  prerr_string \"COMPILE: \";\n  prerr_endline\n    (match partial with\n    | Partial -> \"Partial\"\n    | Total -> \"Total\");\n  prerr_endline \"MATCH\";\n  pretty_precompiled x;\n  prerr_endline \"CTX\";\n  pretty_ctx ctx;\n  let ((_, jumps) as r) = do_compile_matching repr partial ctx arg x in\n  prerr_endline \"JUMPS\";\n  pretty_jumps jumps;\n  r\n\nand do_compile_matching repr partial ctx arg pmh =\n  match pmh with\n  | Pm pm -> (\n    let pat = what_is_cases pm.cases in\n    match pat.pat_desc with\n    | Tpat_any -> compile_no_test divide_var ctx_rshift repr partial ctx pm\n    | Tpat_tuple patl ->\n      compile_no_test\n        (divide_tuple (List.length patl) (normalize_pat pat))\n        ctx_combine repr partial ctx pm\n    | Tpat_record ((_, lbl, _) :: _, _) ->\n      compile_no_test\n        (divide_record lbl.lbl_all (normalize_pat pat))\n        ctx_combine repr partial ctx pm\n    | Tpat_constant cst ->\n      let names = None in\n      compile_test\n        (compile_match repr 
partial)\n        partial divide_constant\n        (combine_constant names pat.pat_loc arg cst partial)\n        ctx pm\n    | Tpat_construct (_, cstr, _) ->\n      let sw_names = !names_from_construct_pattern pat in\n      compile_test\n        (compile_match repr partial)\n        partial divide_constructor\n        (combine_constructor sw_names pat.pat_loc arg pat cstr partial)\n        ctx pm\n    | Tpat_array _ ->\n      let names = None in\n      compile_test\n        (compile_match repr partial)\n        partial divide_array\n        (combine_array names pat.pat_loc arg partial)\n        ctx pm\n    | Tpat_lazy _ ->\n      compile_no_test\n        (divide_lazy (normalize_pat pat))\n        ctx_combine repr partial ctx pm\n    | Tpat_variant (_, _, row) ->\n      let names = None in\n      compile_test\n        (compile_match repr partial)\n        partial (divide_variant !row)\n        (combine_variant names pat.pat_loc !row arg partial)\n        ctx pm\n    | _ -> assert false)\n  | PmVar {inside = pmh; var_arg = arg} ->\n    let lam, total =\n      do_compile_matching repr partial (ctx_lshift ctx) arg pmh\n    in\n    (lam, jumps_map ctx_rshift total)\n  | PmOr {body; handlers} ->\n    let lam, total = compile_match repr partial ctx body in\n    compile_orhandlers (compile_match repr partial) lam total ctx handlers\n\nand compile_no_test divide up_ctx repr partial ctx to_match =\n  let {pm = this_match; ctx = this_ctx} = divide ctx to_match in\n  let lambda, total = compile_match repr partial this_ctx this_match in\n  (lambda, jumps_map up_ctx total)\n\n(* The entry points *)\n\n(*\n   If there is a guard in a matching or a lazy pattern,\n   then set exhaustiveness info to Partial.\n   (because of side effects, assume the worst).\n\n   Notice that exhaustiveness information is trusted by the compiler,\n   that is, a match flagged as Total should not fail at runtime.\n   More specifically, for instance if match y with x::_ -> x is flagged\n   total (as it 
happens during JoCaml compilation) then y cannot be []\n   at runtime. As a consequence, the static Total exhaustiveness information\n   have to be downgraded to Partial, in the dubious cases where guards\n   or lazy pattern execute arbitrary code that may perform side effects\n   and change the subject values.\nLM:\n   Lazy pattern was PR#5992, initial patch by lpw25.\n   I have  generalized the patch, so as to also find mutable fields.\n*)\n\nlet find_in_pat pred =\n  let rec find_rec p =\n    pred p.pat_desc\n    ||\n    match p.pat_desc with\n    | Tpat_alias (p, _, _) | Tpat_variant (_, Some p, _) | Tpat_lazy p ->\n      find_rec p\n    | Tpat_tuple ps | Tpat_construct (_, _, ps) | Tpat_array ps ->\n      List.exists find_rec ps\n    | Tpat_record (lpats, _) -> List.exists (fun (_, _, p) -> find_rec p) lpats\n    | Tpat_or (p, q, _) -> find_rec p || find_rec q\n    | Tpat_constant _ | Tpat_var _ | Tpat_any | Tpat_variant (_, None, _) ->\n      false\n  in\n  find_rec\n\nlet is_lazy_pat = function\n  | Tpat_lazy _ -> true\n  | Tpat_alias _ | Tpat_variant _ | Tpat_record _ | Tpat_tuple _\n  | Tpat_construct _ | Tpat_array _ | Tpat_or _ | Tpat_constant _ | Tpat_var _\n  | Tpat_any ->\n    false\n\nlet is_lazy p = find_in_pat is_lazy_pat p\n\nlet have_mutable_field p =\n  match p with\n  | Tpat_record (lps, _) ->\n    List.exists\n      (fun (_, lbl, _) ->\n        match lbl.Types.lbl_mut with\n        | Mutable -> true\n        | Immutable -> false)\n      lps\n  | Tpat_alias _ | Tpat_variant _ | Tpat_lazy _ | Tpat_tuple _\n  | Tpat_construct _ | Tpat_array _ | Tpat_or _ | Tpat_constant _ | Tpat_var _\n  | Tpat_any ->\n    false\n\nlet is_mutable p = find_in_pat have_mutable_field p\n\n(* Downgrade Total when\n   1. Matching accesses some mutable fields;\n   2. 
And there are  guards or lazy patterns.\n*)\n\nlet check_partial is_mutable is_lazy pat_act_list = function\n  | Partial -> Partial\n  | Total ->\n    if\n      pat_act_list = []\n      ||\n      (* allow empty case list *)\n      List.exists\n        (fun (pats, lam) -> is_mutable pats && (is_guarded lam || is_lazy pats))\n        pat_act_list\n    then Partial\n    else Total\n\nlet check_partial_list =\n  check_partial (List.exists is_mutable) (List.exists is_lazy)\nlet check_partial = check_partial is_mutable is_lazy\n\n(* have toplevel handler when appropriate *)\n\nlet start_ctx n = [{left = []; right = omegas n}]\n\nlet check_total total lambda i handler_fun =\n  if jumps_is_empty total then lambda\n  else Lstaticcatch (lambda, (i, []), handler_fun ())\n\nlet compile_matching repr handler_fun arg pat_act_list partial =\n  let partial = check_partial pat_act_list partial in\n  match partial with\n  | Partial -> (\n    let raise_num = next_raise_count () in\n    let pm =\n      {\n        cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;\n        args = [(arg, Strict)];\n        default = [([[omega]], raise_num)];\n      }\n    in\n    try\n      let lambda, total = compile_match repr partial (start_ctx 1) pm in\n      check_total total lambda raise_num handler_fun\n    with Unused -> assert false (* ; handler_fun() *))\n  | Total ->\n    let pm =\n      {\n        cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;\n        args = [(arg, Strict)];\n        default = [];\n      }\n    in\n    let lambda, total = compile_match repr partial (start_ctx 1) pm in\n    assert (jumps_is_empty total);\n    lambda\n\nlet partial_function loc () =\n  (* [Location.get_pos_info] is too expensive *)\n  let fname, line, char = Location.get_pos_info loc.Location.loc_start in\n  let fname = Filename.basename fname in\n  Lprim\n    ( Praise Raise_regular,\n      [\n        Lprim\n          ( Pmakeblock Blk_extension,\n            [\n              
transl_normal_path Predef.path_match_failure;\n              Lconst\n                (Const_block\n                   ( Blk_tuple,\n                     [\n                       Const_base (Const_string (fname, None));\n                       Const_base (Const_int line);\n                       Const_base (Const_int char);\n                     ] ));\n            ],\n            loc );\n      ],\n      loc )\n\nlet for_function loc repr param pat_act_list partial =\n  compile_matching repr (partial_function loc) param pat_act_list partial\n\n(* In the following two cases, exhaustiveness info is not available! *)\nlet for_trywith param pat_act_list =\n  compile_matching None\n    (fun () -> Lprim (Praise Raise_reraise, [param], Location.none))\n    param pat_act_list Partial\n\nlet simple_for_let loc param pat body =\n  compile_matching None (partial_function loc) param [(pat, body)] Partial\n\n(* Optimize binding of immediate tuples\n\n   The goal of the implementation of 'for_let' below, which replaces\n   'simple_for_let', is to avoid tuple allocation in cases such as\n   this one:\n\n     let (x,y) =\n        let foo = ... in\n        if foo then (1, 2) else (3,4)\n     in bar\n\n   The compiler easily optimizes the simple `let (x,y) = (1,2) in ...`\n   case (call to Matching.for_multiple_match from Translcore), but\n   didn't optimize situations where the rhs tuples are hidden under\n   a more complex context.\n\n   The idea comes from Alain Frisch who suggested and implemented\n   the following compilation method, based on Lassign:\n\n     let x = dummy in let y = dummy in\n     begin\n      let foo = ... in\n      if foo then\n        (let x1 = 1 in let y1 = 2 in x <- x1; y <- y1)\n      else\n        (let x2 = 3 in let y2 = 4 in x <- x2; y <- y2)\n     end;\n     bar\n\n   The current implementation from Gabriel Scherer uses Lstaticcatch /\n   Lstaticraise instead:\n\n     catch\n       let foo = ... 
in\n       if foo then\n         (let x1 = 1 in let y1 = 2 in exit x1 y1)\n       else\n        (let x2 = 3 in let y2 = 4 in exit x2 y2)\n     with x y ->\n       bar\n\n   The catch/exit is used to avoid duplication of the let body ('bar'\n   in the example), on 'if' branches for example; it is useless for\n   linear contexts such as 'let', but we don't need to be careful to\n   generate nice code because Simplif will remove such useless\n   catch/exit.\n*)\n\nlet rec map_return f = function\n  | Llet (str, k, id, l1, l2) -> Llet (str, k, id, l1, map_return f l2)\n  | Lletrec (l1, l2) -> Lletrec (l1, map_return f l2)\n  | Lifthenelse (lcond, lthen, lelse) ->\n    Lifthenelse (lcond, map_return f lthen, map_return f lelse)\n  | Lsequence (l1, l2) -> Lsequence (l1, map_return f l2)\n  | Ltrywith (l1, id, l2) -> Ltrywith (map_return f l1, id, map_return f l2)\n  | Lstaticcatch (l1, b, l2) ->\n    Lstaticcatch (map_return f l1, b, map_return f l2)\n  | (Lstaticraise _ | Lprim (Praise _, _, _)) as l -> l\n  | l -> f l\n\n(* The 'opt' reference indicates if the optimization is worthy.\n\n   It is shared by the different calls to 'assign_pat' performed from\n   'map_return'. For example with the code\n     let (x, y) = if foo then z else (1,2)\n   the else-branch will activate the optimization for both branches.\n\n   That means that the optimization is activated if *there exists* an\n   interesting tuple in one hole of the let-rhs context. We could\n   choose to activate it only if *all* holes are interesting. 
We made\n   that choice because being optimistic is extremely cheap (one static\n   exit/catch overhead in the \"wrong cases\"), while being pessimistic\n   can be costly (one unnecessary tuple allocation).\n*)\n\nlet assign_pat opt nraise catch_ids loc pat lam =\n  let rec collect acc pat lam =\n    match (pat.pat_desc, lam) with\n    | Tpat_tuple patl, Lprim (Pmakeblock _, lams, _) ->\n      opt := true;\n      List.fold_left2 collect acc patl lams\n    | Tpat_tuple patl, Lconst (Const_block (_, scl)) ->\n      opt := true;\n      let collect_const acc pat sc = collect acc pat (Lconst sc) in\n      List.fold_left2 collect_const acc patl scl\n    | _ ->\n      (* pattern idents will be bound in staticcatch (let body), so we\n         refresh them here to guarantee binders  uniqueness *)\n      let pat_ids = pat_bound_idents pat in\n      let fresh_ids = List.map (fun id -> (id, Ident.rename id)) pat_ids in\n      (fresh_ids, alpha_pat fresh_ids pat, lam) :: acc\n  in\n\n  (* sublets were accumulated by 'collect' with the leftmost tuple\n     pattern at the bottom of the list; to respect right-to-left\n     evaluation order for tuples, we must evaluate sublets\n     top-to-bottom. To preserve tail-rec, we will fold_left the\n     reversed list. 
*)\n  let rev_sublets = List.rev (collect [] pat lam) in\n  let exit =\n    (* build an Ident.tbl to avoid quadratic refreshing costs *)\n    let add t (id, fresh_id) = Ident.add id fresh_id t in\n    let add_ids acc (ids, _pat, _lam) = List.fold_left add acc ids in\n    let tbl = List.fold_left add_ids Ident.empty rev_sublets in\n    let fresh_var id = Lvar (Ident.find_same id tbl) in\n    Lstaticraise (nraise, List.map fresh_var catch_ids)\n  in\n  let push_sublet code (_ids, pat, lam) = simple_for_let loc lam pat code in\n  List.fold_left push_sublet exit rev_sublets\n\nlet for_let loc param pat body =\n  match pat.pat_desc with\n  | Tpat_any ->\n    (* This eliminates a useless variable (and stack slot in bytecode)\n       for \"let _ = ...\". See #6865. *)\n    Lsequence (param, body)\n  | Tpat_var (id, _) ->\n    (* fast path, and keep track of simple bindings to unboxable numbers *)\n    Llet (Strict, Pgenval, id, param, body)\n  | _ ->\n    (* Turn off such optimization to reduce diff in the beginning - FIXME*)\n    if !Config.bs_only then simple_for_let loc param pat body\n    else\n      let opt = ref false in\n      let nraise = next_raise_count () in\n      let catch_ids = pat_bound_idents pat in\n      let bind = map_return (assign_pat opt nraise catch_ids loc pat) param in\n      if !opt then Lstaticcatch (bind, (nraise, catch_ids), body)\n      else simple_for_let loc param pat body\n\n(* Handling of tupled functions and matchings *)\n\n(* Easy case since variables are available *)\nlet for_tupled_function loc paraml pats_act_list partial =\n  let partial = check_partial_list pats_act_list partial in\n  let raise_num = next_raise_count () in\n  let omegas = [List.map (fun _ -> omega) paraml] in\n  let pm =\n    {\n      cases = pats_act_list;\n      args = List.map (fun id -> (Lvar id, Strict)) paraml;\n      default = [(omegas, raise_num)];\n    }\n  in\n  try\n    let lambda, total =\n      compile_match None partial (start_ctx (List.length 
paraml)) pm\n    in\n    check_total total lambda raise_num (partial_function loc)\n  with Unused -> partial_function loc ()\n\nlet flatten_pattern size p =\n  match p.pat_desc with\n  | Tpat_tuple args -> args\n  | Tpat_any -> omegas size\n  | _ -> raise Cannot_flatten\n\nlet rec flatten_pat_line size p k =\n  match p.pat_desc with\n  | Tpat_any -> omegas size :: k\n  | Tpat_tuple args -> args :: k\n  | Tpat_or (p1, p2, _) -> flatten_pat_line size p1 (flatten_pat_line size p2 k)\n  | Tpat_alias (p, _, _) ->\n    (* Note: if this 'as' pat is here, then this is a\n       useless binding, solves PR#3780 *)\n    flatten_pat_line size p k\n  | _ -> fatal_error \"Matching.flatten_pat_line\"\n\nlet flatten_cases size cases =\n  List.map\n    (fun (ps, action) ->\n      match ps with\n      | [p] -> (flatten_pattern size p, action)\n      | _ -> fatal_error \"Matching.flatten_case\")\n    cases\n\nlet flatten_matrix size pss =\n  List.fold_right\n    (fun ps r ->\n      match ps with\n      | [p] -> flatten_pat_line size p r\n      | _ -> fatal_error \"Matching.flatten_matrix\")\n    pss []\n\nlet flatten_def size def =\n  List.map (fun (pss, i) -> (flatten_matrix size pss, i)) def\n\nlet flatten_pm size args pm =\n  {\n    args;\n    cases = flatten_cases size pm.cases;\n    default = flatten_def size pm.default;\n  }\n\nlet flatten_precompiled size args pmh =\n  match pmh with\n  | Pm pm -> Pm (flatten_pm size args pm)\n  | PmOr {body = b; handlers = hs; or_matrix = m} ->\n    PmOr\n      {\n        body = flatten_pm size args b;\n        handlers =\n          List.map\n            (fun (mat, i, vars, pm) -> (flatten_matrix size mat, i, vars, pm))\n            hs;\n        or_matrix = flatten_matrix size m;\n      }\n  | PmVar _ -> assert false\n\n(*\n   compiled_flattened is a ``comp_fun'' argument to comp_match_handlers.\n   Hence it needs a fourth argument, which it ignores\n*)\n\nlet compile_flattened repr partial ctx _ pmh =\n  match pmh with\n  | Pm pm -> 
compile_match repr partial ctx pm\n  | PmOr {body = b; handlers = hs} ->\n    let lam, total = compile_match repr partial ctx b in\n    compile_orhandlers (compile_match repr partial) lam total ctx hs\n  | PmVar _ -> assert false\n\nlet do_for_multiple_match loc paraml pat_act_list partial =\n  let repr = None in\n  let partial = check_partial pat_act_list partial in\n  let raise_num, pm1 =\n    match partial with\n    | Partial ->\n      let raise_num = next_raise_count () in\n      ( raise_num,\n        {\n          cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;\n          args = [(Lprim (Pmakeblock Blk_tuple, paraml, loc), Strict)];\n          default = [([[omega]], raise_num)];\n        } )\n    | _ ->\n      ( -1,\n        {\n          cases = List.map (fun (pat, act) -> ([pat], act)) pat_act_list;\n          args = [(Lprim (Pmakeblock Blk_tuple, paraml, loc), Strict)];\n          default = [];\n        } )\n  in\n\n  try\n    try\n      (* Once for checking that compilation is possible *)\n      let next, nexts = split_precompile None pm1 in\n\n      let size = List.length paraml\n      and idl = List.map (fun _ -> Ident.create \"match\") paraml in\n      let args = List.map (fun id -> (Lvar id, Alias)) idl in\n\n      let flat_next = flatten_precompiled size args next\n      and flat_nexts =\n        List.map (fun (e, pm) -> (e, flatten_precompiled size args pm)) nexts\n      in\n\n      let lam, total =\n        comp_match_handlers (compile_flattened repr) partial (start_ctx size) ()\n          flat_next flat_nexts\n      in\n      List.fold_right2 (bind Strict) idl paraml\n        (match partial with\n        | Partial -> check_total total lam raise_num (partial_function loc)\n        | Total ->\n          assert (jumps_is_empty total);\n          lam)\n    with Cannot_flatten -> (\n      let lambda, total = compile_match None partial (start_ctx 1) pm1 in\n      match partial with\n      | Partial -> check_total total lambda raise_num 
(partial_function loc)\n      | Total ->\n        assert (jumps_is_empty total);\n        lambda)\n  with Unused -> assert false (* ; partial_function loc () *)\n\n(* PR#4828: Believe it or not, the 'paraml' argument below\n   may not be side effect free. *)\n\nlet param_to_var param =\n  match param with\n  | Lvar v -> (v, None)\n  | _ -> (Ident.create \"match\", Some param)\n\nlet bind_opt (v, eo) k =\n  match eo with\n  | None -> k\n  | Some e -> Lambda.bind Strict v e k\n\nlet for_multiple_match loc paraml pat_act_list partial =\n  let v_paraml = List.map param_to_var paraml in\n  let paraml = List.map (fun (v, _) -> Lvar v) v_paraml in\n  List.fold_right bind_opt v_paraml\n    (do_for_multiple_match loc paraml pat_act_list partial)\n"
  },
  {
    "path": "analysis/vendor/ml/matching.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Compilation of pattern-matching *)\n\nopen Typedtree\nopen Lambda\n\nval call_switcher_variant_constant :\n  (Location.t ->\n  Lambda.lambda option ->\n  Lambda.lambda ->\n  (int * (string * Lambda.lambda)) list ->\n  Ast_untagged_variants.switch_names option ->\n  Lambda.lambda)\n  ref\n\nval call_switcher_variant_constr :\n  (Location.t ->\n  Lambda.lambda option ->\n  Lambda.lambda ->\n  (int * (string * Lambda.lambda)) list ->\n  Ast_untagged_variants.switch_names option ->\n  Lambda.lambda)\n  ref\n\nval make_test_sequence_variant_constant :\n  (Lambda.lambda option ->\n  Lambda.lambda ->\n  (int * (string * Lambda.lambda)) list ->\n  Lambda.lambda)\n  ref\n\n(* Entry points to match compiler *)\nval for_function :\n  Location.t ->\n  int ref option ->\n  lambda ->\n  (pattern * lambda) list ->\n  partial ->\n  lambda\nval for_trywith : lambda -> (pattern * lambda) 
list -> lambda\nval for_let : Location.t -> lambda -> pattern -> lambda -> lambda\nval for_multiple_match :\n  Location.t -> lambda list -> (pattern * lambda) list -> partial -> lambda\n\nval for_tupled_function :\n  Location.t ->\n  Ident.t list ->\n  (pattern list * lambda) list ->\n  partial ->\n  lambda\n\nexception Cannot_flatten\n\nval flatten_pattern : int -> pattern -> pattern list\n\n(* Expand stringswitch to  string test tree *)\nval expand_stringswitch :\n  Location.t -> lambda -> (string * lambda) list -> lambda option -> lambda\n\nval inline_lazy_force : lambda -> Location.t -> lambda\n\n(* To be set by Lam_compile *)\nval names_from_construct_pattern :\n  (pattern -> Ast_untagged_variants.switch_names option) ref\n"
  },
  {
    "path": "analysis/vendor/ml/mtype.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Operations on module types *)\n\nopen Asttypes\nopen Path\nopen Types\n\nlet rec scrape env mty =\n  match mty with\n  | Mty_ident p -> (\n    try scrape env (Env.find_modtype_expansion p env) with Not_found -> mty)\n  | _ -> mty\n\nlet freshen mty = Subst.modtype Subst.identity mty\n\nlet rec strengthen ~aliasable env mty p =\n  match scrape env mty with\n  | Mty_signature sg -> Mty_signature (strengthen_sig ~aliasable env sg p 0)\n  | Mty_functor (param, arg, res)\n    when !Clflags.applicative_functors && Ident.name param <> \"*\" ->\n    Mty_functor\n      ( param,\n        arg,\n        strengthen ~aliasable:false env res (Papply (p, Pident param)) )\n  | mty -> mty\n\nand strengthen_sig ~aliasable env sg p pos =\n  match sg with\n  | [] -> []\n  | (Sig_value (_, desc) as sigelt) :: rem ->\n    let nextpos =\n      match desc.val_kind with\n      | Val_prim _ -> pos\n  
    | _ -> pos + 1\n    in\n    sigelt :: strengthen_sig ~aliasable env rem p nextpos\n  | Sig_type (id, {type_kind = Type_abstract}, _)\n    :: (Sig_type (id', {type_private = Private}, _) :: _ as rem)\n    when Ident.name id = Ident.name id' ^ \"#row\" ->\n    strengthen_sig ~aliasable env rem p pos\n  | Sig_type (id, decl, rs) :: rem ->\n    let newdecl =\n      match (decl.type_manifest, decl.type_private, decl.type_kind) with\n      | Some _, Public, _ -> decl\n      | Some _, Private, (Type_record _ | Type_variant _) -> decl\n      | _ ->\n        let manif =\n          Some\n            (Btype.newgenty\n               (Tconstr\n                  (Pdot (p, Ident.name id, nopos), decl.type_params, ref Mnil)))\n        in\n        if decl.type_kind = Type_abstract then\n          {decl with type_private = Public; type_manifest = manif}\n        else {decl with type_manifest = manif}\n    in\n    Sig_type (id, newdecl, rs) :: strengthen_sig ~aliasable env rem p pos\n  | (Sig_typext _ as sigelt) :: rem ->\n    sigelt :: strengthen_sig ~aliasable env rem p (pos + 1)\n  | Sig_module (id, md, rs) :: rem ->\n    let str =\n      strengthen_decl ~aliasable env md (Pdot (p, Ident.name id, pos))\n    in\n    Sig_module (id, str, rs)\n    :: strengthen_sig ~aliasable\n         (Env.add_module_declaration ~check:false id md env)\n         rem p (pos + 1)\n    (* Need to add the module in case it defines manifest module types *)\n  | Sig_modtype (id, decl) :: rem ->\n    let newdecl =\n      match decl.mtd_type with\n      | None ->\n        {decl with mtd_type = Some (Mty_ident (Pdot (p, Ident.name id, nopos)))}\n      | Some _ -> decl\n    in\n    Sig_modtype (id, newdecl)\n    :: strengthen_sig ~aliasable (Env.add_modtype id decl env) rem p pos\n    (* Need to add the module type in case it is manifest *)\n  | (Sig_class _ as sigelt) :: rem ->\n    sigelt :: strengthen_sig ~aliasable env rem p (pos + 1)\n  | (Sig_class_type _ as sigelt) :: rem ->\n    sigelt :: 
strengthen_sig ~aliasable env rem p pos\n\nand strengthen_decl ~aliasable env md p =\n  match md.md_type with\n  | Mty_alias _ -> md\n  | _ when aliasable -> {md with md_type = Mty_alias (Mta_present, p)}\n  | mty -> {md with md_type = strengthen ~aliasable env mty p}\n\nlet () = Env.strengthen := strengthen\n\n(* In nondep_supertype, env is only used for the type it assigns to id.\n   Hence there is no need to keep env up-to-date by adding the bindings\n   traversed. *)\n\ntype variance = Co | Contra | Strict\n\nlet nondep_supertype env mid mty =\n  let rec nondep_mty env va mty =\n    match mty with\n    | Mty_ident p ->\n      if Path.isfree mid p then\n        nondep_mty env va (Env.find_modtype_expansion p env)\n      else mty\n    | Mty_alias (_, p) ->\n      if Path.isfree mid p then\n        nondep_mty env va (Env.find_module p env).md_type\n      else mty\n    | Mty_signature sg -> Mty_signature (nondep_sig env va sg)\n    | Mty_functor (param, arg, res) ->\n      let var_inv =\n        match va with\n        | Co -> Contra\n        | Contra -> Co\n        | Strict -> Strict\n      in\n      Mty_functor\n        ( param,\n          Misc.may_map (nondep_mty env var_inv) arg,\n          nondep_mty\n            (Env.add_module ~arg:true param (Btype.default_mty arg) env)\n            va res )\n  and nondep_sig env va = function\n    | [] -> []\n    | item :: rem -> (\n      let rem' = nondep_sig env va rem in\n      match item with\n      | Sig_value (id, d) ->\n        Sig_value (id, {d with val_type = Ctype.nondep_type env mid d.val_type})\n        :: rem'\n      | Sig_type (id, d, rs) ->\n        Sig_type (id, Ctype.nondep_type_decl env mid id (va = Co) d, rs) :: rem'\n      | Sig_typext (id, ext, es) ->\n        Sig_typext (id, Ctype.nondep_extension_constructor env mid ext, es)\n        :: rem'\n      | Sig_module (id, md, rs) ->\n        Sig_module (id, {md with md_type = nondep_mty env va md.md_type}, rs)\n        :: rem'\n      | Sig_modtype (id, d) 
-> (\n        try Sig_modtype (id, nondep_modtype_decl env d) :: rem'\n        with Not_found -> (\n          match va with\n          | Co ->\n            Sig_modtype\n              ( id,\n                {mtd_type = None; mtd_loc = Location.none; mtd_attributes = []}\n              )\n            :: rem'\n          | _ -> raise Not_found))\n      | Sig_class _ -> assert false\n      | Sig_class_type (id, d, rs) ->\n        Sig_class_type (id, Ctype.nondep_cltype_declaration env mid d, rs)\n        :: rem')\n  and nondep_modtype_decl env mtd =\n    {mtd with mtd_type = Misc.may_map (nondep_mty env Strict) mtd.mtd_type}\n  in\n\n  nondep_mty env Co mty\n\nlet enrich_typedecl env p decl =\n  match decl.type_manifest with\n  | Some _ -> decl\n  | None -> (\n    try\n      let orig_decl = Env.find_type p env in\n      if orig_decl.type_arity <> decl.type_arity then decl\n      else\n        {\n          decl with\n          type_manifest =\n            Some (Btype.newgenty (Tconstr (p, decl.type_params, ref Mnil)));\n        }\n    with Not_found -> decl)\n\nlet rec enrich_modtype env p mty =\n  match mty with\n  | Mty_signature sg -> Mty_signature (List.map (enrich_item env p) sg)\n  | _ -> mty\n\nand enrich_item env p = function\n  | Sig_type (id, decl, rs) ->\n    Sig_type (id, enrich_typedecl env (Pdot (p, Ident.name id, nopos)) decl, rs)\n  | Sig_module (id, md, rs) ->\n    Sig_module\n      ( id,\n        {\n          md with\n          md_type =\n            enrich_modtype env (Pdot (p, Ident.name id, nopos)) md.md_type;\n        },\n        rs )\n  | item -> item\n\nlet rec type_paths env p mty =\n  match scrape env mty with\n  | Mty_ident _ -> []\n  | Mty_alias _ -> []\n  | Mty_signature sg -> type_paths_sig env p 0 sg\n  | Mty_functor _ -> []\n\nand type_paths_sig env p pos sg =\n  match sg with\n  | [] -> []\n  | Sig_value (_id, decl) :: rem ->\n    let pos' =\n      match decl.val_kind with\n      | Val_prim _ -> pos\n      | _ -> pos + 1\n    in\n    
type_paths_sig env p pos' rem\n  | Sig_type (id, _decl, _) :: rem ->\n    Pdot (p, Ident.name id, nopos) :: type_paths_sig env p pos rem\n  | Sig_module (id, md, _) :: rem ->\n    type_paths env (Pdot (p, Ident.name id, pos)) md.md_type\n    @ type_paths_sig\n        (Env.add_module_declaration ~check:false id md env)\n        p (pos + 1) rem\n  | Sig_modtype (id, decl) :: rem ->\n    type_paths_sig (Env.add_modtype id decl env) p pos rem\n  | (Sig_typext _ | Sig_class _) :: rem -> type_paths_sig env p (pos + 1) rem\n  | Sig_class_type _ :: rem -> type_paths_sig env p pos rem\n\nlet rec no_code_needed env mty =\n  match scrape env mty with\n  | Mty_ident _ -> false\n  | Mty_signature sg -> no_code_needed_sig env sg\n  | Mty_functor (_, _, _) -> false\n  | Mty_alias (Mta_absent, _) -> true\n  | Mty_alias (Mta_present, _) -> false\n\nand no_code_needed_sig env sg =\n  match sg with\n  | [] -> true\n  | Sig_value (_id, decl) :: rem -> (\n    match decl.val_kind with\n    | Val_prim _ -> no_code_needed_sig env rem\n    | _ -> false)\n  | Sig_module (id, md, _) :: rem ->\n    no_code_needed env md.md_type\n    && no_code_needed_sig\n         (Env.add_module_declaration ~check:false id md env)\n         rem\n  | (Sig_type _ | Sig_modtype _ | Sig_class_type _) :: rem ->\n    no_code_needed_sig env rem\n  | (Sig_typext _ | Sig_class _) :: _ -> false\n\n(* Check whether a module type may return types *)\n\nlet rec contains_type env = function\n  | Mty_ident path -> (\n    try\n      match (Env.find_modtype path env).mtd_type with\n      | None -> raise Exit (* PR#6427 *)\n      | Some mty -> contains_type env mty\n    with Not_found -> raise Exit)\n  | Mty_signature sg -> contains_type_sig env sg\n  | Mty_functor (_, _, body) -> contains_type env body\n  | Mty_alias _ -> ()\n\nand contains_type_sig env = List.iter (contains_type_item env)\n\nand contains_type_item env = function\n  | Sig_type\n      ( _,\n        ( {type_manifest = None}\n        | {type_kind = 
Type_abstract; type_private = Private} ),\n        _ )\n  | Sig_modtype _\n  | Sig_typext (_, {ext_args = Cstr_record _}, _) ->\n    (* We consider that extension constructors with an inlined\n       record create a type (the inlined record), even though\n       it would be technically safe to ignore that considering\n       the current constraints which guarantee that this type\n       is kept local to expressions. *)\n    raise Exit\n  | Sig_module (_, {md_type = mty}, _) -> contains_type env mty\n  | Sig_value _ | Sig_type _ | Sig_typext _ | Sig_class _ | Sig_class_type _ ->\n    ()\n\nlet contains_type env mty =\n  try\n    contains_type env mty;\n    false\n  with Exit -> true\n\n(* Remove module aliases from a signature *)\n\nmodule PathSet = Set.Make (Path)\nmodule PathMap = Map.Make (Path)\nmodule IdentSet = Set.Make (Ident)\n\nlet rec get_prefixes = function\n  | Pident _ -> PathSet.empty\n  | Pdot (p, _, _) | Papply (p, _) -> PathSet.add p (get_prefixes p)\n\nlet rec get_arg_paths = function\n  | Pident _ -> PathSet.empty\n  | Pdot (p, _, _) -> get_arg_paths p\n  | Papply (p1, p2) ->\n    PathSet.add p2\n      (PathSet.union (get_prefixes p2)\n         (PathSet.union (get_arg_paths p1) (get_arg_paths p2)))\n\nlet rec rollback_path subst p =\n  try Pident (PathMap.find p subst)\n  with Not_found -> (\n    match p with\n    | Pident _ | Papply _ -> p\n    | Pdot (p1, s, n) ->\n      let p1' = rollback_path subst p1 in\n      if Path.same p1 p1' then p else rollback_path subst (Pdot (p1', s, n)))\n\nlet rec collect_ids subst bindings p =\n  match rollback_path subst p with\n  | Pident id ->\n    let ids =\n      try collect_ids subst bindings (Ident.find_same id bindings)\n      with Not_found -> IdentSet.empty\n    in\n    IdentSet.add id ids\n  | _ -> IdentSet.empty\n\nlet collect_arg_paths mty =\n  let open Btype in\n  let paths = ref PathSet.empty\n  and subst = ref PathMap.empty\n  and bindings = ref Ident.empty in\n  (* let rt = Ident.create \"Root\" 
in\n     and prefix = ref (Path.Pident rt) in *)\n  let it_path p = paths := PathSet.union (get_arg_paths p) !paths\n  and it_signature_item it si =\n    type_iterators.it_signature_item it si;\n    match si with\n    | Sig_module (id, {md_type = Mty_alias (_, p)}, _) ->\n      bindings := Ident.add id p !bindings\n    | Sig_module (id, {md_type = Mty_signature sg}, _) ->\n      List.iter\n        (function\n          | Sig_module (id', _, _) ->\n            subst :=\n              PathMap.add (Pdot (Pident id, Ident.name id', -1)) id' !subst\n          | _ -> ())\n        sg\n    | _ -> ()\n  in\n  let it = {type_iterators with it_path; it_signature_item} in\n  it.it_module_type it mty;\n  it.it_module_type unmark_iterators mty;\n  PathSet.fold\n    (fun p -> IdentSet.union (collect_ids !subst !bindings p))\n    !paths IdentSet.empty\n\nlet rec remove_aliases env excl mty =\n  match mty with\n  | Mty_signature sg -> Mty_signature (remove_aliases_sig env excl sg)\n  | Mty_alias _ ->\n    let mty' = Env.scrape_alias env mty in\n    if mty' = mty then mty\n    else (* nested polymorphic comparison *)\n      remove_aliases env excl mty'\n  | mty -> mty\n\nand remove_aliases_sig env excl sg =\n  match sg with\n  | [] -> []\n  | Sig_module (id, md, rs) :: rem ->\n    let mty =\n      match md.md_type with\n      | Mty_alias _ when IdentSet.mem id excl -> md.md_type\n      | mty -> remove_aliases env excl mty\n    in\n    Sig_module (id, {md with md_type = mty}, rs)\n    :: remove_aliases_sig (Env.add_module id mty env) excl rem\n  | Sig_modtype (id, mtd) :: rem ->\n    Sig_modtype (id, mtd)\n    :: remove_aliases_sig (Env.add_modtype id mtd env) excl rem\n  | it :: rem -> it :: remove_aliases_sig env excl rem\n\nlet remove_aliases env sg =\n  let excl = collect_arg_paths sg in\n  (* PathSet.iter (fun p -> Format.eprintf \"%a@ \" Printtyp.path p) excl;\n     Format.eprintf \"@.\"; *)\n  remove_aliases env excl sg\n\n(* Lower non-generalizable type variables *)\n\nlet 
lower_nongen nglev mty =\n  let open Btype in\n  let it_type_expr it ty =\n    let ty = repr ty in\n    match ty with\n    | {desc = Tvar _; level} ->\n      if level < generic_level && level > nglev then set_level ty nglev\n    | _ -> type_iterators.it_type_expr it ty\n  in\n  let it = {type_iterators with it_type_expr} in\n  it.it_module_type it mty;\n  it.it_module_type unmark_iterators mty\n"
  },
  {
    "path": "analysis/vendor/ml/mtype.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Operations on module types *)\n\nopen Types\n\nval scrape : Env.t -> module_type -> module_type\n(* Expand toplevel module type abbreviations\n   till hitting a \"hard\" module type (signature, functor,\n   or abstract module type ident. *)\n\nval freshen : module_type -> module_type\n(* Return an alpha-equivalent copy of the given module type\n   where bound identifiers are fresh. *)\n\nval strengthen : aliasable:bool -> Env.t -> module_type -> Path.t -> module_type\n(* Strengthen abstract type components relative to the\n   given path. *)\n\nval strengthen_decl :\n  aliasable:bool -> Env.t -> module_declaration -> Path.t -> module_declaration\nval nondep_supertype : Env.t -> Ident.t -> module_type -> module_type\n(* Return the smallest supertype of the given type\n   in which the given ident does not appear.\n   Raise [Not_found] if no such type exists. 
*)\n\nval no_code_needed : Env.t -> module_type -> bool\nval no_code_needed_sig : Env.t -> signature -> bool\n(* Determine whether a module needs no implementation code,\n   i.e. consists only of type definitions. *)\n\nval enrich_modtype : Env.t -> Path.t -> module_type -> module_type\nval enrich_typedecl : Env.t -> Path.t -> type_declaration -> type_declaration\nval type_paths : Env.t -> Path.t -> module_type -> Path.t list\nval contains_type : Env.t -> module_type -> bool\nval remove_aliases : Env.t -> module_type -> module_type\nval lower_nongen : int -> module_type -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/oprint.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Projet Cristal, INRIA Rocquencourt                   *)\n(*                                                                        *)\n(*   Copyright 2002 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Format\nopen Outcometree\n\nexception Ellipsis\n\nlet cautious f ppf arg = try f ppf arg with Ellipsis -> fprintf ppf \"...\"\n\nlet out_ident = ref pp_print_string\nlet map_primitive_name = ref (fun x -> x)\n\nlet print_lident ppf = function\n  | \"::\" -> !out_ident ppf \"(::)\"\n  | s -> !out_ident ppf s\n\nlet rec print_ident ppf = function\n  | Oide_ident s -> print_lident ppf s\n  | Oide_dot (id, s) ->\n    print_ident ppf id;\n    pp_print_char ppf '.';\n    print_lident ppf s\n  | Oide_apply (id1, id2) ->\n    fprintf ppf \"%a(%a)\" print_ident id1 print_ident id2\n\nlet parenthesized_ident name =\n  List.mem name [\"or\"; \"mod\"; \"land\"; \"lor\"; \"lxor\"; \"lsl\"; \"lsr\"; \"asr\"]\n  ||\n  match name.[0] with\n  | 'a' .. 'z' | 'A' .. 'Z' | '\\223' .. '\\246' | '\\248' .. 
'\\255' | '_' -> false\n  | _ -> true\n\nlet value_ident ppf name =\n  if parenthesized_ident name then fprintf ppf \"( %s )\" name\n  else pp_print_string ppf name\n\n(* Values *)\n\nlet valid_float_lexeme s =\n  let l = String.length s in\n  let rec loop i =\n    if i >= l then s ^ \".\"\n    else\n      match s.[i] with\n      | '0' .. '9' | '-' -> loop (i + 1)\n      | _ -> s\n  in\n  loop 0\n\nlet float_repres f =\n  match classify_float f with\n  | FP_nan -> \"nan\"\n  | FP_infinite -> if f < 0.0 then \"neg_infinity\" else \"infinity\"\n  | _ ->\n    let float_val =\n      let s1 = Printf.sprintf \"%.12g\" f in\n      if f = float_of_string s1 then s1\n      else\n        let s2 = Printf.sprintf \"%.15g\" f in\n        if f = float_of_string s2 then s2 else Printf.sprintf \"%.18g\" f\n    in\n    valid_float_lexeme float_val\n\nlet parenthesize_if_neg ppf fmt v isneg =\n  if isneg then pp_print_char ppf '(';\n  fprintf ppf fmt v;\n  if isneg then pp_print_char ppf ')'\n\nlet escape_string s =\n  (* Escape only C0 control characters (bytes <= 0x1F), DEL(0x7F), '\\\\' and '\"' *)\n  let n = ref 0 in\n  for i = 0 to String.length s - 1 do\n    n :=\n      !n\n      +\n      match String.unsafe_get s i with\n      | '\\\"' | '\\\\' | '\\n' | '\\t' | '\\r' | '\\b' -> 2\n      | '\\x00' .. 
'\\x1F' | '\\x7F' -> 4\n      | _ -> 1\n  done;\n  if !n = String.length s then s\n  else\n    let s' = Bytes.create !n in\n    n := 0;\n    for i = 0 to String.length s - 1 do\n      (match String.unsafe_get s i with\n      | ('\\\"' | '\\\\') as c ->\n        Bytes.unsafe_set s' !n '\\\\';\n        incr n;\n        Bytes.unsafe_set s' !n c\n      | '\\n' ->\n        Bytes.unsafe_set s' !n '\\\\';\n        incr n;\n        Bytes.unsafe_set s' !n 'n'\n      | '\\t' ->\n        Bytes.unsafe_set s' !n '\\\\';\n        incr n;\n        Bytes.unsafe_set s' !n 't'\n      | '\\r' ->\n        Bytes.unsafe_set s' !n '\\\\';\n        incr n;\n        Bytes.unsafe_set s' !n 'r'\n      | '\\b' ->\n        Bytes.unsafe_set s' !n '\\\\';\n        incr n;\n        Bytes.unsafe_set s' !n 'b'\n      | ('\\x00' .. '\\x1F' | '\\x7F') as c ->\n        let a = Char.code c in\n        Bytes.unsafe_set s' !n '\\\\';\n        incr n;\n        Bytes.unsafe_set s' !n (Char.chr (48 + (a / 100)));\n        incr n;\n        Bytes.unsafe_set s' !n (Char.chr (48 + (a / 10 mod 10)));\n        incr n;\n        Bytes.unsafe_set s' !n (Char.chr (48 + (a mod 10)))\n      | c -> Bytes.unsafe_set s' !n c);\n      incr n\n    done;\n    Bytes.to_string s'\n\nlet print_out_string ppf s =\n  let not_escaped =\n    (* let the user dynamically choose if strings should be escaped: *)\n    match Sys.getenv_opt \"OCAMLTOP_UTF_8\" with\n    | None -> true\n    | Some x -> (\n      match bool_of_string_opt x with\n      | None -> true\n      | Some f -> f)\n  in\n  if not_escaped then fprintf ppf \"\\\"%s\\\"\" (escape_string s)\n  else fprintf ppf \"%S\" s\n\nlet print_out_value ppf tree =\n  let rec print_tree_1 ppf = function\n    | Oval_constr (name, [param]) ->\n      fprintf ppf \"@[<1>%a@ %a@]\" print_ident name print_constr_param param\n    | Oval_constr (name, (_ :: _ as params)) ->\n      fprintf ppf \"@[<1>%a@ (%a)@]\" print_ident name\n        (print_tree_list print_tree_1 \",\")\n        params\n   
 | Oval_variant (name, Some param) ->\n      fprintf ppf \"@[<2>`%s@ %a@]\" name print_constr_param param\n    | tree -> print_simple_tree ppf tree\n  and print_constr_param ppf = function\n    | Oval_int i -> parenthesize_if_neg ppf \"%i\" i (i < 0)\n    | Oval_int32 i -> parenthesize_if_neg ppf \"%lil\" i (i < 0l)\n    | Oval_int64 i -> parenthesize_if_neg ppf \"%LiL\" i (i < 0L)\n    | Oval_nativeint i -> parenthesize_if_neg ppf \"%nin\" i (i < 0n)\n    | Oval_float f -> parenthesize_if_neg ppf \"%s\" (float_repres f) (f < 0.0)\n    | Oval_string (_, _, Ostr_bytes) as tree ->\n      pp_print_char ppf '(';\n      print_simple_tree ppf tree;\n      pp_print_char ppf ')'\n    | tree -> print_simple_tree ppf tree\n  and print_simple_tree ppf = function\n    | Oval_int i -> fprintf ppf \"%i\" i\n    | Oval_int32 i -> fprintf ppf \"%lil\" i\n    | Oval_int64 i -> fprintf ppf \"%LiL\" i\n    | Oval_nativeint i -> fprintf ppf \"%nin\" i\n    | Oval_float f -> pp_print_string ppf (float_repres f)\n    | Oval_char c -> fprintf ppf \"%C\" c\n    | Oval_string (s, maxlen, kind) -> (\n      try\n        let len = String.length s in\n        let s = if len > maxlen then String.sub s 0 maxlen else s in\n        (match kind with\n        | Ostr_bytes -> fprintf ppf \"Bytes.of_string %S\" s\n        | Ostr_string -> print_out_string ppf s);\n        if len > maxlen then\n          fprintf ppf \"... 
(* string length %d; truncated *)\" len\n      with Invalid_argument _ (* \"String.create\" *) ->\n        fprintf ppf \"<huge string>\")\n    | Oval_list tl ->\n      fprintf ppf \"@[<1>[%a]@]\" (print_tree_list print_tree_1 \";\") tl\n    | Oval_array tl ->\n      fprintf ppf \"@[<2>[|%a|]@]\" (print_tree_list print_tree_1 \";\") tl\n    | Oval_constr (name, []) -> print_ident ppf name\n    | Oval_variant (name, None) -> fprintf ppf \"`%s\" name\n    | Oval_stuff s -> pp_print_string ppf s\n    | Oval_record fel ->\n      fprintf ppf \"@[<1>{%a}@]\" (cautious (print_fields true)) fel\n    | Oval_ellipsis -> raise Ellipsis\n    | Oval_printer f -> f ppf\n    | Oval_tuple tree_list ->\n      fprintf ppf \"@[<1>(%a)@]\" (print_tree_list print_tree_1 \",\") tree_list\n    | tree -> fprintf ppf \"@[<1>(%a)@]\" (cautious print_tree_1) tree\n  and print_fields first ppf = function\n    | [] -> ()\n    | (name, tree) :: fields ->\n      if not first then fprintf ppf \";@ \";\n      fprintf ppf \"@[<1>%a@ =@ %a@]\" print_ident name (cautious print_tree_1)\n        tree;\n      print_fields false ppf fields\n  and print_tree_list print_item sep ppf tree_list =\n    let rec print_list first ppf = function\n      | [] -> ()\n      | tree :: tree_list ->\n        if not first then fprintf ppf \"%s@ \" sep;\n        print_item ppf tree;\n        print_list false ppf tree_list\n    in\n    cautious (print_list true) ppf tree_list\n  in\n  cautious print_tree_1 ppf tree\n\nlet out_value = ref print_out_value\n\n(* Types *)\n\nlet rec print_list_init pr sep ppf = function\n  | [] -> ()\n  | a :: l ->\n    sep ppf;\n    pr ppf a;\n    print_list_init pr sep ppf l\n\nlet rec print_list pr sep ppf = function\n  | [] -> ()\n  | [a] -> pr ppf a\n  | a :: l ->\n    pr ppf a;\n    sep ppf;\n    print_list pr sep ppf l\n\nlet pr_present =\n  print_list (fun ppf s -> fprintf ppf \"`%s\" s) (fun ppf -> fprintf ppf \"@ \")\n\nlet pr_vars =\n  print_list (fun ppf s -> fprintf ppf \"'%s\" s) 
(fun ppf -> fprintf ppf \"@ \")\n\nlet rec print_out_type ppf = function\n  | Otyp_alias (ty, s) -> fprintf ppf \"@[%a@ as '%s@]\" print_out_type ty s\n  | Otyp_poly (sl, ty) ->\n    fprintf ppf \"@[<hov 2>%a.@ %a@]\" pr_vars sl print_out_type ty\n  | ty -> print_out_type_1 ppf ty\n\nand print_out_type_1 ppf = function\n  | Otyp_arrow (lab, ty1, ty2) ->\n    pp_open_box ppf 0;\n    if lab <> \"\" then (\n      pp_print_string ppf lab;\n      pp_print_char ppf ':');\n    print_out_type_2 ppf ty1;\n    pp_print_string ppf \" ->\";\n    pp_print_space ppf ();\n    print_out_type_1 ppf ty2;\n    pp_close_box ppf ()\n  | ty -> print_out_type_2 ppf ty\n\nand print_out_type_2 ppf = function\n  | Otyp_tuple tyl ->\n    fprintf ppf \"@[<0>%a@]\" (print_typlist print_simple_out_type \" *\") tyl\n  | ty -> print_simple_out_type ppf ty\n\nand print_simple_out_type ppf = function\n  | Otyp_class (ng, id, tyl) ->\n    fprintf ppf \"@[%a%s#%a@]\" print_typargs tyl\n      (if ng then \"_\" else \"\")\n      print_ident id\n  | Otyp_constr (Oide_dot (Oide_dot (Oide_ident \"Js\", \"Fn\"), name), [tyl]) ->\n    let res =\n      if name = \"arity0\" then\n        Otyp_arrow (\"\", Otyp_constr (Oide_ident \"unit\", []), tyl)\n      else tyl\n    in\n    fprintf ppf \"@[<0>(%a@ [@bs])@]\" print_out_type_1 res\n  | Otyp_constr (Oide_dot (Oide_dot (Oide_ident \"Js_OO\", \"Meth\"), name), [tyl])\n    ->\n    let res =\n      if name = \"arity0\" then\n        Otyp_arrow (\"\", Otyp_constr (Oide_ident \"unit\", []), tyl)\n      else tyl\n    in\n    fprintf ppf \"@[<0>(%a@ [@meth])@]\" print_out_type_1 res\n  | Otyp_constr (Oide_dot (Oide_dot (Oide_ident \"Js_OO\", \"Callback\"), _), [tyl])\n    ->\n    fprintf ppf \"@[<0>(%a@ [@this])@]\" print_out_type_1 tyl\n  | Otyp_constr (id, tyl) ->\n    pp_open_box ppf 0;\n    print_typargs ppf tyl;\n    print_ident ppf id;\n    pp_close_box ppf ()\n  | Otyp_object (fields, rest) ->\n    fprintf ppf \"@[<2>< %a >@]\" (print_fields rest) fields\n  | 
Otyp_stuff s -> pp_print_string ppf s\n  | Otyp_var (ng, s) -> fprintf ppf \"'%s%s\" (if ng then \"_\" else \"\") s\n  | Otyp_variant (non_gen, row_fields, closed, tags) ->\n    let print_present ppf = function\n      | None | Some [] -> ()\n      | Some l -> fprintf ppf \"@;<1 -2>> @[<hov>%a@]\" pr_present l\n    in\n    let print_fields ppf = function\n      | Ovar_fields fields ->\n        print_list print_row_field\n          (fun ppf -> fprintf ppf \"@;<1 -2>| \")\n          ppf fields\n      | Ovar_typ typ -> print_simple_out_type ppf typ\n    in\n    fprintf ppf \"%s[%s@[<hv>@[<hv>%a@]%a ]@]\"\n      (if non_gen then \"_\" else \"\")\n      (if closed then if tags = None then \" \" else \"< \"\n       else if tags = None then \"> \"\n       else \"? \")\n      print_fields row_fields print_present tags\n  | (Otyp_alias _ | Otyp_poly _ | Otyp_arrow _ | Otyp_tuple _) as ty ->\n    pp_open_box ppf 1;\n    pp_print_char ppf '(';\n    print_out_type ppf ty;\n    pp_print_char ppf ')';\n    pp_close_box ppf ()\n  | Otyp_abstract | Otyp_open | Otyp_sum _ | Otyp_manifest (_, _) -> ()\n  | Otyp_record lbls -> print_record_decl ppf lbls\n  | Otyp_module (p, n, tyl) ->\n    fprintf ppf \"@[<1>(module %s\" p;\n    let first = ref true in\n    List.iter2\n      (fun s t ->\n        let sep =\n          if !first then (\n            first := false;\n            \"with\")\n          else \"and\"\n        in\n        fprintf ppf \" %s type %s = %a\" sep s print_out_type t)\n      n tyl;\n    fprintf ppf \")@]\"\n  | Otyp_attribute (t, attr) ->\n    fprintf ppf \"@[<1>(%a [@@%s])@]\" print_out_type t attr.oattr_name\n\nand print_record_decl ppf lbls =\n  fprintf ppf \"{%a@;<1 -2>}\"\n    (print_list_init print_out_label (fun ppf -> fprintf ppf \"@ \"))\n    lbls\n\nand print_fields rest ppf = function\n  | [] -> (\n    match rest with\n    | Some non_gen -> fprintf ppf \"%s..\" (if non_gen then \"_\" else \"\")\n    | None -> ())\n  | [(s, t)] ->\n    fprintf ppf \"%s : %a\" 
s print_out_type t;\n    (match rest with\n    | Some _ -> fprintf ppf \";@ \"\n    | None -> ());\n    print_fields rest ppf []\n  | (s, t) :: l ->\n    fprintf ppf \"%s : %a;@ %a\" s print_out_type t (print_fields rest) l\n\nand print_row_field ppf (l, opt_amp, tyl) =\n  let pr_of ppf =\n    if opt_amp then fprintf ppf \" of@ &@ \"\n    else if tyl <> [] then fprintf ppf \" of@ \"\n    else fprintf ppf \"\"\n  in\n  fprintf ppf \"@[<hv 2>`%s%t%a@]\" l pr_of\n    (print_typlist print_out_type \" &\")\n    tyl\n\nand print_typlist print_elem sep ppf = function\n  | [] -> ()\n  | [ty] -> print_elem ppf ty\n  | ty :: tyl ->\n    print_elem ppf ty;\n    pp_print_string ppf sep;\n    pp_print_space ppf ();\n    print_typlist print_elem sep ppf tyl\n\nand print_typargs ppf = function\n  | [] -> ()\n  | [ty1] ->\n    print_simple_out_type ppf ty1;\n    pp_print_space ppf ()\n  | tyl ->\n    pp_open_box ppf 1;\n    pp_print_char ppf '(';\n    print_typlist print_out_type \",\" ppf tyl;\n    pp_print_char ppf ')';\n    pp_close_box ppf ();\n    pp_print_space ppf ()\n\nand print_out_label ppf (name, mut, opt, arg) =\n  fprintf ppf \"@[<2>%s%s%s :@ %a@];\"\n    (if opt then \"@optional \" else \"\")\n    (if mut then \"mutable \" else \"\")\n    name print_out_type arg\n\nlet out_type = ref print_out_type\n\n(* Class types *)\n\nlet type_parameter ppf (ty, (co, cn)) =\n  fprintf ppf \"%s%s\"\n    (if not cn then \"+\" else if not co then \"-\" else \"\")\n    (if ty = \"_\" then ty else \"'\" ^ ty)\n\nlet print_out_class_params ppf = function\n  | [] -> ()\n  | tyl ->\n    fprintf ppf \"@[<1>[%a]@]@ \"\n      (print_list type_parameter (fun ppf -> fprintf ppf \", \"))\n      tyl\n\nlet rec print_out_class_type ppf = function\n  | Octy_constr (id, tyl) ->\n    let pr_tyl ppf = function\n      | [] -> ()\n      | tyl -> fprintf ppf \"@[<1>[%a]@]@ \" (print_typlist !out_type \",\") tyl\n    in\n    fprintf ppf \"@[%a%a@]\" pr_tyl tyl print_ident id\n  | Octy_arrow (lab, ty, 
cty) ->\n    fprintf ppf \"@[%s%a ->@ %a@]\"\n      (if lab <> \"\" then lab ^ \":\" else \"\")\n      print_out_type_2 ty print_out_class_type cty\n  | Octy_signature (self_ty, csil) ->\n    let pr_param ppf = function\n      | Some ty -> fprintf ppf \"@ @[(%a)@]\" !out_type ty\n      | None -> ()\n    in\n    fprintf ppf \"@[<hv 2>@[<2>object%a@]@ %a@;<1 -2>end@]\" pr_param self_ty\n      (print_list print_out_class_sig_item (fun ppf -> fprintf ppf \"@ \"))\n      csil\n\nand print_out_class_sig_item ppf = function\n  | Ocsg_constraint (ty1, ty2) ->\n    fprintf ppf \"@[<2>constraint %a =@ %a@]\" !out_type ty1 !out_type ty2\n  | Ocsg_method (name, priv, virt, ty) ->\n    fprintf ppf \"@[<2>method %s%s%s :@ %a@]\"\n      (if priv then \"private \" else \"\")\n      (if virt then \"virtual \" else \"\")\n      name !out_type ty\n  | Ocsg_value (name, mut, vr, ty) ->\n    fprintf ppf \"@[<2>val %s%s%s :@ %a@]\"\n      (if mut then \"mutable \" else \"\")\n      (if vr then \"virtual \" else \"\")\n      name !out_type ty\n\nlet out_class_type = ref print_out_class_type\n\n(* Signature *)\n\nlet out_module_type = ref (fun _ -> failwith \"Oprint.out_module_type\")\nlet out_sig_item = ref (fun _ -> failwith \"Oprint.out_sig_item\")\nlet out_signature = ref (fun _ -> failwith \"Oprint.out_signature\")\nlet out_type_extension = ref (fun _ -> failwith \"Oprint.out_type_extension\")\n\nlet rec print_out_functor funct ppf = function\n  | Omty_functor (_, None, mty_res) ->\n    if funct then fprintf ppf \"() %a\" (print_out_functor true) mty_res\n    else fprintf ppf \"functor@ () %a\" (print_out_functor true) mty_res\n  | Omty_functor (name, Some mty_arg, mty_res) -> (\n    match (name, funct) with\n    | \"_\", true ->\n      fprintf ppf \"->@ %a ->@ %a\" print_out_module_type mty_arg\n        (print_out_functor false) mty_res\n    | \"_\", false ->\n      fprintf ppf \"%a ->@ %a\" print_out_module_type mty_arg\n        (print_out_functor false) mty_res\n    | name, true 
->\n      fprintf ppf \"(%s : %a) %a\" name print_out_module_type mty_arg\n        (print_out_functor true) mty_res\n    | name, false ->\n      fprintf ppf \"functor@ (%s : %a) %a\" name print_out_module_type mty_arg\n        (print_out_functor true) mty_res)\n  | m ->\n    if funct then fprintf ppf \"->@ %a\" print_out_module_type m\n    else print_out_module_type ppf m\n\nand print_out_module_type ppf = function\n  | Omty_abstract -> ()\n  | Omty_functor _ as t -> fprintf ppf \"@[<2>%a@]\" (print_out_functor false) t\n  | Omty_ident id -> fprintf ppf \"%a\" print_ident id\n  | Omty_signature sg ->\n    fprintf ppf \"@[<hv 2>sig@ %a@;<1 -2>end@]\" !out_signature sg\n  | Omty_alias id -> fprintf ppf \"(module %a)\" print_ident id\n\nand print_out_signature ppf = function\n  | [] -> ()\n  | [item] -> !out_sig_item ppf item\n  | Osig_typext (ext, Oext_first) :: items ->\n    (* Gather together the extension constructors *)\n    let rec gather_extensions acc items =\n      match items with\n      | Osig_typext (ext, Oext_next) :: items ->\n        gather_extensions\n          ((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc)\n          items\n      | _ -> (List.rev acc, items)\n    in\n    let exts, items =\n      gather_extensions\n        [(ext.oext_name, ext.oext_args, ext.oext_ret_type)]\n        items\n    in\n    let te =\n      {\n        otyext_name = ext.oext_type_name;\n        otyext_params = ext.oext_type_params;\n        otyext_constructors = exts;\n        otyext_private = ext.oext_private;\n      }\n    in\n    fprintf ppf \"%a@ %a\" !out_type_extension te print_out_signature items\n  | item :: items ->\n    fprintf ppf \"%a@ %a\" !out_sig_item item print_out_signature items\n\nand print_out_sig_item ppf = function\n  | Osig_class (vir_flag, name, params, clt, rs) ->\n    fprintf ppf \"@[<2>%s%s@ %a%s@ :@ %a@]\"\n      (if rs = Orec_next then \"and\" else \"class\")\n      (if vir_flag then \" virtual\" else \"\")\n      print_out_class_params 
params name !out_class_type clt\n  | Osig_class_type (vir_flag, name, params, clt, rs) ->\n    fprintf ppf \"@[<2>%s%s@ %a%s@ =@ %a@]\"\n      (if rs = Orec_next then \"and\" else \"class type\")\n      (if vir_flag then \" virtual\" else \"\")\n      print_out_class_params params name !out_class_type clt\n  | Osig_typext (ext, Oext_exception) ->\n    fprintf ppf \"@[<2>exception %a@]\" print_out_constr\n      (ext.oext_name, ext.oext_args, ext.oext_ret_type)\n  | Osig_typext (ext, _es) -> print_out_extension_constructor ppf ext\n  | Osig_modtype (name, Omty_abstract) ->\n    fprintf ppf \"@[<2>module type %s@]\" name\n  | Osig_modtype (name, mty) ->\n    fprintf ppf \"@[<2>module type %s =@ %a@]\" name !out_module_type mty\n  | Osig_module (name, Omty_alias id, _) ->\n    fprintf ppf \"@[<2>module %s =@ %a@]\" name print_ident id\n  | Osig_module (name, mty, rs) ->\n    fprintf ppf \"@[<2>%s %s :@ %a@]\"\n      (match rs with\n      | Orec_not -> \"module\"\n      | Orec_first -> \"module rec\"\n      | Orec_next -> \"and\")\n      name !out_module_type mty\n  | Osig_type (td, rs) ->\n    print_out_type_decl\n      (match rs with\n      | Orec_not -> \"type nonrec\"\n      | Orec_first -> \"type\"\n      | Orec_next -> \"and\")\n      ppf td\n  | Osig_value vd ->\n    let kwd = if vd.oval_prims = [] then \"val\" else \"external\" in\n    let pr_prims ppf = function\n      | [] -> ()\n      | s :: sl ->\n        fprintf ppf \"@ = \\\"%s\\\"\" s;\n        List.iter\n          (fun s ->\n            (* TODO: in general, we should print bs attributes, some attributes like\n               variadic do need it *)\n            fprintf ppf \"@ \\\"%s\\\"\" (!map_primitive_name s))\n          sl\n    in\n    fprintf ppf \"@[<2>%s %a :@ %a%a%a@]\" kwd value_ident vd.oval_name !out_type\n      vd.oval_type pr_prims vd.oval_prims\n      (fun ppf -> List.iter (fun a -> fprintf ppf \"@ [@@@@%s]\" a.oattr_name))\n      vd.oval_attributes\n  | Osig_ellipsis -> fprintf ppf 
\"...\"\n\nand print_out_type_decl kwd ppf td =\n  let print_constraints ppf =\n    List.iter\n      (fun (ty1, ty2) ->\n        fprintf ppf \"@ @[<2>constraint %a =@ %a@]\" !out_type ty1 !out_type ty2)\n      td.otype_cstrs\n  in\n  let type_defined ppf =\n    match td.otype_params with\n    | [] -> pp_print_string ppf td.otype_name\n    | [param] -> fprintf ppf \"@[%a@ %s@]\" type_parameter param td.otype_name\n    | _ ->\n      fprintf ppf \"@[(@[%a)@]@ %s@]\"\n        (print_list type_parameter (fun ppf -> fprintf ppf \",@ \"))\n        td.otype_params td.otype_name\n  in\n  let print_manifest ppf = function\n    | Otyp_manifest (ty, _) -> fprintf ppf \" =@ %a\" !out_type ty\n    | _ -> ()\n  in\n  let print_name_params ppf =\n    fprintf ppf \"%s %t%a\" kwd type_defined print_manifest td.otype_type\n  in\n  let ty =\n    match td.otype_type with\n    | Otyp_manifest (_, ty) -> ty\n    | _ -> td.otype_type\n  in\n  let print_private ppf = function\n    | Asttypes.Private -> fprintf ppf \" private\"\n    | Asttypes.Public -> ()\n  in\n  let print_immediate ppf =\n    if td.otype_immediate then fprintf ppf \" [%@%@immediate]\" else ()\n  in\n  let print_unboxed ppf =\n    if td.otype_unboxed then fprintf ppf \" [%@%@unboxed]\" else ()\n  in\n  let print_out_tkind ppf = function\n    | Otyp_abstract -> ()\n    | Otyp_record lbls ->\n      fprintf ppf \" =%a %a\" print_private td.otype_private print_record_decl\n        lbls\n    | Otyp_sum constrs ->\n      fprintf ppf \" =%a@;<1 2>%a\" print_private td.otype_private\n        (print_list print_out_constr (fun ppf -> fprintf ppf \"@ | \"))\n        constrs\n    | Otyp_open -> fprintf ppf \" =%a ..\" print_private td.otype_private\n    | ty ->\n      fprintf ppf \" =%a@;<1 2>%a\" print_private td.otype_private !out_type ty\n  in\n  fprintf ppf \"@[<2>@[<hv 2>%t%a@]%t%t%t@]\" print_name_params print_out_tkind ty\n    print_constraints print_immediate print_unboxed\n\nand print_out_constr ppf (name, tyl, ret_type_opt) 
=\n  let name =\n    match name with\n    | \"::\" -> \"(::)\" (* #7200 *)\n    | s -> s\n  in\n  match ret_type_opt with\n  | None -> (\n    match tyl with\n    | [] -> pp_print_string ppf name\n    | _ ->\n      fprintf ppf \"@[<2>%s of@ %a@]\" name\n        (print_typlist print_simple_out_type \" *\")\n        tyl)\n  | Some ret_type -> (\n    match tyl with\n    | [] -> fprintf ppf \"@[<2>%s :@ %a@]\" name print_simple_out_type ret_type\n    | _ ->\n      fprintf ppf \"@[<2>%s :@ %a -> %a@]\" name\n        (print_typlist print_simple_out_type \" *\")\n        tyl print_simple_out_type ret_type)\n\nand print_out_extension_constructor ppf ext =\n  let print_extended_type ppf =\n    let print_type_parameter ppf ty =\n      fprintf ppf \"%s\" (if ty = \"_\" then ty else \"'\" ^ ty)\n    in\n    match ext.oext_type_params with\n    | [] -> fprintf ppf \"%s\" ext.oext_type_name\n    | [ty_param] ->\n      fprintf ppf \"@[%a@ %s@]\" print_type_parameter ty_param ext.oext_type_name\n    | _ ->\n      fprintf ppf \"@[(@[%a)@]@ %s@]\"\n        (print_list print_type_parameter (fun ppf -> fprintf ppf \",@ \"))\n        ext.oext_type_params ext.oext_type_name\n  in\n  fprintf ppf \"@[<hv 2>type %t +=%s@;<1 2>%a@]\" print_extended_type\n    (if ext.oext_private = Asttypes.Private then \" private\" else \"\")\n    print_out_constr\n    (ext.oext_name, ext.oext_args, ext.oext_ret_type)\n\nand print_out_type_extension ppf te =\n  let print_extended_type ppf =\n    let print_type_parameter ppf ty =\n      fprintf ppf \"%s\" (if ty = \"_\" then ty else \"'\" ^ ty)\n    in\n    match te.otyext_params with\n    | [] -> fprintf ppf \"%s\" te.otyext_name\n    | [param] ->\n      fprintf ppf \"@[%a@ %s@]\" print_type_parameter param te.otyext_name\n    | _ ->\n      fprintf ppf \"@[(@[%a)@]@ %s@]\"\n        (print_list print_type_parameter (fun ppf -> fprintf ppf \",@ \"))\n        te.otyext_params te.otyext_name\n  in\n  fprintf ppf \"@[<hv 2>type %t +=%s@;<1 2>%a@]\" 
print_extended_type\n    (if te.otyext_private = Asttypes.Private then \" private\" else \"\")\n    (print_list print_out_constr (fun ppf -> fprintf ppf \"@ | \"))\n    te.otyext_constructors\n\nlet _ = out_module_type := print_out_module_type\nlet _ = out_signature := print_out_signature\nlet _ = out_sig_item := print_out_sig_item\nlet _ = out_type_extension := print_out_type_extension\n\n(* Phrases *)\n\nlet print_out_exception ppf exn outv =\n  match exn with\n  | Sys.Break -> fprintf ppf \"Interrupted.@.\"\n  | Out_of_memory -> fprintf ppf \"Out of memory during evaluation.@.\"\n  | Stack_overflow ->\n    fprintf ppf \"Stack overflow during evaluation (looping recursion?).@.\"\n  | _ -> fprintf ppf \"@[Exception:@ %a.@]@.\" !out_value outv\n\nlet rec print_items ppf = function\n  | [] -> ()\n  | (Osig_typext (ext, Oext_first), None) :: items ->\n    (* Gather together extension constructors *)\n    let rec gather_extensions acc items =\n      match items with\n      | (Osig_typext (ext, Oext_next), None) :: items ->\n        gather_extensions\n          ((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc)\n          items\n      | _ -> (List.rev acc, items)\n    in\n    let exts, items =\n      gather_extensions\n        [(ext.oext_name, ext.oext_args, ext.oext_ret_type)]\n        items\n    in\n    let te =\n      {\n        otyext_name = ext.oext_type_name;\n        otyext_params = ext.oext_type_params;\n        otyext_constructors = exts;\n        otyext_private = ext.oext_private;\n      }\n    in\n    fprintf ppf \"@[%a@]\" !out_type_extension te;\n    if items <> [] then fprintf ppf \"@ %a\" print_items items\n  | (tree, valopt) :: items ->\n    (match valopt with\n    | Some v -> fprintf ppf \"@[<2>%a =@ %a@]\" !out_sig_item tree !out_value v\n    | None -> fprintf ppf \"@[%a@]\" !out_sig_item tree);\n    if items <> [] then fprintf ppf \"@ %a\" print_items items\n\nlet print_out_phrase ppf = function\n  | Ophr_eval (outv, ty) ->\n    fprintf ppf 
\"@[- : %a@ =@ %a@]@.\" !out_type ty !out_value outv\n  | Ophr_signature [] -> ()\n  | Ophr_signature items -> fprintf ppf \"@[<v>%a@]@.\" print_items items\n  | Ophr_exception (exn, outv) -> print_out_exception ppf exn outv\n\nlet out_phrase = ref print_out_phrase\n"
  },
  {
    "path": "analysis/vendor/ml/oprint.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Projet Cristal, INRIA Rocquencourt                   *)\n(*                                                                        *)\n(*   Copyright 2002 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Format\nopen Outcometree\n\nval out_ident : (formatter -> string -> unit) ref\nval map_primitive_name : (string -> string) ref\n\nval out_value : (formatter -> out_value -> unit) ref\nval out_type : (formatter -> out_type -> unit) ref\nval out_class_type : (formatter -> out_class_type -> unit) ref\nval out_module_type : (formatter -> out_module_type -> unit) ref\nval out_sig_item : (formatter -> out_sig_item -> unit) ref\nval out_signature : (formatter -> out_sig_item list -> unit) ref\nval out_type_extension : (formatter -> out_type_extension -> unit) ref\nval out_phrase : (formatter -> out_phrase -> unit) ref\n\nval parenthesized_ident : string -> bool\n"
  },
  {
    "path": "analysis/vendor/ml/outcometree.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*      Daniel de Rauglaudre, projet Cristal, INRIA Rocquencourt          *)\n(*                                                                        *)\n(*   Copyright 2001 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Module [Outcometree]: results displayed by the toplevel *)\n\n(* These types represent messages that the toplevel displays as normal\n   results or errors. 
The real displaying is customisable using the hooks:\n      [Toploop.print_out_value]\n      [Toploop.print_out_type]\n      [Toploop.print_out_sig_item]\n      [Toploop.print_out_phrase] *)\n\ntype out_ident =\n  | Oide_apply of out_ident * out_ident\n  | Oide_dot of out_ident * string\n  | Oide_ident of string\n\ntype out_string = Ostr_string | Ostr_bytes\n\ntype out_attribute = {oattr_name: string}\n\ntype out_value =\n  | Oval_array of out_value list\n  | Oval_char of char\n  | Oval_constr of out_ident * out_value list\n  | Oval_ellipsis\n  | Oval_float of float\n  | Oval_int of int\n  | Oval_int32 of int32\n  | Oval_int64 of int64\n  | Oval_nativeint of nativeint\n  | Oval_list of out_value list\n  | Oval_printer of (Format.formatter -> unit)\n  | Oval_record of (out_ident * out_value) list\n  | Oval_string of string * int * out_string (* string, size-to-print, kind *)\n  | Oval_stuff of string\n  | Oval_tuple of out_value list\n  | Oval_variant of string * out_value option\n\ntype out_type =\n  | Otyp_abstract\n  | Otyp_open\n  | Otyp_alias of out_type * string\n  | Otyp_arrow of string * out_type * out_type\n  | Otyp_class of bool * out_ident * out_type list\n  | Otyp_constr of out_ident * out_type list\n  | Otyp_manifest of out_type * out_type\n  | Otyp_object of (string * out_type) list * bool option\n  | Otyp_record of (string * bool * bool * out_type) list\n  | Otyp_stuff of string\n  | Otyp_sum of (string * out_type list * out_type option) list\n  | Otyp_tuple of out_type list\n  | Otyp_var of bool * string\n  | Otyp_variant of bool * out_variant * bool * string list option\n  | Otyp_poly of string list * out_type\n  | Otyp_module of string * string list * out_type list\n  | Otyp_attribute of out_type * out_attribute\n\nand out_variant =\n  | Ovar_fields of (string * bool * out_type list) list\n  | Ovar_typ of out_type\n\ntype out_class_type =\n  | Octy_constr of out_ident * out_type list\n  | Octy_arrow of string * out_type * out_class_type\n  | 
Octy_signature of out_type option * out_class_sig_item list\nand out_class_sig_item =\n  | Ocsg_constraint of out_type * out_type\n  | Ocsg_method of string * bool * bool * out_type\n  | Ocsg_value of string * bool * bool * out_type\n\ntype out_module_type =\n  | Omty_abstract\n  | Omty_functor of string * out_module_type option * out_module_type\n  | Omty_ident of out_ident\n  | Omty_signature of out_sig_item list\n  | Omty_alias of out_ident\nand out_sig_item =\n  | Osig_class of\n      bool\n      * string\n      * (string * (bool * bool)) list\n      * out_class_type\n      * out_rec_status\n  | Osig_class_type of\n      bool\n      * string\n      * (string * (bool * bool)) list\n      * out_class_type\n      * out_rec_status\n  | Osig_typext of out_extension_constructor * out_ext_status\n  | Osig_modtype of string * out_module_type\n  | Osig_module of string * out_module_type * out_rec_status\n  | Osig_type of out_type_decl * out_rec_status\n  | Osig_value of out_val_decl\n  | Osig_ellipsis\nand out_type_decl = {\n  otype_name: string;\n  otype_params: (string * (bool * bool)) list;\n  otype_type: out_type;\n  otype_private: Asttypes.private_flag;\n  otype_immediate: bool;\n  otype_unboxed: bool;\n  otype_cstrs: (out_type * out_type) list;\n}\nand out_extension_constructor = {\n  oext_name: string;\n  oext_type_name: string;\n  oext_type_params: string list;\n  oext_args: out_type list;\n  oext_ret_type: out_type option;\n  oext_private: Asttypes.private_flag;\n}\nand out_type_extension = {\n  otyext_name: string;\n  otyext_params: string list;\n  otyext_constructors: (string * out_type list * out_type option) list;\n  otyext_private: Asttypes.private_flag;\n}\nand out_val_decl = {\n  oval_name: string;\n  oval_type: out_type;\n  oval_prims: string list;\n  oval_attributes: out_attribute list;\n}\nand out_rec_status = Orec_not | Orec_first | Orec_next\nand out_ext_status = Oext_first | Oext_next | Oext_exception\n\ntype out_phrase =\n  | Ophr_eval of 
out_value * out_type\n  | Ophr_signature of (out_sig_item * out_value option) list\n  | Ophr_exception of (exn * out_value)\n"
  },
  {
    "path": "analysis/vendor/ml/parmatch.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Detection of partial matches and unused match cases. 
*)\n\nopen Misc\nopen Asttypes\nopen Types\nopen Typedtree\n\n(*************************************)\n(* Utilities for building patterns   *)\n(*************************************)\n\nlet make_pat desc ty tenv =\n  {\n    pat_desc = desc;\n    pat_loc = Location.none;\n    pat_extra = [];\n    pat_type = ty;\n    pat_env = tenv;\n    pat_attributes = [];\n  }\n\nlet omega = make_pat Tpat_any Ctype.none Env.empty\n\nlet extra_pat =\n  make_pat (Tpat_var (Ident.create \"+\", mknoloc \"+\")) Ctype.none Env.empty\n\nlet rec omegas i = if i <= 0 then [] else omega :: omegas (i - 1)\n\nlet omega_list l = List.map (fun _ -> omega) l\n\nlet zero = make_pat (Tpat_constant (Const_int 0)) Ctype.none Env.empty\n\n(*******************)\n(* Coherence check *)\n(*******************)\n\n(* For some of the operations we do in this module, we would like (because it\n   simplifies matters) to assume that patterns appearing on a given column in a\n   pattern matrix are /coherent/ (think \"of the same type\").\n   Unfortunately that is not always true.\n\n   Consider the following (well-typed) example:\n   {[\n     type _ t = S : string t | U : unit t\n\n     let f (type a) (t1 : a t) (t2 : a t) (a : a) =\n       match t1, t2, a with\n       | U, _, () -> ()\n       | _, S, \"\" -> ()\n   ]}\n\n   Clearly the 3rd column contains incoherent patterns.\n\n   On the example above, most of the algorithms will explore the pattern matrix\n   as illustrated by the following tree:\n\n   {v\n                                                   S\n                                                -------> | \"\" |\n                             U     | S, \"\" | __/         | () |\n                         --------> | _, () |   \\  ¬ S\n        | U, _, () | __/                        -------> | () |\n        | _, S, \"\" |   \\\n                        ---------> | S, \"\" | ----------> | \"\" |\n                           ¬ U                    S\n   v}\n\n   where following an edge labelled by a 
pattern P means \"assuming the value I\n   am matching on is filtered by [P] on the column I am currently looking at,\n   then the following submatrix is still reachable\".\n\n   Notice that at any point of that tree, if the first column of a matrix is\n   incoherent, then the branch leading to it can only be taken if the scrutinee\n   is ill-typed.\n   In the example above the only case where we have a matrix with an incoherent\n   first column is when we consider [t1, t2, a] to be [U, S, ...]. However such\n   a value would be ill-typed, so we can never actually get there.\n\n   Checking the first column at each step of the recursion and making the\n   concious decision of \"aborting\" the algorithm whenever the first column\n   becomes incoherent, allows us to retain the initial assumption in later\n   stages of the algorithms.\n\n   ---\n\n   N.B. two patterns can be considered coherent even though they might not be of\n   the same type.\n\n   That's in part because we only care about the \"head\" of patterns and leave\n   checking coherence of subpatterns for the next steps of the algorithm:\n   ('a', 'b') and (1, ()) will be deemed coherent because they are both a tuples\n   of arity 2 (we'll notice at a later stage the incoherence of 'a' and 1).\n\n   But also because it can be hard/costly to determine exactly whether two\n   patterns are of the same type or not (eg. in the example above with _ and S,\n   but see also the module [Coherence_illustration] in\n   testsuite/tests/basic-more/robustmatch.ml).\n\n   For the moment our weak, loosely-syntactic, coherence check seems to be\n   enough and we leave it to each user to consider (and document!) 
what happens\n   when an \"incoherence\" is not detected by this check.\n*)\n\nlet simplify_head_pat p k =\n  let rec simplify_head_pat p k =\n    match p.pat_desc with\n    | Tpat_alias (p, _, _) -> simplify_head_pat p k\n    | Tpat_var (_, _) -> omega :: k\n    | Tpat_or (p1, p2, _) -> simplify_head_pat p1 (simplify_head_pat p2 k)\n    | _ -> p :: k\n  in\n  simplify_head_pat p k\n\nlet rec simplified_first_col = function\n  | [] -> []\n  | [] :: _ -> assert false (* the rows are non-empty! *)\n  | (p :: _) :: rows -> simplify_head_pat p (simplified_first_col rows)\n\n(* Given the simplified first column of a matrix, this function first looks for\n   a \"discriminating\" pattern on that column (i.e. a non-omega one) and then\n   check that every other head pattern in the column is coherent with that one.\n*)\nlet all_coherent column =\n  let coherent_heads hp1 hp2 =\n    match (hp1.pat_desc, hp2.pat_desc) with\n    | (Tpat_var _ | Tpat_alias _ | Tpat_or _), _\n    | _, (Tpat_var _ | Tpat_alias _ | Tpat_or _) ->\n      assert false\n    | Tpat_construct (_, c, _), Tpat_construct (_, c', _) ->\n      c.cstr_consts = c'.cstr_consts && c.cstr_nonconsts = c'.cstr_nonconsts\n    | Tpat_constant c1, Tpat_constant c2 -> (\n      match (c1, c2) with\n      | Const_char _, Const_char _\n      | Const_int _, Const_int _\n      | Const_int32 _, Const_int32 _\n      | Const_int64 _, Const_int64 _\n      | Const_bigint _, Const_bigint _\n      | Const_float _, Const_float _\n      | Const_string _, Const_string _ ->\n        true\n      | ( ( Const_char _ | Const_int _ | Const_int32 _ | Const_int64 _\n          | Const_bigint _ | Const_float _ | Const_string _ ),\n          _ ) ->\n        false)\n    | Tpat_tuple l1, Tpat_tuple l2 -> List.length l1 = List.length l2\n    | Tpat_record ((_, lbl1, _) :: _, _), Tpat_record ((_, lbl2, _) :: _, _) ->\n      Array.length lbl1.lbl_all = Array.length lbl2.lbl_all\n    | Tpat_any, _\n    | _, Tpat_any\n    | Tpat_record ([], _), 
Tpat_record (_, _)\n    | Tpat_record (_, _), Tpat_record ([], _)\n    | Tpat_variant _, Tpat_variant _\n    | Tpat_array _, Tpat_array _\n    | Tpat_lazy _, Tpat_lazy _ ->\n      true\n    | _, _ -> false\n  in\n  match\n    List.find\n      (fun head_pat ->\n        match head_pat.pat_desc with\n        | Tpat_var _ | Tpat_alias _ | Tpat_or _ -> assert false\n        | Tpat_any -> false\n        | _ -> true)\n      column\n  with\n  | exception Not_found ->\n    (* only omegas on the column: the column is coherent. *)\n    true\n  | discr_pat -> List.for_all (coherent_heads discr_pat) column\n\nlet first_column simplified_matrix = List.map fst simplified_matrix\n\n(***********************)\n(* Compatibility check *)\n(***********************)\n\n(* Patterns p and q compatible means:\n    there exists value V that matches both, However....\n\n   The case of extension types is dubious, as constructor rebind permits\n   that different constructors are the same (and are thus compatible).\n\n   Compilation must take this into account, consider:\n\n   type t = ..\n   type t += A|B\n   type t += C=A\n\n   let f x y = match x,y with\n   | true,A  -> '1'\n   | _,C     -> '2'\n   | false,A -> '3'\n   | _,_     -> '_'\n\n   As C is bound to A the value of f false A is '2' (and not '3' as it would\n   be in the absence of rebinding).\n\n   Not considering rebinding, patterns \"false,A\" and \"_,C\" are incompatible\n   and the compiler can swap the second and third clause, resulting in the\n   (more efficiently compiled) matching\n\n   match x,y with\n   | true,A  -> '1'\n   | false,A -> '3'\n   | _,C     -> '2'\n   | _,_     -> '_'\n\n   This is not correct: when C is bound to A, \"f false A\" returns '2' (not '3')\n\n\n   However, diagnostics do not take constructor rebinding into account.\n   Notice, that due to module abstraction constructor rebinding is hidden.\n\n   module X : sig type t = .. 
type t += A|B end = struct\n     type t = ..\n     type t += A\n     type t += B=A\n   end\n\n   open X\n\n   let f x = match x with\n   | A -> '1'\n   | B -> '2'\n   | _ -> '_'\n\n   The second clause above will NOT (and cannot) be flagged as useless.\n\n   Finally, there are two compatibility fonction\n    compat p q      ---> 'syntactic compatibility, used for diagnostics.\n    may_compat p q --->   a safe approximation of possible compat,\n                          for compilation\n*)\n\nlet is_absent tag row = Btype.row_field tag !row = Rabsent\n\nlet is_absent_pat p =\n  match p.pat_desc with\n  | Tpat_variant (tag, _, row) -> is_absent tag row\n  | _ -> false\n\nlet const_compare x y =\n  match (x, y) with\n  | Const_float f1, Const_float f2 ->\n    compare (float_of_string f1) (float_of_string f2)\n  | Const_bigint (s1, b1), Const_bigint (s2, b2) ->\n    Bigint_utils.compare (s1, b1) (s2, b2)\n  | Const_string (s1, _), Const_string (s2, _) -> String.compare s1 s2\n  | _, _ -> compare x y\n\nlet records_args l1 l2 =\n  (* Invariant: fields are already sorted by Typecore.type_label_a_list *)\n  let rec combine r1 r2 l1 l2 =\n    match (l1, l2) with\n    | [], [] -> (List.rev r1, List.rev r2)\n    | [], (_, _, p2) :: rem2 -> combine (omega :: r1) (p2 :: r2) [] rem2\n    | (_, _, p1) :: rem1, [] -> combine (p1 :: r1) (omega :: r2) rem1 []\n    | (_, lbl1, p1) :: rem1, (_, lbl2, p2) :: rem2 ->\n      if lbl1.lbl_pos < lbl2.lbl_pos then\n        combine (p1 :: r1) (omega :: r2) rem1 l2\n      else if lbl1.lbl_pos > lbl2.lbl_pos then\n        combine (omega :: r1) (p2 :: r2) l1 rem2\n      else (* same label on both sides *)\n        combine (p1 :: r1) (p2 :: r2) rem1 rem2\n  in\n  combine [] [] l1 l2\n\nmodule Compat (Constr : sig\n  val equal :\n    Types.constructor_description -> Types.constructor_description -> bool\nend) =\nstruct\n  let rec compat p q =\n    match (p.pat_desc, q.pat_desc) with\n    (* Variables match any value *)\n    | (Tpat_any | Tpat_var 
_), _ | _, (Tpat_any | Tpat_var _) -> true\n    (* Structural induction *)\n    | Tpat_alias (p, _, _), _ -> compat p q\n    | _, Tpat_alias (q, _, _) -> compat p q\n    | Tpat_or (p1, p2, _), _ -> compat p1 q || compat p2 q\n    | _, Tpat_or (q1, q2, _) -> compat p q1 || compat p q2\n    (* Constructors, with special case for extension *)\n    | Tpat_construct (_, c1, ps1), Tpat_construct (_, c2, ps2) ->\n      Constr.equal c1 c2 && compats ps1 ps2\n    (* More standard stuff *)\n    | Tpat_variant (l1, op1, _), Tpat_variant (l2, op2, _) ->\n      l1 = l2 && ocompat op1 op2\n    | Tpat_constant c1, Tpat_constant c2 -> const_compare c1 c2 = 0\n    | Tpat_tuple ps, Tpat_tuple qs -> compats ps qs\n    | Tpat_lazy p, Tpat_lazy q -> compat p q\n    | Tpat_record (l1, _), Tpat_record (l2, _) ->\n      let ps, qs = records_args l1 l2 in\n      compats ps qs\n    | Tpat_array ps, Tpat_array qs ->\n      List.length ps = List.length qs && compats ps qs\n    | _, _ -> false\n\n  and ocompat op oq =\n    match (op, oq) with\n    | None, None -> true\n    | Some p, Some q -> compat p q\n    | None, Some _ | Some _, None -> false\n\n  and compats ps qs =\n    match (ps, qs) with\n    | [], [] -> true\n    | p :: ps, q :: qs -> compat p q && compats ps qs\n    | _, _ -> false\nend\n\nmodule SyntacticCompat = Compat (struct\n  let equal c1 c2 = Types.equal_tag c1.cstr_tag c2.cstr_tag\nend)\n\nlet compat = SyntacticCompat.compat\n\nand compats = SyntacticCompat.compats\n\n(* Due to (potential) rebinding, two extension constructors\n   of the same arity type may equal *)\n\nexception Empty (* Empty pattern *)\n\n(****************************************)\n(* Utilities for retrieving type paths  *)\n(****************************************)\n\n(* May need a clean copy, cf. 
PR#4745 *)\nlet clean_copy ty =\n  if ty.level = Btype.generic_level then ty\n  else Subst.type_expr Subst.identity ty\n\nlet get_type_path ty tenv =\n  let ty = Ctype.repr (Ctype.expand_head tenv (clean_copy ty)) in\n  match ty.desc with\n  | Tconstr (path, _, _) -> path\n  | _ -> fatal_error \"Parmatch.get_type_path\"\n\n(*************************************)\n(* Values as patterns pretty printer *)\n(*************************************)\n\nlet print_res_pat : (Typedtree.pattern -> string) ref =\n  ref (fun _ -> assert false)\n\nopen Format\n\nlet is_cons = function\n  | {cstr_name = \"::\"} -> true\n  | _ -> false\n\nlet pretty_const c =\n  match c with\n  | Const_int i -> Printf.sprintf \"%d\" i\n  | Const_char i -> Printf.sprintf \"%s\" (Pprintast.string_of_int_as_char i)\n  | Const_string (s, _) -> Printf.sprintf \"%S\" s\n  | Const_float f -> Printf.sprintf \"%s\" f\n  | Const_int32 i -> Printf.sprintf \"%ldl\" i\n  | Const_int64 i -> Printf.sprintf \"%LdL\" i\n  | Const_bigint (sign, i) ->\n    Printf.sprintf \"%s\" (Bigint_utils.to_string sign i)\n\nlet rec pretty_val ppf v =\n  match v.pat_extra with\n  | (cstr, _loc, _attrs) :: rem -> (\n    match cstr with\n    | Tpat_unpack ->\n      fprintf ppf \"@[(module %a)@]\" pretty_val {v with pat_extra = rem}\n    | Tpat_constraint _ ->\n      fprintf ppf \"@[(%a : _)@]\" pretty_val {v with pat_extra = rem}\n    | Tpat_type _ ->\n      fprintf ppf \"@[(# %a)@]\" pretty_val {v with pat_extra = rem}\n    | Tpat_open _ ->\n      fprintf ppf \"@[(# %a)@]\" pretty_val {v with pat_extra = rem})\n  | [] -> (\n    match v.pat_desc with\n    | Tpat_any -> fprintf ppf \"_\"\n    | Tpat_var (x, _) -> fprintf ppf \"%s\" (Ident.name x)\n    | Tpat_constant c -> fprintf ppf \"%s\" (pretty_const c)\n    | Tpat_tuple vs -> fprintf ppf \"@[(%a)@]\" (pretty_vals \",\") vs\n    | Tpat_construct (_, cstr, []) -> fprintf ppf \"%s\" cstr.cstr_name\n    | Tpat_construct (_, cstr, [w]) ->\n      fprintf ppf \"@[<2>%s(%a)@]\" 
cstr.cstr_name pretty_arg w\n    | Tpat_construct (_, cstr, vs) -> (\n      let name = cstr.cstr_name in\n      match (name, vs) with\n      | \"::\", [v1; v2] -> fprintf ppf \"@[%a::@,%a@]\" pretty_car v1 pretty_cdr v2\n      | _ -> fprintf ppf \"@[<2>%s@ @[(%a)@]@]\" name (pretty_vals \",\") vs)\n    | Tpat_variant (l, None, _) -> fprintf ppf \"#%s\" l\n    | Tpat_variant (l, Some w, _) -> fprintf ppf \"@[<2>#%s(%a)@]\" l pretty_arg w\n    | Tpat_record (lvs, _) -> (\n      let filtered_lvs =\n        Ext_list.filter lvs (function\n          | _, _, {pat_desc = Tpat_any} -> false (* do not show lbl=_ *)\n          | _ -> true)\n      in\n      match filtered_lvs with\n      | [] -> fprintf ppf \"_\"\n      | (_, _lbl, _) :: _q ->\n        let elision_mark _ = () in\n        fprintf ppf \"@[{%a%t}@]\" pretty_lvals filtered_lvs elision_mark)\n    | Tpat_array vs -> fprintf ppf \"@[[%a]@]\" (pretty_vals \",\") vs\n    | Tpat_lazy v -> fprintf ppf \"@[<2>lazy@ %a@]\" pretty_arg v\n    | Tpat_alias (v, x, _) ->\n      fprintf ppf \"@[(%a@ as %a)@]\" pretty_val v Ident.print x\n    | Tpat_or (v, w, _) -> fprintf ppf \"@[%a | @,%a@]\" pretty_or v pretty_or w)\n\nand pretty_car ppf v =\n  match v.pat_desc with\n  | Tpat_construct (_, cstr, [_; _]) when is_cons cstr ->\n    fprintf ppf \"(%a)\" pretty_val v\n  | _ -> pretty_val ppf v\n\nand pretty_cdr ppf v =\n  match v.pat_desc with\n  | Tpat_construct (_, cstr, [v1; v2]) when is_cons cstr ->\n    fprintf ppf \"%a::@,%a\" pretty_car v1 pretty_cdr v2\n  | _ -> pretty_val ppf v\n\nand pretty_arg ppf v =\n  match v.pat_desc with\n  | Tpat_construct (_, _, _ :: _) | Tpat_variant (_, Some _, _) ->\n    fprintf ppf \"(%a)\" pretty_val v\n  | _ -> pretty_val ppf v\n\nand pretty_or ppf v =\n  match v.pat_desc with\n  | Tpat_or (v, w, _) -> fprintf ppf \"%a | @,%a\" pretty_or v pretty_or w\n  | _ -> pretty_val ppf v\n\nand pretty_vals sep ppf = function\n  | [] -> ()\n  | [v] -> pretty_val ppf v\n  | v :: vs -> fprintf ppf 
\"%a%s@ %a\" pretty_val v sep (pretty_vals sep) vs\n\nand pretty_lvals ppf = function\n  | [] -> ()\n  | [(_, lbl, v)] -> fprintf ppf \"%s: %a\" lbl.lbl_name pretty_val v\n  | (_, lbl, v) :: rest ->\n    fprintf ppf \"%s: %a,@ %a\" lbl.lbl_name pretty_val v pretty_lvals rest\n\nlet top_pretty ppf v = fprintf ppf \"@[%a@]@?\" pretty_val v\n\nlet pretty_pat p =\n  top_pretty Format.str_formatter p;\n  prerr_string (Format.flush_str_formatter ())\n\ntype matrix = pattern list list\n\nlet pretty_line ps =\n  List.iter\n    (fun p ->\n      top_pretty Format.str_formatter p;\n      prerr_string \" <\";\n      prerr_string (Format.flush_str_formatter ());\n      prerr_string \">\")\n    ps\n\nlet pretty_matrix (pss : matrix) =\n  prerr_endline \"begin matrix\";\n  List.iter\n    (fun ps ->\n      pretty_line ps;\n      prerr_endline \"\")\n    pss;\n  prerr_endline \"end matrix\"\n\n(****************************)\n(* Utilities for matching   *)\n(****************************)\n\n(* Check top matching *)\nlet simple_match p1 p2 =\n  match (p1.pat_desc, p2.pat_desc) with\n  | Tpat_construct (_, c1, _), Tpat_construct (_, c2, _) ->\n    Types.equal_tag c1.cstr_tag c2.cstr_tag\n  | Tpat_variant (l1, _, _), Tpat_variant (l2, _, _) -> l1 = l2\n  | Tpat_constant c1, Tpat_constant c2 -> const_compare c1 c2 = 0\n  | Tpat_lazy _, Tpat_lazy _ -> true\n  | Tpat_record _, Tpat_record _ -> true\n  | Tpat_tuple p1s, Tpat_tuple p2s | Tpat_array p1s, Tpat_array p2s ->\n    List.length p1s = List.length p2s\n  | _, (Tpat_any | Tpat_var _) -> true\n  | _, _ -> false\n\n(* extract record fields as a whole *)\nlet record_arg p =\n  match p.pat_desc with\n  | Tpat_any -> []\n  | Tpat_record (args, _) -> args\n  | _ -> fatal_error \"Parmatch.as_record\"\n\n(* Raise Not_found when pos is not present in arg *)\nlet get_field pos arg =\n  let _, _, p = List.find (fun (_, lbl, _) -> pos = lbl.lbl_pos) arg in\n  p\n\nlet extract_fields omegas arg =\n  List.map\n    (fun (_, lbl, _) -> try get_field 
lbl.lbl_pos arg with Not_found -> omega)\n    omegas\n\nlet all_record_args lbls =\n  match lbls with\n  | (_, {lbl_all}, _) :: _ ->\n    let t =\n      Array.map\n        (fun lbl -> (mknoloc (Longident.Lident \"?temp?\"), lbl, omega))\n        lbl_all\n    in\n    List.iter (fun ((_, lbl, _) as x) -> t.(lbl.lbl_pos) <- x) lbls;\n    Array.to_list t\n  | _ -> fatal_error \"Parmatch.all_record_args\"\n\n(* Build argument list when p2 >= p1, where p1 is a simple pattern *)\nlet rec simple_match_args p1 p2 =\n  match p2.pat_desc with\n  | Tpat_alias (p2, _, _) -> simple_match_args p1 p2\n  | Tpat_construct (_, _, args) -> args\n  | Tpat_variant (_, Some arg, _) -> [arg]\n  | Tpat_tuple args -> args\n  | Tpat_record (args, _) -> extract_fields (record_arg p1) args\n  | Tpat_array args -> args\n  | Tpat_lazy arg -> [arg]\n  | Tpat_any | Tpat_var _ -> (\n    match p1.pat_desc with\n    | Tpat_construct (_, _, args) -> omega_list args\n    | Tpat_variant (_, Some _, _) -> [omega]\n    | Tpat_tuple args -> omega_list args\n    | Tpat_record (args, _) -> omega_list args\n    | Tpat_array args -> omega_list args\n    | Tpat_lazy _ -> [omega]\n    | _ -> [])\n  | _ -> []\n\n(*\n  Normalize a pattern ->\n   all arguments are omega (simple pattern) and no more variables\n*)\n\nlet rec normalize_pat q =\n  match q.pat_desc with\n  | Tpat_any | Tpat_constant _ -> q\n  | Tpat_var _ -> make_pat Tpat_any q.pat_type q.pat_env\n  | Tpat_alias (p, _, _) -> normalize_pat p\n  | Tpat_tuple args ->\n    make_pat (Tpat_tuple (omega_list args)) q.pat_type q.pat_env\n  | Tpat_construct (lid, c, args) ->\n    make_pat (Tpat_construct (lid, c, omega_list args)) q.pat_type q.pat_env\n  | Tpat_variant (l, arg, row) ->\n    make_pat\n      (Tpat_variant (l, may_map (fun _ -> omega) arg, row))\n      q.pat_type q.pat_env\n  | Tpat_array args ->\n    make_pat (Tpat_array (omega_list args)) q.pat_type q.pat_env\n  | Tpat_record (largs, closed) ->\n    make_pat\n      (Tpat_record\n         
(List.map (fun (lid, lbl, _) -> (lid, lbl, omega)) largs, closed))\n      q.pat_type q.pat_env\n  | Tpat_lazy _ -> make_pat (Tpat_lazy omega) q.pat_type q.pat_env\n  | Tpat_or _ -> fatal_error \"Parmatch.normalize_pat\"\n\n(*\n  Build normalized (cf. supra) discriminating pattern,\n  in the non-data type case\n*)\n\nlet discr_pat q pss =\n  let rec acc_pat acc pss =\n    match pss with\n    | ({pat_desc = Tpat_alias (p, _, _)} :: ps) :: pss ->\n      acc_pat acc ((p :: ps) :: pss)\n    | ({pat_desc = Tpat_or (p1, p2, _)} :: ps) :: pss ->\n      acc_pat acc ((p1 :: ps) :: (p2 :: ps) :: pss)\n    | ({pat_desc = Tpat_any | Tpat_var _} :: _) :: pss -> acc_pat acc pss\n    | (({pat_desc = Tpat_tuple _} as p) :: _) :: _ -> normalize_pat p\n    | (({pat_desc = Tpat_lazy _} as p) :: _) :: _ -> normalize_pat p\n    | (({pat_desc = Tpat_record (largs, closed)} as p) :: _) :: pss ->\n      let new_omegas =\n        List.fold_right\n          (fun (lid, lbl, _) r ->\n            try\n              let _ = get_field lbl.lbl_pos r in\n              r\n            with Not_found -> (lid, lbl, omega) :: r)\n          largs (record_arg acc)\n      in\n      acc_pat\n        (make_pat (Tpat_record (new_omegas, closed)) p.pat_type p.pat_env)\n        pss\n    | _ -> acc\n  in\n\n  match normalize_pat q with\n  | {pat_desc = Tpat_any | Tpat_record _} as q -> acc_pat q pss\n  | q -> q\n\n(*\n   In case a matching value is found, set actual arguments\n   of the matching pattern.\n*)\n\nlet rec read_args xs r =\n  match (xs, r) with\n  | [], _ -> ([], r)\n  | _ :: xs, arg :: rest ->\n    let args, rest = read_args xs rest in\n    (arg :: args, rest)\n  | _, _ -> fatal_error \"Parmatch.read_args\"\n\nlet do_set_args erase_mutable q r =\n  match q with\n  | {pat_desc = Tpat_tuple omegas} ->\n    let args, rest = read_args omegas r in\n    make_pat (Tpat_tuple args) q.pat_type q.pat_env :: rest\n  | {pat_desc = Tpat_record (omegas, closed)} ->\n    let args, rest = read_args omegas r in\n   
 make_pat\n      (Tpat_record\n         ( List.map2\n             (fun (lid, lbl, _) arg ->\n               if\n                 erase_mutable\n                 &&\n                 match lbl.lbl_mut with\n                 | Mutable -> true\n                 | Immutable -> false\n               then (lid, lbl, omega)\n               else (lid, lbl, arg))\n             omegas args,\n           closed ))\n      q.pat_type q.pat_env\n    :: rest\n  | {pat_desc = Tpat_construct (lid, c, omegas)} ->\n    let args, rest = read_args omegas r in\n    make_pat (Tpat_construct (lid, c, args)) q.pat_type q.pat_env :: rest\n  | {pat_desc = Tpat_variant (l, omega, row)} ->\n    let arg, rest =\n      match (omega, r) with\n      | Some _, a :: r -> (Some a, r)\n      | None, r -> (None, r)\n      | _ -> assert false\n    in\n    make_pat (Tpat_variant (l, arg, row)) q.pat_type q.pat_env :: rest\n  | {pat_desc = Tpat_lazy _omega} -> (\n    match r with\n    | arg :: rest -> make_pat (Tpat_lazy arg) q.pat_type q.pat_env :: rest\n    | _ -> fatal_error \"Parmatch.do_set_args (lazy)\")\n  | {pat_desc = Tpat_array omegas} ->\n    let args, rest = read_args omegas r in\n    make_pat (Tpat_array args) q.pat_type q.pat_env :: rest\n  | {pat_desc = Tpat_constant _ | Tpat_any} ->\n    q :: r (* case any is used in matching.ml *)\n  | _ -> fatal_error \"Parmatch.set_args\"\n\nlet set_args q r = do_set_args false q r\n\nand set_args_erase_mutable q r = do_set_args true q r\n\n(* filter pss according to pattern q *)\nlet filter_one q pss =\n  let rec filter_rec = function\n    | ({pat_desc = Tpat_alias (p, _, _)} :: ps) :: pss ->\n      filter_rec ((p :: ps) :: pss)\n    | ({pat_desc = Tpat_or (p1, p2, _)} :: ps) :: pss ->\n      filter_rec ((p1 :: ps) :: (p2 :: ps) :: pss)\n    | (p :: ps) :: pss ->\n      if simple_match q p then (simple_match_args q p @ ps) :: filter_rec pss\n      else filter_rec pss\n    | _ -> []\n  in\n  filter_rec pss\n\n(*\n  Filter pss in the ``extra case''. 
This applies :\n  - According to an extra constructor (datatype case, non-complete signature).\n  - According to anything (all-variables case).\n*)\nlet filter_extra pss =\n  let rec filter_rec = function\n    | ({pat_desc = Tpat_alias (p, _, _)} :: ps) :: pss ->\n      filter_rec ((p :: ps) :: pss)\n    | ({pat_desc = Tpat_or (p1, p2, _)} :: ps) :: pss ->\n      filter_rec ((p1 :: ps) :: (p2 :: ps) :: pss)\n    | ({pat_desc = Tpat_any | Tpat_var _} :: qs) :: pss -> qs :: filter_rec pss\n    | _ :: pss -> filter_rec pss\n    | [] -> []\n  in\n  filter_rec pss\n\n(*\n  Pattern p0 is the discriminating pattern,\n  returns [(q0,pss0) ; ... ; (qn,pssn)]\n  where the qi's are simple patterns and the pssi's are\n  matched matrices.\n\n  NOTES\n   * (qi,[]) is impossible.\n   * In the case when matching is useless (all-variable case),\n     returns []\n*)\n\nlet filter_all pat0 pss =\n  let rec insert q qs env =\n    match env with\n    | [] ->\n      let q0 = normalize_pat q in\n      [(q0, [simple_match_args q0 q @ qs])]\n    | ((q0, pss) as c) :: env ->\n      if simple_match q0 q then\n        (q0, (simple_match_args q0 q @ qs) :: pss) :: env\n      else c :: insert q qs env\n  in\n\n  let rec filter_rec env = function\n    | ({pat_desc = Tpat_alias (p, _, _)} :: ps) :: pss ->\n      filter_rec env ((p :: ps) :: pss)\n    | ({pat_desc = Tpat_or (p1, p2, _)} :: ps) :: pss ->\n      filter_rec env ((p1 :: ps) :: (p2 :: ps) :: pss)\n    | ({pat_desc = Tpat_any | Tpat_var _} :: _) :: pss -> filter_rec env pss\n    | (p :: ps) :: pss -> filter_rec (insert p ps env) pss\n    | _ -> env\n  and filter_omega env = function\n    | ({pat_desc = Tpat_alias (p, _, _)} :: ps) :: pss ->\n      filter_omega env ((p :: ps) :: pss)\n    | ({pat_desc = Tpat_or (p1, p2, _)} :: ps) :: pss ->\n      filter_omega env ((p1 :: ps) :: (p2 :: ps) :: pss)\n    | ({pat_desc = Tpat_any | Tpat_var _} :: ps) :: pss ->\n      filter_omega\n        (List.map\n           (fun (q, qss) -> (q, 
(simple_match_args q omega @ ps) :: qss))\n           env)\n        pss\n    | _ :: pss -> filter_omega env pss\n    | [] -> env\n  in\n\n  filter_omega\n    (filter_rec\n       (match pat0.pat_desc with\n       | Tpat_record _ | Tpat_tuple _ | Tpat_lazy _ -> [(pat0, [])]\n       | _ -> [])\n       pss)\n    pss\n\n(* Variant related functions *)\n\nlet rec set_last a = function\n  | [] -> []\n  | [_] -> [a]\n  | x :: l -> x :: set_last a l\n\n(* mark constructor lines for failure when they are incomplete *)\nlet rec mark_partial = function\n  | ({pat_desc = Tpat_alias (p, _, _)} :: ps) :: pss ->\n    mark_partial ((p :: ps) :: pss)\n  | ({pat_desc = Tpat_or (p1, p2, _)} :: ps) :: pss ->\n    mark_partial ((p1 :: ps) :: (p2 :: ps) :: pss)\n  | ({pat_desc = Tpat_any | Tpat_var _} :: _ as ps) :: pss ->\n    ps :: mark_partial pss\n  | ps :: pss -> set_last zero ps :: mark_partial pss\n  | [] -> []\n\nlet close_variant env row =\n  let row = Btype.row_repr row in\n  let nm =\n    List.fold_left\n      (fun nm (_tag, f) ->\n        match Btype.row_field_repr f with\n        | Reither (_, _, false, e) ->\n          (* m=false means that this tag is not explicitly matched *)\n          Btype.set_row_field e Rabsent;\n          None\n        | Rabsent | Reither (_, _, true, _) | Rpresent _ -> nm)\n      row.row_name row.row_fields\n  in\n  if (not row.row_closed) || nm != row.row_name then\n    (* this unification cannot fail *)\n    Ctype.unify env row.row_more\n      (Btype.newgenty\n         (Tvariant\n            {\n              row with\n              row_fields = [];\n              row_more = Btype.newgenvar ();\n              row_closed = true;\n              row_name = nm;\n            }))\n\nlet row_of_pat pat =\n  match Ctype.expand_head pat.pat_env pat.pat_type with\n  | {desc = Tvariant row} -> Btype.row_repr row\n  | _ -> assert false\n\n(*\n  Check whether the first column of env makes up a complete signature or\n  not.\n*)\n\nlet full_match closing env =\n 
 match env with\n  | ({pat_desc = Tpat_construct (_, c, _)}, _) :: _ ->\n    if c.cstr_consts < 0 then false (* extensions *)\n    else List.length env = c.cstr_consts + c.cstr_nonconsts\n  | (({pat_desc = Tpat_variant _} as p), _) :: _ ->\n    let fields =\n      List.map\n        (function\n          | {pat_desc = Tpat_variant (tag, _, _)}, _ -> tag\n          | _ -> assert false)\n        env\n    in\n    let row = row_of_pat p in\n    if closing && not (Btype.row_fixed row) then\n      (* closing=true, we are considering the variant as closed *)\n      List.for_all\n        (fun (tag, f) ->\n          match Btype.row_field_repr f with\n          | Rabsent | Reither (_, _, false, _) -> true\n          | Reither (_, _, true, _)\n          (* m=true, do not discard matched tags, rather warn *)\n          | Rpresent _ ->\n            List.mem tag fields)\n        row.row_fields\n    else\n      row.row_closed\n      && List.for_all\n           (fun (tag, f) ->\n             Btype.row_field_repr f = Rabsent || List.mem tag fields)\n           row.row_fields\n  | ({pat_desc = Tpat_constant _}, _) :: _ -> false\n  | ({pat_desc = Tpat_tuple _}, _) :: _ -> true\n  | ({pat_desc = Tpat_record _}, _) :: _ -> true\n  | ({pat_desc = Tpat_array _}, _) :: _ -> false\n  | ({pat_desc = Tpat_lazy _}, _) :: _ -> true\n  | ({pat_desc = Tpat_any | Tpat_var _ | Tpat_alias _ | Tpat_or _}, _) :: _ | []\n    ->\n    assert false\n\n(* Written as a non-fragile matching, PR#7451 originated from a fragile matching below. 
*)\nlet should_extend ext env =\n  match ext with\n  | None -> false\n  | Some ext -> (\n    match env with\n    | [] -> assert false\n    | (p, _) :: _ -> (\n      match p.pat_desc with\n      | Tpat_construct\n          (_, {cstr_tag = Cstr_constant _ | Cstr_block _ | Cstr_unboxed}, _) ->\n        let path = get_type_path p.pat_type p.pat_env in\n        Path.same path ext\n      | Tpat_construct (_, {cstr_tag = Cstr_extension _}, _) -> false\n      | Tpat_constant _ | Tpat_tuple _ | Tpat_variant _ | Tpat_record _\n      | Tpat_array _ | Tpat_lazy _ ->\n        false\n      | Tpat_any | Tpat_var _ | Tpat_alias _ | Tpat_or _ -> assert false))\n\nmodule ConstructorTagHashtbl = Hashtbl.Make (struct\n  type t = Types.constructor_tag\n  let hash = Hashtbl.hash\n  let equal = Types.equal_tag\nend)\n\n(* complement constructor tags *)\nlet complete_tags nconsts nconstrs tags =\n  let seen_const = Array.make nconsts false\n  and seen_constr = Array.make nconstrs false in\n  List.iter\n    (function\n      | Cstr_constant i -> seen_const.(i) <- true\n      | Cstr_block i -> seen_constr.(i) <- true\n      | _ -> assert false)\n    tags;\n  let r = ConstructorTagHashtbl.create (nconsts + nconstrs) in\n  for i = 0 to nconsts - 1 do\n    if not seen_const.(i) then ConstructorTagHashtbl.add r (Cstr_constant i) ()\n  done;\n  for i = 0 to nconstrs - 1 do\n    if not seen_constr.(i) then ConstructorTagHashtbl.add r (Cstr_block i) ()\n  done;\n  r\n\n(* build a pattern from a constructor list *)\nlet pat_of_constr ex_pat cstr =\n  {\n    ex_pat with\n    pat_desc =\n      Tpat_construct\n        ( mknoloc (Longident.Lident \"?pat_of_constr?\"),\n          cstr,\n          omegas cstr.cstr_arity );\n  }\n\nlet orify x y = make_pat (Tpat_or (x, y, None)) x.pat_type x.pat_env\n\nlet rec orify_many = function\n  | [] -> assert false\n  | [x] -> x\n  | x :: xs -> orify x (orify_many xs)\n\nlet pat_of_constrs ex_pat cstrs =\n  if cstrs = [] then raise Empty\n  else orify_many (List.map 
(pat_of_constr ex_pat) cstrs)\n\nlet pats_of_type ?(always = false) env ty =\n  let ty' = Ctype.expand_head env ty in\n  match ty'.desc with\n  | Tconstr (path, _, _) -> (\n    try\n      match (Env.find_type path env).type_kind with\n      | Type_variant cl\n        when always\n             || List.length cl = 1\n             || List.for_all (fun cd -> cd.Types.cd_res <> None) cl ->\n        let cstrs = fst (Env.find_type_descrs path env) in\n        List.map (pat_of_constr (make_pat Tpat_any ty env)) cstrs\n      | Type_record _ ->\n        let labels = snd (Env.find_type_descrs path env) in\n        let fields =\n          List.map\n            (fun ld -> (mknoloc (Longident.Lident \"?pat_of_label?\"), ld, omega))\n            labels\n        in\n        [make_pat (Tpat_record (fields, Closed)) ty env]\n      | _ -> [omega]\n    with Not_found -> [omega])\n  | Ttuple tl -> [make_pat (Tpat_tuple (omegas (List.length tl))) ty env]\n  | _ -> [omega]\n\nlet rec get_variant_constructors env ty =\n  match (Ctype.repr ty).desc with\n  | Tconstr (path, _, _) -> (\n    try\n      match Env.find_type path env with\n      | {type_kind = Type_variant _} -> fst (Env.find_type_descrs path env)\n      | {type_manifest = Some _} ->\n        get_variant_constructors env\n          (Ctype.expand_head_once env (clean_copy ty))\n      | _ -> fatal_error \"Parmatch.get_variant_constructors\"\n    with Not_found -> fatal_error \"Parmatch.get_variant_constructors\")\n  | _ -> fatal_error \"Parmatch.get_variant_constructors\"\n\n(* Sends back a pattern that complements constructor tags all_tag *)\nlet complete_constrs p all_tags =\n  let c =\n    match p.pat_desc with\n    | Tpat_construct (_, c, _) -> c\n    | _ -> assert false\n  in\n  let not_tags = complete_tags c.cstr_consts c.cstr_nonconsts all_tags in\n  let constrs = get_variant_constructors p.pat_env c.cstr_res in\n  let others =\n    Ext_list.filter constrs (fun cnstr ->\n        ConstructorTagHashtbl.mem not_tags 
cnstr.cstr_tag)\n  in\n  let const, nonconst =\n    List.partition (fun cnstr -> cnstr.cstr_arity = 0) others\n  in\n  const @ nonconst\n\nlet build_other_constrs env p =\n  match p.pat_desc with\n  | Tpat_construct (_, {cstr_tag = Cstr_constant _ | Cstr_block _}, _) ->\n    let get_tag = function\n      | {pat_desc = Tpat_construct (_, c, _)} -> c.cstr_tag\n      | _ -> fatal_error \"Parmatch.get_tag\"\n    in\n    let all_tags = List.map (fun (p, _) -> get_tag p) env in\n    pat_of_constrs p (complete_constrs p all_tags)\n  | _ -> extra_pat\n\n(* Auxiliary for build_other *)\n\nlet build_other_constant proj make first next p env =\n  let all = List.map (fun (p, _) -> proj p.pat_desc) env in\n  let rec try_const i =\n    if List.mem i all then try_const (next i)\n    else make_pat (make i) p.pat_type p.pat_env\n  in\n  try_const first\n\n(*\n  Builds a pattern that is incompatible with all patterns in\n  in the first column of env\n*)\n\nlet some_other_tag = \"<some other tag>\"\n\nlet build_other ext env : Typedtree.pattern =\n  match env with\n  | ({pat_desc = Tpat_construct (lid, {cstr_tag = Cstr_extension _}, _)}, _)\n    :: _ ->\n    (* let c = {c with cstr_name = \"*extension*\"} in *)\n    (* PR#7330 *)\n    make_pat\n      (Tpat_var (Ident.create \"*extension*\", {lid with txt = \"*extension*\"}))\n      Ctype.none Env.empty\n  | (({pat_desc = Tpat_construct _} as p), _) :: _ -> (\n    match ext with\n    | Some ext when Path.same ext (get_type_path p.pat_type p.pat_env) ->\n      extra_pat\n    | _ -> build_other_constrs env p)\n  | (({pat_desc = Tpat_variant (_, _, r)} as p), _) :: _ -> (\n    let tags =\n      List.map\n        (function\n          | {pat_desc = Tpat_variant (tag, _, _)}, _ -> tag\n          | _ -> assert false)\n        env\n    in\n    let row = row_of_pat p in\n    let make_other_pat tag const =\n      let arg = if const then None else Some omega in\n      make_pat (Tpat_variant (tag, arg, r)) p.pat_type p.pat_env\n    in\n    
match\n      List.fold_left\n        (fun others (tag, f) ->\n          if List.mem tag tags then others\n          else\n            match Btype.row_field_repr f with\n            | Rabsent (* | Reither _ *) -> others\n            (* This one is called after erasing pattern info *)\n            | Reither (c, _, _, _) -> make_other_pat tag c :: others\n            | Rpresent arg -> make_other_pat tag (arg = None) :: others)\n        [] row.row_fields\n    with\n    | [] -> make_other_pat some_other_tag true\n    | pat :: other_pats ->\n      List.fold_left\n        (fun p_res pat ->\n          make_pat (Tpat_or (pat, p_res, None)) p.pat_type p.pat_env)\n        pat other_pats)\n  | (({pat_desc = Tpat_constant (Const_int _)} as p), _) :: _ ->\n    build_other_constant\n      (function\n        | Tpat_constant (Const_int i) -> i\n        | _ -> assert false)\n      (function\n        | i -> Tpat_constant (Const_int i))\n      0 succ p env\n  | (({pat_desc = Tpat_constant (Const_char _)} as p), _) :: _ ->\n    build_other_constant\n      (function\n        | Tpat_constant (Const_char i) -> i\n        | _ -> assert false)\n      (function\n        | i -> Tpat_constant (Const_char i))\n      0 succ p env\n  | (({pat_desc = Tpat_constant (Const_int32 _)} as p), _) :: _ ->\n    build_other_constant\n      (function\n        | Tpat_constant (Const_int32 i) -> i\n        | _ -> assert false)\n      (function\n        | i -> Tpat_constant (Const_int32 i))\n      0l Int32.succ p env\n  | (({pat_desc = Tpat_constant (Const_int64 _)} as p), _) :: _ ->\n    build_other_constant\n      (function\n        | Tpat_constant (Const_int64 i) -> i\n        | _ -> assert false)\n      (function\n        | i -> Tpat_constant (Const_int64 i))\n      0L Int64.succ p env\n  | (({pat_desc = Tpat_constant (Const_bigint _)} as p), _) :: _ ->\n    build_other_constant\n      (function\n        | Tpat_constant (Const_bigint (sign, i)) ->\n          String.length (Bigint_utils.to_string sign i)\n  
      | _ -> assert false)\n      (function\n        | i -> Tpat_constant (Const_bigint (true, string_of_int i)))\n      0 succ p env\n  | (({pat_desc = Tpat_constant (Const_string _)} as p), _) :: _ ->\n    build_other_constant\n      (function\n        | Tpat_constant (Const_string (s, _)) -> String.length s\n        | _ -> assert false)\n      (function\n        | i -> Tpat_constant (Const_string (String.make i '*', None)))\n      0 succ p env\n  | (({pat_desc = Tpat_constant (Const_float _)} as p), _) :: _ ->\n    build_other_constant\n      (function\n        | Tpat_constant (Const_float f) -> float_of_string f\n        | _ -> assert false)\n      (function\n        | f -> Tpat_constant (Const_float (string_of_float f)))\n      0.0\n      (fun f -> f +. 1.0)\n      p env\n  | (({pat_desc = Tpat_array _} as p), _) :: _ ->\n    let all_lengths =\n      List.map\n        (fun (p, _) ->\n          match p.pat_desc with\n          | Tpat_array args -> List.length args\n          | _ -> assert false)\n        env\n    in\n    let rec try_arrays l =\n      if List.mem l all_lengths then try_arrays (l + 1)\n      else make_pat (Tpat_array (omegas l)) p.pat_type p.pat_env\n    in\n    try_arrays 0\n  | [] -> omega\n  | _ -> omega\n\n(*\n  Core function :\n  Is the last row of pattern matrix pss + qs satisfiable ?\n  That is :\n    Does there exists at least one value vector, es such that :\n     1- for all ps in pss ps # es (ps and es are not compatible)\n     2- qs <= es                  (es matches qs)\n*)\n\nlet rec has_instance p =\n  match p.pat_desc with\n  | Tpat_variant (l, _, r) when is_absent l r -> false\n  | Tpat_any | Tpat_var _ | Tpat_constant _ | Tpat_variant (_, None, _) -> true\n  | Tpat_alias (p, _, _) | Tpat_variant (_, Some p, _) -> has_instance p\n  | Tpat_or (p1, p2, _) -> has_instance p1 || has_instance p2\n  | Tpat_construct (_, _, ps) | Tpat_tuple ps | Tpat_array ps ->\n    has_instances ps\n  | Tpat_record (lps, _) -> has_instances (List.map 
(fun (_, _, x) -> x) lps)\n  | Tpat_lazy p -> has_instance p\n\nand has_instances = function\n  | [] -> true\n  | q :: rem -> has_instance q && has_instances rem\n\n(*\n   In two places in the following function, we check the coherence of the first\n   column of (pss + qs).\n   If it is incoherent, then we exit early saying that (pss + qs) is not\n   satisfiable (which is equivalent to saying \"oh, we shouldn't have considered\n   that branch, no good result came come from here\").\n\n   But what happens if we have a coherent but ill-typed column?\n   - we might end up returning [false], which is equivalent to noticing the\n   incompatibility: clearly this is fine.\n   - if we end up returning [true] then we're saying that [qs] is useful while\n   it is not. This is sad but not the end of the world, we're just allowing dead\n   code to survive.\n*)\nlet rec satisfiable pss qs =\n  match pss with\n  | [] -> has_instances qs\n  | _ -> (\n    match qs with\n    | [] -> false\n    | {pat_desc = Tpat_or (q1, q2, _)} :: qs ->\n      satisfiable pss (q1 :: qs) || satisfiable pss (q2 :: qs)\n    | {pat_desc = Tpat_alias (q, _, _)} :: qs -> satisfiable pss (q :: qs)\n    | {pat_desc = Tpat_any | Tpat_var _} :: qs -> (\n      if not (all_coherent (simplified_first_col pss)) then false\n      else\n        let q0 = discr_pat omega pss in\n        match filter_all q0 pss with\n        (* first column of pss is made of variables only *)\n        | [] -> satisfiable (filter_extra pss) qs\n        | constrs ->\n          if full_match false constrs then\n            List.exists\n              (fun (p, pss) ->\n                (not (is_absent_pat p))\n                && satisfiable pss (simple_match_args p omega @ qs))\n              constrs\n          else satisfiable (filter_extra pss) qs)\n    | {pat_desc = Tpat_variant (l, _, r)} :: _ when is_absent l r -> false\n    | q :: qs ->\n      if not (all_coherent (q :: simplified_first_col pss)) then false\n      else\n        let 
q0 = discr_pat q pss in\n        satisfiable (filter_one q0 pss) (simple_match_args q0 q @ qs))\n\n(* Also return the remaining cases, to enable GADT handling\n\n   For considerations regarding the coherence check, see the comment on\n   [satisfiable] above. *)\nlet rec satisfiables pss qs =\n  match pss with\n  | [] -> if has_instances qs then [qs] else []\n  | _ -> (\n    match qs with\n    | [] -> []\n    | {pat_desc = Tpat_or (q1, q2, _)} :: qs ->\n      satisfiables pss (q1 :: qs) @ satisfiables pss (q2 :: qs)\n    | {pat_desc = Tpat_alias (q, _, _)} :: qs -> satisfiables pss (q :: qs)\n    | {pat_desc = Tpat_any | Tpat_var _} :: qs -> (\n      if not (all_coherent (simplified_first_col pss)) then []\n      else\n        let q0 = discr_pat omega pss in\n        let wild p =\n          List.map (fun qs -> p :: qs) (satisfiables (filter_extra pss) qs)\n        in\n        match filter_all q0 pss with\n        (* first column of pss is made of variables only *)\n        | [] -> wild omega\n        | (p, _) :: _ as constrs -> (\n          let for_constrs () =\n            List.flatten\n              (List.map\n                 (fun (p, pss) ->\n                   if is_absent_pat p then []\n                   else\n                     List.map (set_args p)\n                       (satisfiables pss (simple_match_args p omega @ qs)))\n                 constrs)\n          in\n          if full_match false constrs then for_constrs ()\n          else\n            match p.pat_desc with\n            | Tpat_construct _ ->\n              (* activate this code for checking non-gadt constructors *)\n              wild (build_other_constrs constrs p) @ for_constrs ()\n            | _ -> wild omega))\n    | {pat_desc = Tpat_variant (l, _, r)} :: _ when is_absent l r -> []\n    | q :: qs ->\n      if not (all_coherent (q :: simplified_first_col pss)) then []\n      else\n        let q0 = discr_pat q pss in\n        List.map (set_args q0)\n          (satisfiables (filter_one q0 
pss) (simple_match_args q0 q @ qs)))\n\n(*\n  Now another satisfiable function that additionally\n  supplies an example of a matching value.\n\n  This function should be called for exhaustiveness check only.\n*)\n\ntype 'a result =\n  | Rnone (* No matching value *)\n  | Rsome of 'a (* This matching value *)\n\n(*\nlet rec try_many  f = function\n  | [] -> Rnone\n  | (p,pss)::rest ->\n      match f (p,pss) with\n      | Rnone -> try_many  f rest\n      | r -> r\n*)\n\nlet rappend r1 r2 =\n  match (r1, r2) with\n  | Rnone, _ -> r2\n  | _, Rnone -> r1\n  | Rsome l1, Rsome l2 -> Rsome (l1 @ l2)\n\nlet rec try_many_gadt f = function\n  | [] -> Rnone\n  | (p, pss) :: rest -> rappend (f (p, pss)) (try_many_gadt f rest)\n\n(*\nlet rec exhaust ext pss n = match pss with\n| []    ->  Rsome (omegas n)\n| []::_ ->  Rnone\n| pss   ->\n    let q0 = discr_pat omega pss in\n    begin match filter_all q0 pss with\n          (* first column of pss is made of variables only *)\n    | [] ->\n        begin match exhaust ext (filter_extra pss) (n-1) with\n        | Rsome r -> Rsome (q0::r)\n        | r -> r\n      end\n    | constrs ->\n        let try_non_omega (p,pss) =\n          if is_absent_pat p then\n            Rnone\n          else\n            match\n              exhaust\n                ext pss (List.length (simple_match_args p omega) + n - 1)\n            with\n            | Rsome r -> Rsome (set_args p r)\n            | r       -> r in\n        if\n          full_match true false constrs && not (should_extend ext constrs)\n        then\n          try_many try_non_omega constrs\n        else\n          (*\n             D = filter_extra pss is the default matrix\n             as it is included in pss, one can avoid\n             recursive calls on specialized matrices,\n             Essentially :\n             * D exhaustive => pss exhaustive\n             * D non-exhaustive => we have a non-filtered value\n          *)\n          let r =  exhaust ext (filter_extra pss) 
(n-1) in\n          match r with\n          | Rnone -> Rnone\n          | Rsome r ->\n              try\n                Rsome (build_other ext constrs::r)\n              with\n      (* cannot occur, since constructors don't make a full signature *)\n              | Empty -> fatal_error \"Parmatch.exhaust\"\n    end\n\nlet combinations f lst lst' =\n  let rec iter2 x =\n    function\n        [] -> []\n      | y :: ys ->\n          f x y :: iter2 x ys\n  in\n  let rec iter =\n    function\n        [] -> []\n      | x :: xs -> iter2 x lst' @ iter xs\n  in\n  iter lst\n*)\n(*\nlet print_pat pat =\n  let rec string_of_pat pat =\n    match pat.pat_desc with\n        Tpat_var _ -> \"v\"\n      | Tpat_any -> \"_\"\n      | Tpat_alias (p, x) -> Printf.sprintf \"(%s) as ?\"  (string_of_pat p)\n      | Tpat_constant n -> \"0\"\n      | Tpat_construct (_, lid, _) ->\n        Printf.sprintf \"%s\" (String.concat \".\" (Longident.flatten lid.txt))\n      | Tpat_lazy p ->\n        Printf.sprintf \"(lazy %s)\" (string_of_pat p)\n      | Tpat_or (p1,p2,_) ->\n        Printf.sprintf \"(%s | %s)\" (string_of_pat p1) (string_of_pat p2)\n      | Tpat_tuple list ->\n        Printf.sprintf \"(%s)\" (String.concat \",\" (List.map string_of_pat list))\n      | Tpat_variant (_, _, _) -> \"variant\"\n      | Tpat_record (_, _) -> \"record\"\n      | Tpat_array _ -> \"array\"\n  in\n  Printf.fprintf stderr \"PAT[%s]\\n%!\" (string_of_pat pat)\n*)\n\n(* strictly more powerful than exhaust; however, exhaust\n   was kept for backwards compatibility *)\nlet rec exhaust_gadt (ext : Path.t option) pss n =\n  match pss with\n  | [] -> Rsome [omegas n]\n  | [] :: _ -> Rnone\n  | pss -> (\n    if not (all_coherent (simplified_first_col pss)) then\n      (* We're considering an ill-typed branch, we won't actually be able to\n         produce a well typed value taking that branch. 
*)\n      Rnone\n    else\n      (* Assuming the first column is ill-typed but considered coherent, we\n         might end up producing an ill-typed witness of non-exhaustivity\n         corresponding to the current branch.\n\n         If [exhaust] has been called by [do_check_partial], then the witnesses\n         produced get typechecked and the ill-typed ones are discarded.\n\n         If [exhaust] has been called by [do_check_fragile], then it is possible\n         we might fail to warn the user that the matching is fragile. See for\n         example testsuite/tests/warnings/w04_failure.ml. *)\n      let q0 = discr_pat omega pss in\n      match filter_all q0 pss with\n      (* first column of pss is made of variables only *)\n      | [] -> (\n        match exhaust_gadt ext (filter_extra pss) (n - 1) with\n        | Rsome r -> Rsome (List.map (fun row -> q0 :: row) r)\n        | r -> r)\n      | constrs -> (\n        let try_non_omega (p, pss) =\n          if is_absent_pat p then Rnone\n          else\n            match\n              exhaust_gadt ext pss\n                (List.length (simple_match_args p omega) + n - 1)\n            with\n            | Rsome r -> Rsome (List.map (fun row -> set_args p row) r)\n            | r -> r\n        in\n        let before = try_many_gadt try_non_omega constrs in\n        if full_match false constrs && not (should_extend ext constrs) then\n          before\n        else\n          (*\n              D = filter_extra pss is the default matrix\n              as it is included in pss, one can avoid\n              recursive calls on specialized matrices,\n              Essentially :\n            * D exhaustive => pss exhaustive\n            * D non-exhaustive => we have a non-filtered value\n            *)\n          let r = exhaust_gadt ext (filter_extra pss) (n - 1) in\n          match r with\n          | Rnone -> before\n          | Rsome r -> (\n            try\n              let p = build_other ext constrs in\n            
  let dug = List.map (fun tail -> p :: tail) r in\n              match before with\n              | Rnone -> Rsome dug\n              | Rsome x -> Rsome (x @ dug)\n            with\n            (* cannot occur, since constructors don't make a full signature *)\n            | Empty ->\n              fatal_error \"Parmatch.exhaust\")))\n\nlet exhaust_gadt ext pss n =\n  let ret = exhaust_gadt ext pss n in\n  match ret with\n  | Rnone -> Rnone\n  | Rsome lst ->\n    (* The following line is needed to compile stdlib/printf.ml *)\n    if lst = [] then Rsome (omegas n)\n    else\n      let singletons =\n        List.map\n          (function\n            | [x] -> x\n            | _ -> assert false)\n          lst\n      in\n      Rsome [orify_many singletons]\n\n(*\n   Another exhaustiveness check, enforcing variant typing.\n   Note that it does not check exact exhaustiveness, but whether a\n   matching could be made exhaustive by closing all variant types.\n   When this is true of all other columns, the current column is left\n   open (even if it means that the whole matching is not exhaustive as\n   a result).\n   When this is false for the matrix minus the current column, and the\n   current column is composed of variant tags, we close the variant\n   (even if it doesn't help in making the matching exhaustive).\n*)\n\nlet rec pressure_variants tdefs = function\n  | [] -> false\n  | [] :: _ -> true\n  | pss -> (\n    if not (all_coherent (simplified_first_col pss)) then true\n    else\n      let q0 = discr_pat omega pss in\n      match filter_all q0 pss with\n      | [] -> pressure_variants tdefs (filter_extra pss)\n      | constrs ->\n        let rec try_non_omega = function\n          | (_p, pss) :: rem ->\n            let ok = pressure_variants tdefs pss in\n            try_non_omega rem && ok\n          | [] -> true\n        in\n        if full_match (tdefs = None) constrs then try_non_omega constrs\n        else if tdefs = None then pressure_variants None 
(filter_extra pss)\n        else\n          let full = full_match true constrs in\n          let ok =\n            if full then try_non_omega constrs\n            else try_non_omega (filter_all q0 (mark_partial pss))\n          in\n          (match (constrs, tdefs) with\n          | (({pat_desc = Tpat_variant _} as p), _) :: _, Some env ->\n            let row = row_of_pat p in\n            if Btype.row_fixed row || pressure_variants None (filter_extra pss)\n            then ()\n            else close_variant env row\n          | _ -> ());\n          ok)\n\n(* Yet another satisfiable function *)\n\n(*\n   This time every_satisfiable pss qs checks the\n   utility of every expansion of qs.\n   Expansion means expansion of or-patterns inside qs\n*)\n\ntype answer =\n  | Used (* Useful pattern *)\n  | Unused (* Useless pattern *)\n  | Upartial of Typedtree.pattern list (* Mixed, with list of useless ones *)\n\n(* this row type enable column processing inside the matrix\n    - left  ->  elements not to be processed,\n    - right ->  elements to be processed\n*)\ntype 'a row = {no_ors: 'a list; ors: 'a list; active: 'a list}\n\n(*\nlet pretty_row {ors=ors ; no_ors=no_ors; active=active} =\n  pretty_line ors ; prerr_string \" *\" ;\n  pretty_line no_ors ; prerr_string \" *\" ;\n  pretty_line active\n\nlet pretty_rows rs =\n  prerr_endline \"begin matrix\" ;\n  List.iter\n    (fun r ->\n      pretty_row r ;\n      prerr_endline \"\")\n    rs ;\n  prerr_endline \"end matrix\"\n*)\n\n(* Initial build *)\nlet make_row ps = {ors = []; no_ors = []; active = ps}\n\nlet make_rows pss = List.map make_row pss\n\n(* Useful to detect and expand  or pats inside as pats *)\nlet rec unalias p =\n  match p.pat_desc with\n  | Tpat_alias (p, _, _) -> unalias p\n  | _ -> p\n\nlet is_var p =\n  match (unalias p).pat_desc with\n  | Tpat_any | Tpat_var _ -> true\n  | _ -> false\n\nlet is_var_column rs =\n  List.for_all\n    (fun r ->\n      match r.active with\n      | p :: _ -> is_var p\n     
 | [] -> assert false)\n    rs\n\n(* Standard or-args for left-to-right matching *)\nlet rec or_args p =\n  match p.pat_desc with\n  | Tpat_or (p1, p2, _) -> (p1, p2)\n  | Tpat_alias (p, _, _) -> or_args p\n  | _ -> assert false\n\n(* Just remove current column *)\nlet remove r =\n  match r.active with\n  | _ :: rem -> {r with active = rem}\n  | [] -> assert false\n\nlet remove_column rs = List.map remove rs\n\n(* Current column has been processed *)\nlet push_no_or r =\n  match r.active with\n  | p :: rem -> {r with no_ors = p :: r.no_ors; active = rem}\n  | [] -> assert false\n\nlet push_or r =\n  match r.active with\n  | p :: rem -> {r with ors = p :: r.ors; active = rem}\n  | [] -> assert false\n\nlet push_or_column rs = List.map push_or rs\n\nand push_no_or_column rs = List.map push_no_or rs\n\n(* Those are adaptations of the previous homonymous functions that\n   work on the current column, instead of the first column\n*)\n\nlet discr_pat q rs = discr_pat q (List.map (fun r -> r.active) rs)\n\nlet filter_one q rs =\n  let rec filter_rec rs =\n    match rs with\n    | [] -> []\n    | r :: rem -> (\n      match r.active with\n      | [] -> assert false\n      | {pat_desc = Tpat_alias (p, _, _)} :: ps ->\n        filter_rec ({r with active = p :: ps} :: rem)\n      | {pat_desc = Tpat_or (p1, p2, _)} :: ps ->\n        filter_rec\n          ({r with active = p1 :: ps} :: {r with active = p2 :: ps} :: rem)\n      | p :: ps ->\n        if simple_match q p then\n          {r with active = simple_match_args q p @ ps} :: filter_rec rem\n        else filter_rec rem)\n  in\n  filter_rec rs\n\n(* Back to normal matrices *)\nlet make_vector r = List.rev r.no_ors\n\nlet make_matrix rs = List.map make_vector rs\n\n(* Standard union on answers *)\nlet union_res r1 r2 =\n  match (r1, r2) with\n  | Unused, _ | _, Unused -> Unused\n  | Used, _ -> r2\n  | _, Used -> r1\n  | Upartial u1, Upartial u2 -> Upartial (u1 @ u2)\n\n(* propose or pats for expansion *)\nlet extract_elements 
qs =\n  let rec do_rec seen = function\n    | [] -> []\n    | q :: rem ->\n      {no_ors = List.rev_append seen rem @ qs.no_ors; ors = []; active = [q]}\n      :: do_rec (q :: seen) rem\n  in\n  do_rec [] qs.ors\n\n(* idem for matrices *)\nlet transpose rs =\n  match rs with\n  | [] -> assert false\n  | r :: rem ->\n    let i = List.map (fun x -> [x]) r in\n    List.fold_left (List.map2 (fun r x -> x :: r)) i rem\n\nlet extract_columns pss qs =\n  match pss with\n  | [] -> List.map (fun _ -> []) qs.ors\n  | _ ->\n    let rows = List.map extract_elements pss in\n    transpose rows\n\n(* Core function\n   The idea is to first look for or patterns (recursive case), then\n   check or-patterns argument usefulness (terminal case)\n*)\nlet rec simplified_first_usefulness_col = function\n  | [] -> []\n  | row :: rows -> (\n    match row.active with\n    | [] -> assert false (* the rows are non-empty! *)\n    | p :: _ -> simplify_head_pat p (simplified_first_usefulness_col rows))\n\nlet rec every_satisfiables pss qs =\n  match qs.active with\n  | [] -> (\n    (* qs is now partitionned,  check usefulness *)\n    match qs.ors with\n    | [] ->\n      (* no or-patterns *)\n      if satisfiable (make_matrix pss) (make_vector qs) then Used else Unused\n    | _ ->\n      (* n or-patterns -> 2n expansions *)\n      List.fold_right2\n        (fun pss qs r ->\n          match r with\n          | Unused -> Unused\n          | _ -> (\n            match qs.active with\n            | [q] ->\n              let q1, q2 = or_args q in\n              let r_loc = every_both pss qs q1 q2 in\n              union_res r r_loc\n            | _ -> assert false))\n        (extract_columns pss qs) (extract_elements qs) Used)\n  | q :: rem -> (\n    let uq = unalias q in\n    match uq.pat_desc with\n    | Tpat_any | Tpat_var _ ->\n      if is_var_column pss then\n        (* forget about ``all-variable''  columns now *)\n        every_satisfiables (remove_column pss) (remove qs)\n      else\n        (* 
otherwise this is direct food for satisfiable *)\n        every_satisfiables (push_no_or_column pss) (push_no_or qs)\n    | Tpat_or (q1, q2, _) ->\n      if q1.pat_loc.Location.loc_ghost && q2.pat_loc.Location.loc_ghost then\n        (* syntactically generated or-pats should not be expanded *)\n        every_satisfiables (push_no_or_column pss) (push_no_or qs)\n      else\n        (* this is a real or-pattern *)\n        every_satisfiables (push_or_column pss) (push_or qs)\n    | Tpat_variant (l, _, r) when is_absent l r ->\n      (* Ah Jacques... *)\n      Unused\n    | _ ->\n      (* standard case, filter matrix *)\n      (* The handling of incoherent matrices is kept in line with\n                [satisfiable] *)\n      if not (all_coherent (uq :: simplified_first_usefulness_col pss)) then\n        Unused\n      else\n        let q0 = discr_pat q pss in\n        every_satisfiables (filter_one q0 pss)\n          {qs with active = simple_match_args q0 q @ rem})\n\n(*\n  This function ``every_both'' performs the usefulness check\n  of or-pat q1|q2.\n  The trick is to call every_satisfied twice with\n  current active columns restricted to q1 and q2,\n  That way,\n  - others orpats in qs.ors will not get expanded.\n  - all matching work performed on qs.no_ors is not performed again.\n  *)\nand every_both pss qs q1 q2 =\n  let qs1 = {qs with active = [q1]} and qs2 = {qs with active = [q2]} in\n  let r1 = every_satisfiables pss qs1\n  and r2 = every_satisfiables (if compat q1 q2 then qs1 :: pss else pss) qs2 in\n  match r1 with\n  | Unused -> (\n    match r2 with\n    | Unused -> Unused\n    | Used -> Upartial [q1]\n    | Upartial u2 -> Upartial (q1 :: u2))\n  | Used -> (\n    match r2 with\n    | Unused -> Upartial [q2]\n    | _ -> r2)\n  | Upartial u1 -> (\n    match r2 with\n    | Unused -> Upartial (u1 @ [q2])\n    | Used -> r1\n    | Upartial u2 -> Upartial (u1 @ u2))\n\n(* le_pat p q  means, forall V,  V matches q implies V matches p *)\nlet rec le_pat p q =\n  
match (p.pat_desc, q.pat_desc) with\n  | (Tpat_var _ | Tpat_any), _ -> true\n  | Tpat_alias (p, _, _), _ -> le_pat p q\n  | _, Tpat_alias (q, _, _) -> le_pat p q\n  | Tpat_constant c1, Tpat_constant c2 -> const_compare c1 c2 = 0\n  | Tpat_construct (_, c1, ps), Tpat_construct (_, c2, qs) ->\n    Types.equal_tag c1.cstr_tag c2.cstr_tag && le_pats ps qs\n  | Tpat_variant (l1, Some p1, _), Tpat_variant (l2, Some p2, _) ->\n    l1 = l2 && le_pat p1 p2\n  | Tpat_variant (l1, None, _r1), Tpat_variant (l2, None, _) -> l1 = l2\n  | Tpat_variant (_, _, _), Tpat_variant (_, _, _) -> false\n  | Tpat_tuple ps, Tpat_tuple qs -> le_pats ps qs\n  | Tpat_lazy p, Tpat_lazy q -> le_pat p q\n  | Tpat_record (l1, _), Tpat_record (l2, _) ->\n    let ps, qs = records_args l1 l2 in\n    le_pats ps qs\n  | Tpat_array ps, Tpat_array qs -> Ext_list.same_length ps qs && le_pats ps qs\n  (* In all other cases, enumeration is performed *)\n  | _, _ -> not (satisfiable [[p]] [q])\n\nand le_pats ps qs =\n  match (ps, qs) with\n  | p :: ps, q :: qs -> le_pat p q && le_pats ps qs\n  | _, _ -> true\n\nlet get_mins le ps =\n  let rec select_rec r = function\n    | [] -> r\n    | p :: ps ->\n      if List.exists (fun p0 -> le p0 p) ps then select_rec r ps\n      else select_rec (p :: r) ps\n  in\n  select_rec [] (select_rec [] ps)\n\n(*\n  lub p q is a pattern that matches all values matched by p and q\n  may raise Empty, when p and q are not compatible\n*)\n\nlet rec lub p q =\n  match (p.pat_desc, q.pat_desc) with\n  | Tpat_alias (p, _, _), _ -> lub p q\n  | _, Tpat_alias (q, _, _) -> lub p q\n  | (Tpat_any | Tpat_var _), _ -> q\n  | _, (Tpat_any | Tpat_var _) -> p\n  | Tpat_or (p1, p2, _), _ -> orlub p1 p2 q\n  | _, Tpat_or (q1, q2, _) -> orlub q1 q2 p (* Thanks god, lub is commutative *)\n  | Tpat_constant c1, Tpat_constant c2 when const_compare c1 c2 = 0 -> p\n  | Tpat_tuple ps, Tpat_tuple qs ->\n    let rs = lubs ps qs in\n    make_pat (Tpat_tuple rs) p.pat_type p.pat_env\n  | Tpat_lazy p, 
Tpat_lazy q ->\n    let r = lub p q in\n    make_pat (Tpat_lazy r) p.pat_type p.pat_env\n  | Tpat_construct (lid, c1, ps1), Tpat_construct (_, c2, ps2)\n    when Types.equal_tag c1.cstr_tag c2.cstr_tag ->\n    let rs = lubs ps1 ps2 in\n    make_pat (Tpat_construct (lid, c1, rs)) p.pat_type p.pat_env\n  | Tpat_variant (l1, Some p1, row), Tpat_variant (l2, Some p2, _) when l1 = l2\n    ->\n    let r = lub p1 p2 in\n    make_pat (Tpat_variant (l1, Some r, row)) p.pat_type p.pat_env\n  | Tpat_variant (l1, None, _row), Tpat_variant (l2, None, _) when l1 = l2 -> p\n  | Tpat_record (l1, closed), Tpat_record (l2, _) ->\n    let rs = record_lubs l1 l2 in\n    make_pat (Tpat_record (rs, closed)) p.pat_type p.pat_env\n  | Tpat_array ps, Tpat_array qs when List.length ps = List.length qs ->\n    let rs = lubs ps qs in\n    make_pat (Tpat_array rs) p.pat_type p.pat_env\n  | _, _ -> raise Empty\n\nand orlub p1 p2 q =\n  try\n    let r1 = lub p1 q in\n    try {q with pat_desc = Tpat_or (r1, lub p2 q, None)} with Empty -> r1\n  with Empty -> lub p2 q\n\nand record_lubs l1 l2 =\n  let rec lub_rec l1 l2 =\n    match (l1, l2) with\n    | [], _ -> l2\n    | _, [] -> l1\n    | (lid1, lbl1, p1) :: rem1, (lid2, lbl2, p2) :: rem2 ->\n      if lbl1.lbl_pos < lbl2.lbl_pos then (lid1, lbl1, p1) :: lub_rec rem1 l2\n      else if lbl2.lbl_pos < lbl1.lbl_pos then\n        (lid2, lbl2, p2) :: lub_rec l1 rem2\n      else (lid1, lbl1, lub p1 p2) :: lub_rec rem1 rem2\n  in\n  lub_rec l1 l2\n\nand lubs ps qs =\n  match (ps, qs) with\n  | p :: ps, q :: qs -> lub p q :: lubs ps qs\n  | _, _ -> []\n\n(******************************)\n(* Exported variant closing   *)\n(******************************)\n\n(* Apply pressure to variants *)\n\nlet pressure_variants tdefs patl =\n  let pss = List.map (fun p -> [p; omega]) patl in\n  ignore (pressure_variants (Some tdefs) pss)\n\n(*****************************)\n(* Utilities for diagnostics *)\n(*****************************)\n\n(*\n  Build up a working 
pattern matrix by forgetting\n  about guarded patterns\n*)\n\nlet rec initial_matrix = function\n  | [] -> []\n  | {c_guard = Some _} :: rem -> initial_matrix rem\n  | {c_guard = None; c_lhs = p} :: rem -> [p] :: initial_matrix rem\n\n(******************************************)\n(* Look for a row that matches some value *)\n(******************************************)\n\n(*\n  Useful for seeing if the example of\n  non-matched value can indeed be matched\n  (by a guarded clause)\n*)\n\nexception NoGuard\n\nlet rec initial_all no_guard = function\n  | [] -> if no_guard then raise NoGuard else []\n  | {c_lhs = pat; c_guard; _} :: rem ->\n    ([pat], pat.pat_loc) :: initial_all (no_guard && c_guard = None) rem\n\nlet rec do_filter_var = function\n  | (_ :: ps, loc) :: rem -> (ps, loc) :: do_filter_var rem\n  | _ -> []\n\nlet do_filter_one q pss =\n  let rec filter_rec = function\n    | ({pat_desc = Tpat_alias (p, _, _)} :: ps, loc) :: pss ->\n      filter_rec ((p :: ps, loc) :: pss)\n    | ({pat_desc = Tpat_or (p1, p2, _)} :: ps, loc) :: pss ->\n      filter_rec ((p1 :: ps, loc) :: (p2 :: ps, loc) :: pss)\n    | (p :: ps, loc) :: pss ->\n      if simple_match q p then\n        (simple_match_args q p @ ps, loc) :: filter_rec pss\n      else filter_rec pss\n    | _ -> []\n  in\n  filter_rec pss\n\nlet rec do_match pss qs =\n  match qs with\n  | [] -> (\n    match pss with\n    | ([], loc) :: _ -> Some loc\n    | _ -> None)\n  | q :: qs -> (\n    match q with\n    | {pat_desc = Tpat_or (q1, q2, _)} -> (\n      match do_match pss (q1 :: qs) with\n      | None -> do_match pss (q2 :: qs)\n      | r -> r)\n    | {pat_desc = Tpat_any} -> do_match (do_filter_var pss) qs\n    | _ ->\n      let q0 = normalize_pat q in\n      (* [pss] will (or won't) match [q0 :: qs] regardless of the coherence of\n         its first column. 
*)\n      do_match (do_filter_one q0 pss) (simple_match_args q0 q @ qs))\n\nlet check_partial_all v casel =\n  try\n    let pss = initial_all true casel in\n    do_match pss [v]\n  with NoGuard -> None\n\n(************************)\n(* Exhaustiveness check *)\n(************************)\n\n(* conversion from Typedtree.pattern to Parsetree.pattern list *)\nmodule Conv = struct\n  open Parsetree\n  let mkpat desc = Ast_helper.Pat.mk desc\n\n  let name_counter = ref 0\n  let fresh name =\n    let current = !name_counter in\n    name_counter := !name_counter + 1;\n    \"#$\" ^ name ^ string_of_int current\n\n  let conv typed =\n    let constrs = Hashtbl.create 7 in\n    let labels = Hashtbl.create 7 in\n    let rec loop pat =\n      match pat.pat_desc with\n      | Tpat_or (pa, pb, _) -> mkpat (Ppat_or (loop pa, loop pb))\n      | Tpat_var (_, ({txt = \"*extension*\"} as nm)) ->\n        (* PR#7330 *)\n        mkpat (Ppat_var nm)\n      | Tpat_any | Tpat_var _ -> mkpat Ppat_any\n      | Tpat_constant c -> mkpat (Ppat_constant (Untypeast.constant c))\n      | Tpat_alias (p, _, _) -> loop p\n      | Tpat_tuple lst -> mkpat (Ppat_tuple (List.map loop lst))\n      | Tpat_construct (cstr_lid, cstr, lst) ->\n        let id = fresh cstr.cstr_name in\n        let lid = {cstr_lid with txt = Longident.Lident id} in\n        Hashtbl.add constrs id cstr;\n        let arg =\n          match List.map loop lst with\n          | [] -> None\n          | [p] -> Some p\n          | lst -> Some (mkpat (Ppat_tuple lst))\n        in\n        mkpat (Ppat_construct (lid, arg))\n      | Tpat_variant (label, p_opt, _row_desc) ->\n        let arg = Misc.may_map loop p_opt in\n        mkpat (Ppat_variant (label, arg))\n      | Tpat_record (subpatterns, _closed_flag) ->\n        let fields =\n          List.map\n            (fun (_, lbl, p) ->\n              let id = fresh lbl.lbl_name in\n              Hashtbl.add labels id lbl;\n              (mknoloc (Longident.Lident id), loop p))\n            
subpatterns\n        in\n        mkpat (Ppat_record (fields, Open))\n      | Tpat_array lst -> mkpat (Ppat_array (List.map loop lst))\n      | Tpat_lazy p -> mkpat (Ppat_lazy (loop p))\n    in\n    let ps = loop typed in\n    (ps, constrs, labels)\nend\n\n(* Whether the counter-example contains an extension pattern *)\nlet contains_extension pat =\n  let r = ref false in\n  let rec loop = function\n    | {pat_desc = Tpat_var (_, {txt = \"*extension*\"})} -> r := true\n    | p -> Typedtree.iter_pattern_desc loop p.pat_desc\n  in\n  loop pat;\n  !r\n\n(* Build an untyped or-pattern from its expected type *)\nlet ppat_of_type env ty =\n  match pats_of_type env ty with\n  | [{pat_desc = Tpat_any}] ->\n    (Conv.mkpat Parsetree.Ppat_any, Hashtbl.create 0, Hashtbl.create 0)\n  | pats -> Conv.conv (orify_many pats)\n\nlet do_check_partial ?pred exhaust loc casel pss =\n  match pss with\n  | [] ->\n    (*\n          This can occur\n          - For empty matches generated by ocamlp4 (no warning)\n          - when all patterns have guards (then, casel <> [])\n          (specific warning)\n          Then match MUST be considered non-exhaustive,\n          otherwise compilation of PM is broken.\n          *)\n    (match casel with\n    | [] -> ()\n    | _ ->\n      if Warnings.is_active Warnings.All_clauses_guarded then\n        Location.prerr_warning loc Warnings.All_clauses_guarded);\n    Partial\n  | ps :: _ -> (\n    match exhaust None pss (List.length ps) with\n    | Rnone -> Total\n    | Rsome [u] -> (\n      let v =\n        match pred with\n        | Some pred ->\n          let pattern, constrs, labels = Conv.conv u in\n          let u' = pred constrs labels pattern in\n          (* pretty_pat u;\n             begin match u' with\n               None -> prerr_endline \": impossible\"\n             | Some _ -> prerr_endline \": possible\"\n             end; *)\n          u'\n        | None -> Some u\n      in\n      match v with\n      | None -> Total\n      | Some v 
->\n        (if Warnings.is_active (Warnings.Partial_match \"\") then\n           let errmsg =\n             try\n               let buf = Buffer.create 16 in\n               Buffer.add_string buf \"| \";\n               Buffer.add_string buf (!print_res_pat v);\n               (match check_partial_all v casel with\n               | None -> ()\n               | Some _ ->\n                 (* This is 'Some loc', where loc is the location of\n                    a possibly matching clause.\n                    Forget about loc, because printing two locations\n                    is a pain in the top-level *)\n                 Buffer.add_string buf\n                   \"\\n(However, some guarded clause may match this value.)\");\n               if contains_extension v then\n                 Buffer.add_string buf\n                   \"\\n\\\n                    Matching over values of extensible variant types (the \\\n                    *extension* above)\\n\\\n                    must include a wild card pattern in order to be exhaustive.\";\n               Buffer.contents buf\n             with _ -> \"\"\n           in\n           Location.prerr_warning loc (Warnings.Partial_match errmsg));\n        Partial)\n    | _ -> fatal_error \"Parmatch.check_partial\")\n\n(*\nlet do_check_partial_normal loc casel pss =\n  do_check_partial exhaust loc casel pss\n *)\n\nlet do_check_partial_gadt pred loc casel pss =\n  do_check_partial ~pred exhaust_gadt loc casel pss\n\n(*****************)\n(* Fragile check *)\n(*****************)\n\n(* Collect all data types in a pattern *)\n\nlet rec add_path path = function\n  | [] -> [path]\n  | x :: rem as paths ->\n    if Path.same path x then paths else x :: add_path path rem\n\nlet extendable_path path =\n  not\n    (Path.same path Predef.path_bool\n    || Path.same path Predef.path_list\n    || Path.same path Predef.path_unit\n    || Path.same path Predef.path_option)\n\nlet rec collect_paths_from_pat r p =\n  match p.pat_desc with\n  
| Tpat_construct\n      (_, {cstr_tag = Cstr_constant _ | Cstr_block _ | Cstr_unboxed}, ps) ->\n    let path = get_type_path p.pat_type p.pat_env in\n    List.fold_left collect_paths_from_pat\n      (if extendable_path path then add_path path r else r)\n      ps\n  | Tpat_any | Tpat_var _ | Tpat_constant _ | Tpat_variant (_, None, _) -> r\n  | Tpat_tuple ps\n  | Tpat_array ps\n  | Tpat_construct (_, {cstr_tag = Cstr_extension _}, ps) ->\n    List.fold_left collect_paths_from_pat r ps\n  | Tpat_record (lps, _) ->\n    List.fold_left (fun r (_, _, p) -> collect_paths_from_pat r p) r lps\n  | Tpat_variant (_, Some p, _) | Tpat_alias (p, _, _) ->\n    collect_paths_from_pat r p\n  | Tpat_or (p1, p2, _) ->\n    collect_paths_from_pat (collect_paths_from_pat r p1) p2\n  | Tpat_lazy p -> collect_paths_from_pat r p\n\n(*\n  Actual fragile check\n   1. Collect data types in the patterns of the match.\n   2. One exhaustivity check per datatype, considering that\n      the type is extended.\n*)\n\nlet do_check_fragile_param exhaust loc casel pss =\n  let exts =\n    List.fold_left (fun r c -> collect_paths_from_pat r c.c_lhs) [] casel\n  in\n  match exts with\n  | [] -> ()\n  | _ -> (\n    match pss with\n    | [] -> ()\n    | ps :: _ ->\n      List.iter\n        (fun ext ->\n          match exhaust (Some ext) pss (List.length ps) with\n          | Rnone ->\n            Location.prerr_warning loc (Warnings.Fragile_match (Path.name ext))\n          | Rsome _ -> ())\n        exts)\n\n(*let do_check_fragile_normal = do_check_fragile_param exhaust*)\nlet do_check_fragile_gadt = do_check_fragile_param exhaust_gadt\n\n(********************************)\n(* Exported unused clause check *)\n(********************************)\n\nlet check_unused pred casel =\n  if\n    Warnings.is_active Warnings.Unused_match\n    || List.exists (fun c -> c.c_rhs.exp_desc = Texp_unreachable) casel\n  then\n    let rec do_rec pref = function\n      | [] -> ()\n      | {c_lhs = q; c_guard; c_rhs} :: rem 
->\n        let qs = [q] in\n        (try\n           let pss = get_mins le_pats (Ext_list.filter pref (compats qs)) in\n           (* First look for redundant or partially redundant patterns *)\n           let r = every_satisfiables (make_rows pss) (make_row qs) in\n           let refute = c_rhs.exp_desc = Texp_unreachable in\n           (* Do not warn for unused [pat -> .] *)\n           if r = Unused && refute then ()\n           else\n             let r =\n               (* Do not refine if there are no other lines *)\n               let skip =\n                 r = Unused\n                 || ((not refute) && pref = [])\n                 || not (refute || Warnings.is_active Warnings.Unreachable_case)\n               in\n               if skip then r\n               else\n                 (* Then look for empty patterns *)\n                 let sfs = satisfiables pss qs in\n                 if sfs = [] then Unused\n                 else\n                   let sfs =\n                     List.map\n                       (function\n                         | [u] -> u\n                         | _ -> assert false)\n                       sfs\n                   in\n                   let u = orify_many sfs in\n                   (*Format.eprintf \"%a@.\" pretty_val u;*)\n                   let pattern, constrs, labels = Conv.conv u in\n                   let pattern =\n                     {pattern with Parsetree.ppat_loc = q.pat_loc}\n                   in\n                   match pred refute constrs labels pattern with\n                   | None when not refute ->\n                     Location.prerr_warning q.pat_loc Warnings.Unreachable_case;\n                     Used\n                   | _ -> r\n             in\n             match r with\n             | Unused -> Location.prerr_warning q.pat_loc Warnings.Unused_match\n             | Upartial ps ->\n               List.iter\n                 (fun p -> Location.prerr_warning p.pat_loc Warnings.Unused_pat)\n 
                ps\n             | Used -> ()\n         with Empty | Not_found | NoGuard -> assert false);\n\n        if c_guard <> None then do_rec pref rem else do_rec ([q] :: pref) rem\n    in\n\n    do_rec [] casel\n\n(*********************************)\n(* Exported irrefutability tests *)\n(*********************************)\n\nlet irrefutable pat = le_pat pat omega\n\nlet inactive ~partial pat =\n  match partial with\n  | Partial -> false\n  | Total ->\n    let rec loop pat =\n      match pat.pat_desc with\n      | Tpat_lazy _ | Tpat_array _ -> false\n      | Tpat_any | Tpat_var _ | Tpat_variant (_, None, _) -> true\n      | Tpat_constant c -> (\n        match c with\n        | Const_string _ -> true (*Config.safe_string*)\n        | Const_int _ | Const_char _ | Const_float _ | Const_int32 _\n        | Const_int64 _ | Const_bigint _ ->\n          true)\n      | Tpat_tuple ps | Tpat_construct (_, _, ps) ->\n        List.for_all (fun p -> loop p) ps\n      | Tpat_alias (p, _, _) | Tpat_variant (_, Some p, _) -> loop p\n      | Tpat_record (ldps, _) ->\n        List.for_all (fun (_, lbl, p) -> lbl.lbl_mut = Immutable && loop p) ldps\n      | Tpat_or (p, q, _) -> loop p && loop q\n    in\n    loop pat\n\n(*********************************)\n(* Exported exhaustiveness check *)\n(*********************************)\n\n(*\n   Fragile check is performed when required and\n   on exhaustive matches only.\n*)\n\nlet check_partial_param do_check_partial do_check_fragile loc casel =\n  let pss = initial_matrix casel in\n  let pss = get_mins le_pats pss in\n  let total = do_check_partial loc casel pss in\n  if total = Total && Warnings.is_active (Warnings.Fragile_match \"\") then\n    do_check_fragile loc casel pss;\n  total\n\n(*let check_partial =\n    check_partial_param\n      do_check_partial_normal\n      do_check_fragile_normal*)\n\nlet check_partial_gadt pred loc casel =\n  check_partial_param\n    (do_check_partial_gadt pred)\n    do_check_fragile_gadt loc 
casel\n\n(*************************************)\n(* Ambiguous variable in or-patterns *)\n(*************************************)\n\n(* Specification: ambiguous variables in or-patterns.\n\n   The semantics of or-patterns in OCaml is specified with\n   a left-to-right bias: a value [v] matches the pattern [p | q] if it\n   matches [p] or [q], but if it matches both, the environment\n   captured by the match is the environment captured by [p], never the\n   one captured by [q].\n\n   While this property is generally well-understood, one specific case\n   where users expect a different semantics is when a pattern is\n   followed by a when-guard: [| p when g -> e]. Consider for example:\n\n     | ((Const x, _) | (_, Const x)) when is_neutral x -> branch\n\n   The semantics is clear: match the scrutinee against the pattern, if\n   it matches, test the guard, and if the guard passes, take the\n   branch.\n\n   However, consider the input [(Const a, Const b)], where [a] fails\n   the test [is_neutral f], while [b] passes the test [is_neutral\n   b]. With the left-to-right semantics, the clause above is *not*\n   taken by its input: matching [(Const a, Const b)] against the\n   or-pattern succeeds in the left branch, it returns the environment\n   [x -> a], and then the guard [is_neutral a] is tested and fails,\n   the branch is not taken. Most users, however, intuitively expect\n   that any pair that has one side passing the test will take the\n   branch. They assume it is equivalent to the following:\n\n     | (Const x, _) when is_neutral x -> branch\n     | (_, Const x) when is_neutral x -> branch\n\n   while it is not.\n\n   The code below is dedicated to finding these confusing cases: the\n   cases where a guard uses \"ambiguous\" variables, that are bound to\n   different parts of the scrutinees by different sides of\n   a or-pattern. 
In other words, it finds the cases where the\n   specified left-to-right semantics is not equivalent to\n   a non-deterministic semantics (any branch can be taken) relatively\n   to a specific guard.\n*)\n\nmodule IdSet = Set.Make (Ident)\n\nlet pattern_vars p = IdSet.of_list (Typedtree.pat_bound_idents p)\n\n(* Row for ambiguous variable search,\n   unseen is the traditional pattern row,\n   seen   is a list of position bindings *)\n\ntype amb_row = {unseen: pattern list; seen: IdSet.t list}\n\n(* Push binding variables now *)\n\nlet rec do_push r p ps seen k =\n  match p.pat_desc with\n  | Tpat_alias (p, x, _) -> do_push (IdSet.add x r) p ps seen k\n  | Tpat_var (x, _) -> (omega, {unseen = ps; seen = IdSet.add x r :: seen}) :: k\n  | Tpat_or (p1, p2, _) -> do_push r p1 ps seen (do_push r p2 ps seen k)\n  | _ -> (p, {unseen = ps; seen = r :: seen}) :: k\n\nlet rec push_vars = function\n  | [] -> []\n  | {unseen = []} :: _ -> assert false\n  | {unseen = p :: ps; seen} :: rem ->\n    do_push IdSet.empty p ps seen (push_vars rem)\n\nlet collect_stable = function\n  | [] -> assert false\n  | {seen = xss; _} :: rem ->\n    let rec c_rec xss = function\n      | [] -> xss\n      | {seen = yss; _} :: rem ->\n        let xss = List.map2 IdSet.inter xss yss in\n        c_rec xss rem\n    in\n    let inters = c_rec xss rem in\n    List.fold_left IdSet.union IdSet.empty inters\n\n(*********************************************)\n(* Filtering utilities for our specific rows *)\n(*********************************************)\n\n(* Take a pattern matrix as a list (rows) of lists (columns) of patterns\n     | p1, p2, .., pn\n     | q1, q2, .., qn\n     | r1, r2, .., rn\n     | ...\n\n   We split this matrix into a list of sub-matrices, one for each head\n   constructor appearing in the leftmost column. 
For each row whose\n   left column starts with a head constructor, remove this head\n   column, prepend one column for each argument of the constructor,\n   and add the resulting row in the sub-matrix corresponding to this\n   head constructor.\n\n   Rows whose left column is omega (the Any pattern _) may match any\n   head constructor, so they are added to all groups.\n\n   The list of sub-matrices is represented as a list of pair\n     (head constructor, submatrix)\n*)\n\nlet filter_all =\n  (* the head constructor (as a pattern with omega arguments) of\n     a pattern *)\n  let discr_head pat =\n    match pat.pat_desc with\n    | Tpat_record (lbls, closed) ->\n      (* a partial record pattern { f1 = p1; f2 = p2; _ }\n         needs to be expanded, otherwise matching against this head\n         would drop the pattern arguments for non-mentioned fields *)\n      let lbls = all_record_args lbls in\n      normalize_pat {pat with pat_desc = Tpat_record (lbls, closed)}\n    | _ -> normalize_pat pat\n  in\n\n  (* insert a row of head [p] and rest [r] into the right group *)\n  let rec insert p r env =\n    match env with\n    | [] ->\n      (* if no group matched this row, it has a head constructor that\n         was never seen before; add a new sub-matrix for this head *)\n      let p0 = discr_head p in\n      [(p0, [{r with unseen = simple_match_args p0 p @ r.unseen}])]\n    | ((q0, rs) as bd) :: env ->\n      if simple_match q0 p then\n        let r = {r with unseen = simple_match_args q0 p @ r.unseen} in\n        (q0, r :: rs) :: env\n      else bd :: insert p r env\n  in\n\n  (* insert a row of head omega into all groups *)\n  let insert_omega r env =\n    List.map\n      (fun (q0, rs) ->\n        let r = {r with unseen = simple_match_args q0 omega @ r.unseen} in\n        (q0, r :: rs))\n      env\n  in\n\n  let rec filter_rec env = function\n    | [] -> env\n    | ({pat_desc = Tpat_var _ | Tpat_alias _ | Tpat_or _}, _) :: _ ->\n      assert false\n    | 
({pat_desc = Tpat_any}, _) :: rs -> filter_rec env rs\n    | (p, r) :: rs -> filter_rec (insert p r env) rs\n  in\n\n  let rec filter_omega env = function\n    | [] -> env\n    | ({pat_desc = Tpat_var _ | Tpat_alias _ | Tpat_or _}, _) :: _ ->\n      assert false\n    | ({pat_desc = Tpat_any}, r) :: rs -> filter_omega (insert_omega r env) rs\n    | _ :: rs -> filter_omega env rs\n  in\n\n  fun rs ->\n    (* first insert the rows with head constructors,\n       to get the definitive list of groups *)\n    let env = filter_rec [] rs in\n    (* then add the omega rows to all groups *)\n    filter_omega env rs\n\n(* Compute stable bindings *)\n\nlet rec do_stable rs =\n  match rs with\n  | [] -> assert false (* No empty matrix *)\n  | {unseen = []; _} :: _ -> collect_stable rs\n  | _ -> (\n    let rs = push_vars rs in\n    if not (all_coherent (first_column rs)) then\n      (* If the first column is incoherent, then all the variables of this\n         matrix are stable. *)\n      List.fold_left\n        (fun acc (_, {seen; _}) -> List.fold_left IdSet.union acc seen)\n        IdSet.empty rs\n    else\n      (* If the column is ill-typed but deemed coherent, we might spuriously\n         warn about some variables being unstable.\n         As sad as that might be, the warning can be silenced by splitting the\n         or-pattern... *)\n      match filter_all rs with\n      | [] -> do_stable (List.map snd rs)\n      | (_, rs) :: env ->\n        List.fold_left\n          (fun xs (_, rs) -> IdSet.inter xs (do_stable rs))\n          (do_stable rs) env)\n\nlet stable p = do_stable [{unseen = [p]; seen = []}]\n\n(* All identifier paths that appear in an expression that occurs\n    as a clause right hand side or guard.\n\n   The function is rather complex due to the compilation of\n   unpack patterns by introducing code in rhs expressions\n   and **guards**.\n\n   For pattern (module M:S)  -> e the code is\n   let module M_mod = unpack M .. 
in e\n\n   Hence M is \"free\" in e iff M_mod is free in e.\n\n   Not doing so will yield excessive  warning in\n   (module (M:S) } ...) when true -> ....\n   as M is always present in\n   let module M_mod = unpack M .. in true\n*)\n\nlet all_rhs_idents exp =\n  let ids = ref IdSet.empty in\n  let module Iterator = TypedtreeIter.MakeIterator (struct\n    include TypedtreeIter.DefaultIteratorArgument\n    let enter_expression exp =\n      match exp.exp_desc with\n      | Texp_ident (path, _lid, _descr) ->\n        List.iter (fun id -> ids := IdSet.add id !ids) (Path.heads path)\n      | _ -> ()\n\n    (* Very hackish, detect unpack pattern  compilation\n       and perform \"indirect check for them\" *)\n    let is_unpack exp =\n      List.exists (fun (attr, _) -> attr.txt = \"#modulepat\") exp.exp_attributes\n\n    let leave_expression exp =\n      if is_unpack exp then\n        match exp.exp_desc with\n        | Texp_letmodule\n            ( id_mod,\n              _,\n              {\n                mod_desc =\n                  Tmod_unpack\n                    ({exp_desc = Texp_ident (Path.Pident id_exp, _, _)}, _);\n              },\n              _ ) ->\n          assert (IdSet.mem id_exp !ids);\n          if not (IdSet.mem id_mod !ids) then ids := IdSet.remove id_exp !ids\n        | _ -> assert false\n  end) in\n  Iterator.iter_expression exp;\n  !ids\n\nlet check_ambiguous_bindings =\n  let open Warnings in\n  let warn0 = Ambiguous_pattern [] in\n  fun cases ->\n    if is_active warn0 then\n      List.iter\n        (fun case ->\n          match case with\n          | {c_guard = None; _} -> ()\n          | {c_lhs = p; c_guard = Some g; _} ->\n            let all = IdSet.inter (pattern_vars p) (all_rhs_idents g) in\n            if not (IdSet.is_empty all) then\n              let st = stable p in\n              let ambiguous = IdSet.diff all st in\n              if not (IdSet.is_empty ambiguous) then\n                let pps = IdSet.elements ambiguous |> 
List.map Ident.name in\n                let warn = Ambiguous_pattern pps in\n                Location.prerr_warning p.pat_loc warn)\n        cases\n"
  },
  {
    "path": "analysis/vendor/ml/parmatch.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Detection of partial matches and unused match cases. 
*)\nopen Asttypes\nopen Typedtree\nopen Types\n\nval pretty_const : constant -> string\nval top_pretty : Format.formatter -> pattern -> unit\nval pretty_pat : pattern -> unit\nval pretty_line : pattern list -> unit\nval pretty_matrix : pattern list list -> unit\n\nval print_res_pat : (Typedtree.pattern -> string) ref\n\nval omega : pattern\nval omegas : int -> pattern list\nval omega_list : 'a list -> pattern list\nval normalize_pat : pattern -> pattern\nval all_record_args :\n  (Longident.t loc * label_description * pattern) list ->\n  (Longident.t loc * label_description * pattern) list\nval const_compare : constant -> constant -> int\n\nval le_pat : pattern -> pattern -> bool\nval le_pats : pattern list -> pattern list -> bool\n\n(* Exported compatibility functor, abstracted over constructor equality *)\nmodule Compat : functor\n  (Constr : sig\n     val equal :\n       Types.constructor_description -> Types.constructor_description -> bool\n   end)\n  -> sig\n  val compat : pattern -> pattern -> bool\n  val compats : pattern list -> pattern list -> bool\nend\n[@@warning \"-67\"]\n\nexception Empty\nval lub : pattern -> pattern -> pattern\nval lubs : pattern list -> pattern list -> pattern list\n\nval get_mins : ('a -> 'a -> bool) -> 'a list -> 'a list\n\n(* Those two functions recombine one pattern and its arguments:\n   For instance:\n     (_,_)::p1::p2::rem -> (p1, p2)::rem\n   The second one will replace mutable arguments by '_'\n*)\nval set_args : pattern -> pattern list -> pattern list\nval set_args_erase_mutable : pattern -> pattern list -> pattern list\n\nval pat_of_constr : pattern -> constructor_description -> pattern\nval complete_constrs :\n  pattern -> constructor_tag list -> constructor_description list\nval ppat_of_type :\n  Env.t ->\n  type_expr ->\n  Parsetree.pattern\n  * (string, constructor_description) Hashtbl.t\n  * (string, label_description) Hashtbl.t\n\nval pressure_variants : Env.t -> pattern list -> unit\nval check_partial_gadt :\n  
((string, constructor_description) Hashtbl.t ->\n  (string, label_description) Hashtbl.t ->\n  Parsetree.pattern ->\n  pattern option) ->\n  Location.t ->\n  case list ->\n  partial\nval check_unused :\n  (bool ->\n  (string, constructor_description) Hashtbl.t ->\n  (string, label_description) Hashtbl.t ->\n  Parsetree.pattern ->\n  pattern option) ->\n  case list ->\n  unit\n\n(* Irrefutability tests *)\nval irrefutable : pattern -> bool\n\nval inactive : partial:partial -> pattern -> bool\n(** An inactive pattern is a pattern, matching against which can be duplicated,\n    erased or delayed without change in observable behavior of the program.\n    Patterns containing (lazy _) subpatterns or reads of mutable fields are\n    active. *)\n\n(* Ambiguous bindings *)\nval check_ambiguous_bindings : case list -> unit\n\n(* The tag used for open polymorphic variant types *)\nval some_other_tag : label\n"
  },
  {
    "path": "analysis/vendor/ml/parse.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Entry points in the parser *)\n\nlet wrap parsing_fun lexbuf =\n  try\n    Docstrings.init ();\n    Lexer.init ();\n    let ast = parsing_fun Lexer.token lexbuf in\n    Parsing.clear_parser ();\n    Docstrings.warn_bad_docstrings ();\n    ast\n  with Parsing.Parse_error | Syntaxerr.Escape_error ->\n    let loc = Location.curr lexbuf in\n    raise (Syntaxerr.Error (Syntaxerr.Other loc))\n\nlet implementation = wrap Parser.implementation\n\nand interface = wrap Parser.interface\n\nand core_type = wrap Parser.parse_core_type\n\nand expression = wrap Parser.parse_expression\n\nand pattern = wrap Parser.parse_pattern\n"
  },
  {
    "path": "analysis/vendor/ml/parse.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Entry points in the parser *)\n\nval implementation : Lexing.lexbuf -> Parsetree.structure\nval interface : Lexing.lexbuf -> Parsetree.signature\nval core_type : Lexing.lexbuf -> Parsetree.core_type\nval expression : Lexing.lexbuf -> Parsetree.expression\nval pattern : Lexing.lexbuf -> Parsetree.pattern\n"
  },
  {
    "path": "analysis/vendor/ml/parser.ml",
    "content": "type token =\n  | AMPERAMPER\n  | AMPERSAND\n  | AND\n  | AS\n  | ASSERT\n  | BACKQUOTE\n  | BANG\n  | BAR\n  | BARBAR\n  | BARRBRACKET\n  | BEGIN\n  | CHAR of (char)\n  | CLASS\n  | COLON\n  | COLONCOLON\n  | COLONEQUAL\n  | COLONGREATER\n  | COMMA\n  | CONSTRAINT\n  | DO\n  | DONE\n  | DOT\n  | DOTDOT\n  | DOWNTO\n  | ELSE\n  | END\n  | EOF\n  | EQUAL\n  | EXCEPTION\n  | EXTERNAL\n  | FALSE\n  | FLOAT of (string * char option)\n  | FOR\n  | FUN\n  | FUNCTION\n  | FUNCTOR\n  | GREATER\n  | GREATERRBRACE\n  | GREATERRBRACKET\n  | IF\n  | IN\n  | INCLUDE\n  | INFIXOP0 of (string)\n  | INFIXOP1 of (string)\n  | INFIXOP2 of (string)\n  | INFIXOP3 of (string)\n  | INFIXOP4 of (string)\n  | DOTOP of (string)\n  | INHERIT\n  | INITIALIZER\n  | INT of (string * char option)\n  | LABEL of (string)\n  | LAZY\n  | LBRACE\n  | LBRACELESS\n  | LBRACKET\n  | LBRACKETBAR\n  | LBRACKETLESS\n  | LBRACKETGREATER\n  | LBRACKETPERCENT\n  | LBRACKETPERCENTPERCENT\n  | LESS\n  | LESSMINUS\n  | LET\n  | LIDENT of (string)\n  | LPAREN\n  | LBRACKETAT\n  | LBRACKETATAT\n  | LBRACKETATATAT\n  | MATCH\n  | METHOD\n  | MINUS\n  | MINUSDOT\n  | MINUSGREATER\n  | MODULE\n  | MUTABLE\n  | NEW\n  | NONREC\n  | OBJECT\n  | OF\n  | OPEN\n  | OPTLABEL of (string)\n  | OR\n  | PERCENT\n  | PLUS\n  | PLUSDOT\n  | PLUSEQ\n  | PREFIXOP of (string)\n  | PRIVATE\n  | QUESTION\n  | QUOTE\n  | RBRACE\n  | RBRACKET\n  | REC\n  | RPAREN\n  | SEMI\n  | SEMISEMI\n  | HASH\n  | HASHOP of (string)\n  | SIG\n  | STAR\n  | STRING of (string * string option)\n  | STRUCT\n  | THEN\n  | TILDE\n  | TO\n  | TRUE\n  | TRY\n  | TYPE\n  | UIDENT of (string)\n  | UNDERSCORE\n  | VAL\n  | VIRTUAL\n  | WHEN\n  | WHILE\n  | WITH\n  | COMMENT of (string * Location.t)\n  | DOCSTRING of (Docstrings.docstring)\n  | EOL\n\nopen Parsing;;\nlet _ = parse_error;;\n# 19 \"ml/parser.mly\"\nopen Location\nopen Asttypes\nopen Longident\nopen Parsetree\nopen Ast_helper\nopen Docstrings\n\nlet mktyp d = Typ.mk 
~loc:(symbol_rloc()) d\nlet mkpat d = Pat.mk ~loc:(symbol_rloc()) d\nlet mkexp d = Exp.mk ~loc:(symbol_rloc()) d\nlet mkmty ?attrs d = Mty.mk ~loc:(symbol_rloc()) ?attrs d\nlet mksig d = Sig.mk ~loc:(symbol_rloc()) d\nlet mkmod ?attrs d = Mod.mk ~loc:(symbol_rloc()) ?attrs d\nlet mkstr d = Str.mk ~loc:(symbol_rloc()) d\nlet mkcty ?attrs d = Cty.mk ~loc:(symbol_rloc()) ?attrs d\nlet mkctf ?attrs ?docs d =\n  Ctf.mk ~loc:(symbol_rloc()) ?attrs ?docs d\nlet mkcf ?attrs ?docs d =\n  Cf.mk ~loc:(symbol_rloc()) ?attrs ?docs d\n\nlet mkrhs rhs pos = mkloc rhs (rhs_loc pos)\n\nlet reloc_pat x = { x with ppat_loc = symbol_rloc () };;\nlet reloc_exp x = { x with pexp_loc = symbol_rloc () };;\n\nlet mkoperator name pos =\n  let loc = rhs_loc pos in\n  Exp.mk ~loc (Pexp_ident(mkloc (Lident name) loc))\n\nlet mkpatvar name pos =\n  Pat.mk ~loc:(rhs_loc pos) (Ppat_var (mkrhs name pos))\n\n(*\n  Ghost expressions and patterns:\n  expressions and patterns that do not appear explicitly in the\n  source file they have the loc_ghost flag set to true.\n  Then the profiler will not try to instrument them and the\n  -annot option will not try to display their type.\n\n  Every grammar rule that generates an element with a location must\n  make at most one non-ghost element, the topmost one.\n\n  How to tell whether your location must be ghost:\n  A location corresponds to a range of characters in the source file.\n  If the location contains a piece of code that is syntactically\n  valid (according to the documentation), and corresponds to the\n  AST node, then the location must be real; in all other cases,\n  it must be ghost.\n*)\nlet ghexp d = Exp.mk ~loc:(symbol_gloc ()) d\nlet ghpat d = Pat.mk ~loc:(symbol_gloc ()) d\nlet ghtyp d = Typ.mk ~loc:(symbol_gloc ()) d\nlet ghloc d = { txt = d; loc = symbol_gloc () }\nlet ghstr d = Str.mk ~loc:(symbol_gloc()) d\nlet ghsig d = Sig.mk ~loc:(symbol_gloc()) d\n\nlet mkinfix arg1 name arg2 =\n  mkexp(Pexp_apply(mkoperator name 2, [Nolabel, arg1; 
Nolabel, arg2]))\n\nlet neg_string f =\n  if String.length f > 0 && f.[0] = '-'\n  then String.sub f 1 (String.length f - 1)\n  else \"-\" ^ f\n\nlet mkuminus name arg =\n  match name, arg.pexp_desc with\n  | \"-\", Pexp_constant(Pconst_integer (n,m)) ->\n      mkexp(Pexp_constant(Pconst_integer(neg_string n,m)))\n  | (\"-\" | \"-.\"), Pexp_constant(Pconst_float (f, m)) ->\n      mkexp(Pexp_constant(Pconst_float(neg_string f, m)))\n  | _ ->\n      mkexp(Pexp_apply(mkoperator (\"~\" ^ name) 1, [Nolabel, arg]))\n\nlet mkuplus name arg =\n  let desc = arg.pexp_desc in\n  match name, desc with\n  | \"+\", Pexp_constant(Pconst_integer _)\n  | (\"+\" | \"+.\"), Pexp_constant(Pconst_float _) -> mkexp desc\n  | _ ->\n      mkexp(Pexp_apply(mkoperator (\"~\" ^ name) 1, [Nolabel, arg]))\n\nlet mkexp_cons consloc args loc =\n  Exp.mk ~loc (Pexp_construct(mkloc (Lident \"::\") consloc, Some args))\n\nlet mkpat_cons consloc args loc =\n  Pat.mk ~loc (Ppat_construct(mkloc (Lident \"::\") consloc, Some args))\n\nlet rec mktailexp nilloc = function\n    [] ->\n      let loc = { nilloc with loc_ghost = true } in\n      let nil = { txt = Lident \"[]\"; loc = loc } in\n      Exp.mk ~loc (Pexp_construct (nil, None))\n  | e1 :: el ->\n      let exp_el = mktailexp nilloc el in\n      let loc = {loc_start = e1.pexp_loc.loc_start;\n               loc_end = exp_el.pexp_loc.loc_end;\n               loc_ghost = true}\n      in\n      let arg = Exp.mk ~loc (Pexp_tuple [e1; exp_el]) in\n      mkexp_cons {loc with loc_ghost = true} arg loc\n\nlet rec mktailpat nilloc = function\n    [] ->\n      let loc = { nilloc with loc_ghost = true } in\n      let nil = { txt = Lident \"[]\"; loc = loc } in\n      Pat.mk ~loc (Ppat_construct (nil, None))\n  | p1 :: pl ->\n      let pat_pl = mktailpat nilloc pl in\n      let loc = {loc_start = p1.ppat_loc.loc_start;\n               loc_end = pat_pl.ppat_loc.loc_end;\n               loc_ghost = true}\n      in\n      let arg = Pat.mk ~loc (Ppat_tuple [p1; 
pat_pl]) in\n      mkpat_cons {loc with loc_ghost = true} arg loc\n\nlet mkstrexp e attrs =\n  { pstr_desc = Pstr_eval (e, attrs); pstr_loc = e.pexp_loc }\n\nlet mkexp_constraint e (t1, t2) =\n  match t1, t2 with\n  | Some t, None -> ghexp(Pexp_constraint(e, t))\n  | _, Some t -> ghexp(Pexp_coerce(e, t1, t))\n  | None, None -> assert false\n\nlet mkexp_opt_constraint e = function\n  | None -> e\n  | Some constraint_ -> mkexp_constraint e constraint_\n\nlet mkpat_opt_constraint p = function\n  | None -> p\n  | Some typ -> mkpat (Ppat_constraint(p, typ))\n\nlet array_function str name =\n  ghloc (Ldot(Lident str, (if !Clflags.fast then \"unsafe_\" ^ name else name)))\n\nlet syntax_error () =\n  raise Syntaxerr.Escape_error\n\nlet unclosed opening_name opening_num closing_name closing_num =\n  raise(Syntaxerr.Error(Syntaxerr.Unclosed(rhs_loc opening_num, opening_name,\n                                           rhs_loc closing_num, closing_name)))\n\nlet expecting pos nonterm =\n    raise Syntaxerr.(Error(Expecting(rhs_loc pos, nonterm)))\n\nlet not_expecting pos nonterm =\n    raise Syntaxerr.(Error(Not_expecting(rhs_loc pos, nonterm)))\n\n\nlet lapply p1 p2 =\n  if !Clflags.applicative_functors\n  then Lapply(p1, p2)\n  else raise (Syntaxerr.Error(Syntaxerr.Applicative_path (symbol_rloc())))\n\nlet exp_of_label lbl pos =\n  mkexp (Pexp_ident(mkrhs (Lident(Longident.last lbl)) pos))\n\nlet pat_of_label lbl pos =\n  mkpat (Ppat_var (mkrhs (Longident.last lbl) pos))\n\nlet mk_newtypes newtypes exp =\n  List.fold_right (fun newtype exp -> mkexp (Pexp_newtype (newtype, exp)))\n    newtypes exp\n\nlet wrap_type_annotation newtypes core_type body =\n  let exp = mkexp(Pexp_constraint(body,core_type)) in\n  let exp = mk_newtypes newtypes exp in\n  (exp, ghtyp(Ptyp_poly(newtypes, Typ.varify_constructors newtypes core_type)))\n\nlet wrap_exp_attrs body (ext, attrs) =\n  (* todo: keep exact location for the entire attribute *)\n  let body = {body with pexp_attributes = attrs @ 
body.pexp_attributes} in\n  match ext with\n  | None -> body\n  | Some id -> ghexp(Pexp_extension (id, PStr [mkstrexp body []]))\n\nlet mkexp_attrs d attrs =\n  wrap_exp_attrs (mkexp d) attrs\n\nlet wrap_typ_attrs typ (ext, attrs) =\n  (* todo: keep exact location for the entire attribute *)\n  let typ = {typ with ptyp_attributes = attrs @ typ.ptyp_attributes} in\n  match ext with\n  | None -> typ\n  | Some id -> ghtyp(Ptyp_extension (id, PTyp typ))\n\nlet mktyp_attrs d attrs =\n  wrap_typ_attrs (mktyp d) attrs\n\nlet wrap_pat_attrs pat (ext, attrs) =\n  (* todo: keep exact location for the entire attribute *)\n  let pat = {pat with ppat_attributes = attrs @ pat.ppat_attributes} in\n  match ext with\n  | None -> pat\n  | Some id -> ghpat(Ppat_extension (id, PPat (pat, None)))\n\nlet mkpat_attrs d attrs =\n  wrap_pat_attrs (mkpat d) attrs\n\nlet wrap_class_type_attrs body attrs =\n  {body with pcty_attributes = attrs @ body.pcty_attributes}\nlet wrap_mod_attrs body attrs =\n  {body with pmod_attributes = attrs @ body.pmod_attributes}\nlet wrap_mty_attrs body attrs =\n  {body with pmty_attributes = attrs @ body.pmty_attributes}\n\nlet wrap_str_ext body ext =\n  match ext with\n  | None -> body\n  | Some id -> ghstr(Pstr_extension ((id, PStr [body]), []))\n\nlet mkstr_ext d ext =\n  wrap_str_ext (mkstr d) ext\n\nlet wrap_sig_ext body ext =\n  match ext with\n  | None -> body\n  | Some id -> ghsig(Psig_extension ((id, PSig [body]), []))\n\nlet mksig_ext d ext =\n  wrap_sig_ext (mksig d) ext\n\nlet text_str pos = Str.text (rhs_text pos)\nlet text_sig pos = Sig.text (rhs_text pos)\nlet text_cstr pos = Cf.text (rhs_text pos)\nlet text_csig pos = Ctf.text (rhs_text pos)\n\n\nlet extra_text text pos items =\n  let pre_extras = rhs_pre_extra_text pos in\n  let post_extras = rhs_post_extra_text pos in\n    text pre_extras @ items @ text post_extras\n\nlet extra_str pos items = extra_text Str.text pos items\nlet extra_sig pos items = extra_text Sig.text pos items\nlet 
extra_cstr pos items = extra_text Cf.text pos items\nlet extra_csig pos items = extra_text Ctf.text pos items\n\nlet extra_rhs_core_type ct ~pos =\n  let docs = rhs_info pos in\n  { ct with ptyp_attributes = add_info_attrs docs ct.ptyp_attributes }\n\ntype let_binding =\n  { lb_pattern: pattern;\n    lb_expression: expression;\n    lb_attributes: attributes;\n    lb_docs: docs Lazy.t;\n    lb_text: text Lazy.t;\n    lb_loc: Location.t; }\n\ntype [@warning \"-69\"] let_bindings =\n  { lbs_bindings: let_binding list;\n    lbs_rec: rec_flag;\n    lbs_extension: string Asttypes.loc option;\n    lbs_loc: Location.t }\n\nlet mklb first (p, e) attrs =\n  { lb_pattern = p;\n    lb_expression = e;\n    lb_attributes = attrs;\n    lb_docs = symbol_docs_lazy ();\n    lb_text = if first then empty_text_lazy\n              else symbol_text_lazy ();\n    lb_loc = symbol_rloc (); }\n\nlet mklbs ext rf lb =\n  { lbs_bindings = [lb];\n    lbs_rec = rf;\n    lbs_extension = ext ;\n    lbs_loc = symbol_rloc (); }\n\nlet addlb lbs lb =\n  { lbs with lbs_bindings = lb :: lbs.lbs_bindings }\n\nlet val_of_let_bindings lbs =\n  let bindings =\n    List.map\n      (fun lb ->\n         Vb.mk ~loc:lb.lb_loc ~attrs:lb.lb_attributes\n           ~docs:(Lazy.force lb.lb_docs)\n           ~text:(Lazy.force lb.lb_text)\n           lb.lb_pattern lb.lb_expression)\n      lbs.lbs_bindings\n  in\n  let str = mkstr(Pstr_value(lbs.lbs_rec, List.rev bindings)) in\n  match lbs.lbs_extension with\n  | None -> str\n  | Some id -> ghstr (Pstr_extension((id, PStr [str]), []))\n\nlet expr_of_let_bindings lbs body =\n  let bindings =\n    List.map\n      (fun lb ->\n         Vb.mk ~loc:lb.lb_loc ~attrs:lb.lb_attributes\n           lb.lb_pattern lb.lb_expression)\n      lbs.lbs_bindings\n  in\n    mkexp_attrs (Pexp_let(lbs.lbs_rec, List.rev bindings, body))\n      (lbs.lbs_extension, [])\n\n\n\n(* Alternatively, we could keep the generic module type in the Parsetree\n   and extract the package type during 
type-checking. In that case,\n   the assertions below should be turned into explicit checks. *)\nlet package_type_of_module_type pmty =\n  let err loc s =\n    raise (Syntaxerr.Error (Syntaxerr.Invalid_package_type (loc, s)))\n  in\n  let map_cstr = function\n    | Pwith_type (lid, ptyp) ->\n        let loc = ptyp.ptype_loc in\n        if ptyp.ptype_params <> [] then\n          err loc \"parametrized types are not supported\";\n        if ptyp.ptype_cstrs <> [] then\n          err loc \"constrained types are not supported\";\n        if ptyp.ptype_private <> Public then\n          err loc \"private types are not supported\";\n\n        (* restrictions below are checked by the 'with_constraint' rule *)\n        assert (ptyp.ptype_kind = Ptype_abstract);\n        assert (ptyp.ptype_attributes = []);\n        let ty =\n          match ptyp.ptype_manifest with\n          | Some ty -> ty\n          | None -> assert false\n        in\n        (lid, ty)\n    | _ ->\n        err pmty.pmty_loc \"only 'with type t =' constraints are supported\"\n  in\n  match pmty with\n  | {pmty_desc = Pmty_ident lid} -> (lid, [])\n  | {pmty_desc = Pmty_with({pmty_desc = Pmty_ident lid}, cstrs)} ->\n      (lid, List.map map_cstr cstrs)\n  | _ ->\n      err pmty.pmty_loc\n        \"only module type identifier and 'with type' constraints are supported\"\n\n\n# 466 \"ml/parser.ml\"\nlet yytransl_const = [|\n  257 (* AMPERAMPER *);\n  258 (* AMPERSAND *);\n  259 (* AND *);\n  260 (* AS *);\n  261 (* ASSERT *);\n  262 (* BACKQUOTE *);\n  263 (* BANG *);\n  264 (* BAR *);\n  265 (* BARBAR *);\n  266 (* BARRBRACKET *);\n  267 (* BEGIN *);\n  269 (* CLASS *);\n  270 (* COLON *);\n  271 (* COLONCOLON *);\n  272 (* COLONEQUAL *);\n  273 (* COLONGREATER *);\n  274 (* COMMA *);\n  275 (* CONSTRAINT *);\n  276 (* DO *);\n  277 (* DONE *);\n  278 (* DOT *);\n  279 (* DOTDOT *);\n  280 (* DOWNTO *);\n  281 (* ELSE *);\n  282 (* END *);\n    0 (* EOF *);\n  283 (* EQUAL *);\n  284 (* EXCEPTION *);\n  285 
(* EXTERNAL *);\n  286 (* FALSE *);\n  288 (* FOR *);\n  289 (* FUN *);\n  290 (* FUNCTION *);\n  291 (* FUNCTOR *);\n  292 (* GREATER *);\n  293 (* GREATERRBRACE *);\n  294 (* GREATERRBRACKET *);\n  295 (* IF *);\n  296 (* IN *);\n  297 (* INCLUDE *);\n  304 (* INHERIT *);\n  305 (* INITIALIZER *);\n  308 (* LAZY *);\n  309 (* LBRACE *);\n  310 (* LBRACELESS *);\n  311 (* LBRACKET *);\n  312 (* LBRACKETBAR *);\n  313 (* LBRACKETLESS *);\n  314 (* LBRACKETGREATER *);\n  315 (* LBRACKETPERCENT *);\n  316 (* LBRACKETPERCENTPERCENT *);\n  317 (* LESS *);\n  318 (* LESSMINUS *);\n  319 (* LET *);\n  321 (* LPAREN *);\n  322 (* LBRACKETAT *);\n  323 (* LBRACKETATAT *);\n  324 (* LBRACKETATATAT *);\n  325 (* MATCH *);\n  326 (* METHOD *);\n  327 (* MINUS *);\n  328 (* MINUSDOT *);\n  329 (* MINUSGREATER *);\n  330 (* MODULE *);\n  331 (* MUTABLE *);\n  332 (* NEW *);\n  333 (* NONREC *);\n  334 (* OBJECT *);\n  335 (* OF *);\n  336 (* OPEN *);\n  338 (* OR *);\n  339 (* PERCENT *);\n  340 (* PLUS *);\n  341 (* PLUSDOT *);\n  342 (* PLUSEQ *);\n  344 (* PRIVATE *);\n  345 (* QUESTION *);\n  346 (* QUOTE *);\n  347 (* RBRACE *);\n  348 (* RBRACKET *);\n  349 (* REC *);\n  350 (* RPAREN *);\n  351 (* SEMI *);\n  352 (* SEMISEMI *);\n  353 (* HASH *);\n  355 (* SIG *);\n  356 (* STAR *);\n  358 (* STRUCT *);\n  359 (* THEN *);\n  360 (* TILDE *);\n  361 (* TO *);\n  362 (* TRUE *);\n  363 (* TRY *);\n  364 (* TYPE *);\n  366 (* UNDERSCORE *);\n  367 (* VAL *);\n  368 (* VIRTUAL *);\n  369 (* WHEN *);\n  370 (* WHILE *);\n  371 (* WITH *);\n  374 (* EOL *);\n    0|]\n\nlet yytransl_block = [|\n  268 (* CHAR *);\n  287 (* FLOAT *);\n  298 (* INFIXOP0 *);\n  299 (* INFIXOP1 *);\n  300 (* INFIXOP2 *);\n  301 (* INFIXOP3 *);\n  302 (* INFIXOP4 *);\n  303 (* DOTOP *);\n  306 (* INT *);\n  307 (* LABEL *);\n  320 (* LIDENT *);\n  337 (* OPTLABEL *);\n  343 (* PREFIXOP *);\n  354 (* HASHOP *);\n  357 (* STRING *);\n  365 (* UIDENT *);\n  372 (* COMMENT *);\n  373 (* DOCSTRING *);\n  
  0|]\n\nlet yylhs = \"\\255\\255\\\n\\001\\000\\002\\000\\003\\000\\004\\000\\005\\000\\011\\000\\011\\000\\012\\000\\\n\\012\\000\\014\\000\\014\\000\\015\\000\\015\\000\\015\\000\\015\\000\\015\\000\\\n\\015\\000\\015\\000\\015\\000\\015\\000\\018\\000\\018\\000\\018\\000\\018\\000\\\n\\018\\000\\018\\000\\018\\000\\018\\000\\018\\000\\018\\000\\018\\000\\006\\000\\\n\\006\\000\\024\\000\\024\\000\\024\\000\\025\\000\\025\\000\\025\\000\\025\\000\\\n\\025\\000\\025\\000\\025\\000\\025\\000\\025\\000\\025\\000\\025\\000\\025\\000\\\n\\025\\000\\025\\000\\037\\000\\041\\000\\041\\000\\041\\000\\032\\000\\033\\000\\\n\\033\\000\\042\\000\\043\\000\\013\\000\\013\\000\\013\\000\\013\\000\\013\\000\\\n\\013\\000\\013\\000\\013\\000\\013\\000\\013\\000\\013\\000\\007\\000\\007\\000\\\n\\007\\000\\046\\000\\046\\000\\046\\000\\046\\000\\046\\000\\046\\000\\046\\000\\\n\\046\\000\\046\\000\\046\\000\\046\\000\\046\\000\\046\\000\\046\\000\\035\\000\\\n\\052\\000\\054\\000\\054\\000\\054\\000\\049\\000\\050\\000\\051\\000\\051\\000\\\n\\055\\000\\056\\000\\057\\000\\057\\000\\034\\000\\059\\000\\059\\000\\061\\000\\\n\\062\\000\\062\\000\\062\\000\\063\\000\\063\\000\\064\\000\\064\\000\\064\\000\\\n\\064\\000\\064\\000\\064\\000\\065\\000\\065\\000\\065\\000\\065\\000\\066\\000\\\n\\066\\000\\066\\000\\066\\000\\066\\000\\075\\000\\075\\000\\075\\000\\075\\000\\\n\\075\\000\\075\\000\\075\\000\\078\\000\\079\\000\\079\\000\\080\\000\\080\\000\\\n\\081\\000\\081\\000\\081\\000\\081\\000\\081\\000\\081\\000\\082\\000\\082\\000\\\n\\082\\000\\085\\000\\067\\000\\036\\000\\036\\000\\086\\000\\087\\000\\009\\000\\\n\\009\\000\\009\\000\\009\\000\\089\\000\\089\\000\\089\\000\\089\\000\\089\\000\\\n\\089\\000\\089\\000\\089\\000\\094\\000\\094\\000\\091\\000\\091\\000\\090\\000\\\n\\090\\000\\092\\000\\093\\000\\093\\000\\021\\000\\021\\000\\021\\000\\021\\000\\\n\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\\n\\021\\000\\021\\000\\021\\000\
\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\\n\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\\n\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\\n\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\\n\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\\n\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\021\\000\\096\\000\\096\\000\\\n\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\\n\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\\n\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\\n\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\\n\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\\n\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\\n\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\096\\000\\\n\\096\\000\\096\\000\\096\\000\\096\\000\\097\\000\\097\\000\\115\\000\\115\\000\\\n\\116\\000\\116\\000\\116\\000\\116\\000\\117\\000\\074\\000\\074\\000\\118\\000\\\n\\118\\000\\118\\000\\118\\000\\118\\000\\118\\000\\026\\000\\026\\000\\123\\000\\\n\\124\\000\\126\\000\\126\\000\\073\\000\\073\\000\\073\\000\\100\\000\\100\\000\\\n\\127\\000\\127\\000\\127\\000\\101\\000\\101\\000\\101\\000\\101\\000\\102\\000\\\n\\102\\000\\111\\000\\111\\000\\129\\000\\129\\000\\129\\000\\130\\000\\130\\000\\\n\\114\\000\\114\\000\\132\\000\\132\\000\\112\\000\\112\\000\\070\\000\\070\\000\\\n\\070\\000\\070\\000\\070\\000\\131\\000\\131\\000\\010\\000\\010\\000\\010\\000\\\n\\010\\000\\010\\000\\010\\000\\010\\000\\010\\000\\010\\000\\010\\000\\121\\000\\\n\\121\\000\\121\\000\\121\\000\\121\\000\\121\\000\\121\\000\\121\\000\\121\\000\\\n\\134\\000\\134\\000\\134\\000\\134\\000\\095\\000\\095\\000\\122\\000\\122\\000\\\n\\122\\000\\122
\\000\\122\\000\\122\\000\\122\\000\\122\\000\\122\\000\\122\\000\\\n\\122\\000\\122\\000\\122\\000\\122\\000\\122\\000\\122\\000\\122\\000\\122\\000\\\n\\122\\000\\122\\000\\122\\000\\122\\000\\138\\000\\138\\000\\138\\000\\138\\000\\\n\\138\\000\\138\\000\\138\\000\\133\\000\\133\\000\\133\\000\\135\\000\\135\\000\\\n\\135\\000\\140\\000\\140\\000\\139\\000\\139\\000\\139\\000\\139\\000\\141\\000\\\n\\141\\000\\142\\000\\142\\000\\028\\000\\143\\000\\143\\000\\027\\000\\029\\000\\\n\\029\\000\\144\\000\\145\\000\\149\\000\\149\\000\\148\\000\\148\\000\\148\\000\\\n\\148\\000\\148\\000\\148\\000\\148\\000\\148\\000\\148\\000\\148\\000\\148\\000\\\n\\147\\000\\147\\000\\147\\000\\152\\000\\153\\000\\153\\000\\155\\000\\155\\000\\\n\\156\\000\\154\\000\\154\\000\\154\\000\\157\\000\\060\\000\\060\\000\\150\\000\\\n\\150\\000\\150\\000\\158\\000\\159\\000\\031\\000\\031\\000\\048\\000\\098\\000\\\n\\161\\000\\161\\000\\161\\000\\161\\000\\162\\000\\162\\000\\151\\000\\151\\000\\\n\\151\\000\\164\\000\\165\\000\\030\\000\\047\\000\\167\\000\\167\\000\\167\\000\\\n\\167\\000\\167\\000\\167\\000\\168\\000\\168\\000\\168\\000\\169\\000\\170\\000\\\n\\171\\000\\172\\000\\045\\000\\045\\000\\173\\000\\173\\000\\173\\000\\173\\000\\\n\\174\\000\\174\\000\\120\\000\\120\\000\\071\\000\\071\\000\\166\\000\\166\\000\\\n\\008\\000\\008\\000\\175\\000\\175\\000\\177\\000\\177\\000\\177\\000\\177\\000\\\n\\177\\000\\128\\000\\128\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\\n\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\\n\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\022\\000\\183\\000\\\n\\183\\000\\184\\000\\184\\000\\182\\000\\182\\000\\186\\000\\186\\000\\187\\000\\\n\\187\\000\\185\\000\\185\\000\\178\\000\\178\\000\\076\\000\\076\\000\\163\\000\\\n\\163\\000\\180\\000\\180\\000\\180\\000\\180\\000\\180\\000\\180\\000\\180\\000\\\n\\190\\000\\188\\000\\189\\000\\068\\000\\110\\000\\110\\000\\110\\000\\110\\000\\\
n\\136\\000\\136\\000\\136\\000\\136\\000\\136\\000\\058\\000\\058\\000\\119\\000\\\n\\119\\000\\119\\000\\119\\000\\119\\000\\191\\000\\191\\000\\191\\000\\191\\000\\\n\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\\n\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\\n\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\191\\000\\\n\\191\\000\\160\\000\\160\\000\\160\\000\\160\\000\\160\\000\\160\\000\\109\\000\\\n\\109\\000\\103\\000\\103\\000\\103\\000\\103\\000\\103\\000\\103\\000\\103\\000\\\n\\108\\000\\108\\000\\137\\000\\137\\000\\016\\000\\016\\000\\176\\000\\176\\000\\\n\\176\\000\\044\\000\\044\\000\\077\\000\\077\\000\\181\\000\\181\\000\\104\\000\\\n\\125\\000\\125\\000\\146\\000\\146\\000\\105\\000\\105\\000\\072\\000\\072\\000\\\n\\069\\000\\069\\000\\084\\000\\084\\000\\083\\000\\083\\000\\083\\000\\083\\000\\\n\\083\\000\\053\\000\\053\\000\\099\\000\\099\\000\\113\\000\\113\\000\\106\\000\\\n\\106\\000\\107\\000\\107\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\192\\000\\088\\000\\088\\000\\\n\\019\\000\\194\\000\\039\\000\\023\\000\\023\\000\\017\\000\\017\\000\\040\\000\\\n\\040\\000\\040\\000\\020\\000\\038\\000\\193\\000\\193\\000\\193\\000\\193\\000\\\n\\193\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\"\n\nlet yylen = 
\"\\002\\000\\\n\\002\\000\\002\\000\\002\\000\\002\\000\\002\\000\\002\\000\\005\\000\\001\\000\\\n\\001\\000\\002\\000\\001\\000\\001\\000\\004\\000\\004\\000\\005\\000\\002\\000\\\n\\003\\000\\001\\000\\002\\000\\001\\000\\005\\000\\005\\000\\003\\000\\003\\000\\\n\\005\\000\\007\\000\\009\\000\\007\\000\\006\\000\\006\\000\\005\\000\\003\\000\\\n\\001\\000\\000\\000\\002\\000\\002\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\002\\000\\001\\000\\004\\000\\002\\000\\004\\000\\002\\000\\005\\000\\001\\000\\\n\\002\\000\\006\\000\\005\\000\\001\\000\\004\\000\\004\\000\\005\\000\\003\\000\\\n\\003\\000\\005\\000\\003\\000\\003\\000\\001\\000\\002\\000\\000\\000\\002\\000\\\n\\002\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\002\\000\\001\\000\\005\\000\\\n\\004\\000\\002\\000\\006\\000\\003\\000\\005\\000\\006\\000\\001\\000\\002\\000\\\n\\007\\000\\006\\000\\000\\000\\002\\000\\006\\000\\000\\000\\003\\000\\002\\000\\\n\\003\\000\\005\\000\\000\\000\\000\\000\\002\\000\\003\\000\\003\\000\\004\\000\\\n\\004\\000\\002\\000\\001\\000\\007\\000\\007\\000\\006\\000\\007\\000\\007\\000\\\n\\007\\000\\005\\000\\008\\000\\011\\000\\004\\000\\001\\000\\004\\000\\004\\000\\\n\\002\\000\\001\\000\\007\\000\\002\\000\\003\\000\\000\\000\\000\\000\\002\\000\\\n\\004\\000\\004\\000\\007\\000\\004\\000\\002\\000\\001\\000\\005\\000\\005\\000\\\n\\003\\000\\003\\000\\003\\000\\001\\000\\002\\000\\009\\000\\008\\000\\001\\000\\\n\\002\\000\\003\\000\\005\\000\\005\\000\\002\\000\\005\\000\\002\\000\\004\\000\\\n\\002\\000\\002\\000\\001\\000\\001\\000\\001\\000\\000\\000\\002\\000\\001\\000\\\n\\003\\000\\001\\000\\001\\000\\003\\000\\001\\000\\002\\000\\003\\000\\007\\000\\\n\\006\\000\\007\\000\\004\\000\\004\\000\\007\\000\\006\\000\\006\\000\\005\\000\\\n\\001\\000\\002\\000\\002\\000\\007\\000\\005\\000\\
006\\000\\010\\000\\003\\000\\\n\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\\n\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\\n\\003\\000\\003\\000\\003\\000\\003\\000\\002\\000\\002\\000\\005\\000\\007\\000\\\n\\007\\000\\007\\000\\007\\000\\007\\000\\009\\000\\009\\000\\009\\000\\003\\000\\\n\\003\\000\\003\\000\\004\\000\\004\\000\\002\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\003\\000\\003\\000\\004\\000\\003\\000\\004\\000\\004\\000\\\n\\003\\000\\005\\000\\004\\000\\005\\000\\005\\000\\005\\000\\005\\000\\005\\000\\\n\\005\\000\\005\\000\\005\\000\\005\\000\\005\\000\\005\\000\\007\\000\\007\\000\\\n\\007\\000\\007\\000\\007\\000\\007\\000\\005\\000\\003\\000\\003\\000\\005\\000\\\n\\005\\000\\004\\000\\004\\000\\002\\000\\006\\000\\004\\000\\006\\000\\004\\000\\\n\\004\\000\\006\\000\\004\\000\\006\\000\\002\\000\\002\\000\\003\\000\\003\\000\\\n\\002\\000\\005\\000\\004\\000\\005\\000\\003\\000\\003\\000\\005\\000\\007\\000\\\n\\006\\000\\009\\000\\008\\000\\001\\000\\001\\000\\002\\000\\001\\000\\001\\000\\\n\\002\\000\\002\\000\\002\\000\\002\\000\\001\\000\\001\\000\\002\\000\\002\\000\\\n\\004\\000\\007\\000\\008\\000\\003\\000\\005\\000\\001\\000\\002\\000\\005\\000\\\n\\004\\000\\001\\000\\003\\000\\002\\000\\002\\000\\005\\000\\001\\000\\003\\000\\\n\\003\\000\\005\\000\\003\\000\\002\\000\\004\\000\\002\\000\\005\\000\\003\\000\\\n\\003\\000\\003\\000\\001\\000\\001\\000\\003\\000\\002\\000\\004\\000\\002\\000\\\n\\002\\000\\003\\000\\003\\000\\001\\000\\001\\000\\003\\000\\002\\000\\004\\000\\\n\\002\\000\\002\\000\\002\\000\\001\\000\\000\\000\\003\\000\\003\\000\\001\\000\\\n\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\002\\000\\001\\000\\003\\000\\\n\\003\\000\\001\\000\\003\\000\\003\\000\\003\\000\\003\\000\\002\\000\\001\\000\\\n\\001\\000\\002\\000\\002\\000\\003\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\003\\000\\001\\000\\001\\000\\002\
\000\\001\\000\\003\\000\\004\\000\\004\\000\\\n\\005\\000\\005\\000\\004\\000\\003\\000\\003\\000\\005\\000\\005\\000\\004\\000\\\n\\005\\000\\007\\000\\007\\000\\001\\000\\003\\000\\003\\000\\004\\000\\004\\000\\\n\\004\\000\\002\\000\\004\\000\\003\\000\\003\\000\\003\\000\\003\\000\\003\\000\\\n\\003\\000\\001\\000\\003\\000\\001\\000\\002\\000\\004\\000\\003\\000\\004\\000\\\n\\002\\000\\002\\000\\000\\000\\006\\000\\001\\000\\002\\000\\008\\000\\001\\000\\\n\\002\\000\\008\\000\\007\\000\\003\\000\\000\\000\\000\\000\\002\\000\\003\\000\\\n\\002\\000\\003\\000\\002\\000\\003\\000\\005\\000\\005\\000\\005\\000\\007\\000\\\n\\000\\000\\001\\000\\003\\000\\002\\000\\001\\000\\003\\000\\002\\000\\001\\000\\\n\\002\\000\\000\\000\\001\\000\\001\\000\\002\\000\\001\\000\\003\\000\\001\\000\\\n\\001\\000\\002\\000\\003\\000\\004\\000\\001\\000\\007\\000\\006\\000\\003\\000\\\n\\000\\000\\002\\000\\004\\000\\002\\000\\001\\000\\003\\000\\001\\000\\001\\000\\\n\\002\\000\\005\\000\\007\\000\\009\\000\\009\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\002\\000\\002\\000\\001\\000\\001\\000\\002\\000\\003\\000\\004\\000\\\n\\004\\000\\005\\000\\001\\000\\003\\000\\006\\000\\005\\000\\004\\000\\004\\000\\\n\\001\\000\\002\\000\\002\\000\\003\\000\\001\\000\\003\\000\\001\\000\\003\\000\\\n\\001\\000\\002\\000\\001\\000\\004\\000\\001\\000\\006\\000\\004\\000\\005\\000\\\n\\003\\000\\001\\000\\003\\000\\002\\000\\001\\000\\001\\000\\002\\000\\004\\000\\\n\\003\\000\\002\\000\\002\\000\\003\\000\\005\\000\\003\\000\\004\\000\\005\\000\\\n\\004\\000\\002\\000\\004\\000\\006\\000\\005\\000\\001\\000\\001\\000\\001\\000\\\n\\003\\000\\001\\000\\001\\000\\005\\000\\002\\000\\001\\000\\000\\000\\001\\000\\\n\\003\\000\\001\\000\\002\\000\\001\\000\\003\\000\\001\\000\\003\\000\\001\\000\\\n\\003\\000\\002\\000\\002\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\004\\000\\006\\000\\002\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\002\\000
\\002\\000\\002\\000\\002\\000\\001\\000\\001\\000\\001\\000\\\n\\003\\000\\003\\000\\002\\000\\003\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\003\\000\\004\\000\\003\\000\\004\\000\\003\\000\\004\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\002\\000\\002\\000\\003\\000\\001\\000\\001\\000\\001\\000\\\n\\003\\000\\001\\000\\005\\000\\002\\000\\002\\000\\003\\000\\001\\000\\001\\000\\\n\\001\\000\\003\\000\\001\\000\\003\\000\\001\\000\\003\\000\\001\\000\\003\\000\\\n\\004\\000\\001\\000\\003\\000\\001\\000\\003\\000\\001\\000\\003\\000\\002\\000\\\n\\000\\000\\001\\000\\000\\000\\001\\000\\001\\000\\001\\000\\000\\000\\001\\000\\\n\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\001\\000\\001\\000\\002\\000\\\n\\002\\000\\000\\000\\001\\000\\000\\000\\001\\000\\000\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\\n\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\001\\000\\003\\000\\\n\\004\\000\\004\\000\\004\\000\\000\\000\\002\\000\\000\\000\\002\\000\\000\\000\\\n\\002\\000\\003\\000\\004\\000\\004\\000\\001\\000\\002\\000\\002\\000\\002\\000\\\n\\004\\000\\002\\000\\002\\000\\002\\000\\002\\000\\002\\000\"\n\nlet yydefred = 
\"\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\054\\002\\000\\000\\000\\000\\000\\000\\111\\002\\056\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\053\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\152\\002\\153\\002\\000\\000\\000\\000\\000\\000\\154\\002\\\n\\155\\002\\000\\000\\000\\000\\055\\002\\112\\002\\000\\000\\000\\000\\117\\002\\\n\\230\\000\\000\\000\\000\\000\\226\\002\\000\\000\\000\\000\\000\\000\\036\\001\\\n\\000\\000\\033\\000\\000\\000\\000\\000\\038\\000\\039\\000\\000\\000\\041\\000\\\n\\042\\000\\043\\000\\000\\000\\045\\000\\046\\000\\000\\000\\048\\000\\000\\000\\\n\\050\\000\\056\\000\\205\\001\\000\\000\\148\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\231\\000\\232\\000\\104\\002\\054\\001\\168\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\227\\002\\000\\000\\075\\000\\\n\\074\\000\\000\\000\\082\\000\\083\\000\\000\\000\\000\\000\\087\\000\\000\\000\\\n\\077\\000\\078\\000\\079\\000\\080\\000\\000\\000\\084\\000\\095\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\119\\002\\005\\002\\228\\002\\000\\000\\022\\002\\000\\000\\006\\002\\\n\\249\\001\\000\\000\\000\\000\\253\\001\\000\\000\\229\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\064\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\119\\001\\230\\002\\000\\000\\000\\000\\140\\001\\113\\001\\\n\\000\\000\\000\\000\\057\\002\\117\\001\\118\\001\\000\\000\\103\\001\\000\\000\\\n\\125\\001\\000\\000\\000\\000\\000\\000\\000\\000\\063\\002\\062\\002\\128\\002\\\n\\022\\001\\233\\000\\234\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\075\\001\\000\\000\\025\\001\\
052\\002\\000\\000\\000\\000\\\n\\000\\000\\108\\002\\000\\000\\000\\000\\012\\001\\000\\000\\158\\002\\159\\002\\\n\\160\\002\\161\\002\\162\\002\\163\\002\\164\\002\\165\\002\\166\\002\\167\\002\\\n\\168\\002\\169\\002\\170\\002\\171\\002\\172\\002\\173\\002\\174\\002\\175\\002\\\n\\176\\002\\177\\002\\178\\002\\179\\002\\180\\002\\181\\002\\182\\002\\156\\002\\\n\\183\\002\\184\\002\\185\\002\\186\\002\\187\\002\\188\\002\\189\\002\\190\\002\\\n\\191\\002\\192\\002\\193\\002\\194\\002\\195\\002\\196\\002\\197\\002\\198\\002\\\n\\199\\002\\200\\002\\201\\002\\157\\002\\202\\002\\203\\002\\204\\002\\205\\002\\\n\\206\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\067\\002\\094\\002\\093\\002\\000\\000\\092\\002\\000\\000\\095\\002\\088\\002\\\n\\090\\002\\070\\002\\071\\002\\072\\002\\073\\002\\074\\002\\000\\000\\089\\002\\\n\\000\\000\\000\\000\\000\\000\\091\\002\\097\\002\\000\\000\\000\\000\\096\\002\\\n\\000\\000\\109\\002\\081\\002\\087\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\147\\002\\000\\000\\021\\001\\035\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\229\\000\\\n\\000\\000\\036\\000\\000\\000\\000\\000\\000\\000\\055\\001\\000\\000\\169\\001\\\n\\000\\000\\057\\000\\000\\000\\149\\000\\049\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\037\\001\\040\\001\\000\\000\\000\\000\\000\\000\\213\\000\\214\\000\\000\\000\\\n\\000\\000\\000\\000\\072\\000\\000\\000\\002\\000\\086\\000\\073\\000\\000\\000\\\n\\096\\000\\000\\000\\115\\002\\000\\000\\027\\002\\000\\000\\000\\000\\149\\002\\\n\\000\\000\\018\\002\\000\\000\\048\
\002\\010\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\045\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\004\\002\\126\\002\\000\\000\\011\\002\\003\\000\\250\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\007\\002\\004\\000\\\n\\000\\000\\000\\000\\113\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\001\\000\\000\\082\\002\\000\\000\\086\\002\\000\\000\\000\\000\\\n\\084\\002\\069\\002\\000\\000\\059\\002\\058\\002\\061\\002\\060\\002\\124\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\005\\000\\102\\001\\000\\000\\114\\001\\\n\\115\\001\\000\\000\\000\\000\\000\\000\\000\\000\\217\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\238\\000\\000\\000\\000\\000\\102\\002\\000\\000\\000\\000\\\n\\103\\002\\098\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\163\\000\\122\\001\\123\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\018\\000\\020\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\092\\001\\000\\000\\007\\001\\\n\\006\\001\\000\\000\\000\\000\\024\\001\\023\\001\\000\\000\\081\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\221\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\130\\002\\000\\000\\110\\002\\000\\000\\000\\000\\\n\\000\\000\\068\\002\\000\\000\\236\\000\\235\\000\\000\\000\\066\\002\\065\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\108\\000\\\n\\000\\000\\000\\000\\132\\002\\000\\000\\000\\000\\000\\000\\000\\000\\032\\000\\\n\\213\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\118\\002\\105\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\154\\000\\000\\000\\\n\\000\\000\\175\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\045\\001\\043\\001\\029\\001\\000\\000\\042\\001\\038\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\069\\000\\060\\000\\\n\\122\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\026\\002\\000\\000\\\n\\024\\002\\000\\000\\029\\002\\014\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\051\\002\\009\\002\\042\\002\\043\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\040\\002\\000\\000\\116\\002\\120\\002\\000\\000\\\n\\000\\000\\000\\000\\012\\002\\101\\001\\116\\001\\000\\000\\000\\000\\000\\000\\\n\\142\\001\\141\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\133\\001\\\n\\000\\000\\132\\001\\095\\001\\094\\001\\100\\001\\000\\000\\098\\001\\000\\000\\\n\\150\\001\\000\\000\\000\\000\\000\\000\\126\\001\\000\\000\\121\\001\\000\\000\\\n\\218\\002\\215\\002\\000\\000\\000\\000\\000\\000\\241\\000\\000\\000\\000\\000\\\n\\000\\000\\239\\000\\237\\000\\140\\002\\000\\000\\099\\002\\000\\000\\100\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\085\\002\\000\\000\\083\\002\\000\\000\\\n\\000\\000\\162\\000\\000\\000\\164\\000\\000\\000\\165\\000\\159\\000\\170\\000\\\n\\000\\000\\157\\000\\000\\000\\161\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\180\\000\\000\\000\\000\\000\\063\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\016\\000\\019\\000\\051\\000\\000\\000\\000\\000\\074\\001\\\n\\090\\001\\000\\000\\091\\001\\000\\000\\000\\000\\077\\001\\000\\000\\082\\001\\\n\\000\\000\\017\\001\\016\\001\\011\\001\\010\\001\\222\\002\\000\\000\\000\\000\\\n\\219\\002\\208\\002\\220\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\112\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\240\\000\\211\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\00
0\\000\\000\\000\\000\\000\\000\\000\\228\\000\\227\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\196\\001\\195\\001\\000\\000\\186\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\027\\001\\000\\000\\019\\001\\000\\000\\014\\001\\\n\\000\\000\\000\\000\\000\\000\\243\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\070\\000\\089\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\015\\002\\030\\002\\000\\000\\000\\000\\\n\\000\\000\\019\\002\\017\\002\\000\\000\\000\\000\\000\\000\\247\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\008\\002\\000\\000\\000\\000\\127\\002\\\n\\000\\000\\000\\000\\121\\002\\252\\001\\114\\002\\000\\000\\000\\000\\000\\000\\\n\\159\\001\\000\\000\\144\\001\\143\\001\\147\\001\\145\\001\\000\\000\\136\\001\\\n\\000\\000\\127\\001\\131\\001\\128\\001\\000\\000\\209\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\101\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\210\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\068\\001\\070\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\011\\000\\000\\000\\000\\000\\024\\000\\000\\000\\023\\000\\\n\\000\\000\\017\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\056\\001\\000\\000\\000\\000\\000\\000\\000\\000\\048\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\111\\001\\000\\000\\000\\000\\\n\\080\\002\\078\\002\\076\\002\\000\\000\\031\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\006\\000\\008\\000\\009\\000\\000\\000\\054\\000\\\n\\055\\000\\000\\000\\105\\000\\000\\000\\000\\000\\000\\000\\000\\000\\
000\\000\\\n\\115\\000\\109\\000\\088\\000\\184\\000\\000\\000\\189\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\192\\001\\188\\001\\000\\000\\000\\000\\210\\002\\009\\001\\\n\\008\\001\\028\\001\\026\\001\\000\\000\\000\\000\\107\\002\\000\\000\\244\\000\\\n\\242\\000\\155\\000\\057\\001\\000\\000\\000\\000\\000\\000\\005\\001\\248\\000\\\n\\000\\000\\246\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\254\\000\\\n\\000\\000\\250\\000\\000\\000\\252\\000\\000\\000\\000\\000\\068\\000\\067\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\235\\001\\000\\000\\\n\\123\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\093\\000\\000\\000\\\n\\000\\000\\025\\002\\032\\002\\000\\000\\016\\002\\034\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\021\\002\\013\\002\\000\\000\\041\\002\\\n\\000\\000\\151\\002\\158\\001\\000\\000\\137\\001\\135\\001\\134\\001\\130\\001\\\n\\129\\001\\247\\000\\245\\000\\000\\000\\000\\000\\000\\000\\253\\000\\249\\000\\\n\\251\\000\\000\\000\\000\\000\\198\\001\\000\\000\\138\\002\\000\\000\\000\\000\\\n\\215\\001\\000\\000\\000\\000\\000\\000\\000\\000\\207\\001\\000\\000\\134\\002\\\n\\133\\002\\000\\000\\047\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\160\\000\\000\\000\\000\\000\\067\\001\\065\\001\\000\\000\\064\\001\\\n\\000\\000\\000\\000\\010\\000\\000\\000\\000\\000\\014\\000\\013\\000\\000\\000\\\n\\225\\002\\177\\000\\208\\001\\000\\000\\000\\000\\000\\000\\000\\000\\060\\001\\\n\\000\\000\\000\\000\\000\\000\\058\\001\\061\\001\\105\\001\\104\\001\\110\\001\\\n\\000\\000\\108\\001\\000\\000\\153\\001\\000\\000\\052\\001\\000\\000\\000\\000\\\n\\033\\001\\000\\000\\000\\000\\000\\000\\101\\000\\058\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\114\\000\\\n\\000\\000\\000\\000\\187\\001\\000\\000\\173\\001\\000\\000\\191\\001\\164\\001\\\n\\190\\000\\020\\001\\018\\001\\015\\001\\013\\001\\000\
\000\\173\\001\\059\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\062\\000\\061\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\094\\000\\092\\000\\000\\000\\000\\000\\000\\000\\000\\000\\028\\002\\\n\\020\\002\\035\\002\\248\\001\\244\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\103\\000\\000\\000\\193\\001\\000\\000\\000\\000\\\n\\214\\001\\217\\001\\211\\001\\000\\000\\206\\001\\000\\000\\000\\000\\000\\000\\\n\\181\\000\\000\\000\\167\\000\\158\\000\\156\\000\\000\\000\\069\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\031\\000\\000\\000\\000\\000\\025\\000\\022\\000\\\n\\021\\000\\176\\000\\178\\000\\000\\000\\000\\000\\000\\000\\049\\001\\000\\000\\\n\\000\\000\\032\\001\\000\\000\\000\\000\\106\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\111\\000\\000\\000\\110\\000\\190\\001\\000\\000\\179\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\200\\001\\201\\001\\\n\\000\\000\\000\\000\\136\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\004\\001\\000\\000\\000\\001\\000\\000\\002\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\236\\001\\\n\\097\\000\\000\\000\\000\\000\\098\\000\\033\\002\\050\\002\\139\\001\\138\\001\\\n\\003\\001\\255\\000\\001\\001\\199\\001\\197\\001\\000\\000\\000\\000\\124\\002\\\n\\000\\000\\130\\000\\000\\000\\126\\000\\000\\000\\000\\000\\166\\001\\167\\001\\\n\\000\\000\\071\\001\\066\\001\\029\\000\\000\\000\\030\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\059\\001\\053\\001\\007\\000\\000\\000\\112\\000\\113\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\180\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\202\\001\\000\\000\\000\\000\\170\\001\\\n\\000\\000\\000\\000\\000\\000\\222\\001\\223\\001\\224\\001\\225\\001\\035\\001\\\n\\000\\000\\171\\001\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\228\\001\\\n\\229\\001\\000\\000\\000\\000\\000\\000\\129\\000\\150\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\026\\000\\028\\000\\000\\000\\000\\000\\062\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\203\\001\\000\\000\\172\\001\\000\\000\\000\\000\\000\\000\\220\\001\\\n\\226\\001\\227\\001\\034\\001\\151\\000\\000\\000\\000\\000\\000\\000\\238\\001\\\n\\242\\001\\173\\001\\091\\000\\000\\000\\221\\001\\230\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\135\\000\\125\\002\\000\\000\\191\\000\\000\\000\\000\\000\\\n\\050\\001\\000\\000\\000\\000\\000\\000\\122\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\204\\001\\183\\001\\000\\000\\000\\000\\181\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\231\\001\\000\\000\\125\\000\\000\\000\\000\\000\\128\\000\\\n\\127\\000\\000\\000\\000\\000\\027\\000\\051\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\118\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\232\\001\\233\\001\\000\\000\\133\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\142\\000\\136\\000\\219\\001\\120\\000\\121\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\119\\000\\184\\001\\234\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\141\\000\\000\\000\\123\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\140\\000\\137\\000\\144\\002\\145\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\138\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\124\\000\\000\\000\\000\\000\\000\\000\\139\\000\\000\\000\\\n\\000\\000\"\n\nlet yydgoto = 
\"\\006\\000\\\n\\052\\000\\094\\000\\124\\000\\134\\000\\148\\000\\245\\001\\095\\000\\153\\005\\\n\\054\\000\\171\\001\\250\\002\\175\\003\\065\\003\\132\\003\\200\\002\\055\\000\\\n\\190\\001\\223\\001\\072\\001\\056\\000\\057\\000\\066\\003\\046\\001\\058\\000\\\n\\059\\000\\136\\000\\061\\000\\062\\000\\063\\000\\064\\000\\065\\000\\066\\000\\\n\\067\\000\\068\\000\\069\\000\\070\\000\\071\\000\\072\\000\\073\\000\\000\\001\\\n\\251\\002\\074\\000\\082\\001\\088\\002\\238\\003\\104\\000\\105\\000\\075\\000\\\n\\107\\000\\108\\000\\109\\000\\110\\000\\037\\001\\049\\003\\111\\000\\113\\001\\\n\\168\\003\\089\\002\\102\\003\\026\\004\\015\\002\\016\\002\\255\\002\\186\\003\\\n\\103\\004\\101\\004\\199\\004\\076\\000\\031\\004\\075\\004\\154\\005\\213\\004\\\n\\076\\004\\117\\003\\003\\005\\136\\001\\004\\005\\114\\005\\115\\005\\146\\005\\\n\\173\\005\\203\\005\\199\\005\\165\\002\\092\\005\\077\\000\\084\\001\\250\\000\\\n\\192\\002\\120\\003\\047\\004\\121\\003\\119\\003\\183\\002\\152\\000\\078\\000\\\n\\096\\001\\228\\002\\121\\001\\195\\002\\193\\002\\079\\000\\080\\000\\081\\000\\\n\\042\\004\\082\\000\\083\\000\\185\\000\\084\\000\\085\\000\\186\\000\\196\\000\\\n\\239\\001\\192\\000\\097\\001\\098\\001\\074\\002\\232\\002\\086\\000\\155\\005\\\n\\234\\002\\157\\000\\087\\000\\078\\001\\253\\001\\077\\004\\196\\002\\127\\000\\\n\\187\\000\\188\\000\\231\\001\\193\\000\\158\\000\\159\\000\\237\\002\\160\\000\\\n\\128\\000\\161\\000\\158\\001\\161\\001\\159\\001\\128\\002\\167\\004\\088\\000\\\n\\080\\001\\020\\002\\005\\003\\109\\004\\218\\004\\214\\004\\032\\004\\006\\003\\\n\\191\\003\\007\\003\\196\\003\\028\\004\\158\\004\\215\\004\\216\\004\\217\\004\\\n\\172\\002\\106\\003\\107\\003\\033\\004\\034\\004\\062\\003\\043\\005\\063\\005\\\n\\044\\005\\045\\005\\046\\005\\047\\005\\239\\003\\059\\005\\129\\000\\130\\000\\\n\\131\\000\\132\\000\\133\\000\\129\\001\\142\\001\\095\\002\\096\\002\\097\\002\\\n\\255\\003\\055\\003\\252\\003\\130\\001\\131\\001\\
132\\001\\030\\001\\251\\000\\\n\\246\\001\\047\\001\"\n\nlet yysindex = \"\\180\\007\\\n\\119\\061\\200\\008\\016\\047\\124\\064\\160\\067\\000\\000\\076\\004\\241\\002\\\n\\244\\009\\076\\004\\000\\000\\236\\254\\076\\004\\076\\004\\000\\000\\000\\000\\\n\\076\\004\\076\\004\\076\\004\\076\\004\\076\\004\\000\\000\\076\\004\\044\\067\\\n\\174\\002\\205\\061\\037\\062\\122\\057\\122\\057\\068\\003\\000\\000\\232\\054\\\n\\122\\057\\076\\004\\000\\000\\000\\000\\036\\004\\076\\004\\106\\000\\000\\000\\\n\\000\\000\\244\\009\\119\\061\\000\\000\\000\\000\\076\\004\\076\\004\\000\\000\\\n\\000\\000\\076\\004\\076\\004\\000\\000\\254\\000\\102\\000\\157\\000\\000\\000\\\n\\225\\072\\000\\000\\222\\005\\236\\255\\000\\000\\000\\000\\241\\000\\000\\000\\\n\\000\\000\\000\\000\\017\\001\\000\\000\\000\\000\\024\\002\\000\\000\\102\\000\\\n\\000\\000\\000\\000\\000\\000\\089\\001\\000\\000\\034\\069\\015\\002\\244\\009\\\n\\244\\009\\124\\064\\124\\064\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\076\\004\\076\\004\\036\\004\\200\\008\\076\\004\\000\\000\\140\\003\\000\\000\\\n\\000\\000\\241\\000\\000\\000\\000\\000\\024\\002\\102\\000\\000\\000\\200\\008\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\113\\002\\000\\000\\000\\000\\145\\007\\\n\\220\\002\\050\\255\\122\\009\\044\\003\\165\\016\\016\\047\\054\\003\\241\\002\\\n\\021\\003\\000\\000\\000\\000\\000\\000\\038\\000\\000\\000\\023\\003\\000\\000\\\n\\000\\000\\115\\001\\232\\000\\000\\000\\061\\002\\000\\000\\216\\004\\236\\255\\\n\\076\\004\\076\\004\\035\\003\\163\\066\\226\\066\\000\\000\\088\\059\\018\\004\\\n\\129\\004\\086\\003\\000\\000\\000\\000\\067\\000\\251\\003\\000\\000\\000\\000\\\n\\160\\067\\160\\067\\000\\000\\000\\000\\000\\000\\039\\004\\000\\000\\107\\004\\\n\\000\\000\\122\\057\\122\\057\\033\\004\\244\\009\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\122\\062\\076\\004\\041\\002\\097\\005\\160\\067\\\n\\040\\066\\220\\002\\124\\064\\019\\002\\244\\009\\000\\000\\1
88\\004\\113\\001\\\n\\212\\002\\117\\255\\000\\000\\127\\004\\000\\000\\000\\000\\246\\004\\165\\002\\\n\\224\\004\\000\\000\\073\\073\\248\\004\\000\\000\\248\\004\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\032\\061\\080\\005\\032\\061\\076\\004\\076\\004\\106\\000\\022\\005\\\n\\000\\000\\000\\000\\000\\000\\244\\009\\000\\000\\036\\005\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\083\\005\\000\\000\\\n\\000\\000\\000\\000\\165\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\244\\009\\000\\000\\000\\000\\000\\000\\184\\255\\130\\255\\032\\061\\124\\064\\\n\\076\\004\\160\\255\\070\\005\\000\\000\\076\\004\\000\\000\\000\\000\\124\\064\\\n\\069\\005\\097\\005\\124\\064\\000\\000\\122\\057\\222\\005\\102\\000\\225\\004\\\n\\124\\064\\124\\064\\124\\064\\124\\064\\124\\064\\124\\064\\124\\064\\124\\064\\\n\\124\\064\\124\\064\\124\\064\\124\\064\\124\\064\\124\\064\\124\\064\\124\\064\\\n\\124\\064\\124\\064\\124\\064\\124\\064\\124\\064\\207\\062\\124\\064\\000\\000\\\n\\076\\004\\000\\000\\173\\005\\033\\004\\124\\064\\000\\000\\033\\004\\000\\000\\\n\\033\\004\\000\\000\\033\\004\\000\\000\\000\\000\\124\\064\\104\\003\\099\\005\\\n\\244\\009\\244\\009\\150\\005\\157\\005\\244\\009\\150\\005\\119\\002\\041\\069\\\n\\000\\000\\000\\000\\124\\064\\119\\002\\119\\002\\000\\000\\000\\000\\041\\002\\\n\\219\\003\\168\\004\\000\\000\\069\\005\\000\\000\\000\\000\\000\\000\\033\\004\\\n\\000\\000\\174\\004\\000\\000\\017\\255\\000\\
000\\138\\005\\235\\005\\000\\000\\\n\\174\\004\\000\\000\\174\\004\\000\\000\\000\\000\\000\\000\\233\\005\\163\\005\\\n\\231\\005\\043\\017\\043\\017\\000\\000\\016\\047\\076\\004\\033\\004\\183\\000\\\n\\198\\005\\004\\006\\000\\000\\000\\000\\255\\005\\000\\000\\000\\000\\000\\000\\\n\\090\\008\\094\\003\\170\\005\\194\\005\\016\\047\\021\\003\\000\\000\\000\\000\\\n\\160\\067\\168\\068\\000\\000\\010\\006\\034\\006\\203\\255\\224\\005\\037\\004\\\n\\236\\005\\000\\000\\236\\005\\000\\000\\018\\004\\000\\000\\165\\000\\129\\004\\\n\\000\\000\\000\\000\\076\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\053\\002\\152\\013\\239\\059\\044\\060\\000\\000\\000\\000\\144\\003\\000\\000\\\n\\000\\000\\160\\067\\076\\003\\032\\061\\033\\004\\000\\000\\033\\004\\119\\002\\\n\\189\\004\\102\\005\\000\\000\\205\\001\\227\\005\\000\\000\\251\\005\\158\\000\\\n\\000\\000\\000\\000\\009\\002\\006\\070\\077\\006\\128\\003\\168\\068\\011\\058\\\n\\104\\002\\136\\005\\205\\005\\188\\065\\000\\000\\000\\000\\000\\000\\160\\067\\\n\\241\\005\\033\\004\\141\\001\\033\\004\\115\\005\\072\\006\\000\\000\\000\\000\\\n\\119\\002\\143\\005\\035\\003\\070\\006\\214\\007\\000\\000\\078\\006\\000\\000\\\n\\000\\000\\035\\003\\124\\064\\000\\000\\000\\000\\157\\005\\000\\000\\124\\064\\\n\\118\\255\\051\\003\\200\\073\\160\\067\\000\\000\\020\\006\\122\\057\\023\\006\\\n\\041\\002\\009\\006\\076\\004\\000\\000\\229\\050\\000\\000\\022\\006\\028\\006\\\n\\029\\006\\000\\000\\019\\002\\000\\000\\000\\000\\038\\006\\000\\000\\000\\000\\\n\\041\\006\\027\\006\\241\\002\\037\\006\\178\\002\\160\\067\\232\\002\\000\\000\\\n\\043\\006\\032\\006\\000\\000\\029\\005\\122\\006\\123\\006\\032\\061\\000\\000\\\n\\000\\000\\044\\067\\116\\003\\036\\063\\124\\063\\087\\055\\000\\000\\000\\000\\\n\\166\\073\\166\\073\\134\\073\\247\\007\\073\\073\\134\\073\\239\\009\\239\\009\\\n\\239\\009\\239\\009\\089\\002\\104\\006\\104\\006\\239\\009\\089\\002\\089\\002\\\n\\134\\073\\104\\006\\089\\002\
\089\\002\\089\\002\\122\\057\\000\\000\\104\\006\\\n\\229\\050\\000\\000\\029\\005\\044\\006\\227\\005\\073\\073\\124\\064\\124\\064\\\n\\124\\064\\170\\004\\092\\006\\124\\064\\124\\064\\124\\064\\119\\002\\119\\002\\\n\\000\\000\\000\\000\\000\\000\\218\\004\\000\\000\\000\\000\\134\\073\\027\\001\\\n\\033\\004\\219\\003\\048\\006\\033\\004\\000\\000\\211\\002\\000\\000\\000\\000\\\n\\000\\000\\123\\002\\055\\006\\186\\002\\029\\005\\057\\006\\000\\000\\199\\255\\\n\\000\\000\\155\\006\\000\\000\\000\\000\\174\\004\\091\\001\\211\\255\\062\\048\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\096\\006\\219\\003\\016\\047\\159\\002\\\n\\016\\047\\016\\047\\119\\003\\000\\000\\071\\006\\000\\000\\000\\000\\021\\001\\\n\\241\\002\\097\\006\\000\\000\\000\\000\\000\\000\\121\\003\\016\\047\\148\\006\\\n\\000\\000\\000\\000\\053\\003\\160\\067\\029\\000\\108\\005\\067\\006\\000\\000\\\n\\097\\011\\000\\000\\000\\000\\000\\000\\000\\000\\179\\002\\000\\000\\162\\006\\\n\\000\\000\\173\\000\\031\\067\\178\\059\\000\\000\\173\\000\\000\\000\\094\\006\\\n\\000\\000\\000\\000\\124\\064\\124\\064\\235\\004\\000\\000\\124\\064\\124\\064\\\n\\124\\064\\000\\000\\000\\000\\000\\000\\132\\006\\000\\000\\095\\006\\000\\000\\\n\\019\\015\\074\\002\\019\\015\\033\\004\\000\\000\\188\\006\\000\\000\\016\\047\\\n\\124\\064\\000\\000\\126\\006\\000\\000\\160\\067\\000\\000\\000\\000\\000\\000\\\n\\127\\006\\000\\000\\127\\006\\000\\000\\090\\008\\122\\058\\124\\064\\188\\065\\\n\\000\\000\\108\\000\\184\\006\\000\\000\\124\\064\\130\\006\\033\\004\\073\\001\\\n\\119\\061\\155\\001\\000\\000\\000\\000\\000\\000\\087\\006\\000\\000\\000\\000\\\n\\000\\000\\161\\000\\000\\000\\033\\004\\124\\064\\000\\000\\073\\073\\000\\000\\\n\\073\\073\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\033\\004\\243\\000\\\n\\000\\000\\000\\000\\000\\000\\157\\006\\027\\001\\178\\002\\043\\006\\102\\000\\\n\\100\\065\\068\\005\\190\\006\\000\\000\\187\\006\\146\\006\\149\\006\\153\\006\\\n\\021\\002\\000
\\000\\000\\000\\220\\002\\191\\006\\178\\002\\219\\003\\019\\002\\\n\\078\\003\\178\\002\\102\\000\\007\\002\\000\\000\\000\\000\\169\\001\\201\\003\\\n\\091\\005\\103\\004\\000\\000\\000\\000\\176\\003\\000\\000\\244\\254\\016\\047\\\n\\124\\064\\125\\006\\221\\255\\000\\000\\255\\002\\000\\000\\248\\004\\000\\000\\\n\\248\\004\\128\\006\\165\\000\\000\\000\\165\\255\\124\\064\\102\\000\\156\\006\\\n\\178\\002\\132\\006\\073\\073\\038\\005\\063\\000\\190\\255\\162\\005\\124\\064\\\n\\085\\070\\117\\070\\195\\070\\130\\006\\094\\255\\145\\006\\200\\008\\219\\003\\\n\\129\\002\\000\\000\\000\\000\\186\\003\\212\\006\\219\\003\\043\\006\\214\\004\\\n\\102\\000\\176\\003\\214\\006\\174\\004\\000\\000\\000\\000\\016\\047\\057\\000\\\n\\224\\006\\000\\000\\000\\000\\241\\002\\057\\255\\033\\004\\000\\000\\016\\047\\\n\\180\\001\\137\\006\\033\\004\\021\\003\\000\\000\\097\\006\\159\\006\\000\\000\\\n\\090\\008\\124\\006\\000\\000\\000\\000\\000\\000\\033\\004\\160\\067\\142\\006\\\n\\000\\000\\037\\004\\000\\000\\000\\000\\000\\000\\000\\000\\149\\000\\000\\000\\\n\\223\\255\\000\\000\\000\\000\\000\\000\\208\\001\\000\\000\\098\\000\\245\\255\\\n\\181\\005\\227\\070\\049\\071\\081\\071\\103\\004\\174\\006\\000\\000\\164\\006\\\n\\000\\000\\172\\006\\071\\006\\158\\006\\169\\000\\225\\006\\033\\004\\000\\000\\\n\\102\\000\\144\\000\\182\\255\\126\\006\\154\\006\\125\\005\\226\\006\\226\\006\\\n\\237\\006\\166\\006\\179\\006\\126\\006\\000\\000\\000\\000\\210\\063\\124\\064\\\n\\160\\067\\041\\073\\000\\000\\044\\005\\124\\064\\000\\000\\219\\003\\000\\000\\\n\\030\\003\\000\\000\\016\\047\\073\\073\\124\\064\\124\\064\\033\\004\\216\\006\\\n\\060\\255\\000\\000\\028\\009\\124\\064\\233\\058\\234\\006\\000\\000\\161\\065\\\n\\059\\002\\105\\060\\166\\060\\227\\060\\124\\064\\000\\000\\016\\047\\160\\067\\\n\\000\\000\\000\\000\\000\\000\\015\\000\\000\\000\\160\\067\\219\\003\\102\\000\\\n\\102\\000\\175\\001\\226\\005\\000\\000\\000\\000\\000\\000\\248\\006\\000\\000\\\
n\\000\\000\\016\\047\\000\\000\\033\\004\\033\\004\\106\\000\\106\\000\\102\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\160\\067\\000\\000\\217\\000\\236\\006\\\n\\180\\006\\241\\002\\000\\000\\000\\000\\234\\005\\244\\006\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\110\\000\\122\\005\\000\\000\\019\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\236\\006\\102\\000\\203\\006\\000\\000\\000\\000\\\n\\206\\006\\000\\000\\210\\006\\124\\064\\124\\064\\124\\064\\073\\073\\000\\000\\\n\\211\\006\\000\\000\\217\\006\\000\\000\\223\\006\\199\\005\\000\\000\\000\\000\\\n\\033\\004\\120\\004\\180\\001\\043\\006\\029\\005\\015\\007\\000\\000\\000\\000\\\n\\000\\000\\219\\003\\180\\001\\201\\003\\061\\001\\008\\007\\000\\000\\200\\006\\\n\\219\\003\\000\\000\\000\\000\\087\\001\\000\\000\\000\\000\\074\\255\\000\\000\\\n\\016\\047\\241\\002\\193\\006\\097\\006\\000\\000\\000\\000\\016\\047\\000\\000\\\n\\037\\004\\000\\000\\000\\000\\219\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\124\\064\\124\\064\\124\\064\\000\\000\\000\\000\\\n\\000\\000\\228\\255\\201\\006\\000\\000\\007\\007\\000\\000\\157\\005\\208\\006\\\n\\000\\000\\164\\006\\090\\008\\184\\000\\102\\000\\000\\000\\204\\006\\000\\000\\\n\\000\\000\\124\\064\\000\\000\\188\\065\\016\\047\\124\\064\\209\\006\\215\\006\\\n\\016\\047\\000\\000\\124\\064\\218\\006\\000\\000\\000\\000\\219\\006\\000\\000\\\n\\124\\064\\019\\002\\000\\000\\174\\069\\097\\255\\000\\000\\000\\000\\033\\004\\\n\\000\\000\\000\\000\\000\\000\\124\\064\\124\\064\\126\\006\\046\\001\\000\\000\\\n\\126\\006\\124\\064\\019\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\179\\002\\000\\000\\162\\006\\000\\000\\173\\000\\000\\000\\088\\003\\173\\000\\\n\\000\\000\\227\\006\\184\\006\\180\\001\\000\\000\\000\\000\\019\\002\\219\\003\\\n\\255\\254\\016\\047\\124\\064\\033\\004\\102\\000\\033\\004\\102\\000\\000\\000\\\n\\184\\006\\103\\004\\000\\000\\162\\011\\000\\000\\229\\006\\000\\
000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\108\\002\\000\\000\\000\\000\\\n\\036\\007\\124\\064\\124\\064\\168\\071\\200\\071\\022\\072\\124\\064\\124\\064\\\n\\124\\064\\219\\003\\019\\002\\000\\000\\000\\000\\202\\005\\035\\003\\129\\002\\\n\\211\\002\\000\\000\\000\\000\\219\\003\\229\\006\\211\\002\\016\\047\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\033\\004\\097\\006\\001\\000\\054\\072\\\n\\132\\072\\164\\072\\103\\004\\000\\000\\241\\002\\000\\000\\131\\005\\053\\007\\\n\\000\\000\\000\\000\\000\\000\\055\\007\\000\\000\\204\\006\\102\\000\\048\\007\\\n\\000\\000\\033\\004\\000\\000\\000\\000\\000\\000\\033\\004\\000\\000\\188\\065\\\n\\124\\064\\073\\073\\226\\005\\000\\000\\094\\000\\082\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\049\\007\\016\\047\\239\\006\\000\\000\\124\\064\\\n\\124\\064\\000\\000\\226\\005\\161\\003\\000\\000\\125\\003\\102\\000\\102\\000\\\n\\174\\255\\000\\000\\187\\003\\000\\000\\000\\000\\041\\002\\000\\000\\249\\006\\\n\\222\\069\\229\\045\\000\\000\\222\\003\\021\\007\\070\\007\\000\\000\\000\\000\\\n\\027\\001\\054\\255\\000\\000\\252\\000\\015\\003\\054\\255\\131\\005\\073\\073\\\n\\073\\073\\000\\000\\018\\007\\000\\000\\022\\007\\000\\000\\024\\007\\073\\073\\\n\\073\\073\\073\\073\\180\\001\\226\\005\\170\\005\\170\\005\\046\\005\\000\\000\\\n\\000\\000\\105\\004\\048\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\016\\047\\001\\007\\000\\000\\\n\\033\\004\\000\\000\\234\\005\\000\\000\\254\\002\\062\\048\\000\\000\\000\\000\\\n\\124\\064\\000\\000\\000\\000\\000\\000\\202\\000\\000\\000\\250\\006\\016\\047\\\n\\238\\003\\161\\065\\000\\000\\000\\000\\000\\000\\016\\047\\000\\000\\000\\000\\\n\\233\\006\\229\\006\\157\\005\\235\\006\\164\\006\\157\\005\\027\\001\\000\\000\\\n\\033\\004\\070\\007\\229\\006\\164\\006\\000\\000\\033\\004\\016\\047\\000\\000\\\n\\041\\002\\030\\002\\193\\001\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\\n\\254\\006\\000\\000\\234\\005\\124\\064\\124\\064\\124\\064\\007\\003\\007\\003\\\n\\016\\047\\005\\007\\016\\047\\061\\001\\041\\002\\027\\001\\008\\002\\000\\000\\\n\\000\\000\\099\\000\\106\\000\\029\\007\\000\\000\\000\\000\\194\\003\\033\\004\\\n\\079\\007\\219\\003\\000\\000\\000\\000\\065\\004\\124\\064\\000\\000\\033\\004\\\n\\157\\005\\157\\005\\013\\066\\157\\005\\157\\005\\110\\005\\033\\004\\093\\255\\\n\\010\\007\\000\\000\\090\\004\\000\\000\\106\\002\\074\\002\\033\\004\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\073\\073\\073\\073\\073\\073\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\027\\001\\000\\000\\000\\000\\213\\003\\033\\004\\\n\\016\\047\\135\\004\\000\\000\\000\\000\\009\\007\\000\\000\\011\\007\\124\\064\\\n\\000\\000\\092\\007\\093\\007\\060\\017\\000\\000\\094\\007\\097\\007\\124\\064\\\n\\085\\007\\000\\000\\000\\000\\164\\006\\070\\007\\000\\000\\016\\047\\074\\002\\\n\\033\\004\\033\\004\\000\\000\\096\\007\\000\\000\\043\\006\\083\\001\\000\\000\\\n\\000\\000\\037\\002\\033\\004\\000\\000\\000\\000\\062\\048\\062\\048\\126\\006\\\n\\033\\004\\086\\007\\075\\001\\016\\047\\016\\047\\000\\000\\124\\064\\025\\007\\\n\\033\\004\\033\\004\\000\\000\\000\\000\\039\\005\\000\\000\\033\\004\\033\\004\\\n\\033\\004\\033\\004\\102\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\095\\007\\124\\064\\016\\047\\033\\004\\033\\004\\000\\000\\000\\000\\000\\000\\\n\\131\\005\\016\\047\\131\\005\\139\\001\\009\\003\\000\\000\\016\\047\\000\\000\\\n\\033\\004\\033\\004\\102\\000\\234\\005\\006\\007\\032\\007\\157\\005\\227\\005\\\n\\164\\006\\107\\007\\102\\000\\091\\004\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\109\\007\\157\\005\\157\\005\\016\\047\\000\\000\\124\\064\\062\\048\\110\\007\\\n\\111\\007\\033\\004\\000\\000\\102\\000\\016\\047\\016\\047\\000\\000\\033\\004\\\n\\033\\004\"\n\nlet yyrindex = 
\"\\000\\000\\\n\\126\\008\\127\\008\\000\\000\\000\\000\\000\\000\\000\\000\\106\\069\\000\\000\\\n\\000\\000\\039\\064\\000\\000\\000\\000\\214\\002\\242\\005\\000\\000\\000\\000\\\n\\221\\067\\101\\066\\099\\067\\209\\064\\139\\003\\000\\000\\106\\069\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\248\\067\\193\\017\\000\\000\\\n\\000\\000\\209\\064\\000\\000\\000\\000\\200\\004\\096\\000\\042\\004\\000\\000\\\n\\000\\000\\000\\000\\060\\000\\000\\000\\000\\000\\209\\064\\225\\007\\000\\000\\\n\\000\\000\\242\\005\\209\\064\\000\\000\\000\\000\\021\\043\\103\\016\\000\\000\\\n\\136\\044\\000\\000\\060\\000\\120\\043\\000\\000\\000\\000\\067\\044\\000\\000\\\n\\000\\000\\000\\000\\081\\053\\000\\000\\000\\000\\102\\053\\000\\000\\021\\043\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\035\\025\\221\\027\\058\\024\\\n\\174\\024\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\214\\002\\099\\004\\200\\004\\062\\000\\225\\007\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\218\\012\\000\\000\\000\\000\\111\\053\\146\\053\\000\\000\\062\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\167\\053\\000\\000\\000\\000\\000\\000\\\n\\113\\005\\113\\005\\000\\000\\188\\012\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\240\\016\\000\\000\\\n\\000\\000\\000\\000\\151\\015\\000\\000\\163\\014\\000\\000\\000\\000\\000\\000\\\n\\221\\067\\229\\068\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\014\\049\\000\\000\\000\\000\\\n\\255\\001\\098\\003\\000\\000\\000\\000\\000\\000\\050\\005\\000\\000\\125\\049\\\n\\000\\000\\000\\000\\000\\000\\117\\054\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\221\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\053\\068\\000\\000\\000\\000\\000\\000\\135\\255\\016\\002\\000\\000\\\n\\214\\255\\000\\000\\000\\000\\076\\000\\000\\000\\
000\\000\\069\\255\\000\\000\\\n\\040\\004\\000\\000\\215\\255\\131\\000\\000\\000\\245\\005\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\038\\007\\004\\054\\038\\007\\214\\002\\026\\007\\042\\004\\080\\068\\\n\\000\\000\\000\\000\\000\\000\\012\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\028\\056\\114\\056\\139\\003\\000\\000\\000\\000\\200\\056\\030\\057\\000\\000\\\n\\014\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\038\\007\\000\\000\\\n\\217\\003\\000\\000\\008\\003\\000\\000\\026\\007\\000\\000\\000\\000\\000\\000\\\n\\084\\006\\000\\000\\000\\000\\000\\000\\000\\000\\060\\000\\132\\050\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\057\\034\\000\\000\\000\\000\\\n\\248\\067\\000\\000\\120\\043\\141\\068\\000\\000\\000\\000\\041\\005\\000\\000\\\n\\031\\007\\000\\000\\055\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\221\\022\\151\\025\\\n\\000\\000\\000\\000\\000\\000\\011\\026\\128\\026\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\006\\000\\000\\000\\000\\000\\000\\031\\007\\\n\\000\\000\\000\\000\\000\\000\\125\\001\\000\\000\\120\\007\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\147\\255\\000\\000\\098\\007\\\n\\000\\000\\102\\007\\116\\007\\000\\000\\000\\000\\099\\004\\180\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\045\\000\\000\\000\\194\\000\\090\\000\\\n\\131\\000\\000\\000\\245\\005\\000\\000\\066\\000\\000\\000\\026\\007\\101\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\038\\007\\117\\054\\000\\000\\155\\048\\244\\026\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\164\\005\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\068\\017\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\066\\007\\000\\000\\198\\055\\021\\043\\207\\002\\000\\000\\000\\000\\\n\\104\\027\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\085\\255\\000\\000\\\n\\000\\000\\196\\000\\000\\000\\000\\000\\000\\000\\069\\004\\000\\000\\162\\000\\\n\\000\\000\\000\\000\\041\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\026\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\001\\003\\000\\000\\000\\000\\038\\007\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\096\\037\\205\\037\\053\\038\\174\\034\\208\\039\\157\\038\\034\\035\\150\\035\\\n\\011\\036\\127\\036\\127\\031\\081\\028\\197\\028\\243\\036\\244\\031\\104\\032\\\n\\005\\039\\058\\029\\220\\032\\081\\033\\197\\033\\000\\000\\000\\000\\174\\029\\\n\\000\\000\\000\\000
\\111\\003\\000\\000\\164\\005\\051\\040\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\082\\018\\000\\000\\000\\000\\000\\000\\081\\023\\198\\023\\\n\\000\\000\\000\\000\\000\\000\\105\\022\\000\\000\\000\\000\\109\\039\\025\\053\\\n\\066\\007\\000\\000\\000\\000\\005\\004\\030\\006\\146\\053\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\001\\003\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\193\\049\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\246\\044\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\205\\045\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\048\\046\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\102\\255\\\n\\000\\000\\000\\000\\222\\000\\162\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\202\\006\\000\\000\\093\\005\\\n\\000\\000\\225\\003\\000\\000\\000\\000\\000\\000\\165\\005\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\090\\007\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\238\\039\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\034\\030\\000\\000\\000\\000\\000\\000\\038\\065\\000\\000\\\n\\169\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\025\\001\\000\\000\\\n\\000\\000\\084\\255\\000\\000\\169\\255\\000\\000\\000\\000\\185\\255\\000\\000\\\n\\097\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\064\\007\\065\\007\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\136\\003\\000\\000\\000\\000\\213\\005\\\n\\182\\004\\000\\000\\074\\006\\000\\000\\191\\002\\105\\000\\139\\000\\143\\000\\\n\\000\\000\\000\\000\\000\\000\\053\\068\\185\\040\\000\\000\\000\\000\\000\\000\\\n\\00
0\\000\\000\\000\\021\\043\\000\\000\\000\\000\\000\\000\\234\\004\\021\\043\\\n\\053\\068\\228\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\131\\000\\000\\000\\\n\\245\\005\\000\\000\\139\\003\\000\\000\\000\\000\\000\\000\\213\\005\\000\\000\\\n\\000\\000\\090\\007\\000\\000\\198\\006\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\018\\005\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\146\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\047\\002\\000\\000\\000\\000\\\n\\087\\255\\000\\000\\210\\000\\000\\000\\000\\000\\147\\046\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\197\\000\\000\\000\\229\\000\\\n\\000\\000\\125\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\068\\007\\000\\000\\000\\000\\099\\007\\\n\\012\\050\\000\\000\\074\\050\\000\\000\\000\\000\\040\\011\\238\\039\\000\\000\\\n\\021\\043\\000\\000\\000\\000\\174\\001\\000\\000\\049\\255\\073\\007\\073\\007\\\n\\068\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\089\\045\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\136\\255\\000\\000\\000\\000\\122\\007\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\021\\043\\\n\\028\\041\\000\\000\\127\\010\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\050\\038\\065\\139\\004\\225\\002\\
136\\004\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\163\\051\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\021\\043\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\016\\052\\028\\041\\000\\000\\000\\000\\000\\000\\\n\\198\\018\\000\\000\\058\\019\\000\\000\\000\\000\\000\\000\\155\\040\\000\\000\\\n\\175\\019\\000\\000\\035\\020\\000\\000\\151\\020\\000\\000\\000\\000\\000\\000\\\n\\235\\003\\000\\000\\162\\050\\000\\000\\001\\003\\240\\047\\000\\000\\119\\007\\\n\\000\\000\\000\\000\\191\\047\\146\\053\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\125\\001\\000\\000\\000\\000\\000\\000\\190\\057\\\n\\000\\000\\000\\000\\128\\007\\248\\046\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\186\\255\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\048\\004\\000\\000\\000\\000\\021\\043\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\220\\255\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\012\\005\\000\\000\\125\\004\\000\\000\\203\\004\\000\\000\\000\\000\\062\\005\\\n\\000\\000\\000\\000\\151\\030\\231\\041\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\120\\003\\136\\004\\058\\003\\136\\004\\000\\000\\\n\\011\\031\\228\\001\\000\\000\\115\\007\\000\\000\\063\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\069\\007\\000\\000\\000\\000\\000\\000\\063\\001\\069\\007\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\229\\015\\091\\047\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\068\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\074\\042\\021\\043\\000\\000\\\n\\000\\000\\103\\001\\000\\000\\000\\000\\000\\000\\148\\001\\000\\000\\000\\000\\\n\\000\\000\\254\\040\\175\\008\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\026\\012\\000\\000\\000\\000\\000\\000\\136\\004\\136\\004\\\n\\108\\007\\000\\000\\099\\007\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\118\\007\\199\\049\\069\\052\\000\\000\\122\\052\\000\\000\\000\\000\\\n\\249\\050\\028\\041\\000\\000\\000\\000\\000\\000\\028\\041\\000\\000\\097\\041\\\n\\201\\041\\000\\000\\012\\021\\000\\000\\128\\021\\000\\000\\244\\021\\044\\042\\\n\\143\\042\\247\\042\\049\\051\\042\\048\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\044\\001\\000\\000\\028\\041\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\112\\007\\108\\007\\000\\000\\114\\007\\099\\007\\000\\000\\249\\050\\000\\000\\\n\\178\\052\\208\\052\\056\\006\\099\\007\\000\\000\\219\\051\\000\\000\\000\\000\\\n\\000\\000\\092\\052\\021\\043\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\028\\041\\000\\000
\\000\\000\\000\\000\\106\\010\\238\\013\\\n\\000\\000\\156\\050\\000\\000\\000\\000\\000\\000\\028\\015\\146\\053\\000\\000\\\n\\000\\000\\000\\000\\115\\003\\193\\002\\000\\000\\000\\000\\000\\000\\249\\004\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\111\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\219\\051\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\092\\052\\000\\000\\139\\039\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\090\\043\\189\\043\\037\\044\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\028\\015\\000\\000\\000\\000\\000\\000\\031\\007\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\082\\007\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\099\\007\\008\\053\\000\\000\\000\\000\\000\\000\\\n\\139\\039\\139\\039\\000\\000\\241\\015\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\019\\005\\161\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\168\\255\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\181\\047\\139\\039\\000\\000\\000\\000\\000\\000\\000\\000\\000\\050\\021\\006\\\n\\120\\003\\058\\003\\243\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\032\\002\\071\\003\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\117\\007\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\182\\002\\105\\051\\243\\004\\243\\004\\125\\007\\126\\007\\000\\000\\130\\007\\\n\\099\\007\\000\\000\\243\\004\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\196\\003\\000\\000\\243\\004\\000\\000\\000\\000\\000\\000\\212\\003\\\n\\237\\004\"\n\nlet yygindex = 
\"\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\020\\000\\183\\255\\037\\000\\\n\\168\\000\\184\\005\\119\\253\\000\\000\\166\\254\\147\\005\\096\\255\\145\\008\\\n\\232\\012\\061\\254\\077\\005\\253\\255\\063\\014\\144\\252\\036\\003\\247\\255\\\n\\000\\000\\046\\000\\016\\000\\021\\000\\027\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\030\\000\\035\\000\\040\\000\\000\\000\\255\\255\\003\\000\\093\\009\\\n\\084\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\041\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\010\\255\\059\\252\\000\\000\\000\\000\\\n\\000\\000\\004\\000\\148\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\010\\003\\056\\000\\112\\251\\081\\255\\136\\253\\214\\251\\\n\\048\\253\\185\\252\\087\\251\\199\\003\\087\\003\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\211\\253\\000\\000\\000\\000\\000\\000\\042\\000\\\n\\082\\255\\014\\006\\085\\005\\100\\005\\000\\000\\000\\000\\083\\255\\048\\000\\\n\\000\\000\\000\\000\\170\\255\\035\\002\\103\\253\\160\\006\\187\\010\\173\\011\\\n\\000\\000\\000\\000\\000\\000\\131\\255\\000\\000\\006\\013\\182\\006\\006\\000\\\n\\104\\255\\048\\003\\121\\007\\000\\000\\124\\007\\165\\006\\244\\010\\176\\253\\\n\\000\\000\\218\\000\\000\\000\\000\\000\\000\\000\\198\\003\\090\\005\\152\\255\\\n\\254\\004\\000\\000\\000\\000\\000\\000\\000\\000\\227\\000\\000\\000\\034\\007\\\n\\145\\255\\042\\007\\081\\006\\083\\008\\000\\000\\000\\000\\060\\004\\000\\000\\\n\\000\\000\\129\\007\\233\\253\\016\\005\\193\\251\\101\\251\\000\\252\\028\\253\\\n\\000\\000\\204\\252\\000\\000\\074\\004\\000\\000\\000\\000\\119\\251\\088\\255\\\n\\101\\253\\062\\006\\091\\007\\000\\000\\000\\000\\232\\003\\000\\000\\000\\000\\\n\\253\\003\\243\\252\\000\\000\\200\\003\\108\\004\\000\\000\\179\\253\\135\\002\\\n\\155\\255\\000\\000\\000\\000\\192\\005\\147\\254\\157\\255\\199\\254\\151\\255\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\
000\\000\\000\\000\\000\\000\\\n\\055\\255\\000\\000\"\n\nlet yytablesize = 19255\nlet yytable = \"\\126\\000\\\n\\102\\000\\151\\000\\212\\001\\213\\001\\103\\000\\203\\001\\119\\001\\117\\001\\\n\\251\\001\\230\\001\\128\\001\\168\\000\\118\\001\\157\\001\\086\\002\\026\\003\\\n\\137\\001\\096\\000\\107\\001\\221\\001\\053\\000\\151\\001\\097\\000\\061\\003\\\n\\151\\003\\203\\002\\063\\003\\123\\001\\098\\000\\190\\003\\111\\001\\099\\000\\\n\\198\\000\\162\\004\\176\\001\\024\\003\\100\\000\\143\\001\\126\\003\\125\\000\\\n\\123\\002\\101\\000\\106\\000\\241\\001\\043\\004\\242\\001\\060\\000\\139\\004\\\n\\027\\004\\074\\001\\248\\001\\090\\004\\052\\004\\051\\005\\034\\005\\222\\004\\\n\\169\\000\\120\\001\\030\\005\\034\\000\\131\\003\\071\\000\\039\\001\\102\\002\\\n\\162\\000\\103\\002\\181\\001\\220\\004\\084\\001\\050\\003\\252\\000\\184\\000\\\n\\039\\005\\143\\003\\031\\001\\171\\000\\037\\005\\194\\003\\001\\004\\008\\000\\\n\\191\\000\\206\\002\\162\\000\\087\\001\\080\\001\\009\\002\\023\\002\\173\\000\\\n\\060\\000\\038\\001\\102\\000\\216\\001\\197\\004\\231\\003\\103\\000\\098\\002\\\n\\184\\004\\195\\003\\243\\004\\069\\004\\206\\004\\161\\001\\102\\000\\023\\002\\\n\\075\\001\\084\\001\\103\\000\\096\\000\\126\\000\\006\\002\\087\\001\\126\\000\\\n\\097\\000\\126\\000\\126\\000\\131\\005\\232\\001\\218\\002\\098\\000\\096\\000\\\n\\045\\001\\099\\000\\198\\001\\139\\001\\097\\000\\095\\001\\100\\000\\100\\001\\\n\\101\\001\\007\\002\\098\\000\\101\\000\\106\\000\\099\\000\\113\\002\\079\\001\\\n\\151\\000\\151\\000\\100\\000\\151\\000\\085\\005\\122\\001\\171\\000\\101\\000\\\n\\106\\000\\132\\005\\002\\004\\199\\001\\113\\002\\151\\000\\151\\000\\113\\002\\\n\\037\\005\\135\\001\\131\\003\\151\\004\\064\\002\\200\\001\\027\\005\\162\\000\\\n\\052\\002\\113\\002\\162\\000\\084\\001\\208\\003\\145\\004\\040\\003\\245\\001\\\n\\089\\001\\040\\003\\127\\001\\135\\000\\151\\000\\151\\000\\087\\001\\080\\001\\\n\\224\\001\\087\\001\\087\\001\\080\\00
1\\023\\002\\064\\002\\115\\002\\004\\002\\\n\\083\\001\\160\\001\\245\\001\\232\\003\\133\\005\\218\\003\\185\\004\\080\\002\\\n\\161\\001\\245\\001\\245\\001\\089\\001\\161\\001\\228\\001\\201\\001\\029\\001\\\n\\229\\001\\202\\001\\129\\002\\188\\001\\189\\001\\040\\004\\052\\003\\233\\001\\\n\\041\\003\\219\\002\\115\\002\\041\\003\\192\\001\\093\\001\\085\\001\\245\\001\\\n\\245\\001\\067\\002\\052\\003\\088\\001\\200\\003\\083\\001\\014\\004\\008\\002\\\n\\085\\001\\113\\002\\079\\001\\245\\001\\225\\001\\113\\002\\079\\001\\064\\002\\\n\\064\\002\\251\\003\\245\\001\\245\\001\\125\\002\\245\\001\\076\\001\\082\\005\\\n\\093\\001\\115\\002\\152\\002\\115\\002\\218\\003\\155\\004\\088\\001\\037\\003\\\n\\088\\005\\064\\002\\059\\004\\037\\005\\012\\002\\044\\001\\188\\004\\115\\002\\\n\\247\\004\\190\\004\\209\\003\\089\\001\\203\\002\\025\\005\\089\\001\\089\\001\\\n\\133\\002\\117\\001\\134\\002\\082\\002\\013\\002\\069\\002\\089\\004\\194\\005\\\n\\117\\001\\196\\005\\117\\001\\077\\001\\160\\001\\005\\002\\245\\001\\083\\001\\\n\\160\\001\\128\\001\\128\\001\\219\\003\\083\\003\\026\\005\\041\\004\\109\\002\\\n\\162\\000\\178\\002\\053\\003\\089\\005\\024\\002\\130\\002\\213\\001\\060\\000\\\n\\116\\002\\060\\000\\140\\005\\056\\003\\163\\001\\203\\002\\059\\003\\122\\002\\\n\\093\\001\\081\\002\\085\\001\\169\\000\\093\\001\\085\\001\\088\\001\\201\\003\\\n\\211\\005\\088\\001\\088\\001\\034\\000\\015\\004\\071\\000\\216\\003\\156\\004\\\n\\052\\003\\085\\002\\071\\004\\158\\002\\013\\005\\015\\005\\177\\001\\163\\001\\\n\\038\\001\\028\\000\\178\\001\\076\\001\\060\\000\\010\\003\\166\\000\\082\\002\\\n\\229\\002\\179\\001\\019\\004\\196\\001\\180\\001\\034\\000\\023\\002\\071\\000\\\n\\083\\002\\154\\001\\217\\001\\075\\001\\059\\004\\012\\005\\248\\004\\216\\002\\\n\\086\\001\\216\\003\\241\\002\\154\\001\\083\\002\\087\\002\\027\\004\\162\\000\\\n\\079\\002\\082\\002\\086\\001\\069\\002\\049\\005\\114\\004\\126\\000\\177\\001\\\n\\036\\001\\084\\002\\2
16\\002\\178\\001\\111\\002\\126\\000\\107\\005\\126\\000\\\n\\084\\003\\216\\002\\179\\001\\085\\002\\155\\001\\180\\001\\126\\000\\126\\000\\\n\\081\\002\\126\\000\\150\\002\\160\\005\\162\\000\\205\\004\\155\\001\\163\\001\\\n\\071\\002\\072\\002\\077\\002\\163\\001\\076\\002\\126\\000\\075\\002\\095\\001\\\n\\216\\002\\126\\000\\169\\004\\075\\002\\253\\003\\151\\000\\151\\000\\034\\000\\\n\\028\\000\\071\\000\\217\\003\\216\\002\\170\\003\\166\\000\\082\\002\\085\\002\\\n\\216\\002\\151\\002\\012\\004\\216\\002\\002\\002\\216\\002\\076\\001\\083\\002\\\n\\045\\001\\222\\002\\039\\004\\151\\002\\167\\002\\162\\000\\151\\000\\151\\000\\\n\\151\\000\\139\\003\\048\\001\\147\\004\\127\\003\\154\\001\\151\\000\\006\\001\\\n\\154\\001\\127\\001\\127\\001\\179\\001\\086\\001\\018\\004\\111\\005\\086\\001\\\n\\084\\002\\156\\001\\083\\002\\078\\001\\162\\001\\038\\002\\079\\002\\010\\002\\\n\\111\\002\\115\\004\\085\\002\\151\\000\\151\\000\\235\\003\\216\\002\\018\\002\\\n\\151\\000\\162\\000\\022\\002\\243\\003\\151\\000\\135\\004\\006\\004\\224\\001\\\n\\155\\001\\119\\005\\074\\005\\155\\001\\128\\003\\157\\001\\150\\002\\162\\001\\\n\\126\\000\\126\\000\\162\\000\\039\\002\\150\\002\\065\\005\\162\\000\\203\\002\\\n\\077\\002\\060\\000\\106\\004\\148\\001\\075\\002\\055\\002\\162\\000\\126\\000\\\n\\151\\000\\102\\000\\013\\004\\079\\001\\058\\002\\103\\000\\177\\001\\163\\000\\\n\\164\\004\\151\\000\\178\\001\\168\\002\\117\\001\\151\\002\\122\\001\\224\\001\\\n\\069\\003\\179\\001\\096\\000\\041\\005\\180\\001\\157\\001\\026\\001\\097\\000\\\n\\210\\002\\212\\002\\151\\000\\070\\003\\071\\003\\098\\000\\245\\002\\038\\002\\\n\\099\\000\\038\\002\\213\\001\\081\\001\\112\\002\\100\\000\\010\\005\\223\\002\\\n\\114\\002\\198\\001\\101\\000\\106\\000\\156\\001\\097\\005\\078\\001\\162\\001\\\n\\226\\002\\015\\003\\017\\003\\162\\001\\031\\002\\191\\000\\114\\002\\075\\005\\\n\\169\\002\\114\\002\\074\\003\\214\\002\\061\\004\\039\\002\\064\\002\\039\\002\\\n\\149\\
001\\110\\005\\199\\001\\114\\002\\162\\000\\151\\000\\107\\004\\061\\005\\\n\\157\\001\\150\\003\\152\\003\\213\\001\\200\\001\\038\\005\\214\\002\\150\\002\\\n\\105\\003\\245\\004\\116\\002\\189\\004\\060\\000\\214\\002\\135\\002\\064\\002\\\n\\134\\003\\184\\000\\046\\003\\136\\002\\092\\004\\198\\001\\087\\002\\177\\001\\\n\\177\\005\\014\\005\\191\\000\\178\\001\\123\\003\\147\\001\\135\\003\\171\\003\\\n\\143\\004\\137\\002\\179\\001\\214\\002\\135\\002\\180\\001\\168\\000\\022\\003\\\n\\179\\005\\126\\000\\052\\003\\141\\003\\126\\000\\201\\001\\199\\001\\214\\002\\\n\\202\\001\\171\\002\\087\\002\\126\\000\\214\\002\\126\\000\\126\\000\\214\\002\\\n\\200\\001\\214\\002\\075\\003\\114\\002\\081\\002\\135\\002\\204\\003\\114\\002\\\n\\205\\003\\064\\002\\064\\002\\126\\000\\076\\003\\048\\003\\057\\003\\135\\002\\\n\\151\\000\\172\\000\\125\\005\\087\\005\\214\\002\\126\\000\\193\\001\\002\\004\\\n\\146\\001\\202\\002\\162\\000\\064\\002\\028\\000\\162\\000\\095\\005\\151\\000\\\n\\151\\000\\166\\000\\082\\002\\067\\003\\162\\000\\193\\003\\086\\001\\137\\004\\\n\\162\\000\\201\\001\\214\\002\\083\\002\\202\\001\\166\\003\\142\\004\\194\\001\\\n\\214\\002\\168\\000\\095\\005\\078\\003\\002\\004\\126\\000\\136\\003\\126\\000\\\n\\135\\002\\138\\002\\189\\003\\135\\002\\126\\000\\089\\003\\169\\000\\218\\001\\\n\\166\\005\\151\\000\\203\\002\\147\\001\\084\\002\\061\\003\\058\\003\\011\\004\\\n\\063\\003\\126\\000\\151\\000\\180\\003\\151\\000\\218\\001\\085\\002\\100\\004\\\n\\102\\004\\137\\005\\248\\003\\046\\001\\172\\000\\021\\003\\224\\001\\028\\000\\\n\\041\\005\\095\\004\\250\\003\\117\\001\\162\\002\\219\\001\\012\\000\\016\\004\\\n\\137\\005\\092\\001\\093\\001\\177\\001\\114\\003\\028\\000\\235\\002\\178\\001\\\n\\214\\002\\181\\003\\004\\004\\219\\001\\137\\003\\234\\003\\179\\001\\236\\002\\\n\\008\\004\\180\\001\\197\\005\\227\\001\\029\\000\\151\\000\\163\\002\\029\\003\\\n\\030\\003\\152\\003\\213\\001\\104\\005\\033\\000\\106\\005\\182\\003\\203
\\002\\\n\\162\\000\\169\\000\\220\\001\\087\\002\\224\\001\\162\\000\\060\\000\\040\\003\\\n\\138\\003\\048\\000\\198\\005\\199\\002\\040\\003\\184\\003\\122\\001\\203\\002\\\n\\220\\001\\185\\003\\122\\001\\045\\001\\126\\000\\196\\004\\122\\001\\048\\000\\\n\\122\\001\\199\\002\\177\\001\\046\\001\\122\\001\\122\\001\\178\\001\\061\\005\\\n\\122\\001\\162\\000\\235\\002\\216\\002\\178\\003\\179\\001\\169\\002\\183\\003\\\n\\180\\001\\122\\001\\083\\001\\236\\002\\216\\002\\175\\005\\176\\005\\116\\000\\\n\\099\\001\\041\\003\\164\\003\\170\\002\\087\\002\\102\\000\\041\\003\\235\\004\\\n\\203\\002\\103\\000\\087\\002\\169\\002\\197\\003\\017\\004\\118\\004\\241\\003\\\n\\126\\000\\242\\004\\116\\000\\126\\000\\139\\002\\218\\001\\096\\000\\167\\005\\\n\\094\\005\\116\\000\\078\\004\\097\\000\\126\\000\\194\\001\\106\\002\\000\\004\\\n\\122\\001\\098\\000\\095\\003\\096\\003\\099\\000\\126\\000\\198\\001\\122\\001\\\n\\162\\000\\100\\000\\045\\001\\151\\000\\216\\002\\028\\000\\101\\000\\106\\000\\\n\\116\\000\\194\\001\\214\\002\\219\\001\\168\\005\\202\\002\\162\\000\\171\\002\\\n\\115\\003\\122\\001\\122\\001\\116\\000\\122\\001\\122\\001\\220\\005\\199\\001\\\n\\029\\000\\123\\001\\116\\000\\116\\000\\179\\003\\116\\000\\125\\003\\015\\000\\\n\\033\\000\\200\\001\\169\\005\\085\\001\\171\\002\\214\\002\\147\\000\\122\\001\\\n\\106\\002\\106\\002\\165\\003\\112\\001\\142\\000\\204\\001\\214\\002\\169\\002\\\n\\220\\001\\221\\004\\142\\000\\204\\001\\115\\001\\151\\000\\213\\001\\048\\000\\\n\\108\\003\\147\\000\\106\\002\\087\\002\\136\\005\\060\\001\\061\\001\\126\\000\\\n\\147\\000\\110\\001\\109\\003\\109\\001\\193\\001\\214\\002\\116\\000\\126\\000\\\n\\044\\003\\151\\000\\201\\001\\170\\005\\151\\000\\202\\001\\151\\000\\151\\000\\\n\\151\\000\\179\\004\\210\\005\\126\\000\\151\\000\\150\\001\\147\\000\\147\\000\\\n\\087\\002\\150\\004\\151\\000\\087\\002\\236\\001\\194\\001\\214\\002\\180\\002\\\n\\181\\002\\122\\000\\147\\000\\066\\001\\202\\002\\162\\00
0\\126\\000\\064\\004\\\n\\198\\003\\147\\000\\147\\000\\045\\000\\147\\000\\246\\001\\048\\000\\210\\002\\\n\\171\\002\\151\\000\\163\\004\\147\\001\\071\\001\\210\\003\\195\\004\\247\\002\\\n\\134\\000\\179\\001\\106\\001\\087\\004\\180\\001\\111\\004\\106\\001\\046\\003\\\n\\246\\001\\237\\001\\236\\003\\224\\001\\248\\002\\106\\001\\012\\000\\246\\001\\\n\\246\\001\\012\\000\\189\\000\\134\\000\\047\\003\\182\\002\\097\\004\\092\\001\\\n\\093\\001\\106\\001\\134\\000\\012\\000\\012\\000\\147\\000\\115\\001\\012\\000\\\n\\149\\001\\228\\001\\236\\004\\120\\001\\229\\001\\246\\001\\246\\001\\253\\002\\\n\\012\\000\\012\\000\\012\\000\\012\\000\\237\\003\\190\\000\\087\\002\\090\\002\\\n\\134\\000\\246\\001\\249\\002\\216\\002\\162\\000\\087\\002\\012\\000\\012\\000\\\n\\246\\001\\246\\001\\048\\003\\246\\001\\134\\000\\126\\000\\202\\003\\068\\003\\\n\\106\\001\\254\\002\\213\\001\\126\\000\\134\\000\\148\\004\\134\\000\\107\\000\\\n\\087\\002\\239\\004\\012\\000\\122\\000\\216\\002\\012\\000\\048\\005\\012\\000\\\n\\012\\000\\012\\000\\012\\000\\071\\005\\162\\000\\045\\001\\216\\002\\012\\000\\\n\\012\\000\\120\\002\\107\\000\\040\\003\\074\\003\\062\\004\\012\\000\\126\\000\\\n\\146\\002\\107\\000\\146\\002\\203\\003\\246\\001\\031\\005\\054\\004\\055\\004\\\n\\151\\000\\126\\000\\012\\000\\146\\002\\012\\000\\126\\000\\012\\000\\134\\000\\\n\\166\\000\\081\\002\\220\\002\\042\\005\\065\\004\\066\\004\\224\\001\\063\\004\\\n\\107\\000\\133\\001\\012\\000\\072\\004\\221\\002\\012\\000\\147\\001\\216\\002\\\n\\185\\001\\012\\000\\216\\002\\107\\000\\086\\004\\041\\003\\117\\000\\147\\001\\\n\\190\\000\\028\\000\\062\\005\\107\\000\\112\\005\\107\\000\\166\\000\\082\\002\\\n\\146\\002\\170\\004\\025\\002\\200\\005\\140\\001\\174\\004\\160\\004\\011\\000\\\n\\083\\002\\117\\000\\194\\001\\224\\001\\087\\002\\167\\000\\126\\000\\253\\000\\\n\\117\\000\\123\\001\\155\\001\\152\\003\\213\\001\\123\\001\\119\\001\\117\\001\\\n\\126\\000\\123\\001\\016\\000\\123\\001\\1
18\\001\\185\\001\\194\\001\\123\\001\\\n\\123\\001\\084\\002\\193\\004\\123\\001\\155\\001\\138\\001\\107\\000\\117\\000\\\n\\201\\005\\214\\002\\145\\001\\085\\002\\123\\001\\022\\000\\087\\002\\224\\001\\\n\\093\\005\\048\\000\\117\\000\\177\\001\\214\\002\\162\\000\\198\\004\\178\\001\\\n\\087\\002\\117\\000\\117\\000\\126\\000\\117\\000\\254\\000\\179\\001\\048\\000\\\n\\212\\004\\180\\001\\144\\000\\255\\000\\108\\005\\115\\001\\163\\000\\022\\005\\\n\\012\\003\\162\\000\\177\\002\\002\\005\\063\\002\\118\\002\\064\\002\\145\\000\\\n\\253\\004\\048\\000\\080\\003\\123\\001\\152\\003\\213\\001\\129\\005\\155\\001\\\n\\065\\002\\214\\002\\123\\001\\172\\003\\151\\000\\216\\002\\185\\001\\209\\001\\\n\\044\\000\\087\\002\\087\\002\\190\\000\\146\\002\\117\\000\\072\\003\\214\\002\\\n\\077\\003\\126\\000\\173\\003\\174\\003\\123\\001\\123\\001\\162\\000\\123\\001\\\n\\123\\001\\162\\000\\122\\000\\145\\000\\139\\000\\216\\002\\147\\002\\141\\000\\\n\\194\\001\\209\\001\\119\\002\\216\\002\\126\\000\\126\\000\\126\\000\\214\\002\\\n\\148\\002\\168\\004\\123\\001\\144\\000\\048\\000\\171\\004\\145\\000\\152\\001\\\n\\090\\002\\087\\002\\175\\004\\002\\005\\194\\001\\145\\000\\206\\002\\146\\002\\\n\\149\\001\\017\\005\\162\\000\\031\\002\\149\\001\\031\\002\\144\\000\\214\\002\\\n\\149\\001\\040\\003\\149\\001\\186\\004\\187\\004\\144\\000\\149\\001\\192\\003\\\n\\216\\002\\191\\004\\149\\001\\145\\000\\090\\002\\135\\001\\033\\005\\216\\002\\\n\\035\\005\\166\\000\\126\\000\\149\\001\\031\\002\\081\\002\\021\\005\\145\\000\\\n\\205\\002\\116\\005\\126\\000\\144\\000\\214\\002\\028\\005\\145\\000\\145\\000\\\n\\078\\005\\145\\000\\200\\004\\045\\001\\126\\000\\214\\002\\151\\000\\144\\000\\\n\\183\\001\\214\\002\\126\\000\\041\\003\\000\\005\\028\\000\\144\\000\\144\\000\\\n\\216\\002\\144\\000\\166\\000\\082\\002\\122\\000\\216\\002\\214\\002\\162\\000\\\n\\214\\002\\214\\002\\135\\001\\126\\000\\083\\002\\214\\002\\240\\003\\150\\002\\\n\\177\\001\\149\\001\\029\\
005\\214\\002\\178\\001\\214\\002\\119\\002\\162\\000\\\n\\172\\001\\096\\001\\145\\000\\179\\001\\077\\005\\126\\000\\180\\001\\126\\000\\\n\\186\\001\\144\\003\\080\\005\\149\\001\\149\\001\\084\\002\\149\\001\\149\\001\\\n\\214\\002\\122\\000\\144\\000\\173\\001\\151\\002\\216\\002\\087\\002\\085\\002\\\n\\214\\002\\169\\003\\013\\003\\091\\005\\150\\002\\176\\003\\214\\002\\151\\000\\\n\\214\\002\\149\\001\\083\\005\\164\\000\\214\\002\\086\\005\\164\\000\\214\\002\\\n\\011\\005\\164\\000\\164\\000\\120\\005\\097\\001\\164\\000\\164\\000\\164\\000\\\n\\164\\000\\164\\000\\162\\000\\164\\000\\214\\002\\162\\000\\162\\000\\019\\005\\\n\\020\\005\\151\\002\\164\\000\\146\\002\\213\\003\\126\\000\\164\\000\\137\\002\\\n\\214\\002\\164\\000\\164\\000\\214\\002\\135\\005\\214\\005\\163\\000\\132\\004\\\n\\126\\000\\043\\003\\164\\000\\164\\000\\146\\002\\090\\002\\164\\000\\164\\000\\\n\\107\\001\\187\\001\\162\\000\\126\\000\\107\\001\\216\\002\\144\\005\\212\\002\\\n\\122\\005\\123\\005\\216\\001\\126\\005\\127\\005\\162\\000\\107\\001\\033\\001\\\n\\171\\005\\133\\004\\126\\000\\126\\000\\172\\005\\143\\005\\146\\002\\107\\001\\\n\\126\\000\\126\\000\\212\\002\\162\\000\\162\\000\\216\\002\\163\\000\\174\\001\\\n\\145\\005\\212\\002\\216\\002\\216\\002\\148\\001\\164\\000\\164\\000\\164\\000\\\n\\034\\000\\164\\000\\162\\000\\161\\005\\216\\002\\003\\003\\090\\002\\126\\000\\\n\\073\\005\\040\\003\\175\\001\\008\\000\\090\\002\\002\\005\\126\\000\\002\\005\\\n\\212\\002\\117\\001\\004\\003\\126\\000\\149\\000\\117\\001\\107\\001\\126\\002\\\n\\180\\005\\181\\005\\034\\000\\212\\002\\117\\001\\216\\002\\060\\005\\117\\001\\\n\\091\\004\\144\\001\\146\\002\\212\\002\\146\\002\\212\\002\\152\\001\\216\\002\\\n\\126\\000\\226\\001\\152\\001\\126\\000\\212\\002\\164\\000\\164\\000\\193\\005\\\n\\031\\003\\126\\000\\126\\000\\041\\003\\152\\001\\234\\001\\198\\004\\105\\004\\\n\\214\\002\\182\\001\\146\\002\\204\\005\\112\\000\\152\\001\\113\\000\\114\\000\\\n\\028\\000\
\104\\000\\115\\000\\214\\002\\143\\000\\115\\001\\117\\000\\193\\001\\\n\\191\\001\\063\\002\\212\\002\\155\\002\\202\\005\\121\\005\\212\\002\\117\\001\\\n\\218\\005\\164\\000\\146\\002\\214\\002\\155\\001\\156\\002\\209\\005\\143\\000\\\n\\126\\002\\224\\005\\225\\005\\104\\000\\091\\002\\212\\002\\143\\000\\120\\000\\\n\\194\\001\\216\\005\\217\\005\\146\\003\\212\\002\\090\\002\\121\\000\\109\\001\\\n\\235\\001\\071\\000\\132\\000\\109\\001\\092\\002\\026\\002\\027\\002\\028\\002\\\n\\029\\002\\097\\003\\122\\000\\123\\000\\143\\000\\062\\002\\177\\003\\149\\005\\\n\\142\\000\\030\\002\\212\\002\\187\\003\\216\\002\\215\\003\\109\\001\\158\\005\\\n\\143\\000\\048\\000\\090\\002\\071\\000\\132\\000\\090\\002\\212\\002\\143\\000\\\n\\143\\000\\096\\001\\143\\000\\245\\003\\216\\002\\096\\001\\212\\002\\099\\001\\\n\\212\\002\\096\\001\\211\\003\\096\\001\\206\\002\\057\\005\\238\\001\\096\\001\\\n\\096\\001\\151\\001\\246\\003\\160\\001\\160\\001\\151\\001\\182\\005\\153\\003\\\n\\058\\005\\164\\000\\164\\000\\154\\003\\096\\001\\031\\002\\185\\005\\151\\001\\\n\\184\\001\\185\\001\\155\\003\\214\\002\\247\\003\\156\\003\\240\\001\\214\\002\\\n\\151\\001\\192\\005\\188\\003\\143\\000\\097\\001\\002\\003\\157\\003\\164\\000\\\n\\097\\001\\212\\002\\120\\001\\003\\003\\097\\001\\247\\001\\097\\001\\206\\001\\\n\\214\\002\\214\\002\\097\\001\\085\\003\\249\\002\\164\\000\\097\\001\\214\\002\\\n\\004\\003\\164\\000\\252\\001\\096\\001\\058\\004\\086\\003\\148\\002\\097\\001\\\n\\090\\002\\116\\004\\096\\001\\228\\001\\214\\002\\219\\005\\229\\001\\090\\002\\\n\\177\\001\\254\\001\\214\\002\\117\\004\\178\\001\\162\\000\\014\\002\\255\\001\\\n\\128\\005\\000\\002\\045\\004\\179\\001\\096\\001\\096\\001\\180\\001\\096\\001\\\n\\096\\001\\019\\002\\090\\002\\001\\002\\038\\004\\164\\000\\214\\002\\068\\002\\\n\\191\\001\\069\\002\\159\\002\\191\\001\\160\\002\\191\\001\\097\\001\\191\\001\\\n\\142\\000\\204\\001\\096\\001\\070\\002\\148\\001\\097\\001\\161\\002\\148\\00
2\\\n\\148\\001\\148\\002\\148\\002\\148\\002\\148\\001\\148\\002\\148\\001\\076\\001\\\n\\148\\002\\148\\002\\148\\001\\202\\002\\162\\000\\045\\001\\148\\001\\097\\001\\\n\\097\\001\\254\\004\\097\\001\\097\\001\\191\\001\\028\\000\\162\\000\\148\\001\\\n\\191\\001\\255\\004\\000\\005\\026\\002\\027\\002\\028\\002\\029\\002\\184\\002\\\n\\185\\002\\099\\001\\148\\002\\093\\004\\094\\004\\097\\001\\207\\002\\030\\002\\\n\\001\\005\\148\\002\\164\\000\\144\\001\\212\\002\\073\\002\\220\\003\\212\\002\\\n\\221\\003\\237\\004\\139\\002\\104\\004\\190\\000\\148\\002\\148\\002\\206\\002\\\n\\208\\002\\212\\002\\222\\003\\139\\002\\238\\004\\100\\002\\090\\002\\214\\002\\\n\\112\\004\\020\\004\\012\\000\\021\\004\\182\\001\\148\\001\\212\\002\\122\\000\\\n\\212\\002\\212\\002\\101\\002\\164\\000\\150\\002\\022\\004\\104\\002\\182\\001\\\n\\120\\004\\013\\000\\014\\000\\031\\002\\212\\002\\212\\002\\150\\002\\148\\001\\\n\\148\\001\\105\\002\\148\\001\\148\\001\\182\\001\\182\\001\\021\\000\\249\\002\\\n\\090\\002\\191\\001\\106\\002\\191\\001\\184\\002\\187\\002\\113\\002\\130\\004\\\n\\212\\002\\114\\002\\090\\002\\212\\002\\115\\002\\148\\001\\122\\000\\138\\004\\\n\\212\\002\\029\\000\\182\\001\\121\\002\\073\\001\\062\\002\\212\\002\\126\\002\\\n\\062\\002\\033\\000\\202\\002\\162\\000\\212\\002\\005\\005\\191\\001\\037\\000\\\n\\191\\001\\204\\002\\062\\002\\162\\000\\045\\001\\039\\000\\062\\002\\127\\002\\\n\\212\\002\\216\\002\\216\\002\\119\\002\\212\\002\\186\\002\\188\\002\\062\\002\\\n\\062\\002\\062\\002\\062\\002\\090\\002\\090\\002\\043\\000\\131\\002\\135\\002\\\n\\212\\002\\107\\002\\108\\002\\212\\002\\212\\002\\209\\002\\062\\002\\164\\000\\\n\\165\\004\\047\\000\\132\\002\\214\\002\\050\\000\\118\\001\\135\\002\\214\\002\\\n\\124\\002\\118\\001\\164\\002\\214\\002\\214\\002\\135\\002\\166\\002\\197\\002\\\n\\118\\001\\062\\002\\176\\002\\118\\001\\062\\002\\206\\002\\119\\002\\062\\002\\\n\\062\\002\\062\\002\\214\\002\\090\\002\\118\\001\\005\\005\\0
62\\002\\062\\002\\\n\\213\\002\\142\\002\\144\\002\\146\\002\\135\\002\\062\\002\\135\\002\\225\\002\\\n\\238\\002\\150\\002\\227\\002\\055\\005\\056\\005\\230\\002\\062\\002\\239\\002\\\n\\135\\002\\062\\002\\240\\002\\062\\002\\112\\000\\062\\002\\113\\000\\114\\000\\\n\\028\\000\\214\\002\\115\\000\\242\\002\\243\\002\\116\\000\\117\\000\\008\\003\\\n\\202\\004\\062\\002\\204\\004\\118\\001\\062\\002\\244\\002\\009\\003\\194\\002\\\n\\062\\002\\246\\002\\001\\003\\131\\002\\131\\002\\061\\001\\118\\000\\048\\000\\\n\\025\\003\\032\\003\\131\\002\\038\\003\\054\\003\\191\\001\\119\\000\\120\\000\\\n\\191\\001\\135\\002\\042\\003\\045\\003\\135\\002\\051\\003\\121\\000\\131\\002\\\n\\064\\003\\149\\001\\073\\003\\224\\002\\241\\004\\131\\002\\079\\003\\087\\003\\\n\\179\\001\\244\\004\\122\\000\\123\\000\\001\\000\\002\\000\\003\\000\\004\\000\\\n\\005\\000\\094\\003\\101\\003\\002\\002\\103\\003\\116\\003\\184\\002\\129\\003\\\n\\131\\002\\131\\002\\249\\002\\031\\002\\142\\003\\252\\002\\185\\000\\185\\000\\\n\\182\\001\\099\\001\\008\\005\\159\\003\\160\\003\\099\\001\\185\\000\\161\\003\\\n\\090\\002\\099\\001\\162\\003\\099\\001\\185\\000\\185\\000\\163\\003\\099\\001\\\n\\199\\003\\167\\003\\182\\001\\212\\003\\182\\001\\206\\003\\182\\001\\233\\003\\\n\\185\\000\\242\\003\\182\\001\\249\\003\\099\\001\\008\\000\\005\\004\\007\\004\\\n\\119\\002\\185\\000\\023\\005\\024\\005\\010\\004\\029\\004\\030\\004\\185\\000\\\n\\185\\000\\185\\000\\185\\000\\185\\000\\035\\004\\005\\005\\036\\004\\044\\004\\\n\\191\\001\\194\\000\\049\\004\\051\\004\\046\\004\\040\\005\\008\\000\\068\\004\\\n\\114\\001\\050\\005\\185\\000\\050\\004\\074\\004\\096\\004\\108\\004\\185\\000\\\n\\113\\004\\110\\004\\121\\004\\122\\004\\185\\000\\185\\000\\182\\001\\123\\004\\\n\\127\\004\\136\\004\\099\\001\\191\\001\\204\\002\\140\\004\\128\\004\\185\\000\\\n\\185\\000\\185\\000\\185\\000\\185\\000\\129\\004\\141\\004\\144\\001\\149\\004\\\n\\144\\001\\159\\004\\157\\004\\177\\004\\099\\
001\\099\\001\\070\\005\\099\\001\\\n\\099\\001\\185\\000\\161\\004\\144\\001\\182\\001\\192\\004\\172\\004\\112\\000\\\n\\166\\004\\113\\000\\114\\000\\028\\000\\173\\004\\115\\000\\158\\003\\176\\004\\\n\\115\\001\\117\\000\\099\\001\\082\\003\\219\\004\\204\\002\\223\\004\\005\\005\\\n\\194\\004\\005\\005\\006\\005\\009\\005\\212\\002\\018\\003\\016\\005\\212\\002\\\n\\182\\001\\036\\005\\160\\001\\093\\003\\018\\005\\206\\004\\096\\005\\052\\005\\\n\\067\\005\\212\\002\\120\\000\\053\\005\\166\\002\\054\\005\\100\\005\\076\\005\\\n\\081\\005\\121\\000\\084\\005\\099\\005\\105\\005\\113\\005\\212\\002\\164\\000\\\n\\212\\002\\212\\002\\109\\005\\118\\005\\134\\005\\122\\000\\123\\000\\147\\005\\\n\\148\\005\\150\\005\\151\\005\\156\\005\\118\\003\\212\\002\\157\\005\\159\\005\\\n\\178\\005\\042\\003\\039\\005\\183\\005\\191\\005\\207\\005\\062\\002\\208\\005\\\n\\212\\005\\062\\002\\215\\005\\221\\005\\222\\005\\034\\000\\071\\000\\026\\002\\\n\\212\\002\\034\\000\\214\\002\\062\\002\\071\\000\\047\\002\\216\\002\\062\\002\\\n\\212\\002\\044\\002\\191\\001\\214\\002\\120\\002\\042\\003\\212\\002\\144\\001\\\n\\062\\002\\062\\002\\062\\002\\062\\002\\212\\002\\150\\000\\008\\000\\046\\002\\\n\\114\\001\\102\\000\\144\\001\\223\\002\\224\\002\\194\\001\\182\\001\\062\\002\\\n\\212\\002\\214\\002\\137\\002\\049\\002\\212\\002\\144\\001\\166\\000\\135\\002\\\n\\183\\000\\182\\001\\136\\002\\135\\002\\218\\001\\214\\003\\015\\000\\136\\002\\\n\\212\\002\\138\\002\\062\\002\\212\\002\\141\\002\\062\\002\\230\\003\\120\\002\\\n\\062\\002\\062\\002\\062\\002\\191\\001\\142\\002\\143\\002\\144\\001\\062\\002\\\n\\062\\002\\139\\002\\182\\001\\195\\005\\066\\005\\141\\005\\062\\002\\112\\000\\\n\\122\\003\\113\\000\\114\\000\\028\\000\\048\\004\\115\\000\\190\\005\\011\\003\\\n\\115\\001\\117\\000\\062\\002\\081\\003\\062\\002\\211\\002\\062\\002\\079\\005\\\n\\078\\002\\077\\002\\056\\004\\191\\001\\151\\002\\023\\003\\028\\003\\163\\001\\\n\\149\\002\\007\\005\\062\\002\
\119\\004\\252\\004\\062\\002\\205\\005\\206\\005\\\n\\112\\003\\062\\002\\120\\000\\117\\002\\093\\002\\072\\005\\213\\005\\064\\005\\\n\\000\\000\\121\\000\\098\\005\\240\\004\\000\\000\\000\\000\\042\\003\\204\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\122\\000\\123\\000\\223\\005\\\n\\191\\001\\191\\001\\000\\000\\000\\000\\000\\000\\052\\001\\009\\004\\000\\000\\\n\\000\\000\\141\\001\\000\\000\\000\\000\\112\\000\\000\\000\\113\\000\\114\\000\\\n\\028\\000\\144\\001\\115\\000\\000\\000\\000\\000\\116\\000\\117\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\156\\001\\150\\000\\150\\000\\000\\000\\150\\000\\\n\\216\\002\\216\\002\\059\\001\\060\\001\\061\\001\\000\\000\\118\\000\\216\\002\\\n\\000\\000\\150\\000\\150\\000\\000\\000\\000\\000\\216\\002\\119\\000\\120\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\191\\001\\121\\000\\042\\003\\\n\\194\\002\\000\\000\\216\\002\\000\\000\\000\\000\\063\\001\\064\\001\\042\\003\\\n\\150\\000\\150\\000\\122\\000\\123\\000\\222\\001\\000\\000\\000\\000\\000\\000\\\n\\191\\001\\066\\001\\067\\001\\068\\001\\069\\001\\216\\002\\216\\002\\000\\000\\\n\\000\\000\\081\\004\\083\\004\\085\\004\\000\\000\\182\\001\\000\\000\\088\\004\\\n\\000\\000\\000\\000\\071\\001\\000\\000\\000\\000\\194\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\165\\000\\000\\000\\000\\000\\172\\000\\000\\000\\\n\\000\\000\\174\\000\\175\\000\\000\\000\\000\\000\\176\\000\\177\\000\\178\\000\\\n\\179\\000\\180\\000\\000\\000\\181\\000\\194\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\032\\001\\000\\000\\\n\\000\\000\\034\\001\\035\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\042\\003\\040\\001\\041\\001\\144\\001\\000\\000\\042\\001\\043\\001\\\n\\112\\000\\000\\000\\113\\000\\114\\000\\028\\000\\000\\000\\115\\000\\000\\000\\\n\\000\\000\\115\\001\\117\\000\\000\\000\\000\\000\\182\\001\\000\\000\\182\\001\\\n\\000\\000\\182
\\001\\000\\000\\144\\001\\182\\001\\000\\000\\000\\000\\000\\000\\\n\\042\\003\\000\\000\\000\\000\\000\\000\\000\\000\\144\\001\\015\\000\\000\\000\\\n\\191\\001\\015\\000\\191\\001\\120\\000\\000\\000\\104\\001\\105\\001\\106\\001\\\n\\000\\000\\108\\001\\121\\000\\015\\000\\015\\000\\000\\000\\000\\000\\015\\000\\\n\\000\\000\\000\\000\\204\\002\\000\\000\\000\\000\\000\\000\\122\\000\\123\\000\\\n\\015\\000\\015\\000\\015\\000\\015\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\012\\000\\042\\003\\015\\000\\015\\000\\\n\\000\\000\\000\\000\\042\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\191\\001\\000\\000\\089\\000\\014\\000\\153\\001\\154\\001\\066\\002\\\n\\000\\000\\000\\000\\015\\000\\000\\000\\000\\000\\015\\000\\000\\000\\000\\000\\\n\\090\\000\\015\\000\\015\\000\\000\\000\\000\\000\\000\\000\\144\\001\\015\\000\\\n\\015\\000\\000\\000\\144\\001\\000\\000\\000\\000\\000\\000\\015\\000\\204\\002\\\n\\000\\000\\000\\000\\000\\000\\029\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\197\\001\\015\\000\\033\\000\\015\\000\\000\\000\\015\\000\\204\\002\\\n\\042\\003\\091\\000\\144\\001\\000\\000\\000\\000\\000\\000\\000\\000\\039\\000\\\n\\000\\000\\000\\000\\015\\000\\209\\002\\000\\000\\015\\000\\000\\000\\000\\000\\\n\\144\\001\\015\\000\\000\\000\\000\\000\\000\\000\\000\\000\\141\\001\\092\\000\\\n\\000\\000\\150\\000\\150\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\093\\000\\000\\000\\000\\000\\050\\000\\042\\003\\\n\\204\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\042\\003\\000\\000\\\n\\000\\000\\000\\000\\150\\000\\150\\000\\150\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\150\\000\\000\\000\\000\\000\\191\\001\\000\\000\\069\\005\\\n\\000\\000\\157\\002\\112\\000\\000\\000\\113\\000\\114\\000\\028\\000\\000\\000\\\n\\115\\000\\249\\001\\250\\001\\116\\000\\117\\000\\144\\001\\000\\000\\150\\000\\\
n\\150\\000\\000\\000\\000\\000\\000\\000\\150\\000\\000\\000\\000\\000\\000\\000\\\n\\150\\000\\240\\001\\000\\000\\222\\001\\118\\000\\144\\001\\000\\000\\003\\002\\\n\\000\\000\\000\\000\\191\\001\\156\\001\\119\\000\\060\\003\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\156\\001\\000\\000\\121\\000\\011\\002\\052\\000\\069\\005\\\n\\000\\000\\017\\002\\000\\000\\000\\000\\150\\000\\000\\000\\000\\000\\070\\004\\\n\\122\\000\\123\\000\\000\\000\\000\\000\\000\\000\\150\\000\\000\\000\\000\\000\\\n\\124\\001\\000\\000\\000\\000\\222\\001\\191\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\144\\001\\000\\000\\000\\000\\144\\001\\125\\001\\150\\000\\000\\000\\\n\\000\\000\\000\\003\\000\\000\\191\\001\\000\\000\\000\\000\\000\\000\\144\\001\\\n\\000\\000\\000\\000\\183\\000\\191\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\112\\000\\000\\000\\113\\000\\114\\000\\028\\000\\000\\000\\115\\000\\000\\000\\\n\\000\\000\\126\\001\\117\\000\\000\\000\\191\\001\\000\\000\\000\\000\\153\\000\\\n\\000\\000\\000\\000\\000\\000\\170\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\150\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\170\\000\\120\\000\\000\\000\\191\\001\\191\\001\\000\\000\\\n\\000\\000\\000\\000\\121\\000\\144\\001\\000\\000\\000\\000\\000\\000\\191\\001\\\n\\000\\000\\000\\000\\110\\002\\000\\000\\170\\000\\144\\001\\122\\000\\123\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\144\\001\\191\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\191\\001\\191\\001\\191\\001\\191\\001\\000\\000\\\n\\156\\000\\008\\000\\009\\000\\000\\000\\000\\000\\052\\001\\010\\000\\011\\000\\\n\\144\\001\\144\\001\\000\\000\\135\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\170\\000\\000\\000\\170\\000\\170\\000\\000\\000\\144\\001\\069\\005\\000\\000\\\n\\069\\005\\015\\000\\016\\000\\156\\001\\150\\000\\000\\000\\000\\000\\000\\000\\\n\\144\\001\\058\\001\\059\\001\\060\\001\\061\\001\\000\\000\\000\\
000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\150\\000\\150\\000\\022\\000\\144\\001\\106\\002\\\n\\024\\000\\025\\000\\026\\000\\027\\000\\144\\001\\144\\001\\028\\000\\000\\000\\\n\\162\\000\\000\\000\\000\\000\\142\\000\\032\\000\\063\\001\\064\\001\\000\\000\\\n\\000\\000\\000\\000\\110\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\066\\001\\067\\001\\068\\001\\069\\001\\150\\000\\153\\000\\153\\000\\\n\\000\\000\\153\\000\\042\\000\\000\\000\\000\\000\\000\\000\\150\\000\\000\\000\\\n\\150\\000\\000\\000\\071\\001\\153\\000\\153\\000\\000\\000\\000\\000\\231\\002\\\n\\044\\000\\000\\000\\222\\001\\000\\000\\000\\000\\045\\000\\000\\000\\170\\000\\\n\\048\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\240\\001\\153\\000\\214\\001\\240\\001\\000\\000\\000\\000\\170\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\240\\001\\145\\003\\\n\\000\\000\\150\\000\\240\\001\\000\\000\\000\\000\\000\\000\\052\\000\\156\\000\\\n\\156\\000\\052\\000\\156\\000\\240\\001\\240\\001\\240\\001\\240\\001\\000\\000\\\n\\222\\001\\000\\000\\000\\000\\052\\000\\156\\000\\156\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\240\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\052\\000\\000\\000\\052\\000\\052\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\205\\001\\156\\000\\156\\000\\240\\001\\052\\000\\052\\000\\\n\\240\\001\\000\\000\\000\\000\\240\\001\\240\\001\\240\\001\\000\\000\\000\\000\\\n\\000\\000\\154\\000\\240\\001\\240\\001\\000\\000\\171\\000\\000\\000\\000\\000\\\n\\000\\000\\240\\001\\052\\000\\000\\000\\000\\000\\052\\000\\170\\000\\244\\003\\\n\\000\\000\\052\\000\\052\\000\\000\\000\\171\\000\\240\\001\\000\\000\\240\\001\\\n\\052\\000\\240\\001\\000\\000\\000\\000\\000\\000\\000\\000\\052\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\170\\000\\141\\001\\240\\001\\171\\000\\000\\000\\\n\\240\\001\\000\\000\\052\\000\\000\\000\\240\\001\
\000\\000\\052\\000\\150\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\052\\000\\000\\000\\000\\000\\052\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\171\\000\\000\\000\\171\\000\\171\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\150\\000\\000\\000\\170\\000\\170\\000\\000\\000\\000\\000\\170\\000\\\n\\000\\000\\053\\000\\170\\000\\000\\000\\116\\001\\021\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\032\\002\\000\\000\\150\\000\\000\\000\\106\\002\\\n\\150\\000\\000\\000\\150\\000\\150\\000\\150\\000\\000\\000\\000\\000\\106\\002\\\n\\150\\000\\000\\000\\000\\000\\000\\000\\106\\002\\000\\000\\150\\000\\000\\000\\\n\\154\\000\\154\\000\\000\\000\\154\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\106\\002\\000\\000\\106\\002\\106\\002\\154\\000\\154\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\150\\000\\000\\000\\000\\000\\\n\\106\\002\\171\\000\\000\\000\\153\\000\\214\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\154\\000\\215\\001\\000\\000\\222\\001\\\n\\088\\003\\171\\000\\000\\000\\106\\002\\000\\000\\000\\000\\106\\002\\000\\000\\\n\\000\\000\\106\\002\\106\\002\\106\\002\\153\\000\\153\\000\\153\\000\\207\\003\\\n\\000\\000\\106\\002\\000\\000\\000\\000\\153\\000\\000\\000\\000\\000\\106\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\134\\004\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\106\\002\\000\\000\\000\\000\\000\\000\\106\\002\\\n\\000\\000\\214\\001\\153\\000\\000\\000\\156\\000\\156\\000\\214\\001\\000\\000\\\n\\000\\000\\000\\000\\153\\000\\106\\002\\000\\000\\000\\000\\106\\002\\112\\000\\\n\\000\\000\\113\\000\\114\\000\\028
\\000\\000\\000\\115\\000\\000\\000\\000\\000\\\n\\116\\000\\117\\000\\000\\000\\000\\000\\140\\002\\156\\000\\156\\000\\156\\000\\\n\\000\\000\\206\\004\\000\\000\\000\\000\\000\\000\\156\\000\\153\\000\\000\\000\\\n\\171\\000\\118\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\153\\000\\\n\\207\\004\\119\\000\\120\\000\\115\\002\\150\\000\\000\\000\\000\\000\\198\\001\\\n\\000\\000\\121\\000\\156\\000\\156\\000\\000\\000\\171\\000\\000\\000\\156\\000\\\n\\153\\000\\000\\000\\222\\001\\156\\000\\000\\000\\122\\000\\123\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\170\\000\\032\\002\\000\\000\\000\\000\\\n\\208\\004\\076\\000\\113\\000\\114\\000\\028\\000\\000\\000\\115\\000\\000\\000\\\n\\000\\000\\116\\000\\209\\004\\000\\000\\000\\000\\000\\000\\000\\000\\156\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\222\\001\\\n\\233\\002\\000\\000\\118\\000\\153\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\210\\004\\119\\000\\120\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\156\\000\\121\\000\\000\\000\\000\\000\\171\\000\\171\\000\\000\\000\\\n\\000\\000\\171\\000\\155\\000\\201\\001\\171\\000\\000\\000\\211\\004\\123\\000\\\n\\000\\000\\000\\000\\000\\000\\222\\001\\000\\000\\000\\000\\000\\000\\156\\001\\\n\\000\\000\\053\\000\\000\\000\\000\\000\\053\\000\\000\\000\\116\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\116\\001\\053\\000\\116\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\233\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\053\\000\\053\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\153\\000\\000\\000\\\n\\150\\000\\053\\000\\053\\000\\000\\000\\000\\000\\154\\000\\215\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\153\\000\\153\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\000\\000\\\n\\053\\000\\000\\00
0\\000\\000\\000\\000\\053\\000\\053\\000\\154\\000\\154\\000\\\n\\154\\000\\000\\000\\000\\000\\053\\000\\111\\003\\000\\000\\154\\000\\000\\000\\\n\\000\\000\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\153\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\053\\000\\000\\000\\156\\000\\\n\\153\\000\\053\\000\\214\\001\\215\\001\\154\\000\\000\\000\\000\\000\\000\\000\\\n\\215\\001\\000\\000\\000\\000\\000\\000\\154\\000\\053\\000\\156\\000\\156\\000\\\n\\053\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\155\\000\\155\\000\\000\\000\\155\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\155\\000\\155\\000\\\n\\154\\000\\000\\000\\150\\000\\214\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\156\\000\\154\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\156\\000\\000\\000\\156\\000\\000\\000\\155\\000\\155\\000\\000\\000\\\n\\000\\000\\000\\000\\154\\000\\115\\002\\000\\000\\115\\002\\115\\002\\115\\002\\\n\\000\\000\\000\\000\\000\\000\\115\\002\\000\\000\\000\\000\\171\\000\\000\\000\\\n\\115\\002\\000\\000\\000\\000\\000\\000\\115\\002\\115\\002\\115\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\115\\002\\115\\002\\115\\002\\\n\\115\\002\\076\\000\\000\\000\\000\\000\\156\\000\\000\\000\\000\\000\\115\\002\\\n\\000\\000\\000\\000\\000\\000\\150\\000\\115\\002\\154\\000\\076\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\115\\002\\115\\002\\239\\001\\110\\003\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\076\\000\\000\\000\\076\\000\\076\\000\\115\\002\\\n\\000\\000\\000\\000\\115\\002\\115\\002\\000\\000\\115\\002\\115\\002\\115\\002\\\n\\000\\000\\115\\002\\076\\000\\000\\000\\115\\002\\115\\002\\000\\000\\000\\000\\\n\\000\\000\\153\\000\\000\\000\\115\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\116\\001\\000\\000\\000\\000\\000\\000\\076\\000\\115\\002\\115\\002\\\n\\1
10\\003\\115\\002\\115\\002\\115\\002\\115\\002\\076\\000\\165\\005\\115\\002\\\n\\000\\000\\000\\000\\000\\000\\076\\000\\000\\000\\000\\000\\000\\000\\115\\002\\\n\\115\\002\\076\\000\\115\\002\\000\\000\\000\\000\\000\\000\\115\\002\\000\\000\\\n\\154\\000\\000\\000\\000\\000\\057\\002\\000\\000\\076\\000\\059\\002\\000\\000\\\n\\060\\002\\076\\000\\061\\002\\153\\000\\000\\000\\000\\000\\000\\000\\154\\000\\\n\\154\\000\\000\\000\\156\\000\\000\\000\\000\\000\\076\\000\\000\\000\\000\\000\\\n\\076\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\153\\000\\\n\\000\\000\\000\\000\\214\\001\\000\\000\\153\\000\\153\\000\\153\\000\\094\\002\\\n\\195\\000\\195\\000\\153\\000\\099\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\153\\000\\154\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\154\\000\\000\\000\\215\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\156\\000\\000\\000\\000\\000\\153\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\156\\000\\000\\000\\000\\000\\156\\000\\079\\004\\156\\000\\156\\000\\156\\000\\\n\\102\\001\\103\\001\\000\\000\\156\\000\\000\\000\\215\\001\\000\\000\\141\\002\\\n\\000\\000\\156\\000\\000\\000\\000\\000\\000\\000\\008\\000\\155\\000\\155\\000\\\n\\000\\000\\000\\000\\002\\002\\011\\000\\153\\002\\000\\000\\154\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\156\\000\\000\\000\\000\\000\\137\\000\\000\\000\\015\\000\\016\\000\\155\\000\\\n\\155\\000\\155\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\155\\000\\\n\\155\\000\\198\\002\\000\\000\\201\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\140\\000\\141\\000\\\n\\000\\000\\000\\000\\028\\000\\000\\000\\155\\000\\155\\000\\000\\000\
\142\\000\\\n\\143\\000\\155\\000\\000\\000\\000\\000\\000\\000\\155\\000\\144\\000\\000\\000\\\n\\116\\001\\000\\000\\000\\000\\000\\000\\000\\000\\254\\003\\214\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\145\\000\\000\\000\\239\\001\\000\\000\\000\\000\\\n\\239\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\146\\000\\155\\000\\239\\001\\154\\000\\044\\000\\000\\000\\239\\001\\000\\000\\\n\\000\\000\\045\\000\\155\\000\\000\\000\\048\\000\\147\\000\\000\\000\\239\\001\\\n\\239\\001\\239\\001\\239\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\155\\000\\000\\000\\000\\000\\239\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\000\\000\\156\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\239\\001\\000\\000\\000\\000\\239\\001\\154\\000\\000\\000\\239\\001\\\n\\239\\001\\239\\001\\000\\000\\000\\000\\000\\000\\000\\000\\239\\001\\239\\001\\\n\\036\\003\\000\\000\\000\\000\\039\\003\\000\\000\\239\\001\\155\\000\\000\\000\\\n\\000\\000\\154\\000\\000\\000\\000\\000\\215\\001\\000\\000\\154\\000\\154\\000\\\n\\154\\000\\239\\001\\000\\000\\239\\001\\154\\000\\239\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\154\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\239\\001\\000\\000\\000\\000\\239\\001\\000\\000\\000\\000\\000\\000\\\n\\239\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\154\\000\\214\\001\\000\\000\\000\\000\\000\\000\\000\\000\\033\\002\\\n\\034\\002\\035\\002\\036\\002\\037\\002\\038\\002\\039\\002\\040\\002\\041\\002\\\n\\042\\002\\043\\002\\044\\002\\045\\002\\046\\002\\047\\002\\048\\002\\049\\002\\\n\\050\\002\\051\\002\\052\\002\\053\\002\\000\\000\\056\\002\\000\\000\\000\\000\\\n\\000\\000\\155\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\113\\003\\062\\002\\000
\\000\\251\\001\\000\\000\\\n\\155\\000\\155\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\079\\002\\002\\002\\156\\000\\002\\002\\002\\002\\002\\002\\000\\000\\\n\\000\\000\\000\\000\\002\\002\\146\\004\\000\\000\\000\\000\\133\\003\\002\\002\\\n\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\155\\000\\000\\000\\002\\002\\002\\002\\002\\002\\002\\002\\\n\\000\\000\\000\\000\\000\\000\\155\\000\\000\\000\\155\\000\\002\\002\\000\\000\\\n\\000\\000\\000\\000\\002\\002\\002\\002\\214\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\002\\002\\002\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\215\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\000\\000\\\n\\000\\000\\002\\002\\000\\000\\000\\000\\002\\002\\002\\002\\002\\002\\000\\000\\\n\\002\\002\\000\\000\\000\\000\\002\\002\\002\\002\\000\\000\\155\\000\\000\\000\\\n\\237\\001\\000\\000\\002\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\002\\002\\002\\000\\000\\\n\\002\\002\\002\\002\\002\\002\\000\\000\\000\\000\\156\\000\\002\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\001\\002\\002\\000\\000\\\n\\000\\000\\002\\002\\000\\000\\000\\000\\000\\000\\002\\002\\000\\000\\000\\000\\\n\\138\\005\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\209\\001\\000\\000\\003\\004\\000\\000\\000\\000\\\n\\209\\001\\215\\002\\000\\000\\000\\000\\000\\000\\000\\000\\217\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\209\\001\\000\\000\\209\\001\\\n\\209\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\162\\005\\000\\000\\209\\001\\000\\000\\156\\000\\104\\003\\\n\\000\\000\\112\\000\\000\\000\\113\\000\\114\\000\\028\\000\\000\\000\\115\\000\\\n\\000\\000\\000\\000\\115\\001\\117\\00
0\\155\\000\\000\\000\\037\\004\\209\\001\\\n\\000\\000\\000\\000\\195\\000\\195\\000\\215\\001\\209\\001\\209\\001\\209\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\209\\001\\106\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\209\\001\\120\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\121\\000\\000\\000\\000\\000\\067\\004\\209\\001\\\n\\000\\000\\000\\000\\000\\000\\209\\001\\116\\001\\027\\003\\000\\000\\122\\000\\\n\\123\\000\\000\\000\\033\\003\\034\\003\\035\\003\\000\\000\\155\\000\\209\\001\\\n\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\251\\001\\000\\000\\\n\\251\\001\\251\\001\\155\\000\\098\\004\\099\\004\\155\\000\\251\\001\\155\\000\\\n\\155\\000\\155\\000\\000\\000\\251\\001\\000\\000\\155\\000\\000\\000\\251\\001\\\n\\251\\001\\251\\001\\000\\000\\155\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\251\\001\\251\\001\\251\\001\\251\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\251\\001\\000\\000\\000\\000\\000\\000\\215\\001\\251\\001\\\n\\000\\000\\000\\000\\155\\000\\000\\000\\000\\000\\000\\000\\251\\001\\251\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\131\\004\\000\\000\\251\\001\\000\\000\\000\\000\\251\\001\\000\\000\\000\\000\\\n\\251\\001\\251\\001\\251\\001\\000\\000\\251\\001\\098\\003\\099\\003\\100\\003\\\n\\251\\001\\000\\000\\000\\000\\144\\004\\000\\000\\000\\000\\251\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\036\\002\\\n\\237\\001\\251\\001\\251\\001\\237\\001\\251\\001\\251\\001\\251\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\237\\001\\000\\000\\215\\001\\\n\\000\\000\\237\\001\\251\\001\\130\\003\\000\\000\\251\\001\\000\\000\\000\\000\\\n\\214\\002\\251\\001\\237\\001\\237\\001\\237\\001\\237\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\0
00\\000\\140\\003\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\237\\001\\000\\000\\214\\002\\000\\000\\214\\002\\214\\002\\214\\002\\\n\\000\\000\\214\\002\\000\\000\\000\\000\\214\\002\\214\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\237\\001\\000\\000\\000\\000\\237\\001\\\n\\000\\000\\155\\000\\237\\001\\237\\001\\237\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\237\\001\\237\\001\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\\n\\237\\001\\000\\000\\000\\000\\209\\001\\000\\000\\214\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\201\\004\\237\\001\\203\\004\\237\\001\\000\\000\\\n\\237\\001\\214\\002\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\237\\001\\000\\000\\223\\003\\237\\001\\\n\\000\\000\\000\\000\\000\\000\\237\\001\\000\\000\\000\\000\\106\\002\\106\\002\\\n\\106\\002\\106\\002\\000\\000\\000\\000\\106\\002\\106\\002\\106\\002\\106\\002\\\n\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\\n\\106\\002\\106\\002\\106\\002\\106\\002\\246\\004\\000\\000\\106\\002\\106\\002\\\n\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\106\\002\\106\\002\\000\\000\\000\\000\\106\\002\\106\\002\\\n\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\000\\000\\106\\002\\\n\\106\\002\\106\\002\\000\\000\\106\\002\\106\\002\\106\\002\\106\\002\\000\\000\\\n\\000\\000\\106\\002\\106\\002\\106\\002\\000\\000\\106\\002\\106\\002\\106\\002\\\n\\106\\002\\106\\002\\106\\002\\000\\000\\106\\002\\106\\002\\106\\002\\106\\002\\\n\\106\\002\\000\\000\\000\\000\\000\\000\\000\\000\\155\\000\\106\\002\\106\\002\\\n\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\106\\002\\000\\000\\106\\002\\\n\\064\\002\\106\\002\\106\\002\\060\\004\\106\\002\\106\\002\\106\\002\\106\\002\\\n\\106\\002\\000\\000\\106\\002\\106\\002\\000\\000\\106\\002\\106\\002\\106\\002\\\n\\106\\
002\\000\\000\\106\\002\\106\\002\\000\\000\\106\\002\\000\\000\\000\\000\\\n\\000\\000\\106\\002\\000\\000\\112\\000\\000\\000\\113\\000\\114\\000\\028\\000\\\n\\000\\000\\115\\000\\000\\000\\000\\000\\116\\000\\117\\000\\000\\000\\000\\000\\\n\\068\\005\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\134\\001\\036\\002\\\n\\000\\000\\036\\002\\036\\002\\036\\002\\000\\000\\118\\000\\000\\000\\036\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\036\\002\\119\\000\\120\\000\\000\\000\\\n\\036\\002\\036\\002\\036\\002\\000\\000\\000\\000\\121\\000\\000\\000\\000\\000\\\n\\000\\000\\036\\002\\036\\002\\036\\002\\036\\002\\090\\005\\000\\000\\000\\000\\\n\\000\\000\\122\\000\\123\\000\\036\\002\\000\\000\\000\\000\\000\\000\\155\\000\\\n\\036\\002\\000\\000\\124\\004\\125\\004\\126\\004\\000\\000\\000\\000\\036\\002\\\n\\036\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\036\\002\\000\\000\\000\\000\\036\\002\\117\\005\\\n\\000\\000\\036\\002\\036\\002\\036\\002\\000\\000\\036\\002\\000\\000\\000\\000\\\n\\036\\002\\036\\002\\000\\000\\000\\000\\000\\000\\000\\000\\130\\005\\036\\002\\\n\\000\\000\\124\\001\\000\\000\\209\\001\\000\\000\\000\\000\\139\\005\\000\\000\\\n\\000\\000\\000\\000\\036\\002\\036\\002\\000\\000\\036\\002\\036\\002\\036\\002\\\n\\209\\001\\241\\000\\152\\004\\153\\004\\154\\004\\000\\000\\000\\000\\142\\005\\\n\\155\\000\\000\\000\\000\\000\\036\\002\\000\\000\\209\\001\\036\\002\\209\\001\\\n\\209\\001\\112\\000\\036\\002\\113\\000\\114\\000\\028\\000\\000\\000\\115\\000\\\n\\000\\000\\000\\000\\126\\001\\117\\000\\209\\001\\000\\000\\000\\000\\000\\000\\\n\\163\\005\\164\\005\\112\\000\\000\\000\\113\\000\\114\\000\\028\\000\\178\\004\\\n\\115\\000\\000\\000\\174\\005\\116\\000\\117\\000\\000\\000\\000\\000\\209\\001\\\n\\000\\000\\000\\000\\209\\001\\000\\000\\120\\000\\209\\001\\209\\001\\209\\001\\\n\\000\\000\\184\\005\\000\\000\\121\\000\\118\\000\\209\\001\\186\\005\\187
\\005\\\n\\188\\005\\189\\005\\000\\000\\209\\001\\119\\000\\060\\003\\000\\000\\122\\000\\\n\\123\\000\\000\\000\\000\\000\\000\\000\\121\\000\\000\\000\\000\\000\\209\\001\\\n\\000\\000\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\000\\000\\152\\005\\\n\\122\\000\\123\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\209\\001\\\n\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\224\\004\\225\\004\\000\\000\\000\\000\\000\\000\\232\\004\\233\\004\\234\\004\\\n\\064\\002\\064\\002\\064\\002\\064\\002\\000\\000\\247\\000\\064\\002\\064\\002\\\n\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\\n\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\000\\000\\\n\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\064\\002\\064\\002\\000\\000\\000\\000\\\n\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\\n\\000\\000\\064\\002\\064\\002\\064\\002\\000\\000\\064\\002\\064\\002\\064\\002\\\n\\064\\002\\000\\000\\000\\000\\064\\002\\064\\002\\064\\002\\052\\002\\064\\002\\\n\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\000\\000\\064\\002\\064\\002\\\n\\064\\002\\064\\002\\064\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\064\\002\\\n\\000\\000\\064\\002\\000\\000\\064\\002\\064\\002\\000\\000\\064\\002\\064\\002\\\n\\064\\002\\064\\002\\064\\002\\000\\000\\064\\002\\064\\002\\000\\000\\064\\002\\\n\\064\\002\\064\\002\\064\\002\\000\\000\\064\\002\\064\\002\\000\\000\\064\\002\\\n\\000\\000\\000\\000\\000\\000\\064\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\245\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\00
0\\000\\000\\000\\000\\\n\\000\\000\\241\\000\\241\\000\\241\\000\\241\\000\\000\\000\\000\\000\\241\\000\\\n\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\\n\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\\n\\000\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\\n\\241\\000\\000\\000\\101\\005\\102\\005\\103\\005\\241\\000\\241\\000\\000\\000\\\n\\000\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\\n\\241\\000\\000\\000\\241\\000\\241\\000\\241\\000\\000\\000\\241\\000\\241\\000\\\n\\241\\000\\241\\000\\000\\000\\000\\000\\241\\000\\241\\000\\241\\000\\000\\000\\\n\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\000\\000\\241\\000\\\n\\241\\000\\241\\000\\241\\000\\241\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\241\\000\\\n\\241\\000\\000\\000\\241\\000\\000\\000\\241\\000\\241\\000\\253\\000\\241\\000\\\n\\241\\000\\241\\000\\241\\000\\241\\000\\000\\000\\241\\000\\241\\000\\000\\000\\\n\\241\\000\\241\\000\\241\\000\\241\\000\\000\\000\\241\\000\\241\\000\\000\\000\\\n\\241\\000\\000\\000\\000\\000\\000\\000\\241\\000\\247\\000\\247\\000\\247\\000\\\n\\247\\000\\000\\000\\000\\000\\247\\000\\247\\000\\247\\000\\247\\000\\247\\000\\\n\\247\\000\\247\\000\\247\\000\\247\\000\\247\\000\\247\\000\\247\\000\\247\\000\\\n\\247\\000\\247\\000\\247\\000\\247\\000\\000\\000\\247\\000\\247\\000\\247\\000\\\n\\247\\000\\247\\000\\247\\000\\247\\000\\247\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\247\\000\\247\\000\\000\\000\\000\\000\\247\\000\\247\\000\\247\\000\\\n\\247\\000\\247\\000\\247\\000\\247\\000\\247\\000\\000\\000\\247\\000\\247\\000\\\n\\247\\000\\000\\000\\247\\000\\247\\000\\247\\000\\247\\000\\000\\000\\000\\000\\\n\\247\\000\\247\\000\\247\\000\\000\\000\\247\\000\\247\\000\\247\\000\\247\\000\\\n\\247\\000\\247\\000\\000\\000\\247\\000\\2
47\\000\\247\\000\\247\\000\\247\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\247\\000\\247\\000\\247\\000\\\n\\247\\000\\247\\000\\247\\000\\247\\000\\247\\000\\000\\000\\247\\000\\000\\000\\\n\\247\\000\\247\\000\\249\\000\\247\\000\\247\\000\\247\\000\\247\\000\\247\\000\\\n\\000\\000\\247\\000\\247\\000\\000\\000\\247\\000\\247\\000\\247\\000\\247\\000\\\n\\000\\000\\247\\000\\247\\000\\000\\000\\247\\000\\000\\000\\000\\000\\000\\000\\\n\\247\\000\\245\\000\\245\\000\\245\\000\\245\\000\\000\\000\\000\\000\\245\\000\\\n\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\\n\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\\n\\000\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\\n\\245\\000\\000\\000\\000\\000\\000\\000\\000\\000\\245\\000\\245\\000\\000\\000\\\n\\000\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\\n\\245\\000\\000\\000\\245\\000\\245\\000\\245\\000\\000\\000\\245\\000\\245\\000\\\n\\245\\000\\245\\000\\000\\000\\000\\000\\245\\000\\245\\000\\245\\000\\000\\000\\\n\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\000\\000\\245\\000\\\n\\245\\000\\245\\000\\245\\000\\245\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\245\\000\\\n\\245\\000\\000\\000\\245\\000\\000\\000\\245\\000\\245\\000\\251\\000\\245\\000\\\n\\245\\000\\245\\000\\245\\000\\245\\000\\000\\000\\245\\000\\245\\000\\000\\000\\\n\\245\\000\\245\\000\\245\\000\\245\\000\\000\\000\\245\\000\\245\\000\\000\\000\\\n\\245\\000\\000\\000\\000\\000\\000\\000\\245\\000\\000\\000\\253\\000\\253\\000\\\n\\253\\000\\253\\000\\000\\000\\000\\000\\253\\000\\253\\000\\253\\000\\253\\000\\\n\\253\\000\\253\\000\\253\\000\\253\\000\\253\\000\\253\\000\\253\\000\\253\\000\\\n\\253\\000\\253\\000\\253\\000\\253\\000\\253\\000\\000\\000\\253\\000\\253\\000\\\n\\253\\000\\253\\000\\253\\
000\\253\\000\\253\\000\\253\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\253\\000\\253\\000\\000\\000\\000\\000\\253\\000\\253\\000\\\n\\253\\000\\253\\000\\253\\000\\253\\000\\253\\000\\253\\000\\000\\000\\253\\000\\\n\\253\\000\\253\\000\\000\\000\\253\\000\\253\\000\\253\\000\\253\\000\\000\\000\\\n\\000\\000\\253\\000\\253\\000\\253\\000\\000\\000\\253\\000\\253\\000\\253\\000\\\n\\253\\000\\253\\000\\253\\000\\000\\000\\253\\000\\253\\000\\253\\000\\253\\000\\\n\\253\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\253\\000\\253\\000\\\n\\253\\000\\253\\000\\253\\000\\253\\000\\253\\000\\253\\000\\000\\000\\253\\000\\\n\\000\\000\\253\\000\\253\\000\\003\\001\\253\\000\\253\\000\\253\\000\\253\\000\\\n\\253\\000\\000\\000\\253\\000\\253\\000\\000\\000\\253\\000\\253\\000\\253\\000\\\n\\253\\000\\000\\000\\253\\000\\253\\000\\000\\000\\253\\000\\000\\000\\000\\000\\\n\\000\\000\\253\\000\\249\\000\\249\\000\\249\\000\\249\\000\\000\\000\\000\\000\\\n\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\\n\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\\n\\249\\000\\000\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\\n\\249\\000\\249\\000\\000\\000\\000\\000\\000\\000\\000\\000\\249\\000\\249\\000\\\n\\000\\000\\000\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\\n\\249\\000\\249\\000\\000\\000\\249\\000\\249\\000\\249\\000\\000\\000\\249\\000\\\n\\249\\000\\249\\000\\249\\000\\000\\000\\000\\000\\249\\000\\249\\000\\249\\000\\\n\\000\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\000\\000\\\n\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\\n\\249\\000\\249\\000\\000\\000\\249\\000\\000\\000\\249\\000\\249\\000\\255\\000\\\n\\249\\000\\249\\000\\249\\000\\249\\000\\249\\000\\000\\000\\249\\000\\249\\000\\\n\\000\\000\
\249\\000\\249\\000\\249\\000\\249\\000\\000\\000\\249\\000\\249\\000\\\n\\000\\000\\249\\000\\000\\000\\000\\000\\000\\000\\249\\000\\251\\000\\251\\000\\\n\\251\\000\\251\\000\\000\\000\\000\\000\\251\\000\\251\\000\\251\\000\\251\\000\\\n\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\\n\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\000\\000\\251\\000\\251\\000\\\n\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\251\\000\\251\\000\\000\\000\\000\\000\\251\\000\\251\\000\\\n\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\000\\000\\251\\000\\\n\\251\\000\\251\\000\\000\\000\\251\\000\\251\\000\\251\\000\\251\\000\\000\\000\\\n\\000\\000\\251\\000\\251\\000\\251\\000\\000\\000\\251\\000\\251\\000\\251\\000\\\n\\251\\000\\251\\000\\251\\000\\000\\000\\251\\000\\251\\000\\251\\000\\251\\000\\\n\\251\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\251\\000\\251\\000\\\n\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\251\\000\\000\\000\\251\\000\\\n\\000\\000\\251\\000\\251\\000\\001\\001\\251\\000\\251\\000\\251\\000\\251\\000\\\n\\251\\000\\000\\000\\251\\000\\251\\000\\000\\000\\251\\000\\251\\000\\251\\000\\\n\\251\\000\\000\\000\\251\\000\\251\\000\\000\\000\\251\\000\\000\\000\\000\\000\\\n\\000\\000\\251\\000\\000\\000\\003\\001\\003\\001\\003\\001\\003\\001\\000\\000\\\n\\000\\000\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\\n\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\\n\\003\\001\\003\\001\\000\\000\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\\n\\003\\001\\003\\001\\003\\001\\000\\000\\000\\000\\000\\000\\000\\000\\003\\001\\\n\\003\\001\\000\\000\\000\\000\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\\n\\003\\001\\003\\001\\003\\001\\000\\000\\003\\001\\003\\001\\003\\001\\000\\000\\\n\\003\\001\\003\\001\\003\\001\\003\\001\\000\\000\\000\\000\\003\\001\\003\\00
1\\\n\\003\\001\\000\\000\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\\n\\000\\000\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\\n\\003\\001\\003\\001\\003\\001\\000\\000\\003\\001\\000\\000\\003\\001\\003\\001\\\n\\030\\001\\003\\001\\003\\001\\003\\001\\003\\001\\003\\001\\000\\000\\003\\001\\\n\\003\\001\\000\\000\\003\\001\\003\\001\\003\\001\\003\\001\\000\\000\\003\\001\\\n\\003\\001\\000\\000\\003\\001\\000\\000\\000\\000\\000\\000\\003\\001\\255\\000\\\n\\255\\000\\255\\000\\255\\000\\000\\000\\000\\000\\255\\000\\255\\000\\255\\000\\\n\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\\n\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\000\\000\\255\\000\\\n\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\255\\000\\255\\000\\000\\000\\000\\000\\255\\000\\\n\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\000\\000\\\n\\255\\000\\255\\000\\255\\000\\000\\000\\255\\000\\255\\000\\255\\000\\255\\000\\\n\\000\\000\\000\\000\\255\\000\\255\\000\\255\\000\\000\\000\\255\\000\\255\\000\\\n\\255\\000\\255\\000\\255\\000\\255\\000\\000\\000\\255\\000\\255\\000\\255\\000\\\n\\255\\000\\255\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\255\\000\\\n\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\255\\000\\000\\000\\\n\\255\\000\\000\\000\\255\\000\\255\\000\\039\\001\\255\\000\\255\\000\\255\\000\\\n\\255\\000\\255\\000\\000\\000\\255\\000\\255\\000\\000\\000\\255\\000\\255\\000\\\n\\255\\000\\255\\000\\000\\000\\255\\000\\255\\000\\000\\000\\255\\000\\000\\000\\\n\\000\\000\\000\\000\\255\\000\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\\n\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\\n\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\0
01\\001\\001\\001\\\n\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\\n\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\000\\000\\000\\000\\001\\001\\\n\\001\\001\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\\n\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\000\\000\\\n\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\001\\001\\001\\001\\\n\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\\n\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\\n\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\000\\000\\001\\001\\001\\001\\\n\\041\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\\n\\001\\001\\000\\000\\001\\001\\001\\001\\001\\001\\001\\001\\000\\000\\001\\001\\\n\\001\\001\\000\\000\\001\\001\\000\\000\\000\\000\\000\\000\\001\\001\\000\\000\\\n\\030\\001\\030\\001\\030\\001\\030\\001\\000\\000\\000\\000\\030\\001\\030\\001\\\n\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\\n\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\000\\000\\000\\000\\\n\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\030\\001\\030\\001\\000\\000\\000\\000\\\n\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\000\\000\\\n\\000\\000\\030\\001\\030\\001\\030\\001\\000\\000\\030\\001\\030\\001\\030\\001\\\n\\030\\001\\000\\000\\000\\000\\030\\001\\030\\001\\030\\001\\000\\000\\030\\001\\\n\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\000\\000\\030\\001\\030\\001\\\n\\030\\001\\030\\001\\030\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\030\\001\\\n\\000\\000\\030\\001\\000\\000\\030\\001\\030\\
001\\044\\001\\030\\001\\030\\001\\\n\\030\\001\\030\\001\\030\\001\\000\\000\\030\\001\\030\\001\\000\\000\\030\\001\\\n\\030\\001\\030\\001\\030\\001\\000\\000\\030\\001\\030\\001\\000\\000\\030\\001\\\n\\000\\000\\000\\000\\000\\000\\030\\001\\039\\001\\039\\001\\039\\001\\039\\001\\\n\\000\\000\\000\\000\\039\\001\\039\\001\\039\\001\\039\\001\\039\\001\\039\\001\\\n\\039\\001\\039\\001\\039\\001\\039\\001\\039\\001\\039\\001\\039\\001\\039\\001\\\n\\039\\001\\039\\001\\000\\000\\000\\000\\039\\001\\039\\001\\039\\001\\039\\001\\\n\\039\\001\\039\\001\\039\\001\\039\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\039\\001\\039\\001\\000\\000\\000\\000\\039\\001\\039\\001\\039\\001\\039\\001\\\n\\039\\001\\039\\001\\039\\001\\000\\000\\000\\000\\039\\001\\039\\001\\039\\001\\\n\\000\\000\\039\\001\\039\\001\\039\\001\\039\\001\\000\\000\\000\\000\\039\\001\\\n\\039\\001\\039\\001\\000\\000\\039\\001\\039\\001\\039\\001\\039\\001\\039\\001\\\n\\039\\001\\000\\000\\039\\001\\039\\001\\039\\001\\039\\001\\039\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\039\\001\\039\\001\\039\\001\\039\\001\\\n\\039\\001\\039\\001\\039\\001\\039\\001\\000\\000\\039\\001\\000\\000\\039\\001\\\n\\039\\001\\233\\000\\039\\001\\039\\001\\039\\001\\000\\000\\000\\000\\000\\000\\\n\\039\\001\\039\\001\\000\\000\\039\\001\\039\\001\\039\\001\\039\\001\\000\\000\\\n\\039\\001\\039\\001\\000\\000\\039\\001\\000\\000\\000\\000\\000\\000\\039\\001\\\n\\041\\001\\041\\001\\041\\001\\041\\001\\000\\000\\000\\000\\041\\001\\041\\001\\\n\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\\n\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\000\\000\\000\\000\\\n\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\041\\001\\041\\001\\000\\000\\000\\000\\\n\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\000\\000\\\n\\000\\000\\041\\001\\041\\001\
\041\\001\\000\\000\\041\\001\\041\\001\\041\\001\\\n\\041\\001\\000\\000\\000\\000\\041\\001\\041\\001\\041\\001\\000\\000\\041\\001\\\n\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\000\\000\\041\\001\\041\\001\\\n\\041\\001\\041\\001\\041\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\041\\001\\\n\\000\\000\\041\\001\\000\\000\\041\\001\\041\\001\\234\\000\\041\\001\\041\\001\\\n\\041\\001\\000\\000\\000\\000\\000\\000\\041\\001\\041\\001\\000\\000\\041\\001\\\n\\041\\001\\041\\001\\041\\001\\000\\000\\041\\001\\041\\001\\000\\000\\041\\001\\\n\\000\\000\\000\\000\\000\\000\\041\\001\\000\\000\\044\\001\\044\\001\\044\\001\\\n\\044\\001\\000\\000\\000\\000\\044\\001\\044\\001\\044\\001\\044\\001\\044\\001\\\n\\044\\001\\044\\001\\044\\001\\044\\001\\044\\001\\044\\001\\044\\001\\044\\001\\\n\\044\\001\\044\\001\\044\\001\\000\\000\\000\\000\\044\\001\\044\\001\\044\\001\\\n\\044\\001\\044\\001\\044\\001\\044\\001\\044\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\044\\001\\044\\001\\000\\000\\000\\000\\044\\001\\044\\001\\044\\001\\\n\\044\\001\\044\\001\\044\\001\\044\\001\\000\\000\\000\\000\\044\\001\\044\\001\\\n\\044\\001\\000\\000\\044\\001\\044\\001\\044\\001\\044\\001\\000\\000\\000\\000\\\n\\044\\001\\044\\001\\044\\001\\000\\000\\044\\001\\044\\001\\044\\001\\044\\001\\\n\\044\\001\\044\\001\\000\\000\\044\\001\\044\\001\\044\\001\\044\\001\\044\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\044\\001\\044\\001\\044\\001\\\n\\044\\001\\044\\001\\044\\001\\044\\001\\044\\001\\000\\000\\044\\001\\000\\000\\\n\\044\\001\\044\\001\\173\\000\\044\\001\\044\\001\\044\\001\\000\\000\\000\\000\\\n\\000\\000\\044\\001\\044\\001\\000\\000\\044\\001\\044\\001\\044\\001\\044\\001\\\n\\000\\000\\044\\001\\044\\001\\000\\000\\044\\001\\000\\000\\000\\000\\000\\000\\\n\\044\\001\\233\\000\\233\\000\\233\\000\\233\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\233
\\000\\233\\000\\233\\000\\000\\000\\000\\000\\233\\000\\233\\000\\\n\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\\n\\000\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\233\\000\\233\\000\\000\\000\\\n\\000\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\\n\\233\\000\\000\\000\\233\\000\\000\\000\\233\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\233\\000\\233\\000\\000\\000\\\n\\233\\000\\000\\000\\000\\000\\233\\000\\233\\000\\233\\000\\000\\000\\233\\000\\\n\\233\\000\\233\\000\\233\\000\\233\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\233\\000\\\n\\000\\000\\000\\000\\233\\000\\000\\000\\233\\000\\233\\000\\174\\000\\233\\000\\\n\\233\\000\\233\\000\\233\\000\\233\\000\\000\\000\\233\\000\\000\\000\\000\\000\\\n\\233\\000\\233\\000\\233\\000\\000\\000\\000\\000\\233\\000\\000\\000\\000\\000\\\n\\233\\000\\000\\000\\000\\000\\000\\000\\233\\000\\234\\000\\234\\000\\234\\000\\\n\\234\\000\\000\\000\\000\\000\\000\\000\\000\\000\\234\\000\\234\\000\\234\\000\\\n\\000\\000\\000\\000\\234\\000\\234\\000\\234\\000\\234\\000\\234\\000\\234\\000\\\n\\234\\000\\234\\000\\234\\000\\234\\000\\000\\000\\234\\000\\234\\000\\234\\000\\\n\\234\\000\\234\\000\\234\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\234\\000\\234\\000\\000\\000\\000\\000\\234\\000\\234\\000\\234\\000\\\n\\234\\000\\234\\000\\234\\000\\234\\000\\234\\000\\000\\000\\234\\000\\000\\000\\\n\\234\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\234\\000\\234\\000\\000\\000\\234\\000\\000\\000\\000\\000\\234\\000\\\n\\234\\000\\234\\000\\000\\000\\234\\000\\234\\000\\234\\000\\234\\000\\234\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\234\\000\\234\\000\\234\\000\\\
n\\234\\000\\234\\000\\234\\000\\234\\000\\000\\000\\000\\000\\234\\000\\000\\000\\\n\\234\\000\\234\\000\\186\\000\\234\\000\\234\\000\\234\\000\\234\\000\\234\\000\\\n\\000\\000\\234\\000\\000\\000\\000\\000\\234\\000\\234\\000\\234\\000\\000\\000\\\n\\000\\000\\234\\000\\000\\000\\000\\000\\234\\000\\000\\000\\000\\000\\000\\000\\\n\\234\\000\\000\\000\\173\\000\\173\\000\\173\\000\\173\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\173\\000\\173\\000\\173\\000\\000\\000\\000\\000\\173\\000\\\n\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\\n\\000\\000\\000\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\173\\000\\173\\000\\\n\\000\\000\\000\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\\n\\173\\000\\000\\000\\000\\000\\173\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\173\\000\\173\\000\\\n\\000\\000\\173\\000\\000\\000\\000\\000\\173\\000\\173\\000\\173\\000\\000\\000\\\n\\173\\000\\173\\000\\173\\000\\173\\000\\173\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\173\\000\\000\\000\\173\\000\\173\\000\\173\\000\\173\\000\\\n\\173\\000\\000\\000\\000\\000\\000\\000\\000\\000\\173\\000\\173\\000\\187\\000\\\n\\173\\000\\173\\000\\173\\000\\000\\000\\000\\000\\000\\000\\173\\000\\000\\000\\\n\\000\\000\\173\\000\\000\\000\\173\\000\\000\\000\\000\\000\\173\\000\\000\\000\\\n\\000\\000\\173\\000\\000\\000\\000\\000\\000\\000\\173\\000\\174\\000\\174\\000\\\n\\174\\000\\174\\000\\000\\000\\000\\000\\000\\000\\000\\000\\174\\000\\174\\000\\\n\\174\\000\\000\\000\\000\\000\\174\\000\\174\\000\\174\\000\\174\\000\\174\\000\\\n\\174\\000\\174\\000\\174\\000\\174\\000\\000\\000\\000\\000\\174\\000\\174\\000\\\n\\174\\000\\174\\000\\174\\000\\174\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\174\\000\\174\\000\\000\\000\\000\\000\\174\\
000\\174\\000\\\n\\174\\000\\174\\000\\174\\000\\174\\000\\174\\000\\000\\000\\000\\000\\174\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\174\\000\\174\\000\\000\\000\\174\\000\\000\\000\\000\\000\\\n\\174\\000\\174\\000\\174\\000\\000\\000\\174\\000\\174\\000\\174\\000\\174\\000\\\n\\174\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\174\\000\\000\\000\\\n\\174\\000\\174\\000\\174\\000\\174\\000\\174\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\174\\000\\174\\000\\225\\000\\174\\000\\174\\000\\174\\000\\000\\000\\\n\\000\\000\\000\\000\\174\\000\\000\\000\\000\\000\\174\\000\\000\\000\\174\\000\\\n\\000\\000\\000\\000\\174\\000\\000\\000\\000\\000\\174\\000\\000\\000\\000\\000\\\n\\000\\000\\174\\000\\186\\000\\186\\000\\186\\000\\186\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\186\\000\\186\\000\\186\\000\\000\\000\\000\\000\\186\\000\\\n\\186\\000\\186\\000\\186\\000\\186\\000\\186\\000\\186\\000\\186\\000\\186\\000\\\n\\000\\000\\000\\000\\186\\000\\186\\000\\186\\000\\186\\000\\186\\000\\186\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\186\\000\\186\\000\\\n\\000\\000\\000\\000\\186\\000\\186\\000\\186\\000\\186\\000\\186\\000\\186\\000\\\n\\186\\000\\000\\000\\000\\000\\186\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\186\\000\\186\\000\\\n\\000\\000\\186\\000\\000\\000\\000\\000\\186\\000\\186\\000\\186\\000\\000\\000\\\n\\186\\000\\186\\000\\186\\000\\186\\000\\186\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\186\\000\\000\\000\\186\\000\\186\\000\\186\\000\\186\\000\\\n\\186\\000\\000\\000\\000\\000\\000\\000\\000\\000\\186\\000\\186\\000\\226\\000\\\n\\186\\000\\186\\000\\186\\000\\000\\000\\000\\000\\000\\000\\186\\000\\000\\000\\\n\\000\\000\\186\\000\\000\\000\\186\\000\\000\\000\\000\\000\\186\\000\\000\\000\\\n\\000\\000\\186\\000\\000\\000\\000\\000\\000\\000\
\186\\000\\000\\000\\187\\000\\\n\\187\\000\\187\\000\\187\\000\\000\\000\\000\\000\\000\\000\\000\\000\\187\\000\\\n\\187\\000\\187\\000\\000\\000\\000\\000\\187\\000\\187\\000\\187\\000\\187\\000\\\n\\187\\000\\187\\000\\187\\000\\187\\000\\187\\000\\000\\000\\000\\000\\187\\000\\\n\\187\\000\\187\\000\\187\\000\\187\\000\\187\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\187\\000\\187\\000\\000\\000\\000\\000\\187\\000\\\n\\187\\000\\187\\000\\187\\000\\187\\000\\187\\000\\187\\000\\000\\000\\000\\000\\\n\\187\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\187\\000\\187\\000\\000\\000\\187\\000\\000\\000\\\n\\000\\000\\187\\000\\187\\000\\187\\000\\000\\000\\187\\000\\187\\000\\187\\000\\\n\\187\\000\\187\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\187\\000\\\n\\000\\000\\187\\000\\187\\000\\187\\000\\187\\000\\187\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\187\\000\\187\\000\\185\\000\\187\\000\\187\\000\\187\\000\\\n\\000\\000\\000\\000\\000\\000\\187\\000\\000\\000\\000\\000\\187\\000\\000\\000\\\n\\187\\000\\000\\000\\000\\000\\187\\000\\000\\000\\000\\000\\187\\000\\000\\000\\\n\\000\\000\\000\\000\\187\\000\\225\\000\\225\\000\\225\\000\\225\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\225\\000\\225\\000\\225\\000\\000\\000\\000\\000\\\n\\225\\000\\225\\000\\225\\000\\225\\000\\225\\000\\225\\000\\225\\000\\225\\000\\\n\\225\\000\\000\\000\\000\\000\\225\\000\\225\\000\\225\\000\\225\\000\\225\\000\\\n\\225\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\225\\000\\\n\\225\\000\\000\\000\\000\\000\\225\\000\\225\\000\\225\\000\\225\\000\\225\\000\\\n\\225\\000\\225\\000\\000\\000\\000\\000\\225\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\225\\000\\\n\\225\\000\\000\\000\\225\\000\\000\\000\\000\\000\\225\\000\\225\\000\\225\\000\\\n\\000\\000\\225\\000\\225\\000\\225
\\000\\225\\000\\225\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\225\\000\\000\\000\\225\\000\\225\\000\\225\\000\\\n\\225\\000\\225\\000\\000\\000\\000\\000\\000\\000\\000\\000\\225\\000\\225\\000\\\n\\196\\000\\225\\000\\225\\000\\225\\000\\000\\000\\000\\000\\000\\000\\225\\000\\\n\\000\\000\\000\\000\\225\\000\\000\\000\\225\\000\\000\\000\\000\\000\\225\\000\\\n\\000\\000\\000\\000\\225\\000\\000\\000\\000\\000\\000\\000\\225\\000\\226\\000\\\n\\226\\000\\226\\000\\226\\000\\000\\000\\000\\000\\000\\000\\000\\000\\226\\000\\\n\\226\\000\\226\\000\\000\\000\\000\\000\\226\\000\\226\\000\\226\\000\\226\\000\\\n\\226\\000\\226\\000\\226\\000\\226\\000\\226\\000\\000\\000\\000\\000\\226\\000\\\n\\226\\000\\226\\000\\226\\000\\226\\000\\226\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\226\\000\\226\\000\\000\\000\\000\\000\\226\\000\\\n\\226\\000\\226\\000\\226\\000\\226\\000\\226\\000\\226\\000\\000\\000\\000\\000\\\n\\226\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\226\\000\\226\\000\\000\\000\\226\\000\\000\\000\\\n\\000\\000\\226\\000\\226\\000\\226\\000\\000\\000\\226\\000\\226\\000\\226\\000\\\n\\226\\000\\226\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\226\\000\\\n\\000\\000\\226\\000\\226\\000\\226\\000\\226\\000\\226\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\226\\000\\226\\000\\197\\000\\226\\000\\226\\000\\226\\000\\\n\\000\\000\\000\\000\\000\\000\\226\\000\\000\\000\\000\\000\\226\\000\\000\\000\\\n\\226\\000\\000\\000\\000\\000\\226\\000\\000\\000\\000\\000\\226\\000\\000\\000\\\n\\000\\000\\000\\000\\226\\000\\000\\000\\185\\000\\185\\000\\185\\000\\185\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\185\\000\\185\\000\\185\\000\\000\\000\\\n\\000\\000\\185\\000\\185\\000\\185\\000\\185\\000\\185\\000\\000\\000\\185\\000\\\n\\185\\000\\185\\000\\000\\000\\000\\000\\185\\000\\185\\000\\185\\000\\185\\000\\\n\\185\\000\\185\\00
0\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\185\\000\\185\\000\\000\\000\\000\\000\\185\\000\\185\\000\\185\\000\\185\\000\\\n\\185\\000\\185\\000\\185\\000\\000\\000\\000\\000\\185\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\185\\000\\185\\000\\000\\000\\185\\000\\000\\000\\000\\000\\185\\000\\185\\000\\\n\\185\\000\\000\\000\\185\\000\\185\\000\\185\\000\\185\\000\\185\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\185\\000\\000\\000\\185\\000\\185\\000\\\n\\185\\000\\185\\000\\185\\000\\000\\000\\000\\000\\000\\000\\000\\000\\185\\000\\\n\\185\\000\\204\\000\\185\\000\\185\\000\\185\\000\\000\\000\\000\\000\\000\\000\\\n\\185\\000\\000\\000\\000\\000\\185\\000\\000\\000\\185\\000\\000\\000\\000\\000\\\n\\185\\000\\000\\000\\000\\000\\185\\000\\000\\000\\000\\000\\000\\000\\185\\000\\\n\\196\\000\\196\\000\\196\\000\\196\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\196\\000\\196\\000\\196\\000\\000\\000\\000\\000\\196\\000\\196\\000\\196\\000\\\n\\196\\000\\196\\000\\196\\000\\196\\000\\196\\000\\196\\000\\000\\000\\000\\000\\\n\\196\\000\\196\\000\\196\\000\\196\\000\\196\\000\\196\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\196\\000\\196\\000\\000\\000\\000\\000\\\n\\196\\000\\196\\000\\196\\000\\196\\000\\196\\000\\196\\000\\000\\000\\000\\000\\\n\\000\\000\\196\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\196\\000\\196\\000\\000\\000\\196\\000\\\n\\000\\000\\000\\000\\196\\000\\196\\000\\196\\000\\000\\000\\196\\000\\196\\000\\\n\\196\\000\\196\\000\\196\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\196\\000\\000\\000\\196\\000\\196\\000\\196\\000\\196\\000\\196\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\196\\000\\196\\000\\203\\000\\196\\000\\196\\000\\\n\\196\\000\\000\\000\\000\\000\\000\\000\\196\\000\\000\\000\\000\\000\\196\\000\\\n\\0
00\\000\\196\\000\\000\\000\\000\\000\\196\\000\\000\\000\\000\\000\\196\\000\\\n\\000\\000\\000\\000\\000\\000\\196\\000\\197\\000\\197\\000\\197\\000\\197\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\197\\000\\197\\000\\197\\000\\000\\000\\\n\\000\\000\\197\\000\\197\\000\\197\\000\\197\\000\\197\\000\\197\\000\\197\\000\\\n\\197\\000\\197\\000\\000\\000\\000\\000\\197\\000\\197\\000\\197\\000\\197\\000\\\n\\197\\000\\197\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\197\\000\\197\\000\\000\\000\\000\\000\\197\\000\\197\\000\\197\\000\\197\\000\\\n\\197\\000\\197\\000\\000\\000\\000\\000\\000\\000\\197\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\197\\000\\197\\000\\000\\000\\197\\000\\000\\000\\000\\000\\197\\000\\197\\000\\\n\\197\\000\\000\\000\\197\\000\\197\\000\\197\\000\\197\\000\\197\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\197\\000\\000\\000\\197\\000\\197\\000\\\n\\197\\000\\197\\000\\197\\000\\000\\000\\000\\000\\000\\000\\000\\000\\197\\000\\\n\\197\\000\\179\\000\\197\\000\\197\\000\\197\\000\\000\\000\\000\\000\\000\\000\\\n\\197\\000\\000\\000\\000\\000\\197\\000\\000\\000\\197\\000\\000\\000\\000\\000\\\n\\197\\000\\000\\000\\000\\000\\197\\000\\000\\000\\000\\000\\000\\000\\197\\000\\\n\\000\\000\\204\\000\\204\\000\\204\\000\\204\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\204\\000\\204\\000\\204\\000\\000\\000\\000\\000\\204\\000\\204\\000\\\n\\204\\000\\204\\000\\204\\000\\204\\000\\204\\000\\204\\000\\204\\000\\000\\000\\\n\\000\\000\\204\\000\\204\\000\\204\\000\\204\\000\\204\\000\\204\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\204\\000\\204\\000\\000\\000\\\n\\000\\000\\204\\000\\204\\000\\204\\000\\204\\000\\204\\000\\204\\000\\000\\000\\\n\\000\\000\\000\\000\\204\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\204\\000\\204\\000\
\000\\000\\\n\\204\\000\\000\\000\\000\\000\\204\\000\\204\\000\\204\\000\\000\\000\\204\\000\\\n\\204\\000\\204\\000\\204\\000\\204\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\204\\000\\000\\000\\204\\000\\204\\000\\204\\000\\204\\000\\204\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\204\\000\\204\\000\\182\\000\\204\\000\\\n\\204\\000\\204\\000\\000\\000\\000\\000\\000\\000\\204\\000\\000\\000\\000\\000\\\n\\204\\000\\000\\000\\204\\000\\000\\000\\000\\000\\204\\000\\000\\000\\000\\000\\\n\\204\\000\\000\\000\\000\\000\\000\\000\\204\\000\\203\\000\\203\\000\\203\\000\\\n\\203\\000\\000\\000\\000\\000\\000\\000\\000\\000\\203\\000\\203\\000\\203\\000\\\n\\000\\000\\000\\000\\203\\000\\203\\000\\203\\000\\203\\000\\203\\000\\203\\000\\\n\\203\\000\\203\\000\\203\\000\\000\\000\\000\\000\\203\\000\\203\\000\\203\\000\\\n\\203\\000\\203\\000\\203\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\203\\000\\203\\000\\000\\000\\000\\000\\203\\000\\203\\000\\203\\000\\\n\\203\\000\\203\\000\\203\\000\\000\\000\\000\\000\\000\\000\\203\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\203\\000\\203\\000\\000\\000\\203\\000\\000\\000\\000\\000\\203\\000\\\n\\203\\000\\203\\000\\000\\000\\203\\000\\203\\000\\203\\000\\203\\000\\203\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\203\\000\\000\\000\\203\\000\\\n\\203\\000\\203\\000\\203\\000\\203\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\203\\000\\203\\000\\183\\000\\203\\000\\203\\000\\203\\000\\000\\000\\000\\000\\\n\\000\\000\\203\\000\\000\\000\\000\\000\\203\\000\\000\\000\\203\\000\\000\\000\\\n\\000\\000\\203\\000\\000\\000\\000\\000\\203\\000\\000\\000\\000\\000\\000\\000\\\n\\203\\000\\179\\000\\179\\000\\179\\000\\179\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\179\\000\\179\\000\\000\\000\\000\\000\\179\\000\\179\\000\\\n\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179
\\000\\179\\000\\000\\000\\\n\\000\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\179\\000\\179\\000\\000\\000\\\n\\000\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\\n\\000\\000\\000\\000\\179\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\179\\000\\179\\000\\000\\000\\\n\\179\\000\\000\\000\\000\\000\\179\\000\\179\\000\\179\\000\\000\\000\\179\\000\\\n\\179\\000\\179\\000\\179\\000\\179\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\179\\000\\000\\000\\179\\000\\179\\000\\179\\000\\179\\000\\179\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\179\\000\\179\\000\\195\\000\\179\\000\\\n\\179\\000\\179\\000\\000\\000\\000\\000\\000\\000\\179\\000\\000\\000\\000\\000\\\n\\179\\000\\000\\000\\179\\000\\000\\000\\000\\000\\179\\000\\000\\000\\000\\000\\\n\\179\\000\\000\\000\\000\\000\\000\\000\\179\\000\\000\\000\\182\\000\\182\\000\\\n\\182\\000\\182\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\182\\000\\\n\\182\\000\\000\\000\\000\\000\\182\\000\\182\\000\\182\\000\\182\\000\\182\\000\\\n\\182\\000\\182\\000\\182\\000\\182\\000\\000\\000\\000\\000\\182\\000\\182\\000\\\n\\182\\000\\182\\000\\182\\000\\182\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\182\\000\\182\\000\\000\\000\\000\\000\\182\\000\\182\\000\\\n\\182\\000\\182\\000\\182\\000\\182\\000\\182\\000\\000\\000\\000\\000\\182\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\182\\000\\182\\000\\000\\000\\182\\000\\000\\000\\000\\000\\\n\\182\\000\\182\\000\\182\\000\\000\\000\\182\\000\\182\\000\\182\\000\\182\\000\\\n\\182\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\182\\000\\000\\000\\\n\\182\\000\\182\\000\\182\\000\\182\\000\\182\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\182\\000\\182\\000\\201\\00
0\\182\\000\\182\\000\\182\\000\\000\\000\\\n\\000\\000\\000\\000\\182\\000\\000\\000\\000\\000\\182\\000\\000\\000\\182\\000\\\n\\000\\000\\000\\000\\182\\000\\000\\000\\000\\000\\182\\000\\000\\000\\000\\000\\\n\\000\\000\\182\\000\\183\\000\\183\\000\\183\\000\\183\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\183\\000\\183\\000\\000\\000\\000\\000\\183\\000\\\n\\183\\000\\183\\000\\183\\000\\183\\000\\183\\000\\183\\000\\183\\000\\183\\000\\\n\\000\\000\\000\\000\\183\\000\\183\\000\\183\\000\\183\\000\\183\\000\\183\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\183\\000\\183\\000\\\n\\000\\000\\000\\000\\183\\000\\183\\000\\183\\000\\183\\000\\183\\000\\183\\000\\\n\\183\\000\\000\\000\\000\\000\\183\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\183\\000\\183\\000\\\n\\000\\000\\183\\000\\000\\000\\000\\000\\183\\000\\183\\000\\183\\000\\000\\000\\\n\\183\\000\\183\\000\\183\\000\\183\\000\\183\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\183\\000\\000\\000\\183\\000\\183\\000\\183\\000\\183\\000\\\n\\183\\000\\000\\000\\000\\000\\000\\000\\000\\000\\183\\000\\183\\000\\202\\000\\\n\\183\\000\\183\\000\\183\\000\\000\\000\\000\\000\\000\\000\\183\\000\\000\\000\\\n\\000\\000\\183\\000\\000\\000\\183\\000\\000\\000\\000\\000\\183\\000\\000\\000\\\n\\000\\000\\183\\000\\000\\000\\000\\000\\000\\000\\183\\000\\195\\000\\195\\000\\\n\\195\\000\\195\\000\\000\\000\\000\\000\\000\\000\\000\\000\\195\\000\\195\\000\\\n\\195\\000\\000\\000\\000\\000\\195\\000\\195\\000\\195\\000\\195\\000\\195\\000\\\n\\195\\000\\195\\000\\195\\000\\195\\000\\000\\000\\000\\000\\195\\000\\195\\000\\\n\\195\\000\\195\\000\\195\\000\\195\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\195\\000\\195\\000\\000\\000\\000\\000\\195\\000\\195\\000\\\n\\195\\000\\195\\000\\195\\000\\000\\000\\000\\000\\000\\000\\000\\000\\195\\000\\\n\\000\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\195\\000\\195\\000\\000\\000\\195\\000\\000\\000\\000\\000\\\n\\195\\000\\195\\000\\195\\000\\000\\000\\195\\000\\195\\000\\195\\000\\195\\000\\\n\\195\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\195\\000\\000\\000\\\n\\195\\000\\000\\000\\195\\000\\195\\000\\195\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\195\\000\\195\\000\\198\\000\\195\\000\\195\\000\\195\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\195\\000\\000\\000\\195\\000\\\n\\000\\000\\000\\000\\195\\000\\000\\000\\000\\000\\195\\000\\000\\000\\000\\000\\\n\\000\\000\\195\\000\\000\\000\\201\\000\\201\\000\\201\\000\\201\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\201\\000\\201\\000\\201\\000\\000\\000\\000\\000\\\n\\201\\000\\201\\000\\201\\000\\201\\000\\201\\000\\201\\000\\201\\000\\201\\000\\\n\\201\\000\\000\\000\\000\\000\\201\\000\\201\\000\\201\\000\\201\\000\\201\\000\\\n\\201\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\201\\000\\\n\\201\\000\\000\\000\\000\\000\\201\\000\\201\\000\\201\\000\\201\\000\\201\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\201\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\201\\000\\\n\\201\\000\\000\\000\\201\\000\\000\\000\\000\\000\\201\\000\\201\\000\\201\\000\\\n\\000\\000\\201\\000\\201\\000\\201\\000\\201\\000\\201\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\201\\000\\000\\000\\201\\000\\000\\000\\201\\000\\\n\\201\\000\\201\\000\\000\\000\\000\\000\\000\\000\\000\\000\\201\\000\\201\\000\\\n\\199\\000\\201\\000\\201\\000\\201\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\201\\000\\000\\000\\201\\000\\000\\000\\000\\000\\201\\000\\\n\\000\\000\\000\\000\\201\\000\\000\\000\\000\\000\\000\\000\\201\\000\\202\\000\\\n\\202\\000\\202\\000\\202\\000\\000\\000\\000\\000\\000\\000\\000\\000\\202\\000\\\n\\202\\
000\\202\\000\\000\\000\\000\\000\\202\\000\\202\\000\\202\\000\\202\\000\\\n\\202\\000\\202\\000\\202\\000\\202\\000\\202\\000\\000\\000\\000\\000\\202\\000\\\n\\202\\000\\202\\000\\202\\000\\202\\000\\202\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\202\\000\\202\\000\\000\\000\\000\\000\\202\\000\\\n\\202\\000\\202\\000\\202\\000\\202\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\202\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\202\\000\\202\\000\\000\\000\\202\\000\\000\\000\\\n\\000\\000\\202\\000\\202\\000\\202\\000\\000\\000\\202\\000\\202\\000\\202\\000\\\n\\202\\000\\202\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\202\\000\\\n\\000\\000\\202\\000\\000\\000\\202\\000\\202\\000\\202\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\202\\000\\202\\000\\200\\000\\202\\000\\202\\000\\202\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\202\\000\\000\\000\\\n\\202\\000\\000\\000\\000\\000\\202\\000\\000\\000\\000\\000\\202\\000\\000\\000\\\n\\000\\000\\000\\000\\202\\000\\198\\000\\198\\000\\198\\000\\198\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\198\\000\\198\\000\\198\\000\\000\\000\\000\\000\\\n\\198\\000\\198\\000\\198\\000\\198\\000\\198\\000\\198\\000\\198\\000\\198\\000\\\n\\198\\000\\000\\000\\000\\000\\198\\000\\198\\000\\198\\000\\198\\000\\198\\000\\\n\\198\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\\n\\198\\000\\000\\000\\000\\000\\198\\000\\198\\000\\198\\000\\198\\000\\198\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\\n\\198\\000\\000\\000\\198\\000\\000\\000\\000\\000\\198\\000\\198\\000\\198\\000\\\n\\000\\000\\198\\000\\198\\000\\198\\000\\198\\000\\198\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\198\\000\\000\\000\\198\\000\\000\\000\\198
\\000\\\n\\198\\000\\198\\000\\000\\000\\000\\000\\000\\000\\000\\000\\198\\000\\198\\000\\\n\\153\\000\\198\\000\\198\\000\\198\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\198\\000\\000\\000\\198\\000\\000\\000\\000\\000\\198\\000\\\n\\000\\000\\000\\000\\198\\000\\000\\000\\000\\000\\000\\000\\198\\000\\000\\000\\\n\\199\\000\\199\\000\\199\\000\\199\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\199\\000\\199\\000\\199\\000\\000\\000\\000\\000\\199\\000\\199\\000\\199\\000\\\n\\199\\000\\199\\000\\199\\000\\199\\000\\199\\000\\199\\000\\000\\000\\000\\000\\\n\\199\\000\\199\\000\\199\\000\\199\\000\\199\\000\\199\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\199\\000\\199\\000\\000\\000\\000\\000\\\n\\199\\000\\199\\000\\199\\000\\199\\000\\199\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\199\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\199\\000\\199\\000\\000\\000\\199\\000\\\n\\000\\000\\000\\000\\199\\000\\199\\000\\199\\000\\000\\000\\199\\000\\199\\000\\\n\\199\\000\\199\\000\\199\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\199\\000\\000\\000\\199\\000\\000\\000\\199\\000\\199\\000\\199\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\199\\000\\199\\000\\192\\000\\199\\000\\199\\000\\\n\\199\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\199\\000\\\n\\000\\000\\199\\000\\000\\000\\000\\000\\199\\000\\000\\000\\000\\000\\199\\000\\\n\\000\\000\\000\\000\\000\\000\\199\\000\\200\\000\\200\\000\\200\\000\\200\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\200\\000\\200\\000\\200\\000\\000\\000\\\n\\000\\000\\200\\000\\200\\000\\200\\000\\200\\000\\200\\000\\200\\000\\200\\000\\\n\\200\\000\\200\\000\\000\\000\\000\\000\\200\\000\\200\\000\\200\\000\\200\\000\\\n\\200\\000\\200\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\200\\000\\200\\000\\000\\000\\000\\000\\200\\000\\200\\00
0\\200\\000\\200\\000\\\n\\200\\000\\000\\000\\000\\000\\000\\000\\000\\000\\200\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\200\\000\\200\\000\\000\\000\\200\\000\\000\\000\\000\\000\\200\\000\\200\\000\\\n\\200\\000\\000\\000\\200\\000\\200\\000\\200\\000\\200\\000\\200\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\200\\000\\000\\000\\200\\000\\000\\000\\\n\\200\\000\\200\\000\\200\\000\\000\\000\\000\\000\\000\\000\\000\\000\\200\\000\\\n\\200\\000\\205\\000\\200\\000\\200\\000\\200\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\200\\000\\000\\000\\200\\000\\000\\000\\000\\000\\\n\\200\\000\\000\\000\\000\\000\\200\\000\\000\\000\\000\\000\\000\\000\\200\\000\\\n\\153\\000\\153\\000\\153\\000\\153\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\153\\000\\153\\000\\153\\000\\000\\000\\000\\000\\153\\000\\153\\000\\153\\000\\\n\\153\\000\\153\\000\\153\\000\\153\\000\\153\\000\\153\\000\\000\\000\\000\\000\\\n\\153\\000\\153\\000\\153\\000\\153\\000\\153\\000\\153\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\153\\000\\153\\000\\000\\000\\000\\000\\\n\\153\\000\\153\\000\\153\\000\\153\\000\\153\\000\\153\\000\\153\\000\\000\\000\\\n\\000\\000\\153\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\153\\000\\153\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\153\\000\\153\\000\\153\\000\\000\\000\\153\\000\\000\\000\\\n\\000\\000\\153\\000\\153\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\153\\000\\000\\000\\153\\000\\000\\000\\000\\000\\000\\000\\153\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\153\\000\\153\\000\\207\\000\\153\\000\\153\\000\\\n\\153\\000\\000\\000\\000\\000\\000\\000\\153\\000\\000\\000\\000\\000\\153\\000\\\n\\000\\000\\153\\000\\000\\000\\000\\000\\153\\000\\000\\000\\000\\000\\153\\000\\\n\\000\\000\\000\\000\\000\\000\\153\\000\\0
00\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\000\\000\\000\\000\\000\\000\\000\\000\\192\\000\\192\\000\\192\\000\\\n\\000\\000\\000\\000\\192\\000\\192\\000\\000\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\192\\000\\192\\000\\000\\000\\000\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\192\\000\\192\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\192\\000\\192\\000\\000\\000\\000\\000\\192\\000\\192\\000\\192\\000\\\n\\192\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\192\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\192\\000\\192\\000\\000\\000\\192\\000\\000\\000\\000\\000\\192\\000\\\n\\192\\000\\192\\000\\000\\000\\192\\000\\000\\000\\000\\000\\192\\000\\192\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\192\\000\\000\\000\\192\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\192\\000\\192\\000\\193\\000\\192\\000\\192\\000\\192\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\192\\000\\000\\000\\192\\000\\000\\000\\\n\\000\\000\\192\\000\\000\\000\\000\\000\\192\\000\\000\\000\\000\\000\\000\\000\\\n\\192\\000\\205\\000\\205\\000\\205\\000\\205\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\205\\000\\205\\000\\205\\000\\000\\000\\000\\000\\205\\000\\205\\000\\\n\\000\\000\\205\\000\\205\\000\\205\\000\\205\\000\\205\\000\\205\\000\\000\\000\\\n\\000\\000\\205\\000\\205\\000\\205\\000\\205\\000\\205\\000\\205\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\205\\000\\205\\000\\000\\000\\\n\\000\\000\\205\\000\\205\\000\\205\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\205\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\205\\000\\205\\000\\000\\000\\\n\\205\\000\\000\\000\\000\\000\\000\\000\\205\\000\\205\\000\\000\\000\\205\\000\\\n\\000\\000\\000\\000\\205\\
000\\205\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\205\\000\\000\\000\\205\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\205\\000\\205\\000\\194\\000\\205\\000\\\n\\205\\000\\205\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\205\\000\\000\\000\\205\\000\\000\\000\\000\\000\\205\\000\\000\\000\\000\\000\\\n\\205\\000\\000\\000\\000\\000\\000\\000\\205\\000\\207\\000\\207\\000\\207\\000\\\n\\207\\000\\000\\000\\000\\000\\000\\000\\000\\000\\207\\000\\207\\000\\207\\000\\\n\\000\\000\\000\\000\\207\\000\\207\\000\\000\\000\\207\\000\\207\\000\\207\\000\\\n\\207\\000\\207\\000\\207\\000\\000\\000\\000\\000\\207\\000\\207\\000\\207\\000\\\n\\207\\000\\207\\000\\207\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\207\\000\\207\\000\\000\\000\\000\\000\\207\\000\\207\\000\\207\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\207\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\207\\000\\207\\000\\000\\000\\207\\000\\000\\000\\000\\000\\000\\000\\\n\\207\\000\\207\\000\\000\\000\\207\\000\\000\\000\\000\\000\\207\\000\\207\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\207\\000\\000\\000\\207\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\207\\000\\207\\000\\206\\000\\207\\000\\207\\000\\207\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\207\\000\\000\\000\\207\\000\\000\\000\\\n\\000\\000\\207\\000\\000\\000\\000\\000\\207\\000\\000\\000\\000\\000\\000\\000\\\n\\207\\000\\000\\000\\193\\000\\193\\000\\193\\000\\193\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\193\\000\\193\\000\\193\\000\\000\\000\\000\\000\\193\\000\\\n\\193\\000\\000\\000\\193\\000\\193\\000\\193\\000\\193\\000\\193\\000\\193\\000\\\n\\000\\000\\000\\000\\193\\000\\193\\000\\193\\000\\193\\000\\193\\000\\193\\000\\\n\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\193\\000\\193\\000\\\n\\000\\000\\000\\000\\193\\000\\193\\000\\193\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\193\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\193\\000\\193\\000\\\n\\000\\000\\193\\000\\000\\000\\000\\000\\000\\000\\193\\000\\193\\000\\000\\000\\\n\\193\\000\\000\\000\\000\\000\\193\\000\\193\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\193\\000\\000\\000\\193\\000\\000\\000\\000\\000\\211\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\193\\000\\193\\000\\000\\000\\\n\\193\\000\\193\\000\\193\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\193\\000\\000\\000\\193\\000\\000\\000\\000\\000\\193\\000\\000\\000\\\n\\000\\000\\193\\000\\000\\000\\000\\000\\000\\000\\193\\000\\194\\000\\194\\000\\\n\\194\\000\\194\\000\\000\\000\\000\\000\\000\\000\\000\\000\\194\\000\\194\\000\\\n\\194\\000\\000\\000\\000\\000\\194\\000\\194\\000\\000\\000\\194\\000\\194\\000\\\n\\194\\000\\194\\000\\194\\000\\194\\000\\000\\000\\000\\000\\194\\000\\194\\000\\\n\\194\\000\\194\\000\\194\\000\\194\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\194\\000\\194\\000\\000\\000\\000\\000\\194\\000\\194\\000\\\n\\194\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\194\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\194\\000\\194\\000\\000\\000\\194\\000\\000\\000\\000\\000\\\n\\000\\000\\194\\000\\194\\000\\000\\000\\194\\000\\000\\000\\000\\000\\194\\000\\\n\\194\\000\\000\\000\\000\\000\\000\\000\\210\\000\\000\\000\\194\\000\\000\\000\\\n\\194\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\194\\000\\194\\000\\000\\000\\194\\000\\194\\000\\194\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\194\\000\\000\\000\\194\\00
0\\\n\\000\\000\\000\\000\\194\\000\\000\\000\\000\\000\\194\\000\\000\\000\\000\\000\\\n\\000\\000\\194\\000\\206\\000\\206\\000\\206\\000\\206\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\206\\000\\206\\000\\206\\000\\000\\000\\000\\000\\206\\000\\\n\\206\\000\\000\\000\\206\\000\\206\\000\\206\\000\\206\\000\\206\\000\\206\\000\\\n\\000\\000\\000\\000\\206\\000\\206\\000\\206\\000\\206\\000\\206\\000\\206\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\206\\000\\206\\000\\\n\\000\\000\\000\\000\\206\\000\\206\\000\\206\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\206\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\206\\000\\206\\000\\\n\\000\\000\\206\\000\\000\\000\\000\\000\\209\\000\\206\\000\\206\\000\\000\\000\\\n\\206\\000\\000\\000\\000\\000\\206\\000\\206\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\206\\000\\000\\000\\206\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\206\\000\\206\\000\\000\\000\\\n\\206\\000\\206\\000\\206\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\206\\000\\000\\000\\206\\000\\000\\000\\000\\000\\206\\000\\211\\000\\\n\\000\\000\\206\\000\\211\\000\\000\\000\\000\\000\\206\\000\\000\\000\\211\\000\\\n\\211\\000\\211\\000\\000\\000\\000\\000\\211\\000\\211\\000\\000\\000\\211\\000\\\n\\211\\000\\211\\000\\211\\000\\211\\000\\211\\000\\000\\000\\000\\000\\211\\000\\\n\\211\\000\\211\\000\\000\\000\\211\\000\\211\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\211\\000\\000\\000\\000\\000\\211\\000\\\n\\211\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\211\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\211\\000\\073\\001\\000\\000\\211\\000\\000\\000\\\n\\000\\000\\000\\000\\211\\000\\211\\000\\000\\000\\211\\000\\0
00\\000\\000\\000\\\n\\211\\000\\211\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\211\\000\\\n\\000\\000\\211\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\211\\000\\211\\000\\000\\000\\211\\000\\211\\000\\211\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\211\\000\\000\\000\\\n\\211\\000\\000\\000\\000\\000\\211\\000\\210\\000\\000\\000\\211\\000\\210\\000\\\n\\000\\000\\000\\000\\211\\000\\000\\000\\210\\000\\210\\000\\210\\000\\000\\000\\\n\\000\\000\\210\\000\\210\\000\\000\\000\\210\\000\\210\\000\\210\\000\\210\\000\\\n\\210\\000\\210\\000\\000\\000\\000\\000\\210\\000\\210\\000\\210\\000\\000\\000\\\n\\210\\000\\210\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\210\\000\\000\\000\\000\\000\\210\\000\\210\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\210\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\208\\000\\000\\000\\000\\000\\000\\000\\\n\\210\\000\\000\\000\\000\\000\\210\\000\\000\\000\\000\\000\\000\\000\\210\\000\\\n\\210\\000\\000\\000\\210\\000\\000\\000\\000\\000\\210\\000\\210\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\210\\000\\000\\000\\210\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\210\\000\\\n\\210\\000\\000\\000\\210\\000\\210\\000\\210\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\210\\000\\209\\000\\210\\000\\000\\000\\209\\000\\\n\\210\\000\\000\\000\\000\\000\\210\\000\\209\\000\\000\\000\\209\\000\\210\\000\\\n\\000\\000\\209\\000\\209\\000\\000\\000\\209\\000\\209\\000\\209\\000\\209\\000\\\n\\209\\000\\209\\000\\000\\000\\000\\000\\209\\000\\209\\000\\209\\000\\000\\000\\\n\\209\\000\\209\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\209\\000\\000\\000\\000\\000\\209\\000\\209\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\
000\\209\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\072\\001\\000\\000\\000\\000\\000\\000\\\n\\209\\000\\000\\000\\000\\000\\209\\000\\000\\000\\000\\000\\000\\000\\209\\000\\\n\\209\\000\\000\\000\\209\\000\\000\\000\\000\\000\\209\\000\\209\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\209\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\209\\000\\\n\\209\\000\\000\\000\\209\\000\\209\\000\\209\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\209\\000\\073\\001\\209\\000\\000\\000\\073\\001\\\n\\209\\000\\000\\000\\000\\000\\209\\000\\073\\001\\000\\000\\073\\001\\209\\000\\\n\\000\\000\\073\\001\\073\\001\\000\\000\\073\\001\\073\\001\\073\\001\\073\\001\\\n\\073\\001\\073\\001\\000\\000\\000\\000\\073\\001\\073\\001\\073\\001\\000\\000\\\n\\073\\001\\073\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\073\\001\\000\\000\\000\\000\\073\\001\\073\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\073\\001\\000\\000\\212\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\073\\001\\000\\000\\000\\000\\073\\001\\000\\000\\000\\000\\000\\000\\073\\001\\\n\\073\\001\\000\\000\\073\\001\\000\\000\\000\\000\\073\\001\\073\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\073\\001\\214\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\073\\001\\\n\\073\\001\\000\\000\\073\\001\\073\\001\\073\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\073\\001\\208\\000\\073\\001\\000\\000\\208\\000\\\n\\073\\001\\000\\000\\000\\000\\073\\001\\208\\000\\000\\000\\208\\000\\073\\001\\\n\\000\\000\\208\\000\\208\\000\\000\\000\\208\\000\\208\\000\\208\\000\\208\\000\\\n\\208\\000\\208\\000\\000\\000\\000\\000\\208\\000\\208\\000\\208\\000\\000\\000\\\n\\208\\000\\208\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\208\\000\\000\\000\\000\\000\\208\\000\\208\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\224\\000\\000\\000\\000\\000\\208\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\208\\000\\000\\000\\000\\000\\208\\000\\000\\000\\000\\000\\000\\000\\208\\000\\\n\\208\\000\\000\\000\\208\\000\\000\\000\\000\\000\\208\\000\\208\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\208\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\208\\000\\\n\\208\\000\\000\\000\\208\\000\\208\\000\\208\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\208\\000\\072\\001\\208\\000\\000\\000\\072\\001\\\n\\208\\000\\000\\000\\000\\000\\208\\000\\072\\001\\000\\000\\072\\001\\208\\000\\\n\\000\\000\\072\\001\\072\\001\\000\\000\\072\\001\\072\\001\\072\\001\\072\\001\\\n\\072\\001\\072\\001\\000\\000\\000\\000\\072\\001\\072\\001\\072\\001\\000\\000\\\n\\072\\001\\072\\001\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\072\\001\\214\\002\\000\\000\\072\\001\\072\\001\\000\\000\\214\\002\\\n\\000\\000\\000\\000\\215\\000\\000\\000\\000\\000\\072\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\214\\002\\214\\002\\\n\\072\\001\\000\\000\\000\\000\\072\\001\\000\\000\\000\\000\\000\\000\\072\\001\\\n\\072\\001\\000\\000\\072\\001\\214\\002\\000\\000\\072\\001\\072\\001\\000\\000\\\n\\099\\000\\000\\000\\000\\000\\000\\000\\072\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\072\\001\\\n\\072\\001\\214\\002\\072\\001\\072\\001\\072\\001\\214\\002\\214\\002\\212\\000\\\n\\000\\000\\000\\000\\212\\000\\072\\001\\214\\002\\072\\001\\000\\000\\212\\000\\\n\\072\\001\\212\\000\\214\\002\\072\\001\\212\\000\\212\\000\\000\\000\\072\\001\\\n\\212\\000\\000
\\000\\212\\000\\212\\000\\212\\000\\000\\000\\214\\002\\212\\000\\\n\\212\\000\\212\\000\\214\\002\\212\\000\\212\\000\\214\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\212\\000\\000\\000\\214\\002\\212\\000\\\n\\212\\000\\214\\002\\214\\002\\000\\000\\000\\000\\188\\000\\000\\000\\000\\000\\\n\\212\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\\n\\000\\000\\214\\002\\214\\002\\212\\000\\000\\000\\000\\000\\212\\000\\000\\000\\\n\\000\\000\\000\\000\\212\\000\\212\\000\\000\\000\\212\\000\\214\\002\\000\\000\\\n\\212\\000\\212\\000\\000\\000\\212\\002\\000\\000\\000\\000\\000\\000\\212\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\214\\002\\212\\000\\212\\000\\214\\002\\212\\000\\212\\000\\212\\000\\\n\\214\\002\\214\\002\\224\\000\\000\\000\\000\\000\\224\\000\\212\\000\\214\\002\\\n\\212\\000\\000\\000\\224\\000\\212\\000\\224\\000\\214\\002\\212\\000\\224\\000\\\n\\224\\000\\000\\000\\212\\000\\224\\000\\000\\000\\224\\000\\224\\000\\224\\000\\\n\\000\\000\\214\\002\\224\\000\\224\\000\\224\\000\\214\\002\\224\\000\\224\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\224\\000\\\n\\000\\000\\214\\002\\224\\000\\224\\000\\214\\002\\000\\000\\000\\000\\000\\000\\\n\\217\\000\\000\\000\\000\\000\\224\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\224\\000\\000\\000\\\n\\000\\000\\224\\000\\000\\000\\000\\000\\000\\000\\224\\000\\224\\000\\000\\000\\\n\\224\\000\\000\\000\\000\\000\\224\\000\\224\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\224\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\224\\000\\224\\000\\000\\000\\\n\\224\\000\\224\\000\\224\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\224\\000\\215\\000\\224\\000\\000\\000\\215\\000\\224\\000\\000\\000\\\
n\\000\\000\\224\\000\\215\\000\\000\\000\\215\\000\\224\\000\\000\\000\\215\\000\\\n\\215\\000\\000\\000\\000\\000\\215\\000\\000\\000\\215\\000\\215\\000\\215\\000\\\n\\000\\000\\000\\000\\215\\000\\215\\000\\215\\000\\000\\000\\215\\000\\215\\000\\\n\\099\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\215\\000\\\n\\000\\000\\000\\000\\215\\000\\215\\000\\000\\000\\099\\000\\000\\000\\000\\000\\\n\\216\\000\\000\\000\\000\\000\\215\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\099\\000\\000\\000\\099\\000\\099\\000\\215\\000\\000\\000\\\n\\000\\000\\215\\000\\000\\000\\000\\000\\000\\000\\215\\000\\215\\000\\000\\000\\\n\\215\\000\\099\\000\\000\\000\\215\\000\\215\\000\\000\\000\\100\\000\\000\\000\\\n\\000\\000\\000\\000\\215\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\099\\000\\215\\000\\215\\000\\099\\000\\\n\\215\\000\\215\\000\\215\\000\\099\\000\\099\\000\\188\\000\\000\\000\\000\\000\\\n\\188\\000\\215\\000\\099\\000\\215\\000\\000\\000\\188\\000\\215\\000\\188\\000\\\n\\099\\000\\215\\000\\188\\000\\188\\000\\000\\000\\215\\000\\188\\000\\000\\000\\\n\\188\\000\\188\\000\\188\\000\\000\\000\\099\\000\\188\\000\\188\\000\\188\\000\\\n\\099\\000\\188\\000\\188\\000\\212\\002\\000\\000\\000\\000\\212\\002\\000\\000\\\n\\000\\000\\000\\000\\188\\000\\000\\000\\099\\000\\188\\000\\188\\000\\099\\000\\\n\\212\\002\\000\\000\\000\\000\\220\\000\\000\\000\\000\\000\\188\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\212\\002\\000\\000\\212\\002\\\n\\212\\002\\188\\000\\000\\000\\000\\000\\188\\000\\000\\000\\000\\000\\000\\000\\\n\\188\\000\\188\\000\\000\\000\\188\\000\\212\\002\\000\\000\\188\\000\\188\\000\\\n\\000\\000\\165\\001\\000\\000\\000\\000\\000\\000\\188\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\212\\002\\\n\\188\\000\\188\\000\\212\\002\\188\\000\\188\\000\\188\\000\\000\\
000\\212\\002\\\n\\217\\000\\000\\000\\000\\000\\217\\000\\188\\000\\212\\002\\188\\000\\000\\000\\\n\\217\\000\\188\\000\\217\\000\\212\\002\\188\\000\\217\\000\\217\\000\\000\\000\\\n\\188\\000\\217\\000\\000\\000\\217\\000\\217\\000\\217\\000\\000\\000\\212\\002\\\n\\217\\000\\217\\000\\217\\000\\212\\002\\217\\000\\217\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\217\\000\\000\\000\\212\\002\\\n\\217\\000\\217\\000\\212\\002\\000\\000\\000\\000\\000\\000\\218\\000\\000\\000\\\n\\000\\000\\217\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\217\\000\\000\\000\\000\\000\\217\\000\\\n\\000\\000\\000\\000\\000\\000\\217\\000\\217\\000\\000\\000\\217\\000\\000\\000\\\n\\000\\000\\217\\000\\217\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\217\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\217\\000\\217\\000\\000\\000\\217\\000\\217\\000\\\n\\217\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\217\\000\\\n\\216\\000\\217\\000\\000\\000\\216\\000\\217\\000\\000\\000\\000\\000\\217\\000\\\n\\216\\000\\000\\000\\216\\000\\217\\000\\000\\000\\216\\000\\216\\000\\000\\000\\\n\\000\\000\\216\\000\\000\\000\\216\\000\\216\\000\\216\\000\\000\\000\\000\\000\\\n\\216\\000\\216\\000\\216\\000\\000\\000\\216\\000\\216\\000\\100\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\216\\000\\000\\000\\000\\000\\\n\\216\\000\\216\\000\\000\\000\\100\\000\\000\\000\\000\\000\\219\\000\\000\\000\\\n\\000\\000\\216\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\100\\000\\000\\000\\100\\000\\100\\000\\216\\000\\000\\000\\000\\000\\216\\000\\\n\\000\\000\\000\\000\\000\\000\\216\\000\\216\\000\\000\\000\\216\\000\\100\\000\\\n\\000\\000\\216\\000\\216\\000\\000\\000\\212\\002\\000\\000\\000\\000\\000\\000\\\n\\216\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\100\\000\\216\\000\\216\\000\\100\\000\\216\\000\\216\\000\\\n\\216\\000\\100\\000\\100\\000\\220\\000\\000\\000\\000\\000\\220\\000\\216\\000\\\n\\100\\000\\216\\000\\000\\000\\220\\000\\216\\000\\220\\000\\100\\000\\216\\000\\\n\\220\\000\\220\\000\\000\\000\\216\\000\\220\\000\\000\\000\\220\\000\\220\\000\\\n\\220\\000\\000\\000\\100\\000\\220\\000\\220\\000\\220\\000\\100\\000\\220\\000\\\n\\220\\000\\165\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\220\\000\\000\\000\\100\\000\\220\\000\\220\\000\\100\\000\\165\\001\\000\\000\\\n\\000\\000\\223\\000\\000\\000\\000\\000\\220\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\165\\001\\000\\000\\165\\001\\165\\001\\220\\000\\\n\\000\\000\\000\\000\\220\\000\\000\\000\\000\\000\\000\\000\\220\\000\\220\\000\\\n\\000\\000\\220\\000\\165\\001\\000\\000\\220\\000\\220\\000\\000\\000\\037\\000\\\n\\000\\000\\000\\000\\000\\000\\220\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\165\\001\\220\\000\\220\\000\\\n\\165\\001\\220\\000\\220\\000\\220\\000\\165\\001\\165\\001\\218\\000\\000\\000\\\n\\000\\000\\218\\000\\220\\000\\165\\001\\220\\000\\000\\000\\218\\000\\220\\000\\\n\\218\\000\\165\\001\\220\\000\\218\\000\\218\\000\\000\\000\\220\\000\\218\\000\\\n\\000\\000\\218\\000\\218\\000\\218\\000\\000\\000\\165\\001\\218\\000\\218\\000\\\n\\218\\000\\165\\001\\218\\000\\218\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\218\\000\\000\\000\\165\\001\\218\\000\\218\\000\\\n\\165\\001\\000\\000\\000\\000\\000\\000\\221\\000\\000\\000\\000\\000\\218\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\218\\000\\000\\000\\000\\000\\218\\000\\000\\000\\000\\000\\\n\\000\\000\\218\\000\\218\\000\\000\\000\\218\\000\\000\\000\\000\\000\\218\\000\\\n\\218\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\218\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\218\\000\\218\\000\\000\\000\\218\\000\\218\\000\\218\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\218\\000\\219\\000\\218\\000\\\n\\000\\000\\219\\000\\218\\000\\000\\000\\000\\000\\218\\000\\219\\000\\000\\000\\\n\\219\\000\\218\\000\\000\\000\\219\\000\\219\\000\\000\\000\\000\\000\\219\\000\\\n\\000\\000\\219\\000\\219\\000\\219\\000\\000\\000\\000\\000\\219\\000\\219\\000\\\n\\219\\000\\000\\000\\219\\000\\219\\000\\212\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\219\\000\\000\\000\\000\\000\\219\\000\\219\\000\\\n\\000\\000\\212\\002\\000\\000\\000\\000\\222\\000\\000\\000\\000\\000\\219\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\212\\002\\000\\000\\\n\\212\\002\\212\\002\\219\\000\\000\\000\\000\\000\\219\\000\\000\\000\\000\\000\\\n\\000\\000\\219\\000\\219\\000\\000\\000\\219\\000\\212\\002\\000\\000\\219\\000\\\n\\219\\000\\000\\000\\040\\000\\000\\000\\000\\000\\000\\000\\219\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\212\\002\\219\\000\\219\\000\\212\\002\\219\\000\\219\\000\\219\\000\\000\\000\\\n\\212\\002\\223\\000\\000\\000\\000\\000\\223\\000\\219\\000\\212\\002\\219\\000\\\n\\000\\000\\223\\000\\219\\000\\223\\000\\212\\002\\219\\000\\223\\000\\223\\000\\\n\\000\\000\\219\\000\\223\\000\\000\\000\\223\\000\\223\\000\\223\\000\\000\\000\\\n\\212\\002\\223\\000\\223\\000\\223\\000\\212\\002\\223\\000\\223\\000\\037\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\223\\000\\000\\000\\\n\\212\\002\\223\\000\\223\\000\\212\\002\\037\\000\\000\\000\\000\\000\\152\\000\\\n\\000\\000\\000\\000\\223\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\037\\000\\000\\000\\037\\000\\037\\000\\223\\000\\000\\000\\000\\000\\\n\\223\\000\\000\\00
0\\000\\000\\000\\000\\223\\000\\223\\000\\000\\000\\223\\000\\\n\\037\\000\\000\\000\\223\\000\\223\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\223\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\037\\000\\223\\000\\223\\000\\037\\000\\223\\000\\\n\\223\\000\\223\\000\\000\\000\\037\\000\\221\\000\\000\\000\\000\\000\\221\\000\\\n\\223\\000\\037\\000\\223\\000\\000\\000\\221\\000\\223\\000\\221\\000\\037\\000\\\n\\223\\000\\221\\000\\221\\000\\000\\000\\223\\000\\221\\000\\000\\000\\221\\000\\\n\\221\\000\\221\\000\\000\\000\\037\\000\\221\\000\\221\\000\\221\\000\\037\\000\\\n\\221\\000\\221\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\221\\000\\000\\000\\037\\000\\221\\000\\221\\000\\037\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\221\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\002\\000\\000\\000\\000\\\n\\221\\000\\000\\000\\000\\000\\221\\000\\000\\000\\000\\000\\000\\000\\221\\000\\\n\\221\\000\\000\\000\\221\\000\\000\\000\\000\\000\\221\\000\\221\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\221\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\221\\000\\\n\\221\\000\\000\\000\\221\\000\\221\\000\\221\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\221\\000\\222\\000\\221\\000\\000\\000\\222\\000\\\n\\221\\000\\000\\000\\000\\000\\221\\000\\222\\000\\000\\000\\222\\000\\221\\000\\\n\\000\\000\\222\\000\\222\\000\\000\\000\\000\\000\\222\\000\\000\\000\\222\\000\\\n\\222\\000\\222\\000\\000\\000\\000\\000\\222\\000\\222\\000\\222\\000\\000\\000\\\n\\222\\000\\222\\000\\040\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\222\\000\\000\\000\\000\\000\\222\\000\\222\\000\\000\\000\\040\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\222\\000\\000\\000\\000\\000\\\n\\1
89\\000\\000\\000\\000\\000\\000\\000\\040\\000\\000\\000\\040\\000\\040\\000\\\n\\222\\000\\000\\000\\000\\000\\222\\000\\000\\000\\000\\000\\000\\000\\222\\000\\\n\\222\\000\\000\\000\\222\\000\\040\\000\\000\\000\\222\\000\\222\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\222\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\040\\000\\222\\000\\\n\\222\\000\\040\\000\\222\\000\\222\\000\\222\\000\\000\\000\\040\\000\\152\\000\\\n\\000\\000\\000\\000\\152\\000\\222\\000\\040\\000\\222\\000\\000\\000\\152\\000\\\n\\222\\000\\152\\000\\040\\000\\222\\000\\152\\000\\152\\000\\000\\000\\222\\000\\\n\\152\\000\\000\\000\\152\\000\\152\\000\\152\\000\\000\\000\\040\\000\\152\\000\\\n\\152\\000\\152\\000\\040\\000\\152\\000\\152\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\152\\000\\000\\000\\040\\000\\152\\000\\\n\\152\\000\\040\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\152\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\152\\000\\000\\000\\000\\000\\152\\000\\000\\000\\\n\\000\\000\\000\\000\\152\\000\\152\\000\\037\\002\\152\\000\\000\\000\\000\\000\\\n\\152\\000\\152\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\152\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\152\\000\\152\\000\\000\\000\\152\\000\\000\\000\\152\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\152\\000\\000\\000\\\n\\152\\000\\000\\000\\000\\000\\152\\000\\000\\000\\003\\002\\152\\000\\003\\002\\\n\\003\\002\\003\\002\\152\\000\\000\\000\\000\\000\\003\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\003\\002\\000\\000\\000\\000\\000\\000\\003\\002\\003\\002\\\n\\003\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\002\\\n\\003\\002\\003\\002\\003\\002\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\\n\\000\\000\\003\\002\\000\\000\\000\\000\\000\\000\\003\\002\\003\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\002\\003\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\002\\\n\\000\\000\\003\\002\\000\\000\\000\\000\\003\\002\\000\\000\\000\\000\\003\\002\\\n\\003\\002\\003\\002\\000\\000\\003\\002\\000\\000\\000\\000\\003\\002\\003\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\003\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\003\\002\\003\\002\\000\\000\\003\\002\\003\\002\\003\\002\\000\\000\\000\\000\\\n\\189\\000\\003\\002\\000\\000\\189\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\189\\000\\003\\002\\189\\000\\000\\000\\003\\002\\189\\000\\189\\000\\000\\000\\\n\\003\\002\\189\\000\\000\\000\\189\\000\\189\\000\\189\\000\\000\\000\\000\\000\\\n\\189\\000\\000\\000\\189\\000\\000\\000\\189\\000\\189\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\189\\000\\000\\000\\000\\000\\\n\\189\\000\\189\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\189\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\255\\001\\000\\000\\189\\000\\000\\000\\000\\000\\189\\000\\\n\\000\\000\\000\\000\\000\\000\\189\\000\\189\\000\\000\\000\\189\\000\\000\\000\\\n\\000\\000\\189\\000\\189\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\189\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\189\\000\\189\\000\\000\\000\\189\\000\\189\\000\\\n\\189\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\189\\000\\\n\\000\\000\\189\\000\\000\\000\\000\\000\\189\\000\\000\\000\\000\\000\\189\\000\\\n\\000\\000\\000\\000\\000\\000\\189\\000\\037\\002\\000\\000\\037\\002\\037\\002\\\n\\037\\002\\000\\000\\000\\000\\000\\000\\037\\002\\000
\\000\\000\\000\\000\\000\\\n\\000\\000\\037\\002\\000\\000\\000\\000\\000\\000\\037\\002\\037\\002\\037\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\037\\002\\037\\002\\\n\\037\\002\\037\\002\\000\\000\\000\\000\\206\\004\\000\\000\\000\\000\\000\\000\\\n\\037\\002\\000\\000\\000\\000\\000\\000\\000\\000\\037\\002\\000\\000\\000\\002\\\n\\000\\000\\000\\000\\000\\000\\032\\005\\037\\002\\037\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\198\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\037\\002\\000\\000\\000\\000\\037\\002\\000\\000\\000\\000\\037\\002\\037\\002\\\n\\037\\002\\000\\000\\037\\002\\000\\000\\000\\000\\037\\002\\037\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\208\\004\\037\\002\\113\\000\\114\\000\\028\\000\\\n\\000\\000\\115\\000\\000\\000\\000\\000\\116\\000\\209\\004\\000\\000\\037\\002\\\n\\037\\002\\000\\000\\037\\002\\037\\002\\037\\002\\000\\000\\000\\000\\001\\002\\\n\\000\\000\\001\\002\\001\\002\\001\\002\\000\\000\\118\\000\\000\\000\\001\\002\\\n\\037\\002\\000\\000\\000\\000\\037\\002\\001\\002\\119\\000\\120\\000\\037\\002\\\n\\001\\002\\001\\002\\001\\002\\000\\000\\000\\000\\121\\000\\000\\000\\000\\000\\\n\\000\\000\\001\\002\\001\\002\\001\\002\\001\\002\\000\\000\\201\\001\\000\\000\\\n\\000\\000\\211\\004\\123\\000\\001\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\001\\002\\000\\000\\254\\001\\000\\000\\000\\000\\000\\000\\000\\000\\001\\002\\\n\\001\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\001\\002\\000\\000\\000\\000\\001\\002\\000\\000\\\n\\000\\000\\001\\002\\001\\002\\001\\002\\000\\000\\001\\002\\000\\000\\000\\000\\\n\\000\\000\\001\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\001\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\001\\002\\001\\002\\000\\000\\001\\002\\001\\002\\001\\002\\\n\\000\\000\\000\\000\\255\\001\\000\\00
0\\255\\001\\255\\001\\255\\001\\000\\000\\\n\\000\\000\\000\\000\\255\\001\\001\\002\\000\\000\\000\\000\\001\\002\\255\\001\\\n\\000\\000\\000\\000\\001\\002\\255\\001\\255\\001\\255\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\255\\001\\255\\001\\255\\001\\255\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\000\\000\\255\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\255\\001\\000\\000\\000\\000\\090\\000\\000\\000\\\n\\000\\000\\000\\000\\255\\001\\255\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\255\\001\\000\\000\\\n\\000\\000\\255\\001\\000\\000\\000\\000\\255\\001\\255\\001\\255\\001\\000\\000\\\n\\255\\001\\000\\000\\000\\000\\000\\000\\255\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\255\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\255\\001\\255\\001\\065\\000\\\n\\255\\001\\255\\001\\255\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\\n\\000\\000\\000\\002\\000\\002\\000\\002\\000\\000\\000\\000\\255\\001\\000\\002\\\n\\000\\000\\255\\001\\000\\000\\000\\000\\000\\002\\255\\001\\000\\000\\000\\000\\\n\\000\\002\\000\\002\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\002\\000\\002\\000\\002\\000\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\002\\\n\\000\\002\\066\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\002\\000\\000\\000\\000\\000\\002\\000\\000\\\n\\000\\000\\000\\002\\000\\002\\000\\002\\000\\000\\000\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\002\\000\\000\\000\\000\\000\\000\\000\\000\\112\\000\\000\\002\\\n\\113\\000\\114\\000\\028\\000\\000\\000\\115\\000\\000\\000\\000\\000\\116\\000\\\n\\117\\000\\000\\000\\0
00\\002\\000\\002\\000\\000\\000\\002\\000\\002\\000\\002\\\n\\000\\000\\000\\000\\254\\001\\000\\000\\254\\001\\254\\001\\254\\001\\000\\000\\\n\\118\\000\\000\\000\\254\\001\\000\\002\\000\\000\\000\\000\\000\\002\\254\\001\\\n\\119\\000\\120\\000\\000\\002\\254\\001\\254\\001\\254\\001\\000\\000\\000\\000\\\n\\121\\000\\000\\000\\000\\000\\000\\000\\254\\001\\254\\001\\254\\001\\254\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\122\\000\\123\\000\\254\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\254\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\254\\001\\254\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\254\\001\\000\\000\\\n\\000\\000\\254\\001\\214\\002\\000\\000\\254\\001\\254\\001\\254\\001\\000\\000\\\n\\254\\001\\000\\000\\000\\000\\000\\000\\254\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\254\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\254\\001\\254\\001\\146\\000\\\n\\254\\001\\254\\001\\254\\001\\000\\000\\000\\000\\000\\000\\090\\000\\000\\000\\\n\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\146\\000\\254\\001\\146\\000\\\n\\000\\000\\254\\001\\000\\000\\090\\000\\000\\000\\254\\001\\146\\000\\146\\000\\\n\\146\\000\\146\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\090\\000\\000\\000\\090\\000\\090\\000\\000\\000\\146\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\090\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\065\\000\\\n\\146\\000\\000\\000\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\146\\000\\\n\\146\\000\\000\\000\\090\\000\\000\\000\\065\\000\\146\\000\\146\\000\\000\\000\\\n\\065\\000\\090\\000\\090\\000\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\\n\\090\\000\\065\\000\\065\\000\\065\\000\\065\\000\\106\\002\\090\\000\\000\\000\\\n\\146\\
000\\000\\000\\146\\000\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\\n\\065\\000\\000\\000\\090\\000\\000\\000\\000\\000\\000\\000\\090\\000\\000\\000\\\n\\146\\000\\000\\000\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\146\\000\\\n\\000\\000\\066\\000\\090\\000\\065\\000\\066\\000\\090\\000\\065\\000\\000\\000\\\n\\000\\000\\065\\000\\065\\000\\065\\000\\000\\000\\000\\000\\066\\000\\000\\000\\\n\\065\\000\\065\\000\\066\\000\\000\\000\\000\\000\\000\\000\\000\\000\\065\\000\\\n\\000\\000\\000\\000\\000\\000\\066\\000\\066\\000\\066\\000\\066\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\065\\000\\000\\000\\065\\000\\000\\000\\065\\000\\\n\\000\\000\\000\\000\\066\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\065\\000\\000\\000\\000\\000\\065\\000\\000\\000\\\n\\000\\000\\000\\000\\065\\000\\000\\000\\000\\000\\066\\000\\000\\000\\000\\000\\\n\\066\\000\\000\\000\\000\\000\\000\\000\\066\\000\\066\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\066\\000\\066\\000\\112\\000\\000\\000\\113\\000\\114\\000\\\n\\028\\000\\066\\000\\115\\000\\000\\000\\120\\001\\116\\000\\117\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\066\\000\\000\\000\\066\\000\\\n\\000\\000\\066\\000\\000\\000\\000\\000\\000\\000\\000\\000\\118\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\066\\000\\119\\000\\060\\003\\\n\\066\\000\\000\\000\\214\\002\\000\\000\\066\\000\\214\\002\\121\\000\\214\\002\\\n\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\214\\002\\214\\002\\214\\002\\\n\\000\\000\\000\\000\\122\\000\\123\\000\\000\\000\\214\\002\\000\\000\\000\\000\\\n\\000\\000\\214\\002\\000\\000\\000\\000\\214\\002\\000\\000\\214\\002\\214\\002\\\n\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\000\\000\\\n\\000\\000\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\098\\002\\000\\000\\\n\\000\\000\\000\\000\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214
\\002\\\n\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\\n\\000\\000\\214\\002\\214\\002\\214\\002\\000\\000\\214\\002\\214\\002\\214\\002\\\n\\214\\002\\214\\002\\214\\002\\000\\000\\214\\002\\214\\002\\000\\000\\214\\002\\\n\\214\\002\\000\\000\\214\\002\\214\\002\\000\\000\\000\\000\\214\\002\\214\\002\\\n\\000\\000\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\\n\\000\\000\\214\\002\\214\\002\\214\\002\\000\\000\\214\\002\\000\\000\\214\\002\\\n\\214\\002\\000\\000\\214\\002\\000\\000\\214\\002\\214\\002\\214\\002\\214\\002\\\n\\214\\002\\214\\002\\214\\002\\212\\001\\214\\002\\106\\002\\000\\000\\000\\000\\\n\\000\\000\\106\\002\\000\\000\\106\\002\\000\\000\\106\\002\\000\\000\\106\\002\\\n\\000\\000\\106\\002\\000\\000\\106\\002\\106\\002\\000\\000\\106\\002\\106\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\106\\002\\106\\002\\000\\000\\106\\002\\106\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\106\\002\\\n\\106\\002\\106\\002\\106\\002\\000\\000\\106\\002\\106\\002\\000\\000\\000\\000\\\n\\106\\002\\213\\001\\000\\000\\000\\000\\000\\000\\106\\002\\106\\002\\106\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\106\\002\\000\\000\\106\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\106\\002\\000\\000\\\n\\000\\000\\106\\002\\000\\000\\000\\000\\000\\000\\000\\000\\106\\002\\000\\000\\\n\\106\\002\\106\\002\\000\\000\\106\\002\\106\\002\\000\\000\\106\\002\\000\\000\\\n\\000\\000\\000\\000\\106\\002\\000\\000\\000\\000\\106\\002\\000\\000\\106\\002\\\n\\000\\000\\000\\000\\106\\002\\106\\002\\120\\001\\000\\000\\106\\002\\000\\000\\\n\\120\\001\\000\\000\\120\\001\\212\\002\\120\\001\\000\\000\\120\\001\\000\\000\\\n\\120\\001\\000\\000\\120\\001\\120\\001\\000\\000\\120\\00
1\\120\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\120\\001\\\n\\000\\000\\000\\000\\120\\001\\120\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\064\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\120\\001\\120\\001\\\n\\000\\000\\120\\001\\000\\000\\120\\001\\120\\001\\000\\000\\000\\000\\120\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\120\\001\\120\\001\\120\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\120\\001\\000\\000\\120\\001\\098\\002\\000\\000\\\n\\000\\000\\098\\002\\000\\000\\000\\000\\000\\000\\120\\001\\098\\002\\000\\000\\\n\\120\\001\\000\\000\\000\\000\\098\\002\\098\\002\\120\\001\\000\\000\\120\\001\\\n\\120\\001\\098\\002\\120\\001\\120\\001\\119\\002\\120\\001\\000\\000\\000\\000\\\n\\098\\002\\120\\001\\098\\002\\098\\002\\120\\001\\000\\000\\120\\001\\000\\000\\\n\\000\\000\\120\\001\\120\\001\\000\\000\\000\\000\\120\\001\\000\\000\\098\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\031\\002\\\n\\209\\001\\031\\002\\031\\002\\031\\002\\000\\000\\031\\002\\000\\000\\000\\000\\\n\\031\\002\\031\\002\\098\\002\\000\\000\\000\\000\\098\\002\\000\\000\\119\\002\\\n\\098\\002\\098\\002\\098\\002\\212\\001\\000\\000\\000\\000\\212\\001\\000\\000\\\n\\098\\002\\031\\002\\000\\000\\212\\001\\000\\000\\098\\002\\098\\002\\000\\000\\\n\\212\\001\\031\\002\\031\\002\\000\\000\\000\\000\\000\\000\\212\\001\\000\\000\\\n\\000\\000\\031\\002\\098\\002\\000\\000\\000\\000\\212\\001\\098\\002\\212\\001\\\n\\212\\001\\000\\000\\000\\000\\000\\000\\000\\000\\031\\002\\031\\002\\000\\000\\\n\\063\\000\\000\\000\\098\\002\\212\\001\\212\\001\\098\\002\\214\\002\\000\\000\\\n\\214\\002\\214\\002\\214\\002\\000\\000\\214\\002\\000\\000\\000\\000\\214\\002\\\n\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\212\\001\\\n\\000\\000\\213\\001\\212\\001\\000\\000\\2
13\\001\\212\\001\\212\\001\\212\\001\\\n\\214\\002\\213\\001\\000\\000\\000\\000\\040\\002\\212\\001\\213\\001\\000\\000\\\n\\214\\002\\214\\002\\000\\000\\212\\001\\213\\001\\000\\000\\000\\000\\000\\000\\\n\\214\\002\\000\\000\\000\\000\\213\\001\\000\\000\\213\\001\\213\\001\\212\\001\\\n\\131\\000\\000\\000\\000\\000\\212\\001\\214\\002\\214\\002\\000\\000\\040\\002\\\n\\000\\000\\213\\001\\213\\001\\000\\000\\000\\000\\000\\000\\000\\000\\212\\001\\\n\\000\\000\\000\\000\\212\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\212\\002\\000\\000\\213\\001\\212\\002\\000\\000\\\n\\213\\001\\000\\000\\000\\000\\213\\001\\213\\001\\213\\001\\000\\000\\000\\000\\\n\\212\\002\\000\\000\\213\\001\\213\\001\\000\\000\\000\\000\\212\\002\\000\\000\\\n\\000\\000\\213\\001\\000\\000\\000\\000\\000\\000\\212\\002\\000\\000\\212\\002\\\n\\212\\002\\064\\000\\174\\001\\000\\000\\064\\000\\213\\001\\000\\000\\000\\000\\\n\\000\\000\\213\\001\\000\\000\\212\\002\\212\\002\\000\\000\\064\\000\\000\\000\\\n\\000\\000\\000\\000\\064\\000\\212\\002\\212\\002\\213\\001\\000\\000\\000\\000\\\n\\213\\001\\000\\000\\000\\000\\064\\000\\064\\000\\064\\000\\064\\000\\212\\002\\\n\\000\\000\\000\\000\\212\\002\\000\\000\\000\\000\\000\\000\\000\\000\\212\\002\\\n\\000\\000\\212\\002\\064\\000\\000\\000\\000\\000\\212\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\241\\001\\212\\002\\241\\001\\241\\001\\241\\001\\000\\000\\\n\\241\\001\\000\\000\\214\\002\\241\\001\\241\\001\\064\\000\\000\\000\\212\\002\\\n\\064\\000\\000\\000\\000\\000\\212\\002\\064\\000\\064\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\008\\000\\064\\000\\241\\001\\000\\000\\000\\000\\212\\002\\\n\\011\\000\\064\\000\\212\\002\\000\\000\\241\\001\\241\\001\\000\\000\\000\\000\\\n\\209\\001\\000\\000\\000\\000\\209\\001\\241\\001\\064\\000\\000\\000\\064\\000\\\n\\209\\001\\064\\000\\015\\000\\016\\000\\000\\000\\209\\001\\000\\000\\000\\000\\\n\\241\\001\\241\\001\\000\\
000\\209\\001\\000\\000\\064\\000\\000\\000\\174\\001\\\n\\064\\000\\000\\000\\209\\001\\000\\000\\209\\001\\209\\001\\022\\000\\000\\000\\\n\\138\\000\\139\\000\\000\\000\\140\\000\\141\\000\\000\\000\\000\\000\\028\\000\\\n\\000\\000\\209\\001\\000\\000\\000\\000\\142\\000\\143\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\144\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\063\\000\\000\\000\\000\\000\\063\\000\\209\\001\\000\\000\\000\\000\\209\\001\\\n\\145\\000\\000\\000\\209\\001\\209\\001\\209\\001\\063\\000\\000\\000\\000\\000\\\n\\000\\000\\063\\000\\209\\001\\000\\000\\175\\001\\146\\000\\000\\000\\000\\000\\\n\\209\\001\\044\\000\\063\\000\\063\\000\\063\\000\\063\\000\\045\\000\\000\\000\\\n\\000\\000\\048\\000\\147\\000\\000\\000\\209\\001\\000\\000\\000\\000\\000\\000\\\n\\209\\001\\063\\000\\000\\000\\209\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\209\\001\\\n\\131\\000\\000\\000\\000\\000\\131\\000\\063\\000\\000\\000\\000\\000\\063\\000\\\n\\000\\000\\000\\000\\000\\000\\063\\000\\063\\000\\131\\000\\000\\000\\000\\000\\\n\\000\\000\\177\\001\\063\\000\\131\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\063\\000\\000\\000\\131\\000\\000\\000\\131\\000\\131\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\063\\000\\000\\000\\063\\000\\000\\000\\\n\\063\\000\\131\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\131\\000\\000\\000\\000\\000\\000\\000\\063\\000\\000\\000\\000\\000\\063\\000\\\n\\000\\000\\000\\000\\174\\001\\000\\000\\131\\000\\174\\001\\000\\000\\131\\000\\\n\\000\\000\\000\\000\\000\\000\\131\\000\\131\\000\\000\\000\\131\\000\\174\\001\\\n\\000\\000\\176\\001\\131\\000\\000\\000\\000\\000\\174\\001\\000\\000\\000\\000\\\n\\131\\000\\000\\000\\000\\000\\000\\000\\174\\001\\000\\000\\174\\001\\174\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\131\\000\\000\\000\\000\\000\\000\\000\\\n\\131\\000\
\000\\000\\000\\000\\174\\001\\000\\000\\000\\000\\000\\000\\178\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\131\\000\\000\\000\\000\\000\\131\\000\\\n\\000\\000\\000\\000\\214\\002\\000\\000\\000\\000\\214\\002\\174\\001\\000\\000\\\n\\000\\000\\174\\001\\214\\002\\000\\000\\000\\000\\174\\001\\174\\001\\214\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\174\\001\\214\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\174\\001\\000\\000\\214\\002\\000\\000\\214\\002\\214\\002\\\n\\115\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\174\\001\\000\\000\\\n\\000\\000\\000\\000\\174\\001\\214\\002\\000\\000\\000\\000\\000\\000\\182\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\174\\001\\174\\001\\\n\\000\\000\\174\\001\\174\\001\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\\n\\209\\001\\214\\002\\000\\000\\000\\000\\174\\001\\214\\002\\214\\002\\000\\000\\\n\\000\\000\\000\\000\\174\\001\\000\\000\\214\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\174\\001\\214\\002\\174\\001\\174\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\\n\\174\\001\\000\\000\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\175\\001\\000\\000\\214\\002\\175\\001\\\n\\000\\000\\214\\002\\000\\000\\174\\001\\000\\000\\000\\000\\174\\001\\000\\000\\\n\\044\\000\\175\\001\\174\\001\\174\\001\\000\\000\\000\\000\\000\\000\\175\\001\\\n\\000\\000\\174\\001\\000\\000\\209\\001\\000\\000\\000\\000\\175\\001\\174\\001\\\n\\175\\001\\175\\001\\000\\000\\209\\001\\000\\000\\047\\000\\000\\000\\000\\000\\\n\\209\\001\\000\\000\\000\\000\\174\\001\\000\\000\\175\\001\\085\\000\\174\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\209\\001\\000\\000\\209\\001\\\n\\209\\001\\177\\001\\000\\000\\174\\001\\177\\001\\000\\000\\174\\001\\000\\000\\\n\\175\\001\\000\\000\\000\\000\\175\\001\\209\\001\\000\\000\\177\\001\\175\\00
1\\\n\\175\\001\\000\\000\\000\\000\\000\\000\\177\\001\\000\\000\\175\\001\\000\\000\\\n\\000\\000\\212\\002\\000\\000\\177\\001\\175\\001\\177\\001\\177\\001\\209\\001\\\n\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\209\\001\\209\\001\\209\\001\\\n\\175\\001\\000\\000\\177\\001\\000\\000\\175\\001\\209\\001\\081\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\175\\001\\176\\001\\000\\000\\175\\001\\176\\001\\177\\001\\000\\000\\209\\001\\\n\\177\\001\\000\\000\\000\\000\\209\\001\\177\\001\\177\\001\\176\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\177\\001\\176\\001\\000\\000\\000\\000\\209\\001\\\n\\000\\000\\177\\001\\209\\001\\176\\001\\000\\000\\176\\001\\176\\001\\178\\001\\\n\\000\\000\\000\\000\\178\\001\\000\\000\\000\\000\\177\\001\\000\\000\\000\\000\\\n\\000\\000\\177\\001\\176\\001\\000\\000\\178\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\178\\001\\000\\000\\000\\000\\177\\001\\000\\000\\000\\000\\\n\\177\\001\\178\\001\\000\\000\\178\\001\\178\\001\\176\\001\\000\\000\\000\\000\\\n\\176\\001\\000\\000\\000\\000\\000\\000\\176\\001\\176\\001\\000\\000\\000\\000\\\n\\178\\001\\000\\000\\000\\000\\176\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\176\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\182\\001\\\n\\000\\000\\000\\000\\182\\001\\178\\001\\000\\000\\176\\001\\178\\001\\000\\000\\\n\\000\\000\\176\\001\\178\\001\\178\\001\\182\\001\\000\\000\\000\\000\\000\\000\\\n\\209\\001\\178\\001\\182\\001\\000\\000\\000\\000\\176\\001\\000\\000\\178\\001\\\n\\176\\001\\182\\001\\000\\000\\182\\001\\182\\001\\209\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\178\\001\\000\\000\\000\\000\\000\\000\\178\\001\\\n\\182\\001\\000\\000\\209\\001\\000\\000\\209\\001\\209\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\178\\001\\000\\000\\000\\000\\178\\001\\000\\000\\\n\\000\\000\\209\\001\\000\\000\\182\\001\\000\\000\\000\\000\\1
82\\001\\000\\000\\\n\\000\\000\\000\\000\\182\\001\\182\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\044\\000\\182\\001\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\182\\001\\\n\\000\\000\\000\\000\\209\\001\\209\\001\\209\\001\\044\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\209\\001\\182\\001\\000\\000\\047\\000\\000\\000\\182\\001\\\n\\209\\001\\000\\000\\044\\000\\000\\000\\044\\000\\044\\000\\085\\000\\000\\000\\\n\\000\\000\\000\\000\\047\\000\\182\\001\\209\\001\\000\\000\\182\\001\\000\\000\\\n\\209\\001\\044\\000\\000\\000\\085\\000\\000\\000\\000\\000\\000\\000\\047\\000\\\n\\000\\000\\047\\000\\047\\000\\000\\000\\209\\001\\000\\000\\000\\000\\209\\001\\\n\\085\\000\\000\\000\\085\\000\\085\\000\\044\\000\\000\\000\\047\\000\\044\\000\\\n\\000\\000\\212\\002\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\085\\000\\\n\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\000\\000\\212\\002\\000\\000\\\n\\044\\000\\047\\000\\000\\000\\000\\000\\047\\000\\000\\000\\081\\000\\000\\000\\\n\\000\\000\\047\\000\\085\\000\\212\\002\\044\\000\\212\\002\\212\\002\\047\\000\\\n\\044\\000\\000\\000\\085\\000\\081\\000\\000\\000\\047\\000\\000\\000\\000\\000\\\n\\085\\000\\000\\000\\212\\002\\000\\000\\044\\000\\000\\000\\085\\000\\044\\000\\\n\\081\\000\\047\\000\\081\\000\\081\\000\\000\\000\\047\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\085\\000\\000\\000\\000\\000\\212\\002\\085\\000\\081\\000\\\n\\000\\000\\047\\000\\000\\000\\000\\000\\047\\000\\212\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\085\\000\\212\\002\\000\\000\\085\\000\\000\\000\\000\\000\\\n\\000\\000\\212\\002\\081\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\081\\000\\000\\000\\000\\000\\212\\002\\000\\000\\000\\000\\\n\\081\\000\\212\\002\\000\\000\\000\\000\\000\\000\\000\\000\\081\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\212\\002\\000\\000\\000\\000\\\n\\212\\002\\000\\000\\081\\000\\207\\002\\000\\
000\\000\\000\\081\\000\\000\\000\\\n\\207\\002\\207\\002\\207\\002\\207\\002\\000\\000\\000\\000\\207\\002\\207\\002\\\n\\207\\002\\207\\002\\081\\000\\000\\000\\000\\000\\081\\000\\207\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\207\\002\\000\\000\\207\\002\\\n\\207\\002\\207\\002\\207\\002\\207\\002\\207\\002\\207\\002\\207\\002\\000\\000\\\n\\000\\000\\000\\000\\207\\002\\000\\000\\207\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\207\\002\\207\\002\\207\\002\\207\\002\\\n\\207\\002\\207\\002\\207\\002\\207\\002\\000\\000\\000\\000\\207\\002\\207\\002\\\n\\000\\000\\000\\000\\207\\002\\207\\002\\207\\002\\207\\002\\000\\000\\207\\002\\\n\\207\\002\\207\\002\\207\\002\\207\\002\\000\\000\\207\\002\\000\\000\\000\\000\\\n\\207\\002\\207\\002\\000\\000\\207\\002\\207\\002\\000\\000\\000\\000\\207\\002\\\n\\207\\002\\000\\000\\207\\002\\000\\000\\207\\002\\207\\002\\000\\000\\207\\002\\\n\\207\\002\\000\\000\\000\\000\\207\\002\\207\\002\\000\\000\\207\\002\\000\\000\\\n\\207\\002\\207\\002\\000\\000\\207\\002\\000\\000\\207\\002\\207\\002\\207\\002\\\n\\207\\002\\207\\002\\207\\002\\207\\002\\214\\002\\207\\002\\000\\000\\000\\000\\\n\\000\\000\\214\\002\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\214\\002\\\n\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\\n\\214\\002\\000\\000\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\\n\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\214\\002\\214\\002\\\n\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\214\\002\\\n\\214\\002\\000\\000\\000\\000\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\\n\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\000\\000\\214\\002\\000\\000\\\n\\000\\000\\214\\002\\214\\002\
\000\\000\\000\\000\\214\\002\\000\\000\\000\\000\\\n\\214\\002\\214\\002\\000\\000\\214\\002\\000\\000\\214\\002\\214\\002\\000\\000\\\n\\000\\000\\214\\002\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\214\\002\\\n\\000\\000\\214\\002\\214\\002\\000\\000\\214\\002\\000\\000\\214\\002\\214\\002\\\n\\000\\000\\214\\002\\214\\002\\214\\002\\214\\002\\000\\000\\214\\002\\001\\001\\\n\\002\\001\\003\\001\\000\\000\\000\\000\\007\\000\\008\\000\\004\\001\\000\\000\\\n\\005\\001\\000\\000\\010\\000\\011\\000\\000\\000\\000\\000\\006\\001\\007\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\008\\001\\000\\000\\000\\000\\015\\000\\016\\000\\017\\000\\\n\\018\\000\\019\\000\\000\\000\\009\\001\\000\\000\\000\\000\\020\\000\\000\\000\\\n\\000\\000\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\000\\000\\\n\\000\\000\\022\\000\\000\\000\\023\\000\\024\\000\\025\\000\\026\\000\\027\\000\\\n\\000\\000\\000\\000\\028\\000\\000\\000\\016\\001\\000\\000\\030\\000\\031\\000\\\n\\032\\000\\000\\000\\000\\000\\000\\000\\034\\000\\000\\000\\017\\001\\018\\001\\\n\\000\\000\\019\\001\\000\\000\\000\\000\\000\\000\\038\\000\\000\\000\\000\\000\\\n\\000\\000\\020\\001\\021\\001\\022\\001\\023\\001\\024\\001\\025\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\000\\000\\000\\000\\\n\\000\\000\\027\\001\\000\\000\\028\\001\\044\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\045\\000\\046\\000\\000\\000\\048\\000\\049\\000\\001\\001\\002\\001\\\n\\003\\001\\051\\000\\000\\000\\007\\000\\008\\000\\004\\001\\000\\000\\005\\001\\\n\\000\\000\\010\\000\\011\\000\\000\\000\\000\\000\\018\\003\\007\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\008\\001\\000\\000\\000\\000\\015\\000\\016\\000\\017\\000\\018\\000\\\n\\019\\000\\000\\000\\009\\001\\000\\000\\000\\000\\020\\000\\000\\000\\000\\000\\\n\\010\\001\\011
\\001\\012\\001\\013\\001\\014\\001\\015\\001\\000\\000\\000\\000\\\n\\022\\000\\000\\000\\023\\000\\024\\000\\025\\000\\026\\000\\027\\000\\000\\000\\\n\\000\\000\\028\\000\\000\\000\\016\\001\\000\\000\\030\\000\\031\\000\\032\\000\\\n\\000\\000\\000\\000\\000\\000\\034\\000\\000\\000\\017\\001\\018\\001\\000\\000\\\n\\019\\003\\000\\000\\000\\000\\000\\000\\038\\000\\000\\000\\000\\000\\000\\000\\\n\\020\\001\\021\\001\\022\\001\\023\\001\\024\\001\\025\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\020\\003\\000\\000\\000\\000\\000\\000\\\n\\027\\001\\000\\000\\028\\001\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\045\\000\\046\\000\\000\\000\\048\\000\\049\\000\\214\\002\\000\\000\\000\\000\\\n\\051\\000\\000\\000\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\000\\000\\\n\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\\n\\000\\000\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\214\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\\n\\000\\000\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\\n\\214\\002\\214\\002\\000\\000\\000\\000\\214\\002\\214\\002\\214\\002\\000\\000\\\n\\000\\000\\214\\002\\214\\002\\000\\000\\214\\002\\214\\002\\000\\000\\214\\002\\\n\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\214\\002\\000\\000\\000\\000\\\n\\000\\000\\214\\002\\214\\002\\085\\002\\214\\002\\000\\000\\000\\000\\000\\000\\\n\\152\\002\\152\\002\\152\\002\\000\\000\\000\\000\\214\\002\\152\\002\\152\\002\\\n\\000\\000\\000\\000\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\\n\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\214\\002\\\n\\000\\000\\152\\002\\152\\002\\152\\002\\152\\002\\152\\002\\000\\000\\000\\000\\\
n\\000\\000\\000\\000\\152\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\152\\002\\000\\000\\152\\002\\\n\\152\\002\\152\\002\\152\\002\\152\\002\\000\\000\\000\\000\\152\\002\\000\\000\\\n\\000\\000\\000\\000\\152\\002\\152\\002\\152\\002\\000\\000\\000\\000\\000\\000\\\n\\152\\002\\000\\000\\152\\002\\152\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\152\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\152\\002\\\n\\152\\002\\086\\002\\152\\002\\000\\000\\000\\000\\000\\000\\153\\002\\153\\002\\\n\\153\\002\\085\\002\\000\\000\\000\\000\\153\\002\\153\\002\\000\\000\\000\\000\\\n\\152\\002\\000\\000\\000\\000\\000\\000\\000\\000\\152\\002\\152\\002\\000\\000\\\n\\152\\002\\152\\002\\000\\000\\000\\000\\000\\000\\152\\002\\000\\000\\153\\002\\\n\\153\\002\\153\\002\\153\\002\\153\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\153\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\153\\002\\000\\000\\153\\002\\153\\002\\153\\002\\\n\\153\\002\\153\\002\\000\\000\\000\\000\\153\\002\\000\\000\\000\\000\\000\\000\\\n\\153\\002\\153\\002\\153\\002\\000\\000\\000\\000\\000\\000\\153\\002\\000\\000\\\n\\153\\002\\153\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\153\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\153\\002\\153\\002\\083\\002\\\n\\153\\002\\000\\000\\000\\000\\000\\000\\154\\002\\154\\002\\154\\002\\086\\002\\\n\\000\\000\\000\\000\\154\\002\\154\\002\\000\\000\\000\\000\\153\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\153\\002\\153\\002\\000\\000\\153\\002\\153\\002\\\n\\000\\000\\000\\000\\000\\000\\153\\002\\000\\000\\154\\002\\154\\002\\154\\002\\\n\\154\\002\\154\\002\\000\\000\\000\\000\\000\\000\\000\\000\\154\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\154\\002\\000\\000\\154\\002\\154\\002\\154\\002\\154\\
002\\154\\002\\\n\\000\\000\\000\\000\\154\\002\\000\\000\\000\\000\\000\\000\\154\\002\\154\\002\\\n\\154\\002\\000\\000\\000\\000\\000\\000\\154\\002\\000\\000\\154\\002\\154\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\154\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\154\\002\\154\\002\\084\\002\\154\\002\\000\\000\\\n\\000\\000\\000\\000\\155\\002\\155\\002\\155\\002\\083\\002\\000\\000\\000\\000\\\n\\155\\002\\155\\002\\000\\000\\000\\000\\154\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\154\\002\\154\\002\\000\\000\\154\\002\\154\\002\\000\\000\\000\\000\\\n\\000\\000\\154\\002\\000\\000\\155\\002\\155\\002\\155\\002\\155\\002\\155\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\155\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\155\\002\\\n\\000\\000\\155\\002\\155\\002\\155\\002\\155\\002\\155\\002\\000\\000\\000\\000\\\n\\155\\002\\000\\000\\000\\000\\000\\000\\155\\002\\155\\002\\155\\002\\000\\000\\\n\\000\\000\\000\\000\\155\\002\\000\\000\\155\\002\\155\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\155\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\155\\002\\155\\002\\000\\000\\155\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\084\\002\\199\\000\\200\\000\\201\\000\\000\\000\\\n\\000\\000\\000\\000\\155\\002\\000\\000\\202\\000\\000\\000\\203\\000\\155\\002\\\n\\155\\002\\000\\000\\155\\002\\155\\002\\204\\000\\205\\000\\206\\000\\155\\002\\\n\\000\\000\\207\\000\\208\\000\\209\\000\\000\\000\\210\\000\\211\\000\\212\\000\\\n\\000\\000\\213\\000\\214\\000\\215\\000\\216\\000\\000\\000\\000\\000\\000\\000\\\n\\217\\000\\218\\000\\219\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\220\\000\\221\\000\\000\\000\\000\\000\\222\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\223\\000\\224\\000\\000\\000\\000\\000\\000\\000\
\004\\002\\225\\000\\226\\000\\\n\\000\\000\\004\\002\\000\\000\\227\\000\\228\\000\\229\\000\\230\\000\\231\\000\\\n\\232\\000\\233\\000\\000\\000\\234\\000\\000\\000\\000\\000\\004\\002\\000\\000\\\n\\004\\002\\235\\000\\000\\000\\243\\001\\000\\000\\000\\000\\236\\000\\004\\002\\\n\\004\\002\\000\\000\\000\\000\\000\\000\\237\\000\\000\\000\\000\\000\\238\\000\\\n\\239\\000\\004\\002\\240\\000\\241\\000\\242\\000\\243\\000\\244\\000\\000\\000\\\n\\245\\000\\246\\000\\247\\000\\248\\000\\249\\000\\004\\002\\004\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\004\\002\\000\\000\\000\\000\\000\\000\\004\\002\\000\\000\\004\\002\\\n\\004\\002\\004\\002\\000\\000\\004\\002\\000\\000\\000\\000\\004\\002\\000\\000\\\n\\000\\000\\000\\000\\001\\001\\002\\001\\003\\001\\000\\000\\000\\000\\000\\000\\\n\\008\\000\\164\\001\\000\\000\\005\\001\\000\\000\\000\\000\\011\\000\\243\\001\\\n\\004\\002\\006\\001\\007\\001\\000\\000\\004\\002\\000\\000\\004\\002\\000\\000\\\n\\000\\000\\004\\002\\000\\000\\000\\000\\000\\000\\008\\001\\137\\000\\000\\000\\\n\\015\\000\\016\\000\\004\\002\\000\\000\\004\\002\\000\\000\\009\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\010\\001\\011\\001\\012\\001\\013\\001\\\n\\014\\001\\015\\001\\000\\000\\000\\000\\022\\000\\000\\000\\138\\000\\139\\000\\\n\\000\\000\\140\\000\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\016\\001\\\n\\000\\000\\000\\000\\142\\000\\143\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\165\\001\\166\\001\\000\\000\\167\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\020\\001\\021\\001\\168\\001\\169\\001\\\n\\024\\001\\170\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\026\\001\\000\\000\\000\\000\\146\\000\\027\\001\\000\\000\\028\\001\\044\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\045\\000\\000\\000\\179\\002\\048\\000\\\n\\147\\000\\001\\001\\002\\001\\003
\\001\\000\\000\\000\\000\\000\\000\\008\\000\\\n\\164\\001\\000\\000\\005\\001\\000\\000\\000\\000\\011\\000\\000\\000\\000\\000\\\n\\006\\001\\007\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\008\\001\\137\\000\\000\\000\\015\\000\\\n\\016\\000\\000\\000\\000\\000\\000\\000\\000\\000\\009\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\\n\\015\\001\\000\\000\\000\\000\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\\n\\140\\000\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\016\\001\\000\\000\\\n\\000\\000\\142\\000\\143\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\165\\001\\166\\001\\000\\000\\167\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\020\\001\\021\\001\\168\\001\\169\\001\\024\\001\\\n\\170\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\\n\\000\\000\\000\\000\\146\\000\\027\\001\\000\\000\\028\\001\\044\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\045\\000\\000\\000\\124\\003\\048\\000\\147\\000\\\n\\001\\001\\002\\001\\003\\001\\000\\000\\000\\000\\000\\000\\008\\000\\164\\001\\\n\\000\\000\\005\\001\\000\\000\\000\\000\\011\\000\\000\\000\\000\\000\\006\\001\\\n\\007\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\008\\001\\137\\000\\000\\000\\015\\000\\016\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\009\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\\n\\000\\000\\000\\000\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\140\\000\\\n\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\016\\001\\000\\000\\000\\000\\\n\\142\\000\\143\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\165\\001\\\n\\166\\001\\000\\000\\167\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\00
0\\020\\001\\021\\001\\168\\001\\169\\001\\024\\001\\170\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\026\\001\\000\\000\\\n\\000\\000\\146\\000\\027\\001\\000\\000\\028\\001\\044\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\045\\000\\000\\000\\073\\004\\048\\000\\147\\000\\001\\001\\\n\\002\\001\\003\\001\\000\\000\\000\\000\\000\\000\\008\\000\\164\\001\\000\\000\\\n\\005\\001\\000\\000\\000\\000\\011\\000\\000\\000\\000\\000\\006\\001\\007\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\008\\001\\137\\000\\000\\000\\015\\000\\016\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\009\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\000\\000\\\n\\000\\000\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\140\\000\\141\\000\\\n\\000\\000\\000\\000\\028\\000\\000\\000\\016\\001\\000\\000\\000\\000\\142\\000\\\n\\143\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\165\\001\\166\\001\\\n\\000\\000\\167\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\020\\001\\021\\001\\168\\001\\169\\001\\024\\001\\170\\001\\000\\000\\\n\\000\\000\\091\\003\\000\\000\\000\\000\\000\\000\\026\\001\\000\\000\\008\\000\\\n\\146\\000\\027\\001\\000\\000\\028\\001\\044\\000\\011\\000\\000\\000\\000\\000\\\n\\018\\003\\045\\000\\000\\000\\000\\000\\048\\000\\147\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\137\\000\\000\\000\\015\\000\\\n\\016\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\\n\\140\\000\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\143\\002\\000\\000\\\n\\000\\000\\142\\000\\143\\000\\000\\000\\008\\000\\000\\000\\000\\000\\000\\000\\\n\\1
44\\000\\000\\000\\011\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\145\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\137\\000\\000\\000\\015\\000\\016\\000\\000\\000\\092\\003\\\n\\000\\000\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\044\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\048\\000\\147\\000\\\n\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\140\\000\\141\\000\\000\\000\\\n\\000\\000\\028\\000\\000\\000\\145\\002\\000\\000\\000\\000\\142\\000\\143\\000\\\n\\000\\000\\008\\000\\000\\000\\000\\000\\000\\000\\144\\000\\000\\000\\011\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\145\\000\\000\\000\\000\\000\\000\\000\\000\\000\\137\\000\\\n\\000\\000\\015\\000\\016\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\\n\\000\\000\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\045\\000\\000\\000\\000\\000\\048\\000\\147\\000\\022\\000\\000\\000\\138\\000\\\n\\139\\000\\000\\000\\140\\000\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\\n\\080\\004\\000\\000\\000\\000\\142\\000\\143\\000\\000\\000\\008\\000\\000\\000\\\n\\000\\000\\000\\000\\144\\000\\000\\000\\011\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\145\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\137\\000\\000\\000\\015\\000\\016\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\\n\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\\n\\048\\000\\147\\000\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\140\\000\\\n\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\082\\004\\000\\000\\000\\000\\\n\\142\\000\\143\\000\\000\\000\\008\\000\\000\\000\\000\\000\\000\\000\\144\\000\\\n\\000\\000\\011\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\
\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\145\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\137\\000\\000\\000\\015\\000\\016\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\048\\000\\147\\000\\022\\000\\\n\\000\\000\\138\\000\\139\\000\\000\\000\\140\\000\\141\\000\\000\\000\\000\\000\\\n\\028\\000\\000\\000\\084\\004\\000\\000\\000\\000\\142\\000\\143\\000\\000\\000\\\n\\008\\000\\000\\000\\000\\000\\000\\000\\144\\000\\000\\000\\011\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\145\\000\\000\\000\\000\\000\\000\\000\\000\\000\\137\\000\\000\\000\\\n\\015\\000\\016\\000\\000\\000\\000\\000\\000\\000\\000\\000\\146\\000\\000\\000\\\n\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\045\\000\\\n\\000\\000\\000\\000\\048\\000\\147\\000\\022\\000\\000\\000\\138\\000\\139\\000\\\n\\000\\000\\140\\000\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\142\\000\\143\\000\\007\\000\\008\\000\\009\\000\\000\\000\\\n\\000\\000\\144\\000\\010\\000\\011\\000\\012\\000\\243\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\145\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\013\\000\\014\\000\\015\\000\\016\\000\\017\\000\\\n\\018\\000\\019\\000\\000\\000\\146\\000\\000\\000\\000\\000\\020\\000\\044\\000\\\n\\021\\000\\000\\000\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\048\\000\\\n\\147\\000\\022\\000\\000\\000\\023\\000\\024\\000\\025\\000\\026\\000\\027\\000\\\n\\000\\000\\000\\000\\028\\000\\029\\000\\000\\000\\000\\000\\030\\000\\031\\000\\\n\\032\\000\\000\\000\\000\\000\\033\\000\\034\\000\\000\\000\\035\\000\\036\\000\\\n\\000\\000\\037\\000\\000\\000\\000\\000\\000\\000\\038\\000\\000\\000\\039\\000\\\n\\000\\000\\000\\000\\000\\000\\040\\000\\041\\000\\000
\\000\\042\\000\\000\\000\\\n\\244\\001\\000\\000\\000\\000\\007\\000\\008\\000\\009\\000\\000\\000\\043\\000\\\n\\000\\000\\010\\000\\011\\000\\012\\000\\044\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\045\\000\\046\\000\\047\\000\\048\\000\\049\\000\\050\\000\\000\\000\\\n\\000\\000\\051\\000\\013\\000\\014\\000\\015\\000\\016\\000\\017\\000\\018\\000\\\n\\019\\000\\000\\000\\000\\000\\000\\000\\000\\000\\020\\000\\000\\000\\021\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\022\\000\\000\\000\\023\\000\\024\\000\\025\\000\\026\\000\\027\\000\\000\\000\\\n\\000\\000\\028\\000\\029\\000\\000\\000\\000\\000\\030\\000\\031\\000\\032\\000\\\n\\000\\000\\000\\000\\033\\000\\034\\000\\000\\000\\035\\000\\036\\000\\000\\000\\\n\\037\\000\\000\\000\\000\\000\\000\\000\\038\\000\\000\\000\\039\\000\\000\\000\\\n\\000\\000\\000\\000\\040\\000\\041\\000\\000\\000\\042\\000\\000\\000\\000\\000\\\n\\000\\000\\007\\000\\008\\000\\009\\000\\000\\000\\000\\000\\043\\000\\010\\000\\\n\\011\\000\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\045\\000\\046\\000\\047\\000\\048\\000\\049\\000\\050\\000\\000\\000\\000\\000\\\n\\051\\000\\000\\000\\015\\000\\016\\000\\017\\000\\018\\000\\019\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\020\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\022\\000\\000\\000\\\n\\023\\000\\024\\000\\025\\000\\026\\000\\027\\000\\000\\000\\000\\000\\028\\000\\\n\\000\\000\\000\\000\\000\\000\\030\\000\\031\\000\\032\\000\\000\\000\\000\\000\\\n\\000\\000\\034\\000\\000\\000\\035\\000\\036\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\038\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\040\\000\\041\\000\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\194\\000\\007\\000\\008\\000\\009\\000\\000\\000\\000\\000\\197\\000\\010\\000\\\n\\011\\000\\044\\000\\000\\000\\000\\00
0\\000\\000\\000\\000\\045\\000\\046\\000\\\n\\000\\000\\048\\000\\049\\000\\000\\000\\000\\000\\000\\000\\051\\000\\000\\000\\\n\\000\\000\\000\\000\\015\\000\\016\\000\\017\\000\\018\\000\\019\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\020\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\022\\000\\000\\000\\\n\\023\\000\\024\\000\\025\\000\\026\\000\\027\\000\\000\\000\\000\\000\\028\\000\\\n\\000\\000\\000\\000\\000\\000\\030\\000\\031\\000\\032\\000\\000\\000\\000\\000\\\n\\000\\000\\034\\000\\000\\000\\035\\000\\036\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\038\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\040\\000\\041\\000\\000\\000\\042\\000\\000\\000\\000\\000\\007\\000\\008\\000\\\n\\009\\000\\000\\000\\000\\000\\000\\000\\010\\000\\011\\000\\000\\000\\000\\000\\\n\\000\\000\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\045\\000\\046\\000\\\n\\000\\000\\048\\000\\049\\000\\195\\001\\000\\000\\000\\000\\051\\000\\015\\000\\\n\\016\\000\\017\\000\\018\\000\\019\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\020\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\022\\000\\000\\000\\023\\000\\024\\000\\025\\000\\\n\\026\\000\\027\\000\\000\\000\\000\\000\\028\\000\\000\\000\\000\\000\\000\\000\\\n\\030\\000\\031\\000\\032\\000\\000\\000\\000\\000\\000\\000\\034\\000\\000\\000\\\n\\035\\000\\036\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\038\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\040\\000\\041\\000\\000\\000\\\n\\042\\000\\000\\000\\000\\000\\007\\000\\008\\000\\009\\000\\000\\000\\000\\000\\\n\\000\\000\\010\\000\\011\\000\\000\\000\\000\\000\\000\\000\\044\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\045\\000\\046\\000\\000\\000\\048\\000\\049\\000\\\n\\000\\000\\000\\000\\000\\000\\051\\000\\015\\000\\016\\000\\017\\000\\018\\000\\\n\\019\\000\\000\\000\\0
00\\000\\000\\000\\000\\000\\020\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\022\\000\\000\\000\\023\\000\\024\\000\\025\\000\\026\\000\\027\\000\\000\\000\\\n\\000\\000\\028\\000\\000\\000\\000\\000\\000\\000\\030\\000\\031\\000\\032\\000\\\n\\000\\000\\000\\000\\000\\000\\034\\000\\000\\000\\035\\000\\036\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\038\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\054\\002\\040\\000\\041\\000\\000\\000\\042\\000\\000\\000\\000\\000\\\n\\007\\000\\008\\000\\009\\000\\000\\000\\000\\000\\000\\000\\010\\000\\011\\000\\\n\\000\\000\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\045\\000\\046\\000\\000\\000\\048\\000\\049\\000\\000\\000\\000\\000\\000\\000\\\n\\051\\000\\015\\000\\016\\000\\017\\000\\018\\000\\019\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\020\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\022\\000\\000\\000\\023\\000\\\n\\024\\000\\025\\000\\026\\000\\027\\000\\000\\000\\000\\000\\028\\000\\000\\000\\\n\\000\\000\\000\\000\\030\\000\\031\\000\\032\\000\\000\\000\\000\\000\\000\\000\\\n\\034\\000\\000\\000\\035\\000\\036\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\038\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\040\\000\\\n\\041\\000\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\000\\000\\014\\003\\\n\\007\\000\\008\\000\\009\\000\\000\\000\\000\\000\\016\\003\\010\\000\\011\\000\\\n\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\045\\000\\046\\000\\000\\000\\\n\\048\\000\\049\\000\\000\\000\\000\\000\\000\\000\\051\\000\\000\\000\\000\\000\\\n\\000\\000\\015\\000\\016\\000\\017\\000\\018\\000\\019\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\020\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\022\\000\\000\\000\\023\\000\\\n\\024\\
000\\025\\000\\026\\000\\027\\000\\000\\000\\000\\000\\028\\000\\000\\000\\\n\\000\\000\\000\\000\\030\\000\\031\\000\\032\\000\\000\\000\\000\\000\\000\\000\\\n\\034\\000\\000\\000\\035\\000\\036\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\038\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\040\\000\\\n\\041\\000\\000\\000\\042\\000\\000\\000\\000\\000\\000\\000\\007\\000\\008\\000\\\n\\009\\000\\000\\000\\000\\000\\000\\000\\010\\000\\011\\000\\000\\000\\000\\000\\\n\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\045\\000\\046\\000\\053\\004\\\n\\048\\000\\049\\000\\000\\000\\000\\000\\000\\000\\051\\000\\000\\000\\015\\000\\\n\\016\\000\\017\\000\\018\\000\\019\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\020\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\022\\000\\000\\000\\023\\000\\024\\000\\025\\000\\\n\\026\\000\\027\\000\\000\\000\\000\\000\\028\\000\\000\\000\\000\\000\\000\\000\\\n\\030\\000\\031\\000\\032\\000\\000\\000\\000\\000\\000\\000\\034\\000\\000\\000\\\n\\035\\000\\036\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\038\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\040\\000\\041\\000\\000\\000\\\n\\042\\000\\000\\000\\000\\000\\216\\002\\216\\002\\216\\002\\000\\000\\000\\000\\\n\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\000\\000\\044\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\045\\000\\046\\000\\000\\000\\048\\000\\049\\000\\\n\\216\\002\\000\\000\\000\\000\\051\\000\\216\\002\\216\\002\\216\\002\\216\\002\\\n\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\216\\002\\000\\000\\216\\002\\216\\002\\216\\002\\216\\002\\216\\002\\000\\000\\\n\\000\\000\\216\\002\\000\\000\\000\\000\\000\\000\\216\\002\\216\\002\\216\\002\\\n\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\216\\002\\216\\002\\000
\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\216\\002\\000\\000\\000\\000\\\n\\007\\000\\008\\000\\009\\000\\000\\000\\000\\000\\000\\000\\010\\000\\011\\000\\\n\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\216\\002\\216\\002\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\000\\000\\\n\\216\\002\\015\\000\\016\\000\\017\\000\\018\\000\\019\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\020\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\022\\000\\000\\000\\023\\000\\\n\\024\\000\\025\\000\\026\\000\\027\\000\\000\\000\\000\\000\\028\\000\\000\\000\\\n\\000\\000\\000\\000\\030\\000\\031\\000\\032\\000\\000\\000\\000\\000\\000\\000\\\n\\034\\000\\000\\000\\035\\000\\036\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\038\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\040\\000\\\n\\041\\000\\000\\000\\042\\000\\000\\000\\000\\000\\216\\002\\216\\002\\216\\002\\\n\\000\\000\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\000\\000\\\n\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\045\\000\\046\\000\\000\\000\\\n\\048\\000\\049\\000\\000\\000\\000\\000\\000\\000\\051\\000\\216\\002\\216\\002\\\n\\216\\002\\216\\002\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\216\\002\\000\\000\\216\\002\\216\\002\\216\\002\\216\\002\\\n\\216\\002\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\000\\000\\216\\002\\\n\\216\\002\\216\\002\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\216\\002\\\n\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\216\\002\\\n\\000\\000\\000\\000\\214\\002\\214\\002\\214\\002\\000\\00
0\\000\\000\\000\\000\\\n\\214\\002\\214\\002\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\216\\002\\216\\002\\000\\000\\\n\\000\\000\\000\\000\\216\\002\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\\n\\000\\000\\214\\002\\214\\002\\214\\002\\214\\002\\214\\002\\000\\000\\000\\000\\\n\\214\\002\\000\\000\\000\\000\\000\\000\\214\\002\\214\\002\\214\\002\\000\\000\\\n\\000\\000\\008\\000\\214\\002\\000\\000\\214\\002\\214\\002\\000\\000\\011\\000\\\n\\000\\000\\147\\003\\000\\000\\214\\002\\229\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\214\\002\\214\\002\\000\\000\\214\\002\\000\\000\\148\\003\\000\\000\\\n\\000\\000\\015\\000\\016\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\\n\\214\\002\\000\\000\\214\\002\\214\\002\\000\\000\\022\\000\\207\\001\\214\\002\\\n\\139\\000\\000\\000\\140\\000\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\142\\000\\149\\003\\000\\000\\008\\000\\000\\000\\\n\\000\\000\\000\\000\\144\\000\\000\\000\\011\\000\\000\\000\\228\\001\\000\\000\\\n\\000\\000\\229\\001\\000\\000\\000\\000\\209\\001\\000\\000\\000\\000\\145\\000\\\n\\000\\000\\000\\000\\000\\000\\148\\003\\210\\001\\000\\000\\015\\000\\016\\000\\\n\\000\\000\\008\\000\\000\\000\\000\\000\\146\\000\\000\\000\\000\\000\\011\\000\\\n\\044\\000\\189\\002\\000\\000\\211\\001\\000\\000\\045\\000\\000\\000\\000\\000\\\n\\048\\000\\147\\000\\022\\000\\207\\001\\000\\000\\139\\000\\000\\000\\140\\000\\\n\\141\\000\\015\\000\\016\\000\\028\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\142\\000\\149\\003\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\144\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\022\\000\\207\\001\\000\\000\\\n\\139\\000\\209\\001\\140\\000\\141\\000\\145\\000\\000\\000\\028\\000\\000\\000\\\n\\000\\000\\210\\001\\000\\000\\142\\000\\190\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\146\\000\\144\\000\\000\\000\\191\\002\\044\\000\\000\\000\\000\\000\\\n\\211\\001\\000\\000\\045\\000\\000\\000\\209\\001\\048\\000\\147\\000\\145\\000\\\n\\000\\000\\000\\000\\008\\000\\000\\000\\210\\001\\000\\000\\000\\000\\000\\000\\\n\\011\\000\\000\\000\\124\\005\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\\n\\044\\000\\000\\000\\000\\000\\211\\001\\000\\000\\045\\000\\000\\000\\148\\003\\\n\\048\\000\\147\\000\\015\\000\\016\\000\\000\\000\\008\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\011\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\022\\000\\207\\001\\\n\\000\\000\\139\\000\\000\\000\\140\\000\\141\\000\\015\\000\\016\\000\\028\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\142\\000\\149\\003\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\144\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\022\\000\\207\\001\\000\\000\\139\\000\\209\\001\\140\\000\\141\\000\\\n\\145\\000\\000\\000\\028\\000\\000\\000\\000\\000\\210\\001\\000\\000\\142\\000\\\n\\208\\001\\000\\000\\216\\002\\000\\000\\000\\000\\146\\000\\144\\000\\000\\000\\\n\\216\\002\\044\\000\\000\\000\\000\\000\\211\\001\\000\\000\\045\\000\\000\\000\\\n\\209\\001\\048\\000\\147\\000\\145\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\210\\001\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\146\\000\\000\\000\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\211\\001\\\n\\000\\000\\045\\000\\000\\000\\000\\000\\048\\000\\147\\000\\216\\002\\216\\002\\\n\\000\\000\\216\\002\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\216\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\\n\\008\\000\\000\\000\\000\\
000\\216\\002\\000\\000\\000\\000\\011\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\\n\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\137\\000\\000\\000\\\n\\015\\000\\016\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\\n\\000\\000\\216\\002\\000\\000\\000\\000\\216\\002\\000\\000\\216\\002\\000\\000\\\n\\000\\000\\216\\002\\216\\002\\000\\000\\022\\000\\000\\000\\138\\000\\139\\000\\\n\\000\\000\\140\\000\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\142\\000\\143\\000\\000\\000\\000\\000\\000\\000\\008\\000\\\n\\000\\000\\144\\000\\000\\000\\162\\001\\000\\000\\011\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\145\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\137\\000\\194\\000\\015\\000\\\n\\016\\000\\000\\000\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\044\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\045\\000\\000\\000\\000\\000\\048\\000\\\n\\147\\000\\000\\000\\000\\000\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\\n\\140\\000\\141\\000\\000\\000\\000\\000\\028\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\142\\000\\143\\000\\000\\000\\008\\000\\000\\000\\000\\000\\000\\000\\\n\\144\\000\\000\\000\\011\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\008\\000\\009\\000\\000\\000\\000\\000\\145\\000\\010\\000\\011\\000\\\n\\000\\000\\000\\000\\137\\000\\000\\000\\015\\000\\016\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\146\\000\\000\\000\\000\\000\\000\\000\\044\\000\\000\\000\\\n\\000\\000\\015\\000\\016\\000\\045\\000\\000\\000\\000\\000\\048\\000\\147\\000\\\n\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\140\\000\\141\\000\\000\\000\\\n\\000\\000\\028\\000\\000\\000\\000\\000\\000\\000\\022\\000\\142\\000\\143\\000\\\n\\024\\000\\025\\000\\026\\000\\027\\000\\000\\000\\144\\000\\028\\000\\000\\000\\\n\\216\\002\
\000\\000\\216\\002\\182\\000\\032\\000\\000\\000\\216\\002\\000\\000\\\n\\000\\000\\000\\000\\145\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\090\\003\\000\\000\\000\\000\\000\\000\\216\\002\\146\\000\\\n\\216\\002\\216\\002\\042\\000\\044\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\045\\000\\000\\000\\000\\000\\048\\000\\147\\000\\000\\000\\000\\000\\000\\000\\\n\\044\\000\\000\\000\\000\\000\\000\\000\\216\\002\\045\\000\\216\\002\\216\\002\\\n\\048\\000\\216\\002\\216\\002\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\008\\000\\000\\000\\000\\000\\\n\\000\\000\\216\\002\\000\\000\\011\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\137\\000\\000\\000\\015\\000\\016\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\000\\000\\216\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\216\\002\\\n\\216\\002\\022\\000\\000\\000\\138\\000\\139\\000\\000\\000\\140\\000\\141\\000\\\n\\000\\000\\000\\000\\028\\000\\000\\000\\000\\000\\000\\000\\000\\000\\142\\000\\\n\\143\\000\\000\\000\\216\\002\\000\\000\\000\\000\\000\\000\\144\\000\\000\\000\\\n\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\145\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\216\\002\\000\\000\\216\\002\\216\\002\\000\\000\\216\\002\\000\\000\\000\\000\\\n\\146\\000\\000\\000\\000\\000\\216\\002\\044\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\045\\000\\000\\000\\000\\000\\048\\000\\147\\000\\216\\002\\000\\000\\\n\\216\\002\\216\\002\\000\\000\\216\\002\\216\\002\\216\\002\\216\\002\\216\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\000\\000\\000\\00
0\\\n\\000\\000\\216\\002\\000\\000\\216\\002\\216\\002\\000\\000\\216\\002\\216\\002\\\n\\216\\002\\000\\000\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\\n\\216\\002\\000\\000\\148\\002\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\\n\\148\\002\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\\n\\000\\000\\216\\002\\216\\002\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\148\\002\\000\\000\\148\\002\\148\\002\\216\\002\\129\\002\\000\\000\\000\\000\\\n\\216\\002\\000\\000\\000\\000\\129\\002\\216\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\216\\002\\000\\000\\000\\000\\216\\002\\216\\002\\148\\002\\000\\000\\\n\\148\\002\\148\\002\\000\\000\\148\\002\\148\\002\\129\\002\\129\\002\\148\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\148\\002\\148\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\148\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\129\\002\\000\\000\\129\\002\\129\\002\\000\\000\\129\\002\\129\\002\\\n\\148\\002\\000\\000\\129\\002\\000\\000\\000\\000\\000\\000\\000\\000\\129\\002\\\n\\129\\002\\000\\000\\214\\002\\000\\000\\000\\000\\148\\002\\129\\002\\000\\000\\\n\\214\\002\\148\\002\\000\\000\\000\\000\\000\\000\\000\\000\\148\\002\\000\\000\\\n\\000\\000\\148\\002\\148\\002\\129\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\214\\002\\214\\002\\000\\000\\008\\000\\000\\000\\000\\000\\\n\\129\\002\\000\\000\\000\\000\\011\\000\\129\\002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\129\\002\\000\\000\\000\\000\\129\\002\\129\\002\\214\\002\\000\\000\\\n\\214\\002\\214\\002\\000\\000\\214\\002\\214\\002\\015\\000\\016\\000\\214\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\214\\002\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\022\\000\\000\\000\\000\\000\\139\\000\\000\\000\\140\\000\\141\\000\\\n\\214\\002\\000\\000\\028\\000\\000\\000\\000\\000\\000\\000\\0
00\\000\\142\\000\\\n\\143\\000\\000\\000\\216\\002\\000\\000\\000\\000\\214\\002\\144\\000\\000\\000\\\n\\216\\002\\214\\002\\000\\000\\000\\000\\000\\000\\000\\000\\214\\002\\000\\000\\\n\\000\\000\\214\\002\\214\\002\\145\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\146\\000\\000\\000\\000\\000\\000\\000\\044\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\045\\000\\000\\000\\000\\000\\048\\000\\147\\000\\216\\002\\000\\000\\\n\\000\\000\\216\\002\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\216\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\216\\002\\000\\000\\008\\000\\\n\\009\\000\\000\\000\\000\\000\\216\\002\\010\\000\\011\\000\\008\\000\\009\\000\\\n\\000\\000\\000\\000\\000\\000\\010\\000\\011\\000\\000\\000\\000\\000\\087\\001\\\n\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\015\\000\\\n\\016\\000\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\015\\000\\016\\000\\\n\\000\\000\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\\n\\088\\001\\216\\002\\216\\002\\022\\000\\089\\001\\000\\000\\024\\000\\025\\000\\\n\\026\\000\\027\\000\\022\\000\\089\\001\\028\\000\\024\\000\\025\\000\\026\\000\\\n\\027\\000\\142\\000\\032\\000\\028\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\142\\000\\032\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\\n\\216\\002\\000\\000\\090\\001\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\\n\\042\\000\\090\\001\\091\\001\\000\\000\\000\\000\\000\\000\\000\\000\\042\\000\\\n\\000\\000\\091\\001\\092\\001\\093\\001\\000\\000\\000\\000\\044\\000\\216\\002\\\n\\216\\002\\094\\001\\000\\000\\045\\000\\000\\000\\044\\000\\048\\000\\000\\000\\\n\\094\\001\\000\\000\\045\\000\\000\\000\\000\\000\\048\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\000\\000\\216\\002\\216\\002\\\n\\216\\002\\216\\002\\000\\000\\000\\000\\216\\
002\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\216\\002\\216\\002\\000\\000\\000\\000\\180\\004\\049\\001\\050\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\051\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\181\\004\\052\\001\\053\\001\\182\\004\\054\\001\\\n\\216\\002\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\055\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\216\\002\\000\\000\\\n\\000\\000\\056\\001\\000\\000\\216\\002\\000\\000\\000\\000\\216\\002\\057\\001\\\n\\058\\001\\059\\001\\060\\001\\061\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\062\\001\\000\\000\\167\\002\\000\\000\\000\\000\\162\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\063\\001\\064\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\069\\001\\000\\000\\001\\001\\002\\001\\003\\001\\\n\\000\\000\\000\\000\\000\\000\\183\\004\\164\\001\\000\\000\\005\\001\\000\\000\\\n\\000\\000\\071\\001\\000\\000\\000\\000\\112\\000\\007\\001\\113\\000\\114\\000\\\n\\028\\000\\000\\000\\115\\000\\000\\000\\000\\000\\116\\000\\117\\000\\000\\000\\\n\\008\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\134\\001\\\n\\000\\000\\009\\001\\000\\000\\000\\000\\000\\000\\000\\000\\118\\000\\010\\001\\\n\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\000\\000\\119\\000\\120\\000\\\n\\000\\000\\000\\000\\000\\000\\168\\002\\000\\000\\000\\000\\121\\000\\000\\000\\\n\\000\\000\\000\\000\\016\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\122\\000\\123\\000\\173\\002\\166\\001\\000\\000\\174\\002\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\224\\003\\049\\001\\050\\001\\020\\001\\\n\\021\\001\\175\\002\\169\\001\\024\\001\\170\\001\\051\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\
\052\\001\\053\\001\\000\\000\\054\\001\\027\\001\\\n\\000\\000\\028\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\055\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\226\\003\\049\\001\\050\\001\\000\\000\\\n\\056\\001\\000\\000\\000\\000\\000\\000\\000\\000\\051\\001\\057\\001\\058\\001\\\n\\059\\001\\060\\001\\061\\001\\052\\001\\053\\001\\000\\000\\054\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\055\\001\\\n\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\162\\000\\000\\000\\\n\\056\\001\\000\\000\\000\\000\\063\\001\\064\\001\\000\\000\\057\\001\\058\\001\\\n\\059\\001\\060\\001\\061\\001\\000\\000\\000\\000\\000\\000\\065\\001\\066\\001\\\n\\067\\001\\068\\001\\069\\001\\000\\000\\000\\000\\000\\000\\000\\000\\225\\003\\\n\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\162\\000\\000\\000\\\n\\071\\001\\000\\000\\000\\000\\063\\001\\064\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\228\\003\\049\\001\\050\\001\\000\\000\\065\\001\\066\\001\\\n\\067\\001\\068\\001\\069\\001\\051\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\227\\003\\052\\001\\053\\001\\000\\000\\054\\001\\000\\000\\000\\000\\000\\000\\\n\\071\\001\\000\\000\\000\\000\\000\\000\\000\\000\\055\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\224\\003\\049\\001\\050\\001\\000\\000\\056\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\051\\001\\057\\001\\058\\001\\059\\001\\060\\001\\\n\\061\\001\\052\\001\\053\\001\\000\\000\\054\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\055\\001\\000\\000\\062\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\162\\000\\000\\000\\056\\001\\000\\000\\\n\\000\\000\\063\\001\\064\\001\\000\\000\\057\\001\\058\\001\\059\\001\\060\\001\\\n\\061\\001\\000\\000\\000\\000\\000\\000\\065\\001\\066\\001\\067\\001\\068\\001\\\n\\069\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\062\\001\\\n\\229\\003\\000
\\000\\000\\000\\000\\000\\162\\000\\000\\000\\071\\001\\000\\000\\\n\\000\\000\\063\\001\\064\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\226\\003\\049\\001\\050\\001\\000\\000\\065\\001\\066\\001\\067\\001\\068\\001\\\n\\069\\001\\051\\001\\000\\000\\000\\000\\000\\000\\023\\004\\000\\000\\052\\001\\\n\\053\\001\\000\\000\\054\\001\\000\\000\\000\\000\\000\\000\\071\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\055\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\228\\003\\049\\001\\050\\001\\000\\000\\056\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\051\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\052\\001\\\n\\053\\001\\000\\000\\054\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\055\\001\\000\\000\\062\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\162\\000\\000\\000\\056\\001\\000\\000\\000\\000\\063\\001\\\n\\064\\001\\000\\000\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\000\\000\\\n\\000\\000\\000\\000\\065\\001\\066\\001\\067\\001\\068\\001\\069\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\024\\004\\062\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\162\\000\\000\\000\\071\\001\\000\\000\\000\\000\\063\\001\\\n\\064\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\065\\001\\066\\001\\067\\001\\068\\001\\069\\001\\226\\004\\\n\\049\\001\\050\\001\\000\\000\\000\\000\\000\\000\\000\\000\\025\\004\\000\\000\\\n\\051\\001\\000\\000\\000\\000\\000\\000\\071\\001\\000\\000\\052\\001\\053\\001\\\n\\000\\000\\054\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\055\\001\\000\\000\\000\\000\\000\\000\\000\\000\\228\\004\\\n\\049\\001\\050\\001\\000\\000\\056\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\051\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\052\\001\\053\\001\\\n\\000\\000\\054\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\
n\\000\\000\\000\\000\\055\\001\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\162\\000\\000\\000\\056\\001\\000\\000\\000\\000\\063\\001\\064\\001\\\n\\000\\000\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\000\\000\\000\\000\\\n\\000\\000\\065\\001\\066\\001\\067\\001\\068\\001\\069\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\227\\004\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\162\\000\\000\\000\\071\\001\\000\\000\\000\\000\\063\\001\\064\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\230\\004\\049\\001\\050\\001\\\n\\000\\000\\065\\001\\066\\001\\067\\001\\068\\001\\069\\001\\051\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\229\\004\\052\\001\\053\\001\\000\\000\\054\\001\\\n\\000\\000\\000\\000\\000\\000\\071\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\055\\001\\000\\000\\000\\000\\000\\000\\000\\000\\226\\004\\049\\001\\050\\001\\\n\\000\\000\\056\\001\\000\\000\\000\\000\\000\\000\\000\\000\\051\\001\\057\\001\\\n\\058\\001\\059\\001\\060\\001\\061\\001\\052\\001\\053\\001\\000\\000\\054\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\055\\001\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\162\\000\\\n\\000\\000\\056\\001\\000\\000\\000\\000\\063\\001\\064\\001\\000\\000\\057\\001\\\n\\058\\001\\059\\001\\060\\001\\061\\001\\000\\000\\000\\000\\000\\000\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\069\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\062\\001\\231\\004\\000\\000\\000\\000\\000\\000\\162\\000\\\n\\000\\000\\071\\001\\000\\000\\000\\000\\063\\001\\064\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\228\\004\\049\\001\\050\\001\\000\\000\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\069\\001\\051\\001\\000\\000\\000\\000\\000\\000\\\n\\249\\004\\000\\000\\052\\001\\053\\001\\000\\000\\054\\001\\000\\000\\000\\000\\\n\\000\\000\\071\\001\\000\\000\\000\\000\\000\\000\\000\\000\\055\\
001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\230\\004\\049\\001\\050\\001\\000\\000\\056\\001\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\051\\001\\057\\001\\058\\001\\059\\001\\\n\\060\\001\\061\\001\\052\\001\\053\\001\\000\\000\\054\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\055\\001\\000\\000\\\n\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\162\\000\\000\\000\\056\\001\\\n\\000\\000\\000\\000\\063\\001\\064\\001\\000\\000\\057\\001\\058\\001\\059\\001\\\n\\060\\001\\061\\001\\000\\000\\000\\000\\000\\000\\065\\001\\066\\001\\067\\001\\\n\\068\\001\\069\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\250\\004\\\n\\062\\001\\049\\001\\050\\001\\000\\000\\000\\000\\162\\000\\000\\000\\071\\001\\\n\\000\\000\\051\\001\\063\\001\\064\\001\\000\\000\\000\\000\\000\\000\\052\\001\\\n\\053\\001\\000\\000\\054\\001\\000\\000\\000\\000\\065\\001\\066\\001\\067\\001\\\n\\068\\001\\069\\001\\000\\000\\055\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\251\\004\\000\\000\\000\\000\\056\\001\\000\\000\\000\\000\\071\\001\\\n\\000\\000\\000\\000\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\062\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\162\\000\\000\\000\\000\\000\\000\\000\\000\\000\\063\\001\\\n\\064\\001\\049\\001\\050\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\051\\001\\065\\001\\066\\001\\067\\001\\068\\001\\069\\001\\052\\001\\\n\\053\\001\\000\\000\\054\\001\\000\\000\\000\\000\\000\\000\\000\\000\\070\\001\\\n\\000\\000\\057\\004\\000\\000\\055\\001\\071\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\049\\001\\050\\001\\000\\000\\056\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\051\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\052\\001\\\n\\053\\001\\000\\000\\054\\001\\000\\000\\000\\000\
\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\055\\001\\000\\000\\062\\001\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\162\\000\\000\\000\\056\\001\\000\\000\\000\\000\\063\\001\\\n\\064\\001\\000\\000\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\000\\000\\\n\\000\\000\\000\\000\\065\\001\\066\\001\\067\\001\\068\\001\\069\\001\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\062\\001\\049\\001\\050\\001\\\n\\000\\000\\000\\000\\162\\000\\000\\000\\071\\001\\000\\000\\051\\001\\063\\001\\\n\\064\\001\\000\\000\\000\\000\\000\\000\\052\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\065\\001\\066\\001\\067\\001\\068\\001\\069\\001\\000\\000\\\n\\055\\001\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\049\\001\\050\\001\\\n\\000\\000\\056\\001\\000\\000\\000\\000\\071\\001\\000\\000\\000\\000\\057\\001\\\n\\058\\001\\059\\001\\060\\001\\061\\001\\052\\001\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\055\\001\\000\\000\\062\\001\\000\\000\\000\\000\\000\\000\\000\\000\\162\\000\\\n\\000\\000\\056\\001\\000\\000\\000\\000\\063\\001\\064\\001\\000\\000\\057\\001\\\n\\058\\001\\059\\001\\060\\001\\061\\001\\012\\000\\000\\000\\000\\000\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\069\\001\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\062\\001\\089\\000\\014\\000\\000\\000\\000\\000\\162\\000\\\n\\000\\000\\071\\001\\000\\000\\000\\000\\063\\001\\064\\001\\000\\000\\000\\000\\\n\\090\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\066\\001\\067\\001\\068\\001\\069\\001\\000\\000\\000\\000\\112\\000\\000\\000\\\n\\113\\000\\114\\000\\028\\000\\029\\000\\115\\000\\000\\000\\000\\000\\116\\000\\\n\\117\\000\\071\\001\\000\\000\\033\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\091\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\039\\000\\\n\\118\\000\\000\\000\\000\\000\\000
\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\119\\000\\120\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\092\\000\\\n\\121\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\000\\\n\\000\\000\\000\\000\\000\\000\\093\\000\\122\\000\\123\\000\\050\\000\"\n\nlet yycheck = \"\\003\\000\\\n\\002\\000\\005\\000\\177\\000\\177\\000\\002\\000\\174\\000\\112\\000\\112\\000\\\n\\255\\000\\185\\000\\115\\000\\008\\000\\112\\000\\139\\000\\105\\001\\061\\002\\\n\\118\\000\\002\\000\\092\\000\\180\\000\\001\\000\\133\\000\\002\\000\\104\\002\\\n\\233\\002\\221\\001\\104\\002\\114\\000\\002\\000\\002\\003\\104\\000\\002\\000\\\n\\027\\000\\034\\004\\146\\000\\059\\002\\002\\000\\000\\000\\192\\002\\003\\000\\\n\\150\\001\\002\\000\\002\\000\\196\\000\\116\\003\\198\\000\\001\\000\\245\\003\\\n\\101\\003\\059\\000\\252\\000\\164\\003\\124\\003\\223\\004\\210\\004\\119\\004\\\n\\009\\000\\008\\001\\203\\004\\000\\000\\198\\002\\000\\000\\043\\000\\121\\001\\\n\\066\\001\\123\\001\\000\\000\\110\\004\\000\\001\\093\\002\\029\\000\\024\\000\\\n\\019\\001\\229\\002\\033\\000\\027\\001\\214\\004\\090\\001\\022\\001\\006\\001\\\n\\025\\000\\022\\001\\066\\001\\000\\001\\000\\001\\031\\001\\000\\001\\108\\001\\\n\\043\\000\\042\\000\\092\\000\\178\\000\\094\\001\\000\\001\\092\\000\\079\\001\\\n\\000\\001\\110\\001\\141\\004\\040\\001\\008\\001\\000\\001\\104\\000\\017\\001\\\n\\059\\000\\037\\001\\104\\000\\092\\000\\112\\000\\029\\001\\027\\001\\115\\000\\\n\\092\\000\\117\\000\\118\\000\\023\\001\\000\\001\\000\\001\\092\\000\\104\\000\\\n\\067\\001\\092\\000\\030\\001\\120\\000\\104\\000\\078\\000\\092\\000\\080\\000\\\n\\081\\000\\000\\001\\104\\000\\092\\000\\092\\000\\104\\000\\000\\001\\000\\001\\\n\\140\\000\\141\\000\\104\\000\\143\\000\\029\\005\\092\\001\\094\\001\\104\\000\\\n\\104\\000\\053\\001\\090\\001\\055\\001\\014\\001\\153\\000\\154\\000\\017\\001\\\n\\034\\005\\117\\000\\036\\003\\012\\004\\022\\001\\065\\001\\201\\004\\066\\001\
\\n\\014\\001\\027\\001\\066\\001\\095\\001\\000\\001\\092\\001\\073\\001\\000\\001\\\n\\000\\001\\073\\001\\115\\000\\004\\000\\176\\000\\177\\000\\091\\001\\091\\001\\\n\\180\\000\\094\\001\\095\\001\\095\\001\\094\\001\\047\\001\\036\\001\\000\\001\\\n\\000\\001\\000\\001\\019\\001\\094\\001\\088\\005\\000\\001\\094\\001\\104\\001\\\n\\091\\001\\026\\001\\027\\001\\027\\001\\095\\001\\014\\001\\106\\001\\032\\000\\\n\\017\\001\\109\\001\\000\\001\\162\\000\\163\\000\\024\\001\\008\\001\\091\\001\\\n\\115\\001\\092\\001\\064\\001\\115\\001\\165\\000\\000\\001\\000\\001\\048\\001\\\n\\049\\001\\087\\001\\008\\001\\000\\001\\000\\001\\037\\001\\000\\001\\094\\001\\\n\\010\\001\\091\\001\\091\\001\\060\\001\\181\\000\\095\\001\\095\\001\\097\\001\\\n\\098\\001\\055\\003\\067\\001\\068\\001\\154\\001\\070\\001\\003\\001\\026\\005\\\n\\027\\001\\095\\001\\188\\001\\097\\001\\000\\001\\018\\001\\027\\001\\082\\002\\\n\\035\\005\\115\\001\\132\\003\\133\\005\\093\\001\\000\\000\\070\\004\\109\\001\\\n\\000\\001\\073\\004\\094\\001\\091\\001\\200\\002\\088\\001\\094\\001\\095\\001\\\n\\161\\001\\114\\001\\163\\001\\000\\001\\109\\001\\000\\001\\000\\001\\185\\005\\\n\\121\\001\\187\\005\\123\\001\\040\\001\\091\\001\\094\\001\\111\\001\\095\\001\\\n\\095\\001\\130\\001\\131\\001\\094\\001\\000\\001\\112\\001\\105\\001\\133\\001\\\n\\066\\001\\207\\001\\092\\001\\036\\005\\046\\001\\091\\001\\212\\001\\250\\000\\\n\\145\\001\\252\\000\\106\\005\\101\\002\\000\\001\\241\\002\\092\\001\\149\\001\\\n\\091\\001\\035\\001\\092\\001\\004\\001\\095\\001\\095\\001\\091\\001\\091\\001\\\n\\201\\005\\094\\001\\095\\001\\000\\001\\094\\001\\000\\001\\000\\001\\092\\001\\\n\\008\\001\\000\\001\\147\\003\\193\\001\\181\\004\\182\\004\\004\\001\\027\\001\\\n\\025\\001\\059\\001\\008\\001\\000\\001\\031\\001\\023\\002\\064\\001\\065\\001\\\n\\249\\001\\015\\001\\094\\001\\172\\000\\018\\001\\026\\001\\045\\001\\026\\001\\\n\\074\\001\\000\\001\\179\\000\\046\\001\\230\\003\\000\\001\\094
\\001\\000\\001\\\n\\000\\001\\000\\001\\003\\002\\010\\001\\000\\001\\105\\001\\155\\004\\066\\001\\\n\\000\\001\\094\\001\\010\\001\\094\\001\\221\\004\\000\\001\\114\\001\\004\\001\\\n\\007\\001\\099\\001\\019\\001\\008\\001\\018\\001\\121\\001\\060\\005\\123\\001\\\n\\092\\001\\026\\001\\015\\001\\109\\001\\000\\001\\018\\001\\130\\001\\131\\001\\\n\\035\\001\\133\\001\\000\\001\\132\\005\\066\\001\\106\\004\\010\\001\\091\\001\\\n\\089\\001\\090\\001\\000\\001\\095\\001\\093\\001\\145\\001\\000\\001\\096\\001\\\n\\049\\001\\149\\001\\044\\004\\092\\001\\092\\001\\153\\001\\154\\001\\092\\001\\\n\\059\\001\\092\\001\\092\\001\\060\\001\\247\\002\\064\\001\\065\\001\\094\\001\\\n\\065\\001\\000\\001\\014\\001\\068\\001\\000\\001\\070\\001\\091\\001\\074\\001\\\n\\067\\001\\243\\001\\027\\001\\010\\001\\015\\001\\066\\001\\178\\001\\179\\001\\\n\\180\\001\\017\\001\\022\\001\\001\\004\\073\\001\\092\\001\\186\\001\\015\\001\\\n\\095\\001\\130\\001\\131\\001\\015\\001\\092\\001\\092\\001\\092\\001\\095\\001\\\n\\099\\001\\000\\001\\094\\001\\000\\001\\000\\001\\018\\001\\094\\001\\032\\001\\\n\\018\\001\\092\\001\\109\\001\\207\\001\\208\\001\\040\\003\\111\\001\\040\\001\\\n\\212\\001\\066\\001\\043\\001\\046\\003\\216\\001\\237\\003\\068\\003\\219\\001\\\n\\092\\001\\074\\005\\017\\001\\095\\001\\113\\001\\000\\001\\092\\001\\027\\001\\\n\\228\\001\\229\\001\\066\\001\\018\\001\\000\\001\\243\\004\\066\\001\\171\\003\\\n\\094\\001\\188\\001\\018\\001\\004\\001\\094\\001\\070\\001\\066\\001\\243\\001\\\n\\244\\001\\243\\001\\094\\001\\003\\001\\077\\001\\243\\001\\004\\001\\083\\001\\\n\\065\\001\\253\\001\\008\\001\\094\\001\\101\\002\\092\\001\\000\\000\\003\\002\\\n\\112\\002\\015\\001\\243\\001\\008\\001\\018\\001\\131\\002\\094\\001\\243\\001\\\n\\228\\001\\229\\001\\014\\002\\113\\002\\114\\002\\243\\001\\011\\002\\092\\001\\\n\\243\\001\\094\\001\\192\\002\\003\\001\\094\\001\\243\\001\\176\\004\\243\\001\\\n\\000\\001\\030\\001\\243\\001\\243\\001\\091\\00
1\\043\\005\\091\\001\\091\\001\\\n\\247\\001\\028\\002\\029\\002\\095\\001\\109\\001\\238\\001\\014\\001\\094\\001\\\n\\014\\001\\017\\001\\022\\001\\000\\001\\135\\003\\092\\001\\022\\001\\094\\001\\\n\\073\\001\\063\\005\\055\\001\\027\\001\\066\\001\\057\\002\\094\\001\\008\\001\\\n\\091\\001\\233\\002\\233\\002\\233\\002\\065\\001\\217\\004\\019\\001\\091\\001\\\n\\169\\002\\143\\004\\171\\002\\022\\001\\023\\002\\026\\001\\008\\001\\047\\001\\\n\\000\\001\\026\\002\\014\\001\\000\\001\\167\\003\\030\\001\\082\\002\\004\\001\\\n\\152\\005\\000\\001\\027\\002\\008\\001\\189\\002\\065\\001\\014\\001\\248\\002\\\n\\002\\001\\014\\001\\015\\001\\048\\001\\030\\001\\018\\001\\027\\001\\054\\002\\\n\\022\\001\\101\\002\\008\\001\\113\\001\\104\\002\\106\\001\\055\\001\\060\\001\\\n\\109\\001\\079\\001\\110\\002\\111\\002\\065\\001\\113\\002\\114\\002\\068\\001\\\n\\065\\001\\070\\001\\094\\001\\091\\001\\035\\001\\055\\001\\015\\003\\095\\001\\\n\\017\\003\\097\\001\\098\\001\\127\\002\\121\\002\\065\\001\\036\\001\\065\\001\\\n\\132\\002\\027\\001\\083\\005\\031\\005\\008\\001\\137\\002\\022\\001\\090\\001\\\n\\022\\001\\065\\001\\066\\001\\115\\001\\059\\001\\066\\001\\042\\005\\147\\002\\\n\\148\\002\\064\\001\\065\\001\\111\\002\\066\\001\\005\\003\\062\\001\\242\\003\\\n\\066\\001\\106\\001\\111\\001\\074\\001\\109\\001\\244\\002\\249\\003\\047\\001\\\n\\036\\001\\094\\001\\062\\005\\127\\002\\090\\001\\169\\002\\094\\001\\171\\002\\\n\\106\\001\\094\\001\\001\\003\\109\\001\\176\\002\\137\\002\\027\\001\\035\\001\\\n\\094\\001\\181\\002\\118\\004\\065\\001\\099\\001\\006\\005\\092\\001\\080\\003\\\n\\006\\005\\189\\002\\190\\002\\019\\001\\192\\002\\035\\001\\109\\001\\182\\003\\\n\\183\\003\\093\\005\\050\\003\\022\\001\\094\\001\\030\\002\\202\\002\\059\\001\\\n\\008\\001\\027\\001\\052\\003\\052\\003\\000\\001\\065\\001\\000\\000\\000\\001\\\n\\108\\005\\097\\001\\098\\001\\004\\001\\176\\002\\059\\001\\253\\001\\008\\001\\\n\\092\\001\\049\\001\\064\\003\\0
65\\001\\201\\002\\039\\003\\015\\001\\253\\001\\\n\\073\\003\\018\\001\\088\\001\\115\\001\\060\\001\\233\\002\\026\\001\\064\\002\\\n\\065\\002\\152\\003\\152\\003\\057\\005\\068\\001\\059\\005\\070\\001\\179\\004\\\n\\066\\001\\094\\001\\102\\001\\247\\002\\248\\002\\066\\001\\201\\002\\073\\001\\\n\\094\\001\\109\\001\\112\\001\\111\\001\\073\\001\\255\\002\\000\\001\\195\\004\\\n\\102\\001\\255\\002\\004\\001\\067\\001\\008\\003\\096\\004\\008\\001\\109\\001\\\n\\010\\001\\111\\001\\004\\001\\094\\001\\014\\001\\015\\001\\008\\001\\008\\001\\\n\\018\\001\\066\\001\\057\\002\\055\\001\\014\\001\\015\\001\\014\\001\\111\\001\\\n\\018\\001\\027\\001\\003\\001\\057\\002\\064\\001\\150\\005\\151\\005\\000\\001\\\n\\018\\001\\115\\001\\014\\001\\027\\001\\040\\003\\039\\003\\115\\001\\130\\004\\\n\\236\\004\\039\\003\\046\\003\\014\\001\\008\\003\\094\\001\\207\\003\\044\\003\\\n\\052\\003\\140\\004\\019\\001\\055\\003\\000\\001\\035\\001\\039\\003\\019\\001\\\n\\027\\001\\026\\001\\000\\001\\039\\003\\064\\003\\090\\001\\047\\001\\060\\003\\\n\\066\\001\\039\\003\\155\\002\\156\\002\\039\\003\\073\\003\\030\\001\\073\\001\\\n\\066\\001\\039\\003\\067\\001\\079\\003\\112\\001\\059\\001\\039\\003\\039\\003\\\n\\049\\001\\110\\001\\036\\001\\065\\001\\048\\001\\065\\001\\066\\001\\079\\001\\\n\\177\\002\\091\\001\\092\\001\\060\\001\\094\\001\\095\\001\\215\\005\\055\\001\\\n\\060\\001\\000\\000\\067\\001\\068\\001\\094\\001\\070\\001\\191\\002\\030\\001\\\n\\068\\001\\065\\001\\070\\001\\072\\000\\079\\001\\055\\001\\000\\001\\113\\001\\\n\\097\\001\\098\\001\\094\\001\\003\\001\\064\\001\\065\\001\\064\\001\\014\\001\\\n\\102\\001\\014\\001\\064\\001\\065\\001\\064\\001\\129\\003\\044\\004\\109\\001\\\n\\055\\001\\019\\001\\115\\001\\135\\003\\027\\001\\045\\001\\046\\001\\139\\003\\\n\\026\\001\\102\\000\\065\\001\\000\\000\\022\\001\\095\\001\\111\\001\\147\\003\\\n\\022\\001\\149\\003\\106\\001\\111\\001\\152\\003\\109\\001\\154\\003\\155\\003\\\n\\156\\003\\058\\
004\\200\\005\\159\\003\\160\\003\\097\\001\\048\\001\\049\\001\\\n\\164\\003\\007\\004\\166\\003\\167\\003\\000\\001\\047\\001\\112\\001\\064\\001\\\n\\065\\001\\109\\001\\060\\001\\083\\001\\065\\001\\066\\001\\178\\003\\139\\003\\\n\\009\\003\\067\\001\\068\\001\\106\\001\\070\\001\\000\\001\\109\\001\\147\\003\\\n\\079\\001\\189\\003\\035\\004\\065\\001\\100\\001\\022\\003\\095\\004\\014\\001\\\n\\000\\001\\015\\001\\004\\001\\159\\003\\018\\001\\194\\003\\008\\001\\014\\001\\\n\\019\\001\\037\\001\\074\\001\\207\\003\\027\\001\\015\\001\\000\\001\\026\\001\\\n\\027\\001\\003\\001\\037\\001\\019\\001\\027\\001\\110\\001\\178\\003\\097\\001\\\n\\098\\001\\027\\001\\026\\001\\013\\001\\014\\001\\111\\001\\064\\001\\017\\001\\\n\\000\\000\\014\\001\\131\\004\\008\\001\\017\\001\\048\\001\\049\\001\\000\\001\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\108\\001\\064\\001\\242\\003\\105\\001\\\n\\048\\001\\060\\001\\065\\001\\030\\001\\066\\001\\249\\003\\040\\001\\041\\001\\\n\\067\\001\\068\\001\\065\\001\\070\\001\\060\\001\\001\\004\\000\\001\\097\\001\\\n\\066\\001\\026\\001\\176\\004\\007\\004\\068\\001\\002\\004\\070\\001\\000\\001\\\n\\012\\004\\135\\004\\060\\001\\109\\001\\055\\001\\063\\001\\000\\001\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\022\\001\\066\\001\\067\\001\\065\\001\\073\\001\\\n\\074\\001\\147\\001\\019\\001\\073\\001\\022\\001\\000\\001\\080\\001\\035\\004\\\n\\064\\001\\026\\001\\066\\001\\037\\001\\111\\001\\206\\004\\127\\003\\128\\003\\\n\\044\\004\\045\\004\\092\\001\\075\\001\\094\\001\\049\\004\\096\\001\\111\\001\\\n\\064\\001\\035\\001\\000\\001\\220\\004\\141\\003\\142\\003\\058\\004\\026\\001\\\n\\049\\001\\014\\001\\108\\001\\148\\003\\010\\001\\111\\001\\065\\001\\106\\001\\\n\\064\\001\\115\\001\\109\\001\\060\\001\\157\\003\\115\\001\\000\\001\\065\\001\\\n\\064\\001\\059\\001\\243\\004\\068\\001\\067\\005\\070\\001\\064\\001\\065\\001\\\n\\112\\001\\045\\004\\047\\001\\075\\001\\064\\001\\049\\004\\031\\004\\012\\001\\\n\
\074\\001\\019\\001\\090\\001\\095\\004\\096\\004\\109\\001\\098\\004\\028\\001\\\n\\026\\001\\000\\001\\064\\001\\018\\005\\018\\005\\004\\001\\208\\004\\208\\004\\\n\\108\\004\\008\\001\\031\\001\\010\\001\\208\\004\\109\\001\\110\\001\\014\\001\\\n\\015\\001\\099\\001\\027\\001\\018\\001\\064\\001\\064\\001\\111\\001\\049\\001\\\n\\112\\001\\064\\001\\100\\001\\109\\001\\027\\001\\050\\001\\130\\004\\131\\004\\\n\\041\\005\\109\\001\\060\\001\\004\\001\\075\\001\\066\\001\\098\\004\\008\\001\\\n\\140\\004\\067\\001\\068\\001\\143\\004\\070\\001\\074\\001\\015\\001\\109\\001\\\n\\108\\004\\018\\001\\071\\001\\080\\001\\061\\005\\064\\001\\083\\001\\027\\001\\\n\\037\\001\\066\\001\\027\\001\\159\\004\\053\\001\\064\\001\\055\\001\\084\\001\\\n\\157\\004\\109\\001\\110\\001\\066\\001\\083\\005\\083\\005\\086\\005\\064\\001\\\n\\065\\001\\112\\001\\073\\001\\094\\001\\176\\004\\035\\001\\064\\001\\040\\001\\\n\\101\\001\\181\\004\\182\\004\\064\\001\\066\\001\\111\\001\\064\\001\\064\\001\\\n\\064\\001\\189\\004\\109\\001\\110\\001\\091\\001\\092\\001\\066\\001\\094\\001\\\n\\095\\001\\066\\001\\109\\001\\000\\001\\053\\001\\059\\001\\055\\001\\056\\001\\\n\\090\\001\\066\\001\\109\\001\\065\\001\\208\\004\\209\\004\\210\\004\\088\\001\\\n\\065\\001\\042\\004\\113\\001\\000\\001\\109\\001\\046\\004\\019\\001\\000\\000\\\n\\082\\002\\221\\004\\051\\004\\223\\004\\110\\001\\026\\001\\022\\001\\109\\001\\\n\\000\\001\\189\\004\\066\\001\\109\\001\\004\\001\\109\\001\\019\\001\\112\\001\\\n\\008\\001\\073\\001\\010\\001\\068\\004\\069\\004\\026\\001\\014\\001\\064\\001\\\n\\102\\001\\074\\004\\018\\001\\048\\001\\110\\002\\209\\004\\210\\004\\109\\001\\\n\\027\\001\\064\\001\\254\\004\\027\\001\\109\\001\\035\\001\\094\\001\\060\\001\\\n\\221\\001\\064\\001\\006\\005\\048\\001\\000\\001\\075\\001\\067\\001\\068\\001\\\n\\027\\001\\070\\001\\099\\004\\067\\001\\016\\005\\035\\001\\018\\005\\060\\001\\\n\\022\\001\\013\\001\\022\\005\\115\\001\\064\\001\\059\\001\\067\\00
1\\068\\001\\\n\\064\\001\\070\\001\\064\\001\\065\\001\\109\\001\\238\\001\\026\\001\\066\\001\\\n\\028\\001\\029\\001\\254\\004\\039\\005\\074\\001\\059\\001\\109\\001\\000\\001\\\n\\004\\001\\073\\001\\112\\001\\065\\001\\008\\001\\041\\001\\109\\001\\066\\001\\\n\\031\\001\\000\\000\\111\\001\\015\\001\\016\\005\\057\\005\\018\\001\\059\\005\\\n\\018\\001\\230\\002\\022\\005\\091\\001\\092\\001\\099\\001\\094\\001\\095\\001\\\n\\060\\001\\109\\001\\111\\001\\050\\001\\000\\001\\109\\001\\074\\005\\109\\001\\\n\\068\\001\\246\\002\\027\\002\\039\\005\\037\\001\\250\\002\\074\\001\\083\\005\\\n\\102\\001\\113\\001\\027\\005\\007\\000\\080\\001\\030\\005\\010\\000\\109\\001\\\n\\177\\004\\013\\000\\014\\000\\027\\001\\000\\000\\017\\000\\018\\000\\019\\000\\\n\\020\\000\\021\\000\\066\\001\\023\\000\\096\\001\\066\\001\\066\\001\\192\\004\\\n\\193\\004\\037\\001\\030\\000\\066\\001\\025\\003\\113\\005\\034\\000\\064\\001\\\n\\108\\001\\037\\000\\038\\000\\111\\001\\027\\001\\027\\001\\083\\001\\000\\001\\\n\\124\\005\\086\\002\\046\\000\\047\\000\\083\\001\\247\\002\\050\\000\\051\\000\\\n\\004\\001\\023\\001\\066\\001\\135\\005\\008\\001\\035\\001\\000\\001\\000\\001\\\n\\081\\005\\082\\005\\091\\001\\084\\005\\085\\005\\066\\001\\018\\001\\108\\001\\\n\\146\\005\\026\\001\\150\\005\\151\\005\\146\\005\\113\\005\\109\\001\\027\\001\\\n\\156\\005\\157\\005\\019\\001\\066\\001\\066\\001\\059\\001\\083\\001\\031\\001\\\n\\026\\001\\026\\001\\064\\001\\065\\001\\000\\000\\089\\000\\090\\000\\091\\000\\\n\\000\\001\\093\\000\\066\\001\\135\\005\\074\\001\\071\\001\\040\\003\\179\\005\\\n\\009\\005\\073\\001\\050\\001\\006\\001\\046\\003\\185\\005\\186\\005\\187\\005\\\n\\049\\001\\004\\001\\084\\001\\191\\005\\005\\000\\008\\001\\066\\001\\022\\001\\\n\\156\\005\\157\\005\\026\\001\\060\\001\\015\\001\\099\\001\\094\\001\\018\\001\\\n\\166\\003\\125\\000\\064\\001\\068\\001\\066\\001\\070\\001\\004\\001\\109\\001\\\n\\212\\005\\022\\001\\008\\001\\215\\005\\000\\000\\1
37\\000\\138\\000\\179\\005\\\n\\047\\001\\221\\005\\222\\005\\115\\001\\018\\001\\095\\001\\186\\005\\189\\003\\\n\\064\\001\\149\\000\\088\\001\\191\\005\\055\\001\\027\\001\\057\\001\\058\\001\\\n\\059\\001\\000\\001\\061\\001\\075\\001\\000\\001\\064\\001\\065\\001\\022\\001\\\n\\164\\000\\053\\001\\000\\001\\055\\001\\189\\005\\078\\005\\111\\001\\066\\001\\\n\\212\\005\\173\\000\\112\\001\\091\\001\\064\\001\\065\\001\\199\\005\\019\\001\\\n\\022\\001\\221\\005\\222\\005\\026\\001\\093\\001\\019\\001\\026\\001\\090\\001\\\n\\047\\001\\210\\005\\211\\005\\232\\002\\026\\001\\135\\003\\097\\001\\004\\001\\\n\\027\\001\\000\\001\\000\\001\\008\\001\\109\\001\\053\\001\\054\\001\\055\\001\\\n\\056\\001\\047\\001\\109\\001\\110\\001\\048\\001\\000\\000\\251\\002\\120\\005\\\n\\064\\001\\065\\001\\048\\001\\000\\003\\093\\001\\000\\001\\027\\001\\128\\005\\\n\\060\\001\\109\\001\\164\\003\\026\\001\\026\\001\\167\\003\\060\\001\\067\\001\\\n\\068\\001\\000\\001\\070\\001\\094\\001\\109\\001\\004\\001\\068\\001\\018\\001\\\n\\070\\001\\008\\001\\023\\003\\010\\001\\022\\001\\016\\001\\095\\001\\014\\001\\\n\\015\\001\\004\\001\\109\\001\\140\\000\\141\\000\\008\\001\\159\\005\\004\\001\\\n\\027\\001\\253\\000\\254\\000\\008\\001\\027\\001\\109\\001\\040\\001\\018\\001\\\n\\153\\000\\154\\000\\015\\001\\091\\001\\049\\003\\018\\001\\095\\001\\095\\001\\\n\\027\\001\\178\\005\\000\\001\\111\\001\\000\\001\\065\\001\\027\\001\\019\\001\\\n\\004\\001\\111\\001\\008\\001\\071\\001\\008\\001\\022\\001\\010\\001\\176\\000\\\n\\064\\001\\065\\001\\014\\001\\000\\001\\065\\001\\033\\001\\018\\001\\071\\001\\\n\\084\\001\\037\\001\\093\\001\\066\\001\\073\\001\\010\\001\\006\\001\\027\\001\\\n\\242\\003\\000\\001\\073\\001\\014\\001\\084\\001\\214\\005\\017\\001\\249\\003\\\n\\004\\001\\094\\001\\090\\001\\010\\001\\008\\001\\066\\001\\065\\001\\053\\001\\\n\\027\\001\\055\\001\\014\\001\\015\\001\\091\\001\\092\\001\\018\\001\\094\\001\\\n\\095\\001\\077\\001\\012\\004\\065\\
001\\113\\003\\073\\001\\110\\001\\053\\001\\\n\\076\\001\\055\\001\\053\\001\\079\\001\\055\\001\\081\\001\\066\\001\\083\\001\\\n\\064\\001\\065\\001\\113\\001\\065\\001\\000\\001\\073\\001\\065\\001\\055\\001\\\n\\004\\001\\057\\001\\058\\001\\059\\001\\008\\001\\061\\001\\010\\001\\003\\001\\\n\\064\\001\\065\\001\\014\\001\\065\\001\\066\\001\\067\\001\\018\\001\\091\\001\\\n\\092\\001\\055\\001\\094\\001\\095\\001\\112\\001\\059\\001\\066\\001\\027\\001\\\n\\116\\001\\063\\001\\064\\001\\053\\001\\054\\001\\055\\001\\056\\001\\064\\001\\\n\\065\\001\\000\\000\\090\\001\\168\\003\\169\\003\\113\\001\\064\\001\\065\\001\\\n\\078\\001\\097\\001\\134\\001\\135\\001\\000\\001\\064\\001\\053\\001\\003\\001\\\n\\055\\001\\016\\001\\055\\001\\184\\003\\064\\001\\109\\001\\110\\001\\022\\001\\\n\\227\\001\\013\\001\\065\\001\\064\\001\\027\\001\\092\\001\\096\\004\\234\\001\\\n\\197\\003\\053\\001\\013\\001\\055\\001\\160\\001\\073\\001\\026\\001\\109\\001\\\n\\028\\001\\029\\001\\008\\001\\167\\001\\000\\001\\065\\001\\014\\001\\171\\001\\\n\\213\\003\\028\\001\\029\\001\\109\\001\\040\\001\\041\\001\\010\\001\\091\\001\\\n\\092\\001\\095\\001\\094\\001\\095\\001\\184\\001\\185\\001\\041\\001\\065\\001\\\n\\130\\004\\189\\001\\036\\001\\191\\001\\064\\001\\065\\001\\073\\001\\073\\001\\\n\\060\\001\\014\\001\\140\\004\\063\\001\\022\\001\\113\\001\\109\\001\\244\\003\\\n\\068\\001\\060\\001\\206\\001\\090\\001\\063\\001\\000\\001\\074\\001\\022\\001\\\n\\003\\001\\068\\001\\065\\001\\066\\001\\080\\001\\159\\004\\218\\001\\074\\001\\\n\\220\\001\\221\\001\\013\\001\\066\\001\\067\\001\\080\\001\\017\\001\\014\\001\\\n\\092\\001\\064\\001\\065\\001\\022\\001\\096\\001\\210\\001\\211\\001\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\181\\004\\182\\004\\096\\001\\095\\001\\008\\001\\\n\\108\\001\\130\\001\\131\\001\\111\\001\\000\\000\\000\\001\\041\\001\\251\\001\\\n\\037\\004\\108\\001\\095\\001\\055\\001\\111\\001\\004\\001\\023\\001\\059\\001\\\n\\153\\001\\008\\001\
\112\\001\\063\\001\\064\\001\\030\\001\\092\\001\\103\\001\\\n\\015\\001\\060\\001\\014\\001\\018\\001\\063\\001\\022\\001\\065\\001\\066\\001\\\n\\067\\001\\068\\001\\078\\001\\221\\004\\027\\001\\223\\004\\073\\001\\074\\001\\\n\\027\\001\\178\\001\\179\\001\\180\\001\\053\\001\\080\\001\\055\\001\\092\\001\\\n\\091\\001\\186\\001\\092\\001\\237\\004\\238\\004\\109\\001\\000\\000\\092\\001\\\n\\065\\001\\092\\001\\094\\001\\094\\001\\055\\001\\096\\001\\057\\001\\058\\001\\\n\\059\\001\\109\\001\\061\\001\\094\\001\\092\\001\\064\\001\\065\\001\\014\\001\\\n\\101\\004\\108\\001\\103\\004\\066\\001\\111\\001\\115\\001\\020\\001\\216\\001\\\n\\115\\001\\109\\001\\115\\001\\064\\001\\065\\001\\046\\001\\081\\001\\109\\001\\\n\\109\\001\\062\\001\\071\\001\\108\\001\\002\\001\\081\\002\\089\\001\\090\\001\\\n\\084\\002\\106\\001\\086\\002\\109\\001\\109\\001\\109\\001\\097\\001\\084\\001\\\n\\073\\001\\073\\001\\100\\001\\244\\001\\137\\004\\090\\001\\027\\001\\109\\001\\\n\\015\\001\\142\\004\\109\\001\\110\\001\\001\\000\\002\\000\\003\\000\\004\\000\\\n\\005\\000\\092\\001\\055\\001\\000\\001\\094\\001\\064\\001\\064\\001\\008\\001\\\n\\109\\001\\110\\001\\065\\001\\109\\001\\040\\001\\014\\002\\001\\001\\002\\001\\\n\\124\\002\\000\\001\\167\\004\\014\\001\\018\\001\\004\\001\\009\\001\\062\\001\\\n\\074\\005\\008\\001\\062\\001\\010\\001\\015\\001\\016\\001\\062\\001\\014\\001\\\n\\092\\001\\027\\001\\142\\002\\064\\001\\144\\002\\094\\001\\146\\002\\079\\001\\\n\\027\\001\\014\\001\\150\\002\\014\\001\\027\\001\\006\\001\\094\\001\\073\\001\\\n\\109\\001\\036\\001\\199\\004\\200\\004\\095\\001\\064\\001\\075\\001\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\073\\001\\111\\005\\022\\001\\094\\001\\\n\\172\\002\\092\\001\\014\\001\\073\\001\\027\\001\\218\\004\\006\\001\\040\\001\\\n\\008\\001\\222\\004\\061\\001\\094\\001\\027\\001\\014\\001\\027\\001\\066\\001\\\n\\021\\001\\086\\001\\064\\001\\062\\001\\071\\001\\072\\001\\194\\002\\062\\001\\\n\\062
\\001\\003\\001\\073\\001\\199\\002\\200\\002\\014\\001\\062\\001\\082\\001\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\062\\001\\086\\001\\210\\002\\095\\001\\\n\\212\\002\\027\\001\\090\\001\\073\\001\\091\\001\\092\\001\\003\\005\\094\\001\\\n\\095\\001\\100\\001\\091\\001\\223\\002\\224\\002\\027\\001\\094\\001\\055\\001\\\n\\101\\001\\057\\001\\058\\001\\059\\001\\094\\001\\061\\001\\234\\002\\094\\001\\\n\\064\\001\\065\\001\\113\\001\\132\\002\\088\\001\\241\\002\\027\\001\\185\\005\\\n\\094\\001\\187\\005\\014\\001\\020\\001\\000\\001\\015\\001\\022\\001\\003\\001\\\n\\252\\002\\053\\001\\147\\002\\148\\002\\094\\001\\008\\001\\043\\005\\062\\001\\\n\\080\\001\\013\\001\\090\\001\\062\\001\\092\\001\\062\\001\\051\\005\\094\\001\\\n\\112\\001\\097\\001\\112\\001\\094\\001\\088\\001\\065\\001\\026\\001\\019\\003\\\n\\028\\001\\029\\001\\063\\005\\021\\001\\091\\001\\109\\001\\110\\001\\095\\001\\\n\\094\\001\\014\\001\\014\\001\\014\\001\\181\\002\\041\\001\\014\\001\\027\\001\\\n\\027\\001\\037\\003\\019\\001\\091\\001\\022\\001\\112\\001\\000\\001\\088\\001\\\n\\014\\001\\003\\001\\014\\001\\014\\001\\014\\001\\000\\000\\000\\000\\008\\001\\\n\\060\\001\\092\\001\\065\\001\\013\\001\\092\\001\\036\\001\\109\\001\\017\\001\\\n\\068\\001\\036\\001\\062\\003\\109\\001\\022\\001\\065\\003\\074\\001\\067\\003\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\080\\001\\005\\000\\006\\001\\036\\001\\\n\\008\\001\\064\\001\\078\\003\\092\\001\\092\\001\\090\\001\\082\\003\\041\\001\\\n\\092\\001\\040\\001\\064\\001\\036\\001\\096\\001\\089\\003\\094\\001\\053\\001\\\n\\024\\000\\093\\003\\053\\001\\064\\001\\091\\001\\026\\003\\000\\000\\064\\001\\\n\\108\\001\\064\\001\\060\\001\\111\\001\\064\\001\\063\\001\\036\\003\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\111\\003\\064\\001\\064\\001\\114\\003\\073\\001\\\n\\074\\001\\064\\001\\118\\003\\186\\005\\254\\004\\111\\005\\080\\001\\055\\001\\\n\\187\\002\\057\\001\\058\\001\\059\\001\\120\\003\\061\\001\\171\\005\\0
26\\002\\\n\\064\\001\\065\\001\\092\\001\\131\\002\\094\\001\\000\\001\\096\\001\\018\\005\\\n\\096\\001\\094\\001\\129\\003\\143\\003\\187\\001\\057\\002\\063\\002\\141\\000\\\n\\183\\001\\166\\004\\108\\001\\212\\003\\155\\004\\111\\001\\195\\005\\196\\005\\\n\\171\\002\\115\\001\\090\\001\\145\\001\\108\\001\\006\\005\\203\\005\\243\\004\\\n\\255\\255\\097\\001\\043\\005\\136\\004\\255\\255\\255\\255\\170\\003\\171\\003\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\109\\001\\110\\001\\220\\005\\\n\\180\\003\\181\\003\\255\\255\\255\\255\\255\\255\\015\\001\\079\\003\\255\\255\\\n\\255\\255\\121\\000\\255\\255\\255\\255\\055\\001\\255\\255\\057\\001\\058\\001\\\n\\059\\001\\197\\003\\061\\001\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\139\\000\\140\\000\\141\\000\\255\\255\\143\\000\\\n\\064\\001\\065\\001\\044\\001\\045\\001\\046\\001\\255\\255\\081\\001\\071\\001\\\n\\255\\255\\153\\000\\154\\000\\255\\255\\255\\255\\077\\001\\089\\001\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\233\\003\\097\\001\\235\\003\\\n\\129\\003\\255\\255\\090\\001\\255\\255\\255\\255\\071\\001\\072\\001\\243\\003\\\n\\176\\000\\177\\000\\109\\001\\110\\001\\180\\000\\255\\255\\255\\255\\255\\255\\\n\\252\\003\\083\\001\\084\\001\\085\\001\\086\\001\\109\\001\\110\\001\\255\\255\\\n\\255\\255\\154\\003\\155\\003\\156\\003\\255\\255\\009\\004\\255\\255\\160\\003\\\n\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\166\\003\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\007\\000\\255\\255\\255\\255\\010\\000\\255\\255\\\n\\255\\255\\013\\000\\014\\000\\255\\255\\255\\255\\017\\000\\018\\000\\019\\000\\\n\\020\\000\\021\\000\\255\\255\\023\\000\\189\\003\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\034\\000\\255\\255\\\n\\255\\255\\037\\000\\038\\000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\061\\004\\046\\000\\047\\000\\064\\004\\255\\
255\\050\\000\\051\\000\\\n\\055\\001\\255\\255\\057\\001\\058\\001\\059\\001\\255\\255\\061\\001\\255\\255\\\n\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\081\\004\\255\\255\\083\\004\\\n\\255\\255\\085\\004\\255\\255\\087\\004\\088\\004\\255\\255\\255\\255\\255\\255\\\n\\092\\004\\255\\255\\255\\255\\255\\255\\255\\255\\097\\004\\000\\001\\255\\255\\\n\\100\\004\\003\\001\\102\\004\\090\\001\\255\\255\\089\\000\\090\\000\\091\\000\\\n\\255\\255\\093\\000\\097\\001\\013\\001\\014\\001\\255\\255\\255\\255\\017\\001\\\n\\255\\255\\255\\255\\118\\004\\255\\255\\255\\255\\255\\255\\109\\001\\110\\001\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\137\\004\\040\\001\\041\\001\\\n\\255\\255\\255\\255\\142\\004\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\149\\004\\255\\255\\028\\001\\029\\001\\137\\000\\138\\000\\087\\001\\\n\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\\n\\041\\001\\067\\001\\068\\001\\255\\255\\255\\255\\255\\255\\170\\004\\073\\001\\\n\\074\\001\\255\\255\\174\\004\\255\\255\\255\\255\\255\\255\\080\\001\\179\\004\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\173\\000\\092\\001\\068\\001\\094\\001\\255\\255\\096\\001\\195\\004\\\n\\196\\004\\074\\001\\198\\004\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\255\\255\\108\\001\\000\\001\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\212\\004\\115\\001\\255\\255\\255\\255\\255\\255\\255\\255\\150\\001\\096\\001\\\n\\255\\255\\153\\001\\154\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\235\\004\\\n\\236\\004\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\242\\004\\255\\255\\\n\\255\\255\\255\\255\\178\\001\\179\\001\\180\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\186\\001\\255\\255\
\255\\255\\001\\005\\255\\255\\003\\005\\\n\\255\\255\\193\\001\\055\\001\\255\\255\\057\\001\\058\\001\\059\\001\\255\\255\\\n\\061\\001\\253\\000\\254\\000\\064\\001\\065\\001\\017\\005\\255\\255\\207\\001\\\n\\208\\001\\255\\255\\255\\255\\255\\255\\212\\001\\255\\255\\255\\255\\255\\255\\\n\\216\\001\\000\\000\\255\\255\\219\\001\\081\\001\\033\\005\\255\\255\\019\\001\\\n\\255\\255\\255\\255\\038\\005\\227\\001\\089\\001\\090\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\234\\001\\255\\255\\097\\001\\033\\001\\000\\000\\051\\005\\\n\\255\\255\\037\\001\\255\\255\\255\\255\\244\\001\\255\\255\\255\\255\\108\\001\\\n\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\253\\001\\255\\255\\255\\255\\\n\\023\\001\\255\\255\\255\\255\\003\\002\\072\\005\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\077\\005\\255\\255\\255\\255\\080\\005\\036\\001\\014\\002\\255\\255\\\n\\255\\255\\017\\002\\255\\255\\087\\005\\255\\255\\255\\255\\255\\255\\091\\005\\\n\\255\\255\\255\\255\\026\\002\\095\\005\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\055\\001\\255\\255\\057\\001\\058\\001\\059\\001\\255\\255\\061\\001\\255\\255\\\n\\255\\255\\064\\001\\065\\001\\255\\255\\112\\005\\255\\255\\255\\255\\005\\000\\\n\\255\\255\\255\\255\\255\\255\\009\\000\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\057\\002\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\024\\000\\090\\001\\255\\255\\137\\005\\138\\005\\255\\255\\\n\\255\\255\\255\\255\\097\\001\\143\\005\\255\\255\\255\\255\\255\\255\\147\\005\\\n\\255\\255\\255\\255\\134\\001\\255\\255\\042\\000\\153\\005\\109\\001\\110\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\161\\005\\162\\005\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\167\\005\\168\\005\\169\\005\\170\\005\\255\\255\\\n\\005\\000\\006\\001\\007\\001\\255\\255\\255\\255\\015\\001\\011\\001\\012\\001\\\n\\180\\005\\181\\005\\255\\255\\167\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\078\\000\\255\\255\\080
\\000\\081\\000\\255\\255\\193\\005\\194\\005\\255\\255\\\n\\196\\005\\030\\001\\031\\001\\131\\002\\132\\002\\255\\255\\255\\255\\255\\255\\\n\\204\\005\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\147\\002\\148\\002\\050\\001\\218\\005\\000\\000\\\n\\053\\001\\054\\001\\055\\001\\056\\001\\224\\005\\225\\005\\059\\001\\255\\255\\\n\\066\\001\\255\\255\\255\\255\\064\\001\\065\\001\\071\\001\\072\\001\\255\\255\\\n\\255\\255\\255\\255\\170\\002\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\083\\001\\084\\001\\085\\001\\086\\001\\181\\002\\140\\000\\141\\000\\\n\\255\\255\\143\\000\\087\\001\\255\\255\\255\\255\\255\\255\\190\\002\\255\\255\\\n\\192\\002\\255\\255\\100\\001\\153\\000\\154\\000\\255\\255\\255\\255\\251\\001\\\n\\101\\001\\255\\255\\202\\002\\255\\255\\255\\255\\106\\001\\255\\255\\165\\000\\\n\\109\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\000\\001\\176\\000\\177\\000\\003\\001\\255\\255\\255\\255\\181\\000\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\231\\002\\\n\\255\\255\\233\\002\\017\\001\\255\\255\\255\\255\\255\\255\\000\\001\\140\\000\\\n\\141\\000\\003\\001\\143\\000\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\\n\\248\\002\\255\\255\\255\\255\\013\\001\\153\\000\\154\\000\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\026\\001\\255\\255\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\175\\000\\176\\000\\177\\000\\060\\001\\040\\001\\041\\001\\\n\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\255\\255\\\n\\255\\255\\005\\000\\073\\001\\074\\001\\255\\255\\009\\000\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\060\\001\\255\\255\\255\\255\\063\\001\\004\\001\\047\\003\\\n\\255\\255\\067\\001\\068\\001\\255\\255\\024\\000\\092\\001\\255\\255\\094\\001\\\n\\074\\00
1\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\025\\001\\068\\003\\108\\001\\042\\000\\255\\255\\\n\\111\\001\\255\\255\\092\\001\\255\\255\\115\\001\\255\\255\\096\\001\\079\\003\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\078\\000\\255\\255\\080\\000\\081\\000\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\129\\003\\255\\255\\089\\001\\090\\001\\255\\255\\255\\255\\093\\001\\\n\\255\\255\\000\\000\\096\\001\\255\\255\\112\\000\\042\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\048\\001\\255\\255\\149\\003\\255\\255\\000\\001\\\n\\152\\003\\255\\255\\154\\003\\155\\003\\156\\003\\255\\255\\255\\255\\008\\001\\\n\\160\\003\\255\\255\\255\\255\\255\\255\\013\\001\\255\\255\\166\\003\\255\\255\\\n\\140\\000\\141\\000\\255\\255\\143\\000\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\153\\000\\154\\000\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\189\\003\\255\\255\\255\\255\\\n\\041\\001\\165\\000\\255\\255\\153\\001\\154\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\176\\000\\177\\000\\255\\255\\207\\003\\\n\\000\\001\\181\\000\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\066\\001\\067\\001\\068\\001\\178\\001\\179\\001\\180\\001\\019\\003\\\n\\255\\255\\074\\001\\255\\255\\255\\255\\186\\001\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\236\\003\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\092\\001\\255\\255\\255\\255\\255\\255\\096\\
001\\\n\\255\\255\\207\\001\\208\\001\\255\\255\\153\\001\\154\\001\\212\\001\\255\\255\\\n\\255\\255\\255\\255\\216\\001\\108\\001\\255\\255\\255\\255\\111\\001\\055\\001\\\n\\255\\255\\057\\001\\058\\001\\059\\001\\255\\255\\061\\001\\255\\255\\255\\255\\\n\\064\\001\\065\\001\\255\\255\\255\\255\\177\\001\\178\\001\\179\\001\\180\\001\\\n\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\186\\001\\244\\001\\255\\255\\\n\\004\\001\\081\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\253\\001\\\n\\023\\001\\089\\001\\090\\001\\000\\000\\044\\004\\255\\255\\255\\255\\030\\001\\\n\\255\\255\\097\\001\\207\\001\\208\\001\\255\\255\\025\\001\\255\\255\\212\\001\\\n\\014\\002\\255\\255\\058\\004\\216\\001\\255\\255\\109\\001\\110\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\026\\002\\226\\001\\255\\255\\255\\255\\\n\\055\\001\\000\\000\\057\\001\\058\\001\\059\\001\\255\\255\\061\\001\\255\\255\\\n\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\255\\255\\244\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\095\\004\\\n\\253\\001\\255\\255\\081\\001\\057\\002\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\088\\001\\089\\001\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\014\\002\\097\\001\\255\\255\\255\\255\\089\\001\\090\\001\\255\\255\\\n\\255\\255\\093\\001\\005\\000\\106\\001\\096\\001\\255\\255\\109\\001\\110\\001\\\n\\255\\255\\255\\255\\255\\255\\131\\004\\255\\255\\255\\255\\255\\255\\135\\004\\\n\\255\\255\\000\\001\\255\\255\\255\\255\\003\\001\\255\\255\\114\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\121\\001\\013\\001\\123\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\057\\002\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\132\\002\\255\\255\\\n\\176\\004\\040\\001\\041\\001\\255\\255\\255\\255\\153\\001\
\154\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\147\\002\\148\\002\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\\n\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\178\\001\\179\\001\\\n\\180\\001\\255\\255\\255\\255\\074\\001\\170\\002\\255\\255\\186\\001\\255\\255\\\n\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\181\\002\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\092\\001\\255\\255\\132\\002\\\n\\190\\002\\096\\001\\192\\002\\207\\001\\208\\001\\255\\255\\255\\255\\255\\255\\\n\\212\\001\\255\\255\\255\\255\\255\\255\\216\\001\\108\\001\\147\\002\\148\\002\\\n\\111\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\140\\000\\141\\000\\255\\255\\143\\000\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\153\\000\\154\\000\\\n\\244\\001\\255\\255\\018\\005\\233\\002\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\181\\002\\253\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\190\\002\\255\\255\\192\\002\\255\\255\\176\\000\\177\\000\\255\\255\\\n\\255\\255\\255\\255\\014\\002\\000\\001\\255\\255\\002\\001\\003\\001\\004\\001\\\n\\255\\255\\255\\255\\255\\255\\008\\001\\255\\255\\255\\255\\026\\002\\255\\255\\\n\\013\\001\\255\\255\\255\\255\\255\\255\\017\\001\\018\\001\\019\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\027\\001\\028\\001\\\n\\029\\001\\000\\001\\255\\255\\255\\255\\233\\002\\255\\255\\255\\255\\036\\001\\\n\\255\\255\\255\\255\\255\\255\\083\\005\\041\\001\\057\\002\\013\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\048\\001\\049\\001\\000\\000\\094\\005\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\060\\001\\\n\\255\\255\\255\\255\\063\\001\\064\\001\\255\\255\\066\\001\\067\\001\\068\\001\\\n\\255\\255\\070\\001\\041\\001\\255\\255\\073
\\001\\074\\001\\255\\255\\255\\255\\\n\\255\\255\\079\\003\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\101\\002\\255\\255\\255\\255\\255\\255\\060\\001\\091\\001\\092\\001\\\n\\136\\005\\094\\001\\095\\001\\096\\001\\097\\001\\068\\001\\142\\005\\100\\001\\\n\\255\\255\\255\\255\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\108\\001\\\n\\109\\001\\080\\001\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\\n\\132\\002\\255\\255\\255\\255\\076\\001\\255\\255\\092\\001\\079\\001\\255\\255\\\n\\081\\001\\096\\001\\083\\001\\129\\003\\255\\255\\255\\255\\255\\255\\147\\002\\\n\\148\\002\\255\\255\\079\\003\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\\n\\111\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\149\\003\\\n\\255\\255\\255\\255\\152\\003\\255\\255\\154\\003\\155\\003\\156\\003\\112\\001\\\n\\026\\000\\027\\000\\160\\003\\116\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\166\\003\\181\\002\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\190\\002\\255\\255\\192\\002\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\129\\003\\255\\255\\255\\255\\189\\003\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\149\\003\\255\\255\\255\\255\\152\\003\\153\\003\\154\\003\\155\\003\\156\\003\\\n\\082\\000\\083\\000\\255\\255\\160\\003\\255\\255\\233\\002\\255\\255\\000\\001\\\n\\255\\255\\166\\003\\255\\255\\255\\255\\255\\255\\006\\001\\153\\001\\154\\001\\\n\\255\\255\\255\\255\\000\\000\\012\\001\\189\\001\\255\\255\\191\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\189\\003\\255\\255\\255\\255\\028\\001\\255\\255\\030\\001\\031\\001\\178\\001\\\n\\179\\001\\180\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\186\\001\\\n\\187\\001\\218\\001\\255\\25
5\\220\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\\n\\255\\255\\255\\255\\059\\001\\255\\255\\207\\001\\208\\001\\255\\255\\064\\001\\\n\\065\\001\\212\\001\\255\\255\\255\\255\\255\\255\\216\\001\\071\\001\\255\\255\\\n\\052\\003\\255\\255\\255\\255\\255\\255\\255\\255\\057\\003\\044\\004\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\255\\255\\000\\001\\255\\255\\255\\255\\\n\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\097\\001\\244\\001\\013\\001\\079\\003\\101\\001\\255\\255\\017\\001\\255\\255\\\n\\255\\255\\106\\001\\253\\001\\255\\255\\109\\001\\110\\001\\255\\255\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\014\\002\\255\\255\\255\\255\\041\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\044\\004\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\129\\003\\255\\255\\066\\001\\\n\\067\\001\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\073\\001\\074\\001\\\n\\081\\002\\255\\255\\255\\255\\084\\002\\255\\255\\080\\001\\057\\002\\255\\255\\\n\\255\\255\\149\\003\\255\\255\\255\\255\\152\\003\\255\\255\\154\\003\\155\\003\\\n\\156\\003\\092\\001\\255\\255\\094\\001\\160\\003\\096\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\166\\003\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\\n\\115\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\189\\003\\176\\004\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\\n\\050\\001\\051\\001\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\057\\001\\\n\\058\\001\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\064\\001\\065\\001\\\n\\066\\001\\0
67\\001\\068\\001\\069\\001\\255\\255\\071\\001\\255\\255\\255\\255\\\n\\255\\255\\132\\002\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\172\\002\\086\\001\\255\\255\\000\\000\\255\\255\\\n\\147\\002\\148\\002\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\099\\001\\000\\001\\176\\004\\002\\001\\003\\001\\004\\001\\255\\255\\\n\\255\\255\\255\\255\\008\\001\\255\\003\\255\\255\\255\\255\\199\\002\\013\\001\\\n\\255\\255\\255\\255\\255\\255\\017\\001\\018\\001\\019\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\181\\002\\255\\255\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\190\\002\\255\\255\\192\\002\\036\\001\\255\\255\\\n\\255\\255\\255\\255\\040\\001\\041\\001\\018\\005\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\048\\001\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\044\\004\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\\n\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\233\\002\\255\\255\\\n\\000\\000\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\255\\255\\\n\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\018\\005\\100\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\083\\005\\108\\001\\255\\255\\\n\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\255\\255\\\n\\094\\005\\255\\255\\255\\255\\000\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\008\\001\\255\\255\\062\\003\\255\\255\\255\\255\\\n\\013\\001\\235\\001\\255\\255\\255\\255\\255\\255\\255\\255\\240\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\\n\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\
\\n\\255\\255\\255\\255\\136\\005\\255\\255\\041\\001\\255\\255\\083\\005\\053\\001\\\n\\255\\255\\055\\001\\255\\255\\057\\001\\058\\001\\059\\001\\255\\255\\061\\001\\\n\\255\\255\\255\\255\\064\\001\\065\\001\\079\\003\\255\\255\\111\\003\\060\\001\\\n\\255\\255\\255\\255\\028\\002\\029\\002\\176\\004\\066\\001\\067\\001\\068\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\000\\000\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\080\\001\\090\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\143\\003\\092\\001\\\n\\255\\255\\255\\255\\255\\255\\096\\001\\208\\004\\063\\002\\255\\255\\109\\001\\\n\\110\\001\\255\\255\\068\\002\\069\\002\\070\\002\\255\\255\\129\\003\\108\\001\\\n\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\255\\255\\\n\\002\\001\\003\\001\\149\\003\\180\\003\\181\\003\\152\\003\\008\\001\\154\\003\\\n\\155\\003\\156\\003\\255\\255\\013\\001\\255\\255\\160\\003\\255\\255\\017\\001\\\n\\018\\001\\019\\001\\255\\255\\166\\003\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\036\\001\\255\\255\\255\\255\\255\\255\\018\\005\\041\\001\\\n\\255\\255\\255\\255\\189\\003\\255\\255\\255\\255\\255\\255\\048\\001\\049\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\233\\003\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\\n\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\159\\002\\160\\002\\161\\002\\\n\\074\\001\\255\\255\\255\\255\\252\\003\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\\n\\000\\001\\091\\001\\092\\001\\003\\001\\094\\001\\095\\001\\096\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\023\\001\\255\\255\\013\\001\\255
\\255\\083\\005\\\n\\255\\255\\017\\001\\108\\001\\197\\002\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\036\\001\\115\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\213\\002\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\041\\001\\255\\255\\055\\001\\255\\255\\057\\001\\058\\001\\059\\001\\\n\\255\\255\\061\\001\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\\n\\255\\255\\044\\004\\066\\001\\067\\001\\068\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\090\\001\\255\\255\\\n\\080\\001\\255\\255\\255\\255\\000\\000\\255\\255\\097\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\100\\004\\092\\001\\102\\004\\094\\001\\255\\255\\\n\\096\\001\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\108\\001\\255\\255\\032\\003\\111\\001\\\n\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\255\\255\\000\\001\\001\\001\\\n\\002\\001\\003\\001\\255\\255\\255\\255\\006\\001\\007\\001\\008\\001\\009\\001\\\n\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\\n\\018\\001\\019\\001\\020\\001\\021\\001\\149\\004\\255\\255\\024\\001\\025\\001\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\\n\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\049\\001\\\n\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\060\\001\\061\\001\\255\\255\\063\\001\\064\\001\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\\n\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\176\\004\\080\\001\\081\\001\\\n\\082\\001\\083\\001\\084\\001\\085\\001\\086\\00
1\\087\\001\\255\\255\\089\\001\\\n\\000\\000\\091\\001\\092\\001\\133\\003\\094\\001\\095\\001\\096\\001\\097\\001\\\n\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\103\\001\\104\\001\\105\\001\\\n\\106\\001\\255\\255\\108\\001\\109\\001\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\255\\255\\115\\001\\255\\255\\055\\001\\255\\255\\057\\001\\058\\001\\059\\001\\\n\\255\\255\\061\\001\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\001\\005\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\000\\001\\\n\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\081\\001\\255\\255\\008\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\013\\001\\089\\001\\090\\001\\255\\255\\\n\\017\\001\\018\\001\\019\\001\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\\n\\255\\255\\026\\001\\027\\001\\028\\001\\029\\001\\038\\005\\255\\255\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\036\\001\\255\\255\\255\\255\\255\\255\\018\\005\\\n\\041\\001\\255\\255\\220\\003\\221\\003\\222\\003\\255\\255\\255\\255\\048\\001\\\n\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\072\\005\\\n\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\\n\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\087\\005\\080\\001\\\n\\255\\255\\023\\001\\255\\255\\000\\001\\255\\255\\255\\255\\095\\005\\255\\255\\\n\\255\\255\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\\n\\013\\001\\000\\000\\020\\004\\021\\004\\022\\004\\255\\255\\255\\255\\112\\005\\\n\\083\\005\\255\\255\\255\\255\\108\\001\\255\\255\\026\\001\\111\\001\\028\\001\\\n\\029\\001\\055\\001\\115\\001\\057\\001\\058\\001\\059\\001\\255\\255\\061\\001\\\n\\255\\255\\255\\255\\064\\001\\065\\001\\041\\001\\255\\255\\255\\255\\255\\255\\\n\\137\\005\\138\\005\\055\\001\\255\\255\\057\\001\\058\\001\\059\\001\\057\\004\\\n\\061\\001\\255\\255\\147\\005\\0
64\\001\\065\\001\\255\\255\\255\\255\\060\\001\\\n\\255\\255\\255\\255\\063\\001\\255\\255\\090\\001\\066\\001\\067\\001\\068\\001\\\n\\255\\255\\162\\005\\255\\255\\097\\001\\081\\001\\074\\001\\167\\005\\168\\005\\\n\\169\\005\\170\\005\\255\\255\\080\\001\\089\\001\\090\\001\\255\\255\\109\\001\\\n\\110\\001\\255\\255\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\092\\001\\\n\\255\\255\\255\\255\\255\\255\\096\\001\\255\\255\\255\\255\\255\\255\\108\\001\\\n\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\108\\001\\\n\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\122\\004\\123\\004\\255\\255\\255\\255\\255\\255\\127\\004\\128\\004\\129\\004\\\n\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\000\\000\\006\\001\\007\\001\\\n\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\\n\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\022\\001\\255\\255\\\n\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\\n\\255\\255\\049\\001\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\055\\001\\\n\\056\\001\\255\\255\\255\\255\\059\\001\\060\\001\\061\\001\\062\\001\\063\\001\\\n\\064\\001\\065\\001\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\\n\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\081\\001\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\\n\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\\n\\096\\001\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\103\\001\\\n\\104\\001\\105\\001\\106\\001\\255\\255\\108\\001\\109\\001\\255\\255\\111\\001\\\n\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\000\\
000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\006\\001\\\n\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\\n\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\022\\001\\\n\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\\n\\031\\001\\255\\255\\052\\005\\053\\005\\054\\005\\036\\001\\037\\001\\255\\255\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\\n\\047\\001\\255\\255\\049\\001\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\\n\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\060\\001\\061\\001\\255\\255\\\n\\063\\001\\064\\001\\065\\001\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\\n\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\081\\001\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\\n\\087\\001\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\\n\\095\\001\\096\\001\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\\n\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\108\\001\\109\\001\\255\\255\\\n\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\\n\\003\\001\\255\\255\\255\\255\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\\n\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\\n\\019\\001\\020\\001\\021\\001\\022\\001\\255\\255\\024\\001\\025\\001\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\049\\001\\050\\001\\\n\
\051\\001\\255\\255\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\\n\\059\\001\\060\\001\\061\\001\\255\\255\\063\\001\\064\\001\\065\\001\\066\\001\\\n\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\081\\001\\082\\001\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\097\\001\\098\\001\\\n\\255\\255\\100\\001\\101\\001\\255\\255\\103\\001\\104\\001\\105\\001\\106\\001\\\n\\255\\255\\108\\001\\109\\001\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\\n\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\006\\001\\\n\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\\n\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\022\\001\\\n\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\\n\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\\n\\047\\001\\255\\255\\049\\001\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\\n\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\060\\001\\061\\001\\255\\255\\\n\\063\\001\\064\\001\\065\\001\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\\n\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\081\\001\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\\n\\087\\001\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\\n\\095\\001\\096\\001\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\\n\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\108\\001\\109\\001\\255\\255\\\n\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\000\\001\\001\\001\\\n\\002\\001\\003\\001\\255\\255\\255\\255\\006\\001\\007\\001\\008\\00
1\\009\\001\\\n\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\\n\\018\\001\\019\\001\\020\\001\\021\\001\\022\\001\\255\\255\\024\\001\\025\\001\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\\n\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\049\\001\\\n\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\060\\001\\061\\001\\255\\255\\063\\001\\064\\001\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\\n\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\081\\001\\\n\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\255\\255\\089\\001\\\n\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\097\\001\\\n\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\103\\001\\104\\001\\105\\001\\\n\\106\\001\\255\\255\\108\\001\\109\\001\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\\n\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\013\\001\\\n\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\\n\\022\\001\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\\n\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\\n\\046\\001\\047\\001\\255\\255\\049\\001\\050\\001\\051\\001\\255\\255\\053\\001\\\n\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\060\\001\\061\\001\\\n\\255\\255\\063\\001\\064\\001\\065\\001\\066\\001\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\081\\001\\082\\001\\0
83\\001\\084\\001\\085\\001\\\n\\086\\001\\087\\001\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\000\\000\\\n\\094\\001\\095\\001\\096\\001\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\\n\\255\\255\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\108\\001\\109\\001\\\n\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\\n\\002\\001\\003\\001\\255\\255\\255\\255\\006\\001\\007\\001\\008\\001\\009\\001\\\n\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\\n\\018\\001\\019\\001\\020\\001\\021\\001\\022\\001\\255\\255\\024\\001\\025\\001\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\\n\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\049\\001\\\n\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\060\\001\\061\\001\\255\\255\\063\\001\\064\\001\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\\n\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\081\\001\\\n\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\255\\255\\089\\001\\\n\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\097\\001\\\n\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\103\\001\\104\\001\\105\\001\\\n\\106\\001\\255\\255\\108\\001\\109\\001\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\255\\255\\115\\001\\255\\255\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\\n\\255\\255\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\\n\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\\n\\021\\001\\022\\001\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\\n\\029\\001\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\\n\\037\\001\\255\\255\\255\\255\\040\\
001\\041\\001\\042\\001\\043\\001\\044\\001\\\n\\045\\001\\046\\001\\047\\001\\255\\255\\049\\001\\050\\001\\051\\001\\255\\255\\\n\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\060\\001\\\n\\061\\001\\255\\255\\063\\001\\064\\001\\065\\001\\066\\001\\067\\001\\068\\001\\\n\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\080\\001\\081\\001\\082\\001\\083\\001\\084\\001\\\n\\085\\001\\086\\001\\087\\001\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\000\\000\\094\\001\\095\\001\\096\\001\\097\\001\\098\\001\\255\\255\\100\\001\\\n\\101\\001\\255\\255\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\108\\001\\\n\\109\\001\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\\n\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\006\\001\\007\\001\\008\\001\\\n\\009\\001\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\\n\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\022\\001\\255\\255\\024\\001\\\n\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\\n\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\\n\\049\\001\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\055\\001\\056\\001\\\n\\255\\255\\255\\255\\059\\001\\060\\001\\061\\001\\255\\255\\063\\001\\064\\001\\\n\\065\\001\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\\n\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\081\\001\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\255\\255\\\n\\089\\001\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\\n\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\103\\001\\104\\001\\\n\\105\\001\\106\\001\\255\\255\\108\\001\\109\\001\\255\\255\\111\\001\\255\\255\\\n\\255\\255\\255\\255\
\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\\n\\255\\255\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\\n\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\\n\\021\\001\\022\\001\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\\n\\029\\001\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\\n\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\\n\\045\\001\\046\\001\\047\\001\\255\\255\\049\\001\\050\\001\\051\\001\\255\\255\\\n\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\060\\001\\\n\\061\\001\\255\\255\\063\\001\\064\\001\\065\\001\\066\\001\\067\\001\\068\\001\\\n\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\080\\001\\081\\001\\082\\001\\083\\001\\084\\001\\\n\\085\\001\\086\\001\\087\\001\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\\n\\000\\000\\094\\001\\095\\001\\096\\001\\097\\001\\098\\001\\255\\255\\100\\001\\\n\\101\\001\\255\\255\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\108\\001\\\n\\109\\001\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\\n\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\006\\001\\007\\001\\\n\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\\n\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\\n\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\\n\\255\\255\\049\\001\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\055\\001\\\n\\056\\001\\255\\255\\255\\255\\059\\001\\060\\001\\061\\001\\255\\255\\063\\001\\\n\\064\\001\\065\\001\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\\n\\072
\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\081\\001\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\\n\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\\n\\096\\001\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\103\\001\\\n\\104\\001\\105\\001\\106\\001\\255\\255\\108\\001\\109\\001\\255\\255\\111\\001\\\n\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\\n\\255\\255\\255\\255\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\011\\001\\\n\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\\n\\028\\001\\029\\001\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\\n\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\051\\001\\\n\\255\\255\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\\n\\060\\001\\061\\001\\255\\255\\063\\001\\064\\001\\065\\001\\066\\001\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\081\\001\\082\\001\\083\\001\\\n\\084\\001\\085\\001\\086\\001\\087\\001\\255\\255\\089\\001\\255\\255\\091\\001\\\n\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\100\\001\\101\\001\\255\\255\\103\\001\\104\\001\\105\\001\\106\\001\\255\\255\\\n\\108\\001\\109\\001\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\\n\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\006\\001\\007\\001\\\n\\008\\001\\009\\001\\010\\001\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\\n\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\\n\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\030\\001\\0
31\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\\n\\255\\255\\049\\001\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\055\\001\\\n\\056\\001\\255\\255\\255\\255\\059\\001\\060\\001\\061\\001\\255\\255\\063\\001\\\n\\064\\001\\065\\001\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\\n\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\081\\001\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\\n\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\\n\\096\\001\\255\\255\\255\\255\\255\\255\\100\\001\\101\\001\\255\\255\\103\\001\\\n\\104\\001\\105\\001\\106\\001\\255\\255\\108\\001\\109\\001\\255\\255\\111\\001\\\n\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\000\\001\\001\\001\\002\\001\\\n\\003\\001\\255\\255\\255\\255\\006\\001\\007\\001\\008\\001\\009\\001\\010\\001\\\n\\011\\001\\012\\001\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\\n\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\049\\001\\050\\001\\\n\\051\\001\\255\\255\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\\n\\059\\001\\060\\001\\061\\001\\255\\255\\063\\001\\064\\001\\065\\001\\066\\001\\\n\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\081\\001\\082\\001\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\\n\\255\\255\\100\\001\\101\\001\\255\\255\\103\\001\\104\\
001\\105\\001\\106\\001\\\n\\255\\255\\108\\001\\109\\001\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\\n\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\\n\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\022\\001\\\n\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\\n\\047\\001\\255\\255\\049\\001\\255\\255\\051\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\\n\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\\n\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\081\\001\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\\n\\255\\255\\255\\255\\089\\001\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\\n\\095\\001\\096\\001\\097\\001\\098\\001\\255\\255\\100\\001\\255\\255\\255\\255\\\n\\103\\001\\104\\001\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\\n\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\\n\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\\n\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\\n\\019\\001\\020\\001\\021\\001\\022\\001\\255\\255\\024\\001\\025\\001\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\049\\001\\255\\255\\\n\\051\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\255\\255\
\063\\001\\255\\255\\255\\255\\066\\001\\\n\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\081\\001\\082\\001\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\097\\001\\098\\001\\\n\\255\\255\\100\\001\\255\\255\\255\\255\\103\\001\\104\\001\\105\\001\\255\\255\\\n\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\\n\\115\\001\\255\\255\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\\n\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\\n\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\\n\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\\n\\046\\001\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\\n\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\\n\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\\n\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\\n\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\\n\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\\n\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\\n\\018\\001\\019\\001\\020
\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\\n\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\049\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\\n\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\\n\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\\n\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\\n\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\\n\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\\n\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\\n\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\\n\\046\\001\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\\n\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\\n\\094\\00
1\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\\n\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\\n\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\000\\001\\\n\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\\n\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\\n\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\\n\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\\n\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\\n\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\\n\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\\n\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\103\\001\\255\\255\\\n\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\\n\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\\n\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\\n\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\\n\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\\n\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\\n\\045\\001\\046\\001\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\
255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\\n\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\\n\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\083\\001\\084\\001\\\n\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\\n\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\100\\001\\\n\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\\n\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\\n\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\\n\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\\n\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\\n\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\\n\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\\n\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\\n\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\\n\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\103\\001\\255\\255\\\n\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\\n\\255\\255\\255\\255\\115\\001\\255\\255\\000\\001\\001\\001\\002\\001\\003\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\
\010\\001\\255\\255\\\n\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\255\\255\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\\n\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\083\\001\\\n\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\100\\001\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\\n\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\\n\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\\n\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\255\\255\\255\\255\\\n\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\\n\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\\n\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\082\\001\\083\\001\\084
\\001\\085\\001\\086\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\\n\\096\\001\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\103\\001\\\n\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\\n\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\\n\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\\n\\044\\001\\045\\001\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\083\\001\\\n\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\100\\001\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\\n\\255\\255\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\\n\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\\n\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\\n\\255\\255\\040\\001\\041\\00
1\\042\\001\\043\\001\\044\\001\\045\\001\\255\\255\\\n\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\\n\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\\n\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\\n\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\\n\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\\n\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\\n\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\\n\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\\n\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\\n\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\\n\\255\\255\\100\\001\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\\n\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\\n\\115\\001\\0
00\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\\n\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\\n\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\\n\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\\n\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\\n\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\\n\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\\n\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\\n\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\000\\001\\001\\001\\\n\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\009\\001\\\n\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\\n\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\\n\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\049\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\\n\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\
\\n\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\\n\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\\n\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\\n\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\\n\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\\n\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\\n\\046\\001\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\\n\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\\n\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\\n\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\\n\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\\n\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\\n\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\\n\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255
\\255\\255\\255\\\n\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\\n\\042\\001\\043\\001\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\\n\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\\n\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\082\\001\\255\\255\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\\n\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\255\\255\\115\\001\\255\\255\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\\n\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\\n\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\\n\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\\n\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\\n\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\\n\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\084\\001\\\n\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\\n\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\103\\001\\255\\255\\105\\00
1\\255\\255\\255\\255\\108\\001\\\n\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\\n\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\\n\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\\n\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\\n\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\\n\\041\\001\\042\\001\\043\\001\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\\n\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\082\\001\\255\\255\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\255\\255\\\n\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\\n\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\\n\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\\n\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\\n\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\\n\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\\n\\061\\001\\255\\255\\063\\001\\2
55\\255\\255\\255\\066\\001\\067\\001\\068\\001\\\n\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\084\\001\\\n\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\\n\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\\n\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\\n\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\\n\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\\n\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\\n\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\071\\001\\\n\\072\\001\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\082\\001\\255\\255\\084\\001\\085\\001\\086\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\\n\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\\n\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\\n\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\\n\\255\\255\\013\\001\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\019\\001\\\n\\020\\001\\021\\
001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\043\\001\\\n\\044\\001\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\071\\001\\072\\001\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\\n\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\\n\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\015\\001\\\n\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\\n\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\\n\\040\\001\\041\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\\n\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\\n\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\086\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\\n\
\096\\001\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\103\\001\\\n\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\\n\\255\\255\\255\\255\\255\\255\\115\\001\\255\\255\\000\\001\\001\\001\\002\\001\\\n\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\\n\\255\\255\\255\\255\\013\\001\\014\\001\\255\\255\\016\\001\\017\\001\\018\\001\\\n\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\043\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\\n\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\\n\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\\n\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\\n\\255\\255\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\\n\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\\n\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\25
5\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\\n\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\\n\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\094\\001\\\n\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\\n\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\\n\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\\n\\255\\255\\255\\255\\013\\001\\014\\001\\255\\255\\016\\001\\017\\001\\018\\001\\\n\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\\n\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\091\\001\\092\\001\\000\\000\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\\n\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\\n\\115\\001\\255\\255\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\2
55\\255\\255\\255\\013\\001\\\n\\014\\001\\255\\255\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\\n\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\\n\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\000\\000\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\255\\255\\\n\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\\n\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\001\\001\\\n\\002\\001\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\008\\001\\009\\001\\\n\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\255\\255\\016\\001\\017\\001\\\n\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\\n\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\036\\001\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\\n\\042\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\060\\001\\061\\001\\255\\255\\063\\001\\255\\255\\255\\255\\\n\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\\n\\074\\001\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\080\\001\\255\\255\\\n\\082\\001\\255\\255\\255\\255\\255\\
255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\255\\255\\105\\001\\\n\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\255\\255\\115\\001\\000\\001\\001\\001\\002\\001\\003\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\\n\\014\\001\\255\\255\\016\\001\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\\n\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\037\\001\\\n\\255\\255\\255\\255\\040\\001\\041\\001\\042\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\061\\001\\\n\\255\\255\\063\\001\\255\\255\\255\\255\\000\\000\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\255\\255\\\n\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\103\\001\\255\\255\\105\\001\\255\\255\\255\\255\\108\\001\\000\\001\\\n\\255\\255\\111\\001\\003\\001\\255\\255\\255\\255\\115\\001\\255\\255\\008\\001\\\n\\009\\001\\010\\001\\255\\255\\255\\255\\013\\001\\014\\001\\255\\255\\016\\001\\\n\\017\\001\\018\\001\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\\n\\025\\001\\026\\001\\255\\255\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\037\\001\\255\\255\\255\\255\\040\\001\\\n\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\049\\001\\255\\255\
\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\000\\000\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\\n\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\082\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\255\\255\\\n\\105\\001\\255\\255\\255\\255\\108\\001\\000\\001\\255\\255\\111\\001\\003\\001\\\n\\255\\255\\255\\255\\115\\001\\255\\255\\008\\001\\009\\001\\010\\001\\255\\255\\\n\\255\\255\\013\\001\\014\\001\\255\\255\\016\\001\\017\\001\\018\\001\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\255\\255\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\082\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\103\\001\\000\\001\\105\\001\\255\\255\\003\\001\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\008\\001\\255\\255\\010\\001\\115\\001\\\n\\255\\255\\013\\001\\014\\001\\255\\255\\016\\001\\017\\001\\018\\001\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\255\\255\\\n\\028
\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\103\\001\\000\\001\\105\\001\\255\\255\\003\\001\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\008\\001\\255\\255\\010\\001\\115\\001\\\n\\255\\255\\013\\001\\014\\001\\255\\255\\016\\001\\017\\001\\018\\001\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\255\\255\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\000\\000\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\000\\000\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\103\\001\\000\\001\\105\\001\\255\\255\\003\\001\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\008\\001\\255\\255\\010\\001\\1
15\\001\\\n\\255\\255\\013\\001\\014\\001\\255\\255\\016\\001\\017\\001\\018\\001\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\255\\255\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\103\\001\\000\\001\\105\\001\\255\\255\\003\\001\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\008\\001\\255\\255\\010\\001\\115\\001\\\n\\255\\255\\013\\001\\014\\001\\255\\255\\016\\001\\017\\001\\018\\001\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\255\\255\\\n\\028\\001\\029\\001\\000\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\037\\001\\008\\001\\255\\255\\040\\001\\041\\001\\255\\255\\013\\001\\\n\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\041\\001\\255\\255\\073\\001\\074\\001\\255\\255\\\n\\000\\000\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\091\\001\\\n\\092\\001\\063\\001\\094\\001\\095\\001\\096\\001\\067\\
001\\068\\001\\000\\001\\\n\\255\\255\\255\\255\\003\\001\\103\\001\\074\\001\\105\\001\\255\\255\\008\\001\\\n\\108\\001\\010\\001\\080\\001\\111\\001\\013\\001\\014\\001\\255\\255\\115\\001\\\n\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\255\\255\\092\\001\\024\\001\\\n\\025\\001\\026\\001\\096\\001\\028\\001\\029\\001\\000\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\037\\001\\255\\255\\108\\001\\040\\001\\\n\\041\\001\\111\\001\\013\\001\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\\n\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\\n\\255\\255\\028\\001\\029\\001\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\041\\001\\255\\255\\\n\\073\\001\\074\\001\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\091\\001\\092\\001\\063\\001\\094\\001\\095\\001\\096\\001\\\n\\067\\001\\068\\001\\000\\001\\255\\255\\255\\255\\003\\001\\103\\001\\074\\001\\\n\\105\\001\\255\\255\\008\\001\\108\\001\\010\\001\\080\\001\\111\\001\\013\\001\\\n\\014\\001\\255\\255\\115\\001\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\\n\\255\\255\\092\\001\\024\\001\\025\\001\\026\\001\\096\\001\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\037\\001\\\n\\255\\255\\108\\001\\040\\001\\041\\001\\111\\001\\255\\255\\255\\255\\255\\255\\\n\\000\\000\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\\n\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\
\255\\255\\091\\001\\092\\001\\255\\255\\\n\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\103\\001\\000\\001\\105\\001\\255\\255\\003\\001\\108\\001\\255\\255\\\n\\255\\255\\111\\001\\008\\001\\255\\255\\010\\001\\115\\001\\255\\255\\013\\001\\\n\\014\\001\\255\\255\\255\\255\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\\n\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\255\\255\\028\\001\\029\\001\\\n\\000\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\037\\001\\\n\\255\\255\\255\\255\\040\\001\\041\\001\\255\\255\\013\\001\\255\\255\\255\\255\\\n\\000\\000\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\060\\001\\255\\255\\\n\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\041\\001\\255\\255\\073\\001\\074\\001\\255\\255\\000\\000\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\091\\001\\092\\001\\063\\001\\\n\\094\\001\\095\\001\\096\\001\\067\\001\\068\\001\\000\\001\\255\\255\\255\\255\\\n\\003\\001\\103\\001\\074\\001\\105\\001\\255\\255\\008\\001\\108\\001\\010\\001\\\n\\080\\001\\111\\001\\013\\001\\014\\001\\255\\255\\115\\001\\017\\001\\255\\255\\\n\\019\\001\\020\\001\\021\\001\\255\\255\\092\\001\\024\\001\\025\\001\\026\\001\\\n\\096\\001\\028\\001\\029\\001\\000\\001\\255\\255\\255\\255\\003\\001\\255\\255\\\n\\255\\255\\255\\255\\037\\001\\255\\255\\108\\001\\040\\001\\041\\001\\111\\001\\\n\\013\\001\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\049\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\\n\\029\\001\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\\n\\067\\001\\068\\001\\255\\255\\070\\001\\041\\001\\255\\255\\073\\001\\074\\001\\\n\\255\\255\\000\\000\\255
\\255\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\\n\\091\\001\\092\\001\\063\\001\\094\\001\\095\\001\\096\\001\\255\\255\\068\\001\\\n\\000\\001\\255\\255\\255\\255\\003\\001\\103\\001\\074\\001\\105\\001\\255\\255\\\n\\008\\001\\108\\001\\010\\001\\080\\001\\111\\001\\013\\001\\014\\001\\255\\255\\\n\\115\\001\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\255\\255\\092\\001\\\n\\024\\001\\025\\001\\026\\001\\096\\001\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\037\\001\\255\\255\\108\\001\\\n\\040\\001\\041\\001\\111\\001\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\\n\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\\n\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\\n\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\\n\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\\n\\000\\001\\105\\001\\255\\255\\003\\001\\108\\001\\255\\255\\255\\255\\111\\001\\\n\\008\\001\\255\\255\\010\\001\\115\\001\\255\\255\\013\\001\\014\\001\\255\\255\\\n\\255\\255\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\\n\\024\\001\\025\\001\\026\\001\\255\\255\\028\\001\\029\\001\\000\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\037\\001\\255\\255\\255\\255\\\n\\040\\001\\041\\001\\255\\255\\013\\001\\255\\255\\255\\255\\000\\000\\255\\255\\\n\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\026\\001\\255\\255\\028\\001\\029\\001\\060\\001\\255\\255\\255\\255\\063\\001\\\n\\255\\25
5\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\041\\001\\\n\\255\\255\\073\\001\\074\\001\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\060\\001\\091\\001\\092\\001\\063\\001\\094\\001\\095\\001\\\n\\096\\001\\067\\001\\068\\001\\000\\001\\255\\255\\255\\255\\003\\001\\103\\001\\\n\\074\\001\\105\\001\\255\\255\\008\\001\\108\\001\\010\\001\\080\\001\\111\\001\\\n\\013\\001\\014\\001\\255\\255\\115\\001\\017\\001\\255\\255\\019\\001\\020\\001\\\n\\021\\001\\255\\255\\092\\001\\024\\001\\025\\001\\026\\001\\096\\001\\028\\001\\\n\\029\\001\\000\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\037\\001\\255\\255\\108\\001\\040\\001\\041\\001\\111\\001\\013\\001\\255\\255\\\n\\255\\255\\000\\000\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\060\\001\\\n\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\\n\\255\\255\\070\\001\\041\\001\\255\\255\\073\\001\\074\\001\\255\\255\\000\\000\\\n\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\091\\001\\092\\001\\\n\\063\\001\\094\\001\\095\\001\\096\\001\\067\\001\\068\\001\\000\\001\\255\\255\\\n\\255\\255\\003\\001\\103\\001\\074\\001\\105\\001\\255\\255\\008\\001\\108\\001\\\n\\010\\001\\080\\001\\111\\001\\013\\001\\014\\001\\255\\255\\115\\001\\017\\001\\\n\\255\\255\\019\\001\\020\\001\\021\\001\\255\\255\\092\\001\\024\\001\\025\\001\\\n\\026\\001\\096\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\037\\001\\255\\255\\108\\001\\040\\001\\041\\001\\\n\\111\\001\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\049\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\
255\\\n\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\\n\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\\n\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\000\\001\\105\\001\\\n\\255\\255\\003\\001\\108\\001\\255\\255\\255\\255\\111\\001\\008\\001\\255\\255\\\n\\010\\001\\115\\001\\255\\255\\013\\001\\014\\001\\255\\255\\255\\255\\017\\001\\\n\\255\\255\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\\n\\026\\001\\255\\255\\028\\001\\029\\001\\000\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\\n\\255\\255\\013\\001\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\049\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\\n\\028\\001\\029\\001\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\\n\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\041\\001\\255\\255\\073\\001\\\n\\074\\001\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\091\\001\\092\\001\\063\\001\\094\\001\\095\\001\\096\\001\\255\\255\\\n\\068\\001\\000\\001\\255\\255\\255\\255\\003\\001\\103\\001\\074\\001\\105\\001\\\n\\255\\255\\008\\001\\108\\001\\010\\001\\080\\001\\111\\001\\013\\001\\014\\001\\\n\\255\\255\\115\\001\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\255\\255\\\n\\092\\001\\024\\001\\025\\001\\026\\001\\096\\001\\028\\001\\029\\001\\000\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\037\\001\\255\\255\\\n\\108\\001\\040\\001\\041\\001\\111\\001\\013\\001\\255\\255\
\255\\255\\000\\000\\\n\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\060\\001\\255\\255\\255\\255\\\n\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\\n\\041\\001\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\091\\001\\092\\001\\063\\001\\094\\001\\\n\\095\\001\\096\\001\\255\\255\\068\\001\\000\\001\\255\\255\\255\\255\\003\\001\\\n\\103\\001\\074\\001\\105\\001\\255\\255\\008\\001\\108\\001\\010\\001\\080\\001\\\n\\111\\001\\013\\001\\014\\001\\255\\255\\115\\001\\017\\001\\255\\255\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\092\\001\\024\\001\\025\\001\\026\\001\\096\\001\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\037\\001\\255\\255\\108\\001\\040\\001\\041\\001\\111\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\255\\255\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\103\\001\\000\\001\\105\\001\\255\\255\\003\\001\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\008\\001\\255\\255\\010\\001\\115\\001\\\n\\255\\255\\013\\001\\014\\001\\255\\255\\255\\255\\017\\001\\255\\255\\019\\001\\\n\\020\\001\\021\\001\\255\\255\\255\\255\\024\\001\\025\\001\\026\\001\\255\\255\\\n\\028\\001\\029\\001\\000\\001\\255\\255\\255
\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\037\\001\\255\\255\\255\\255\\040\\001\\041\\001\\255\\255\\013\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\255\\255\\255\\255\\\n\\000\\000\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\041\\001\\255\\255\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\091\\001\\\n\\092\\001\\063\\001\\094\\001\\095\\001\\096\\001\\255\\255\\068\\001\\000\\001\\\n\\255\\255\\255\\255\\003\\001\\103\\001\\074\\001\\105\\001\\255\\255\\008\\001\\\n\\108\\001\\010\\001\\080\\001\\111\\001\\013\\001\\014\\001\\255\\255\\115\\001\\\n\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\255\\255\\092\\001\\024\\001\\\n\\025\\001\\026\\001\\096\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\037\\001\\255\\255\\108\\001\\040\\001\\\n\\041\\001\\111\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\255\\255\\067\\001\\068\\001\\000\\000\\070\\001\\255\\255\\255\\255\\\n\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\255\\255\\096\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\255\\255\\\n\\105\\001\\255\\255\\255\\255\\108\\001\\255\\255\\000\\001\\111\\001\\002\\001\\\n\\003\\001\\004\\001\\115\\001\\255\\255\\255\\255\\008\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\013\\00
1\\255\\255\\255\\255\\255\\255\\017\\001\\018\\001\\\n\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\\n\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\036\\001\\255\\255\\255\\255\\255\\255\\040\\001\\041\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\048\\001\\049\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\\n\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\\n\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\\n\\000\\001\\100\\001\\255\\255\\003\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\008\\001\\108\\001\\010\\001\\255\\255\\111\\001\\013\\001\\014\\001\\255\\255\\\n\\115\\001\\017\\001\\255\\255\\019\\001\\020\\001\\021\\001\\255\\255\\255\\255\\\n\\024\\001\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\037\\001\\255\\255\\255\\255\\\n\\040\\001\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\000\\000\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\\n\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\\n\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\\n\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\103\\001\\\n\\255\\255\\1
05\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\\n\\255\\255\\255\\255\\255\\255\\115\\001\\000\\001\\255\\255\\002\\001\\003\\001\\\n\\004\\001\\255\\255\\255\\255\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\013\\001\\255\\255\\255\\255\\255\\255\\017\\001\\018\\001\\019\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\027\\001\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\008\\001\\255\\255\\255\\255\\255\\255\\\n\\036\\001\\255\\255\\255\\255\\255\\255\\255\\255\\041\\001\\255\\255\\000\\000\\\n\\255\\255\\255\\255\\255\\255\\023\\001\\048\\001\\049\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\030\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\\n\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\074\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\055\\001\\080\\001\\057\\001\\058\\001\\059\\001\\\n\\255\\255\\061\\001\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\091\\001\\\n\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\000\\001\\\n\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\081\\001\\255\\255\\008\\001\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\013\\001\\089\\001\\090\\001\\115\\001\\\n\\017\\001\\018\\001\\019\\001\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\\n\\255\\255\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\036\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\041\\001\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\255\\255\\048\\001\\\n\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\
\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\\n\\255\\255\\255\\255\\000\\001\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\\n\\255\\255\\255\\255\\008\\001\\108\\001\\255\\255\\255\\255\\111\\001\\013\\001\\\n\\255\\255\\255\\255\\115\\001\\017\\001\\018\\001\\019\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\000\\000\\255\\255\\036\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\041\\001\\255\\255\\255\\255\\000\\000\\255\\255\\\n\\255\\255\\255\\255\\048\\001\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\\n\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\255\\255\\255\\255\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\092\\001\\000\\000\\\n\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\\n\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\255\\255\\108\\001\\008\\001\\\n\\255\\255\\111\\001\\255\\255\\255\\255\\013\\001\\115\\001\\255\\255\\255\\255\\\n\\017\\001\\018\\001\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\036\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\048\\001\\\n\\049\\001\\000\\000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\070\\001\\255
\\255\\255\\255\\\n\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\055\\001\\080\\001\\\n\\057\\001\\058\\001\\059\\001\\255\\255\\061\\001\\255\\255\\255\\255\\064\\001\\\n\\065\\001\\255\\255\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\096\\001\\\n\\255\\255\\255\\255\\000\\001\\255\\255\\002\\001\\003\\001\\004\\001\\255\\255\\\n\\081\\001\\255\\255\\008\\001\\108\\001\\255\\255\\255\\255\\111\\001\\013\\001\\\n\\089\\001\\090\\001\\115\\001\\017\\001\\018\\001\\019\\001\\255\\255\\255\\255\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\026\\001\\027\\001\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\109\\001\\110\\001\\036\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\048\\001\\049\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\\n\\255\\255\\063\\001\\000\\000\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\\n\\070\\001\\255\\255\\255\\255\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\091\\001\\092\\001\\003\\001\\\n\\094\\001\\095\\001\\096\\001\\255\\255\\255\\255\\255\\255\\000\\001\\255\\255\\\n\\255\\255\\013\\001\\255\\255\\255\\255\\255\\255\\017\\001\\108\\001\\019\\001\\\n\\255\\255\\111\\001\\255\\255\\013\\001\\255\\255\\115\\001\\026\\001\\027\\001\\\n\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\026\\001\\255\\255\\028\\001\\029\\001\\255\\255\\041\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\041\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\\n\\068\\001\\255\\255\\060\\001\\255\\255\\013\\00
1\\073\\001\\074\\001\\255\\255\\\n\\017\\001\\067\\001\\068\\001\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\\n\\074\\001\\026\\001\\027\\001\\028\\001\\029\\001\\000\\000\\080\\001\\255\\255\\\n\\092\\001\\255\\255\\094\\001\\255\\255\\096\\001\\255\\255\\255\\255\\255\\255\\\n\\041\\001\\255\\255\\092\\001\\255\\255\\255\\255\\255\\255\\096\\001\\255\\255\\\n\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\115\\001\\\n\\255\\255\\000\\001\\108\\001\\060\\001\\003\\001\\111\\001\\063\\001\\255\\255\\\n\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\255\\255\\013\\001\\255\\255\\\n\\073\\001\\074\\001\\017\\001\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\255\\255\\255\\255\\026\\001\\027\\001\\028\\001\\029\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\092\\001\\255\\255\\094\\001\\255\\255\\096\\001\\\n\\255\\255\\255\\255\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\\n\\255\\255\\255\\255\\115\\001\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\\n\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\073\\001\\074\\001\\055\\001\\255\\255\\057\\001\\058\\001\\\n\\059\\001\\080\\001\\061\\001\\255\\255\\000\\000\\064\\001\\065\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\092\\001\\255\\255\\094\\001\\\n\\255\\255\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\081\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\108\\001\\089\\001\\090\\001\\\n\\111\\001\\255\\255\\000\\001\\255\\255\\115\\001\\003\\001\\097\\001\\005\\001\\\n\\006\\001\\007\\001\\008\\001\\255\\255\\255\\255\\011\\001\\012\\001\\013\\001\\\n\\255\\255\\255\\255\\109\\001\\110\\001\\255\\255\\019\\001\\255\\255\\255\\255\\\n\\255\\255\\023\\001\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\\n\\030\\001\\031\\001\\032\\001\\0
33\\001\\034\\001\\035\\001\\036\\001\\255\\255\\\n\\255\\255\\039\\001\\040\\001\\041\\001\\255\\255\\255\\255\\000\\000\\255\\255\\\n\\255\\255\\255\\255\\048\\001\\049\\001\\050\\001\\051\\001\\052\\001\\053\\001\\\n\\054\\001\\055\\001\\056\\001\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\\n\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\067\\001\\068\\001\\069\\001\\\n\\070\\001\\071\\001\\072\\001\\255\\255\\074\\001\\075\\001\\255\\255\\077\\001\\\n\\078\\001\\255\\255\\080\\001\\081\\001\\255\\255\\255\\255\\084\\001\\085\\001\\\n\\255\\255\\087\\001\\088\\001\\089\\001\\090\\001\\091\\001\\092\\001\\093\\001\\\n\\255\\255\\095\\001\\096\\001\\097\\001\\255\\255\\099\\001\\255\\255\\101\\001\\\n\\102\\001\\255\\255\\104\\001\\255\\255\\106\\001\\107\\001\\108\\001\\109\\001\\\n\\110\\001\\111\\001\\112\\001\\000\\000\\114\\001\\000\\001\\255\\255\\255\\255\\\n\\255\\255\\004\\001\\255\\255\\006\\001\\255\\255\\008\\001\\255\\255\\010\\001\\\n\\255\\255\\012\\001\\255\\255\\014\\001\\015\\001\\255\\255\\017\\001\\018\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\001\\028\\001\\255\\255\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\\n\\051\\001\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\\n\\059\\001\\000\\000\\255\\255\\255\\255\\255\\255\\064\\001\\065\\001\\066\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\071\\001\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\081\\001\\255\\255\\\n\\255\\255\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\255\\255\\\n\\091\\001\\092\\001\\255\\255\\094\\001\\095\\001\\255\\255\\097\\001\\255\\255\\\n\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\104\\001\\255\\255\\106\\001\\\n\\255\\255\\255\\
255\\109\\001\\110\\001\\000\\001\\255\\255\\113\\001\\255\\255\\\n\\004\\001\\255\\255\\006\\001\\000\\000\\008\\001\\255\\255\\010\\001\\255\\255\\\n\\012\\001\\255\\255\\014\\001\\015\\001\\255\\255\\017\\001\\018\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\\n\\255\\255\\255\\255\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\051\\001\\\n\\255\\255\\053\\001\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\065\\001\\066\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\071\\001\\255\\255\\073\\001\\000\\001\\255\\255\\\n\\255\\255\\003\\001\\255\\255\\255\\255\\255\\255\\081\\001\\008\\001\\255\\255\\\n\\084\\001\\255\\255\\255\\255\\013\\001\\014\\001\\089\\001\\255\\255\\091\\001\\\n\\092\\001\\019\\001\\094\\001\\095\\001\\022\\001\\097\\001\\255\\255\\255\\255\\\n\\026\\001\\101\\001\\028\\001\\029\\001\\104\\001\\255\\255\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\255\\255\\255\\255\\113\\001\\255\\255\\041\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\055\\001\\\n\\000\\000\\057\\001\\058\\001\\059\\001\\255\\255\\061\\001\\255\\255\\255\\255\\\n\\064\\001\\065\\001\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\065\\001\\\n\\066\\001\\067\\001\\068\\001\\000\\001\\255\\255\\255\\255\\003\\001\\255\\255\\\n\\074\\001\\081\\001\\255\\255\\008\\001\\255\\255\\079\\001\\080\\001\\255\\255\\\n\\013\\001\\089\\001\\090\\001\\255\\255\\255\\255\\255\\255\\019\\001\\255\\255\\\n\\255\\255\\097\\001\\092\\001\\255\\255\\255\\255\\026\\001\\096\\001\\028\\001\\\n\\029\\001\\255\\255\\255\\255\\255\\255\\255\\255\\109\\001\\110\\001\\255\\255\\\n\\000\\000\\255\\255\\108\\001\\040\\001\\041\\001\\111\\001\\055\\001\\255\\255\\\n\
\057\\001\\058\\001\\059\\001\\255\\255\\061\\001\\255\\255\\255\\255\\064\\001\\\n\\065\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\060\\001\\\n\\255\\255\\000\\001\\063\\001\\255\\255\\003\\001\\066\\001\\067\\001\\068\\001\\\n\\081\\001\\008\\001\\255\\255\\255\\255\\073\\001\\074\\001\\013\\001\\255\\255\\\n\\089\\001\\090\\001\\255\\255\\080\\001\\019\\001\\255\\255\\255\\255\\255\\255\\\n\\097\\001\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\092\\001\\\n\\000\\000\\255\\255\\255\\255\\096\\001\\109\\001\\110\\001\\255\\255\\100\\001\\\n\\255\\255\\040\\001\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\108\\001\\\n\\255\\255\\255\\255\\111\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\000\\001\\255\\255\\060\\001\\003\\001\\255\\255\\\n\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\255\\255\\255\\255\\\n\\013\\001\\255\\255\\073\\001\\074\\001\\255\\255\\255\\255\\019\\001\\255\\255\\\n\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\\n\\029\\001\\000\\001\\000\\000\\255\\255\\003\\001\\092\\001\\255\\255\\255\\255\\\n\\255\\255\\096\\001\\255\\255\\040\\001\\041\\001\\255\\255\\013\\001\\255\\255\\\n\\255\\255\\255\\255\\017\\001\\048\\001\\049\\001\\108\\001\\255\\255\\255\\255\\\n\\111\\001\\255\\255\\255\\255\\026\\001\\027\\001\\028\\001\\029\\001\\060\\001\\\n\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\255\\255\\255\\255\\068\\001\\\n\\255\\255\\070\\001\\041\\001\\255\\255\\255\\255\\074\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\055\\001\\080\\001\\057\\001\\058\\001\\059\\001\\255\\255\\\n\\061\\001\\255\\255\\000\\000\\064\\001\\065\\001\\060\\001\\255\\255\\092\\001\\\n\\063\\001\\255\\255\\255\\255\\096\\001\\067\\001\\068\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\006\\001\\074\\001\\081\\001\\255\\255\\255\\255\\108\\001\\\n\\012\\001\\080\\001\\111\\001\\255\\255\\089\\001\\090\\001\\255\\25
5\\255\\255\\\n\\000\\001\\255\\255\\255\\255\\003\\001\\097\\001\\092\\001\\255\\255\\094\\001\\\n\\008\\001\\096\\001\\030\\001\\031\\001\\255\\255\\013\\001\\255\\255\\255\\255\\\n\\109\\001\\110\\001\\255\\255\\019\\001\\255\\255\\108\\001\\255\\255\\000\\000\\\n\\111\\001\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\050\\001\\255\\255\\\n\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\\n\\255\\255\\041\\001\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\071\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\000\\001\\255\\255\\255\\255\\003\\001\\060\\001\\255\\255\\255\\255\\063\\001\\\n\\084\\001\\255\\255\\066\\001\\067\\001\\068\\001\\013\\001\\255\\255\\255\\255\\\n\\255\\255\\017\\001\\074\\001\\255\\255\\000\\000\\097\\001\\255\\255\\255\\255\\\n\\080\\001\\101\\001\\026\\001\\027\\001\\028\\001\\029\\001\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\255\\255\\092\\001\\255\\255\\255\\255\\255\\255\\\n\\096\\001\\041\\001\\255\\255\\000\\000\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\\n\\000\\001\\255\\255\\255\\255\\003\\001\\060\\001\\255\\255\\255\\255\\063\\001\\\n\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\013\\001\\255\\255\\255\\255\\\n\\255\\255\\000\\000\\074\\001\\019\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\092\\001\\255\\255\\094\\001\\255\\255\\\n\\096\\001\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\048\\001\\255\\255\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\\n\\255\\255\\255\\255\\000\\001\\255\\255\\060\\001\\003\\001\\255\\255\\063\\001\\\n\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\070\\001\\013\\001\\\n\\255\\255\\000\\000\\074\\001\\255\\255\\255\\255\\0
19\\001\\255\\255\\255\\255\\\n\\080\\001\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\092\\001\\255\\255\\255\\255\\255\\255\\\n\\096\\001\\255\\255\\255\\255\\041\\001\\255\\255\\255\\255\\255\\255\\000\\000\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\\n\\255\\255\\255\\255\\000\\001\\255\\255\\255\\255\\003\\001\\060\\001\\255\\255\\\n\\255\\255\\063\\001\\008\\001\\255\\255\\255\\255\\067\\001\\068\\001\\013\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\019\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\080\\001\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\\n\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\092\\001\\255\\255\\\n\\255\\255\\255\\255\\096\\001\\041\\001\\255\\255\\255\\255\\255\\255\\000\\000\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\108\\001\\000\\001\\\n\\255\\255\\111\\001\\003\\001\\255\\255\\255\\255\\255\\255\\060\\001\\255\\255\\\n\\000\\000\\063\\001\\255\\255\\255\\255\\013\\001\\067\\001\\068\\001\\255\\255\\\n\\255\\255\\255\\255\\019\\001\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\026\\001\\080\\001\\028\\001\\029\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\092\\001\\255\\255\\\n\\041\\001\\255\\255\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\255\\255\\108\\001\\003\\001\\\n\\255\\255\\111\\001\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\\n\\000\\000\\013\\001\\067\\001\\068\\001\\255\\255\\255\\255\\255\\255\\019\\001\\\n\\255\\255\\074\\001\\255\\255\\000\\001\\255\\255\\255\\255\\026\\001\\080\\001\\\n\\028\\001\\029\\001\\255\\255\\008\\001\\255\\255\\000\\000\\255\\255\\255\\255\\\n\\013\\001\\255\\255\\255\\255\\092\\001\\255\\255\\041\\001\\000\\000\\096\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\
255\\255\\255\\026\\001\\255\\255\\028\\001\\\n\\029\\001\\000\\001\\255\\255\\108\\001\\003\\001\\255\\255\\111\\001\\255\\255\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\041\\001\\255\\255\\013\\001\\067\\001\\\n\\068\\001\\255\\255\\255\\255\\255\\255\\019\\001\\255\\255\\074\\001\\255\\255\\\n\\255\\255\\000\\000\\255\\255\\026\\001\\080\\001\\028\\001\\029\\001\\060\\001\\\n\\255\\255\\255\\255\\063\\001\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\\n\\092\\001\\255\\255\\041\\001\\255\\255\\096\\001\\074\\001\\000\\000\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\108\\001\\000\\001\\255\\255\\111\\001\\003\\001\\060\\001\\255\\255\\092\\001\\\n\\063\\001\\255\\255\\255\\255\\096\\001\\067\\001\\068\\001\\013\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\074\\001\\019\\001\\255\\255\\255\\255\\108\\001\\\n\\255\\255\\080\\001\\111\\001\\026\\001\\255\\255\\028\\001\\029\\001\\000\\001\\\n\\255\\255\\255\\255\\003\\001\\255\\255\\255\\255\\092\\001\\255\\255\\255\\255\\\n\\255\\255\\096\\001\\041\\001\\255\\255\\013\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\019\\001\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\\n\\111\\001\\026\\001\\255\\255\\028\\001\\029\\001\\060\\001\\255\\255\\255\\255\\\n\\063\\001\\255\\255\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\255\\255\\\n\\041\\001\\255\\255\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\\n\\255\\255\\255\\255\\003\\001\\060\\001\\255\\255\\092\\001\\063\\001\\255\\255\\\n\\255\\255\\096\\001\\067\\001\\068\\001\\013\\001\\255\\255\\255\\255\\255\\255\\\n\\000\\001\\074\\001\\019\\001\\255\\255\\255\\255\\108\\001\\255\\255\\080\\001\\\n\\111\\001\\026\\001\\255\\255\\028\\001\\029\\001\\013\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\092\\001\\255\\255\\255\\255\\255\\255\\096\\001\\\n\\041\\001\\255\\255\
\026\\001\\255\\255\\028\\001\\029\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\255\\255\\\n\\255\\255\\041\\001\\255\\255\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\\n\\255\\255\\255\\255\\067\\001\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\000\\001\\074\\001\\255\\255\\255\\255\\060\\001\\255\\255\\255\\255\\080\\001\\\n\\255\\255\\255\\255\\066\\001\\067\\001\\068\\001\\013\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\074\\001\\092\\001\\255\\255\\000\\001\\255\\255\\096\\001\\\n\\080\\001\\255\\255\\026\\001\\255\\255\\028\\001\\029\\001\\000\\001\\255\\255\\\n\\255\\255\\255\\255\\013\\001\\108\\001\\092\\001\\255\\255\\111\\001\\255\\255\\\n\\096\\001\\041\\001\\255\\255\\013\\001\\255\\255\\255\\255\\255\\255\\026\\001\\\n\\255\\255\\028\\001\\029\\001\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\\n\\026\\001\\255\\255\\028\\001\\029\\001\\060\\001\\255\\255\\041\\001\\063\\001\\\n\\255\\255\\000\\001\\255\\255\\255\\255\\068\\001\\255\\255\\255\\255\\041\\001\\\n\\255\\255\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\013\\001\\255\\255\\\n\\080\\001\\060\\001\\255\\255\\255\\255\\063\\001\\255\\255\\000\\001\\255\\255\\\n\\255\\255\\068\\001\\060\\001\\026\\001\\092\\001\\028\\001\\029\\001\\074\\001\\\n\\096\\001\\255\\255\\068\\001\\013\\001\\255\\255\\080\\001\\255\\255\\255\\255\\\n\\074\\001\\255\\255\\041\\001\\255\\255\\108\\001\\255\\255\\080\\001\\111\\001\\\n\\026\\001\\092\\001\\028\\001\\029\\001\\255\\255\\096\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\092\\001\\255\\255\\255\\255\\060\\001\\096\\001\\041\\001\\\n\\255\\255\\108\\001\\255\\255\\255\\255\\111\\001\\068\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\108\\001\\074\\001\\255\\255\\111\\001\\255\\255\\255\\255\\\n\\255\\255\\080\\001\\060\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\068\\001\\255\\255\\255\\255\\092\\001\\255\\255\\255\\255\\\n\\074
\\001\\096\\001\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\108\\001\\255\\255\\255\\255\\\n\\111\\001\\255\\255\\092\\001\\000\\001\\255\\255\\255\\255\\096\\001\\255\\255\\\n\\005\\001\\006\\001\\007\\001\\008\\001\\255\\255\\255\\255\\011\\001\\012\\001\\\n\\013\\001\\014\\001\\108\\001\\255\\255\\255\\255\\111\\001\\019\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\028\\001\\\n\\029\\001\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\035\\001\\255\\255\\\n\\255\\255\\255\\255\\039\\001\\255\\255\\041\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\051\\001\\052\\001\\\n\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\060\\001\\\n\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\066\\001\\255\\255\\068\\001\\\n\\069\\001\\070\\001\\071\\001\\072\\001\\255\\255\\074\\001\\255\\255\\255\\255\\\n\\077\\001\\078\\001\\255\\255\\080\\001\\081\\001\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\087\\001\\255\\255\\089\\001\\090\\001\\255\\255\\092\\001\\\n\\093\\001\\255\\255\\255\\255\\096\\001\\097\\001\\255\\255\\099\\001\\255\\255\\\n\\101\\001\\102\\001\\255\\255\\104\\001\\255\\255\\106\\001\\107\\001\\108\\001\\\n\\109\\001\\110\\001\\111\\001\\112\\001\\000\\001\\114\\001\\255\\255\\255\\255\\\n\\255\\255\\005\\001\\006\\001\\007\\001\\008\\001\\255\\255\\255\\255\\011\\001\\\n\\012\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\019\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\255\\255\\\n\\028\\001\\255\\255\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\035\\001\\\n\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\049\\001\\050\\001\\051\\001\\\n\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\0
59\\001\\\n\\060\\001\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\068\\001\\069\\001\\070\\001\\071\\001\\072\\001\\255\\255\\074\\001\\255\\255\\\n\\255\\255\\077\\001\\078\\001\\255\\255\\255\\255\\081\\001\\255\\255\\255\\255\\\n\\084\\001\\085\\001\\255\\255\\087\\001\\255\\255\\089\\001\\090\\001\\255\\255\\\n\\255\\255\\093\\001\\255\\255\\255\\255\\255\\255\\097\\001\\255\\255\\099\\001\\\n\\255\\255\\101\\001\\102\\001\\255\\255\\104\\001\\255\\255\\106\\001\\107\\001\\\n\\255\\255\\109\\001\\110\\001\\111\\001\\112\\001\\255\\255\\114\\001\\000\\001\\\n\\001\\001\\002\\001\\255\\255\\255\\255\\005\\001\\006\\001\\007\\001\\255\\255\\\n\\009\\001\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\015\\001\\016\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\030\\001\\031\\001\\032\\001\\\n\\033\\001\\034\\001\\255\\255\\036\\001\\255\\255\\255\\255\\039\\001\\255\\255\\\n\\255\\255\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\\n\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\\n\\255\\255\\255\\255\\059\\001\\255\\255\\061\\001\\255\\255\\063\\001\\064\\001\\\n\\065\\001\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\071\\001\\072\\001\\\n\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\094\\001\\255\\255\\255\\255\\\n\\255\\255\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\107\\001\\255\\255\\109\\001\\110\\001\\000\\001\\001\\001\\\n\\002\\001\\114\\001\\255\\255\\005\\001\\006\\001\\007\\001\\255\\255\\009\\001\\\n\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\015\\001\\016\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\
255\\255\\255\\255\\255\\\n\\255\\255\\027\\001\\255\\255\\255\\255\\030\\001\\031\\001\\032\\001\\033\\001\\\n\\034\\001\\255\\255\\036\\001\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\\n\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\255\\255\\\n\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\255\\255\\061\\001\\255\\255\\063\\001\\064\\001\\065\\001\\\n\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\\n\\074\\001\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\\n\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\094\\001\\255\\255\\255\\255\\255\\255\\\n\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\107\\001\\255\\255\\109\\001\\110\\001\\000\\001\\255\\255\\255\\255\\\n\\114\\001\\255\\255\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\255\\255\\\n\\011\\001\\012\\001\\013\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\026\\001\\\n\\255\\255\\028\\001\\029\\001\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\041\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\\n\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\\n\\059\\001\\060\\001\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\\n\\255\\255\\068\\001\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\074\\001\\\n\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\080\\001\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\000\\001\\087\\001\\255\\255\\255\\255\\255\\255\\\n\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\096\\001\\011\\001\\012\\001\\\n\\255\\255\\255\\255\\101\\001\\255\\255\
\255\\255\\255\\255\\255\\255\\106\\001\\\n\\107\\001\\108\\001\\109\\001\\110\\001\\111\\001\\255\\255\\255\\255\\114\\001\\\n\\255\\255\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\\n\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\000\\001\\087\\001\\255\\255\\255\\255\\255\\255\\005\\001\\006\\001\\\n\\007\\001\\094\\001\\255\\255\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\\n\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\107\\001\\255\\255\\\n\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\114\\001\\255\\255\\030\\001\\\n\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\\n\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\\n\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\000\\001\\\n\\087\\001\\255\\255\\255\\255\\255\\255\\005\\001\\006\\001\\007\\001\\094\\001\\\n\\255\\255\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\101\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\107\\001\\255\\255\\109\\001\\110\\001\\\n\\255\\255\\255\\255\\255\\255\\114\\001\\255\\255\\030\\001\\031\\001\\032\\001\\\n\\033\\001\\034\\001\\255
\\255\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\\n\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\063\\001\\064\\001\\\n\\065\\001\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\071\\001\\072\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\000\\001\\087\\001\\255\\255\\\n\\255\\255\\255\\255\\005\\001\\006\\001\\007\\001\\094\\001\\255\\255\\255\\255\\\n\\011\\001\\012\\001\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\107\\001\\255\\255\\109\\001\\110\\001\\255\\255\\255\\255\\\n\\255\\255\\114\\001\\255\\255\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\\n\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\\n\\059\\001\\255\\255\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\\n\\255\\255\\255\\255\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\087\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\094\\001\\003\\001\\004\\001\\005\\001\\255\\255\\\n\\255\\255\\255\\255\\101\\001\\255\\255\\011\\001\\255\\255\\013\\001\\106\\001\\\n\\107\\001\\255\\255\\109\\001\\110\\001\\019\\001\\020\\001\\021\\001\\114\\001\\\n\\255\\255\\024\\001\\025\\001\\026\\001\\255\\255\\028\\001\\029\\001\\030\\001\\\n\\255\\255\\032\\001\\033\\001\\034\\001\\035\\001\\255\\255\\255\\255\\255\\255\\\n\\039\\001\\040\\001\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\25
5\\048\\001\\049\\001\\255\\255\\255\\255\\052\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\063\\001\\064\\001\\255\\255\\255\\255\\255\\255\\000\\001\\069\\001\\070\\001\\\n\\255\\255\\004\\001\\255\\255\\074\\001\\075\\001\\076\\001\\077\\001\\078\\001\\\n\\079\\001\\080\\001\\255\\255\\082\\001\\255\\255\\255\\255\\017\\001\\255\\255\\\n\\019\\001\\088\\001\\255\\255\\022\\001\\255\\255\\255\\255\\093\\001\\026\\001\\\n\\027\\001\\255\\255\\255\\255\\255\\255\\099\\001\\255\\255\\255\\255\\102\\001\\\n\\103\\001\\036\\001\\105\\001\\106\\001\\107\\001\\108\\001\\109\\001\\255\\255\\\n\\111\\001\\112\\001\\113\\001\\114\\001\\115\\001\\048\\001\\049\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\060\\001\\255\\255\\255\\255\\255\\255\\064\\001\\255\\255\\066\\001\\\n\\067\\001\\068\\001\\255\\255\\070\\001\\255\\255\\255\\255\\073\\001\\255\\255\\\n\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\255\\255\\255\\255\\255\\255\\\n\\006\\001\\007\\001\\255\\255\\009\\001\\255\\255\\255\\255\\012\\001\\090\\001\\\n\\091\\001\\015\\001\\016\\001\\255\\255\\095\\001\\255\\255\\097\\001\\255\\255\\\n\\255\\255\\100\\001\\255\\255\\255\\255\\255\\255\\027\\001\\028\\001\\255\\255\\\n\\030\\001\\031\\001\\109\\001\\255\\255\\111\\001\\255\\255\\036\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\042\\001\\043\\001\\044\\001\\045\\001\\\n\\046\\001\\047\\001\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\\n\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\061\\001\\\n\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\071\\001\\072\\001\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\\n\\086\\001\\087\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\
255\\\n\\094\\001\\255\\255\\255\\255\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\108\\001\\109\\001\\\n\\110\\001\\000\\001\\001\\001\\002\\001\\255\\255\\255\\255\\255\\255\\006\\001\\\n\\007\\001\\255\\255\\009\\001\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\\n\\015\\001\\016\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\028\\001\\255\\255\\030\\001\\\n\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\\n\\047\\001\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\\n\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\061\\001\\255\\255\\\n\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\071\\001\\072\\001\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\\n\\087\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\094\\001\\\n\\255\\255\\255\\255\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\108\\001\\109\\001\\110\\001\\\n\\000\\001\\001\\001\\002\\001\\255\\255\\255\\255\\255\\255\\006\\001\\007\\001\\\n\\255\\255\\009\\001\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\015\\001\\\n\\016\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\027\\001\\028\\001\\255\\255\\030\\001\\031\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\036\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\\n\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\055\\001\\\n\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\061\\001\
\255\\255\\255\\255\\\n\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\071\\001\\\n\\072\\001\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\094\\001\\255\\255\\\n\\255\\255\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\106\\001\\255\\255\\108\\001\\109\\001\\110\\001\\000\\001\\\n\\001\\001\\002\\001\\255\\255\\255\\255\\255\\255\\006\\001\\007\\001\\255\\255\\\n\\009\\001\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\015\\001\\016\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\027\\001\\028\\001\\255\\255\\030\\001\\031\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\036\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\\n\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\\n\\255\\255\\255\\255\\059\\001\\255\\255\\061\\001\\255\\255\\255\\255\\064\\001\\\n\\065\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\071\\001\\072\\001\\\n\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\255\\255\\\n\\255\\255\\000\\001\\255\\255\\255\\255\\255\\255\\094\\001\\255\\255\\006\\001\\\n\\097\\001\\098\\001\\255\\255\\100\\001\\101\\001\\012\\001\\255\\255\\255\\255\\\n\\015\\001\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\028\\001\\255\\255\\030\\001\\\n\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\050\\001\\255
\\255\\052\\001\\053\\001\\255\\255\\\n\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\000\\001\\255\\255\\\n\\255\\255\\064\\001\\065\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\\n\\071\\001\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\028\\001\\255\\255\\030\\001\\031\\001\\255\\255\\094\\001\\\n\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\\n\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\255\\255\\000\\001\\255\\255\\255\\255\\064\\001\\065\\001\\\n\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\071\\001\\255\\255\\012\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\028\\001\\\n\\255\\255\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\097\\001\\\n\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\050\\001\\255\\255\\052\\001\\\n\\053\\001\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\\n\\000\\001\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\006\\001\\255\\255\\\n\\255\\255\\255\\255\\071\\001\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\028\\001\\255\\255\\030\\001\\031\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\255\\255\\\n\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\110\\001\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\055\\001\\\n\\056\\001\\255\\255\\255\\25
5\\059\\001\\255\\255\\000\\001\\255\\255\\255\\255\\\n\\064\\001\\065\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\071\\001\\\n\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\028\\001\\255\\255\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\097\\001\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\050\\001\\\n\\255\\255\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\\n\\059\\001\\255\\255\\000\\001\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\\n\\006\\001\\255\\255\\255\\255\\255\\255\\071\\001\\255\\255\\012\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\028\\001\\255\\255\\\n\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\097\\001\\255\\255\\\n\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\255\\255\\255\\255\\109\\001\\110\\001\\050\\001\\255\\255\\052\\001\\053\\001\\\n\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\064\\001\\065\\001\\005\\001\\006\\001\\007\\001\\255\\255\\\n\\255\\255\\071\\001\\011\\001\\012\\001\\013\\001\\014\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\028\\001\\029\\001\\030\\001\\031\\001\\032\\001\\\n\\033\\001\\034\\001\\255\\255\\097\\001\\255\\255\\255\\255\\039\\001\\101\\001\\\n\\041\\001\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\110\\001\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\\n\\255\\255\\255\\255\\059\\001\\060\\001\\255\\255\\255\\255\\063\\001\\064\\001\\\n\\065\\001\\2
55\\255\\255\\255\\068\\001\\069\\001\\255\\255\\071\\001\\072\\001\\\n\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\080\\001\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\087\\001\\255\\255\\\n\\089\\001\\255\\255\\255\\255\\005\\001\\006\\001\\007\\001\\255\\255\\096\\001\\\n\\255\\255\\011\\001\\012\\001\\013\\001\\101\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\107\\001\\108\\001\\109\\001\\110\\001\\111\\001\\255\\255\\\n\\255\\255\\114\\001\\028\\001\\029\\001\\030\\001\\031\\001\\032\\001\\033\\001\\\n\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\041\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\060\\001\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\\n\\255\\255\\255\\255\\068\\001\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\\n\\074\\001\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\080\\001\\255\\255\\\n\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\087\\001\\255\\255\\255\\255\\\n\\255\\255\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\096\\001\\011\\001\\\n\\012\\001\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\107\\001\\108\\001\\109\\001\\110\\001\\111\\001\\255\\255\\255\\255\\\n\\114\\001\\255\\255\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\\n\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\255\\255\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\
\\n\\084\\001\\085\\001\\255\\255\\087\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\092\\001\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\010\\001\\011\\001\\\n\\012\\001\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\107\\001\\\n\\255\\255\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\114\\001\\255\\255\\\n\\255\\255\\255\\255\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\\n\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\255\\255\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\084\\001\\085\\001\\255\\255\\087\\001\\255\\255\\255\\255\\005\\001\\006\\001\\\n\\007\\001\\255\\255\\255\\255\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\\n\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\107\\001\\\n\\255\\255\\109\\001\\110\\001\\026\\001\\255\\255\\255\\255\\114\\001\\030\\001\\\n\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\\n\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\\n\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\087\\001\\255\\255\\255\\255\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\\n\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\101
\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\107\\001\\255\\255\\109\\001\\110\\001\\\n\\255\\255\\255\\255\\255\\255\\114\\001\\030\\001\\031\\001\\032\\001\\033\\001\\\n\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\\n\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\083\\001\\084\\001\\085\\001\\255\\255\\087\\001\\255\\255\\255\\255\\\n\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\255\\255\\011\\001\\012\\001\\\n\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\107\\001\\255\\255\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\\n\\114\\001\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\\n\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\087\\001\\255\\255\\255\\255\\255\\255\\255\\255\\092\\001\\\n\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\010\\001\\011\\001\\012\\001\\\n\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\107\\001\\255\\255\\\n\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\114\\001\\255\\255\\255\\255\\\n\\255\\255\\030\\001\\031\\001\\032\\001\\033\\00
1\\034\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\\n\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\087\\001\\255\\255\\255\\255\\255\\255\\005\\001\\006\\001\\\n\\007\\001\\255\\255\\255\\255\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\\n\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\107\\001\\022\\001\\\n\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\114\\001\\255\\255\\030\\001\\\n\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\\n\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\\n\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\078\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\\n\\087\\001\\255\\255\\255\\255\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\\n\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\106\\001\\107\\001\\255\\255\\109\\001\\110\\001\\\n\\026\\001\\255\\255\\255\\255\\114\\001\\030\\001\\031\\001\\032\\001\\033\\001\\\n\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\050\\001\\255\\255\\052\\001\\0
53\\001\\054\\001\\055\\001\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\\n\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\087\\001\\255\\255\\255\\255\\\n\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\255\\255\\011\\001\\012\\001\\\n\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\107\\001\\255\\255\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\\n\\114\\001\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\\n\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\\n\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\078\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\\n\\085\\001\\255\\255\\087\\001\\255\\255\\255\\255\\005\\001\\006\\001\\007\\001\\\n\\255\\255\\255\\255\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\\n\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\107\\001\\255\\255\\\n\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\114\\001\\030\\001\\031\\001\\\n\\032\\001\\033\\001\\034\\001\\255\\255\\255\\255\\255\\255\\255\\255\\039\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\\n\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\063\\001\\\n\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\069\\001\\255\\255\\071\\001\\\n\\072\\001\\255\\
255\\255\\255\\255\\255\\255\\255\\255\\255\\078\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\085\\001\\255\\255\\087\\001\\\n\\255\\255\\255\\255\\005\\001\\006\\001\\007\\001\\255\\255\\255\\255\\255\\255\\\n\\011\\001\\012\\001\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\106\\001\\107\\001\\255\\255\\109\\001\\110\\001\\255\\255\\\n\\255\\255\\255\\255\\114\\001\\030\\001\\031\\001\\032\\001\\033\\001\\034\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\039\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\\n\\255\\255\\052\\001\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\255\\255\\\n\\059\\001\\255\\255\\255\\255\\255\\255\\063\\001\\064\\001\\065\\001\\255\\255\\\n\\255\\255\\006\\001\\069\\001\\255\\255\\071\\001\\072\\001\\255\\255\\012\\001\\\n\\255\\255\\014\\001\\255\\255\\078\\001\\017\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\084\\001\\085\\001\\255\\255\\087\\001\\255\\255\\027\\001\\255\\255\\\n\\255\\255\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\\n\\107\\001\\255\\255\\109\\001\\110\\001\\255\\255\\050\\001\\051\\001\\114\\001\\\n\\053\\001\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\006\\001\\255\\255\\\n\\255\\255\\255\\255\\071\\001\\255\\255\\012\\001\\255\\255\\014\\001\\255\\255\\\n\\255\\255\\017\\001\\255\\255\\255\\255\\081\\001\\255\\255\\255\\255\\084\\001\\\n\\255\\255\\255\\255\\255\\255\\027\\001\\089\\001\\255\\255\\030\\001\\031\\001\\\n\\255\\255\\006\\001\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\012\\001\\\n\\101\\001\\014\\001\\255\\255\\104\\001\\255\\255\\106\\001\\255\\255\\255\\255\\\n\\109\\001\\110\\001\\050\\001\\051\\001\\255\\255\\053\\001\\255\\255\\055\\001\\\n\
\056\\001\\030\\001\\031\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\071\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\051\\001\\255\\255\\\n\\053\\001\\081\\001\\055\\001\\056\\001\\084\\001\\255\\255\\059\\001\\255\\255\\\n\\255\\255\\089\\001\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\097\\001\\071\\001\\255\\255\\073\\001\\101\\001\\255\\255\\255\\255\\\n\\104\\001\\255\\255\\106\\001\\255\\255\\081\\001\\109\\001\\110\\001\\084\\001\\\n\\255\\255\\255\\255\\006\\001\\255\\255\\089\\001\\255\\255\\255\\255\\255\\255\\\n\\012\\001\\255\\255\\014\\001\\255\\255\\097\\001\\255\\255\\255\\255\\255\\255\\\n\\101\\001\\255\\255\\255\\255\\104\\001\\255\\255\\106\\001\\255\\255\\027\\001\\\n\\109\\001\\110\\001\\030\\001\\031\\001\\255\\255\\006\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\050\\001\\051\\001\\\n\\255\\255\\053\\001\\255\\255\\055\\001\\056\\001\\030\\001\\031\\001\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\071\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\050\\001\\051\\001\\255\\255\\053\\001\\081\\001\\055\\001\\056\\001\\\n\\084\\001\\255\\255\\059\\001\\255\\255\\255\\255\\089\\001\\255\\255\\064\\001\\\n\\065\\001\\255\\255\\006\\001\\255\\255\\255\\255\\097\\001\\071\\001\\255\\255\\\n\\012\\001\\101\\001\\255\\255\\255\\255\\104\\001\\255\\255\\106\\001\\255\\255\\\n\\081\\001\\109\\001\\110\\001\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\089\\001\\255\\255\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\104\\001\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\050\\00
1\\051\\001\\\n\\255\\255\\053\\001\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\006\\001\\255\\255\\255\\255\\071\\001\\255\\255\\255\\255\\012\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\081\\001\\255\\255\\255\\255\\\n\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\089\\001\\028\\001\\255\\255\\\n\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\\n\\255\\255\\101\\001\\255\\255\\255\\255\\104\\001\\255\\255\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\\n\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\006\\001\\\n\\255\\255\\071\\001\\255\\255\\010\\001\\255\\255\\012\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\028\\001\\092\\001\\030\\001\\\n\\031\\001\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\255\\255\\101\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\110\\001\\255\\255\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\\n\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\064\\001\\065\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\\n\\071\\001\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\006\\001\\007\\001\\255\\255\\255\\255\\084\\001\\011\\001\\012\\001\\\n\\255\\255\\255\\255\\028\\001\\255\\255\\030\\001\\031\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\\n\\255\\255\\030\\001\\031\\001\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\\n\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\0
55\\001\\056\\001\\255\\255\\\n\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\050\\001\\064\\001\\065\\001\\\n\\053\\001\\054\\001\\055\\001\\056\\001\\255\\255\\071\\001\\059\\001\\255\\255\\\n\\006\\001\\255\\255\\008\\001\\064\\001\\065\\001\\255\\255\\012\\001\\255\\255\\\n\\255\\255\\255\\255\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\092\\001\\255\\255\\255\\255\\255\\255\\028\\001\\097\\001\\\n\\030\\001\\031\\001\\087\\001\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\255\\255\\255\\255\\255\\255\\\n\\101\\001\\255\\255\\255\\255\\255\\255\\050\\001\\106\\001\\052\\001\\053\\001\\\n\\109\\001\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\006\\001\\255\\255\\255\\255\\\n\\255\\255\\071\\001\\255\\255\\012\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\084\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\028\\001\\255\\255\\030\\001\\031\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\097\\001\\255\\255\\255\\255\\255\\255\\101\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\\n\\110\\001\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\\n\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\\n\\065\\001\\255\\255\\006\\001\\255\\255\\255\\255\\255\\255\\071\\001\\255\\255\\\n\\012\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\028\\001\\255\\255\\030\\001\\031\\001\\255\\255\\006\\001\\255\\255\\255\\255\\\n\\097\\001\\255\\255\\255\\255\\012\\001\\101\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\050\\001\\255\\255\\\n\\052\\001\\053\\001\\255\\255\\055\\
001\\056\\001\\030\\001\\031\\001\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\071\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\\n\\084\\001\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\\n\\065\\001\\255\\255\\006\\001\\255\\255\\255\\255\\097\\001\\071\\001\\255\\255\\\n\\012\\001\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\028\\001\\255\\255\\030\\001\\031\\001\\093\\001\\006\\001\\255\\255\\255\\255\\\n\\097\\001\\255\\255\\255\\255\\012\\001\\101\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\050\\001\\255\\255\\\n\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\030\\001\\031\\001\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\071\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\050\\001\\255\\255\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\\n\\084\\001\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\\n\\065\\001\\255\\255\\006\\001\\255\\255\\255\\255\\097\\001\\071\\001\\255\\255\\\n\\012\\001\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\030\\001\\031\\001\\255\\255\\006\\001\\255\\255\\255\\255\\\n\\097\\001\\255\\255\\255\\255\\012\\001\\101\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\050\\001\\255\\255\\\n\\052\\001\\053\\001\\255\\255\\055\\001\\056\\001\\030\\001\\031\\001\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\
\255\\255\\071\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\050\\001\\255\\255\\255\\255\\053\\001\\255\\255\\055\\001\\056\\001\\\n\\084\\001\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\\n\\065\\001\\255\\255\\006\\001\\255\\255\\255\\255\\097\\001\\071\\001\\255\\255\\\n\\012\\001\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\\n\\255\\255\\109\\001\\110\\001\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\030\\001\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\110\\001\\050\\001\\255\\255\\\n\\255\\255\\053\\001\\255\\255\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\006\\001\\\n\\007\\001\\255\\255\\255\\255\\071\\001\\011\\001\\012\\001\\006\\001\\007\\001\\\n\\255\\255\\255\\255\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\022\\001\\\n\\084\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\030\\001\\\n\\031\\001\\255\\255\\255\\255\\255\\255\\255\\255\\097\\001\\030\\001\\031\\001\\\n\\255\\255\\101\\001\\255\\255\\255\\255\\255\\255\\255\\255\\106\\001\\255\\255\\\n\\047\\001\\109\\001\\110\\001\\050\\001\\051\\001\\255\\255\\053\\001\\054\\001\\\n\\055\\001\\056\\001\\050\\001\\051\\001\\059\\001\\053\\001\\054\\001\\055\\001\\\n\\056\\001\\064\\001\\065\\001\\059\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\064\\001\\065\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\006\\001\\\n\\007\\001\\255\\255\\081\\001\\255\\255\\011\\001\\012\\001\\255\\255\\255\\255\\\n\\087\\001\\081\\001\\089\\001\\255\\255\\255\\255\\255\\255\\255\\255\\087\\001\\\n\\255\\255\\089\\001\\097\\001\\098\\001\\255\\255\\255\\255\\101\\001\\030\\001\\\n\\031\\001\\104\\001\\255\\255\\106\\001\\255\\255\\101\\001\\109\\001\\255\\255\\\n\\104
\\001\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\050\\001\\255\\255\\255\\255\\053\\001\\054\\001\\\n\\055\\001\\056\\001\\255\\255\\255\\255\\059\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\064\\001\\065\\001\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\009\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\014\\001\\015\\001\\016\\001\\017\\001\\018\\001\\\n\\087\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\101\\001\\255\\255\\\n\\255\\255\\036\\001\\255\\255\\106\\001\\255\\255\\255\\255\\109\\001\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\061\\001\\255\\255\\015\\001\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\082\\001\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\000\\001\\001\\001\\002\\001\\\n\\255\\255\\255\\255\\255\\255\\094\\001\\007\\001\\255\\255\\009\\001\\255\\255\\\n\\255\\255\\100\\001\\255\\255\\255\\255\\055\\001\\016\\001\\057\\001\\058\\001\\\n\\059\\001\\255\\255\\061\\001\\255\\255\\255\\255\\064\\001\\065\\001\\255\\255\\\n\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\074\\001\\\n\\255\\255\\036\\001\\255\\255\\255\\255\\255\\255\\255\\255\\081\\001\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\047\\001\\255\\255\\089\\001\\090\\001\\\n\\255\\255\\255\\255\\255\\255\\094\\001\\255\\255\\255\\255\\097\\001\\255\\255\\\n\\255\\255\\255\\255\\061\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\109\\001\\110\\001\\071\\001\\072\\001\\255\\255\\0
74\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\082\\001\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\087\\001\\009\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\015\\001\\016\\001\\255\\255\\018\\001\\098\\001\\\n\\255\\255\\100\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\255\\255\\\n\\036\\001\\255\\255\\255\\255\\255\\255\\255\\255\\009\\001\\042\\001\\043\\001\\\n\\044\\001\\045\\001\\046\\001\\015\\001\\016\\001\\255\\255\\018\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\\n\\255\\255\\061\\001\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\\n\\036\\001\\255\\255\\255\\255\\071\\001\\072\\001\\255\\255\\042\\001\\043\\001\\\n\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\255\\255\\082\\001\\083\\001\\\n\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\091\\001\\\n\\255\\255\\061\\001\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\\n\\100\\001\\255\\255\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\255\\255\\082\\001\\083\\001\\\n\\084\\001\\085\\001\\086\\001\\009\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\092\\001\\015\\001\\016\\001\\255\\255\\018\\001\\255\\255\\255\\255\\255\\255\\\n\\100\\001\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\255\\255\\036\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\009\\001\\042\\001\\043\\001\\044\\001\\045\\001\\\n\\046\\001\\015\\001\\016\\001\\255\\255\\018\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\061\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\036\\001\\255\\255\\\n\\255\\255\\071\\001\\072\\001\\255\\255\\042\\001\\043\\
001\\044\\001\\045\\001\\\n\\046\\001\\255\\255\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\\n\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\061\\001\\\n\\094\\001\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\100\\001\\255\\255\\\n\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\000\\001\\001\\001\\002\\001\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\\n\\086\\001\\009\\001\\255\\255\\255\\255\\255\\255\\091\\001\\255\\255\\015\\001\\\n\\016\\001\\255\\255\\018\\001\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\000\\001\\001\\001\\002\\001\\255\\255\\036\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\009\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\015\\001\\\n\\016\\001\\255\\255\\018\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\061\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\255\\255\\036\\001\\255\\255\\255\\255\\071\\001\\\n\\072\\001\\255\\255\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\\n\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\092\\001\\061\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\255\\255\\100\\001\\255\\255\\255\\255\\071\\001\\\n\\072\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\000\\001\\\n\\001\\001\\002\\001\\255\\255\\255\\255\\255\\255\\255\\255\\094\\001\\255\\255\\\n\\009\\001\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\015\\001\\016\\001\\\n\\255\\255\\018\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\\n\\001\\001\\002\\001\\255\\255\\036\\001\
\255\\255\\255\\255\\255\\255\\255\\255\\\n\\009\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\015\\001\\016\\001\\\n\\255\\255\\018\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\027\\001\\255\\255\\061\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\036\\001\\255\\255\\255\\255\\071\\001\\072\\001\\\n\\255\\255\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\\n\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\091\\001\\255\\255\\061\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\066\\001\\255\\255\\100\\001\\255\\255\\255\\255\\071\\001\\072\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\\n\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\009\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\092\\001\\015\\001\\016\\001\\255\\255\\018\\001\\\n\\255\\255\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\\n\\255\\255\\036\\001\\255\\255\\255\\255\\255\\255\\255\\255\\009\\001\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\015\\001\\016\\001\\255\\255\\018\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\001\\255\\255\\061\\001\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\036\\001\\255\\255\\255\\255\\071\\001\\072\\001\\255\\255\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\255\\255\\255\\255\\082\\001\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\061\\001\\094\\001\\255\\255\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\100\\001\\255\\255\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\255\\255\\082\\001\\\n\\083\\001\\084\\001\\085
\\001\\086\\001\\009\\001\\255\\255\\255\\255\\255\\255\\\n\\091\\001\\255\\255\\015\\001\\016\\001\\255\\255\\018\\001\\255\\255\\255\\255\\\n\\255\\255\\100\\001\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\000\\001\\001\\001\\002\\001\\255\\255\\036\\001\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\009\\001\\042\\001\\043\\001\\044\\001\\\n\\045\\001\\046\\001\\015\\001\\016\\001\\255\\255\\018\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\\n\\061\\001\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\255\\255\\036\\001\\\n\\255\\255\\255\\255\\071\\001\\072\\001\\255\\255\\042\\001\\043\\001\\044\\001\\\n\\045\\001\\046\\001\\255\\255\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\\n\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\092\\001\\\n\\061\\001\\001\\001\\002\\001\\255\\255\\255\\255\\066\\001\\255\\255\\100\\001\\\n\\255\\255\\009\\001\\071\\001\\072\\001\\255\\255\\255\\255\\255\\255\\015\\001\\\n\\016\\001\\255\\255\\018\\001\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\\n\\085\\001\\086\\001\\255\\255\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\094\\001\\255\\255\\255\\255\\036\\001\\255\\255\\255\\255\\100\\001\\\n\\255\\255\\255\\255\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\061\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\255\\255\\255\\255\\255\\255\\255\\255\\071\\001\\\n\\072\\001\\001\\001\\002\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\009\\001\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\015\\001\\\n\\016\\001\\255\\255\\018\\001\\255\\255\\255\\255\\255\\255\\255\\255\\095\\001\\\n\\255\\255\\025\\001\\255\\255\\027\\001\\100\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\25
5\\001\\001\\002\\001\\255\\255\\036\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\009\\001\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\015\\001\\\n\\016\\001\\255\\255\\018\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\027\\001\\255\\255\\061\\001\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\066\\001\\255\\255\\036\\001\\255\\255\\255\\255\\071\\001\\\n\\072\\001\\255\\255\\042\\001\\043\\001\\044\\001\\045\\001\\046\\001\\255\\255\\\n\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\061\\001\\001\\001\\002\\001\\\n\\255\\255\\255\\255\\066\\001\\255\\255\\100\\001\\255\\255\\009\\001\\071\\001\\\n\\072\\001\\255\\255\\255\\255\\255\\255\\015\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\082\\001\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\\n\\027\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\001\\001\\002\\001\\\n\\255\\255\\036\\001\\255\\255\\255\\255\\100\\001\\255\\255\\255\\255\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\015\\001\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\027\\001\\255\\255\\061\\001\\255\\255\\255\\255\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\036\\001\\255\\255\\255\\255\\071\\001\\072\\001\\255\\255\\042\\001\\\n\\043\\001\\044\\001\\045\\001\\046\\001\\013\\001\\255\\255\\255\\255\\082\\001\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\061\\001\\028\\001\\029\\001\\255\\255\\255\\255\\066\\001\\\n\\255\\255\\100\\001\\255\\255\\255\\255\\071\\001\\072\\001\\255\\255\\255\\255\\\n\\041\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\083\\001\\084\\001\\085\\001\\086\\001\\255\\255\\255\\255\\055\\001\\255\\255\\\n\\057\\001\\058\\001\\059\\001\\060\\001\\061\\001\\255\\255\\255\\255\\064\\
001\\\n\\065\\001\\100\\001\\255\\255\\068\\001\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\074\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\080\\001\\\n\\081\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\089\\001\\090\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\096\\001\\\n\\097\\001\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\255\\\n\\255\\255\\255\\255\\255\\255\\108\\001\\109\\001\\110\\001\\111\\001\"\n\nlet yynames_const = \"\\\n  AMPERAMPER\\000\\\n  AMPERSAND\\000\\\n  AND\\000\\\n  AS\\000\\\n  ASSERT\\000\\\n  BACKQUOTE\\000\\\n  BANG\\000\\\n  BAR\\000\\\n  BARBAR\\000\\\n  BARRBRACKET\\000\\\n  BEGIN\\000\\\n  CLASS\\000\\\n  COLON\\000\\\n  COLONCOLON\\000\\\n  COLONEQUAL\\000\\\n  COLONGREATER\\000\\\n  COMMA\\000\\\n  CONSTRAINT\\000\\\n  DO\\000\\\n  DONE\\000\\\n  DOT\\000\\\n  DOTDOT\\000\\\n  DOWNTO\\000\\\n  ELSE\\000\\\n  END\\000\\\n  EOF\\000\\\n  EQUAL\\000\\\n  EXCEPTION\\000\\\n  EXTERNAL\\000\\\n  FALSE\\000\\\n  FOR\\000\\\n  FUN\\000\\\n  FUNCTION\\000\\\n  FUNCTOR\\000\\\n  GREATER\\000\\\n  GREATERRBRACE\\000\\\n  GREATERRBRACKET\\000\\\n  IF\\000\\\n  IN\\000\\\n  INCLUDE\\000\\\n  INHERIT\\000\\\n  INITIALIZER\\000\\\n  LAZY\\000\\\n  LBRACE\\000\\\n  LBRACELESS\\000\\\n  LBRACKET\\000\\\n  LBRACKETBAR\\000\\\n  LBRACKETLESS\\000\\\n  LBRACKETGREATER\\000\\\n  LBRACKETPERCENT\\000\\\n  LBRACKETPERCENTPERCENT\\000\\\n  LESS\\000\\\n  LESSMINUS\\000\\\n  LET\\000\\\n  LPAREN\\000\\\n  LBRACKETAT\\000\\\n  LBRACKETATAT\\000\\\n  LBRACKETATATAT\\000\\\n  MATCH\\000\\\n  METHOD\\000\\\n  MINUS\\000\\\n  MINUSDOT\\000\\\n  MINUSGREATER\\000\\\n  MODULE\\000\\\n  MUTABLE\\000\\\n  NEW\\000\\\n  NONREC\\000\\\n  OBJECT\\000\\\n  OF\\000\\\n  OPEN\\000\\\n  OR\\000\\\n  PERCENT\\000\\\n  PLUS\\000\\\n  PLUSDOT\\000\\\n  PLUSEQ\\000\\\n  PRIVATE\\000\\\n  QUESTION\\000\\\n  QUOTE\\000\\\n  RBRACE\\000\\\n  RBRACKET\\000\\\n  REC\\000\\\n  RPAREN\\000\\\n 
 SEMI\\000\\\n  SEMISEMI\\000\\\n  HASH\\000\\\n  SIG\\000\\\n  STAR\\000\\\n  STRUCT\\000\\\n  THEN\\000\\\n  TILDE\\000\\\n  TO\\000\\\n  TRUE\\000\\\n  TRY\\000\\\n  TYPE\\000\\\n  UNDERSCORE\\000\\\n  VAL\\000\\\n  VIRTUAL\\000\\\n  WHEN\\000\\\n  WHILE\\000\\\n  WITH\\000\\\n  EOL\\000\\\n  \"\n\nlet yynames_block = \"\\\n  CHAR\\000\\\n  FLOAT\\000\\\n  INFIXOP0\\000\\\n  INFIXOP1\\000\\\n  INFIXOP2\\000\\\n  INFIXOP3\\000\\\n  INFIXOP4\\000\\\n  DOTOP\\000\\\n  INT\\000\\\n  LABEL\\000\\\n  LIDENT\\000\\\n  OPTLABEL\\000\\\n  PREFIXOP\\000\\\n  HASHOP\\000\\\n  STRING\\000\\\n  UIDENT\\000\\\n  COMMENT\\000\\\n  DOCSTRING\\000\\\n  \"\n\nlet yyact = [|\n  (fun _ -> failwith \"parser\")\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'structure) in\n    Obj.repr(\n# 568 \"ml/parser.mly\"\n                                         ( extra_str 1 _1 )\n# 6360 \"ml/parser.ml\"\n               : Parsetree.structure))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'signature) in\n    Obj.repr(\n# 571 \"ml/parser.mly\"\n                                         ( extra_sig 1 _1 )\n# 6367 \"ml/parser.ml\"\n               : Parsetree.signature))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in\n    Obj.repr(\n# 576 \"ml/parser.mly\"\n                  ( _1 )\n# 6374 \"ml/parser.ml\"\n               : Parsetree.core_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 579 \"ml/parser.mly\"\n                 ( _1 )\n# 6381 \"ml/parser.ml\"\n               : Parsetree.expression))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in\n    Obj.repr(\n# 582 \"ml/parser.mly\"\n                ( _1 )\n# 6388 \"ml/parser.ml\"\n               : Parsetree.pattern))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 589 \"ml/parser.mly\"\n      ( mkrhs \"*\" 
2, None )\n# 6394 \"ml/parser.ml\"\n               : 'functor_arg))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'functor_arg_name) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in\n    Obj.repr(\n# 591 \"ml/parser.mly\"\n      ( mkrhs _2 2, Some _4 )\n# 6402 \"ml/parser.ml\"\n               : 'functor_arg))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 595 \"ml/parser.mly\"\n               ( _1 )\n# 6409 \"ml/parser.ml\"\n               : 'functor_arg_name))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 596 \"ml/parser.mly\"\n               ( \"_\" )\n# 6415 \"ml/parser.ml\"\n               : 'functor_arg_name))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'functor_args) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'functor_arg) in\n    Obj.repr(\n# 601 \"ml/parser.mly\"\n      ( _2 :: _1 )\n# 6423 \"ml/parser.ml\"\n               : 'functor_args))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'functor_arg) in\n    Obj.repr(\n# 603 \"ml/parser.mly\"\n      ( [ _1 ] )\n# 6430 \"ml/parser.ml\"\n               : 'functor_args))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'mod_longident) in\n    Obj.repr(\n# 608 \"ml/parser.mly\"\n      ( mkmod(Pmod_ident (mkrhs _1 1)) )\n# 6437 \"ml/parser.ml\"\n               : 'module_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'structure) in\n    Obj.repr(\n# 610 \"ml/parser.mly\"\n      ( mkmod ~attrs:_2 (Pmod_structure(extra_str 3 _3)) )\n# 6445 \"ml/parser.ml\"\n               : 'module_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'structure) in\n    
Obj.repr(\n# 612 \"ml/parser.mly\"\n      ( unclosed \"struct\" 1 \"end\" 4 )\n# 6453 \"ml/parser.ml\"\n               : 'module_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'functor_args) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in\n    Obj.repr(\n# 614 \"ml/parser.mly\"\n      ( let modexp =\n          List.fold_left\n            (fun acc (n, t) -> mkmod(Pmod_functor(n, t, acc)))\n            _5 _3\n        in wrap_mod_attrs modexp _2 )\n# 6466 \"ml/parser.ml\"\n               : 'module_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'paren_module_expr) in\n    Obj.repr(\n# 620 \"ml/parser.mly\"\n      ( mkmod(Pmod_apply(_1, _2)) )\n# 6474 \"ml/parser.ml\"\n               : 'module_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'module_expr) in\n    Obj.repr(\n# 622 \"ml/parser.mly\"\n      ( mkmod(Pmod_apply(_1, mkmod (Pmod_structure []))) )\n# 6481 \"ml/parser.ml\"\n               : 'module_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'paren_module_expr) in\n    Obj.repr(\n# 624 \"ml/parser.mly\"\n      ( _1 )\n# 6488 \"ml/parser.ml\"\n               : 'module_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in\n    Obj.repr(\n# 626 \"ml/parser.mly\"\n      ( Mod.attr _1 _2 )\n# 6496 \"ml/parser.ml\"\n               : 'module_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in\n    Obj.repr(\n# 628 \"ml/parser.mly\"\n      ( mkmod(Pmod_extension _1) )\n# 6503 \"ml/parser.ml\"\n               : 'module_expr))\n; (fun __caml_parser_env ->\n    let _2 
= (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in\n    Obj.repr(\n# 633 \"ml/parser.mly\"\n      ( mkmod(Pmod_constraint(_2, _4)) )\n# 6511 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in\n    Obj.repr(\n# 635 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 5 )\n# 6519 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in\n    Obj.repr(\n# 637 \"ml/parser.mly\"\n      ( _2 )\n# 6526 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in\n    Obj.repr(\n# 639 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 3 )\n# 6533 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 641 \"ml/parser.mly\"\n      ( mkmod ~attrs:_3 (Pmod_unpack _4))\n# 6541 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in\n    Obj.repr(\n# 643 \"ml/parser.mly\"\n      ( mkmod ~attrs:_3\n          (Pmod_unpack(\n               ghexp(Pexp_constraint(_4, ghtyp(Ptyp_package _6))))) )\n# 6552 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 5 : 'expr) in\n    let _6 = 
(Parsing.peek_val __caml_parser_env 3 : 'package_type) in\n    let _8 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in\n    Obj.repr(\n# 648 \"ml/parser.mly\"\n      ( mkmod ~attrs:_3\n          (Pmod_unpack(\n               ghexp(Pexp_coerce(_4, Some(ghtyp(Ptyp_package _6)),\n                                 ghtyp(Ptyp_package _8))))) )\n# 6565 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in\n    Obj.repr(\n# 653 \"ml/parser.mly\"\n      ( mkmod ~attrs:_3\n          (Pmod_unpack(\n               ghexp(Pexp_coerce(_4, None, ghtyp(Ptyp_package _6))))) )\n# 6576 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    Obj.repr(\n# 657 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 6 )\n# 6584 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    Obj.repr(\n# 659 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 6 )\n# 6592 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 661 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 5 )\n# 6600 \"ml/parser.ml\"\n               : 'paren_module_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'post_item_attributes) in\n    let _3 = (Parsing.peek_val 
__caml_parser_env 0 : 'structure_tail) in\n    Obj.repr(\n# 666 \"ml/parser.mly\"\n      ( mark_rhs_docs 1 2;\n        (text_str 1) @ mkstrexp _1 _2 :: _3 )\n# 6610 \"ml/parser.ml\"\n               : 'structure))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'structure_tail) in\n    Obj.repr(\n# 668 \"ml/parser.mly\"\n                   ( _1 )\n# 6617 \"ml/parser.ml\"\n               : 'structure))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 671 \"ml/parser.mly\"\n                         ( [] )\n# 6623 \"ml/parser.ml\"\n               : 'structure_tail))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'structure) in\n    Obj.repr(\n# 672 \"ml/parser.mly\"\n                         ( (text_str 1) @ _2 )\n# 6630 \"ml/parser.ml\"\n               : 'structure_tail))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'structure_item) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'structure_tail) in\n    Obj.repr(\n# 673 \"ml/parser.mly\"\n                                  ( (text_str 1) @ _1 :: _2 )\n# 6638 \"ml/parser.ml\"\n               : 'structure_tail))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'let_bindings) in\n    Obj.repr(\n# 677 \"ml/parser.mly\"\n      ( val_of_let_bindings _1 )\n# 6645 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'primitive_declaration) in\n    Obj.repr(\n# 679 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mkstr_ext (Pstr_primitive body) ext )\n# 6652 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'value_description) in\n    Obj.repr(\n# 681 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mkstr_ext (Pstr_primitive body) ext )\n# 6659 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun 
__caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_declarations) in\n    Obj.repr(\n# 683 \"ml/parser.mly\"\n      ( let (nr, l, ext ) = _1 in mkstr_ext (Pstr_type (nr, List.rev l)) ext )\n# 6666 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'str_type_extension) in\n    Obj.repr(\n# 685 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in mkstr_ext (Pstr_typext l) ext )\n# 6673 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'str_exception_declaration) in\n    Obj.repr(\n# 687 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in mkstr_ext (Pstr_exception l) ext )\n# 6680 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_binding) in\n    Obj.repr(\n# 689 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mkstr_ext (Pstr_module body) ext )\n# 6687 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_bindings) in\n    Obj.repr(\n# 691 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in mkstr_ext (Pstr_recmodule(List.rev l)) ext )\n# 6694 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_type_declaration) in\n    Obj.repr(\n# 693 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mkstr_ext (Pstr_modtype body) ext )\n# 6701 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'open_statement) in\n    Obj.repr(\n# 695 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mkstr_ext (Pstr_open body) ext )\n# 6708 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val 
__caml_parser_env 0 : 'class_type_declarations) in\n    Obj.repr(\n# 697 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in mkstr_ext (Pstr_class_type (List.rev l)) ext )\n# 6715 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'str_include_statement) in\n    Obj.repr(\n# 699 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mkstr_ext (Pstr_include body) ext )\n# 6722 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 701 \"ml/parser.mly\"\n      ( mkstr(Pstr_extension (_1, (add_docs_attrs (symbol_docs ()) _2))) )\n# 6730 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in\n    Obj.repr(\n# 703 \"ml/parser.mly\"\n      ( mark_symbol_docs ();\n        mkstr(Pstr_attribute _1) )\n# 6738 \"ml/parser.ml\"\n               : 'structure_item))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 708 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        Incl.mk _3 ~attrs:(attrs@_4)\n            ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext )\n# 6750 \"ml/parser.ml\"\n               : 'str_include_statement))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in\n    Obj.repr(\n# 715 \"ml/parser.mly\"\n      ( _2 )\n# 6757 \"ml/parser.ml\"\n               : 'module_binding_body))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in\n    let _4 = 
(Parsing.peek_val __caml_parser_env 0 : 'module_expr) in\n    Obj.repr(\n# 717 \"ml/parser.mly\"\n      ( mkmod(Pmod_constraint(_4, _2)) )\n# 6765 \"ml/parser.ml\"\n               : 'module_binding_body))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'functor_arg) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_binding_body) in\n    Obj.repr(\n# 719 \"ml/parser.mly\"\n      ( mkmod(Pmod_functor(fst _1, snd _1, _2)) )\n# 6773 \"ml/parser.ml\"\n               : 'module_binding_body))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_binding_body) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 723 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        Mb.mk (mkrhs _3 3) _4 ~attrs:(attrs@_5)\n            ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n      , ext )\n# 6786 \"ml/parser.ml\"\n               : 'module_binding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_binding) in\n    Obj.repr(\n# 729 \"ml/parser.mly\"\n                                           ( let (b, ext) = _1 in ([b], ext) )\n# 6793 \"ml/parser.ml\"\n               : 'rec_module_bindings))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'rec_module_bindings) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_module_binding) in\n    Obj.repr(\n# 731 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in (_2 :: l, ext) )\n# 6801 \"ml/parser.ml\"\n               : 'rec_module_bindings))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 
'module_binding_body) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 735 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        Mb.mk (mkrhs _4 4) _5 ~attrs:(attrs@_6)\n            ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n      , ext )\n# 6814 \"ml/parser.ml\"\n               : 'rec_module_binding))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_binding_body) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 742 \"ml/parser.mly\"\n      ( Mb.mk (mkrhs _3 3) _4 ~attrs:(_2@_5) ~loc:(symbol_rloc ())\n               ~text:(symbol_text ()) ~docs:(symbol_docs ()) )\n# 6825 \"ml/parser.ml\"\n               : 'and_module_binding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'mty_longident) in\n    Obj.repr(\n# 750 \"ml/parser.mly\"\n      ( mkmty(Pmty_ident (mkrhs _1 1)) )\n# 6832 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'signature) in\n    Obj.repr(\n# 752 \"ml/parser.mly\"\n      ( mkmty ~attrs:_2 (Pmty_signature (extra_sig 3 _3)) )\n# 6840 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'signature) in\n    Obj.repr(\n# 754 \"ml/parser.mly\"\n      ( unclosed \"sig\" 1 \"end\" 4 )\n# 6848 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'functor_args) in\n    let _5 = 
(Parsing.peek_val __caml_parser_env 0 : 'module_type) in\n    Obj.repr(\n# 757 \"ml/parser.mly\"\n      ( let mty =\n          List.fold_left\n            (fun acc (n, t) -> mkmty(Pmty_functor(n, t, acc)))\n            _5 _3\n        in wrap_mty_attrs mty _2 )\n# 6861 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in\n    Obj.repr(\n# 764 \"ml/parser.mly\"\n      ( mkmty(Pmty_functor(mknoloc \"_\", Some _1, _3)) )\n# 6869 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'with_constraints) in\n    Obj.repr(\n# 766 \"ml/parser.mly\"\n      ( mkmty(Pmty_with(_1, List.rev _3)) )\n# 6877 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'module_expr) in\n    Obj.repr(\n# 768 \"ml/parser.mly\"\n      ( mkmty ~attrs:_4 (Pmty_typeof _5) )\n# 6885 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in\n    Obj.repr(\n# 772 \"ml/parser.mly\"\n      ( _2 )\n# 6892 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in\n    Obj.repr(\n# 774 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 3 )\n# 6899 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in\n    Obj.repr(\n# 776 \"ml/parser.mly\"\n      ( mkmty(Pmty_extension _1) )\n# 6906 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    let _1 = 
(Parsing.peek_val __caml_parser_env 1 : 'module_type) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in\n    Obj.repr(\n# 778 \"ml/parser.mly\"\n      ( Mty.attr _1 _2 )\n# 6914 \"ml/parser.ml\"\n               : 'module_type))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 781 \"ml/parser.mly\"\n                         ( [] )\n# 6920 \"ml/parser.ml\"\n               : 'signature))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'signature) in\n    Obj.repr(\n# 782 \"ml/parser.mly\"\n                         ( (text_sig 1) @ _2 )\n# 6927 \"ml/parser.ml\"\n               : 'signature))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'signature_item) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'signature) in\n    Obj.repr(\n# 783 \"ml/parser.mly\"\n                             ( (text_sig 1) @ _1 :: _2 )\n# 6935 \"ml/parser.ml\"\n               : 'signature))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'value_description) in\n    Obj.repr(\n# 787 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mksig_ext (Psig_value body) ext )\n# 6942 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'primitive_declaration) in\n    Obj.repr(\n# 789 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mksig_ext (Psig_value body) ext)\n# 6949 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_declarations) in\n    Obj.repr(\n# 791 \"ml/parser.mly\"\n      ( let (nr, l, ext) = _1 in mksig_ext (Psig_type (nr, List.rev l)) ext )\n# 6956 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_type_extension) in\n    Obj.repr(\n# 793 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in mksig_ext 
(Psig_typext l) ext )\n# 6963 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_exception_declaration) in\n    Obj.repr(\n# 795 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in mksig_ext (Psig_exception l) ext )\n# 6970 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_declaration) in\n    Obj.repr(\n# 797 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mksig_ext (Psig_module body) ext )\n# 6977 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_alias) in\n    Obj.repr(\n# 799 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mksig_ext (Psig_module body) ext )\n# 6984 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_declarations) in\n    Obj.repr(\n# 801 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in mksig_ext (Psig_recmodule (List.rev l)) ext )\n# 6991 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_type_declaration) in\n    Obj.repr(\n# 803 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mksig_ext (Psig_modtype body) ext )\n# 6998 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'open_statement) in\n    Obj.repr(\n# 805 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mksig_ext (Psig_open body) ext )\n# 7005 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_include_statement) in\n    Obj.repr(\n# 807 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in mksig_ext (Psig_include body) ext )\n# 7012 \"ml/parser.ml\"\n     
          : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_type_declarations) in\n    Obj.repr(\n# 809 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in mksig_ext (Psig_class_type (List.rev l)) ext )\n# 7019 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 811 \"ml/parser.mly\"\n      ( mksig(Psig_extension (_1, (add_docs_attrs (symbol_docs ()) _2))) )\n# 7027 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in\n    Obj.repr(\n# 813 \"ml/parser.mly\"\n      ( mark_symbol_docs ();\n        mksig(Psig_attribute _1) )\n# 7035 \"ml/parser.ml\"\n               : 'signature_item))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'override_flag) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'mod_longident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 818 \"ml/parser.mly\"\n      ( let (ext, attrs) = _3 in\n        Opn.mk (mkrhs _4 4) ~override:_2 ~attrs:(attrs@_5)\n          ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext)\n# 7048 \"ml/parser.ml\"\n               : 'open_statement))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 825 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        Incl.mk _3 ~attrs:(attrs@_4)\n            ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext)\n# 7060 
\"ml/parser.ml\"\n               : 'sig_include_statement))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in\n    Obj.repr(\n# 832 \"ml/parser.mly\"\n      ( _2 )\n# 7067 \"ml/parser.ml\"\n               : 'module_declaration_body))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'module_type) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'module_declaration_body) in\n    Obj.repr(\n# 834 \"ml/parser.mly\"\n      ( mkmty(Pmty_functor(mkrhs _2 2, Some _4, _6)) )\n# 7076 \"ml/parser.ml\"\n               : 'module_declaration_body))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'module_declaration_body) in\n    Obj.repr(\n# 836 \"ml/parser.mly\"\n      ( mkmty(Pmty_functor(mkrhs \"*\" 1, None, _3)) )\n# 7083 \"ml/parser.ml\"\n               : 'module_declaration_body))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_declaration_body) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 840 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        Md.mk (mkrhs _3 3) _4 ~attrs:(attrs@_5)\n          ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext )\n# 7096 \"ml/parser.ml\"\n               : 'module_declaration))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'mod_longident) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 847 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        Md.mk (mkrhs _3 3)\n          
(Mty.alias ~loc:(rhs_loc 5) (mkrhs _5 5)) ~attrs:(attrs@_6)\n             ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext )\n# 7110 \"ml/parser.ml\"\n               : 'module_alias))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'rec_module_declaration) in\n    Obj.repr(\n# 855 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in ([body], ext) )\n# 7117 \"ml/parser.ml\"\n               : 'rec_module_declarations))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'rec_module_declarations) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_module_declaration) in\n    Obj.repr(\n# 857 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in (_2 :: l, ext) )\n# 7125 \"ml/parser.ml\"\n               : 'rec_module_declarations))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 861 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        Md.mk (mkrhs _4 4) _6 ~attrs:(attrs@_7)\n            ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext)\n# 7138 \"ml/parser.ml\"\n               : 'rec_module_declaration))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'module_type) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 868 \"ml/parser.mly\"\n      ( Md.mk (mkrhs _3 3) _5 ~attrs:(_2@_6) ~loc:(symbol_rloc())\n              ~text:(symbol_text()) ~docs:(symbol_docs()) )\n# 7149 \"ml/parser.ml\"\n               : 'and_module_declaration))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 872 
\"ml/parser.mly\"\n                              ( None )\n# 7155 \"ml/parser.ml\"\n               : 'module_type_declaration_body))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in\n    Obj.repr(\n# 873 \"ml/parser.mly\"\n                              ( Some _2 )\n# 7162 \"ml/parser.ml\"\n               : 'module_type_declaration_body))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'ident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'module_type_declaration_body) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 878 \"ml/parser.mly\"\n      ( let (ext, attrs) = _3 in\n        Mtd.mk (mkrhs _4 4) ?typ:_5 ~attrs:(attrs@_6)\n          ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext )\n# 7175 \"ml/parser.ml\"\n               : 'module_type_declaration))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 886 \"ml/parser.mly\"\n                                                ( [] )\n# 7181 \"ml/parser.ml\"\n               : 'class_type_parameters))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'type_parameter_list) in\n    Obj.repr(\n# 887 \"ml/parser.mly\"\n                                                ( List.rev _2 )\n# 7188 \"ml/parser.ml\"\n               : 'class_type_parameters))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_self_pattern) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_fields) in\n    Obj.repr(\n# 891 \"ml/parser.mly\"\n       ( Cstr.mk _1 (extra_cstr 2 (List.rev _2)) )\n# 7196 \"ml/parser.ml\"\n               : 'class_structure))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in\n    Obj.repr(\n# 895 \"ml/parser.mly\"\n      ( reloc_pat _2 )\n# 7203 \"ml/parser.ml\"\n           
    : 'class_self_pattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in\n    Obj.repr(\n# 897 \"ml/parser.mly\"\n      ( mkpat(Ppat_constraint(_2, _4)) )\n# 7211 \"ml/parser.ml\"\n               : 'class_self_pattern))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 899 \"ml/parser.mly\"\n      ( ghpat(Ppat_any) )\n# 7217 \"ml/parser.ml\"\n               : 'class_self_pattern))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 903 \"ml/parser.mly\"\n      ( [] )\n# 7223 \"ml/parser.ml\"\n               : 'class_fields))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_fields) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_field) in\n    Obj.repr(\n# 905 \"ml/parser.mly\"\n      ( _2 :: (text_cstr 2) @ _1 )\n# 7231 \"ml/parser.ml\"\n               : 'class_fields))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'value) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 909 \"ml/parser.mly\"\n      ( let v, attrs = _2 in\n        mkcf (Pcf_val v) ~attrs:(attrs@_3) ~docs:(symbol_docs ()) )\n# 7240 \"ml/parser.ml\"\n               : 'class_field))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'method_) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 912 \"ml/parser.mly\"\n      ( let meth, attrs = _2 in\n        mkcf (Pcf_method meth) ~attrs:(attrs@_3) ~docs:(symbol_docs ()) )\n# 7249 \"ml/parser.ml\"\n               : 'class_field))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'constrain_field) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 915 \"ml/parser.mly\"\n  
    ( mkcf (Pcf_constraint _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) )\n# 7258 \"ml/parser.ml\"\n               : 'class_field))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 917 \"ml/parser.mly\"\n      ( mkcf (Pcf_initializer _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) )\n# 7267 \"ml/parser.ml\"\n               : 'class_field))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 919 \"ml/parser.mly\"\n      ( mkcf (Pcf_extension _1) ~attrs:_2 ~docs:(symbol_docs ()) )\n# 7275 \"ml/parser.ml\"\n               : 'class_field))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in\n    Obj.repr(\n# 921 \"ml/parser.mly\"\n      ( mark_symbol_docs ();\n        mkcf (Pcf_attribute _1) )\n# 7283 \"ml/parser.ml\"\n               : 'class_field))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 927 \"ml/parser.mly\"\n      ( if _1 = Override then syntax_error ();\n        (mkloc _5 (rhs_loc 5), Mutable, Cfk_virtual _7), _2 )\n# 7294 \"ml/parser.ml\"\n               : 'value))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'mutable_flag) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _7 = 
(Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 930 \"ml/parser.mly\"\n      ( if _1 = Override then syntax_error ();\n        (mkrhs _5 5, _4, Cfk_virtual _7), _2 )\n# 7306 \"ml/parser.ml\"\n               : 'value))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'override_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'mutable_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 933 \"ml/parser.mly\"\n      ( (mkrhs _4 4, _3, Cfk_concrete (_1, _6)), _2 )\n# 7317 \"ml/parser.ml\"\n               : 'value))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mutable_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'label) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'type_constraint) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 935 \"ml/parser.mly\"\n      (\n       let e = mkexp_constraint _7 _5 in\n       (mkrhs _4 4, _3, Cfk_concrete (_1, e)), _2\n      )\n# 7332 \"ml/parser.ml\"\n               : 'value))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'override_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'poly_type) in\n    Obj.repr(\n# 943 \"ml/parser.mly\"\n      ( if _1 = Override then syntax_error ();\n        (mkloc _5 (rhs_loc 5), Private, Cfk_virtual _7), _2 )\n# 7343 \"ml/parser.ml\"\n               : 'method_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val 
__caml_parser_env 6 : 'override_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'private_flag) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'poly_type) in\n    Obj.repr(\n# 946 \"ml/parser.mly\"\n      ( if _1 = Override then syntax_error ();\n        (mkloc _5 (rhs_loc 5), _4, Cfk_virtual _7), _2 )\n# 7355 \"ml/parser.ml\"\n               : 'method_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'private_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'label) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'strict_binding) in\n    Obj.repr(\n# 949 \"ml/parser.mly\"\n      ( (mkloc _4 (rhs_loc 4), _3,\n        Cfk_concrete (_1, ghexp(Pexp_poly (_5, None)))), _2 )\n# 7367 \"ml/parser.ml\"\n               : 'method_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'override_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'private_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 4 : 'label) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'poly_type) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 952 \"ml/parser.mly\"\n      ( (mkloc _4 (rhs_loc 4), _3,\n        Cfk_concrete (_1, ghexp(Pexp_poly(_8, Some _6)))), _2 )\n# 7380 \"ml/parser.ml\"\n               : 'method_))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 10 : 'override_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 9 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 8 : 'private_flag) in\n    let _4 = 
(Parsing.peek_val __caml_parser_env 7 : 'label) in\n    let _7 = (Parsing.peek_val __caml_parser_env 4 : 'lident_list) in\n    let _9 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in\n    let _11 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 956 \"ml/parser.mly\"\n      ( let exp, poly = wrap_type_annotation _7 _9 _11 in\n        (mkloc _4 (rhs_loc 4), _3,\n        Cfk_concrete (_1, ghexp(Pexp_poly(exp, Some poly)))), _2 )\n# 7395 \"ml/parser.ml\"\n               : 'method_))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'clty_longident) in\n    Obj.repr(\n# 965 \"ml/parser.mly\"\n      ( mkcty(Pcty_constr (mkloc _4 (rhs_loc 4), List.rev _2)) )\n# 7403 \"ml/parser.ml\"\n               : 'class_signature))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'clty_longident) in\n    Obj.repr(\n# 967 \"ml/parser.mly\"\n      ( mkcty(Pcty_constr (mkrhs _1 1, [])) )\n# 7410 \"ml/parser.ml\"\n               : 'class_signature))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_sig_body) in\n    Obj.repr(\n# 969 \"ml/parser.mly\"\n      ( mkcty ~attrs:_2 (Pcty_signature _3) )\n# 7418 \"ml/parser.ml\"\n               : 'class_signature))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_sig_body) in\n    Obj.repr(\n# 971 \"ml/parser.mly\"\n      ( unclosed \"object\" 1 \"end\" 4 )\n# 7426 \"ml/parser.ml\"\n               : 'class_signature))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in\n    Obj.repr(\n# 973 \"ml/parser.mly\"\n      ( Cty.attr _1 _2 
)\n# 7434 \"ml/parser.ml\"\n               : 'class_signature))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in\n    Obj.repr(\n# 975 \"ml/parser.mly\"\n      ( mkcty(Pcty_extension _1) )\n# 7441 \"ml/parser.ml\"\n               : 'class_signature))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'override_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'attributes) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'class_signature) in\n    Obj.repr(\n# 977 \"ml/parser.mly\"\n      ( wrap_class_type_attrs (mkcty(Pcty_open(_3, mkrhs _5 5, _7))) _4 )\n# 7451 \"ml/parser.ml\"\n               : 'class_signature))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_self_type) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_sig_fields) in\n    Obj.repr(\n# 981 \"ml/parser.mly\"\n      ( Csig.mk _1 (extra_csig 2 (List.rev _2)) )\n# 7459 \"ml/parser.ml\"\n               : 'class_sig_body))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in\n    Obj.repr(\n# 985 \"ml/parser.mly\"\n      ( _2 )\n# 7466 \"ml/parser.ml\"\n               : 'class_self_type))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 987 \"ml/parser.mly\"\n      ( mktyp(Ptyp_any) )\n# 7472 \"ml/parser.ml\"\n               : 'class_self_type))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 990 \"ml/parser.mly\"\n                                                ( [] )\n# 7478 \"ml/parser.ml\"\n               : 'class_sig_fields))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_sig_fields) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_sig_field) in\n    Obj.repr(\n# 991 \"ml/parser.mly\"\n                                       ( _2 :: (text_csig 2) @ _1 )\n# 7486 
\"ml/parser.ml\"\n               : 'class_sig_fields))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 995 \"ml/parser.mly\"\n      ( mkctf (Pctf_inherit _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) )\n# 7495 \"ml/parser.ml\"\n               : 'class_sig_field))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'value_type) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 997 \"ml/parser.mly\"\n      ( mkctf (Pctf_val _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) )\n# 7504 \"ml/parser.ml\"\n               : 'class_sig_field))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'private_virtual_flags) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'label) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'poly_type) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1000 \"ml/parser.mly\"\n      (\n       let (p, v) = _3 in\n       mkctf (Pctf_method (mkrhs _4 4, p, v, _6)) ~attrs:(_2@_7) ~docs:(symbol_docs ())\n      )\n# 7518 \"ml/parser.ml\"\n               : 'class_sig_field))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'constrain_field) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1005 \"ml/parser.mly\"\n      ( mkctf (Pctf_constraint _3) ~attrs:(_2@_4) ~docs:(symbol_docs ()) )\n# 7527 \"ml/parser.ml\"\n               : 'class_sig_field))\n; (fun __caml_parser_env ->\n  
  let _1 = (Parsing.peek_val __caml_parser_env 1 : 'item_extension) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1007 \"ml/parser.mly\"\n      ( mkctf (Pctf_extension _1) ~attrs:_2 ~docs:(symbol_docs ()) )\n# 7535 \"ml/parser.ml\"\n               : 'class_sig_field))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'floating_attribute) in\n    Obj.repr(\n# 1009 \"ml/parser.mly\"\n      ( mark_symbol_docs ();\n        mkctf(Pctf_attribute _1) )\n# 7543 \"ml/parser.ml\"\n               : 'class_sig_field))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'mutable_flag) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1014 \"ml/parser.mly\"\n      ( mkrhs _3 3, _2, Virtual, _5 )\n# 7552 \"ml/parser.ml\"\n               : 'value_type))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'virtual_flag) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1016 \"ml/parser.mly\"\n      ( mkrhs _3 3, Mutable, _2, _5 )\n# 7561 \"ml/parser.ml\"\n               : 'value_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1018 \"ml/parser.mly\"\n      ( mkrhs _1 1, Immutable, Concrete, _3 )\n# 7569 \"ml/parser.ml\"\n               : 'value_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1021 \"ml/parser.mly\"\n                                           ( _1, _3, symbol_rloc() )\n# 7577 \"ml/parser.ml\"\n               : 'constrain))\n; (fun 
__caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1024 \"ml/parser.mly\"\n                                           ( _1, _3 )\n# 7585 \"ml/parser.ml\"\n               : 'constrain_field))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'class_type_declaration) in\n    Obj.repr(\n# 1028 \"ml/parser.mly\"\n      ( let (body, ext) = _1 in ([body],ext) )\n# 7592 \"ml/parser.ml\"\n               : 'class_type_declarations))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'class_type_declarations) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_class_type_declaration) in\n    Obj.repr(\n# 1030 \"ml/parser.mly\"\n      ( let (l, ext) = _1 in (_2 :: l, ext) )\n# 7600 \"ml/parser.ml\"\n               : 'class_type_declarations))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 5 : 'virtual_flag) in\n    let _5 = (Parsing.peek_val __caml_parser_env 4 : 'class_type_parameters) in\n    let _6 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _8 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in\n    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1035 \"ml/parser.mly\"\n      ( let (ext, attrs) = _3 in\n        Ci.mk (mkrhs _6 6) _8 ~virt:_4 ~params:_5 ~attrs:(attrs@_9)\n            ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n      , ext)\n# 7615 \"ml/parser.ml\"\n               : 'class_type_declaration))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'virtual_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 4 : 'class_type_parameters) in\n    let _5 = (Parsing.peek_val 
__caml_parser_env 3 : string) in\n    let _7 = (Parsing.peek_val __caml_parser_env 1 : 'class_signature) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1043 \"ml/parser.mly\"\n      ( Ci.mk (mkrhs _5 5) _7 ~virt:_3 ~params:_4\n         ~attrs:(_2@_8) ~loc:(symbol_rloc ())\n         ~text:(symbol_text ()) ~docs:(symbol_docs ()) )\n# 7629 \"ml/parser.ml\"\n               : 'and_class_type_declaration))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1051 \"ml/parser.mly\"\n                                  ( _1 )\n# 7636 \"ml/parser.ml\"\n               : 'seq_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1052 \"ml/parser.mly\"\n                                  ( _1 )\n# 7643 \"ml/parser.ml\"\n               : 'seq_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1053 \"ml/parser.mly\"\n                                  ( mkexp(Pexp_sequence(_1, _3)) )\n# 7651 \"ml/parser.ml\"\n               : 'seq_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'attr_id) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1055 \"ml/parser.mly\"\n      ( let seq = mkexp(Pexp_sequence (_1, _5)) in\n        let payload = PStr [mkstrexp seq []] in\n        mkexp (Pexp_extension (_4, payload)) )\n# 7662 \"ml/parser.ml\"\n               : 'seq_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label_let_pattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'opt_default) in\n    Obj.repr(\n# 1061 \"ml/parser.mly\"\n      ( (Optional (fst _3), _4, snd _3) )\n# 7670 
\"ml/parser.ml\"\n               : 'labeled_simple_pattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_var) in\n    Obj.repr(\n# 1063 \"ml/parser.mly\"\n      ( (Optional (fst _2), None, snd _2) )\n# 7677 \"ml/parser.ml\"\n               : 'labeled_simple_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'let_pattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'opt_default) in\n    Obj.repr(\n# 1065 \"ml/parser.mly\"\n      ( (Optional _1, _4, _3) )\n# 7686 \"ml/parser.ml\"\n               : 'labeled_simple_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_var) in\n    Obj.repr(\n# 1067 \"ml/parser.mly\"\n      ( (Optional _1, None, _2) )\n# 7694 \"ml/parser.ml\"\n               : 'labeled_simple_pattern))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'label_let_pattern) in\n    Obj.repr(\n# 1069 \"ml/parser.mly\"\n      ( (Labelled (fst _3), None, snd _3) )\n# 7701 \"ml/parser.ml\"\n               : 'labeled_simple_pattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_var) in\n    Obj.repr(\n# 1071 \"ml/parser.mly\"\n      ( (Labelled (fst _2), None, snd _2) )\n# 7708 \"ml/parser.ml\"\n               : 'labeled_simple_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in\n    Obj.repr(\n# 1073 \"ml/parser.mly\"\n      ( (Labelled _1, None, _2) )\n# 7716 \"ml/parser.ml\"\n               : 'labeled_simple_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in\n    Obj.repr(\n# 1075 \"ml/parser.mly\"\n      ( (Nolabel, None, 
_1) )\n# 7723 \"ml/parser.ml\"\n               : 'labeled_simple_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1078 \"ml/parser.mly\"\n                      ( mkpat(Ppat_var (mkrhs _1 1)) )\n# 7730 \"ml/parser.ml\"\n               : 'pattern_var))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1079 \"ml/parser.mly\"\n                      ( mkpat Ppat_any )\n# 7736 \"ml/parser.ml\"\n               : 'pattern_var))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1082 \"ml/parser.mly\"\n                                        ( None )\n# 7742 \"ml/parser.ml\"\n               : 'opt_default))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1083 \"ml/parser.mly\"\n                                        ( Some _2 )\n# 7749 \"ml/parser.ml\"\n               : 'opt_default))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_var) in\n    Obj.repr(\n# 1087 \"ml/parser.mly\"\n      ( _1 )\n# 7756 \"ml/parser.ml\"\n               : 'label_let_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label_var) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1089 \"ml/parser.mly\"\n      ( let (lab, pat) = _1 in (lab, mkpat(Ppat_constraint(pat, _3))) )\n# 7764 \"ml/parser.ml\"\n               : 'label_let_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1092 \"ml/parser.mly\"\n              ( (_1, mkpat(Ppat_var (mkrhs _1 1))) )\n# 7771 \"ml/parser.ml\"\n               : 'label_var))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1096 \"ml/parser.mly\"\n      ( _1 )\n# 7778 \"ml/parser.ml\"\n               : 'let_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val 
__caml_parser_env 2 : 'pattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1098 \"ml/parser.mly\"\n      ( mkpat(Ppat_constraint(_1, _3)) )\n# 7786 \"ml/parser.ml\"\n               : 'let_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1102 \"ml/parser.mly\"\n      ( _1 )\n# 7793 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_labeled_expr_list) in\n    Obj.repr(\n# 1104 \"ml/parser.mly\"\n      ( mkexp(Pexp_apply(_1, List.rev _2)) )\n# 7801 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'let_bindings) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1106 \"ml/parser.mly\"\n      ( expr_of_let_bindings _1 _3 )\n# 7809 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'module_binding_body) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1108 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_letmodule(mkrhs _4 4, _5, _7)) _3 )\n# 7819 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'let_exception_declaration) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1110 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_letexception(_4, _6)) _3 )\n# 7828 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _3 = 
(Parsing.peek_val __caml_parser_env 4 : 'override_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1112 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_open(_3, mkrhs _5 5, _7)) _4 )\n# 7838 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_bar) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'match_cases) in\n    Obj.repr(\n# 1114 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_function(List.rev _4)) _2 )\n# 7847 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in\n    Obj.repr(\n# 1116 \"ml/parser.mly\"\n      ( let (l,o,p) = _3 in\n        mkexp_attrs (Pexp_fun(l, o, p, _4)) _2 )\n# 7857 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'lident_list) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in\n    Obj.repr(\n# 1119 \"ml/parser.mly\"\n      ( mkexp_attrs (mk_newtypes _5 _7).pexp_desc _2 )\n# 7866 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_bar) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'match_cases) in\n    Obj.repr(\n# 1121 \"ml/parser.mly\"\n      ( mkexp_attrs 
(Pexp_match(_3, List.rev _6)) _2 )\n# 7876 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_bar) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'match_cases) in\n    Obj.repr(\n# 1123 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_try(_3, List.rev _6)) _2 )\n# 7886 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in\n    Obj.repr(\n# 1125 \"ml/parser.mly\"\n      ( syntax_error() )\n# 7894 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'expr_comma_list) in\n    Obj.repr(\n# 1127 \"ml/parser.mly\"\n      ( mkexp(Pexp_tuple(List.rev _1)) )\n# 7901 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1129 \"ml/parser.mly\"\n      ( mkexp(Pexp_construct(mkrhs _1 1, Some _2)) )\n# 7909 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1131 \"ml/parser.mly\"\n      ( mkexp(Pexp_variant(_1, Some _2)) )\n# 7917 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'seq_expr) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    
Obj.repr(\n# 1133 \"ml/parser.mly\"\n      ( mkexp_attrs(Pexp_ifthenelse(_3, _5, Some _7)) _2 )\n# 7927 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1135 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_ifthenelse(_3, _5, None)) _2 )\n# 7936 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1137 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_while(_3, _5)) _2 )\n# 7945 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 8 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 7 : 'pattern) in\n    let _5 = (Parsing.peek_val __caml_parser_env 5 : 'seq_expr) in\n    let _6 = (Parsing.peek_val __caml_parser_env 4 : 'direction_flag) in\n    let _7 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in\n    let _9 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1140 \"ml/parser.mly\"\n      ( mkexp_attrs(Pexp_for(_3, _5, _7, _6, _9)) _2 )\n# 7957 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1142 \"ml/parser.mly\"\n      ( mkexp_cons (rhs_loc 2) (ghexp(Pexp_tuple[_1;_3])) (symbol_rloc()) )\n# 7965 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let 
_3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1144 \"ml/parser.mly\"\n      ( mkinfix _1 _2 _3 )\n# 7974 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1146 \"ml/parser.mly\"\n      ( mkinfix _1 _2 _3 )\n# 7983 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1148 \"ml/parser.mly\"\n      ( mkinfix _1 _2 _3 )\n# 7992 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1150 \"ml/parser.mly\"\n      ( mkinfix _1 _2 _3 )\n# 8001 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1152 \"ml/parser.mly\"\n      ( mkinfix _1 _2 _3 )\n# 8010 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1154 \"ml/parser.mly\"\n      ( mkinfix _1 \"+\" _3 )\n# 8018 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 
1156 \"ml/parser.mly\"\n      ( mkinfix _1 \"+.\" _3 )\n# 8026 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1158 \"ml/parser.mly\"\n      ( mkinfix _1 \"+=\" _3 )\n# 8034 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1160 \"ml/parser.mly\"\n      ( mkinfix _1 \"-\" _3 )\n# 8042 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1162 \"ml/parser.mly\"\n      ( mkinfix _1 \"-.\" _3 )\n# 8050 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1164 \"ml/parser.mly\"\n      ( mkinfix _1 \"*\" _3 )\n# 8058 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1166 \"ml/parser.mly\"\n      ( mkinfix _1 \"%\" _3 )\n# 8066 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1168 \"ml/parser.mly\"\n      ( mkinfix _1 \"=\" _3 )\n# 8074 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1170 \"ml/parser.mly\"\n    ( 
mkinfix _1 \"<\" _3 )\n# 8082 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1172 \"ml/parser.mly\"\n      ( mkinfix _1 \">\" _3 )\n# 8090 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1174 \"ml/parser.mly\"\n      ( mkinfix _1 \"or\" _3 )\n# 8098 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1176 \"ml/parser.mly\"\n      ( mkinfix _1 \"||\" _3 )\n# 8106 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1178 \"ml/parser.mly\"\n      ( mkinfix _1 \"&\" _3 )\n# 8114 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1180 \"ml/parser.mly\"\n      ( mkinfix _1 \"&&\" _3 )\n# 8122 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1182 \"ml/parser.mly\"\n      ( mkinfix _1 \":=\" _3 )\n# 8130 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'subtractive) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1184 \"ml/parser.mly\"\n      ( mkuminus _1 _2 )\n# 8138 
\"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'additive) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1186 \"ml/parser.mly\"\n      ( mkuplus _1 _2 )\n# 8146 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label_longident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1188 \"ml/parser.mly\"\n      ( mkexp(Pexp_setfield(_1, mkrhs _3 3, _5)) )\n# 8155 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1190 \"ml/parser.mly\"\n      ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function \"Array\" \"set\")),\n                         [Nolabel,_1; Nolabel,_4; Nolabel,_7])) )\n# 8165 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'seq_expr) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1193 \"ml/parser.mly\"\n      ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function \"String\" \"set\")),\n                         [Nolabel,_1; Nolabel,_4; Nolabel,_7])) )\n# 8175 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1196 \"ml/parser.mly\"\n      ( let id = 
mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ _2 ^ \"[]<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _4; Nolabel, _7]) )\n# 8186 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1199 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ _2 ^ \"()<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _4; Nolabel, _7]) )\n# 8197 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1202 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ _2 ^ \"{}<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _4; Nolabel, _7]) )\n# 8208 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 8 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 6 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 5 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in\n    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1205 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3,\".\" ^ _4 ^ \"[]<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _6; Nolabel, _9]) )\n# 8220 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 8 : 'simple_expr) 
in\n    let _3 = (Parsing.peek_val __caml_parser_env 6 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 5 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in\n    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1208 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, \".\" ^ _4 ^ \"()<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _6; Nolabel, _9]) )\n# 8232 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 8 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 6 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 5 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 3 : 'expr) in\n    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1211 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, \".\" ^ _4 ^ \"{}<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, _1; Nolabel, _6; Nolabel, _9]) )\n# 8244 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1214 \"ml/parser.mly\"\n      ( mkexp(Pexp_setinstvar(mkrhs _1 1, _3)) )\n# 8252 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1216 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_assert _3) _2 )\n# 8260 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1218 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_lazy 
_3) _2 )\n# 8268 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_structure) in\n    Obj.repr(\n# 1220 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_object _3) _2 )\n# 8276 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'class_structure) in\n    Obj.repr(\n# 1222 \"ml/parser.mly\"\n      ( unclosed \"object\" 1 \"end\" 4 )\n# 8284 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in\n    Obj.repr(\n# 1224 \"ml/parser.mly\"\n      ( Exp.attr _1 _2 )\n# 8292 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1226 \"ml/parser.mly\"\n     ( not_expecting 1 \"wildcard \\\"_\\\"\" )\n# 8298 \"ml/parser.ml\"\n               : 'expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'val_longident) in\n    Obj.repr(\n# 1230 \"ml/parser.mly\"\n      ( mkexp(Pexp_ident (mkrhs _1 1)) )\n# 8305 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in\n    Obj.repr(\n# 1232 \"ml/parser.mly\"\n      ( mkexp(Pexp_constant _1) )\n# 8312 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constr_longident) in\n    Obj.repr(\n# 1234 \"ml/parser.mly\"\n      ( mkexp(Pexp_construct(mkrhs _1 1, None)) )\n# 8319 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in\n    Obj.repr(\n# 1236 
\"ml/parser.mly\"\n      ( mkexp(Pexp_variant(_1, None)) )\n# 8326 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1238 \"ml/parser.mly\"\n      ( reloc_exp _2 )\n# 8333 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1240 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 3 )\n# 8340 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1242 \"ml/parser.mly\"\n      ( wrap_exp_attrs (reloc_exp _3) _2 (* check location *) )\n# 8348 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in\n    Obj.repr(\n# 1244 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_construct (mkloc (Lident \"()\") (symbol_rloc ()),\n                               None)) _2 )\n# 8356 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1247 \"ml/parser.mly\"\n      ( unclosed \"begin\" 1 \"end\" 4 )\n# 8364 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'type_constraint) in\n    Obj.repr(\n# 1249 \"ml/parser.mly\"\n      ( mkexp_constraint _2 _3 )\n# 8372 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 
'label_longident) in\n    Obj.repr(\n# 1251 \"ml/parser.mly\"\n      ( mkexp(Pexp_field(_1, mkrhs _3 3)) )\n# 8380 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1253 \"ml/parser.mly\"\n      ( mkexp(Pexp_open(Fresh, mkrhs _1 1, _4)) )\n# 8388 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in\n    Obj.repr(\n# 1255 \"ml/parser.mly\"\n      ( mkexp(Pexp_open(Fresh, mkrhs _1 1,\n                        mkexp(Pexp_construct(mkrhs (Lident \"()\") 1, None)))) )\n# 8396 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1258 \"ml/parser.mly\"\n      ( unclosed \"(\" 3 \")\" 5 )\n# 8404 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1260 \"ml/parser.mly\"\n      ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function \"Array\" \"get\")),\n                         [Nolabel,_1; Nolabel,_4])) )\n# 8413 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1263 \"ml/parser.mly\"\n      ( unclosed \"(\" 3 \")\" 5 )\n# 8421 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    
Obj.repr(\n# 1265 \"ml/parser.mly\"\n      ( mkexp(Pexp_apply(ghexp(Pexp_ident(array_function \"String\" \"get\")),\n                         [Nolabel,_1; Nolabel,_4])) )\n# 8430 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'seq_expr) in\n    Obj.repr(\n# 1268 \"ml/parser.mly\"\n      ( unclosed \"[\" 3 \"]\" 5 )\n# 8438 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1270 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ _2 ^ \"[]\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _4]) )\n# 8448 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1273 \"ml/parser.mly\"\n      ( unclosed \"[\" 3 \"]\" 5 )\n# 8457 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1275 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ _2 ^ \"()\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _4]) )\n# 8467 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 
string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1278 \"ml/parser.mly\"\n      ( unclosed \"(\" 3 \")\" 5 )\n# 8476 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1280 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ _2 ^ \"{}\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _4]) )\n# 8486 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1283 \"ml/parser.mly\"\n      ( unclosed \"{\" 3 \"}\" 5 )\n# 8495 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1285 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, \".\" ^ _4 ^ \"[]\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _6]) )\n# 8506 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1288 \"ml/parser.mly\"\n      ( unclosed \"[\" 5 \"]\" 7 )\n# 8516 \"ml/parser.ml\"\n   
            : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1290 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, \".\" ^ _4 ^ \"()\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _6]) )\n# 8527 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1293 \"ml/parser.mly\"\n      ( unclosed \"(\" 5 \")\" 7 )\n# 8537 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1295 \"ml/parser.mly\"\n      ( let id = mkexp @@ Pexp_ident( ghloc @@ Ldot(_3, \".\" ^ _4 ^ \"{}\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, _1; Nolabel, _6]) )\n# 8548 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'expr) in\n    Obj.repr(\n# 1298 \"ml/parser.mly\"\n      ( unclosed \"{\" 5 \"}\" 7 )\n# 8558 \"ml/parser.ml\"\n               : 'simple_expr))\n; 
(fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_expr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'expr_comma_list) in\n    Obj.repr(\n# 1300 \"ml/parser.mly\"\n      ( unclosed \"{\" 3 \"}\" 5 )\n# 8566 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in\n    Obj.repr(\n# 1302 \"ml/parser.mly\"\n      ( let (exten, fields) = _2 in mkexp (Pexp_record(fields, exten)) )\n# 8573 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in\n    Obj.repr(\n# 1304 \"ml/parser.mly\"\n      ( unclosed \"{\" 1 \"}\" 3 )\n# 8580 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in\n    Obj.repr(\n# 1306 \"ml/parser.mly\"\n      ( let (exten, fields) = _4 in\n        let rec_exp = mkexp(Pexp_record(fields, exten)) in\n        mkexp(Pexp_open(Fresh, mkrhs _1 1, rec_exp)) )\n# 8590 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'record_expr) in\n    Obj.repr(\n# 1310 \"ml/parser.mly\"\n      ( unclosed \"{\" 3 \"}\" 5 )\n# 8598 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1312 \"ml/parser.mly\"\n      ( mkexp (Pexp_array(List.rev _2)) )\n# 8606 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in\n    let _3 = (Parsing.peek_val 
__caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1314 \"ml/parser.mly\"\n      ( unclosed \"[|\" 1 \"|]\" 4 )\n# 8614 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1316 \"ml/parser.mly\"\n      ( mkexp (Pexp_array []) )\n# 8620 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1318 \"ml/parser.mly\"\n      ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp(Pexp_array(List.rev _4)))) )\n# 8629 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in\n    Obj.repr(\n# 1320 \"ml/parser.mly\"\n      ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp(Pexp_array []))) )\n# 8636 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1322 \"ml/parser.mly\"\n      ( unclosed \"[|\" 3 \"|]\" 6 )\n# 8645 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1324 \"ml/parser.mly\"\n      ( reloc_exp (mktailexp (rhs_loc 4) (List.rev _2)) )\n# 8653 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1326 \"ml/parser.mly\"\n      ( unclosed \"[\" 1 \"]\" 4 )\n# 8661 
\"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1328 \"ml/parser.mly\"\n      ( let list_exp = reloc_exp (mktailexp (rhs_loc 6) (List.rev _4)) in\n        mkexp(Pexp_open(Fresh, mkrhs _1 1, list_exp)) )\n# 8671 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in\n    Obj.repr(\n# 1331 \"ml/parser.mly\"\n      ( mkexp(Pexp_open(Fresh, mkrhs _1 1,\n                        mkexp(Pexp_construct(mkrhs (Lident \"[]\") 1, None)))) )\n# 8679 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1334 \"ml/parser.mly\"\n      ( unclosed \"[\" 3 \"]\" 6 )\n# 8688 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1336 \"ml/parser.mly\"\n      ( mkexp(Pexp_apply(mkoperator _1 1, [Nolabel,_2])) )\n# 8696 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1338 \"ml/parser.mly\"\n      ( mkexp(Pexp_apply(mkoperator \"!\" 1, [Nolabel,_2])) )\n# 8703 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in\n    Obj.repr(\n# 1340 \"ml/parser.mly\"\n      ( mkexp (Pexp_override _2) 
)\n# 8710 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in\n    Obj.repr(\n# 1342 \"ml/parser.mly\"\n      ( unclosed \"{<\" 1 \">}\" 3 )\n# 8717 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1344 \"ml/parser.mly\"\n      ( mkexp (Pexp_override []))\n# 8723 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in\n    Obj.repr(\n# 1346 \"ml/parser.mly\"\n      ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp (Pexp_override _4))))\n# 8731 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in\n    Obj.repr(\n# 1348 \"ml/parser.mly\"\n      ( mkexp(Pexp_open(Fresh, mkrhs _1 1, mkexp (Pexp_override []))))\n# 8738 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr_list) in\n    Obj.repr(\n# 1350 \"ml/parser.mly\"\n      ( unclosed \"{<\" 3 \">}\" 5 )\n# 8746 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'label) in\n    Obj.repr(\n# 1352 \"ml/parser.mly\"\n      ( mkexp(Pexp_send(_1, mkrhs _3 3)) )\n# 8754 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1354 \"ml/parser.mly\"\n      ( 
mkinfix _1 _2 _3 )\n# 8763 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'module_expr) in\n    Obj.repr(\n# 1356 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_pack _4) _3 )\n# 8771 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in\n    Obj.repr(\n# 1358 \"ml/parser.mly\"\n      ( mkexp_attrs (Pexp_constraint (ghexp (Pexp_pack _4),\n                                      ghtyp (Ptyp_package _6)))\n                    _3 )\n# 8782 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'module_expr) in\n    Obj.repr(\n# 1362 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 6 )\n# 8790 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 8 : 'mod_longident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _6 = (Parsing.peek_val __caml_parser_env 3 : 'module_expr) in\n    let _8 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in\n    Obj.repr(\n# 1365 \"ml/parser.mly\"\n      ( mkexp(Pexp_open(Fresh, mkrhs _1 1,\n        mkexp_attrs (Pexp_constraint (ghexp (Pexp_pack _6),\n                                ghtyp (Ptyp_package _8)))\n                    _5 )) )\n# 8803 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'mod_longident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _6 = 
(Parsing.peek_val __caml_parser_env 2 : 'module_expr) in\n    Obj.repr(\n# 1370 \"ml/parser.mly\"\n      ( unclosed \"(\" 3 \")\" 8 )\n# 8812 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in\n    Obj.repr(\n# 1372 \"ml/parser.mly\"\n      ( mkexp (Pexp_extension _1) )\n# 8819 \"ml/parser.ml\"\n               : 'simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'labeled_simple_expr) in\n    Obj.repr(\n# 1376 \"ml/parser.mly\"\n      ( [_1] )\n# 8826 \"ml/parser.ml\"\n               : 'simple_labeled_expr_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_labeled_expr_list) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'labeled_simple_expr) in\n    Obj.repr(\n# 1378 \"ml/parser.mly\"\n      ( _2 :: _1 )\n# 8834 \"ml/parser.ml\"\n               : 'simple_labeled_expr_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1382 \"ml/parser.mly\"\n      ( (Nolabel, _1) )\n# 8841 \"ml/parser.ml\"\n               : 'labeled_simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_expr) in\n    Obj.repr(\n# 1384 \"ml/parser.mly\"\n      ( _1 )\n# 8848 \"ml/parser.ml\"\n               : 'labeled_simple_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1388 \"ml/parser.mly\"\n      ( (Labelled _1, _2) )\n# 8856 \"ml/parser.ml\"\n               : 'label_expr))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_ident) in\n    Obj.repr(\n# 1390 \"ml/parser.mly\"\n      ( (Labelled (fst _2), snd _2) )\n# 8863 \"ml/parser.ml\"\n               : 'label_expr))\n; (fun __caml_parser_env ->\n   
 let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_ident) in\n    Obj.repr(\n# 1392 \"ml/parser.mly\"\n      ( (Optional (fst _2), snd _2) )\n# 8870 \"ml/parser.ml\"\n               : 'label_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_expr) in\n    Obj.repr(\n# 1394 \"ml/parser.mly\"\n      ( (Optional _1, _2) )\n# 8878 \"ml/parser.ml\"\n               : 'label_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1397 \"ml/parser.mly\"\n             ( (_1, mkexp(Pexp_ident(mkrhs (Lident _1) 1))) )\n# 8885 \"ml/parser.ml\"\n               : 'label_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 1400 \"ml/parser.mly\"\n                                      ( [mkrhs _1 1] )\n# 8892 \"ml/parser.ml\"\n               : 'lident_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'lident_list) in\n    Obj.repr(\n# 1401 \"ml/parser.mly\"\n                                      ( mkrhs _1 1 :: _2 )\n# 8900 \"ml/parser.ml\"\n               : 'lident_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'val_ident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'strict_binding) in\n    Obj.repr(\n# 1405 \"ml/parser.mly\"\n      ( (mkpatvar _1 1, _2) )\n# 8908 \"ml/parser.ml\"\n               : 'let_binding_body))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'val_ident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'type_constraint) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1407 \"ml/parser.mly\"\n      ( let v = mkpatvar _1 1 in (* PR#7344 *)\n        let t =\n          match _2 
with\n            Some t, None -> t\n          | _, Some t -> t\n          | _ -> assert false\n        in\n        (ghpat(Ppat_constraint(v, ghtyp(Ptyp_poly([],t)))),\n         mkexp_constraint _4 _2) )\n# 8925 \"ml/parser.ml\"\n               : 'let_binding_body))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'val_ident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'typevar_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1417 \"ml/parser.mly\"\n      ( (ghpat(Ppat_constraint(mkpatvar _1 1,\n                               ghtyp(Ptyp_poly(List.rev _3,_5)))),\n         _7) )\n# 8937 \"ml/parser.ml\"\n               : 'let_binding_body))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 7 : 'val_ident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 4 : 'lident_list) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1421 \"ml/parser.mly\"\n      ( let exp, poly = wrap_type_annotation _4 _6 _8 in\n        (ghpat(Ppat_constraint(mkpatvar _1 1, poly)), exp) )\n# 8948 \"ml/parser.ml\"\n               : 'let_binding_body))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1424 \"ml/parser.mly\"\n      ( (_1, _3) )\n# 8956 \"ml/parser.ml\"\n               : 'let_binding_body))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'simple_pattern_not_ident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1426 \"ml/parser.mly\"\n      ( (ghpat(Ppat_constraint(_1, _3)), _5) )\n# 8965 \"ml/parser.ml\"\n 
              : 'let_binding_body))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'let_binding) in\n    Obj.repr(\n# 1429 \"ml/parser.mly\"\n                                                ( _1 )\n# 8972 \"ml/parser.ml\"\n               : 'let_bindings))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'let_bindings) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_let_binding) in\n    Obj.repr(\n# 1430 \"ml/parser.mly\"\n                                                ( addlb _1 _2 )\n# 8980 \"ml/parser.ml\"\n               : 'let_bindings))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'rec_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'let_binding_body) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1434 \"ml/parser.mly\"\n      ( let (ext, attr) = _2 in\n        mklbs ext _3 (mklb true _4 (attr@_5)) )\n# 8991 \"ml/parser.ml\"\n               : 'let_binding))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'let_binding_body) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1439 \"ml/parser.mly\"\n      ( mklb false _3 (_2@_4) )\n# 9000 \"ml/parser.ml\"\n               : 'and_let_binding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'strict_binding) in\n    Obj.repr(\n# 1443 \"ml/parser.mly\"\n      ( _1 )\n# 9007 \"ml/parser.ml\"\n               : 'fun_binding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'type_constraint) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1445 \"ml/parser.mly\"\n      ( mkexp_constraint _3 _1 )\n# 
9015 \"ml/parser.ml\"\n               : 'fun_binding))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1449 \"ml/parser.mly\"\n      ( _2 )\n# 9022 \"ml/parser.ml\"\n               : 'strict_binding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'fun_binding) in\n    Obj.repr(\n# 1451 \"ml/parser.mly\"\n      ( let (l, o, p) = _1 in ghexp(Pexp_fun(l, o, p, _2)) )\n# 9030 \"ml/parser.ml\"\n               : 'strict_binding))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'lident_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'fun_binding) in\n    Obj.repr(\n# 1453 \"ml/parser.mly\"\n      ( mk_newtypes _3 _5 )\n# 9038 \"ml/parser.ml\"\n               : 'strict_binding))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'match_case) in\n    Obj.repr(\n# 1456 \"ml/parser.mly\"\n               ( [_1] )\n# 9045 \"ml/parser.ml\"\n               : 'match_cases))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'match_cases) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'match_case) in\n    Obj.repr(\n# 1457 \"ml/parser.mly\"\n                               ( _3 :: _1 )\n# 9053 \"ml/parser.ml\"\n               : 'match_cases))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1461 \"ml/parser.mly\"\n      ( Exp.case _1 _3 )\n# 9061 \"ml/parser.ml\"\n               : 'match_case))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'pattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'seq_expr) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1463 
\"ml/parser.mly\"\n      ( Exp.case _1 ~guard:_3 _5 )\n# 9070 \"ml/parser.ml\"\n               : 'match_case))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    Obj.repr(\n# 1465 \"ml/parser.mly\"\n      ( Exp.case _1 (Exp.unreachable ~loc:(rhs_loc 3) ()))\n# 9077 \"ml/parser.ml\"\n               : 'match_case))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1469 \"ml/parser.mly\"\n      ( _2 )\n# 9084 \"ml/parser.ml\"\n               : 'fun_def))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 1471 \"ml/parser.mly\"\n      ( mkexp (Pexp_constraint (_4, _2)) )\n# 9092 \"ml/parser.ml\"\n               : 'fun_def))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'labeled_simple_pattern) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in\n    Obj.repr(\n# 1474 \"ml/parser.mly\"\n      (\n       let (l,o,p) = _1 in\n       ghexp(Pexp_fun(l, o, p, _2))\n      )\n# 9103 \"ml/parser.ml\"\n               : 'fun_def))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'lident_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'fun_def) in\n    Obj.repr(\n# 1479 \"ml/parser.mly\"\n      ( mk_newtypes _3 _5 )\n# 9111 \"ml/parser.ml\"\n               : 'fun_def))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr_comma_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1482 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 9119 \"ml/parser.ml\"\n               : 'expr_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr) in\n    let _3 = (Parsing.peek_val 
__caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1483 \"ml/parser.mly\"\n                                                ( [_3; _1] )\n# 9127 \"ml/parser.ml\"\n               : 'expr_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr_list) in\n    Obj.repr(\n# 1486 \"ml/parser.mly\"\n                                                ( (Some _1, _3) )\n# 9135 \"ml/parser.ml\"\n               : 'record_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr_list) in\n    Obj.repr(\n# 1487 \"ml/parser.mly\"\n                                                ( (None, _1) )\n# 9142 \"ml/parser.ml\"\n               : 'record_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr) in\n    Obj.repr(\n# 1490 \"ml/parser.mly\"\n              ( [_1] )\n# 9149 \"ml/parser.ml\"\n               : 'lbl_expr_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'lbl_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_expr_list) in\n    Obj.repr(\n# 1491 \"ml/parser.mly\"\n                                 ( _1 :: _3 )\n# 9157 \"ml/parser.ml\"\n               : 'lbl_expr_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_expr) in\n    Obj.repr(\n# 1492 \"ml/parser.mly\"\n                   ( [_1] )\n# 9164 \"ml/parser.ml\"\n               : 'lbl_expr_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'label_longident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_type_constraint) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1496 \"ml/parser.mly\"\n      ( (mkrhs _1 1, mkexp_opt_constraint _4 _2) )\n# 9173 \"ml/parser.ml\"\n               : 'lbl_expr))\n; (fun __caml_parser_env ->\n    
let _1 = (Parsing.peek_val __caml_parser_env 1 : 'label_longident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'opt_type_constraint) in\n    Obj.repr(\n# 1498 \"ml/parser.mly\"\n      ( (mkrhs _1 1, mkexp_opt_constraint (exp_of_label _1 1) _2) )\n# 9181 \"ml/parser.ml\"\n               : 'lbl_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'field_expr) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'opt_semi) in\n    Obj.repr(\n# 1501 \"ml/parser.mly\"\n                        ( [_1] )\n# 9189 \"ml/parser.ml\"\n               : 'field_expr_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'field_expr) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'field_expr_list) in\n    Obj.repr(\n# 1502 \"ml/parser.mly\"\n                                    ( _1 :: _3 )\n# 9197 \"ml/parser.ml\"\n               : 'field_expr_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'label) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1506 \"ml/parser.mly\"\n      ( (mkrhs _1 1, _3) )\n# 9205 \"ml/parser.ml\"\n               : 'field_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label) in\n    Obj.repr(\n# 1508 \"ml/parser.mly\"\n      ( (mkrhs _1 1, exp_of_label (Lident _1) 1) )\n# 9212 \"ml/parser.ml\"\n               : 'field_expr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1511 \"ml/parser.mly\"\n                                                ( [_1] )\n# 9219 \"ml/parser.ml\"\n               : 'expr_semi_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'expr_semi_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'expr) in\n    Obj.repr(\n# 1512 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 
9227 \"ml/parser.ml\"\n               : 'expr_semi_list))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1515 \"ml/parser.mly\"\n                                                ( (Some _2, None) )\n# 9234 \"ml/parser.ml\"\n               : 'type_constraint))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1516 \"ml/parser.mly\"\n                                                ( (Some _2, Some _4) )\n# 9242 \"ml/parser.ml\"\n               : 'type_constraint))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1517 \"ml/parser.mly\"\n                                                ( (None, Some _2) )\n# 9249 \"ml/parser.ml\"\n               : 'type_constraint))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1518 \"ml/parser.mly\"\n                                                ( syntax_error() )\n# 9255 \"ml/parser.ml\"\n               : 'type_constraint))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1519 \"ml/parser.mly\"\n                                                ( syntax_error() )\n# 9261 \"ml/parser.ml\"\n               : 'type_constraint))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_constraint) in\n    Obj.repr(\n# 1522 \"ml/parser.mly\"\n                    ( Some _1 )\n# 9268 \"ml/parser.ml\"\n               : 'opt_type_constraint))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1523 \"ml/parser.mly\"\n                ( None )\n# 9274 \"ml/parser.ml\"\n               : 'opt_type_constraint))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in\n    Obj.repr(\n# 1530 \"ml/parser.mly\"\n      ( mkpat(Ppat_alias(_1, mkrhs _3 3)) )\n# 
9282 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    Obj.repr(\n# 1532 \"ml/parser.mly\"\n      ( expecting 3 \"identifier\" )\n# 9289 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_comma_list) in\n    Obj.repr(\n# 1534 \"ml/parser.mly\"\n      ( mkpat(Ppat_tuple(List.rev _1)) )\n# 9296 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1536 \"ml/parser.mly\"\n      ( mkpat_cons (rhs_loc 2) (ghpat(Ppat_tuple[_1;_3])) (symbol_rloc()) )\n# 9304 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    Obj.repr(\n# 1538 \"ml/parser.mly\"\n      ( expecting 3 \"pattern\" )\n# 9311 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1540 \"ml/parser.mly\"\n      ( mkpat(Ppat_or(_1, _3)) )\n# 9319 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    Obj.repr(\n# 1542 \"ml/parser.mly\"\n      ( expecting 3 \"pattern\" )\n# 9326 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1544 \"ml/parser.mly\"\n      ( mkpat_attrs (Ppat_exception _3) _2)\n# 9334 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 
'pattern) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in\n    Obj.repr(\n# 1546 \"ml/parser.mly\"\n      ( Pat.attr _1 _2 )\n# 9342 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_gen) in\n    Obj.repr(\n# 1547 \"ml/parser.mly\"\n                ( _1 )\n# 9349 \"ml/parser.ml\"\n               : 'pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in\n    Obj.repr(\n# 1551 \"ml/parser.mly\"\n      ( mkpat(Ppat_alias(_1, mkrhs _3 3)) )\n# 9357 \"ml/parser.ml\"\n               : 'pattern_no_exn))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in\n    Obj.repr(\n# 1553 \"ml/parser.mly\"\n      ( expecting 3 \"identifier\" )\n# 9364 \"ml/parser.ml\"\n               : 'pattern_no_exn))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_no_exn_comma_list) in\n    Obj.repr(\n# 1555 \"ml/parser.mly\"\n      ( mkpat(Ppat_tuple(List.rev _1)) )\n# 9371 \"ml/parser.ml\"\n               : 'pattern_no_exn))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1557 \"ml/parser.mly\"\n      ( mkpat_cons (rhs_loc 2) (ghpat(Ppat_tuple[_1;_3])) (symbol_rloc()) )\n# 9379 \"ml/parser.ml\"\n               : 'pattern_no_exn))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in\n    Obj.repr(\n# 1559 \"ml/parser.mly\"\n      ( expecting 3 \"pattern\" )\n# 9386 \"ml/parser.ml\"\n               : 'pattern_no_exn))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) 
in\n    Obj.repr(\n# 1561 \"ml/parser.mly\"\n      ( mkpat(Ppat_or(_1, _3)) )\n# 9394 \"ml/parser.ml\"\n               : 'pattern_no_exn))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in\n    Obj.repr(\n# 1563 \"ml/parser.mly\"\n      ( expecting 3 \"pattern\" )\n# 9401 \"ml/parser.ml\"\n               : 'pattern_no_exn))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'pattern_no_exn) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in\n    Obj.repr(\n# 1565 \"ml/parser.mly\"\n      ( Pat.attr _1 _2 )\n# 9409 \"ml/parser.ml\"\n               : 'pattern_no_exn))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern_gen) in\n    Obj.repr(\n# 1566 \"ml/parser.mly\"\n                ( _1 )\n# 9416 \"ml/parser.ml\"\n               : 'pattern_no_exn))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in\n    Obj.repr(\n# 1570 \"ml/parser.mly\"\n      ( _1 )\n# 9423 \"ml/parser.ml\"\n               : 'pattern_gen))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1572 \"ml/parser.mly\"\n      ( mkpat(Ppat_construct(mkrhs _1 1, Some _2)) )\n# 9431 \"ml/parser.ml\"\n               : 'pattern_gen))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1574 \"ml/parser.mly\"\n      ( mkpat(Ppat_variant(_1, Some _2)) )\n# 9439 \"ml/parser.ml\"\n               : 'pattern_gen))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern) in\n    Obj.repr(\n# 1576 \"ml/parser.mly\"\n      ( mkpat_attrs 
(Ppat_lazy _3) _2)\n# 9447 \"ml/parser.ml\"\n               : 'pattern_gen))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in\n    Obj.repr(\n# 1580 \"ml/parser.mly\"\n      ( mkpat(Ppat_var (mkrhs _1 1)) )\n# 9454 \"ml/parser.ml\"\n               : 'simple_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_pattern_not_ident) in\n    Obj.repr(\n# 1581 \"ml/parser.mly\"\n                             ( _1 )\n# 9461 \"ml/parser.ml\"\n               : 'simple_pattern))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1585 \"ml/parser.mly\"\n      ( mkpat(Ppat_any) )\n# 9467 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'signed_constant) in\n    Obj.repr(\n# 1587 \"ml/parser.mly\"\n      ( mkpat(Ppat_constant _1) )\n# 9474 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'signed_constant) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'signed_constant) in\n    Obj.repr(\n# 1589 \"ml/parser.mly\"\n      ( mkpat(Ppat_interval (_1, _3)) )\n# 9482 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constr_longident) in\n    Obj.repr(\n# 1591 \"ml/parser.mly\"\n      ( mkpat(Ppat_construct(mkrhs _1 1, None)) )\n# 9489 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in\n    Obj.repr(\n# 1593 \"ml/parser.mly\"\n      ( mkpat(Ppat_variant(_1, None)) )\n# 9496 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in\n    Obj.repr(\n# 1595 \"ml/parser.mly\"\n      ( 
mkpat(Ppat_type (mkrhs _2 2)) )\n# 9503 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_delimited_pattern) in\n    Obj.repr(\n# 1597 \"ml/parser.mly\"\n      ( _1 )\n# 9510 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_delimited_pattern) in\n    Obj.repr(\n# 1599 \"ml/parser.mly\"\n      ( mkpat @@ Ppat_open(mkrhs _1 1, _3) )\n# 9518 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in\n    Obj.repr(\n# 1601 \"ml/parser.mly\"\n    ( mkpat @@ Ppat_open(mkrhs _1 1, mkpat @@\n               Ppat_construct ( mkrhs (Lident \"[]\") 4, None)) )\n# 9526 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in\n    Obj.repr(\n# 1604 \"ml/parser.mly\"\n      ( mkpat @@ Ppat_open( mkrhs _1 1, mkpat @@\n                 Ppat_construct ( mkrhs (Lident \"()\") 4, None) ) )\n# 9534 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in\n    Obj.repr(\n# 1607 \"ml/parser.mly\"\n      ( mkpat @@ Ppat_open (mkrhs _1 1, _4))\n# 9542 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in\n    Obj.repr(\n# 1609 \"ml/parser.mly\"\n      (unclosed \"(\" 3 \")\" 5  )\n# 9550 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun 
__caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_longident) in\n    Obj.repr(\n# 1611 \"ml/parser.mly\"\n      ( expecting 4 \"pattern\" )\n# 9557 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in\n    Obj.repr(\n# 1613 \"ml/parser.mly\"\n      ( reloc_pat _2 )\n# 9564 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'pattern) in\n    Obj.repr(\n# 1615 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 3 )\n# 9571 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in\n    Obj.repr(\n# 1617 \"ml/parser.mly\"\n      ( mkpat(Ppat_constraint(_2, _4)) )\n# 9579 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in\n    Obj.repr(\n# 1619 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 5 )\n# 9587 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    Obj.repr(\n# 1621 \"ml/parser.mly\"\n      ( expecting 4 \"type\" )\n# 9594 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : string) in\n    Obj.repr(\n# 1623 \"ml/parser.mly\"\n      ( mkpat_attrs (Ppat_unpack (mkrhs _4 4)) _3 )\n# 9602 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val 
__caml_parser_env 4 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in\n    Obj.repr(\n# 1625 \"ml/parser.mly\"\n      ( mkpat_attrs\n          (Ppat_constraint(mkpat(Ppat_unpack (mkrhs _4 4)),\n                           ghtyp(Ptyp_package _6)))\n          _3 )\n# 9614 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'package_type) in\n    Obj.repr(\n# 1630 \"ml/parser.mly\"\n      ( unclosed \"(\" 1 \")\" 7 )\n# 9623 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in\n    Obj.repr(\n# 1632 \"ml/parser.mly\"\n      ( mkpat(Ppat_extension _1) )\n# 9630 \"ml/parser.ml\"\n               : 'simple_pattern_not_ident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_pattern_list) in\n    Obj.repr(\n# 1637 \"ml/parser.mly\"\n    ( let (fields, closed) = _2 in mkpat(Ppat_record(fields, closed)) )\n# 9637 \"ml/parser.ml\"\n               : 'simple_delimited_pattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_pattern_list) in\n    Obj.repr(\n# 1639 \"ml/parser.mly\"\n    ( unclosed \"{\" 1 \"}\" 3 )\n# 9644 \"ml/parser.ml\"\n               : 'simple_delimited_pattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1641 \"ml/parser.mly\"\n    ( reloc_pat (mktailpat (rhs_loc 4) (List.rev _2)) )\n# 9652 \"ml/parser.ml\"\n               : 'simple_delimited_pattern))\n; (fun __caml_parser_env ->\n    let 
_2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1643 \"ml/parser.mly\"\n    ( unclosed \"[\" 1 \"]\" 4 )\n# 9660 \"ml/parser.ml\"\n               : 'simple_delimited_pattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1645 \"ml/parser.mly\"\n    ( mkpat(Ppat_array(List.rev _2)) )\n# 9668 \"ml/parser.ml\"\n               : 'simple_delimited_pattern))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1647 \"ml/parser.mly\"\n    ( mkpat(Ppat_array []) )\n# 9674 \"ml/parser.ml\"\n               : 'simple_delimited_pattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'opt_semi) in\n    Obj.repr(\n# 1649 \"ml/parser.mly\"\n    ( unclosed \"[|\" 1 \"|]\" 4 )\n# 9682 \"ml/parser.ml\"\n               : 'simple_delimited_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_comma_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1652 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 9690 \"ml/parser.ml\"\n               : 'pattern_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1653 \"ml/parser.mly\"\n                                                ( [_3; _1] )\n# 9698 \"ml/parser.ml\"\n               : 'pattern_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    Obj.repr(\n# 1654 \"ml/parser.mly\"\n                                                ( expecting 3 \"pattern\" )\n# 9705 
\"ml/parser.ml\"\n               : 'pattern_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn_comma_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1657 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 9713 \"ml/parser.ml\"\n               : 'pattern_no_exn_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1658 \"ml/parser.mly\"\n                                                ( [_3; _1] )\n# 9721 \"ml/parser.ml\"\n               : 'pattern_no_exn_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_no_exn) in\n    Obj.repr(\n# 1659 \"ml/parser.mly\"\n                                                ( expecting 3 \"pattern\" )\n# 9728 \"ml/parser.ml\"\n               : 'pattern_no_exn_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1662 \"ml/parser.mly\"\n                                                ( [_1] )\n# 9735 \"ml/parser.ml\"\n               : 'pattern_semi_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern_semi_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1663 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 9743 \"ml/parser.ml\"\n               : 'pattern_semi_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_pattern) in\n    Obj.repr(\n# 1666 \"ml/parser.mly\"\n                ( [_1], Closed )\n# 9750 \"ml/parser.ml\"\n               : 'lbl_pattern_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'lbl_pattern) in\n    
Obj.repr(\n# 1667 \"ml/parser.mly\"\n                     ( [_1], Closed )\n# 9757 \"ml/parser.ml\"\n               : 'lbl_pattern_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'lbl_pattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'opt_semi) in\n    Obj.repr(\n# 1668 \"ml/parser.mly\"\n                                         ( [_1], Open )\n# 9765 \"ml/parser.ml\"\n               : 'lbl_pattern_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'lbl_pattern) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'lbl_pattern_list) in\n    Obj.repr(\n# 1670 \"ml/parser.mly\"\n      ( let (fields, closed) = _3 in _1 :: fields, closed )\n# 9773 \"ml/parser.ml\"\n               : 'lbl_pattern_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'label_longident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_pattern_type_constraint) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 1674 \"ml/parser.mly\"\n     ( (mkrhs _1 1, mkpat_opt_constraint _4 _2) )\n# 9782 \"ml/parser.ml\"\n               : 'lbl_pattern))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'label_longident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'opt_pattern_type_constraint) in\n    Obj.repr(\n# 1676 \"ml/parser.mly\"\n     ( (mkrhs _1 1, mkpat_opt_constraint (pat_of_label _1 1) _2) )\n# 9790 \"ml/parser.ml\"\n               : 'lbl_pattern))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1679 \"ml/parser.mly\"\n                    ( Some _2 )\n# 9797 \"ml/parser.ml\"\n               : 'opt_pattern_type_constraint))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1680 \"ml/parser.mly\"\n                ( None )\n# 9803 \"ml/parser.ml\"\n               : 'opt_pattern_type_constraint))\n; (fun 
__caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'val_ident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1687 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        Val.mk (mkrhs _3 3) _5 ~attrs:(attrs@_6)\n              ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext )\n# 9816 \"ml/parser.ml\"\n               : 'value_description))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * string option) in\n    Obj.repr(\n# 1696 \"ml/parser.mly\"\n                                                ( [fst _1] )\n# 9823 \"ml/parser.ml\"\n               : 'primitive_declaration_body))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : string * string option) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'primitive_declaration_body) in\n    Obj.repr(\n# 1697 \"ml/parser.mly\"\n                                                ( fst _1 :: _2 )\n# 9831 \"ml/parser.ml\"\n               : 'primitive_declaration_body))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'val_ident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : 'core_type) in\n    let _7 = (Parsing.peek_val __caml_parser_env 1 : 'primitive_declaration_body) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1702 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        Val.mk (mkrhs _3 3) _5 ~prim:_7 ~attrs:(attrs@_8)\n              ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n      , ext )\n# 9845 \"ml/parser.ml\"\n               : 'primitive_declaration))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val 
__caml_parser_env 0 : 'type_declaration) in\n    Obj.repr(\n# 1712 \"ml/parser.mly\"\n      ( let (nonrec_flag, ty, ext) = _1 in (nonrec_flag, [ty], ext) )\n# 9852 \"ml/parser.ml\"\n               : 'type_declarations))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'type_declarations) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'and_type_declaration) in\n    Obj.repr(\n# 1714 \"ml/parser.mly\"\n      ( let (nonrec_flag, tys, ext) = _1 in (nonrec_flag, _2 :: tys, ext) )\n# 9860 \"ml/parser.ml\"\n               : 'type_declarations))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 6 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 5 : 'nonrec_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 4 : 'optional_type_parameters) in\n    let _5 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _6 = (Parsing.peek_val __caml_parser_env 2 : 'type_kind) in\n    let _7 = (Parsing.peek_val __caml_parser_env 1 : 'constraints) in\n    let _8 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1720 \"ml/parser.mly\"\n      ( let (kind, priv, manifest) = _6 in\n        let (ext, attrs) = _2 in\n        let ty =\n          Type.mk (mkrhs _5 5) ~params:_4 ~cstrs:(List.rev _7) ~kind\n            ~priv ?manifest ~attrs:(attrs@_8)\n            ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n        in\n          (_3, ty, ext) )\n# 9880 \"ml/parser.ml\"\n               : 'type_declaration))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'optional_type_parameters) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'type_kind) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'constraints) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 
'post_item_attributes) in\n    Obj.repr(\n# 1732 \"ml/parser.mly\"\n      ( let (kind, priv, manifest) = _5 in\n          Type.mk (mkrhs _4 4) ~params:_3 ~cstrs:(List.rev _6)\n            ~kind ~priv ?manifest ~attrs:(_2@_7) ~loc:(symbol_rloc ())\n            ~text:(symbol_text ()) ~docs:(symbol_docs ()) )\n# 9895 \"ml/parser.ml\"\n               : 'and_type_declaration))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constraints) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'constrain) in\n    Obj.repr(\n# 1738 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 9903 \"ml/parser.ml\"\n               : 'constraints))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1739 \"ml/parser.mly\"\n                                                ( [] )\n# 9909 \"ml/parser.ml\"\n               : 'constraints))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1743 \"ml/parser.mly\"\n      ( (Ptype_abstract, Public, None) )\n# 9915 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1745 \"ml/parser.mly\"\n      ( (Ptype_abstract, Public, Some _2) )\n# 9922 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1747 \"ml/parser.mly\"\n      ( (Ptype_abstract, Private, Some _3) )\n# 9929 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declarations) in\n    Obj.repr(\n# 1749 \"ml/parser.mly\"\n      ( (Ptype_variant(List.rev _2), Public, None) )\n# 9936 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declarations) in\n    Obj.repr(\n# 1751 \"ml/parser.mly\"\n      ( (Ptype_variant(List.rev _3), 
Private, None) )\n# 9943 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1753 \"ml/parser.mly\"\n      ( (Ptype_open, Public, None) )\n# 9949 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1755 \"ml/parser.mly\"\n      ( (Ptype_open, Private, None) )\n# 9955 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'private_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'label_declarations) in\n    Obj.repr(\n# 1757 \"ml/parser.mly\"\n      ( (Ptype_record _4, _2, None) )\n# 9963 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'core_type) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'private_flag) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declarations) in\n    Obj.repr(\n# 1759 \"ml/parser.mly\"\n      ( (Ptype_variant(List.rev _5), _4, Some _2) )\n# 9972 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'core_type) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'private_flag) in\n    Obj.repr(\n# 1761 \"ml/parser.mly\"\n      ( (Ptype_open, _4, Some _2) )\n# 9980 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'core_type) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'private_flag) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'label_declarations) in\n    Obj.repr(\n# 1763 \"ml/parser.mly\"\n      ( (Ptype_record _6, _4, Some _2) )\n# 9989 \"ml/parser.ml\"\n               : 'type_kind))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1766 \"ml/parser.mly\"\n                                                ( [] )\n# 9995 \"ml/parser.ml\"\n               : 
'optional_type_parameters))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_parameter) in\n    Obj.repr(\n# 1767 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10002 \"ml/parser.ml\"\n               : 'optional_type_parameters))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'optional_type_parameter_list) in\n    Obj.repr(\n# 1768 \"ml/parser.mly\"\n                                                ( List.rev _2 )\n# 10009 \"ml/parser.ml\"\n               : 'optional_type_parameters))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'type_variance) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_variable) in\n    Obj.repr(\n# 1771 \"ml/parser.mly\"\n                                                ( _2, _1 )\n# 10017 \"ml/parser.ml\"\n               : 'optional_type_parameter))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_parameter) in\n    Obj.repr(\n# 1774 \"ml/parser.mly\"\n                                                         ( [_1] )\n# 10024 \"ml/parser.ml\"\n               : 'optional_type_parameter_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'optional_type_parameter_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'optional_type_parameter) in\n    Obj.repr(\n# 1775 \"ml/parser.mly\"\n                                                                  ( _3 :: _1 )\n# 10032 \"ml/parser.ml\"\n               : 'optional_type_parameter_list))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 1778 \"ml/parser.mly\"\n                                                ( mktyp(Ptyp_var _2) )\n# 10039 \"ml/parser.ml\"\n               : 'optional_type_variable))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1779 
\"ml/parser.mly\"\n                                                ( mktyp(Ptyp_any) )\n# 10045 \"ml/parser.ml\"\n               : 'optional_type_variable))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'type_variance) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'type_variable) in\n    Obj.repr(\n# 1784 \"ml/parser.mly\"\n                                                  ( _2, _1 )\n# 10053 \"ml/parser.ml\"\n               : 'type_parameter))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1787 \"ml/parser.mly\"\n                                                ( Invariant )\n# 10059 \"ml/parser.ml\"\n               : 'type_variance))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1788 \"ml/parser.mly\"\n                                                ( Covariant )\n# 10065 \"ml/parser.ml\"\n               : 'type_variance))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1789 \"ml/parser.mly\"\n                                                ( Contravariant )\n# 10071 \"ml/parser.ml\"\n               : 'type_variance))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 1792 \"ml/parser.mly\"\n                                                ( mktyp(Ptyp_var _2) )\n# 10078 \"ml/parser.ml\"\n               : 'type_variable))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_parameter) in\n    Obj.repr(\n# 1795 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10085 \"ml/parser.ml\"\n               : 'type_parameter_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'type_parameter_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'type_parameter) in\n    Obj.repr(\n# 1796 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 10093 \"ml/parser.ml\"\n               : 'type_parameter_list))\n; (fun __caml_parser_env 
->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_declaration) in\n    Obj.repr(\n# 1799 \"ml/parser.mly\"\n                                                         ( [_1] )\n# 10100 \"ml/parser.ml\"\n               : 'constructor_declarations))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_constructor_declaration) in\n    Obj.repr(\n# 1800 \"ml/parser.mly\"\n                                                         ( [_1] )\n# 10107 \"ml/parser.ml\"\n               : 'constructor_declarations))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'constructor_declarations) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_constructor_declaration) in\n    Obj.repr(\n# 1801 \"ml/parser.mly\"\n                                                         ( _2 :: _1 )\n# 10115 \"ml/parser.ml\"\n               : 'constructor_declarations))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 1805 \"ml/parser.mly\"\n      (\n       let args,res = _2 in\n       Type.constructor (mkrhs _1 1) ~args ?res ~attrs:_3\n         ~loc:(symbol_rloc()) ~info:(symbol_info ())\n      )\n# 10128 \"ml/parser.ml\"\n               : 'constructor_declaration))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 1813 \"ml/parser.mly\"\n      (\n       let args,res = _3 in\n       Type.constructor (mkrhs _2 2) ~args ?res ~attrs:_4\n         ~loc:(symbol_rloc()) ~info:(symbol_info ())\n      )\n# 10141 \"ml/parser.ml\"\n               : 
'bar_constructor_declaration))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'sig_exception_declaration) in\n    Obj.repr(\n# 1820 \"ml/parser.mly\"\n                                                 ( _1 )\n# 10148 \"ml/parser.ml\"\n               : 'str_exception_declaration))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 4 : 'constr_ident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'constr_longident) in\n    let _6 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1823 \"ml/parser.mly\"\n      ( let (ext,attrs) = _2 in\n        Te.rebind (mkrhs _3 3) (mkrhs _5 5) ~attrs:(attrs @ _6 @ _7)\n          ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n        , ext )\n# 10162 \"ml/parser.ml\"\n               : 'str_exception_declaration))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'constr_ident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'generalized_constructor_arguments) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'attributes) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1831 \"ml/parser.mly\"\n      ( let args, res = _4 in\n        let (ext,attrs) = _2 in\n          Te.decl (mkrhs _3 3) ~args ?res ~attrs:(attrs @ _5 @ _6)\n            ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n        , ext )\n# 10177 \"ml/parser.ml\"\n               : 'sig_exception_declaration))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 
'attributes) in\n    Obj.repr(\n# 1839 \"ml/parser.mly\"\n      ( let args, res = _2 in\n        Te.decl (mkrhs _1 1) ~args ?res ~attrs:_3 ~loc:(symbol_rloc()) )\n# 10187 \"ml/parser.ml\"\n               : 'let_exception_declaration))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1843 \"ml/parser.mly\"\n                                  ( (Pcstr_tuple [],None) )\n# 10193 \"ml/parser.ml\"\n               : 'generalized_constructor_arguments))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'constructor_arguments) in\n    Obj.repr(\n# 1844 \"ml/parser.mly\"\n                                  ( (_2,None) )\n# 10200 \"ml/parser.ml\"\n               : 'generalized_constructor_arguments))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'constructor_arguments) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in\n    Obj.repr(\n# 1846 \"ml/parser.mly\"\n                                  ( (_2,Some _4) )\n# 10208 \"ml/parser.ml\"\n               : 'generalized_constructor_arguments))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in\n    Obj.repr(\n# 1848 \"ml/parser.mly\"\n                                  ( (Pcstr_tuple [],Some _2) )\n# 10215 \"ml/parser.ml\"\n               : 'generalized_constructor_arguments))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_list) in\n    Obj.repr(\n# 1852 \"ml/parser.mly\"\n                                     ( Pcstr_tuple (List.rev _1) )\n# 10222 \"ml/parser.ml\"\n               : 'constructor_arguments))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'label_declarations) in\n    Obj.repr(\n# 1853 \"ml/parser.mly\"\n                                     ( Pcstr_record _2 )\n# 10229 \"ml/parser.ml\"\n               : 'constructor_arguments))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val 
__caml_parser_env 0 : 'label_declaration) in\n    Obj.repr(\n# 1856 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10236 \"ml/parser.ml\"\n               : 'label_declarations))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'label_declaration_semi) in\n    Obj.repr(\n# 1857 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10243 \"ml/parser.ml\"\n               : 'label_declarations))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'label_declaration_semi) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'label_declarations) in\n    Obj.repr(\n# 1858 \"ml/parser.mly\"\n                                                ( _1 :: _2 )\n# 10251 \"ml/parser.ml\"\n               : 'label_declarations))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mutable_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'label) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'poly_type_no_attr) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 1862 \"ml/parser.mly\"\n      (\n       Type.field (mkrhs _2 2) _4 ~mut:_1 ~attrs:_5\n         ~loc:(symbol_rloc()) ~info:(symbol_info ())\n      )\n# 10264 \"ml/parser.ml\"\n               : 'label_declaration))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 6 : 'mutable_flag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 5 : 'label) in\n    let _4 = (Parsing.peek_val __caml_parser_env 3 : 'poly_type_no_attr) in\n    let _5 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _7 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 1869 \"ml/parser.mly\"\n      (\n       let info =\n         match rhs_info 5 with\n         | Some _ as info_before_semi -> info_before_semi\n         | None -> symbol_info ()\n       in\n       Type.field 
(mkrhs _2 2) _4 ~mut:_1 ~attrs:(_5 @ _7)\n         ~loc:(symbol_rloc()) ~info\n      )\n# 10283 \"ml/parser.ml\"\n               : 'label_declaration_semi))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 7 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 6 : 'nonrec_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 5 : 'optional_type_parameters) in\n    let _5 = (Parsing.peek_val __caml_parser_env 4 : 'type_longident) in\n    let _7 = (Parsing.peek_val __caml_parser_env 2 : 'private_flag) in\n    let _8 = (Parsing.peek_val __caml_parser_env 1 : 'str_extension_constructors) in\n    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1885 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        if _3 <> Recursive then not_expecting 3 \"nonrec flag\";\n        Te.mk (mkrhs _5 5) (List.rev _8) ~params:_4 ~priv:_7\n          ~attrs:(attrs@_9) ~docs:(symbol_docs ())\n        , ext )\n# 10300 \"ml/parser.ml\"\n               : 'str_type_extension))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 7 : 'ext_attributes) in\n    let _3 = (Parsing.peek_val __caml_parser_env 6 : 'nonrec_flag) in\n    let _4 = (Parsing.peek_val __caml_parser_env 5 : 'optional_type_parameters) in\n    let _5 = (Parsing.peek_val __caml_parser_env 4 : 'type_longident) in\n    let _7 = (Parsing.peek_val __caml_parser_env 2 : 'private_flag) in\n    let _8 = (Parsing.peek_val __caml_parser_env 1 : 'sig_extension_constructors) in\n    let _9 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 1894 \"ml/parser.mly\"\n      ( let (ext, attrs) = _2 in\n        if _3 <> Recursive then not_expecting 3 \"nonrec flag\";\n        Te.mk (mkrhs _5 5) (List.rev _8) ~params:_4 ~priv:_7\n          ~attrs:(attrs @ _9) ~docs:(symbol_docs ())\n        , ext )\n# 10317 \"ml/parser.ml\"\n               : 'sig_type_extension))\n; (fun 
__caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension_constructor_declaration) in\n    Obj.repr(\n# 1901 \"ml/parser.mly\"\n                                                          ( [_1] )\n# 10324 \"ml/parser.ml\"\n               : 'str_extension_constructors))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in\n    Obj.repr(\n# 1902 \"ml/parser.mly\"\n                                                          ( [_1] )\n# 10331 \"ml/parser.ml\"\n               : 'str_extension_constructors))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension_constructor_rebind) in\n    Obj.repr(\n# 1903 \"ml/parser.mly\"\n                                                          ( [_1] )\n# 10338 \"ml/parser.ml\"\n               : 'str_extension_constructors))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_rebind) in\n    Obj.repr(\n# 1904 \"ml/parser.mly\"\n                                                          ( [_1] )\n# 10345 \"ml/parser.ml\"\n               : 'str_extension_constructors))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'str_extension_constructors) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in\n    Obj.repr(\n# 1906 \"ml/parser.mly\"\n      ( _2 :: _1 )\n# 10353 \"ml/parser.ml\"\n               : 'str_extension_constructors))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'str_extension_constructors) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_rebind) in\n    Obj.repr(\n# 1908 \"ml/parser.mly\"\n      ( _2 :: _1 )\n# 10361 \"ml/parser.ml\"\n               : 'str_extension_constructors))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 
'extension_constructor_declaration) in\n    Obj.repr(\n# 1911 \"ml/parser.mly\"\n                                                          ( [_1] )\n# 10368 \"ml/parser.ml\"\n               : 'sig_extension_constructors))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in\n    Obj.repr(\n# 1912 \"ml/parser.mly\"\n                                                          ( [_1] )\n# 10375 \"ml/parser.ml\"\n               : 'sig_extension_constructors))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'sig_extension_constructors) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'bar_extension_constructor_declaration) in\n    Obj.repr(\n# 1914 \"ml/parser.mly\"\n      ( _2 :: _1 )\n# 10383 \"ml/parser.ml\"\n               : 'sig_extension_constructors))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 1918 \"ml/parser.mly\"\n      ( let args, res = _2 in\n        Te.decl (mkrhs _1 1) ~args ?res ~attrs:_3\n          ~loc:(symbol_rloc()) ~info:(symbol_info ()) )\n# 10394 \"ml/parser.ml\"\n               : 'extension_constructor_declaration))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'constr_ident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'generalized_constructor_arguments) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 1924 \"ml/parser.mly\"\n      ( let args, res = _3 in\n        Te.decl (mkrhs _2 2) ~args ?res ~attrs:_4\n           ~loc:(symbol_rloc()) ~info:(symbol_info ()) )\n# 10405 \"ml/parser.ml\"\n               : 'bar_extension_constructor_declaration))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val 
__caml_parser_env 3 : 'constr_ident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 1930 \"ml/parser.mly\"\n      ( Te.rebind (mkrhs _1 1) (mkrhs _3 3) ~attrs:_4\n          ~loc:(symbol_rloc()) ~info:(symbol_info ()) )\n# 10415 \"ml/parser.ml\"\n               : 'extension_constructor_rebind))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'constr_ident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'constr_longident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 1935 \"ml/parser.mly\"\n      ( Te.rebind (mkrhs _2 2) (mkrhs _4 4) ~attrs:_5\n          ~loc:(symbol_rloc()) ~info:(symbol_info ()) )\n# 10425 \"ml/parser.ml\"\n               : 'bar_extension_constructor_rebind))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'with_constraint) in\n    Obj.repr(\n# 1942 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10432 \"ml/parser.ml\"\n               : 'with_constraints))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'with_constraints) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'with_constraint) in\n    Obj.repr(\n# 1943 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 10440 \"ml/parser.ml\"\n               : 'with_constraints))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'optional_type_parameters) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'label_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'with_type_binder) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'core_type_no_attr) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'constraints) in\n    Obj.repr(\n# 1948 \"ml/parser.mly\"\n      ( Pwith_type\n          (mkrhs 
_3 3,\n           (Type.mk (mkrhs (Longident.last _3) 3)\n              ~params:_2\n              ~cstrs:(List.rev _6)\n              ~manifest:_5\n              ~priv:_4\n              ~loc:(symbol_rloc()))) )\n# 10458 \"ml/parser.ml\"\n               : 'with_constraint))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'optional_type_parameters) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'label_longident) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in\n    Obj.repr(\n# 1959 \"ml/parser.mly\"\n      ( Pwith_typesubst\n         (mkrhs _3 3,\n           (Type.mk (mkrhs (Longident.last _3) 3)\n             ~params:_2\n             ~manifest:_5\n             ~loc:(symbol_rloc()))) )\n# 10472 \"ml/parser.ml\"\n               : 'with_constraint))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'mod_ext_longident) in\n    Obj.repr(\n# 1966 \"ml/parser.mly\"\n      ( Pwith_module (mkrhs _2 2, mkrhs _4 4) )\n# 10480 \"ml/parser.ml\"\n               : 'with_constraint))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'mod_ext_longident) in\n    Obj.repr(\n# 1968 \"ml/parser.mly\"\n      ( Pwith_modsubst (mkrhs _2 2, mkrhs _4 4) )\n# 10488 \"ml/parser.ml\"\n               : 'with_constraint))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1971 \"ml/parser.mly\"\n                   ( Public )\n# 10494 \"ml/parser.ml\"\n               : 'with_type_binder))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 1972 \"ml/parser.mly\"\n                   ( Private )\n# 10500 \"ml/parser.ml\"\n               : 'with_type_binder))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 1978 \"ml/parser.mly\"\n                                  
              ( [mkrhs _2 2] )\n# 10507 \"ml/parser.ml\"\n               : 'typevar_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typevar_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 1979 \"ml/parser.mly\"\n                                                ( mkrhs _3 3 :: _1 )\n# 10515 \"ml/parser.ml\"\n               : 'typevar_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1983 \"ml/parser.mly\"\n          ( _1 )\n# 10522 \"ml/parser.ml\"\n               : 'poly_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typevar_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 1985 \"ml/parser.mly\"\n          ( mktyp(Ptyp_poly(List.rev _1, _3)) )\n# 10530 \"ml/parser.ml\"\n               : 'poly_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in\n    Obj.repr(\n# 1989 \"ml/parser.mly\"\n          ( _1 )\n# 10537 \"ml/parser.ml\"\n               : 'poly_type_no_attr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typevar_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in\n    Obj.repr(\n# 1991 \"ml/parser.mly\"\n          ( mktyp(Ptyp_poly(List.rev _1, _3)) )\n# 10545 \"ml/parser.ml\"\n               : 'poly_type_no_attr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in\n    Obj.repr(\n# 1998 \"ml/parser.mly\"\n      ( _1 )\n# 10552 \"ml/parser.ml\"\n               : 'core_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'core_type) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attribute) in\n    Obj.repr(\n# 2000 \"ml/parser.mly\"\n      ( Typ.attr _1 _2 )\n# 10560 
\"ml/parser.ml\"\n               : 'core_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in\n    Obj.repr(\n# 2004 \"ml/parser.mly\"\n      ( _1 )\n# 10567 \"ml/parser.ml\"\n               : 'core_type_no_attr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'core_type2) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 2006 \"ml/parser.mly\"\n      ( mktyp(Ptyp_alias(_1, _4)) )\n# 10575 \"ml/parser.ml\"\n               : 'core_type_no_attr))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type_or_tuple) in\n    Obj.repr(\n# 2010 \"ml/parser.mly\"\n      ( _1 )\n# 10582 \"ml/parser.ml\"\n               : 'core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : string) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in\n    Obj.repr(\n# 2012 \"ml/parser.mly\"\n      ( let param = extra_rhs_core_type _4 ~pos:4 in\n        mktyp (Ptyp_arrow(Optional _2 , param, _6)) )\n# 10592 \"ml/parser.ml\"\n               : 'core_type2))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in\n    Obj.repr(\n# 2015 \"ml/parser.mly\"\n      ( let param = extra_rhs_core_type _2 ~pos:2 in\n        mktyp(Ptyp_arrow(Optional _1 , param, _4))\n      )\n# 10603 \"ml/parser.ml\"\n               : 'core_type2))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : string) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in\n    Obj.repr(\n# 2019 \"ml/parser.mly\"\n      ( let param = 
extra_rhs_core_type _3 ~pos:3 in\n        mktyp(Ptyp_arrow(Labelled _1, param, _5)) )\n# 10613 \"ml/parser.ml\"\n               : 'core_type2))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type2) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type2) in\n    Obj.repr(\n# 2022 \"ml/parser.mly\"\n      ( let param = extra_rhs_core_type _1 ~pos:1 in\n        mktyp(Ptyp_arrow(Nolabel, param, _3)) )\n# 10622 \"ml/parser.ml\"\n               : 'core_type2))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type2) in\n    Obj.repr(\n# 2028 \"ml/parser.mly\"\n      ( _1 )\n# 10629 \"ml/parser.ml\"\n               : 'simple_core_type))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'core_type_comma_list) in\n    Obj.repr(\n# 2030 \"ml/parser.mly\"\n      ( match _2 with [sty] -> sty | _ -> raise Parse_error )\n# 10636 \"ml/parser.ml\"\n               : 'simple_core_type))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 2035 \"ml/parser.mly\"\n      ( mktyp(Ptyp_var _2) )\n# 10643 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2037 \"ml/parser.mly\"\n      ( mktyp(Ptyp_any) )\n# 10649 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in\n    Obj.repr(\n# 2039 \"ml/parser.mly\"\n      ( mktyp(Ptyp_constr(mkrhs _1 1, [])) )\n# 10656 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_core_type2) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in\n    Obj.repr(\n# 2041 \"ml/parser.mly\"\n      ( mktyp(Ptyp_constr(mkrhs _2 2, [_1])) )\n# 10664 \"ml/parser.ml\"\n               : 
'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'type_longident) in\n    Obj.repr(\n# 2043 \"ml/parser.mly\"\n      ( mktyp(Ptyp_constr(mkrhs _4 4, List.rev _2)) )\n# 10672 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'meth_list) in\n    Obj.repr(\n# 2045 \"ml/parser.mly\"\n      ( let (f, c) = _2 in mktyp(Ptyp_object (f, c)) )\n# 10679 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2047 \"ml/parser.mly\"\n      ( mktyp(Ptyp_object ([], Closed)) )\n# 10685 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in\n    Obj.repr(\n# 2049 \"ml/parser.mly\"\n      ( mktyp(Ptyp_class(mkrhs _2 2, [])) )\n# 10692 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type2) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in\n    Obj.repr(\n# 2051 \"ml/parser.mly\"\n      ( mktyp(Ptyp_class(mkrhs _3 3, [_1])) )\n# 10700 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'core_type_comma_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'class_longident) in\n    Obj.repr(\n# 2053 \"ml/parser.mly\"\n      ( mktyp(Ptyp_class(mkrhs _5 5, List.rev _2)) )\n# 10708 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tag_field) in\n    Obj.repr(\n# 2055 \"ml/parser.mly\"\n      ( mktyp(Ptyp_variant([_2], Closed, None)) )\n# 10715 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun 
__caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in\n    Obj.repr(\n# 2061 \"ml/parser.mly\"\n      ( mktyp(Ptyp_variant(List.rev _3, Closed, None)) )\n# 10722 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 3 : 'row_field) in\n    let _4 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in\n    Obj.repr(\n# 2063 \"ml/parser.mly\"\n      ( mktyp(Ptyp_variant(_2 :: List.rev _4, Closed, None)) )\n# 10730 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_bar) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in\n    Obj.repr(\n# 2065 \"ml/parser.mly\"\n      ( mktyp(Ptyp_variant(List.rev _3, Open, None)) )\n# 10738 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2067 \"ml/parser.mly\"\n      ( mktyp(Ptyp_variant([], Open, None)) )\n# 10744 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'opt_bar) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'row_field_list) in\n    Obj.repr(\n# 2069 \"ml/parser.mly\"\n      ( mktyp(Ptyp_variant(List.rev _3, Closed, Some [])) )\n# 10752 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 4 : 'opt_bar) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'row_field_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag_list) in\n    Obj.repr(\n# 2071 \"ml/parser.mly\"\n      ( mktyp(Ptyp_variant(List.rev _3, Closed, Some (List.rev _5))) )\n# 10761 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'ext_attributes) in\n    let _4 = 
(Parsing.peek_val __caml_parser_env 1 : 'package_type) in\n    Obj.repr(\n# 2073 \"ml/parser.mly\"\n      ( mktyp_attrs (Ptyp_package _4) _3 )\n# 10769 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'extension) in\n    Obj.repr(\n# 2075 \"ml/parser.mly\"\n      ( mktyp (Ptyp_extension _1) )\n# 10776 \"ml/parser.ml\"\n               : 'simple_core_type2))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'module_type) in\n    Obj.repr(\n# 2078 \"ml/parser.mly\"\n                ( package_type_of_module_type _1 )\n# 10783 \"ml/parser.ml\"\n               : 'package_type))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'row_field) in\n    Obj.repr(\n# 2081 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10790 \"ml/parser.ml\"\n               : 'row_field_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'row_field_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'row_field) in\n    Obj.repr(\n# 2082 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 10798 \"ml/parser.ml\"\n               : 'row_field_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tag_field) in\n    Obj.repr(\n# 2085 \"ml/parser.mly\"\n                                                ( _1 )\n# 10805 \"ml/parser.ml\"\n               : 'row_field))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in\n    Obj.repr(\n# 2086 \"ml/parser.mly\"\n                                                ( Rinherit _1 )\n# 10812 \"ml/parser.ml\"\n               : 'row_field))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'name_tag) in\n    let _3 = (Parsing.peek_val __caml_parser_env 2 : 'opt_ampersand) in\n    let _4 
= (Parsing.peek_val __caml_parser_env 1 : 'amper_type_list) in\n    let _5 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 2090 \"ml/parser.mly\"\n      ( Rtag (mkrhs _1 1, add_info_attrs (symbol_info ()) _5,\n               _3, List.rev _4) )\n# 10823 \"ml/parser.ml\"\n               : 'tag_field))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'name_tag) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 2093 \"ml/parser.mly\"\n      ( Rtag (mkrhs _1 1, add_info_attrs (symbol_info ()) _2, true, []) )\n# 10831 \"ml/parser.ml\"\n               : 'tag_field))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2096 \"ml/parser.mly\"\n                                                ( true )\n# 10837 \"ml/parser.ml\"\n               : 'opt_ampersand))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2097 \"ml/parser.mly\"\n                                                ( false )\n# 10843 \"ml/parser.ml\"\n               : 'opt_ampersand))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in\n    Obj.repr(\n# 2100 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10850 \"ml/parser.ml\"\n               : 'amper_type_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'amper_type_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_no_attr) in\n    Obj.repr(\n# 2101 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 10858 \"ml/parser.ml\"\n               : 'amper_type_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in\n    Obj.repr(\n# 2104 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10865 \"ml/parser.ml\"\n               : 'name_tag_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 
1 : 'name_tag_list) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'name_tag) in\n    Obj.repr(\n# 2105 \"ml/parser.mly\"\n                                                ( _2 :: _1 )\n# 10873 \"ml/parser.ml\"\n               : 'name_tag_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in\n    Obj.repr(\n# 2108 \"ml/parser.mly\"\n                     ( _1 )\n# 10880 \"ml/parser.ml\"\n               : 'simple_core_type_or_tuple))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'simple_core_type) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type_list) in\n    Obj.repr(\n# 2110 \"ml/parser.mly\"\n      ( mktyp(Ptyp_tuple(_1 :: List.rev _3)) )\n# 10888 \"ml/parser.ml\"\n               : 'simple_core_type_or_tuple))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 2113 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10895 \"ml/parser.ml\"\n               : 'core_type_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_comma_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 2114 \"ml/parser.mly\"\n                                                ( _3 :: _1 )\n# 10903 \"ml/parser.ml\"\n               : 'core_type_comma_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in\n    Obj.repr(\n# 2117 \"ml/parser.mly\"\n                                                ( [_1] )\n# 10910 \"ml/parser.ml\"\n               : 'core_type_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'core_type_list) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in\n    Obj.repr(\n# 2118 \"ml/parser.mly\"\n                                                ( _3 
:: _1 )\n# 10918 \"ml/parser.ml\"\n               : 'core_type_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'field_semi) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'meth_list) in\n    Obj.repr(\n# 2121 \"ml/parser.mly\"\n                                                ( let (f, c) = _2 in (_1 :: f, c) )\n# 10926 \"ml/parser.ml\"\n               : 'meth_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'inherit_field_semi) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'meth_list) in\n    Obj.repr(\n# 2122 \"ml/parser.mly\"\n                                                ( let (f, c) = _2 in (_1 :: f, c) )\n# 10934 \"ml/parser.ml\"\n               : 'meth_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'field_semi) in\n    Obj.repr(\n# 2123 \"ml/parser.mly\"\n                                                ( [_1], Closed )\n# 10941 \"ml/parser.ml\"\n               : 'meth_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'field) in\n    Obj.repr(\n# 2124 \"ml/parser.mly\"\n                                                ( [_1], Closed )\n# 10948 \"ml/parser.ml\"\n               : 'meth_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'inherit_field_semi) in\n    Obj.repr(\n# 2125 \"ml/parser.mly\"\n                                                ( [_1], Closed )\n# 10955 \"ml/parser.ml\"\n               : 'meth_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'simple_core_type) in\n    Obj.repr(\n# 2126 \"ml/parser.mly\"\n                                                ( [Oinherit _1], Closed )\n# 10962 \"ml/parser.ml\"\n               : 'meth_list))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2127 \"ml/parser.mly\"\n                                                ( [], Open )\n# 10968 
\"ml/parser.ml\"\n               : 'meth_list))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'label) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'poly_type_no_attr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 2131 \"ml/parser.mly\"\n    ( Otag (mkrhs _1 1, add_info_attrs (symbol_info ()) _4, _3) )\n# 10977 \"ml/parser.ml\"\n               : 'field))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 5 : 'label) in\n    let _3 = (Parsing.peek_val __caml_parser_env 3 : 'poly_type_no_attr) in\n    let _4 = (Parsing.peek_val __caml_parser_env 2 : 'attributes) in\n    let _6 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 2136 \"ml/parser.mly\"\n    ( let info =\n        match rhs_info 4 with\n        | Some _ as info_before_semi -> info_before_semi\n        | None -> symbol_info ()\n      in\n      ( Otag (mkrhs _1 1, add_info_attrs info (_4 @ _6), _3)) )\n# 10992 \"ml/parser.ml\"\n               : 'field_semi))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'simple_core_type) in\n    Obj.repr(\n# 2145 \"ml/parser.mly\"\n                        ( Oinherit _1 )\n# 10999 \"ml/parser.ml\"\n               : 'inherit_field_semi))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2148 \"ml/parser.mly\"\n                                                ( _1 )\n# 11006 \"ml/parser.ml\"\n               : 'label))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in\n    Obj.repr(\n# 2154 \"ml/parser.mly\"\n                 ( let (n, m) = _1 in Pconst_integer (n, m) )\n# 11013 \"ml/parser.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : char) in\n    Obj.repr(\n# 2155 \"ml/parser.mly\"\n                 ( 
Pconst_char (Char.code _1) )\n# 11020 \"ml/parser.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * string option) in\n    Obj.repr(\n# 2156 \"ml/parser.mly\"\n                 ( let (s, d) = _1 in Pconst_string (s, d) )\n# 11027 \"ml/parser.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in\n    Obj.repr(\n# 2157 \"ml/parser.mly\"\n                 ( let (f, m) = _1 in Pconst_float (f, m) )\n# 11034 \"ml/parser.ml\"\n               : 'constant))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'constant) in\n    Obj.repr(\n# 2160 \"ml/parser.mly\"\n                 ( _1 )\n# 11041 \"ml/parser.ml\"\n               : 'signed_constant))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in\n    Obj.repr(\n# 2161 \"ml/parser.mly\"\n                 ( let (n, m) = _2 in Pconst_integer(\"-\" ^ n, m) )\n# 11048 \"ml/parser.ml\"\n               : 'signed_constant))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in\n    Obj.repr(\n# 2162 \"ml/parser.mly\"\n                 ( let (f, m) = _2 in Pconst_float(\"-\" ^ f, m) )\n# 11055 \"ml/parser.ml\"\n               : 'signed_constant))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in\n    Obj.repr(\n# 2163 \"ml/parser.mly\"\n                 ( let (n, m) = _2 in Pconst_integer (n, m) )\n# 11062 \"ml/parser.ml\"\n               : 'signed_constant))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : string * char option) in\n    Obj.repr(\n# 2164 \"ml/parser.mly\"\n                 ( let (f, m) = _2 in Pconst_float(f, m) )\n# 11069 \"ml/parser.ml\"\n               : 'signed_constant))\n; (fun __caml_parser_env ->\n    let _1 = 
(Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2170 \"ml/parser.mly\"\n                                                ( _1 )\n# 11076 \"ml/parser.ml\"\n               : 'ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2171 \"ml/parser.mly\"\n                                                ( _1 )\n# 11083 \"ml/parser.ml\"\n               : 'ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2174 \"ml/parser.mly\"\n                                                ( _1 )\n# 11090 \"ml/parser.ml\"\n               : 'val_ident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'operator) in\n    Obj.repr(\n# 2175 \"ml/parser.mly\"\n                                                ( _2 )\n# 11097 \"ml/parser.ml\"\n               : 'val_ident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'operator) in\n    Obj.repr(\n# 2176 \"ml/parser.mly\"\n                                                ( unclosed \"(\" 1 \")\" 3 )\n# 11104 \"ml/parser.ml\"\n               : 'val_ident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2177 \"ml/parser.mly\"\n                                                ( expecting 2 \"operator\" )\n# 11110 \"ml/parser.ml\"\n               : 'val_ident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2178 \"ml/parser.mly\"\n                                                ( expecting 3 \"module-expr\" )\n# 11116 \"ml/parser.ml\"\n               : 'val_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2181 \"ml/parser.mly\"\n                                                ( _1 )\n# 11123 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2182 
\"ml/parser.mly\"\n                                                ( _1 )\n# 11130 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2183 \"ml/parser.mly\"\n                                                ( _1 )\n# 11137 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2184 \"ml/parser.mly\"\n                                                ( _1 )\n# 11144 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2185 \"ml/parser.mly\"\n                                                ( _1 )\n# 11151 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2186 \"ml/parser.mly\"\n                                                ( _1 )\n# 11158 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in\n    Obj.repr(\n# 2187 \"ml/parser.mly\"\n                                                ( \".\"^ _1 ^\"()\" )\n# 11165 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    Obj.repr(\n# 2188 \"ml/parser.mly\"\n                                                ( \".\"^ _1 ^ \"()<-\" )\n# 11172 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in\n    Obj.repr(\n# 2189 \"ml/parser.mly\"\n                                                ( \".\"^ _1 ^\"[]\" )\n# 11179 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : string) 
in\n    Obj.repr(\n# 2190 \"ml/parser.mly\"\n                                                ( \".\"^ _1 ^ \"[]<-\" )\n# 11186 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : string) in\n    Obj.repr(\n# 2191 \"ml/parser.mly\"\n                                                ( \".\"^ _1 ^\"{}\" )\n# 11193 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : string) in\n    Obj.repr(\n# 2192 \"ml/parser.mly\"\n                                                ( \".\"^ _1 ^ \"{}<-\" )\n# 11200 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2193 \"ml/parser.mly\"\n                                                ( _1 )\n# 11207 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2194 \"ml/parser.mly\"\n                                                ( \"!\" )\n# 11213 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2195 \"ml/parser.mly\"\n                                                ( \"+\" )\n# 11219 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2196 \"ml/parser.mly\"\n                                                ( \"+.\" )\n# 11225 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2197 \"ml/parser.mly\"\n                                                ( \"-\" )\n# 11231 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2198 \"ml/parser.mly\"\n                                                ( \"-.\" )\n# 11237 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2199 \"ml/parser.mly\"\n                                                ( 
\"*\" )\n# 11243 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2200 \"ml/parser.mly\"\n                                                ( \"=\" )\n# 11249 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2201 \"ml/parser.mly\"\n                                                ( \"<\" )\n# 11255 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2202 \"ml/parser.mly\"\n                                                ( \">\" )\n# 11261 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2203 \"ml/parser.mly\"\n                                                ( \"or\" )\n# 11267 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2204 \"ml/parser.mly\"\n                                                ( \"||\" )\n# 11273 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2205 \"ml/parser.mly\"\n                                                ( \"&\" )\n# 11279 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2206 \"ml/parser.mly\"\n                                                ( \"&&\" )\n# 11285 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2207 \"ml/parser.mly\"\n                                                ( \":=\" )\n# 11291 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2208 \"ml/parser.mly\"\n                                                ( \"+=\" )\n# 11297 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2209 \"ml/parser.mly\"\n                                                ( \"%\" )\n# 11303 \"ml/parser.ml\"\n               : 'operator))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 
: string) in\n    Obj.repr(\n# 2212 \"ml/parser.mly\"\n                                                ( _1 )\n# 11310 \"ml/parser.ml\"\n               : 'constr_ident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2213 \"ml/parser.mly\"\n                                                ( \"[]\" )\n# 11316 \"ml/parser.ml\"\n               : 'constr_ident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2214 \"ml/parser.mly\"\n                                                ( \"()\" )\n# 11322 \"ml/parser.ml\"\n               : 'constr_ident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2215 \"ml/parser.mly\"\n                                                ( \"::\" )\n# 11328 \"ml/parser.ml\"\n               : 'constr_ident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2216 \"ml/parser.mly\"\n                                                ( \"false\" )\n# 11334 \"ml/parser.ml\"\n               : 'constr_ident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2217 \"ml/parser.mly\"\n                                                ( \"true\" )\n# 11340 \"ml/parser.ml\"\n               : 'constr_ident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in\n    Obj.repr(\n# 2221 \"ml/parser.mly\"\n                                                ( Lident _1 )\n# 11347 \"ml/parser.ml\"\n               : 'val_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'val_ident) in\n    Obj.repr(\n# 2222 \"ml/parser.mly\"\n                                                ( Ldot(_1, _3) )\n# 11355 \"ml/parser.ml\"\n               : 'val_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'mod_longident) in\n    Obj.repr(\n# 2225 \"ml/parser.mly\"\n                                                ( _1 )\n# 11362 \"ml/parser.ml\"\n               : 'constr_longident))\n; (fun 
__caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 4 : 'mod_longident) in\n    Obj.repr(\n# 2226 \"ml/parser.mly\"\n                                                ( Ldot(_1,\"::\") )\n# 11369 \"ml/parser.ml\"\n               : 'constr_longident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2227 \"ml/parser.mly\"\n                                                ( Lident \"[]\" )\n# 11375 \"ml/parser.ml\"\n               : 'constr_longident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2228 \"ml/parser.mly\"\n                                                ( Lident \"()\" )\n# 11381 \"ml/parser.ml\"\n               : 'constr_longident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2229 \"ml/parser.mly\"\n                                                ( Lident \"::\" )\n# 11387 \"ml/parser.ml\"\n               : 'constr_longident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2230 \"ml/parser.mly\"\n                                                ( Lident \"false\" )\n# 11393 \"ml/parser.ml\"\n               : 'constr_longident))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2231 \"ml/parser.mly\"\n                                                ( Lident \"true\" )\n# 11399 \"ml/parser.ml\"\n               : 'constr_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2234 \"ml/parser.mly\"\n                                                ( Lident _1 )\n# 11406 \"ml/parser.ml\"\n               : 'label_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2235 \"ml/parser.mly\"\n                                                ( Ldot(_1, _3) )\n# 11414 \"ml/parser.ml\"\n               : 'label_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2238 
\"ml/parser.mly\"\n                                                ( Lident _1 )\n# 11421 \"ml/parser.ml\"\n               : 'type_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2239 \"ml/parser.mly\"\n                                                ( Ldot(_1, _3) )\n# 11429 \"ml/parser.ml\"\n               : 'type_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2242 \"ml/parser.mly\"\n                                                ( Lident _1 )\n# 11436 \"ml/parser.ml\"\n               : 'mod_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2243 \"ml/parser.mly\"\n                                                ( Ldot(_1, _3) )\n# 11444 \"ml/parser.ml\"\n               : 'mod_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2246 \"ml/parser.mly\"\n                                                ( Lident _1 )\n# 11451 \"ml/parser.ml\"\n               : 'mod_ext_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2247 \"ml/parser.mly\"\n                                                ( Ldot(_1, _3) )\n# 11459 \"ml/parser.ml\"\n               : 'mod_ext_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 3 : 'mod_ext_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'mod_ext_longident) in\n    Obj.repr(\n# 2248 \"ml/parser.mly\"\n                                                      ( lapply _1 _3 )\n# 11467 
\"ml/parser.ml\"\n               : 'mod_ext_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 2251 \"ml/parser.mly\"\n                                                ( Lident _1 )\n# 11474 \"ml/parser.ml\"\n               : 'mty_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 2252 \"ml/parser.mly\"\n                                                ( Ldot(_1, _3) )\n# 11482 \"ml/parser.ml\"\n               : 'mty_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2255 \"ml/parser.mly\"\n                                                ( Lident _1 )\n# 11489 \"ml/parser.ml\"\n               : 'clty_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_ext_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2256 \"ml/parser.mly\"\n                                                ( Ldot(_1, _3) )\n# 11497 \"ml/parser.ml\"\n               : 'clty_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2259 \"ml/parser.mly\"\n                                                ( Lident _1 )\n# 11504 \"ml/parser.ml\"\n               : 'class_longident))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'mod_longident) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2260 \"ml/parser.mly\"\n                                                ( Ldot(_1, _3) )\n# 11512 \"ml/parser.ml\"\n               : 'class_longident))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'ident) in\n    Obj.repr(\n# 2269 \"ml/parser.mly\"\n            
                                    ( _2 )\n# 11519 \"ml/parser.ml\"\n               : 'name_tag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2272 \"ml/parser.mly\"\n                                                ( Nonrecursive )\n# 11525 \"ml/parser.ml\"\n               : 'rec_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2273 \"ml/parser.mly\"\n                                                ( Recursive )\n# 11531 \"ml/parser.ml\"\n               : 'rec_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2276 \"ml/parser.mly\"\n                                                ( Recursive )\n# 11537 \"ml/parser.ml\"\n               : 'nonrec_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2277 \"ml/parser.mly\"\n                                                ( Nonrecursive )\n# 11543 \"ml/parser.ml\"\n               : 'nonrec_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2280 \"ml/parser.mly\"\n                                                ( Upto )\n# 11549 \"ml/parser.ml\"\n               : 'direction_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2281 \"ml/parser.mly\"\n                                                ( Downto )\n# 11555 \"ml/parser.ml\"\n               : 'direction_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2284 \"ml/parser.mly\"\n                                                ( Public )\n# 11561 \"ml/parser.ml\"\n               : 'private_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2285 \"ml/parser.mly\"\n                                                ( Private )\n# 11567 \"ml/parser.ml\"\n               : 'private_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2288 \"ml/parser.mly\"\n                                                ( Immutable )\n# 11573 \"ml/parser.ml\"\n               : 'mutable_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2289 \"ml/parser.mly\"\n                                                ( Mutable )\n# 11579 \"ml/parser.ml\"\n               : 
'mutable_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2292 \"ml/parser.mly\"\n                                                ( Concrete )\n# 11585 \"ml/parser.ml\"\n               : 'virtual_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2293 \"ml/parser.mly\"\n                                                ( Virtual )\n# 11591 \"ml/parser.ml\"\n               : 'virtual_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2296 \"ml/parser.mly\"\n                 ( Public, Concrete )\n# 11597 \"ml/parser.ml\"\n               : 'private_virtual_flags))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2297 \"ml/parser.mly\"\n            ( Private, Concrete )\n# 11603 \"ml/parser.ml\"\n               : 'private_virtual_flags))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2298 \"ml/parser.mly\"\n            ( Public, Virtual )\n# 11609 \"ml/parser.ml\"\n               : 'private_virtual_flags))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2299 \"ml/parser.mly\"\n                    ( Private, Virtual )\n# 11615 \"ml/parser.ml\"\n               : 'private_virtual_flags))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2300 \"ml/parser.mly\"\n                    ( Private, Virtual )\n# 11621 \"ml/parser.ml\"\n               : 'private_virtual_flags))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2303 \"ml/parser.mly\"\n                                                ( Fresh )\n# 11627 \"ml/parser.ml\"\n               : 'override_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2304 \"ml/parser.mly\"\n                                                ( Override )\n# 11633 \"ml/parser.ml\"\n               : 'override_flag))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2307 \"ml/parser.mly\"\n                                                ( () )\n# 11639 \"ml/parser.ml\"\n               : 'opt_bar))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2308 \"ml/parser.mly\"\n                                                ( () )\n# 11645 
\"ml/parser.ml\"\n               : 'opt_bar))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2311 \"ml/parser.mly\"\n                                                ( () )\n# 11651 \"ml/parser.ml\"\n               : 'opt_semi))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2312 \"ml/parser.mly\"\n                                                ( () )\n# 11657 \"ml/parser.ml\"\n               : 'opt_semi))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2315 \"ml/parser.mly\"\n                                                ( \"-\" )\n# 11663 \"ml/parser.ml\"\n               : 'subtractive))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2316 \"ml/parser.mly\"\n                                                ( \"-.\" )\n# 11669 \"ml/parser.ml\"\n               : 'subtractive))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2319 \"ml/parser.mly\"\n                                                ( \"+\" )\n# 11675 \"ml/parser.ml\"\n               : 'additive))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2320 \"ml/parser.mly\"\n                                                ( \"+.\" )\n# 11681 \"ml/parser.ml\"\n               : 'additive))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2326 \"ml/parser.mly\"\n           ( _1 )\n# 11688 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : string) in\n    Obj.repr(\n# 2327 \"ml/parser.mly\"\n           ( _1 )\n# 11695 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2328 \"ml/parser.mly\"\n        ( \"and\" )\n# 11701 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2329 \"ml/parser.mly\"\n       ( \"as\" )\n# 11707 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2330 \"ml/parser.mly\"\n           ( \"assert\" )\n# 11713 
\"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2331 \"ml/parser.mly\"\n          ( \"begin\" )\n# 11719 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2332 \"ml/parser.mly\"\n          ( \"class\" )\n# 11725 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2333 \"ml/parser.mly\"\n               ( \"constraint\" )\n# 11731 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2334 \"ml/parser.mly\"\n       ( \"do\" )\n# 11737 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2335 \"ml/parser.mly\"\n         ( \"done\" )\n# 11743 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2336 \"ml/parser.mly\"\n           ( \"downto\" )\n# 11749 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2337 \"ml/parser.mly\"\n         ( \"else\" )\n# 11755 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2338 \"ml/parser.mly\"\n        ( \"end\" )\n# 11761 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2339 \"ml/parser.mly\"\n              ( \"exception\" )\n# 11767 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2340 \"ml/parser.mly\"\n             ( \"external\" )\n# 11773 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2341 \"ml/parser.mly\"\n          ( \"false\" )\n# 11779 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2342 \"ml/parser.mly\"\n        ( \"for\" )\n# 11785 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2343 
\"ml/parser.mly\"\n        ( \"fun\" )\n# 11791 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2344 \"ml/parser.mly\"\n             ( \"function\" )\n# 11797 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2345 \"ml/parser.mly\"\n            ( \"functor\" )\n# 11803 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2346 \"ml/parser.mly\"\n       ( \"if\" )\n# 11809 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2347 \"ml/parser.mly\"\n       ( \"in\" )\n# 11815 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2348 \"ml/parser.mly\"\n            ( \"include\" )\n# 11821 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2349 \"ml/parser.mly\"\n            ( \"inherit\" )\n# 11827 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2350 \"ml/parser.mly\"\n                ( \"initializer\" )\n# 11833 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2351 \"ml/parser.mly\"\n         ( \"lazy\" )\n# 11839 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2352 \"ml/parser.mly\"\n        ( \"let\" )\n# 11845 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2353 \"ml/parser.mly\"\n          ( \"match\" )\n# 11851 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2354 \"ml/parser.mly\"\n           ( \"method\" )\n# 11857 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2355 \"ml/parser.mly\"\n           ( \"module\" )\n# 11863 \"ml/parser.ml\"\n               : 
'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2356 \"ml/parser.mly\"\n            ( \"mutable\" )\n# 11869 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2357 \"ml/parser.mly\"\n        ( \"new\" )\n# 11875 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2358 \"ml/parser.mly\"\n           ( \"nonrec\" )\n# 11881 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2359 \"ml/parser.mly\"\n           ( \"object\" )\n# 11887 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2360 \"ml/parser.mly\"\n       ( \"of\" )\n# 11893 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2361 \"ml/parser.mly\"\n         ( \"open\" )\n# 11899 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2362 \"ml/parser.mly\"\n       ( \"or\" )\n# 11905 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2363 \"ml/parser.mly\"\n            ( \"private\" )\n# 11911 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2364 \"ml/parser.mly\"\n        ( \"rec\" )\n# 11917 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2365 \"ml/parser.mly\"\n        ( \"sig\" )\n# 11923 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2366 \"ml/parser.mly\"\n           ( \"struct\" )\n# 11929 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2367 \"ml/parser.mly\"\n         ( \"then\" )\n# 11935 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2368 \"ml/parser.mly\"\n       ( \"to\" )\n# 11941 
\"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2369 \"ml/parser.mly\"\n         ( \"true\" )\n# 11947 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2370 \"ml/parser.mly\"\n        ( \"try\" )\n# 11953 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2371 \"ml/parser.mly\"\n         ( \"type\" )\n# 11959 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2372 \"ml/parser.mly\"\n        ( \"val\" )\n# 11965 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2373 \"ml/parser.mly\"\n            ( \"virtual\" )\n# 11971 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2374 \"ml/parser.mly\"\n         ( \"when\" )\n# 11977 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2375 \"ml/parser.mly\"\n          ( \"while\" )\n# 11983 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2376 \"ml/parser.mly\"\n         ( \"with\" )\n# 11989 \"ml/parser.ml\"\n               : 'single_attr_id))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'single_attr_id) in\n    Obj.repr(\n# 2381 \"ml/parser.mly\"\n                   ( mkloc _1 (symbol_rloc()) )\n# 11996 \"ml/parser.ml\"\n               : 'attr_id))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 2 : 'single_attr_id) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attr_id) in\n    Obj.repr(\n# 2382 \"ml/parser.mly\"\n                               ( mkloc (_1 ^ \".\" ^ _3.txt) (symbol_rloc()))\n# 12004 \"ml/parser.ml\"\n               : 'attr_id))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in\n    let _3 = 
(Parsing.peek_val __caml_parser_env 1 : 'payload) in\n    Obj.repr(\n# 2385 \"ml/parser.mly\"\n                                      ( (_2, _3) )\n# 12012 \"ml/parser.ml\"\n               : 'attribute))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in\n    Obj.repr(\n# 2388 \"ml/parser.mly\"\n                                        ( (_2, _3) )\n# 12020 \"ml/parser.ml\"\n               : 'post_item_attribute))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in\n    Obj.repr(\n# 2391 \"ml/parser.mly\"\n                                          ( (_2, _3) )\n# 12028 \"ml/parser.ml\"\n               : 'floating_attribute))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2394 \"ml/parser.mly\"\n                 ( [] )\n# 12034 \"ml/parser.ml\"\n               : 'post_item_attributes))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'post_item_attribute) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'post_item_attributes) in\n    Obj.repr(\n# 2395 \"ml/parser.mly\"\n                                             ( _1 :: _2 )\n# 12042 \"ml/parser.ml\"\n               : 'post_item_attributes))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2398 \"ml/parser.mly\"\n               ( [] )\n# 12048 \"ml/parser.ml\"\n               : 'attributes))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'attribute) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 2399 \"ml/parser.mly\"\n                         ( _1 :: _2 )\n# 12056 \"ml/parser.ml\"\n               : 'attributes))\n; (fun __caml_parser_env ->\n    Obj.repr(\n# 2402 \"ml/parser.mly\"\n                 ( None, [] )\n# 12062 \"ml/parser.ml\"\n               : 'ext_attributes))\n; 
(fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 1 : 'attribute) in\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 2403 \"ml/parser.mly\"\n                         ( None, _1 :: _2 )\n# 12070 \"ml/parser.ml\"\n               : 'ext_attributes))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'attr_id) in\n    let _3 = (Parsing.peek_val __caml_parser_env 0 : 'attributes) in\n    Obj.repr(\n# 2404 \"ml/parser.mly\"\n                               ( Some _2, _3 )\n# 12078 \"ml/parser.ml\"\n               : 'ext_attributes))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in\n    Obj.repr(\n# 2407 \"ml/parser.mly\"\n                                           ( (_2, _3) )\n# 12086 \"ml/parser.ml\"\n               : 'extension))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'attr_id) in\n    let _3 = (Parsing.peek_val __caml_parser_env 1 : 'payload) in\n    Obj.repr(\n# 2410 \"ml/parser.mly\"\n                                                  ( (_2, _3) )\n# 12094 \"ml/parser.ml\"\n               : 'item_extension))\n; (fun __caml_parser_env ->\n    let _1 = (Parsing.peek_val __caml_parser_env 0 : 'structure) in\n    Obj.repr(\n# 2413 \"ml/parser.mly\"\n              ( PStr _1 )\n# 12101 \"ml/parser.ml\"\n               : 'payload))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'signature) in\n    Obj.repr(\n# 2414 \"ml/parser.mly\"\n                    ( PSig _2 )\n# 12108 \"ml/parser.ml\"\n               : 'payload))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 0 : 'core_type) in\n    Obj.repr(\n# 2415 \"ml/parser.mly\"\n                    ( PTyp _2 )\n# 12115 \"ml/parser.ml\"\n               : 'payload))\n; (fun __caml_parser_env ->\n    let _2 = 
(Parsing.peek_val __caml_parser_env 0 : 'pattern) in\n    Obj.repr(\n# 2416 \"ml/parser.mly\"\n                     ( PPat (_2, None) )\n# 12122 \"ml/parser.ml\"\n               : 'payload))\n; (fun __caml_parser_env ->\n    let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in\n    let _4 = (Parsing.peek_val __caml_parser_env 0 : 'seq_expr) in\n    Obj.repr(\n# 2417 \"ml/parser.mly\"\n                                   ( PPat (_2, Some _4) )\n# 12130 \"ml/parser.ml\"\n               : 'payload))\n(* Entry implementation *)\n; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))\n(* Entry interface *)\n; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))\n(* Entry parse_core_type *)\n; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))\n(* Entry parse_expression *)\n; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))\n(* Entry parse_pattern *)\n; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))\n|]\nlet yytables =\n  { Parsing.actions=yyact;\n    Parsing.transl_const=yytransl_const;\n    Parsing.transl_block=yytransl_block;\n    Parsing.lhs=yylhs;\n    Parsing.len=yylen;\n    Parsing.defred=yydefred;\n    Parsing.dgoto=yydgoto;\n    Parsing.sindex=yysindex;\n    Parsing.rindex=yyrindex;\n    Parsing.gindex=yygindex;\n    Parsing.tablesize=yytablesize;\n    Parsing.table=yytable;\n    Parsing.check=yycheck;\n    Parsing.error_function=parse_error;\n    Parsing.names_const=yynames_const;\n    Parsing.names_block=yynames_block }\nlet implementation (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =\n   (Parsing.yyparse yytables 1 lexfun lexbuf : Parsetree.structure)\nlet interface (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =\n   (Parsing.yyparse yytables 2 lexfun lexbuf : Parsetree.signature)\nlet parse_core_type (lexfun : Lexing.lexbuf -> token) 
(lexbuf : Lexing.lexbuf) =\n   (Parsing.yyparse yytables 3 lexfun lexbuf : Parsetree.core_type)\nlet parse_expression (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =\n   (Parsing.yyparse yytables 4 lexfun lexbuf : Parsetree.expression)\nlet parse_pattern (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =\n   (Parsing.yyparse yytables 5 lexfun lexbuf : Parsetree.pattern)\n;;\n"
  },
  {
    "path": "analysis/vendor/ml/parser.mli",
    "content": "type token =\n  | AMPERAMPER\n  | AMPERSAND\n  | AND\n  | AS\n  | ASSERT\n  | BACKQUOTE\n  | BANG\n  | BAR\n  | BARBAR\n  | BARRBRACKET\n  | BEGIN\n  | CHAR of char\n  | CLASS\n  | COLON\n  | COLONCOLON\n  | COLONEQUAL\n  | COLONGREATER\n  | COMMA\n  | CONSTRAINT\n  | DO\n  | DONE\n  | DOT\n  | DOTDOT\n  | DOWNTO\n  | ELSE\n  | END\n  | EOF\n  | EQUAL\n  | EXCEPTION\n  | EXTERNAL\n  | FALSE\n  | FLOAT of (string * char option)\n  | FOR\n  | FUN\n  | FUNCTION\n  | FUNCTOR\n  | GREATER\n  | GREATERRBRACE\n  | GREATERRBRACKET\n  | IF\n  | IN\n  | INCLUDE\n  | INFIXOP0 of string\n  | INFIXOP1 of string\n  | INFIXOP2 of string\n  | INFIXOP3 of string\n  | INFIXOP4 of string\n  | DOTOP of string\n  | INHERIT\n  | INITIALIZER\n  | INT of (string * char option)\n  | LABEL of string\n  | LAZY\n  | LBRACE\n  | LBRACELESS\n  | LBRACKET\n  | LBRACKETBAR\n  | LBRACKETLESS\n  | LBRACKETGREATER\n  | LBRACKETPERCENT\n  | LBRACKETPERCENTPERCENT\n  | LESS\n  | LESSMINUS\n  | LET\n  | LIDENT of string\n  | LPAREN\n  | LBRACKETAT\n  | LBRACKETATAT\n  | LBRACKETATATAT\n  | MATCH\n  | METHOD\n  | MINUS\n  | MINUSDOT\n  | MINUSGREATER\n  | MODULE\n  | MUTABLE\n  | NEW\n  | NONREC\n  | OBJECT\n  | OF\n  | OPEN\n  | OPTLABEL of string\n  | OR\n  | PERCENT\n  | PLUS\n  | PLUSDOT\n  | PLUSEQ\n  | PREFIXOP of string\n  | PRIVATE\n  | QUESTION\n  | QUOTE\n  | RBRACE\n  | RBRACKET\n  | REC\n  | RPAREN\n  | SEMI\n  | SEMISEMI\n  | HASH\n  | HASHOP of string\n  | SIG\n  | STAR\n  | STRING of (string * string option)\n  | STRUCT\n  | THEN\n  | TILDE\n  | TO\n  | TRUE\n  | TRY\n  | TYPE\n  | UIDENT of string\n  | UNDERSCORE\n  | VAL\n  | VIRTUAL\n  | WHEN\n  | WHILE\n  | WITH\n  | COMMENT of (string * Location.t)\n  | DOCSTRING of Docstrings.docstring\n  | EOL\n\nval implementation :\n  (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.structure\nval interface : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.signature\nval parse_core_type :\n  (Lexing.lexbuf -> token) 
-> Lexing.lexbuf -> Parsetree.core_type\nval parse_expression :\n  (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.expression\nval parse_pattern :\n  (Lexing.lexbuf -> token) -> Lexing.lexbuf -> Parsetree.pattern\n"
  },
  {
    "path": "analysis/vendor/ml/parser.mly",
    "content": "/**************************************************************************/\n/*                                                                        */\n/*                                 OCaml                                  */\n/*                                                                        */\n/*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           */\n/*                                                                        */\n/*   Copyright 1996 Institut National de Recherche en Informatique et     */\n/*     en Automatique.                                                    */\n/*                                                                        */\n/*   All rights reserved.  This file is distributed under the terms of    */\n/*   the GNU Lesser General Public License version 2.1, with the          */\n/*   special exception on linking described in the file LICENSE.          */\n/*                                                                        */\n/**************************************************************************/\n\n/* The parser definition */\n\n%{\nopen Location\nopen Asttypes\nopen Longident\nopen Parsetree\nopen Ast_helper\nopen Docstrings\n\nlet mktyp d = Typ.mk ~loc:(symbol_rloc()) d\nlet mkpat d = Pat.mk ~loc:(symbol_rloc()) d\nlet mkexp d = Exp.mk ~loc:(symbol_rloc()) d\nlet mkmty ?attrs d = Mty.mk ~loc:(symbol_rloc()) ?attrs d\nlet mksig d = Sig.mk ~loc:(symbol_rloc()) d\nlet mkmod ?attrs d = Mod.mk ~loc:(symbol_rloc()) ?attrs d\nlet mkstr d = Str.mk ~loc:(symbol_rloc()) d\nlet mkcty ?attrs d = Cty.mk ~loc:(symbol_rloc()) ?attrs d\nlet mkctf ?attrs ?docs d =\n  Ctf.mk ~loc:(symbol_rloc()) ?attrs ?docs d\nlet mkcf ?attrs ?docs d =\n  Cf.mk ~loc:(symbol_rloc()) ?attrs ?docs d\n\nlet mkrhs rhs pos = mkloc rhs (rhs_loc pos)\n\nlet reloc_pat x = { x with ppat_loc = symbol_rloc () };;\nlet reloc_exp x = { x with pexp_loc = symbol_rloc () };;\n\nlet mkoperator name pos =\n  let loc = 
rhs_loc pos in\n  Exp.mk ~loc (Pexp_ident(mkloc (Lident name) loc))\n\nlet mkpatvar name pos =\n  Pat.mk ~loc:(rhs_loc pos) (Ppat_var (mkrhs name pos))\n\n(*\n  Ghost expressions and patterns:\n  expressions and patterns that do not appear explicitly in the\n  source file they have the loc_ghost flag set to true.\n  Then the profiler will not try to instrument them and the\n  -annot option will not try to display their type.\n\n  Every grammar rule that generates an element with a location must\n  make at most one non-ghost element, the topmost one.\n\n  How to tell whether your location must be ghost:\n  A location corresponds to a range of characters in the source file.\n  If the location contains a piece of code that is syntactically\n  valid (according to the documentation), and corresponds to the\n  AST node, then the location must be real; in all other cases,\n  it must be ghost.\n*)\nlet ghexp d = Exp.mk ~loc:(symbol_gloc ()) d\nlet ghpat d = Pat.mk ~loc:(symbol_gloc ()) d\nlet ghtyp d = Typ.mk ~loc:(symbol_gloc ()) d\nlet ghloc d = { txt = d; loc = symbol_gloc () }\nlet ghstr d = Str.mk ~loc:(symbol_gloc()) d\nlet ghsig d = Sig.mk ~loc:(symbol_gloc()) d\n\nlet mkinfix arg1 name arg2 =\n  mkexp(Pexp_apply(mkoperator name 2, [Nolabel, arg1; Nolabel, arg2]))\n\nlet neg_string f =\n  if String.length f > 0 && f.[0] = '-'\n  then String.sub f 1 (String.length f - 1)\n  else \"-\" ^ f\n\nlet mkuminus name arg =\n  match name, arg.pexp_desc with\n  | \"-\", Pexp_constant(Pconst_integer (n,m)) ->\n      mkexp(Pexp_constant(Pconst_integer(neg_string n,m)))\n  | (\"-\" | \"-.\"), Pexp_constant(Pconst_float (f, m)) ->\n      mkexp(Pexp_constant(Pconst_float(neg_string f, m)))\n  | _ ->\n      mkexp(Pexp_apply(mkoperator (\"~\" ^ name) 1, [Nolabel, arg]))\n\nlet mkuplus name arg =\n  let desc = arg.pexp_desc in\n  match name, desc with\n  | \"+\", Pexp_constant(Pconst_integer _)\n  | (\"+\" | \"+.\"), Pexp_constant(Pconst_float _) -> mkexp desc\n  | _ ->\n      
mkexp(Pexp_apply(mkoperator (\"~\" ^ name) 1, [Nolabel, arg]))\n\nlet mkexp_cons consloc args loc =\n  Exp.mk ~loc (Pexp_construct(mkloc (Lident \"::\") consloc, Some args))\n\nlet mkpat_cons consloc args loc =\n  Pat.mk ~loc (Ppat_construct(mkloc (Lident \"::\") consloc, Some args))\n\nlet rec mktailexp nilloc = function\n    [] ->\n      let loc = { nilloc with loc_ghost = true } in\n      let nil = { txt = Lident \"[]\"; loc = loc } in\n      Exp.mk ~loc (Pexp_construct (nil, None))\n  | e1 :: el ->\n      let exp_el = mktailexp nilloc el in\n      let loc = {loc_start = e1.pexp_loc.loc_start;\n               loc_end = exp_el.pexp_loc.loc_end;\n               loc_ghost = true}\n      in\n      let arg = Exp.mk ~loc (Pexp_tuple [e1; exp_el]) in\n      mkexp_cons {loc with loc_ghost = true} arg loc\n\nlet rec mktailpat nilloc = function\n    [] ->\n      let loc = { nilloc with loc_ghost = true } in\n      let nil = { txt = Lident \"[]\"; loc = loc } in\n      Pat.mk ~loc (Ppat_construct (nil, None))\n  | p1 :: pl ->\n      let pat_pl = mktailpat nilloc pl in\n      let loc = {loc_start = p1.ppat_loc.loc_start;\n               loc_end = pat_pl.ppat_loc.loc_end;\n               loc_ghost = true}\n      in\n      let arg = Pat.mk ~loc (Ppat_tuple [p1; pat_pl]) in\n      mkpat_cons {loc with loc_ghost = true} arg loc\n\nlet mkstrexp e attrs =\n  { pstr_desc = Pstr_eval (e, attrs); pstr_loc = e.pexp_loc }\n\nlet mkexp_constraint e (t1, t2) =\n  match t1, t2 with\n  | Some t, None -> ghexp(Pexp_constraint(e, t))\n  | _, Some t -> ghexp(Pexp_coerce(e, t1, t))\n  | None, None -> assert false\n\nlet mkexp_opt_constraint e = function\n  | None -> e\n  | Some constraint_ -> mkexp_constraint e constraint_\n\nlet mkpat_opt_constraint p = function\n  | None -> p\n  | Some typ -> mkpat (Ppat_constraint(p, typ))\n\nlet array_function str name =\n  ghloc (Ldot(Lident str, (if !Clflags.fast then \"unsafe_\" ^ name else name)))\n\nlet syntax_error () =\n  raise 
Syntaxerr.Escape_error\n\nlet unclosed opening_name opening_num closing_name closing_num =\n  raise(Syntaxerr.Error(Syntaxerr.Unclosed(rhs_loc opening_num, opening_name,\n                                           rhs_loc closing_num, closing_name)))\n\nlet expecting pos nonterm =\n    raise Syntaxerr.(Error(Expecting(rhs_loc pos, nonterm)))\n\nlet not_expecting pos nonterm =\n    raise Syntaxerr.(Error(Not_expecting(rhs_loc pos, nonterm)))\n\n\nlet lapply p1 p2 =\n  if !Clflags.applicative_functors\n  then Lapply(p1, p2)\n  else raise (Syntaxerr.Error(Syntaxerr.Applicative_path (symbol_rloc())))\n\nlet exp_of_label lbl pos =\n  mkexp (Pexp_ident(mkrhs (Lident(Longident.last lbl)) pos))\n\nlet pat_of_label lbl pos =\n  mkpat (Ppat_var (mkrhs (Longident.last lbl) pos))\n\nlet mk_newtypes newtypes exp =\n  List.fold_right (fun newtype exp -> mkexp (Pexp_newtype (newtype, exp)))\n    newtypes exp\n\nlet wrap_type_annotation newtypes core_type body =\n  let exp = mkexp(Pexp_constraint(body,core_type)) in\n  let exp = mk_newtypes newtypes exp in\n  (exp, ghtyp(Ptyp_poly(newtypes, Typ.varify_constructors newtypes core_type)))\n\nlet wrap_exp_attrs body (ext, attrs) =\n  (* todo: keep exact location for the entire attribute *)\n  let body = {body with pexp_attributes = attrs @ body.pexp_attributes} in\n  match ext with\n  | None -> body\n  | Some id -> ghexp(Pexp_extension (id, PStr [mkstrexp body []]))\n\nlet mkexp_attrs d attrs =\n  wrap_exp_attrs (mkexp d) attrs\n\nlet wrap_typ_attrs typ (ext, attrs) =\n  (* todo: keep exact location for the entire attribute *)\n  let typ = {typ with ptyp_attributes = attrs @ typ.ptyp_attributes} in\n  match ext with\n  | None -> typ\n  | Some id -> ghtyp(Ptyp_extension (id, PTyp typ))\n\nlet mktyp_attrs d attrs =\n  wrap_typ_attrs (mktyp d) attrs\n\nlet wrap_pat_attrs pat (ext, attrs) =\n  (* todo: keep exact location for the entire attribute *)\n  let pat = {pat with ppat_attributes = attrs @ pat.ppat_attributes} in\n  match ext 
with\n  | None -> pat\n  | Some id -> ghpat(Ppat_extension (id, PPat (pat, None)))\n\nlet mkpat_attrs d attrs =\n  wrap_pat_attrs (mkpat d) attrs\n\nlet wrap_class_type_attrs body attrs =\n  {body with pcty_attributes = attrs @ body.pcty_attributes}\nlet wrap_mod_attrs body attrs =\n  {body with pmod_attributes = attrs @ body.pmod_attributes}\nlet wrap_mty_attrs body attrs =\n  {body with pmty_attributes = attrs @ body.pmty_attributes}\n\nlet wrap_str_ext body ext =\n  match ext with\n  | None -> body\n  | Some id -> ghstr(Pstr_extension ((id, PStr [body]), []))\n\nlet mkstr_ext d ext =\n  wrap_str_ext (mkstr d) ext\n\nlet wrap_sig_ext body ext =\n  match ext with\n  | None -> body\n  | Some id -> ghsig(Psig_extension ((id, PSig [body]), []))\n\nlet mksig_ext d ext =\n  wrap_sig_ext (mksig d) ext\n\nlet text_str pos = Str.text (rhs_text pos)\nlet text_sig pos = Sig.text (rhs_text pos)\nlet text_cstr pos = Cf.text (rhs_text pos)\nlet text_csig pos = Ctf.text (rhs_text pos)\n\n\nlet extra_text text pos items =\n  let pre_extras = rhs_pre_extra_text pos in\n  let post_extras = rhs_post_extra_text pos in\n    text pre_extras @ items @ text post_extras\n\nlet extra_str pos items = extra_text Str.text pos items\nlet extra_sig pos items = extra_text Sig.text pos items\nlet extra_cstr pos items = extra_text Cf.text pos items\nlet extra_csig pos items = extra_text Ctf.text pos items\n\nlet extra_rhs_core_type ct ~pos =\n  let docs = rhs_info pos in\n  { ct with ptyp_attributes = add_info_attrs docs ct.ptyp_attributes }\n\ntype let_binding =\n  { lb_pattern: pattern;\n    lb_expression: expression;\n    lb_attributes: attributes;\n    lb_docs: docs Lazy.t;\n    lb_text: text Lazy.t;\n    lb_loc: Location.t; }\n\ntype [@warning \"-69\"] let_bindings =\n  { lbs_bindings: let_binding list;\n    lbs_rec: rec_flag;\n    lbs_extension: string Asttypes.loc option;\n    lbs_loc: Location.t }\n\nlet mklb first (p, e) attrs =\n  { lb_pattern = p;\n    lb_expression = e;\n    
lb_attributes = attrs;\n    lb_docs = symbol_docs_lazy ();\n    lb_text = if first then empty_text_lazy\n              else symbol_text_lazy ();\n    lb_loc = symbol_rloc (); }\n\nlet mklbs ext rf lb =\n  { lbs_bindings = [lb];\n    lbs_rec = rf;\n    lbs_extension = ext ;\n    lbs_loc = symbol_rloc (); }\n\nlet addlb lbs lb =\n  { lbs with lbs_bindings = lb :: lbs.lbs_bindings }\n\nlet val_of_let_bindings lbs =\n  let bindings =\n    List.map\n      (fun lb ->\n         Vb.mk ~loc:lb.lb_loc ~attrs:lb.lb_attributes\n           ~docs:(Lazy.force lb.lb_docs)\n           ~text:(Lazy.force lb.lb_text)\n           lb.lb_pattern lb.lb_expression)\n      lbs.lbs_bindings\n  in\n  let str = mkstr(Pstr_value(lbs.lbs_rec, List.rev bindings)) in\n  match lbs.lbs_extension with\n  | None -> str\n  | Some id -> ghstr (Pstr_extension((id, PStr [str]), []))\n\nlet expr_of_let_bindings lbs body =\n  let bindings =\n    List.map\n      (fun lb ->\n         Vb.mk ~loc:lb.lb_loc ~attrs:lb.lb_attributes\n           lb.lb_pattern lb.lb_expression)\n      lbs.lbs_bindings\n  in\n    mkexp_attrs (Pexp_let(lbs.lbs_rec, List.rev bindings, body))\n      (lbs.lbs_extension, [])\n\n\n\n(* Alternatively, we could keep the generic module type in the Parsetree\n   and extract the package type during type-checking. In that case,\n   the assertions below should be turned into explicit checks. 
*)\nlet package_type_of_module_type pmty =\n  let err loc s =\n    raise (Syntaxerr.Error (Syntaxerr.Invalid_package_type (loc, s)))\n  in\n  let map_cstr = function\n    | Pwith_type (lid, ptyp) ->\n        let loc = ptyp.ptype_loc in\n        if ptyp.ptype_params <> [] then\n          err loc \"parametrized types are not supported\";\n        if ptyp.ptype_cstrs <> [] then\n          err loc \"constrained types are not supported\";\n        if ptyp.ptype_private <> Public then\n          err loc \"private types are not supported\";\n\n        (* restrictions below are checked by the 'with_constraint' rule *)\n        assert (ptyp.ptype_kind = Ptype_abstract);\n        assert (ptyp.ptype_attributes = []);\n        let ty =\n          match ptyp.ptype_manifest with\n          | Some ty -> ty\n          | None -> assert false\n        in\n        (lid, ty)\n    | _ ->\n        err pmty.pmty_loc \"only 'with type t =' constraints are supported\"\n  in\n  match pmty with\n  | {pmty_desc = Pmty_ident lid} -> (lid, [])\n  | {pmty_desc = Pmty_with({pmty_desc = Pmty_ident lid}, cstrs)} ->\n      (lid, List.map map_cstr cstrs)\n  | _ ->\n      err pmty.pmty_loc\n        \"only module type identifier and 'with type' constraints are supported\"\n\n\n%}\n\n/* Tokens */\n\n%token AMPERAMPER\n%token AMPERSAND\n%token AND\n%token AS\n%token ASSERT\n%token BACKQUOTE\n%token BANG\n%token BAR\n%token BARBAR\n%token BARRBRACKET\n%token BEGIN\n%token <char> CHAR\n%token CLASS\n%token COLON\n%token COLONCOLON\n%token COLONEQUAL\n%token COLONGREATER\n%token COMMA\n%token CONSTRAINT\n%token DO\n%token DONE\n%token DOT\n%token DOTDOT\n%token DOWNTO\n%token ELSE\n%token END\n%token EOF\n%token EQUAL\n%token EXCEPTION\n%token EXTERNAL\n%token FALSE\n%token <string * char option> FLOAT\n%token FOR\n%token FUN\n%token FUNCTION\n%token FUNCTOR\n%token GREATER\n%token GREATERRBRACE\n%token GREATERRBRACKET\n%token IF\n%token IN\n%token INCLUDE\n%token <string> INFIXOP0\n%token <string> 
INFIXOP1\n%token <string> INFIXOP2\n%token <string> INFIXOP3\n%token <string> INFIXOP4\n%token <string> DOTOP\n%token INHERIT\n%token INITIALIZER\n%token <string * char option> INT\n%token <string> LABEL\n%token LAZY\n%token LBRACE\n%token LBRACELESS\n%token LBRACKET\n%token LBRACKETBAR\n%token LBRACKETLESS\n%token LBRACKETGREATER\n%token LBRACKETPERCENT\n%token LBRACKETPERCENTPERCENT\n%token LESS\n%token LESSMINUS\n%token LET\n%token <string> LIDENT\n%token LPAREN\n%token LBRACKETAT\n%token LBRACKETATAT\n%token LBRACKETATATAT\n%token MATCH\n%token METHOD\n%token MINUS\n%token MINUSDOT\n%token MINUSGREATER\n%token MODULE\n%token MUTABLE\n%token NEW\n%token NONREC\n%token OBJECT\n%token OF\n%token OPEN\n%token <string> OPTLABEL\n%token OR\n/* %token PARSER */\n%token PERCENT\n%token PLUS\n%token PLUSDOT\n%token PLUSEQ\n%token <string> PREFIXOP\n%token PRIVATE\n%token QUESTION\n%token QUOTE\n%token RBRACE\n%token RBRACKET\n%token REC\n%token RPAREN\n%token SEMI\n%token SEMISEMI\n%token HASH\n%token <string> HASHOP\n%token SIG\n%token STAR\n%token <string * string option> STRING\n%token STRUCT\n%token THEN\n%token TILDE\n%token TO\n%token TRUE\n%token TRY\n%token TYPE\n%token <string> UIDENT\n%token UNDERSCORE\n%token VAL\n%token VIRTUAL\n%token WHEN\n%token WHILE\n%token WITH\n%token <string * Location.t> COMMENT\n%token <Docstrings.docstring> DOCSTRING\n\n%token EOL\n\n/* Precedences and associativities.\n\nTokens and rules have precedences.  A reduce/reduce conflict is resolved\nin favor of the first rule (in source file order).  
A shift/reduce conflict\nis resolved by comparing the precedence and associativity of the token to\nbe shifted with those of the rule to be reduced.\n\nBy default, a rule has the precedence of its rightmost terminal (if any).\n\nWhen there is a shift/reduce conflict between a rule and a token that\nhave the same precedence, it is resolved using the associativity:\nif the token is left-associative, the parser will reduce; if\nright-associative, the parser will shift; if non-associative,\nthe parser will declare a syntax error.\n\nWe will only use associativities with operators of the kind  x * x -> x\nfor example, in the rules of the form    expr: expr BINOP expr\nin all other cases, we define two precedences if needed to resolve\nconflicts.\n\nThe precedences must be listed from low to high.\n*/\n\n%nonassoc IN\n%nonassoc below_SEMI\n%nonassoc SEMI                          /* below EQUAL ({lbl=...; lbl=...}) */\n%nonassoc LET                           /* above SEMI ( ...; let ... in ...) */\n%nonassoc below_WITH\n%nonassoc FUNCTION WITH                 /* below BAR  (match ... with ...) */\n%nonassoc AND             /* above WITH (module rec A: SIG with ... and ...) */\n%nonassoc THEN                          /* below ELSE (if ... then ...) */\n%nonassoc ELSE                          /* (if ... then ... else ...) 
*/\n%nonassoc LESSMINUS                     /* below COLONEQUAL (lbl <- x := e) */\n%right    COLONEQUAL                    /* expr (e := e := e) */\n%nonassoc AS\n%left     BAR                           /* pattern (p|p|p) */\n%nonassoc below_COMMA\n%left     COMMA                         /* expr/expr_comma_list (e,e,e) */\n%right    MINUSGREATER                  /* core_type2 (t -> t -> t) */\n%right    OR BARBAR                     /* expr (e || e || e) */\n%right    AMPERSAND AMPERAMPER          /* expr (e && e && e) */\n%nonassoc below_EQUAL\n%left     INFIXOP0 EQUAL LESS GREATER   /* expr (e OP e OP e) */\n%right    INFIXOP1                      /* expr (e OP e OP e) */\n%nonassoc below_LBRACKETAT\n%nonassoc LBRACKETAT\n%nonassoc LBRACKETATAT\n%right    COLONCOLON                    /* expr (e :: e :: e) */\n%left     INFIXOP2 PLUS PLUSDOT MINUS MINUSDOT PLUSEQ /* expr (e OP e OP e) */\n%left     PERCENT INFIXOP3 STAR                 /* expr (e OP e OP e) */\n%right    INFIXOP4                      /* expr (e OP e OP e) */\n%nonassoc prec_unary_minus prec_unary_plus /* unary - */\n%nonassoc prec_constant_constructor     /* cf. simple_expr (C versus C x) */\n%nonassoc prec_constr_appl              /* above AS BAR COLONCOLON COMMA */\n%nonassoc below_HASH\n%nonassoc HASH                         /* simple_expr/toplevel_directive */\n%left     HASHOP\n%nonassoc below_DOT\n%nonassoc DOT DOTOP\n/* Finally, the first tokens of simple_expr are above everything else. 
*/\n%nonassoc BACKQUOTE BANG BEGIN CHAR FALSE FLOAT INT\n          LBRACE LBRACELESS LBRACKET LBRACKETBAR LIDENT LPAREN\n          NEW PREFIXOP STRING TRUE UIDENT\n          LBRACKETPERCENT LBRACKETPERCENTPERCENT\n\n\n/* Entry points */\n\n%start implementation                   /* for implementation files */\n%type <Parsetree.structure> implementation\n%start interface                        /* for interface files */\n%type <Parsetree.signature> interface\n%start parse_core_type\n%type <Parsetree.core_type> parse_core_type\n%start parse_expression\n%type <Parsetree.expression> parse_expression\n%start parse_pattern\n%type <Parsetree.pattern> parse_pattern\n%%\n\n/* Entry points */\n\nimplementation:\n    structure EOF                        { extra_str 1 $1 }\n;\ninterface:\n    signature EOF                        { extra_sig 1 $1 }\n;\n\n\nparse_core_type:\n    core_type EOF { $1 }\n;\nparse_expression:\n    seq_expr EOF { $1 }\n;\nparse_pattern:\n    pattern EOF { $1 }\n;\n\n/* Module expressions */\n\nfunctor_arg:\n    LPAREN RPAREN\n      { mkrhs \"*\" 2, None }\n  | LPAREN functor_arg_name COLON module_type RPAREN\n      { mkrhs $2 2, Some $4 }\n;\n\nfunctor_arg_name:\n    UIDENT     { $1 }\n  | UNDERSCORE { \"_\" }\n;\n\nfunctor_args:\n    functor_args functor_arg\n      { $2 :: $1 }\n  | functor_arg\n      { [ $1 ] }\n;\n\nmodule_expr:\n    mod_longident\n      { mkmod(Pmod_ident (mkrhs $1 1)) }\n  | STRUCT attributes structure END\n      { mkmod ~attrs:$2 (Pmod_structure(extra_str 3 $3)) }\n  | STRUCT attributes structure error\n      { unclosed \"struct\" 1 \"end\" 4 }\n  | FUNCTOR attributes functor_args MINUSGREATER module_expr\n      { let modexp =\n          List.fold_left\n            (fun acc (n, t) -> mkmod(Pmod_functor(n, t, acc)))\n            $5 $3\n        in wrap_mod_attrs modexp $2 }\n  | module_expr paren_module_expr\n      { mkmod(Pmod_apply($1, $2)) }\n  | module_expr LPAREN RPAREN\n      { mkmod(Pmod_apply($1, mkmod (Pmod_structure []))) 
}\n  | paren_module_expr\n      { $1 }\n  | module_expr attribute\n      { Mod.attr $1 $2 }\n  | extension\n      { mkmod(Pmod_extension $1) }\n;\n\nparen_module_expr:\n    LPAREN module_expr COLON module_type RPAREN\n      { mkmod(Pmod_constraint($2, $4)) }\n  | LPAREN module_expr COLON module_type error\n      { unclosed \"(\" 1 \")\" 5 }\n  | LPAREN module_expr RPAREN\n      { $2 }\n  | LPAREN module_expr error\n      { unclosed \"(\" 1 \")\" 3 }\n  | LPAREN VAL attributes expr RPAREN\n      { mkmod ~attrs:$3 (Pmod_unpack $4)}\n  | LPAREN VAL attributes expr COLON package_type RPAREN\n      { mkmod ~attrs:$3\n          (Pmod_unpack(\n               ghexp(Pexp_constraint($4, ghtyp(Ptyp_package $6))))) }\n  | LPAREN VAL attributes expr COLON package_type COLONGREATER package_type\n    RPAREN\n      { mkmod ~attrs:$3\n          (Pmod_unpack(\n               ghexp(Pexp_coerce($4, Some(ghtyp(Ptyp_package $6)),\n                                 ghtyp(Ptyp_package $8))))) }\n  | LPAREN VAL attributes expr COLONGREATER package_type RPAREN\n      { mkmod ~attrs:$3\n          (Pmod_unpack(\n               ghexp(Pexp_coerce($4, None, ghtyp(Ptyp_package $6))))) }\n  | LPAREN VAL attributes expr COLON error\n      { unclosed \"(\" 1 \")\" 6 }\n  | LPAREN VAL attributes expr COLONGREATER error\n      { unclosed \"(\" 1 \")\" 6 }\n  | LPAREN VAL attributes expr error\n      { unclosed \"(\" 1 \")\" 5 }\n;\n\nstructure:\n    seq_expr post_item_attributes structure_tail\n      { mark_rhs_docs 1 2;\n        (text_str 1) @ mkstrexp $1 $2 :: $3 }\n  | structure_tail { $1 }\n;\nstructure_tail:\n    /* empty */          { [] }\n  | SEMISEMI structure   { (text_str 1) @ $2 }\n  | structure_item structure_tail { (text_str 1) @ $1 :: $2 }\n;\nstructure_item:\n    let_bindings\n      { val_of_let_bindings $1 }\n  | primitive_declaration\n      { let (body, ext) = $1 in mkstr_ext (Pstr_primitive body) ext }\n  | value_description\n      { let (body, ext) = $1 in mkstr_ext (Pstr_primitive 
body) ext }\n  | type_declarations\n      { let (nr, l, ext ) = $1 in mkstr_ext (Pstr_type (nr, List.rev l)) ext }\n  | str_type_extension\n      { let (l, ext) = $1 in mkstr_ext (Pstr_typext l) ext }\n  | str_exception_declaration\n      { let (l, ext) = $1 in mkstr_ext (Pstr_exception l) ext }\n  | module_binding\n      { let (body, ext) = $1 in mkstr_ext (Pstr_module body) ext }\n  | rec_module_bindings\n      { let (l, ext) = $1 in mkstr_ext (Pstr_recmodule(List.rev l)) ext }\n  | module_type_declaration\n      { let (body, ext) = $1 in mkstr_ext (Pstr_modtype body) ext }\n  | open_statement\n      { let (body, ext) = $1 in mkstr_ext (Pstr_open body) ext }\n  | class_type_declarations\n      { let (l, ext) = $1 in mkstr_ext (Pstr_class_type (List.rev l)) ext }\n  | str_include_statement\n      { let (body, ext) = $1 in mkstr_ext (Pstr_include body) ext }\n  | item_extension post_item_attributes\n      { mkstr(Pstr_extension ($1, (add_docs_attrs (symbol_docs ()) $2))) }\n  | floating_attribute\n      { mark_symbol_docs ();\n        mkstr(Pstr_attribute $1) }\n;\nstr_include_statement:\n    INCLUDE ext_attributes module_expr post_item_attributes\n      { let (ext, attrs) = $2 in\n        Incl.mk $3 ~attrs:(attrs@$4)\n            ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext }\n;\nmodule_binding_body:\n    EQUAL module_expr\n      { $2 }\n  | COLON module_type EQUAL module_expr\n      { mkmod(Pmod_constraint($4, $2)) }\n  | functor_arg module_binding_body\n      { mkmod(Pmod_functor(fst $1, snd $1, $2)) }\n;\nmodule_binding:\n    MODULE ext_attributes UIDENT module_binding_body post_item_attributes\n      { let (ext, attrs) = $2 in\n        Mb.mk (mkrhs $3 3) $4 ~attrs:(attrs@$5)\n            ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n      , ext }\n;\nrec_module_bindings:\n    rec_module_binding                     { let (b, ext) = $1 in ([b], ext) }\n  | rec_module_bindings and_module_binding\n      { let (l, ext) = $1 in ($2 :: l, ext) 
}\n;\nrec_module_binding:\n    MODULE ext_attributes REC UIDENT module_binding_body post_item_attributes\n      { let (ext, attrs) = $2 in\n        Mb.mk (mkrhs $4 4) $5 ~attrs:(attrs@$6)\n            ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n      , ext }\n;\nand_module_binding:\n    AND attributes UIDENT module_binding_body post_item_attributes\n      { Mb.mk (mkrhs $3 3) $4 ~attrs:($2@$5) ~loc:(symbol_rloc ())\n               ~text:(symbol_text ()) ~docs:(symbol_docs ()) }\n;\n\n/* Module types */\n\nmodule_type:\n    mty_longident\n      { mkmty(Pmty_ident (mkrhs $1 1)) }\n  | SIG attributes signature END\n      { mkmty ~attrs:$2 (Pmty_signature (extra_sig 3 $3)) }\n  | SIG attributes signature error\n      { unclosed \"sig\" 1 \"end\" 4 }\n  | FUNCTOR attributes functor_args MINUSGREATER module_type\n      %prec below_WITH\n      { let mty =\n          List.fold_left\n            (fun acc (n, t) -> mkmty(Pmty_functor(n, t, acc)))\n            $5 $3\n        in wrap_mty_attrs mty $2 }\n  | module_type MINUSGREATER module_type\n      %prec below_WITH\n      { mkmty(Pmty_functor(mknoloc \"_\", Some $1, $3)) }\n  | module_type WITH with_constraints\n      { mkmty(Pmty_with($1, List.rev $3)) }\n  | MODULE TYPE OF attributes module_expr %prec below_LBRACKETAT\n      { mkmty ~attrs:$4 (Pmty_typeof $5) }\n/*  | LPAREN MODULE mod_longident RPAREN\n      { mkmty (Pmty_alias (mkrhs $3 3)) } */\n  | LPAREN module_type RPAREN\n      { $2 }\n  | LPAREN module_type error\n      { unclosed \"(\" 1 \")\" 3 }\n  | extension\n      { mkmty(Pmty_extension $1) }\n  | module_type attribute\n      { Mty.attr $1 $2 }\n;\nsignature:\n    /* empty */          { [] }\n  | SEMISEMI signature   { (text_sig 1) @ $2 }\n  | signature_item signature { (text_sig 1) @ $1 :: $2 }\n;\nsignature_item:\n    value_description\n      { let (body, ext) = $1 in mksig_ext (Psig_value body) ext }\n  | primitive_declaration\n      { let (body, ext) = $1 in mksig_ext (Psig_value body) ext}\n  | 
type_declarations\n      { let (nr, l, ext) = $1 in mksig_ext (Psig_type (nr, List.rev l)) ext }\n  | sig_type_extension\n      { let (l, ext) = $1 in mksig_ext (Psig_typext l) ext }\n  | sig_exception_declaration\n      { let (l, ext) = $1 in mksig_ext (Psig_exception l) ext }\n  | module_declaration\n      { let (body, ext) = $1 in mksig_ext (Psig_module body) ext }\n  | module_alias\n      { let (body, ext) = $1 in mksig_ext (Psig_module body) ext }\n  | rec_module_declarations\n      { let (l, ext) = $1 in mksig_ext (Psig_recmodule (List.rev l)) ext }\n  | module_type_declaration\n      { let (body, ext) = $1 in mksig_ext (Psig_modtype body) ext }\n  | open_statement\n      { let (body, ext) = $1 in mksig_ext (Psig_open body) ext }\n  | sig_include_statement\n      { let (body, ext) = $1 in mksig_ext (Psig_include body) ext }\n  | class_type_declarations\n      { let (l, ext) = $1 in mksig_ext (Psig_class_type (List.rev l)) ext }\n  | item_extension post_item_attributes\n      { mksig(Psig_extension ($1, (add_docs_attrs (symbol_docs ()) $2))) }\n  | floating_attribute\n      { mark_symbol_docs ();\n        mksig(Psig_attribute $1) }\n;\nopen_statement:\n  | OPEN override_flag ext_attributes mod_longident post_item_attributes\n      { let (ext, attrs) = $3 in\n        Opn.mk (mkrhs $4 4) ~override:$2 ~attrs:(attrs@$5)\n          ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext}\n;\nsig_include_statement:\n    INCLUDE ext_attributes module_type post_item_attributes %prec below_WITH\n      { let (ext, attrs) = $2 in\n        Incl.mk $3 ~attrs:(attrs@$4)\n            ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext}\n;\nmodule_declaration_body:\n    COLON module_type\n      { $2 }\n  | LPAREN UIDENT COLON module_type RPAREN module_declaration_body\n      { mkmty(Pmty_functor(mkrhs $2 2, Some $4, $6)) }\n  | LPAREN RPAREN module_declaration_body\n      { mkmty(Pmty_functor(mkrhs \"*\" 1, None, $3)) }\n;\nmodule_declaration:\n    MODULE ext_attributes 
UIDENT module_declaration_body post_item_attributes\n      { let (ext, attrs) = $2 in\n        Md.mk (mkrhs $3 3) $4 ~attrs:(attrs@$5)\n          ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext }\n;\nmodule_alias:\n    MODULE ext_attributes UIDENT EQUAL mod_longident post_item_attributes\n      { let (ext, attrs) = $2 in\n        Md.mk (mkrhs $3 3)\n          (Mty.alias ~loc:(rhs_loc 5) (mkrhs $5 5)) ~attrs:(attrs@$6)\n             ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext }\n;\nrec_module_declarations:\n    rec_module_declaration\n      { let (body, ext) = $1 in ([body], ext) }\n  | rec_module_declarations and_module_declaration\n      { let (l, ext) = $1 in ($2 :: l, ext) }\n;\nrec_module_declaration:\n    MODULE ext_attributes REC UIDENT COLON module_type post_item_attributes\n      { let (ext, attrs) = $2 in\n        Md.mk (mkrhs $4 4) $6 ~attrs:(attrs@$7)\n            ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext}\n;\nand_module_declaration:\n    AND attributes UIDENT COLON module_type post_item_attributes\n      { Md.mk (mkrhs $3 3) $5 ~attrs:($2@$6) ~loc:(symbol_rloc())\n              ~text:(symbol_text()) ~docs:(symbol_docs()) }\n;\nmodule_type_declaration_body:\n    /* empty */               { None }\n  | EQUAL module_type         { Some $2 }\n;\nmodule_type_declaration:\n    MODULE TYPE ext_attributes ident module_type_declaration_body\n    post_item_attributes\n      { let (ext, attrs) = $3 in\n        Mtd.mk (mkrhs $4 4) ?typ:$5 ~attrs:(attrs@$6)\n          ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext }\n;\n/* Class expressions */\n\nclass_type_parameters:\n    /*empty*/                                   { [] }\n  | LBRACKET type_parameter_list RBRACKET       { List.rev $2 }\n;\nclass_structure:\n  |  class_self_pattern class_fields\n       { Cstr.mk $1 (extra_cstr 2 (List.rev $2)) }\n;\nclass_self_pattern:\n    LPAREN pattern RPAREN\n      { reloc_pat $2 }\n  | LPAREN pattern COLON core_type RPAREN\n      
{ mkpat(Ppat_constraint($2, $4)) }\n  | /* empty */\n      { ghpat(Ppat_any) }\n;\nclass_fields:\n    /* empty */\n      { [] }\n  | class_fields class_field\n      { $2 :: (text_cstr 2) @ $1 }\n;\nclass_field:\n  | VAL value post_item_attributes\n      { let v, attrs = $2 in\n        mkcf (Pcf_val v) ~attrs:(attrs@$3) ~docs:(symbol_docs ()) }\n  | METHOD method_ post_item_attributes\n      { let meth, attrs = $2 in\n        mkcf (Pcf_method meth) ~attrs:(attrs@$3) ~docs:(symbol_docs ()) }\n  | CONSTRAINT attributes constrain_field post_item_attributes\n      { mkcf (Pcf_constraint $3) ~attrs:($2@$4) ~docs:(symbol_docs ()) }\n  | INITIALIZER attributes seq_expr post_item_attributes\n      { mkcf (Pcf_initializer $3) ~attrs:($2@$4) ~docs:(symbol_docs ()) }\n  | item_extension post_item_attributes\n      { mkcf (Pcf_extension $1) ~attrs:$2 ~docs:(symbol_docs ()) }\n  | floating_attribute\n      { mark_symbol_docs ();\n        mkcf (Pcf_attribute $1) }\n;\nvalue:\n/* TODO: factorize these rules (also with method): */\n    override_flag attributes MUTABLE VIRTUAL label COLON core_type\n      { if $1 = Override then syntax_error ();\n        (mkloc $5 (rhs_loc 5), Mutable, Cfk_virtual $7), $2 }\n  | override_flag attributes VIRTUAL mutable_flag label COLON core_type\n      { if $1 = Override then syntax_error ();\n        (mkrhs $5 5, $4, Cfk_virtual $7), $2 }\n  | override_flag attributes mutable_flag label EQUAL seq_expr\n      { (mkrhs $4 4, $3, Cfk_concrete ($1, $6)), $2 }\n  | override_flag attributes mutable_flag label type_constraint EQUAL seq_expr\n      {\n       let e = mkexp_constraint $7 $5 in\n       (mkrhs $4 4, $3, Cfk_concrete ($1, e)), $2\n      }\n;\nmethod_:\n/* TODO: factorize those rules... 
*/\n    override_flag attributes PRIVATE VIRTUAL label COLON poly_type\n      { if $1 = Override then syntax_error ();\n        (mkloc $5 (rhs_loc 5), Private, Cfk_virtual $7), $2 }\n  | override_flag attributes VIRTUAL private_flag label COLON poly_type\n      { if $1 = Override then syntax_error ();\n        (mkloc $5 (rhs_loc 5), $4, Cfk_virtual $7), $2 }\n  | override_flag attributes private_flag label strict_binding\n      { (mkloc $4 (rhs_loc 4), $3,\n        Cfk_concrete ($1, ghexp(Pexp_poly ($5, None)))), $2 }\n  | override_flag attributes private_flag label COLON poly_type EQUAL seq_expr\n      { (mkloc $4 (rhs_loc 4), $3,\n        Cfk_concrete ($1, ghexp(Pexp_poly($8, Some $6)))), $2 }\n  | override_flag attributes private_flag label COLON TYPE lident_list\n    DOT core_type EQUAL seq_expr\n      { let exp, poly = wrap_type_annotation $7 $9 $11 in\n        (mkloc $4 (rhs_loc 4), $3,\n        Cfk_concrete ($1, ghexp(Pexp_poly(exp, Some poly)))), $2 }\n;\n\n/* Class types */\n\nclass_signature:\n    LBRACKET core_type_comma_list RBRACKET clty_longident\n      { mkcty(Pcty_constr (mkloc $4 (rhs_loc 4), List.rev $2)) }\n  | clty_longident\n      { mkcty(Pcty_constr (mkrhs $1 1, [])) }\n  | OBJECT attributes class_sig_body END\n      { mkcty ~attrs:$2 (Pcty_signature $3) }\n  | OBJECT attributes class_sig_body error\n      { unclosed \"object\" 1 \"end\" 4 }\n  | class_signature attribute\n      { Cty.attr $1 $2 }\n  | extension\n      { mkcty(Pcty_extension $1) }\n  | LET OPEN override_flag attributes mod_longident IN class_signature\n      { wrap_class_type_attrs (mkcty(Pcty_open($3, mkrhs $5 5, $7))) $4 }\n;\nclass_sig_body:\n    class_self_type class_sig_fields\n      { Csig.mk $1 (extra_csig 2 (List.rev $2)) }\n;\nclass_self_type:\n    LPAREN core_type RPAREN\n      { $2 }\n  | /* empty */\n      { mktyp(Ptyp_any) }\n;\nclass_sig_fields:\n    /* empty */                                 { [] }\n| class_sig_fields class_sig_field     { $2 :: (text_csig 2) @ 
$1 }\n;\nclass_sig_field:\n    INHERIT attributes class_signature post_item_attributes\n      { mkctf (Pctf_inherit $3) ~attrs:($2@$4) ~docs:(symbol_docs ()) }\n  | VAL attributes value_type post_item_attributes\n      { mkctf (Pctf_val $3) ~attrs:($2@$4) ~docs:(symbol_docs ()) }\n  | METHOD attributes private_virtual_flags label COLON poly_type\n    post_item_attributes\n      {\n       let (p, v) = $3 in\n       mkctf (Pctf_method (mkrhs $4 4, p, v, $6)) ~attrs:($2@$7) ~docs:(symbol_docs ())\n      }\n  | CONSTRAINT attributes constrain_field post_item_attributes\n      { mkctf (Pctf_constraint $3) ~attrs:($2@$4) ~docs:(symbol_docs ()) }\n  | item_extension post_item_attributes\n      { mkctf (Pctf_extension $1) ~attrs:$2 ~docs:(symbol_docs ()) }\n  | floating_attribute\n      { mark_symbol_docs ();\n        mkctf(Pctf_attribute $1) }\n;\nvalue_type:\n    VIRTUAL mutable_flag label COLON core_type\n      { mkrhs $3 3, $2, Virtual, $5 }\n  | MUTABLE virtual_flag label COLON core_type\n      { mkrhs $3 3, Mutable, $2, $5 }\n  | label COLON core_type\n      { mkrhs $1 1, Immutable, Concrete, $3 }\n;\nconstrain:\n        core_type EQUAL core_type          { $1, $3, symbol_rloc() }\n;\nconstrain_field:\n        core_type EQUAL core_type          { $1, $3 }\n;\nclass_type_declarations:\n    class_type_declaration\n      { let (body, ext) = $1 in ([body],ext) }\n  | class_type_declarations and_class_type_declaration\n      { let (l, ext) = $1 in ($2 :: l, ext) }\n;\nclass_type_declaration:\n    CLASS TYPE ext_attributes virtual_flag class_type_parameters LIDENT EQUAL\n    class_signature post_item_attributes\n      { let (ext, attrs) = $3 in\n        Ci.mk (mkrhs $6 6) $8 ~virt:$4 ~params:$5 ~attrs:(attrs@$9)\n            ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n      , ext}\n;\nand_class_type_declaration:\n    AND attributes virtual_flag class_type_parameters LIDENT EQUAL\n    class_signature post_item_attributes\n      { Ci.mk (mkrhs $5 5) $7 ~virt:$3 
~params:$4\n         ~attrs:($2@$8) ~loc:(symbol_rloc ())\n         ~text:(symbol_text ()) ~docs:(symbol_docs ()) }\n;\n\n/* Core expressions */\n\nseq_expr:\n  | expr        %prec below_SEMI  { $1 }\n  | expr SEMI                     { $1 }\n  | expr SEMI seq_expr            { mkexp(Pexp_sequence($1, $3)) }\n  | expr SEMI PERCENT attr_id seq_expr\n      { let seq = mkexp(Pexp_sequence ($1, $5)) in\n        let payload = PStr [mkstrexp seq []] in\n        mkexp (Pexp_extension ($4, payload)) }\n;\nlabeled_simple_pattern:\n    QUESTION LPAREN label_let_pattern opt_default RPAREN\n      { (Optional (fst $3), $4, snd $3) }\n  | QUESTION label_var\n      { (Optional (fst $2), None, snd $2) }\n  | OPTLABEL LPAREN let_pattern opt_default RPAREN\n      { (Optional $1, $4, $3) }\n  | OPTLABEL pattern_var\n      { (Optional $1, None, $2) }\n  | TILDE LPAREN label_let_pattern RPAREN\n      { (Labelled (fst $3), None, snd $3) }\n  | TILDE label_var\n      { (Labelled (fst $2), None, snd $2) }\n  | LABEL simple_pattern\n      { (Labelled $1, None, $2) }\n  | simple_pattern\n      { (Nolabel, None, $1) }\n;\npattern_var:\n    LIDENT            { mkpat(Ppat_var (mkrhs $1 1)) }\n  | UNDERSCORE        { mkpat Ppat_any }\n;\nopt_default:\n    /* empty */                         { None }\n  | EQUAL seq_expr                      { Some $2 }\n;\nlabel_let_pattern:\n    label_var\n      { $1 }\n  | label_var COLON core_type\n      { let (lab, pat) = $1 in (lab, mkpat(Ppat_constraint(pat, $3))) }\n;\nlabel_var:\n    LIDENT    { ($1, mkpat(Ppat_var (mkrhs $1 1))) }\n;\nlet_pattern:\n    pattern\n      { $1 }\n  | pattern COLON core_type\n      { mkpat(Ppat_constraint($1, $3)) }\n;\nexpr:\n    simple_expr %prec below_HASH\n      { $1 }\n  | simple_expr simple_labeled_expr_list\n      { mkexp(Pexp_apply($1, List.rev $2)) }\n  | let_bindings IN seq_expr\n      { expr_of_let_bindings $1 $3 }\n  | LET MODULE ext_attributes UIDENT module_binding_body IN seq_expr\n      { mkexp_attrs 
(Pexp_letmodule(mkrhs $4 4, $5, $7)) $3 }\n  | LET EXCEPTION ext_attributes let_exception_declaration IN seq_expr\n      { mkexp_attrs (Pexp_letexception($4, $6)) $3 }\n  | LET OPEN override_flag ext_attributes mod_longident IN seq_expr\n      { mkexp_attrs (Pexp_open($3, mkrhs $5 5, $7)) $4 }\n  | FUNCTION ext_attributes opt_bar match_cases\n      { mkexp_attrs (Pexp_function(List.rev $4)) $2 }\n  | FUN ext_attributes labeled_simple_pattern fun_def\n      { let (l,o,p) = $3 in\n        mkexp_attrs (Pexp_fun(l, o, p, $4)) $2 }\n  | FUN ext_attributes LPAREN TYPE lident_list RPAREN fun_def\n      { mkexp_attrs (mk_newtypes $5 $7).pexp_desc $2 }\n  | MATCH ext_attributes seq_expr WITH opt_bar match_cases\n      { mkexp_attrs (Pexp_match($3, List.rev $6)) $2 }\n  | TRY ext_attributes seq_expr WITH opt_bar match_cases\n      { mkexp_attrs (Pexp_try($3, List.rev $6)) $2 }\n  | TRY ext_attributes seq_expr WITH error\n      { syntax_error() }\n  | expr_comma_list %prec below_COMMA\n      { mkexp(Pexp_tuple(List.rev $1)) }\n  | constr_longident simple_expr %prec below_HASH\n      { mkexp(Pexp_construct(mkrhs $1 1, Some $2)) }\n  | name_tag simple_expr %prec below_HASH\n      { mkexp(Pexp_variant($1, Some $2)) }\n  | IF ext_attributes seq_expr THEN expr ELSE expr\n      { mkexp_attrs(Pexp_ifthenelse($3, $5, Some $7)) $2 }\n  | IF ext_attributes seq_expr THEN expr\n      { mkexp_attrs (Pexp_ifthenelse($3, $5, None)) $2 }\n  | WHILE ext_attributes seq_expr DO seq_expr DONE\n      { mkexp_attrs (Pexp_while($3, $5)) $2 }\n  | FOR ext_attributes pattern EQUAL seq_expr direction_flag seq_expr DO\n    seq_expr DONE\n      { mkexp_attrs(Pexp_for($3, $5, $7, $6, $9)) $2 }\n  | expr COLONCOLON expr\n      { mkexp_cons (rhs_loc 2) (ghexp(Pexp_tuple[$1;$3])) (symbol_rloc()) }\n  | expr INFIXOP0 expr\n      { mkinfix $1 $2 $3 }\n  | expr INFIXOP1 expr\n      { mkinfix $1 $2 $3 }\n  | expr INFIXOP2 expr\n      { mkinfix $1 $2 $3 }\n  | expr INFIXOP3 expr\n      { mkinfix $1 $2 $3 }\n  | 
expr INFIXOP4 expr\n      { mkinfix $1 $2 $3 }\n  | expr PLUS expr\n      { mkinfix $1 \"+\" $3 }\n  | expr PLUSDOT expr\n      { mkinfix $1 \"+.\" $3 }\n  | expr PLUSEQ expr\n      { mkinfix $1 \"+=\" $3 }\n  | expr MINUS expr\n      { mkinfix $1 \"-\" $3 }\n  | expr MINUSDOT expr\n      { mkinfix $1 \"-.\" $3 }\n  | expr STAR expr\n      { mkinfix $1 \"*\" $3 }\n  | expr PERCENT expr\n      { mkinfix $1 \"%\" $3 }\n  | expr EQUAL expr\n      { mkinfix $1 \"=\" $3 }\n  | expr LESS expr\n    { mkinfix $1 \"<\" $3 }\n  | expr GREATER expr\n      { mkinfix $1 \">\" $3 }\n  | expr OR expr\n      { mkinfix $1 \"or\" $3 }\n  | expr BARBAR expr\n      { mkinfix $1 \"||\" $3 }\n  | expr AMPERSAND expr\n      { mkinfix $1 \"&\" $3 }\n  | expr AMPERAMPER expr\n      { mkinfix $1 \"&&\" $3 }\n  | expr COLONEQUAL expr\n      { mkinfix $1 \":=\" $3 }\n  | subtractive expr %prec prec_unary_minus\n      { mkuminus $1 $2 }\n  | additive expr %prec prec_unary_plus\n      { mkuplus $1 $2 }\n  | simple_expr DOT label_longident LESSMINUS expr\n      { mkexp(Pexp_setfield($1, mkrhs $3 3, $5)) }\n  | simple_expr DOT LPAREN seq_expr RPAREN LESSMINUS expr\n      { mkexp(Pexp_apply(ghexp(Pexp_ident(array_function \"Array\" \"set\")),\n                         [Nolabel,$1; Nolabel,$4; Nolabel,$7])) }\n  | simple_expr DOT LBRACKET seq_expr RBRACKET LESSMINUS expr\n      { mkexp(Pexp_apply(ghexp(Pexp_ident(array_function \"String\" \"set\")),\n                         [Nolabel,$1; Nolabel,$4; Nolabel,$7])) }\n  | simple_expr DOTOP LBRACKET expr RBRACKET LESSMINUS expr\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ $2 ^ \"[]<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, $1; Nolabel, $4; Nolabel, $7]) }\n  | simple_expr DOTOP LPAREN expr RPAREN LESSMINUS expr\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ $2 ^ \"()<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, $1; Nolabel, $4; Nolabel, $7]) }\n  | simple_expr DOTOP LBRACE expr RBRACE LESSMINUS 
expr\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ $2 ^ \"{}<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, $1; Nolabel, $4; Nolabel, $7]) }\n  | simple_expr DOT mod_longident DOTOP LBRACKET expr RBRACKET LESSMINUS expr\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Ldot($3,\".\" ^ $4 ^ \"[]<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, $1; Nolabel, $6; Nolabel, $9]) }\n  | simple_expr DOT mod_longident DOTOP LPAREN expr RPAREN LESSMINUS expr\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Ldot($3, \".\" ^ $4 ^ \"()<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, $1; Nolabel, $6; Nolabel, $9]) }\n  | simple_expr DOT mod_longident DOTOP LBRACE expr RBRACE LESSMINUS expr\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Ldot($3, \".\" ^ $4 ^ \"{}<-\")) in\n        mkexp @@ Pexp_apply(id , [Nolabel, $1; Nolabel, $6; Nolabel, $9]) }\n  | label LESSMINUS expr\n      { mkexp(Pexp_setinstvar(mkrhs $1 1, $3)) }\n  | ASSERT ext_attributes simple_expr %prec below_HASH\n      { mkexp_attrs (Pexp_assert $3) $2 }\n  | LAZY ext_attributes simple_expr %prec below_HASH\n      { mkexp_attrs (Pexp_lazy $3) $2 }\n  | OBJECT ext_attributes class_structure END\n      { mkexp_attrs (Pexp_object $3) $2 }\n  | OBJECT ext_attributes class_structure error\n      { unclosed \"object\" 1 \"end\" 4 }\n  | expr attribute\n      { Exp.attr $1 $2 }\n  | UNDERSCORE\n     { not_expecting 1 \"wildcard \\\"_\\\"\" }\n;\nsimple_expr:\n    val_longident\n      { mkexp(Pexp_ident (mkrhs $1 1)) }\n  | constant\n      { mkexp(Pexp_constant $1) }\n  | constr_longident %prec prec_constant_constructor\n      { mkexp(Pexp_construct(mkrhs $1 1, None)) }\n  | name_tag %prec prec_constant_constructor\n      { mkexp(Pexp_variant($1, None)) }\n  | LPAREN seq_expr RPAREN\n      { reloc_exp $2 }\n  | LPAREN seq_expr error\n      { unclosed \"(\" 1 \")\" 3 }\n  | BEGIN ext_attributes seq_expr END\n      { wrap_exp_attrs (reloc_exp $3) $2 (* check location *) }\n  | BEGIN 
ext_attributes END\n      { mkexp_attrs (Pexp_construct (mkloc (Lident \"()\") (symbol_rloc ()),\n                               None)) $2 }\n  | BEGIN ext_attributes seq_expr error\n      { unclosed \"begin\" 1 \"end\" 4 }\n  | LPAREN seq_expr type_constraint RPAREN\n      { mkexp_constraint $2 $3 }\n  | simple_expr DOT label_longident\n      { mkexp(Pexp_field($1, mkrhs $3 3)) }\n  | mod_longident DOT LPAREN seq_expr RPAREN\n      { mkexp(Pexp_open(Fresh, mkrhs $1 1, $4)) }\n  | mod_longident DOT LPAREN RPAREN\n      { mkexp(Pexp_open(Fresh, mkrhs $1 1,\n                        mkexp(Pexp_construct(mkrhs (Lident \"()\") 1, None)))) }\n  | mod_longident DOT LPAREN seq_expr error\n      { unclosed \"(\" 3 \")\" 5 }\n  | simple_expr DOT LPAREN seq_expr RPAREN\n      { mkexp(Pexp_apply(ghexp(Pexp_ident(array_function \"Array\" \"get\")),\n                         [Nolabel,$1; Nolabel,$4])) }\n  | simple_expr DOT LPAREN seq_expr error\n      { unclosed \"(\" 3 \")\" 5 }\n  | simple_expr DOT LBRACKET seq_expr RBRACKET\n      { mkexp(Pexp_apply(ghexp(Pexp_ident(array_function \"String\" \"get\")),\n                         [Nolabel,$1; Nolabel,$4])) }\n  | simple_expr DOT LBRACKET seq_expr error\n      { unclosed \"[\" 3 \"]\" 5 }\n  | simple_expr DOTOP LBRACKET expr RBRACKET\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ $2 ^ \"[]\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, $1; Nolabel, $4]) }\n  | simple_expr DOTOP LBRACKET expr error\n      { unclosed \"[\" 3 \"]\" 5 }\n  | simple_expr DOTOP LPAREN expr RPAREN\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ $2 ^ \"()\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, $1; Nolabel, $4]) }\n  | simple_expr DOTOP LPAREN expr error\n      { unclosed \"(\" 3 \")\" 5 }\n  | simple_expr DOTOP LBRACE expr RBRACE\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Lident (\".\" ^ $2 ^ \"{}\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, $1; Nolabel, $4]) }\n  | simple_expr DOTOP LBRACE expr 
error\n      { unclosed \"{\" 3 \"}\" 5 }\n  | simple_expr DOT mod_longident DOTOP LBRACKET expr RBRACKET\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Ldot($3, \".\" ^ $4 ^ \"[]\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, $1; Nolabel, $6]) }\n  | simple_expr DOT mod_longident DOTOP LBRACKET expr error\n      { unclosed \"[\" 5 \"]\" 7 }\n  | simple_expr DOT mod_longident DOTOP LPAREN expr RPAREN\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Ldot($3, \".\" ^ $4 ^ \"()\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, $1; Nolabel, $6]) }\n  | simple_expr DOT mod_longident DOTOP LPAREN expr error\n      { unclosed \"(\" 5 \")\" 7 }\n  | simple_expr DOT mod_longident DOTOP LBRACE expr RBRACE\n      { let id = mkexp @@ Pexp_ident( ghloc @@ Ldot($3, \".\" ^ $4 ^ \"{}\")) in\n        mkexp @@ Pexp_apply(id, [Nolabel, $1; Nolabel, $6]) }\n  | simple_expr DOT mod_longident DOTOP LBRACE expr error\n      { unclosed \"{\" 5 \"}\" 7 }\n  | simple_expr DOT LBRACE expr_comma_list error\n      { unclosed \"{\" 3 \"}\" 5 }\n  | LBRACE record_expr RBRACE\n      { let (exten, fields) = $2 in mkexp (Pexp_record(fields, exten)) }\n  | LBRACE record_expr error\n      { unclosed \"{\" 1 \"}\" 3 }\n  | mod_longident DOT LBRACE record_expr RBRACE\n      { let (exten, fields) = $4 in\n        let rec_exp = mkexp(Pexp_record(fields, exten)) in\n        mkexp(Pexp_open(Fresh, mkrhs $1 1, rec_exp)) }\n  | mod_longident DOT LBRACE record_expr error\n      { unclosed \"{\" 3 \"}\" 5 }\n  | LBRACKETBAR expr_semi_list opt_semi BARRBRACKET\n      { mkexp (Pexp_array(List.rev $2)) }\n  | LBRACKETBAR expr_semi_list opt_semi error\n      { unclosed \"[|\" 1 \"|]\" 4 }\n  | LBRACKETBAR BARRBRACKET\n      { mkexp (Pexp_array []) }\n  | mod_longident DOT LBRACKETBAR expr_semi_list opt_semi BARRBRACKET\n      { mkexp(Pexp_open(Fresh, mkrhs $1 1, mkexp(Pexp_array(List.rev $4)))) }\n  | mod_longident DOT LBRACKETBAR BARRBRACKET\n      { mkexp(Pexp_open(Fresh, mkrhs $1 1, mkexp(Pexp_array []))) 
}\n  | mod_longident DOT LBRACKETBAR expr_semi_list opt_semi error\n      { unclosed \"[|\" 3 \"|]\" 6 }\n  | LBRACKET expr_semi_list opt_semi RBRACKET\n      { reloc_exp (mktailexp (rhs_loc 4) (List.rev $2)) }\n  | LBRACKET expr_semi_list opt_semi error\n      { unclosed \"[\" 1 \"]\" 4 }\n  | mod_longident DOT LBRACKET expr_semi_list opt_semi RBRACKET\n      { let list_exp = reloc_exp (mktailexp (rhs_loc 6) (List.rev $4)) in\n        mkexp(Pexp_open(Fresh, mkrhs $1 1, list_exp)) }\n  | mod_longident DOT LBRACKET RBRACKET\n      { mkexp(Pexp_open(Fresh, mkrhs $1 1,\n                        mkexp(Pexp_construct(mkrhs (Lident \"[]\") 1, None)))) }\n  | mod_longident DOT LBRACKET expr_semi_list opt_semi error\n      { unclosed \"[\" 3 \"]\" 6 }\n  | PREFIXOP simple_expr\n      { mkexp(Pexp_apply(mkoperator $1 1, [Nolabel,$2])) }\n  | BANG simple_expr\n      { mkexp(Pexp_apply(mkoperator \"!\" 1, [Nolabel,$2])) }\n  | LBRACELESS field_expr_list GREATERRBRACE\n      { mkexp (Pexp_override $2) }\n  | LBRACELESS field_expr_list error\n      { unclosed \"{<\" 1 \">}\" 3 }\n  | LBRACELESS GREATERRBRACE\n      { mkexp (Pexp_override [])}\n  | mod_longident DOT LBRACELESS field_expr_list GREATERRBRACE\n      { mkexp(Pexp_open(Fresh, mkrhs $1 1, mkexp (Pexp_override $4)))}\n  | mod_longident DOT LBRACELESS GREATERRBRACE\n      { mkexp(Pexp_open(Fresh, mkrhs $1 1, mkexp (Pexp_override [])))}\n  | mod_longident DOT LBRACELESS field_expr_list error\n      { unclosed \"{<\" 3 \">}\" 5 }\n  | simple_expr HASH label\n      { mkexp(Pexp_send($1, mkrhs $3 3)) }\n  | simple_expr HASHOP simple_expr\n      { mkinfix $1 $2 $3 }\n  | LPAREN MODULE ext_attributes module_expr RPAREN\n      { mkexp_attrs (Pexp_pack $4) $3 }\n  | LPAREN MODULE ext_attributes module_expr COLON package_type RPAREN\n      { mkexp_attrs (Pexp_constraint (ghexp (Pexp_pack $4),\n                                      ghtyp (Ptyp_package $6)))\n                    $3 }\n  | LPAREN MODULE ext_attributes module_expr 
COLON error\n      { unclosed \"(\" 1 \")\" 6 }\n  | mod_longident DOT LPAREN MODULE ext_attributes module_expr COLON\n    package_type RPAREN\n      { mkexp(Pexp_open(Fresh, mkrhs $1 1,\n        mkexp_attrs (Pexp_constraint (ghexp (Pexp_pack $6),\n                                ghtyp (Ptyp_package $8)))\n                    $5 )) }\n  | mod_longident DOT LPAREN MODULE ext_attributes module_expr COLON error\n      { unclosed \"(\" 3 \")\" 8 }\n  | extension\n      { mkexp (Pexp_extension $1) }\n;\nsimple_labeled_expr_list:\n    labeled_simple_expr\n      { [$1] }\n  | simple_labeled_expr_list labeled_simple_expr\n      { $2 :: $1 }\n;\nlabeled_simple_expr:\n    simple_expr %prec below_HASH\n      { (Nolabel, $1) }\n  | label_expr\n      { $1 }\n;\nlabel_expr:\n    LABEL simple_expr %prec below_HASH\n      { (Labelled $1, $2) }\n  | TILDE label_ident\n      { (Labelled (fst $2), snd $2) }\n  | QUESTION label_ident\n      { (Optional (fst $2), snd $2) }\n  | OPTLABEL simple_expr %prec below_HASH\n      { (Optional $1, $2) }\n;\nlabel_ident:\n    LIDENT   { ($1, mkexp(Pexp_ident(mkrhs (Lident $1) 1))) }\n;\nlident_list:\n    LIDENT                            { [mkrhs $1 1] }\n  | LIDENT lident_list                { mkrhs $1 1 :: $2 }\n;\nlet_binding_body:\n    val_ident strict_binding\n      { (mkpatvar $1 1, $2) }\n  | val_ident type_constraint EQUAL seq_expr\n      { let v = mkpatvar $1 1 in (* PR#7344 *)\n        let t =\n          match $2 with\n            Some t, None -> t\n          | _, Some t -> t\n          | _ -> assert false\n        in\n        (ghpat(Ppat_constraint(v, ghtyp(Ptyp_poly([],t)))),\n         mkexp_constraint $4 $2) }\n  | val_ident COLON typevar_list DOT core_type EQUAL seq_expr\n      { (ghpat(Ppat_constraint(mkpatvar $1 1,\n                               ghtyp(Ptyp_poly(List.rev $3,$5)))),\n         $7) }\n  | val_ident COLON TYPE lident_list DOT core_type EQUAL seq_expr\n      { let exp, poly = wrap_type_annotation $4 $6 $8 in\n        
(ghpat(Ppat_constraint(mkpatvar $1 1, poly)), exp) }\n  | pattern_no_exn EQUAL seq_expr\n      { ($1, $3) }\n  | simple_pattern_not_ident COLON core_type EQUAL seq_expr\n      { (ghpat(Ppat_constraint($1, $3)), $5) }\n;\nlet_bindings:\n    let_binding                                 { $1 }\n  | let_bindings and_let_binding                { addlb $1 $2 }\n;\nlet_binding:\n    LET ext_attributes rec_flag let_binding_body post_item_attributes\n      { let (ext, attr) = $2 in\n        mklbs ext $3 (mklb true $4 (attr@$5)) }\n;\nand_let_binding:\n    AND attributes let_binding_body post_item_attributes\n      { mklb false $3 ($2@$4) }\n;\nfun_binding:\n    strict_binding\n      { $1 }\n  | type_constraint EQUAL seq_expr\n      { mkexp_constraint $3 $1 }\n;\nstrict_binding:\n    EQUAL seq_expr\n      { $2 }\n  | labeled_simple_pattern fun_binding\n      { let (l, o, p) = $1 in ghexp(Pexp_fun(l, o, p, $2)) }\n  | LPAREN TYPE lident_list RPAREN fun_binding\n      { mk_newtypes $3 $5 }\n;\nmatch_cases:\n    match_case { [$1] }\n  | match_cases BAR match_case { $3 :: $1 }\n;\nmatch_case:\n    pattern MINUSGREATER seq_expr\n      { Exp.case $1 $3 }\n  | pattern WHEN seq_expr MINUSGREATER seq_expr\n      { Exp.case $1 ~guard:$3 $5 }\n  | pattern MINUSGREATER DOT\n      { Exp.case $1 (Exp.unreachable ~loc:(rhs_loc 3) ())}\n;\nfun_def:\n    MINUSGREATER seq_expr\n      { $2 }\n  | COLON simple_core_type MINUSGREATER seq_expr\n      { mkexp (Pexp_constraint ($4, $2)) }\n/* Cf #5939: we used to accept (fun p when e0 -> e) */\n  | labeled_simple_pattern fun_def\n      {\n       let (l,o,p) = $1 in\n       ghexp(Pexp_fun(l, o, p, $2))\n      }\n  | LPAREN TYPE lident_list RPAREN fun_def\n      { mk_newtypes $3 $5 }\n;\nexpr_comma_list:\n    expr_comma_list COMMA expr                  { $3 :: $1 }\n  | expr COMMA expr                             { [$3; $1] }\n;\nrecord_expr:\n    simple_expr WITH lbl_expr_list              { (Some $1, $3) }\n  | lbl_expr_list                          
     { (None, $1) }\n;\nlbl_expr_list:\n     lbl_expr { [$1] }\n  |  lbl_expr SEMI lbl_expr_list { $1 :: $3 }\n  |  lbl_expr SEMI { [$1] }\n;\nlbl_expr:\n    label_longident opt_type_constraint EQUAL expr\n      { (mkrhs $1 1, mkexp_opt_constraint $4 $2) }\n  | label_longident opt_type_constraint\n      { (mkrhs $1 1, mkexp_opt_constraint (exp_of_label $1 1) $2) }\n;\nfield_expr_list:\n    field_expr opt_semi { [$1] }\n  | field_expr SEMI field_expr_list { $1 :: $3 }\n;\nfield_expr:\n    label EQUAL expr\n      { (mkrhs $1 1, $3) }\n  | label\n      { (mkrhs $1 1, exp_of_label (Lident $1) 1) }\n;\nexpr_semi_list:\n    expr                                        { [$1] }\n  | expr_semi_list SEMI expr                    { $3 :: $1 }\n;\ntype_constraint:\n    COLON core_type                             { (Some $2, None) }\n  | COLON core_type COLONGREATER core_type      { (Some $2, Some $4) }\n  | COLONGREATER core_type                      { (None, Some $2) }\n  | COLON error                                 { syntax_error() }\n  | COLONGREATER error                          { syntax_error() }\n;\nopt_type_constraint:\n    type_constraint { Some $1 }\n  | /* empty */ { None }\n;\n\n/* Patterns */\n\npattern:\n  | pattern AS val_ident\n      { mkpat(Ppat_alias($1, mkrhs $3 3)) }\n  | pattern AS error\n      { expecting 3 \"identifier\" }\n  | pattern_comma_list  %prec below_COMMA\n      { mkpat(Ppat_tuple(List.rev $1)) }\n  | pattern COLONCOLON pattern\n      { mkpat_cons (rhs_loc 2) (ghpat(Ppat_tuple[$1;$3])) (symbol_rloc()) }\n  | pattern COLONCOLON error\n      { expecting 3 \"pattern\" }\n  | pattern BAR pattern\n      { mkpat(Ppat_or($1, $3)) }\n  | pattern BAR error\n      { expecting 3 \"pattern\" }\n  | EXCEPTION ext_attributes pattern %prec prec_constr_appl\n      { mkpat_attrs (Ppat_exception $3) $2}\n  | pattern attribute\n      { Pat.attr $1 $2 }\n  | pattern_gen { $1 }\n;\npattern_no_exn:\n  | pattern_no_exn AS val_ident\n      { mkpat(Ppat_alias($1, mkrhs 
$3 3)) }\n  | pattern_no_exn AS error\n      { expecting 3 \"identifier\" }\n  | pattern_no_exn_comma_list  %prec below_COMMA\n      { mkpat(Ppat_tuple(List.rev $1)) }\n  | pattern_no_exn COLONCOLON pattern\n      { mkpat_cons (rhs_loc 2) (ghpat(Ppat_tuple[$1;$3])) (symbol_rloc()) }\n  | pattern_no_exn COLONCOLON error\n      { expecting 3 \"pattern\" }\n  | pattern_no_exn BAR pattern\n      { mkpat(Ppat_or($1, $3)) }\n  | pattern_no_exn BAR error\n      { expecting 3 \"pattern\" }\n  | pattern_no_exn attribute\n      { Pat.attr $1 $2 }\n  | pattern_gen { $1 }\n;\npattern_gen:\n    simple_pattern\n      { $1 }\n  | constr_longident pattern %prec prec_constr_appl\n      { mkpat(Ppat_construct(mkrhs $1 1, Some $2)) }\n  | name_tag pattern %prec prec_constr_appl\n      { mkpat(Ppat_variant($1, Some $2)) }\n  | LAZY ext_attributes simple_pattern\n      { mkpat_attrs (Ppat_lazy $3) $2}\n;\nsimple_pattern:\n    val_ident %prec below_EQUAL\n      { mkpat(Ppat_var (mkrhs $1 1)) }\n  | simple_pattern_not_ident { $1 }\n;\nsimple_pattern_not_ident:\n  | UNDERSCORE\n      { mkpat(Ppat_any) }\n  | signed_constant\n      { mkpat(Ppat_constant $1) }\n  | signed_constant DOTDOT signed_constant\n      { mkpat(Ppat_interval ($1, $3)) }\n  | constr_longident\n      { mkpat(Ppat_construct(mkrhs $1 1, None)) }\n  | name_tag\n      { mkpat(Ppat_variant($1, None)) }\n  | HASH type_longident\n      { mkpat(Ppat_type (mkrhs $2 2)) }\n  | simple_delimited_pattern\n      { $1 }\n  | mod_longident DOT simple_delimited_pattern\n      { mkpat @@ Ppat_open(mkrhs $1 1, $3) }\n  | mod_longident DOT LBRACKET RBRACKET\n    { mkpat @@ Ppat_open(mkrhs $1 1, mkpat @@\n               Ppat_construct ( mkrhs (Lident \"[]\") 4, None)) }\n  | mod_longident DOT LPAREN RPAREN\n      { mkpat @@ Ppat_open( mkrhs $1 1, mkpat @@\n                 Ppat_construct ( mkrhs (Lident \"()\") 4, None) ) }\n  | mod_longident DOT LPAREN pattern RPAREN\n      { mkpat @@ Ppat_open (mkrhs $1 1, $4)}\n  | mod_longident DOT 
LPAREN pattern error\n      {unclosed \"(\" 3 \")\" 5  }\n  | mod_longident DOT LPAREN error\n      { expecting 4 \"pattern\" }\n  | LPAREN pattern RPAREN\n      { reloc_pat $2 }\n  | LPAREN pattern error\n      { unclosed \"(\" 1 \")\" 3 }\n  | LPAREN pattern COLON core_type RPAREN\n      { mkpat(Ppat_constraint($2, $4)) }\n  | LPAREN pattern COLON core_type error\n      { unclosed \"(\" 1 \")\" 5 }\n  | LPAREN pattern COLON error\n      { expecting 4 \"type\" }\n  | LPAREN MODULE ext_attributes UIDENT RPAREN\n      { mkpat_attrs (Ppat_unpack (mkrhs $4 4)) $3 }\n  | LPAREN MODULE ext_attributes UIDENT COLON package_type RPAREN\n      { mkpat_attrs\n          (Ppat_constraint(mkpat(Ppat_unpack (mkrhs $4 4)),\n                           ghtyp(Ptyp_package $6)))\n          $3 }\n  | LPAREN MODULE ext_attributes UIDENT COLON package_type error\n      { unclosed \"(\" 1 \")\" 7 }\n  | extension\n      { mkpat(Ppat_extension $1) }\n;\n\nsimple_delimited_pattern:\n  | LBRACE lbl_pattern_list RBRACE\n    { let (fields, closed) = $2 in mkpat(Ppat_record(fields, closed)) }\n  | LBRACE lbl_pattern_list error\n    { unclosed \"{\" 1 \"}\" 3 }\n  | LBRACKET pattern_semi_list opt_semi RBRACKET\n    { reloc_pat (mktailpat (rhs_loc 4) (List.rev $2)) }\n  | LBRACKET pattern_semi_list opt_semi error\n    { unclosed \"[\" 1 \"]\" 4 }\n  | LBRACKETBAR pattern_semi_list opt_semi BARRBRACKET\n    { mkpat(Ppat_array(List.rev $2)) }\n  | LBRACKETBAR BARRBRACKET\n    { mkpat(Ppat_array []) }\n  | LBRACKETBAR pattern_semi_list opt_semi error\n    { unclosed \"[|\" 1 \"|]\" 4 }\n\npattern_comma_list:\n    pattern_comma_list COMMA pattern            { $3 :: $1 }\n  | pattern COMMA pattern                       { [$3; $1] }\n  | pattern COMMA error                         { expecting 3 \"pattern\" }\n;\npattern_no_exn_comma_list:\n    pattern_no_exn_comma_list COMMA pattern     { $3 :: $1 }\n  | pattern_no_exn COMMA pattern                { [$3; $1] }\n  | pattern_no_exn COMMA error           
       { expecting 3 \"pattern\" }\n;\npattern_semi_list:\n    pattern                                     { [$1] }\n  | pattern_semi_list SEMI pattern              { $3 :: $1 }\n;\nlbl_pattern_list:\n    lbl_pattern { [$1], Closed }\n  | lbl_pattern SEMI { [$1], Closed }\n  | lbl_pattern SEMI UNDERSCORE opt_semi { [$1], Open }\n  | lbl_pattern SEMI lbl_pattern_list\n      { let (fields, closed) = $3 in $1 :: fields, closed }\n;\nlbl_pattern:\n    label_longident opt_pattern_type_constraint EQUAL pattern\n     { (mkrhs $1 1, mkpat_opt_constraint $4 $2) }\n  | label_longident opt_pattern_type_constraint\n     { (mkrhs $1 1, mkpat_opt_constraint (pat_of_label $1 1) $2) }\n;\nopt_pattern_type_constraint:\n    COLON core_type { Some $2 }\n  | /* empty */ { None }\n;\n\n/* Value descriptions */\n\nvalue_description:\n    VAL ext_attributes val_ident COLON core_type post_item_attributes\n      { let (ext, attrs) = $2 in\n        Val.mk (mkrhs $3 3) $5 ~attrs:(attrs@$6)\n              ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n      , ext }\n;\n\n/* Primitive declarations */\n\nprimitive_declaration_body:\n    STRING                                      { [fst $1] }\n  | STRING primitive_declaration_body           { fst $1 :: $2 }\n;\nprimitive_declaration:\n    EXTERNAL ext_attributes val_ident COLON core_type EQUAL\n    primitive_declaration_body post_item_attributes\n      { let (ext, attrs) = $2 in\n        Val.mk (mkrhs $3 3) $5 ~prim:$7 ~attrs:(attrs@$8)\n              ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n      , ext }\n;\n\n/* Type declarations */\n\ntype_declarations:\n    type_declaration\n      { let (nonrec_flag, ty, ext) = $1 in (nonrec_flag, [ty], ext) }\n  | type_declarations and_type_declaration\n      { let (nonrec_flag, tys, ext) = $1 in (nonrec_flag, $2 :: tys, ext) }\n;\n\ntype_declaration:\n    TYPE ext_attributes nonrec_flag optional_type_parameters LIDENT\n    type_kind constraints post_item_attributes\n      { let (kind, priv, manifest) = 
$6 in\n        let (ext, attrs) = $2 in\n        let ty =\n          Type.mk (mkrhs $5 5) ~params:$4 ~cstrs:(List.rev $7) ~kind\n            ~priv ?manifest ~attrs:(attrs@$8)\n            ~loc:(symbol_rloc ()) ~docs:(symbol_docs ())\n        in\n          ($3, ty, ext) }\n;\nand_type_declaration:\n    AND attributes optional_type_parameters LIDENT type_kind constraints\n    post_item_attributes\n      { let (kind, priv, manifest) = $5 in\n          Type.mk (mkrhs $4 4) ~params:$3 ~cstrs:(List.rev $6)\n            ~kind ~priv ?manifest ~attrs:($2@$7) ~loc:(symbol_rloc ())\n            ~text:(symbol_text ()) ~docs:(symbol_docs ()) }\n;\nconstraints:\n        constraints CONSTRAINT constrain        { $3 :: $1 }\n      | /* empty */                             { [] }\n;\ntype_kind:\n    /*empty*/\n      { (Ptype_abstract, Public, None) }\n  | EQUAL core_type\n      { (Ptype_abstract, Public, Some $2) }\n  | EQUAL PRIVATE core_type\n      { (Ptype_abstract, Private, Some $3) }\n  | EQUAL constructor_declarations\n      { (Ptype_variant(List.rev $2), Public, None) }\n  | EQUAL PRIVATE constructor_declarations\n      { (Ptype_variant(List.rev $3), Private, None) }\n  | EQUAL DOTDOT\n      { (Ptype_open, Public, None) }\n  | EQUAL PRIVATE DOTDOT\n      { (Ptype_open, Private, None) }\n  | EQUAL private_flag LBRACE label_declarations RBRACE\n      { (Ptype_record $4, $2, None) }\n  | EQUAL core_type EQUAL private_flag constructor_declarations\n      { (Ptype_variant(List.rev $5), $4, Some $2) }\n  | EQUAL core_type EQUAL private_flag DOTDOT\n      { (Ptype_open, $4, Some $2) }\n  | EQUAL core_type EQUAL private_flag LBRACE label_declarations RBRACE\n      { (Ptype_record $6, $4, Some $2) }\n;\noptional_type_parameters:\n    /*empty*/                                   { [] }\n  | optional_type_parameter                     { [$1] }\n  | LPAREN optional_type_parameter_list RPAREN  { List.rev $2 }\n;\noptional_type_parameter:\n    type_variance optional_type_variable        { 
$2, $1 }\n;\noptional_type_parameter_list:\n    optional_type_parameter                              { [$1] }\n  | optional_type_parameter_list COMMA optional_type_parameter    { $3 :: $1 }\n;\noptional_type_variable:\n    QUOTE ident                                 { mktyp(Ptyp_var $2) }\n  | UNDERSCORE                                  { mktyp(Ptyp_any) }\n;\n\n\ntype_parameter:\n    type_variance type_variable                   { $2, $1 }\n;\ntype_variance:\n    /* empty */                                 { Invariant }\n  | PLUS                                        { Covariant }\n  | MINUS                                       { Contravariant }\n;\ntype_variable:\n    QUOTE ident                                 { mktyp(Ptyp_var $2) }\n;\ntype_parameter_list:\n    type_parameter                              { [$1] }\n  | type_parameter_list COMMA type_parameter    { $3 :: $1 }\n;\nconstructor_declarations:\n    constructor_declaration                              { [$1] }\n  | bar_constructor_declaration                          { [$1] }\n  | constructor_declarations bar_constructor_declaration { $2 :: $1 }\n;\nconstructor_declaration:\n  | constr_ident generalized_constructor_arguments attributes\n      {\n       let args,res = $2 in\n       Type.constructor (mkrhs $1 1) ~args ?res ~attrs:$3\n         ~loc:(symbol_rloc()) ~info:(symbol_info ())\n      }\n;\nbar_constructor_declaration:\n  | BAR constr_ident generalized_constructor_arguments attributes\n      {\n       let args,res = $3 in\n       Type.constructor (mkrhs $2 2) ~args ?res ~attrs:$4\n         ~loc:(symbol_rloc()) ~info:(symbol_info ())\n      }\n;\nstr_exception_declaration:\n  | sig_exception_declaration                    { $1 }\n  | EXCEPTION ext_attributes constr_ident EQUAL constr_longident attributes\n    post_item_attributes\n      { let (ext,attrs) = $2 in\n        Te.rebind (mkrhs $3 3) (mkrhs $5 5) ~attrs:(attrs @ $6 @ $7)\n          ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n        , 
ext }\n;\nsig_exception_declaration:\n  | EXCEPTION ext_attributes constr_ident generalized_constructor_arguments\n    attributes post_item_attributes\n      { let args, res = $4 in\n        let (ext,attrs) = $2 in\n          Te.decl (mkrhs $3 3) ~args ?res ~attrs:(attrs @ $5 @ $6)\n            ~loc:(symbol_rloc()) ~docs:(symbol_docs ())\n        , ext }\n;\nlet_exception_declaration:\n    constr_ident generalized_constructor_arguments attributes\n      { let args, res = $2 in\n        Te.decl (mkrhs $1 1) ~args ?res ~attrs:$3 ~loc:(symbol_rloc()) }\n;\ngeneralized_constructor_arguments:\n    /*empty*/                     { (Pcstr_tuple [],None) }\n  | OF constructor_arguments      { ($2,None) }\n  | COLON constructor_arguments MINUSGREATER simple_core_type\n                                  { ($2,Some $4) }\n  | COLON simple_core_type\n                                  { (Pcstr_tuple [],Some $2) }\n;\n\nconstructor_arguments:\n  | core_type_list                   { Pcstr_tuple (List.rev $1) }\n  | LBRACE label_declarations RBRACE { Pcstr_record $2 }\n;\nlabel_declarations:\n    label_declaration                           { [$1] }\n  | label_declaration_semi                      { [$1] }\n  | label_declaration_semi label_declarations   { $1 :: $2 }\n;\nlabel_declaration:\n    mutable_flag label COLON poly_type_no_attr attributes\n      {\n       Type.field (mkrhs $2 2) $4 ~mut:$1 ~attrs:$5\n         ~loc:(symbol_rloc()) ~info:(symbol_info ())\n      }\n;\nlabel_declaration_semi:\n    mutable_flag label COLON poly_type_no_attr attributes SEMI attributes\n      {\n       let info =\n         match rhs_info 5 with\n         | Some _ as info_before_semi -> info_before_semi\n         | None -> symbol_info ()\n       in\n       Type.field (mkrhs $2 2) $4 ~mut:$1 ~attrs:($5 @ $7)\n         ~loc:(symbol_rloc()) ~info\n      }\n;\n\n/* Type Extensions */\n\nstr_type_extension:\n  TYPE ext_attributes nonrec_flag optional_type_parameters type_longident\n  PLUSEQ private_flag 
str_extension_constructors post_item_attributes\n      { let (ext, attrs) = $2 in\n        if $3 <> Recursive then not_expecting 3 \"nonrec flag\";\n        Te.mk (mkrhs $5 5) (List.rev $8) ~params:$4 ~priv:$7\n          ~attrs:(attrs@$9) ~docs:(symbol_docs ())\n        , ext }\n;\nsig_type_extension:\n  TYPE ext_attributes nonrec_flag optional_type_parameters type_longident\n  PLUSEQ private_flag sig_extension_constructors post_item_attributes\n      { let (ext, attrs) = $2 in\n        if $3 <> Recursive then not_expecting 3 \"nonrec flag\";\n        Te.mk (mkrhs $5 5) (List.rev $8) ~params:$4 ~priv:$7\n          ~attrs:(attrs @ $9) ~docs:(symbol_docs ())\n        , ext }\n;\nstr_extension_constructors:\n    extension_constructor_declaration                     { [$1] }\n  | bar_extension_constructor_declaration                 { [$1] }\n  | extension_constructor_rebind                          { [$1] }\n  | bar_extension_constructor_rebind                      { [$1] }\n  | str_extension_constructors bar_extension_constructor_declaration\n      { $2 :: $1 }\n  | str_extension_constructors bar_extension_constructor_rebind\n      { $2 :: $1 }\n;\nsig_extension_constructors:\n    extension_constructor_declaration                     { [$1] }\n  | bar_extension_constructor_declaration                 { [$1] }\n  | sig_extension_constructors bar_extension_constructor_declaration\n      { $2 :: $1 }\n;\nextension_constructor_declaration:\n  | constr_ident generalized_constructor_arguments attributes\n      { let args, res = $2 in\n        Te.decl (mkrhs $1 1) ~args ?res ~attrs:$3\n          ~loc:(symbol_rloc()) ~info:(symbol_info ()) }\n;\nbar_extension_constructor_declaration:\n  | BAR constr_ident generalized_constructor_arguments attributes\n      { let args, res = $3 in\n        Te.decl (mkrhs $2 2) ~args ?res ~attrs:$4\n           ~loc:(symbol_rloc()) ~info:(symbol_info ()) }\n;\nextension_constructor_rebind:\n  | constr_ident EQUAL constr_longident attributes\n   
   { Te.rebind (mkrhs $1 1) (mkrhs $3 3) ~attrs:$4\n          ~loc:(symbol_rloc()) ~info:(symbol_info ()) }\n;\nbar_extension_constructor_rebind:\n  | BAR constr_ident EQUAL constr_longident attributes\n      { Te.rebind (mkrhs $2 2) (mkrhs $4 4) ~attrs:$5\n          ~loc:(symbol_rloc()) ~info:(symbol_info ()) }\n;\n\n/* \"with\" constraints (additional type equations over signature components) */\n\nwith_constraints:\n    with_constraint                             { [$1] }\n  | with_constraints AND with_constraint        { $3 :: $1 }\n;\nwith_constraint:\n    TYPE optional_type_parameters label_longident with_type_binder\n    core_type_no_attr constraints\n      { Pwith_type\n          (mkrhs $3 3,\n           (Type.mk (mkrhs (Longident.last $3) 3)\n              ~params:$2\n              ~cstrs:(List.rev $6)\n              ~manifest:$5\n              ~priv:$4\n              ~loc:(symbol_rloc()))) }\n    /* used label_longident instead of type_longident to disallow\n       functor applications in type path */\n  | TYPE optional_type_parameters label_longident COLONEQUAL core_type_no_attr\n      { Pwith_typesubst\n         (mkrhs $3 3,\n           (Type.mk (mkrhs (Longident.last $3) 3)\n             ~params:$2\n             ~manifest:$5\n             ~loc:(symbol_rloc()))) }\n  | MODULE mod_longident EQUAL mod_ext_longident\n      { Pwith_module (mkrhs $2 2, mkrhs $4 4) }\n  | MODULE mod_longident COLONEQUAL mod_ext_longident\n      { Pwith_modsubst (mkrhs $2 2, mkrhs $4 4) }\n;\nwith_type_binder:\n    EQUAL          { Public }\n  | EQUAL PRIVATE  { Private }\n;\n\n/* Polymorphic types */\n\ntypevar_list:\n        QUOTE ident                             { [mkrhs $2 2] }\n      | typevar_list QUOTE ident                { mkrhs $3 3 :: $1 }\n;\npoly_type:\n        core_type\n          { $1 }\n      | typevar_list DOT core_type\n          { mktyp(Ptyp_poly(List.rev $1, $3)) }\n;\npoly_type_no_attr:\n        core_type_no_attr\n          { $1 }\n      | typevar_list 
DOT core_type_no_attr\n          { mktyp(Ptyp_poly(List.rev $1, $3)) }\n;\n\n/* Core types */\n\ncore_type:\n    core_type_no_attr\n      { $1 }\n  | core_type attribute\n      { Typ.attr $1 $2 }\n;\ncore_type_no_attr:\n    core_type2 %prec MINUSGREATER\n      { $1 }\n  | core_type2 AS QUOTE ident\n      { mktyp(Ptyp_alias($1, $4)) }\n;\ncore_type2:\n    simple_core_type_or_tuple\n      { $1 }\n  | QUESTION LIDENT COLON core_type2 MINUSGREATER core_type2\n      { let param = extra_rhs_core_type $4 ~pos:4 in\n        mktyp (Ptyp_arrow(Optional $2 , param, $6)) }\n  | OPTLABEL core_type2 MINUSGREATER core_type2\n      { let param = extra_rhs_core_type $2 ~pos:2 in\n        mktyp(Ptyp_arrow(Optional $1 , param, $4))\n      }\n  | LIDENT COLON core_type2 MINUSGREATER core_type2\n      { let param = extra_rhs_core_type $3 ~pos:3 in\n        mktyp(Ptyp_arrow(Labelled $1, param, $5)) }\n  | core_type2 MINUSGREATER core_type2\n      { let param = extra_rhs_core_type $1 ~pos:1 in\n        mktyp(Ptyp_arrow(Nolabel, param, $3)) }\n;\n\nsimple_core_type:\n    simple_core_type2  %prec below_HASH\n      { $1 }\n  | LPAREN core_type_comma_list RPAREN %prec below_HASH\n      { match $2 with [sty] -> sty | _ -> raise Parse_error }\n;\n\nsimple_core_type2:\n    QUOTE ident\n      { mktyp(Ptyp_var $2) }\n  | UNDERSCORE\n      { mktyp(Ptyp_any) }\n  | type_longident\n      { mktyp(Ptyp_constr(mkrhs $1 1, [])) }\n  | simple_core_type2 type_longident\n      { mktyp(Ptyp_constr(mkrhs $2 2, [$1])) }\n  | LPAREN core_type_comma_list RPAREN type_longident\n      { mktyp(Ptyp_constr(mkrhs $4 4, List.rev $2)) }\n  | LESS meth_list GREATER\n      { let (f, c) = $2 in mktyp(Ptyp_object (f, c)) }\n  | LESS GREATER\n      { mktyp(Ptyp_object ([], Closed)) }\n  | HASH class_longident\n      { mktyp(Ptyp_class(mkrhs $2 2, [])) }\n  | simple_core_type2 HASH class_longident\n      { mktyp(Ptyp_class(mkrhs $3 3, [$1])) }\n  | LPAREN core_type_comma_list RPAREN HASH class_longident\n      { 
mktyp(Ptyp_class(mkrhs $5 5, List.rev $2)) }\n  | LBRACKET tag_field RBRACKET\n      { mktyp(Ptyp_variant([$2], Closed, None)) }\n/* PR#3835: this is not LR(1), would need lookahead=2\n  | LBRACKET simple_core_type RBRACKET\n      { mktyp(Ptyp_variant([$2], Closed, None)) }\n*/\n  | LBRACKET BAR row_field_list RBRACKET\n      { mktyp(Ptyp_variant(List.rev $3, Closed, None)) }\n  | LBRACKET row_field BAR row_field_list RBRACKET\n      { mktyp(Ptyp_variant($2 :: List.rev $4, Closed, None)) }\n  | LBRACKETGREATER opt_bar row_field_list RBRACKET\n      { mktyp(Ptyp_variant(List.rev $3, Open, None)) }\n  | LBRACKETGREATER RBRACKET\n      { mktyp(Ptyp_variant([], Open, None)) }\n  | LBRACKETLESS opt_bar row_field_list RBRACKET\n      { mktyp(Ptyp_variant(List.rev $3, Closed, Some [])) }\n  | LBRACKETLESS opt_bar row_field_list GREATER name_tag_list RBRACKET\n      { mktyp(Ptyp_variant(List.rev $3, Closed, Some (List.rev $5))) }\n  | LPAREN MODULE ext_attributes package_type RPAREN\n      { mktyp_attrs (Ptyp_package $4) $3 }\n  | extension\n      { mktyp (Ptyp_extension $1) }\n;\npackage_type:\n    module_type { package_type_of_module_type $1 }\n;\nrow_field_list:\n    row_field                                   { [$1] }\n  | row_field_list BAR row_field                { $3 :: $1 }\n;\nrow_field:\n    tag_field                                   { $1 }\n  | simple_core_type                            { Rinherit $1 }\n;\ntag_field:\n    name_tag OF opt_ampersand amper_type_list attributes\n      { Rtag (mkrhs $1 1, add_info_attrs (symbol_info ()) $5,\n               $3, List.rev $4) }\n  | name_tag attributes\n      { Rtag (mkrhs $1 1, add_info_attrs (symbol_info ()) $2, true, []) }\n;\nopt_ampersand:\n    AMPERSAND                                   { true }\n  | /* empty */                                 { false }\n;\namper_type_list:\n    core_type_no_attr                           { [$1] }\n  | amper_type_list AMPERSAND core_type_no_attr { $3 :: $1 
}\n;\nname_tag_list:\n    name_tag                                    { [$1] }\n  | name_tag_list name_tag                      { $2 :: $1 }\n;\nsimple_core_type_or_tuple:\n    simple_core_type { $1 }\n  | simple_core_type STAR core_type_list\n      { mktyp(Ptyp_tuple($1 :: List.rev $3)) }\n;\ncore_type_comma_list:\n    core_type                                   { [$1] }\n  | core_type_comma_list COMMA core_type        { $3 :: $1 }\n;\ncore_type_list:\n    simple_core_type                            { [$1] }\n  | core_type_list STAR simple_core_type        { $3 :: $1 }\n;\nmeth_list:\n    field_semi meth_list                        { let (f, c) = $2 in ($1 :: f, c) }\n  | inherit_field_semi meth_list                { let (f, c) = $2 in ($1 :: f, c) }\n  | field_semi                                  { [$1], Closed }\n  | field                                       { [$1], Closed }\n  | inherit_field_semi                          { [$1], Closed }\n  | simple_core_type                            { [Oinherit $1], Closed }\n  | DOTDOT                                      { [], Open }\n;\nfield:\n  label COLON poly_type_no_attr attributes\n    { Otag (mkrhs $1 1, add_info_attrs (symbol_info ()) $4, $3) }\n;\n\nfield_semi:\n  label COLON poly_type_no_attr attributes SEMI attributes\n    { let info =\n        match rhs_info 4 with\n        | Some _ as info_before_semi -> info_before_semi\n        | None -> symbol_info ()\n      in\n      ( Otag (mkrhs $1 1, add_info_attrs info ($4 @ $6), $3)) }\n;\n\ninherit_field_semi:\n  simple_core_type SEMI { Oinherit $1 }\n\nlabel:\n    LIDENT                                      { $1 }\n;\n\n/* Constants */\n\nconstant:\n  | INT          { let (n, m) = $1 in Pconst_integer (n, m) }\n  | CHAR         { Pconst_char (Char.code $1) }\n  | STRING       { let (s, d) = $1 in Pconst_string (s, d) }\n  | FLOAT        { let (f, m) = $1 in Pconst_float (f, m) }\n;\nsigned_constant:\n    constant     { $1 }\n  | MINUS INT    { let (n, m) = $2 
in Pconst_integer(\"-\" ^ n, m) }\n  | MINUS FLOAT  { let (f, m) = $2 in Pconst_float(\"-\" ^ f, m) }\n  | PLUS INT     { let (n, m) = $2 in Pconst_integer (n, m) }\n  | PLUS FLOAT   { let (f, m) = $2 in Pconst_float(f, m) }\n;\n\n/* Identifiers and long identifiers */\n\nident:\n    UIDENT                                      { $1 }\n  | LIDENT                                      { $1 }\n;\nval_ident:\n    LIDENT                                      { $1 }\n  | LPAREN operator RPAREN                      { $2 }\n  | LPAREN operator error                       { unclosed \"(\" 1 \")\" 3 }\n  | LPAREN error                                { expecting 2 \"operator\" }\n  | LPAREN MODULE error                         { expecting 3 \"module-expr\" }\n;\noperator:\n    PREFIXOP                                    { $1 }\n  | INFIXOP0                                    { $1 }\n  | INFIXOP1                                    { $1 }\n  | INFIXOP2                                    { $1 }\n  | INFIXOP3                                    { $1 }\n  | INFIXOP4                                    { $1 }\n  | DOTOP LPAREN RPAREN                         { \".\"^ $1 ^\"()\" }\n  | DOTOP LPAREN RPAREN LESSMINUS               { \".\"^ $1 ^ \"()<-\" }\n  | DOTOP LBRACKET RBRACKET                     { \".\"^ $1 ^\"[]\" }\n  | DOTOP LBRACKET RBRACKET LESSMINUS           { \".\"^ $1 ^ \"[]<-\" }\n  | DOTOP LBRACE RBRACE                         { \".\"^ $1 ^\"{}\" }\n  | DOTOP LBRACE RBRACE LESSMINUS               { \".\"^ $1 ^ \"{}<-\" }\n  | HASHOP                                      { $1 }\n  | BANG                                        { \"!\" }\n  | PLUS                                        { \"+\" }\n  | PLUSDOT                                     { \"+.\" }\n  | MINUS                                       { \"-\" }\n  | MINUSDOT                                    { \"-.\" }\n  | STAR                                        { \"*\" }\n  | EQUAL                                     
  { \"=\" }\n  | LESS                                        { \"<\" }\n  | GREATER                                     { \">\" }\n  | OR                                          { \"or\" }\n  | BARBAR                                      { \"||\" }\n  | AMPERSAND                                   { \"&\" }\n  | AMPERAMPER                                  { \"&&\" }\n  | COLONEQUAL                                  { \":=\" }\n  | PLUSEQ                                      { \"+=\" }\n  | PERCENT                                     { \"%\" }\n;\nconstr_ident:\n    UIDENT                                      { $1 }\n  | LBRACKET RBRACKET                           { \"[]\" }\n  | LPAREN RPAREN                               { \"()\" }\n  | LPAREN COLONCOLON RPAREN                    { \"::\" }\n  | FALSE                                       { \"false\" }\n  | TRUE                                        { \"true\" }\n;\n\nval_longident:\n    val_ident                                   { Lident $1 }\n  | mod_longident DOT val_ident                 { Ldot($1, $3) }\n;\nconstr_longident:\n    mod_longident       %prec below_DOT         { $1 }\n  | mod_longident DOT LPAREN COLONCOLON RPAREN  { Ldot($1,\"::\") }\n  | LBRACKET RBRACKET                           { Lident \"[]\" }\n  | LPAREN RPAREN                               { Lident \"()\" }\n  | LPAREN COLONCOLON RPAREN                    { Lident \"::\" }\n  | FALSE                                       { Lident \"false\" }\n  | TRUE                                        { Lident \"true\" }\n;\nlabel_longident:\n    LIDENT                                      { Lident $1 }\n  | mod_longident DOT LIDENT                    { Ldot($1, $3) }\n;\ntype_longident:\n    LIDENT                                      { Lident $1 }\n  | mod_ext_longident DOT LIDENT                { Ldot($1, $3) }\n;\nmod_longident:\n    UIDENT                                      { Lident $1 }\n  | mod_longident DOT UIDENT                    { 
Ldot($1, $3) }\n;\nmod_ext_longident:\n    UIDENT                                      { Lident $1 }\n  | mod_ext_longident DOT UIDENT                { Ldot($1, $3) }\n  | mod_ext_longident LPAREN mod_ext_longident RPAREN { lapply $1 $3 }\n;\nmty_longident:\n    ident                                       { Lident $1 }\n  | mod_ext_longident DOT ident                 { Ldot($1, $3) }\n;\nclty_longident:\n    LIDENT                                      { Lident $1 }\n  | mod_ext_longident DOT LIDENT                { Ldot($1, $3) }\n;\nclass_longident:\n    LIDENT                                      { Lident $1 }\n  | mod_longident DOT LIDENT                    { Ldot($1, $3) }\n;\n\n/* Toplevel directives */\n\n\n/* Miscellaneous */\n\nname_tag:\n    BACKQUOTE ident                             { $2 }\n;\nrec_flag:\n    /* empty */                                 { Nonrecursive }\n  | REC                                         { Recursive }\n;\nnonrec_flag:\n    /* empty */                                 { Recursive }\n  | NONREC                                      { Nonrecursive }\n;\ndirection_flag:\n    TO                                          { Upto }\n  | DOWNTO                                      { Downto }\n;\nprivate_flag:\n    /* empty */                                 { Public }\n  | PRIVATE                                     { Private }\n;\nmutable_flag:\n    /* empty */                                 { Immutable }\n  | MUTABLE                                     { Mutable }\n;\nvirtual_flag:\n    /* empty */                                 { Concrete }\n  | VIRTUAL                                     { Virtual }\n;\nprivate_virtual_flags:\n    /* empty */  { Public, Concrete }\n  | PRIVATE { Private, Concrete }\n  | VIRTUAL { Public, Virtual }\n  | PRIVATE VIRTUAL { Private, Virtual }\n  | VIRTUAL PRIVATE { Private, Virtual }\n;\noverride_flag:\n    /* empty */                                 { Fresh }\n  | BANG                                  
      { Override }\n;\nopt_bar:\n    /* empty */                                 { () }\n  | BAR                                         { () }\n;\nopt_semi:\n  | /* empty */                                 { () }\n  | SEMI                                        { () }\n;\nsubtractive:\n  | MINUS                                       { \"-\" }\n  | MINUSDOT                                    { \"-.\" }\n;\nadditive:\n  | PLUS                                        { \"+\" }\n  | PLUSDOT                                     { \"+.\" }\n;\n\n/* Attributes and extensions */\n\nsingle_attr_id:\n    LIDENT { $1 }\n  | UIDENT { $1 }\n  | AND { \"and\" }\n  | AS { \"as\" }\n  | ASSERT { \"assert\" }\n  | BEGIN { \"begin\" }\n  | CLASS { \"class\" }\n  | CONSTRAINT { \"constraint\" }\n  | DO { \"do\" }\n  | DONE { \"done\" }\n  | DOWNTO { \"downto\" }\n  | ELSE { \"else\" }\n  | END { \"end\" }\n  | EXCEPTION { \"exception\" }\n  | EXTERNAL { \"external\" }\n  | FALSE { \"false\" }\n  | FOR { \"for\" }\n  | FUN { \"fun\" }\n  | FUNCTION { \"function\" }\n  | FUNCTOR { \"functor\" }\n  | IF { \"if\" }\n  | IN { \"in\" }\n  | INCLUDE { \"include\" }\n  | INHERIT { \"inherit\" }\n  | INITIALIZER { \"initializer\" }\n  | LAZY { \"lazy\" }\n  | LET { \"let\" }\n  | MATCH { \"match\" }\n  | METHOD { \"method\" }\n  | MODULE { \"module\" }\n  | MUTABLE { \"mutable\" }\n  | NEW { \"new\" }\n  | NONREC { \"nonrec\" }\n  | OBJECT { \"object\" }\n  | OF { \"of\" }\n  | OPEN { \"open\" }\n  | OR { \"or\" }\n  | PRIVATE { \"private\" }\n  | REC { \"rec\" }\n  | SIG { \"sig\" }\n  | STRUCT { \"struct\" }\n  | THEN { \"then\" }\n  | TO { \"to\" }\n  | TRUE { \"true\" }\n  | TRY { \"try\" }\n  | TYPE { \"type\" }\n  | VAL { \"val\" }\n  | VIRTUAL { \"virtual\" }\n  | WHEN { \"when\" }\n  | WHILE { \"while\" }\n  | WITH { \"with\" }\n/* mod/land/lor/lxor/lsl/lsr/asr are not supported for now */\n;\n\nattr_id:\n    single_attr_id { mkloc $1 (symbol_rloc()) }\n  | single_attr_id DOT attr_id { 
mkloc ($1 ^ \".\" ^ $3.txt) (symbol_rloc())}\n;\nattribute:\n  LBRACKETAT attr_id payload RBRACKET { ($2, $3) }\n;\npost_item_attribute:\n  LBRACKETATAT attr_id payload RBRACKET { ($2, $3) }\n;\nfloating_attribute:\n  LBRACKETATATAT attr_id payload RBRACKET { ($2, $3) }\n;\npost_item_attributes:\n    /* empty */  { [] }\n  | post_item_attribute post_item_attributes { $1 :: $2 }\n;\nattributes:\n    /* empty */{ [] }\n  | attribute attributes { $1 :: $2 }\n;\next_attributes:\n    /* empty */  { None, [] }\n  | attribute attributes { None, $1 :: $2 }\n  | PERCENT attr_id attributes { Some $2, $3 }\n;\nextension:\n  LBRACKETPERCENT attr_id payload RBRACKET { ($2, $3) }\n;\nitem_extension:\n  LBRACKETPERCENTPERCENT attr_id payload RBRACKET { ($2, $3) }\n;\npayload:\n    structure { PStr $1 }\n  | COLON signature { PSig $2 }\n  | COLON core_type { PTyp $2 }\n  | QUESTION pattern { PPat ($2, None) }\n  | QUESTION pattern WHEN seq_expr { PPat ($2, Some $4) }\n;\n%%\n"
  },
  {
    "path": "analysis/vendor/ml/parsetree.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Abstract syntax tree produced by parsing *)\n\nopen Asttypes\n\ntype constant =\n  | Pconst_integer of string * char option\n  (* 3 3l 3L 3n\n\n     Suffixes [g-z][G-Z] are accepted by the parser.\n     Suffixes except 'l', 'L' are rejected by the typechecker\n  *)\n  | Pconst_char of int\n  (* 'c' *)\n  | Pconst_string of string * string option\n  (* \"constant\"\n     {delim|other constant|delim}\n  *)\n  | Pconst_float of string * char option\n(* 3.4 2e5 1.4e-4\n\n   Suffixes [g-z][G-Z] are accepted by the parser.\n   Suffixes are rejected by the typechecker.\n*)\n\n(** {1 Extension points} *)\n\ntype attribute = string loc * payload\n(* [@id ARG]\n   [@@id ARG]\n\n   Metadata containers passed around within the AST.\n   The compiler ignores unknown attributes.\n*)\n\nand extension = string loc * payload\n(* [%id ARG]\n   [%%id ARG]\n\n   Sub-language placeholder -- 
rejected by the typechecker.\n*)\n\nand attributes = attribute list\n\nand payload =\n  | PStr of structure\n  | PSig of signature (* : SIG *)\n  | PTyp of core_type (* : T *)\n  | PPat of pattern * expression option\n(* ? P  or  ? P when E *)\n\n(* Type expressions *)\n\n(** {1 Core language} *)\n\nand core_type = {\n  ptyp_desc: core_type_desc;\n  ptyp_loc: Location.t;\n  ptyp_attributes: attributes; (* ... [@id1] [@id2] *)\n}\n\nand core_type_desc =\n  | Ptyp_any (*  _ *)\n  | Ptyp_var of string (* 'a *)\n  | Ptyp_arrow of arg_label * core_type * core_type\n    (* T1 -> T2       Simple\n       ~l:T1 -> T2    Labelled\n       ?l:T1 -> T2    Optional\n    *)\n  | Ptyp_tuple of core_type list\n    (* T1 * ... * Tn\n\n       Invariant: n >= 2\n    *)\n  | Ptyp_constr of Longident.t loc * core_type list\n    (* tconstr\n       T tconstr\n       (T1, ..., Tn) tconstr\n    *)\n  | Ptyp_object of object_field list * closed_flag\n    (* < l1:T1; ...; ln:Tn >     (flag = Closed)\n       < l1:T1; ...; ln:Tn; .. > (flag = Open)\n    *)\n  | Ptyp_class of Longident.t loc * core_type list\n    (* #tconstr\n       T #tconstr\n       (T1, ..., Tn) #tconstr\n    *)\n  | Ptyp_alias of core_type * string (* T as 'a *)\n  | Ptyp_variant of row_field list * closed_flag * label list option\n    (* [ `A|`B ]         (flag = Closed; labels = None)\n       [> `A|`B ]        (flag = Open;   labels = None)\n       [< `A|`B ]        (flag = Closed; labels = Some [])\n       [< `A|`B > `X `Y ](flag = Closed; labels = Some [\"X\";\"Y\"])\n    *)\n  | Ptyp_poly of string loc list * core_type\n    (* 'a1 ... 'an. T\n\n       Can only appear in the following context:\n\n       - As the core_type of a Ppat_constraint node corresponding\n         to a constraint on a let-binding: let x : 'a1 ... 'an. 
T\n         = e ...\n\n       - Under Cfk_virtual for methods (not values).\n\n       - As the core_type of a Pctf_method node.\n\n       - As the core_type of a Pexp_poly node.\n\n       - As the pld_type field of a label_declaration.\n\n       - As a core_type of a Ptyp_object node.\n    *)\n  | Ptyp_package of package_type (* (module S) *)\n  | Ptyp_extension of extension\n(* [%id] *)\n\nand package_type = Longident.t loc * (Longident.t loc * core_type) list\n(*\n        (module S)\n        (module S with type t1 = T1 and ... and tn = Tn)\n       *)\n\nand row_field =\n  | Rtag of label loc * attributes * bool * core_type list\n    (* [`A]                   ( true,  [] )\n        [`A of T]              ( false, [T] )\n        [`A of T1 & .. & Tn]   ( false, [T1;...Tn] )\n        [`A of & T1 & .. & Tn] ( true,  [T1;...Tn] )\n\n       - The 2nd field is true if the tag contains a\n         constant (empty) constructor.\n       - '&' occurs when several types are used for the same constructor\n         (see 4.2 in the manual)\n\n       - TODO: switch to a record representation, and keep location\n    *)\n  | Rinherit of core_type\n(* [ T ] *)\n\nand object_field =\n  | Otag of label loc * attributes * core_type\n  | Oinherit of core_type\n\n(* Patterns *)\nand pattern = {\n  ppat_desc: pattern_desc;\n  ppat_loc: Location.t;\n  ppat_attributes: attributes; (* ... [@id1] [@id2] *)\n}\n\nand pattern_desc =\n  | Ppat_any (* _ *)\n  | Ppat_var of string loc (* x *)\n  | Ppat_alias of pattern * string loc (* P as 'a *)\n  | Ppat_constant of constant (* 1, 'a', \"true\", 1.0, 1l, 1L, 1n *)\n  | Ppat_interval of constant * constant\n    (* 'a'..'z'\n\n       Other forms of interval are recognized by the parser\n       but rejected by the type-checker. 
*)\n  | Ppat_tuple of pattern list (* (P1, ..., Pn)\n\n       Invariant: n >= 2\n    *)\n  | Ppat_construct of Longident.t loc * pattern option\n    (* C                None\n       C P              Some P\n       C (P1, ..., Pn)  Some (Ppat_tuple [P1; ...; Pn])\n    *)\n  | Ppat_variant of label * pattern option\n    (* `A             (None)\n       `A P           (Some P)\n    *)\n  | Ppat_record of (Longident.t loc * pattern) list * closed_flag\n    (* { l1=P1; ...; ln=Pn }     (flag = Closed)\n       { l1=P1; ...; ln=Pn; _}   (flag = Open)\n\n       Invariant: n > 0\n    *)\n  | Ppat_array of pattern list (* [| P1; ...; Pn |] *)\n  | Ppat_or of pattern * pattern (* P1 | P2 *)\n  | Ppat_constraint of pattern * core_type (* (P : T) *)\n  | Ppat_type of Longident.t loc (* #tconst *)\n  | Ppat_lazy of pattern (* lazy P *)\n  | Ppat_unpack of string loc\n    (* (module P)\n       Note: (module P : S) is represented as\n       Ppat_constraint(Ppat_unpack, Ptyp_package)\n    *)\n  | Ppat_exception of pattern (* exception P *)\n  | Ppat_extension of extension (* [%id] *)\n  | Ppat_open of Longident.t loc * pattern\n(* M.(P) *)\n\n(* Value expressions *)\n\nand expression = {\n  pexp_desc: expression_desc;\n  pexp_loc: Location.t;\n  mutable pexp_attributes: attributes; (* ... [@id1] [@id2] *)\n}\n\nand expression_desc =\n  | Pexp_ident of Longident.t loc (* x\n       M.x\n    *)\n  | Pexp_constant of constant (* 1, 'a', \"true\", 1.0, 1l, 1L, 1n *)\n  | Pexp_let of rec_flag * value_binding list * expression\n    (* let P1 = E1 and ... and Pn = EN in E       (flag = Nonrecursive)\n       let rec P1 = E1 and ... and Pn = EN in E   (flag = Recursive)\n    *)\n  | Pexp_function of case list (* function P1 -> E1 | ... 
| Pn -> En *)\n  | Pexp_fun of arg_label * expression option * pattern * expression\n    (* fun P -> E1                          (Simple, None)\n       fun ~l:P -> E1                       (Labelled l, None)\n       fun ?l:P -> E1                       (Optional l, None)\n       fun ?l:(P = E0) -> E1                (Optional l, Some E0)\n\n       Notes:\n       - If E0 is provided, only Optional is allowed.\n       - \"fun P1 P2 .. Pn -> E1\" is represented as nested Pexp_fun.\n       - \"let f P = E\" is represented using Pexp_fun.\n    *)\n  | Pexp_apply of expression * (arg_label * expression) list\n    (* E0 ~l1:E1 ... ~ln:En\n       li can be empty (non labeled argument) or start with '?'\n       (optional argument).\n\n       Invariant: n > 0\n    *)\n  | Pexp_match of expression * case list\n    (* match E0 with P1 -> E1 | ... | Pn -> En *)\n  | Pexp_try of expression * case list\n    (* try E0 with P1 -> E1 | ... | Pn -> En *)\n  | Pexp_tuple of expression list\n    (* (E1, ..., En)\n\n       Invariant: n >= 2\n    *)\n  | Pexp_construct of Longident.t loc * expression option\n    (* C                None\n       C E              Some E\n       C (E1, ..., En)  Some (Pexp_tuple[E1;...;En])\n    *)\n  | Pexp_variant of label * expression option\n    (* `A             (None)\n       `A E           (Some E)\n    *)\n  | Pexp_record of (Longident.t loc * expression) list * expression option\n    (* { l1=P1; ...; ln=Pn }     (None)\n       { E0 with l1=P1; ...; ln=Pn }   (Some E0)\n\n       Invariant: n > 0\n    *)\n  | Pexp_field of expression * Longident.t loc (* E.l *)\n  | Pexp_setfield of expression * Longident.t loc * expression (* E1.l <- E2 *)\n  | Pexp_array of expression list (* [| E1; ...; En |] *)\n  | Pexp_ifthenelse of expression * expression * expression option\n    (* if E1 then E2 else E3 *)\n  | Pexp_sequence of expression * expression (* E1; E2 *)\n  | Pexp_while of expression * expression (* while E1 do E2 done *)\n  | Pexp_for of pattern * 
expression * expression * direction_flag * expression\n    (* for i = E1 to E2 do E3 done      (flag = Upto)\n       for i = E1 downto E2 do E3 done  (flag = Downto)\n    *)\n  | Pexp_constraint of expression * core_type (* (E : T) *)\n  | Pexp_coerce of expression * core_type option * core_type\n    (* (E :> T)        (None, T)\n       (E : T0 :> T)   (Some T0, T)\n    *)\n  | Pexp_send of expression * label loc (*  E # m *)\n  | Pexp_new of Longident.t loc (* new M.c *)\n  | Pexp_setinstvar of label loc * expression (* x <- 2 *)\n  | Pexp_override of (label loc * expression) list\n    (* {< x1 = E1; ...; Xn = En >} *)\n  | Pexp_letmodule of string loc * module_expr * expression\n    (* let module M = ME in E *)\n  | Pexp_letexception of extension_constructor * expression\n    (* let exception C in E *)\n  | Pexp_assert of expression\n    (* assert E\n       Note: \"assert false\" is treated in a special way by the\n       type-checker. *)\n  | Pexp_lazy of expression (* lazy E *)\n  | Pexp_poly of expression * core_type option\n    (* Used for method bodies.\n\n       Can only be used as the expression under Cfk_concrete\n       for methods (not values). *)\n  | Pexp_object of class_structure (* object ... end *)\n  | Pexp_newtype of string loc * expression (* fun (type t) -> E *)\n  | Pexp_pack of module_expr\n    (* (module ME)\n\n       (module ME : S) is represented as\n       Pexp_constraint(Pexp_pack, Ptyp_package S) *)\n  | Pexp_open of override_flag * Longident.t loc * expression\n    (* M.(E)\n       let open M in E\n       let! open M in E *)\n  | Pexp_extension of extension (* [%id] *)\n  | Pexp_unreachable\n(* . *)\n\nand case = {\n  (* (P -> E) or (P when E0 -> E) *)\n  pc_lhs: pattern;\n  pc_guard: expression option;\n  pc_rhs: expression;\n}\n\n(* Value descriptions *)\nand value_description = {\n  pval_name: string loc;\n  pval_type: core_type;\n  pval_prim: string list;\n  pval_attributes: attributes; (* ... 
[@@id1] [@@id2] *)\n  pval_loc: Location.t;\n}\n\n(*\n  val x: T                            (prim = [])\n  external x: T = \"s1\" ... \"sn\"       (prim = [\"s1\";...\"sn\"])\n*)\n\n(* Type declarations *)\nand type_declaration = {\n  ptype_name: string loc;\n  ptype_params: (core_type * variance) list;\n      (* ('a1,...'an) t; None represents  _*)\n  ptype_cstrs: (core_type * core_type * Location.t) list;\n      (* ... constraint T1=T1'  ... constraint Tn=Tn' *)\n  ptype_kind: type_kind;\n  ptype_private: private_flag; (* = private ... *)\n  ptype_manifest: core_type option; (* = T *)\n  ptype_attributes: attributes; (* ... [@@id1] [@@id2] *)\n  ptype_loc: Location.t;\n}\n\n(*\n  type t                     (abstract, no manifest)\n  type t = T0                (abstract, manifest=T0)\n  type t = C of T | ...      (variant,  no manifest)\n  type t = T0 = C of T | ... (variant,  manifest=T0)\n  type t = {l: T; ...}       (record,   no manifest)\n  type t = T0 = {l : T; ...} (record,   manifest=T0)\n  type t = ..                (open,     no manifest)\n*)\nand type_kind =\n  | Ptype_abstract\n  | Ptype_variant of constructor_declaration list\n    (* Invariant: non-empty list *)\n  | Ptype_record of label_declaration list (* Invariant: non-empty list *)\n  | Ptype_open\n\nand label_declaration = {\n  pld_name: string loc;\n  pld_mutable: mutable_flag;\n  pld_type: core_type;\n  pld_loc: Location.t;\n  pld_attributes: attributes; (* l : T [@id1] [@id2] *)\n}\n\n(* { ...; l: T; ... }            (mutable=Immutable)\n   { ...; mutable l: T; ... }    (mutable=Mutable)\n\n   Note: T can be a Ptyp_poly.\n*)\nand constructor_declaration = {\n  pcd_name: string loc;\n  pcd_args: constructor_arguments;\n  pcd_res: core_type option;\n  pcd_loc: Location.t;\n  pcd_attributes: attributes; (* C of ... [@id1] [@id2] *)\n}\n\nand constructor_arguments =\n  | Pcstr_tuple of core_type list\n  | Pcstr_record of label_declaration list\n\n(*\n  | C of T1 * ... 
* Tn     (res = None,    args = Pcstr_tuple [])\n  | C: T0                  (res = Some T0, args = [])\n  | C: T1 * ... * Tn -> T0 (res = Some T0, args = Pcstr_tuple)\n  | C of {...}             (res = None,    args = Pcstr_record)\n  | C: {...} -> T0         (res = Some T0, args = Pcstr_record)\n  | C of {...} as t        (res = None,    args = Pcstr_record)\n*)\nand type_extension = {\n  ptyext_path: Longident.t loc;\n  ptyext_params: (core_type * variance) list;\n  ptyext_constructors: extension_constructor list;\n  ptyext_private: private_flag;\n  ptyext_attributes: attributes; (* ... [@@id1] [@@id2] *)\n}\n(*\n  type t += ...\n*)\n\nand extension_constructor = {\n  pext_name: string loc;\n  pext_kind: extension_constructor_kind;\n  pext_loc: Location.t;\n  pext_attributes: attributes; (* C of ... [@id1] [@id2] *)\n}\n\nand extension_constructor_kind =\n  | Pext_decl of constructor_arguments * core_type option\n    (*\n         | C of T1 * ... * Tn     ([T1; ...; Tn], None)\n         | C: T0                  ([], Some T0)\n         | C: T1 * ... * Tn -> T0 ([T1; ...; Tn], Some T0)\n       *)\n  | Pext_rebind of Longident.t loc\n(*\n         | C = D\n       *)\n\n(* Type expressions for the class language *)\n\n(** {1 Class language} *)\n\nand class_type = {\n  pcty_desc: class_type_desc;\n  pcty_loc: Location.t;\n  pcty_attributes: attributes; (* ... [@id1] [@id2] *)\n}\n\nand class_type_desc =\n  | Pcty_constr of Longident.t loc * core_type list\n    (* c\n       ['a1, ..., 'an] c *)\n  | Pcty_signature of class_signature (* object ... end *)\n  | Pcty_arrow of arg_label * core_type * class_type\n    (* T -> CT       Simple\n       ~l:T -> CT    Labelled l\n       ?l:T -> CT    Optional l\n    *)\n  | Pcty_extension of extension (* [%id] *)\n  | Pcty_open of override_flag * Longident.t loc * class_type\n(* let open M in CT *)\n\nand class_signature = {\n  pcsig_self: core_type;\n  pcsig_fields: class_type_field list;\n}\n(* object('selfpat) ... 
end\n   object ... end             (self = Ptyp_any)\n*)\n\nand class_type_field = {\n  pctf_desc: class_type_field_desc;\n  pctf_loc: Location.t;\n  pctf_attributes: attributes; (* ... [@@id1] [@@id2] *)\n}\n\nand class_type_field_desc =\n  | Pctf_inherit of class_type (* inherit CT *)\n  | Pctf_val of (label loc * mutable_flag * virtual_flag * core_type)\n    (* val x: T *)\n  | Pctf_method of (label loc * private_flag * virtual_flag * core_type)\n    (* method x: T\n\n       Note: T can be a Ptyp_poly.\n    *)\n  | Pctf_constraint of (core_type * core_type) (* constraint T1 = T2 *)\n  | Pctf_attribute of attribute (* [@@@id] *)\n  | Pctf_extension of extension\n(* [%%id] *)\n\nand 'a class_infos = {\n  pci_virt: virtual_flag;\n  pci_params: (core_type * variance) list;\n  pci_name: string loc;\n  pci_expr: 'a;\n  pci_loc: Location.t;\n  pci_attributes: attributes; (* ... [@@id1] [@@id2] *)\n}\n(* class c = ...\n   class ['a1,...,'an] c = ...\n   class virtual c = ...\n\n   Also used for \"class type\" declaration.\n*)\n\nand class_type_declaration = class_type class_infos\n\n(* Value expressions for the class language *)\nand class_expr = {\n  pcl_desc: class_expr_desc;\n  pcl_loc: Location.t;\n  pcl_attributes: attributes; (* ... [@id1] [@id2] *)\n}\n\nand class_expr_desc =\n  | Pcl_constr of Longident.t loc * core_type list\n    (* c\n       ['a1, ..., 'an] c *)\n  | Pcl_structure of class_structure (* object ... end *)\n  | Pcl_fun of arg_label * expression option * pattern * class_expr\n    (* fun P -> CE                          (Simple, None)\n       fun ~l:P -> CE                       (Labelled l, None)\n       fun ?l:P -> CE                       (Optional l, None)\n       fun ?l:(P = E0) -> CE                (Optional l, Some E0)\n    *)\n  | Pcl_apply of class_expr * (arg_label * expression) list\n    (* CE ~l1:E1 ... 
~ln:En\n       li can be empty (non labeled argument) or start with '?'\n       (optional argument).\n\n       Invariant: n > 0\n    *)\n  | Pcl_let of rec_flag * value_binding list * class_expr\n    (* let P1 = E1 and ... and Pn = EN in CE      (flag = Nonrecursive)\n       let rec P1 = E1 and ... and Pn = EN in CE  (flag = Recursive)\n    *)\n  | Pcl_constraint of class_expr * class_type (* (CE : CT) *)\n  | Pcl_extension of extension\n  (* [%id] *)\n  | Pcl_open of override_flag * Longident.t loc * class_expr\n(* let open M in CE *)\n\nand class_structure = {pcstr_self: pattern; pcstr_fields: class_field list}\n(* object(selfpat) ... end\n   object ... end           (self = Ppat_any)\n*)\n\nand class_field = {\n  pcf_desc: class_field_desc;\n  pcf_loc: Location.t;\n  pcf_attributes: attributes; (* ... [@@id1] [@@id2] *)\n}\n\nand class_field_desc =\n  | Pcf_inherit of unit\n    (* inherit CE\n       inherit CE as x\n       inherit! CE\n       inherit! CE as x\n    *)\n  | Pcf_val of (label loc * mutable_flag * class_field_kind)\n    (* val x = E\n       val virtual x: T\n    *)\n  | Pcf_method of (label loc * private_flag * class_field_kind)\n    (* method x = E            (E can be a Pexp_poly)\n       method virtual x: T     (T can be a Ptyp_poly)\n    *)\n  | Pcf_constraint of (core_type * core_type) (* constraint T1 = T2 *)\n  | Pcf_initializer of expression (* initializer E *)\n  | Pcf_attribute of attribute (* [@@@id] *)\n  | Pcf_extension of extension\n(* [%%id] *)\n\nand class_field_kind =\n  | Cfk_virtual of core_type\n  | Cfk_concrete of override_flag * expression\n(* Type expressions for the module language *)\n\n(** {1 Module language} *)\n\nand module_type = {\n  pmty_desc: module_type_desc;\n  pmty_loc: Location.t;\n  pmty_attributes: attributes; (* ... [@id1] [@id2] *)\n}\n\nand module_type_desc =\n  | Pmty_ident of Longident.t loc (* S *)\n  | Pmty_signature of signature (* sig ... 
end *)\n  | Pmty_functor of string loc * module_type option * module_type\n    (* functor(X : MT1) -> MT2 *)\n  | Pmty_with of module_type * with_constraint list (* MT with ... *)\n  | Pmty_typeof of module_expr (* module type of ME *)\n  | Pmty_extension of extension (* [%id] *)\n  | Pmty_alias of Longident.t loc\n(* (module M) *)\n\nand signature = signature_item list\n\nand signature_item = {psig_desc: signature_item_desc; psig_loc: Location.t}\n\nand signature_item_desc =\n  | Psig_value of value_description\n    (*\n          val x: T\n          external x: T = \"s1\" ... \"sn\"\n         *)\n  | Psig_type of rec_flag * type_declaration list\n    (* type t1 = ... and ... and tn = ... *)\n  | Psig_typext of type_extension (* type t1 += ... *)\n  | Psig_exception of extension_constructor (* exception C of T *)\n  | Psig_module of module_declaration (* module X : MT *)\n  | Psig_recmodule of module_declaration list\n    (* module rec X1 : MT1 and ... and Xn : MTn *)\n  | Psig_modtype of module_type_declaration\n    (* module type S = MT\n       module type S *)\n  | Psig_open of open_description (* open X *)\n  | Psig_include of include_description (* include MT *)\n  | Psig_class of unit (* class c1 : ... and ... and cn : ... *)\n  | Psig_class_type of class_type_declaration list\n    (* class type ct1 = ... and ... and ctn = ... *)\n  | Psig_attribute of attribute (* [@@@id] *)\n  | Psig_extension of extension * attributes\n(* [%%id] *)\n\nand module_declaration = {\n  pmd_name: string loc;\n  pmd_type: module_type;\n  pmd_attributes: attributes; (* ... [@@id1] [@@id2] *)\n  pmd_loc: Location.t;\n}\n(* S : MT *)\n\nand module_type_declaration = {\n  pmtd_name: string loc;\n  pmtd_type: module_type option;\n  pmtd_attributes: attributes; (* ... 
[@@id1] [@@id2] *)\n  pmtd_loc: Location.t;\n}\n(* S = MT\n   S       (abstract module type declaration, pmtd_type = None)\n*)\n\nand open_description = {\n  popen_lid: Longident.t loc;\n  popen_override: override_flag;\n  popen_loc: Location.t;\n  popen_attributes: attributes;\n}\n(* open! X - popen_override = Override (silences the 'used identifier\n                              shadowing' warning)\n   open  X - popen_override = Fresh\n*)\n\nand 'a include_infos = {\n  pincl_mod: 'a;\n  pincl_loc: Location.t;\n  pincl_attributes: attributes;\n}\n\nand include_description = module_type include_infos\n(* include MT *)\n\nand include_declaration = module_expr include_infos\n(* include ME *)\n\nand with_constraint =\n  | Pwith_type of Longident.t loc * type_declaration\n    (* with type X.t = ...\n\n       Note: the last component of the longident must match\n       the name of the type_declaration. *)\n  | Pwith_module of Longident.t loc * Longident.t loc (* with module X.Y = Z *)\n  | Pwith_typesubst of Longident.t loc * type_declaration\n    (* with type X.t := ..., same format as [Pwith_type] *)\n  | Pwith_modsubst of Longident.t loc * Longident.t loc\n(* with module X.Y := Z *)\n\n(* Value expressions for the module language *)\n\nand module_expr = {\n  pmod_desc: module_expr_desc;\n  pmod_loc: Location.t;\n  pmod_attributes: attributes; (* ... [@id1] [@id2] *)\n}\n\nand module_expr_desc =\n  | Pmod_ident of Longident.t loc (* X *)\n  | Pmod_structure of structure (* struct ... 
end *)\n  | Pmod_functor of string loc * module_type option * module_expr\n    (* functor(X : MT1) -> ME *)\n  | Pmod_apply of module_expr * module_expr (* ME1(ME2) *)\n  | Pmod_constraint of module_expr * module_type (* (ME : MT) *)\n  | Pmod_unpack of expression (* (val E) *)\n  | Pmod_extension of extension\n(* [%id] *)\n\nand structure = structure_item list\n\nand structure_item = {pstr_desc: structure_item_desc; pstr_loc: Location.t}\n\nand structure_item_desc =\n  | Pstr_eval of expression * attributes (* E *)\n  | Pstr_value of rec_flag * value_binding list\n    (* let P1 = E1 and ... and Pn = EN       (flag = Nonrecursive)\n       let rec P1 = E1 and ... and Pn = EN   (flag = Recursive)\n    *)\n  | Pstr_primitive of value_description\n    (* val x: T\n       external x: T = \"s1\" ... \"sn\" *)\n  | Pstr_type of rec_flag * type_declaration list\n    (* type t1 = ... and ... and tn = ... *)\n  | Pstr_typext of type_extension (* type t1 += ... *)\n  | Pstr_exception of extension_constructor\n    (* exception C of T\n       exception C = M.X *)\n  | Pstr_module of module_binding (* module X = ME *)\n  | Pstr_recmodule of module_binding list\n    (* module rec X1 = ME1 and ... and Xn = MEn *)\n  | Pstr_modtype of module_type_declaration (* module type S = MT *)\n  | Pstr_open of open_description (* open X *)\n  | Pstr_class of unit (* Dummy AST node *)\n  | Pstr_class_type of class_type_declaration list\n    (* class type ct1 = ... and ... and ctn = ... *)\n  | Pstr_include of include_declaration (* include ME *)\n  | Pstr_attribute of attribute (* [@@@id] *)\n  | Pstr_extension of extension * attributes\n(* [%%id] *)\n\nand value_binding = {\n  pvb_pat: pattern;\n  pvb_expr: expression;\n  pvb_attributes: attributes;\n  pvb_loc: Location.t;\n}\n\nand module_binding = {\n  pmb_name: string loc;\n  pmb_expr: module_expr;\n  pmb_attributes: attributes;\n  pmb_loc: Location.t;\n}\n(* X = ME *)\n"
  },
  {
    "path": "analysis/vendor/ml/path.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype t = Pident of Ident.t | Pdot of t * string * int | Papply of t * t\n\nlet nopos = -1\n\nlet rec same p1 p2 =\n  match (p1, p2) with\n  | Pident id1, Pident id2 -> Ident.same id1 id2\n  | Pdot (p1, s1, _pos1), Pdot (p2, s2, _pos2) -> s1 = s2 && same p1 p2\n  | Papply (fun1, arg1), Papply (fun2, arg2) -> same fun1 fun2 && same arg1 arg2\n  | _, _ -> false\n\nlet rec compare p1 p2 =\n  match (p1, p2) with\n  | Pident id1, Pident id2 -> Ident.compare id1 id2\n  | Pdot (p1, s1, _pos1), Pdot (p2, s2, _pos2) ->\n    let h = compare p1 p2 in\n    if h <> 0 then h else String.compare s1 s2\n  | Papply (fun1, arg1), Papply (fun2, arg2) ->\n    let h = compare fun1 fun2 in\n    if h <> 0 then h else compare arg1 arg2\n  | (Pident _ | Pdot _), (Pdot _ | Papply _) -> -1\n  | (Pdot _ | Papply _), (Pident _ | Pdot _) -> 1\n\nlet rec isfree id = function\n  | Pident id' -> Ident.same id 
id'\n  | Pdot (p, _s, _pos) -> isfree id p\n  | Papply (p1, p2) -> isfree id p1 || isfree id p2\n\nlet rec binding_time = function\n  | Pident id -> Ident.binding_time id\n  | Pdot (p, _s, _pos) -> binding_time p\n  | Papply (p1, p2) ->\n    Ext_pervasives.max_int (binding_time p1) (binding_time p2)\n\nlet kfalse _ = false\n\nlet rec name ?(paren = kfalse) = function\n  | Pident id -> Ident.name id\n  | Pdot (p, s, _pos) ->\n    name ~paren p ^ if paren s then \".( \" ^ s ^ \" )\" else \".\" ^ s\n  | Papply (p1, p2) -> name ~paren p1 ^ \"(\" ^ name ~paren p2 ^ \")\"\n\nlet rec head = function\n  | Pident id -> id\n  | Pdot (p, _s, _pos) -> head p\n  | Papply _ -> assert false\n\nlet flatten =\n  let rec flatten acc = function\n    | Pident id -> `Ok (id, acc)\n    | Pdot (p, s, _) -> flatten (s :: acc) p\n    | Papply _ -> `Contains_apply\n  in\n  fun t -> flatten [] t\n\nlet heads p =\n  let rec heads p acc =\n    match p with\n    | Pident id -> id :: acc\n    | Pdot (p, _s, _pos) -> heads p acc\n    | Papply (p1, p2) -> heads p1 (heads p2 acc)\n  in\n  heads p []\n\nlet rec last = function\n  | Pident id -> Ident.name id\n  | Pdot (_, s, _) -> s\n  | Papply (_, p) -> last p\n\nlet is_uident s =\n  assert (s <> \"\");\n  match s.[0] with\n  | 'A' .. 'Z' -> true\n  | _ -> false\n\ntype typath =\n  | Regular of t\n  | Ext of t * string\n  | LocalExt of Ident.t\n  | Cstr of t * string\n\nlet constructor_typath = function\n  | Pident id when is_uident (Ident.name id) -> LocalExt id\n  | Pdot (ty_path, s, _) when is_uident s ->\n    if is_uident (last ty_path) then Ext (ty_path, s) else Cstr (ty_path, s)\n  | p -> Regular p\n\nlet is_constructor_typath p =\n  match constructor_typath p with\n  | Regular _ -> false\n  | _ -> true\n"
  },
  {
    "path": "analysis/vendor/ml/path.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Access paths *)\n\ntype t = Pident of Ident.t | Pdot of t * string * int | Papply of t * t\n\nval same : t -> t -> bool\nval compare : t -> t -> int\nval isfree : Ident.t -> t -> bool\nval binding_time : t -> int\nval flatten : t -> [`Contains_apply | `Ok of Ident.t * string list]\n\nval nopos : int\n\nval name : ?paren:(string -> bool) -> t -> string\n(* [paren] tells whether a path suffix needs parentheses *)\n\nval head : t -> Ident.t\n\nval heads : t -> Ident.t list\n\nval last : t -> string\n\ntype typath =\n  | Regular of t\n  | Ext of t * string\n  | LocalExt of Ident.t\n  | Cstr of t * string\n\nval constructor_typath : t -> typath\nval is_constructor_typath : t -> bool\n"
  },
  {
    "path": "analysis/vendor/ml/pprintast.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                      Thomas Gazagnaire, OCamlPro                       *)\n(*                   Fabrice Le Fessant, INRIA Saclay                     *)\n(*               Hongbo Zhang, University of Pennsylvania                 *)\n(*                                                                        *)\n(*   Copyright 2007 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          
*)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Original Code from Ber-metaocaml, modified for 3.12.0 and fixed *)\n(* Printing code expressions *)\n(* Authors:  Ed Pizzi, Fabrice Le Fessant *)\n(* Extensive Rewrite: Hongbo Zhang: University of Pennsylvania *)\n(* TODO more fine-grained precedence pretty-printing *)\n\nopen Asttypes\nopen Format\nopen Location\nopen Longident\nopen Parsetree\nopen Ast_helper\n\nlet prefix_symbols  = [ '!'; '?'; '~' ] ;;\nlet infix_symbols = [ '='; '<'; '>'; '@'; '^'; '|'; '&'; '+'; '-'; '*'; '/';\n                      '$'; '%'; '#' ]\n\n(* type fixity = Infix| Prefix  *)\nlet special_infix_strings =\n  [\"asr\"; \"land\"; \"lor\"; \"lsl\"; \"lsr\"; \"lxor\"; \"mod\"; \"or\"; \":=\"; \"!=\"; \"::\" ]\n\n(* determines if the string is an infix string.\n   checks backwards, first allowing a renaming postfix (\"_102\") which\n   may have resulted from Pexp -> Texp -> Pexp translation, then checking\n   if all the characters in the beginning of the string are valid infix\n   characters. *)\nlet fixity_of_string  = function\n  | s when List.mem s special_infix_strings -> `Infix s\n  | s when List.mem s.[0] infix_symbols -> `Infix s\n  | s when List.mem s.[0] prefix_symbols -> `Prefix s\n  | s when s.[0] = '.' 
-> `Mixfix s\n  | _ -> `Normal\n\nlet view_fixity_of_exp = function\n  | {pexp_desc = Pexp_ident {txt=Lident l;_}; pexp_attributes = []} ->\n      fixity_of_string l\n  | _ -> `Normal\n\nlet is_infix  = function  | `Infix _ -> true | _  -> false\nlet is_mixfix = function `Mixfix _ -> true | _ -> false\n\n(* which identifiers are in fact operators needing parentheses *)\nlet needs_parens txt =\n  let fix = fixity_of_string txt in\n  is_infix fix\n  || is_mixfix fix\n  || List.mem txt.[0] prefix_symbols\n\n(* some infixes need spaces around parens to avoid clashes with comment\n   syntax *)\nlet needs_spaces txt =\n  txt.[0]='*' || txt.[String.length txt - 1] = '*'\n\n(* add parentheses to binders when they are in fact infix or prefix operators *)\nlet protect_ident ppf txt =\n  let format : (_, _, _) format =\n    if not (needs_parens txt) then \"%s\"\n    else if needs_spaces txt then \"(@;%s@;)\"\n    else \"(%s)\"\n  in fprintf ppf format txt\n\nlet protect_longident ppf print_longident longprefix txt =\n  let format : (_, _, _) format =\n    if not (needs_parens txt) then \"%a.%s\"\n    else if needs_spaces txt then  \"%a.(@;%s@;)\"\n    else \"%a.(%s)\" in\n  fprintf ppf format print_longident longprefix txt\n\ntype space_formatter = (unit, Format.formatter, unit) format\n\nlet override = function\n  | Override -> \"!\"\n  | Fresh -> \"\"\n\n(* variance encoding: need to sync up with the [parser.mly] *)\nlet type_variance = function\n  | Invariant -> \"\"\n  | Covariant -> \"+\"\n  | Contravariant -> \"-\"\n\ntype construct =\n  [ `cons of expression list\n  | `list of expression list\n  | `nil\n  | `normal\n  | `simple of Longident.t\n  | `tuple ]\n\nlet view_expr x =\n  match x.pexp_desc with\n  | Pexp_construct ( {txt= Lident \"()\"; _},_) -> `tuple\n  | Pexp_construct ( {txt= Lident \"[]\";_},_) -> `nil\n  | Pexp_construct ( {txt= Lident\"::\";_},Some _) ->\n      let rec loop exp acc = match exp with\n          | {pexp_desc=Pexp_construct ({txt=Lident 
\"[]\";_},_);\n             pexp_attributes = []} ->\n              (List.rev acc,true)\n          | {pexp_desc=\n             Pexp_construct ({txt=Lident \"::\";_},\n                             Some ({pexp_desc= Pexp_tuple([e1;e2]);\n                                    pexp_attributes = []}));\n             pexp_attributes = []}\n            ->\n              loop e2 (e1::acc)\n          | e -> (List.rev (e::acc),false) in\n      let (ls,b) = loop x []  in\n      if b then\n        `list ls\n      else `cons ls\n  | Pexp_construct (x,None) -> `simple (x.txt)\n  | _ -> `normal\n\nlet is_simple_construct :construct -> bool = function\n  | `nil | `tuple | `list _ | `simple _  -> true\n  | `cons _ | `normal -> false\n\nlet pp = fprintf\n\ntype ctxt = {\n  pipe : bool;\n  semi : bool;\n  ifthenelse : bool;\n}\n\nlet reset_ctxt = { pipe=false; semi=false; ifthenelse=false }\nlet under_pipe ctxt = { ctxt with pipe=true }\nlet under_semi ctxt = { ctxt with semi=true }\nlet under_ifthenelse ctxt = { ctxt with ifthenelse=true }\n(*\nlet reset_semi ctxt = { ctxt with semi=false }\nlet reset_ifthenelse ctxt = { ctxt with ifthenelse=false }\nlet reset_pipe ctxt = { ctxt with pipe=false }\n*)\n\nlet list : 'a . ?sep:space_formatter -> ?first:space_formatter ->\n  ?last:space_formatter -> (Format.formatter -> 'a -> unit) ->\n  Format.formatter -> 'a list -> unit\n  = fun ?sep ?first ?last fu f xs ->\n    let first = match first with Some x -> x |None -> (\"\": _ format6)\n    and last = match last with Some x -> x |None -> (\"\": _ format6)\n    and sep = match sep with Some x -> x |None -> (\"@ \": _ format6) in\n    let aux f = function\n      | [] -> ()\n      | [x] -> fu f x\n      | xs ->\n          let rec loop  f = function\n            | [x] -> fu f x\n            | x::xs ->  fu f x; pp f sep; loop f xs;\n            | _ -> assert false in begin\n            pp f first; loop f xs; pp f last;\n          end in\n    aux f xs\n\nlet option : 'a. 
?first:space_formatter -> ?last:space_formatter ->\n  (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a option -> unit\n  = fun  ?first  ?last fu f a ->\n    let first = match first with Some x -> x | None -> (\"\": _ format6)\n    and last = match last with Some x -> x | None -> (\"\": _ format6) in\n    match a with\n    | None -> ()\n    | Some x -> pp f first; fu f x; pp f last\n\nlet paren: 'a . ?first:space_formatter -> ?last:space_formatter ->\n  bool -> (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a -> unit\n  = fun  ?(first=(\"\": _ format6)) ?(last=(\"\": _ format6)) b fu f x ->\n    if b then (pp f \"(\"; pp f first; fu f x; pp f last; pp f \")\")\n    else fu f x\n\nlet rec longident f = function\n  | Lident s -> protect_ident f s\n  | Ldot(y,s) -> protect_longident f longident y s\n  | Lapply (y,s) ->\n      pp f \"%a(%a)\" longident y longident s\n\nlet longident_loc f x = pp f \"%a\" longident x.txt\n\nlet string_of_int_as_char i = Ext_util.string_of_int_as_char i\n\nlet constant f = function\n  | Pconst_char i -> pp f \"%s\"  (string_of_int_as_char i)\n  | Pconst_string (i, None) -> pp f \"%S\" i\n  | Pconst_string (i, Some delim) -> pp f \"{%s|%s|%s}\" delim i delim\n  | Pconst_integer (i, None) -> paren (i.[0]='-') (fun f -> pp f \"%s\") f i\n  | Pconst_integer (i, Some m) ->\n    paren (i.[0]='-') (fun f (i, m) -> pp f \"%s%c\" i m) f (i,m)\n  | Pconst_float (i, None) -> paren (i.[0]='-') (fun f -> pp f \"%s\") f i\n  | Pconst_float (i, Some m) -> paren (i.[0]='-') (fun f (i,m) ->\n      pp f \"%s%c\" i m) f (i,m)\n\n(* trailing space*)\nlet mutable_flag f = function\n  | Immutable -> ()\n  | Mutable -> pp f \"mutable@;\"\nlet virtual_flag f  = function\n  | Concrete -> ()\n  | Virtual -> pp f \"virtual@;\"\n\n(* trailing space added *)\nlet rec_flag f rf =\n  match rf with\n  | Nonrecursive -> ()\n  | Recursive -> pp f \"rec \"\nlet nonrec_flag f rf =\n  match rf with\n  | Nonrecursive -> pp f \"nonrec \"\n  | Recursive -> 
()\nlet direction_flag f = function\n  | Upto -> pp f \"to@ \"\n  | Downto -> pp f \"downto@ \"\nlet private_flag f = function\n  | Public -> ()\n  | Private -> pp f \"private@ \"\n\nlet constant_string f s = pp f \"%S\" s\nlet tyvar f str = pp f \"'%s\" str\nlet tyvar_loc f str = pp f \"'%s\" str.txt\nlet string_quot f x = pp f \"`%s\" x\n\n(* c ['a,'b] *)\nlet rec class_params_def ctxt f =  function\n  | [] -> ()\n  | l ->\n      pp f \"[%a] \" (* space *)\n        (list (type_param ctxt) ~sep:\",\") l\n\nand type_with_label ctxt f (label, c) =\n  match label with\n  | Nolabel    -> core_type1 ctxt f c (* otherwise parenthesize *)\n  | Labelled s -> pp f \"%s:%a\" s (core_type1 ctxt) c\n  | Optional s -> pp f \"?%s:%a\" s (core_type1 ctxt) c\n\nand core_type ctxt f x =\n  if x.ptyp_attributes <> [] then begin\n    pp f \"((%a)%a)\" (core_type ctxt) {x with ptyp_attributes=[]}\n      (attributes ctxt) x.ptyp_attributes\n  end\n  else match x.ptyp_desc with\n    | Ptyp_arrow (l, ct1, ct2) ->\n        pp f \"@[<2>%a@;->@;%a@]\" (* FIXME remove parens later *)\n          (type_with_label ctxt) (l,ct1) (core_type ctxt) ct2\n    | Ptyp_alias (ct, s) ->\n        pp f \"@[<2>%a@;as@;'%s@]\" (core_type1 ctxt) ct s\n    | Ptyp_poly ([], ct) ->\n        core_type ctxt f ct\n    | Ptyp_poly (sl, ct) ->\n        pp f \"@[<2>%a%a@]\"\n          (fun f l ->\n             pp f \"%a\"\n               (fun f l -> match l with\n                  | [] -> ()\n                  | _ ->\n                      pp f \"%a@;.@;\"\n                        (list tyvar_loc ~sep:\"@;\")  l)\n               l)\n          sl (core_type ctxt) ct\n    | _ -> pp f \"@[<2>%a@]\" (core_type1 ctxt) x\n\nand core_type1 ctxt f x =\n  if x.ptyp_attributes <> [] then core_type ctxt f x\n  else match x.ptyp_desc with\n    | Ptyp_any -> pp f \"_\";\n    | Ptyp_var s -> tyvar f  s;\n    | Ptyp_tuple l ->  pp f \"(%a)\" (list (core_type1 ctxt) ~sep:\"@;*@;\") l\n    | Ptyp_constr (li, l) ->\n        pp f (* 
\"%a%a@;\" *) \"%a%a\"\n          (fun f l -> match l with\n             |[] -> ()\n             |[x]-> pp f \"%a@;\" (core_type1 ctxt)  x\n             | _ -> list ~first:\"(\" ~last:\")@;\" (core_type ctxt) ~sep:\",@;\" f l)\n          l longident_loc li\n    | Ptyp_variant (l, closed, low) ->\n        let type_variant_helper f x =\n          match x with\n          | Rtag (l, attrs, _, ctl) ->\n              pp f \"@[<2>%a%a@;%a@]\" string_quot l.txt\n                (fun f l -> match l with\n                   |[] -> ()\n                   | _ -> pp f \"@;of@;%a\"\n                            (list (core_type ctxt) ~sep:\"&\")  ctl) ctl\n                (attributes ctxt) attrs\n          | Rinherit ct -> core_type ctxt f ct in\n        pp f \"@[<2>[%a%a]@]\"\n          (fun f l ->\n             match l, closed with\n             | [], Closed -> ()\n             | [], Open -> pp f \">\" (* Cf #7200: print [>] correctly *)\n             | _ ->\n                 pp f \"%s@;%a\"\n                   (match (closed,low) with\n                    | (Closed,None) -> \"\"\n                    | (Closed,Some _) -> \"<\" (* FIXME desugar the syntax sugar*)\n                    | (Open,_) -> \">\")\n                   (list type_variant_helper ~sep:\"@;<1 -2>| \") l) l\n          (fun f low -> match low with\n             |Some [] |None -> ()\n             |Some xs ->\n                 pp f \">@ %a\"\n                   (list string_quot) xs) low\n    | Ptyp_object (l, o) ->\n        let core_field_type f = function\n          | Otag (l, attrs, ct) ->\n            pp f \"@[<hov2>%s: %a@ %a@ @]\" l.txt\n              (core_type ctxt) ct (attributes ctxt) attrs (* Cf #7200 *)\n          | Oinherit ct ->\n            pp f \"@[<hov2>%a@ @]\" (core_type ctxt) ct\n        in\n        let field_var f = function\n          | Asttypes.Closed -> ()\n          | Asttypes.Open ->\n              match l with\n              | [] -> pp f \"..\"\n              | _ -> pp f \" ;..\"\n       
 in\n        pp f \"@[<hov2><@ %a%a@ > @]\" (list core_field_type ~sep:\";\") l\n          field_var o (* Cf #7200 *)\n    | Ptyp_class (li, l) ->   (*FIXME*)\n        pp f \"@[<hov2>%a#%a@]\"\n          (list (core_type ctxt) ~sep:\",\" ~first:\"(\" ~last:\")\") l\n          longident_loc li\n    | Ptyp_package (lid, cstrs) ->\n        let aux f (s, ct) =\n          pp f \"type %a@ =@ %a\" longident_loc s (core_type ctxt) ct  in\n        (match cstrs with\n         |[] -> pp f \"@[<hov2>(module@ %a)@]\" longident_loc lid\n         |_ ->\n             pp f \"@[<hov2>(module@ %a@ with@ %a)@]\" longident_loc lid\n               (list aux  ~sep:\"@ and@ \")  cstrs)\n    | Ptyp_extension e -> extension ctxt f e\n    | _ -> paren true (core_type ctxt) f x\n\n(********************pattern********************)\n(* be cautious when use [pattern], [pattern1] is preferred *)\nand pattern ctxt f x =\n  let rec list_of_pattern acc = function (* only consider ((A|B)|C)*)\n    | {ppat_desc= Ppat_or (p1,p2); ppat_attributes = []} ->\n        list_of_pattern  (p2::acc) p1\n    | x -> x::acc\n  in\n  if x.ppat_attributes <> [] then begin\n    pp f \"((%a)%a)\" (pattern ctxt) {x with ppat_attributes=[]}\n      (attributes ctxt) x.ppat_attributes\n  end\n  else match x.ppat_desc with\n    | Ppat_alias (p, s) ->\n        pp f \"@[<2>%a@;as@;%a@]\" (pattern ctxt) p protect_ident s.txt (* RA*)\n    | Ppat_or _ -> (* *)\n        pp f \"@[<hov0>%a@]\" (list ~sep:\"@,|\" (pattern ctxt))\n          (list_of_pattern [] x)\n    | _ -> pattern1 ctxt f x\n\nand pattern1 ctxt (f:Format.formatter) (x:pattern) : unit =\n  let rec pattern_list_helper f = function\n    | {ppat_desc =\n         Ppat_construct\n           ({ txt = Lident(\"::\") ;_},\n            Some ({ppat_desc = Ppat_tuple([pat1; pat2]);_}));\n       ppat_attributes = []}\n\n      ->\n        pp f \"%a::%a\" (simple_pattern ctxt) pat1 pattern_list_helper pat2 (*RA*)\n    | p -> pattern1 ctxt f p\n  in\n  if x.ppat_attributes <> [] 
then pattern ctxt f x\n  else match x.ppat_desc with\n    | Ppat_variant (l, Some p) ->\n        pp f \"@[<2>`%s@;%a@]\" l (simple_pattern ctxt) p\n    | Ppat_construct (({txt=Lident(\"()\"|\"[]\");_}), _) -> simple_pattern ctxt f x\n    | Ppat_construct (({txt;_} as li), po) ->\n        (* FIXME The third field always false *)\n        if txt = Lident \"::\" then\n          pp f \"%a\" pattern_list_helper x\n        else\n          (match po with\n           | Some x -> pp f \"%a@;%a\"  longident_loc li (simple_pattern ctxt) x\n           | None -> pp f \"%a\" longident_loc li)\n    | _ -> simple_pattern ctxt f x\n\nand simple_pattern ctxt (f:Format.formatter) (x:pattern) : unit =\n  if x.ppat_attributes <> [] then pattern ctxt f x\n  else match x.ppat_desc with\n    | Ppat_construct (({txt=Lident (\"()\"|\"[]\" as x);_}), _) -> pp f  \"%s\" x\n    | Ppat_any -> pp f \"_\";\n    | Ppat_var ({txt = txt;_}) -> protect_ident f txt\n    | Ppat_array l ->\n        pp f \"@[<2>[|%a|]@]\"  (list (pattern1 ctxt) ~sep:\";\") l\n    | Ppat_unpack (s) ->\n        pp f \"(module@ %s)@ \" s.txt\n    | Ppat_type li ->\n        pp f \"#%a\" longident_loc li\n    | Ppat_record (l, closed) ->\n        let longident_x_pattern f (li, p) =\n          match (li,p) with\n          | ({txt=Lident s;_ },\n             {ppat_desc=Ppat_var {txt;_};\n              ppat_attributes=[]; _})\n            when s = txt ->\n              pp f \"@[<2>%a@]\"  longident_loc li\n          | _ ->\n              pp f \"@[<2>%a@;=@;%a@]\" longident_loc li (pattern1 ctxt) p\n        in\n        begin match closed with\n        | Closed ->\n            pp f \"@[<2>{@;%a@;}@]\" (list longident_x_pattern ~sep:\";@;\") l\n        | _ ->\n            pp f \"@[<2>{@;%a;_}@]\" (list longident_x_pattern ~sep:\";@;\") l\n        end\n    | Ppat_tuple l ->\n        pp f \"@[<1>(%a)@]\" (list  ~sep:\",@;\" (pattern1 ctxt))  l (* level1*)\n    | Ppat_constant (c) -> pp f \"%a\" constant c\n    | Ppat_interval (c1, 
c2) -> pp f \"%a..%a\" constant c1 constant c2\n    | Ppat_variant (l,None) ->  pp f \"`%s\" l\n    | Ppat_constraint (p, ct) ->\n        pp f \"@[<2>(%a@;:@;%a)@]\" (pattern1 ctxt) p (core_type ctxt) ct\n    | Ppat_lazy p ->\n        pp f \"@[<2>(lazy@;%a)@]\" (pattern1 ctxt) p\n    | Ppat_exception p ->\n        pp f \"@[<2>exception@;%a@]\" (pattern1 ctxt) p\n    | Ppat_extension e -> extension ctxt f e\n    | Ppat_open (lid, p) ->\n        let with_paren =\n        match p.ppat_desc with\n        | Ppat_array _ | Ppat_record _\n        | Ppat_construct (({txt=Lident (\"()\"|\"[]\");_}), _) -> false\n        | _ -> true in\n        pp f \"@[<2>%a.%a @]\" longident_loc lid\n          (paren with_paren @@ pattern1 ctxt) p\n    | _ -> paren true (pattern ctxt) f x\n\nand label_exp ctxt f (l,opt,p) =\n  match l with\n  | Nolabel ->\n      (* single case pattern parens needed here *)\n      pp f \"%a@ \" (simple_pattern ctxt) p\n  | Optional rest ->\n      begin match p with\n      | {ppat_desc = Ppat_var {txt;_}; ppat_attributes = []}\n        when txt = rest ->\n          (match opt with\n           | Some o -> pp f \"?(%s=@;%a)@;\" rest  (expression ctxt) o\n           | None -> pp f \"?%s@ \" rest)\n      | _ ->\n          (match opt with\n           | Some o ->\n               pp f \"?%s:(%a=@;%a)@;\"\n                 rest (pattern1 ctxt) p (expression ctxt) o\n           | None -> pp f \"?%s:%a@;\" rest (simple_pattern ctxt) p)\n      end\n  | Labelled l -> match p with\n    | {ppat_desc  = Ppat_var {txt;_}; ppat_attributes = []}\n      when txt = l ->\n        pp f \"~%s@;\" l\n    | _ ->  pp f \"~%s:%a@;\" l (simple_pattern ctxt) p\n\nand sugar_expr ctxt f e =\n  if e.pexp_attributes <> [] then false\n  else match e.pexp_desc with\n  | Pexp_apply ({ pexp_desc = Pexp_ident {txt = id; _};\n                  pexp_attributes=[]; _}, args)\n    when List.for_all (fun (lab, _) -> lab = Nolabel) args -> begin\n      let print_indexop a path_prefix assign left right 
print_index indices\n          rem_args =\n        let print_path ppf = function\n          | None -> ()\n          | Some m -> pp ppf \".%a\" longident m in\n        match assign, rem_args with\n            | false, [] ->\n              pp f \"@[%a%a%s%a%s@]\"\n                (simple_expr ctxt) a print_path path_prefix\n                left (list ~sep:\",\" print_index) indices right; true\n            | true, [v] ->\n              pp f \"@[%a%a%s%a%s@ <-@;<1 2>%a@]\"\n                (simple_expr ctxt) a print_path path_prefix\n                left (list ~sep:\",\" print_index) indices right\n                (simple_expr ctxt) v; true\n            | _ -> false in\n      match id, List.map snd args with\n      | Lident \"!\", [e] ->\n        pp f \"@[<hov>!%a@]\" (simple_expr ctxt) e; true\n      | Ldot (path, (\"get\"|\"set\" as func)), a :: other_args -> begin\n          let assign = func = \"set\" in\n          let print = print_indexop a None assign in\n          match path, other_args with\n          | Lident \"Array\", i :: rest ->\n            print \".(\" \")\" (expression ctxt) [i] rest\n          | Lident \"String\", i :: rest ->\n            print \".[\" \"]\" (expression ctxt) [i] rest\n          | Ldot (Lident \"Bigarray\", \"Array1\"), i1 :: rest ->\n            print \".{\" \"}\" (simple_expr ctxt) [i1] rest\n          | Ldot (Lident \"Bigarray\", \"Array2\"), i1 :: i2 :: rest ->\n            print \".{\" \"}\" (simple_expr ctxt) [i1; i2] rest\n          | Ldot (Lident \"Bigarray\", \"Array3\"), i1 :: i2 :: i3 :: rest ->\n            print \".{\" \"}\" (simple_expr ctxt) [i1; i2; i3] rest\n          | Ldot (Lident \"Bigarray\", \"Genarray\"),\n            {pexp_desc = Pexp_array indexes; pexp_attributes = []} :: rest ->\n              print \".{\" \"}\" (simple_expr ctxt) indexes rest\n          | _ -> false\n        end\n      | (Lident s | Ldot(_,s)) , a :: i :: rest\n        when s.[0] = '.' 
->\n          let n = String.length s in\n          (* extract operator:\n             assignment operators end with [right_bracket ^ \"<-\"],\n             access operators end with [right_bracket] directly\n          *)\n          let assign = s.[n - 1] = '-'  in\n          let kind =\n            (* extract the right end bracket *)\n            if assign then s.[n - 3] else s.[n - 1] in\n          let left, right = match kind with\n            | ')' -> '(', \")\"\n            | ']' -> '[', \"]\"\n            | '}' -> '{', \"}\"\n            | _ -> assert false in\n          let path_prefix = match id with\n            | Ldot(m,_) -> Some m\n            | _ -> None in\n          let left = String.sub s 0 (1+String.index s left) in\n          print_indexop a path_prefix assign left right\n            (expression ctxt) [i] rest\n      | _ -> false\n    end\n  | _ -> false\n\nand expression ctxt f x =\n  if x.pexp_attributes <> [] then\n    pp f \"((%a)@,%a)\" (expression ctxt) {x with pexp_attributes=[]}\n      (attributes ctxt) x.pexp_attributes\n  else match x.pexp_desc with\n    | Pexp_function _ | Pexp_fun _ | Pexp_match _ | Pexp_try _ | Pexp_sequence _\n      when ctxt.pipe || ctxt.semi ->\n        paren true (expression reset_ctxt) f x\n    | Pexp_ifthenelse _ | Pexp_sequence _ when ctxt.ifthenelse ->\n        paren true (expression reset_ctxt) f x\n    | Pexp_let _ | Pexp_letmodule _ | Pexp_open _ | Pexp_letexception _\n        when ctxt.semi ->\n        paren true (expression reset_ctxt) f x\n    | Pexp_fun (l, e0, p, e) ->\n        pp f \"@[<2>fun@;%a->@;%a@]\"\n          (label_exp ctxt) (l, e0, p)\n          (expression ctxt) e\n    | Pexp_function l ->\n        pp f \"@[<hv>function%a@]\" (case_list ctxt) l\n    | Pexp_match (e, l) ->\n        pp f \"@[<hv0>@[<hv0>@[<2>match %a@]@ with@]%a@]\"\n          (expression reset_ctxt) e (case_list ctxt) l\n\n    | Pexp_try (e, l) ->\n        pp f \"@[<0>@[<hv2>try@ %a@]@ @[<0>with%a@]@]\"\n             (* 
\"try@;@[<2>%a@]@\\nwith@\\n%a\"*)\n          (expression reset_ctxt) e  (case_list ctxt) l\n    | Pexp_let (rf, l, e) ->\n        (* pp f \"@[<2>let %a%a in@;<1 -2>%a@]\"\n           (*no indentation here, a new line*) *)\n        (*   rec_flag rf *)\n        pp f \"@[<2>%a in@;<1 -2>%a@]\"\n          (bindings reset_ctxt) (rf,l)\n          (expression ctxt) e\n    | Pexp_apply (e, l) ->\n        begin if not (sugar_expr ctxt f x) then\n            match view_fixity_of_exp e with\n            | `Infix s ->\n                begin match l with\n                | [ (Nolabel, _) as arg1; (Nolabel, _) as arg2 ] ->\n                    (* FIXME associativity label_x_expression_param *)\n                    pp f \"@[<2>%a@;%s@;%a@]\"\n                      (label_x_expression_param reset_ctxt) arg1 s\n                      (label_x_expression_param ctxt) arg2\n                | _ ->\n                    pp f \"@[<2>%a %a@]\"\n                      (simple_expr ctxt) e\n                      (list (label_x_expression_param ctxt)) l\n                end\n            | `Prefix s ->\n                let s =\n                  if List.mem s [\"~+\";\"~-\";\"~+.\";\"~-.\"] &&\n                   (match l with\n                    (* See #7200: avoid turning (~- 1) into (- 1) which is\n                       parsed as an int literal *)\n                    |[(_,{pexp_desc=Pexp_constant _})] -> false\n                    | _ -> true)\n                  then String.sub s 1 (String.length s -1)\n                  else s in\n                begin match l with\n                | [(Nolabel, x)] ->\n                  pp f \"@[<2>%s@;%a@]\" s (simple_expr ctxt) x\n                | _   ->\n                  pp f \"@[<2>%a %a@]\" (simple_expr ctxt) e\n                    (list (label_x_expression_param ctxt)) l\n                end\n            | _ ->\n                pp f \"@[<hov2>%a@]\" begin fun f (e,l) ->\n                  pp f \"%a@ %a\" (expression2 ctxt) e\n                    
(list (label_x_expression_param reset_ctxt))  l\n                    (* reset here only because [function,match,try,sequence]\n                       are lower priority *)\n                end (e,l)\n        end\n\n    | Pexp_construct (li, Some eo)\n      when not (is_simple_construct (view_expr x))-> (* Not efficient FIXME*)\n        (match view_expr x with\n         | `cons ls -> list (simple_expr ctxt) f ls ~sep:\"@;::@;\"\n         | `normal ->\n             pp f \"@[<2>%a@;%a@]\" longident_loc li\n               (simple_expr ctxt) eo\n         | _ -> assert false)\n    | Pexp_setfield (e1, li, e2) ->\n        pp f \"@[<2>%a.%a@ <-@ %a@]\"\n          (simple_expr ctxt) e1 longident_loc li (simple_expr ctxt) e2\n    | Pexp_ifthenelse (e1, e2, eo) ->\n        (* @;@[<2>else@ %a@]@] *)\n        let fmt:(_,_,_)format =\"@[<hv0>@[<2>if@ %a@]@;@[<2>then@ %a@]%a@]\" in\n        let expression_under_ifthenelse = expression (under_ifthenelse ctxt) in\n        pp f fmt expression_under_ifthenelse e1 expression_under_ifthenelse e2\n          (fun f eo -> match eo with\n             | Some x ->\n                 pp f \"@;@[<2>else@;%a@]\" (expression (under_semi ctxt)) x\n             | None -> () (* pp f \"()\" *)) eo\n    | Pexp_sequence _ ->\n        let rec sequence_helper acc = function\n          | {pexp_desc=Pexp_sequence(e1,e2); pexp_attributes = []} ->\n              sequence_helper (e1::acc) e2\n          | v -> List.rev (v::acc) in\n        let lst = sequence_helper [] x in\n        pp f \"@[<hv>%a@]\"\n          (list (expression (under_semi ctxt)) ~sep:\";@;\") lst\n    | Pexp_new (li) ->\n        pp f \"@[<hov2>new@ %a@]\" longident_loc li;\n    | Pexp_setinstvar (s, e) ->\n        pp f \"@[<hov2>%s@ <-@ %a@]\" s.txt (expression ctxt) e\n    | Pexp_override l -> (* FIXME *)\n        let string_x_expression f (s, e) =\n          pp f \"@[<hov2>%s@ =@ %a@]\" s.txt (expression ctxt) e in\n        pp f \"@[<hov2>{<%a>}@]\"\n          (list string_x_expression  
~sep:\";\"  )  l;\n    | Pexp_letmodule (s, me, e) ->\n        pp f \"@[<hov2>let@ module@ %s@ =@ %a@ in@ %a@]\" s.txt\n          (module_expr reset_ctxt) me (expression ctxt) e\n    | Pexp_letexception (cd, e) ->\n        pp f \"@[<hov2>let@ exception@ %a@ in@ %a@]\"\n          (extension_constructor ctxt) cd\n          (expression ctxt) e\n    | Pexp_assert e ->\n        pp f \"@[<hov2>assert@ %a@]\" (simple_expr ctxt) e\n    | Pexp_lazy (e) ->\n        pp f \"@[<hov2>lazy@ %a@]\" (simple_expr ctxt) e\n    (* Pexp_poly: impossible but we should print it anyway, rather than\n       assert false *)\n    | Pexp_poly (e, None) ->\n        pp f \"@[<hov2>!poly!@ %a@]\" (simple_expr ctxt) e\n    | Pexp_poly (e, Some ct) ->\n        pp f \"@[<hov2>(!poly!@ %a@ : %a)@]\"\n          (simple_expr ctxt) e (core_type ctxt) ct\n    | Pexp_open (ovf, lid, e) ->\n        pp f \"@[<2>let open%s %a in@;%a@]\" (override ovf) longident_loc lid\n          (expression ctxt) e\n    | Pexp_variant (l,Some eo) ->\n        pp f \"@[<2>`%s@;%a@]\" l (simple_expr ctxt) eo\n    | Pexp_extension e -> extension ctxt f e\n    | Pexp_unreachable -> pp f \".\"\n    | _ -> expression1 ctxt f x\n\nand expression1 ctxt f x =\n  if x.pexp_attributes <> [] then expression ctxt f x\n  else match x.pexp_desc with\n    | Pexp_object cs -> pp f \"%a\" (class_structure ctxt) cs\n    | _ -> expression2 ctxt f x\n(* used in [Pexp_apply] *)\n\nand expression2 ctxt f x =\n  if x.pexp_attributes <> [] then expression ctxt f x\n  else match x.pexp_desc with\n    | Pexp_field (e, li) ->\n        pp f \"@[<hov2>%a.%a@]\" (simple_expr ctxt) e longident_loc li\n    | Pexp_send (e, s) -> pp f \"@[<hov2>%a#%s@]\" (simple_expr ctxt) e s.txt\n\n    | _ -> simple_expr ctxt f x\n\nand simple_expr ctxt f x =\n  if x.pexp_attributes <> [] then expression ctxt f x\n  else match x.pexp_desc with\n    | Pexp_construct _  when is_simple_construct (view_expr x) ->\n        (match view_expr x with\n         | `nil -> pp f 
\"[]\"\n         | `tuple -> pp f \"()\"\n         | `list xs ->\n             pp f \"@[<hv0>[%a]@]\"\n               (list (expression (under_semi ctxt)) ~sep:\";@;\") xs\n         | `simple x -> longident f x\n         | _ -> assert false)\n    | Pexp_ident li ->\n        longident_loc f li\n    (* (match view_fixity_of_exp x with *)\n    (* |`Normal -> longident_loc f li *)\n    (* | `Prefix _ | `Infix _ -> pp f \"( %a )\" longident_loc li) *)\n    | Pexp_constant c -> constant f c;\n    | Pexp_pack me ->\n        pp f \"(module@;%a)\" (module_expr ctxt) me\n    | Pexp_newtype (lid, e) ->\n        pp f \"fun@;(type@;%s)@;->@;%a\" lid.txt (expression ctxt) e\n    | Pexp_tuple l ->\n        pp f \"@[<hov2>(%a)@]\" (list (simple_expr ctxt) ~sep:\",@;\") l\n    | Pexp_constraint (e, ct) ->\n        pp f \"(%a : %a)\" (expression ctxt) e (core_type ctxt) ct\n    | Pexp_coerce (e, cto1, ct) ->\n        pp f \"(%a%a :> %a)\" (expression ctxt) e\n          (option (core_type ctxt) ~first:\" : \" ~last:\" \") cto1 (* no sep hint*)\n          (core_type ctxt) ct\n    | Pexp_variant (l, None) -> pp f \"`%s\" l\n    | Pexp_record (l, eo) ->\n        let longident_x_expression f ( li, e) =\n          match e with\n          |  {pexp_desc=Pexp_ident {txt;_};\n              pexp_attributes=[]; _} when li.txt = txt ->\n              pp f \"@[<hov2>%a@]\" longident_loc li\n          | _ ->\n              pp f \"@[<hov2>%a@;=@;%a@]\" longident_loc li (simple_expr ctxt) e\n        in\n        pp f \"@[<hv0>@[<hv2>{@;%a%a@]@;}@]\"(* \"@[<hov2>{%a%a}@]\" *)\n          (option ~last:\" with@;\" (simple_expr ctxt)) eo\n          (list longident_x_expression ~sep:\";@;\") l\n    | Pexp_array (l) ->\n        pp f \"@[<0>@[<2>[|%a|]@]@]\"\n          (list (simple_expr (under_semi ctxt)) ~sep:\";\") l\n    | Pexp_while (e1, e2) ->\n        let fmt : (_,_,_) format = \"@[<2>while@;%a@;do@;%a@;done@]\" in\n        pp f fmt (expression ctxt) e1 (expression ctxt) e2\n    | Pexp_for (s, e1, 
e2, df, e3) ->\n        let fmt:(_,_,_)format =\n          \"@[<hv0>@[<hv2>@[<2>for %a =@;%a@;%a%a@;do@]@;%a@]@;done@]\" in\n        let expression = expression ctxt in\n        pp f fmt (pattern ctxt) s expression e1 direction_flag\n          df expression e2 expression e3\n    | _ ->  paren true (expression ctxt) f x\n\nand attributes ctxt f l =\n  List.iter (attribute ctxt f) l\n\nand item_attributes ctxt f l =\n  List.iter (item_attribute ctxt f) l\n\nand attribute ctxt f (s, e) =\n  pp f \"@[<2>[@@%s@ %a]@]\" s.txt (payload ctxt) e\n\nand item_attribute ctxt f (s, e) =\n  pp f \"@[<2>[@@@@%s@ %a]@]\" s.txt (payload ctxt) e\n\nand floating_attribute ctxt f (s, e) =\n  pp f \"@[<2>[@@@@@@%s@ %a]@]\" s.txt (payload ctxt) e\n\nand value_description ctxt f x =\n  (* note: value_description has an attribute field,\n           but they're already printed by the callers this method *)\n  pp f \"@[<hov2>%a%a@]\" (core_type ctxt) x.pval_type\n    (fun f x ->\n#ifndef RELEASE\n      match x.pval_prim with \n      | first :: second :: _ \n        when Ext_string.first_marshal_char second\n        -> \n        pp f \"@ =@ %a -- %a\" \n          constant_string first\n          Ext_obj.pp_any (Marshal.from_string second 0)\n      | [] -> ()\n      | _ -> \n        pp f \"@ =@ %a\" (list constant_string) x.pval_prim\n      \n#else        \n       if x.pval_prim <> []\n       then pp f \"@ =@ %a\" (list constant_string) x.pval_prim\n#endif\n    ) x\n\nand extension ctxt f (s, e) =\n  pp f \"@[<2>[%%%s@ %a]@]\" s.txt (payload ctxt) e\n\nand item_extension ctxt f (s, e) =\n  pp f \"@[<2>[%%%%%s@ %a]@]\" s.txt (payload ctxt) e\n\nand exception_declaration ctxt f ext =\n  pp f \"@[<hov2>exception@ %a@]\" (extension_constructor ctxt) ext\n\nand class_signature ctxt f { pcsig_self = ct; pcsig_fields = l ;_} =\n  let class_type_field f x =\n    match x.pctf_desc with\n    | Pctf_inherit (ct) ->\n        pp f \"@[<2>inherit@ %a@]%a\" (class_type ctxt) ct\n          (item_attributes 
ctxt) x.pctf_attributes\n    | Pctf_val (s, mf, vf, ct) ->\n        pp f \"@[<2>val @ %a%a%s@ :@ %a@]%a\"\n          mutable_flag mf virtual_flag vf s.txt (core_type ctxt) ct\n          (item_attributes ctxt) x.pctf_attributes\n    | Pctf_method (s, pf, vf, ct) ->\n        pp f \"@[<2>method %a %a%s :@;%a@]%a\"\n          private_flag pf virtual_flag vf s.txt (core_type ctxt) ct\n          (item_attributes ctxt) x.pctf_attributes\n    | Pctf_constraint (ct1, ct2) ->\n        pp f \"@[<2>constraint@ %a@ =@ %a@]%a\"\n          (core_type ctxt) ct1 (core_type ctxt) ct2\n          (item_attributes ctxt) x.pctf_attributes\n    | Pctf_attribute a -> floating_attribute ctxt f a\n    | Pctf_extension e ->\n        item_extension ctxt f e;\n        item_attributes ctxt f x.pctf_attributes\n  in\n  pp f \"@[<hv0>@[<hv2>object@[<1>%a@]@ %a@]@ end@]\"\n    (fun f -> function\n         {ptyp_desc=Ptyp_any; ptyp_attributes=[]; _} -> ()\n       | ct -> pp f \" (%a)\" (core_type ctxt) ct) ct\n    (list class_type_field ~sep:\"@;\") l\n\n(* call [class_signature] called by [class_signature] *)\nand class_type ctxt f x =\n  match x.pcty_desc with\n  | Pcty_signature cs ->\n      class_signature ctxt f cs;\n      attributes ctxt f x.pcty_attributes\n  | Pcty_constr (li, l) ->\n      pp f \"%a%a%a\"\n        (fun f l -> match l with\n           | [] -> ()\n           | _  -> pp f \"[%a]@ \" (list (core_type ctxt) ~sep:\",\" ) l) l\n        longident_loc li\n        (attributes ctxt) x.pcty_attributes\n  | Pcty_arrow (l, co, cl) ->\n      pp f \"@[<2>%a@;->@;%a@]\" (* FIXME remove parens later *)\n        (type_with_label ctxt) (l,co)\n        (class_type ctxt) cl\n  | Pcty_extension e ->\n      extension ctxt f e;\n      attributes ctxt f x.pcty_attributes\n  | Pcty_open (ovf, lid, e) ->\n      pp f \"@[<2>let open%s %a in@;%a@]\" (override ovf) longident_loc lid\n        (class_type ctxt) e\n\n(* [class type a = object end] *)\nand class_type_declaration_list ctxt f l =\n  let 
class_type_declaration kwd f x =\n    let { pci_params=ls; pci_name={ txt; _ }; _ } = x in\n    pp f \"@[<2>%s %a%a%s@ =@ %a@]%a\" kwd\n      virtual_flag x.pci_virt\n      (class_params_def ctxt) ls txt\n      (class_type ctxt) x.pci_expr\n      (item_attributes ctxt) x.pci_attributes\n  in\n  match l with\n  | [] -> ()\n  | [x] -> class_type_declaration \"class type\" f x\n  | x :: xs ->\n      pp f \"@[<v>%a@,%a@]\"\n        (class_type_declaration \"class type\") x\n        (list ~sep:\"@,\" (class_type_declaration \"and\")) xs\n\nand class_field ctxt f x =\n  match x.pcf_desc with\n  | Pcf_inherit () -> ()  \n  | Pcf_val (s, mf, Cfk_concrete (ovf, e)) ->\n      pp f \"@[<2>val%s %a%s =@;%a@]%a\" (override ovf)\n        mutable_flag mf s.txt\n        (expression ctxt) e\n        (item_attributes ctxt) x.pcf_attributes\n  | Pcf_method (s, pf, Cfk_virtual ct) ->\n      pp f \"@[<2>method virtual %a %s :@;%a@]%a\"\n        private_flag pf s.txt\n        (core_type ctxt) ct\n        (item_attributes ctxt) x.pcf_attributes\n  | Pcf_val (s, mf, Cfk_virtual ct) ->\n      pp f \"@[<2>val virtual %a%s :@ %a@]%a\"\n        mutable_flag mf s.txt\n        (core_type ctxt) ct\n        (item_attributes ctxt) x.pcf_attributes\n  | Pcf_method (s, pf, Cfk_concrete (ovf, e)) ->\n      let bind e =\n        binding ctxt f\n          {pvb_pat=\n             {ppat_desc=Ppat_var s;ppat_loc=Location.none;ppat_attributes=[]};\n           pvb_expr=e;\n           pvb_attributes=[];\n           pvb_loc=Location.none;\n          }\n      in\n      pp f \"@[<2>method%s %a%a@]%a\"\n        (override ovf)\n        private_flag pf\n        (fun f -> function\n           | {pexp_desc=Pexp_poly (e, Some ct); pexp_attributes=[]; _} ->\n               pp f \"%s :@;%a=@;%a\"\n                 s.txt (core_type ctxt) ct (expression ctxt) e\n           | {pexp_desc=Pexp_poly (e, None); pexp_attributes=[]; _} ->\n               bind e\n           | _ -> bind e) e\n        (item_attributes ctxt) 
x.pcf_attributes\n  | Pcf_constraint (ct1, ct2) ->\n      pp f \"@[<2>constraint %a =@;%a@]%a\"\n        (core_type ctxt) ct1\n        (core_type ctxt) ct2\n        (item_attributes ctxt) x.pcf_attributes\n  | Pcf_initializer (e) ->\n      pp f \"@[<2>initializer@ %a@]%a\"\n        (expression ctxt) e\n        (item_attributes ctxt) x.pcf_attributes\n  | Pcf_attribute a -> floating_attribute ctxt f a\n  | Pcf_extension e ->\n      item_extension ctxt f e;\n      item_attributes ctxt f x.pcf_attributes\n\nand class_structure ctxt f { pcstr_self = p; pcstr_fields =  l } =\n  pp f \"@[<hv0>@[<hv2>object%a@;%a@]@;end@]\"\n    (fun f p -> match p.ppat_desc with\n       | Ppat_any -> ()\n       | Ppat_constraint _ -> pp f \" %a\" (pattern ctxt) p\n       | _ -> pp f \" (%a)\" (pattern ctxt) p) p\n    (list (class_field ctxt)) l\n\nand module_type ctxt f x =\n  if x.pmty_attributes <> [] then begin\n    pp f \"((%a)%a)\" (module_type ctxt) {x with pmty_attributes=[]}\n      (attributes ctxt) x.pmty_attributes\n  end else\n    match x.pmty_desc with\n    | Pmty_ident li ->\n        pp f \"%a\" longident_loc li;\n    | Pmty_alias li ->\n        pp f \"(module %a)\" longident_loc li;\n    | Pmty_signature (s) ->\n        pp f \"@[<hv0>@[<hv2>sig@ %a@]@ end@]\" (* \"@[<hov>sig@ %a@ end@]\" *)\n          (list (signature_item ctxt)) s (* FIXME wrong indentation*)\n    | Pmty_functor (_, None, mt2) ->\n        pp f \"@[<hov2>functor () ->@ %a@]\" (module_type ctxt) mt2\n    | Pmty_functor (s, Some mt1, mt2) ->\n        if s.txt = \"_\" then\n          pp f \"@[<hov2>%a@ ->@ %a@]\"\n            (module_type ctxt) mt1 (module_type ctxt) mt2\n        else\n          pp f \"@[<hov2>functor@ (%s@ :@ %a)@ ->@ %a@]\" s.txt\n            (module_type ctxt) mt1 (module_type ctxt) mt2\n    | Pmty_with (mt, l) ->\n        let with_constraint f = function\n          | Pwith_type (li, ({ptype_params= ls ;_} as td)) ->\n              let ls = List.map fst ls in\n              pp f \"type@ %a 
%a =@ %a\"\n                (list (core_type ctxt) ~sep:\",\" ~first:\"(\" ~last:\")\")\n                ls longident_loc li (type_declaration ctxt) td\n          | Pwith_module (li, li2) ->\n              pp f \"module %a =@ %a\" longident_loc li longident_loc li2;\n          | Pwith_typesubst (li, ({ptype_params=ls;_} as td)) ->\n              let ls = List.map fst ls in\n              pp f \"type@ %a %a :=@ %a\"\n                (list (core_type ctxt) ~sep:\",\" ~first:\"(\" ~last:\")\")\n                ls longident_loc li\n                (type_declaration ctxt) td\n          | Pwith_modsubst (li, li2) ->\n             pp f \"module %a :=@ %a\" longident_loc li longident_loc li2 in\n        (match l with\n         | [] -> pp f \"@[<hov2>%a@]\" (module_type ctxt) mt\n         | _ -> pp f \"@[<hov2>(%a@ with@ %a)@]\"\n                  (module_type ctxt) mt (list with_constraint ~sep:\"@ and@ \") l)\n    | Pmty_typeof me ->\n        pp f \"@[<hov2>module@ type@ of@ %a@]\" (module_expr ctxt) me\n    | Pmty_extension e -> extension ctxt f e\n\nand signature ctxt f x =  list ~sep:\"@\\n\" (signature_item ctxt) f x\n\nand signature_item ctxt f x : unit =\n  match x.psig_desc with\n  | Psig_type (rf, l) ->\n      type_def_list ctxt f (rf, l)\n  | Psig_value vd ->\n      let intro = if vd.pval_prim = [] then \"val\" else \"external\" in\n      pp f \"@[<2>%s@ %a@ :@ %a@]%a\" intro\n        protect_ident vd.pval_name.txt\n        (value_description ctxt) vd\n        (item_attributes ctxt) vd.pval_attributes\n  | Psig_typext te ->\n      type_extension ctxt f te\n  | Psig_exception ed ->\n      exception_declaration ctxt f ed\n  | Psig_class () ->\n    ()\n  | Psig_module ({pmd_type={pmty_desc=Pmty_alias alias;\n                            pmty_attributes=[]; _};_} as pmd) ->\n      pp f \"@[<hov>module@ %s@ =@ %a@]%a\" pmd.pmd_name.txt\n        longident_loc alias\n        (item_attributes ctxt) pmd.pmd_attributes\n  | Psig_module pmd ->\n      pp f \"@[<hov>module@ 
%s@ :@ %a@]%a\"\n        pmd.pmd_name.txt\n        (module_type ctxt) pmd.pmd_type\n        (item_attributes ctxt) pmd.pmd_attributes\n  | Psig_open od ->\n      pp f \"@[<hov2>open%s@ %a@]%a\"\n        (override od.popen_override)\n        longident_loc od.popen_lid\n        (item_attributes ctxt) od.popen_attributes\n  | Psig_include incl ->\n      pp f \"@[<hov2>include@ %a@]%a\"\n        (module_type ctxt) incl.pincl_mod\n        (item_attributes ctxt) incl.pincl_attributes\n  | Psig_modtype {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} ->\n      pp f \"@[<hov2>module@ type@ %s%a@]%a\"\n        s.txt\n        (fun f md -> match md with\n           | None -> ()\n           | Some mt ->\n               pp_print_space f () ;\n               pp f \"@ =@ %a\" (module_type ctxt) mt\n        ) md\n        (item_attributes ctxt) attrs\n  | Psig_class_type (l) -> class_type_declaration_list ctxt f l\n  | Psig_recmodule decls ->\n      let rec  string_x_module_type_list f ?(first=true) l =\n        match l with\n        | [] -> () ;\n        | pmd :: tl ->\n            if not first then\n              pp f \"@ @[<hov2>and@ %s:@ %a@]%a\" pmd.pmd_name.txt\n                (module_type ctxt) pmd.pmd_type\n                (item_attributes ctxt) pmd.pmd_attributes\n            else\n              pp f \"@[<hov2>module@ rec@ %s:@ %a@]%a\" pmd.pmd_name.txt\n                (module_type ctxt) pmd.pmd_type\n                (item_attributes ctxt) pmd.pmd_attributes;\n            string_x_module_type_list f ~first:false tl\n      in\n      string_x_module_type_list f decls\n  | Psig_attribute a -> floating_attribute ctxt f a\n  | Psig_extension(e, a) ->\n      item_extension ctxt f e;\n      item_attributes ctxt f a\n\nand module_expr ctxt f x =\n  if x.pmod_attributes <> [] then\n    pp f \"((%a)%a)\" (module_expr ctxt) {x with pmod_attributes=[]}\n      (attributes ctxt) x.pmod_attributes\n  else match x.pmod_desc with\n    | Pmod_structure (s) ->\n        pp f 
\"@[<hv2>struct@;@[<0>%a@]@;<1 -2>end@]\"\n          (list (structure_item ctxt) ~sep:\"@\\n\") s;\n    | Pmod_constraint (me, mt) ->\n        pp f \"@[<hov2>(%a@ :@ %a)@]\"\n          (module_expr ctxt) me\n          (module_type ctxt) mt\n    | Pmod_ident (li) ->\n        pp f \"%a\" longident_loc li;\n    | Pmod_functor (_, None, me) ->\n        pp f \"functor ()@;->@;%a\" (module_expr ctxt) me\n    | Pmod_functor (s, Some mt, me) ->\n        pp f \"functor@ (%s@ :@ %a)@;->@;%a\"\n          s.txt (module_type ctxt) mt (module_expr ctxt) me\n    | Pmod_apply (me1, me2) ->\n        pp f \"(%a)(%a)\" (module_expr ctxt) me1 (module_expr ctxt) me2\n        (* Cf: #7200 *)\n    | Pmod_unpack e ->\n        pp f \"(val@ %a)\" (expression ctxt) e\n    | Pmod_extension e -> extension ctxt f e\n\nand structure ctxt f x = list ~sep:\"@\\n\" (structure_item ctxt) f x\n\nand payload ctxt f = function\n  | PStr [{pstr_desc = Pstr_eval (e, attrs)}] ->\n      pp f \"@[<2>%a@]%a\"\n        (expression ctxt) e\n        (item_attributes ctxt) attrs\n  | PStr x -> structure ctxt f x\n  | PTyp x -> pp f \":\"; core_type ctxt f x\n  | PSig x -> pp f \":\"; signature ctxt f x\n  | PPat (x, None) -> pp f \"?\"; pattern ctxt f x\n  | PPat (x, Some e) ->\n      pp f \"?\"; pattern ctxt f x;\n      pp f \" when \"; expression ctxt f e\n\n(* transform [f = fun g h -> ..] to [f g h = ... 
] could be improved *)\nand binding ctxt f {pvb_pat=p; pvb_expr=x; _} =\n  (* .pvb_attributes have already been printed by the caller, #bindings *)\n  let rec pp_print_pexp_function f x =\n    if x.pexp_attributes <> [] then pp f \"=@;%a\" (expression ctxt) x\n    else match x.pexp_desc with\n      | Pexp_fun (label, eo, p, e) ->\n          if label=Nolabel then\n            pp f \"%a@ %a\" (simple_pattern ctxt) p pp_print_pexp_function e\n          else\n            pp f \"%a@ %a\"\n              (label_exp ctxt) (label,eo,p) pp_print_pexp_function e\n      | Pexp_newtype (str,e) ->\n          pp f \"(type@ %s)@ %a\" str.txt pp_print_pexp_function e\n      | _ -> pp f \"=@;%a\" (expression ctxt) x\n  in\n  let tyvars_str tyvars = List.map (fun v -> v.txt) tyvars in\n  let is_desugared_gadt p e =\n    let gadt_pattern =\n      match p with\n      | {ppat_desc=Ppat_constraint({ppat_desc=Ppat_var _} as pat,\n                                   {ptyp_desc=Ptyp_poly (args_tyvars, rt)});\n         ppat_attributes=[]}->\n          Some (pat, args_tyvars, rt)\n      | _ -> None in\n    let rec gadt_exp tyvars e =\n      match e with\n      | {pexp_desc=Pexp_newtype (tyvar, e); pexp_attributes=[]} ->\n          gadt_exp (tyvar :: tyvars) e\n      | {pexp_desc=Pexp_constraint (e, ct); pexp_attributes=[]} ->\n          Some (List.rev tyvars, e, ct)\n      | _ -> None in\n    let gadt_exp = gadt_exp [] e in\n    match gadt_pattern, gadt_exp with\n    | Some (p, pt_tyvars, pt_ct), Some (e_tyvars, e, e_ct)\n      when tyvars_str pt_tyvars = tyvars_str e_tyvars ->\n      let ety = Typ.varify_constructors e_tyvars e_ct in\n      if ety = pt_ct then\n      Some (p, pt_tyvars, e_ct, e) else None\n    | _ -> None in\n  if x.pexp_attributes <> []\n  then pp f \"%a@;=@;%a\" (pattern ctxt) p (expression ctxt) x else\n  match is_desugared_gadt p x with\n  | Some (p, [], ct, e) ->\n      pp f \"%a@;: %a@;=@;%a\"\n        (simple_pattern ctxt) p (core_type ctxt) ct (expression ctxt) e\n  | 
Some (p, tyvars, ct, e) -> begin\n    pp f \"%a@;: type@;%a.@;%a@;=@;%a\"\n    (simple_pattern ctxt) p (list pp_print_string ~sep:\"@;\")\n    (tyvars_str tyvars) (core_type ctxt) ct (expression ctxt) e\n    end\n  | None -> begin\n      match p with\n      | {ppat_desc=Ppat_constraint(p ,ty);\n         ppat_attributes=[]} -> (* special case for the first*)\n          begin match ty with\n          | {ptyp_desc=Ptyp_poly _; ptyp_attributes=[]} ->\n              pp f \"%a@;:@;%a@;=@;%a\" (simple_pattern ctxt) p\n                (core_type ctxt) ty (expression ctxt) x\n          | _ ->\n              pp f \"(%a@;:@;%a)@;=@;%a\" (simple_pattern ctxt) p\n                (core_type ctxt) ty (expression ctxt) x\n          end\n      | {ppat_desc=Ppat_var _; ppat_attributes=[]} ->\n          pp f \"%a@ %a\" (simple_pattern ctxt) p pp_print_pexp_function x\n      | _ ->\n          pp f \"%a@;=@;%a\" (pattern ctxt) p (expression ctxt) x\n    end\n\n(* [in] is not printed *)\nand bindings ctxt f (rf,l) =\n  let binding kwd rf f x =\n    pp f \"@[<2>%s %a%a@]%a\" kwd rec_flag rf\n      (binding ctxt) x (item_attributes ctxt) x.pvb_attributes\n  in\n  match l with\n  | [] -> ()\n  | [x] -> binding \"let\" rf f x\n  | x::xs ->\n      pp f \"@[<v>%a@,%a@]\"\n        (binding \"let\" rf) x\n        (list ~sep:\"@,\" (binding \"and\" Nonrecursive)) xs\n\nand structure_item ctxt f x =\n  match x.pstr_desc with\n  | Pstr_eval (e, attrs) ->\n      pp f \"@[<hov2>;;%a@]%a\"\n        (expression ctxt) e\n        (item_attributes ctxt) attrs\n  | Pstr_type (_, []) -> assert false\n  | Pstr_type (rf, l)  -> type_def_list ctxt f (rf, l)\n  | Pstr_value (rf, l) ->\n      (* pp f \"@[<hov2>let %a%a@]\"  rec_flag rf bindings l *)\n      pp f \"@[<2>%a@]\" (bindings ctxt) (rf,l)\n  | Pstr_typext te -> type_extension ctxt f te\n  | Pstr_exception ed -> exception_declaration ctxt f ed\n  | Pstr_module x ->\n      let rec module_helper = function\n        | {pmod_desc=Pmod_functor(s,mt,me'); 
pmod_attributes = []} ->\n            if mt = None then pp f \"()\"\n            else Misc.may (pp f \"(%s:%a)\" s.txt (module_type ctxt)) mt;\n            module_helper me'\n        | me -> me\n      in\n      pp f \"@[<hov2>module %s%a@]%a\"\n        x.pmb_name.txt\n        (fun f me ->\n           let me = module_helper me in\n           match me with\n           | {pmod_desc=\n                Pmod_constraint\n                  (me',\n                   ({pmty_desc=(Pmty_ident (_)\n                               | Pmty_signature (_));_} as mt));\n              pmod_attributes = []} ->\n               pp f \" :@;%a@;=@;%a@;\"\n                 (module_type ctxt) mt (module_expr ctxt) me'\n           | _ -> pp f \" =@ %a\" (module_expr ctxt) me\n        ) x.pmb_expr\n        (item_attributes ctxt) x.pmb_attributes\n  | Pstr_open od ->\n      pp f \"@[<2>open%s@;%a@]%a\"\n        (override od.popen_override)\n        longident_loc od.popen_lid\n        (item_attributes ctxt) od.popen_attributes\n  | Pstr_modtype {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} ->\n      pp f \"@[<hov2>module@ type@ %s%a@]%a\"\n        s.txt\n        (fun f md -> match md with\n           | None -> ()\n           | Some mt ->\n               pp_print_space f () ;\n               pp f \"@ =@ %a\" (module_type ctxt) mt\n        ) md\n        (item_attributes ctxt) attrs\n  | Pstr_class () -> ()\n  | Pstr_class_type l -> class_type_declaration_list ctxt f l\n  | Pstr_primitive vd ->\n      pp f \"@[<hov2>external@ %a@ :@ %a@]%a\"\n        protect_ident vd.pval_name.txt\n        (value_description ctxt) vd\n        (item_attributes ctxt) vd.pval_attributes\n  | Pstr_include incl ->\n      pp f \"@[<hov2>include@ %a@]%a\"\n        (module_expr ctxt) incl.pincl_mod\n        (item_attributes ctxt) incl.pincl_attributes\n  | Pstr_recmodule decls -> (* 3.07 *)\n      let aux f = function\n        | ({pmb_expr={pmod_desc=Pmod_constraint (expr, typ)}} as pmb) ->\n            pp f \"@[<hov2>@ 
and@ %s:%a@ =@ %a@]%a\" pmb.pmb_name.txt\n              (module_type ctxt) typ\n              (module_expr ctxt) expr\n              (item_attributes ctxt) pmb.pmb_attributes\n        | _ -> assert false\n      in\n      begin match decls with\n      | ({pmb_expr={pmod_desc=Pmod_constraint (expr, typ)}} as pmb) :: l2 ->\n          pp f \"@[<hv>@[<hov2>module@ rec@ %s:%a@ =@ %a@]%a@ %a@]\"\n            pmb.pmb_name.txt\n            (module_type ctxt) typ\n            (module_expr ctxt) expr\n            (item_attributes ctxt) pmb.pmb_attributes\n            (fun f l2 -> List.iter (aux f) l2) l2\n      | _ -> assert false\n      end\n  | Pstr_attribute a -> floating_attribute ctxt f a\n  | Pstr_extension(e, a) ->\n      item_extension ctxt f e;\n      item_attributes ctxt f a\n\nand type_param ctxt f (ct, a) =\n  pp f \"%s%a\" (type_variance a) (core_type ctxt) ct\n\nand type_params ctxt f = function\n  | [] -> ()\n  | l -> pp f \"%a \" (list (type_param ctxt) ~first:\"(\" ~last:\")\" ~sep:\",@;\") l\n\nand type_def_list ctxt f (rf, l) =\n  let type_decl kwd rf f x =\n    let eq =\n      if (x.ptype_kind = Ptype_abstract)\n         && (x.ptype_manifest = None) then \"\"\n      else \" =\"\n    in\n    pp f \"@[<2>%s %a%a%s%s%a@]%a\" kwd\n      nonrec_flag rf\n      (type_params ctxt) x.ptype_params\n      x.ptype_name.txt eq\n      (type_declaration ctxt) x\n      (item_attributes ctxt) x.ptype_attributes\n  in\n  match l with\n  | [] -> assert false\n  | [x] -> type_decl \"type\" rf f x\n  | x :: xs -> pp f \"@[<v>%a@,%a@]\"\n                 (type_decl \"type\" rf) x\n                 (list ~sep:\"@,\" (type_decl \"and\" Recursive)) xs\n\nand record_declaration ctxt f lbls =\n  let type_record_field f pld =\n    pp f \"@[<2>%a%s:@;%a@;%a@]\"\n      mutable_flag pld.pld_mutable\n      pld.pld_name.txt\n      (core_type ctxt) pld.pld_type\n      (attributes ctxt) pld.pld_attributes\n  in\n  pp f \"{@\\n%a}\"\n    (list type_record_field ~sep:\";@\\n\" )  lbls\n\nand 
type_declaration ctxt f x =\n  (* type_declaration has an attribute field,\n     but it's been printed by the caller of this method *)\n  let priv f =\n    match x.ptype_private with\n    | Public -> ()\n    | Private -> pp f \"@;private\"\n  in\n  let manifest f =\n    match x.ptype_manifest with\n    | None -> ()\n    | Some y ->\n        if x.ptype_kind = Ptype_abstract then\n          pp f \"%t@;%a\" priv (core_type ctxt) y\n        else\n          pp f \"@;%a\" (core_type ctxt) y\n  in\n  let constructor_declaration f pcd =\n    pp f \"|@;\";\n    constructor_declaration ctxt f\n      (pcd.pcd_name.txt, pcd.pcd_args, pcd.pcd_res, pcd.pcd_attributes)\n  in\n  let repr f =\n    let intro f =\n      if x.ptype_manifest = None then ()\n      else pp f \"@;=\"\n    in\n    match x.ptype_kind with\n    | Ptype_variant xs ->\n        pp f \"%t%t@\\n%a\" intro priv\n          (list ~sep:\"@\\n\" constructor_declaration) xs\n    | Ptype_abstract -> ()\n    | Ptype_record l ->\n        pp f \"%t%t@;%a\" intro priv (record_declaration ctxt) l\n    | Ptype_open -> pp f \"%t%t@;..\" intro priv\n  in\n  let constraints f =\n    List.iter\n      (fun (ct1,ct2,_) ->\n         pp f \"@[<hov2>@ constraint@ %a@ =@ %a@]\"\n           (core_type ctxt) ct1 (core_type ctxt) ct2)\n      x.ptype_cstrs\n  in\n  pp f \"%t%t%t\" manifest repr constraints\n\nand type_extension ctxt f x =\n  let extension_constructor f x =\n    pp f \"@\\n|@;%a\" (extension_constructor ctxt) x\n  in\n  pp f \"@[<2>type %a%a += %a@ %a@]%a\"\n    (fun f -> function\n       | [] -> ()\n       | l ->\n           pp f \"%a@;\" (list (type_param ctxt) ~first:\"(\" ~last:\")\" ~sep:\",\") l)\n    x.ptyext_params\n    longident_loc x.ptyext_path\n    private_flag x.ptyext_private (* Cf: #7200 *)\n    (list ~sep:\"\" extension_constructor)\n    x.ptyext_constructors\n    (item_attributes ctxt) x.ptyext_attributes\n\nand constructor_declaration ctxt f (name, args, res, attrs) =\n  let name =\n    match name with\n   
 | \"::\" -> \"(::)\"\n    | s -> s in\n  match res with\n  | None ->\n      pp f \"%s%a@;%a\" name\n        (fun f -> function\n           | Pcstr_tuple [] -> ()\n           | Pcstr_tuple l ->\n             pp f \"@;of@;%a\" (list (core_type1 ctxt) ~sep:\"@;*@;\") l\n           | Pcstr_record l -> pp f \"@;of@;%a\" (record_declaration ctxt) l\n        ) args\n        (attributes ctxt) attrs\n  | Some r ->\n      pp f \"%s:@;%a@;%a\" name\n        (fun f -> function\n           | Pcstr_tuple [] -> core_type1 ctxt f r\n           | Pcstr_tuple l -> pp f \"%a@;->@;%a\"\n                                (list (core_type1 ctxt) ~sep:\"@;*@;\") l\n                                (core_type1 ctxt) r\n           | Pcstr_record l ->\n               pp f \"%a@;->@;%a\" (record_declaration ctxt) l (core_type1 ctxt) r\n        )\n        args\n        (attributes ctxt) attrs\n\nand extension_constructor ctxt f x =\n  (* Cf: #7200 *)\n  match x.pext_kind with\n  | Pext_decl(l, r) ->\n      constructor_declaration ctxt f (x.pext_name.txt, l, r, x.pext_attributes)\n  | Pext_rebind li ->\n      pp f \"%s%a@;=@;%a\" x.pext_name.txt\n        (attributes ctxt) x.pext_attributes\n        longident_loc li\n\nand case_list ctxt f l : unit =\n  let aux f {pc_lhs; pc_guard; pc_rhs} =\n    pp f \"@;| @[<2>%a%a@;->@;%a@]\"\n      (pattern ctxt) pc_lhs (option (expression ctxt) ~first:\"@;when@;\")\n      pc_guard (expression (under_pipe ctxt)) pc_rhs\n  in\n  list aux f l ~sep:\"\"\n\nand label_x_expression_param ctxt f (l,e) =\n  let simple_name = match e with\n    | {pexp_desc=Pexp_ident {txt=Lident l;_};\n       pexp_attributes=[]} -> Some l\n    | _ -> None\n  in match l with\n  | Nolabel  -> expression2 ctxt f e (* level 2*)\n  | Optional str ->\n      if Some str = simple_name then\n        pp f \"?%s\" str\n      else\n        pp f \"?%s:%a\" str (simple_expr ctxt) e\n  | Labelled lbl ->\n      if Some lbl = simple_name then\n        pp f \"~%s\" lbl\n      else\n        pp f 
\"~%s:%a\" lbl (simple_expr ctxt) e\n\n\n\nlet expression f x =\n  pp f \"@[%a@]\" (expression reset_ctxt) x\n\nlet string_of_expression x =\n  ignore (flush_str_formatter ()) ;\n  let f = str_formatter in\n  expression f x;\n  flush_str_formatter ()\n\nlet string_of_structure x =\n  ignore (flush_str_formatter ());\n  let f = str_formatter in\n  structure reset_ctxt f x;\n  flush_str_formatter ()\n\n\nlet core_type = core_type reset_ctxt\nlet pattern = pattern reset_ctxt\nlet signature = signature reset_ctxt\nlet structure = structure reset_ctxt\n"
  },
  {
    "path": "analysis/vendor/ml/pprintast.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Hongbo Zhang (University of Pennsylvania)                  *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype space_formatter = (unit, Format.formatter, unit) format\n\nval expression : Format.formatter -> Parsetree.expression -> unit\nval string_of_expression : Parsetree.expression -> string\n\nval core_type : Format.formatter -> Parsetree.core_type -> unit\nval pattern : Format.formatter -> Parsetree.pattern -> unit\nval signature : Format.formatter -> Parsetree.signature -> unit\nval structure : Format.formatter -> Parsetree.structure -> unit\nval string_of_structure : Parsetree.structure -> string\nval string_of_int_as_char : int -> string\n"
  },
  {
    "path": "analysis/vendor/ml/predef.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Predefined type constructors (with special typing rules in typecore) *)\n\nopen Path\nopen Types\nopen Btype\n\nlet builtin_idents = ref []\n\nlet wrap create s =\n  let id = create s in\n  builtin_idents := (s, id) :: !builtin_idents;\n  id\n\nlet ident_create = wrap Ident.create\nlet ident_create_predef_exn = wrap Ident.create_predef_exn\n\nlet ident_int = ident_create \"int\"\n\nand ident_char = ident_create \"char\"\n\nand ident_bytes = ident_create \"bytes\"\n\nand ident_float = ident_create \"float\"\n\nand ident_bool = ident_create \"bool\"\n\nand ident_unit = ident_create \"unit\"\n\nand ident_exn = ident_create \"exn\"\n\nand ident_array = ident_create \"array\"\n\nand ident_list = ident_create \"list\"\n\nand ident_option = ident_create \"option\"\n\nand ident_result = ident_create \"result\"\n\nand ident_dict = ident_create \"dict\"\n\nand ident_int64 = 
ident_create \"int64\"\n\nand ident_bigint = ident_create \"bigint\"\n\nand ident_lazy_t = ident_create \"lazy_t\"\n\nand ident_string = ident_create \"string\"\n\nand ident_extension_constructor = ident_create \"extension_constructor\"\n\nand ident_floatarray = ident_create \"floatarray\"\n\nand ident_unknown = ident_create \"unknown\"\n\nand ident_promise = ident_create \"promise\"\n\nand ident_uncurried = ident_create \"function$\"\n\ntype test = For_sure_yes | For_sure_no | NA\n\nlet type_is_builtin_path_but_option (p : Path.t) : test =\n  match p with\n  | Pident {stamp} ->\n    if stamp >= ident_int.stamp && stamp <= ident_floatarray.stamp then\n      if stamp = ident_option.stamp || stamp = ident_unit.stamp then For_sure_no\n      else For_sure_yes\n    else NA\n  | _ -> NA\n\nlet path_int = Pident ident_int\n\nand path_char = Pident ident_char\n\nand path_bytes = Pident ident_bytes\n\nand path_float = Pident ident_float\n\nand path_bool = Pident ident_bool\n\nand path_unit = Pident ident_unit\n\nand path_exn = Pident ident_exn\n\nand path_array = Pident ident_array\n\nand path_list = Pident ident_list\n\nand path_option = Pident ident_option\n\nand path_result = Pident ident_result\n\nand path_dict = Pident ident_dict\n\nand path_int64 = Pident ident_int64\n\nand path_bigint = Pident ident_bigint\n\nand path_lazy_t = Pident ident_lazy_t\n\nand path_string = Pident ident_string\n\nand path_unkonwn = Pident ident_unknown\n\nand path_extension_constructor = Pident ident_extension_constructor\n\nand path_floatarray = Pident ident_floatarray\n\nand path_promise = Pident ident_promise\n\nand path_uncurried = Pident ident_uncurried\n\nlet type_int = newgenty (Tconstr (path_int, [], ref Mnil))\n\nand type_char = newgenty (Tconstr (path_char, [], ref Mnil))\n\nand type_bytes = newgenty (Tconstr (path_bytes, [], ref Mnil))\n\nand type_float = newgenty (Tconstr (path_float, [], ref Mnil))\n\nand type_bool = newgenty (Tconstr (path_bool, [], ref Mnil))\n\nand type_unit 
= newgenty (Tconstr (path_unit, [], ref Mnil))\n\nand type_exn = newgenty (Tconstr (path_exn, [], ref Mnil))\n\nand type_array t = newgenty (Tconstr (path_array, [t], ref Mnil))\n\nand type_list t = newgenty (Tconstr (path_list, [t], ref Mnil))\n\nand type_option t = newgenty (Tconstr (path_option, [t], ref Mnil))\n\nand type_result t1 t2 = newgenty (Tconstr (path_result, [t1; t2], ref Mnil))\n\nand type_dict t = newgenty (Tconstr (path_dict, [t], ref Mnil))\n\nand type_int64 = newgenty (Tconstr (path_int64, [], ref Mnil))\n\nand type_bigint = newgenty (Tconstr (path_bigint, [], ref Mnil))\n\nand type_lazy_t t = newgenty (Tconstr (path_lazy_t, [t], ref Mnil))\n\nand type_string = newgenty (Tconstr (path_string, [], ref Mnil))\n\nand type_unknown = newgenty (Tconstr (path_unkonwn, [], ref Mnil))\n\nand type_extension_constructor =\n  newgenty (Tconstr (path_extension_constructor, [], ref Mnil))\n\nand type_floatarray = newgenty (Tconstr (path_floatarray, [], ref Mnil))\n\nlet ident_match_failure = ident_create_predef_exn \"Match_failure\"\n\nand ident_invalid_argument = ident_create_predef_exn \"Invalid_argument\"\n\nand ident_failure = ident_create_predef_exn \"Failure\"\n\nand ident_ok = ident_create_predef_exn \"Ok\"\n\nand ident_error = ident_create_predef_exn \"Error\"\n\nand ident_js_error = ident_create_predef_exn \"JsError\"\n\nand ident_not_found = ident_create_predef_exn \"Not_found\"\n\nand ident_end_of_file = ident_create_predef_exn \"End_of_file\"\n\nand ident_division_by_zero = ident_create_predef_exn \"Division_by_zero\"\n\nand ident_assert_failure = ident_create_predef_exn \"Assert_failure\"\n\nand ident_undefined_recursive_module =\n  ident_create_predef_exn \"Undefined_recursive_module\"\n\nlet all_predef_exns =\n  [\n    ident_match_failure;\n    ident_invalid_argument;\n    ident_failure;\n    ident_js_error;\n    ident_not_found;\n    ident_end_of_file;\n    ident_division_by_zero;\n    ident_assert_failure;\n    
ident_undefined_recursive_module;\n  ]\n\nlet path_match_failure = Pident ident_match_failure\n\nand path_assert_failure = Pident ident_assert_failure\n\nand path_undefined_recursive_module = Pident ident_undefined_recursive_module\n\nlet decl_abstr =\n  {\n    type_params = [];\n    type_arity = 0;\n    type_kind = Type_abstract;\n    type_loc = Location.none;\n    type_private = Asttypes.Public;\n    type_manifest = None;\n    type_variance = [];\n    type_newtype_level = None;\n    type_attributes = [];\n    type_immediate = false;\n    type_unboxed = unboxed_false_default_false;\n  }\n\nlet decl_abstr_imm = {decl_abstr with type_immediate = true}\n\nlet cstr id args =\n  {\n    cd_id = id;\n    cd_args = Cstr_tuple args;\n    cd_res = None;\n    cd_loc = Location.none;\n    cd_attributes = [];\n  }\n\nlet ident_false = ident_create \"false\"\n\nand ident_true = ident_create \"true\"\n\nand ident_void = ident_create \"()\"\n\nand ident_nil = ident_create \"[]\"\n\nand ident_cons = ident_create \"::\"\n\nand ident_none = ident_create \"None\"\n\nand ident_some = ident_create \"Some\"\n\nand ident_ctor_unknown = ident_create \"Unknown\"\n\nand ident_ctor_uncurried = ident_create \"Function$\"\n\nlet common_initial_env add_type add_extension empty_env =\n  let decl_bool =\n    {\n      decl_abstr with\n      type_kind = Type_variant [cstr ident_false []; cstr ident_true []];\n      type_immediate = true;\n    }\n  and decl_unit =\n    {\n      decl_abstr with\n      type_kind = Type_variant [cstr ident_void []];\n      type_immediate = true;\n    }\n  and decl_exn = {decl_abstr with type_kind = Type_open}\n  and decl_array =\n    let tvar = newgenvar () in\n    {\n      decl_abstr with\n      type_params = [tvar];\n      type_arity = 1;\n      type_variance = [Variance.full];\n    }\n  and decl_list =\n    let tvar = newgenvar () in\n    {\n      decl_abstr with\n      type_params = [tvar];\n      type_arity = 1;\n      type_kind =\n        Type_variant [cstr 
ident_nil []; cstr ident_cons [tvar; type_list tvar]];\n      type_variance = [Variance.covariant];\n    }\n  and decl_option =\n    let tvar = newgenvar () in\n    {\n      decl_abstr with\n      type_params = [tvar];\n      type_arity = 1;\n      type_kind = Type_variant [cstr ident_none []; cstr ident_some [tvar]];\n      type_variance = [Variance.covariant];\n    }\n  and decl_result =\n    let tvar1, tvar2 = (newgenvar (), newgenvar ()) in\n    {\n      decl_abstr with\n      type_params = [tvar1; tvar2];\n      type_arity = 2;\n      type_kind = Type_variant [cstr ident_ok [tvar1]; cstr ident_error [tvar2]];\n      type_variance = [Variance.covariant; Variance.covariant];\n    }\n  and decl_dict =\n    let tvar = newgenvar () in\n    {\n      decl_abstr with\n      type_params = [tvar];\n      type_arity = 1;\n      type_variance = [Variance.full];\n    }\n  and decl_uncurried =\n    let tvar1, tvar2 = (newgenvar (), newgenvar ()) in\n    {\n      decl_abstr with\n      type_params = [tvar1; tvar2];\n      type_arity = 2;\n      type_kind = Type_variant [cstr ident_ctor_uncurried [tvar1]];\n      type_variance = [Variance.covariant; Variance.covariant];\n      type_unboxed = Types.unboxed_true_default_false;\n    }\n  and decl_unknown =\n    let tvar = newgenvar () in\n    {\n      decl_abstr with\n      type_params = [];\n      type_arity = 0;\n      type_kind =\n        Type_variant\n          [\n            {\n              cd_id = ident_ctor_unknown;\n              cd_args = Cstr_tuple [tvar];\n              cd_res = Some type_unknown;\n              cd_loc = Location.none;\n              cd_attributes = [];\n            };\n          ];\n      type_unboxed = Types.unboxed_true_default_false;\n    }\n  and decl_lazy_t =\n    let tvar = newgenvar () in\n    {\n      decl_abstr with\n      type_params = [tvar];\n      type_arity = 1;\n      type_variance = [Variance.covariant];\n    }\n  and decl_promise =\n    let tvar = newgenvar () in\n    {\n      
decl_abstr with\n      type_params = [tvar];\n      type_arity = 1;\n      type_variance = [Variance.covariant];\n    }\n  in\n\n  let add_extension id l =\n    add_extension id\n      {\n        ext_type_path = path_exn;\n        ext_type_params = [];\n        ext_args = Cstr_tuple l;\n        ext_ret_type = None;\n        ext_private = Asttypes.Public;\n        ext_loc = Location.none;\n        ext_attributes =\n          [\n            ( {\n                Asttypes.txt = \"ocaml.warn_on_literal_pattern\";\n                loc = Location.none;\n              },\n              Parsetree.PStr [] );\n          ];\n      }\n  in\n  add_extension ident_match_failure\n    [newgenty (Ttuple [type_string; type_int; type_int])]\n    (add_extension ident_invalid_argument [type_string]\n       (add_extension ident_js_error [type_unknown]\n          (add_extension ident_failure [type_string]\n             (add_extension ident_not_found []\n                (add_extension ident_end_of_file []\n                   (add_extension ident_division_by_zero []\n                      (add_extension ident_assert_failure\n                         [newgenty (Ttuple [type_string; type_int; type_int])]\n                         (add_extension ident_undefined_recursive_module\n                            [\n                              newgenty (Ttuple [type_string; type_int; type_int]);\n                            ]\n                            (add_type ident_int64 decl_abstr\n                               (add_type ident_bigint decl_abstr\n                                  (add_type ident_lazy_t decl_lazy_t\n                                     (add_type ident_option decl_option\n                                        (add_type ident_result decl_result\n                                           (add_type ident_dict decl_dict\n                                              (add_type ident_list decl_list\n                                                 (add_type ident_array\n           
                                         decl_array\n                                                    (add_type ident_exn decl_exn\n                                                       (add_type ident_unit\n                                                          decl_unit\n                                                          (add_type ident_bool\n                                                             decl_bool\n                                                             (add_type\n                                                                ident_float\n                                                                decl_abstr\n                                                                (add_type\n                                                                   ident_unknown\n                                                                   decl_unknown\n                                                                   (add_type\n                                                                      ident_uncurried\n                                                                      decl_uncurried\n                                                                      (add_type\n                                                                         ident_string\n                                                                         decl_abstr\n                                                                         (add_type\n                                                                            ident_int\n                                                                            decl_abstr_imm\n                                                                            (add_type\n                                                                               ident_extension_constructor\n                                                                               decl_abstr\n                                                   
                            (add_type\n                                                                                ident_floatarray\n                                                                                decl_abstr\n                                                                                (\n                                                                                add_type\n                                                                                ident_promise\n                                                                                decl_promise\n                                                                                empty_env)))))))))))))))))))))))))))\n\nlet build_initial_env add_type add_exception empty_env =\n  let common = common_initial_env add_type add_exception empty_env in\n  let res = add_type ident_bytes decl_abstr common in\n  let decl_type_char =\n    {decl_abstr with type_manifest = Some type_int; type_private = Private}\n  in\n  add_type ident_char decl_type_char res\n\nlet builtin_values =\n  List.map\n    (fun id ->\n      Ident.make_global id;\n      (Ident.name id, id))\n    [\n      ident_match_failure;\n      ident_invalid_argument;\n      ident_failure;\n      ident_js_error;\n      ident_not_found;\n      ident_end_of_file;\n      ident_division_by_zero;\n      ident_assert_failure;\n      ident_undefined_recursive_module;\n    ]\n\n(* Start non-predef identifiers at 1000.  This way, more predefs can\n   be defined in this file (above!) without breaking .cmi\n   compatibility. *)\n\nlet _ = Ident.set_current_time 999\nlet builtin_idents = List.rev !builtin_idents\n"
  },
  {
    "path": "analysis/vendor/ml/predef.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Predefined type constructors (with special typing rules in typecore) *)\n\nopen Types\n\nval type_int : type_expr\nval type_char : type_expr\nval type_string : type_expr\nval type_bytes : type_expr\nval type_float : type_expr\nval type_bool : type_expr\nval type_unit : type_expr\nval type_exn : type_expr\nval type_array : type_expr -> type_expr\nval type_list : type_expr -> type_expr\nval type_option : type_expr -> type_expr\nval type_result : type_expr -> type_expr -> type_expr\nval type_dict : type_expr -> type_expr\n\nval type_int64 : type_expr\nval type_bigint : type_expr\nval type_lazy_t : type_expr -> type_expr\nval type_extension_constructor : type_expr\nval type_floatarray : type_expr\n\nval path_int : Path.t\nval path_char : Path.t\nval path_string : Path.t\nval path_bytes : Path.t\nval path_float : Path.t\nval path_bool : Path.t\nval path_unit : Path.t\nval 
path_exn : Path.t\nval path_array : Path.t\nval path_list : Path.t\nval path_option : Path.t\nval path_result : Path.t\nval path_dict : Path.t\n\nval path_int64 : Path.t\nval path_bigint : Path.t\nval path_lazy_t : Path.t\nval path_extension_constructor : Path.t\nval path_floatarray : Path.t\nval path_promise : Path.t\nval path_uncurried : Path.t\n\nval path_match_failure : Path.t\nval path_assert_failure : Path.t\nval path_undefined_recursive_module : Path.t\n\n(* To build the initial environment. Since there is a nasty mutual\n   recursion between predef and env, we break it by parameterizing\n   over Env.t, Env.add_type and Env.add_extension. *)\n\nval build_initial_env :\n  (Ident.t -> type_declaration -> 'a -> 'a) ->\n  (Ident.t -> extension_constructor -> 'a -> 'a) ->\n  'a ->\n  'a\n\n(* To initialize linker tables *)\n\nval builtin_values : (string * Ident.t) list\nval builtin_idents : (string * Ident.t) list\n\nval ident_division_by_zero : Ident.t\n(** All predefined exceptions, exposed as [Ident.t] for flambda (for building\n    value approximations). The [Ident.t] for division by zero is also exported\n    explicitly so flambda can generate code to raise it. *)\n\nval all_predef_exns : Ident.t list\n\ntype test = For_sure_yes | For_sure_no | NA\n\nval type_is_builtin_path_but_option : Path.t -> test\n"
  },
  {
    "path": "analysis/vendor/ml/primitive.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Description of primitive functions *)\n\nopen Misc\nopen Parsetree\n\ntype boxed_integer = Pbigint | Pint32 | Pint64\n\ntype native_repr = Same_as_ocaml_repr\n\ntype description = {\n  prim_name: string; (* Name of primitive  or C function *)\n  prim_arity: int; (* Number of arguments *)\n  prim_alloc: bool; (* Does it allocates or raise? *)\n  prim_native_name: string; (* Name of C function for the nat. code gen. 
*)\n  prim_native_repr_args: native_repr list;\n  prim_native_repr_res: native_repr;\n}\n\nlet coerce : (description -> description -> bool) ref =\n  ref (fun (p1 : description) (p2 : description) -> p1 = p2)\n\nlet rec make_native_repr_args arity x =\n  if arity = 0 then [] else x :: make_native_repr_args (arity - 1) x\n\nlet simple ~name ~arity ~alloc =\n  {\n    prim_name = name;\n    prim_arity = arity;\n    prim_alloc = alloc;\n    prim_native_name = \"\";\n    prim_native_repr_args = make_native_repr_args arity Same_as_ocaml_repr;\n    prim_native_repr_res = Same_as_ocaml_repr;\n  }\n\nlet make ~name ~alloc ~native_name ~native_repr_args ~native_repr_res =\n  {\n    prim_name = name;\n    prim_arity = List.length native_repr_args;\n    prim_alloc = alloc;\n    prim_native_name = native_name;\n    prim_native_repr_args = native_repr_args;\n    prim_native_repr_res = native_repr_res;\n  }\n\nlet parse_declaration valdecl ~native_repr_args ~native_repr_res =\n  let arity = List.length native_repr_args in\n  let name, native_name =\n    match valdecl.pval_prim with\n    | name :: name2 :: _ -> (name, name2)\n    | name :: _ -> (name, \"\")\n    | [] -> fatal_error \"Primitive.parse_declaration\"\n  in\n  {\n    prim_name = name;\n    prim_arity = arity;\n    prim_alloc = true;\n    prim_native_name = native_name;\n    prim_native_repr_args = native_repr_args;\n    prim_native_repr_res = native_repr_res;\n  }\n\nopen Outcometree\n\nlet print p osig_val_decl =\n  let prims =\n    if p.prim_native_name <> \"\" then [p.prim_name; p.prim_native_name]\n    else [p.prim_name]\n  in\n  {osig_val_decl with oval_prims = prims; oval_attributes = []}\n\nlet native_name p =\n  if p.prim_native_name <> \"\" then p.prim_native_name else p.prim_name\n\nlet byte_name p = p.prim_name\n"
  },
  {
    "path": "analysis/vendor/ml/primitive.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Description of primitive functions *)\n\ntype boxed_integer = Pbigint | Pint32 | Pint64\n\n(* Representation of arguments/result for the native code version\n   of a primitive *)\ntype native_repr = Same_as_ocaml_repr\n\ntype description = private {\n  prim_name: string; (* Name of primitive  or C function *)\n  prim_arity: int; (* Number of arguments *)\n  prim_alloc: bool; (* Does it allocates or raise? *)\n  prim_native_name: string; (* Name of C function for the nat. code gen. 
*)\n  prim_native_repr_args: native_repr list;\n  prim_native_repr_res: native_repr;\n}\n\n(* Invariant [List.length d.prim_native_repr_args = d.prim_arity] *)\n\nval simple : name:string -> arity:int -> alloc:bool -> description\n\nval make :\n  name:string ->\n  alloc:bool ->\n  native_name:string ->\n  native_repr_args:native_repr list ->\n  native_repr_res:native_repr ->\n  description\n\nval parse_declaration :\n  Parsetree.value_description ->\n  native_repr_args:native_repr list ->\n  native_repr_res:native_repr ->\n  description\n\nval print : description -> Outcometree.out_val_decl -> Outcometree.out_val_decl\n\nval native_name : description -> string\nval byte_name : description -> string\n\nval coerce : (description -> description -> bool) ref\n"
  },
  {
    "path": "analysis/vendor/ml/printast.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*              Damien Doligez, projet Para, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1999 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Format\nopen Lexing\nopen Location\nopen Parsetree\n\nlet fmt_position with_name f l =\n  let fname = if with_name then l.pos_fname else \"\" in\n  if l.pos_lnum = -1 then fprintf f \"%s[%d]\" fname l.pos_cnum\n  else\n    fprintf f \"%s[%d,%d+%d]\" fname l.pos_lnum l.pos_bol (l.pos_cnum - l.pos_bol)\n\nlet fmt_location f loc =\n  if !Clflags.dump_location then (\n    let p_2nd_name = loc.loc_start.pos_fname <> loc.loc_end.pos_fname in\n    fprintf f \"(%a..%a)\" (fmt_position true) loc.loc_start\n      (fmt_position p_2nd_name) loc.loc_end;\n    if loc.loc_ghost then fprintf f \" ghost\")\n\nlet rec fmt_longident_aux f x =\n  match x with\n  | Longident.Lident s -> fprintf f \"%s\" s\n  | Longident.Ldot (y, s) -> fprintf f \"%a.%s\" fmt_longident_aux y s\n  | Longident.Lapply (y, z) ->\n    fprintf f \"%a(%a)\" fmt_longident_aux y fmt_longident_aux 
z\n\nlet fmt_longident_loc f (x : Longident.t loc) =\n  fprintf f \"\\\"%a\\\" %a\" fmt_longident_aux x.txt fmt_location x.loc\n\nlet fmt_string_loc f (x : string loc) =\n  fprintf f \"\\\"%s\\\" %a\" x.txt fmt_location x.loc\n\nlet fmt_char_option f = function\n  | None -> fprintf f \"None\"\n  | Some c -> fprintf f \"Some %c\" c\n\nlet fmt_constant f x =\n  match x with\n  | Pconst_integer (i, m) -> fprintf f \"PConst_int (%s,%a)\" i fmt_char_option m\n  | Pconst_char c -> fprintf f \"PConst_char %02x\" c\n  | Pconst_string (s, None) -> fprintf f \"PConst_string(%S,None)\" s\n  | Pconst_string (s, Some delim) ->\n    fprintf f \"PConst_string (%S,Some %S)\" s delim\n  | Pconst_float (s, m) -> fprintf f \"PConst_float (%s,%a)\" s fmt_char_option m\n\nlet fmt_mutable_flag f x =\n  match x with\n  | Immutable -> fprintf f \"Immutable\"\n  | Mutable -> fprintf f \"Mutable\"\n\nlet fmt_virtual_flag f x =\n  match x with\n  | Virtual -> fprintf f \"Virtual\"\n  | Concrete -> fprintf f \"Concrete\"\n\nlet fmt_override_flag f x =\n  match x with\n  | Override -> fprintf f \"Override\"\n  | Fresh -> fprintf f \"Fresh\"\n\nlet fmt_closed_flag f x =\n  match x with\n  | Closed -> fprintf f \"Closed\"\n  | Open -> fprintf f \"Open\"\n\nlet fmt_rec_flag f x =\n  match x with\n  | Nonrecursive -> fprintf f \"Nonrec\"\n  | Recursive -> fprintf f \"Rec\"\n\nlet fmt_direction_flag f x =\n  match x with\n  | Upto -> fprintf f \"Up\"\n  | Downto -> fprintf f \"Down\"\n\nlet fmt_private_flag f x =\n  match x with\n  | Public -> fprintf f \"Public\"\n  | Private -> fprintf f \"Private\"\n\nlet line i f s (*...*) =\n  fprintf f \"%s\" (String.make (2 * i mod 72) ' ');\n  fprintf f s (*...*)\n\nlet list i f ppf l =\n  match l with\n  | [] -> line i ppf \"[]\\n\"\n  | _ :: _ ->\n    line i ppf \"[\\n\";\n    List.iter (f (i + 1) ppf) l;\n    line i ppf \"]\\n\"\n\nlet option i f ppf x =\n  match x with\n  | None -> line i ppf \"None\\n\"\n  | Some x ->\n    line i ppf \"Some\\n\";\n    
f (i + 1) ppf x\n\nlet longident_loc i ppf li = line i ppf \"%a\\n\" fmt_longident_loc li\nlet string i ppf s = line i ppf \"\\\"%s\\\"\\n\" s\nlet string_loc i ppf s = line i ppf \"%a\\n\" fmt_string_loc s\nlet arg_label i ppf = function\n  | Nolabel -> line i ppf \"Nolabel\\n\"\n  | Optional s -> line i ppf \"Optional \\\"%s\\\"\\n\" s\n  | Labelled s -> line i ppf \"Labelled \\\"%s\\\"\\n\" s\n\nlet rec core_type i ppf x =\n  line i ppf \"core_type %a\\n\" fmt_location x.ptyp_loc;\n  attributes i ppf x.ptyp_attributes;\n  let i = i + 1 in\n  match x.ptyp_desc with\n  | Ptyp_any -> line i ppf \"Ptyp_any\\n\"\n  | Ptyp_var s -> line i ppf \"Ptyp_var %s\\n\" s\n  | Ptyp_arrow (l, ct1, ct2) ->\n    line i ppf \"Ptyp_arrow\\n\";\n    arg_label i ppf l;\n    core_type i ppf ct1;\n    core_type i ppf ct2\n  | Ptyp_tuple l ->\n    line i ppf \"Ptyp_tuple\\n\";\n    list i core_type ppf l\n  | Ptyp_constr (li, l) ->\n    line i ppf \"Ptyp_constr %a\\n\" fmt_longident_loc li;\n    list i core_type ppf l\n  | Ptyp_variant (l, closed, low) ->\n    line i ppf \"Ptyp_variant closed=%a\\n\" fmt_closed_flag closed;\n    list i label_x_bool_x_core_type_list ppf l;\n    option i (fun i -> list i string) ppf low\n  | Ptyp_object (l, c) ->\n    line i ppf \"Ptyp_object %a\\n\" fmt_closed_flag c;\n    let i = i + 1 in\n    List.iter\n      (function\n        | Otag (l, attrs, t) ->\n          line i ppf \"method %s\\n\" l.txt;\n          attributes i ppf attrs;\n          core_type (i + 1) ppf t\n        | Oinherit ct ->\n          line i ppf \"Oinherit\\n\";\n          core_type (i + 1) ppf ct)\n      l\n  | Ptyp_class (li, l) ->\n    line i ppf \"Ptyp_class %a\\n\" fmt_longident_loc li;\n    list i core_type ppf l\n  | Ptyp_alias (ct, s) ->\n    line i ppf \"Ptyp_alias \\\"%s\\\"\\n\" s;\n    core_type i ppf ct\n  | Ptyp_poly (sl, ct) ->\n    line i ppf \"Ptyp_poly%a\\n\"\n      (fun ppf -> List.iter (fun x -> fprintf ppf \" '%s\" x.txt))\n      sl;\n    core_type i ppf ct\n  | 
Ptyp_package (s, l) ->\n    line i ppf \"Ptyp_package %a\\n\" fmt_longident_loc s;\n    list i package_with ppf l\n  | Ptyp_extension (s, arg) ->\n    line i ppf \"Ptyp_extension \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n\nand package_with i ppf (s, t) =\n  line i ppf \"with type %a\\n\" fmt_longident_loc s;\n  core_type i ppf t\n\nand pattern i ppf x =\n  line i ppf \"pattern %a\\n\" fmt_location x.ppat_loc;\n  attributes i ppf x.ppat_attributes;\n  let i = i + 1 in\n  match x.ppat_desc with\n  | Ppat_any -> line i ppf \"Ppat_any\\n\"\n  | Ppat_var s -> line i ppf \"Ppat_var %a\\n\" fmt_string_loc s\n  | Ppat_alias (p, s) ->\n    line i ppf \"Ppat_alias %a\\n\" fmt_string_loc s;\n    pattern i ppf p\n  | Ppat_constant c -> line i ppf \"Ppat_constant %a\\n\" fmt_constant c\n  | Ppat_interval (c1, c2) ->\n    line i ppf \"Ppat_interval %a..%a\\n\" fmt_constant c1 fmt_constant c2\n  | Ppat_tuple l ->\n    line i ppf \"Ppat_tuple\\n\";\n    list i pattern ppf l\n  | Ppat_construct (li, po) ->\n    line i ppf \"Ppat_construct %a\\n\" fmt_longident_loc li;\n    option i pattern ppf po\n  | Ppat_variant (l, po) ->\n    line i ppf \"Ppat_variant \\\"%s\\\"\\n\" l;\n    option i pattern ppf po\n  | Ppat_record (l, c) ->\n    line i ppf \"Ppat_record %a\\n\" fmt_closed_flag c;\n    list i longident_x_pattern ppf l\n  | Ppat_array l ->\n    line i ppf \"Ppat_array\\n\";\n    list i pattern ppf l\n  | Ppat_or (p1, p2) ->\n    line i ppf \"Ppat_or\\n\";\n    pattern i ppf p1;\n    pattern i ppf p2\n  | Ppat_lazy p ->\n    line i ppf \"Ppat_lazy\\n\";\n    pattern i ppf p\n  | Ppat_constraint (p, ct) ->\n    line i ppf \"Ppat_constraint\\n\";\n    pattern i ppf p;\n    core_type i ppf ct\n  | Ppat_type li ->\n    line i ppf \"Ppat_type\\n\";\n    longident_loc i ppf li\n  | Ppat_unpack s -> line i ppf \"Ppat_unpack %a\\n\" fmt_string_loc s\n  | Ppat_exception p ->\n    line i ppf \"Ppat_exception\\n\";\n    pattern i ppf p\n  | Ppat_open (m, p) ->\n    line i ppf 
\"Ppat_open \\\"%a\\\"\\n\" fmt_longident_loc m;\n    pattern i ppf p\n  | Ppat_extension (s, arg) ->\n    line i ppf \"Ppat_extension \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n\nand expression i ppf x =\n  line i ppf \"expression %a\\n\" fmt_location x.pexp_loc;\n  attributes i ppf x.pexp_attributes;\n  let i = i + 1 in\n  match x.pexp_desc with\n  | Pexp_ident li -> line i ppf \"Pexp_ident %a\\n\" fmt_longident_loc li\n  | Pexp_constant c -> line i ppf \"Pexp_constant %a\\n\" fmt_constant c\n  | Pexp_let (rf, l, e) ->\n    line i ppf \"Pexp_let %a\\n\" fmt_rec_flag rf;\n    list i value_binding ppf l;\n    expression i ppf e\n  | Pexp_function l ->\n    line i ppf \"Pexp_function\\n\";\n    list i case ppf l\n  | Pexp_fun (l, eo, p, e) ->\n    line i ppf \"Pexp_fun\\n\";\n    arg_label i ppf l;\n    option i expression ppf eo;\n    pattern i ppf p;\n    expression i ppf e\n  | Pexp_apply (e, l) ->\n    line i ppf \"Pexp_apply\\n\";\n    expression i ppf e;\n    list i label_x_expression ppf l\n  | Pexp_match (e, l) ->\n    line i ppf \"Pexp_match\\n\";\n    expression i ppf e;\n    list i case ppf l\n  | Pexp_try (e, l) ->\n    line i ppf \"Pexp_try\\n\";\n    expression i ppf e;\n    list i case ppf l\n  | Pexp_tuple l ->\n    line i ppf \"Pexp_tuple\\n\";\n    list i expression ppf l\n  | Pexp_construct (li, eo) ->\n    line i ppf \"Pexp_construct %a\\n\" fmt_longident_loc li;\n    option i expression ppf eo\n  | Pexp_variant (l, eo) ->\n    line i ppf \"Pexp_variant \\\"%s\\\"\\n\" l;\n    option i expression ppf eo\n  | Pexp_record (l, eo) ->\n    line i ppf \"Pexp_record\\n\";\n    list i longident_x_expression ppf l;\n    option i expression ppf eo\n  | Pexp_field (e, li) ->\n    line i ppf \"Pexp_field\\n\";\n    expression i ppf e;\n    longident_loc i ppf li\n  | Pexp_setfield (e1, li, e2) ->\n    line i ppf \"Pexp_setfield\\n\";\n    expression i ppf e1;\n    longident_loc i ppf li;\n    expression i ppf e2\n  | Pexp_array l ->\n    line i ppf 
\"Pexp_array\\n\";\n    list i expression ppf l\n  | Pexp_ifthenelse (e1, e2, eo) ->\n    line i ppf \"Pexp_ifthenelse\\n\";\n    expression i ppf e1;\n    expression i ppf e2;\n    option i expression ppf eo\n  | Pexp_sequence (e1, e2) ->\n    line i ppf \"Pexp_sequence\\n\";\n    expression i ppf e1;\n    expression i ppf e2\n  | Pexp_while (e1, e2) ->\n    line i ppf \"Pexp_while\\n\";\n    expression i ppf e1;\n    expression i ppf e2\n  | Pexp_for (p, e1, e2, df, e3) ->\n    line i ppf \"Pexp_for %a\\n\" fmt_direction_flag df;\n    pattern i ppf p;\n    expression i ppf e1;\n    expression i ppf e2;\n    expression i ppf e3\n  | Pexp_constraint (e, ct) ->\n    line i ppf \"Pexp_constraint\\n\";\n    expression i ppf e;\n    core_type i ppf ct\n  | Pexp_coerce (e, cto1, cto2) ->\n    line i ppf \"Pexp_coerce\\n\";\n    expression i ppf e;\n    option i core_type ppf cto1;\n    core_type i ppf cto2\n  | Pexp_send (e, s) ->\n    line i ppf \"Pexp_send \\\"%s\\\"\\n\" s.txt;\n    expression i ppf e\n  | Pexp_new li -> line i ppf \"Pexp_new %a\\n\" fmt_longident_loc li\n  | Pexp_setinstvar (s, e) ->\n    line i ppf \"Pexp_setinstvar %a\\n\" fmt_string_loc s;\n    expression i ppf e\n  | Pexp_override l ->\n    line i ppf \"Pexp_override\\n\";\n    list i string_x_expression ppf l\n  | Pexp_letmodule (s, me, e) ->\n    line i ppf \"Pexp_letmodule %a\\n\" fmt_string_loc s;\n    module_expr i ppf me;\n    expression i ppf e\n  | Pexp_letexception (cd, e) ->\n    line i ppf \"Pexp_letexception\\n\";\n    extension_constructor i ppf cd;\n    expression i ppf e\n  | Pexp_assert e ->\n    line i ppf \"Pexp_assert\\n\";\n    expression i ppf e\n  | Pexp_lazy e ->\n    line i ppf \"Pexp_lazy\\n\";\n    expression i ppf e\n  | Pexp_poly (e, cto) ->\n    line i ppf \"Pexp_poly\\n\";\n    expression i ppf e;\n    option i core_type ppf cto\n  | Pexp_object s ->\n    line i ppf \"Pexp_object\\n\";\n    class_structure i ppf s\n  | Pexp_newtype (s, e) ->\n    line i ppf 
\"Pexp_newtype \\\"%s\\\"\\n\" s.txt;\n    expression i ppf e\n  | Pexp_pack me ->\n    line i ppf \"Pexp_pack\\n\";\n    module_expr i ppf me\n  | Pexp_open (ovf, m, e) ->\n    line i ppf \"Pexp_open %a \\\"%a\\\"\\n\" fmt_override_flag ovf fmt_longident_loc m;\n    expression i ppf e\n  | Pexp_extension (s, arg) ->\n    line i ppf \"Pexp_extension \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n  | Pexp_unreachable -> line i ppf \"Pexp_unreachable\"\n\nand value_description i ppf x =\n  line i ppf \"value_description %a %a\\n\" fmt_string_loc x.pval_name fmt_location\n    x.pval_loc;\n  attributes i ppf x.pval_attributes;\n  core_type (i + 1) ppf x.pval_type;\n  list (i + 1) string ppf x.pval_prim\n\nand type_parameter i ppf (x, _variance) = core_type i ppf x\n\nand type_declaration i ppf x =\n  line i ppf \"type_declaration %a %a\\n\" fmt_string_loc x.ptype_name fmt_location\n    x.ptype_loc;\n  attributes i ppf x.ptype_attributes;\n  let i = i + 1 in\n  line i ppf \"ptype_params =\\n\";\n  list (i + 1) type_parameter ppf x.ptype_params;\n  line i ppf \"ptype_cstrs =\\n\";\n  list (i + 1) core_type_x_core_type_x_location ppf x.ptype_cstrs;\n  line i ppf \"ptype_kind =\\n\";\n  type_kind (i + 1) ppf x.ptype_kind;\n  line i ppf \"ptype_private = %a\\n\" fmt_private_flag x.ptype_private;\n  line i ppf \"ptype_manifest =\\n\";\n  option (i + 1) core_type ppf x.ptype_manifest\n\nand attributes i ppf l =\n  let i = i + 1 in\n  List.iter\n    (fun (s, arg) ->\n      line i ppf \"attribute %a \\\"%s\\\"\\n\" fmt_location (s : _ Asttypes.loc).loc\n        s.txt;\n      payload (i + 1) ppf arg)\n    l\n\nand payload i ppf = function\n  | PStr x -> structure i ppf x\n  | PSig x -> signature i ppf x\n  | PTyp x -> core_type i ppf x\n  | PPat (x, None) -> pattern i ppf x\n  | PPat (x, Some g) ->\n    pattern i ppf x;\n    line i ppf \"<when>\\n\";\n    expression (i + 1) ppf g\n\nand type_kind i ppf x =\n  match x with\n  | Ptype_abstract -> line i ppf \"Ptype_abstract\\n\"\n 
 | Ptype_variant l ->\n    line i ppf \"Ptype_variant\\n\";\n    list (i + 1) constructor_decl ppf l\n  | Ptype_record l ->\n    line i ppf \"Ptype_record\\n\";\n    list (i + 1) label_decl ppf l\n  | Ptype_open -> line i ppf \"Ptype_open\\n\"\n\nand type_extension i ppf x =\n  line i ppf \"type_extension\\n\";\n  attributes i ppf x.ptyext_attributes;\n  let i = i + 1 in\n  line i ppf \"ptyext_path = %a\\n\" fmt_longident_loc x.ptyext_path;\n  line i ppf \"ptyext_params =\\n\";\n  list (i + 1) type_parameter ppf x.ptyext_params;\n  line i ppf \"ptyext_constructors =\\n\";\n  list (i + 1) extension_constructor ppf x.ptyext_constructors;\n  line i ppf \"ptyext_private = %a\\n\" fmt_private_flag x.ptyext_private\n\nand extension_constructor i ppf x =\n  line i ppf \"extension_constructor %a\\n\" fmt_location x.pext_loc;\n  attributes i ppf x.pext_attributes;\n  let i = i + 1 in\n  line i ppf \"pext_name = \\\"%s\\\"\\n\" x.pext_name.txt;\n  line i ppf \"pext_kind =\\n\";\n  extension_constructor_kind (i + 1) ppf x.pext_kind\n\nand extension_constructor_kind i ppf x =\n  match x with\n  | Pext_decl (a, r) ->\n    line i ppf \"Pext_decl\\n\";\n    constructor_arguments (i + 1) ppf a;\n    option (i + 1) core_type ppf r\n  | Pext_rebind li ->\n    line i ppf \"Pext_rebind\\n\";\n    line (i + 1) ppf \"%a\\n\" fmt_longident_loc li\n\nand class_type i ppf x =\n  line i ppf \"class_type %a\\n\" fmt_location x.pcty_loc;\n  attributes i ppf x.pcty_attributes;\n  let i = i + 1 in\n  match x.pcty_desc with\n  | Pcty_constr (li, l) ->\n    line i ppf \"Pcty_constr %a\\n\" fmt_longident_loc li;\n    list i core_type ppf l\n  | Pcty_signature cs ->\n    line i ppf \"Pcty_signature\\n\";\n    class_signature i ppf cs\n  | Pcty_arrow (l, co, cl) ->\n    line i ppf \"Pcty_arrow\\n\";\n    arg_label i ppf l;\n    core_type i ppf co;\n    class_type i ppf cl\n  | Pcty_extension (s, arg) ->\n    line i ppf \"Pcty_extension \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n  | Pcty_open 
(ovf, m, e) ->\n    line i ppf \"Pcty_open %a \\\"%a\\\"\\n\" fmt_override_flag ovf fmt_longident_loc m;\n    class_type i ppf e\n\nand class_signature i ppf cs =\n  line i ppf \"class_signature\\n\";\n  core_type (i + 1) ppf cs.pcsig_self;\n  list (i + 1) class_type_field ppf cs.pcsig_fields\n\nand class_type_field i ppf x =\n  line i ppf \"class_type_field %a\\n\" fmt_location x.pctf_loc;\n  let i = i + 1 in\n  attributes i ppf x.pctf_attributes;\n  match x.pctf_desc with\n  | Pctf_inherit ct ->\n    line i ppf \"Pctf_inherit\\n\";\n    class_type i ppf ct\n  | Pctf_val (s, mf, vf, ct) ->\n    line i ppf \"Pctf_val \\\"%s\\\" %a %a\\n\" s.txt fmt_mutable_flag mf\n      fmt_virtual_flag vf;\n    core_type (i + 1) ppf ct\n  | Pctf_method (s, pf, vf, ct) ->\n    line i ppf \"Pctf_method \\\"%s\\\" %a %a\\n\" s.txt fmt_private_flag pf\n      fmt_virtual_flag vf;\n    core_type (i + 1) ppf ct\n  | Pctf_constraint (ct1, ct2) ->\n    line i ppf \"Pctf_constraint\\n\";\n    core_type (i + 1) ppf ct1;\n    core_type (i + 1) ppf ct2\n  | Pctf_attribute (s, arg) ->\n    line i ppf \"Pctf_attribute \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n  | Pctf_extension (s, arg) ->\n    line i ppf \"Pctf_extension \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n\nand class_type_declaration i ppf x =\n  line i ppf \"class_type_declaration %a\\n\" fmt_location x.pci_loc;\n  attributes i ppf x.pci_attributes;\n  let i = i + 1 in\n  line i ppf \"pci_virt = %a\\n\" fmt_virtual_flag x.pci_virt;\n  line i ppf \"pci_params =\\n\";\n  list (i + 1) type_parameter ppf x.pci_params;\n  line i ppf \"pci_name = %a\\n\" fmt_string_loc x.pci_name;\n  line i ppf \"pci_expr =\\n\";\n  class_type (i + 1) ppf x.pci_expr\n\nand class_structure i ppf {pcstr_self = p; pcstr_fields = l} =\n  line i ppf \"class_structure\\n\";\n  pattern (i + 1) ppf p;\n  list (i + 1) class_field ppf l\n\nand class_field i ppf x =\n  line i ppf \"class_field %a\\n\" fmt_location x.pcf_loc;\n  let i = i + 1 in\n  attributes 
i ppf x.pcf_attributes;\n  match x.pcf_desc with\n  | Pcf_inherit () -> ()\n  | Pcf_val (s, mf, k) ->\n    line i ppf \"Pcf_val %a\\n\" fmt_mutable_flag mf;\n    line (i + 1) ppf \"%a\\n\" fmt_string_loc s;\n    class_field_kind (i + 1) ppf k\n  | Pcf_method (s, pf, k) ->\n    line i ppf \"Pcf_method %a\\n\" fmt_private_flag pf;\n    line (i + 1) ppf \"%a\\n\" fmt_string_loc s;\n    class_field_kind (i + 1) ppf k\n  | Pcf_constraint (ct1, ct2) ->\n    line i ppf \"Pcf_constraint\\n\";\n    core_type (i + 1) ppf ct1;\n    core_type (i + 1) ppf ct2\n  | Pcf_initializer e ->\n    line i ppf \"Pcf_initializer\\n\";\n    expression (i + 1) ppf e\n  | Pcf_attribute (s, arg) ->\n    line i ppf \"Pcf_attribute \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n  | Pcf_extension (s, arg) ->\n    line i ppf \"Pcf_extension \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n\nand class_field_kind i ppf = function\n  | Cfk_concrete (o, e) ->\n    line i ppf \"Concrete %a\\n\" fmt_override_flag o;\n    expression i ppf e\n  | Cfk_virtual t ->\n    line i ppf \"Virtual\\n\";\n    core_type i ppf t\n\nand module_type i ppf x =\n  line i ppf \"module_type %a\\n\" fmt_location x.pmty_loc;\n  attributes i ppf x.pmty_attributes;\n  let i = i + 1 in\n  match x.pmty_desc with\n  | Pmty_ident li -> line i ppf \"Pmty_ident %a\\n\" fmt_longident_loc li\n  | Pmty_alias li -> line i ppf \"Pmty_alias %a\\n\" fmt_longident_loc li\n  | Pmty_signature s ->\n    line i ppf \"Pmty_signature\\n\";\n    signature i ppf s\n  | Pmty_functor (s, mt1, mt2) ->\n    line i ppf \"Pmty_functor %a\\n\" fmt_string_loc s;\n    Misc.may (module_type i ppf) mt1;\n    module_type i ppf mt2\n  | Pmty_with (mt, l) ->\n    line i ppf \"Pmty_with\\n\";\n    module_type i ppf mt;\n    list i with_constraint ppf l\n  | Pmty_typeof m ->\n    line i ppf \"Pmty_typeof\\n\";\n    module_expr i ppf m\n  | Pmty_extension (s, arg) ->\n    line i ppf \"Pmod_extension \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n\nand signature i 
ppf x = list i signature_item ppf x\n\nand signature_item i ppf x =\n  line i ppf \"signature_item %a\\n\" fmt_location x.psig_loc;\n  let i = i + 1 in\n  match x.psig_desc with\n  | Psig_value vd ->\n    line i ppf \"Psig_value\\n\";\n    value_description i ppf vd\n  | Psig_type (rf, l) ->\n    line i ppf \"Psig_type %a\\n\" fmt_rec_flag rf;\n    list i type_declaration ppf l\n  | Psig_typext te ->\n    line i ppf \"Psig_typext\\n\";\n    type_extension i ppf te\n  | Psig_exception ext ->\n    line i ppf \"Psig_exception\\n\";\n    extension_constructor i ppf ext\n  | Psig_module pmd ->\n    line i ppf \"Psig_module %a\\n\" fmt_string_loc pmd.pmd_name;\n    attributes i ppf pmd.pmd_attributes;\n    module_type i ppf pmd.pmd_type\n  | Psig_recmodule decls ->\n    line i ppf \"Psig_recmodule\\n\";\n    list i module_declaration ppf decls\n  | Psig_modtype x ->\n    line i ppf \"Psig_modtype %a\\n\" fmt_string_loc x.pmtd_name;\n    attributes i ppf x.pmtd_attributes;\n    modtype_declaration i ppf x.pmtd_type\n  | Psig_open od ->\n    line i ppf \"Psig_open %a %a\\n\" fmt_override_flag od.popen_override\n      fmt_longident_loc od.popen_lid;\n    attributes i ppf od.popen_attributes\n  | Psig_include incl ->\n    line i ppf \"Psig_include\\n\";\n    module_type i ppf incl.pincl_mod;\n    attributes i ppf incl.pincl_attributes\n  | Psig_class () -> ()\n  | Psig_class_type l ->\n    line i ppf \"Psig_class_type\\n\";\n    list i class_type_declaration ppf l\n  | Psig_extension ((s, arg), attrs) ->\n    line i ppf \"Psig_extension \\\"%s\\\"\\n\" s.txt;\n    attributes i ppf attrs;\n    payload i ppf arg\n  | Psig_attribute (s, arg) ->\n    line i ppf \"Psig_attribute \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n\nand modtype_declaration i ppf = function\n  | None -> line i ppf \"#abstract\"\n  | Some mt -> module_type (i + 1) ppf mt\n\nand with_constraint i ppf x =\n  match x with\n  | Pwith_type (lid, td) ->\n    line i ppf \"Pwith_type %a\\n\" fmt_longident_loc 
lid;\n    type_declaration (i + 1) ppf td\n  | Pwith_typesubst (lid, td) ->\n    line i ppf \"Pwith_typesubst %a\\n\" fmt_longident_loc lid;\n    type_declaration (i + 1) ppf td\n  | Pwith_module (lid1, lid2) ->\n    line i ppf \"Pwith_module %a = %a\\n\" fmt_longident_loc lid1 fmt_longident_loc\n      lid2\n  | Pwith_modsubst (lid1, lid2) ->\n    line i ppf \"Pwith_modsubst %a = %a\\n\" fmt_longident_loc lid1\n      fmt_longident_loc lid2\n\nand module_expr i ppf x =\n  line i ppf \"module_expr %a\\n\" fmt_location x.pmod_loc;\n  attributes i ppf x.pmod_attributes;\n  let i = i + 1 in\n  match x.pmod_desc with\n  | Pmod_ident li -> line i ppf \"Pmod_ident %a\\n\" fmt_longident_loc li\n  | Pmod_structure s ->\n    line i ppf \"Pmod_structure\\n\";\n    structure i ppf s\n  | Pmod_functor (s, mt, me) ->\n    line i ppf \"Pmod_functor %a\\n\" fmt_string_loc s;\n    Misc.may (module_type i ppf) mt;\n    module_expr i ppf me\n  | Pmod_apply (me1, me2) ->\n    line i ppf \"Pmod_apply\\n\";\n    module_expr i ppf me1;\n    module_expr i ppf me2\n  | Pmod_constraint (me, mt) ->\n    line i ppf \"Pmod_constraint\\n\";\n    module_expr i ppf me;\n    module_type i ppf mt\n  | Pmod_unpack e ->\n    line i ppf \"Pmod_unpack\\n\";\n    expression i ppf e\n  | Pmod_extension (s, arg) ->\n    line i ppf \"Pmod_extension \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n\nand structure i ppf x = list i structure_item ppf x\n\nand structure_item i ppf x =\n  line i ppf \"structure_item %a\\n\" fmt_location x.pstr_loc;\n  let i = i + 1 in\n  match x.pstr_desc with\n  | Pstr_eval (e, attrs) ->\n    line i ppf \"Pstr_eval\\n\";\n    attributes i ppf attrs;\n    expression i ppf e\n  | Pstr_value (rf, l) ->\n    line i ppf \"Pstr_value %a\\n\" fmt_rec_flag rf;\n    list i value_binding ppf l\n  | Pstr_primitive vd ->\n    line i ppf \"Pstr_primitive\\n\";\n    value_description i ppf vd\n  | Pstr_type (rf, l) ->\n    line i ppf \"Pstr_type %a\\n\" fmt_rec_flag rf;\n    list i 
type_declaration ppf l\n  | Pstr_typext te ->\n    line i ppf \"Pstr_typext\\n\";\n    type_extension i ppf te\n  | Pstr_exception ext ->\n    line i ppf \"Pstr_exception\\n\";\n    extension_constructor i ppf ext\n  | Pstr_module x ->\n    line i ppf \"Pstr_module\\n\";\n    module_binding i ppf x\n  | Pstr_recmodule bindings ->\n    line i ppf \"Pstr_recmodule\\n\";\n    list i module_binding ppf bindings\n  | Pstr_modtype x ->\n    line i ppf \"Pstr_modtype %a\\n\" fmt_string_loc x.pmtd_name;\n    attributes i ppf x.pmtd_attributes;\n    modtype_declaration i ppf x.pmtd_type\n  | Pstr_open od ->\n    line i ppf \"Pstr_open %a %a\\n\" fmt_override_flag od.popen_override\n      fmt_longident_loc od.popen_lid;\n    attributes i ppf od.popen_attributes\n  | Pstr_class () -> ()\n  | Pstr_class_type l ->\n    line i ppf \"Pstr_class_type\\n\";\n    list i class_type_declaration ppf l\n  | Pstr_include incl ->\n    line i ppf \"Pstr_include\";\n    attributes i ppf incl.pincl_attributes;\n    module_expr i ppf incl.pincl_mod\n  | Pstr_extension ((s, arg), attrs) ->\n    line i ppf \"Pstr_extension \\\"%s\\\"\\n\" s.txt;\n    attributes i ppf attrs;\n    payload i ppf arg\n  | Pstr_attribute (s, arg) ->\n    line i ppf \"Pstr_attribute \\\"%s\\\"\\n\" s.txt;\n    payload i ppf arg\n\nand module_declaration i ppf pmd =\n  string_loc i ppf pmd.pmd_name;\n  attributes i ppf pmd.pmd_attributes;\n  module_type (i + 1) ppf pmd.pmd_type\n\nand module_binding i ppf x =\n  string_loc i ppf x.pmb_name;\n  attributes i ppf x.pmb_attributes;\n  module_expr (i + 1) ppf x.pmb_expr\n\nand core_type_x_core_type_x_location i ppf (ct1, ct2, l) =\n  line i ppf \"<constraint> %a\\n\" fmt_location l;\n  core_type (i + 1) ppf ct1;\n  core_type (i + 1) ppf ct2\n\nand constructor_decl i ppf\n    {pcd_name; pcd_args; pcd_res; pcd_loc; pcd_attributes} =\n  line i ppf \"%a\\n\" fmt_location pcd_loc;\n  line (i + 1) ppf \"%a\\n\" fmt_string_loc pcd_name;\n  attributes i ppf pcd_attributes;\n  
constructor_arguments (i + 1) ppf pcd_args;\n  option (i + 1) core_type ppf pcd_res\n\nand constructor_arguments i ppf = function\n  | Pcstr_tuple l -> list i core_type ppf l\n  | Pcstr_record l -> list i label_decl ppf l\n\nand label_decl i ppf {pld_name; pld_mutable; pld_type; pld_loc; pld_attributes}\n    =\n  line i ppf \"%a\\n\" fmt_location pld_loc;\n  attributes i ppf pld_attributes;\n  line (i + 1) ppf \"%a\\n\" fmt_mutable_flag pld_mutable;\n  line (i + 1) ppf \"%a\" fmt_string_loc pld_name;\n  core_type (i + 1) ppf pld_type\n\nand longident_x_pattern i ppf (li, p) =\n  line i ppf \"%a\\n\" fmt_longident_loc li;\n  pattern (i + 1) ppf p\n\nand case i ppf {pc_lhs; pc_guard; pc_rhs} =\n  line i ppf \"<case>\\n\";\n  pattern (i + 1) ppf pc_lhs;\n  (match pc_guard with\n  | None -> ()\n  | Some g ->\n    line (i + 1) ppf \"<when>\\n\";\n    expression (i + 2) ppf g);\n  expression (i + 1) ppf pc_rhs\n\nand value_binding i ppf x =\n  line i ppf \"<def>\\n\";\n  attributes (i + 1) ppf x.pvb_attributes;\n  pattern (i + 1) ppf x.pvb_pat;\n  expression (i + 1) ppf x.pvb_expr\n\nand string_x_expression i ppf (s, e) =\n  line i ppf \"<override> %a\\n\" fmt_string_loc s;\n  expression (i + 1) ppf e\n\nand longident_x_expression i ppf (li, e) =\n  line i ppf \"%a\\n\" fmt_longident_loc li;\n  expression (i + 1) ppf e\n\nand label_x_expression i ppf (l, e) =\n  line i ppf \"<arg>\\n\";\n  arg_label i ppf l;\n  expression (i + 1) ppf e\n\nand label_x_bool_x_core_type_list i ppf x =\n  match x with\n  | Rtag (l, attrs, b, ctl) ->\n    line i ppf \"Rtag \\\"%s\\\" %s\\n\" l.txt (string_of_bool b);\n    attributes (i + 1) ppf attrs;\n    list (i + 1) core_type ppf ctl\n  | Rinherit ct ->\n    line i ppf \"Rinherit\\n\";\n    core_type (i + 1) ppf ct\n\nlet interface ppf x = list 0 signature_item ppf x\n\nlet implementation ppf x = list 0 structure_item ppf x\n"
  },
  {
    "path": "analysis/vendor/ml/printast.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*              Damien Doligez, projet Para, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1999 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Parsetree\nopen Format\n\nval interface : formatter -> signature_item list -> unit\nval implementation : formatter -> structure_item list -> unit\n\nval expression : int -> formatter -> expression -> unit\nval structure : int -> formatter -> structure -> unit\nval payload : int -> formatter -> payload -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/printlambda.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Format\nopen Asttypes\nopen Primitive\nopen Lambda\n\nlet rec struct_const ppf = function\n  | Const_base (Const_int n) -> fprintf ppf \"%i\" n\n  | Const_base (Const_char i) ->\n    fprintf ppf \"%s\" (Pprintast.string_of_int_as_char i)\n  | Const_base (Const_string (s, _)) -> fprintf ppf \"%S\" s\n  | Const_immstring s -> fprintf ppf \"#%S\" s\n  | Const_base (Const_float f) -> fprintf ppf \"%s\" f\n  | Const_base (Const_int32 n) -> fprintf ppf \"%lil\" n\n  | Const_base (Const_int64 n) -> fprintf ppf \"%LiL\" n\n  | Const_base (Const_bigint (sign, n)) ->\n    fprintf ppf \"%sn\" (Bigint_utils.to_string sign n)\n  | Const_pointer (n, _) -> fprintf ppf \"%ia\" n\n  | Const_block (tag_info, []) ->\n    let tag = Lambda.tag_of_tag_info tag_info in\n    fprintf ppf \"[%i]\" tag\n  | Const_block (tag_info, sc1 :: scl) ->\n    let tag = Lambda.tag_of_tag_info tag_info in\n    
let sconsts ppf scl =\n      List.iter (fun sc -> fprintf ppf \"@ %a\" struct_const sc) scl\n    in\n    fprintf ppf \"@[<1>[%i:@ @[%a%a@]]@]\" tag struct_const sc1 sconsts scl\n  | Const_float_array [] -> fprintf ppf \"[| |]\"\n  | Const_float_array (f1 :: fl) ->\n    let floats ppf fl = List.iter (fun f -> fprintf ppf \"@ %s\" f) fl in\n    fprintf ppf \"@[<1>[|@[%s%a@]|]@]\" f1 floats fl\n  | Const_false -> fprintf ppf \"false\"\n  | Const_true -> fprintf ppf \"true\"\nlet boxed_integer_name = function\n  | Pbigint -> \"bigint\"\n  | Pint32 -> \"int32\"\n  | Pint64 -> \"int64\"\n\nlet value_kind = function\n  | Pgenval -> \"\"\n\n(* let field_kind = function\n   | Pgenval -> \"*\"\n   | Pintval -> \"int\"\n   | Pfloatval -> \"float\"\n   | Pboxedintval bi -> boxed_integer_name bi *)\n\nlet print_boxed_integer_conversion ppf bi1 bi2 =\n  fprintf ppf \"%s_of_%s\" (boxed_integer_name bi2) (boxed_integer_name bi1)\n\nlet boxed_integer_mark name = function\n  | Pbigint -> Printf.sprintf \"BigInt.%s\" name\n  | Pint32 -> Printf.sprintf \"Int32.%s\" name\n  | Pint64 -> Printf.sprintf \"Int64.%s\" name\n\nlet print_boxed_integer name ppf bi =\n  fprintf ppf \"%s\" (boxed_integer_mark name bi)\n\nlet string_of_loc_kind = function\n  | Loc_FILE -> \"loc_FILE\"\n  | Loc_LINE -> \"loc_LINE\"\n  | Loc_MODULE -> \"loc_MODULE\"\n  | Loc_POS -> \"loc_POS\"\n  | Loc_LOC -> \"loc_LOC\"\n\n(* let block_shape ppf shape = match shape with\n   | None | Some [] -> ()\n   | Some l when List.for_all ((=) Pgenval) l -> ()\n   | Some [elt] ->\n       Format.fprintf ppf \" (%s)\" (field_kind elt)\n   | Some (h :: t) ->\n       Format.fprintf ppf \" (%s\" (field_kind h);\n       List.iter (fun elt ->\n           Format.fprintf ppf \",%s\" (field_kind elt))\n         t;\n       Format.fprintf ppf \")\" *)\n\nlet str_of_field_info (fld_info : Lambda.field_dbg_info) =\n  match fld_info with\n  | Fld_module {name}\n  | Fld_record {name}\n  | Fld_record_inline {name}\n  | Fld_record_extension 
{name} ->\n    name\n  | Fld_tuple -> \"[]\"\n  | Fld_poly_var_tag -> \"`\"\n  | Fld_poly_var_content -> \"#\"\n  | Fld_extension -> \"ext\"\n  | Fld_variant -> \"var\"\n  | Fld_cons -> \"cons\"\n  | Fld_array -> \"[||]\"\nlet print_taginfo ppf = function\n  | Blk_extension -> fprintf ppf \"ext\"\n  | Blk_record_ext {fields = ss} ->\n    fprintf ppf \"[%s]\" (String.concat \";\" (Array.to_list ss))\n  | Blk_tuple -> fprintf ppf \"tuple\"\n  | Blk_constructor {name; num_nonconst} ->\n    fprintf ppf \"%s/%i\" name num_nonconst\n  | Blk_poly_var name -> fprintf ppf \"`%s\" name\n  | Blk_record {fields = ss} ->\n    fprintf ppf \"[%s]\" (String.concat \";\" (Array.to_list ss))\n  | Blk_module ss -> fprintf ppf \"[%s]\" (String.concat \";\" ss)\n  | Blk_some -> fprintf ppf \"some\"\n  | Blk_some_not_nested -> fprintf ppf \"some_not_nested\"\n  | Blk_lazy_general -> fprintf ppf \"lazy_general\"\n  | Blk_module_export _ -> fprintf ppf \"module/exports\"\n  | Blk_record_inlined {fields = ss} ->\n    fprintf ppf \"[%s]\" (String.concat \";\" (Array.to_list ss))\n\nlet primitive ppf = function\n  | Puncurried_apply -> fprintf ppf \"@app\"\n  | Pidentity -> fprintf ppf \"id\"\n  | Pbytes_to_string -> fprintf ppf \"bytes_to_string\"\n  | Pignore -> fprintf ppf \"ignore\"\n  | Prevapply -> fprintf ppf \"revapply\"\n  | Pdirapply -> fprintf ppf \"dirapply\"\n  | Ploc kind -> fprintf ppf \"%s\" (string_of_loc_kind kind)\n  | Pgetglobal id -> fprintf ppf \"global %a\" Ident.print id\n  | Pmakeblock taginfo -> fprintf ppf \"makeblock %a\" print_taginfo taginfo\n  | Pfield (n, fld) -> fprintf ppf \"field:%s/%i\" (str_of_field_info fld) n\n  | Psetfield (n, _) -> fprintf ppf \"setfield %i\" n\n  | Pduprecord -> fprintf ppf \"duprecord\"\n  | Plazyforce -> fprintf ppf \"force\"\n  | Pccall p -> fprintf ppf \"%s\" p.prim_name\n  | Praise k -> fprintf ppf \"%s\" (Lambda.raise_kind k)\n  | Psequand -> fprintf ppf \"&&\"\n  | Psequor -> fprintf ppf \"||\"\n  | Pnot -> fprintf ppf 
\"not\"\n  | Pnegint -> fprintf ppf \"~\"\n  | Paddint -> fprintf ppf \"+\"\n  | Psubint -> fprintf ppf \"-\"\n  | Pmulint -> fprintf ppf \"*\"\n  | Pdivint Safe -> fprintf ppf \"/\"\n  | Pdivint Unsafe -> fprintf ppf \"/u\"\n  | Pmodint Safe -> fprintf ppf \"mod\"\n  | Pmodint Unsafe -> fprintf ppf \"mod_unsafe\"\n  | Pandint -> fprintf ppf \"and\"\n  | Porint -> fprintf ppf \"or\"\n  | Pxorint -> fprintf ppf \"xor\"\n  | Plslint -> fprintf ppf \"lsl\"\n  | Plsrint -> fprintf ppf \"lsr\"\n  | Pasrint -> fprintf ppf \"asr\"\n  | Pintcomp Ceq -> fprintf ppf \"==\"\n  | Pintcomp Cneq -> fprintf ppf \"!=\"\n  | Pintcomp Clt -> fprintf ppf \"<\"\n  | Pintcomp Cle -> fprintf ppf \"<=\"\n  | Pintcomp Cgt -> fprintf ppf \">\"\n  | Pintcomp Cge -> fprintf ppf \">=\"\n  | Poffsetint n -> fprintf ppf \"%i+\" n\n  | Poffsetref n -> fprintf ppf \"+:=%i\" n\n  | Pintoffloat -> fprintf ppf \"int_of_float\"\n  | Pfloatofint -> fprintf ppf \"float_of_int\"\n  | Pnegfloat -> fprintf ppf \"~.\"\n  | Pabsfloat -> fprintf ppf \"abs.\"\n  | Paddfloat -> fprintf ppf \"+.\"\n  | Psubfloat -> fprintf ppf \"-.\"\n  | Pmulfloat -> fprintf ppf \"*.\"\n  | Pdivfloat -> fprintf ppf \"/.\"\n  | Pfloatcomp Ceq -> fprintf ppf \"==.\"\n  | Pfloatcomp Cneq -> fprintf ppf \"!=.\"\n  | Pfloatcomp Clt -> fprintf ppf \"<.\"\n  | Pfloatcomp Cle -> fprintf ppf \"<=.\"\n  | Pfloatcomp Cgt -> fprintf ppf \">.\"\n  | Pfloatcomp Cge -> fprintf ppf \">=.\"\n  | Pnegbigint -> fprintf ppf \"~\"\n  | Paddbigint -> fprintf ppf \"+\"\n  | Psubbigint -> fprintf ppf \"-\"\n  | Pmulbigint -> fprintf ppf \"*\"\n  | Ppowbigint -> fprintf ppf \"**\"\n  | Pandbigint -> fprintf ppf \"and\"\n  | Porbigint -> fprintf ppf \"or\"\n  | Pxorbigint -> fprintf ppf \"xor\"\n  | Plslbigint -> fprintf ppf \"lsl\"\n  | Pasrbigint -> fprintf ppf \"asr\"\n  | Pdivbigint -> fprintf ppf \"/\"\n  | Pmodbigint -> fprintf ppf \"mod\"\n  | Pbigintcomp Ceq -> fprintf ppf \"==,\"\n  | Pbigintcomp Cneq -> fprintf ppf \"!=,\"\n  | Pbigintcomp 
Clt -> fprintf ppf \"<,\"\n  | Pbigintcomp Cle -> fprintf ppf \"<=,\"\n  | Pbigintcomp Cgt -> fprintf ppf \">,\"\n  | Pbigintcomp Cge -> fprintf ppf \">=,\"\n  | Pstringlength -> fprintf ppf \"string.length\"\n  | Pstringrefu -> fprintf ppf \"string.unsafe_get\"\n  | Pstringrefs -> fprintf ppf \"string.get\"\n  | Pbyteslength -> fprintf ppf \"bytes.length\"\n  | Pbytesrefu -> fprintf ppf \"bytes.unsafe_get\"\n  | Pbytessetu -> fprintf ppf \"bytes.unsafe_set\"\n  | Pbytesrefs -> fprintf ppf \"bytes.get\"\n  | Pbytessets -> fprintf ppf \"bytes.set\"\n  | Parraylength -> fprintf ppf \"array.length\"\n  | Pmakearray Mutable -> fprintf ppf \"makearray\"\n  | Pmakearray Immutable -> fprintf ppf \"makearray_imm\"\n  | Parrayrefu -> fprintf ppf \"array.unsafe_get\"\n  | Parraysetu -> fprintf ppf \"array.unsafe_set\"\n  | Parrayrefs -> fprintf ppf \"array.get\"\n  | Parraysets -> fprintf ppf \"array.set\"\n  | Pctconst c ->\n    let const_name =\n      match c with\n      | Big_endian -> \"big_endian\"\n      | Word_size -> \"word_size\"\n      | Int_size -> \"int_size\"\n      | Max_wosize -> \"max_wosize\"\n      | Ostype_unix -> \"ostype_unix\"\n      | Ostype_win32 -> \"ostype_win32\"\n      | Ostype_cygwin -> \"ostype_cygwin\"\n      | Backend_type -> \"backend_type\"\n    in\n    fprintf ppf \"sys.constant_%s\" const_name\n  | Pisint -> fprintf ppf \"isint\"\n  | Pisout -> fprintf ppf \"isout\"\n  | Pbintofint bi -> print_boxed_integer \"of_int\" ppf bi\n  | Pintofbint bi -> print_boxed_integer \"to_int\" ppf bi\n  | Pcvtbint (bi1, bi2) -> print_boxed_integer_conversion ppf bi1 bi2\n  | Pnegbint bi -> print_boxed_integer \"neg\" ppf bi\n  | Paddbint bi -> print_boxed_integer \"add\" ppf bi\n  | Psubbint bi -> print_boxed_integer \"sub\" ppf bi\n  | Pmulbint bi -> print_boxed_integer \"mul\" ppf bi\n  | Pdivbint {size = bi; is_safe = Safe} -> print_boxed_integer \"div\" ppf bi\n  | Pdivbint {size = bi; is_safe = Unsafe} ->\n    print_boxed_integer \"div_unsafe\" ppf 
bi\n  | Pmodbint {size = bi; is_safe = Safe} -> print_boxed_integer \"mod\" ppf bi\n  | Pmodbint {size = bi; is_safe = Unsafe} ->\n    print_boxed_integer \"mod_unsafe\" ppf bi\n  | Pandbint bi -> print_boxed_integer \"and\" ppf bi\n  | Porbint bi -> print_boxed_integer \"or\" ppf bi\n  | Pxorbint bi -> print_boxed_integer \"xor\" ppf bi\n  | Plslbint bi -> print_boxed_integer \"lsl\" ppf bi\n  | Plsrbint bi -> print_boxed_integer \"lsr\" ppf bi\n  | Pasrbint bi -> print_boxed_integer \"asr\" ppf bi\n  | Pbintcomp (bi, Ceq) -> print_boxed_integer \"==\" ppf bi\n  | Pbintcomp (bi, Cneq) -> print_boxed_integer \"!=\" ppf bi\n  | Pbintcomp (bi, Clt) -> print_boxed_integer \"<\" ppf bi\n  | Pbintcomp (bi, Cgt) -> print_boxed_integer \">\" ppf bi\n  | Pbintcomp (bi, Cle) -> print_boxed_integer \"<=\" ppf bi\n  | Pbintcomp (bi, Cge) -> print_boxed_integer \">=\" ppf bi\n  | Popaque -> fprintf ppf \"opaque\"\n  | Pcreate_extension s -> fprintf ppf \"extension[%s]\" s\nlet name_of_primitive = function\n  | Puncurried_apply -> \"Puncurried_apply\"\n  | Pidentity -> \"Pidentity\"\n  | Pbytes_to_string -> \"Pbytes_to_string\"\n  | Pignore -> \"Pignore\"\n  | Prevapply -> \"Prevapply\"\n  | Pdirapply -> \"Pdirapply\"\n  | Ploc _ -> \"Ploc\"\n  | Pgetglobal _ -> \"Pgetglobal\"\n  | Pmakeblock _ -> \"Pmakeblock\"\n  | Pfield _ -> \"Pfield\"\n  | Psetfield _ -> \"Psetfield\"\n  | Pduprecord -> \"Pduprecord\"\n  | Plazyforce -> \"Plazyforce\"\n  | Pccall _ -> \"Pccall\"\n  | Praise _ -> \"Praise\"\n  | Psequand -> \"Psequand\"\n  | Psequor -> \"Psequor\"\n  | Pnot -> \"Pnot\"\n  | Pnegint -> \"Pnegint\"\n  | Paddint -> \"Paddint\"\n  | Psubint -> \"Psubint\"\n  | Pmulint -> \"Pmulint\"\n  | Pdivint _ -> \"Pdivint\"\n  | Pmodint _ -> \"Pmodint\"\n  | Pandint -> \"Pandint\"\n  | Porint -> \"Porint\"\n  | Pxorint -> \"Pxorint\"\n  | Plslint -> \"Plslint\"\n  | Plsrint -> \"Plsrint\"\n  | Pasrint -> \"Pasrint\"\n  | Pintcomp _ -> \"Pintcomp\"\n  | Poffsetint _ -> \"Poffsetint\"\n  | 
Poffsetref _ -> \"Poffsetref\"\n  | Pintoffloat -> \"Pintoffloat\"\n  | Pfloatofint -> \"Pfloatofint\"\n  | Pnegfloat -> \"Pnegfloat\"\n  | Pabsfloat -> \"Pabsfloat\"\n  | Paddfloat -> \"Paddfloat\"\n  | Psubfloat -> \"Psubfloat\"\n  | Pmulfloat -> \"Pmulfloat\"\n  | Pdivfloat -> \"Pdivfloat\"\n  | Pfloatcomp _ -> \"Pfloatcomp\"\n  | Pnegbigint -> \"Pnegbigint\"\n  | Paddbigint -> \"Paddbigint\"\n  | Psubbigint -> \"Psubbigint\"\n  | Pmulbigint -> \"Pmulbigint\"\n  | Pdivbigint -> \"Pdivbigint\"\n  | Pmodbigint -> \"Pmodbigint\"\n  | Ppowbigint -> \"Ppowbigint\"\n  | Pandbigint -> \"Pandbigint\"\n  | Porbigint -> \"Porbigint\"\n  | Pxorbigint -> \"Pxorbigint\"\n  | Plslbigint -> \"Plslbigint\"\n  | Pasrbigint -> \"Pasrbigint\"\n  | Pbigintcomp _ -> \"Pbigintcomp\"\n  | Pstringlength -> \"Pstringlength\"\n  | Pstringrefu -> \"Pstringrefu\"\n  | Pstringrefs -> \"Pstringrefs\"\n  | Pbyteslength -> \"Pbyteslength\"\n  | Pbytesrefu -> \"Pbytesrefu\"\n  | Pbytessetu -> \"Pbytessetu\"\n  | Pbytesrefs -> \"Pbytesrefs\"\n  | Pbytessets -> \"Pbytessets\"\n  | Parraylength -> \"Parraylength\"\n  | Pmakearray _ -> \"Pmakearray\"\n  | Parrayrefu -> \"Parrayrefu\"\n  | Parraysetu -> \"Parraysetu\"\n  | Parrayrefs -> \"Parrayrefs\"\n  | Parraysets -> \"Parraysets\"\n  | Pctconst _ -> \"Pctconst\"\n  | Pisint -> \"Pisint\"\n  | Pisout -> \"Pisout\"\n  | Pbintofint _ -> \"Pbintofint\"\n  | Pintofbint _ -> \"Pintofbint\"\n  | Pcvtbint _ -> \"Pcvtbint\"\n  | Pnegbint _ -> \"Pnegbint\"\n  | Paddbint _ -> \"Paddbint\"\n  | Psubbint _ -> \"Psubbint\"\n  | Pmulbint _ -> \"Pmulbint\"\n  | Pdivbint _ -> \"Pdivbint\"\n  | Pmodbint _ -> \"Pmodbint\"\n  | Pandbint _ -> \"Pandbint\"\n  | Porbint _ -> \"Porbint\"\n  | Pxorbint _ -> \"Pxorbint\"\n  | Plslbint _ -> \"Plslbint\"\n  | Plsrbint _ -> \"Plsrbint\"\n  | Pasrbint _ -> \"Pasrbint\"\n  | Pbintcomp _ -> \"Pbintcomp\"\n  | Popaque -> \"Popaque\"\n  | Pcreate_extension _ -> \"Pcreate_extension\"\n\nlet function_attribute ppf {inline; 
is_a_functor; return_unit} =\n  if is_a_functor then fprintf ppf \"is_a_functor@ \";\n  if return_unit then fprintf ppf \"void@ \";\n  match inline with\n  | Default_inline -> ()\n  | Always_inline -> fprintf ppf \"always_inline@ \"\n  | Never_inline -> fprintf ppf \"never_inline@ \"\n\nlet apply_inlined_attribute ppf = function\n  | Default_inline -> ()\n  | Always_inline -> fprintf ppf \" always_inline\"\n  | Never_inline -> fprintf ppf \" never_inline\"\n\nlet rec lam ppf = function\n  | Lvar id -> Ident.print ppf id\n  | Lconst cst -> struct_const ppf cst\n  | Lapply ap ->\n    let lams ppf largs = List.iter (fun l -> fprintf ppf \"@ %a\" lam l) largs in\n    fprintf ppf \"@[<2>(apply@ %a%a%a)@]\" lam ap.ap_func lams ap.ap_args\n      apply_inlined_attribute ap.ap_inlined\n  | Lfunction {params; body; attr} ->\n    let pr_params ppf params =\n      List.iter (fun param -> fprintf ppf \"@ %a\" Ident.print param) params\n    in\n    fprintf ppf \"@[<2>(function%a@ %a%a)@]\" pr_params params function_attribute\n      attr lam body\n  | Llet (str, k, id, arg, body) ->\n    let kind = function\n      | Alias -> \"a\"\n      | Strict -> \"\"\n      | StrictOpt -> \"o\"\n      | Variable -> \"v\"\n    in\n    let rec letbody = function\n      | Llet (str, k, id, arg, body) ->\n        fprintf ppf \"@ @[<2>%a =%s%s@ %a@]\" Ident.print id (kind str)\n          (value_kind k) lam arg;\n        letbody body\n      | expr -> expr\n    in\n    fprintf ppf \"@[<2>(let@ @[<hv 1>(@[<2>%a =%s%s@ %a@]\" Ident.print id\n      (kind str) (value_kind k) lam arg;\n    let expr = letbody body in\n    fprintf ppf \")@]@ %a)@]\" lam expr\n  | Lletrec (id_arg_list, body) ->\n    let bindings ppf id_arg_list =\n      let spc = ref false in\n      List.iter\n        (fun (id, l) ->\n          if !spc then fprintf ppf \"@ \" else spc := true;\n          fprintf ppf \"@[<2>%a@ %a@]\" Ident.print id lam l)\n        id_arg_list\n    in\n    fprintf ppf \"@[<2>(letrec@ (@[<hv 1>%a@])@ %a)@]\" 
bindings id_arg_list lam\n      body\n  | Lprim (prim, largs, _) ->\n    let lams ppf largs = List.iter (fun l -> fprintf ppf \"@ %a\" lam l) largs in\n    fprintf ppf \"@[<2>(%a%a)@]\" primitive prim lams largs\n  | Lswitch (larg, sw, _loc) ->\n    let switch ppf sw =\n      let spc = ref false in\n      List.iter\n        (fun (n, l) ->\n          if !spc then fprintf ppf \"@ \" else spc := true;\n          fprintf ppf \"@[<hv 1>case int %i:@ %a@]\" n lam l)\n        sw.sw_consts;\n      List.iter\n        (fun (n, l) ->\n          if !spc then fprintf ppf \"@ \" else spc := true;\n          fprintf ppf \"@[<hv 1>case tag %i:@ %a@]\" n lam l)\n        sw.sw_blocks;\n      match sw.sw_failaction with\n      | None -> ()\n      | Some l ->\n        if !spc then fprintf ppf \"@ \" else spc := true;\n        fprintf ppf \"@[<hv 1>default:@ %a@]\" lam l\n    in\n    fprintf ppf \"@[<1>(%s %a@ @[<v 0>%a@])@]\"\n      (match sw.sw_failaction with\n      | None -> \"switch*\"\n      | _ -> \"switch\")\n      lam larg switch sw\n  | Lstringswitch (arg, cases, default, _) ->\n    let switch ppf cases =\n      let spc = ref false in\n      List.iter\n        (fun (s, l) ->\n          if !spc then fprintf ppf \"@ \" else spc := true;\n          fprintf ppf \"@[<hv 1>case \\\"%s\\\":@ %a@]\" (String.escaped s) lam l)\n        cases;\n      match default with\n      | Some default ->\n        if !spc then fprintf ppf \"@ \" else spc := true;\n        fprintf ppf \"@[<hv 1>default:@ %a@]\" lam default\n      | None -> ()\n    in\n    fprintf ppf \"@[<1>(stringswitch %a@ @[<v 0>%a@])@]\" lam arg switch cases\n  | Lstaticraise (i, ls) ->\n    let lams ppf largs = List.iter (fun l -> fprintf ppf \"@ %a\" lam l) largs in\n    fprintf ppf \"@[<2>(exit@ %d%a)@]\" i lams ls\n  | Lstaticcatch (lbody, (i, vars), lhandler) ->\n    fprintf ppf \"@[<2>(catch@ %a@;<1 -1>with (%d%a)@ %a)@]\" lam lbody i\n      (fun ppf vars ->\n        match vars with\n        | [] -> ()\n        | _ -> 
List.iter (fun x -> fprintf ppf \" %a\" Ident.print x) vars)\n      vars lam lhandler\n  | Ltrywith (lbody, param, lhandler) ->\n    fprintf ppf \"@[<2>(try@ %a@;<1 -1>with %a@ %a)@]\" lam lbody Ident.print\n      param lam lhandler\n  | Lifthenelse (lcond, lif, lelse) ->\n    fprintf ppf \"@[<2>(if@ %a@ %a@ %a)@]\" lam lcond lam lif lam lelse\n  | Lsequence (l1, l2) -> fprintf ppf \"@[<2>(seq@ %a@ %a)@]\" lam l1 sequence l2\n  | Lwhile (lcond, lbody) ->\n    fprintf ppf \"@[<2>(while@ %a@ %a)@]\" lam lcond lam lbody\n  | Lfor (param, lo, hi, dir, body) ->\n    fprintf ppf \"@[<2>(for %a@ %a@ %s@ %a@ %a)@]\" Ident.print param lam lo\n      (match dir with\n      | Upto -> \"to\"\n      | Downto -> \"downto\")\n      lam hi lam body\n  | Lassign (id, expr) ->\n    fprintf ppf \"@[<2>(assign@ %a@ %a)@]\" Ident.print id lam expr\n  | Lsend (name, obj, _) -> fprintf ppf \"@[<2>(send%s@ %a@ )@]\" name lam obj\n\nand sequence ppf = function\n  | Lsequence (l1, l2) -> fprintf ppf \"%a@ %a\" sequence l1 sequence l2\n  | l -> lam ppf l\n\nlet structured_constant = struct_const\n\nlet lambda = lam\n"
  },
  {
    "path": "analysis/vendor/ml/printlambda.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Lambda\n\nopen Format\n\nval structured_constant : formatter -> structured_constant -> unit\nval lambda : formatter -> lambda -> unit\n\nval primitive : formatter -> primitive -> unit\nval name_of_primitive : primitive -> string\nval value_kind : value_kind -> string\n"
  },
  {
    "path": "analysis/vendor/ml/printtyp.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*  Xavier Leroy and Jerome Vouillon, projet Cristal, INRIA Rocquencourt  *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Printing functions *)\n\nopen Misc\nopen Ctype\nopen Format\nopen Longident\nopen Path\nopen Asttypes\nopen Types\nopen Btype\nopen Outcometree\n\nlet print_res_poly_identifier : (string -> string) ref =\n  ref (fun _ -> assert false)\n\n(* Print a long identifier *)\n\nlet rec longident ppf = function\n  | Lident s -> pp_print_string ppf s\n  | Ldot (p, s) -> fprintf ppf \"%a.%s\" longident p s\n  | Lapply (p1, p2) -> fprintf ppf \"%a(%a)\" longident p1 longident p2\n\n(* Print an identifier *)\n\nlet unique_names = ref Ident.empty\n\nlet ident_name id =\n  try Ident.find_same id !unique_names with Not_found -> Ident.name id\n\nlet add_unique id =\n  try ignore (Ident.find_same id !unique_names)\n  with Not_found ->\n    unique_names := Ident.add id (Ident.unique_toplevel_name id) !unique_names\n\nlet ident ppf id = pp_print_string ppf (ident_name id)\n\n(* Print a path 
*)\n\nlet ident_pervasives = Ident.create_persistent \"Pervasives\"\nlet ident_pervasives_u = Ident.create_persistent \"PervasivesU\"\nlet printing_env = ref Env.empty\nlet non_shadowed_pervasive = function\n  | Pdot (Pident id, s, _pos) as path -> (\n    (Ident.same id ident_pervasives || Ident.same id ident_pervasives_u)\n    &&\n    try Path.same path (Env.lookup_type (Lident s) !printing_env)\n    with Not_found -> true)\n  | _ -> false\n\nlet rec tree_of_path = function\n  | Pident id -> Oide_ident (ident_name id)\n  | Pdot (_, s, _pos) as path when non_shadowed_pervasive path -> Oide_ident s\n  | Pdot (p, s, _pos) -> Oide_dot (tree_of_path p, s)\n  | Papply (p1, p2) -> Oide_apply (tree_of_path p1, tree_of_path p2)\n\nlet rec path ppf = function\n  | Pident id -> ident ppf id\n  | Pdot (_, s, _pos) as path when non_shadowed_pervasive path ->\n    pp_print_string ppf s\n  | Pdot (p, s, _pos) ->\n    path ppf p;\n    pp_print_char ppf '.';\n    pp_print_string ppf s\n  | Papply (p1, p2) -> fprintf ppf \"%a(%a)\" path p1 path p2\n\nlet rec string_of_out_ident = function\n  | Oide_ident s -> s\n  | Oide_dot (id, s) -> String.concat \".\" [string_of_out_ident id; s]\n  | Oide_apply (id1, id2) ->\n    String.concat \"\"\n      [string_of_out_ident id1; \"(\"; string_of_out_ident id2; \")\"]\n\nlet string_of_path p = string_of_out_ident (tree_of_path p)\n\n(* Print a recursive annotation *)\n\nlet tree_of_rec = function\n  | Trec_not -> Orec_not\n  | Trec_first -> Orec_first\n  | Trec_next -> Orec_next\n\n(* Print a raw type expression, with sharing *)\n\nlet raw_list pr ppf = function\n  | [] -> fprintf ppf \"[]\"\n  | a :: l ->\n    fprintf ppf \"@[<1>[%a%t]@]\" pr a (fun ppf ->\n        List.iter (fun x -> fprintf ppf \";@,%a\" pr x) l)\n\nlet kind_vars = ref []\nlet kind_count = ref 0\n\nlet rec safe_kind_repr v = function\n  | Fvar {contents = Some k} ->\n    if List.memq k v then \"Fvar loop\" else safe_kind_repr (k :: v) k\n  | Fvar r ->\n    let vid =\n      
try List.assq r !kind_vars\n      with Not_found ->\n        let c =\n          incr kind_count;\n          !kind_count\n        in\n        kind_vars := (r, c) :: !kind_vars;\n        c\n    in\n    Printf.sprintf \"Fvar {None}@%d\" vid\n  | Fpresent -> \"Fpresent\"\n  | Fabsent -> \"Fabsent\"\n\nlet rec safe_commu_repr v = function\n  | Cok -> \"Cok\"\n  | Cunknown -> \"Cunknown\"\n  | Clink r ->\n    if List.memq r v then \"Clink loop\" else safe_commu_repr (r :: v) !r\n\nlet rec safe_repr v = function\n  | {desc = Tlink t} when not (List.memq t v) -> safe_repr (t :: v) t\n  | t -> t\n\nlet rec list_of_memo = function\n  | Mnil -> []\n  | Mcons (_priv, p, _t1, _t2, rem) -> p :: list_of_memo rem\n  | Mlink rem -> list_of_memo !rem\n\nlet print_name ppf = function\n  | None -> fprintf ppf \"None\"\n  | Some name -> fprintf ppf \"\\\"%s\\\"\" name\n\nlet string_of_label = function\n  | Nolabel -> \"\"\n  | Labelled s -> s\n  | Optional s -> \"?\" ^ s\n\nlet visited = ref []\nlet rec raw_type ppf ty =\n  let ty = safe_repr [] ty in\n  if List.memq ty !visited then fprintf ppf \"{id=%d}\" ty.id\n  else (\n    visited := ty :: !visited;\n    fprintf ppf \"@[<1>{id=%d;level=%d;desc=@,%a}@]\" ty.id ty.level raw_type_desc\n      ty.desc)\n\nand raw_type_list tl = raw_list raw_type tl\n\nand raw_type_desc ppf = function\n  | Tvar name -> fprintf ppf \"Tvar %a\" print_name name\n  | Tarrow (l, t1, t2, c) ->\n    fprintf ppf \"@[<hov1>Tarrow(\\\"%s\\\",@,%a,@,%a,@,%s)@]\" (string_of_label l)\n      raw_type t1 raw_type t2 (safe_commu_repr [] c)\n  | Ttuple tl -> fprintf ppf \"@[<1>Ttuple@,%a@]\" raw_type_list tl\n  | Tconstr (p, tl, abbrev) ->\n    fprintf ppf \"@[<hov1>Tconstr(@,%a,@,%a,@,%a)@]\" path p raw_type_list tl\n      (raw_list path) (list_of_memo !abbrev)\n  | Tobject (t, nm) ->\n    fprintf ppf \"@[<hov1>Tobject(@,%a,@,@[<1>ref%t@])@]\" raw_type t (fun ppf ->\n        match !nm with\n        | None -> fprintf ppf \" None\"\n        | Some (p, tl) ->\n          
fprintf ppf \"(Some(@,%a,@,%a))\" path p raw_type_list tl)\n  | Tfield (f, k, t1, t2) ->\n    fprintf ppf \"@[<hov1>Tfield(@,%s,@,%s,@,%a,@;<0 -1>%a)@]\" f\n      (safe_kind_repr [] k) raw_type t1 raw_type t2\n  | Tnil -> fprintf ppf \"Tnil\"\n  | Tlink t -> fprintf ppf \"@[<1>Tlink@,%a@]\" raw_type t\n  | Tsubst t -> fprintf ppf \"@[<1>Tsubst@,%a@]\" raw_type t\n  | Tunivar name -> fprintf ppf \"Tunivar %a\" print_name name\n  | Tpoly (t, tl) ->\n    fprintf ppf \"@[<hov1>Tpoly(@,%a,@,%a)@]\" raw_type t raw_type_list tl\n  | Tvariant row ->\n    fprintf ppf\n      \"@[<hov1>{@[%s@,%a;@]@ @[%s@,%a;@]@ %s%B;@ %s%B;@ @[<1>%s%t@]}@]\"\n      \"row_fields=\"\n      (raw_list (fun ppf (l, f) -> fprintf ppf \"@[%s,@ %a@]\" l raw_field f))\n      row.row_fields \"row_more=\" raw_type row.row_more \"row_closed=\"\n      row.row_closed \"row_fixed=\" row.row_fixed \"row_name=\"\n      (fun ppf ->\n        match row.row_name with\n        | None -> fprintf ppf \"None\"\n        | Some (p, tl) -> fprintf ppf \"Some(@,%a,@,%a)\" path p raw_type_list tl)\n  | Tpackage (p, _, tl) ->\n    fprintf ppf \"@[<hov1>Tpackage(@,%a@,%a)@]\" path p raw_type_list tl\n\nand raw_field ppf = function\n  | Rpresent None -> fprintf ppf \"Rpresent None\"\n  | Rpresent (Some t) -> fprintf ppf \"@[<1>Rpresent(Some@,%a)@]\" raw_type t\n  | Reither (c, tl, m, e) ->\n    fprintf ppf \"@[<hov1>Reither(%B,@,%a,@,%B,@,@[<1>ref%t@])@]\" c raw_type_list\n      tl m (fun ppf ->\n        match !e with\n        | None -> fprintf ppf \" None\"\n        | Some f -> fprintf ppf \"@,@[<1>(%a)@]\" raw_field f)\n  | Rabsent -> fprintf ppf \"Rabsent\"\n\nlet raw_type_expr ppf t =\n  visited := [];\n  kind_vars := [];\n  kind_count := 0;\n  raw_type ppf t;\n  visited := [];\n  kind_vars := []\n\nlet () = Btype.print_raw := raw_type_expr\n\n(* Normalize paths *)\n\ntype param_subst = Id | Nth of int | Map of int list\n\nlet is_nth = function\n  | Nth _ -> true\n  | _ -> false\n\nlet compose l1 = function\n  | Id -> 
Map l1\n  | Map l2 -> Map (List.map (List.nth l1) l2)\n  | Nth n -> Nth (List.nth l1 n)\n\nlet apply_subst s1 tyl =\n  if tyl = [] then []\n    (* cf. PR#7543: Typemod.type_package doesn't respect type constructor arity *)\n  else\n    match s1 with\n    | Nth n1 -> [List.nth tyl n1]\n    | Map l1 -> List.map (List.nth tyl) l1\n    | Id -> tyl\n\ntype best_path = Paths of Path.t list | Best of Path.t\n\nlet printing_depth = ref 0\nlet printing_cont = ref ([] : Env.iter_cont list)\nlet printing_old = ref Env.empty\nlet printing_pers = ref Concr.empty\nmodule PathMap = Map.Make (Path)\nlet printing_map = ref PathMap.empty\n\nlet same_type t t' = repr t == repr t'\n\nlet rec index l x =\n  match l with\n  | [] -> raise Not_found\n  | a :: l -> if x == a then 0 else 1 + index l x\n\nlet rec uniq = function\n  | [] -> true\n  | a :: l -> (not (List.memq a l)) && uniq l\n\nlet rec normalize_type_path ?(cache = false) env p =\n  try\n    let params, ty, _ = Env.find_type_expansion p env in\n    let params = List.map repr params in\n    match repr ty with\n    | {desc = Tconstr (p1, tyl, _)} ->\n      let tyl = List.map repr tyl in\n      if List.length params = List.length tyl && List.for_all2 ( == ) params tyl\n      then normalize_type_path ~cache env p1\n      else if cache || List.length params <= List.length tyl || not (uniq tyl)\n      then (p, Id)\n      else\n        let l1 = List.map (index params) tyl in\n        let p2, s2 = normalize_type_path ~cache env p1 in\n        (p2, compose l1 s2)\n    | ty -> (p, Nth (index params ty))\n  with Not_found -> (Env.normalize_path None env p, Id)\n\nlet penalty s =\n  if s <> \"\" && s.[0] = '_' then 10\n  else\n    try\n      for i = 0 to String.length s - 2 do\n        if s.[i] = '_' && s.[i + 1] = '_' then raise Exit\n      done;\n      1\n    with Exit -> 10\n\nlet rec path_size = function\n  | Pident id -> (penalty (Ident.name id), -Ident.binding_time id)\n  | Pdot (p, _, _) ->\n    let l, b = path_size p in\n    (1 + 
l, b)\n  | Papply (p1, p2) ->\n    let l, b = path_size p1 in\n    (l + fst (path_size p2), b)\n\nlet same_printing_env env =\n  let used_pers = Env.used_persistent () in\n  Env.same_types !printing_old env && Concr.equal !printing_pers used_pers\n\nlet set_printing_env env =\n  printing_env := env;\n  if !Clflags.real_paths || !printing_env == Env.empty || same_printing_env env\n  then ()\n  else (\n    (* printf \"Reset printing_map@.\"; *)\n    printing_old := env;\n    printing_pers := Env.used_persistent ();\n    printing_map := PathMap.empty;\n    printing_depth := 0;\n    (* printf \"Recompute printing_map.@.\"; *)\n    let cont =\n      Env.iter_types\n        (fun p (p', _decl) ->\n          let p1, s1 = normalize_type_path env p' ~cache:true in\n          (* Format.eprintf \"%a -> %a = %a@.\" path p path p' path p1 *)\n          if s1 = Id then\n            try\n              let r = PathMap.find p1 !printing_map in\n              match !r with\n              | Paths l -> r := Paths (p :: l)\n              | Best p' -> r := Paths [p; p']\n              (* assert false *)\n            with Not_found ->\n              printing_map := PathMap.add p1 (ref (Paths [p])) !printing_map)\n        env\n    in\n    printing_cont := [cont])\n\nlet wrap_printing_env env f =\n  set_printing_env env;\n  try_finally f (fun () -> set_printing_env Env.empty)\n\nlet wrap_printing_env env f = Env.without_cmis (wrap_printing_env env) f\n\nlet is_unambiguous path env =\n  let l = Env.find_shadowed_types path env in\n  List.exists (Path.same path) l\n  ||\n  (* concrete paths are ok *)\n  match l with\n  | [] -> true\n  | p :: rem ->\n    (* allow also coherent paths:  *)\n    let normalize p = fst (normalize_type_path ~cache:true env p) in\n    let p' = normalize p in\n    List.for_all (fun p -> Path.same (normalize p) p') rem\n    ||\n    (* also allow repeatedly defining and opening (for toplevel) *)\n    let id = lid_of_path p in\n    List.for_all (fun p -> lid_of_path p = 
id) rem\n    && Path.same p (Env.lookup_type id env)\n\nlet rec get_best_path r =\n  match !r with\n  | Best p' -> p'\n  | Paths [] -> raise Not_found\n  | Paths l ->\n    r := Paths [];\n    List.iter\n      (fun p ->\n        (* Format.eprintf \"evaluating %a@.\" path p; *)\n        match !r with\n        | Best p' when path_size p >= path_size p' -> ()\n        | _ -> if is_unambiguous p !printing_env then r := Best p)\n        (* else Format.eprintf \"%a ignored as ambiguous@.\" path p *)\n      l;\n    get_best_path r\n\nlet best_type_path p =\n  if !Clflags.real_paths || !printing_env == Env.empty then (p, Id)\n  else\n    let p', s = normalize_type_path !printing_env p in\n    let get_path () = get_best_path (PathMap.find p' !printing_map) in\n    while\n      !printing_cont <> []\n      &&\n      try fst (path_size (get_path ())) > !printing_depth\n      with Not_found -> true\n    do\n      printing_cont := List.map snd (Env.run_iter_cont !printing_cont);\n      incr printing_depth\n    done;\n    let p'' = try get_path () with Not_found -> p' in\n    (* Format.eprintf \"%a = %a -> %a@.\" path p path p' path p''; *)\n    (p'', s)\n\n(* Print a type expression *)\n\nlet names = ref ([] : (type_expr * string) list)\nlet name_counter = ref 0\nlet named_vars = ref ([] : string list)\n\nlet weak_counter = ref 1\nlet weak_var_map = ref TypeMap.empty\nlet named_weak_vars = ref StringSet.empty\n\nlet reset_names () =\n  names := [];\n  name_counter := 0;\n  named_vars := []\nlet add_named_var ty =\n  match ty.desc with\n  | Tvar (Some name) | Tunivar (Some name) ->\n    if List.mem name !named_vars then () else named_vars := name :: !named_vars\n  | _ -> ()\n\nlet name_is_already_used name =\n  List.mem name !named_vars\n  || List.exists (fun (_, name') -> name = name') !names\n  || StringSet.mem name !named_weak_vars\n\nlet rec new_name () =\n  let name =\n    if !name_counter < 26 then String.make 1 (Char.chr (97 + !name_counter))\n    else\n      String.make 1 
(Char.chr (97 + (!name_counter mod 26)))\n      ^ string_of_int (!name_counter / 26)\n  in\n  incr name_counter;\n  if name_is_already_used name then new_name () else name\n\nlet rec new_weak_name ty () =\n  let name = \"weak\" ^ string_of_int !weak_counter in\n  incr weak_counter;\n  if name_is_already_used name then new_weak_name ty ()\n  else (\n    named_weak_vars := StringSet.add name !named_weak_vars;\n    weak_var_map := TypeMap.add ty name !weak_var_map;\n    name)\n\nlet name_of_type name_generator t =\n  (* We've already been through repr at this stage, so t is our representative\n     of the union-find class. *)\n  try List.assq t !names\n  with Not_found -> (\n    try TypeMap.find t !weak_var_map\n    with Not_found ->\n      let name =\n        match t.desc with\n        | Tvar (Some name) | Tunivar (Some name) ->\n          (* Some part of the type we've already printed has assigned another\n           * unification variable to that name. We want to keep the name, so try\n           * adding a number until we find a name that's not taken. 
*)\n          let current_name = ref name in\n          let i = ref 0 in\n          while List.exists (fun (_, name') -> !current_name = name') !names do\n            current_name := name ^ string_of_int !i;\n            i := !i + 1\n          done;\n          !current_name\n        | _ ->\n          (* No name available, create a new one *)\n          name_generator ()\n      in\n      (* Exception for type declarations *)\n      if name <> \"_\" then names := (t, name) :: !names;\n      name)\n\nlet check_name_of_type t = ignore (name_of_type new_name t)\n\nlet remove_names tyl =\n  let tyl = List.map repr tyl in\n  names := Ext_list.filter !names (fun (ty, _) -> not (List.memq ty tyl))\n\nlet visited_objects = ref ([] : type_expr list)\nlet aliased = ref ([] : type_expr list)\nlet delayed = ref ([] : type_expr list)\n\nlet add_delayed t = if not (List.memq t !delayed) then delayed := t :: !delayed\n\nlet is_aliased ty = List.memq (proxy ty) !aliased\nlet add_alias ty =\n  let px = proxy ty in\n  if not (is_aliased px) then (\n    aliased := px :: !aliased;\n    add_named_var px)\n\nlet aliasable ty =\n  match ty.desc with\n  | Tvar _ | Tunivar _ | Tpoly _ -> false\n  | Tconstr (p, _, _) -> not (is_nth (snd (best_type_path p)))\n  | _ -> true\n\nlet namable_row row =\n  row.row_name <> None\n  && List.for_all\n       (fun (_, f) ->\n         match row_field_repr f with\n         | Reither (c, l, _, _) ->\n           row.row_closed && if c then l = [] else List.length l = 1\n         | _ -> true)\n       row.row_fields\n\nlet rec mark_loops_rec visited ty =\n  let ty = repr ty in\n  let px = proxy ty in\n  if List.memq px visited && aliasable ty then add_alias px\n  else\n    let visited = px :: visited in\n    match ty.desc with\n    | Tvar _ -> add_named_var ty\n    | Tarrow (_, ty1, ty2, _) ->\n      mark_loops_rec visited ty1;\n      mark_loops_rec visited ty2\n    | Ttuple tyl -> List.iter (mark_loops_rec visited) tyl\n    | Tconstr (p, tyl, _) ->\n      let 
_p', s = best_type_path p in\n      List.iter (mark_loops_rec visited) (apply_subst s tyl)\n    | Tpackage (_, _, tyl) -> List.iter (mark_loops_rec visited) tyl\n    | Tvariant row -> (\n      if List.memq px !visited_objects then add_alias px\n      else\n        let row = row_repr row in\n        if not (static_row row) then visited_objects := px :: !visited_objects;\n        match row.row_name with\n        | Some (_p, tyl) when namable_row row ->\n          List.iter (mark_loops_rec visited) tyl\n        | _ -> iter_row (mark_loops_rec visited) row)\n    | Tobject (fi, nm) ->\n      if List.memq px !visited_objects then add_alias px\n      else (\n        if opened_object ty then visited_objects := px :: !visited_objects;\n        match !nm with\n        | None ->\n          let fields, _ = flatten_fields fi in\n          List.iter\n            (fun (_, kind, ty) ->\n              if field_kind_repr kind = Fpresent then mark_loops_rec visited ty)\n            fields\n        | Some (_, l) -> List.iter (mark_loops_rec visited) (List.tl l))\n    | Tfield (_, kind, ty1, ty2) when field_kind_repr kind = Fpresent ->\n      mark_loops_rec visited ty1;\n      mark_loops_rec visited ty2\n    | Tfield (_, _, _, ty2) -> mark_loops_rec visited ty2\n    | Tnil -> ()\n    | Tsubst ty -> mark_loops_rec visited ty\n    | Tlink _ -> fatal_error \"Printtyp.mark_loops_rec (2)\"\n    | Tpoly (ty, tyl) ->\n      List.iter (fun t -> add_alias t) tyl;\n      mark_loops_rec visited ty\n    | Tunivar _ -> add_named_var ty\n\nlet mark_loops ty =\n  normalize_type Env.empty ty;\n  mark_loops_rec [] ty\n\nlet reset_loop_marks () =\n  visited_objects := [];\n  aliased := [];\n  delayed := []\n\nlet reset () =\n  unique_names := Ident.empty;\n  reset_names ();\n  reset_loop_marks ()\n\nlet reset_and_mark_loops ty =\n  reset ();\n  mark_loops ty\n\nlet reset_and_mark_loops_list tyl =\n  reset ();\n  List.iter mark_loops tyl\n\n(* Disabled in classic mode when printing an unification error 
*)\n\nlet rec tree_of_typexp sch ty =\n  let ty = repr ty in\n  let px = proxy ty in\n  if List.mem_assq px !names && not (List.memq px !delayed) then\n    let mark = is_non_gen sch ty in\n    let name = name_of_type (if mark then new_weak_name ty else new_name) px in\n    Otyp_var (mark, name)\n  else\n    let pr_typ () =\n      match ty.desc with\n      | Tvar _ ->\n        (*let lev =\n          if is_non_gen sch ty then \"/\" ^ string_of_int ty.level else \"\" in*)\n        let non_gen = is_non_gen sch ty in\n        let name_gen = if non_gen then new_weak_name ty else new_name in\n        Otyp_var (non_gen, name_of_type name_gen ty)\n      | Tarrow (l, ty1, ty2, _) ->\n        let pr_arrow l ty1 ty2 =\n          let lab = string_of_label l in\n          let t1 =\n            if is_optional l then\n              match (repr ty1).desc with\n              | Tconstr (path, [ty], _) when Path.same path Predef.path_option\n                ->\n                tree_of_typexp sch ty\n              | _ -> Otyp_stuff \"<hidden>\"\n            else tree_of_typexp sch ty1\n          in\n          Otyp_arrow (lab, t1, tree_of_typexp sch ty2)\n        in\n        pr_arrow l ty1 ty2\n      | Ttuple tyl -> Otyp_tuple (tree_of_typlist sch tyl)\n      | Tconstr (p, tyl, _abbrev) ->\n        let p', s = best_type_path p in\n        let tyl' = apply_subst s tyl in\n        if is_nth s && not (tyl' = []) then tree_of_typexp sch (List.hd tyl')\n        else Otyp_constr (tree_of_path p', tree_of_typlist sch tyl')\n      | Tvariant row -> (\n        let row = row_repr row in\n        let fields =\n          if row.row_closed then\n            Ext_list.filter row.row_fields (fun (_, f) ->\n                row_field_repr f <> Rabsent)\n          else row.row_fields\n        in\n        let present =\n          Ext_list.filter fields (fun (_, f) ->\n              match row_field_repr f with\n              | Rpresent _ -> true\n              | _ -> false)\n        in\n        let 
all_present = List.length present = List.length fields in\n        match row.row_name with\n        | Some (p, tyl) when namable_row row ->\n          let p', s = best_type_path p in\n          let id = tree_of_path p' in\n          let args = tree_of_typlist sch (apply_subst s tyl) in\n          let out_variant =\n            if is_nth s then List.hd args else Otyp_constr (id, args)\n          in\n          if row.row_closed && all_present then out_variant\n          else\n            let non_gen = is_non_gen sch px in\n            let tags =\n              if all_present then None else Some (List.map fst present)\n            in\n            Otyp_variant (non_gen, Ovar_typ out_variant, row.row_closed, tags)\n        | _ ->\n          let non_gen =\n            (not (row.row_closed && all_present)) && is_non_gen sch px\n          in\n          let fields = List.map (tree_of_row_field sch) fields in\n          let tags =\n            if all_present then None else Some (List.map fst present)\n          in\n          Otyp_variant (non_gen, Ovar_fields fields, row.row_closed, tags))\n      | Tobject (fi, nm) -> tree_of_typobject sch fi !nm\n      | Tnil | Tfield _ -> tree_of_typobject sch ty None\n      | Tsubst ty -> tree_of_typexp sch ty\n      | Tlink _ -> fatal_error \"Printtyp.tree_of_typexp\"\n      | Tpoly (ty, []) -> tree_of_typexp sch ty\n      | Tpoly (ty, tyl) ->\n        (*let print_names () =\n          List.iter (fun (_, name) -> prerr_string (name ^ \" \")) !names;\n          prerr_string \"; \" in *)\n        let tyl = List.map repr tyl in\n        if tyl = [] then tree_of_typexp sch ty\n        else\n          let old_delayed = !delayed in\n          (* Make the names delayed, so that the real type is\n             printed once when used as proxy *)\n          List.iter add_delayed tyl;\n          let tl = List.map (name_of_type new_name) tyl in\n          let tr = Otyp_poly (tl, tree_of_typexp sch ty) in\n          (* Forget names when we leave scope 
*)\n          remove_names tyl;\n          delayed := old_delayed;\n          tr\n      | Tunivar _ -> Otyp_var (false, name_of_type new_name ty)\n      | Tpackage (p, n, tyl) ->\n        let n =\n          List.map (fun li -> String.concat \".\" (Longident.flatten li)) n\n        in\n        Otyp_module (Path.name p, n, tree_of_typlist sch tyl)\n    in\n    if List.memq px !delayed then\n      delayed := Ext_list.filter !delayed (( != ) px);\n    if is_aliased px && aliasable ty then (\n      check_name_of_type px;\n      Otyp_alias (pr_typ (), name_of_type new_name px))\n    else pr_typ ()\n\nand tree_of_row_field sch (l, f) =\n  match row_field_repr f with\n  | Rpresent None | Reither (true, [], _, _) -> (l, false, [])\n  | Rpresent (Some ty) -> (l, false, [tree_of_typexp sch ty])\n  | Reither (c, tyl, _, _) ->\n    if c (* contradiction: constant constructor with an argument *) then\n      (l, true, tree_of_typlist sch tyl)\n    else (l, false, tree_of_typlist sch tyl)\n  | Rabsent -> (l, false, [] (* actually, an error *))\n\nand tree_of_typlist sch tyl = List.map (tree_of_typexp sch) tyl\n\nand tree_of_typobject sch fi nm =\n  match nm with\n  | None ->\n    let pr_fields fi =\n      let fields, rest = flatten_fields fi in\n      let present_fields =\n        List.fold_right\n          (fun (n, k, t) l ->\n            match field_kind_repr k with\n            | Fpresent -> (n, t) :: l\n            | _ -> l)\n          fields []\n      in\n      let sorted_fields =\n        List.sort (fun (n, _) (n', _) -> String.compare n n') present_fields\n      in\n      tree_of_typfields sch rest sorted_fields\n    in\n    let fields, rest = pr_fields fi in\n    Otyp_object (fields, rest)\n  | Some (p, ty :: tyl) ->\n    let non_gen = is_non_gen sch (repr ty) in\n    let args = tree_of_typlist sch tyl in\n    let p', s = best_type_path p in\n    assert (s = Id);\n    Otyp_class (non_gen, tree_of_path p', args)\n  | _ -> fatal_error \"Printtyp.tree_of_typobject\"\n\nand 
is_non_gen sch ty = sch && is_Tvar ty && ty.level <> generic_level\n\nand tree_of_typfields sch rest = function\n  | [] ->\n    let rest =\n      match rest.desc with\n      | Tvar _ | Tunivar _ -> Some (is_non_gen sch rest)\n      | Tconstr _ -> Some false\n      | Tnil -> None\n      | _ -> fatal_error \"typfields (1)\"\n    in\n    ([], rest)\n  | (s, t) :: l ->\n    let field = (s, tree_of_typexp sch t) in\n    let fields, rest = tree_of_typfields sch rest l in\n    (field :: fields, rest)\n\nlet typexp sch ppf ty = !Oprint.out_type ppf (tree_of_typexp sch ty)\n\nlet type_expr ppf ty = typexp false ppf ty\n\nand type_sch ppf ty = typexp true ppf ty\n\nand type_scheme ppf ty =\n  reset_and_mark_loops ty;\n  typexp true ppf ty\n\n(* Maxence *)\nlet type_scheme_max ?(b_reset_names = true) ppf ty =\n  if b_reset_names then reset_names ();\n  typexp true ppf ty\n(* End Maxence *)\n\nlet tree_of_type_scheme ty =\n  reset_and_mark_loops ty;\n  tree_of_typexp true ty\n\n(* Print one type declaration *)\n\nlet tree_of_constraints params =\n  List.fold_right\n    (fun ty list ->\n      let ty' = unalias ty in\n      if proxy ty != proxy ty' then\n        let tr = tree_of_typexp true ty in\n        (tr, tree_of_typexp true ty') :: list\n      else list)\n    params []\n\nlet filter_params tyl =\n  let params =\n    List.fold_left\n      (fun tyl ty ->\n        let ty = repr ty in\n        if List.memq ty tyl then Btype.newgenty (Tsubst ty) :: tyl\n        else ty :: tyl)\n      [] tyl\n  in\n  List.rev params\n\nlet mark_loops_constructor_arguments = function\n  | Cstr_tuple l -> List.iter mark_loops l\n  | Cstr_record l -> List.iter (fun l -> mark_loops l.ld_type) l\n\nlet rec tree_of_type_decl id decl =\n  reset ();\n\n  let params = filter_params decl.type_params in\n\n  (match decl.type_manifest with\n  | Some ty ->\n    let vars = free_variables ty in\n    List.iter\n      (function\n        | {desc = Tvar (Some \"_\")} as ty ->\n          if List.memq ty vars then 
ty.desc <- Tvar None\n        | _ -> ())\n      params\n  | None -> ());\n\n  List.iter add_alias params;\n  List.iter mark_loops params;\n  List.iter check_name_of_type (List.map proxy params);\n  let ty_manifest =\n    match decl.type_manifest with\n    | None -> None\n    | Some ty ->\n      let ty =\n        (* Special hack to hide variant name *)\n        match repr ty with\n        | {desc = Tvariant row} -> (\n          let row = row_repr row in\n          match row.row_name with\n          | Some (Pident id', _) when Ident.same id id' ->\n            newgenty (Tvariant {row with row_name = None})\n          | _ -> ty)\n        | _ -> ty\n      in\n      mark_loops ty;\n      Some ty\n  in\n  (match decl.type_kind with\n  | Type_abstract -> ()\n  | Type_variant cstrs ->\n    List.iter\n      (fun c ->\n        mark_loops_constructor_arguments c.cd_args;\n        may mark_loops c.cd_res)\n      cstrs\n  | Type_record (l, _rep) -> List.iter (fun l -> mark_loops l.ld_type) l\n  | Type_open -> ());\n\n  let type_param = function\n    | Otyp_var (_, id) -> id\n    | _ -> \"?\"\n  in\n  let type_defined decl =\n    let abstr =\n      match decl.type_kind with\n      | Type_abstract ->\n        decl.type_manifest = None || decl.type_private = Private\n      | Type_record _ -> decl.type_private = Private\n      | Type_variant tll ->\n        decl.type_private = Private\n        || List.exists (fun cd -> cd.cd_res <> None) tll\n      | Type_open -> decl.type_manifest = None\n    in\n    let vari =\n      List.map2\n        (fun ty v ->\n          if abstr || not (is_Tvar (repr ty)) then Variance.get_upper v\n          else (true, true))\n        decl.type_params decl.type_variance\n    in\n    ( Ident.name id,\n      List.map2\n        (fun ty cocn -> (type_param (tree_of_typexp false ty), cocn))\n        params vari )\n  in\n  let tree_of_manifest ty1 =\n    match ty_manifest with\n    | None -> ty1\n    | Some ty -> Otyp_manifest (tree_of_typexp false ty, ty1)\n  
in\n  let name, args = type_defined decl in\n  let constraints = tree_of_constraints params in\n  let ty, priv =\n    match decl.type_kind with\n    | Type_abstract -> (\n      match ty_manifest with\n      | None -> (Otyp_abstract, Public)\n      | Some ty -> (tree_of_typexp false ty, decl.type_private))\n    | Type_variant cstrs ->\n      ( tree_of_manifest (Otyp_sum (List.map tree_of_constructor cstrs)),\n        decl.type_private )\n    | Type_record (lbls, _rep) ->\n      ( tree_of_manifest (Otyp_record (List.map tree_of_label lbls)),\n        decl.type_private )\n    | Type_open -> (tree_of_manifest Otyp_open, decl.type_private)\n  in\n  let immediate = Builtin_attributes.immediate decl.type_attributes in\n  {\n    otype_name = name;\n    otype_params = args;\n    otype_type = ty;\n    otype_private = priv;\n    otype_immediate = immediate;\n    otype_unboxed = decl.type_unboxed.unboxed;\n    otype_cstrs = constraints;\n  }\n\nand tree_of_constructor_arguments = function\n  | Cstr_tuple l -> tree_of_typlist false l\n  | Cstr_record l -> [Otyp_record (List.map tree_of_label l)]\n\nand tree_of_constructor cd =\n  let name = Ident.name cd.cd_id in\n  let arg () = tree_of_constructor_arguments cd.cd_args in\n  match cd.cd_res with\n  | None -> (name, arg (), None)\n  | Some res ->\n    let nm = !names in\n    names := [];\n    let ret = tree_of_typexp false res in\n    let args = arg () in\n    names := nm;\n    (name, args, Some ret)\n\nand tree_of_label l =\n  let opt =\n    l.ld_attributes\n    |> List.exists (fun ({txt}, _) ->\n           txt = \"ns.optional\" || txt = \"res.optional\")\n  in\n  let typ =\n    match l.ld_type.desc with\n    | Tconstr (p, [t1], _) when opt && Path.same p Predef.path_option -> t1\n    | _ -> l.ld_type\n  in\n  (Ident.name l.ld_id, l.ld_mutable = Mutable, opt, tree_of_typexp false typ)\n\nlet tree_of_type_declaration id decl rs =\n  Osig_type (tree_of_type_decl id decl, tree_of_rec rs)\n\nlet type_declaration id ppf decl =\n  
!Oprint.out_sig_item ppf (tree_of_type_declaration id decl Trec_not)\n\nlet constructor_arguments ppf a =\n  let tys = tree_of_constructor_arguments a in\n  !Oprint.out_type ppf (Otyp_tuple tys)\n\n(* Print an extension declaration *)\n\nlet tree_of_extension_constructor id ext es =\n  reset ();\n  let ty_name = Path.name ext.ext_type_path in\n  let ty_params = filter_params ext.ext_type_params in\n  List.iter add_alias ty_params;\n  List.iter mark_loops ty_params;\n  List.iter check_name_of_type (List.map proxy ty_params);\n  mark_loops_constructor_arguments ext.ext_args;\n  may mark_loops ext.ext_ret_type;\n  let type_param = function\n    | Otyp_var (_, id) -> id\n    | _ -> \"?\"\n  in\n  let ty_params =\n    List.map (fun ty -> type_param (tree_of_typexp false ty)) ty_params\n  in\n  let name = Ident.name id in\n  let args, ret =\n    match ext.ext_ret_type with\n    | None -> (tree_of_constructor_arguments ext.ext_args, None)\n    | Some res ->\n      let nm = !names in\n      names := [];\n      let ret = tree_of_typexp false res in\n      let args = tree_of_constructor_arguments ext.ext_args in\n      names := nm;\n      (args, Some ret)\n  in\n  let ext =\n    {\n      oext_name = name;\n      oext_type_name = ty_name;\n      oext_type_params = ty_params;\n      oext_args = args;\n      oext_ret_type = ret;\n      oext_private = ext.ext_private;\n    }\n  in\n  let es =\n    match es with\n    | Text_first -> Oext_first\n    | Text_next -> Oext_next\n    | Text_exception -> Oext_exception\n  in\n  Osig_typext (ext, es)\n\nlet extension_constructor id ppf ext =\n  !Oprint.out_sig_item ppf (tree_of_extension_constructor id ext Text_first)\n\n(* Print a value declaration *)\n\nlet tree_of_value_description id decl =\n  (* Format.eprintf \"@[%a@]@.\" raw_type_expr decl.val_type; *)\n  let id = Ident.name id in\n  let ty = tree_of_type_scheme decl.val_type in\n  let vd =\n    {oval_name = id; oval_type = ty; oval_prims = []; oval_attributes = []}\n  in\n  let 
vd =\n    match decl.val_kind with\n    | Val_prim p -> Primitive.print p vd\n    | _ -> vd\n  in\n  Osig_value vd\n\nlet value_description id ppf decl =\n  !Oprint.out_sig_item ppf (tree_of_value_description id decl)\n\n(* Print a class type *)\n\nlet method_type (_, kind, ty) =\n  match (field_kind_repr kind, repr ty) with\n  | Fpresent, {desc = Tpoly (ty, tyl)} -> (ty, tyl)\n  | _, ty -> (ty, [])\n\nlet tree_of_metho sch concrete csil (lab, kind, ty) =\n  if lab <> dummy_method then (\n    let kind = field_kind_repr kind in\n    let priv = kind <> Fpresent in\n    let virt = not (Concr.mem lab concrete) in\n    let ty, tyl = method_type (lab, kind, ty) in\n    let tty = tree_of_typexp sch ty in\n    remove_names tyl;\n    Ocsg_method (lab, priv, virt, tty) :: csil)\n  else csil\n\nlet rec prepare_class_type params = function\n  | Cty_constr (_p, tyl, cty) ->\n    let sty = Ctype.self_type cty in\n    if\n      List.memq (proxy sty) !visited_objects\n      || (not (List.for_all is_Tvar params))\n      || List.exists (deep_occur sty) tyl\n    then prepare_class_type params cty\n    else List.iter mark_loops tyl\n  | Cty_signature sign ->\n    let sty = repr sign.csig_self in\n    (* Self may have a name *)\n    let px = proxy sty in\n    if List.memq px !visited_objects then add_alias sty\n    else visited_objects := px :: !visited_objects;\n    let fields, _ = Ctype.flatten_fields (Ctype.object_fields sign.csig_self) in\n    List.iter (fun met -> mark_loops (fst (method_type met))) fields;\n    Vars.iter (fun _ (_, _, ty) -> mark_loops ty) sign.csig_vars\n  | Cty_arrow (_, ty, cty) ->\n    mark_loops ty;\n    prepare_class_type params cty\n\nlet rec tree_of_class_type sch params = function\n  | Cty_constr (p', tyl, cty) ->\n    let sty = Ctype.self_type cty in\n    if\n      List.memq (proxy sty) !visited_objects\n      || not (List.for_all is_Tvar params)\n    then tree_of_class_type sch params cty\n    else Octy_constr (tree_of_path p', tree_of_typlist true 
tyl)\n  | Cty_signature sign ->\n    let sty = repr sign.csig_self in\n    let self_ty =\n      if is_aliased sty then\n        Some (Otyp_var (false, name_of_type new_name (proxy sty)))\n      else None\n    in\n    let fields, _ = Ctype.flatten_fields (Ctype.object_fields sign.csig_self) in\n    let csil = [] in\n    let csil =\n      List.fold_left\n        (fun csil (ty1, ty2) -> Ocsg_constraint (ty1, ty2) :: csil)\n        csil\n        (tree_of_constraints params)\n    in\n    let all_vars =\n      Vars.fold (fun l (m, v, t) all -> (l, m, v, t) :: all) sign.csig_vars []\n    in\n    (* Consequence of PR#3607: order of Map.fold has changed! *)\n    let all_vars = List.rev all_vars in\n    let csil =\n      List.fold_left\n        (fun csil (l, m, v, t) ->\n          Ocsg_value (l, m = Mutable, v = Virtual, tree_of_typexp sch t) :: csil)\n        csil all_vars\n    in\n    let csil = List.fold_left (tree_of_metho sch sign.csig_concr) csil fields in\n    Octy_signature (self_ty, List.rev csil)\n  | Cty_arrow (l, ty, cty) ->\n    let lab = string_of_label l in\n    let ty =\n      if is_optional l then\n        match (repr ty).desc with\n        | Tconstr (path, [ty], _) when Path.same path Predef.path_option -> ty\n        | _ -> newconstr (Path.Pident (Ident.create \"<hidden>\")) []\n      else ty\n    in\n    let tr = tree_of_typexp sch ty in\n    Octy_arrow (lab, tr, tree_of_class_type sch params cty)\n\nlet class_type ppf cty =\n  reset ();\n  prepare_class_type [] cty;\n  !Oprint.out_class_type ppf (tree_of_class_type false [] cty)\n\nlet tree_of_class_param param variance =\n  ( (match tree_of_typexp true param with\n    | Otyp_var (_, s) -> s\n    | _ -> \"?\"),\n    if is_Tvar (repr param) then (true, true) else variance )\n\nlet class_variance = List.map Variance.(fun v -> (mem May_pos v, mem May_neg v))\n\nlet tree_of_class_declaration id cl rs =\n  let params = filter_params cl.cty_params in\n\n  reset ();\n  List.iter add_alias params;\n  
prepare_class_type params cl.cty_type;\n  let sty = Ctype.self_type cl.cty_type in\n  List.iter mark_loops params;\n\n  List.iter check_name_of_type (List.map proxy params);\n  if is_aliased sty then check_name_of_type (proxy sty);\n\n  let vir_flag = cl.cty_new = None in\n  Osig_class\n    ( vir_flag,\n      Ident.name id,\n      List.map2 tree_of_class_param params (class_variance cl.cty_variance),\n      tree_of_class_type true params cl.cty_type,\n      tree_of_rec rs )\n\nlet class_declaration id ppf cl =\n  !Oprint.out_sig_item ppf (tree_of_class_declaration id cl Trec_first)\n\nlet tree_of_cltype_declaration id cl rs =\n  let params = List.map repr cl.clty_params in\n\n  reset ();\n  List.iter add_alias params;\n  prepare_class_type params cl.clty_type;\n  let sty = Ctype.self_type cl.clty_type in\n  List.iter mark_loops params;\n\n  List.iter check_name_of_type (List.map proxy params);\n  if is_aliased sty then check_name_of_type (proxy sty);\n\n  let sign = Ctype.signature_of_class_type cl.clty_type in\n\n  let virt =\n    let fields, _ = Ctype.flatten_fields (Ctype.object_fields sign.csig_self) in\n    List.exists\n      (fun (lab, _, _) ->\n        not (lab = dummy_method || Concr.mem lab sign.csig_concr))\n      fields\n    || Vars.fold (fun _ (_, vr, _) b -> vr = Virtual || b) sign.csig_vars false\n  in\n\n  Osig_class_type\n    ( virt,\n      Ident.name id,\n      List.map2 tree_of_class_param params (class_variance cl.clty_variance),\n      tree_of_class_type true params cl.clty_type,\n      tree_of_rec rs )\n\nlet cltype_declaration id ppf cl =\n  !Oprint.out_sig_item ppf (tree_of_cltype_declaration id cl Trec_first)\n\n(* Print a module type *)\n\nlet wrap_env fenv ftree arg =\n  let env = !printing_env in\n  set_printing_env (fenv env);\n  let tree = ftree arg in\n  set_printing_env env;\n  tree\n\nlet filter_rem_sig item rem =\n  match (item, rem) with\n  | Sig_class_type _, tydecl1 :: tydecl2 :: rem -> ([tydecl1; tydecl2], rem)\n  | _ -> ([], 
rem)\n\nlet dummy =\n  {\n    type_params = [];\n    type_arity = 0;\n    type_kind = Type_abstract;\n    type_private = Public;\n    type_manifest = None;\n    type_variance = [];\n    type_newtype_level = None;\n    type_loc = Location.none;\n    type_attributes = [];\n    type_immediate = false;\n    type_unboxed = unboxed_false_default_false;\n  }\n\nlet hide_rec_items = function\n  | Sig_type (id, _decl, rs) :: rem\n    when rs = Trec_first && not !Clflags.real_paths ->\n    let rec get_ids = function\n      | Sig_type (id, _, Trec_next) :: rem -> id :: get_ids rem\n      | _ -> []\n    in\n    let ids = id :: get_ids rem in\n    set_printing_env\n      (List.fold_right\n         (fun id -> Env.add_type ~check:false (Ident.rename id) dummy)\n         ids !printing_env)\n  | _ -> ()\n\nlet rec tree_of_modtype ?(ellipsis = false) = function\n  | Mty_ident p -> Omty_ident (tree_of_path p)\n  | Mty_signature sg ->\n    Omty_signature (if ellipsis then [Osig_ellipsis] else tree_of_signature sg)\n  | Mty_functor (param, ty_arg, ty_res) ->\n    let res =\n      match ty_arg with\n      | None -> tree_of_modtype ~ellipsis ty_res\n      | Some mty ->\n        wrap_env\n          (Env.add_module ~arg:true param mty)\n          (tree_of_modtype ~ellipsis)\n          ty_res\n    in\n    Omty_functor\n      (Ident.name param, may_map (tree_of_modtype ~ellipsis:false) ty_arg, res)\n  | Mty_alias (_, p) -> Omty_alias (tree_of_path p)\n\nand tree_of_signature sg =\n  wrap_env (fun env -> env) (tree_of_signature_rec !printing_env false) sg\n\nand tree_of_signature_rec env' in_type_group = function\n  | [] -> []\n  | item :: rem as items ->\n    let in_type_group =\n      match (in_type_group, item) with\n      | true, Sig_type (_, _, Trec_next) -> true\n      | _, Sig_type (_, _, (Trec_not | Trec_first)) ->\n        set_printing_env env';\n        true\n      | _ ->\n        set_printing_env env';\n        false\n    in\n    let sg, rem = filter_rem_sig item rem in\n    
hide_rec_items items;\n    let trees = trees_of_sigitem item in\n    let env' = Env.add_signature (item :: sg) env' in\n    trees @ tree_of_signature_rec env' in_type_group rem\n\nand trees_of_sigitem = function\n  | Sig_value (id, decl) -> [tree_of_value_description id decl]\n  | Sig_type (id, _, _) when is_row_name (Ident.name id) -> []\n  | Sig_type (id, decl, rs) -> [tree_of_type_declaration id decl rs]\n  | Sig_typext (id, ext, es) -> [tree_of_extension_constructor id ext es]\n  | Sig_module (id, md, rs) ->\n    let ellipsis =\n      List.exists\n        (function\n          | {txt = \"...\"}, Parsetree.PStr [] -> true\n          | _ -> false)\n        md.md_attributes\n    in\n    [tree_of_module id md.md_type rs ~ellipsis]\n  | Sig_modtype (id, decl) -> [tree_of_modtype_declaration id decl]\n  | Sig_class () -> []\n  | Sig_class_type (id, decl, rs) -> [tree_of_cltype_declaration id decl rs]\n\nand tree_of_modtype_declaration id decl =\n  let mty =\n    match decl.mtd_type with\n    | None -> Omty_abstract\n    | Some mty -> tree_of_modtype mty\n  in\n  Osig_modtype (Ident.name id, mty)\n\nand tree_of_module id ?ellipsis mty rs =\n  Osig_module (Ident.name id, tree_of_modtype ?ellipsis mty, tree_of_rec rs)\n\nlet modtype ppf mty = !Oprint.out_module_type ppf (tree_of_modtype mty)\nlet modtype_declaration id ppf decl =\n  !Oprint.out_sig_item ppf (tree_of_modtype_declaration id decl)\n\n(* For the toplevel: merge with tree_of_signature? 
*)\n\n(* Refresh weak variable map in the toplevel *)\nlet refresh_weak () =\n  let refresh t name (m, s) =\n    if is_non_gen true (repr t) then (TypeMap.add t name m, StringSet.add name s)\n    else (m, s)\n  in\n  let m, s =\n    TypeMap.fold refresh !weak_var_map (TypeMap.empty, StringSet.empty)\n  in\n  named_weak_vars := s;\n  weak_var_map := m\n\nlet print_items showval env x =\n  refresh_weak ();\n  let rec print showval env = function\n    | [] -> []\n    | item :: rem as items ->\n      let _sg, rem = filter_rem_sig item rem in\n      hide_rec_items items;\n      let trees = trees_of_sigitem item in\n      List.map (fun d -> (d, showval env item)) trees @ print showval env rem\n  in\n  print showval env x\n\n(* Print a signature body (used by -i when compiling a .ml) *)\n\nlet print_signature ppf tree =\n  fprintf ppf \"@[<v>%a@]\" !Oprint.out_signature tree\n\nlet signature ppf sg = fprintf ppf \"%a\" print_signature (tree_of_signature sg)\n\n(* Print an unification error *)\n\nlet same_path t t' =\n  let t = repr t and t' = repr t' in\n  t == t'\n  ||\n  match (t.desc, t'.desc) with\n  | Tconstr (p, tl, _), Tconstr (p', tl', _) -> (\n    let p1, s1 = best_type_path p and p2, s2 = best_type_path p' in\n    match (s1, s2) with\n    | Nth n1, Nth n2 when n1 = n2 -> true\n    | (Id | Map _), (Id | Map _) when Path.same p1 p2 ->\n      let tl = apply_subst s1 tl and tl' = apply_subst s2 tl' in\n      List.length tl = List.length tl' && List.for_all2 same_type tl tl'\n    | _ -> false)\n  | _ -> false\n\nlet type_expansion t ppf t' =\n  if same_path t t' then (\n    add_delayed (proxy t);\n    type_expr ppf t)\n  else\n    let t' = if proxy t == proxy t' then unalias t' else t' in\n    fprintf ppf \"@[<2>%a@ =@ %a@]\" type_expr t type_expr t'\n\nlet type_path_expansion tp ppf tp' =\n  if Path.same tp tp' then path ppf tp\n  else fprintf ppf \"@[<2>%a@ =@ %a@]\" path tp path tp'\n\nlet rec trace fst txt ppf = function\n  | (t1, t1') :: (t2, t2') :: rem ->\n    
if not fst then fprintf ppf \"@,\";\n    fprintf ppf \"@[Type@;<1 2>%a@ %s@;<1 2>%a@] %a\" (type_expansion t1) t1' txt\n      (type_expansion t2) t2' (trace false txt) rem\n  | _ -> ()\n\nlet rec filter_trace keep_last = function\n  | [(_, t1'); (_, t2')] when is_Tvar t1' || is_Tvar t2' -> []\n  | (t1, t1') :: (t2, t2') :: rem ->\n    let rem' = filter_trace keep_last rem in\n    if\n      is_constr_row ~allow_ident:true t1'\n      || is_constr_row ~allow_ident:true t2'\n      || (same_path t1 t1' && same_path t2 t2' && not (keep_last && rem' = []))\n    then rem'\n    else (t1, t1') :: (t2, t2') :: rem'\n  | _ -> []\n\nlet rec type_path_list ppf = function\n  | [(tp, tp')] -> type_path_expansion tp ppf tp'\n  | (tp, tp') :: rem ->\n    fprintf ppf \"%a@;<2 0>%a\" (type_path_expansion tp) tp' type_path_list rem\n  | [] -> ()\n\n(* Hide variant name and var, to force printing the expanded type *)\nlet hide_variant_name t =\n  match repr t with\n  | {desc = Tvariant row} as t when (row_repr row).row_name <> None ->\n    newty2 t.level\n      (Tvariant\n         {\n           (row_repr row) with\n           row_name = None;\n           row_more = newvar2 (row_more row).level;\n         })\n  | _ -> t\n\nlet prepare_expansion (t, t') =\n  let t' = hide_variant_name t' in\n  mark_loops t;\n  if not (same_path t t') then mark_loops t';\n  (t, t')\n\nlet may_prepare_expansion compact (t, t') =\n  match (repr t').desc with\n  | (Tvariant _ | Tobject _) when compact ->\n    mark_loops t;\n    (t, t)\n  | _ -> prepare_expansion (t, t')\n\nlet print_tags ppf fields =\n  match fields with\n  | [] -> ()\n  | (t, _) :: fields ->\n    fprintf ppf \"%s\" (!print_res_poly_identifier t);\n    List.iter\n      (fun (t, _) -> fprintf ppf \",@ %s\" (!print_res_poly_identifier t))\n      fields\n\nlet has_explanation t3 t4 =\n  match (t3.desc, t4.desc) with\n  | Tfield _, (Tnil | Tconstr _)\n  | (Tnil | Tconstr _), Tfield _\n  | Tnil, Tconstr _\n  | Tconstr _, Tnil\n  | _, Tvar _\n  | 
Tvar _, _\n  | Tvariant _, Tvariant _ ->\n    true\n  | Tfield (l, _, _, {desc = Tnil}), Tfield (l', _, _, {desc = Tnil}) -> l = l'\n  | _ -> false\n\nlet rec mismatch = function\n  | (_, t) :: (_, t') :: rem -> (\n    match mismatch rem with\n    | Some _ as m -> m\n    | None -> if has_explanation t t' then Some (t, t') else None)\n  | [] -> None\n  | _ -> assert false\n\nlet explanation unif t3 t4 ppf =\n  match (t3.desc, t4.desc) with\n  | Ttuple [], Tvar _ | Tvar _, Ttuple [] ->\n    fprintf ppf \"@,Self type cannot escape its class\"\n  | Tconstr (p, _, _), Tvar _ when unif && t4.level < Path.binding_time p ->\n    fprintf ppf \"@,@[The type constructor@;<1 2>%a@ would escape its scope@]\"\n      path p\n  | Tvar _, Tconstr (p, _, _) when unif && t3.level < Path.binding_time p ->\n    fprintf ppf \"@,@[The type constructor@;<1 2>%a@ would escape its scope@]\"\n      path p\n  | Tvar _, Tunivar _ | Tunivar _, Tvar _ ->\n    fprintf ppf \"@,The universal variable %a would escape its scope\" type_expr\n      (if is_Tunivar t3 then t3 else t4)\n  | Tvar _, _ | _, Tvar _ ->\n    let t, t' = if is_Tvar t3 then (t3, t4) else (t4, t3) in\n    if occur_in Env.empty t t' then\n      fprintf ppf \"@,@[<hov>The type variable %a occurs inside@ %a@]\" type_expr\n        t type_expr t'\n    else\n      fprintf ppf \"@,@[<hov>This instance of %a is ambiguous:@ %s@]\" type_expr\n        t' \"it would escape the scope of its equation\"\n  | Tfield (lab, _, _, _), _ when lab = dummy_method ->\n    fprintf ppf \"@,Self type cannot be unified with a closed object type\"\n  | _, Tfield (lab, _, _, _) when lab = dummy_method ->\n    fprintf ppf \"@,Self type cannot be unified with a closed object type\"\n  | Tfield (l, _, _, {desc = Tnil}), Tfield (l', _, _, {desc = Tnil})\n    when l = l' ->\n    fprintf ppf \"@,Types for method %s are incompatible\" l\n  | (Tnil | Tconstr _), Tfield (l, _, _, _) ->\n    fprintf ppf \"@,@[The first object type has no field %s@]\" l\n  | Tfield (l, 
_, _, _), (Tnil | Tconstr _) ->\n    fprintf ppf \"@,@[The second object type has no field %s@]\" l\n  | Tnil, Tconstr _ | Tconstr _, Tnil ->\n    fprintf ppf\n      \"@,@[The %s object type has an abstract row, it cannot be closed@]\"\n      (if t4.desc = Tnil then \"first\" else \"second\")\n  | Tvariant row1, Tvariant row2 -> (\n    let row1 = row_repr row1 and row2 = row_repr row2 in\n    match\n      (row1.row_fields, row1.row_closed, row2.row_fields, row2.row_closed)\n    with\n    | [], true, [], true ->\n      fprintf ppf \"@,These two variant types have no intersection\"\n    | [], true, (_ :: _ as fields), _ ->\n      fprintf ppf\n        \"@,@[The first variant type does not allow tag(s)@ @[<hov>%a@]@]\"\n        print_tags fields\n    | (_ :: _ as fields), _, [], true ->\n      fprintf ppf\n        \"@,@[The second variant type does not allow tag(s)@ @[<hov>%a@]@]\"\n        print_tags fields\n    | [(l1, _)], true, [(l2, _)], true when l1 = l2 ->\n      fprintf ppf \"@,Types for tag %s are incompatible\"\n        (!print_res_poly_identifier l1)\n    | _ -> ())\n  | _ -> ()\n\nlet warn_on_missing_def env ppf t =\n  match t.desc with\n  | Tconstr (p, _, _) -> (\n    try ignore (Env.find_type p env : Types.type_declaration)\n    with Not_found ->\n      fprintf ppf\n        \"@,\\\n         @[%a is abstract because no corresponding cmi file was found in \\\n         path.@]\"\n        path p)\n  | _ -> ()\n\nlet explanation unif mis ppf =\n  match mis with\n  | None -> ()\n  | Some (t3, t4) -> explanation unif t3 t4 ppf\n\nlet ident_same_name id1 id2 =\n  if Ident.equal id1 id2 && not (Ident.same id1 id2) then (\n    add_unique id1;\n    add_unique id2)\n\nlet rec path_same_name p1 p2 =\n  match (p1, p2) with\n  | Pident id1, Pident id2 -> ident_same_name id1 id2\n  | Pdot (p1, s1, _), Pdot (p2, s2, _) when s1 = s2 -> path_same_name p1 p2\n  | Papply (p1, p1'), Papply (p2, p2') ->\n    path_same_name p1 p2;\n    path_same_name p1' p2'\n  | _ -> ()\n\nlet 
type_same_name t1 t2 =\n  match ((repr t1).desc, (repr t2).desc) with\n  | Tconstr (p1, _, _), Tconstr (p2, _, _) ->\n    path_same_name (fst (best_type_path p1)) (fst (best_type_path p2))\n  | _ -> ()\n\nlet rec trace_same_names = function\n  | (t1, t1') :: (t2, t2') :: rem ->\n    type_same_name t1 t2;\n    type_same_name t1' t2';\n    trace_same_names rem\n  | _ -> ()\n\nlet unification_error env unif tr txt1 ppf txt2 =\n  reset ();\n  trace_same_names tr;\n  let tr = List.map (fun (t, t') -> (t, hide_variant_name t')) tr in\n  let mis = mismatch tr in\n  match tr with\n  | [] | _ :: [] -> assert false\n  | t1 :: t2 :: tr -> (\n    try\n      let tr = filter_trace (mis = None) tr in\n      let t1, t1' = may_prepare_expansion (tr = []) t1\n      and t2, t2' = may_prepare_expansion (tr = []) t2 in\n      let tr = List.map prepare_expansion tr in\n      fprintf ppf \"@[<v>@[%t@;<1 2>%a@ %t@;<1 2>%a@]%a%t@]\" txt1\n        (type_expansion t1) t1' txt2 (type_expansion t2) t2'\n        (trace false \"is not compatible with type\")\n        tr (explanation unif mis);\n      if env <> Env.empty then (\n        warn_on_missing_def env ppf t1;\n        warn_on_missing_def env ppf t2)\n    with exn -> raise exn)\n\nlet report_unification_error ppf env ?(unif = true) tr txt1 txt2 =\n  wrap_printing_env env (fun () -> unification_error env unif tr txt1 ppf txt2)\n\nlet super_type_expansion ~tag t ppf t' =\n  let tag = Format.String_tag tag in\n  if same_path t t' then (\n    Format.pp_open_stag ppf tag;\n    type_expr ppf t;\n    Format.pp_close_stag ppf ())\n  else\n    let t' = if proxy t == proxy t' then unalias t' else t' in\n    fprintf ppf \"@[<2>\";\n    Format.pp_open_stag ppf tag;\n    fprintf ppf \"%a\" type_expr t;\n    Format.pp_close_stag ppf ();\n    fprintf ppf \"@ @{<dim>(defined as@}@ \";\n    Format.pp_open_stag ppf tag;\n    fprintf ppf \"%a\" type_expr t';\n    Format.pp_close_stag ppf ();\n    fprintf ppf \"@{<dim>)@}\";\n    fprintf ppf \"@]\"\n\nlet 
super_trace ppf =\n  let rec super_trace first_report ppf = function\n    | (t1, t1') :: (t2, t2') :: rem ->\n      fprintf ppf \"@,@,@[<v 2>\";\n      if first_report then fprintf ppf \"The incompatible parts:@,\"\n      else fprintf ppf \"Further expanded:@,\";\n      fprintf ppf \"@[<hov>%a@ vs@ %a@]%a\"\n        (super_type_expansion ~tag:\"error\" t1)\n        t1'\n        (super_type_expansion ~tag:\"info\" t2)\n        t2' (super_trace false) rem;\n      fprintf ppf \"@]\"\n    | _ -> ()\n  in\n  super_trace true ppf\n\nlet super_unification_error unif tr txt1 ppf txt2 =\n  reset ();\n  trace_same_names tr;\n  let tr = List.map (fun (t, t') -> (t, hide_variant_name t')) tr in\n  let mis = mismatch tr in\n  match tr with\n  | [] | _ :: [] -> assert false\n  | t1 :: t2 :: tr -> (\n    try\n      let tr = filter_trace (mis = None) tr in\n      let t1, t1' = may_prepare_expansion (tr = []) t1\n      and t2, t2' = may_prepare_expansion (tr = []) t2 in\n      let tr = List.map prepare_expansion tr in\n      fprintf ppf \"@[<v 0>@[<hov 2>%t@ %a@]@,@[<hov 2>%t@ %a@]%a%t@]\" txt1\n        (super_type_expansion ~tag:\"error\" t1)\n        t1' txt2\n        (super_type_expansion ~tag:\"info\" t2)\n        t2' super_trace tr (explanation unif mis)\n    with exn -> raise exn)\n\nlet super_report_unification_error ppf env ?(unif = true) tr txt1 txt2 =\n  wrap_printing_env env (fun () ->\n      super_unification_error unif tr txt1 ppf txt2)\n\nlet trace fst keep_last txt ppf tr =\n  trace_same_names tr;\n  try\n    match tr with\n    | t1 :: t2 :: tr' ->\n      if fst then trace fst txt ppf (t1 :: t2 :: filter_trace keep_last tr')\n      else trace fst txt ppf (filter_trace keep_last tr)\n    | _ -> ()\n  with exn -> raise exn\n\nlet report_subtyping_error ppf env tr1 txt1 tr2 =\n  wrap_printing_env env (fun () ->\n      reset ();\n      let tr1 = List.map prepare_expansion tr1\n      and tr2 = List.map prepare_expansion tr2 in\n      fprintf ppf \"@[<v>%a\" (trace true 
(tr2 = []) txt1) tr1;\n      if tr2 = [] then fprintf ppf \"@]\"\n      else\n        let mis = mismatch tr2 in\n        fprintf ppf \"%a%t@]\"\n          (trace false (mis = None) \"is not compatible with type\")\n          tr2 (explanation true mis))\n\nlet report_ambiguous_type_error ppf env (tp0, tp0') tpl txt1 txt2 txt3 =\n  wrap_printing_env env (fun () ->\n      reset ();\n      List.iter\n        (fun (tp, tp') ->\n          path_same_name tp0 tp;\n          path_same_name tp0' tp')\n        tpl;\n      match tpl with\n      | [] -> assert false\n      | [(tp, tp')] ->\n        fprintf ppf \"@[%t@;<1 2>%a@ %t@;<1 2>%a@]\" txt1 (type_path_expansion tp)\n          tp' txt3 (type_path_expansion tp0) tp0'\n      | _ ->\n        fprintf ppf \"@[%t@;<1 2>@[<hv>%a@]@ %t@;<1 2>%a@]\" txt2 type_path_list\n          tpl txt3 (type_path_expansion tp0) tp0')\n"
  },
  {
    "path": "analysis/vendor/ml/printtyp.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Printing functions *)\n\nopen Format\nopen Types\nopen Outcometree\n\nval print_res_poly_identifier : (string -> string) ref\nval longident : formatter -> Longident.t -> unit\nval ident : formatter -> Ident.t -> unit\nval tree_of_path : Path.t -> out_ident\nval path : formatter -> Path.t -> unit\nval string_of_path : Path.t -> string\nval raw_type_expr : formatter -> type_expr -> unit\nval string_of_label : Asttypes.arg_label -> string\n\nval wrap_printing_env : Env.t -> (unit -> 'a) -> 'a\n(* Call the function using the environment for type path shortening *)\n(* This affects all the printing functions below *)\n\nval reset : unit -> unit\nval mark_loops : type_expr -> unit\nval reset_and_mark_loops : type_expr -> unit\nval reset_and_mark_loops_list : type_expr list -> unit\nval type_expr : formatter -> type_expr -> unit\nval constructor_arguments : formatter -> 
constructor_arguments -> unit\nval tree_of_type_scheme : type_expr -> out_type\nval type_sch : formatter -> type_expr -> unit\nval type_scheme : formatter -> type_expr -> unit\n\n(* Maxence *)\nval reset_names : unit -> unit\nval type_scheme_max : ?b_reset_names:bool -> formatter -> type_expr -> unit\n\n(* End Maxence *)\nval tree_of_value_description : Ident.t -> value_description -> out_sig_item\nval value_description : Ident.t -> formatter -> value_description -> unit\nval tree_of_type_declaration :\n  Ident.t -> type_declaration -> rec_status -> out_sig_item\nval type_declaration : Ident.t -> formatter -> type_declaration -> unit\nval tree_of_extension_constructor :\n  Ident.t -> extension_constructor -> ext_status -> out_sig_item\nval extension_constructor :\n  Ident.t -> formatter -> extension_constructor -> unit\nval tree_of_module :\n  Ident.t -> ?ellipsis:bool -> module_type -> rec_status -> out_sig_item\nval modtype : formatter -> module_type -> unit\nval signature : formatter -> signature -> unit\nval tree_of_modtype_declaration : Ident.t -> modtype_declaration -> out_sig_item\nval tree_of_signature : Types.signature -> out_sig_item list\nval tree_of_typexp : bool -> type_expr -> out_type\nval modtype_declaration : Ident.t -> formatter -> modtype_declaration -> unit\nval class_type : formatter -> class_type -> unit\nval tree_of_class_declaration :\n  Ident.t -> class_declaration -> rec_status -> out_sig_item\nval class_declaration : Ident.t -> formatter -> class_declaration -> unit\nval tree_of_cltype_declaration :\n  Ident.t -> class_type_declaration -> rec_status -> out_sig_item\nval cltype_declaration : Ident.t -> formatter -> class_type_declaration -> unit\nval type_expansion : type_expr -> Format.formatter -> type_expr -> unit\nval prepare_expansion : type_expr * type_expr -> type_expr * type_expr\nval trace :\n  bool -> bool -> string -> formatter -> (type_expr * type_expr) list -> unit\nval report_unification_error :\n  formatter ->\n  Env.t ->\n  
?unif:bool ->\n  (type_expr * type_expr) list ->\n  (formatter -> unit) ->\n  (formatter -> unit) ->\n  unit\n\nval super_report_unification_error :\n  formatter ->\n  Env.t ->\n  ?unif:bool ->\n  (type_expr * type_expr) list ->\n  (formatter -> unit) ->\n  (formatter -> unit) ->\n  unit\n\nval report_subtyping_error :\n  formatter ->\n  Env.t ->\n  (type_expr * type_expr) list ->\n  string ->\n  (type_expr * type_expr) list ->\n  unit\nval report_ambiguous_type_error :\n  formatter ->\n  Env.t ->\n  Path.t * Path.t ->\n  (Path.t * Path.t) list ->\n  (formatter -> unit) ->\n  (formatter -> unit) ->\n  (formatter -> unit) ->\n  unit\n\n(* for toploop *)\nval print_items :\n  (Env.t -> signature_item -> 'a option) ->\n  Env.t ->\n  signature_item list ->\n  (out_sig_item * 'a option) list\n"
  },
  {
    "path": "analysis/vendor/ml/printtyped.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Fabrice Le Fessant, INRIA Saclay                     *)\n(*                                                                        *)\n(*   Copyright 1999 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Format\nopen Lexing\nopen Location\nopen Typedtree\n\nlet fmt_position f l =\n  if l.pos_lnum = -1 then fprintf f \"%s[%d]\" l.pos_fname l.pos_cnum\n  else\n    fprintf f \"%s[%d,%d+%d]\" l.pos_fname l.pos_lnum l.pos_bol\n      (l.pos_cnum - l.pos_bol)\n\nlet fmt_location f loc =\n  if !Clflags.dump_location then (\n    fprintf f \"(%a..%a)\" fmt_position loc.loc_start fmt_position loc.loc_end;\n    if loc.loc_ghost then fprintf f \" ghost\")\n\nlet rec fmt_longident_aux f x =\n  match x with\n  | Longident.Lident s -> fprintf f \"%s\" s\n  | Longident.Ldot (y, s) -> fprintf f \"%a.%s\" fmt_longident_aux y s\n  | Longident.Lapply (y, z) ->\n    fprintf f \"%a(%a)\" fmt_longident_aux y fmt_longident_aux z\n\nlet fmt_longident f x = fprintf f \"\\\"%a\\\"\" fmt_longident_aux x.txt\n\nlet fmt_ident = Ident.print\n\nlet rec fmt_path_aux f x =\n  match x with\n  
| Path.Pident s -> fprintf f \"%a\" fmt_ident s\n  | Path.Pdot (y, s, _pos) -> fprintf f \"%a.%s\" fmt_path_aux y s\n  | Path.Papply (y, z) -> fprintf f \"%a(%a)\" fmt_path_aux y fmt_path_aux z\n\nlet fmt_path f x = fprintf f \"\\\"%a\\\"\" fmt_path_aux x\n\nlet fmt_constant f x =\n  match x with\n  | Const_int i -> fprintf f \"Const_int %d\" i\n  | Const_char c -> fprintf f \"Const_char %02x\" c\n  | Const_string (s, None) -> fprintf f \"Const_string(%S,None)\" s\n  | Const_string (s, Some delim) ->\n    fprintf f \"Const_string (%S,Some %S)\" s delim\n  | Const_float s -> fprintf f \"Const_float %s\" s\n  | Const_int32 i -> fprintf f \"Const_int32 %ld\" i\n  | Const_int64 i -> fprintf f \"Const_int64 %Ld\" i\n  | Const_bigint (sign, i) ->\n    fprintf f \"Const_bigint %s\" (Bigint_utils.to_string sign i)\n\nlet fmt_mutable_flag f x =\n  match x with\n  | Immutable -> fprintf f \"Immutable\"\n  | Mutable -> fprintf f \"Mutable\"\n\nlet fmt_virtual_flag f x =\n  match x with\n  | Virtual -> fprintf f \"Virtual\"\n  | Concrete -> fprintf f \"Concrete\"\n\nlet fmt_override_flag f x =\n  match x with\n  | Override -> fprintf f \"Override\"\n  | Fresh -> fprintf f \"Fresh\"\n\nlet fmt_closed_flag f x =\n  match x with\n  | Closed -> fprintf f \"Closed\"\n  | Open -> fprintf f \"Open\"\n\nlet fmt_rec_flag f x =\n  match x with\n  | Nonrecursive -> fprintf f \"Nonrec\"\n  | Recursive -> fprintf f \"Rec\"\n\nlet fmt_direction_flag f x =\n  match x with\n  | Upto -> fprintf f \"Up\"\n  | Downto -> fprintf f \"Down\"\n\nlet fmt_private_flag f x =\n  match x with\n  | Public -> fprintf f \"Public\"\n  | Private -> fprintf f \"Private\"\n\nlet line i f s (*...*) =\n  fprintf f \"%s\" (String.make (2 * i) ' ');\n  fprintf f s (*...*)\n\nlet list i f ppf l =\n  match l with\n  | [] -> line i ppf \"[]\\n\"\n  | _ :: _ ->\n    line i ppf \"[\\n\";\n    List.iter (f (i + 1) ppf) l;\n    line i ppf \"]\\n\"\n\nlet array i f ppf a =\n  if Array.length a = 0 then line i ppf 
\"[]\\n\"\n  else (\n    line i ppf \"[\\n\";\n    Array.iter (f (i + 1) ppf) a;\n    line i ppf \"]\\n\")\n\nlet option i f ppf x =\n  match x with\n  | None -> line i ppf \"None\\n\"\n  | Some x ->\n    line i ppf \"Some\\n\";\n    f (i + 1) ppf x\n\nlet longident i ppf li = line i ppf \"%a\\n\" fmt_longident li\nlet string i ppf s = line i ppf \"\\\"%s\\\"\\n\" s\nlet arg_label i ppf = function\n  | Nolabel -> line i ppf \"Nolabel\\n\"\n  | Optional s -> line i ppf \"Optional \\\"%s\\\"\\n\" s\n  | Labelled s -> line i ppf \"Labelled \\\"%s\\\"\\n\" s\n\nlet record_representation i ppf =\n  let open Types in\n  function\n  | Record_regular -> line i ppf \"Record_regular\\n\"\n  | Record_float_unused -> assert false\n  | Record_optional_labels lbls ->\n    line i ppf \"Record_optional_labels %s\\n\" (lbls |> String.concat \", \")\n  | Record_unboxed b -> line i ppf \"Record_unboxed %b\\n\" b\n  | Record_inlined {tag = i} -> line i ppf \"Record_inlined %d\\n\" i\n  | Record_extension -> line i ppf \"Record_extension\\n\"\n\nlet attributes i ppf l =\n  let i = i + 1 in\n  List.iter\n    (fun (s, arg) ->\n      line i ppf \"attribute \\\"%s\\\"\\n\" s.txt;\n      Printast.payload (i + 1) ppf arg)\n    l\n\nlet rec core_type i ppf x =\n  line i ppf \"core_type %a\\n\" fmt_location x.ctyp_loc;\n  attributes i ppf x.ctyp_attributes;\n  let i = i + 1 in\n  match x.ctyp_desc with\n  | Ttyp_any -> line i ppf \"Ttyp_any\\n\"\n  | Ttyp_var s -> line i ppf \"Ttyp_var %s\\n\" s\n  | Ttyp_arrow (l, ct1, ct2) ->\n    line i ppf \"Ttyp_arrow\\n\";\n    arg_label i ppf l;\n    core_type i ppf ct1;\n    core_type i ppf ct2\n  | Ttyp_tuple l ->\n    line i ppf \"Ttyp_tuple\\n\";\n    list i core_type ppf l\n  | Ttyp_constr (li, _, l) ->\n    line i ppf \"Ttyp_constr %a\\n\" fmt_path li;\n    list i core_type ppf l\n  | Ttyp_variant (l, closed, low) ->\n    line i ppf \"Ttyp_variant closed=%a\\n\" fmt_closed_flag closed;\n    list i label_x_bool_x_core_type_list ppf l;\n    option i 
(fun i -> list i string) ppf low\n  | Ttyp_object (l, c) ->\n    line i ppf \"Ttyp_object %a\\n\" fmt_closed_flag c;\n    let i = i + 1 in\n    List.iter\n      (function\n        | OTtag (s, attrs, t) ->\n          line i ppf \"method %s\\n\" s.txt;\n          attributes i ppf attrs;\n          core_type (i + 1) ppf t\n        | OTinherit ct ->\n          line i ppf \"OTinherit\\n\";\n          core_type (i + 1) ppf ct)\n      l\n  | Ttyp_class (li, _, l) ->\n    line i ppf \"Ttyp_class %a\\n\" fmt_path li;\n    list i core_type ppf l\n  | Ttyp_alias (ct, s) ->\n    line i ppf \"Ttyp_alias \\\"%s\\\"\\n\" s;\n    core_type i ppf ct\n  | Ttyp_poly (sl, ct) ->\n    line i ppf \"Ttyp_poly%a\\n\"\n      (fun ppf -> List.iter (fun x -> fprintf ppf \" '%s\" x))\n      sl;\n    core_type i ppf ct\n  | Ttyp_package {pack_path = s; pack_fields = l} ->\n    line i ppf \"Ttyp_package %a\\n\" fmt_path s;\n    list i package_with ppf l\n\nand package_with i ppf (s, t) =\n  line i ppf \"with type %a\\n\" fmt_longident s;\n  core_type i ppf t\n\nand pattern i ppf x =\n  line i ppf \"pattern %a\\n\" fmt_location x.pat_loc;\n  attributes i ppf x.pat_attributes;\n  let i = i + 1 in\n  match x.pat_extra with\n  | (Tpat_unpack, _, attrs) :: rem ->\n    line i ppf \"Tpat_unpack\\n\";\n    attributes i ppf attrs;\n    pattern i ppf {x with pat_extra = rem}\n  | (Tpat_constraint cty, _, attrs) :: rem ->\n    line i ppf \"Tpat_constraint\\n\";\n    attributes i ppf attrs;\n    core_type i ppf cty;\n    pattern i ppf {x with pat_extra = rem}\n  | (Tpat_type (id, _), _, attrs) :: rem ->\n    line i ppf \"Tpat_type %a\\n\" fmt_path id;\n    attributes i ppf attrs;\n    pattern i ppf {x with pat_extra = rem}\n  | (Tpat_open (id, _, _), _, attrs) :: rem ->\n    line i ppf \"Tpat_open \\\"%a\\\"\\n\" fmt_path id;\n    attributes i ppf attrs;\n    pattern i ppf {x with pat_extra = rem}\n  | [] -> (\n    match x.pat_desc with\n    | Tpat_any -> line i ppf \"Tpat_any\\n\"\n    | Tpat_var (s, _) 
-> line i ppf \"Tpat_var \\\"%a\\\"\\n\" fmt_ident s\n    | Tpat_alias (p, s, _) ->\n      line i ppf \"Tpat_alias \\\"%a\\\"\\n\" fmt_ident s;\n      pattern i ppf p\n    | Tpat_constant c -> line i ppf \"Tpat_constant %a\\n\" fmt_constant c\n    | Tpat_tuple l ->\n      line i ppf \"Tpat_tuple\\n\";\n      list i pattern ppf l\n    | Tpat_construct (li, _, po) ->\n      line i ppf \"Tpat_construct %a\\n\" fmt_longident li;\n      list i pattern ppf po\n    | Tpat_variant (l, po, _) ->\n      line i ppf \"Tpat_variant \\\"%s\\\"\\n\" l;\n      option i pattern ppf po\n    | Tpat_record (l, _c) ->\n      line i ppf \"Tpat_record\\n\";\n      list i longident_x_pattern ppf l\n    | Tpat_array l ->\n      line i ppf \"Tpat_array\\n\";\n      list i pattern ppf l\n    | Tpat_or (p1, p2, _) ->\n      line i ppf \"Tpat_or\\n\";\n      pattern i ppf p1;\n      pattern i ppf p2\n    | Tpat_lazy p ->\n      line i ppf \"Tpat_lazy\\n\";\n      pattern i ppf p)\n\nand expression_extra i ppf x attrs =\n  match x with\n  | Texp_constraint ct ->\n    line i ppf \"Texp_constraint\\n\";\n    attributes i ppf attrs;\n    core_type i ppf ct\n  | Texp_coerce (cto1, cto2) ->\n    line i ppf \"Texp_coerce\\n\";\n    attributes i ppf attrs;\n    option i core_type ppf cto1;\n    core_type i ppf cto2\n  | Texp_open (ovf, m, _, _) ->\n    line i ppf \"Texp_open %a \\\"%a\\\"\\n\" fmt_override_flag ovf fmt_path m;\n    attributes i ppf attrs\n  | Texp_poly cto ->\n    line i ppf \"Texp_poly\\n\";\n    attributes i ppf attrs;\n    option i core_type ppf cto\n  | Texp_newtype s ->\n    line i ppf \"Texp_newtype \\\"%s\\\"\\n\" s;\n    attributes i ppf attrs\n\nand expression i ppf x =\n  line i ppf \"expression %a\\n\" fmt_location x.exp_loc;\n  attributes i ppf x.exp_attributes;\n  let i =\n    List.fold_left\n      (fun i (extra, _, attrs) ->\n        expression_extra i ppf extra attrs;\n        i + 1)\n      (i + 1) x.exp_extra\n  in\n  match x.exp_desc with\n  | Texp_ident (li, _, _) -> 
line i ppf \"Texp_ident %a\\n\" fmt_path li\n  | Texp_instvar () -> assert false\n  | Texp_constant c -> line i ppf \"Texp_constant %a\\n\" fmt_constant c\n  | Texp_let (rf, l, e) ->\n    line i ppf \"Texp_let %a\\n\" fmt_rec_flag rf;\n    list i value_binding ppf l;\n    expression i ppf e\n  | Texp_function {arg_label = p; param; cases; partial = _} ->\n    line i ppf \"Texp_function\\n\";\n    line i ppf \"%a\" Ident.print param;\n    arg_label i ppf p;\n    list i case ppf cases\n  | Texp_apply (e, l) ->\n    line i ppf \"Texp_apply\\n\";\n    expression i ppf e;\n    list i label_x_expression ppf l\n  | Texp_match (e, l1, l2, _partial) ->\n    line i ppf \"Texp_match\\n\";\n    expression i ppf e;\n    list i case ppf l1;\n    list i case ppf l2\n  | Texp_try (e, l) ->\n    line i ppf \"Texp_try\\n\";\n    expression i ppf e;\n    list i case ppf l\n  | Texp_tuple l ->\n    line i ppf \"Texp_tuple\\n\";\n    list i expression ppf l\n  | Texp_construct (li, _, eo) ->\n    line i ppf \"Texp_construct %a\\n\" fmt_longident li;\n    list i expression ppf eo\n  | Texp_variant (l, eo) ->\n    line i ppf \"Texp_variant \\\"%s\\\"\\n\" l;\n    option i expression ppf eo\n  | Texp_record {fields; representation; extended_expression} ->\n    line i ppf \"Texp_record\\n\";\n    let i = i + 1 in\n    line i ppf \"fields =\\n\";\n    array (i + 1) record_field ppf fields;\n    line i ppf \"representation =\\n\";\n    record_representation (i + 1) ppf representation;\n    line i ppf \"extended_expression =\\n\";\n    option (i + 1) expression ppf extended_expression\n  | Texp_field (e, li, _) ->\n    line i ppf \"Texp_field\\n\";\n    expression i ppf e;\n    longident i ppf li\n  | Texp_setfield (e1, li, _, e2) ->\n    line i ppf \"Texp_setfield\\n\";\n    expression i ppf e1;\n    longident i ppf li;\n    expression i ppf e2\n  | Texp_array l ->\n    line i ppf \"Texp_array\\n\";\n    list i expression ppf l\n  | Texp_ifthenelse (e1, e2, eo) ->\n    line i ppf 
\"Texp_ifthenelse\\n\";\n    expression i ppf e1;\n    expression i ppf e2;\n    option i expression ppf eo\n  | Texp_sequence (e1, e2) ->\n    line i ppf \"Texp_sequence\\n\";\n    expression i ppf e1;\n    expression i ppf e2\n  | Texp_while (e1, e2) ->\n    line i ppf \"Texp_while\\n\";\n    expression i ppf e1;\n    expression i ppf e2\n  | Texp_for (s, _, e1, e2, df, e3) ->\n    line i ppf \"Texp_for \\\"%a\\\" %a\\n\" fmt_ident s fmt_direction_flag df;\n    expression i ppf e1;\n    expression i ppf e2;\n    expression i ppf e3\n  | Texp_send (e, Tmeth_name s, eo) ->\n    line i ppf \"Texp_send \\\"%s\\\"\\n\" s;\n    expression i ppf e;\n    option i expression ppf eo\n  | Texp_new _ | Texp_setinstvar _ | Texp_override _ -> ()\n  | Texp_letmodule (s, _, me, e) ->\n    line i ppf \"Texp_letmodule \\\"%a\\\"\\n\" fmt_ident s;\n    module_expr i ppf me;\n    expression i ppf e\n  | Texp_letexception (cd, e) ->\n    line i ppf \"Texp_letexception\\n\";\n    extension_constructor i ppf cd;\n    expression i ppf e\n  | Texp_assert e ->\n    line i ppf \"Texp_assert\";\n    expression i ppf e\n  | Texp_lazy e ->\n    line i ppf \"Texp_lazy\";\n    expression i ppf e\n  | Texp_object () -> ()\n  | Texp_pack me ->\n    line i ppf \"Texp_pack\";\n    module_expr i ppf me\n  | Texp_unreachable -> line i ppf \"Texp_unreachable\"\n  | Texp_extension_constructor (li, _) ->\n    line i ppf \"Texp_extension_constructor %a\" fmt_longident li\n\nand value_description i ppf x =\n  line i ppf \"value_description %a %a\\n\" fmt_ident x.val_id fmt_location\n    x.val_loc;\n  attributes i ppf x.val_attributes;\n  core_type (i + 1) ppf x.val_desc;\n  list (i + 1) string ppf x.val_prim\n\nand type_parameter i ppf (x, _variance) = core_type i ppf x\n\nand type_declaration i ppf x =\n  line i ppf \"type_declaration %a %a\\n\" fmt_ident x.typ_id fmt_location\n    x.typ_loc;\n  attributes i ppf x.typ_attributes;\n  let i = i + 1 in\n  line i ppf \"ptype_params =\\n\";\n  list (i + 1) 
type_parameter ppf x.typ_params;\n  line i ppf \"ptype_cstrs =\\n\";\n  list (i + 1) core_type_x_core_type_x_location ppf x.typ_cstrs;\n  line i ppf \"ptype_kind =\\n\";\n  type_kind (i + 1) ppf x.typ_kind;\n  line i ppf \"ptype_private = %a\\n\" fmt_private_flag x.typ_private;\n  line i ppf \"ptype_manifest =\\n\";\n  option (i + 1) core_type ppf x.typ_manifest\n\nand type_kind i ppf x =\n  match x with\n  | Ttype_abstract -> line i ppf \"Ttype_abstract\\n\"\n  | Ttype_variant l ->\n    line i ppf \"Ttype_variant\\n\";\n    list (i + 1) constructor_decl ppf l\n  | Ttype_record l ->\n    line i ppf \"Ttype_record\\n\";\n    list (i + 1) label_decl ppf l\n  | Ttype_open -> line i ppf \"Ttype_open\\n\"\n\nand type_extension i ppf x =\n  line i ppf \"type_extension\\n\";\n  attributes i ppf x.tyext_attributes;\n  let i = i + 1 in\n  line i ppf \"ptyext_path = %a\\n\" fmt_path x.tyext_path;\n  line i ppf \"ptyext_params =\\n\";\n  list (i + 1) type_parameter ppf x.tyext_params;\n  line i ppf \"ptyext_constructors =\\n\";\n  list (i + 1) extension_constructor ppf x.tyext_constructors;\n  line i ppf \"ptyext_private = %a\\n\" fmt_private_flag x.tyext_private\n\nand extension_constructor i ppf x =\n  line i ppf \"extension_constructor %a\\n\" fmt_location x.ext_loc;\n  attributes i ppf x.ext_attributes;\n  let i = i + 1 in\n  line i ppf \"pext_name = \\\"%a\\\"\\n\" fmt_ident x.ext_id;\n  line i ppf \"pext_kind =\\n\";\n  extension_constructor_kind (i + 1) ppf x.ext_kind\n\nand extension_constructor_kind i ppf x =\n  match x with\n  | Text_decl (a, r) ->\n    line i ppf \"Text_decl\\n\";\n    constructor_arguments (i + 1) ppf a;\n    option (i + 1) core_type ppf r\n  | Text_rebind (p, _) ->\n    line i ppf \"Text_rebind\\n\";\n    line (i + 1) ppf \"%a\\n\" fmt_path p\n\nand class_type i ppf x =\n  line i ppf \"class_type %a\\n\" fmt_location x.cltyp_loc;\n  attributes i ppf x.cltyp_attributes;\n  let i = i + 1 in\n  match x.cltyp_desc with\n  | Tcty_constr (li, _, l) 
->\n    line i ppf \"Tcty_constr %a\\n\" fmt_path li;\n    list i core_type ppf l\n  | Tcty_signature cs ->\n    line i ppf \"Tcty_signature\\n\";\n    class_signature i ppf cs\n  | Tcty_arrow (l, co, cl) ->\n    line i ppf \"Tcty_arrow\\n\";\n    arg_label i ppf l;\n    core_type i ppf co;\n    class_type i ppf cl\n  | Tcty_open (ovf, m, _, _, e) ->\n    line i ppf \"Tcty_open %a \\\"%a\\\"\\n\" fmt_override_flag ovf fmt_path m;\n    class_type i ppf e\n\nand class_signature i ppf {csig_self = ct; csig_fields = l} =\n  line i ppf \"class_signature\\n\";\n  core_type (i + 1) ppf ct;\n  list (i + 1) class_type_field ppf l\n\nand class_type_field i ppf x =\n  line i ppf \"class_type_field %a\\n\" fmt_location x.ctf_loc;\n  let i = i + 1 in\n  attributes i ppf x.ctf_attributes;\n  match x.ctf_desc with\n  | Tctf_inherit ct ->\n    line i ppf \"Tctf_inherit\\n\";\n    class_type i ppf ct\n  | Tctf_val (s, mf, vf, ct) ->\n    line i ppf \"Tctf_val \\\"%s\\\" %a %a\\n\" s fmt_mutable_flag mf fmt_virtual_flag\n      vf;\n    core_type (i + 1) ppf ct\n  | Tctf_method (s, pf, vf, ct) ->\n    line i ppf \"Tctf_method \\\"%s\\\" %a %a\\n\" s fmt_private_flag pf\n      fmt_virtual_flag vf;\n    core_type (i + 1) ppf ct\n  | Tctf_constraint (ct1, ct2) ->\n    line i ppf \"Tctf_constraint\\n\";\n    core_type (i + 1) ppf ct1;\n    core_type (i + 1) ppf ct2\n  | Tctf_attribute (s, arg) ->\n    line i ppf \"Tctf_attribute \\\"%s\\\"\\n\" s.txt;\n    Printast.payload i ppf arg\n\nand class_type_declaration i ppf x =\n  line i ppf \"class_type_declaration %a\\n\" fmt_location x.ci_loc;\n  let i = i + 1 in\n  line i ppf \"pci_virt = %a\\n\" fmt_virtual_flag x.ci_virt;\n  line i ppf \"pci_params =\\n\";\n  list (i + 1) type_parameter ppf x.ci_params;\n  line i ppf \"pci_name = \\\"%s\\\"\\n\" x.ci_id_name.txt;\n  line i ppf \"pci_expr =\\n\";\n  class_type (i + 1) ppf x.ci_expr\n\nand module_type i ppf x =\n  line i ppf \"module_type %a\\n\" fmt_location x.mty_loc;\n  attributes i ppf 
x.mty_attributes;\n  let i = i + 1 in\n  match x.mty_desc with\n  | Tmty_ident (li, _) -> line i ppf \"Tmty_ident %a\\n\" fmt_path li\n  | Tmty_alias (li, _) -> line i ppf \"Tmty_alias %a\\n\" fmt_path li\n  | Tmty_signature s ->\n    line i ppf \"Tmty_signature\\n\";\n    signature i ppf s\n  | Tmty_functor (s, _, mt1, mt2) ->\n    line i ppf \"Tmty_functor \\\"%a\\\"\\n\" fmt_ident s;\n    Misc.may (module_type i ppf) mt1;\n    module_type i ppf mt2\n  | Tmty_with (mt, l) ->\n    line i ppf \"Tmty_with\\n\";\n    module_type i ppf mt;\n    list i longident_x_with_constraint ppf l\n  | Tmty_typeof m ->\n    line i ppf \"Tmty_typeof\\n\";\n    module_expr i ppf m\n\nand signature i ppf x = list i signature_item ppf x.sig_items\n\nand signature_item i ppf x =\n  line i ppf \"signature_item %a\\n\" fmt_location x.sig_loc;\n  let i = i + 1 in\n  match x.sig_desc with\n  | Tsig_value vd ->\n    line i ppf \"Tsig_value\\n\";\n    value_description i ppf vd\n  | Tsig_type (rf, l) ->\n    line i ppf \"Tsig_type %a\\n\" fmt_rec_flag rf;\n    list i type_declaration ppf l\n  | Tsig_typext e ->\n    line i ppf \"Tsig_typext\\n\";\n    type_extension i ppf e\n  | Tsig_exception ext ->\n    line i ppf \"Tsig_exception\\n\";\n    extension_constructor i ppf ext\n  | Tsig_module md ->\n    line i ppf \"Tsig_module \\\"%a\\\"\\n\" fmt_ident md.md_id;\n    attributes i ppf md.md_attributes;\n    module_type i ppf md.md_type\n  | Tsig_recmodule decls ->\n    line i ppf \"Tsig_recmodule\\n\";\n    list i module_declaration ppf decls\n  | Tsig_modtype x ->\n    line i ppf \"Tsig_modtype \\\"%a\\\"\\n\" fmt_ident x.mtd_id;\n    attributes i ppf x.mtd_attributes;\n    modtype_declaration i ppf x.mtd_type\n  | Tsig_open od ->\n    line i ppf \"Tsig_open %a %a\\n\" fmt_override_flag od.open_override fmt_path\n      od.open_path;\n    attributes i ppf od.open_attributes\n  | Tsig_include incl ->\n    line i ppf \"Tsig_include\\n\";\n    attributes i ppf incl.incl_attributes;\n    
module_type i ppf incl.incl_mod\n  | Tsig_class () -> ()\n  | Tsig_class_type l ->\n    line i ppf \"Tsig_class_type\\n\";\n    list i class_type_declaration ppf l\n  | Tsig_attribute (s, arg) ->\n    line i ppf \"Tsig_attribute \\\"%s\\\"\\n\" s.txt;\n    Printast.payload i ppf arg\n\nand module_declaration i ppf md =\n  line i ppf \"%a\" fmt_ident md.md_id;\n  attributes i ppf md.md_attributes;\n  module_type (i + 1) ppf md.md_type\n\nand module_binding i ppf x =\n  line i ppf \"%a\\n\" fmt_ident x.mb_id;\n  attributes i ppf x.mb_attributes;\n  module_expr (i + 1) ppf x.mb_expr\n\nand modtype_declaration i ppf = function\n  | None -> line i ppf \"#abstract\"\n  | Some mt -> module_type (i + 1) ppf mt\n\nand with_constraint i ppf x =\n  match x with\n  | Twith_type td ->\n    line i ppf \"Twith_type\\n\";\n    type_declaration (i + 1) ppf td\n  | Twith_typesubst td ->\n    line i ppf \"Twith_typesubst\\n\";\n    type_declaration (i + 1) ppf td\n  | Twith_module (li, _) -> line i ppf \"Twith_module %a\\n\" fmt_path li\n  | Twith_modsubst (li, _) -> line i ppf \"Twith_modsubst %a\\n\" fmt_path li\n\nand module_expr i ppf x =\n  line i ppf \"module_expr %a\\n\" fmt_location x.mod_loc;\n  attributes i ppf x.mod_attributes;\n  let i = i + 1 in\n  match x.mod_desc with\n  | Tmod_ident (li, _) -> line i ppf \"Tmod_ident %a\\n\" fmt_path li\n  | Tmod_structure s ->\n    line i ppf \"Tmod_structure\\n\";\n    structure i ppf s\n  | Tmod_functor (s, _, mt, me) ->\n    line i ppf \"Tmod_functor \\\"%a\\\"\\n\" fmt_ident s;\n    Misc.may (module_type i ppf) mt;\n    module_expr i ppf me\n  | Tmod_apply (me1, me2, _) ->\n    line i ppf \"Tmod_apply\\n\";\n    module_expr i ppf me1;\n    module_expr i ppf me2\n  | Tmod_constraint (me, _, Tmodtype_explicit mt, _) ->\n    line i ppf \"Tmod_constraint\\n\";\n    module_expr i ppf me;\n    module_type i ppf mt\n  | Tmod_constraint (me, _, Tmodtype_implicit, _) -> module_expr i ppf me\n  | Tmod_unpack (e, _) ->\n    line i ppf 
\"Tmod_unpack\\n\";\n    expression i ppf e\n\nand structure i ppf x = list i structure_item ppf x.str_items\n\nand structure_item i ppf x =\n  line i ppf \"structure_item %a\\n\" fmt_location x.str_loc;\n  let i = i + 1 in\n  match x.str_desc with\n  | Tstr_eval (e, attrs) ->\n    line i ppf \"Tstr_eval\\n\";\n    attributes i ppf attrs;\n    expression i ppf e\n  | Tstr_value (rf, l) ->\n    line i ppf \"Tstr_value %a\\n\" fmt_rec_flag rf;\n    list i value_binding ppf l\n  | Tstr_primitive vd ->\n    line i ppf \"Tstr_primitive\\n\";\n    value_description i ppf vd\n  | Tstr_type (rf, l) ->\n    line i ppf \"Tstr_type %a\\n\" fmt_rec_flag rf;\n    list i type_declaration ppf l\n  | Tstr_typext te ->\n    line i ppf \"Tstr_typext\\n\";\n    type_extension i ppf te\n  | Tstr_exception ext ->\n    line i ppf \"Tstr_exception\\n\";\n    extension_constructor i ppf ext\n  | Tstr_module x ->\n    line i ppf \"Tstr_module\\n\";\n    module_binding i ppf x\n  | Tstr_recmodule bindings ->\n    line i ppf \"Tstr_recmodule\\n\";\n    list i module_binding ppf bindings\n  | Tstr_modtype x ->\n    line i ppf \"Tstr_modtype \\\"%a\\\"\\n\" fmt_ident x.mtd_id;\n    attributes i ppf x.mtd_attributes;\n    modtype_declaration i ppf x.mtd_type\n  | Tstr_open od ->\n    line i ppf \"Tstr_open %a %a\\n\" fmt_override_flag od.open_override fmt_path\n      od.open_path;\n    attributes i ppf od.open_attributes\n  | Tstr_class () -> ()\n  | Tstr_class_type l ->\n    line i ppf \"Tstr_class_type\\n\";\n    list i class_type_declaration ppf (List.map (fun (_, _, cl) -> cl) l)\n  | Tstr_include incl ->\n    line i ppf \"Tstr_include\";\n    attributes i ppf incl.incl_attributes;\n    module_expr i ppf incl.incl_mod\n  | Tstr_attribute (s, arg) ->\n    line i ppf \"Tstr_attribute \\\"%s\\\"\\n\" s.txt;\n    Printast.payload i ppf arg\n\nand longident_x_with_constraint i ppf (li, _, wc) =\n  line i ppf \"%a\\n\" fmt_path li;\n  with_constraint (i + 1) ppf wc\n\nand 
core_type_x_core_type_x_location i ppf (ct1, ct2, l) =\n  line i ppf \"<constraint> %a\\n\" fmt_location l;\n  core_type (i + 1) ppf ct1;\n  core_type (i + 1) ppf ct2\n\nand constructor_decl i ppf\n    {cd_id; cd_name = _; cd_args; cd_res; cd_loc; cd_attributes} =\n  line i ppf \"%a\\n\" fmt_location cd_loc;\n  line (i + 1) ppf \"%a\\n\" fmt_ident cd_id;\n  attributes i ppf cd_attributes;\n  constructor_arguments (i + 1) ppf cd_args;\n  option (i + 1) core_type ppf cd_res\n\nand constructor_arguments i ppf = function\n  | Cstr_tuple l -> list i core_type ppf l\n  | Cstr_record l -> list i label_decl ppf l\n\nand label_decl i ppf\n    {ld_id; ld_name = _; ld_mutable; ld_type; ld_loc; ld_attributes} =\n  line i ppf \"%a\\n\" fmt_location ld_loc;\n  attributes i ppf ld_attributes;\n  line (i + 1) ppf \"%a\\n\" fmt_mutable_flag ld_mutable;\n  line (i + 1) ppf \"%a\" fmt_ident ld_id;\n  core_type (i + 1) ppf ld_type\n\nand longident_x_pattern i ppf (li, _, p) =\n  line i ppf \"%a\\n\" fmt_longident li;\n  pattern (i + 1) ppf p\n\nand case i ppf {c_lhs; c_guard; c_rhs} =\n  line i ppf \"<case>\\n\";\n  pattern (i + 1) ppf c_lhs;\n  (match c_guard with\n  | None -> ()\n  | Some g ->\n    line (i + 1) ppf \"<when>\\n\";\n    expression (i + 2) ppf g);\n  expression (i + 1) ppf c_rhs\n\nand value_binding i ppf x =\n  line i ppf \"<def>\\n\";\n  attributes (i + 1) ppf x.vb_attributes;\n  pattern (i + 1) ppf x.vb_pat;\n  expression (i + 1) ppf x.vb_expr\n\nand record_field i ppf = function\n  | _, Overridden (li, e) ->\n    line i ppf \"%a\\n\" fmt_longident li;\n    expression (i + 1) ppf e\n  | _, Kept _ -> line i ppf \"<kept>\"\n\nand label_x_expression i ppf (l, e) =\n  line i ppf \"<arg>\\n\";\n  arg_label (i + 1) ppf l;\n  match e with\n  | None -> ()\n  | Some e -> expression (i + 1) ppf e\n\nand label_x_bool_x_core_type_list i ppf x =\n  match x with\n  | Ttag (l, attrs, b, ctl) ->\n    line i ppf \"Ttag \\\"%s\\\" %s\\n\" l.txt (string_of_bool b);\n    attributes (i 
+ 1) ppf attrs;\n    list (i + 1) core_type ppf ctl\n  | Tinherit ct ->\n    line i ppf \"Tinherit\\n\";\n    core_type (i + 1) ppf ct\n\nlet interface ppf x = list 0 signature_item ppf x.sig_items\n\nlet implementation ppf x = list 0 structure_item ppf x.str_items\n\nlet implementation_with_coercion ppf (x, _) = implementation ppf x\n"
  },
  {
    "path": "analysis/vendor/ml/printtyped.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*              Damien Doligez, projet Para, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1999 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Typedtree\nopen Format\n\nval interface : formatter -> signature -> unit\nval implementation : formatter -> structure -> unit\n\nval implementation_with_coercion :\n  formatter -> structure * module_coercion -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/rec_check.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype error = Illegal_letrec_expr\n\nexception Error of Location.t * error\n\nmodule Rec_context = struct\n  type access =\n    | Dereferenced\n        (** [Dereferenced] indicates that the value (not just the address) of a\n            variable is accessed *)\n    | Guarded\n        (** [Guarded] indicates that the address of a variable is used in a\n            guarded context, i.e. under a constructor. A variable that is\n            dereferenced within a function body or lazy context is also\n            considered guarded. *)\n    | Unguarded\n        (** [Unguarded] indicates that the address of a variable is used in an\n            unguarded context, i.e. not under a constructor. 
*)\n\n  (** [guard] represents guarded contexts such as [C -] and [{l = -}] *)\n  let guard : access -> access = function\n    | Dereferenced -> Dereferenced\n    | Guarded -> Guarded\n    | Unguarded -> Guarded\n\n  (** [inspect] represents elimination contexts such as [match - with cases],\n      [e -] and [- e] *)\n  let inspect : access -> access = function\n    | Dereferenced -> Dereferenced\n    | Guarded -> Dereferenced\n    | Unguarded -> Dereferenced\n\n  (** [delay] represents contexts that delay evaluation such as [fun p -> -] or\n      [lazy -] *)\n  let delay : access -> access = function\n    | Dereferenced -> Guarded\n    | Guarded -> Guarded\n    | Unguarded -> Guarded\n\n  module Use : sig\n    type t\n\n    val guard : t -> t\n    (** An expression appears in a guarded context *)\n\n    val discard : t -> t\n    (** The address of a subexpression is not used, but may be bound *)\n\n    val inspect : t -> t\n    (** The value of a subexpression is inspected with match, application, etc.\n    *)\n\n    val delay : t -> t\n    (** An expression appears under 'fun p ->' or 'lazy' *)\n\n    val join : t -> t -> t\n    (** Combine the access information of two expressions *)\n\n    val single : Ident.t -> access -> t\n    (** Combine the access information of two expressions *)\n\n    val empty : t\n    (** No variables are accessed in an expression; it might be a constant or a\n        global identifier *)\n\n    val unguarded : t -> Ident.t list\n    (** The list of identifiers that are used in an unguarded context *)\n\n    val dependent : t -> Ident.t list\n    (** The list of all used identifiers *)\n  end = struct\n    module M = Map.Make (Ident)\n\n    type t = access M.t\n    (** A \"t\" maps each rec-bound variable to an access status *)\n\n    let map f tbl = M.map f tbl\n\n    let guard t = map guard t\n\n    let inspect t = map inspect t\n\n    let delay t = map delay t\n\n    let discard = guard\n\n    let prec x y =\n      match (x, y) 
with\n      | Dereferenced, _ | _, Dereferenced -> Dereferenced\n      | Unguarded, _ | _, Unguarded -> Unguarded\n      | _ -> Guarded\n\n    let join x y =\n      M.fold\n        (fun id v tbl ->\n          let v' = try M.find id tbl with Not_found -> Guarded in\n          M.add id (prec v v') tbl)\n        x y\n\n    let single id access = M.add id access M.empty\n\n    let empty = M.empty\n\n    let list_matching p t =\n      let r = ref [] in\n      M.iter (fun id v -> if p v then r := id :: !r) t;\n      !r\n\n    let unguarded =\n      list_matching (function\n        | Unguarded | Dereferenced -> true\n        | _ -> false)\n\n    let dependent = list_matching (function _ -> true)\n  end\n\n  module Env = struct\n    (* A typing environment maps identifiers to types *)\n    type env = Use.t Ident.tbl\n\n    let empty = Ident.empty\n\n    let join x y =\n      let r =\n        Ident.fold_all\n          (fun id v tbl ->\n            let v' = try Ident.find_same id tbl with Not_found -> Use.empty in\n            Ident.add id (Use.join v v') tbl)\n          x y\n      in\n      r\n  end\nend\n\nlet rec pattern_variables : Typedtree.pattern -> Ident.t list =\n fun pat ->\n  match pat.pat_desc with\n  | Tpat_any -> []\n  | Tpat_var (id, _) -> [id]\n  | Tpat_alias (pat, id, _) -> id :: pattern_variables pat\n  | Tpat_constant _ -> []\n  | Tpat_tuple pats -> List.concat (List.map pattern_variables pats)\n  | Tpat_construct (_, _, pats) -> List.concat (List.map pattern_variables pats)\n  | Tpat_variant (_, Some pat, _) -> pattern_variables pat\n  | Tpat_variant (_, None, _) -> []\n  | Tpat_record (fields, _) ->\n    List.concat (List.map (fun (_, _, p) -> pattern_variables p) fields)\n  | Tpat_array pats -> List.concat (List.map pattern_variables pats)\n  | Tpat_or (l, r, _) -> pattern_variables l @ pattern_variables r\n  | Tpat_lazy p -> pattern_variables p\n\nopen Rec_context\nopen Asttypes\nopen Typedtree\n\nlet build_unguarded_env : Ident.t list -> Env.env =\n 
fun idlist ->\n  List.fold_left\n    (fun env id -> Ident.add id (Use.single id Unguarded) env)\n    Env.empty idlist\n\nlet is_ref : Types.value_description -> bool = function\n  | {\n      Types.val_kind =\n        Types.Val_prim {Primitive.prim_name = \"%makemutable\"; prim_arity = 1};\n    } ->\n    true\n  | _ -> false\n\ntype sd = Static | Dynamic\n\nlet rec classify_expression : Typedtree.expression -> sd =\n fun exp ->\n  match exp.exp_desc with\n  | Texp_let (_, _, e)\n  | Texp_letmodule (_, _, _, e)\n  | Texp_sequence (_, e)\n  | Texp_letexception (_, e) ->\n    classify_expression e\n  | Texp_ident _ | Texp_for _ | Texp_constant _ | Texp_new _ | Texp_instvar _\n  | Texp_tuple _ | Texp_array _ | Texp_construct _ | Texp_variant _\n  | Texp_record _ | Texp_setfield _ | Texp_while _ | Texp_setinstvar _\n  | Texp_pack _ | Texp_object _ | Texp_function _ | Texp_lazy _\n  | Texp_unreachable | Texp_extension_constructor _ ->\n    Static\n  | Texp_apply ({exp_desc = Texp_ident (_, _, vd)}, _) when is_ref vd -> Static\n  | Texp_apply _ | Texp_match _ | Texp_ifthenelse _ | Texp_send _ | Texp_field _\n  | Texp_assert _ | Texp_try _ | Texp_override _ ->\n    Dynamic\n\nlet rec expression : Env.env -> Typedtree.expression -> Use.t =\n fun env exp ->\n  match exp.exp_desc with\n  | Texp_ident (pth, _, _) -> path env pth\n  | Texp_let (rec_flag, bindings, body) ->\n    let env', ty = value_bindings rec_flag env bindings in\n    (* Here and in other binding constructs 'discard' is used in a\n       similar way to the way it's used in sequence: uses are\n       propagated, but unguarded access are not. 
*)\n    Use.join (Use.discard ty) (expression (Env.join env env') body)\n  | Texp_letmodule (x, _, m, e) ->\n    let ty = modexp env m in\n    Use.join (Use.discard ty) (expression (Ident.add x ty env) e)\n  | Texp_match (e, val_cases, exn_cases, _) ->\n    let t = expression env e in\n    let exn_case env {Typedtree.c_rhs} = expression env c_rhs in\n    let cs = list (case ~scrutinee:t) env val_cases\n    and es = list exn_case env exn_cases in\n    Use.(join cs es)\n  | Texp_for (_, _, e1, e2, _, e3) ->\n    Use.(\n      join\n        (join (inspect (expression env e1)) (inspect (expression env e2)))\n        (* The body is evaluated, but not used, and not available\n           for inclusion in another value *)\n        (discard (expression env e3)))\n  | Texp_constant _ -> Use.empty\n  | Texp_new _ -> assert false\n  | Texp_instvar _ -> Use.empty\n  | Texp_apply ({exp_desc = Texp_ident (_, _, vd)}, [(_, Some arg)])\n    when is_ref vd ->\n    Use.guard (expression env arg)\n  | Texp_apply (e, args) ->\n    let arg env (_, eo) = option expression env eo in\n    Use.(join (inspect (expression env e)) (inspect (list arg env args)))\n  | Texp_tuple exprs -> Use.guard (list expression env exprs)\n  | Texp_array exprs -> Use.guard (list expression env exprs)\n  | Texp_construct (_, desc, exprs) ->\n    let access_constructor =\n      match desc.cstr_tag with\n      | Cstr_extension (pth, _) -> Use.inspect (path env pth)\n      | _ -> Use.empty\n    in\n    let use =\n      match desc.cstr_tag with\n      | Cstr_unboxed -> fun x -> x\n      | Cstr_constant _ | Cstr_block _ | Cstr_extension _ -> Use.guard\n    in\n    Use.join access_constructor (use (list expression env exprs))\n  | Texp_variant (_, eo) -> Use.guard (option expression env eo)\n  | Texp_record {fields = es; extended_expression = eo; representation = rep} ->\n    let use =\n      match rep with\n      | Record_unboxed _ -> fun x -> x\n      | Record_float_unused -> assert false\n      | 
Record_optional_labels _ | Record_regular | Record_inlined _\n      | Record_extension ->\n        Use.guard\n    in\n    let field env = function\n      | _, Kept _ -> Use.empty\n      | _, Overridden (_, e) -> expression env e\n    in\n    Use.join (use (array field env es)) (option expression env eo)\n  | Texp_ifthenelse (cond, ifso, ifnot) ->\n    Use.(\n      join\n        (inspect (expression env cond))\n        (join (expression env ifso) (option expression env ifnot)))\n  | Texp_setfield (e1, _, _, e2) ->\n    Use.(join (inspect (expression env e1)) (inspect (expression env e2)))\n  | Texp_sequence (e1, e2) ->\n    Use.(join (discard (expression env e1)) (expression env e2))\n  | Texp_while (e1, e2) ->\n    Use.(join (inspect (expression env e1)) (discard (expression env e2)))\n  | Texp_send (e1, _, eo) ->\n    Use.(\n      join (inspect (expression env e1)) (inspect (option expression env eo)))\n  | Texp_field (e, _, _) -> Use.(inspect (expression env e))\n  | Texp_setinstvar () -> assert false\n  | Texp_letexception (_, e) -> expression env e\n  | Texp_assert e -> Use.inspect (expression env e)\n  | Texp_pack m -> modexp env m\n  | Texp_object () -> assert false\n  | Texp_try (e, cases) ->\n    (* This is more permissive than the old check. *)\n    let case env {Typedtree.c_rhs} = expression env c_rhs in\n    Use.join (expression env e) (list case env cases)\n  | Texp_override () -> assert false\n  | Texp_function {cases} ->\n    Use.delay (list (case ~scrutinee:Use.empty) env cases)\n  | Texp_lazy e -> (\n    match Typeopt.classify_lazy_argument e with\n    | `Constant_or_function | `Identifier _ | `Float -> expression env e\n    | `Other -> Use.delay (expression env e))\n  | Texp_unreachable -> Use.empty\n  | Texp_extension_constructor _ -> Use.empty\n\nand option : 'a. (Env.env -> 'a -> Use.t) -> Env.env -> 'a option -> Use.t =\n fun f env -> Misc.Stdlib.Option.value_default (f env) ~default:Use.empty\n\nand list : 'a. 
(Env.env -> 'a -> Use.t) -> Env.env -> 'a list -> Use.t =\n fun f env ->\n  List.fold_left (fun typ item -> Use.join (f env item) typ) Use.empty\n\nand array : 'a. (Env.env -> 'a -> Use.t) -> Env.env -> 'a array -> Use.t =\n fun f env ->\n  Array.fold_left (fun typ item -> Use.join (f env item) typ) Use.empty\n\nand modexp : Env.env -> Typedtree.module_expr -> Use.t =\n fun env m ->\n  match m.mod_desc with\n  | Tmod_ident (pth, _) -> path env pth\n  | Tmod_structure s -> structure env s\n  | Tmod_functor (_, _, _, e) -> Use.delay (modexp env e)\n  | Tmod_apply (f, p, _) ->\n    Use.(join (inspect (modexp env f)) (inspect (modexp env p)))\n  | Tmod_constraint (m, _, _, Tcoerce_none) -> modexp env m\n  | Tmod_constraint (m, _, _, _) -> Use.inspect (modexp env m)\n  | Tmod_unpack (e, _) -> expression env e\n\nand path : Env.env -> Path.t -> Use.t =\n fun env pth ->\n  match pth with\n  | Path.Pident x -> ( try Ident.find_same x env with Not_found -> Use.empty)\n  | Path.Pdot (t, _, _) -> Use.inspect (path env t)\n  | Path.Papply (f, p) -> Use.(inspect (join (path env f) (path env p)))\n\nand structure : Env.env -> Typedtree.structure -> Use.t =\n fun env s ->\n  let _, ty =\n    List.fold_left\n      (fun (env, ty) item ->\n        let env', ty' = structure_item env item in\n        (Env.join env env', Use.join ty ty'))\n      (env, Use.empty) s.str_items\n  in\n  Use.guard ty\n\nand structure_item : Env.env -> Typedtree.structure_item -> Env.env * Use.t =\n fun env s ->\n  match s.str_desc with\n  | Tstr_eval (e, _) -> (Env.empty, expression env e)\n  | Tstr_value (rec_flag, valbinds) -> value_bindings rec_flag env valbinds\n  | Tstr_module {mb_id; mb_expr} ->\n    let ty = modexp env mb_expr in\n    (Ident.add mb_id ty Env.empty, ty)\n  | Tstr_recmodule mbs ->\n    let modbind env {mb_expr} = modexp env mb_expr in\n    (* Over-approximate: treat any access as a use *)\n    (Env.empty, Use.inspect (list modbind env mbs))\n  | Tstr_primitive _ -> (Env.empty, 
Use.empty)\n  | Tstr_type _ -> (Env.empty, Use.empty)\n  | Tstr_typext _ -> (Env.empty, Use.empty)\n  | Tstr_exception _ -> (Env.empty, Use.empty)\n  | Tstr_modtype _ -> (Env.empty, Use.empty)\n  | Tstr_open _ -> (Env.empty, Use.empty)\n  | Tstr_class () -> (Env.empty, Use.empty)\n  | Tstr_class_type _ -> (Env.empty, Use.empty)\n  | Tstr_include inc ->\n    (* This is a kind of projection.  There's no need to add\n       anything to the environment because everything is used in\n       the type component already *)\n    (Env.empty, Use.inspect (modexp env inc.incl_mod))\n  | Tstr_attribute _ -> (Env.empty, Use.empty)\n\nand case : Env.env -> Typedtree.case -> scrutinee:Use.t -> Use.t =\n fun env {Typedtree.c_lhs; c_guard; c_rhs} ~scrutinee:ty ->\n  let ty =\n    if is_destructuring_pattern c_lhs then Use.inspect ty else Use.discard ty\n    (* as in 'let' *)\n  in\n  let vars = pattern_variables c_lhs in\n  let env = List.fold_left (fun env id -> Ident.add id ty env) env vars in\n  Use.(\n    join ty\n      (join (expression env c_rhs) (inspect (option expression env c_guard))))\n\nand value_bindings :\n    rec_flag -> Env.env -> Typedtree.value_binding list -> Env.env * Use.t =\n fun rec_flag env bindings ->\n  match rec_flag with\n  | Recursive ->\n    (* Approximation:\n          let rec y =\n            let rec x1 = e1\n                and x2 = e2\n              in e\n       treated as\n          let rec y =\n             let rec x = (e1, e2)[x1:=fst x, x2:=snd x] in\n                e[x1:=fst x, x2:=snd x]\n       Further, use the fact that x1,x2 cannot occur unguarded in e1, e2\n       to avoid recursive trickiness.\n    *)\n    let ids, ty =\n      List.fold_left\n        (fun (pats, tys) {vb_pat = p; vb_expr = e} ->\n          (pattern_variables p @ pats, Use.join (expression env e) tys))\n        ([], Use.empty) bindings\n    in\n    ( List.fold_left\n        (fun (env : Env.env) (id : Ident.t) -> Ident.add id ty env)\n        Env.empty ids,\n      ty )\n  
| Nonrecursive ->\n    List.fold_left\n      (fun (env2, ty) binding ->\n        let env', ty' = value_binding env binding in\n        (Env.join env2 env', Use.join ty ty'))\n      (Env.empty, Use.empty) bindings\n\nand value_binding : Env.env -> Typedtree.value_binding -> Env.env * Use.t =\n (* NB: returns new environment only *)\n fun env {vb_pat; vb_expr} ->\n  let vars = pattern_variables vb_pat in\n  let ty = expression env vb_expr in\n  let ty = if is_destructuring_pattern vb_pat then Use.inspect ty else ty in\n  (List.fold_left (fun env id -> Ident.add id ty env) Env.empty vars, ty)\n\nand is_destructuring_pattern : Typedtree.pattern -> bool =\n fun pat ->\n  match pat.pat_desc with\n  | Tpat_any -> false\n  | Tpat_var (_, _) -> false\n  | Tpat_alias (pat, _, _) -> is_destructuring_pattern pat\n  | Tpat_constant _ -> true\n  | Tpat_tuple _ -> true\n  | Tpat_construct (_, _, _) -> true\n  | Tpat_variant _ -> true\n  | Tpat_record (_, _) -> true\n  | Tpat_array _ -> true\n  | Tpat_or (l, r, _) ->\n    is_destructuring_pattern l || is_destructuring_pattern r\n  | Tpat_lazy _ -> true\n\nlet check_recursive_expression idlist expr =\n  let ty = expression (build_unguarded_env idlist) expr in\n  match (Use.unguarded ty, Use.dependent ty, classify_expression expr) with\n  | _ :: _, _, _ (* The expression inspects rec-bound variables *)\n  | _, _ :: _, Dynamic ->\n    (* The expression depends on rec-bound variables\n       and its size is unknown *)\n    raise (Error (expr.exp_loc, Illegal_letrec_expr))\n  | [], _, Static (* The expression has known size *) | [], [], Dynamic ->\n    (* The expression has unknown size,\n       but does not depend on rec-bound variables *)\n    ()\n\nlet check_recursive_bindings valbinds =\n  let ids =\n    List.concat (List.map (fun b -> pattern_variables b.vb_pat) valbinds)\n  in\n  Ext_list.iter valbinds (fun {vb_expr} ->\n      match vb_expr.exp_desc with\n      | Texp_record\n          {fields = [|(_, Overridden (_, {exp_desc = 
Texp_function _}))|]}\n      | Texp_function _ ->\n        ()\n      (*TODO: add uncurried function too*)\n      | _ -> check_recursive_expression ids vb_expr)\n\nlet report_error ppf = function\n  | Illegal_letrec_expr ->\n    Format.fprintf ppf\n      \"This kind of expression is not allowed as right-hand side of `let rec'\"\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, err) -> Some (Location.error_of_printer loc report_error err)\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/rec_check.mli",
    "content": "val check_recursive_bindings : Typedtree.value_binding list -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/record_coercion.ml",
    "content": "let check_record_fields ?repr1 ?repr2 (fields1 : Types.label_declaration list)\n    (fields2 : Types.label_declaration list) =\n  let field_is_optional id repr =\n    match repr with\n    | Some (Types.Record_optional_labels lbls) -> List.mem (Ident.name id) lbls\n    | _ -> false\n  in\n  let violation = ref false in\n  let label_decl_sub (acc1, acc2) (ld2 : Types.label_declaration) =\n    match\n      Ext_list.find_first fields1 (fun ld1 -> ld1.ld_id.name = ld2.ld_id.name)\n    with\n    | Some ld1 ->\n      if field_is_optional ld1.ld_id repr1 <> field_is_optional ld2.ld_id repr2\n      then\n        (* optional field can't be modified *)\n        violation := true;\n      let get_as (({txt}, payload) : Parsetree.attribute) =\n        if txt = \"as\" then Ast_payload.is_single_string payload else None\n      in\n      let get_as_name (ld : Types.label_declaration) =\n        match Ext_list.filter_map ld.ld_attributes get_as with\n        | [] -> ld.ld_id.name\n        | (s, _) :: _ -> s\n      in\n      if get_as_name ld1 <> get_as_name ld2 then violation := true;\n      (ld1.ld_type :: acc1, ld2.ld_type :: acc2)\n    | None ->\n      (* field must be present *)\n      violation := true;\n      (acc1, acc2)\n  in\n  let tl1, tl2 = List.fold_left label_decl_sub ([], []) fields2 in\n  (!violation, tl1, tl2)\n"
  },
  {
    "path": "analysis/vendor/ml/record_type_spread.ml",
    "content": "module StringMap = Map.Make (String)\n\nlet t_equals t1 t2 = t1.Types.level = t2.Types.level && t1.id = t2.id\n\nlet substitute_types ~type_map (t : Types.type_expr) =\n  if StringMap.is_empty type_map then t\n  else\n    let apply_substitution type_variable_name t =\n      match StringMap.find_opt type_variable_name type_map with\n      | None -> t\n      | Some substituted_type -> substituted_type\n    in\n    let rec loop (t : Types.type_expr) =\n      match t.desc with\n      | Tlink t -> {t with desc = Tlink (loop t)}\n      | Tvar (Some type_variable_name) ->\n        apply_substitution type_variable_name t\n      | Tvar None -> t\n      | Tunivar _ -> t\n      | Tconstr (path, args, _memo) ->\n        {t with desc = Tconstr (path, args |> List.map loop, ref Types.Mnil)}\n      | Tsubst t -> {t with desc = Tsubst (loop t)}\n      | Tvariant rd -> {t with desc = Tvariant (row_desc rd)}\n      | Tnil -> t\n      | Tarrow (lbl, t1, t2, c) ->\n        {t with desc = Tarrow (lbl, loop t1, loop t2, c)}\n      | Ttuple tl -> {t with desc = Ttuple (tl |> List.map loop)}\n      | Tobject (t, r) -> {t with desc = Tobject (loop t, r)}\n      | Tfield (n, k, t1, t2) -> {t with desc = Tfield (n, k, loop t1, loop t2)}\n      | Tpoly (t, []) -> loop t\n      | Tpoly (t, tl) -> {t with desc = Tpoly (loop t, tl |> List.map loop)}\n      | Tpackage (p, l, tl) ->\n        {t with desc = Tpackage (p, l, tl |> List.map loop)}\n    and row_desc (rd : Types.row_desc) =\n      let row_fields =\n        rd.row_fields |> List.map (fun (l, rf) -> (l, row_field rf))\n      in\n      let row_more = loop rd.row_more in\n      let row_name =\n        match rd.row_name with\n        | None -> None\n        | Some (p, tl) -> Some (p, tl |> List.map loop)\n      in\n      {rd with row_fields; row_more; row_name}\n    and row_field (rf : Types.row_field) =\n      match rf with\n      | Rpresent None -> rf\n      | Rpresent (Some t) -> Rpresent (Some (loop t))\n      | Reither 
(b1, tl, b2, r) -> Reither (b1, tl |> List.map loop, b2, r)\n      | Rabsent -> Rabsent\n    in\n    loop t\n\nlet substitute_type_vars (type_vars : (string * Types.type_expr) list)\n    (typ : Types.type_expr) =\n  let type_map =\n    type_vars\n    |> List.fold_left\n         (fun acc (tvar_name, tvar_typ) -> StringMap.add tvar_name tvar_typ acc)\n         StringMap.empty\n  in\n  substitute_types ~type_map typ\n\nlet has_type_spread (lbls : Typedtree.label_declaration list) =\n  lbls\n  |> List.exists (fun (l : Typedtree.label_declaration) ->\n         match l with\n         | {ld_name = {txt = \"...\"}} -> true\n         | _ -> false)\n\nlet extract_type_vars (type_params : Types.type_expr list)\n    (typ : Types.type_expr) =\n  (* The type variables applied to the record spread itself. *)\n  let applied_type_vars =\n    match Ctype.repr typ with\n    | {desc = Tpoly ({desc = Tconstr (_, tvars, _)}, _)} -> tvars\n    | _ -> []\n  in\n  if List.length type_params = List.length applied_type_vars then\n    (* Track which type param in the record we're spreading\n       belongs to which type variable applied to the spread itself. *)\n    let paired_type_vars = List.combine type_params applied_type_vars in\n    paired_type_vars\n    |> List.filter_map (fun (t, applied_tvar) ->\n           match t.Types.desc with\n           | Tvar (Some tname) -> Some (tname, applied_tvar)\n           | _ -> None)\n  else []\n"
  },
  {
    "path": "analysis/vendor/ml/rescript_cpp.ml",
    "content": "(* Copyright (C) 2021- Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\ntype directive_type =\n  | Dir_type_bool\n  | Dir_type_float\n  | Dir_type_int\n  | Dir_type_string\n  | Dir_type_null\n\ntype pp_error =\n  | Unterminated_paren_in_conditional\n  | Unterminated_if\n  | Unterminated_else\n  | Unexpected_token_in_conditional\n  | Expect_hash_then_in_conditional\n  | Illegal_semver of string\n  | Unexpected_directive\n  | Conditional_expr_expected_type of directive_type * directive_type\n\nexception Pp_error of pp_error * Location.t\n\ntype directive_value =\n  | Dir_bool of bool\n  | Dir_float of float\n  | Dir_int of int\n  | Dir_string of string\n  | Dir_null\n\nlet type_of_directive x =\n  match x with\n  | Dir_bool _ -> Dir_type_bool\n  | Dir_float _ -> Dir_type_float\n  | Dir_int _ -> Dir_type_int\n  | Dir_string _ -> Dir_type_string\n  | Dir_null -> Dir_type_null\n\nlet string_of_type_directive x =\n  match x with\n  | Dir_type_bool -> \"bool\"\n  | Dir_type_float -> \"float\"\n  | Dir_type_int -> \"int\"\n  | Dir_type_string -> \"string\"\n  | Dir_type_null -> \"null\"\n\nlet prepare_pp_error loc = function\n  | Unterminated_if -> Location.errorf ~loc \"#if not terminated\"\n  | Unterminated_else -> Location.errorf ~loc \"#else not terminated\"\n  | Unexpected_directive -> Location.errorf ~loc \"Unexpected directive\"\n  | Unexpected_token_in_conditional ->\n    Location.errorf ~loc \"Unexpected token in conditional predicate\"\n  | Unterminated_paren_in_conditional ->\n    Location.errorf ~loc \"Unterminated parens in conditional predicate\"\n  | Expect_hash_then_in_conditional ->\n    Location.errorf ~loc \"Expect `then` after conditional predicate\"\n  | Conditional_expr_expected_type (a, b) ->\n    Location.errorf ~loc \"Conditional expression type mismatch (%s,%s)\"\n      (string_of_type_directive a)\n      (string_of_type_directive b)\n  | Illegal_semver s ->\n    Location.errorf ~loc \"Illegal semantic version string %s\" s\n\nlet () =\n  Location.register_error_of_exn (function\n    | Pp_error (err, loc) -> 
Some (prepare_pp_error loc err)\n    | _ -> None)\n\nlet assert_same_type lexbuf x y =\n  let lhs = type_of_directive x in\n  let rhs = type_of_directive y in\n  if lhs <> rhs then\n    raise\n      (Pp_error (Conditional_expr_expected_type (lhs, rhs), Location.curr lexbuf))\n  else y\n\nlet directive_built_in_values = Hashtbl.create 51\n\nlet replace_directive_built_in_value k v =\n  Hashtbl.replace directive_built_in_values k v\n\nlet remove_directive_built_in_value k =\n  Hashtbl.replace directive_built_in_values k Dir_null\n\nlet replace_directive_bool k v =\n  Hashtbl.replace directive_built_in_values k (Dir_bool v)\n\nlet replace_directive_string k v =\n  Hashtbl.replace directive_built_in_values k (Dir_string v)\n\nlet () =\n  (* Note we use {!Config} instead of {!Sys} becasue\n     we want to overwrite in some cases with the\n     same stdlib\n  *)\n  let version =\n    Config.version\n    (* so that it can be overridden*)\n  in\n  replace_directive_built_in_value \"OCAML_VERSION\" (Dir_string version);\n  replace_directive_built_in_value \"OS_TYPE\" (Dir_string Sys.os_type)\n\nlet find_directive_built_in_value k = Hashtbl.find directive_built_in_values k\n\nlet iter_directive_built_in_value f = Hashtbl.iter f directive_built_in_values\n(* let iter_directive_built_in_value f = Hashtbl.iter f directive_built_in_values *)\n\n(*\n     {[\n       # semver 0 \"12\";;\n       - : int * int * int * string = (12, 0, 0, \"\");;\n       # semver 0 \"12.3\";;\n       - : int * int * int * string = (12, 3, 0, \"\");;\n         semver 0 \"12.3.10\";;\n       - : int * int * int * string = (12, 3, 10, \"\");;\n       # semver 0 \"12.3.10+x\";;\n       - : int * int * int * string = (12, 3, 10, \"+x\")\n     ]}\n  *)\nlet zero = Char.code '0'\n\nlet dot = Char.code '.'\n\nlet semantic_version_parse str start last_index =\n  let rec aux start acc last_index =\n    if start <= last_index then\n      let c = Char.code (String.unsafe_get str start) in\n      if c = dot then 
(acc, start + 1) (* consume [4.] instead of [4]*)\n      else\n        let v = c - zero in\n        if v >= 0 && v <= 9 then aux (start + 1) ((acc * 10) + v) last_index\n        else (acc, start)\n    else (acc, start)\n  in\n  let major, major_end = aux start 0 last_index in\n  let minor, minor_end = aux major_end 0 last_index in\n  let patch, patch_end = aux minor_end 0 last_index in\n  let additional = String.sub str patch_end (last_index - patch_end + 1) in\n  ((major, minor, patch), additional)\n\n(** {[\n      semver Location.none \"1.2.3\" \"~1.3.0\" = false;;\n      semver Location.none \"1.2.3\" \"^1.3.0\" = true;;\n      semver Location.none \"1.2.3\" \">1.3.0\" = false;;\n      semver Location.none \"1.2.3\" \">=1.3.0\" = false;;\n      semver Location.none \"1.2.3\" \"<1.3.0\" = true;;\n      semver Location.none \"1.2.3\" \"<=1.3.0\" = true\n    ]} *)\nlet semver loc lhs str =\n  let last_index = String.length str - 1 in\n  if last_index < 0 then raise (Pp_error (Illegal_semver str, loc))\n  else\n    let pred, (((major, minor, _patch) as version), _) =\n      let v = String.unsafe_get str 0 in\n      match v with\n      | '>' ->\n        if last_index = 0 then raise (Pp_error (Illegal_semver str, loc))\n        else if String.unsafe_get str 1 = '=' then\n          (`Ge, semantic_version_parse str 2 last_index)\n        else (`Gt, semantic_version_parse str 1 last_index)\n      | '<' ->\n        if last_index = 0 then raise (Pp_error (Illegal_semver str, loc))\n        else if String.unsafe_get str 1 = '=' then\n          (`Le, semantic_version_parse str 2 last_index)\n        else (`Lt, semantic_version_parse str 1 last_index)\n      | '^' -> (`Compatible, semantic_version_parse str 1 last_index)\n      | '~' -> (`Approximate, semantic_version_parse str 1 last_index)\n      | _ -> (`Exact, semantic_version_parse str 0 last_index)\n    in\n    let ((l_major, l_minor, _l_patch) as lversion), _ =\n      semantic_version_parse lhs 0 (String.length lhs - 
1)\n    in\n    match pred with\n    | `Ge -> lversion >= version\n    | `Gt -> lversion > version\n    | `Le -> lversion <= version\n    | `Lt -> lversion < version\n    | `Approximate -> major = l_major && minor = l_minor\n    | `Compatible -> major = l_major\n    | `Exact -> lversion = version\n\nlet pp_directive_value fmt (x : directive_value) =\n  match x with\n  | Dir_bool b -> Format.pp_print_bool fmt b\n  | Dir_int b -> Format.pp_print_int fmt b\n  | Dir_float b -> Format.pp_print_float fmt b\n  | Dir_string s -> Format.fprintf fmt \"%S\" s\n  | Dir_null -> Format.pp_print_string fmt \"null\"\n\nlet list_variables fmt =\n  iter_directive_built_in_value (fun s dir_value ->\n      Format.fprintf fmt \"@[%s@ %a@]@.\" s pp_directive_value dir_value)\n\nlet defined str =\n  match find_directive_built_in_value str with\n  | Dir_null -> false\n  | _ -> true\n  | exception _ -> (\n    try\n      ignore @@ Sys.getenv str;\n      true\n    with _ -> false)\n\nlet query _loc str =\n  match find_directive_built_in_value str with\n  | Dir_null -> Dir_bool false\n  | v -> v\n  | exception Not_found -> (\n    match Sys.getenv str with\n    | v -> (\n      try Dir_bool (bool_of_string v)\n      with _ -> (\n        try Dir_int (int_of_string v)\n        with _ -> ( try Dir_float (float_of_string v) with _ -> Dir_string v)))\n    | exception Not_found -> Dir_bool false)\n\nlet define_key_value key v =\n  if String.length key > 0 && Char.uppercase_ascii key.[0] = key.[0] then (\n    replace_directive_built_in_value key\n      (* NEED Sync up across {!lexer.mll} {!bspp.ml} and here,\n         TODO: put it in {!lexer.mll}\n      *)\n      (try Dir_bool (bool_of_string v)\n       with _ -> (\n         try Dir_int (int_of_string v)\n         with _ -> (\n           try Dir_float (float_of_string v) with _ -> Dir_string v)));\n    true)\n  else false\n\nlet cvt_int_literal s = -int_of_string (\"-\" ^ s)\n\nlet value_of_token loc (t : Parser.token) =\n  match t with\n  | INT (i, 
None) -> Dir_int (cvt_int_literal i)\n  | STRING (s, _) -> Dir_string s\n  | FLOAT (s, None) -> Dir_float (float_of_string s)\n  | TRUE -> Dir_bool true\n  | FALSE -> Dir_bool false\n  | UIDENT s -> query loc s\n  | _ -> raise (Pp_error (Unexpected_token_in_conditional, loc))\n\nlet directive_parse (token_with_comments : Lexing.lexbuf -> Parser.token) lexbuf\n    =\n  let look_ahead = ref None in\n  let token () : Parser.token =\n    let v = !look_ahead in\n    match v with\n    | Some v ->\n      look_ahead := None;\n      v\n    | None ->\n      let rec skip () =\n        match token_with_comments lexbuf with\n        | COMMENT _ | DOCSTRING _ -> skip ()\n        | EOF -> raise (Pp_error (Unterminated_if, Location.curr lexbuf))\n        | t -> t\n      in\n      skip ()\n  in\n  let push e =\n    (* INVARIANT: only look at most one token *)\n    assert (!look_ahead = None);\n    look_ahead := Some e\n  in\n  let rec token_op calc ~no lhs =\n    match token () with\n    | (LESS | GREATER | INFIXOP0 \"<=\" | INFIXOP0 \">=\" | EQUAL | INFIXOP0 \"<>\")\n      as op ->\n      let f =\n        match op with\n        | LESS -> ( < )\n        | GREATER -> ( > )\n        | INFIXOP0 \"<=\" -> ( <= )\n        | EQUAL -> ( = )\n        | INFIXOP0 \"<>\" -> ( <> )\n        | _ -> assert false\n      in\n      let curr_loc = Location.curr lexbuf in\n      let rhs = value_of_token curr_loc (token ()) in\n      (not calc) || f lhs (assert_same_type lexbuf lhs rhs)\n    | INFIXOP0 \"=~\" -> (\n      (not calc)\n      ||\n      match lhs with\n      | Dir_string s -> (\n        let curr_loc = Location.curr lexbuf in\n        let rhs = value_of_token curr_loc (token ()) in\n        match rhs with\n        | Dir_string rhs -> semver curr_loc s rhs\n        | _ ->\n          raise\n            (Pp_error\n               ( Conditional_expr_expected_type\n                   (Dir_type_string, type_of_directive lhs),\n                 Location.curr lexbuf )))\n      | _ ->\n        
raise\n          (Pp_error\n             ( Conditional_expr_expected_type\n                 (Dir_type_string, type_of_directive lhs),\n               Location.curr lexbuf )))\n    | e -> no e\n  and parse_or calc : bool = parse_or_aux calc (parse_and calc)\n  and\n      (* a || (b || (c || d))*)\n      parse_or_aux calc v : bool =\n    (* let l = v  in *)\n    match token () with\n    | BARBAR ->\n      let b = parse_or (calc && not v) in\n      v || b\n    | e ->\n      push e;\n      v\n  and parse_and calc = parse_and_aux calc (parse_relation calc)\n  and parse_and_aux calc v =\n    (* a && (b && (c && d)) *)\n    (* let l = v  in *)\n    match token () with\n    | AMPERAMPER ->\n      let b = parse_and (calc && v) in\n      v && b\n    | e ->\n      push e;\n      v\n  and parse_relation (calc : bool) : bool =\n    let curr_token = token () in\n    let curr_loc = Location.curr lexbuf in\n    match curr_token with\n    | TRUE -> true\n    | FALSE -> false\n    | UIDENT v ->\n      let value_v = query curr_loc v in\n      token_op calc\n        ~no:(fun e ->\n          push e;\n          match value_v with\n          | Dir_bool b -> b\n          | _ ->\n            let ty = type_of_directive value_v in\n            raise\n              (Pp_error\n                 (Conditional_expr_expected_type (Dir_type_bool, ty), curr_loc)))\n        value_v\n    | INT (v, None) ->\n      let num_v = cvt_int_literal v in\n      token_op calc\n        ~no:(fun e ->\n          push e;\n          num_v <> 0)\n        (Dir_int num_v)\n    | FLOAT (v, None) ->\n      token_op calc\n        ~no:(fun _e ->\n          raise\n            (Pp_error\n               ( Conditional_expr_expected_type (Dir_type_bool, Dir_type_float),\n                 curr_loc )))\n        (Dir_float (float_of_string v))\n    | STRING (v, _) ->\n      token_op calc\n        ~no:(fun _e ->\n          raise\n            (Pp_error\n               ( Conditional_expr_expected_type (Dir_type_bool, 
Dir_type_string),\n                 curr_loc )))\n        (Dir_string v)\n    | LIDENT ((\"defined\" | \"undefined\") as r) -> (\n      let t = token () in\n      let loc = Location.curr lexbuf in\n      match t with\n      | UIDENT s ->\n        (not calc) || if r.[0] = 'u' then not @@ defined s else defined s\n      | _ -> raise (Pp_error (Unexpected_token_in_conditional, loc)))\n    | LPAREN -> (\n      let v = parse_or calc in\n      match token () with\n      | RPAREN -> v\n      | _ ->\n        raise\n          (Pp_error (Unterminated_paren_in_conditional, Location.curr lexbuf)))\n    | _ -> raise (Pp_error (Unexpected_token_in_conditional, curr_loc))\n  in\n  let v = parse_or true in\n  match token () with\n  | THEN | EOL -> v\n  | _ ->\n    raise (Pp_error (Expect_hash_then_in_conditional, Location.curr lexbuf))\n\ntype dir_conditional = Dir_if_true | Dir_if_false | Dir_out\n\n(* let string_of_dir_conditional (x : dir_conditional) = *)\n(*   match x with  *)\n(*   | Dir_if_true -> \"Dir_if_true\" *)\n(*   | Dir_if_false -> \"Dir_if_false\" *)\n(*   | Dir_out -> \"Dir_out\" *)\n\nlet if_then_else = ref Dir_out\n\n(* store the token after hash, [# token]\n   when we see `#if` we do the processing immediately\n   when we see #method, we produce `HASH` token and save `method`\n   token so that the next lexing produce the right one.\n*)\nlet sharp_look_ahead = ref None\n\nlet update_if_then_else v =\n  (* Format.fprintf Format.err_formatter \"@[update %s \\n@]@.\" (string_of_dir_conditional v); *)\n  if_then_else := v\n\nlet at_bol lexbuf =\n  let pos = Lexing.lexeme_start_p lexbuf in\n  pos.pos_cnum = pos.pos_bol\n\n(* skip to #else | #end | #elif *)\nlet rec skip_from_if_false (token_with_comments : Lexing.lexbuf -> Parser.token)\n    cont lexbuf =\n  let token = token_with_comments lexbuf in\n  if token = EOF then raise (Pp_error (Unterminated_if, Location.curr lexbuf))\n  else if token = HASH && at_bol lexbuf then\n    let token = token_with_comments lexbuf 
in\n    match token with\n    | END | LIDENT \"endif\" ->\n      update_if_then_else Dir_out;\n      cont lexbuf\n    | ELSE ->\n      update_if_then_else Dir_if_false;\n      cont lexbuf\n    | IF -> raise (Pp_error (Unexpected_directive, Location.curr lexbuf))\n    | LIDENT \"elif\" when directive_parse token_with_comments lexbuf ->\n      update_if_then_else Dir_if_true;\n      cont lexbuf\n    | _ -> skip_from_if_false token_with_comments cont lexbuf\n  else skip_from_if_false token_with_comments cont lexbuf\n\nlet interpret_directive_cont lexbuf ~cont\n    ~(token_with_comments : Lexing.lexbuf -> Parser.token) look_ahead =\n  (* current state *)\n  let if_then_else = !if_then_else in\n  match (token_with_comments lexbuf, if_then_else) with\n  | IF, Dir_out ->\n    if directive_parse token_with_comments lexbuf then (\n      update_if_then_else Dir_if_true (* Next state: ELSE *);\n      cont lexbuf)\n    else skip_from_if_false token_with_comments cont lexbuf\n  | LIDENT ((\"ifndef\" | \"ifdef\") as s), Dir_out ->\n    let rec token () =\n      match token_with_comments lexbuf with\n      | COMMENT _ | DOCSTRING _ -> token ()\n      | EOF -> raise (Pp_error (Unterminated_if, Location.curr lexbuf))\n      | t -> t\n    in\n    let t0 = token () in\n    let t =\n      match t0 with\n      | UIDENT t -> t\n      | _ ->\n        raise (Pp_error (Unexpected_token_in_conditional, Location.curr lexbuf))\n    in\n    let t1 = token () in\n    (match t1 with\n    | THEN | EOL -> ()\n    | _ ->\n      raise (Pp_error (Expect_hash_then_in_conditional, Location.curr lexbuf)));\n    let boolean = defined t = (s = \"ifdef\") in\n    if boolean then (\n      update_if_then_else Dir_if_true (* Next state: ELSE *);\n      cont lexbuf)\n    else skip_from_if_false token_with_comments cont lexbuf\n  | (IF | LIDENT \"ifndef\" | LIDENT \"ifdef\"), (Dir_if_false | Dir_if_true) ->\n    raise (Pp_error (Unexpected_directive, Location.curr lexbuf))\n  | LIDENT \"elif\", (Dir_if_false | 
Dir_out) ->\n    (* when the predicate is false, it will continue eating `elif` *)\n    raise (Pp_error (Unexpected_directive, Location.curr lexbuf))\n  | ((LIDENT \"elif\" | ELSE) as token), Dir_if_true ->\n    (* looking for #end, however, it can not see #if anymore,\n       we need do some validation *)\n    let rec skip_from_if_true else_seen =\n      let token = token_with_comments lexbuf in\n      if token = EOF then\n        raise (Pp_error (Unterminated_else, Location.curr lexbuf))\n      else if token = HASH && at_bol lexbuf then\n        let token = token_with_comments lexbuf in\n        match token with\n        | END | LIDENT \"endif\" ->\n          update_if_then_else Dir_out;\n          cont lexbuf\n        | IF -> raise (Pp_error (Unexpected_directive, Location.curr lexbuf))\n        | ELSE ->\n          if else_seen then\n            raise (Pp_error (Unexpected_directive, Location.curr lexbuf))\n          else skip_from_if_true true\n        | LIDENT \"elif\" when else_seen ->\n          raise (Pp_error (Unexpected_directive, Location.curr lexbuf))\n        | _ -> skip_from_if_true else_seen\n      else skip_from_if_true else_seen\n    in\n    skip_from_if_true (token = ELSE)\n  | ELSE, Dir_if_false | ELSE, Dir_out ->\n    raise (Pp_error (Unexpected_directive, Location.curr lexbuf))\n  | (END | LIDENT \"endif\"), (Dir_if_false | Dir_if_true) ->\n    update_if_then_else Dir_out;\n    cont lexbuf\n  | (END | LIDENT \"endif\"), Dir_out ->\n    raise (Pp_error (Unexpected_directive, Location.curr lexbuf))\n  | token, (Dir_if_true | Dir_if_false | Dir_out) -> look_ahead token\n\nlet interpret_directive lexbuf ~cont ~token_with_comments : Parser.token =\n  interpret_directive_cont lexbuf ~cont ~token_with_comments\n    (fun (token : 'a) : 'a ->\n      sharp_look_ahead := Some token;\n      HASH)\n\nlet eof_check lexbuf =\n  if !if_then_else <> Dir_out then\n    if !if_then_else = Dir_if_true then\n      raise (Pp_error (Unterminated_if, Location.curr 
lexbuf))\n    else raise (Pp_error (Unterminated_else, Location.curr lexbuf))\n\nlet init () =\n  sharp_look_ahead := None;\n  update_if_then_else Dir_out\n\nlet check_sharp_look_ahead action : Parser.token =\n  match !sharp_look_ahead with\n  | None -> action ()\n  | Some token ->\n    sharp_look_ahead := None;\n    token\n\nlet rec filter_directive ~(token_with_comments : Lexing.lexbuf -> Parser.token)\n    pos acc lexbuf : (int * int) list =\n  match token_with_comments lexbuf with\n  | HASH when at_bol lexbuf ->\n    (* ^[start_pos]#if ... #then^[end_pos] *)\n    let start_pos = Lexing.lexeme_start lexbuf in\n    interpret_directive_cont lexbuf\n      ~cont:(fun lexbuf ->\n        filter_directive (Lexing.lexeme_end lexbuf) ~token_with_comments\n          ((pos, start_pos) :: acc) lexbuf)\n      ~token_with_comments\n      (fun _token -> filter_directive pos acc lexbuf ~token_with_comments)\n  | EOF -> (pos, Lexing.lexeme_end lexbuf) :: acc\n  | _ -> filter_directive ~token_with_comments pos acc lexbuf\n\nlet filter_directive_from_lexbuf lexbuf ~token_with_comments =\n  List.rev (filter_directive 0 [] lexbuf ~token_with_comments)\n"
  },
  {
    "path": "analysis/vendor/ml/rescript_cpp.mli",
    "content": "(* Copyright (C) 2021- Hongbo Zhang, Authors of ReScript\n * \n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
*)\n\nval at_bol : Lexing.lexbuf -> bool\n\nval interpret_directive :\n  Lexing.lexbuf ->\n  cont:(Lexing.lexbuf -> Parser.token) ->\n  token_with_comments:(Lexing.lexbuf -> Parser.token) ->\n  Parser.token\n\nval eof_check : Lexing.lexbuf -> unit\n\nval init : unit -> unit\n\nval check_sharp_look_ahead : (unit -> Parser.token) -> Parser.token\n\n(* Methods below are used for cpp, they are not needed by the compiler patches*)\nval remove_directive_built_in_value : string -> unit\n\nval replace_directive_string : string -> string -> unit\n\nval replace_directive_bool : string -> bool -> unit\n\nval define_key_value : string -> string -> bool\n(** @return false means failed to define *)\n\nval list_variables : Format.formatter -> unit\n\nval filter_directive_from_lexbuf :\n  Lexing.lexbuf ->\n  token_with_comments:(Lexing.lexbuf -> Parser.token) ->\n  (int * int) list\n"
  },
  {
    "path": "analysis/vendor/ml/stypes.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*           Damien Doligez, projet Moscova, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 2003 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Recording and dumping (partial) type information *)\n\n(*\n  We record all types in a list as they are created.\n  This means we can dump type information even if type inference fails,\n  which is extremely important, since type information is most\n  interesting in case of errors.\n*)\n\nopen Annot\nopen Lexing\nopen Location\nopen Typedtree\n\nlet output_int oc i = output_string oc (string_of_int i)\n\ntype annotation =\n  | Ti_pat of pattern\n  | Ti_expr of expression\n  | Ti_class of unit\n  | Ti_mod of module_expr\n  | An_call of Location.t * Annot.call\n  | An_ident of Location.t * string * Annot.ident\n\nlet get_location ti =\n  match ti with\n  | Ti_pat p -> p.pat_loc\n  | Ti_expr e -> e.exp_loc\n  | Ti_class () -> assert false\n  | Ti_mod m -> m.mod_loc\n  | An_call (l, _k) -> l\n  | An_ident (l, _s, _k) -> l\n\nlet annotations = ref ([] : annotation list)\nlet 
phrases = ref ([] : Location.t list)\n\nlet record ti =\n  if !Clflags.annotations && not (get_location ti).Location.loc_ghost then\n    annotations := ti :: !annotations\n\nlet record_phrase loc = if !Clflags.annotations then phrases := loc :: !phrases\n\n(* comparison order:\n   the intervals are sorted by order of increasing upper bound\n   same upper bound -> sorted by decreasing lower bound\n*)\nlet cmp_loc_inner_first loc1 loc2 =\n  match compare loc1.loc_end.pos_cnum loc2.loc_end.pos_cnum with\n  | 0 -> compare loc2.loc_start.pos_cnum loc1.loc_start.pos_cnum\n  | x -> x\nlet cmp_ti_inner_first ti1 ti2 =\n  cmp_loc_inner_first (get_location ti1) (get_location ti2)\n\nlet print_position pp pos =\n  if pos = dummy_pos then output_string pp \"--\"\n  else (\n    output_char pp '\\\"';\n    output_string pp (String.escaped pos.pos_fname);\n    output_string pp \"\\\" \";\n    output_int pp pos.pos_lnum;\n    output_char pp ' ';\n    output_int pp pos.pos_bol;\n    output_char pp ' ';\n    output_int pp pos.pos_cnum)\n\nlet print_location pp loc =\n  print_position pp loc.loc_start;\n  output_char pp ' ';\n  print_position pp loc.loc_end\n\nlet sort_filter_phrases () =\n  let ph = List.sort (fun x y -> cmp_loc_inner_first y x) !phrases in\n  let rec loop accu cur l =\n    match l with\n    | [] -> accu\n    | loc :: t ->\n      if\n        cur.loc_start.pos_cnum <= loc.loc_start.pos_cnum\n        && cur.loc_end.pos_cnum >= loc.loc_end.pos_cnum\n      then loop accu cur t\n      else loop (loc :: accu) loc t\n  in\n  phrases := loop [] Location.none ph\n\nlet rec printtyp_reset_maybe loc =\n  match !phrases with\n  | cur :: t when cur.loc_start.pos_cnum <= loc.loc_start.pos_cnum ->\n    Printtyp.reset ();\n    phrases := t;\n    printtyp_reset_maybe loc\n  | _ -> ()\n\nlet call_kind_string k =\n  match k with\n  | Tail -> \"tail\"\n  | Stack -> \"stack\"\n  | Inline -> \"inline\"\n\nlet print_ident_annot pp str k =\n  match k with\n  | Idef l ->\n    output_string 
pp \"def \";\n    output_string pp str;\n    output_char pp ' ';\n    print_location pp l;\n    output_char pp '\\n'\n  | Iref_internal l ->\n    output_string pp \"int_ref \";\n    output_string pp str;\n    output_char pp ' ';\n    print_location pp l;\n    output_char pp '\\n'\n  | Iref_external ->\n    output_string pp \"ext_ref \";\n    output_string pp str;\n    output_char pp '\\n'\n\n(* The format of the annotation file is documented in emacs/caml-types.el. *)\n\nlet print_info pp prev_loc ti =\n  match ti with\n  | Ti_class _ | Ti_mod _ -> prev_loc\n  | Ti_pat {pat_loc = loc; pat_type = typ; pat_env = env}\n  | Ti_expr {exp_loc = loc; exp_type = typ; exp_env = env} ->\n    if loc <> prev_loc then (\n      print_location pp loc;\n      output_char pp '\\n');\n    output_string pp \"type(\\n\";\n    printtyp_reset_maybe loc;\n    Printtyp.mark_loops typ;\n    Format.pp_print_string Format.str_formatter \"  \";\n    Printtyp.wrap_printing_env env (fun () ->\n        Printtyp.type_sch Format.str_formatter typ);\n    Format.pp_print_newline Format.str_formatter ();\n    let s = Format.flush_str_formatter () in\n    output_string pp s;\n    output_string pp \")\\n\";\n    loc\n  | An_call (loc, k) ->\n    if loc <> prev_loc then (\n      print_location pp loc;\n      output_char pp '\\n');\n    output_string pp \"call(\\n  \";\n    output_string pp (call_kind_string k);\n    output_string pp \"\\n)\\n\";\n    loc\n  | An_ident (loc, str, k) ->\n    if loc <> prev_loc then (\n      print_location pp loc;\n      output_char pp '\\n');\n    output_string pp \"ident(\\n  \";\n    print_ident_annot pp str k;\n    output_string pp \")\\n\";\n    loc\n\nlet get_info () =\n  let info = List.fast_sort cmp_ti_inner_first !annotations in\n  annotations := [];\n  info\n\nlet dump filename =\n  if !Clflags.annotations then (\n    let do_dump _temp_filename pp =\n      let info = get_info () in\n      sort_filter_phrases ();\n      ignore (List.fold_left (print_info pp) 
Location.none info)\n    in\n    (match filename with\n    | None -> do_dump \"\" stdout\n    | Some filename ->\n      Misc.output_to_file_via_temporary ~mode:[Open_text] filename do_dump);\n    phrases := [])\n  else annotations := []\n"
  },
  {
    "path": "analysis/vendor/ml/stypes.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*           Damien Doligez, projet Moscova, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 2003 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Recording and dumping (partial) type information *)\n\n(* Clflags.save_types must be true *)\n\nopen Typedtree\n\ntype annotation =\n  | Ti_pat of pattern\n  | Ti_expr of expression\n  | Ti_class of unit\n  | Ti_mod of module_expr\n  | An_call of Location.t * Annot.call\n  | An_ident of Location.t * string * Annot.ident\n\nval record : annotation -> unit\nval record_phrase : Location.t -> unit\nval dump : string option -> unit\n\nval get_location : annotation -> Location.t\nval get_info : unit -> annotation list\n"
  },
  {
    "path": "analysis/vendor/ml/subst.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Substitutions *)\n\nopen Misc\nopen Path\nopen Types\nopen Btype\n\ntype type_replacement =\n  | Path of Path.t\n  | Type_function of {params: type_expr list; body: type_expr}\n\nmodule PathMap = Map.Make (Path)\n\ntype t = {\n  types: type_replacement PathMap.t;\n  modules: Path.t PathMap.t;\n  modtypes: (Ident.t, module_type) Tbl.t;\n  for_saving: bool;\n}\n\nlet identity =\n  {\n    types = PathMap.empty;\n    modules = PathMap.empty;\n    modtypes = Tbl.empty;\n    for_saving = false;\n  }\n\nlet add_type_path id p s = {s with types = PathMap.add id (Path p) s.types}\nlet add_type id p s = add_type_path (Pident id) p s\n\nlet add_type_function id ~params ~body s =\n  {s with types = PathMap.add id (Type_function {params; body}) s.types}\n\nlet add_module_path id p s = {s with modules = PathMap.add id p s.modules}\nlet add_module id p s = add_module_path (Pident id) p 
s\n\nlet add_modtype id ty s = {s with modtypes = Tbl.add id ty s.modtypes}\n\nlet for_saving s = {s with for_saving = true}\n\nlet loc s x =\n  if s.for_saving && not !Clflags.keep_locs then Location.none else x\n\nlet remove_loc =\n  let open Ast_mapper in\n  {default_mapper with location = (fun _this _loc -> Location.none)}\n\nlet is_not_doc = function\n  | {Location.txt = \"ocaml.doc\"}, _ -> false\n  | {Location.txt = \"ocaml.text\"}, _ -> false\n  | {Location.txt = \"doc\"}, _ -> false\n  | {Location.txt = \"text\"}, _ -> false\n  | _ -> true\n\nlet attrs s x =\n  let x =\n    if s.for_saving && not !Clflags.keep_docs then Ext_list.filter x is_not_doc\n    else x\n  in\n  if s.for_saving && not !Clflags.keep_locs then\n    remove_loc.Ast_mapper.attributes remove_loc x\n  else x\n\nlet rec module_path s path =\n  try PathMap.find path s.modules\n  with Not_found -> (\n    match path with\n    | Pident _ -> path\n    | Pdot (p, n, pos) -> Pdot (module_path s p, n, pos)\n    | Papply (p1, p2) -> Papply (module_path s p1, module_path s p2))\n\nlet modtype_path s = function\n  | Pident id as p -> (\n    try\n      match Tbl.find id s.modtypes with\n      | Mty_ident p -> p\n      | _ -> fatal_error \"Subst.modtype_path\"\n    with Not_found -> p)\n  | Pdot (p, n, pos) -> Pdot (module_path s p, n, pos)\n  | Papply _ -> fatal_error \"Subst.modtype_path\"\n\nlet type_path s path =\n  match PathMap.find path s.types with\n  | Path p -> p\n  | Type_function _ -> assert false\n  | exception Not_found -> (\n    match path with\n    | Pident _ -> path\n    | Pdot (p, n, pos) -> Pdot (module_path s p, n, pos)\n    | Papply _ -> fatal_error \"Subst.type_path\")\n\nlet type_path s p =\n  match Path.constructor_typath p with\n  | Regular p -> type_path s p\n  | Cstr (ty_path, cstr) -> Pdot (type_path s ty_path, cstr, nopos)\n  | LocalExt _ -> type_path s p\n  | Ext (p, cstr) -> Pdot (module_path s p, cstr, nopos)\n\nlet to_subst_by_type_function s p =\n  match PathMap.find p 
s.types with\n  | Path _ -> false\n  | Type_function _ -> true\n  | exception Not_found -> false\n\n(* Special type ids for saved signatures *)\n\nlet new_id = ref (-1)\nlet reset_for_saving () = new_id := -1\n\nlet newpersty desc =\n  decr new_id;\n  {desc; level = generic_level; id = !new_id}\n\n(* ensure that all occurrences of 'Tvar None' are physically shared *)\nlet tvar_none = Tvar None\nlet tunivar_none = Tunivar None\nlet norm = function\n  | Tvar None -> tvar_none\n  | Tunivar None -> tunivar_none\n  | d -> d\n\nlet ctype_apply_env_empty = ref (fun _ -> assert false)\n\n(* Similar to [Ctype.nondep_type_rec]. *)\nlet rec typexp s ty =\n  let ty = repr ty in\n  match ty.desc with\n  | (Tvar _ | Tunivar _) as desc ->\n    if s.for_saving || ty.id < 0 then (\n      let ty' =\n        if s.for_saving then newpersty (norm desc) else newty2 ty.level desc\n      in\n      save_desc ty desc;\n      ty.desc <- Tsubst ty';\n      ty')\n    else ty\n  | Tsubst ty -> ty\n  | Tfield (m, k, _t1, _t2)\n    when (not s.for_saving) && m = dummy_method\n         && field_kind_repr k <> Fabsent\n         && (repr ty).level < generic_level ->\n    (* do not copy the type of self when it is not generalized *)\n    ty\n  (* cannot do it, since it would omit substitution\n     | Tvariant row when not (static_row row) ->\n         ty\n  *)\n  | _ ->\n    let desc = ty.desc in\n    save_desc ty desc;\n    let tm = row_of_type ty in\n    let has_fixed_row =\n      (not (is_Tconstr ty)) && is_constr_row ~allow_ident:false tm\n    in\n    (* Make a stub *)\n    let ty' = if s.for_saving then newpersty (Tvar None) else newgenvar () in\n    ty.desc <- Tsubst ty';\n    ty'.desc <-\n      (if has_fixed_row then\n         match tm.desc with\n         (* PR#7348 *)\n         | Tconstr (Pdot (m, i, pos), tl, _abbrev) ->\n           let i' = String.sub i 0 (String.length i - 4) in\n           Tconstr (type_path s (Pdot (m, i', pos)), tl, ref Mnil)\n         | _ -> assert false\n       else\n 
        match desc with\n         | Tconstr (p, args, _abbrev) -> (\n           let args = List.map (typexp s) args in\n           match PathMap.find p s.types with\n           | exception Not_found -> Tconstr (type_path s p, args, ref Mnil)\n           | Path _ -> Tconstr (type_path s p, args, ref Mnil)\n           | Type_function {params; body} ->\n             (!ctype_apply_env_empty params body args).desc)\n         | Tpackage (p, n, tl) ->\n           Tpackage (modtype_path s p, n, List.map (typexp s) tl)\n         | Tobject (t1, name) ->\n           Tobject\n             ( typexp s t1,\n               ref\n                 (match !name with\n                 | None -> None\n                 | Some (p, tl) ->\n                   if to_subst_by_type_function s p then None\n                   else Some (type_path s p, List.map (typexp s) tl)) )\n         | Tvariant row -> (\n           let row = row_repr row in\n           let more = repr row.row_more in\n           (* We must substitute in a subtle way *)\n           (* Tsubst takes a tuple containing the row var and the variant *)\n           match more.desc with\n           | Tsubst {desc = Ttuple [_; ty2]} ->\n             (* This variant type has been already copied *)\n             ty.desc <- Tsubst ty2;\n             (* avoid Tlink in the new type *)\n             Tlink ty2\n           | _ -> (\n             let dup =\n               s.for_saving || more.level = generic_level || static_row row\n               ||\n               match more.desc with\n               | Tconstr _ -> true\n               | _ -> false\n             in\n             (* Various cases for the row variable *)\n             let more' =\n               match more.desc with\n               | Tsubst ty -> ty\n               | Tconstr _ | Tnil -> typexp s more\n               | Tunivar _ | Tvar _ ->\n                 save_desc more more.desc;\n                 if s.for_saving then newpersty (norm more.desc)\n                 else if dup 
&& is_Tvar more then newgenty more.desc\n                 else more\n               | _ -> assert false\n             in\n             (* Register new type first for recursion *)\n             more.desc <- Tsubst (newgenty (Ttuple [more'; ty']));\n             (* Return a new copy *)\n             let row = copy_row (typexp s) true row (not dup) more' in\n             match row.row_name with\n             | Some (p, tl) ->\n               Tvariant\n                 {\n                   row with\n                   row_name =\n                     (if to_subst_by_type_function s p then None\n                      else Some (type_path s p, tl));\n                 }\n             | None -> Tvariant row))\n         | Tfield (_label, kind, _t1, t2) when field_kind_repr kind = Fabsent ->\n           Tlink (typexp s t2)\n         | _ -> copy_type_desc (typexp s) desc);\n    ty'\n\n(*\n   Always make a copy of the type. If this is not done, type levels\n   might not be correct.\n*)\nlet type_expr s ty =\n  let ty' = typexp s ty in\n  cleanup_types ();\n  ty'\n\nlet label_declaration s l =\n  {\n    ld_id = l.ld_id;\n    ld_mutable = l.ld_mutable;\n    ld_type = typexp s l.ld_type;\n    ld_loc = loc s l.ld_loc;\n    ld_attributes = attrs s l.ld_attributes;\n  }\n\nlet constructor_arguments s = function\n  | Cstr_tuple l -> Cstr_tuple (List.map (typexp s) l)\n  | Cstr_record l -> Cstr_record (List.map (label_declaration s) l)\n\nlet constructor_declaration s c =\n  {\n    cd_id = c.cd_id;\n    cd_args = constructor_arguments s c.cd_args;\n    cd_res = may_map (typexp s) c.cd_res;\n    cd_loc = loc s c.cd_loc;\n    cd_attributes = attrs s c.cd_attributes;\n  }\n\nlet type_declaration s decl =\n  let decl =\n    {\n      type_params = List.map (typexp s) decl.type_params;\n      type_arity = decl.type_arity;\n      type_kind =\n        (match decl.type_kind with\n        | Type_abstract -> Type_abstract\n        | Type_variant cstrs ->\n          Type_variant (List.map 
(constructor_declaration s) cstrs)\n        | Type_record (lbls, rep) ->\n          Type_record (List.map (label_declaration s) lbls, rep)\n        | Type_open -> Type_open);\n      type_manifest =\n        (match decl.type_manifest with\n        | None -> None\n        | Some ty -> Some (typexp s ty));\n      type_private = decl.type_private;\n      type_variance = decl.type_variance;\n      type_newtype_level = None;\n      type_loc = loc s decl.type_loc;\n      type_attributes = attrs s decl.type_attributes;\n      type_immediate = decl.type_immediate;\n      type_unboxed = decl.type_unboxed;\n    }\n  in\n  cleanup_types ();\n  decl\n\nlet class_signature s sign =\n  {\n    csig_self = typexp s sign.csig_self;\n    csig_vars =\n      Vars.map\n        (function\n          | m, v, t -> (m, v, typexp s t))\n        sign.csig_vars;\n    csig_concr = sign.csig_concr;\n    csig_inher =\n      List.map\n        (fun (p, tl) -> (type_path s p, List.map (typexp s) tl))\n        sign.csig_inher;\n  }\n\nlet rec class_type s = function\n  | Cty_constr (p, tyl, cty) ->\n    Cty_constr (type_path s p, List.map (typexp s) tyl, class_type s cty)\n  | Cty_signature sign -> Cty_signature (class_signature s sign)\n  | Cty_arrow (l, ty, cty) -> Cty_arrow (l, typexp s ty, class_type s cty)\n\nlet cltype_declaration s decl =\n  let decl =\n    {\n      clty_params = List.map (typexp s) decl.clty_params;\n      clty_variance = decl.clty_variance;\n      clty_type = class_type s decl.clty_type;\n      clty_path = type_path s decl.clty_path;\n      clty_loc = loc s decl.clty_loc;\n      clty_attributes = attrs s decl.clty_attributes;\n    }\n  in\n  (* Do clean up even if saving: type_declaration may be recursive *)\n  cleanup_types ();\n  decl\n\nlet class_type s cty =\n  let cty = class_type s cty in\n  cleanup_types ();\n  cty\n\nlet value_description s descr =\n  {\n    val_type = type_expr s descr.val_type;\n    val_kind = descr.val_kind;\n    val_loc = loc s descr.val_loc;\n    
val_attributes = attrs s descr.val_attributes;\n  }\n\nlet extension_constructor s ext =\n  let ext =\n    {\n      ext_type_path = type_path s ext.ext_type_path;\n      ext_type_params = List.map (typexp s) ext.ext_type_params;\n      ext_args = constructor_arguments s ext.ext_args;\n      ext_ret_type = may_map (typexp s) ext.ext_ret_type;\n      ext_private = ext.ext_private;\n      ext_attributes = attrs s ext.ext_attributes;\n      ext_loc = (if s.for_saving then Location.none else ext.ext_loc);\n    }\n  in\n  cleanup_types ();\n  ext\n\nlet rec rename_bound_idents s idents = function\n  | [] -> (List.rev idents, s)\n  | Sig_type (id, _, _) :: sg ->\n    let id' = Ident.rename id in\n    rename_bound_idents (add_type id (Pident id') s) (id' :: idents) sg\n  | Sig_module (id, _, _) :: sg ->\n    let id' = Ident.rename id in\n    rename_bound_idents (add_module id (Pident id') s) (id' :: idents) sg\n  | Sig_modtype (id, _) :: sg ->\n    let id' = Ident.rename id in\n    rename_bound_idents\n      (add_modtype id (Mty_ident (Pident id')) s)\n      (id' :: idents) sg\n  | Sig_class_type (id, _, _) :: sg ->\n    (* cheat and pretend they are types cf. 
PR#6650 *)\n    let id' = Ident.rename id in\n    rename_bound_idents (add_type id (Pident id') s) (id' :: idents) sg\n  | (Sig_value (id, _) | Sig_typext (id, _, _)) :: sg ->\n    let id' = Ident.rename id in\n    rename_bound_idents s (id' :: idents) sg\n  | Sig_class _ :: _ -> assert false\nlet rec modtype s = function\n  | Mty_ident p as mty -> (\n    match p with\n    | Pident id -> ( try Tbl.find id s.modtypes with Not_found -> mty)\n    | Pdot (p, n, pos) -> Mty_ident (Pdot (module_path s p, n, pos))\n    | Papply _ -> fatal_error \"Subst.modtype\")\n  | Mty_signature sg -> Mty_signature (signature s sg)\n  | Mty_functor (id, arg, res) ->\n    let id' = Ident.rename id in\n    Mty_functor\n      (id', may_map (modtype s) arg, modtype (add_module id (Pident id') s) res)\n  | Mty_alias (pres, p) -> Mty_alias (pres, module_path s p)\n\nand signature s sg =\n  (* Components of signature may be mutually recursive (e.g. type declarations\n     or class and type declarations), so first build global renaming\n     substitution... *)\n  let new_idents, s' = rename_bound_idents s [] sg in\n  (* ... 
then apply it to each signature component in turn *)\n  List.map2 (signature_component s') sg new_idents\n\nand signature_component s comp newid =\n  match comp with\n  | Sig_value (_id, d) -> Sig_value (newid, value_description s d)\n  | Sig_type (_id, d, rs) -> Sig_type (newid, type_declaration s d, rs)\n  | Sig_typext (_id, ext, es) ->\n    Sig_typext (newid, extension_constructor s ext, es)\n  | Sig_module (_id, d, rs) -> Sig_module (newid, module_declaration s d, rs)\n  | Sig_modtype (_id, d) -> Sig_modtype (newid, modtype_declaration s d)\n  | Sig_class () -> Sig_class ()\n  | Sig_class_type (_id, d, rs) ->\n    Sig_class_type (newid, cltype_declaration s d, rs)\n\nand module_declaration s decl =\n  {\n    md_type = modtype s decl.md_type;\n    md_attributes = attrs s decl.md_attributes;\n    md_loc = loc s decl.md_loc;\n  }\n\nand modtype_declaration s decl =\n  {\n    mtd_type = may_map (modtype s) decl.mtd_type;\n    mtd_attributes = attrs s decl.mtd_attributes;\n    mtd_loc = loc s decl.mtd_loc;\n  }\n\n(* For every binding k |-> d of m1, add k |-> f d to m2\n   and return resulting merged map. *)\n\nlet merge_tbls f m1 m2 = Tbl.fold (fun k d accu -> Tbl.add k (f d) accu) m1 m2\n\nlet merge_path_maps f m1 m2 =\n  PathMap.fold (fun k d accu -> PathMap.add k (f d) accu) m1 m2\n\nlet type_replacement s = function\n  | Path p -> Path (type_path s p)\n  | Type_function {params; body} ->\n    let params = List.map (typexp s) params in\n    let body = typexp s body in\n    Type_function {params; body}\n\n(* Composition of substitutions:\n     apply (compose s1 s2) x = apply s2 (apply s1 x) *)\n\nlet compose s1 s2 =\n  {\n    types = merge_path_maps (type_replacement s2) s1.types s2.types;\n    modules = merge_path_maps (module_path s2) s1.modules s2.modules;\n    modtypes = merge_tbls (modtype s2) s1.modtypes s2.modtypes;\n    for_saving = s1.for_saving || s2.for_saving;\n  }\n"
  },
  {
    "path": "analysis/vendor/ml/subst.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Substitutions *)\n\nopen Types\n\ntype t\n\n(*\n   Substitutions are used to translate a type from one context to\n   another.  This requires substituting paths for identifiers, and\n   possibly also lowering the level of non-generic variables so that\n   they are inferior to the maximum level of the new context.\n\n   Substitutions can also be used to create a \"clean\" copy of a type.\n   Indeed, non-variable node of a type are duplicated, with their\n   levels set to generic level.  
That way, the resulting type is\n   well-formed (decreasing levels), even if the original one was not.\n*)\n\nval identity : t\n\nval add_type : Ident.t -> Path.t -> t -> t\nval add_type_path : Path.t -> Path.t -> t -> t\nval add_type_function :\n  Path.t -> params:type_expr list -> body:type_expr -> t -> t\nval add_module : Ident.t -> Path.t -> t -> t\nval add_module_path : Path.t -> Path.t -> t -> t\nval add_modtype : Ident.t -> module_type -> t -> t\nval for_saving : t -> t\nval reset_for_saving : unit -> unit\n\nval module_path : t -> Path.t -> Path.t\nval type_path : t -> Path.t -> Path.t\n\nval type_expr : t -> type_expr -> type_expr\nval class_type : t -> class_type -> class_type\nval value_description : t -> value_description -> value_description\nval type_declaration : t -> type_declaration -> type_declaration\nval extension_constructor : t -> extension_constructor -> extension_constructor\n\nval cltype_declaration : t -> class_type_declaration -> class_type_declaration\nval modtype : t -> module_type -> module_type\nval signature : t -> signature -> signature\nval modtype_declaration : t -> modtype_declaration -> modtype_declaration\nval module_declaration : t -> module_declaration -> module_declaration\nval typexp : t -> Types.type_expr -> Types.type_expr\nval class_signature : t -> class_signature -> class_signature\n\n(* Composition of substitutions:\n     apply (compose s1 s2) x = apply s2 (apply s1 x) *)\nval compose : t -> t -> t\n\n(* A forward reference to be filled in ctype.ml. *)\nval ctype_apply_env_empty :\n  (type_expr list -> type_expr -> type_expr list -> type_expr) ref\n"
  },
  {
    "path": "analysis/vendor/ml/switch.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Luc Maranget, projet Moscova, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 2000 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype 'a shared = Shared of 'a | Single of 'a\n\ntype 'a t_store = {\n  act_get: unit -> 'a array;\n  act_get_shared: unit -> 'a shared array;\n  act_store: 'a -> int;\n  act_store_shared: 'a -> int;\n}\n\nexception Not_simple\n\nmodule type Stored = sig\n  type t\n  type key\n  val compare_key : key -> key -> int\n  val make_key : t -> key option\nend\n\nmodule Store (A : Stored) = struct\n  module AMap = Map.Make (struct\n    type t = A.key\n    let compare = A.compare_key\n  end)\n\n  type intern = {\n    mutable map: (bool * int) AMap.t;\n    mutable next: int;\n    mutable acts: (bool * A.t) list;\n  }\n\n  let mk_store () =\n    let st = {map = AMap.empty; next = 0; acts = []} in\n\n    let add mustshare act =\n      let i = st.next in\n      st.acts <- (mustshare, act) :: st.acts;\n      st.next <- i + 1;\n      i\n    in\n\n    let store mustshare act =\n      match 
A.make_key act with\n      | Some key -> (\n        try\n          let shared, i = AMap.find key st.map in\n          if not shared then st.map <- AMap.add key (true, i) st.map;\n          i\n        with Not_found ->\n          let i = add mustshare act in\n          st.map <- AMap.add key (mustshare, i) st.map;\n          i)\n      | None -> add mustshare act\n    and get () = Array.of_list (List.rev_map (fun (_, act) -> act) st.acts)\n    and get_shared () =\n      let acts =\n        Array.of_list\n          (List.rev_map\n             (fun (shared, act) -> if shared then Shared act else Single act)\n             st.acts)\n      in\n      AMap.iter\n        (fun _ (shared, i) ->\n          if shared then\n            match acts.(i) with\n            | Single act -> acts.(i) <- Shared act\n            | Shared _ -> ())\n        st.map;\n      acts\n    in\n    {\n      act_store = store false;\n      act_store_shared = store true;\n      act_get = get;\n      act_get_shared = get_shared;\n    }\nend\n\nmodule type S = sig\n  type primitive\n  val eqint : primitive\n  val neint : primitive\n  val leint : primitive\n  val ltint : primitive\n  val geint : primitive\n  val gtint : primitive\n  type act\n\n  val bind : act -> (act -> act) -> act\n  val make_const : int -> act\n  val make_offset : act -> int -> act\n  val make_prim : primitive -> act list -> act\n  val make_isout : act -> act -> act\n  val make_isin : act -> act -> act\n  val make_if : act -> act -> act -> act\n  val make_switch :\n    Location.t ->\n    act ->\n    int array ->\n    act array ->\n    offset:int ->\n    Ast_untagged_variants.switch_names option ->\n    act\n  val make_catch : act -> int * (act -> act)\n  val make_exit : int -> act\nend\n\n(* The module will ``produce good code for the case statement'' *)\n(*\n  Adaptation of\n   R.L. Berstein\n   ``Producing good code for the case statement''\n   Sofware Practice and Experience, 15(10) (1985)\n and\n   D.L. 
Spuler\n    ``Two-Way Comparison Search Trees, a Generalisation of Binary Search Trees\n      and Split Trees''\n    ``Compiler Code Generation for Multiway Branch Statement as\n      a Static Search Problem''\n   Technical Reports, James Cook University\n*)\n(*\n  Main adaptation is considering interval tests\n (implemented as one addition + one unsigned test and branch)\n  which leads to exhaustive search for finding the optimal\n  test sequence in small cases and heuristics otherwise.\n*)\nmodule Make (Arg : S) = struct\n  type 'a inter = {cases: (int * int * int) array; actions: 'a array}\n\n  type 'a t_ctx = {off: int; arg: 'a}\n\n  let cut = ref 8\n\n  and more_cut = ref 16\n\n  (*\nlet pint chan i =\n  if i = min_int then Printf.fprintf chan \"-oo\"\n  else if i=max_int then Printf.fprintf chan \"oo\"\n  else Printf.fprintf chan \"%d\" i\n\nlet pcases chan cases =\n  for i =0 to Array.length cases-1 do\n    let l,h,act = cases.(i) in\n    if l=h then\n      Printf.fprintf chan \"%d:%d \" l act\n    else\n      Printf.fprintf chan \"%a..%a:%d \" pint l pint h act\n  done\n\nlet prerr_inter i = Printf.fprintf stderr\n        \"cases=%a\" pcases i.cases\n*)\n\n  let get_act cases i =\n    let _, _, r = cases.(i) in\n    r\n\n  and get_low cases i =\n    let r, _, _ = cases.(i) in\n    r\n\n  type ctests = {mutable n: int; mutable ni: int}\n\n  let too_much = {n = max_int; ni = max_int}\n\n  (*\nlet ptests chan {n=n ; ni=ni} =\n  Printf.fprintf chan \"{n=%d ; ni=%d}\" n ni\n\nlet pta chan t =\n  for i =0 to Array.length t-1 do\n    Printf.fprintf chan \"%d: %a\\n\" i ptests t.(i)\n  done\n*)\n\n  let less_tests c1 c2 =\n    if c1.n < c2.n then true\n    else if c1.n = c2.n then if c1.ni < c2.ni then true else false\n    else false\n\n  and eq_tests c1 c2 = c1.n = c2.n && c1.ni = c2.ni\n\n  let less2tests (c1, d1) (c2, d2) =\n    if eq_tests c1 c2 then less_tests d1 d2 else less_tests c1 c2\n\n  let add_test t1 t2 =\n    t1.n <- t1.n + t2.n;\n    t1.ni <- t1.ni + 
t2.ni\n\n  type t_ret = Inter of int * int | Sep of int | No\n\n  (*\nlet pret chan = function\n  | Inter (i,j)-> Printf.fprintf chan \"Inter %d %d\" i j\n  | Sep i -> Printf.fprintf chan \"Sep %d\" i\n  | No -> Printf.fprintf chan \"No\"\n*)\n\n  let coupe cases i =\n    let l, _, _ = cases.(i) in\n    (l, Array.sub cases 0 i, Array.sub cases i (Array.length cases - i))\n\n  let case_append c1 c2 =\n    let len1 = Array.length c1 and len2 = Array.length c2 in\n    match (len1, len2) with\n    | 0, _ -> c2\n    | _, 0 -> c1\n    | _, _ ->\n      let l1, h1, act1 = c1.(Array.length c1 - 1) and l2, h2, act2 = c2.(0) in\n      if act1 = act2 then (\n        let r = Array.make (len1 + len2 - 1) c1.(0) in\n        for i = 0 to len1 - 2 do\n          r.(i) <- c1.(i)\n        done;\n\n        let l =\n          if len1 - 2 >= 0 then\n            let _, h, _ = r.(len1 - 2) in\n            if h + 1 < l1 then h + 1 else l1\n          else l1\n        and h =\n          if 1 < len2 - 1 then\n            let l, _, _ = c2.(1) in\n            if h2 + 1 < l then l - 1 else h2\n          else h2\n        in\n        r.(len1 - 1) <- (l, h, act1);\n        for i = 1 to len2 - 1 do\n          r.(len1 - 1 + i) <- c2.(i)\n        done;\n        r)\n      else if h1 > l1 then (\n        let r = Array.make (len1 + len2) c1.(0) in\n        for i = 0 to len1 - 2 do\n          r.(i) <- c1.(i)\n        done;\n        r.(len1 - 1) <- (l1, l2 - 1, act1);\n        for i = 0 to len2 - 1 do\n          r.(len1 + i) <- c2.(i)\n        done;\n        r)\n      else if h2 > l2 then (\n        let r = Array.make (len1 + len2) c1.(0) in\n        for i = 0 to len1 - 1 do\n          r.(i) <- c1.(i)\n        done;\n        r.(len1) <- (h1 + 1, h2, act2);\n        for i = 1 to len2 - 1 do\n          r.(len1 + i) <- c2.(i)\n        done;\n        r)\n      else Array.append c1 c2\n\n  let coupe_inter i j cases =\n    let lcases = Array.length cases in\n    let low, _, _ = cases.(i) and _, high, _ = 
cases.(j) in\n    ( low,\n      high,\n      Array.sub cases i (j - i + 1),\n      case_append (Array.sub cases 0 i)\n        (Array.sub cases (j + 1) (lcases - (j + 1))) )\n\n  type kind = Kvalue of int | Kinter of int | Kempty\n\n  (*\nlet pkind chan = function\n  | Kvalue i ->Printf.fprintf chan \"V%d\" i\n  | Kinter i -> Printf.fprintf chan \"I%d\" i\n  | Kempty -> Printf.fprintf chan \"E\"\n\nlet rec pkey chan  = function\n  | [] -> ()\n  | [k] -> pkind chan k\n  | k::rem ->\n      Printf.fprintf chan \"%a %a\" pkey rem pkind k\n*)\n\n  let t = Hashtbl.create 17\n\n  let make_key cases =\n    let seen = ref [] and count = ref 0 in\n    let rec got_it act = function\n      | [] ->\n        seen := (act, !count) :: !seen;\n        let r = !count in\n        incr count;\n        r\n      | (act0, index) :: rem -> if act0 = act then index else got_it act rem\n    in\n\n    let make_one (l : int) h act =\n      if l = h then Kvalue (got_it act !seen) else Kinter (got_it act !seen)\n    in\n\n    let rec make_rec i pl =\n      if i < 0 then []\n      else\n        let l, h, act = cases.(i) in\n        if pl = h + 1 then make_one l h act :: make_rec (i - 1) l\n        else Kempty :: make_one l h act :: make_rec (i - 1) l\n    in\n\n    let l, h, act = cases.(Array.length cases - 1) in\n    make_one l h act :: make_rec (Array.length cases - 2) l\n\n  let same_act t =\n    let len = Array.length t in\n    let a = get_act t (len - 1) in\n    let rec do_rec i =\n      if i < 0 then true\n      else\n        let b = get_act t i in\n        b = a && do_rec (i - 1)\n    in\n    do_rec (len - 2)\n\n  (*\n  Interval test x in [l,h] works by checking x-l in [0,h-l]\n   * This may be false for arithmetic modulo 2^31\n   * Subtracting l may change the relative ordering of values\n     and invalid the invariant that matched values are given in\n     increasing order\n\n   To avoid this, interval check is allowed only when the\n   integers indeed present in the whole case interval 
are\n   in [-2^16 ; 2^16]\n\n   This condition is checked by zyva\n   *)\n\n  let inter_limit = 1 lsl 16\n\n  let ok_inter = ref false\n\n  let rec opt_count top cases =\n    let key = make_key cases in\n    try Hashtbl.find t key\n    with Not_found ->\n      let r =\n        let lcases = Array.length cases in\n        match lcases with\n        | 0 -> assert false\n        | _ when same_act cases -> (No, ({n = 0; ni = 0}, {n = 0; ni = 0}))\n        | _ ->\n          if lcases < !cut then enum top cases\n          else if lcases < !more_cut then heuristic cases\n          else divide cases\n      in\n      Hashtbl.add t key r;\n      r\n\n  and divide cases =\n    let lcases = Array.length cases in\n    let m = lcases / 2 in\n    let _, left, right = coupe cases m in\n    let ci = {n = 1; ni = 0}\n    and cm = {n = 1; ni = 0}\n    and _, (cml, cleft) = opt_count false left\n    and _, (cmr, cright) = opt_count false right in\n    add_test ci cleft;\n    add_test ci cright;\n    if less_tests cml cmr then add_test cm cmr else add_test cm cml;\n    (Sep m, (cm, ci))\n\n  and heuristic cases =\n    let lcases = Array.length cases in\n\n    let sep, csep = divide cases\n    and inter, cinter =\n      if !ok_inter then\n        let _, _, act0 = cases.(0) and _, _, act1 = cases.(lcases - 1) in\n        if act0 = act1 then (\n          let low, high, inside, outside = coupe_inter 1 (lcases - 2) cases in\n          let _, (cmi, cinside) = opt_count false inside\n          and _, (cmo, coutside) = opt_count false outside\n          and cmij = {n = 1; ni = (if low = high then 0 else 1)}\n          and cij = {n = 1; ni = (if low = high then 0 else 1)} in\n          add_test cij cinside;\n          add_test cij coutside;\n          if less_tests cmi cmo then add_test cmij cmo else add_test cmij cmi;\n          (Inter (1, lcases - 2), (cmij, cij)))\n        else (Inter (-1, -1), (too_much, too_much))\n      else (Inter (-1, -1), (too_much, too_much))\n    in\n    if less2tests 
csep cinter then (sep, csep) else (inter, cinter)\n\n  and enum top cases =\n    let lcases = Array.length cases in\n    let lim, with_sep =\n      let best = ref (-1) and best_cost = ref (too_much, too_much) in\n\n      for i = 1 to lcases - 1 do\n        let _, left, right = coupe cases i in\n        let ci = {n = 1; ni = 0}\n        and cm = {n = 1; ni = 0}\n        and _, (cml, cleft) = opt_count false left\n        and _, (cmr, cright) = opt_count false right in\n        add_test ci cleft;\n        add_test ci cright;\n        if less_tests cml cmr then add_test cm cmr else add_test cm cml;\n\n        if less2tests (cm, ci) !best_cost then (\n          if top then Printf.fprintf stderr \"Get it: %d\\n\" i;\n          best := i;\n          best_cost := (cm, ci))\n      done;\n      (!best, !best_cost)\n    in\n\n    let ilow, ihigh, with_inter =\n      if not !ok_inter then (\n        let rlow = ref (-1)\n        and rhigh = ref (-1)\n        and best_cost = ref (too_much, too_much) in\n        for i = 1 to lcases - 2 do\n          let low, high, inside, outside = coupe_inter i i cases in\n          if low = high then (\n            let _, (cmi, cinside) = opt_count false inside\n            and _, (cmo, coutside) = opt_count false outside\n            and cmij = {n = 1; ni = 0}\n            and cij = {n = 1; ni = 0} in\n            add_test cij cinside;\n            add_test cij coutside;\n            if less_tests cmi cmo then add_test cmij cmo else add_test cmij cmi;\n            if less2tests (cmij, cij) !best_cost then (\n              rlow := i;\n              rhigh := i;\n              best_cost := (cmij, cij)))\n        done;\n        (!rlow, !rhigh, !best_cost))\n      else\n        let rlow = ref (-1)\n        and rhigh = ref (-1)\n        and best_cost = ref (too_much, too_much) in\n        for i = 1 to lcases - 2 do\n          for j = i to lcases - 2 do\n            let low, high, inside, outside = coupe_inter i j cases in\n            let _, (cmi, 
cinside) = opt_count false inside\n            and _, (cmo, coutside) = opt_count false outside\n            and cmij = {n = 1; ni = (if low = high then 0 else 1)}\n            and cij = {n = 1; ni = (if low = high then 0 else 1)} in\n            add_test cij cinside;\n            add_test cij coutside;\n            if less_tests cmi cmo then add_test cmij cmo else add_test cmij cmi;\n            if less2tests (cmij, cij) !best_cost then (\n              rlow := i;\n              rhigh := j;\n              best_cost := (cmij, cij))\n          done\n        done;\n        (!rlow, !rhigh, !best_cost)\n    in\n    let r = ref (Inter (ilow, ihigh)) and rc = ref with_inter in\n    if less2tests with_sep !rc then (\n      r := Sep lim;\n      rc := with_sep);\n    (!r, !rc)\n\n  let make_if_test test arg i ifso ifnot =\n    Arg.make_if (Arg.make_prim test [arg; Arg.make_const i]) ifso ifnot\n\n  let make_if_lt arg i ifso ifnot =\n    match i with\n    | 1 -> make_if_test Arg.leint arg 0 ifso ifnot\n    | _ -> make_if_test Arg.ltint arg i ifso ifnot\n\n  and make_if_ge arg i ifso ifnot =\n    match i with\n    | 1 -> make_if_test Arg.gtint arg 0 ifso ifnot\n    | _ -> make_if_test Arg.geint arg i ifso ifnot\n\n  and make_if_eq arg i ifso ifnot = make_if_test Arg.eqint arg i ifso ifnot\n\n  and make_if_ne arg i ifso ifnot = make_if_test Arg.neint arg i ifso ifnot\n\n  let do_make_if_out h arg ifso ifno =\n    Arg.make_if (Arg.make_isout h arg) ifso ifno\n\n  let make_if_out ctx l d mk_ifso mk_ifno =\n    match l with\n    | 0 -> do_make_if_out (Arg.make_const d) ctx.arg (mk_ifso ctx) (mk_ifno ctx)\n    | _ ->\n      if (*true || *) !Config.bs_only then\n        do_make_if_out (Arg.make_const d)\n          (Arg.make_offset ctx.arg (-l))\n          (mk_ifso ctx) (mk_ifno ctx)\n      else\n        Arg.bind (Arg.make_offset ctx.arg (-l)) (fun arg ->\n            let ctx = {off = -l + ctx.off; arg} in\n            do_make_if_out (Arg.make_const d) arg (mk_ifso ctx) (mk_ifno 
ctx))\n\n  let do_make_if_in h arg ifso ifno =\n    Arg.make_if (Arg.make_isin h arg) ifso ifno\n\n  let make_if_in ctx l d mk_ifso mk_ifno =\n    match l with\n    | 0 -> do_make_if_in (Arg.make_const d) ctx.arg (mk_ifso ctx) (mk_ifno ctx)\n    | _ ->\n      if (*true || *) !Config.bs_only then\n        do_make_if_in (Arg.make_const d)\n          (Arg.make_offset ctx.arg (-l))\n          (mk_ifso ctx) (mk_ifno ctx)\n      else\n        Arg.bind (Arg.make_offset ctx.arg (-l)) (fun arg ->\n            let ctx = {off = -l + ctx.off; arg} in\n            do_make_if_in (Arg.make_const d) arg (mk_ifso ctx) (mk_ifno ctx))\n\n  let rec c_test ctx ({cases; actions} as s) =\n    let lcases = Array.length cases in\n    assert (lcases > 0);\n    if lcases = 1 then actions.(get_act cases 0) ctx\n    else\n      let w, _c = opt_count false cases in\n      (*\n  Printf.fprintf stderr\n  \"off=%d tactic=%a for %a\\n\"\n  ctx.off pret w pcases cases ;\n  *)\n      match w with\n      | No -> actions.(get_act cases 0) ctx\n      | Inter (i, j) ->\n        let low, high, inside, outside = coupe_inter i j cases in\n        let _, (cinside, _) = opt_count false inside\n        and _, (coutside, _) = opt_count false outside in\n        (* Costs are retrieved to put the code with more remaining tests\n           in the privileged (positive) branch of ``if'' *)\n        if low = high then\n          if less_tests coutside cinside then\n            make_if_eq ctx.arg (low + ctx.off)\n              (c_test ctx {s with cases = inside})\n              (c_test ctx {s with cases = outside})\n          else\n            make_if_ne ctx.arg (low + ctx.off)\n              (c_test ctx {s with cases = outside})\n              (c_test ctx {s with cases = inside})\n        else if less_tests coutside cinside then\n          make_if_in ctx (low + ctx.off) (high - low)\n            (fun ctx -> c_test ctx {s with cases = inside})\n            (fun ctx -> c_test ctx {s with cases = outside})\n        
else\n          make_if_out ctx (low + ctx.off) (high - low)\n            (fun ctx -> c_test ctx {s with cases = outside})\n            (fun ctx -> c_test ctx {s with cases = inside})\n      | Sep i ->\n        let lim, left, right = coupe cases i in\n        let _, (cleft, _) = opt_count false left\n        and _, (cright, _) = opt_count false right in\n        let left = {s with cases = left} and right = {s with cases = right} in\n\n        if i = 1 && lim + ctx.off = 1 && get_low cases 0 + ctx.off = 0 then\n          make_if_ne ctx.arg 0 (c_test ctx right) (c_test ctx left)\n        else if less_tests cright cleft then\n          make_if_lt ctx.arg (lim + ctx.off) (c_test ctx left)\n            (c_test ctx right)\n        else\n          make_if_ge ctx.arg (lim + ctx.off) (c_test ctx right)\n            (c_test ctx left)\n\n  (* Minimal density of switches *)\n  let theta = ref 0.33333\n\n  (* Minimal number of tests to make a switch *)\n  let switch_min = ref 3\n\n  (* Particular case 0, 1, 2 *)\n  let particular_case cases i j =\n    j - i = 2\n    &&\n    let l1, _h1, act1 = cases.(i)\n    and l2, _h2, _act2 = cases.(i + 1)\n    and l3, h3, act3 = cases.(i + 2) in\n    l1 + 1 = l2 && l2 + 1 = l3 && l3 = h3 && act1 <> act3\n\n  let approx_count cases i j =\n    let l = j - i + 1 in\n    if l < !cut then\n      let _, (_, {n = ntests}) = opt_count false (Array.sub cases i l) in\n      ntests\n    else l - 1\n\n  (* Sends back a boolean that says whether is switch is worth or not *)\n\n  let dense {cases} i j =\n    if i = j then true\n    else\n      let l, _, _ = cases.(i) and _, h, _ = cases.(j) in\n      let ntests = approx_count cases i j in\n      (*\n  (ntests+1) >= theta * (h-l+1)\n*)\n      particular_case cases i j\n      || ntests >= !switch_min\n         && float_of_int ntests +. 1.0\n            >= !theta *. (float_of_int h -. float_of_int l +. 
1.0)\n\n  (* Compute clusters by dynamic programming\n     Adaptation of the correction to Bernstein\n     ``Correction to `Producing Good Code for the Case Statement' ''\n     S.K. Kannan and T.A. Proebsting\n     Software Practice and Experience Vol. 24(2) 233 (Feb 1994)\n  *)\n\n  let comp_clusters s =\n    let len = Array.length s.cases in\n    let min_clusters = Array.make len max_int and k = Array.make len 0 in\n    let get_min i = if i < 0 then 0 else min_clusters.(i) in\n\n    for i = 0 to len - 1 do\n      for j = 0 to i do\n        if dense s j i && get_min (j - 1) + 1 < min_clusters.(i) then (\n          k.(i) <- j;\n          min_clusters.(i) <- get_min (j - 1) + 1)\n      done\n    done;\n    (min_clusters.(len - 1), k)\n\n  (* Assume j > i *)\n  let make_switch loc {cases; actions} i j sw_names =\n    let ll, _, _ = cases.(i) and _, hh, _ = cases.(j) in\n    let tbl = Array.make (hh - ll + 1) 0\n    and t = Hashtbl.create 17\n    and index = ref 0 in\n    let get_index act =\n      try Hashtbl.find t act\n      with Not_found ->\n        let i = !index in\n        incr index;\n        Hashtbl.add t act i;\n        i\n    in\n\n    for k = i to j do\n      let l, h, act = cases.(k) in\n      let index = get_index act in\n      for kk = l - ll to h - ll do\n        tbl.(kk) <- index\n      done\n    done;\n    let acts = Array.make !index actions.(0) in\n    Hashtbl.iter (fun act i -> acts.(i) <- actions.(act)) t;\n    fun ctx ->\n      if !Config.bs_only then\n        Arg.make_switch ~offset:(ll + ctx.off) loc ctx.arg tbl acts sw_names\n      else\n        match -ll - ctx.off with\n        | 0 -> Arg.make_switch loc ctx.arg tbl acts sw_names ~offset:0\n        | _ ->\n          Arg.bind\n            (Arg.make_offset ctx.arg (-ll - ctx.off))\n            (fun arg -> Arg.make_switch loc arg tbl acts sw_names ~offset:0)\n\n  let make_clusters loc ({cases; actions} as s) n_clusters k sw_names =\n    let len = Array.length cases in\n    let r = Array.make 
n_clusters (0, 0, 0)\n    and t = Hashtbl.create 17\n    and index = ref 0\n    and bidon = ref (Array.length actions) in\n    let get_index act =\n      try\n        let i, _ = Hashtbl.find t act in\n        i\n      with Not_found ->\n        let i = !index in\n        incr index;\n        Hashtbl.add t act (i, fun _ -> actions.(act));\n        i\n    and add_index act =\n      let i = !index in\n      incr index;\n      incr bidon;\n      Hashtbl.add t !bidon (i, act);\n      i\n    in\n\n    let rec zyva j ir =\n      let i = k.(j) in\n      (if i = j then\n         let l, h, act = cases.(i) in\n         r.(ir) <- (l, h, get_index act)\n       else\n         (* assert i < j *)\n         let l, _, _ = cases.(i) and _, h, _ = cases.(j) in\n         r.(ir) <- (l, h, add_index (make_switch loc s i j sw_names)));\n      if i > 0 then zyva (i - 1) (ir - 1)\n    in\n\n    zyva (len - 1) (n_clusters - 1);\n    let acts = Array.make !index (fun _ -> assert false) in\n    Hashtbl.iter (fun _ (i, act) -> acts.(i) <- act) t;\n    {cases = r; actions = acts}\n\n  let do_zyva loc (low, high) arg cases actions sw_names =\n    let old_ok = !ok_inter in\n    ok_inter := abs low <= inter_limit && abs high <= inter_limit;\n    if !ok_inter <> old_ok then Hashtbl.clear t;\n\n    let s = {cases; actions} in\n\n    (*\n  Printf.eprintf \"ZYVA: %B [low=%i,high=%i]\\n\" !ok_inter low high ;\n  pcases stderr cases ;\n  prerr_endline \"\" ;\n*)\n    let n_clusters, k = comp_clusters s in\n    let clusters = make_clusters loc s n_clusters k sw_names in\n    c_test {arg; off = 0} clusters\n\n  let abstract_shared actions =\n    let handlers = ref (fun x -> x) in\n    let actions =\n      Array.map\n        (fun act ->\n          match act with\n          | Single act -> act\n          | Shared act ->\n            let i, h = Arg.make_catch act in\n            let oh = !handlers in\n            (handlers := fun act -> h (oh act));\n            Arg.make_exit i)\n        actions\n    in\n    
(!handlers, actions)\n\n  let zyva loc lh arg cases actions names =\n    assert (Array.length cases > 0);\n    let actions = actions.act_get_shared () in\n    let hs, actions = abstract_shared actions in\n    hs (do_zyva loc lh arg cases actions names)\n\n  and test_sequence arg cases actions =\n    assert (Array.length cases > 0);\n    let actions = actions.act_get_shared () in\n    let hs, actions = abstract_shared actions in\n    let old_ok = !ok_inter in\n    ok_inter := false;\n    if !ok_inter <> old_ok then Hashtbl.clear t;\n    let s = {cases; actions = Array.map (fun act _ -> act) actions} in\n    (*\n  Printf.eprintf \"SEQUENCE: %B\\n\" !ok_inter ;\n  pcases stderr cases ;\n  prerr_endline \"\" ;\n*)\n    hs (c_test {arg; off = 0} s)\nend\n"
  },
  {
    "path": "analysis/vendor/ml/switch.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Luc Maranget, projet Moscova, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 2000 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(*\n  This module transforms generic switches in combinations\n  of if tests and switches.\n*)\n\n(* For detecting action sharing, object style *)\n\n(* Store for actions in object style:\n   act_store : store an action, returns index in table\n               In case an action with equal key exists, returns index\n               of the stored action. 
Otherwise add entry in table.\n   act_store_shared : This stored action will always be shared.\n   act_get   : retrieve table\n   act_get_shared : retrieve table, with sharing explicit\n*)\n\ntype 'a shared = Shared of 'a | Single of 'a\n\ntype 'a t_store = {\n  act_get: unit -> 'a array;\n  act_get_shared: unit -> 'a shared array;\n  act_store: 'a -> int;\n  act_store_shared: 'a -> int;\n}\n\nexception Not_simple\n\nmodule type Stored = sig\n  type t\n  type key\n  val compare_key : key -> key -> int\n  val make_key : t -> key option\nend\n\nmodule Store (A : Stored) : sig\n  val mk_store : unit -> A.t t_store\nend\n\n(* Arguments to the Make functor *)\nmodule type S = sig\n  (* type of basic tests *)\n  type primitive\n\n  (* basic tests themselves *)\n  val eqint : primitive\n  val neint : primitive\n  val leint : primitive\n  val ltint : primitive\n  val geint : primitive\n  val gtint : primitive\n\n  (* type of actions *)\n  type act\n\n  (* Various constructors, for making a binder,\n      adding one integer, etc. 
*)\n  val bind : act -> (act -> act) -> act\n  val make_const : int -> act\n  val make_offset : act -> int -> act\n  val make_prim : primitive -> act list -> act\n  val make_isout : act -> act -> act\n  val make_isin : act -> act -> act\n  val make_if : act -> act -> act -> act\n\n  (* construct an actual switch :\n     make_switch arg cases acts\n     NB:  cases is in the value form *)\n  val make_switch :\n    Location.t ->\n    act ->\n    int array ->\n    act array ->\n    offset:int ->\n    Ast_untagged_variants.switch_names option ->\n    act\n\n  (* Build last minute sharing of action stuff *)\n  val make_catch : act -> int * (act -> act)\n  val make_exit : int -> act\nend\n\n(*\n  Make.zyva arg low high cases actions where\n    - arg is the argument of the switch.\n    - low, high are the interval limits.\n    - cases is a list of sub-interval and action indices\n    - actions is an array of actions.\n\n  All these arguments specify a switch construct and zyva\n  returns an action that performs the switch.\n*)\nmodule Make : functor (Arg : S) -> sig\n  (* Standard entry point, sharing is tracked *)\n  val zyva :\n    Location.t ->\n    int * int ->\n    Arg.act ->\n    (int * int * int) array ->\n    Arg.act t_store ->\n    Ast_untagged_variants.switch_names option ->\n    Arg.act\n\n  (* Output test sequence, sharing tracked *)\n  val test_sequence :\n    Arg.act -> (int * int * int) array -> Arg.act t_store -> Arg.act\nend\n"
  },
  {
    "path": "analysis/vendor/ml/syntaxerr.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1997 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Auxiliary type for reporting syntax errors *)\n\ntype error =\n  | Unclosed of Location.t * string * Location.t * string\n  | Expecting of Location.t * string\n  | Not_expecting of Location.t * string\n  | Applicative_path of Location.t\n  | Variable_in_scope of Location.t * string\n  | Other of Location.t\n  | Ill_formed_ast of Location.t * string\n  | Invalid_package_type of Location.t * string\n\nexception Error of error\nexception Escape_error\n\nlet prepare_error = function\n  | Unclosed (opening_loc, opening, closing_loc, closing) ->\n    Location.errorf ~loc:closing_loc\n      ~sub:\n        [\n          Location.errorf ~loc:opening_loc \"This '%s' might be unmatched\" opening;\n        ]\n      ~if_highlight:\n        (Printf.sprintf\n           \"Syntax error: '%s' expected, the highlighted '%s' might be \\\n            unmatched\"\n           closing opening)\n    
  \"Syntax error: '%s' expected\" closing\n  | Expecting (loc, nonterm) ->\n    Location.errorf ~loc \"Syntax error: %s expected.\" nonterm\n  | Not_expecting (loc, nonterm) ->\n    Location.errorf ~loc \"Syntax error: %s not expected.\" nonterm\n  | Applicative_path loc ->\n    Location.errorf ~loc\n      \"Syntax error: applicative paths of the form F(X).t are not supported \\\n       when the option -no-app-func is set.\"\n  | Variable_in_scope (loc, var) ->\n    Location.errorf ~loc\n      \"In this scoped type, variable '%s is reserved for the local type %s.\" var\n      var\n  | Other loc -> Location.errorf ~loc \"Syntax error\"\n  | Ill_formed_ast (loc, s) ->\n    Location.errorf ~loc \"broken invariant in parsetree: %s\" s\n  | Invalid_package_type (loc, s) ->\n    Location.errorf ~loc \"invalid package type: %s\" s\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error err -> Some (prepare_error err)\n    | _ -> None)\n\nlet report_error ppf err = Location.report_error ppf (prepare_error err)\n\nlet location_of_error = function\n  | Unclosed (l, _, _, _)\n  | Applicative_path l\n  | Variable_in_scope (l, _)\n  | Other l\n  | Not_expecting (l, _)\n  | Ill_formed_ast (l, _)\n  | Invalid_package_type (l, _)\n  | Expecting (l, _) ->\n    l\n\nlet ill_formed_ast loc s = raise (Error (Ill_formed_ast (loc, s)))\n"
  },
  {
    "path": "analysis/vendor/ml/syntaxerr.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1997 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Auxiliary type for reporting syntax errors *)\n\nopen Format\n\ntype error =\n  | Unclosed of Location.t * string * Location.t * string\n  | Expecting of Location.t * string\n  | Not_expecting of Location.t * string\n  | Applicative_path of Location.t\n  | Variable_in_scope of Location.t * string\n  | Other of Location.t\n  | Ill_formed_ast of Location.t * string\n  | Invalid_package_type of Location.t * string\n\nexception Error of error\nexception Escape_error\n\nval report_error : formatter -> error -> unit\n(** @deprecated Use {!Location.error_of_exn}, {!Location.report_error}. *)\n\nval location_of_error : error -> Location.t\nval ill_formed_ast : Location.t -> string -> 'a\n"
  },
  {
    "path": "analysis/vendor/ml/tast_iterator.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                          Isaac \"Izzy\" Avram                            *)\n(*                                                                        *)\n(*   Copyright 2019 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Typedtree\n\ntype iterator = {\n  case: iterator -> case -> unit;\n  cases: iterator -> case list -> unit;\n  class_description: iterator -> class_description -> unit;\n  class_signature: iterator -> class_signature -> unit;\n  class_type: iterator -> class_type -> unit;\n  class_type_declaration: iterator -> class_type_declaration -> unit;\n  class_type_field: iterator -> class_type_field -> unit;\n  env: iterator -> Env.t -> unit;\n  expr: iterator -> expression -> unit;\n  extension_constructor: iterator -> extension_constructor -> unit;\n  module_binding: iterator -> module_binding -> unit;\n  module_coercion: iterator -> module_coercion -> unit;\n  module_declaration: iterator -> module_declaration -> unit;\n  module_expr: iterator -> module_expr -> unit;\n  module_type: iterator -> module_type -> unit;\n  module_type_declaration: iterator -> 
module_type_declaration -> unit;\n  package_type: iterator -> package_type -> unit;\n  pat: iterator -> pattern -> unit;\n  row_field: iterator -> row_field -> unit;\n  object_field: iterator -> object_field -> unit;\n  signature: iterator -> signature -> unit;\n  signature_item: iterator -> signature_item -> unit;\n  structure: iterator -> structure -> unit;\n  structure_item: iterator -> structure_item -> unit;\n  typ: iterator -> core_type -> unit;\n  type_declaration: iterator -> type_declaration -> unit;\n  type_declarations: iterator -> rec_flag * type_declaration list -> unit;\n  type_extension: iterator -> type_extension -> unit;\n  type_kind: iterator -> type_kind -> unit;\n  value_binding: iterator -> value_binding -> unit;\n  value_bindings: iterator -> rec_flag * value_binding list -> unit;\n  value_description: iterator -> value_description -> unit;\n  with_constraint: iterator -> with_constraint -> unit;\n}\n\nlet structure sub {str_items; str_final_env; _} =\n  List.iter (sub.structure_item sub) str_items;\n  sub.env sub str_final_env\n\nlet class_infos sub f x =\n  List.iter (fun (ct, _) -> sub.typ sub ct) x.ci_params;\n  f x.ci_expr\n\nlet module_type_declaration sub {mtd_type; _} =\n  Option.iter (sub.module_type sub) mtd_type\n\nlet module_declaration sub {md_type; _} = sub.module_type sub md_type\nlet include_infos f {incl_mod; _} = f incl_mod\nlet class_type_declaration sub x = class_infos sub (sub.class_type sub) x\n\nlet structure_item sub {str_desc; str_env; _} =\n  sub.env sub str_env;\n  match str_desc with\n  | Tstr_eval (exp, _) -> sub.expr sub exp\n  | Tstr_value (rec_flag, list) -> sub.value_bindings sub (rec_flag, list)\n  | Tstr_primitive v -> sub.value_description sub v\n  | Tstr_type (rec_flag, list) -> sub.type_declarations sub (rec_flag, list)\n  | Tstr_typext te -> sub.type_extension sub te\n  | Tstr_exception ext -> sub.extension_constructor sub ext\n  | Tstr_module mb -> sub.module_binding sub mb\n  | Tstr_recmodule list -> 
List.iter (sub.module_binding sub) list\n  | Tstr_modtype x -> sub.module_type_declaration sub x\n  | Tstr_class _ -> ()\n  | Tstr_class_type list ->\n    List.iter (fun (_, _, cltd) -> sub.class_type_declaration sub cltd) list\n  | Tstr_include incl -> include_infos (sub.module_expr sub) incl\n  | Tstr_open _ -> ()\n  | Tstr_attribute _ -> ()\n\nlet value_description sub x = sub.typ sub x.val_desc\nlet label_decl sub {ld_type; _} = sub.typ sub ld_type\n\nlet constructor_args sub = function\n  | Cstr_tuple l -> List.iter (sub.typ sub) l\n  | Cstr_record l -> List.iter (label_decl sub) l\n\nlet constructor_decl sub {cd_args; cd_res; _} =\n  constructor_args sub cd_args;\n  Option.iter (sub.typ sub) cd_res\n\nlet type_kind sub = function\n  | Ttype_abstract -> ()\n  | Ttype_variant list -> List.iter (constructor_decl sub) list\n  | Ttype_record list -> List.iter (label_decl sub) list\n  | Ttype_open -> ()\n\nlet type_declaration sub {typ_cstrs; typ_kind; typ_manifest; typ_params; _} =\n  List.iter\n    (fun (c1, c2, _) ->\n      sub.typ sub c1;\n      sub.typ sub c2)\n    typ_cstrs;\n  sub.type_kind sub typ_kind;\n  Option.iter (sub.typ sub) typ_manifest;\n  List.iter (fun (c, _) -> sub.typ sub c) typ_params\n\nlet type_declarations sub (_, list) = List.iter (sub.type_declaration sub) list\n\nlet type_extension sub {tyext_constructors; tyext_params; _} =\n  List.iter (fun (c, _) -> sub.typ sub c) tyext_params;\n  List.iter (sub.extension_constructor sub) tyext_constructors\n\nlet extension_constructor sub {ext_kind; _} =\n  match ext_kind with\n  | Text_decl (ctl, cto) ->\n    constructor_args sub ctl;\n    Option.iter (sub.typ sub) cto\n  | Text_rebind _ -> ()\n\nlet pat sub {pat_extra; pat_desc; pat_env; _} =\n  let extra = function\n    | Tpat_type _ -> ()\n    | Tpat_unpack -> ()\n    | Tpat_open (_, _, env) -> sub.env sub env\n    | Tpat_constraint ct -> sub.typ sub ct\n  in\n  sub.env sub pat_env;\n  List.iter (fun (e, _, _) -> extra e) pat_extra;\n  match 
pat_desc with\n  | Tpat_any -> ()\n  | Tpat_var _ -> ()\n  | Tpat_constant _ -> ()\n  | Tpat_tuple l -> List.iter (sub.pat sub) l\n  | Tpat_construct (_, _, l) -> List.iter (sub.pat sub) l\n  | Tpat_variant (_, po, _) -> Option.iter (sub.pat sub) po\n  | Tpat_record (l, _) -> List.iter (fun (_, _, i) -> sub.pat sub i) l\n  | Tpat_array l -> List.iter (sub.pat sub) l\n  | Tpat_or (p1, p2, _) ->\n    sub.pat sub p1;\n    sub.pat sub p2\n  | Tpat_alias (p, _, _) -> sub.pat sub p\n  | Tpat_lazy p -> sub.pat sub p\n\nlet expr sub {exp_extra; exp_desc; exp_env; _} =\n  let extra = function\n    | Texp_constraint cty -> sub.typ sub cty\n    | Texp_coerce (cty1, cty2) ->\n      Option.iter (sub.typ sub) cty1;\n      sub.typ sub cty2\n    | Texp_newtype _ -> ()\n    | Texp_poly cto -> Option.iter (sub.typ sub) cto\n    | Texp_open (_, _, _, _) -> ()\n  in\n  List.iter (fun (e, _, _) -> extra e) exp_extra;\n  sub.env sub exp_env;\n  match exp_desc with\n  | Texp_ident _ -> ()\n  | Texp_constant _ -> ()\n  | Texp_let (rec_flag, list, exp) ->\n    sub.value_bindings sub (rec_flag, list);\n    sub.expr sub exp\n  | Texp_function {cases; _} -> sub.cases sub cases\n  | Texp_apply (exp, list) ->\n    sub.expr sub exp;\n    List.iter (fun (_, o) -> Option.iter (sub.expr sub) o) list\n  | Texp_match (exp, list1, list2, _) ->\n    sub.expr sub exp;\n    sub.cases sub list1;\n    sub.cases sub list2\n  | Texp_try (exp, cases) ->\n    sub.expr sub exp;\n    sub.cases sub cases\n  | Texp_tuple list -> List.iter (sub.expr sub) list\n  | Texp_construct (_, _, args) -> List.iter (sub.expr sub) args\n  | Texp_variant (_, expo) -> Option.iter (sub.expr sub) expo\n  | Texp_record {fields; extended_expression; _} ->\n    Array.iter\n      (function\n        | _, Kept _ -> ()\n        | _, Overridden (_, exp) -> sub.expr sub exp)\n      fields;\n    Option.iter (sub.expr sub) extended_expression\n  | Texp_field (exp, _, _) -> sub.expr sub exp\n  | Texp_setfield (exp1, _, _, exp2) ->\n    
sub.expr sub exp1;\n    sub.expr sub exp2\n  | Texp_array list -> List.iter (sub.expr sub) list\n  | Texp_ifthenelse (exp1, exp2, expo) ->\n    sub.expr sub exp1;\n    sub.expr sub exp2;\n    Option.iter (sub.expr sub) expo\n  | Texp_sequence (exp1, exp2) ->\n    sub.expr sub exp1;\n    sub.expr sub exp2\n  | Texp_while (exp1, exp2) ->\n    sub.expr sub exp1;\n    sub.expr sub exp2\n  | Texp_for (_, _, exp1, exp2, _, exp3) ->\n    sub.expr sub exp1;\n    sub.expr sub exp2;\n    sub.expr sub exp3\n  | Texp_send (exp, _, expo) ->\n    sub.expr sub exp;\n    Option.iter (sub.expr sub) expo\n  | Texp_new _ -> ()\n  | Texp_instvar _ -> ()\n  | Texp_setinstvar _ -> ()\n  | Texp_override _ -> ()\n  | Texp_letmodule (_, _, mexpr, exp) ->\n    sub.module_expr sub mexpr;\n    sub.expr sub exp\n  | Texp_letexception (cd, exp) ->\n    sub.extension_constructor sub cd;\n    sub.expr sub exp\n  | Texp_assert exp -> sub.expr sub exp\n  | Texp_lazy exp -> sub.expr sub exp\n  | Texp_object _ -> ()\n  | Texp_pack mexpr -> sub.module_expr sub mexpr\n  | Texp_unreachable -> ()\n  | Texp_extension_constructor _ -> ()\n\nlet package_type sub {pack_fields; _} =\n  List.iter (fun (_, p) -> sub.typ sub p) pack_fields\n\nlet signature sub {sig_items; sig_final_env; _} =\n  sub.env sub sig_final_env;\n  List.iter (sub.signature_item sub) sig_items\n\nlet signature_item sub {sig_desc; sig_env; _} =\n  sub.env sub sig_env;\n  match sig_desc with\n  | Tsig_value v -> sub.value_description sub v\n  | Tsig_type (rf, tdl) -> sub.type_declarations sub (rf, tdl)\n  | Tsig_typext te -> sub.type_extension sub te\n  | Tsig_exception ext -> sub.extension_constructor sub ext\n  | Tsig_module x -> sub.module_declaration sub x\n  | Tsig_recmodule list -> List.iter (sub.module_declaration sub) list\n  | Tsig_modtype x -> sub.module_type_declaration sub x\n  | Tsig_include incl -> include_infos (sub.module_type sub) incl\n  | Tsig_class () -> ()\n  | Tsig_class_type list -> List.iter 
(sub.class_type_declaration sub) list\n  | Tsig_open _od -> ()\n  | Tsig_attribute _ -> ()\n\nlet class_description sub x = class_infos sub (sub.class_type sub) x\n\nlet module_type sub {mty_desc; mty_env; _} =\n  sub.env sub mty_env;\n  match mty_desc with\n  | Tmty_ident _ -> ()\n  | Tmty_alias _ -> ()\n  | Tmty_signature sg -> sub.signature sub sg\n  | Tmty_functor (_, _, mtype1, mtype2) ->\n    Option.iter (sub.module_type sub) mtype1;\n    sub.module_type sub mtype2\n  | Tmty_with (mtype, list) ->\n    sub.module_type sub mtype;\n    List.iter (fun (_, _, e) -> sub.with_constraint sub e) list\n  | Tmty_typeof mexpr -> sub.module_expr sub mexpr\n\nlet with_constraint sub = function\n  | Twith_type decl -> sub.type_declaration sub decl\n  | Twith_typesubst decl -> sub.type_declaration sub decl\n  | Twith_module _ -> ()\n  | Twith_modsubst _ -> ()\n\nlet module_coercion sub = function\n  | Tcoerce_none -> ()\n  | Tcoerce_functor (c1, c2) ->\n    sub.module_coercion sub c1;\n    sub.module_coercion sub c2\n  | Tcoerce_alias (_, c1) -> sub.module_coercion sub c1\n  | Tcoerce_structure (l1, l2, _) ->\n    List.iter (fun (_, c) -> sub.module_coercion sub c) l1;\n    List.iter (fun (_, _, c) -> sub.module_coercion sub c) l2\n  | Tcoerce_primitive {pc_env; _} -> sub.env sub pc_env\n\nlet module_expr sub {mod_desc; mod_env; _} =\n  sub.env sub mod_env;\n  match mod_desc with\n  | Tmod_ident _ -> ()\n  | Tmod_structure st -> sub.structure sub st\n  | Tmod_functor (_, _, mtype, mexpr) ->\n    Option.iter (sub.module_type sub) mtype;\n    sub.module_expr sub mexpr\n  | Tmod_apply (mexp1, mexp2, c) ->\n    sub.module_expr sub mexp1;\n    sub.module_expr sub mexp2;\n    sub.module_coercion sub c\n  | Tmod_constraint (mexpr, _, Tmodtype_implicit, c) ->\n    sub.module_expr sub mexpr;\n    sub.module_coercion sub c\n  | Tmod_constraint (mexpr, _, Tmodtype_explicit mtype, c) ->\n    sub.module_expr sub mexpr;\n    sub.module_type sub mtype;\n    sub.module_coercion sub c\n  | 
Tmod_unpack (exp, _) -> sub.expr sub exp\n\nlet module_binding sub {mb_expr; _} = sub.module_expr sub mb_expr\n\nlet class_type sub {cltyp_desc; cltyp_env; _} =\n  sub.env sub cltyp_env;\n  match cltyp_desc with\n  | Tcty_signature csg -> sub.class_signature sub csg\n  | Tcty_constr (_, _, list) -> List.iter (sub.typ sub) list\n  | Tcty_arrow (_, ct, cl) ->\n    sub.typ sub ct;\n    sub.class_type sub cl\n  | Tcty_open (_, _, _, _, e) -> sub.class_type sub e\n\nlet class_signature sub {csig_self; csig_fields; _} =\n  sub.typ sub csig_self;\n  List.iter (sub.class_type_field sub) csig_fields\n\nlet class_type_field sub {ctf_desc; _} =\n  match ctf_desc with\n  | Tctf_inherit ct -> sub.class_type sub ct\n  | Tctf_val (_, _, _, ct) -> sub.typ sub ct\n  | Tctf_method (_, _, _, ct) -> sub.typ sub ct\n  | Tctf_constraint (ct1, ct2) ->\n    sub.typ sub ct1;\n    sub.typ sub ct2\n  | Tctf_attribute _ -> ()\n\nlet typ sub {ctyp_desc; ctyp_env; _} =\n  sub.env sub ctyp_env;\n  match ctyp_desc with\n  | Ttyp_any -> ()\n  | Ttyp_var _ -> ()\n  | Ttyp_arrow (_, ct1, ct2) ->\n    sub.typ sub ct1;\n    sub.typ sub ct2\n  | Ttyp_tuple list -> List.iter (sub.typ sub) list\n  | Ttyp_constr (_, _, list) -> List.iter (sub.typ sub) list\n  | Ttyp_object (list, _) -> List.iter (sub.object_field sub) list\n  | Ttyp_class (_, _, list) -> List.iter (sub.typ sub) list\n  | Ttyp_alias (ct, _) -> sub.typ sub ct\n  | Ttyp_variant (list, _, _) -> List.iter (sub.row_field sub) list\n  | Ttyp_poly (_, ct) -> sub.typ sub ct\n  | Ttyp_package pack -> sub.package_type sub pack\n\nlet row_field sub = function\n  | Ttag (_label, _attrs, _bool, list) -> List.iter (sub.typ sub) list\n  | Tinherit ct -> sub.typ sub ct\n\nlet object_field sub = function\n  | OTtag (_, _, ct) | OTinherit ct -> sub.typ sub ct\n\nlet value_bindings sub (_, list) = List.iter (sub.value_binding sub) list\nlet cases sub l = List.iter (sub.case sub) l\n\nlet case sub {c_lhs; c_guard; c_rhs} =\n  sub.pat sub c_lhs;\n  Option.iter 
(sub.expr sub) c_guard;\n  sub.expr sub c_rhs\n\nlet value_binding sub {vb_pat; vb_expr; _} =\n  sub.pat sub vb_pat;\n  sub.expr sub vb_expr\n\nlet env _sub _ = ()\n\nlet default_iterator =\n  {\n    case;\n    cases;\n    class_description;\n    class_signature;\n    class_type;\n    class_type_declaration;\n    class_type_field;\n    env;\n    expr;\n    extension_constructor;\n    module_binding;\n    module_coercion;\n    module_declaration;\n    module_expr;\n    module_type;\n    module_type_declaration;\n    package_type;\n    pat;\n    object_field;\n    row_field;\n    signature;\n    signature_item;\n    structure;\n    structure_item;\n    typ;\n    type_declaration;\n    type_declarations;\n    type_extension;\n    type_kind;\n    value_binding;\n    value_bindings;\n    value_description;\n    with_constraint;\n  }\n"
  },
  {
    "path": "analysis/vendor/ml/tast_iterator.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                           Isaac \"Izzy\" Avram                           *)\n(*                                                                        *)\n(*   Copyright 2019 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Allows the implementation of typed tree inspection using open recursion *)\n\nopen Asttypes\nopen Typedtree\n\ntype iterator = {\n  case: iterator -> case -> unit;\n  cases: iterator -> case list -> unit;\n  class_description: iterator -> class_description -> unit;\n  class_signature: iterator -> class_signature -> unit;\n  class_type: iterator -> class_type -> unit;\n  class_type_declaration: iterator -> class_type_declaration -> unit;\n  class_type_field: iterator -> class_type_field -> unit;\n  env: iterator -> Env.t -> unit;\n  expr: iterator -> expression -> unit;\n  extension_constructor: iterator -> extension_constructor -> unit;\n  module_binding: iterator -> module_binding -> unit;\n  module_coercion: iterator -> module_coercion -> unit;\n  module_declaration: iterator -> module_declaration -> unit;\n  module_expr: iterator -> module_expr -> unit;\n  
module_type: iterator -> module_type -> unit;\n  module_type_declaration: iterator -> module_type_declaration -> unit;\n  package_type: iterator -> package_type -> unit;\n  pat: iterator -> pattern -> unit;\n  row_field: iterator -> row_field -> unit;\n  object_field: iterator -> object_field -> unit;\n  signature: iterator -> signature -> unit;\n  signature_item: iterator -> signature_item -> unit;\n  structure: iterator -> structure -> unit;\n  structure_item: iterator -> structure_item -> unit;\n  typ: iterator -> core_type -> unit;\n  type_declaration: iterator -> type_declaration -> unit;\n  type_declarations: iterator -> rec_flag * type_declaration list -> unit;\n  type_extension: iterator -> type_extension -> unit;\n  type_kind: iterator -> type_kind -> unit;\n  value_binding: iterator -> value_binding -> unit;\n  value_bindings: iterator -> rec_flag * value_binding list -> unit;\n  value_description: iterator -> value_description -> unit;\n  with_constraint: iterator -> with_constraint -> unit;\n}\n\nval default_iterator : iterator\n"
  },
  {
    "path": "analysis/vendor/ml/tast_mapper.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                        Alain Frisch, LexiFi                            *)\n(*                                                                        *)\n(*   Copyright 2015 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Typedtree\n\n(* TODO: add 'methods' for location, attribute, extension,\n   open_description, include_declaration, include_description *)\n\ntype mapper = {\n  case: mapper -> case -> case;\n  cases: mapper -> case list -> case list;\n  class_description: mapper -> class_description -> class_description;\n  class_signature: mapper -> class_signature -> class_signature;\n  class_type: mapper -> class_type -> class_type;\n  class_type_declaration:\n    mapper -> class_type_declaration -> class_type_declaration;\n  class_type_field: mapper -> class_type_field -> class_type_field;\n  env: mapper -> Env.t -> Env.t;\n  expr: mapper -> expression -> expression;\n  extension_constructor:\n    mapper -> extension_constructor -> extension_constructor;\n  module_binding: mapper -> module_binding -> module_binding;\n  module_coercion: mapper -> module_coercion -> 
module_coercion;\n  module_declaration: mapper -> module_declaration -> module_declaration;\n  module_expr: mapper -> module_expr -> module_expr;\n  module_type: mapper -> module_type -> module_type;\n  module_type_declaration:\n    mapper -> module_type_declaration -> module_type_declaration;\n  package_type: mapper -> package_type -> package_type;\n  pat: mapper -> pattern -> pattern;\n  row_field: mapper -> row_field -> row_field;\n  object_field: mapper -> object_field -> object_field;\n  signature: mapper -> signature -> signature;\n  signature_item: mapper -> signature_item -> signature_item;\n  structure: mapper -> structure -> structure;\n  structure_item: mapper -> structure_item -> structure_item;\n  typ: mapper -> core_type -> core_type;\n  type_declaration: mapper -> type_declaration -> type_declaration;\n  type_declarations:\n    mapper ->\n    rec_flag * type_declaration list ->\n    rec_flag * type_declaration list;\n  type_extension: mapper -> type_extension -> type_extension;\n  type_kind: mapper -> type_kind -> type_kind;\n  value_binding: mapper -> value_binding -> value_binding;\n  value_bindings:\n    mapper -> rec_flag * value_binding list -> rec_flag * value_binding list;\n  value_description: mapper -> value_description -> value_description;\n  with_constraint: mapper -> with_constraint -> with_constraint;\n}\n\nlet id x = x\nlet tuple2 f1 f2 (x, y) = (f1 x, f2 y)\nlet tuple3 f1 f2 f3 (x, y, z) = (f1 x, f2 y, f3 z)\nlet opt f = function\n  | None -> None\n  | Some x -> Some (f x)\n\nlet structure sub {str_items; str_type; str_final_env} =\n  {\n    str_items = List.map (sub.structure_item sub) str_items;\n    str_final_env = sub.env sub str_final_env;\n    str_type;\n  }\n\nlet class_infos sub f x =\n  {\n    x with\n    ci_params = List.map (tuple2 (sub.typ sub) id) x.ci_params;\n    ci_expr = f x.ci_expr;\n  }\n\nlet module_type_declaration sub x =\n  let mtd_type = opt (sub.module_type sub) x.mtd_type in\n  {x with mtd_type}\n\nlet 
module_declaration sub x =\n  let md_type = sub.module_type sub x.md_type in\n  {x with md_type}\n\nlet include_infos f x = {x with incl_mod = f x.incl_mod}\n\nlet class_type_declaration sub x = class_infos sub (sub.class_type sub) x\n\nlet structure_item sub {str_desc; str_loc; str_env} =\n  let str_env = sub.env sub str_env in\n  let str_desc =\n    match str_desc with\n    | Tstr_eval (exp, attrs) -> Tstr_eval (sub.expr sub exp, attrs)\n    | Tstr_value (rec_flag, list) ->\n      let rec_flag, list = sub.value_bindings sub (rec_flag, list) in\n      Tstr_value (rec_flag, list)\n    | Tstr_primitive v -> Tstr_primitive (sub.value_description sub v)\n    | Tstr_type (rec_flag, list) ->\n      let rec_flag, list = sub.type_declarations sub (rec_flag, list) in\n      Tstr_type (rec_flag, list)\n    | Tstr_typext te -> Tstr_typext (sub.type_extension sub te)\n    | Tstr_exception ext -> Tstr_exception (sub.extension_constructor sub ext)\n    | Tstr_module mb -> Tstr_module (sub.module_binding sub mb)\n    | Tstr_recmodule list ->\n      Tstr_recmodule (List.map (sub.module_binding sub) list)\n    | Tstr_modtype x -> Tstr_modtype (sub.module_type_declaration sub x)\n    | Tstr_class () -> Tstr_class ()\n    | Tstr_class_type list ->\n      Tstr_class_type\n        (List.map (tuple3 id id (sub.class_type_declaration sub)) list)\n    | Tstr_include incl ->\n      Tstr_include (include_infos (sub.module_expr sub) incl)\n    | (Tstr_open _ | Tstr_attribute _) as d -> d\n  in\n  {str_desc; str_env; str_loc}\n\nlet value_description sub x =\n  let val_desc = sub.typ sub x.val_desc in\n  {x with val_desc}\n\nlet label_decl sub x =\n  let ld_type = sub.typ sub x.ld_type in\n  {x with ld_type}\n\nlet constructor_args sub = function\n  | Cstr_tuple l -> Cstr_tuple (List.map (sub.typ sub) l)\n  | Cstr_record l -> Cstr_record (List.map (label_decl sub) l)\n\nlet constructor_decl sub cd =\n  let cd_args = constructor_args sub cd.cd_args in\n  let cd_res = opt (sub.typ sub) 
cd.cd_res in\n  {cd with cd_args; cd_res}\n\nlet type_kind sub = function\n  | Ttype_abstract -> Ttype_abstract\n  | Ttype_variant list -> Ttype_variant (List.map (constructor_decl sub) list)\n  | Ttype_record list -> Ttype_record (List.map (label_decl sub) list)\n  | Ttype_open -> Ttype_open\n\nlet type_declaration sub x =\n  let typ_cstrs =\n    List.map (tuple3 (sub.typ sub) (sub.typ sub) id) x.typ_cstrs\n  in\n  let typ_kind = sub.type_kind sub x.typ_kind in\n  let typ_manifest = opt (sub.typ sub) x.typ_manifest in\n  let typ_params = List.map (tuple2 (sub.typ sub) id) x.typ_params in\n  {x with typ_cstrs; typ_kind; typ_manifest; typ_params}\n\nlet type_declarations sub (rec_flag, list) =\n  (rec_flag, List.map (sub.type_declaration sub) list)\n\nlet type_extension sub x =\n  let tyext_params = List.map (tuple2 (sub.typ sub) id) x.tyext_params in\n  let tyext_constructors =\n    List.map (sub.extension_constructor sub) x.tyext_constructors\n  in\n  {x with tyext_constructors; tyext_params}\n\nlet extension_constructor sub x =\n  let ext_kind =\n    match x.ext_kind with\n    | Text_decl (ctl, cto) ->\n      Text_decl (constructor_args sub ctl, opt (sub.typ sub) cto)\n    | Text_rebind _ as d -> d\n  in\n  {x with ext_kind}\n\nlet pat sub x =\n  let extra = function\n    | (Tpat_type _ | Tpat_unpack) as d -> d\n    | Tpat_open (path, loc, env) -> Tpat_open (path, loc, sub.env sub env)\n    | Tpat_constraint ct -> Tpat_constraint (sub.typ sub ct)\n  in\n  let pat_env = sub.env sub x.pat_env in\n  let pat_extra = List.map (tuple3 extra id id) x.pat_extra in\n  let pat_desc =\n    match x.pat_desc with\n    | (Tpat_any | Tpat_var _ | Tpat_constant _) as d -> d\n    | Tpat_tuple l -> Tpat_tuple (List.map (sub.pat sub) l)\n    | Tpat_construct (loc, cd, l) ->\n      Tpat_construct (loc, cd, List.map (sub.pat sub) l)\n    | Tpat_variant (l, po, rd) -> Tpat_variant (l, opt (sub.pat sub) po, rd)\n    | Tpat_record (l, closed) ->\n      Tpat_record (List.map (tuple3 id 
id (sub.pat sub)) l, closed)\n    | Tpat_array l -> Tpat_array (List.map (sub.pat sub) l)\n    | Tpat_or (p1, p2, rd) -> Tpat_or (sub.pat sub p1, sub.pat sub p2, rd)\n    | Tpat_alias (p, id, s) -> Tpat_alias (sub.pat sub p, id, s)\n    | Tpat_lazy p -> Tpat_lazy (sub.pat sub p)\n  in\n  {x with pat_extra; pat_desc; pat_env}\n\nlet expr sub x =\n  let extra = function\n    | Texp_constraint cty -> Texp_constraint (sub.typ sub cty)\n    | Texp_coerce (cty1, cty2) ->\n      Texp_coerce (opt (sub.typ sub) cty1, sub.typ sub cty2)\n    | Texp_open (ovf, path, loc, env) ->\n      Texp_open (ovf, path, loc, sub.env sub env)\n    | Texp_newtype _ as d -> d\n    | Texp_poly cto -> Texp_poly (opt (sub.typ sub) cto)\n  in\n  let exp_extra = List.map (tuple3 extra id id) x.exp_extra in\n  let exp_env = sub.env sub x.exp_env in\n  let exp_desc =\n    match x.exp_desc with\n    | (Texp_ident _ | Texp_constant _) as d -> d\n    | Texp_let (rec_flag, list, exp) ->\n      let rec_flag, list = sub.value_bindings sub (rec_flag, list) in\n      Texp_let (rec_flag, list, sub.expr sub exp)\n    | Texp_function {arg_label; param; cases; partial} ->\n      Texp_function {arg_label; param; cases = sub.cases sub cases; partial}\n    | Texp_apply (exp, list) ->\n      Texp_apply\n        (sub.expr sub exp, List.map (tuple2 id (opt (sub.expr sub))) list)\n    | Texp_match (exp, cases, exn_cases, p) ->\n      Texp_match\n        (sub.expr sub exp, sub.cases sub cases, sub.cases sub exn_cases, p)\n    | Texp_try (exp, cases) -> Texp_try (sub.expr sub exp, sub.cases sub cases)\n    | Texp_tuple list -> Texp_tuple (List.map (sub.expr sub) list)\n    | Texp_construct (lid, cd, args) ->\n      Texp_construct (lid, cd, List.map (sub.expr sub) args)\n    | Texp_variant (l, expo) -> Texp_variant (l, opt (sub.expr sub) expo)\n    | Texp_record {fields; representation; extended_expression} ->\n      let fields =\n        Array.map\n          (function\n            | label, Kept t -> (label, Kept t)\n    
        | label, Overridden (lid, exp) ->\n              (label, Overridden (lid, sub.expr sub exp)))\n          fields\n      in\n      Texp_record\n        {\n          fields;\n          representation;\n          extended_expression = opt (sub.expr sub) extended_expression;\n        }\n    | Texp_field (exp, lid, ld) -> Texp_field (sub.expr sub exp, lid, ld)\n    | Texp_setfield (exp1, lid, ld, exp2) ->\n      Texp_setfield (sub.expr sub exp1, lid, ld, sub.expr sub exp2)\n    | Texp_array list -> Texp_array (List.map (sub.expr sub) list)\n    | Texp_ifthenelse (exp1, exp2, expo) ->\n      Texp_ifthenelse\n        (sub.expr sub exp1, sub.expr sub exp2, opt (sub.expr sub) expo)\n    | Texp_sequence (exp1, exp2) ->\n      Texp_sequence (sub.expr sub exp1, sub.expr sub exp2)\n    | Texp_while (exp1, exp2) ->\n      Texp_while (sub.expr sub exp1, sub.expr sub exp2)\n    | Texp_for (id, p, exp1, exp2, dir, exp3) ->\n      Texp_for\n        (id, p, sub.expr sub exp1, sub.expr sub exp2, dir, sub.expr sub exp3)\n    | Texp_send (exp, meth, expo) ->\n      Texp_send (sub.expr sub exp, meth, opt (sub.expr sub) expo)\n    | (Texp_new _ | Texp_instvar _) as d -> d\n    | Texp_setinstvar _ | Texp_override _ -> assert false\n    | Texp_letmodule (id, s, mexpr, exp) ->\n      Texp_letmodule (id, s, sub.module_expr sub mexpr, sub.expr sub exp)\n    | Texp_letexception (cd, exp) ->\n      Texp_letexception (sub.extension_constructor sub cd, sub.expr sub exp)\n    | Texp_assert exp -> Texp_assert (sub.expr sub exp)\n    | Texp_lazy exp -> Texp_lazy (sub.expr sub exp)\n    | Texp_object () -> Texp_object ()\n    | Texp_pack mexpr -> Texp_pack (sub.module_expr sub mexpr)\n    | Texp_unreachable -> Texp_unreachable\n    | Texp_extension_constructor _ as e -> e\n  in\n  {x with exp_extra; exp_desc; exp_env}\n\nlet package_type sub x =\n  let pack_fields = List.map (tuple2 id (sub.typ sub)) x.pack_fields in\n  {x with pack_fields}\n\nlet signature sub x =\n  let sig_final_env = 
sub.env sub x.sig_final_env in\n  let sig_items = List.map (sub.signature_item sub) x.sig_items in\n  {x with sig_items; sig_final_env}\n\nlet signature_item sub x =\n  let sig_env = sub.env sub x.sig_env in\n  let sig_desc =\n    match x.sig_desc with\n    | Tsig_value v -> Tsig_value (sub.value_description sub v)\n    | Tsig_type (rec_flag, list) ->\n      let rec_flag, list = sub.type_declarations sub (rec_flag, list) in\n      Tsig_type (rec_flag, list)\n    | Tsig_typext te -> Tsig_typext (sub.type_extension sub te)\n    | Tsig_exception ext -> Tsig_exception (sub.extension_constructor sub ext)\n    | Tsig_module x -> Tsig_module (sub.module_declaration sub x)\n    | Tsig_recmodule list ->\n      Tsig_recmodule (List.map (sub.module_declaration sub) list)\n    | Tsig_modtype x -> Tsig_modtype (sub.module_type_declaration sub x)\n    | Tsig_include incl ->\n      Tsig_include (include_infos (sub.module_type sub) incl)\n    | Tsig_class_type list ->\n      Tsig_class_type (List.map (sub.class_type_declaration sub) list)\n    | (Tsig_class _ | Tsig_open _ | Tsig_attribute _) as d -> d\n  in\n  {x with sig_desc; sig_env}\n\nlet class_description sub x = class_infos sub (sub.class_type sub) x\n\nlet module_type sub x =\n  let mty_env = sub.env sub x.mty_env in\n  let mty_desc =\n    match x.mty_desc with\n    | (Tmty_ident _ | Tmty_alias _) as d -> d\n    | Tmty_signature sg -> Tmty_signature (sub.signature sub sg)\n    | Tmty_functor (id, s, mtype1, mtype2) ->\n      Tmty_functor\n        (id, s, opt (sub.module_type sub) mtype1, sub.module_type sub mtype2)\n    | Tmty_with (mtype, list) ->\n      Tmty_with\n        ( sub.module_type sub mtype,\n          List.map (tuple3 id id (sub.with_constraint sub)) list )\n    | Tmty_typeof mexpr -> Tmty_typeof (sub.module_expr sub mexpr)\n  in\n  {x with mty_desc; mty_env}\n\nlet with_constraint sub = function\n  | Twith_type decl -> Twith_type (sub.type_declaration sub decl)\n  | Twith_typesubst decl -> Twith_typesubst 
(sub.type_declaration sub decl)\n  | (Twith_module _ | Twith_modsubst _) as d -> d\n\nlet module_coercion sub = function\n  | Tcoerce_none -> Tcoerce_none\n  | Tcoerce_functor (c1, c2) ->\n    Tcoerce_functor (sub.module_coercion sub c1, sub.module_coercion sub c2)\n  | Tcoerce_alias (p, c1) -> Tcoerce_alias (p, sub.module_coercion sub c1)\n  | Tcoerce_structure (l1, l2, runtime_fields) ->\n    let l1' = List.map (fun (i, c) -> (i, sub.module_coercion sub c)) l1 in\n    let l2' =\n      List.map (fun (id, i, c) -> (id, i, sub.module_coercion sub c)) l2\n    in\n    Tcoerce_structure (l1', l2', runtime_fields)\n  | Tcoerce_primitive pc ->\n    Tcoerce_primitive {pc with pc_env = sub.env sub pc.pc_env}\n\nlet module_expr sub x =\n  let mod_env = sub.env sub x.mod_env in\n  let mod_desc =\n    match x.mod_desc with\n    | Tmod_ident _ as d -> d\n    | Tmod_structure st -> Tmod_structure (sub.structure sub st)\n    | Tmod_functor (id, s, mtype, mexpr) ->\n      Tmod_functor\n        (id, s, opt (sub.module_type sub) mtype, sub.module_expr sub mexpr)\n    | Tmod_apply (mexp1, mexp2, c) ->\n      Tmod_apply\n        ( sub.module_expr sub mexp1,\n          sub.module_expr sub mexp2,\n          sub.module_coercion sub c )\n    | Tmod_constraint (mexpr, mt, Tmodtype_implicit, c) ->\n      Tmod_constraint\n        ( sub.module_expr sub mexpr,\n          mt,\n          Tmodtype_implicit,\n          sub.module_coercion sub c )\n    | Tmod_constraint (mexpr, mt, Tmodtype_explicit mtype, c) ->\n      Tmod_constraint\n        ( sub.module_expr sub mexpr,\n          mt,\n          Tmodtype_explicit (sub.module_type sub mtype),\n          sub.module_coercion sub c )\n    | Tmod_unpack (exp, mty) -> Tmod_unpack (sub.expr sub exp, mty)\n  in\n  {x with mod_desc; mod_env}\n\nlet module_binding sub x =\n  let mb_expr = sub.module_expr sub x.mb_expr in\n  {x with mb_expr}\n\nlet class_type sub x =\n  let cltyp_env = sub.env sub x.cltyp_env in\n  let cltyp_desc =\n    match x.cltyp_desc 
with\n    | Tcty_signature csg -> Tcty_signature (sub.class_signature sub csg)\n    | Tcty_constr (path, lid, list) ->\n      Tcty_constr (path, lid, List.map (sub.typ sub) list)\n    | Tcty_arrow (label, ct, cl) ->\n      Tcty_arrow (label, sub.typ sub ct, sub.class_type sub cl)\n    | Tcty_open (ovf, p, lid, env, e) ->\n      Tcty_open (ovf, p, lid, sub.env sub env, sub.class_type sub e)\n  in\n  {x with cltyp_desc; cltyp_env}\n\nlet class_signature sub x =\n  let csig_self = sub.typ sub x.csig_self in\n  let csig_fields = List.map (sub.class_type_field sub) x.csig_fields in\n  {x with csig_self; csig_fields}\n\nlet class_type_field sub x =\n  let ctf_desc =\n    match x.ctf_desc with\n    | Tctf_inherit ct -> Tctf_inherit (sub.class_type sub ct)\n    | Tctf_val (s, mut, virt, ct) -> Tctf_val (s, mut, virt, sub.typ sub ct)\n    | Tctf_method (s, priv, virt, ct) ->\n      Tctf_method (s, priv, virt, sub.typ sub ct)\n    | Tctf_constraint (ct1, ct2) ->\n      Tctf_constraint (sub.typ sub ct1, sub.typ sub ct2)\n    | Tctf_attribute _ as d -> d\n  in\n  {x with ctf_desc}\n\nlet typ sub x =\n  let ctyp_env = sub.env sub x.ctyp_env in\n  let ctyp_desc =\n    match x.ctyp_desc with\n    | (Ttyp_any | Ttyp_var _) as d -> d\n    | Ttyp_arrow (label, ct1, ct2) ->\n      Ttyp_arrow (label, sub.typ sub ct1, sub.typ sub ct2)\n    | Ttyp_tuple list -> Ttyp_tuple (List.map (sub.typ sub) list)\n    | Ttyp_constr (path, lid, list) ->\n      Ttyp_constr (path, lid, List.map (sub.typ sub) list)\n    | Ttyp_object (list, closed) ->\n      Ttyp_object (List.map (sub.object_field sub) list, closed)\n    | Ttyp_class (path, lid, list) ->\n      Ttyp_class (path, lid, List.map (sub.typ sub) list)\n    | Ttyp_alias (ct, s) -> Ttyp_alias (sub.typ sub ct, s)\n    | Ttyp_variant (list, closed, labels) ->\n      Ttyp_variant (List.map (sub.row_field sub) list, closed, labels)\n    | Ttyp_poly (sl, ct) -> Ttyp_poly (sl, sub.typ sub ct)\n    | Ttyp_package pack -> Ttyp_package 
(sub.package_type sub pack)\n  in\n  {x with ctyp_desc; ctyp_env}\n\nlet row_field sub = function\n  | Ttag (label, attrs, b, list) ->\n    Ttag (label, attrs, b, List.map (sub.typ sub) list)\n  | Tinherit ct -> Tinherit (sub.typ sub ct)\n\nlet object_field sub = function\n  | OTtag (label, attrs, ct) -> OTtag (label, attrs, sub.typ sub ct)\n  | OTinherit ct -> OTinherit (sub.typ sub ct)\n\nlet value_bindings sub (rec_flag, list) =\n  (rec_flag, List.map (sub.value_binding sub) list)\n\nlet cases sub l = List.map (sub.case sub) l\n\nlet case sub {c_lhs; c_guard; c_rhs} =\n  {\n    c_lhs = sub.pat sub c_lhs;\n    c_guard = opt (sub.expr sub) c_guard;\n    c_rhs = sub.expr sub c_rhs;\n  }\n\nlet value_binding sub x =\n  let vb_pat = sub.pat sub x.vb_pat in\n  let vb_expr = sub.expr sub x.vb_expr in\n  {x with vb_pat; vb_expr}\n\nlet env _sub x = x\n\nlet default =\n  {\n    case;\n    cases;\n    class_description;\n    class_signature;\n    class_type;\n    class_type_declaration;\n    class_type_field;\n    env;\n    expr;\n    extension_constructor;\n    module_binding;\n    module_coercion;\n    module_declaration;\n    module_expr;\n    module_type;\n    module_type_declaration;\n    package_type;\n    pat;\n    row_field;\n    object_field;\n    signature;\n    signature_item;\n    structure;\n    structure_item;\n    typ;\n    type_declaration;\n    type_declarations;\n    type_extension;\n    type_kind;\n    value_binding;\n    value_bindings;\n    value_description;\n    with_constraint;\n  }\n"
  },
  {
    "path": "analysis/vendor/ml/tast_mapper.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                        Alain Frisch, LexiFi                            *)\n(*                                                                        *)\n(*   Copyright 2015 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Typedtree\n\n(** {1 A generic Typedtree mapper} *)\n\ntype mapper = {\n  case: mapper -> case -> case;\n  cases: mapper -> case list -> case list;\n  class_description: mapper -> class_description -> class_description;\n  class_signature: mapper -> class_signature -> class_signature;\n  class_type: mapper -> class_type -> class_type;\n  class_type_declaration:\n    mapper -> class_type_declaration -> class_type_declaration;\n  class_type_field: mapper -> class_type_field -> class_type_field;\n  env: mapper -> Env.t -> Env.t;\n  expr: mapper -> expression -> expression;\n  extension_constructor:\n    mapper -> extension_constructor -> extension_constructor;\n  module_binding: mapper -> module_binding -> module_binding;\n  module_coercion: mapper -> module_coercion -> module_coercion;\n  module_declaration: mapper -> module_declaration -> 
module_declaration;\n  module_expr: mapper -> module_expr -> module_expr;\n  module_type: mapper -> module_type -> module_type;\n  module_type_declaration:\n    mapper -> module_type_declaration -> module_type_declaration;\n  package_type: mapper -> package_type -> package_type;\n  pat: mapper -> pattern -> pattern;\n  row_field: mapper -> row_field -> row_field;\n  object_field: mapper -> object_field -> object_field;\n  signature: mapper -> signature -> signature;\n  signature_item: mapper -> signature_item -> signature_item;\n  structure: mapper -> structure -> structure;\n  structure_item: mapper -> structure_item -> structure_item;\n  typ: mapper -> core_type -> core_type;\n  type_declaration: mapper -> type_declaration -> type_declaration;\n  type_declarations:\n    mapper ->\n    rec_flag * type_declaration list ->\n    rec_flag * type_declaration list;\n  type_extension: mapper -> type_extension -> type_extension;\n  type_kind: mapper -> type_kind -> type_kind;\n  value_binding: mapper -> value_binding -> value_binding;\n  value_bindings:\n    mapper -> rec_flag * value_binding list -> rec_flag * value_binding list;\n  value_description: mapper -> value_description -> value_description;\n  with_constraint: mapper -> with_constraint -> with_constraint;\n}\n\nval default : mapper\n"
  },
  {
    "path": "analysis/vendor/ml/tbl.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype ('k, 'v) t = Empty | Node of ('k, 'v) t * 'k * 'v * ('k, 'v) t * int\n\nlet empty = Empty\n\nlet height = function\n  | Empty -> 0\n  | Node (_, _, _, _, h) -> h\n\nlet create l x d r =\n  let hl = height l and hr = height r in\n  Node (l, x, d, r, if hl >= hr then hl + 1 else hr + 1)\n\nlet bal l x d r =\n  let hl = height l and hr = height r in\n  if hl > hr + 1 then\n    match l with\n    | Node (ll, lv, ld, lr, _) when height ll >= height lr ->\n      create ll lv ld (create lr x d r)\n    | Node (ll, lv, ld, Node (lrl, lrv, lrd, lrr, _), _) ->\n      create (create ll lv ld lrl) lrv lrd (create lrr x d r)\n    | _ -> assert false\n  else if hr > hl + 1 then\n    match r with\n    | Node (rl, rv, rd, rr, _) when height rr >= height rl ->\n      create (create l x d rl) rv rd rr\n    | Node (Node (rll, rlv, rld, rlr, _), rv, rd, rr, _) ->\n      create (create l x d 
rll) rlv rld (create rlr rv rd rr)\n    | _ -> assert false\n  else create l x d r\n\nlet rec add x data = function\n  | Empty -> Node (Empty, x, data, Empty, 1)\n  | Node (l, v, d, r, h) ->\n    let c = compare x v in\n    if c = 0 then Node (l, x, data, r, h)\n    else if c < 0 then bal (add x data l) v d r\n    else bal l v d (add x data r)\n\nlet rec find x = function\n  | Empty -> raise Not_found\n  | Node (l, v, d, r, _) ->\n    let c = compare x v in\n    if c = 0 then d else find x (if c < 0 then l else r)\n\nlet rec find_str (x : string) = function\n  | Empty -> raise Not_found\n  | Node (l, v, d, r, _) ->\n    let c = compare x v in\n    if c = 0 then d else find_str x (if c < 0 then l else r)\n\nlet rec mem x = function\n  | Empty -> false\n  | Node (l, v, _d, r, _) ->\n    let c = compare x v in\n    c = 0 || mem x (if c < 0 then l else r)\n\nlet rec merge t1 t2 =\n  match (t1, t2) with\n  | Empty, t -> t\n  | t, Empty -> t\n  | Node (l1, v1, d1, r1, _h1), Node (l2, v2, d2, r2, _h2) ->\n    bal l1 v1 d1 (bal (merge r1 l2) v2 d2 r2)\n\nlet rec remove x = function\n  | Empty -> Empty\n  | Node (l, v, d, r, _h) ->\n    let c = compare x v in\n    if c = 0 then merge l r\n    else if c < 0 then bal (remove x l) v d r\n    else bal l v d (remove x r)\n\nlet rec iter f = function\n  | Empty -> ()\n  | Node (l, v, d, r, _) ->\n    iter f l;\n    f v d;\n    iter f r\n\nlet rec map f = function\n  | Empty -> Empty\n  | Node (l, v, d, r, h) -> Node (map f l, v, f v d, map f r, h)\n\nlet rec fold f m accu =\n  match m with\n  | Empty -> accu\n  | Node (l, v, d, r, _) -> fold f r (f v d (fold f l accu))\n\nopen Format\n\nlet print print_key print_data ppf tbl =\n  let print_tbl ppf tbl =\n    iter\n      (fun k d -> fprintf ppf \"@[<2>%a ->@ %a;@]@ \" print_key k print_data d)\n      tbl\n  in\n  fprintf ppf \"@[<hv 2>[[%a]]@]\" print_tbl tbl\n"
  },
  {
    "path": "analysis/vendor/ml/tbl.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Association tables from any ordered type to any type.\n   We use the generic ordering to compare keys. *)\n\ntype ('k, 'v) t\n\nval empty : ('k, 'v) t\nval add : 'k -> 'v -> ('k, 'v) t -> ('k, 'v) t\nval find : 'k -> ('k, 'v) t -> 'v\nval find_str : string -> (string, 'v) t -> 'v\nval mem : 'k -> ('k, 'v) t -> bool\nval remove : 'k -> ('k, 'v) t -> ('k, 'v) t\nval iter : ('k -> 'v -> unit) -> ('k, 'v) t -> unit\nval map : ('k -> 'v1 -> 'v2) -> ('k, 'v1) t -> ('k, 'v2) t\nval fold : ('k -> 'v -> 'acc -> 'acc) -> ('k, 'v) t -> 'acc -> 'acc\n\nopen Format\n\nval print :\n  (formatter -> 'k -> unit) ->\n  (formatter -> 'v -> unit) ->\n  formatter ->\n  ('k, 'v) t ->\n  unit\n"
  },
  {
    "path": "analysis/vendor/ml/terminfo.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Basic interface to the terminfo database *)\n\ntype status = Uninitialised | Bad_term | Good_term of int\nexternal setup : out_channel -> status = \"caml_terminfo_setup\"\nexternal backup : int -> unit = \"caml_terminfo_backup\"\nexternal standout : bool -> unit = \"caml_terminfo_standout\"\nexternal resume : int -> unit = \"caml_terminfo_resume\"\n"
  },
  {
    "path": "analysis/vendor/ml/terminfo.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Basic interface to the terminfo database *)\n\ntype status =\n  | Uninitialised\n  | Bad_term\n  | Good_term of int (* number of lines of the terminal *)\nexternal setup : out_channel -> status = \"caml_terminfo_setup\"\nexternal backup : int -> unit = \"caml_terminfo_backup\"\nexternal standout : bool -> unit = \"caml_terminfo_standout\"\nexternal resume : int -> unit = \"caml_terminfo_resume\"\n"
  },
  {
    "path": "analysis/vendor/ml/transl_recmodule.ml",
    "content": "open Types\nopen Typedtree\nopen Lambda\n\ntype error = Circular_dependency of Ident.t\n\nexception Error of Location.t * error\n(* Reorder bindings to honor dependencies.  *)\n\n(* Utilities for compiling \"module rec\" definitions *)\n\nlet undefined_location loc =\n  let fname, line, char = Location.get_pos_info loc.Location.loc_start in\n  let fname = Filename.basename fname in\n  Lconst\n    (Const_block\n       ( Lambda.Blk_tuple,\n         [\n           Const_base (Const_string (fname, None));\n           Const_base (Const_int line);\n           Const_base (Const_int char);\n         ] ))\n\nlet cstr_const = 3\n\nlet cstr_non_const = 2\n\nlet init_shape modl =\n  let add_name x id =\n    Const_block (Blk_tuple, [x; Const_base (Const_string (Ident.name id, None))])\n  in\n  let module_tag_info : Lambda.tag_info =\n    Blk_constructor {name = \"Module\"; num_nonconst = 2; tag = 0; attrs = []}\n  in\n  let value_tag_info : Lambda.tag_info =\n    Blk_constructor {name = \"value\"; num_nonconst = 2; tag = 1; attrs = []}\n  in\n  let rec init_shape_mod env mty =\n    match Mtype.scrape env mty with\n    | Mty_ident _ -> raise Not_found\n    | Mty_alias _ ->\n      Const_block (value_tag_info, [Const_pointer (0, Pt_module_alias)])\n    | Mty_signature sg ->\n      Const_block\n        (module_tag_info, [Const_block (Blk_tuple, init_shape_struct env sg)])\n    | Mty_functor _ -> raise Not_found\n  (* can we do better? 
*)\n  and init_shape_struct env sg =\n    match sg with\n    | [] -> []\n    | Sig_value (id, {val_kind = Val_reg; val_type = ty}) :: rem ->\n      let is_function t =\n        Ast_uncurried_utils.type_is_uncurried_fun t\n        ||\n        match t.desc with\n        | Tarrow _ -> true\n        | _ -> false\n      in\n      let init_v =\n        match Ctype.expand_head env ty with\n        | t when is_function t ->\n          Const_pointer\n            ( 0,\n              Pt_constructor\n                {\n                  name = \"Function\";\n                  const = cstr_const;\n                  non_const = cstr_non_const;\n                  attrs = [];\n                } )\n        | {desc = Tconstr (p, _, _)} when Path.same p Predef.path_lazy_t ->\n          Const_pointer\n            ( 1,\n              Pt_constructor\n                {\n                  name = \"Lazy\";\n                  const = cstr_const;\n                  non_const = cstr_non_const;\n                  attrs = [];\n                } )\n        | _ -> raise Not_found\n      in\n      add_name init_v id :: init_shape_struct env rem\n    | Sig_value (_, {val_kind = Val_prim _}) :: rem -> init_shape_struct env rem\n    | Sig_type (id, tdecl, _) :: rem ->\n      init_shape_struct (Env.add_type ~check:false id tdecl env) rem\n    | Sig_typext _ :: _ -> raise Not_found\n    | Sig_module (id, md, _) :: rem ->\n      add_name (init_shape_mod env md.md_type) id\n      :: init_shape_struct\n           (Env.add_module_declaration ~check:false id md env)\n           rem\n    | Sig_modtype (id, minfo) :: rem ->\n      init_shape_struct (Env.add_modtype id minfo env) rem\n    | Sig_class _ :: _ -> assert false\n    | Sig_class_type _ :: rem -> init_shape_struct env rem\n  in\n  try\n    Some\n      ( undefined_location modl.mod_loc,\n        Lconst (init_shape_mod modl.mod_env modl.mod_type) )\n  with Not_found -> None\n\ntype binding_status = Undefined | Inprogress | Defined\n\nlet 
reorder_rec_bindings bindings =\n  let id = Array.of_list (List.map (fun (id, _, _, _) -> id) bindings)\n  and loc = Array.of_list (List.map (fun (_, loc, _, _) -> loc) bindings)\n  and init = Array.of_list (List.map (fun (_, _, init, _) -> init) bindings)\n  and rhs = Array.of_list (List.map (fun (_, _, _, rhs) -> rhs) bindings) in\n  let fv = Array.map Lambda.free_variables rhs in\n  let num_bindings = Array.length id in\n  let status = Array.make num_bindings Undefined in\n  let res = ref [] in\n  let rec emit_binding i =\n    match status.(i) with\n    | Defined -> ()\n    | Inprogress -> raise (Error (loc.(i), Circular_dependency id.(i)))\n    | Undefined ->\n      if init.(i) = None then (\n        status.(i) <- Inprogress;\n        for j = 0 to num_bindings - 1 do\n          if IdentSet.mem id.(j) fv.(i) then emit_binding j\n        done);\n      res := (id.(i), init.(i), rhs.(i)) :: !res;\n      status.(i) <- Defined\n  in\n  for i = 0 to num_bindings - 1 do\n    match status.(i) with\n    | Undefined -> emit_binding i\n    | Inprogress -> assert false\n    | Defined -> ()\n  done;\n  List.rev !res\n\ntype t = Lambda.lambda\n\n(* Utilities for compiling \"module rec\" definitions *)\n\nlet bs_init_mod (args : t list) loc : t =\n  Lprim\n    (Pccall (Primitive.simple ~name:\"#init_mod\" ~arity:2 ~alloc:true), args, loc)\n\nlet bs_update_mod (args : t list) loc : t =\n  Lprim\n    ( Pccall (Primitive.simple ~name:\"#update_mod\" ~arity:3 ~alloc:true),\n      args,\n      loc )\n\ntype loc = t\n\ntype shape = t\n\ntype binding = Ident.t * (loc * shape) option * t\n\nlet eval_rec_bindings_aux (bindings : binding list) (cont : t) : t =\n  let rec bind_inits args acc =\n    match args with\n    | [] -> acc\n    | (_id, None, _rhs) :: rem -> bind_inits rem acc\n    | (id, Some (loc, shape), _rhs) :: rem ->\n      Lambda.Llet\n        ( Strict,\n          Pgenval,\n          id,\n          bs_init_mod [loc; shape] Location.none,\n          bind_inits rem acc )\n  
in\n  let rec bind_strict args acc =\n    match args with\n    | [] -> acc\n    | (id, None, rhs) :: rem ->\n      Lambda.Llet (Strict, Pgenval, id, rhs, bind_strict rem acc)\n    | (_id, Some _, _rhs) :: rem -> bind_strict rem acc\n  in\n  let rec patch_forwards args =\n    match args with\n    | [] -> cont\n    | (_id, None, _rhs) :: rem -> patch_forwards rem\n    | (id, Some (_loc, shape), rhs) :: rem ->\n      Lsequence\n        (bs_update_mod [shape; Lvar id; rhs] Location.none, patch_forwards rem)\n  in\n  bind_inits bindings (bind_strict bindings (patch_forwards bindings))\n\n(* collect all function declarations\n    if the module creation is just a set of function declarations and consts,\n    it is good\n*)\nlet rec is_function_or_const_block (lam : Lambda.lambda) acc =\n  match lam with\n  | Lprim (Pmakeblock _, args, _) ->\n    Ext_list.for_all args (fun x ->\n        match x with\n        | Lvar id -> Set_ident.mem acc id\n        | Lfunction _ | Lconst _ -> true\n        | _ -> false)\n  | Llet (_, _, id, Lfunction _, cont) ->\n    is_function_or_const_block cont (Set_ident.add acc id)\n  | Lletrec (bindings, cont) -> (\n    let rec aux_bindings bindings acc =\n      match bindings with\n      | [] -> Some acc\n      | (id, Lambda.Lfunction _) :: rest ->\n        aux_bindings rest (Set_ident.add acc id)\n      | (_, _) :: _ -> None\n    in\n    match aux_bindings bindings acc with\n    | None -> false\n    | Some acc -> is_function_or_const_block cont acc)\n  | Llet (_, _, _, Lconst _, cont) -> is_function_or_const_block cont acc\n  | Llet (_, _, id1, Lvar id2, cont) when Set_ident.mem acc id2 ->\n    is_function_or_const_block cont (Set_ident.add acc id1)\n  | _ -> false\n\nlet is_strict_or_all_functions (xs : binding list) =\n  Ext_list.for_all xs (fun (_, opt, rhs) ->\n      match opt with\n      | None -> true\n      | _ -> is_function_or_const_block rhs Set_ident.empty)\n\n(* Without such optimizations:\n\n   {[\n     module rec X : sig\n       
val f : int -> int\n     end = struct\n       let f x = x + 1\n     end\n     and Y : sig\n       val f : int -> int\n     end = struct\n       let f x  = x + 2\n     end\n   ]}\n   would generate such rawlambda:\n\n   {[\n     (setglobal Debug_tmp!\n        (let\n          (X/1002 = (#init_mod [0: \"debug_tmp.ml\" 15 6] [0: [0: [0: 0a \"f\"]]])\n             Y/1003 = (#init_mod [0: \"debug_tmp.ml\" 20 6] [0: [0: [0: 0a \"f\"]]]))\n            (seq\n               (#update_mod [0: [0: [0: 0a \"f\"]]] X/1002\n                  (let (f/1010 = (function x/1011 (+ x/1011 1)))\n                       (makeblock 0/[f] f/1010)))\n               (#update_mod [0: [0: [0: 0a \"f\"]]] Y/1003\n                  (let (f/1012 = (function x/1013 (+ x/1013 2)))\n                       (makeblock 0/[f] f/1012)))\n               (makeblock 0/module/exports X/1002 Y/1003))))\n\n   ]}\n*)\nlet eval_rec_bindings (bindings : binding list) (cont : t) : t =\n  if is_strict_or_all_functions bindings then\n    Lambda.Lletrec (Ext_list.map bindings (fun (id, _, rhs) -> (id, rhs)), cont)\n  else eval_rec_bindings_aux bindings cont\n\nlet compile_recmodule compile_rhs bindings cont =\n  eval_rec_bindings\n    (reorder_rec_bindings\n       (List.map\n          (fun {mb_id = id; mb_expr = modl; mb_loc = loc; _} ->\n            (id, modl.mod_loc, init_shape modl, compile_rhs id modl loc))\n          bindings))\n    cont\n\nlet report_error ppf = function\n  | Circular_dependency id ->\n    Format.fprintf ppf\n      \"@[Cannot safely evaluate the definition@ of the recursively-defined \\\n       module %a@]\"\n      Printtyp.ident id\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, err) -> Some (Location.error_of_printer loc report_error err)\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/transl_recmodule.mli",
    "content": "(* Copyright (C) 2015 - Hongbo Zhang, Authors of ReScript\n * This program is free software: you can redistribute it and/or modify\n * it under the terms of the GNU Lesser General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * In addition to the permissions granted to you by the LGPL, you may combine\n * or link a \"work that uses the Library\" with a publicly distributed version\n * of this file to produce a combined library or application, then distribute\n * that combined work under the terms of your choosing, with no requirement\n * to comply with the obligations normally placed on you by section 4 of the\n * LGPL version 3 (or the corresponding section of a later version of the LGPL\n * should you choose to use a later version).\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU Lesser General Public License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)\n\nval compile_recmodule :\n  (Ident.t -> Typedtree.module_expr -> Location.t -> Lambda.lambda) ->\n  Typedtree.module_binding list ->\n  Lambda.lambda ->\n  Lambda.lambda\n"
  },
  {
    "path": "analysis/vendor/ml/translattribute.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                      Pierre Chambart, OCamlPro                         *)\n(*                                                                        *)\n(*   Copyright 2015 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\ntype t = Parsetree.attribute\n\nlet is_inline_attribute (attr : t) =\n  match attr with\n  | {txt = \"inline\"}, _ -> true\n  | _ -> false\n\nlet is_inlined_attribute (attr : t) =\n  match attr with\n  | {txt = \"inlined\"}, _ -> true\n  | _ -> false\n\nlet find_attribute p (attributes : t list) =\n  let inline_attribute, other_attributes = List.partition p attributes in\n  let attr =\n    match inline_attribute with\n    | [] -> None\n    | [attr] -> Some attr\n    | _ :: ({txt; loc}, _) :: _ ->\n      Location.prerr_warning loc (Warnings.Duplicated_attribute txt);\n      None\n  in\n  (attr, other_attributes)\n\nlet parse_inline_attribute (attr : t option) : Lambda.inline_attribute =\n  match attr with\n  | None -> Default_inline\n  | Some ({txt; loc}, payload) -> (\n    let open Parsetree in\n    (* the 'inline' and 'inlined' attributes can be used as\n       [@inline], 
[@inline never] or [@inline always].\n       [@inline] is equivalent to [@inline always] *)\n    let warning txt =\n      Warnings.Attribute_payload\n        (txt, \"It must be either empty, 'always' or 'never'\")\n    in\n    match payload with\n    | PStr [] -> Always_inline\n    | PStr [{pstr_desc = Pstr_eval ({pexp_desc}, [])}] -> (\n      match pexp_desc with\n      | Pexp_ident {txt = Longident.Lident \"never\"} -> Never_inline\n      | Pexp_ident {txt = Longident.Lident \"always\"} -> Always_inline\n      | _ ->\n        Location.prerr_warning loc (warning txt);\n        Default_inline)\n    | _ ->\n      Location.prerr_warning loc (warning txt);\n      Default_inline)\n\nlet get_inline_attribute l =\n  let attr, _ = find_attribute is_inline_attribute l in\n  parse_inline_attribute attr\n\nlet rec add_inline_attribute (expr : Lambda.lambda) loc attributes =\n  match (expr, get_inline_attribute attributes) with\n  | expr, Default_inline -> expr\n  | Lfunction ({attr} as funct), inline ->\n    (match attr.inline with\n    | Default_inline -> ()\n    | Always_inline | Never_inline ->\n      Location.prerr_warning loc (Warnings.Duplicated_attribute \"inline\"));\n    let attr = {attr with inline} in\n    Lfunction {funct with attr}\n  | Lprim ((Pccall {prim_name = \"#fn_mk\" | \"#fn_mk_unit\"} as p), [e], l), _ ->\n    Lambda.Lprim (p, [add_inline_attribute e loc attributes], l)\n  | expr, Always_inline ->\n    Location.prerr_warning loc (Warnings.Misplaced_attribute \"inline1\");\n    expr\n  | expr, Never_inline ->\n    Location.prerr_warning loc (Warnings.Misplaced_attribute \"inline2\");\n    expr\n\n(* Get the [@inlined] attribute payload (or default if not present).\n   It also returns the expression without this attribute. 
This is\n   used to ensure that this attribute is not misplaced: If it\n   appears on any expression, it is an error, otherwise it would\n   have been removed by this function *)\nlet get_and_remove_inlined_attribute (e : Typedtree.expression) =\n  let attr, exp_attributes =\n    find_attribute is_inlined_attribute e.exp_attributes\n  in\n  let inlined = parse_inline_attribute attr in\n  (inlined, {e with exp_attributes})\n\nlet get_and_remove_inlined_attribute_on_module (e : Typedtree.module_expr) =\n  let attr, mod_attributes =\n    find_attribute is_inlined_attribute e.mod_attributes\n  in\n  let inlined = parse_inline_attribute attr in\n  (inlined, {e with mod_attributes})\n\nlet check_attribute (e : Typedtree.expression) (({txt; loc}, _) : t) =\n  match txt with\n  | \"inline\" -> (\n    match e.exp_desc with\n    | Texp_function _ -> ()\n    | _ -> Location.prerr_warning loc (Warnings.Misplaced_attribute txt))\n  | \"inlined\" ->\n    (* Removed by the Texp_apply cases *)\n    Location.prerr_warning loc (Warnings.Misplaced_attribute txt)\n  | _ -> ()\n\nlet check_attribute_on_module (e : Typedtree.module_expr) (({txt; loc}, _) : t)\n    =\n  match txt with\n  | \"inline\" -> (\n    match e.mod_desc with\n    | Tmod_functor _ -> ()\n    | _ -> Location.prerr_warning loc (Warnings.Misplaced_attribute txt))\n  | \"inlined\" ->\n    (* Removed by the Texp_apply cases *)\n    Location.prerr_warning loc (Warnings.Misplaced_attribute txt)\n  | _ -> ()\n"
  },
  {
    "path": "analysis/vendor/ml/translattribute.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                      Pierre Chambart, OCamlPro                         *)\n(*                                                                        *)\n(*   Copyright 2015 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nval check_attribute : Typedtree.expression -> Parsetree.attribute -> unit\n\nval check_attribute_on_module :\n  Typedtree.module_expr -> Parsetree.attribute -> unit\n\nval add_inline_attribute :\n  Lambda.lambda -> Location.t -> Parsetree.attributes -> Lambda.lambda\n\nval get_inline_attribute : Parsetree.attributes -> Lambda.inline_attribute\n\nval get_and_remove_inlined_attribute :\n  Typedtree.expression -> Lambda.inline_attribute * Typedtree.expression\n\nval get_and_remove_inlined_attribute_on_module :\n  Typedtree.module_expr -> Lambda.inline_attribute * Typedtree.module_expr\n"
  },
  {
    "path": "analysis/vendor/ml/translcore.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Translation from typed abstract syntax to lambda terms,\n   for the core language *)\n\nopen Misc\nopen Asttypes\nopen Primitive\nopen Types\nopen Typedtree\nopen Typeopt\nopen Lambda\n\ntype error = Unknown_builtin_primitive of string | Unreachable_reached\n\nexception Error of Location.t * error\n\n(* Forward declaration -- to be filled in by Translmod.transl_module *)\nlet transl_module =\n  ref\n    (fun _cc _rootpath _modl -> assert false\n      : module_coercion -> Path.t option -> module_expr -> lambda)\n\n(* Compile an exception/extension definition *)\n\nlet transl_extension_constructor env path ext =\n  let name =\n    match path (*!Clflags.for_package*) with\n    | None -> Ident.name ext.ext_id\n    | Some p -> Path.name p\n  in\n  let loc = ext.ext_loc in\n  match ext.ext_kind with\n  | Text_decl _ -> Lprim (Pcreate_extension name, [], loc)\n  | Text_rebind 
(path, _lid) -> transl_extension_path ~loc env path\n\n(* Translation of primitives *)\n\ntype specialized = {\n  gencomp: Lambda.primitive;\n  intcomp: Lambda.primitive;\n  boolcomp: Lambda.primitive;\n  floatcomp: Lambda.primitive;\n  stringcomp: Lambda.primitive;\n  bytescomp: Lambda.primitive;\n  int64comp: Lambda.primitive;\n  bigintcomp: Lambda.primitive;\n  simplify_constant_constructor: bool;\n}\n\nlet arity2 name : Lambda.primitive =\n  Lambda.Pccall (Primitive.simple ~name ~arity:2 ~alloc:true)\n\nlet comparisons_table =\n  create_hashtable 11\n    [\n      ( \"%equal\",\n        {\n          gencomp =\n            Pccall (Primitive.simple ~name:\"caml_equal\" ~arity:2 ~alloc:true);\n          intcomp = Pintcomp Ceq;\n          boolcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bool_equal\" ~arity:2 ~alloc:false);\n          floatcomp = Pfloatcomp Ceq;\n          stringcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_string_equal\" ~arity:2 ~alloc:false);\n          bytescomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bytes_equal\" ~arity:2 ~alloc:false);\n          int64comp = Pbintcomp (Pint64, Ceq);\n          bigintcomp = Pbigintcomp Ceq;\n          simplify_constant_constructor = true;\n        } );\n      ( \"%notequal\",\n        {\n          gencomp =\n            Pccall (Primitive.simple ~name:\"caml_notequal\" ~arity:2 ~alloc:true);\n          intcomp = Pintcomp Cneq;\n          boolcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bool_notequal\" ~arity:2 ~alloc:false);\n          floatcomp = Pfloatcomp Cneq;\n          stringcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_string_notequal\" ~arity:2\n                 ~alloc:false);\n          bytescomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bytes_notequal\" ~arity:2\n                 ~alloc:false);\n          int64comp = Pbintcomp (Pint64, 
Cneq);\n          bigintcomp = Pbigintcomp Cneq;\n          simplify_constant_constructor = true;\n        } );\n      ( \"%lessthan\",\n        {\n          gencomp =\n            Pccall (Primitive.simple ~name:\"caml_lessthan\" ~arity:2 ~alloc:true);\n          intcomp = Pintcomp Clt;\n          boolcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bool_lessthan\" ~arity:2 ~alloc:false);\n          floatcomp = Pfloatcomp Clt;\n          stringcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_string_lessthan\" ~arity:2\n                 ~alloc:false);\n          bytescomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bytes_lessthan\" ~arity:2\n                 ~alloc:false);\n          int64comp = Pbintcomp (Pint64, Clt);\n          bigintcomp = Pbigintcomp Clt;\n          simplify_constant_constructor = false;\n        } );\n      ( \"%greaterthan\",\n        {\n          gencomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_greaterthan\" ~arity:2 ~alloc:true);\n          intcomp = Pintcomp Cgt;\n          boolcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bool_greaterthan\" ~arity:2\n                 ~alloc:false);\n          floatcomp = Pfloatcomp Cgt;\n          stringcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_string_greaterthan\" ~arity:2\n                 ~alloc:false);\n          bytescomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bytes_greaterthan\" ~arity:2\n                 ~alloc:false);\n          int64comp = Pbintcomp (Pint64, Cgt);\n          bigintcomp = Pbigintcomp Cgt;\n          simplify_constant_constructor = false;\n        } );\n      ( \"%lessequal\",\n        {\n          gencomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_lessequal\" ~arity:2 ~alloc:true);\n          intcomp = Pintcomp Cle;\n          boolcomp =\n            Pccall\n              
(Primitive.simple ~name:\"caml_bool_lessequal\" ~arity:2\n                 ~alloc:false);\n          floatcomp = Pfloatcomp Cle;\n          stringcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_string_lessequal\" ~arity:2\n                 ~alloc:false);\n          bytescomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bytes_lessequal\" ~arity:2\n                 ~alloc:false);\n          int64comp = Pbintcomp (Pint64, Cle);\n          bigintcomp = Pbigintcomp Cle;\n          simplify_constant_constructor = false;\n        } );\n      ( \"%greaterequal\",\n        {\n          gencomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_greaterequal\" ~arity:2 ~alloc:true);\n          intcomp = Pintcomp Cge;\n          boolcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bool_greaterequal\" ~arity:2\n                 ~alloc:false);\n          floatcomp = Pfloatcomp Cge;\n          stringcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_string_greaterequal\" ~arity:2\n                 ~alloc:false);\n          bytescomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bytes_greaterequal\" ~arity:2\n                 ~alloc:false);\n          int64comp = Pbintcomp (Pint64, Cge);\n          bigintcomp = Pbigintcomp Cge;\n          simplify_constant_constructor = false;\n        } );\n      ( \"%compare\",\n        {\n          gencomp =\n            Pccall (Primitive.simple ~name:\"caml_compare\" ~arity:2 ~alloc:true);\n          (* Not unboxed since the comparison is done directly on tagged int *)\n          intcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_int_compare\" ~arity:2 ~alloc:false);\n          boolcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bool_compare\" ~arity:2 ~alloc:false);\n          floatcomp =\n            Pccall\n              (Primitive.simple 
~name:\"caml_float_compare\" ~arity:2 ~alloc:false);\n          stringcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_string_compare\" ~arity:2\n                 ~alloc:false);\n          bytescomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bytes_compare\" ~arity:2 ~alloc:false);\n          int64comp =\n            Pccall\n              (Primitive.simple ~name:\"caml_int64_compare\" ~arity:2 ~alloc:false);\n          bigintcomp =\n            Pccall\n              (Primitive.simple ~name:\"caml_bigint_compare\" ~arity:2\n                 ~alloc:false);\n          simplify_constant_constructor = false;\n        } );\n      ( \"%bs_max\",\n        {\n          gencomp = arity2 \"caml_max\";\n          bytescomp = arity2 \"caml_max\";\n          (* FIXME bytescomp*)\n          intcomp = arity2 \"caml_int_max\";\n          boolcomp = arity2 \"caml_bool_max\";\n          floatcomp = arity2 \"caml_float_max\";\n          stringcomp = arity2 \"caml_string_max\";\n          int64comp = arity2 \"caml_int64_max\";\n          bigintcomp = arity2 \"caml_bigint_max\";\n          simplify_constant_constructor = false;\n        } );\n      ( \"%bs_min\",\n        {\n          gencomp = arity2 \"caml_min\";\n          bytescomp = arity2 \"caml_min\";\n          intcomp = arity2 \"caml_int_min\";\n          boolcomp = arity2 \"caml_bool_min\";\n          floatcomp = arity2 \"caml_float_min\";\n          stringcomp = arity2 \"caml_string_min\";\n          int64comp = arity2 \"caml_int64_min\";\n          bigintcomp = arity2 \"caml_bigint_min\";\n          simplify_constant_constructor = false;\n        } );\n      ( \"%bs_equal_null\",\n        {\n          gencomp = arity2 \"caml_equal_null\";\n          bytescomp = arity2 \"caml_equal_null\";\n          (* FIXME*)\n          intcomp = arity2 \"caml_int_equal_null\";\n          boolcomp = arity2 \"caml_bool_equal_null\";\n          floatcomp = arity2 \"caml_float_equal_null\";\n     
     stringcomp = arity2 \"caml_string_equal_null\";\n          int64comp = arity2 \"caml_int64_equal_null\";\n          bigintcomp = arity2 \"caml_bigint_equal_null\";\n          simplify_constant_constructor = true;\n        } );\n      ( \"%bs_equal_undefined\",\n        {\n          gencomp = arity2 \"caml_equal_undefined\";\n          bytescomp = arity2 \"caml_equal_undefined\";\n          (* FIXME*)\n          intcomp = arity2 \"caml_int_equal_undefined\";\n          boolcomp = arity2 \"caml_bool_equal_undefined\";\n          floatcomp = arity2 \"caml_float_equal_undefined\";\n          stringcomp = arity2 \"caml_string_equal_undefined\";\n          int64comp = arity2 \"caml_int64_equal_undefined\";\n          bigintcomp = arity2 \"caml_bigint_equal_undefined\";\n          simplify_constant_constructor = true;\n        } );\n      ( \"%bs_equal_nullable\",\n        {\n          gencomp = arity2 \"caml_equal_nullable\";\n          bytescomp = arity2 \"caml_equal_nullable\";\n          (* FIXME *)\n          intcomp = arity2 \"caml_int_equal_nullable\";\n          boolcomp = arity2 \"caml_bool_equal_nullable\";\n          floatcomp = arity2 \"caml_float_equal_nullable\";\n          stringcomp = arity2 \"caml_string_equal_nullable\";\n          int64comp = arity2 \"caml_int64_equal_nullable\";\n          bigintcomp = arity2 \"caml_bigint_equal_nullable\";\n          simplify_constant_constructor = true;\n        } );\n    ]\n\nlet primitives_table =\n  create_hashtable 57\n    [\n      (\"%identity\", Pidentity);\n      (\"%bytes_to_string\", Pbytes_to_string);\n      (\"%ignore\", Pignore);\n      (\"%revapply\", Prevapply);\n      (\"%apply\", Pdirapply);\n      (\"%loc_LOC\", Ploc Loc_LOC);\n      (\"%loc_FILE\", Ploc Loc_FILE);\n      (\"%loc_LINE\", Ploc Loc_LINE);\n      (\"%loc_POS\", Ploc Loc_POS);\n      (\"%loc_MODULE\", Ploc Loc_MODULE);\n      (* BEGIN Triples for  ref data type *)\n      (\"%bs_ref_setfield0\", Psetfield (0, 
Lambda.ref_field_set_info));\n      (\"%bs_ref_field0\", Pfield (0, Lambda.ref_field_info));\n      (\"%makemutable\", Pmakeblock Lambda.ref_tag_info);\n      (\"%incr\", Poffsetref 1);\n      (\"%decr\", Poffsetref (-1));\n      (* Finish Triples for  ref data type *)\n      (\"%field0\", Pfield (0, Fld_tuple));\n      (\"%field1\", Pfield (1, Fld_tuple));\n      (\"%obj_field\", Parrayrefu);\n      (\"%obj_set_field\", Parraysetu);\n      (\"%obj_is_int\", Pisint);\n      (\"%raise\", Praise Raise_regular);\n      (\"%reraise\", Praise Raise_reraise);\n      (\"%raise_notrace\", Praise Raise_notrace);\n      (\"%sequand\", Psequand);\n      (\"%sequor\", Psequor);\n      (\"%boolnot\", Pnot);\n      (\"%big_endian\", Pctconst Big_endian);\n      (\"%backend_type\", Pctconst Backend_type);\n      (\"%word_size\", Pctconst Word_size);\n      (\"%int_size\", Pctconst Int_size);\n      (\"%max_wosize\", Pctconst Max_wosize);\n      (\"%ostype_unix\", Pctconst Ostype_unix);\n      (\"%ostype_win32\", Pctconst Ostype_win32);\n      (\"%ostype_cygwin\", Pctconst Ostype_cygwin);\n      (\"%negint\", Pnegint);\n      (\"%succint\", Poffsetint 1);\n      (\"%predint\", Poffsetint (-1));\n      (\"%addint\", Paddint);\n      (\"%subint\", Psubint);\n      (\"%mulint\", Pmulint);\n      (\"%divint\", Pdivint Safe);\n      (\"%modint\", Pmodint Safe);\n      (\"%andint\", Pandint);\n      (\"%orint\", Porint);\n      (\"%xorint\", Pxorint);\n      (\"%lslint\", Plslint);\n      (\"%lsrint\", Plsrint);\n      (\"%asrint\", Pasrint);\n      (\"%andbigint\", Pandbigint);\n      (\"%orbigint\", Porbigint);\n      (\"%xorbigint\", Pxorbigint);\n      (\"%lslbigint\", Plslbigint);\n      (\"%asrbigint\", Pasrbigint);\n      (\"%eq\", Pintcomp Ceq);\n      (\"%noteq\", Pintcomp Cneq);\n      (\"%ltint\", Pintcomp Clt);\n      (\"%leint\", Pintcomp Cle);\n      (\"%gtint\", Pintcomp Cgt);\n      (\"%geint\", Pintcomp Cge);\n      (\"%intoffloat\", Pintoffloat);\n      
(\"%floatofint\", Pfloatofint);\n      (\"%negfloat\", Pnegfloat);\n      (\"%absfloat\", Pabsfloat);\n      (\"%addfloat\", Paddfloat);\n      (\"%subfloat\", Psubfloat);\n      (\"%mulfloat\", Pmulfloat);\n      (\"%divfloat\", Pdivfloat);\n      (\"%eqfloat\", Pfloatcomp Ceq);\n      (\"%noteqfloat\", Pfloatcomp Cneq);\n      (\"%ltfloat\", Pfloatcomp Clt);\n      (\"%lefloat\", Pfloatcomp Cle);\n      (\"%gtfloat\", Pfloatcomp Cgt);\n      (\"%gefloat\", Pfloatcomp Cge);\n      (\"%negbigint\", Pnegbigint);\n      (\"%addbigint\", Paddbigint);\n      (\"%subbigint\", Psubbigint);\n      (\"%mulbigint\", Pmulbigint);\n      (\"%divbigint\", Pdivbigint);\n      (\"%powbigint\", Ppowbigint);\n      (\"%modbigint\", Pmodbigint);\n      (\"%eqbigint\", Pbigintcomp Ceq);\n      (\"%noteqbigint\", Pbigintcomp Cneq);\n      (\"%ltbigint\", Pbigintcomp Clt);\n      (\"%lebigint\", Pbigintcomp Cle);\n      (\"%gtbigint\", Pbigintcomp Cgt);\n      (\"%gebigint\", Pbigintcomp Cge);\n      (\"%string_length\", Pstringlength);\n      (\"%string_safe_get\", Pstringrefs);\n      (\"%string_unsafe_get\", Pstringrefu);\n      (\"%bytes_length\", Pbyteslength);\n      (\"%bytes_safe_get\", Pbytesrefs);\n      (\"%bytes_safe_set\", Pbytessets);\n      (\"%bytes_unsafe_get\", Pbytesrefu);\n      (\"%bytes_unsafe_set\", Pbytessetu);\n      (\"%array_length\", Parraylength);\n      (\"%array_safe_get\", Parrayrefs);\n      (\"%array_safe_set\", Parraysets);\n      (\"%array_unsafe_get\", Parrayrefu);\n      (\"%array_unsafe_set\", Parraysetu);\n      (\"%floatarray_length\", Parraylength);\n      (\"%floatarray_safe_get\", Parrayrefs);\n      (\"%floatarray_safe_set\", Parraysets);\n      (\"%floatarray_unsafe_get\", Parrayrefu);\n      (\"%floatarray_unsafe_set\", Parraysetu);\n      (\"%lazy_force\", Plazyforce);\n      (\"%int64_of_int\", Pbintofint Pint64);\n      (\"%int64_to_int\", Pintofbint Pint64);\n      (\"%int64_neg\", Pnegbint Pint64);\n      (\"%int64_add\", Paddbint 
Pint64);\n      (\"%int64_sub\", Psubbint Pint64);\n      (\"%int64_mul\", Pmulbint Pint64);\n      (\"%int64_div\", Pdivbint {size = Pint64; is_safe = Safe});\n      (\"%int64_mod\", Pmodbint {size = Pint64; is_safe = Safe});\n      (\"%int64_and\", Pandbint Pint64);\n      (\"%int64_or\", Porbint Pint64);\n      (\"%int64_xor\", Pxorbint Pint64);\n      (\"%int64_lsl\", Plslbint Pint64);\n      (\"%int64_lsr\", Plsrbint Pint64);\n      (\"%int64_asr\", Pasrbint Pint64);\n      (\"%bigint_of_int32\", Pcvtbint (Pint32, Pbigint));\n      (\"%bigint_to_int32\", Pcvtbint (Pbigint, Pint32));\n      (\"%int64_of_int32\", Pcvtbint (Pint32, Pint64));\n      (\"%int64_to_int32\", Pcvtbint (Pint64, Pint32));\n      (\"%int64_of_bigint\", Pcvtbint (Pbigint, Pint64));\n      (\"%int64_to_bigint\", Pcvtbint (Pint64, Pbigint));\n      (\"%opaque\", Popaque);\n      (\"%uncurried_apply\", Puncurried_apply);\n    ]\n\nlet find_primitive prim_name = Hashtbl.find primitives_table prim_name\n\nlet specialize_comparison\n    ({\n       gencomp;\n       intcomp;\n       floatcomp;\n       stringcomp;\n       bytescomp;\n       int64comp;\n       bigintcomp;\n       boolcomp;\n     } :\n      specialized) env ty =\n  match () with\n  | ()\n    when is_base_type env ty Predef.path_int\n         || is_base_type env ty Predef.path_char\n         || maybe_pointer_type env ty = Immediate ->\n    intcomp\n  | () when is_base_type env ty Predef.path_float -> floatcomp\n  | () when is_base_type env ty Predef.path_string -> stringcomp\n  | () when is_base_type env ty Predef.path_bytes -> bytescomp\n  | () when is_base_type env ty Predef.path_int64 -> int64comp\n  | () when is_base_type env ty Predef.path_bigint -> bigintcomp\n  | () when is_base_type env ty Predef.path_bool -> boolcomp\n  | () -> gencomp\n\n(* Specialize a primitive from available type information,\n   raise Not_found if primitive is unknown *)\n\nlet specialize_primitive p env ty (* ~has_constant_constructor *) =\n  try\n    
let table = Hashtbl.find comparisons_table p.prim_name in\n    match is_function_type env ty with\n    | Some (lhs, _rhs) -> specialize_comparison table env lhs\n    | None -> table.gencomp\n  with Not_found -> find_primitive p.prim_name\n\n(* Eta-expand a primitive *)\n\nlet transl_primitive loc p env ty =\n  let prim =\n    try specialize_primitive p env ty (* ~has_constant_constructor:false *)\n    with Not_found -> Pccall p\n  in\n  match prim with\n  | Plazyforce ->\n    let parm = Ident.create \"prim\" in\n    Lfunction\n      {\n        params = [parm];\n        body = Matching.inline_lazy_force (Lvar parm) Location.none;\n        loc;\n        attr = default_function_attribute;\n      }\n  | Ploc kind -> (\n    let lam = lam_of_loc kind loc in\n    match p.prim_arity with\n    | 0 -> lam\n    | 1 ->\n      (* TODO: we should issue a warning ? *)\n      let param = Ident.create \"prim\" in\n      Lfunction\n        {\n          params = [param];\n          attr = default_function_attribute;\n          loc;\n          body = Lprim (Pmakeblock Blk_tuple, [lam; Lvar param], loc);\n        }\n    | _ -> assert false)\n  | _ ->\n    let rec make_params n total =\n      if n <= 0 then []\n      else\n        Ident.create (\"prim\" ^ string_of_int (total - n))\n        :: make_params (n - 1) total\n    in\n    let prim_arity = p.prim_arity in\n    if prim_arity = 0 then Lprim (prim, [], loc)\n    else\n      let params =\n        if prim_arity = 1 then [Ident.create \"prim\"]\n        else make_params prim_arity prim_arity\n      in\n      Lfunction\n        {\n          params;\n          attr = default_function_attribute;\n          loc;\n          body = Lprim (prim, List.map (fun id -> Lvar id) params, loc);\n        }\n\nlet transl_primitive_application loc prim env ty args =\n  let prim_name = prim.prim_name in\n  try\n    match args with\n    | [arg1; _]\n      when is_base_type env arg1.exp_type Predef.path_bool\n           && Hashtbl.mem comparisons_table 
prim_name ->\n      (Hashtbl.find comparisons_table prim_name).boolcomp\n    | _ ->\n      let has_constant_constructor =\n        match args with\n        | [_; {exp_desc = Texp_construct (_, {cstr_tag = Cstr_constant _}, _)}]\n        | [{exp_desc = Texp_construct (_, {cstr_tag = Cstr_constant _}, _)}; _]\n        | [_; {exp_desc = Texp_variant (_, None)}]\n        | [{exp_desc = Texp_variant (_, None)}; _] ->\n          true\n        | _ -> false\n      in\n      if has_constant_constructor then\n        match Hashtbl.find_opt comparisons_table prim_name with\n        | Some table when table.simplify_constant_constructor -> table.intcomp\n        | Some _ | None -> specialize_primitive prim env ty\n        (* ~has_constant_constructor*)\n      else specialize_primitive prim env ty\n  with Not_found ->\n    if String.length prim_name > 0 && prim_name.[0] = '%' then\n      raise (Error (loc, Unknown_builtin_primitive prim_name));\n    Pccall prim\n\n(* To propagate structured constants *)\n\nexception Not_constant\n\nlet extract_constant = function\n  | Lconst sc -> sc\n  | _ -> raise_notrace Not_constant\n\n(* Push the default values under the functional abstractions *)\n(* Also push bindings of module patterns, since this sound *)\n\ntype binding =\n  | Bind_value of value_binding list\n  | Bind_module of Ident.t * string loc * module_expr\n\nlet rec push_defaults loc bindings cases partial =\n  match cases with\n  | [\n   {\n     c_lhs = pat;\n     c_guard = None;\n     c_rhs =\n       {exp_desc = Texp_function {arg_label; param; cases; partial}} as exp;\n   };\n  ] ->\n    let cases = push_defaults exp.exp_loc bindings cases partial in\n    [\n      {\n        c_lhs = pat;\n        c_guard = None;\n        c_rhs =\n          {exp with exp_desc = Texp_function {arg_label; param; cases; partial}};\n      };\n    ]\n  | [\n   {\n     c_lhs = pat;\n     c_guard = None;\n     c_rhs =\n       {\n         exp_attributes = [({txt = \"#default\"}, _)];\n         
exp_desc =\n           Texp_let (Nonrecursive, binds, ({exp_desc = Texp_function _} as e2));\n       };\n   };\n  ] ->\n    push_defaults loc\n      (Bind_value binds :: bindings)\n      [{c_lhs = pat; c_guard = None; c_rhs = e2}]\n      partial\n  | [\n   {\n     c_lhs = pat;\n     c_guard = None;\n     c_rhs =\n       {\n         exp_attributes = [({txt = \"#modulepat\"}, _)];\n         exp_desc =\n           Texp_letmodule (id, name, mexpr, ({exp_desc = Texp_function _} as e2));\n       };\n   };\n  ] ->\n    push_defaults loc\n      (Bind_module (id, name, mexpr) :: bindings)\n      [{c_lhs = pat; c_guard = None; c_rhs = e2}]\n      partial\n  | [case] ->\n    let exp =\n      List.fold_left\n        (fun exp binds ->\n          {\n            exp with\n            exp_desc =\n              (match binds with\n              | Bind_value binds -> Texp_let (Nonrecursive, binds, exp)\n              | Bind_module (id, name, mexpr) ->\n                Texp_letmodule (id, name, mexpr, exp));\n          })\n        case.c_rhs bindings\n    in\n    [{case with c_rhs = exp}]\n  | {c_lhs = pat; c_rhs = exp; c_guard = _} :: _ when bindings <> [] ->\n    let param = Typecore.name_pattern \"param\" cases in\n    let name = Ident.name param in\n    let exp =\n      {\n        exp with\n        exp_loc = loc;\n        exp_desc =\n          Texp_match\n            ( {\n                exp with\n                exp_type = pat.pat_type;\n                exp_desc =\n                  Texp_ident\n                    ( Path.Pident param,\n                      mknoloc (Longident.Lident name),\n                      {\n                        val_type = pat.pat_type;\n                        val_kind = Val_reg;\n                        val_attributes = [];\n                        Types.val_loc = Location.none;\n                      } );\n              },\n              cases,\n              [],\n              partial );\n      }\n    in\n    push_defaults loc bindings\n      [\n    
    {\n          c_lhs = {pat with pat_desc = Tpat_var (param, mknoloc name)};\n          c_guard = None;\n          c_rhs = exp;\n        };\n      ]\n      Total\n  | _ -> cases\n\n(* Assertions *)\n\nlet assert_failed exp =\n  let fname, line, char =\n    Location.get_pos_info exp.exp_loc.Location.loc_start\n  in\n  let fname = Filename.basename fname in\n  Lprim\n    ( Praise Raise_regular,\n      [\n        Lprim\n          ( Pmakeblock Blk_extension,\n            [\n              transl_normal_path Predef.path_assert_failure;\n              Lconst\n                (Const_block\n                   ( Blk_tuple,\n                     [\n                       Const_base (Const_string (fname, None));\n                       Const_base (Const_int line);\n                       Const_base (Const_int char);\n                     ] ));\n            ],\n            exp.exp_loc );\n      ],\n      exp.exp_loc )\n\nlet rec cut n l =\n  if n = 0 then ([], l)\n  else\n    match l with\n    | [] -> failwith \"Translcore.cut\"\n    | a :: l ->\n      let l1, l2 = cut (n - 1) l in\n      (a :: l1, l2)\n\n(* Translation of expressions *)\n\nlet try_ids = Hashtbl.create 8\n\nlet has_async_attribute exp =\n  exp.exp_attributes |> List.exists (fun ({txt}, _payload) -> txt = \"res.async\")\n\nlet extract_directive_for_fn exp =\n  exp.exp_attributes\n  |> List.find_map (fun ({txt}, payload) ->\n         if txt = \"directive\" then Ast_payload.is_single_string payload\n         else None)\n\nlet rec transl_exp e =\n  List.iter (Translattribute.check_attribute e) e.exp_attributes;\n  transl_exp0 e\n\nand transl_exp0 (e : Typedtree.expression) : Lambda.lambda =\n  match e.exp_desc with\n  | Texp_ident (_, _, {val_kind = Val_prim p}) ->\n    transl_primitive e.exp_loc p e.exp_env e.exp_type\n  | Texp_ident (path, _, {val_kind = Val_reg}) ->\n    transl_value_path ~loc:e.exp_loc e.exp_env path\n  | Texp_constant cst -> Lconst (Const_base cst)\n  | Texp_let (rec_flag, pat_expr_list, 
body) ->\n    transl_let rec_flag pat_expr_list (transl_exp body)\n  | Texp_function {arg_label = _; param; cases; partial} ->\n    let async = has_async_attribute e in\n    let directive =\n      match extract_directive_for_fn e with\n      | None -> None\n      | Some (directive, _) -> Some directive\n    in\n    let params, body, return_unit =\n      let pl = push_defaults e.exp_loc [] cases partial in\n      transl_function e.exp_loc partial param pl\n    in\n    let attr =\n      {\n        default_function_attribute with\n        inline = Translattribute.get_inline_attribute e.exp_attributes;\n        async;\n        return_unit;\n        directive;\n      }\n    in\n    let loc = e.exp_loc in\n    Lfunction {params; body; attr; loc}\n  | Texp_apply\n      ( ({\n           exp_desc = Texp_ident (_, _, {val_kind = Val_prim p});\n           exp_type = prim_type;\n         } as funct),\n        oargs )\n    when List.length oargs >= p.prim_arity\n         && List.for_all (fun (_, arg) -> arg <> None) oargs -> (\n    let args, args' = cut p.prim_arity oargs in\n    let wrap f =\n      if args' = [] then f\n      else\n        let inlined, _ =\n          Translattribute.get_and_remove_inlined_attribute funct\n        in\n        transl_apply ~inlined f args' e.exp_loc\n    in\n    let args =\n      List.map\n        (function\n          | _, Some x -> x\n          | _ -> assert false)\n        args\n    in\n    let argl = transl_list args in\n    let prim =\n      transl_primitive_application e.exp_loc p e.exp_env prim_type args\n    in\n    match (prim, args) with\n    | Praise k, [_] ->\n      let targ = List.hd argl in\n      let k =\n        match (k, targ) with\n        | Raise_regular, Lvar id when Hashtbl.mem try_ids id -> Raise_reraise\n        | _ -> k\n      in\n      wrap (Lprim (Praise k, [targ], e.exp_loc))\n    | Ploc kind, [] -> lam_of_loc kind e.exp_loc\n    | Ploc kind, [arg1] ->\n      let lam = lam_of_loc kind arg1.exp_loc in\n      Lprim 
(Pmakeblock Blk_tuple, lam :: argl, e.exp_loc)\n    | Ploc _, _ -> assert false\n    | _, _ -> (\n      match (prim, argl) with\n      | Plazyforce, [a] -> wrap (Matching.inline_lazy_force a e.exp_loc)\n      | Plazyforce, _ -> assert false\n      | _ -> wrap (Lprim (prim, argl, e.exp_loc))))\n  | Texp_apply (funct, oargs) ->\n    let inlined, funct =\n      Translattribute.get_and_remove_inlined_attribute funct\n    in\n    let uncurried_partial_application =\n      (* In case of partial application foo(args, ...) when some args are missing,\n         get the arity *)\n      let uncurried_partial_app =\n        Ext_list.exists e.exp_attributes (fun ({txt}, _) -> txt = \"res.partial\")\n      in\n      if uncurried_partial_app then\n        let arity_opt =\n          Ast_uncurried.uncurried_type_get_arity_opt ~env:funct.exp_env\n            funct.exp_type\n        in\n        match arity_opt with\n        | Some arity ->\n          let real_args = List.filter (fun (_, x) -> Option.is_some x) oargs in\n          if arity > List.length real_args then Some arity else None\n        | None -> None\n      else None\n    in\n    transl_apply ~inlined ~uncurried_partial_application (transl_exp funct)\n      oargs e.exp_loc\n  | Texp_match (arg, pat_expr_list, exn_pat_expr_list, partial) ->\n    transl_match e arg pat_expr_list exn_pat_expr_list partial\n  | Texp_try (body, pat_expr_list) ->\n    let id = Typecore.name_pattern \"exn\" pat_expr_list in\n    Ltrywith\n      ( transl_exp body,\n        id,\n        Matching.for_trywith (Lvar id) (transl_cases_try pat_expr_list) )\n  | Texp_tuple el -> (\n    let ll = transl_list el in\n    try Lconst (Const_block (Blk_tuple, List.map extract_constant ll))\n    with Not_constant -> Lprim (Pmakeblock Blk_tuple, ll, e.exp_loc))\n  | Texp_construct ({txt = Lident \"false\"}, _, []) -> Lconst Const_false\n  | Texp_construct ({txt = Lident \"true\"}, _, []) -> Lconst Const_true\n  | Texp_construct ({txt = Lident \"Function$\"}, _, 
[expr]) ->\n    (* ReScript uncurried encoding *)\n    let loc = expr.exp_loc in\n    let lambda = transl_exp expr in\n    let arity =\n      Ast_uncurried.uncurried_type_get_arity ~env:e.exp_env e.exp_type\n    in\n    let arity_s = arity |> string_of_int in\n    let name =\n      match (Ctype.expand_head expr.exp_env expr.exp_type).desc with\n      | Tarrow (Nolabel, t, _, _) -> (\n        match (Ctype.expand_head expr.exp_env t).desc with\n        | Tconstr (Pident {name = \"unit\"}, [], _) -> \"#fn_mk_unit\"\n        | _ -> \"#fn_mk\")\n      | _ -> \"#fn_mk\"\n    in\n    let prim =\n      Primitive.make ~name ~alloc:true ~native_name:arity_s\n        ~native_repr_args:[Same_as_ocaml_repr]\n        ~native_repr_res:Same_as_ocaml_repr\n    in\n    Lprim\n      ( Pccall prim\n        (* could be replaced with Opaque in the future except arity 0*),\n        [lambda],\n        loc )\n  | Texp_construct (lid, cstr, args) -> (\n    let ll = transl_list args in\n    if cstr.cstr_inlined <> None then\n      match ll with\n      | [x] -> x\n      | _ -> assert false\n    else\n      match cstr.cstr_tag with\n      | Cstr_constant n ->\n        Lconst\n          (Const_pointer\n             ( n,\n               match lid.txt with\n               | Longident.Ldot (Longident.Lident \"*predef*\", \"None\")\n               | Longident.Lident \"None\"\n                 when Datarepr.constructor_has_optional_shape cstr ->\n                 Pt_shape_none\n               | _ ->\n                 if Datarepr.constructor_has_optional_shape cstr then\n                   Pt_shape_none\n                 else\n                   Pt_constructor\n                     {\n                       name = cstr.cstr_name;\n                       const = cstr.cstr_consts;\n                       non_const = cstr.cstr_nonconsts;\n                       attrs = cstr.cstr_attributes;\n                     } ))\n      | Cstr_unboxed -> (\n        match ll with\n        | [v] -> v\n        | _ -> 
assert false)\n      | Cstr_block n -> (\n        let tag_info : Lambda.tag_info =\n          if Datarepr.constructor_has_optional_shape cstr then\n            match args with\n            | [arg]\n              when Typeopt.type_cannot_contain_undefined arg.exp_type\n                     arg.exp_env ->\n              (* Format.fprintf Format.err_formatter \"@[special boxingl@]@.\"; *)\n              Blk_some_not_nested\n            | _ -> Blk_some\n          else\n            Blk_constructor\n              {\n                name = cstr.cstr_name;\n                num_nonconst = cstr.cstr_nonconsts;\n                tag = n;\n                attrs = cstr.cstr_attributes;\n              }\n        in\n        try Lconst (Const_block (tag_info, List.map extract_constant ll))\n        with Not_constant -> Lprim (Pmakeblock tag_info, ll, e.exp_loc))\n      | Cstr_extension (path, _) ->\n        Lprim\n          ( Pmakeblock Blk_extension,\n            transl_extension_path e.exp_env path :: ll,\n            e.exp_loc ))\n  | Texp_extension_constructor (_, path) -> transl_extension_path e.exp_env path\n  | Texp_variant (l, arg) -> (\n    let tag = Btype.hash_variant l in\n    match arg with\n    | None -> Lconst (Const_pointer (tag, Pt_variant {name = l}))\n    | Some arg -> (\n      let lam = transl_exp arg in\n      let tag_info = Blk_poly_var l in\n      try\n        Lconst\n          (Const_block\n             (tag_info, [Const_base (Const_int tag); extract_constant lam]))\n      with Not_constant ->\n        Lprim\n          ( Pmakeblock tag_info,\n            [Lconst (Const_base (Const_int tag)); lam],\n            e.exp_loc )))\n  | Texp_record {fields; representation; extended_expression} ->\n    transl_record e.exp_loc e.exp_env fields representation extended_expression\n  | Texp_field (arg, _, lbl) -> (\n    let targ = transl_exp arg in\n    match lbl.lbl_repres with\n    | Record_float_unused -> assert false\n    | Record_regular | Record_optional_labels _ 
->\n      Lprim (Pfield (lbl.lbl_pos, Lambda.fld_record lbl), [targ], e.exp_loc)\n    | Record_inlined _ ->\n      Lprim\n        (Pfield (lbl.lbl_pos, Lambda.fld_record_inline lbl), [targ], e.exp_loc)\n    | Record_unboxed _ -> targ\n    | Record_extension ->\n      Lprim\n        ( Pfield (lbl.lbl_pos + 1, Lambda.fld_record_extension lbl),\n          [targ],\n          e.exp_loc ))\n  | Texp_setfield (arg, _, lbl, newval) ->\n    let access =\n      match lbl.lbl_repres with\n      | Record_float_unused -> assert false\n      | Record_regular | Record_optional_labels _ ->\n        Psetfield (lbl.lbl_pos, Lambda.fld_record_set lbl)\n      | Record_inlined _ ->\n        Psetfield (lbl.lbl_pos, Lambda.fld_record_inline_set lbl)\n      | Record_unboxed _ -> assert false\n      | Record_extension ->\n        Psetfield (lbl.lbl_pos + 1, Lambda.fld_record_extension_set lbl)\n    in\n    Lprim (access, [transl_exp arg; transl_exp newval], e.exp_loc)\n  | Texp_array expr_list ->\n    let ll = transl_list expr_list in\n    Lprim (Pmakearray Mutable, ll, e.exp_loc)\n  | Texp_ifthenelse (cond, ifso, Some ifnot) ->\n    Lifthenelse (transl_exp cond, transl_exp ifso, transl_exp ifnot)\n  | Texp_ifthenelse (cond, ifso, None) ->\n    Lifthenelse (transl_exp cond, transl_exp ifso, lambda_unit)\n  | Texp_sequence (expr1, expr2) ->\n    Lsequence (transl_exp expr1, transl_exp expr2)\n  | Texp_while (cond, body) -> Lwhile (transl_exp cond, transl_exp body)\n  | Texp_for (param, _, low, high, dir, body) ->\n    Lfor (param, transl_exp low, transl_exp high, dir, transl_exp body)\n  | Texp_send (expr, Tmeth_name nm, _) ->\n    let obj = transl_exp expr in\n    Lsend (nm, obj, e.exp_loc)\n  | Texp_new _ | Texp_instvar _ | Texp_setinstvar _ | Texp_override _ ->\n    assert false\n  | Texp_letmodule (id, _loc, modl, body) ->\n    let defining_expr = !transl_module Tcoerce_none None modl in\n    Llet (Strict, Pgenval, id, defining_expr, transl_exp body)\n  | Texp_letexception (cd, body) 
->\n    Llet\n      ( Strict,\n        Pgenval,\n        cd.ext_id,\n        transl_extension_constructor e.exp_env None cd,\n        transl_exp body )\n  | Texp_pack modl -> !transl_module Tcoerce_none None modl\n  | Texp_assert {exp_desc = Texp_construct (_, {cstr_name = \"false\"}, _)} ->\n    if !Clflags.no_assert_false then Lambda.lambda_assert_false\n    else assert_failed e\n  | Texp_assert cond ->\n    if !Clflags.noassert then lambda_unit\n    else Lifthenelse (transl_exp cond, lambda_unit, assert_failed e)\n  | Texp_lazy e ->\n    (* when e needs no computation (constants, identifiers, ...), we\n       optimize the translation just as Lazy.lazy_from_val would\n       do *)\n    Lprim (Pmakeblock Blk_lazy_general, [transl_exp e], e.exp_loc)\n  | Texp_object () -> assert false\n  | Texp_unreachable -> raise (Error (e.exp_loc, Unreachable_reached))\n\nand transl_list expr_list = List.map transl_exp expr_list\n\nand transl_guard guard rhs =\n  let expr = transl_exp rhs in\n  match guard with\n  | None -> expr\n  | Some cond -> Lifthenelse (transl_exp cond, expr, staticfail)\n\nand transl_case {c_lhs; c_guard; c_rhs} = (c_lhs, transl_guard c_guard c_rhs)\n\nand transl_cases cases =\n  let cases =\n    Ext_list.filter cases (fun c -> c.c_rhs.exp_desc <> Texp_unreachable)\n  in\n  List.map transl_case cases\n\nand transl_case_try {c_lhs; c_guard; c_rhs} =\n  match c_lhs.pat_desc with\n  | Tpat_var (id, _) | Tpat_alias (_, id, _) ->\n    Hashtbl.replace try_ids id ();\n    Misc.try_finally\n      (fun () -> (c_lhs, transl_guard c_guard c_rhs))\n      (fun () -> Hashtbl.remove try_ids id)\n  | _ -> (c_lhs, transl_guard c_guard c_rhs)\n\nand transl_cases_try cases =\n  let cases =\n    Ext_list.filter cases (fun c -> c.c_rhs.exp_desc <> Texp_unreachable)\n  in\n  List.map transl_case_try cases\n\nand transl_apply ?(inlined = Default_inline)\n    ?(uncurried_partial_application = None) lam sargs loc =\n  let lapply funct args =\n    match funct with\n    (* 
Attention: This may not be what we need to change the application arity*)\n    | Lapply ap -> Lapply {ap with ap_args = ap.ap_args @ args; ap_loc = loc}\n    | lexp ->\n      Lapply\n        {ap_loc = loc; ap_func = lexp; ap_args = args; ap_inlined = inlined}\n  in\n  let rec build_apply lam args = function\n    | (None, optional) :: l ->\n      let defs = ref [] in\n      let protect name lam =\n        match lam with\n        | Lvar _ | Lconst _ -> lam\n        | _ ->\n          let id = Ident.create name in\n          defs := (id, lam) :: !defs;\n          Lvar id\n      in\n      let args, args' =\n        if List.for_all (fun (_, opt) -> opt) args then ([], args)\n        else (args, [])\n      in\n      let lam = if args = [] then lam else lapply lam (List.rev_map fst args) in\n      let handle = protect \"func\" lam\n      and l = List.map (fun (arg, opt) -> (may_map (protect \"arg\") arg, opt)) l\n      and id_arg = Ident.create \"param\" in\n      let body =\n        match build_apply handle ((Lvar id_arg, optional) :: args') l with\n        | Lfunction {params = ids; body = lam; attr; loc} ->\n          Lfunction {params = id_arg :: ids; body = lam; attr; loc}\n        | lam ->\n          Lfunction\n            {\n              params = [id_arg];\n              body = lam;\n              attr = default_function_attribute;\n              loc;\n            }\n      in\n      List.fold_left\n        (fun body (id, lam) -> Llet (Strict, Pgenval, id, lam, body))\n        body !defs\n    | (Some arg, optional) :: l -> build_apply lam ((arg, optional) :: args) l\n    | [] -> lapply lam (List.rev_map fst args)\n  in\n  match uncurried_partial_application with\n  | Some arity ->\n    let extra_arity = arity - List.length sargs in\n    let none_ids = ref [] in\n    let args =\n      Ext_list.filter_map sargs (function\n        | _, Some e -> Some (transl_exp e)\n        | _, None ->\n          let id_arg = Ident.create \"none\" in\n          none_ids := id_arg :: 
!none_ids;\n          Some (Lvar id_arg))\n    in\n    let extra_ids = ref [] in\n    extra_ids := Ident.create \"extra\" :: !extra_ids;\n    let extra_ids =\n      Array.init extra_arity (fun _ -> Ident.create \"extra\") |> Array.to_list\n    in\n    let extra_args = Ext_list.map extra_ids (fun id -> Lvar id) in\n    let ap_args = args @ extra_args in\n    let l0 =\n      Lapply {ap_func = lam; ap_args; ap_inlined = inlined; ap_loc = loc}\n    in\n    Lfunction\n      {\n        params = List.rev_append !none_ids extra_ids;\n        body = l0;\n        attr = default_function_attribute;\n        loc;\n      }\n  | _ ->\n    (build_apply lam []\n       (List.map\n          (fun (l, x) -> (may_map transl_exp x, Btype.is_optional l))\n          sargs)\n      : Lambda.lambda)\n\nand transl_function loc partial param cases =\n  match cases with\n  | [\n   {\n     c_lhs = pat;\n     c_guard = None;\n     c_rhs =\n       {\n         exp_desc =\n           Texp_function\n             {arg_label = _; param = param'; cases; partial = partial'};\n       } as exp;\n   };\n  ]\n    when Parmatch.inactive ~partial pat && not (exp |> has_async_attribute) ->\n    let params, body, return_unit =\n      transl_function exp.exp_loc partial' param' cases\n    in\n    ( param :: params,\n      Matching.for_function loc None (Lvar param) [(pat, body)] partial,\n      return_unit )\n  | {c_rhs = {exp_env; exp_type}; _} :: _ ->\n    ( [param],\n      Matching.for_function loc None (Lvar param) (transl_cases cases) partial,\n      is_base_type exp_env exp_type Predef.path_unit )\n  | _ -> assert false\n\nand transl_let rec_flag pat_expr_list body =\n  match rec_flag with\n  | Nonrecursive ->\n    let rec transl = function\n      | [] -> body\n      | {vb_pat = pat; vb_expr = expr; vb_attributes = attr; vb_loc} :: rem ->\n        let lam = transl_exp expr in\n        let lam = Translattribute.add_inline_attribute lam vb_loc attr in\n        Matching.for_let pat.pat_loc lam pat (transl 
rem)\n    in\n    transl pat_expr_list\n  | Recursive ->\n    let transl_case {vb_expr = expr; vb_attributes; vb_loc; vb_pat = pat} =\n      let id =\n        match pat.pat_desc with\n        | Tpat_var (id, _) -> id\n        | Tpat_alias ({pat_desc = Tpat_any}, id, _) -> id\n        | _ -> assert false\n        (* Illegal_letrec_pat\n           Only variables are allowed as left-hand side of `let rec'\n        *)\n      in\n      let lam = transl_exp expr in\n      let lam = Translattribute.add_inline_attribute lam vb_loc vb_attributes in\n      (id, lam)\n    in\n    Lletrec (Ext_list.map pat_expr_list transl_case, body)\n\nand transl_record loc env fields repres opt_init_expr =\n  match (opt_init_expr, repres, fields) with\n  | None, Record_unboxed _, [|({lbl_name; lbl_loc}, Overridden (_, expr))|] ->\n    (* ReScript uncurried encoding *)\n    let loc = lbl_loc in\n    let lambda = transl_exp expr in\n    if lbl_name.[0] = 'I' then\n      let arity_s = String.sub lbl_name 1 (String.length lbl_name - 1) in\n      let prim =\n        Primitive.make ~name:\"#fn_mk\" ~alloc:true ~native_name:arity_s\n          ~native_repr_args:[Same_as_ocaml_repr]\n          ~native_repr_res:Same_as_ocaml_repr\n      in\n      Lprim\n        ( Pccall prim\n          (* could be replaced with Opaque in the future except arity 0*),\n          [lambda],\n          loc )\n    else lambda\n  | _ -> (\n    let size = Array.length fields in\n    (* Determine if there are \"enough\" fields (only relevant if this is a\n       functional-style record update *)\n    let no_init =\n      match opt_init_expr with\n      | None -> true\n      | _ -> false\n    in\n    if\n      no_init\n      || size < 20\n         &&\n         match repres with\n         | Record_optional_labels _ -> false\n         | _ -> true\n      (* TODO: More strategies\n         3 + 2 * List.length lbl_expr_list >= size (density)\n      *)\n    then\n      (* Allocate new record with given fields (and remaining fields\n 
        taken from init_expr if any *)\n      let init_id = Ident.create \"init\" in\n      let lv =\n        Array.mapi\n          (fun i (lbl, definition) ->\n            match definition with\n            | Kept _ ->\n              let access =\n                match repres with\n                | Record_float_unused -> assert false\n                | Record_regular | Record_optional_labels _ ->\n                  Pfield (i, Lambda.fld_record lbl)\n                | Record_inlined _ -> Pfield (i, Lambda.fld_record_inline lbl)\n                | Record_unboxed _ -> assert false\n                | Record_extension ->\n                  Pfield (i + 1, Lambda.fld_record_extension lbl)\n              in\n              Lprim (access, [Lvar init_id], loc)\n            | Overridden (_lid, expr) -> transl_exp expr)\n          fields\n      in\n      let ll = Array.to_list lv in\n      let mut =\n        if Array.exists (fun (lbl, _) -> lbl.lbl_mut = Mutable) fields then\n          Mutable\n        else Immutable\n      in\n      let lam =\n        try\n          if mut = Mutable then raise Not_constant;\n          let cl = List.map extract_constant ll in\n          match repres with\n          | Record_float_unused -> assert false\n          | Record_regular ->\n            Lconst\n              (Const_block (Lambda.blk_record fields mut Record_regular, cl))\n          | Record_optional_labels _ ->\n            Lconst\n              (Const_block (Lambda.blk_record fields mut Record_optional, cl))\n          | Record_inlined {tag; name; num_nonconsts; optional_labels; attrs} ->\n            Lconst\n              (Const_block\n                 ( Lambda.blk_record_inlined fields name num_nonconsts\n                     optional_labels ~tag ~attrs mut,\n                   cl ))\n          | Record_unboxed _ ->\n            Lconst\n              (match cl with\n              | [v] -> v\n              | _ -> assert false)\n          | Record_extension -> raise Not_constant\n   
     with Not_constant -> (\n          match repres with\n          | Record_regular ->\n            Lprim\n              (Pmakeblock (Lambda.blk_record fields mut Record_regular), ll, loc)\n          | Record_optional_labels _ ->\n            Lprim\n              ( Pmakeblock (Lambda.blk_record fields mut Record_optional),\n                ll,\n                loc )\n          | Record_float_unused -> assert false\n          | Record_inlined {tag; name; num_nonconsts; optional_labels; attrs} ->\n            Lprim\n              ( Pmakeblock\n                  (Lambda.blk_record_inlined fields name num_nonconsts\n                     optional_labels ~tag ~attrs mut),\n                ll,\n                loc )\n          | Record_unboxed _ -> (\n            match ll with\n            | [v] -> v\n            | _ -> assert false)\n          | Record_extension ->\n            let path =\n              let label, _ = fields.(0) in\n              match label.lbl_res.desc with\n              | Tconstr (p, _, _) -> p\n              | _ -> assert false\n            in\n            let slot = transl_extension_path env path in\n            Lprim\n              (Pmakeblock (Lambda.blk_record_ext fields mut), slot :: ll, loc))\n      in\n      match opt_init_expr with\n      | None -> lam\n      | Some init_expr ->\n        Llet (Strict, Pgenval, init_id, transl_exp init_expr, lam)\n    else\n      (* Take a shallow copy of the init record, then mutate the fields\n         of the copy *)\n      let copy_id = Ident.create \"newrecord\" in\n      let update_field cont (lbl, definition) =\n        match definition with\n        | Kept _type -> cont\n        | Overridden (_lid, expr) ->\n          let upd =\n            match repres with\n            | Record_float_unused -> assert false\n            | Record_regular | Record_optional_labels _ ->\n              Psetfield (lbl.lbl_pos, Lambda.fld_record_set lbl)\n            | Record_inlined _ ->\n              Psetfield 
(lbl.lbl_pos, Lambda.fld_record_inline_set lbl)\n            | Record_unboxed _ -> assert false\n            | Record_extension ->\n              Psetfield (lbl.lbl_pos + 1, Lambda.fld_record_extension_set lbl)\n          in\n          Lsequence (Lprim (upd, [Lvar copy_id; transl_exp expr], loc), cont)\n      in\n      match opt_init_expr with\n      | None -> assert false\n      | Some init_expr ->\n        Llet\n          ( Strict,\n            Pgenval,\n            copy_id,\n            Lprim (Pduprecord, [transl_exp init_expr], loc),\n            Array.fold_left update_field (Lvar copy_id) fields ))\n\nand transl_match e arg pat_expr_list exn_pat_expr_list partial =\n  let id = Typecore.name_pattern \"exn\" exn_pat_expr_list\n  and cases = transl_cases pat_expr_list\n  and exn_cases = transl_cases_try exn_pat_expr_list in\n  let static_catch body val_ids handler =\n    let static_exception_id = next_negative_raise_count () in\n    Lstaticcatch\n      ( Ltrywith\n          ( Lstaticraise (static_exception_id, body),\n            id,\n            Matching.for_trywith (Lvar id) exn_cases ),\n        (static_exception_id, val_ids),\n        handler )\n  in\n  match (arg, exn_cases) with\n  | {exp_desc = Texp_tuple argl}, [] ->\n    Matching.for_multiple_match e.exp_loc (transl_list argl) cases partial\n  | {exp_desc = Texp_tuple argl}, _ :: _ ->\n    let val_ids = List.map (fun _ -> Typecore.name_pattern \"val\" []) argl in\n    let lvars = List.map (fun id -> Lvar id) val_ids in\n    static_catch (transl_list argl) val_ids\n      (Matching.for_multiple_match e.exp_loc lvars cases partial)\n  | arg, [] ->\n    Matching.for_function e.exp_loc None (transl_exp arg) cases partial\n  | arg, _ :: _ ->\n    let val_id = Typecore.name_pattern \"val\" pat_expr_list in\n    static_catch\n      [transl_exp arg]\n      [val_id]\n      (Matching.for_function e.exp_loc None (Lvar val_id) cases partial)\n\nopen Format\n\nlet report_error ppf = function\n  | 
Unknown_builtin_primitive prim_name ->\n    fprintf ppf \"Unknown builtin primitive \\\"%s\\\"\" prim_name\n  | Unreachable_reached -> fprintf ppf \"Unreachable expression was reached\"\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, err) -> Some (Location.error_of_printer loc report_error err)\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/translcore.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Translation from typed abstract syntax to lambda terms,\n   for the core language *)\n\nval transl_exp : Typedtree.expression -> Lambda.lambda\n\nval transl_let :\n  Asttypes.rec_flag ->\n  Typedtree.value_binding list ->\n  Lambda.lambda ->\n  Lambda.lambda\n\nval transl_primitive :\n  Location.t ->\n  Primitive.description ->\n  Env.t ->\n  Types.type_expr ->\n  Lambda.lambda\n\nval transl_extension_constructor :\n  Env.t -> Path.t option -> Typedtree.extension_constructor -> Lambda.lambda\n\n(* Forward declaration -- to be filled in by Translmod.transl_module *)\nval transl_module :\n  (Typedtree.module_coercion ->\n  Path.t option ->\n  Typedtree.module_expr ->\n  Lambda.lambda)\n  ref\n"
  },
  {
    "path": "analysis/vendor/ml/translmod.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Translation from typed abstract syntax to lambda terms,\n   for the module language *)\n\nopen Typedtree\n\ntype error = Conflicting_inline_attributes | Fragile_pattern_in_toplevel\n\nexception Error of Location.t * error\n\n(* Keep track of the root path (from the root of the namespace to the\n   currently compiled module expression).  Useful for naming extensions. 
*)\n\nlet global_path glob : Path.t option = Some (Pident glob)\n\nlet is_top (rootpath : Path.t option) =\n  match rootpath with\n  | Some (Pident _) -> true\n  | _ -> false\n\nlet functor_path path param : Path.t option =\n  match path with\n  | None -> None\n  | Some p -> Some (Papply (p, Pident param))\n\nlet field_path path field : Path.t option =\n  match path with\n  | None -> None\n  | Some p -> Some (Pdot (p, Ident.name field, Path.nopos))\n\n(* Compile type extensions *)\n\nlet transl_type_extension env rootpath (tyext : Typedtree.type_extension) body :\n    Lambda.lambda =\n  List.fold_right\n    (fun ext body ->\n      let lam =\n        Translcore.transl_extension_constructor env\n          (field_path rootpath ext.ext_id)\n          ext\n      in\n      Lambda.Llet (Strict, Pgenval, ext.ext_id, lam, body))\n    tyext.tyext_constructors body\n\n(* Compile a coercion *)\n\nlet rec apply_coercion loc strict (restr : Typedtree.module_coercion) arg =\n  match restr with\n  | Tcoerce_none -> arg\n  | Tcoerce_structure (pos_cc_list, id_pos_list, runtime_fields) ->\n    Lambda.name_lambda strict arg (fun id ->\n        let get_field_name name pos =\n          Lambda.Lprim (Pfield (pos, Fld_module {name}), [Lvar id], loc)\n        in\n        let lam =\n          Lambda.Lprim\n            ( Pmakeblock (Blk_module runtime_fields),\n              Ext_list.map2 pos_cc_list runtime_fields (fun (pos, cc) name ->\n                  apply_coercion loc Alias cc\n                    (Lprim (Pfield (pos, Fld_module {name}), [Lvar id], loc))),\n              loc )\n        in\n        wrap_id_pos_list loc id_pos_list get_field_name lam)\n  | Tcoerce_functor (cc_arg, cc_res) ->\n    let param = Ident.create \"funarg\" in\n    let carg = apply_coercion loc Alias cc_arg (Lvar param) in\n    apply_coercion_result loc strict arg [param] [carg] cc_res\n  | Tcoerce_primitive {pc_loc; pc_desc; pc_env; pc_type} ->\n    Translcore.transl_primitive pc_loc pc_desc pc_env pc_type\n  
| Tcoerce_alias (path, cc) ->\n    Lambda.name_lambda strict arg (fun _ ->\n        apply_coercion loc Alias cc (Lambda.transl_normal_path path))\n\nand apply_coercion_result loc strict funct params args cc_res =\n  match cc_res with\n  | Tcoerce_functor (cc_arg, cc_res) ->\n    let param = Ident.create \"funarg\" in\n    let arg = apply_coercion loc Alias cc_arg (Lvar param) in\n    apply_coercion_result loc strict funct (param :: params) (arg :: args)\n      cc_res\n  | _ ->\n    Lambda.name_lambda strict funct (fun id ->\n        Lfunction\n          {\n            params = List.rev params;\n            attr = {Lambda.default_function_attribute with is_a_functor = true};\n            loc;\n            body =\n              apply_coercion loc Strict cc_res\n                (Lapply\n                   {\n                     ap_loc = loc;\n                     ap_func = Lvar id;\n                     ap_args = List.rev args;\n                     ap_inlined = Default_inline;\n                   });\n          })\n\nand wrap_id_pos_list loc id_pos_list get_field lam =\n  let fv = Lambda.free_variables lam in\n  (*Format.eprintf \"%a@.\" Printlambda.lambda lam;\n    IdentSet.iter (fun id -> Format.eprintf \"%a \" Ident.print id) fv;\n    Format.eprintf \"@.\";*)\n  let lam, s =\n    List.fold_left\n      (fun (lam, s) (id', pos, c) ->\n        if Lambda.IdentSet.mem id' fv then\n          let id'' = Ident.create (Ident.name id') in\n          ( Lambda.Llet\n              ( Alias,\n                Pgenval,\n                id'',\n                apply_coercion loc Alias c (get_field (Ident.name id') pos),\n                lam ),\n            Ident.add id' (Lambda.Lvar id'') s )\n        else (lam, s))\n      (lam, Ident.empty) id_pos_list\n  in\n  if s == Ident.empty then lam else Lambda.subst_lambda s lam\n\n(* Compose two coercions\n   apply_coercion c1 (apply_coercion c2 e) behaves like\n   apply_coercion (compose_coercions c1 c2) e. 
*)\n\nlet rec compose_coercions c1 c2 =\n  match (c1, c2) with\n  | Tcoerce_none, c2 -> c2\n  | c1, Tcoerce_none -> c1\n  | ( Tcoerce_structure (pc1, ids1, runtime_fields1),\n      Tcoerce_structure (pc2, ids2, _runtime_fields2) ) ->\n    let v2 = Array.of_list pc2 in\n    let ids1 =\n      List.map\n        (fun (id, pos1, c1) ->\n          let pos2, c2 = v2.(pos1) in\n          (id, pos2, compose_coercions c1 c2))\n        ids1\n    in\n    Tcoerce_structure\n      ( List.map\n          (function\n            | (_p1, Tcoerce_primitive _) as x ->\n              x (* (p1, Tcoerce_primitive p) *)\n            | p1, c1 ->\n              let p2, c2 = v2.(p1) in\n              (p2, compose_coercions c1 c2))\n          pc1,\n        ids1 @ ids2,\n        runtime_fields1 )\n  | Tcoerce_functor (arg1, res1), Tcoerce_functor (arg2, res2) ->\n    Tcoerce_functor (compose_coercions arg2 arg1, compose_coercions res1 res2)\n  | c1, Tcoerce_alias (path, c2) -> Tcoerce_alias (path, compose_coercions c1 c2)\n  | _, _ -> Misc.fatal_error \"Translmod.compose_coercions\"\n\n(*\nlet apply_coercion a b c =\n  Format.eprintf \"@[<2>apply_coercion@ %a@]@.\" Includemod.print_coercion b;\n  apply_coercion a b c\n\nlet compose_coercions c1 c2 =\n  let c3 = compose_coercions c1 c2 in\n  let open Includemod in\n  Format.eprintf \"@[<2>compose_coercions@ (%a)@ (%a) =@ %a@]@.\"\n    print_coercion c1 print_coercion c2 print_coercion c3;\n  c3\n*)\n\n(* Record the primitive declarations occurring in the module compiled *)\n\nlet rec pure_module m : Lambda.let_kind =\n  match m.mod_desc with\n  | Tmod_ident _ -> Alias\n  | Tmod_constraint (m, _, _, _) -> pure_module m\n  | _ -> Strict\n\n(* Generate lambda-code for a reordered list of bindings *)\n\n(* Extract the list of \"value\" identifiers bound by a signature.\n   \"Value\" identifiers are identifiers for signature components that\n   correspond to a run-time value: values, extensions, modules, classes.\n   Note: manifest primitives do not 
correspond to a run-time value! *)\n\nlet rec bound_value_identifiers : Types.signature_item list -> Ident.t list =\n  function\n  | [] -> []\n  | Sig_value (id, {val_kind = Val_reg}) :: rem ->\n    id :: bound_value_identifiers rem\n  | Sig_typext (id, _, _) :: rem -> id :: bound_value_identifiers rem\n  | Sig_module (id, _, _) :: rem -> id :: bound_value_identifiers rem\n  | Sig_class _ :: _ -> assert false\n  | _ :: rem -> bound_value_identifiers rem\n\n(* Compile one or more functors, merging curried functors to produce\n   multi-argument functors.  Any [@inline] attribute on a functor that is\n   merged must be consistent with any other [@inline] attribute(s) on the\n   functor(s) being merged with.  Such an attribute will be placed on the\n   resulting merged functor. *)\n\nlet merge_inline_attributes (attr1 : Lambda.inline_attribute)\n    (attr2 : Lambda.inline_attribute) loc =\n  match (attr1, attr2) with\n  | Lambda.Default_inline, _ -> attr2\n  | _, Lambda.Default_inline -> attr1\n  | _, _ ->\n    if attr1 = attr2 then attr1\n    else raise (Error (loc, Conflicting_inline_attributes))\n\nlet merge_functors mexp coercion root_path =\n  let rec merge mexp coercion path acc inline_attribute =\n    let finished = (acc, mexp, path, coercion, inline_attribute) in\n    match mexp.mod_desc with\n    | Tmod_functor (param, _, _, body) ->\n      let inline_attribute' =\n        Translattribute.get_inline_attribute mexp.mod_attributes\n      in\n      let arg_coercion, res_coercion =\n        match coercion with\n        | Tcoerce_none -> (Tcoerce_none, Tcoerce_none)\n        | Tcoerce_functor (arg_coercion, res_coercion) ->\n          (arg_coercion, res_coercion)\n        | _ -> Misc.fatal_error \"Translmod.merge_functors: bad coercion\"\n      in\n      let loc = mexp.mod_loc in\n      let path = functor_path path param in\n      let inline_attribute =\n        merge_inline_attributes inline_attribute inline_attribute' loc\n      in\n      merge body res_coercion 
path\n        ((param, loc, arg_coercion) :: acc)\n        inline_attribute\n    | _ -> finished\n  in\n  merge mexp coercion root_path [] Default_inline\n\nlet export_identifiers : Ident.t list ref = ref []\n\nlet rec compile_functor mexp coercion root_path loc =\n  let functor_params_rev, body, body_path, res_coercion, inline_attribute =\n    merge_functors mexp coercion root_path\n  in\n  assert (functor_params_rev <> []);\n  (* cf. [transl_module] *)\n  let params, body =\n    List.fold_left\n      (fun (params, body) (param, loc, arg_coercion) ->\n        let param' = Ident.rename param in\n        let arg = apply_coercion loc Alias arg_coercion (Lvar param') in\n        let params = param' :: params in\n        let body = Lambda.Llet (Alias, Pgenval, param, arg, body) in\n        (params, body))\n      ([], transl_module res_coercion body_path body)\n      functor_params_rev\n  in\n  Lambda.Lfunction\n    {\n      params;\n      attr =\n        {\n          inline = inline_attribute;\n          is_a_functor = true;\n          return_unit = false;\n          async = false;\n          one_unit_arg = false;\n          directive = None;\n        };\n      loc;\n      body;\n    }\n\n(* Compile a module expression *)\nand transl_module cc rootpath mexp =\n  List.iter (Translattribute.check_attribute_on_module mexp) mexp.mod_attributes;\n  let loc = mexp.mod_loc in\n  match mexp.mod_type with\n  | Mty_alias (Mta_absent, _) ->\n    apply_coercion loc Alias cc Lambda.lambda_module_alias\n  | _ -> (\n    match mexp.mod_desc with\n    | Tmod_ident (path, _) ->\n      apply_coercion loc Strict cc\n        (Lambda.transl_module_path ~loc mexp.mod_env path)\n    | Tmod_structure str -> fst (transl_struct loc [] cc rootpath str)\n    | Tmod_functor _ -> compile_functor mexp cc rootpath loc\n    | Tmod_apply (funct, arg, ccarg) ->\n      let inlined_attribute, funct =\n        Translattribute.get_and_remove_inlined_attribute_on_module funct\n      in\n      apply_coercion 
loc Strict cc\n        (Lapply\n           {\n             ap_loc = loc;\n             ap_func = transl_module Tcoerce_none None funct;\n             ap_args = [transl_module ccarg None arg];\n             ap_inlined = inlined_attribute;\n           })\n    | Tmod_constraint (arg, _, _, ccarg) ->\n      transl_module (compose_coercions cc ccarg) rootpath arg\n    | Tmod_unpack (arg, _) ->\n      apply_coercion loc Strict cc (Translcore.transl_exp arg))\n\nand transl_struct loc fields cc rootpath str =\n  transl_structure loc fields cc rootpath str.str_final_env str.str_items\n\nand transl_structure loc fields cc rootpath final_env = function\n  | [] -> (\n    let is_top_root_path = is_top rootpath in\n\n    match cc with\n    | Tcoerce_none ->\n      let block_fields =\n        List.fold_left\n          (fun acc id ->\n            if is_top_root_path then\n              export_identifiers := id :: !export_identifiers;\n            Lambda.Lvar id :: acc)\n          [] fields\n      in\n      ( Lambda.Lprim\n          ( Pmakeblock\n              (if is_top_root_path then Blk_module_export !export_identifiers\n               else Blk_module (List.rev_map (fun id -> id.Ident.name) fields)),\n            block_fields,\n            loc ),\n        List.length fields )\n    | Tcoerce_structure (pos_cc_list, id_pos_list, runtime_fields) ->\n      (* Do not ignore id_pos_list ! 
*)\n      (*Format.eprintf \"%a@.@[\" Includemod.print_coercion cc;\n        List.iter (fun l -> Format.eprintf \"%a@ \" Ident.print l)\n          fields;\n        Format.eprintf \"@]@.\";*)\n      assert (List.length runtime_fields = List.length pos_cc_list);\n      let v = Ext_array.reverse_of_list fields in\n      let get_field pos = Lambda.Lvar v.(pos)\n      and ids =\n        List.fold_right Lambda.IdentSet.add fields Lambda.IdentSet.empty\n      in\n      let get_field_name _name = get_field in\n      let result =\n        List.fold_right\n          (fun (pos, cc) code ->\n            match cc with\n            | Tcoerce_primitive p ->\n              if is_top rootpath then\n                export_identifiers := p.pc_id :: !export_identifiers;\n              Translcore.transl_primitive p.pc_loc p.pc_desc p.pc_env p.pc_type\n              :: code\n            | _ ->\n              if is_top rootpath then\n                export_identifiers := v.(pos) :: !export_identifiers;\n              apply_coercion loc Strict cc (get_field pos) :: code)\n          pos_cc_list []\n      in\n      let lam =\n        Lambda.Lprim\n          ( Pmakeblock\n              (if is_top_root_path then Blk_module_export !export_identifiers\n               else Blk_module runtime_fields),\n            result,\n            loc )\n      and id_pos_list =\n        Ext_list.filter id_pos_list (fun (id, _, _) ->\n            not (Lambda.IdentSet.mem id ids))\n      in\n      ( wrap_id_pos_list loc id_pos_list get_field_name lam,\n        List.length pos_cc_list )\n    | _ -> Misc.fatal_error \"Translmod.transl_structure\")\n  | item :: rem -> (\n    match item.str_desc with\n    | Tstr_eval (expr, _) ->\n      let body, size = transl_structure loc fields cc rootpath final_env rem in\n      (Lsequence (Translcore.transl_exp expr, body), size)\n    | Tstr_value (rec_flag, pat_expr_list) ->\n      let ext_fields = rev_let_bound_idents pat_expr_list @ fields in\n      let body, size =\n       
 transl_structure loc ext_fields cc rootpath final_env rem\n      in\n      (* Recursve already excludes complex pattern bindings*)\n      if is_top rootpath && rec_flag = Nonrecursive then\n        Ext_list.iter pat_expr_list (fun {vb_pat} ->\n            match vb_pat.pat_desc with\n            | Tpat_var _ | Tpat_alias _ -> ()\n            | _ ->\n              if not (Parmatch.irrefutable vb_pat) then\n                raise (Error (vb_pat.pat_loc, Fragile_pattern_in_toplevel)));\n      (Translcore.transl_let rec_flag pat_expr_list body, size)\n    | Tstr_typext tyext ->\n      let ids = List.map (fun ext -> ext.ext_id) tyext.tyext_constructors in\n      let body, size =\n        transl_structure loc\n          (List.rev_append ids fields)\n          cc rootpath final_env rem\n      in\n      (transl_type_extension item.str_env rootpath tyext body, size)\n    | Tstr_exception ext ->\n      let id = ext.ext_id in\n      let path = field_path rootpath id in\n      let body, size =\n        transl_structure loc (id :: fields) cc rootpath final_env rem\n      in\n      ( Llet\n          ( Strict,\n            Pgenval,\n            id,\n            Translcore.transl_extension_constructor item.str_env path ext,\n            body ),\n        size )\n    | Tstr_module mb as s ->\n      let id = mb.mb_id in\n      let body, size =\n        transl_structure loc\n          (if Typemod.rescript_hide s then fields else id :: fields)\n          cc rootpath final_env rem\n      in\n      let module_body =\n        transl_module Tcoerce_none (field_path rootpath id) mb.mb_expr\n      in\n      let module_body =\n        Translattribute.add_inline_attribute module_body mb.mb_loc\n          mb.mb_attributes\n      in\n      (Llet (pure_module mb.mb_expr, Pgenval, id, module_body, body), size)\n    | Tstr_recmodule bindings ->\n      let ext_fields =\n        List.rev_append (List.map (fun mb -> mb.mb_id) bindings) fields\n      in\n      let body, size =\n        transl_structure 
loc ext_fields cc rootpath final_env rem\n      in\n      let lam =\n        Transl_recmodule.compile_recmodule\n          (fun id modl _loc ->\n            transl_module Tcoerce_none (field_path rootpath id) modl)\n          bindings body\n      in\n      (lam, size)\n    | Tstr_include incl ->\n      let ids = bound_value_identifiers incl.incl_type in\n      let modl = incl.incl_mod in\n      let mid = Ident.create \"include\" in\n      let rec rebind_idents pos newfields = function\n        | [] -> transl_structure loc newfields cc rootpath final_env rem\n        | id :: ids ->\n          let body, size = rebind_idents (pos + 1) (id :: newfields) ids in\n          ( Llet\n              ( Alias,\n                Pgenval,\n                id,\n                Lprim\n                  ( Pfield (pos, Fld_module {name = Ident.name id}),\n                    [Lvar mid],\n                    incl.incl_loc ),\n                body ),\n            size )\n      in\n      let body, size = rebind_idents 0 fields ids in\n      ( Llet\n          ( pure_module modl,\n            Pgenval,\n            mid,\n            transl_module Tcoerce_none None modl,\n            body ),\n        size )\n    | Tstr_class _ | Tstr_primitive _ | Tstr_type _ | Tstr_modtype _\n    | Tstr_open _ | Tstr_class_type _ | Tstr_attribute _ ->\n      transl_structure loc fields cc rootpath final_env rem)\n\n(* Update forward declaration in Translcore *)\nlet _ = Translcore.transl_module := transl_module\n\n(* Introduce dependencies on modules referenced only by \"external\". *)\n\n(* Compile an implementation *)\n\nlet transl_implementation module_name (str, cc) =\n  export_identifiers := [];\n  let module_id = Ident.create_persistent module_name in\n  let body, _ = transl_struct Location.none [] cc (global_path module_id) str in\n  (body, !export_identifiers)\n\n(* Build the list of value identifiers defined by a toplevel structure\n   (excluding primitive declarations). 
*)\n\n(* second level idents (module M = struct ... let id = ... end),\n   and all sub-levels idents *)\n(* A variant of transl_structure used to compile toplevel structure definitions\n   for the native-code compiler. Store the defined values in the fields\n   of the global as soon as they are defined, in order to reduce register\n   pressure.  Also rewrites the defining expressions so that they\n   refer to earlier fields of the structure through the fields of\n   the global, not by their names.\n   \"map\" is a table from defined idents to (pos in global block, coercion).\n   \"prim\" is a list of (pos in global block, primitive declaration). *)\n\n(* Compile an implementation using transl_store_structure\n   (for the native-code compiler). *)\n\n(* Compile a toplevel phrase *)\n\n(* Error report *)\n\nlet report_error ppf = function\n  | Conflicting_inline_attributes ->\n    Format.fprintf ppf \"@[Conflicting ``inline'' attributes@]\"\n  | Fragile_pattern_in_toplevel ->\n    Format.fprintf ppf \"@[Such fragile pattern not allowed in the toplevel@]\"\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, err) -> Some (Location.error_of_printer loc report_error err)\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/translmod.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Translation from typed abstract syntax to lambda terms,\n   for the module language *)\n\nval transl_implementation :\n  string ->\n  Typedtree.structure * Typedtree.module_coercion ->\n  Lambda.lambda * Ident.t list\n\ntype error\n(* exception Error of Location.t * error *)\n\nval report_error : Format.formatter -> error -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/typeclass.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*          Jerome Vouillon, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Parsetree\nopen Asttypes\nopen Types\nopen Typetexp\nopen Format\n\ntype 'a class_info = {\n  cls_id: Ident.t;\n  cls_id_loc: string loc;\n  cls_decl: class_declaration;\n  cls_ty_id: Ident.t;\n  cls_ty_decl: class_type_declaration;\n  cls_obj_id: Ident.t;\n  cls_obj_abbr: type_declaration;\n  cls_typesharp_id: Ident.t;\n  cls_abbr: type_declaration;\n  cls_arity: int;\n  cls_pub_methods: string list;\n  cls_info: 'a;\n}\n\ntype class_type_info = {\n  clsty_ty_id: Ident.t;\n  clsty_id_loc: string loc;\n  clsty_ty_decl: class_type_declaration;\n  clsty_obj_id: Ident.t;\n  clsty_obj_abbr: type_declaration;\n  clsty_typesharp_id: Ident.t;\n  clsty_abbr: type_declaration;\n  clsty_info: Typedtree.class_type_declaration;\n}\n\ntype error =\n  | Unconsistent_constraint of (type_expr * type_expr) list\n  | Field_type_mismatch of string * string * (type_expr * type_expr) list\n  
| Structure_expected of class_type\n  | Pattern_type_clash of type_expr\n  | Repeated_parameter\n  | Unbound_class_type_2 of Longident.t\n  | Abbrev_type_clash of type_expr * type_expr * type_expr\n  | Constructor_type_mismatch of string * (type_expr * type_expr) list\n  | Virtual_class of bool * bool * string list * string list\n  | Parameter_arity_mismatch of Longident.t * int * int\n  | Parameter_mismatch of (type_expr * type_expr) list\n  | Bad_parameters of Ident.t * type_expr * type_expr\n  | Unbound_type_var of (formatter -> unit) * Ctype.closed_class_failure\n  | Non_generalizable_class of Ident.t * Types.class_declaration\n  | Cannot_coerce_self of type_expr\n  | Non_collapsable_conjunction of\n      Ident.t * Types.class_declaration * (type_expr * type_expr) list\n  | No_overriding of string * string\n\nexception Error of Location.t * Env.t * error\nexception Error_forward of Location.error\n\nopen Typedtree\n\nlet ctyp desc typ env loc =\n  {\n    ctyp_desc = desc;\n    ctyp_type = typ;\n    ctyp_loc = loc;\n    ctyp_env = env;\n    ctyp_attributes = [];\n  }\n\n(**********************)\n(*  Useful constants  *)\n(**********************)\n\n(*\n   Self type have a dummy private method, thus preventing it to become\n   closed.\n*)\nlet dummy_method = Btype.dummy_method\n\n(*\n   Path associated to the temporary class type of a class being typed\n   (its constructor is not available).\n*)\nlet unbound_class = Path.Pident (Ident.create \"*undef*\")\n\n(************************************)\n(*  Some operations on class types  *)\n(************************************)\n\n(* Fully expand the head of a class type *)\nlet rec scrape_class_type = function\n  | Cty_constr (_, _, cty) -> scrape_class_type cty\n  | cty -> cty\n\n(* Generalize a class type *)\nlet rec generalize_class_type gen = function\n  | Cty_constr (_, params, cty) ->\n    List.iter gen params;\n    generalize_class_type gen cty\n  | Cty_signature {csig_self = sty; csig_vars = vars; csig_inher 
= inher} ->\n    gen sty;\n    Vars.iter (fun _ (_, _, ty) -> gen ty) vars;\n    List.iter (fun (_, tl) -> List.iter gen tl) inher\n  | Cty_arrow (_, ty, cty) ->\n    gen ty;\n    generalize_class_type gen cty\n\nlet generalize_class_type vars =\n  let gen = if vars then Ctype.generalize else Ctype.generalize_structure in\n  generalize_class_type gen\n\n(* Return the virtual methods of a class type *)\nlet virtual_methods sign =\n  let fields, _ =\n    Ctype.flatten_fields (Ctype.object_fields sign.Types.csig_self)\n  in\n  List.fold_left\n    (fun virt (lab, _, _) ->\n      if lab = dummy_method then virt\n      else if Concr.mem lab sign.csig_concr then virt\n      else lab :: virt)\n    [] fields\n\n(* Return the constructor type associated to a class type *)\nlet rec constructor_type constr cty =\n  match cty with\n  | Cty_constr (_, _, cty) -> constructor_type constr cty\n  | Cty_signature _ -> constr\n  | Cty_arrow (l, ty, cty) ->\n    Ctype.newty (Tarrow (l, ty, constructor_type constr cty, Cok))\n\nlet rec class_body cty =\n  match cty with\n  | Cty_constr _ -> cty (* Only class bodies can be abbreviated *)\n  | Cty_signature _ -> cty\n  | Cty_arrow (_, _, cty) -> class_body cty\n\n(* Check that all type variables are generalizable *)\n(* Use Env.empty to prevent expansion of recursively defined object types;\n   cf. 
typing-poly/poly.ml *)\nlet rec closed_class_type = function\n  | Cty_constr (_, params, _) ->\n    List.for_all (Ctype.closed_schema Env.empty) params\n  | Cty_signature sign ->\n    Ctype.closed_schema Env.empty sign.csig_self\n    && Vars.fold\n         (fun _ (_, _, ty) cc -> Ctype.closed_schema Env.empty ty && cc)\n         sign.csig_vars true\n  | Cty_arrow (_, ty, cty) ->\n    Ctype.closed_schema Env.empty ty && closed_class_type cty\n\nlet closed_class cty =\n  List.for_all (Ctype.closed_schema Env.empty) cty.cty_params\n  && closed_class_type cty.cty_type\n\nlet rec limited_generalize rv = function\n  | Cty_constr (_path, params, cty) ->\n    List.iter (Ctype.limited_generalize rv) params;\n    limited_generalize rv cty\n  | Cty_signature sign ->\n    Ctype.limited_generalize rv sign.csig_self;\n    Vars.iter\n      (fun _ (_, _, ty) -> Ctype.limited_generalize rv ty)\n      sign.csig_vars;\n    List.iter\n      (fun (_, tl) -> List.iter (Ctype.limited_generalize rv) tl)\n      sign.csig_inher\n  | Cty_arrow (_, ty, cty) ->\n    Ctype.limited_generalize rv ty;\n    limited_generalize rv cty\n\n(***********************************)\n(*  Primitives for typing classes  *)\n(***********************************)\n\n(* Enter an instance variable in the environment *)\nlet concr_vals vars =\n  Vars.fold\n    (fun id (_, vf, _) s -> if vf = Virtual then s else Concr.add id s)\n    vars Concr.empty\n\nlet inheritance self_type env ovf concr_meths warn_vals loc parent =\n  match scrape_class_type parent with\n  | Cty_signature cl_sig ->\n    (* Methods *)\n    (try Ctype.unify env self_type cl_sig.csig_self\n     with Ctype.Unify trace -> (\n       match trace with\n       | _ :: _ :: _ :: ({desc = Tfield (n, _, _, _)}, _) :: rem ->\n         raise (Error (loc, env, Field_type_mismatch (\"method\", n, rem)))\n       | _ -> assert false));\n\n    (* Overriding *)\n    let over_meths = Concr.inter cl_sig.csig_concr concr_meths in\n    let concr_vals = concr_vals 
cl_sig.csig_vars in\n    let over_vals = Concr.inter concr_vals warn_vals in\n    (match ovf with\n    | Some Fresh ->\n      let cname =\n        match parent with\n        | Cty_constr (p, _, _) -> Path.name p\n        | _ -> \"inherited\"\n      in\n      if not (Concr.is_empty over_meths) then\n        Location.prerr_warning loc\n          (Warnings.Method_override (cname :: Concr.elements over_meths));\n      if not (Concr.is_empty over_vals) then\n        Location.prerr_warning loc\n          (Warnings.Instance_variable_override\n             (cname :: Concr.elements over_vals))\n    | Some Override when Concr.is_empty over_meths && Concr.is_empty over_vals\n      ->\n      raise (Error (loc, env, No_overriding (\"\", \"\")))\n    | _ -> ());\n\n    let concr_meths = Concr.union cl_sig.csig_concr concr_meths\n    and warn_vals = Concr.union concr_vals warn_vals in\n\n    (cl_sig, concr_meths, warn_vals)\n  | _ -> raise (Error (loc, env, Structure_expected parent))\n\nlet delayed_meth_specs = ref []\n\nlet declare_method val_env meths self_type lab priv sty loc =\n  let _, ty' = Ctype.filter_self_method val_env lab priv meths self_type in\n  let unif ty =\n    try Ctype.unify val_env ty ty'\n    with Ctype.Unify trace ->\n      raise (Error (loc, val_env, Field_type_mismatch (\"method\", lab, trace)))\n  in\n  let sty = Ast_helper.Typ.force_poly sty in\n  match (sty.ptyp_desc, priv) with\n  | Ptyp_poly ([], sty'), Public ->\n    (* TODO: we moved the [transl_simple_type_univars] outside of the lazy,\n       so that we can get an immediate value. Is that correct ? Ask Jacques. 
*)\n    let returned_cty = ctyp Ttyp_any (Ctype.newty Tnil) val_env loc in\n    delayed_meth_specs :=\n      Warnings.mk_lazy (fun () ->\n          let cty = transl_simple_type_univars val_env sty' in\n          let ty = cty.ctyp_type in\n          unif ty;\n          returned_cty.ctyp_desc <- Ttyp_poly ([], cty);\n          returned_cty.ctyp_type <- ty)\n      :: !delayed_meth_specs;\n    returned_cty\n  | _ ->\n    let cty = transl_simple_type val_env false sty in\n    let ty = cty.ctyp_type in\n    unif ty;\n    cty\n\nlet type_constraint val_env sty sty' loc =\n  let cty = transl_simple_type val_env false sty in\n  let ty = cty.ctyp_type in\n  let cty' = transl_simple_type val_env false sty' in\n  let ty' = cty'.ctyp_type in\n  (try Ctype.unify val_env ty ty'\n   with Ctype.Unify trace ->\n     raise (Error (loc, val_env, Unconsistent_constraint trace)));\n  (cty, cty')\n\n(*******************************)\n\nlet add_val lab (mut, virt, ty) val_sig =\n  let virt =\n    try\n      let _mut', virt', _ty' = Vars.find lab val_sig in\n      if virt' = Concrete then virt' else virt\n    with Not_found -> virt\n  in\n  Vars.add lab (mut, virt, ty) val_sig\n\nlet rec class_type_field env self_type meths arg ctf =\n  Builtin_attributes.warning_scope ctf.pctf_attributes (fun () ->\n      class_type_field_aux env self_type meths arg ctf)\n\nand class_type_field_aux env self_type meths\n    (fields, val_sig, concr_meths, inher) ctf =\n  let loc = ctf.pctf_loc in\n  let mkctf desc =\n    {ctf_desc = desc; ctf_loc = loc; ctf_attributes = ctf.pctf_attributes}\n  in\n  match ctf.pctf_desc with\n  | Pctf_inherit sparent ->\n    let parent = class_type env sparent in\n    let inher =\n      match parent.cltyp_type with\n      | Cty_constr (p, tl, _) -> (p, tl) :: inher\n      | _ -> inher\n    in\n    let cl_sig, concr_meths, _ =\n      inheritance self_type env None concr_meths Concr.empty sparent.pcty_loc\n        parent.cltyp_type\n    in\n    let val_sig = Vars.fold add_val 
cl_sig.csig_vars val_sig in\n    (mkctf (Tctf_inherit parent) :: fields, val_sig, concr_meths, inher)\n  | Pctf_val ({txt = lab}, mut, virt, sty) ->\n    let cty = transl_simple_type env false sty in\n    let ty = cty.ctyp_type in\n    ( mkctf (Tctf_val (lab, mut, virt, cty)) :: fields,\n      add_val lab (mut, virt, ty) val_sig,\n      concr_meths,\n      inher )\n  | Pctf_method ({txt = lab}, priv, virt, sty) ->\n    let cty = declare_method env meths self_type lab priv sty ctf.pctf_loc in\n    let concr_meths =\n      match virt with\n      | Concrete -> Concr.add lab concr_meths\n      | Virtual -> concr_meths\n    in\n    ( mkctf (Tctf_method (lab, priv, virt, cty)) :: fields,\n      val_sig,\n      concr_meths,\n      inher )\n  | Pctf_constraint (sty, sty') ->\n    let cty, cty' = type_constraint env sty sty' ctf.pctf_loc in\n    (mkctf (Tctf_constraint (cty, cty')) :: fields, val_sig, concr_meths, inher)\n  | Pctf_attribute x ->\n    Builtin_attributes.warning_attribute x;\n    (mkctf (Tctf_attribute x) :: fields, val_sig, concr_meths, inher)\n  | Pctf_extension ext ->\n    raise (Error_forward (Builtin_attributes.error_of_extension ext))\n\nand class_signature env {pcsig_self = sty; pcsig_fields = sign} =\n  let meths = ref Meths.empty in\n  let self_cty = transl_simple_type env false sty in\n  let self_cty =\n    {self_cty with ctyp_type = Ctype.expand_head env self_cty.ctyp_type}\n  in\n  let self_type = self_cty.ctyp_type in\n\n  (* Check that the binder is a correct type, and introduce a dummy\n     method preventing self type from being closed. 
*)\n  let dummy_obj = Ctype.newvar () in\n  Ctype.unify env\n    (Ctype.filter_method env dummy_method Private dummy_obj)\n    (Ctype.newty (Ttuple []));\n  (try Ctype.unify env self_type dummy_obj\n   with Ctype.Unify _ ->\n     raise (Error (sty.ptyp_loc, env, Pattern_type_clash self_type)));\n\n  (* Class type fields *)\n  let rev_fields, val_sig, concr_meths, inher =\n    Builtin_attributes.warning_scope [] (fun () ->\n        List.fold_left\n          (class_type_field env self_type meths)\n          ([], Vars.empty, Concr.empty, [])\n          sign)\n  in\n  let cty =\n    {\n      csig_self = self_type;\n      csig_vars = val_sig;\n      csig_concr = concr_meths;\n      csig_inher = inher;\n    }\n  in\n  {csig_self = self_cty; csig_fields = List.rev rev_fields; csig_type = cty}\n\nand class_type env scty =\n  Builtin_attributes.warning_scope scty.pcty_attributes (fun () ->\n      class_type_aux env scty)\n\nand class_type_aux env scty =\n  let cltyp desc typ =\n    {\n      cltyp_desc = desc;\n      cltyp_type = typ;\n      cltyp_loc = scty.pcty_loc;\n      cltyp_env = env;\n      cltyp_attributes = scty.pcty_attributes;\n    }\n  in\n  match scty.pcty_desc with\n  | Pcty_constr (lid, styl) ->\n    let path, decl = Typetexp.find_class_type env scty.pcty_loc lid.txt in\n    if Path.same decl.clty_path unbound_class then\n      raise (Error (scty.pcty_loc, env, Unbound_class_type_2 lid.txt));\n    let params, clty = Ctype.instance_class decl.clty_params decl.clty_type in\n    if List.length params <> List.length styl then\n      raise\n        (Error\n           ( scty.pcty_loc,\n             env,\n             Parameter_arity_mismatch\n               (lid.txt, List.length params, List.length styl) ));\n    let ctys =\n      List.map2\n        (fun sty ty ->\n          let cty' = transl_simple_type env false sty in\n          let ty' = cty'.ctyp_type in\n          (try Ctype.unify env ty' ty\n           with Ctype.Unify trace ->\n             raise (Error 
(sty.ptyp_loc, env, Parameter_mismatch trace)));\n          cty')\n        styl params\n    in\n    let typ = Cty_constr (path, params, clty) in\n    cltyp (Tcty_constr (path, lid, ctys)) typ\n  | Pcty_signature pcsig ->\n    let clsig = class_signature env pcsig in\n    let typ = Cty_signature clsig.csig_type in\n    cltyp (Tcty_signature clsig) typ\n  | Pcty_arrow (l, sty, scty) ->\n    let cty = transl_simple_type env false sty in\n    let ty = cty.ctyp_type in\n    let ty =\n      if Btype.is_optional l then\n        Ctype.newty (Tconstr (Predef.path_option, [ty], ref Mnil))\n      else ty\n    in\n    let clty = class_type env scty in\n    let typ = Cty_arrow (l, ty, clty.cltyp_type) in\n    cltyp (Tcty_arrow (l, cty, clty)) typ\n  | Pcty_open (ovf, lid, e) ->\n    let path, newenv = !Typecore.type_open ovf env scty.pcty_loc lid in\n    let clty = class_type newenv e in\n    cltyp (Tcty_open (ovf, path, lid, newenv, clty)) clty.cltyp_type\n  | Pcty_extension ext ->\n    raise (Error_forward (Builtin_attributes.error_of_extension ext))\n\nlet class_type env scty =\n  delayed_meth_specs := [];\n  let cty = class_type env scty in\n  List.iter Lazy.force (List.rev !delayed_meth_specs);\n  delayed_meth_specs := [];\n  cty\n\n(*******************************)\n\n(*******************************)\n\n(* Approximate the type of the constructor to allow recursive use *)\n(* of optional parameters                                         *)\n\nlet var_option = Predef.type_option (Btype.newgenvar ())\n\nlet rec approx_description ct =\n  match ct.pcty_desc with\n  | Pcty_arrow (l, _, ct) ->\n    let arg =\n      if Btype.is_optional l then Ctype.instance_def var_option\n      else Ctype.newvar ()\n    in\n    Ctype.newty (Tarrow (l, arg, approx_description ct, Cok))\n  | _ -> Ctype.newvar ()\n\n(*******************************)\n\nlet temp_abbrev loc env id arity =\n  let params = ref [] in\n  for _i = 1 to arity do\n    params := Ctype.newvar () :: !params\n  done;\n  let 
ty = Ctype.newobj (Ctype.newvar ()) in\n  let env =\n    Env.add_type ~check:true id\n      {\n        type_params = !params;\n        type_arity = arity;\n        type_kind = Type_abstract;\n        type_private = Public;\n        type_manifest = Some ty;\n        type_variance = Misc.replicate_list Variance.full arity;\n        type_newtype_level = None;\n        type_loc = loc;\n        type_attributes = [];\n        (* or keep attrs from the class decl? *)\n        type_immediate = false;\n        type_unboxed = unboxed_false_default_false;\n      }\n      env\n  in\n  (!params, ty, env)\n\nlet initial_env approx (res, env) (cl, id, ty_id, obj_id, cl_id) =\n  (* Temporary abbreviations *)\n  let arity = List.length cl.pci_params in\n  let obj_params, obj_ty, env = temp_abbrev cl.pci_loc env obj_id arity in\n  let cl_params, cl_ty, env = temp_abbrev cl.pci_loc env cl_id arity in\n\n  (* Temporary type for the class constructor *)\n  let constr_type = approx cl.pci_expr in\n  let dummy_cty =\n    Cty_signature\n      {\n        csig_self = Ctype.newvar ();\n        csig_vars = Vars.empty;\n        csig_concr = Concr.empty;\n        csig_inher = [];\n      }\n  in\n  let dummy_class =\n    {\n      Types.cty_params = [];\n      (* Dummy value *)\n      cty_variance = [];\n      cty_type = dummy_cty;\n      (* Dummy value *)\n      cty_path = unbound_class;\n      cty_new =\n        (match cl.pci_virt with\n        | Virtual -> None\n        | Concrete -> Some constr_type);\n      cty_loc = Location.none;\n      cty_attributes = [];\n    }\n  in\n  let env =\n    Env.add_cltype ty_id\n      {\n        clty_params = [];\n        (* Dummy value *)\n        clty_variance = [];\n        clty_type = dummy_cty;\n        (* Dummy value *)\n        clty_path = unbound_class;\n        clty_loc = Location.none;\n        clty_attributes = [];\n      }\n      env\n  in\n  ( ( cl,\n      id,\n      ty_id,\n      obj_id,\n      obj_params,\n      obj_ty,\n      cl_id,\n      
cl_params,\n      cl_ty,\n      constr_type,\n      dummy_class )\n    :: res,\n    env )\n\nlet class_infos kind\n    ( cl,\n      id,\n      ty_id,\n      obj_id,\n      obj_params,\n      obj_ty,\n      cl_id,\n      cl_params,\n      cl_ty,\n      constr_type,\n      dummy_class ) (res, env) =\n  reset_type_variables ();\n  Ctype.begin_class_def ();\n\n  (* Introduce class parameters *)\n  let ci_params =\n    let make_param (sty, v) =\n      try (transl_type_param env sty, v)\n      with Already_bound ->\n        raise (Error (sty.ptyp_loc, env, Repeated_parameter))\n    in\n    List.map make_param cl.pci_params\n  in\n  let params = List.map (fun (cty, _) -> cty.ctyp_type) ci_params in\n\n  (* Allow self coercions (only for class declarations) *)\n  let coercion_locs = ref [] in\n\n  (* Type the class expression *)\n  let expr, typ =\n    try\n      Typecore.self_coercion :=\n        (Path.Pident obj_id, coercion_locs) :: !Typecore.self_coercion;\n      let res = kind env cl.pci_expr in\n      Typecore.self_coercion := List.tl !Typecore.self_coercion;\n      res\n    with exn ->\n      Typecore.self_coercion := [];\n      raise exn\n  in\n\n  Ctype.end_def ();\n\n  let sty = Ctype.self_type typ in\n\n  (* First generalize the type of the dummy method (cf PR#6123) *)\n  let fields, _ = Ctype.flatten_fields (Ctype.object_fields sty) in\n  List.iter\n    (fun (met, _, ty) -> if met = dummy_method then Ctype.generalize ty)\n    fields;\n  (* Generalize the row variable *)\n  let rv = Ctype.row_variable sty in\n  List.iter (Ctype.limited_generalize rv) params;\n  limited_generalize rv typ;\n\n  (* Check the abbreviation for the object type *)\n  let obj_params', obj_type = Ctype.instance_class params typ in\n  let constr = Ctype.newconstr (Path.Pident obj_id) obj_params in\n  (let ty = Ctype.self_type obj_type in\n   Ctype.hide_private_methods ty;\n   Ctype.close_object ty;\n   (try List.iter2 (Ctype.unify env) obj_params obj_params'\n    with Ctype.Unify _ ->\n   
   raise\n        (Error\n           ( cl.pci_loc,\n             env,\n             Bad_parameters\n               (obj_id, constr, Ctype.newconstr (Path.Pident obj_id) obj_params')\n           )));\n   try Ctype.unify env ty constr\n   with Ctype.Unify _ ->\n     raise\n       (Error\n          ( cl.pci_loc,\n            env,\n            Abbrev_type_clash (constr, ty, Ctype.expand_head env constr) )));\n\n  (* Check the other temporary abbreviation (#-type) *)\n  (let cl_params', cl_type = Ctype.instance_class params typ in\n   let ty = Ctype.self_type cl_type in\n   Ctype.hide_private_methods ty;\n   Ctype.set_object_name obj_id (Ctype.row_variable ty) cl_params ty;\n   (try List.iter2 (Ctype.unify env) cl_params cl_params'\n    with Ctype.Unify _ ->\n      raise\n        (Error\n           ( cl.pci_loc,\n             env,\n             Bad_parameters\n               ( cl_id,\n                 Ctype.newconstr (Path.Pident cl_id) cl_params,\n                 Ctype.newconstr (Path.Pident cl_id) cl_params' ) )));\n   try Ctype.unify env ty cl_ty\n   with Ctype.Unify _ ->\n     let constr = Ctype.newconstr (Path.Pident cl_id) params in\n     raise (Error (cl.pci_loc, env, Abbrev_type_clash (constr, ty, cl_ty))));\n\n  (* Type of the class constructor *)\n  (try\n     Ctype.unify env\n       (constructor_type constr obj_type)\n       (Ctype.instance env constr_type)\n   with Ctype.Unify trace ->\n     raise\n       (Error\n          (cl.pci_loc, env, Constructor_type_mismatch (cl.pci_name.txt, trace))));\n\n  (* Class and class type temporary definitions *)\n  let cty_variance = List.map (fun _ -> Variance.full) params in\n  let cltydef =\n    {\n      clty_params = params;\n      clty_type = class_body typ;\n      clty_variance = cty_variance;\n      clty_path = Path.Pident obj_id;\n      clty_loc = cl.pci_loc;\n      clty_attributes = cl.pci_attributes;\n    }\n  in\n  dummy_class.cty_type <- typ;\n  let env = Env.add_cltype ty_id cltydef env in\n\n  (if 
cl.pci_virt = Concrete then\n     let sign = Ctype.signature_of_class_type typ in\n     let mets = virtual_methods sign in\n     let vals =\n       Vars.fold\n         (fun name (_mut, vr, _ty) l -> if vr = Virtual then name :: l else l)\n         sign.csig_vars []\n     in\n     if mets <> [] || vals <> [] then\n       raise (Error (cl.pci_loc, env, Virtual_class (false, false, mets, vals))));\n\n  (* Misc. *)\n  let arity = Ctype.class_type_arity typ in\n  let pub_meths =\n    let fields, _ =\n      Ctype.flatten_fields (Ctype.object_fields (Ctype.expand_head env obj_ty))\n    in\n    List.map\n      (function\n        | lab, _, _ -> lab)\n      fields\n  in\n\n  (* Final definitions *)\n  let params', typ' = Ctype.instance_class params typ in\n  let cltydef =\n    {\n      clty_params = params';\n      clty_type = class_body typ';\n      clty_variance = cty_variance;\n      clty_path = Path.Pident obj_id;\n      clty_loc = cl.pci_loc;\n      clty_attributes = cl.pci_attributes;\n    }\n  and clty =\n    {\n      cty_params = params';\n      cty_type = typ';\n      cty_variance;\n      cty_path = Path.Pident obj_id;\n      cty_new =\n        (match cl.pci_virt with\n        | Virtual -> None\n        | Concrete -> Some (Ctype.instance env constr_type));\n      cty_loc = cl.pci_loc;\n      cty_attributes = cl.pci_attributes;\n    }\n  in\n  let obj_abbr =\n    {\n      type_params = obj_params;\n      type_arity = List.length obj_params;\n      type_kind = Type_abstract;\n      type_private = Public;\n      type_manifest = Some obj_ty;\n      type_variance = List.map (fun _ -> Variance.full) obj_params;\n      type_newtype_level = None;\n      type_loc = cl.pci_loc;\n      type_attributes = [];\n      (* or keep attrs from cl? 
*)\n      type_immediate = false;\n      type_unboxed = unboxed_false_default_false;\n    }\n  in\n  let cl_params, cl_ty =\n    Ctype.instance_parameterized_type params (Ctype.self_type typ)\n  in\n  Ctype.hide_private_methods cl_ty;\n  Ctype.set_object_name obj_id (Ctype.row_variable cl_ty) cl_params cl_ty;\n  let cl_abbr =\n    {\n      type_params = cl_params;\n      type_arity = List.length cl_params;\n      type_kind = Type_abstract;\n      type_private = Public;\n      type_manifest = Some cl_ty;\n      type_variance = List.map (fun _ -> Variance.full) cl_params;\n      type_newtype_level = None;\n      type_loc = cl.pci_loc;\n      type_attributes = [];\n      (* or keep attrs from cl? *)\n      type_immediate = false;\n      type_unboxed = unboxed_false_default_false;\n    }\n  in\n  ( ( cl,\n      id,\n      clty,\n      ty_id,\n      cltydef,\n      obj_id,\n      obj_abbr,\n      cl_id,\n      cl_abbr,\n      ci_params,\n      arity,\n      pub_meths,\n      List.rev !coercion_locs,\n      expr )\n    :: res,\n    env )\n\nlet final_decl env\n    ( cl,\n      id,\n      clty,\n      ty_id,\n      cltydef,\n      obj_id,\n      obj_abbr,\n      cl_id,\n      cl_abbr,\n      ci_params,\n      arity,\n      pub_meths,\n      coe,\n      expr ) =\n  (try Ctype.collapse_conj_params env clty.cty_params\n   with Ctype.Unify trace ->\n     raise\n       (Error (cl.pci_loc, env, Non_collapsable_conjunction (id, clty, trace))));\n\n  List.iter Ctype.generalize clty.cty_params;\n  generalize_class_type true clty.cty_type;\n  Misc.may Ctype.generalize clty.cty_new;\n  List.iter Ctype.generalize obj_abbr.type_params;\n  Misc.may Ctype.generalize obj_abbr.type_manifest;\n  List.iter Ctype.generalize cl_abbr.type_params;\n  Misc.may Ctype.generalize cl_abbr.type_manifest;\n\n  if not (closed_class clty) then\n    raise (Error (cl.pci_loc, env, Non_generalizable_class (id, clty)));\n\n  (match\n     Ctype.closed_class clty.cty_params\n       
(Ctype.signature_of_class_type clty.cty_type)\n   with\n  | None -> ()\n  | Some reason ->\n    let printer = function\n      | ppf -> Printtyp.cltype_declaration id ppf cltydef\n    in\n    raise (Error (cl.pci_loc, env, Unbound_type_var (printer, reason))));\n\n  ( id,\n    cl.pci_name,\n    clty,\n    ty_id,\n    cltydef,\n    obj_id,\n    obj_abbr,\n    cl_id,\n    cl_abbr,\n    arity,\n    pub_meths,\n    coe,\n    expr,\n    {\n      ci_loc = cl.pci_loc;\n      ci_virt = cl.pci_virt;\n      ci_params;\n      (* TODO : check that we have the correct use of identifiers *)\n      ci_id_name = cl.pci_name;\n      ci_id_class = id;\n      ci_id_class_type = ty_id;\n      ci_id_object = obj_id;\n      ci_id_typehash = cl_id;\n      ci_expr = expr;\n      ci_decl = clty;\n      ci_type_decl = cltydef;\n      ci_attributes = cl.pci_attributes;\n    } )\n(*   (cl.pci_variance, cl.pci_loc)) *)\n\nlet class_infos kind\n    ( cl,\n      id,\n      ty_id,\n      obj_id,\n      obj_params,\n      obj_ty,\n      cl_id,\n      cl_params,\n      cl_ty,\n      constr_type,\n      dummy_class ) (res, env) =\n  Builtin_attributes.warning_scope cl.pci_attributes (fun () ->\n      class_infos kind\n        ( cl,\n          id,\n          ty_id,\n          obj_id,\n          obj_params,\n          obj_ty,\n          cl_id,\n          cl_params,\n          cl_ty,\n          constr_type,\n          dummy_class )\n        (res, env))\n\nlet extract_type_decls\n    ( _id,\n      _id_loc,\n      clty,\n      _ty_id,\n      cltydef,\n      obj_id,\n      obj_abbr,\n      _cl_id,\n      cl_abbr,\n      _arity,\n      _pub_meths,\n      _coe,\n      _expr,\n      required ) decls =\n  (obj_id, obj_abbr, cl_abbr, clty, cltydef, required) :: decls\n\nlet merge_type_decls\n    ( id,\n      id_loc,\n      _clty,\n      ty_id,\n      _cltydef,\n      obj_id,\n      _obj_abbr,\n      cl_id,\n      _cl_abbr,\n      arity,\n      pub_meths,\n      coe,\n      expr,\n      req ) (obj_abbr, cl_abbr, 
clty, cltydef) =\n  ( id,\n    id_loc,\n    clty,\n    ty_id,\n    cltydef,\n    obj_id,\n    obj_abbr,\n    cl_id,\n    cl_abbr,\n    arity,\n    pub_meths,\n    coe,\n    expr,\n    req )\n\nlet final_env env\n    ( _id,\n      _id_loc,\n      _clty,\n      ty_id,\n      cltydef,\n      obj_id,\n      obj_abbr,\n      cl_id,\n      cl_abbr,\n      _arity,\n      _pub_meths,\n      _coe,\n      _expr,\n      _req ) =\n  (* Add definitions after cleaning them *)\n  Env.add_type ~check:true obj_id\n    (Subst.type_declaration Subst.identity obj_abbr)\n    (Env.add_type ~check:true cl_id\n       (Subst.type_declaration Subst.identity cl_abbr)\n       (Env.add_cltype ty_id\n          (Subst.cltype_declaration Subst.identity cltydef)\n          env))\n\n(* Check that #c is coercible to c if there is a self-coercion *)\nlet check_coercions env\n    ( id,\n      id_loc,\n      clty,\n      ty_id,\n      cltydef,\n      obj_id,\n      obj_abbr,\n      cl_id,\n      cl_abbr,\n      arity,\n      pub_meths,\n      coercion_locs,\n      _expr,\n      req ) =\n  (match coercion_locs with\n  | [] -> ()\n  | loc :: _ ->\n    let cl_ty, obj_ty =\n      match (cl_abbr.type_manifest, obj_abbr.type_manifest) with\n      | Some cl_ab, Some obj_ab ->\n        let cl_params, cl_ty =\n          Ctype.instance_parameterized_type cl_abbr.type_params cl_ab\n        and obj_params, obj_ty =\n          Ctype.instance_parameterized_type obj_abbr.type_params obj_ab\n        in\n        List.iter2 (Ctype.unify env) cl_params obj_params;\n        (cl_ty, obj_ty)\n      | _ -> assert false\n    in\n    (try Ctype.subtype env cl_ty obj_ty ()\n     with Ctype.Subtype (tr1, tr2) ->\n       raise (Typecore.Error (loc, env, Typecore.Not_subtype (tr1, tr2))));\n    if not (Ctype.opened_object cl_ty) then\n      raise (Error (loc, env, Cannot_coerce_self obj_ty)));\n  {\n    cls_id = id;\n    cls_id_loc = id_loc;\n    cls_decl = clty;\n    cls_ty_id = ty_id;\n    cls_ty_decl = cltydef;\n    cls_obj_id 
= obj_id;\n    cls_obj_abbr = obj_abbr;\n    cls_typesharp_id = cl_id;\n    cls_abbr = cl_abbr;\n    cls_arity = arity;\n    cls_pub_methods = pub_meths;\n    cls_info = req;\n  }\n\n(*******************************)\n(* FIXME: [define_class] is always [false] here *)\nlet type_classes approx kind env cls =\n  let cls =\n    List.map\n      (function\n        | cl ->\n          ( cl,\n            Ident.create cl.pci_name.txt,\n            Ident.create cl.pci_name.txt,\n            Ident.create cl.pci_name.txt,\n            Ident.create (\"#\" ^ cl.pci_name.txt) ))\n      cls\n  in\n  Ctype.init_def (Ident.current_time ());\n  Ctype.begin_class_def ();\n  let res, env = List.fold_left (initial_env approx) ([], env) cls in\n  let res, env = List.fold_right (class_infos kind) res ([], env) in\n  Ctype.end_def ();\n  let res = List.rev_map (final_decl env) res in\n  let decls = List.fold_right extract_type_decls res [] in\n  let decls = Typedecl.compute_variance_decls env decls in\n  let res = List.map2 merge_type_decls res decls in\n  let env = List.fold_left final_env env res in\n  let res = List.map (check_coercions env) res in\n  (res, env)\n\nlet class_description env sexpr =\n  let expr = class_type env sexpr in\n  (expr, expr.cltyp_type)\n\nlet class_type_declarations env cls =\n  let decls, env = type_classes approx_description class_description env cls in\n  ( List.map\n      (fun decl ->\n        {\n          clsty_ty_id = decl.cls_ty_id;\n          clsty_id_loc = decl.cls_id_loc;\n          clsty_ty_decl = decl.cls_ty_decl;\n          clsty_obj_id = decl.cls_obj_id;\n          clsty_obj_abbr = decl.cls_obj_abbr;\n          clsty_typesharp_id = decl.cls_typesharp_id;\n          clsty_abbr = decl.cls_abbr;\n          clsty_info = decl.cls_info;\n        })\n      decls,\n    env )\n\n(*******************************)\n\n(* Approximate the class declaration as class ['params] id = object end *)\nlet approx_class sdecl =\n  let open Ast_helper in\n  let self' = 
Typ.any () in\n  let clty' = Cty.signature ~loc:sdecl.pci_expr.pcty_loc (Csig.mk self' []) in\n  {sdecl with pci_expr = clty'}\n\nlet approx_class_declarations env sdecls =\n  fst (class_type_declarations env (List.map approx_class sdecls))\n\n(*******************************)\n\n(* Error report *)\n\nopen Format\n\nlet report_error env ppf = function\n  | Repeated_parameter -> fprintf ppf \"A type parameter occurs several times\"\n  | Unconsistent_constraint trace ->\n    fprintf ppf \"The class constraints are not consistent.@.\";\n    Printtyp.report_unification_error ppf env trace\n      (fun ppf -> fprintf ppf \"Type\")\n      (fun ppf -> fprintf ppf \"is not compatible with type\")\n  | Field_type_mismatch (k, m, trace) ->\n    Printtyp.report_unification_error ppf env trace\n      (function\n        | ppf -> fprintf ppf \"The %s %s@ has type\" k m)\n      (function ppf -> fprintf ppf \"but is expected to have type\")\n  | Structure_expected clty ->\n    fprintf ppf\n      \"@[This class expression is not a class structure; it has type@ %a@]\"\n      Printtyp.class_type clty\n  | Pattern_type_clash ty ->\n    (* XXX Trace *)\n    (* XXX Revoir message d'erreur | Improve error message *)\n    Printtyp.reset_and_mark_loops ty;\n    fprintf ppf \"@[%s@ %a@]\"\n      \"This pattern cannot match self: it only matches values of type\"\n      Printtyp.type_expr ty\n  | Unbound_class_type_2 cl ->\n    fprintf ppf \"@[The class type@ %a@ is not yet completely defined@]\"\n      Printtyp.longident cl\n  | Abbrev_type_clash (abbrev, actual, expected) ->\n    (* XXX Afficher une trace ? | Print a trace? 
*)\n    Printtyp.reset_and_mark_loops_list [abbrev; actual; expected];\n    fprintf ppf\n      \"@[The abbreviation@ %a@ expands to type@ %a@ but is used with type@ %a@]\"\n      Printtyp.type_expr abbrev Printtyp.type_expr actual Printtyp.type_expr\n      expected\n  | Constructor_type_mismatch (c, trace) ->\n    Printtyp.report_unification_error ppf env trace\n      (function\n        | ppf -> fprintf ppf \"The expression \\\"new %s\\\" has type\" c)\n      (function ppf -> fprintf ppf \"but is used with type\")\n  | Virtual_class (cl, imm, mets, vals) ->\n    let print_mets ppf mets =\n      List.iter\n        (function\n          | met -> fprintf ppf \"@ %s\" met)\n        mets\n    in\n    let missings =\n      match (mets, vals) with\n      | [], _ -> \"variables\"\n      | _, [] -> \"methods\"\n      | _ -> \"methods and variables\"\n    in\n    let print_msg ppf =\n      if imm then fprintf ppf \"This object has virtual %s\" missings\n      else if cl then fprintf ppf \"This class should be virtual\"\n      else fprintf ppf \"This class type should be virtual\"\n    in\n    fprintf ppf \"@[%t.@ @[<2>The following %s are undefined :%a@]@]\" print_msg\n      missings print_mets (mets @ vals)\n  | Parameter_arity_mismatch (lid, expected, provided) ->\n    fprintf ppf\n      \"@[The class constructor %a@ expects %i type argument(s),@ but is here \\\n       applied to %i type argument(s)@]\"\n      Printtyp.longident lid expected provided\n  | Parameter_mismatch trace ->\n    Printtyp.report_unification_error ppf env trace\n      (function\n        | ppf -> fprintf ppf \"The type parameter\")\n      (function\n        | ppf -> fprintf ppf \"does not meet its constraint: it should be\")\n  | Bad_parameters (id, params, cstrs) ->\n    Printtyp.reset_and_mark_loops_list [params; cstrs];\n    fprintf ppf\n      \"@[The abbreviation %a@ is used with parameters@ %a@ which are \\\n       incompatible with constraints@ %a@]\"\n      Printtyp.ident id Printtyp.type_expr 
params Printtyp.type_expr cstrs\n  | Unbound_type_var (printer, reason) ->\n    let print_common ppf kind ty0 real lab ty =\n      let ty1 =\n        if real then ty0 else Btype.newgenty (Tobject (ty0, ref None))\n      in\n      List.iter Printtyp.mark_loops [ty; ty1];\n      fprintf ppf \"The %s %s@ has type@;<1 2>%a@ where@ %a@ is unbound\" kind lab\n        Printtyp.type_expr ty Printtyp.type_expr ty0\n    in\n    let print_reason ppf = function\n      | Ctype.CC_Method (ty0, real, lab, ty) ->\n        print_common ppf \"method\" ty0 real lab ty\n      | Ctype.CC_Value (ty0, real, lab, ty) ->\n        print_common ppf \"instance variable\" ty0 real lab ty\n    in\n    Printtyp.reset ();\n    fprintf ppf\n      \"@[<v>@[Some type variables are unbound in this type:@;\\\n       <1 2>%t@]@ @[%a@]@]\"\n      printer print_reason reason\n  | Non_generalizable_class (id, clty) ->\n    fprintf ppf\n      \"@[The type of this class,@ %a,@ contains type variables that cannot be \\\n       generalized@]\"\n      (Printtyp.class_declaration id)\n      clty\n  | Cannot_coerce_self ty ->\n    fprintf ppf\n      \"@[The type of self cannot be coerced to@ the type of the current \\\n       class:@ %a.@.Some occurrences are contravariant@]\"\n      Printtyp.type_scheme ty\n  | Non_collapsable_conjunction (id, clty, trace) ->\n    fprintf ppf\n      \"@[The type of this class,@ %a,@ contains non-collapsible conjunctive \\\n       types in constraints@]\"\n      (Printtyp.class_declaration id)\n      clty;\n    Printtyp.report_unification_error ppf env trace\n      (fun ppf -> fprintf ppf \"Type\")\n      (fun ppf -> fprintf ppf \"is not compatible with type\")\n  | No_overriding (_, \"\") ->\n    fprintf ppf \"@[This inheritance does not override any method@ %s@]\"\n      \"instance variable\"\n  | No_overriding (kind, name) ->\n    fprintf ppf \"@[The %s `%s'@ has no previous definition@]\" kind name\n\nlet report_error env ppf err =\n  Printtyp.wrap_printing_env env (fun () 
-> report_error env ppf err)\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, env, err) ->\n      Some (Location.error_of_printer loc (report_error env) err)\n    | Error_forward err -> Some err\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/typeclass.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*          Jerome Vouillon, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Types\nopen Format\n\ntype 'a class_info = {\n  cls_id: Ident.t;\n  cls_id_loc: string loc;\n  cls_decl: class_declaration;\n  cls_ty_id: Ident.t;\n  cls_ty_decl: class_type_declaration;\n  cls_obj_id: Ident.t;\n  cls_obj_abbr: type_declaration;\n  cls_typesharp_id: Ident.t;\n  cls_abbr: type_declaration;\n  cls_arity: int;\n  cls_pub_methods: string list;\n  cls_info: 'a;\n}\n\ntype class_type_info = {\n  clsty_ty_id: Ident.t;\n  clsty_id_loc: string loc;\n  clsty_ty_decl: class_type_declaration;\n  clsty_obj_id: Ident.t;\n  clsty_obj_abbr: type_declaration;\n  clsty_typesharp_id: Ident.t;\n  clsty_abbr: type_declaration;\n  clsty_info: Typedtree.class_type_declaration;\n}\n\nval class_type_declarations :\n  Env.t -> Parsetree.class_type_declaration list -> class_type_info list * Env.t\n\nval approx_class_declarations :\n  Env.t -> 
Parsetree.class_type_declaration list -> class_type_info list\n\nval virtual_methods : Types.class_signature -> label list\n\ntype error\n\nexception Error of Location.t * Env.t * error\nexception Error_forward of Location.error\n\nval report_error : Env.t -> formatter -> error -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/typecore.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Typechecking for the core language *)\n\nopen Misc\nopen Asttypes\nopen Parsetree\nopen Types\nopen Typedtree\nopen Btype\nopen Ctype\nopen Error_message_utils\n\ntype error =\n  | Polymorphic_label of Longident.t\n  | Constructor_arity_mismatch of Longident.t * int * int\n  | Label_mismatch of Longident.t * (type_expr * type_expr) list\n  | Pattern_type_clash of (type_expr * type_expr) list\n  | Or_pattern_type_clash of Ident.t * (type_expr * type_expr) list\n  | Multiply_bound_variable of string\n  | Orpat_vars of Ident.t * Ident.t list\n  | Expr_type_clash of (type_expr * type_expr) list * type_clash_context option\n  | Apply_non_function of type_expr\n  | Apply_wrong_label of arg_label * type_expr\n  | Label_multiply_defined of string\n  | Labels_missing of string list * bool\n  | Label_not_mutable of Longident.t\n  | Wrong_name of string * type_expr * string * Path.t * 
string * string list\n  | Name_type_mismatch of\n      string * Longident.t * (Path.t * Path.t) * (Path.t * Path.t) list\n  | Undefined_method of type_expr * string * string list option\n  | Private_type of type_expr\n  | Private_label of Longident.t * type_expr\n  | Not_subtype of (type_expr * type_expr) list * (type_expr * type_expr) list\n  | Coercion_failure of\n      type_expr * type_expr * (type_expr * type_expr) list * bool\n  | Too_many_arguments of bool * type_expr\n  | Abstract_wrong_label of arg_label * type_expr\n  | Scoping_let_module of string * type_expr\n  | Not_a_variant_type of Longident.t\n  | Incoherent_label_order\n  | Less_general of string * (type_expr * type_expr) list\n  | Modules_not_allowed\n  | Cannot_infer_signature\n  | Not_a_packed_module of type_expr\n  | Recursive_local_constraint of (type_expr * type_expr) list\n  | Unexpected_existential\n  | Unqualified_gadt_pattern of Path.t * string\n  | Invalid_interval\n  | Invalid_for_loop_index\n  | No_value_clauses\n  | Exception_pattern_below_toplevel\n  | Inlined_record_escape\n  | Inlined_record_expected\n  | Unrefuted_pattern of pattern\n  | Invalid_extension_constructor_payload\n  | Not_an_extension_constructor\n  | Literal_overflow of string\n  | Unknown_literal of string * char\n  | Illegal_letrec_pat\n  | Labels_omitted of string list\n  | Empty_record_literal\n  | Uncurried_arity_mismatch of type_expr * int * int\n  | Field_not_optional of string * type_expr\nexception Error of Location.t * Env.t * error\nexception Error_forward of Location.error\n\n(* Forward declaration, to be filled in by Typemod.type_module *)\n\nlet type_module =\n  ref\n    (fun _env _md -> assert false\n      : Env.t -> Parsetree.module_expr -> Typedtree.module_expr)\n\n(* Forward declaration, to be filled in by Typemod.type_open *)\n\nlet type_open :\n    (?used_slot:bool ref ->\n    override_flag ->\n    Env.t ->\n    Location.t ->\n    Longident.t loc ->\n    Path.t * Env.t)\n    ref =\n  ref (fun 
?used_slot:_ _ -> assert false)\n\n(* Forward declaration, to be filled in by Typemod.type_package *)\n\nlet type_package = ref (fun _ -> assert false)\n\n(* Forward declaration, to be filled in by Typeclass.class_structure *)\n\n(*\n  Saving and outputting type information.\n  We keep these function names short, because they have to be\n  called each time we create a record of type [Typedtree.expression]\n  or [Typedtree.pattern] that will end up in the typed AST.\n*)\nlet re node =\n  Cmt_format.add_saved_type (Cmt_format.Partial_expression node);\n  Stypes.record (Stypes.Ti_expr node);\n  node\nlet rp node =\n  Cmt_format.add_saved_type (Cmt_format.Partial_pattern node);\n  Stypes.record (Stypes.Ti_pat node);\n  node\n\ntype recarg = Allowed | Required | Rejected\n\nlet case lhs rhs = {c_lhs = lhs; c_guard = None; c_rhs = rhs}\n\n(* Upper approximation of free identifiers on the parse tree *)\n\nlet iter_expression f e =\n  let rec expr e =\n    f e;\n    match e.pexp_desc with\n    | Pexp_extension _ (* we don't iterate under extension point *)\n    | Pexp_ident _ | Pexp_new _ | Pexp_constant _ ->\n      ()\n    | Pexp_function pel -> List.iter case pel\n    | Pexp_fun (_, eo, _, e) ->\n      may expr eo;\n      expr e\n    | Pexp_apply (e, lel) ->\n      expr e;\n      List.iter (fun (_, e) -> expr e) lel\n    | Pexp_let (_, pel, e) ->\n      expr e;\n      List.iter binding pel\n    | Pexp_match (e, pel) | Pexp_try (e, pel) ->\n      expr e;\n      List.iter case pel\n    | Pexp_array el | Pexp_tuple el -> List.iter expr el\n    | Pexp_construct (_, eo) | Pexp_variant (_, eo) -> may expr eo\n    | Pexp_record (iel, eo) ->\n      may expr eo;\n      List.iter (fun (_, e) -> expr e) iel\n    | Pexp_open (_, _, e)\n    | Pexp_newtype (_, e)\n    | Pexp_poly (e, _)\n    | Pexp_lazy e\n    | Pexp_assert e\n    | Pexp_setinstvar (_, e)\n    | Pexp_send (e, _)\n    | Pexp_constraint (e, _)\n    | Pexp_coerce (e, _, _)\n    | Pexp_letexception (_, e)\n    | 
Pexp_field (e, _) ->\n      expr e\n    | Pexp_while (e1, e2) | Pexp_sequence (e1, e2) | Pexp_setfield (e1, _, e2)\n      ->\n      expr e1;\n      expr e2\n    | Pexp_ifthenelse (e1, e2, eo) ->\n      expr e1;\n      expr e2;\n      may expr eo\n    | Pexp_for (_, e1, e2, _, e3) ->\n      expr e1;\n      expr e2;\n      expr e3\n    | Pexp_override sel -> List.iter (fun (_, e) -> expr e) sel\n    | Pexp_letmodule (_, me, e) ->\n      expr e;\n      module_expr me\n    | Pexp_object _ -> assert false\n    | Pexp_pack me -> module_expr me\n    | Pexp_unreachable -> ()\n  and case {pc_lhs = _; pc_guard; pc_rhs} =\n    may expr pc_guard;\n    expr pc_rhs\n  and binding x = expr x.pvb_expr\n  and module_expr me =\n    match me.pmod_desc with\n    | Pmod_extension _ | Pmod_ident _ -> ()\n    | Pmod_structure str -> List.iter structure_item str\n    | Pmod_constraint (me, _) | Pmod_functor (_, _, me) -> module_expr me\n    | Pmod_apply (me1, me2) ->\n      module_expr me1;\n      module_expr me2\n    | Pmod_unpack e -> expr e\n  and structure_item str =\n    match str.pstr_desc with\n    | Pstr_eval (e, _) -> expr e\n    | Pstr_value (_, pel) -> List.iter binding pel\n    | Pstr_primitive _ | Pstr_type _ | Pstr_typext _ | Pstr_exception _\n    | Pstr_modtype _ | Pstr_open _ | Pstr_class_type _ | Pstr_attribute _\n    | Pstr_extension _ ->\n      ()\n    | Pstr_include {pincl_mod = me} | Pstr_module {pmb_expr = me} ->\n      module_expr me\n    | Pstr_recmodule l -> List.iter (fun x -> module_expr x.pmb_expr) l\n    | Pstr_class () -> ()\n  in\n\n  expr e\n\nlet all_idents_cases el =\n  let idents = Hashtbl.create 8 in\n  let f = function\n    | {pexp_desc = Pexp_ident {txt = Longident.Lident id; _}; _} ->\n      Hashtbl.replace idents id ()\n    | _ -> ()\n  in\n  List.iter\n    (fun cp ->\n      may (iter_expression f) cp.pc_guard;\n      iter_expression f cp.pc_rhs)\n    el;\n  Hashtbl.fold (fun x () rest -> x :: rest) idents []\n\n(* Typing of constants *)\n\nlet 
type_constant = function\n  | Const_int _ -> instance_def Predef.type_int\n  | Const_char _ -> instance_def Predef.type_char\n  | Const_string _ -> instance_def Predef.type_string\n  | Const_float _ -> instance_def Predef.type_float\n  | Const_int64 _ -> instance_def Predef.type_int64\n  | Const_bigint _ -> instance_def Predef.type_bigint\n  | Const_int32 _ -> assert false\n\nlet constant : Parsetree.constant -> (Asttypes.constant, error) result =\n  function\n  | Pconst_integer (i, None) -> (\n    try Ok (Const_int (Misc.Int_literal_converter.int i))\n    with Failure _ -> Error (Literal_overflow \"int\"))\n  | Pconst_integer (i, Some 'l') -> (\n    try Ok (Const_int32 (Misc.Int_literal_converter.int32 i))\n    with Failure _ -> Error (Literal_overflow \"int32\"))\n  | Pconst_integer (i, Some 'L') -> (\n    try Ok (Const_int64 (Misc.Int_literal_converter.int64 i))\n    with Failure _ -> Error (Literal_overflow \"int64\"))\n  | Pconst_integer (i, Some 'n') ->\n    let sign, i = Bigint_utils.parse_bigint i in\n    Ok (Const_bigint (sign, i))\n  | Pconst_integer (i, Some c) -> Error (Unknown_literal (i, c))\n  | Pconst_char c -> Ok (Const_char c)\n  | Pconst_string (s, d) -> Ok (Const_string (s, d))\n  | Pconst_float (f, None) -> Ok (Const_float f)\n  | Pconst_float (f, Some c) -> Error (Unknown_literal (f, c))\n\nlet constant_or_raise env loc cst =\n  match constant cst with\n  | Ok c -> c\n  | Error err -> raise (Error (loc, env, err))\n\n(* Specific version of type_option, using newty rather than newgenty *)\n\nlet type_option ty = newty (Tconstr (Predef.path_option, [ty], ref Mnil))\n\nlet mkexp exp_desc exp_type exp_loc exp_env =\n  {exp_desc; exp_type; exp_loc; exp_env; exp_extra = []; exp_attributes = []}\n\nlet option_none ty loc =\n  let lid = Longident.Lident \"None\" and env = Env.initial_safe_string in\n  let cnone = Env.lookup_constructor lid env in\n  mkexp (Texp_construct (mknoloc lid, cnone, [])) ty loc env\n\nlet option_some texp =\n  let lid = 
Longident.Lident \"Some\" in\n  let csome = Env.lookup_constructor lid Env.initial_safe_string in\n  mkexp\n    (Texp_construct (mknoloc lid, csome, [texp]))\n    (type_option texp.exp_type)\n    texp.exp_loc texp.exp_env\n\nlet extract_option_type env ty =\n  match expand_head env ty with\n  | {desc = Tconstr (path, [ty], _)} when Path.same path Predef.path_option ->\n    ty\n  | _ -> assert false\n\nlet extract_concrete_record env ty =\n  match extract_concrete_typedecl env ty with\n  | p0, p, {type_kind = Type_record (fields, repr)} -> (p0, p, fields, repr)\n  | _ -> raise Not_found\n\nlet extract_concrete_variant env ty =\n  match extract_concrete_typedecl env ty with\n  | p0, p, {type_kind = Type_variant cstrs}\n    when not (Ast_uncurried.type_is_uncurried_fun ty) ->\n    (p0, p, cstrs)\n  | p0, p, {type_kind = Type_open} -> (p0, p, [])\n  | _ -> raise Not_found\n\nlet label_is_optional ld =\n  match ld.lbl_repres with\n  | Record_optional_labels lbls -> Ext_list.mem_string lbls ld.lbl_name\n  | Record_inlined {optional_labels} ->\n    Ext_list.mem_string optional_labels ld.lbl_name\n  | _ -> false\n\nlet check_optional_attr env ld attrs loc =\n  let check_redundant () =\n    if not (label_is_optional ld) then\n      raise (Error (loc, env, Field_not_optional (ld.lbl_name, ld.lbl_res)));\n    true\n  in\n  Ext_list.exists attrs (fun ({txt}, _) ->\n      txt = \"res.optional\" && check_redundant ())\n\n(* unification inside type_pat*)\nlet unify_pat_types loc env ty ty' =\n  try unify env ty ty' with\n  | Unify trace -> raise (Error (loc, env, Pattern_type_clash trace))\n  | Tags (l1, l2) ->\n    raise (Typetexp.Error (loc, env, Typetexp.Variant_tags (l1, l2)))\n\n(* unification inside type_exp and type_expect *)\nlet unify_exp_types ?type_clash_context loc env ty expected_ty =\n  (* Format.eprintf \"@[%a@ %a@]@.\" Printtyp.raw_type_expr exp.exp_type\n     Printtyp.raw_type_expr expected_ty; *)\n  try unify env ty expected_ty with\n  | Unify trace ->\n    
raise (Error (loc, env, Expr_type_clash (trace, type_clash_context)))\n  | Tags (l1, l2) ->\n    raise (Typetexp.Error (loc, env, Typetexp.Variant_tags (l1, l2)))\n\n(* level at which to create the local type declarations *)\nlet newtype_level = ref None\nlet get_newtype_level () =\n  match !newtype_level with\n  | Some y -> y\n  | None -> assert false\n\nlet unify_pat_types_gadt loc env ty ty' =\n  let newtype_level =\n    match !newtype_level with\n    | None -> assert false\n    | Some x -> x\n  in\n  try unify_gadt ~newtype_level env ty ty' with\n  | Unify trace -> raise (Error (loc, !env, Pattern_type_clash trace))\n  | Tags (l1, l2) ->\n    raise (Typetexp.Error (loc, !env, Typetexp.Variant_tags (l1, l2)))\n  | Unification_recursive_abbrev trace ->\n    raise (Error (loc, !env, Recursive_local_constraint trace))\n\n(* Creating new conjunctive types is not allowed when typing patterns *)\n\nlet unify_pat env pat expected_ty =\n  unify_pat_types pat.pat_loc env pat.pat_type expected_ty\n\n(* make all Reither present in open variants *)\nlet finalize_variant pat =\n  match pat.pat_desc with\n  | Tpat_variant (tag, opat, r) -> (\n    let row =\n      match expand_head pat.pat_env pat.pat_type with\n      | {desc = Tvariant row} ->\n        r := row;\n        row_repr row\n      | _ -> assert false\n    in\n    match row_field tag row with\n    | Rabsent -> () (* assert false *)\n    | Reither (true, [], _, e) when not row.row_closed ->\n      set_row_field e (Rpresent None)\n    | Reither (false, ty :: tl, _, e) when not row.row_closed -> (\n      set_row_field e (Rpresent (Some ty));\n      match opat with\n      | None -> assert false\n      | Some pat -> List.iter (unify_pat pat.pat_env pat) (ty :: tl))\n    | Reither (c, _l, true, e) when not (row_fixed row) ->\n      set_row_field e (Reither (c, [], false, ref None))\n    | _ -> ()\n    (* Force check of well-formedness   WHY? 
*)\n    (* unify_pat pat.pat_env pat\n       (newty(Tvariant{row_fields=[]; row_more=newvar(); row_closed=false;\n                       row_bound=(); row_fixed=false; row_name=None})); *)\n    )\n  | _ -> ()\n\nlet rec iter_pattern f p =\n  f p;\n  iter_pattern_desc (iter_pattern f) p.pat_desc\n\nlet has_variants p =\n  try\n    iter_pattern\n      (function\n        | {pat_desc = Tpat_variant _} -> raise Exit\n        | _ -> ())\n      p;\n    false\n  with Exit -> true\n\n(* pattern environment *)\nlet pattern_variables =\n  ref\n    ([]\n      : (Ident.t * type_expr * string loc * Location.t * bool (* as-variable *))\n        list)\nlet pattern_force = ref ([] : (unit -> unit) list)\nlet pattern_scope = ref (None : Annot.ident option)\nlet allow_modules = ref false\nlet module_variables = ref ([] : (string loc * Location.t) list)\nlet reset_pattern scope allow =\n  pattern_variables := [];\n  pattern_force := [];\n  pattern_scope := scope;\n  allow_modules := allow;\n  module_variables := []\n\nlet enter_variable ?(is_module = false) ?(is_as_variable = false) loc name ty =\n  if\n    List.exists\n      (fun (id, _, _, _, _) -> Ident.name id = name.txt)\n      !pattern_variables\n  then raise (Error (loc, Env.empty, Multiply_bound_variable name.txt));\n  let id = Ident.create name.txt in\n  pattern_variables := (id, ty, name, loc, is_as_variable) :: !pattern_variables;\n  if is_module then (\n    (* Note: unpack patterns enter a variable of the same name *)\n    if not !allow_modules then\n      raise (Error (loc, Env.empty, Modules_not_allowed));\n    module_variables := (name, loc) :: !module_variables)\n  else\n    (* moved to genannot *)\n    may\n      (fun s -> Stypes.record (Stypes.An_ident (name.loc, name.txt, s)))\n      !pattern_scope;\n  id\n\nlet sort_pattern_variables vs =\n  List.sort\n    (fun (x, _, _, _, _) (y, _, _, _, _) ->\n      compare (Ident.name x) (Ident.name y))\n    vs\n\nlet enter_orpat_variables loc env p1_vs p2_vs =\n  (* unify_vars 
operate on sorted lists *)\n  let p1_vs = sort_pattern_variables p1_vs\n  and p2_vs = sort_pattern_variables p2_vs in\n\n  let rec unify_vars p1_vs p2_vs =\n    let vars vs = List.map (fun (x, _t, _, _l, _a) -> x) vs in\n    match (p1_vs, p2_vs) with\n    | (x1, t1, _, _l1, _a1) :: rem1, (x2, t2, _, _l2, _a2) :: rem2\n      when Ident.equal x1 x2 ->\n      if x1 == x2 then unify_vars rem1 rem2\n      else (\n        (try unify env t1 t2\n         with Unify trace ->\n           raise (Error (loc, env, Or_pattern_type_clash (x1, trace))));\n        (x2, x1) :: unify_vars rem1 rem2)\n    | [], [] -> []\n    | (x, _, _, _, _) :: _, [] -> raise (Error (loc, env, Orpat_vars (x, [])))\n    | [], (y, _, _, _, _) :: _ -> raise (Error (loc, env, Orpat_vars (y, [])))\n    | (x, _, _, _, _) :: _, (y, _, _, _, _) :: _ ->\n      let err =\n        if Ident.name x < Ident.name y then Orpat_vars (x, vars p2_vs)\n        else Orpat_vars (y, vars p1_vs)\n      in\n      raise (Error (loc, env, err))\n  in\n  unify_vars p1_vs p2_vs\n\nlet rec build_as_type env p =\n  match p.pat_desc with\n  | Tpat_alias (p1, _, _) -> build_as_type env p1\n  | Tpat_tuple pl ->\n    let tyl = List.map (build_as_type env) pl in\n    newty (Ttuple tyl)\n  | Tpat_construct (_, cstr, pl) ->\n    let keep = cstr.cstr_private = Private || cstr.cstr_existentials <> [] in\n    if keep then p.pat_type\n    else\n      let tyl = List.map (build_as_type env) pl in\n      let ty_args, ty_res = instance_constructor cstr in\n      List.iter2\n        (fun (p, ty) -> unify_pat env {p with pat_type = ty})\n        (List.combine pl tyl) ty_args;\n      ty_res\n  | Tpat_variant (l, p', _) ->\n    let ty = may_map (build_as_type env) p' in\n    newty\n      (Tvariant\n         {\n           row_fields = [(l, Rpresent ty)];\n           row_more = newvar ();\n           row_bound = ();\n           row_name = None;\n           row_fixed = false;\n           row_closed = false;\n         })\n  | Tpat_record (lpl, _) ->\n   
 let lbl = snd3 (List.hd lpl) in\n    if lbl.lbl_private = Private then p.pat_type\n    else\n      let ty = newvar () in\n      let ppl = List.map (fun (_, l, p) -> (l.lbl_pos, p)) lpl in\n      let do_label lbl =\n        let _, ty_arg, ty_res = instance_label false lbl in\n        unify_pat env {p with pat_type = ty} ty_res;\n        let refinable =\n          lbl.lbl_mut = Immutable\n          && List.mem_assoc lbl.lbl_pos ppl\n          &&\n          match (repr lbl.lbl_arg).desc with\n          | Tpoly _ -> false\n          | _ -> true\n        in\n        if refinable then\n          let arg = List.assoc lbl.lbl_pos ppl in\n          unify_pat env {arg with pat_type = build_as_type env arg} ty_arg\n        else\n          let _, ty_arg', ty_res' = instance_label false lbl in\n          unify env ty_arg ty_arg';\n          unify_pat env p ty_res'\n      in\n      Array.iter do_label lbl.lbl_all;\n      ty\n  | Tpat_or (p1, p2, row) -> (\n    match row with\n    | None ->\n      let ty1 = build_as_type env p1 and ty2 = build_as_type env p2 in\n      unify_pat env {p2 with pat_type = ty2} ty1;\n      ty1\n    | Some row ->\n      let row = row_repr row in\n      newty (Tvariant {row with row_closed = false; row_more = newvar ()}))\n  | Tpat_any | Tpat_var _ | Tpat_constant _ | Tpat_array _ | Tpat_lazy _ ->\n    p.pat_type\n\nlet build_or_pat env loc lid =\n  let path, decl = Typetexp.find_type env lid.loc lid.txt in\n  let tyl = List.map (fun _ -> newvar ()) decl.type_params in\n  let row0 =\n    let ty = expand_head env (newty (Tconstr (path, tyl, ref Mnil))) in\n    match ty.desc with\n    | Tvariant row when static_row row -> row\n    | _ -> raise (Error (lid.loc, env, Not_a_variant_type lid.txt))\n  in\n  let pats, fields =\n    List.fold_left\n      (fun (pats, fields) (l, f) ->\n        match row_field_repr f with\n        | Rpresent None ->\n          ((l, None) :: pats, (l, Reither (true, [], true, ref None)) :: fields)\n        | Rpresent (Some ty) 
->\n          ( ( l,\n              Some\n                {\n                  pat_desc = Tpat_any;\n                  pat_loc = Location.none;\n                  pat_env = env;\n                  pat_type = ty;\n                  pat_extra = [];\n                  pat_attributes = [];\n                } )\n            :: pats,\n            (l, Reither (false, [ty], true, ref None)) :: fields )\n        | _ -> (pats, fields))\n      ([], []) (row_repr row0).row_fields\n  in\n  let row =\n    {\n      row_fields = List.rev fields;\n      row_more = newvar ();\n      row_bound = ();\n      row_closed = false;\n      row_fixed = false;\n      row_name = Some (path, tyl);\n    }\n  in\n  let ty = newty (Tvariant row) in\n  let gloc = {loc with Location.loc_ghost = true} in\n  let row' = ref {row with row_more = newvar ()} in\n  let pats =\n    List.map\n      (fun (l, p) ->\n        {\n          pat_desc = Tpat_variant (l, p, row');\n          pat_loc = gloc;\n          pat_env = env;\n          pat_type = ty;\n          pat_extra = [];\n          pat_attributes = [];\n        })\n      pats\n  in\n  match pats with\n  | [] -> raise (Error (lid.loc, env, Not_a_variant_type lid.txt))\n  | pat :: pats ->\n    let r =\n      List.fold_left\n        (fun pat pat0 ->\n          {\n            pat_desc = Tpat_or (pat0, pat, Some row0);\n            pat_extra = [];\n            pat_loc = gloc;\n            pat_env = env;\n            pat_type = ty;\n            pat_attributes = [];\n          })\n        pat pats\n    in\n    (path, rp {r with pat_loc = loc}, ty)\n\n(* Type paths *)\n\nlet rec expand_path env p =\n  let decl = try Some (Env.find_type p env) with Not_found -> None in\n  match decl with\n  | Some {type_manifest = Some ty} -> (\n    match repr ty with\n    | {desc = Tconstr (p, _, _)} -> expand_path env p\n    | _ -> p (* PR#6394: recursive module may introduce incoherent manifest *))\n  | _ ->\n    let p' = Env.normalize_path None env p in\n    if Path.same p 
p' then p else expand_path env p'\n\nlet compare_type_path env tpath1 tpath2 =\n  Path.same (expand_path env tpath1) (expand_path env tpath2)\n\nlet fprintf = Format.fprintf\n\nlet rec bottom_aliases = function\n  | (_, one) :: (_, two) :: rest -> (\n    match bottom_aliases rest with\n    | Some types -> Some types\n    | None -> Some (one, two))\n  | _ -> None\n\nlet simple_conversions =\n  [\n    ((\"float\", \"int\"), \"Belt.Float.toInt\");\n    ((\"float\", \"string\"), \"Belt.Float.toString\");\n    ((\"int\", \"float\"), \"Belt.Int.toFloat\");\n    ((\"int\", \"string\"), \"Belt.Int.toString\");\n    ((\"string\", \"float\"), \"Belt.Float.fromString\");\n    ((\"string\", \"int\"), \"Belt.Int.fromString\");\n  ]\n\nlet print_simple_conversion ppf (actual, expected) =\n  try\n    let converter = List.assoc (actual, expected) simple_conversions in\n    fprintf ppf\n      \"@,\\\n       @,\\\n       @[<v 2>You can convert @{<info>%s@} to @{<info>%s@} with @{<info>%s@}.@]\"\n      actual expected converter\n  with Not_found -> ()\n\nlet print_simple_message ppf = function\n  | \"float\", \"int\" ->\n    fprintf ppf\n      \"@ If this is a literal, try a number without a trailing dot (e.g. \\\n       @{<info>20@}).\"\n  | \"int\", \"float\" ->\n    fprintf ppf\n      \"@ If this is a literal, try a number with a trailing dot (e.g. 
\\\n       @{<info>20.@}).\"\n  | _ -> ()\n\nlet show_extra_help ppf _env trace =\n  match bottom_aliases trace with\n  | Some\n      ( {Types.desc = Tconstr (actual_path, actual_args, _)},\n        {desc = Tconstr (expected_path, expexted_args, _)} ) -> (\n    match (actual_path, actual_args, expected_path, expexted_args) with\n    | Pident {name = actual_name}, [], Pident {name = expected_name}, [] ->\n      print_simple_conversion ppf (actual_name, expected_name);\n      print_simple_message ppf (actual_name, expected_name)\n    | _ -> ())\n  | _ -> ()\n\nlet rec collect_missing_arguments env type1 type2 =\n  match type1 with\n  (* why do we use Ctype.matches here? Please see https://github.com/rescript-lang/rescript-compiler/pull/2554 *)\n  | {Types.desc = Tarrow (label, argtype, typ, _)}\n    when Ctype.matches env typ type2 ->\n    Some [(label, argtype)]\n  | {desc = Tarrow (label, argtype, typ, _)} -> (\n    match collect_missing_arguments env typ type2 with\n    | Some res -> Some ((label, argtype) :: res)\n    | None -> None)\n  | t when Ast_uncurried.type_is_uncurried_fun t ->\n    let typ = Ast_uncurried.type_extract_uncurried_fun t in\n    collect_missing_arguments env typ type2\n  | _ -> None\n\nlet print_expr_type_clash ?type_clash_context env trace ppf =\n  (* this is the most frequent error. 
We should do whatever we can to provide\n      specific guidance to this generic error before giving up *)\n  let bottom_aliases_result = bottom_aliases trace in\n  let missing_arguments =\n    match bottom_aliases_result with\n    | Some (actual, expected) -> collect_missing_arguments env actual expected\n    | None -> assert false\n  in\n  let print_arguments =\n    Format.pp_print_list\n      ~pp_sep:(fun ppf _ -> fprintf ppf \",@ \")\n      (fun ppf (label, argtype) ->\n        match label with\n        | Asttypes.Nolabel -> fprintf ppf \"@[%a@]\" Printtyp.type_expr argtype\n        | Labelled label ->\n          fprintf ppf \"@[(~%s: %a)@]\" label Printtyp.type_expr argtype\n        | Optional label ->\n          fprintf ppf \"@[(?%s: %a)@]\" label Printtyp.type_expr argtype)\n  in\n  match missing_arguments with\n  | Some [single_argument] ->\n    (* btw, you can't say \"final arguments\". Intermediate labeled\n        arguments might be the ones missing *)\n    fprintf ppf \"@[@{<info>This call is missing an argument@} of type@ %a@]\"\n      print_arguments [single_argument]\n  | Some arguments ->\n    fprintf ppf \"@[<hv>@{<info>This call is missing arguments@} of type:@ %a@]\"\n      print_arguments arguments\n  | None ->\n    let missing_parameters =\n      match bottom_aliases_result with\n      | Some (actual, expected) -> collect_missing_arguments env expected actual\n      | None -> assert false\n    in\n    (match missing_parameters with\n    | Some [single_parameter] ->\n      fprintf ppf\n        \"@[This value might need to be @{<info>wrapped in a function@ that@ \\\n         takes@ an@ extra@ parameter@}@ of@ type@ %a@]@,\\\n         @,\"\n        print_arguments [single_parameter];\n      fprintf ppf \"@[@{<info>Here's the original error message@}@]@,\"\n    | Some arguments ->\n      fprintf ppf\n        \"@[This value seems to @{<info>need to be wrapped in a function that \\\n         takes extra@ arguments@}@ of@ type:@ @[<hv>%a@]@]@,\\\n     
    @,\"\n        print_arguments arguments;\n      fprintf ppf \"@[@{<info>Here's the original error message@}@]@,\"\n    | None -> ());\n\n    Printtyp.super_report_unification_error ppf env trace\n      (function\n        | ppf -> error_type_text ppf type_clash_context)\n      (function ppf -> error_expected_type_text ppf type_clash_context);\n    print_extra_type_clash_help ppf trace type_clash_context;\n    show_extra_help ppf env trace\n\nlet report_arity_mismatch ~arity_a ~arity_b ppf =\n  fprintf ppf \"This function expected @{<info>%s@} %s, but got @{<error>%s@}\"\n    arity_b\n    (if arity_b = \"1\" then \"argument\" else \"arguments\")\n    arity_a\n\n(* Records *)\nlet label_of_kind kind = if kind = \"record\" then \"field\" else \"constructor\"\n\nmodule NameChoice (Name : sig\n  type t\n  val type_kind : string\n  val get_name : t -> string\n  val get_type : t -> type_expr\n  val get_descrs : Env.type_descriptions -> t list\n  val unbound_name_error : Env.t -> Longident.t loc -> 'a\nend) =\nstruct\n  open Name\n\n  let get_type_path d =\n    match (repr (get_type d)).desc with\n    | Tconstr (p, _, _) -> p\n    | _ -> assert false\n\n  let lookup_from_type env tpath lid =\n    let descrs = get_descrs (Env.find_type_descrs tpath env) in\n    Env.mark_type_used env (Path.last tpath) (Env.find_type tpath env);\n    match lid.txt with\n    | Longident.Lident s -> (\n      try List.find (fun nd -> get_name nd = s) descrs\n      with Not_found ->\n        let names = List.map get_name descrs in\n        raise\n          (Error\n             ( lid.loc,\n               env,\n               Wrong_name (\"\", newvar (), type_kind, tpath, s, names) )))\n    | _ -> raise Not_found\n\n  let rec unique eq acc = function\n    | [] -> List.rev acc\n    | x :: rem ->\n      if List.exists (eq x) acc then unique eq acc rem\n      else unique eq (x :: acc) rem\n\n  let ambiguous_types env lbl others =\n    let tpath = get_type_path lbl in\n    let others = List.map 
(fun (lbl, _) -> get_type_path lbl) others in\n    let tpaths = unique (compare_type_path env) [tpath] others in\n    match tpaths with\n    | [_] -> []\n    | _ -> List.map Printtyp.string_of_path tpaths\n\n  let disambiguate_by_type env tpath lbls =\n    let check_type (lbl, _) =\n      let lbl_tpath = get_type_path lbl in\n      compare_type_path env tpath lbl_tpath\n    in\n    List.find check_type lbls\n\n  let disambiguate ?(warn = Location.prerr_warning) ?(check_lk = fun _ _ -> ())\n      ?scope lid env opath lbls =\n    let scope =\n      match scope with\n      | None -> lbls\n      | Some l -> l\n    in\n    let lbl =\n      match opath with\n      | None -> (\n        match lbls with\n        | [] -> unbound_name_error env lid\n        | (lbl, use) :: rest ->\n          use ();\n          let paths = ambiguous_types env lbl rest in\n          if paths <> [] then\n            warn lid.loc\n              (Warnings.Ambiguous_name ([Longident.last lid.txt], paths, false));\n          lbl)\n      | Some (tpath0, tpath) -> (\n        try\n          let lbl, use = disambiguate_by_type env tpath scope in\n          use ();\n          lbl\n        with Not_found -> (\n          try\n            let lbl = lookup_from_type env tpath lid in\n            check_lk tpath lbl;\n            lbl\n          with Not_found ->\n            if lbls = [] then unbound_name_error env lid\n            else\n              let tp = (tpath0, expand_path env tpath) in\n              let tpl =\n                List.map\n                  (fun (lbl, _) ->\n                    let tp0 = get_type_path lbl in\n                    let tp = expand_path env tp0 in\n                    (tp0, tp))\n                  lbls\n              in\n              raise\n                (Error\n                   ( lid.loc,\n                     env,\n                     Name_type_mismatch (type_kind, lid.txt, tp, tpl) ))))\n    in\n    lbl\nend\n\nlet wrap_disambiguate kind ty f x =\n  try f x\n  with 
Error (loc, env, Wrong_name (\"\", _, tk, tp, name, valid_names)) ->\n    raise (Error (loc, env, Wrong_name (kind, ty, tk, tp, name, valid_names)))\n\nmodule Label = NameChoice (struct\n  type t = label_description\n  let type_kind = \"record\"\n  let get_name lbl = lbl.lbl_name\n  let get_type lbl = lbl.lbl_res\n  let get_descrs = snd\n  let unbound_name_error = Typetexp.unbound_label_error\nend)\n\nlet disambiguate_label_by_ids keep closed ids labels =\n  let check_ids (lbl, _) =\n    let lbls = Hashtbl.create 8 in\n    Array.iter (fun lbl -> Hashtbl.add lbls lbl.lbl_name ()) lbl.lbl_all;\n    List.for_all (Hashtbl.mem lbls) ids\n  and check_closed (lbl, _) =\n    (not closed) || List.length ids = Array.length lbl.lbl_all\n  in\n  let labels' = Ext_list.filter labels check_ids in\n  if keep && labels' = [] then (false, labels)\n  else\n    let labels'' = Ext_list.filter labels' check_closed in\n    if keep && labels'' = [] then (false, labels') else (true, labels'')\n\n(* Only issue warnings once per record constructor/pattern *)\nlet disambiguate_lid_a_list loc closed env opath lid_a_list =\n  let ids = List.map (fun (lid, _) -> Longident.last lid.txt) lid_a_list in\n  let w_amb = ref [] in\n  let warn loc msg =\n    let open Warnings in\n    match msg with\n    | Ambiguous_name ([s], l, _) -> w_amb := (s, l) :: !w_amb\n    | _ -> Location.prerr_warning loc msg\n  in\n  let process_label lid =\n    (* Strategy for each field:\n       * collect all the labels in scope for that name\n       * if the type is known and principal, just eventually warn\n         if the real label was not in scope\n       * fail if there is no known type and no label found\n       * otherwise use other fields to reduce the list of candidates\n       * if there is no known type reduce it incrementally, so that\n         there is still at least one candidate (for error message)\n       * if the reduced list is valid, call Label.disambiguate\n    *)\n    let scope = 
Typetexp.find_all_labels env lid.loc lid.txt in\n    if opath = None && scope = [] then Typetexp.unbound_label_error env lid;\n    let ok, labels =\n      match opath with\n      | Some (_, _) -> (true, scope) (* disambiguate only checks scope *)\n      | _ -> disambiguate_label_by_ids (opath = None) closed ids scope\n    in\n    if ok then Label.disambiguate lid env opath labels ~warn ~scope\n    else fst (List.hd labels)\n    (* will fail later *)\n  in\n  let lbl_a_list =\n    List.map (fun (lid, a) -> (lid, process_label lid, a)) lid_a_list\n  in\n  (match List.rev !w_amb with\n  | (_, types) :: _ as amb ->\n    let paths =\n      List.map (fun (_, lbl, _) -> Label.get_type_path lbl) lbl_a_list\n    in\n    let path = List.hd paths in\n    if List.for_all (compare_type_path env path) (List.tl paths) then\n      Location.prerr_warning loc\n        (Warnings.Ambiguous_name (List.map fst amb, types, true))\n    else\n      List.iter\n        (fun (s, l) ->\n          Location.prerr_warning loc (Warnings.Ambiguous_name ([s], l, false)))\n        amb\n  | _ -> ());\n  lbl_a_list\n\nlet rec find_record_qual = function\n  | [] -> None\n  | ({txt = Longident.Ldot (modname, _)}, _) :: _ -> Some modname\n  | _ :: rest -> find_record_qual rest\n\nlet map_fold_cont f xs k =\n  List.fold_right\n    (fun x k ys -> f x (fun y -> k (y :: ys)))\n    xs\n    (fun ys -> k (List.rev ys))\n    []\n\nlet type_label_a_list ?labels loc closed env type_lbl_a opath lid_a_list k =\n  let lbl_a_list =\n    match (lid_a_list, labels) with\n    | ({txt = Longident.Lident s}, _) :: _, Some labels\n      when Hashtbl.mem labels s ->\n      (* Special case for rebuilt syntax trees *)\n      List.map\n        (function\n          | lid, a -> (\n            match lid.txt with\n            | Longident.Lident s -> (lid, Hashtbl.find labels s, a)\n            | _ -> assert false))\n        lid_a_list\n    | _ ->\n      let lid_a_list =\n        match find_record_qual lid_a_list with\n        | None 
-> lid_a_list\n        | Some modname ->\n          List.map\n            (fun ((lid, a) as lid_a) ->\n              match lid.txt with\n              | Longident.Lident s ->\n                ({lid with txt = Longident.Ldot (modname, s)}, a)\n              | _ -> lid_a)\n            lid_a_list\n      in\n      disambiguate_lid_a_list loc closed env opath lid_a_list\n  in\n  (* Invariant: records are sorted in the typed tree *)\n  let lbl_a_list =\n    List.sort\n      (fun (_, lbl1, _) (_, lbl2, _) -> compare lbl1.lbl_pos lbl2.lbl_pos)\n      lbl_a_list\n  in\n  map_fold_cont type_lbl_a lbl_a_list k\n\n(* Checks over the labels mentioned in a record pattern:\n   no duplicate definitions (error); properly closed (warning) *)\n\nlet check_recordpat_labels loc lbl_pat_list closed =\n  match lbl_pat_list with\n  | [] -> () (* should not happen *)\n  | (_, label1, _) :: _ ->\n    let all = label1.lbl_all in\n    let defined = Array.make (Array.length all) false in\n    let check_defined (_, label, _) =\n      if defined.(label.lbl_pos) then\n        raise (Error (loc, Env.empty, Label_multiply_defined label.lbl_name))\n      else defined.(label.lbl_pos) <- true\n    in\n    List.iter check_defined lbl_pat_list;\n    if\n      closed = Closed\n      && Warnings.is_active (Warnings.Non_closed_record_pattern \"\")\n    then (\n      let undefined = ref [] in\n      for i = 0 to Array.length all - 1 do\n        if not defined.(i) then undefined := all.(i).lbl_name :: !undefined\n      done;\n      if !undefined <> [] then\n        let u = String.concat \", \" (List.rev !undefined) in\n        Location.prerr_warning loc (Warnings.Non_closed_record_pattern u))\n\n(* Constructors *)\n\nmodule Constructor = NameChoice (struct\n  type t = constructor_description\n  let type_kind = \"variant\"\n  let get_name cstr = cstr.cstr_name\n  let get_type cstr = cstr.cstr_res\n  let get_descrs = fst\n  let unbound_name_error = Typetexp.unbound_constructor_error\nend)\n\n(* unification of 
a type with a tconstr with\n   freshly created arguments *)\nlet unify_head_only loc env ty constr =\n  let _, ty_res = instance_constructor constr in\n  match (repr ty_res).desc with\n  | Tconstr (p, args, m) ->\n    ty_res.desc <- Tconstr (p, List.map (fun _ -> newvar ()) args, m);\n    enforce_constraints env ty_res;\n    unify_pat_types loc env ty_res ty\n  | _ -> assert false\n\n(* Typing of patterns *)\n\n(* Remember current state for backtracking.\n   No variable information, as we only backtrack on\n   patterns without variables (cf. assert statements). *)\ntype state = {snapshot: Btype.snapshot; levels: Ctype.levels; env: Env.t}\nlet save_state env =\n  {snapshot = Btype.snapshot (); levels = Ctype.save_levels (); env = !env}\nlet set_state s env =\n  Btype.backtrack s.snapshot;\n  Ctype.set_levels s.levels;\n  env := s.env\n\n(* type_pat does not generate local constraints inside or patterns *)\ntype type_pat_mode =\n  | Normal\n  | Splitting_or (* splitting an or-pattern *)\n  | Inside_or (* inside a non-split or-pattern *)\n  | Split_or (* always split or-patterns *)\n\nexception Need_backtrack\n\n(* type_pat propagates the expected type as well as maps for\n   constructors and labels.\n   Unification may update the typing environment. 
*)\n(* constrs <> None => called from parmatch: backtrack on or-patterns\n   explode > 0 => explode Ppat_any for gadts *)\nlet rec type_pat ~constrs ~labels ~no_existentials ~mode ~explode ~env sp\n    expected_ty k =\n  Builtin_attributes.warning_scope sp.ppat_attributes (fun () ->\n      type_pat_aux ~constrs ~labels ~no_existentials ~mode ~explode ~env sp\n        expected_ty k)\n\nand type_pat_aux ~constrs ~labels ~no_existentials ~mode ~explode ~env sp\n    expected_ty k =\n  let mode' = if mode = Splitting_or then Normal else mode in\n  let type_pat ?(constrs = constrs) ?(labels = labels) ?(mode = mode')\n      ?(explode = explode) ?(env = env) =\n    type_pat ~constrs ~labels ~no_existentials ~mode ~explode ~env\n  in\n  let loc = sp.ppat_loc in\n  let rp k x : pattern = if constrs = None then k (rp x) else k x in\n  match sp.ppat_desc with\n  | Ppat_any ->\n    let k' d =\n      rp k\n        {\n          pat_desc = d;\n          pat_loc = loc;\n          pat_extra = [];\n          pat_type = expected_ty;\n          pat_attributes = sp.ppat_attributes;\n          pat_env = !env;\n        }\n    in\n    if explode > 0 then\n      let sp, constrs, labels = Parmatch.ppat_of_type !env expected_ty in\n      if sp.ppat_desc = Parsetree.Ppat_any then k' Tpat_any\n      else if mode = Inside_or then raise Need_backtrack\n      else\n        let explode =\n          match sp.ppat_desc with\n          | Parsetree.Ppat_or _ -> explode - 5\n          | _ -> explode - 1\n        in\n        type_pat ~constrs:(Some constrs) ~labels:(Some labels) ~explode sp\n          expected_ty k\n    else k' Tpat_any\n  | Ppat_var name ->\n    let id =\n      (* PR#7330 *)\n      if name.txt = \"*extension*\" then Ident.create name.txt\n      else enter_variable loc name expected_ty\n    in\n    rp k\n      {\n        pat_desc = Tpat_var (id, name);\n        pat_loc = loc;\n        pat_extra = [];\n        pat_type = expected_ty;\n        pat_attributes = sp.ppat_attributes;\n        
pat_env = !env;\n      }\n  | Ppat_unpack name ->\n    assert (constrs = None);\n    let id = enter_variable loc name expected_ty ~is_module:true in\n    rp k\n      {\n        pat_desc = Tpat_var (id, name);\n        pat_loc = sp.ppat_loc;\n        pat_extra = [(Tpat_unpack, loc, sp.ppat_attributes)];\n        pat_type = expected_ty;\n        pat_attributes = [];\n        pat_env = !env;\n      }\n  | Ppat_constraint\n      ( {ppat_desc = Ppat_var name; ppat_loc = lloc},\n        ({ptyp_desc = Ptyp_poly _} as sty) ) -> (\n    (* explicitly polymorphic type *)\n    assert (constrs = None);\n    let cty, force = Typetexp.transl_simple_type_delayed !env sty in\n    let ty = cty.ctyp_type in\n    unify_pat_types lloc !env ty expected_ty;\n    pattern_force := force :: !pattern_force;\n    match ty.desc with\n    | Tpoly (body, tyl) ->\n      begin_def ();\n      let _, ty' = instance_poly ~keep_names:true false tyl body in\n      end_def ();\n      generalize ty';\n      let id = enter_variable lloc name ty' in\n      rp k\n        {\n          pat_desc = Tpat_var (id, name);\n          pat_loc = lloc;\n          pat_extra = [(Tpat_constraint cty, loc, sp.ppat_attributes)];\n          pat_type = ty;\n          pat_attributes = [];\n          pat_env = !env;\n        }\n    | _ -> assert false)\n  | Ppat_alias (sq, name) ->\n    assert (constrs = None);\n    type_pat sq expected_ty (fun q ->\n        begin_def ();\n        let ty_var = build_as_type !env q in\n        end_def ();\n        generalize ty_var;\n        let id = enter_variable ~is_as_variable:true loc name ty_var in\n        rp k\n          {\n            pat_desc = Tpat_alias (q, id, name);\n            pat_loc = loc;\n            pat_extra = [];\n            pat_type = q.pat_type;\n            pat_attributes = sp.ppat_attributes;\n            pat_env = !env;\n          })\n  | Ppat_constant cst ->\n    let cst = constant_or_raise !env loc cst in\n    unify_pat_types loc !env (type_constant cst) 
expected_ty;\n    rp k\n      {\n        pat_desc = Tpat_constant cst;\n        pat_loc = loc;\n        pat_extra = [];\n        pat_type = expected_ty;\n        pat_attributes = sp.ppat_attributes;\n        pat_env = !env;\n      }\n  | Ppat_interval (Pconst_char c1, Pconst_char c2) ->\n    let open Ast_helper.Pat in\n    let gloc = {loc with Location.loc_ghost = true} in\n    let rec loop c1 c2 =\n      if c1 = c2 then constant ~loc:gloc (Pconst_char c1)\n      else\n        or_ ~loc:gloc (constant ~loc:gloc (Pconst_char c1)) (loop (c1 + 1) c2)\n    in\n    let p = if c1 <= c2 then loop c1 c2 else loop c2 c1 in\n    let p = {p with ppat_loc = loc} in\n    type_pat ~explode:0 p expected_ty k\n    (* TODO: record 'extra' to remember about interval *)\n  | Ppat_interval _ -> raise (Error (loc, !env, Invalid_interval))\n  | Ppat_tuple spl ->\n    assert (List.length spl >= 2);\n    let spl_ann = List.map (fun p -> (p, newvar ())) spl in\n    let ty = newty (Ttuple (List.map snd spl_ann)) in\n    unify_pat_types loc !env ty expected_ty;\n    map_fold_cont\n      (fun (p, t) -> type_pat p t)\n      spl_ann\n      (fun pl ->\n        rp k\n          {\n            pat_desc = Tpat_tuple pl;\n            pat_loc = loc;\n            pat_extra = [];\n            pat_type = expected_ty;\n            pat_attributes = sp.ppat_attributes;\n            pat_env = !env;\n          })\n  | Ppat_construct (lid, sarg) ->\n    let opath =\n      try\n        let p0, p, _ = extract_concrete_variant !env expected_ty in\n        Some (p0, p)\n      with Not_found -> None\n    in\n    let candidates =\n      match (lid.txt, constrs) with\n      | Longident.Lident s, Some constrs when Hashtbl.mem constrs s ->\n        [(Hashtbl.find constrs s, fun () -> ())]\n      | _ -> Typetexp.find_all_constructors !env lid.loc lid.txt\n    in\n    let check_lk tpath constr =\n      if constr.cstr_generalized then\n        raise\n          (Error\n             (lid.loc, !env, Unqualified_gadt_pattern 
(tpath, constr.cstr_name)))\n    in\n    let constr =\n      wrap_disambiguate \"This variant pattern is expected to have\" expected_ty\n        (Constructor.disambiguate lid !env opath ~check_lk)\n        candidates\n    in\n    if constr.cstr_generalized && constrs <> None && mode = Inside_or then\n      raise Need_backtrack;\n    Env.mark_constructor Env.Pattern !env (Longident.last lid.txt) constr;\n    Builtin_attributes.check_deprecated loc constr.cstr_attributes\n      constr.cstr_name;\n    if no_existentials && constr.cstr_existentials <> [] then\n      raise (Error (loc, !env, Unexpected_existential));\n    (* if constructor is gadt, we must verify that the expected type has the\n       correct head *)\n    if constr.cstr_generalized then unify_head_only loc !env expected_ty constr;\n    let sargs =\n      match sarg with\n      | None -> []\n      | Some {ppat_desc = Ppat_tuple spl}\n        when constr.cstr_arity > 1\n             || Builtin_attributes.explicit_arity sp.ppat_attributes ->\n        spl\n      | Some ({ppat_desc = Ppat_any} as sp) when constr.cstr_arity <> 1 ->\n        if constr.cstr_arity = 0 then\n          Location.prerr_warning sp.ppat_loc\n            Warnings.Wildcard_arg_to_constant_constr;\n        replicate_list sp constr.cstr_arity\n      | Some sp -> [sp]\n    in\n    (match sargs with\n    | [({ppat_desc = Ppat_constant _} as sp)]\n      when Builtin_attributes.warn_on_literal_pattern constr.cstr_attributes ->\n      Location.prerr_warning sp.ppat_loc Warnings.Fragile_literal_pattern\n    | _ -> ());\n    if List.length sargs <> constr.cstr_arity then\n      raise\n        (Error\n           ( loc,\n             !env,\n             Constructor_arity_mismatch\n               (lid.txt, constr.cstr_arity, List.length sargs) ));\n    let ty_args, ty_res =\n      instance_constructor ~in_pattern:(env, get_newtype_level ()) constr\n    in\n    (* PR#7214: do not use gadt unification for toplevel lets *)\n    if (not 
constr.cstr_generalized) || mode = Inside_or || no_existentials then\n      unify_pat_types loc !env ty_res expected_ty\n    else unify_pat_types_gadt loc env ty_res expected_ty;\n\n    let rec check_non_escaping p =\n      match p.ppat_desc with\n      | Ppat_or (p1, p2) ->\n        check_non_escaping p1;\n        check_non_escaping p2\n      | Ppat_alias (p, _) -> check_non_escaping p\n      | Ppat_constraint _ ->\n        raise (Error (p.ppat_loc, !env, Inlined_record_escape))\n      | _ -> ()\n    in\n    if constr.cstr_inlined <> None then List.iter check_non_escaping sargs;\n\n    map_fold_cont\n      (fun (p, t) -> type_pat p t)\n      (List.combine sargs ty_args)\n      (fun args ->\n        rp k\n          {\n            pat_desc = Tpat_construct (lid, constr, args);\n            pat_loc = loc;\n            pat_extra = [];\n            pat_type = expected_ty;\n            pat_attributes = sp.ppat_attributes;\n            pat_env = !env;\n          })\n  | Ppat_variant (l, sarg) -> (\n    let arg_type =\n      match sarg with\n      | None -> []\n      | Some _ -> [newvar ()]\n    in\n    let row =\n      {\n        row_fields = [(l, Reither (sarg = None, arg_type, true, ref None))];\n        row_bound = ();\n        row_closed = false;\n        row_more = newvar ();\n        row_fixed = false;\n        row_name = None;\n      }\n    in\n    (* PR#7404: allow some_other_tag blindly, as it would not unify with\n       the abstract row variable *)\n    if l = Parmatch.some_other_tag then assert (constrs <> None)\n    else unify_pat_types loc !env (newty (Tvariant row)) expected_ty;\n    let k arg =\n      rp k\n        {\n          pat_desc = Tpat_variant (l, arg, ref {row with row_more = newvar ()});\n          pat_loc = loc;\n          pat_extra = [];\n          pat_type = expected_ty;\n          pat_attributes = sp.ppat_attributes;\n          pat_env = !env;\n        }\n    in\n    (* PR#6235: propagate type information *)\n    match (sarg, arg_type) 
with\n    | Some p, [ty] -> type_pat p ty (fun p -> k (Some p))\n    | _ -> k None)\n  | Ppat_record (lid_sp_list, closed) ->\n    let opath, record_ty =\n      try\n        let p0, p, _, _ = extract_concrete_record !env expected_ty in\n        (Some (p0, p), expected_ty)\n      with Not_found -> (None, newvar ())\n    in\n    let process_optional_label (ld, pat) =\n      let exp_optional_attr =\n        check_optional_attr !env ld pat.ppat_attributes pat.ppat_loc\n      in\n      let is_from_pamatch =\n        match pat.ppat_desc with\n        | Ppat_construct ({txt = Lident s}, _) ->\n          String.length s >= 2 && s.[0] = '#' && s.[1] = '$'\n        | _ -> false\n      in\n      if label_is_optional ld && (not exp_optional_attr) && not is_from_pamatch\n      then\n        let lid = mknoloc Longident.(Ldot (Lident \"*predef*\", \"Some\")) in\n        Ast_helper.Pat.construct ~loc:pat.ppat_loc lid (Some pat)\n      else pat\n    in\n    let type_label_pat (label_lid, label, sarg) k =\n      let sarg = process_optional_label (label, sarg) in\n      begin_def ();\n      let vars, ty_arg, ty_res = instance_label false label in\n      if vars = [] then end_def ();\n      (try unify_pat_types loc !env ty_res record_ty\n       with Unify trace ->\n         raise\n           (Error (label_lid.loc, !env, Label_mismatch (label_lid.txt, trace))));\n      type_pat sarg ty_arg (fun arg ->\n          if vars <> [] then (\n            end_def ();\n            generalize ty_arg;\n            List.iter generalize vars;\n            let instantiated tv =\n              let tv = expand_head !env tv in\n              (not (is_Tvar tv)) || tv.level <> generic_level\n            in\n            if List.exists instantiated vars then\n              raise\n                (Error (label_lid.loc, !env, Polymorphic_label label_lid.txt)));\n          k (label_lid, label, arg))\n    in\n    let k' k lbl_pat_list =\n      check_recordpat_labels loc lbl_pat_list closed;\n      
unify_pat_types loc !env record_ty expected_ty;\n      rp k\n        {\n          pat_desc = Tpat_record (lbl_pat_list, closed);\n          pat_loc = loc;\n          pat_extra = [];\n          pat_type = expected_ty;\n          pat_attributes = sp.ppat_attributes;\n          pat_env = !env;\n        }\n    in\n    if constrs = None then\n      k\n        (wrap_disambiguate \"This record pattern is expected to have\" expected_ty\n           (type_label_a_list ?labels loc false !env type_label_pat opath\n              lid_sp_list)\n           (k' (fun x -> x)))\n    else\n      type_label_a_list ?labels loc false !env type_label_pat opath lid_sp_list\n        (k' k)\n  | Ppat_array spl ->\n    let ty_elt = newvar () in\n    unify_pat_types loc !env\n      (instance_def (Predef.type_array ty_elt))\n      expected_ty;\n    let spl_ann = List.map (fun p -> (p, newvar ())) spl in\n    map_fold_cont\n      (fun (p, _) -> type_pat p ty_elt)\n      spl_ann\n      (fun pl ->\n        rp k\n          {\n            pat_desc = Tpat_array pl;\n            pat_loc = loc;\n            pat_extra = [];\n            pat_type = expected_ty;\n            pat_attributes = sp.ppat_attributes;\n            pat_env = !env;\n          })\n  | Ppat_or (sp1, sp2) -> (\n    let state = save_state env in\n    match\n      if mode = Split_or || mode = Splitting_or then raise Need_backtrack;\n      let initial_pattern_variables = !pattern_variables in\n      let initial_module_variables = !module_variables in\n      let p1 =\n        try Some (type_pat ~mode:Inside_or sp1 expected_ty (fun x -> x))\n        with Need_backtrack -> None\n      in\n      let p1_variables = !pattern_variables in\n      let p1_module_variables = !module_variables in\n      pattern_variables := initial_pattern_variables;\n      module_variables := initial_module_variables;\n      let p2 =\n        try Some (type_pat ~mode:Inside_or sp2 expected_ty (fun x -> x))\n        with Need_backtrack -> None\n      in\n      let 
p2_variables = !pattern_variables in\n      match (p1, p2) with\n      | None, None -> raise Need_backtrack\n      | Some p, None | None, Some p -> p (* no variables in this case *)\n      | Some p1, Some p2 ->\n        let alpha_env =\n          enter_orpat_variables loc !env p1_variables p2_variables\n        in\n        pattern_variables := p1_variables;\n        module_variables := p1_module_variables;\n        {\n          pat_desc = Tpat_or (p1, alpha_pat alpha_env p2, None);\n          pat_loc = loc;\n          pat_extra = [];\n          pat_type = expected_ty;\n          pat_attributes = sp.ppat_attributes;\n          pat_env = !env;\n        }\n    with\n    | p -> rp k p\n    | exception Need_backtrack when mode <> Inside_or -> (\n      assert (constrs <> None);\n      set_state state env;\n      let mode = if mode = Split_or then mode else Splitting_or in\n      try type_pat ~mode sp1 expected_ty k\n      with Error _ ->\n        set_state state env;\n        type_pat ~mode sp2 expected_ty k))\n  | Ppat_lazy sp1 ->\n    let nv = newvar () in\n    unify_pat_types loc !env (instance_def (Predef.type_lazy_t nv)) expected_ty;\n    (* do not explode under lazy: PR#7421 *)\n    type_pat ~explode:0 sp1 nv (fun p1 ->\n        rp k\n          {\n            pat_desc = Tpat_lazy p1;\n            pat_loc = loc;\n            pat_extra = [];\n            pat_type = expected_ty;\n            pat_attributes = sp.ppat_attributes;\n            pat_env = !env;\n          })\n  | Ppat_constraint (sp, sty) ->\n    (* Separate when not already separated by !principal *)\n    let separate = true in\n    if separate then begin_def ();\n    let cty, force = Typetexp.transl_simple_type_delayed !env sty in\n    let ty = cty.ctyp_type in\n    let ty, expected_ty' =\n      if separate then (\n        end_def ();\n        generalize_structure ty;\n        (instance !env ty, instance !env ty))\n      else (ty, ty)\n    in\n    unify_pat_types loc !env ty expected_ty;\n    type_pat sp 
expected_ty' (fun p ->\n        (*Format.printf \"%a@.%a@.\"\n          Printtyp.raw_type_expr ty\n          Printtyp.raw_type_expr p.pat_type;*)\n        pattern_force := force :: !pattern_force;\n        let extra = (Tpat_constraint cty, loc, sp.ppat_attributes) in\n        let p =\n          if not separate then p\n          else\n            match p.pat_desc with\n            | Tpat_var (id, s) ->\n              {\n                p with\n                pat_type = ty;\n                pat_desc =\n                  Tpat_alias\n                    ({p with pat_desc = Tpat_any; pat_attributes = []}, id, s);\n                pat_extra = [extra];\n              }\n            | _ -> {p with pat_type = ty; pat_extra = extra :: p.pat_extra}\n        in\n        k p)\n  | Ppat_type lid ->\n    let path, p, ty = build_or_pat !env loc lid in\n    unify_pat_types loc !env ty expected_ty;\n    k\n      {\n        p with\n        pat_extra =\n          (Tpat_type (path, lid), loc, sp.ppat_attributes) :: p.pat_extra;\n      }\n  | Ppat_open (lid, p) ->\n    let path, new_env = !type_open Asttypes.Fresh !env sp.ppat_loc lid in\n    let new_env = ref new_env in\n    type_pat ~env:new_env p expected_ty (fun p ->\n        env := Env.copy_local !env ~from:!new_env;\n        k\n          {\n            p with\n            pat_extra =\n              (Tpat_open (path, lid, !new_env), loc, sp.ppat_attributes)\n              :: p.pat_extra;\n          })\n  | Ppat_exception _ ->\n    raise (Error (loc, !env, Exception_pattern_below_toplevel))\n  | Ppat_extension ext ->\n    raise (Error_forward (Builtin_attributes.error_of_extension ext))\n\nlet type_pat ?(allow_existentials = false) ?constrs ?labels ?(mode = Normal)\n    ?(explode = 0) ?(lev = get_current_level ()) env sp expected_ty =\n  newtype_level := Some lev;\n  try\n    let r =\n      type_pat ~no_existentials:(not allow_existentials) ~constrs ~labels ~mode\n        ~explode ~env sp expected_ty (fun x -> x)\n    in\n    
iter_pattern (fun p -> p.pat_env <- !env) r;\n    newtype_level := None;\n    r\n  with e ->\n    newtype_level := None;\n    raise e\n\n(* this function is passed to Partial.parmatch\n   to type check gadt nonexhaustiveness *)\nlet partial_pred ~lev ?mode ?explode env expected_ty constrs labels p =\n  let env = ref env in\n  let state = save_state env in\n  try\n    reset_pattern None true;\n    let typed_p =\n      Ctype.with_passive_variants\n        (type_pat ~allow_existentials:true ~lev ~constrs ~labels ?mode ?explode\n           env p)\n        expected_ty\n    in\n    set_state state env;\n    (* types are invalidated but we don't need them here *)\n    Some typed_p\n  with Error _ ->\n    set_state state env;\n    None\n\nlet check_partial ?(lev = get_current_level ()) env expected_ty loc cases =\n  let explode =\n    match cases with\n    | [_] -> 5\n    | _ -> 0\n  in\n  Parmatch.check_partial_gadt\n    (partial_pred ~lev ~explode env expected_ty)\n    loc cases\n\nlet check_unused ?(lev = get_current_level ()) env expected_ty cases =\n  Parmatch.check_unused\n    (fun refute constrs labels spat ->\n      match\n        partial_pred ~lev ~mode:Split_or ~explode:5 env expected_ty constrs\n          labels spat\n      with\n      | Some pat when refute ->\n        raise (Error (spat.ppat_loc, env, Unrefuted_pattern pat))\n      | r -> r)\n    cases\n\nlet add_pattern_variables ?check ?check_as env =\n  let pv = get_ref pattern_variables in\n  ( List.fold_right\n      (fun (id, ty, _name, loc, as_var) env ->\n        let check = if as_var then check_as else check in\n        Env.add_value ?check id\n          {\n            val_type = ty;\n            val_kind = Val_reg;\n            Types.val_loc = loc;\n            val_attributes = [];\n          }\n          env)\n      pv env,\n    get_ref module_variables )\n\nlet type_pattern ~lev env spat scope expected_ty =\n  reset_pattern scope true;\n  let new_env = ref env in\n  let pat = type_pat 
~allow_existentials:true ~lev new_env spat expected_ty in\n  let new_env, unpacks =\n    add_pattern_variables !new_env\n      ~check:(fun s -> Warnings.Unused_var_strict s)\n      ~check_as:(fun s -> Warnings.Unused_var s)\n  in\n  (pat, new_env, get_ref pattern_force, unpacks)\n\nlet type_pattern_list env spatl scope expected_tys allow =\n  reset_pattern scope allow;\n  let new_env = ref env in\n  let type_pat (attrs, pat) ty =\n    Builtin_attributes.warning_scope ~ppwarning:false attrs (fun () ->\n        type_pat new_env pat ty)\n  in\n  let patl = List.map2 type_pat spatl expected_tys in\n  let new_env, unpacks = add_pattern_variables !new_env in\n  (patl, new_env, get_ref pattern_force, unpacks)\n\nlet rec final_subexpression sexp =\n  match sexp.pexp_desc with\n  | Pexp_let (_, _, e)\n  | Pexp_sequence (_, e)\n  | Pexp_try (e, _)\n  | Pexp_ifthenelse (_, e, _)\n  | Pexp_match (_, {pc_rhs = e} :: _) ->\n    final_subexpression e\n  | _ -> sexp\n\n(* Generalization criterion for expressions *)\n\nlet rec is_nonexpansive exp =\n  List.exists\n    (function\n      | (({txt = \"internal.expansive\"}, _) : Parsetree.attribute) -> true\n      | _ -> false)\n    exp.exp_attributes\n  ||\n  match exp.exp_desc with\n  | Texp_ident (_, _, _) -> true\n  | Texp_constant _ -> true\n  | Texp_let (_rec_flag, pat_exp_list, body) ->\n    List.for_all (fun vb -> is_nonexpansive vb.vb_expr) pat_exp_list\n    && is_nonexpansive body\n  | Texp_function _ -> true\n  | Texp_apply (e, (_, None) :: el) ->\n    is_nonexpansive e && List.for_all is_nonexpansive_opt (List.map snd el)\n  | Texp_match (e, cases, [], _) ->\n    is_nonexpansive e\n    && List.for_all\n         (fun {c_lhs = _; c_guard; c_rhs} ->\n           is_nonexpansive_opt c_guard && is_nonexpansive c_rhs)\n         cases\n  | Texp_tuple el -> List.for_all is_nonexpansive el\n  | Texp_construct (_, _, el) -> List.for_all is_nonexpansive el\n  | Texp_variant (_, arg) -> is_nonexpansive_opt arg\n  | Texp_record {fields; 
extended_expression} ->\n    Array.for_all\n      (fun (lbl, definition) ->\n        match definition with\n        | Overridden (_, exp) -> lbl.lbl_mut = Immutable && is_nonexpansive exp\n        | Kept _ -> true)\n      fields\n    && is_nonexpansive_opt extended_expression\n  | Texp_field (exp, _, _) -> is_nonexpansive exp\n  | Texp_array [] -> !Config.unsafe_empty_array\n  | Texp_ifthenelse (_cond, ifso, ifnot) ->\n    is_nonexpansive ifso && is_nonexpansive_opt ifnot\n  | Texp_sequence (_e1, e2) -> is_nonexpansive e2 (* PR#4354 *)\n  | Texp_new _ -> assert false\n  (* Note: nonexpansive only means no _observable_ side effects *)\n  | Texp_lazy e -> is_nonexpansive e\n  | Texp_object () -> assert false\n  | Texp_letmodule (_, _, mexp, e) ->\n    is_nonexpansive_mod mexp && is_nonexpansive e\n  | Texp_pack mexp -> is_nonexpansive_mod mexp\n  (* Computations which raise exceptions are nonexpansive, since (raise e) is equivalent\n     to (raise e; diverge), and a nonexpansive \"diverge\" can be produced using lazy values\n     or the relaxed value restriction. 
See GPR#1142 *)\n  | Texp_assert exp -> is_nonexpansive exp\n  | Texp_apply\n      ( {\n          exp_desc =\n            Texp_ident\n              (_, _, {val_kind = Val_prim {Primitive.prim_name = \"%raise\"}});\n        },\n        [(Nolabel, Some e)] ) ->\n    is_nonexpansive e\n  | _ -> false\n\nand is_nonexpansive_mod mexp =\n  match mexp.mod_desc with\n  | Tmod_ident _ -> true\n  | Tmod_functor _ -> true\n  | Tmod_unpack (e, _) -> is_nonexpansive e\n  | Tmod_constraint (m, _, _, _) -> is_nonexpansive_mod m\n  | Tmod_structure str ->\n    List.for_all\n      (fun item ->\n        match item.str_desc with\n        | Tstr_eval _ | Tstr_primitive _ | Tstr_type _ | Tstr_modtype _\n        | Tstr_open _ | Tstr_class_type _ ->\n          true\n        | Tstr_value (_, pat_exp_list) ->\n          List.for_all (fun vb -> is_nonexpansive vb.vb_expr) pat_exp_list\n        | Tstr_module {mb_expr = m; _} | Tstr_include {incl_mod = m; _} ->\n          is_nonexpansive_mod m\n        | Tstr_recmodule id_mod_list ->\n          List.for_all\n            (fun {mb_expr = m; _} -> is_nonexpansive_mod m)\n            id_mod_list\n        | Tstr_exception {ext_kind = Text_decl _} ->\n          false (* true would be unsound *)\n        | Tstr_exception {ext_kind = Text_rebind _} -> true\n        | Tstr_typext te ->\n          List.for_all\n            (function\n              | {ext_kind = Text_decl _} -> false\n              | {ext_kind = Text_rebind _} -> true)\n            te.tyext_constructors\n        | Tstr_class _ -> false (* could be more precise *)\n        | Tstr_attribute _ -> true)\n      str.str_items\n  | Tmod_apply _ -> false\n\nand is_nonexpansive_opt = function\n  | None -> true\n  | Some e -> is_nonexpansive e\n\n(* Approximate the type of an expression, for better recursion *)\n\nlet rec approx_type env sty =\n  match sty.ptyp_desc with\n  | Ptyp_arrow (p, _, sty) ->\n    let ty1 = if is_optional p then type_option (newvar ()) else newvar () in\n    newty 
(Tarrow (p, ty1, approx_type env sty, Cok))\n  | Ptyp_tuple args -> newty (Ttuple (List.map (approx_type env) args))\n  | Ptyp_constr (lid, ctl) -> (\n    try\n      let path = Env.lookup_type lid.txt env in\n      let decl = Env.find_type path env in\n      if List.length ctl <> decl.type_arity then raise Not_found;\n      let tyl = List.map (approx_type env) ctl in\n      newconstr path tyl\n    with Not_found -> newvar ())\n  | Ptyp_poly (_, sty) -> approx_type env sty\n  | _ -> newvar ()\n\nlet rec type_approx env sexp =\n  match sexp.pexp_desc with\n  | Pexp_let (_, _, e) -> type_approx env e\n  | Pexp_fun (p, _, _, e) ->\n    let ty = if is_optional p then type_option (newvar ()) else newvar () in\n    newty (Tarrow (p, ty, type_approx env e, Cok))\n  | Pexp_function ({pc_rhs = e} :: _) ->\n    newty (Tarrow (Nolabel, newvar (), type_approx env e, Cok))\n  | Pexp_match (_, {pc_rhs = e} :: _) -> type_approx env e\n  | Pexp_try (e, _) -> type_approx env e\n  | Pexp_tuple l -> newty (Ttuple (List.map (type_approx env) l))\n  | Pexp_ifthenelse (_, e, _) -> type_approx env e\n  | Pexp_sequence (_, e) -> type_approx env e\n  | Pexp_constraint (e, sty) ->\n    let ty = type_approx env e in\n    let ty1 = approx_type env sty in\n    (try unify env ty ty1\n     with Unify trace ->\n       raise (Error (sexp.pexp_loc, env, Expr_type_clash (trace, None))));\n    ty1\n  | Pexp_coerce (e, sty1, sty2) ->\n    let approx_ty_opt = function\n      | None -> newvar ()\n      | Some sty -> approx_type env sty\n    in\n    let ty = type_approx env e\n    and ty1 = approx_ty_opt sty1\n    and ty2 = approx_type env sty2 in\n    (try unify env ty ty1\n     with Unify trace ->\n       raise (Error (sexp.pexp_loc, env, Expr_type_clash (trace, None))));\n    ty2\n  | _ -> newvar ()\n\n(* List labels in a function type, and whether return type is a variable *)\nlet rec list_labels_aux env visited ls ty_fun =\n  let ty = expand_head env ty_fun in\n  if List.memq ty visited then 
(List.rev ls, false)\n  else\n    match ty.desc with\n    | Tarrow (l, _, ty_res, _) ->\n      list_labels_aux env (ty :: visited) (l :: ls) ty_res\n    | _ -> (List.rev ls, is_Tvar ty)\n\nlet list_labels env ty =\n  wrap_trace_gadt_instances env (list_labels_aux env [] []) ty\n\n(* Check that all univars are safe in a type *)\nlet check_univars env expans kind exp ty_expected vars =\n  if expans && not (is_nonexpansive exp) then\n    generalize_expansive env exp.exp_type;\n  (* need to expand twice? cf. Ctype.unify2 *)\n  let vars = List.map (expand_head env) vars in\n  let vars = List.map (expand_head env) vars in\n  let vars' =\n    Ext_list.filter vars (fun t ->\n        let t = repr t in\n        generalize t;\n        match t.desc with\n        | Tvar name when t.level = generic_level ->\n          log_type t;\n          t.desc <- Tunivar name;\n          true\n        | _ -> false)\n  in\n  if List.length vars = List.length vars' then ()\n  else\n    let ty = newgenty (Tpoly (repr exp.exp_type, vars'))\n    and ty_expected = repr ty_expected in\n    raise\n      (Error\n         ( exp.exp_loc,\n           env,\n           Less_general (kind, [(ty, ty); (ty_expected, ty_expected)]) ))\n\n(* Check that a type is not a function *)\nlet check_application_result env statement exp =\n  let loc = exp.exp_loc in\n  match (expand_head env exp.exp_type).desc with\n  | Tarrow _ -> Location.prerr_warning exp.exp_loc Warnings.Partial_application\n  | Tvar _ -> ()\n  | Tconstr (p, _, _) when Path.same p Predef.path_unit -> ()\n  | _ -> if statement then Location.prerr_warning loc Warnings.Statement_type\n\n(* Check that a type is generalizable at some level *)\nlet generalizable level ty =\n  let rec check ty =\n    let ty = repr ty in\n    if ty.level < lowest_level then ()\n    else if ty.level <= level then raise Exit\n    else (\n      mark_type_node ty;\n      iter_type_expr check ty)\n  in\n  try\n    check ty;\n    unmark_type ty;\n    true\n  with Exit ->\n    
unmark_type ty;\n    false\n\n(* Hack to allow coercion of self. Will clean-up later. *)\nlet self_coercion = ref ([] : (Path.t * Location.t list ref) list)\n\n(* Helpers for packaged modules. *)\nlet create_package_type loc env (p, l) =\n  let s = !Typetexp.transl_modtype_longident loc env p in\n  let fields =\n    List.map\n      (fun (name, ct) -> (name, Typetexp.transl_simple_type env false ct))\n      l\n  in\n  let ty =\n    newty\n      (Tpackage\n         (s, List.map fst l, List.map (fun (_, cty) -> cty.ctyp_type) fields))\n  in\n  (s, fields, ty)\n\nlet wrap_unpacks sexp unpacks =\n  let open Ast_helper in\n  List.fold_left\n    (fun sexp (name, loc) ->\n      Exp.letmodule ~loc:sexp.pexp_loc\n        ~attrs:[(mknoloc \"#modulepat\", PStr [])]\n        name\n        (Mod.unpack ~loc\n           (Exp.ident ~loc:name.loc\n              (mkloc (Longident.Lident name.txt) name.loc)))\n        sexp)\n    sexp unpacks\n\n(* Helpers for type_cases *)\n\nlet contains_variant_either ty =\n  let rec loop ty =\n    let ty = repr ty in\n    if ty.level >= lowest_level then (\n      mark_type_node ty;\n      match ty.desc with\n      | Tvariant row ->\n        let row = row_repr row in\n        if not row.row_fixed then\n          List.iter\n            (fun (_, f) ->\n              match row_field_repr f with\n              | Reither _ -> raise Exit\n              | _ -> ())\n            row.row_fields;\n        iter_row loop row\n      | _ -> iter_type_expr loop ty)\n  in\n  try\n    loop ty;\n    unmark_type ty;\n    false\n  with Exit ->\n    unmark_type ty;\n    true\n\nlet iter_ppat f p =\n  match p.ppat_desc with\n  | Ppat_any | Ppat_var _ | Ppat_constant _ | Ppat_interval _ | Ppat_extension _\n  | Ppat_type _ | Ppat_unpack _ ->\n    ()\n  | Ppat_array pats -> List.iter f pats\n  | Ppat_or (p1, p2) ->\n    f p1;\n    f p2\n  | Ppat_variant (_, arg) | Ppat_construct (_, arg) -> may f arg\n  | Ppat_tuple lst -> List.iter f lst\n  | Ppat_exception p\n  | 
Ppat_alias (p, _)\n  | Ppat_open (_, p)\n  | Ppat_constraint (p, _)\n  | Ppat_lazy p ->\n    f p\n  | Ppat_record (args, _flag) -> List.iter (fun (_, p) -> f p) args\n\nlet contains_polymorphic_variant p =\n  let rec loop p =\n    match p.ppat_desc with\n    | Ppat_variant _ | Ppat_type _ -> raise Exit\n    | _ -> iter_ppat loop p\n  in\n  try\n    loop p;\n    false\n  with Exit -> true\n\nlet contains_gadt env p =\n  let rec loop env p =\n    match p.ppat_desc with\n    | Ppat_construct (lid, _) ->\n      (try\n         let cstrs = Env.lookup_all_constructors lid.txt env in\n         List.iter\n           (fun (cstr, _) -> if cstr.cstr_generalized then raise_notrace Exit)\n           cstrs\n       with Not_found -> ());\n      iter_ppat (loop env) p\n    | Ppat_open (lid, sub_p) ->\n      let _, new_env = !type_open Asttypes.Override env p.ppat_loc lid in\n      loop new_env sub_p\n    | _ -> iter_ppat (loop env) p\n  in\n  try\n    loop env p;\n    false\n  with Exit -> true\n\nlet check_absent_variant env =\n  iter_pattern (function\n    | {pat_desc = Tpat_variant (s, arg, row)} as pat ->\n      let row = row_repr !row in\n      if\n        List.exists\n          (fun (s', fi) -> s = s' && row_field_repr fi <> Rabsent)\n          row.row_fields\n        || ((not row.row_fixed) && not (static_row row))\n        (* same as Ctype.poly *)\n      then ()\n      else\n        let ty_arg =\n          match arg with\n          | None -> []\n          | Some p -> [correct_levels p.pat_type]\n        in\n        let row' =\n          {\n            row_fields = [(s, Reither (arg = None, ty_arg, true, ref None))];\n            row_more = newvar ();\n            row_bound = ();\n            row_closed = false;\n            row_fixed = false;\n            row_name = None;\n          }\n        in\n        (* Should fail *)\n        unify_pat env\n          {pat with pat_type = newty (Tvariant row')}\n          (correct_levels pat.pat_type)\n    | _ -> ())\n\n(* Duplicate 
types of values in the environment *)\n(* XXX Should we do something about global type variables too? *)\n\nlet duplicate_ident_types caselist env =\n  let caselist =\n    Ext_list.filter caselist (fun {pc_lhs} -> contains_gadt env pc_lhs)\n  in\n  Env.copy_types (all_idents_cases caselist) env\n\n(* type_label_a_list returns a list of labels sorted by lbl_pos *)\n(* note: check_duplicates would better be implemented in\n         type_label_a_list directly *)\nlet rec check_duplicates loc env = function\n  | (_, lbl1, _) :: (_, lbl2, _) :: _ when lbl1.lbl_pos = lbl2.lbl_pos ->\n    raise (Error (loc, env, Label_multiply_defined lbl1.lbl_name))\n  | _ :: rem -> check_duplicates loc env rem\n  | [] -> ()\n\n(* Getting proper location of already typed expressions.\n\n   Used to avoid confusing locations on type error messages in presence of\n   type constraints.\n   For example:\n\n       (* Before patch *)\n       # let x : string = (5 : int);;\n                           ^\n       (* After patch *)\n       # let x : string = (5 : int);;\n                          ^^^^^^^^^\n*)\nlet proper_exp_loc exp =\n  let rec aux = function\n    | [] -> exp.exp_loc\n    | ((Texp_constraint _ | Texp_coerce _), loc, _) :: _ -> loc\n    | _ :: rest -> aux rest\n  in\n  aux exp.exp_extra\n\nlet id_of_pattern : Typedtree.pattern -> Ident.t option =\n fun pat ->\n  match pat.pat_desc with\n  | Tpat_var (id, _) -> Some id\n  | Tpat_alias (_, id, _) -> Some id\n  | Tpat_construct\n      (_, _, [{pat_desc = Tpat_var (id, _) | Tpat_alias (_, id, _)}]) ->\n    Some (Ident.rename id)\n  | _ -> None\n(* To find reasonable names for let-bound and lambda-bound idents *)\n\nlet rec name_pattern default = function\n  | [] -> Ident.create default\n  | {c_lhs = p; _} :: rem -> (\n    match id_of_pattern p with\n    | None -> name_pattern default rem\n    | Some id -> id)\n\n(* Typing of expressions *)\n\nlet unify_exp ?type_clash_context env exp expected_ty =\n  let loc = proper_exp_loc exp in\n  
unify_exp_types ?type_clash_context loc env exp.exp_type expected_ty\n\nlet is_ignore funct env =\n  match funct.exp_desc with\n  | Texp_ident (_, _, {val_kind = Val_prim {Primitive.prim_name = \"%ignore\"}})\n    -> (\n    try\n      ignore (filter_arrow env (instance env funct.exp_type) Nolabel);\n      true\n    with Unify _ -> false)\n  | _ -> false\n\nlet not_identity = function\n  | Texp_ident (_, _, {val_kind = Val_prim {Primitive.prim_name = \"%identity\"}})\n    ->\n    false\n  | _ -> true\n\nlet rec lower_args env seen ty_fun =\n  let ty = expand_head env ty_fun in\n  if List.memq ty seen then ()\n  else\n    match ty.desc with\n    | Tarrow (_l, ty_arg, ty_fun, _com) ->\n      (try unify_var env (newvar ()) ty_arg with Unify _ -> assert false);\n      lower_args env (ty :: seen) ty_fun\n    | _ -> ()\n\nlet not_function env ty =\n  let ls, tvar = list_labels env ty in\n  ls = [] && not tvar\n\nlet check_might_be_component env ty_record =\n  match (expand_head env ty_record).desc with\n  | Tconstr (path, _, _) when path |> Path.last = \"props\" -> true\n  | _ -> false\n\ntype lazy_args =\n  (Asttypes.arg_label * (unit -> Typedtree.expression) option) list\n\ntype targs = (Asttypes.arg_label * Typedtree.expression option) list\nlet rec type_exp ?recarg env sexp =\n  (* We now delegate everything to type_expect *)\n  type_expect ?recarg env sexp (newvar ())\n\n(* Typing of an expression with an expected type.\n   This provide better error messages, and allows controlled\n   propagation of return type information.\n   In the principal case, [type_expected'] may be at generic_level.\n*)\n\nand type_expect ?type_clash_context ?in_function ?recarg env sexp ty_expected =\n  let previous_saved_types = Cmt_format.get_saved_types () in\n  let exp =\n    Builtin_attributes.warning_scope sexp.pexp_attributes (fun () ->\n        type_expect_ ?type_clash_context ?in_function ?recarg env sexp\n          ty_expected)\n  in\n  Cmt_format.set_saved_types\n    
(Cmt_format.Partial_expression exp :: previous_saved_types);\n  exp\n\nand type_expect_ ?type_clash_context ?in_function ?(recarg = Rejected) env sexp\n    ty_expected =\n  let loc = sexp.pexp_loc in\n  (* Record the expression type before unifying it with the expected type *)\n  let rue exp =\n    unify_exp ?type_clash_context env (re exp) (instance env ty_expected);\n    exp\n  in\n  let process_optional_label (id, ld, e) =\n    let exp_optional_attr =\n      check_optional_attr env ld e.pexp_attributes e.pexp_loc\n    in\n    if label_is_optional ld && not exp_optional_attr then\n      let lid = mknoloc Longident.(Ldot (Lident \"*predef*\", \"Some\")) in\n      let e = Ast_helper.Exp.construct ~loc:e.pexp_loc lid (Some e) in\n      (id, ld, e)\n    else (id, ld, e)\n  in\n  match sexp.pexp_desc with\n  | Pexp_ident lid ->\n    let path, desc = Typetexp.find_value env lid.loc lid.txt in\n    (if !Clflags.annotations then\n       let dloc = desc.Types.val_loc in\n       let annot =\n         if dloc.Location.loc_ghost then Annot.Iref_external\n         else Annot.Iref_internal dloc\n       in\n       let name = Path.name ~paren:Oprint.parenthesized_ident path in\n       Stypes.record (Stypes.An_ident (loc, name, annot)));\n    let is_recarg =\n      match (repr desc.val_type).desc with\n      | Tconstr (p, _, _) -> Path.is_constructor_typath p\n      | _ -> false\n    in\n\n    (match (is_recarg, recarg, (repr desc.val_type).desc) with\n    | _, Allowed, _ | true, Required, _ | false, Rejected, _ -> ()\n    | true, Rejected, _ | false, Required, (Tvar _ | Tconstr _) ->\n      raise (Error (loc, env, Inlined_record_escape))\n    | false, Required, _ -> () (* will fail later *));\n    rue\n      {\n        exp_desc = Texp_ident (path, lid, desc);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance env desc.val_type;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_constant cst ->\n    let cst = 
constant_or_raise env loc cst in\n    rue\n      {\n        exp_desc = Texp_constant cst;\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = type_constant cst;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_let\n      ( Nonrecursive,\n        [{pvb_pat = spat; pvb_expr = sval; pvb_attributes = []}],\n        sbody )\n    when contains_gadt env spat ->\n    (* TODO: allow non-empty attributes? *)\n    type_expect ?in_function env\n      {\n        sexp with\n        pexp_desc = Pexp_match (sval, [Ast_helper.Exp.case spat sbody]);\n      }\n      ty_expected\n  | Pexp_let (rec_flag, spat_sexp_list, sbody) ->\n    let scp =\n      match (sexp.pexp_attributes, rec_flag) with\n      | [({txt = \"#default\"}, _)], _ -> None\n      | _, Recursive -> Some (Annot.Idef loc)\n      | _, Nonrecursive -> Some (Annot.Idef sbody.pexp_loc)\n    in\n    let pat_exp_list, new_env, unpacks =\n      type_let env rec_flag spat_sexp_list scp true\n    in\n    let body = type_expect new_env (wrap_unpacks sbody unpacks) ty_expected in\n    let () =\n      if rec_flag = Recursive then\n        Rec_check.check_recursive_bindings pat_exp_list\n    in\n    re\n      {\n        exp_desc = Texp_let (rec_flag, pat_exp_list, body);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = body.exp_type;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_fun (l, Some default, spat, sbody) ->\n    assert (is_optional l);\n    (* default allowed only with optional argument *)\n    let open Ast_helper in\n    let default_loc = default.pexp_loc in\n    let scases =\n      [\n        Exp.case\n          (Pat.construct ~loc:default_loc\n             (mknoloc Longident.(Ldot (Lident \"*predef*\", \"Some\")))\n             (Some (Pat.var ~loc:default_loc (mknoloc \"*sth*\"))))\n          (Exp.ident ~loc:default_loc (mknoloc (Longident.Lident \"*sth*\")));\n        Exp.case\n          
(Pat.construct ~loc:default_loc\n             (mknoloc Longident.(Ldot (Lident \"*predef*\", \"None\")))\n             None)\n          default;\n      ]\n    in\n    let sloc =\n      {\n        Location.loc_start = spat.ppat_loc.Location.loc_start;\n        loc_end = default_loc.Location.loc_end;\n        loc_ghost = true;\n      }\n    in\n    let smatch =\n      Exp.match_ ~loc:sloc\n        (Exp.ident ~loc (mknoloc (Longident.Lident \"*opt*\")))\n        scases\n    in\n    let pat = Pat.var ~loc:sloc (mknoloc \"*opt*\") in\n    let body =\n      Exp.let_ ~loc Nonrecursive\n        ~attrs:[(mknoloc \"#default\", PStr [])]\n        [Vb.mk spat smatch]\n        sbody\n    in\n    type_function ?in_function loc sexp.pexp_attributes env ty_expected l\n      [Exp.case pat body]\n  | Pexp_fun (l, None, spat, sbody) ->\n    type_function ?in_function loc sexp.pexp_attributes env ty_expected l\n      [Ast_helper.Exp.case spat sbody]\n  | Pexp_function caselist ->\n    type_function ?in_function loc sexp.pexp_attributes env ty_expected Nolabel\n      caselist\n  | Pexp_apply (sfunct, sargs) ->\n    assert (sargs <> []);\n    begin_def ();\n    (* one more level for non-returning functions *)\n    let funct = type_exp env sfunct in\n    let ty = instance env funct.exp_type in\n    end_def ();\n    wrap_trace_gadt_instances env (lower_args env []) ty;\n    begin_def ();\n    let uncurried =\n      Ext_list.exists sexp.pexp_attributes (fun ({txt}, _) -> txt = \"res.uapp\")\n      && not\n         @@ Ext_list.exists sexp.pexp_attributes (fun ({txt}, _) ->\n                txt = \"res.partial\")\n      && (not @@ is_automatic_curried_application env funct)\n    in\n    let type_clash_context = type_clash_context_from_function sexp sfunct in\n    let args, ty_res, fully_applied =\n      type_application ?type_clash_context uncurried env funct sargs\n    in\n    end_def ();\n    unify_var env (newvar ()) funct.exp_type;\n\n    let mk_exp ?(loc = Location.none) exp_desc 
exp_type =\n      {\n        exp_desc;\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type;\n        exp_attributes = [];\n        exp_env = env;\n      }\n    in\n    let apply_internal name e =\n      let lid : Longident.t = Ldot (Ldot (Lident \"Js\", \"Internal\"), name) in\n      let path, desc = Env.lookup_value lid env in\n      let id =\n        mk_exp\n          (Texp_ident (path, {txt = lid; loc = Location.none}, desc))\n          desc.val_type\n      in\n      mk_exp ~loc:e.exp_loc (Texp_apply (id, [(Nolabel, Some e)])) e.exp_type\n    in\n\n    let mk_apply funct args =\n      rue\n        {\n          exp_desc = Texp_apply (funct, args);\n          exp_loc = loc;\n          exp_extra = [];\n          exp_type = ty_res;\n          exp_attributes = sexp.pexp_attributes;\n          exp_env = env;\n        }\n    in\n\n    let is_primitive =\n      match funct.exp_desc with\n      | Texp_ident (_, _, {val_kind = Val_prim _}) -> true\n      | _ -> false\n    in\n\n    if fully_applied && not is_primitive then\n      rue\n        (apply_internal \"opaqueFullApply\"\n           (mk_apply (apply_internal \"opaque\" funct) args))\n    else rue (mk_apply funct args)\n  | Pexp_match (sarg, caselist) ->\n    begin_def ();\n    let arg = type_exp env sarg in\n    end_def ();\n    if not (is_nonexpansive arg) then generalize_expansive env arg.exp_type;\n    generalize arg.exp_type;\n    let rec split_cases vc ec = function\n      | [] -> (List.rev vc, List.rev ec)\n      | ({pc_lhs = {ppat_desc = Ppat_exception p}} as c) :: rest ->\n        split_cases vc ({c with pc_lhs = p} :: ec) rest\n      | c :: rest -> split_cases (c :: vc) ec rest\n    in\n    let val_caselist, exn_caselist = split_cases [] [] caselist in\n    if val_caselist = [] && exn_caselist <> [] then\n      raise (Error (loc, env, No_value_clauses));\n    (* Note: val_caselist = [] and exn_caselist = [], i.e. 
a fully\n       empty pattern matching can be generated by Camlp4 with its\n       revised syntax.  Let's accept it for backward compatibility. *)\n    let val_cases, partial =\n      type_cases ~root_type_clash_context:Switch env arg.exp_type ty_expected\n        true loc val_caselist\n    in\n    let exn_cases, _ =\n      type_cases ~root_type_clash_context:Switch env Predef.type_exn ty_expected\n        false loc exn_caselist\n    in\n    re\n      {\n        exp_desc = Texp_match (arg, val_cases, exn_cases, partial);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance env ty_expected;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_try (sbody, caselist) ->\n    let body = type_expect env sbody ty_expected in\n    let cases, _ =\n      type_cases env Predef.type_exn ty_expected false loc caselist\n    in\n    re\n      {\n        exp_desc = Texp_try (body, cases);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = body.exp_type;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_tuple sexpl ->\n    assert (List.length sexpl >= 2);\n    let subtypes = List.map (fun _ -> newgenvar ()) sexpl in\n    let to_unify = newgenty (Ttuple subtypes) in\n    unify_exp_types loc env to_unify ty_expected;\n    let expl =\n      List.map2 (fun body ty -> type_expect env body ty) sexpl subtypes\n    in\n    re\n      {\n        exp_desc = Texp_tuple expl;\n        exp_loc = loc;\n        exp_extra = [];\n        (* Keep sharing *)\n        exp_type = newty (Ttuple (List.map (fun e -> e.exp_type) expl));\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_construct (({txt = Lident \"Function$\"} as lid), sarg) ->\n    let state = Warnings.backup () in\n    let arity = Ast_uncurried.attributes_to_arity sexp.pexp_attributes in\n    let uncurried_typ =\n      Ast_uncurried.make_uncurried_type ~env ~arity (newvar ())\n   
 in\n    unify_exp_types loc env uncurried_typ ty_expected;\n    (* Disable Unerasable_optional_argument for uncurried functions *)\n    let unerasable_optional_argument =\n      Warnings.number Unerasable_optional_argument\n    in\n    Warnings.parse_options false\n      (\"-\" ^ string_of_int unerasable_optional_argument);\n    let exp =\n      type_construct env loc lid sarg uncurried_typ sexp.pexp_attributes\n    in\n    Warnings.restore state;\n    exp\n  | Pexp_construct (lid, sarg) ->\n    type_construct env loc lid sarg ty_expected sexp.pexp_attributes\n  | Pexp_variant (l, sarg) -> (\n    (* Keep sharing *)\n    let ty_expected0 = instance env ty_expected in\n    try\n      match\n        (sarg, expand_head env ty_expected, expand_head env ty_expected0)\n      with\n      | Some sarg, {desc = Tvariant row}, {desc = Tvariant row0} -> (\n        let row = row_repr row in\n        match\n          ( row_field_repr (List.assoc l row.row_fields),\n            row_field_repr (List.assoc l row0.row_fields) )\n        with\n        | Rpresent (Some ty), Rpresent (Some ty0) ->\n          let arg = type_argument env sarg ty ty0 in\n          re\n            {\n              exp_desc = Texp_variant (l, Some arg);\n              exp_loc = loc;\n              exp_extra = [];\n              exp_type = ty_expected0;\n              exp_attributes = sexp.pexp_attributes;\n              exp_env = env;\n            }\n        | _ -> raise Not_found)\n      | _ -> raise Not_found\n    with Not_found ->\n      let arg = may_map (type_exp env) sarg in\n      let arg_type = may_map (fun arg -> arg.exp_type) arg in\n      rue\n        {\n          exp_desc = Texp_variant (l, arg);\n          exp_loc = loc;\n          exp_extra = [];\n          exp_type =\n            newty\n              (Tvariant\n                 {\n                   row_fields = [(l, Rpresent arg_type)];\n                   row_more = newvar ();\n                   row_bound = ();\n                   
row_closed = false;\n                   row_fixed = false;\n                   row_name = None;\n                 });\n          exp_attributes = sexp.pexp_attributes;\n          exp_env = env;\n        })\n  | Pexp_record (lid_sexp_list, None) ->\n    let ty_record, opath, fields, repr_opt =\n      match extract_concrete_record env ty_expected with\n      | p0, p, fields, repr ->\n        (* XXX level may be wrong *)\n        (ty_expected, Some (p0, p), fields, Some repr)\n      | exception Not_found -> (newvar (), None, [], None)\n    in\n\n    let lbl_exp_list =\n      wrap_disambiguate \"This record expression is expected to have\" ty_record\n        (type_label_a_list loc true env\n           (fun e k ->\n             k\n               (type_label_exp true env loc ty_record (process_optional_label e)))\n           opath lid_sexp_list)\n        (fun x -> x)\n    in\n    unify_exp_types loc env ty_record (instance env ty_expected);\n    check_duplicates loc env lbl_exp_list;\n    let label_descriptions, representation =\n      match (lbl_exp_list, repr_opt) with\n      | ( (_, {lbl_all = label_descriptions; lbl_repres = representation}, _)\n          :: _,\n          _ ) ->\n        (label_descriptions, representation)\n      | [], Some representation when lid_sexp_list = [] ->\n        let optional_labels =\n          match representation with\n          | Record_optional_labels optional_labels -> optional_labels\n          | Record_inlined {optional_labels} -> optional_labels\n          | _ -> []\n        in\n        let filter_missing (ld : Types.label_declaration) =\n          let name = Ident.name ld.ld_id in\n          if List.mem name optional_labels then None else Some name\n        in\n        let labels_missing = fields |> List.filter_map filter_missing in\n        (if labels_missing <> [] then\n           let might_be_component = check_might_be_component env ty_record in\n           raise\n             (Error\n                (loc, env, Labels_missing 
(labels_missing, might_be_component))));\n        ([||], representation)\n      | [], _ ->\n        if fields = [] && repr_opt <> None then ([||], Record_optional_labels [])\n        else raise (Error (loc, env, Empty_record_literal))\n    in\n    let labels_missing = ref [] in\n    let label_definitions =\n      let matching_label lbl =\n        List.find (fun (_, lbl', _) -> lbl'.lbl_pos = lbl.lbl_pos) lbl_exp_list\n      in\n      Array.map\n        (fun lbl ->\n          match matching_label lbl with\n          | lid, _lbl, lbl_exp -> Overridden (lid, lbl_exp)\n          | exception Not_found ->\n            if not (label_is_optional lbl) then\n              labels_missing := lbl.lbl_name :: !labels_missing;\n            Overridden\n              ({loc; txt = Lident lbl.lbl_name}, option_none lbl.lbl_arg loc))\n        label_descriptions\n    in\n    (if !labels_missing <> [] then\n       let might_be_component = check_might_be_component env ty_record in\n       raise\n         (Error\n            ( loc,\n              env,\n              Labels_missing (List.rev !labels_missing, might_be_component) )));\n    let fields =\n      Array.map2\n        (fun descr def -> (descr, def))\n        label_descriptions label_definitions\n    in\n    re\n      {\n        exp_desc =\n          Texp_record {fields; representation; extended_expression = None};\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance env ty_expected;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_record (lid_sexp_list, Some sexp) ->\n    assert (lid_sexp_list <> []);\n    let exp = type_exp ~recarg env sexp in\n    let ty_record, opath =\n      let get_path ty =\n        try\n          let p0, p, _, _ = extract_concrete_record env ty in\n          (* XXX level may be wrong *)\n          Some (p0, p)\n        with Not_found -> None\n      in\n      match get_path ty_expected with\n      | None -> (\n        match get_path 
exp.exp_type with\n        | None -> (newvar (), None)\n        | Some (_, p') as op ->\n          let decl = Env.find_type p' env in\n          begin_def ();\n          let ty = newconstr p' (instance_list env decl.type_params) in\n          end_def ();\n          generalize_structure ty;\n          (ty, op))\n      | op -> (ty_expected, op)\n    in\n    let closed = false in\n    let lbl_exp_list =\n      wrap_disambiguate \"This record expression is expected to have\" ty_record\n        (type_label_a_list loc closed env\n           (fun e k ->\n             k\n               (type_label_exp true env loc ty_record (process_optional_label e)))\n           opath lid_sexp_list)\n        (fun x -> x)\n    in\n    unify_exp_types loc env ty_record (instance env ty_expected);\n    check_duplicates loc env lbl_exp_list;\n    let opt_exp, label_definitions =\n      let _lid, lbl, _lbl_exp = List.hd lbl_exp_list in\n      let matching_label lbl =\n        List.find (fun (_, lbl', _) -> lbl'.lbl_pos = lbl.lbl_pos) lbl_exp_list\n      in\n      let ty_exp = instance env exp.exp_type in\n      let unify_kept lbl =\n        let _, ty_arg1, ty_res1 = instance_label false lbl in\n        unify_exp_types exp.exp_loc env ty_exp ty_res1;\n        match matching_label lbl with\n        | lid, _lbl, lbl_exp ->\n          (* do not connect result types for overridden labels *)\n          Overridden (lid, lbl_exp)\n        | exception Not_found ->\n          let _, ty_arg2, ty_res2 = instance_label false lbl in\n          unify_exp_types loc env ty_arg1 ty_arg2;\n          unify_exp_types loc env (instance env ty_expected) ty_res2;\n          Kept ty_arg1\n      in\n      let label_definitions = Array.map unify_kept lbl.lbl_all in\n      (Some {exp with exp_type = ty_exp}, label_definitions)\n    in\n    let num_fields =\n      match lbl_exp_list with\n      | [] -> assert false\n      | (_, lbl, _) :: _ -> Array.length lbl.lbl_all\n    in\n    let opt_exp =\n      if List.length 
lid_sexp_list = num_fields then (\n        Location.prerr_warning loc Warnings.Useless_record_with;\n        None)\n      else opt_exp\n    in\n    let label_descriptions, representation =\n      let _, {lbl_all; lbl_repres}, _ = List.hd lbl_exp_list in\n      (lbl_all, lbl_repres)\n    in\n    let fields =\n      Array.map2\n        (fun descr def -> (descr, def))\n        label_descriptions label_definitions\n    in\n    re\n      {\n        exp_desc =\n          Texp_record {fields; representation; extended_expression = opt_exp};\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance env ty_expected;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_field (srecord, lid) ->\n    let record, label, _ = type_label_access env srecord lid in\n    let _, ty_arg, ty_res = instance_label false label in\n    unify_exp env record ty_res;\n    rue\n      {\n        exp_desc = Texp_field (record, lid, label);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = ty_arg;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_setfield (srecord, lid, snewval) ->\n    let record, label, opath = type_label_access env srecord lid in\n    let ty_record = if opath = None then newvar () else record.exp_type in\n    let label_loc, label, newval =\n      type_label_exp ~type_clash_context:SetRecordField false env loc ty_record\n        (lid, label, snewval)\n    in\n    unify_exp env record ty_record;\n    if label.lbl_mut = Immutable then\n      raise (Error (loc, env, Label_not_mutable lid.txt));\n    Builtin_attributes.check_deprecated_mutable lid.loc label.lbl_attributes\n      (Longident.last lid.txt);\n    rue\n      {\n        exp_desc = Texp_setfield (record, label_loc, label, newval);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance_def Predef.type_unit;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      
}\n  | Pexp_array sargl ->\n    let ty = newgenvar () in\n    let to_unify = Predef.type_array ty in\n    unify_exp_types loc env to_unify ty_expected;\n    let argl =\n      List.map\n        (fun sarg -> type_expect ~type_clash_context:ArrayValue env sarg ty)\n        sargl\n    in\n    re\n      {\n        exp_desc = Texp_array argl;\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance env ty_expected;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_ifthenelse (scond, sifso, sifnot) -> (\n    let cond =\n      type_expect ~type_clash_context:IfCondition env scond Predef.type_bool\n    in\n    match sifnot with\n    | None ->\n      let ifso =\n        type_expect ~type_clash_context:IfReturn env sifso Predef.type_unit\n      in\n      rue\n        {\n          exp_desc = Texp_ifthenelse (cond, ifso, None);\n          exp_loc = loc;\n          exp_extra = [];\n          exp_type = ifso.exp_type;\n          exp_attributes = sexp.pexp_attributes;\n          exp_env = env;\n        }\n    | Some sifnot ->\n      let ifso =\n        type_expect ~type_clash_context:IfReturn env sifso ty_expected\n      in\n      let ifnot =\n        type_expect ~type_clash_context:IfReturn env sifnot ty_expected\n      in\n      (* Keep sharing *)\n      unify_exp ~type_clash_context:IfReturn env ifnot ifso.exp_type;\n      re\n        {\n          exp_desc = Texp_ifthenelse (cond, ifso, Some ifnot);\n          exp_loc = loc;\n          exp_extra = [];\n          exp_type = ifso.exp_type;\n          exp_attributes = sexp.pexp_attributes;\n          exp_env = env;\n        })\n  | Pexp_sequence (sexp1, sexp2) ->\n    let exp1 = type_statement env sexp1 in\n    let exp2 = type_expect env sexp2 ty_expected in\n    re\n      {\n        exp_desc = Texp_sequence (exp1, exp2);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = exp2.exp_type;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = 
env;\n      }\n  | Pexp_while (scond, sbody) ->\n    let cond = type_expect env scond Predef.type_bool in\n    let body = type_statement env sbody in\n    rue\n      {\n        exp_desc = Texp_while (cond, body);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance_def Predef.type_unit;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_for (param, slow, shigh, dir, sbody) ->\n    let low = type_expect env slow Predef.type_int in\n    let high = type_expect env shigh Predef.type_int in\n    let id, new_env =\n      match param.ppat_desc with\n      | Ppat_any -> (Ident.create \"_for\", env)\n      | Ppat_var {txt} ->\n        Env.enter_value txt\n          {\n            val_type = instance_def Predef.type_int;\n            val_attributes = [];\n            val_kind = Val_reg;\n            Types.val_loc = loc;\n          }\n          env\n          ~check:(fun s -> Warnings.Unused_for_index s)\n      | _ -> raise (Error (param.ppat_loc, env, Invalid_for_loop_index))\n    in\n    let body = type_statement new_env sbody in\n    rue\n      {\n        exp_desc = Texp_for (id, param, low, high, dir, body);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance_def Predef.type_unit;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_constraint (sarg, sty) ->\n    let separate = true in\n    (* always separate, 1% slowdown for lablgtk *)\n    if separate then begin_def ();\n    let cty = Typetexp.transl_simple_type env false sty in\n    let ty = cty.ctyp_type in\n    let arg, ty' =\n      if separate then (\n        end_def ();\n        generalize_structure ty;\n        (type_argument env sarg ty (instance env ty), instance env ty))\n      else (type_argument env sarg ty ty, ty)\n    in\n    rue\n      {\n        exp_desc = arg.exp_desc;\n        exp_loc = arg.exp_loc;\n        exp_type = ty';\n        exp_attributes = arg.exp_attributes;\n       
 exp_env = env;\n        exp_extra =\n          (Texp_constraint cty, loc, sexp.pexp_attributes) :: arg.exp_extra;\n      }\n  | Pexp_coerce (sarg, sty, sty') ->\n    let separate = true in\n    (* always separate, 1% slowdown for lablgtk *)\n    (* Also see PR#7199 for a problem with the following:\n       let separate =  Env.has_local_constraints env in*)\n    let arg, ty', cty, cty' =\n      match sty with\n      | None ->\n        let cty', force = Typetexp.transl_simple_type_delayed env sty' in\n        let ty' = cty'.ctyp_type in\n        if separate then begin_def ();\n        let arg = type_exp env sarg in\n        let gen =\n          if separate then (\n            end_def ();\n            let tv = newvar () in\n            let gen = generalizable tv.level arg.exp_type in\n            (try unify_var env tv arg.exp_type\n             with Unify trace ->\n               raise\n                 (Error\n                    ( arg.exp_loc,\n                      env,\n                      Expr_type_clash (trace, type_clash_context) )));\n            gen)\n          else true\n        in\n        (match (arg.exp_desc, !self_coercion, (repr ty').desc) with\n        | _\n          when free_variables ~env arg.exp_type = []\n               && free_variables ~env ty' = [] -> (\n          if\n            (not gen)\n            &&\n            (* first try a single coercion *)\n            let snap = snapshot () in\n            let ty, _b = enlarge_type env ty' in\n            try\n              force ();\n              Ctype.unify env arg.exp_type ty;\n              true\n            with Unify _ ->\n              backtrack snap;\n              false\n          then ()\n          else\n            try\n              let force' = subtype env arg.exp_type ty' in\n              force ();\n              force' ()\n            with Subtype (tr1, tr2) ->\n              (* prerr_endline \"coercion failed\"; *)\n              raise (Error (loc, env, Not_subtype (tr1, 
tr2))))\n        | _ -> (\n          let ty, b = enlarge_type env ty' in\n          force ();\n          try Ctype.unify env arg.exp_type ty\n          with Unify trace ->\n            raise\n              (Error\n                 ( sarg.pexp_loc,\n                   env,\n                   Coercion_failure (ty', full_expand env ty', trace, b) ))));\n        (arg, ty', None, cty')\n      | Some sty ->\n        if separate then begin_def ();\n        let cty, force = Typetexp.transl_simple_type_delayed env sty\n        and cty', force' = Typetexp.transl_simple_type_delayed env sty' in\n        let ty = cty.ctyp_type in\n        let ty' = cty'.ctyp_type in\n        (try\n           let force'' = subtype env ty ty' in\n           force ();\n           force' ();\n           force'' ()\n         with Subtype (tr1, tr2) ->\n           raise (Error (loc, env, Not_subtype (tr1, tr2))));\n        if separate then (\n          end_def ();\n          generalize_structure ty;\n          generalize_structure ty';\n          ( type_argument env sarg ty (instance env ty),\n            instance env ty',\n            Some cty,\n            cty' ))\n        else (type_argument env sarg ty ty, ty', Some cty, cty')\n    in\n    rue\n      {\n        exp_desc = arg.exp_desc;\n        exp_loc = arg.exp_loc;\n        exp_type = ty';\n        exp_attributes = arg.exp_attributes;\n        exp_env = env;\n        exp_extra =\n          (Texp_coerce (cty, cty'), loc, sexp.pexp_attributes) :: arg.exp_extra;\n      }\n  | Pexp_send (e, {txt = met}) -> (\n    let obj = type_exp env e in\n    let obj_meths = ref None in\n    try\n      let meth, exp, typ =\n        match obj.exp_desc with\n        | _ -> (Tmeth_name met, None, filter_method env met Public obj.exp_type)\n      in\n      let typ =\n        match repr typ with\n        | {desc = Tpoly (ty, [])} -> instance env ty\n        | {desc = Tpoly (ty, tl); level = _} -> snd (instance_poly false tl ty)\n        | {desc = Tvar _} as ty ->\n 
         let ty' = newvar () in\n          unify env (instance_def ty) (newty (Tpoly (ty', [])));\n          (* if not !Clflags.nolabels then\n             Location.prerr_warning loc (Warnings.Unknown_method met); *)\n          ty'\n        | _ -> assert false\n      in\n      rue\n        {\n          exp_desc = Texp_send (obj, meth, exp);\n          exp_loc = loc;\n          exp_extra = [];\n          exp_type = typ;\n          exp_attributes = sexp.pexp_attributes;\n          exp_env = env;\n        }\n    with Unify _ ->\n      let valid_methods =\n        match !obj_meths with\n        | Some meths ->\n          Some (Meths.fold (fun meth _meth_ty li -> meth :: li) !meths [])\n        | None -> (\n          match (expand_head env obj.exp_type).desc with\n          | Tobject (fields, _) ->\n            let fields, _ = Ctype.flatten_fields fields in\n            let collect_fields li (meth, meth_kind, _meth_ty) =\n              if meth_kind = Fpresent then meth :: li else li\n            in\n            Some (List.fold_left collect_fields [] fields)\n          | _ -> None)\n      in\n      raise\n        (Error\n           (e.pexp_loc, env, Undefined_method (obj.exp_type, met, valid_methods)))\n    )\n  | Pexp_new _ | Pexp_setinstvar _ | Pexp_override _ -> assert false\n  | Pexp_letmodule (name, smodl, sbody) ->\n    let ty = newvar () in\n    (* remember original level *)\n    begin_def ();\n    Ident.set_current_time ty.level;\n    let context = Typetexp.narrow () in\n    let modl = !type_module env smodl in\n    let id, new_env = Env.enter_module name.txt modl.mod_type env in\n    Ctype.init_def (Ident.current_time ());\n    Typetexp.widen context;\n    let body = type_expect new_env sbody ty_expected in\n    (* go back to original level *)\n    end_def ();\n    (* Unification of body.exp_type with the fresh variable ty\n       fails if and only if the prefix condition is violated,\n       i.e. 
if generative types rooted at id show up in the\n       type body.exp_type.  Thus, this unification enforces the\n       scoping condition on \"let module\". *)\n    (* Note that this code will only be reached if ty_expected\n       is a generic type variable, otherwise the error will occur\n       above in type_expect *)\n    (try Ctype.unify_var new_env ty body.exp_type\n     with Unify _ ->\n       raise (Error (loc, env, Scoping_let_module (name.txt, body.exp_type))));\n    re\n      {\n        exp_desc = Texp_letmodule (id, name, modl, body);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = ty;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_letexception (cd, sbody) ->\n    let cd, newenv = Typedecl.transl_exception env cd in\n    let body = type_expect newenv sbody ty_expected in\n    re\n      {\n        exp_desc = Texp_letexception (cd, body);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = body.exp_type;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_assert e ->\n    let cond = type_expect env e Predef.type_bool in\n    let exp_type =\n      match cond.exp_desc with\n      | Texp_construct (_, {cstr_name = \"false\"}, _) -> instance env ty_expected\n      | _ -> instance_def Predef.type_unit\n    in\n    rue\n      {\n        exp_desc = Texp_assert cond;\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_lazy e ->\n    let ty = newgenvar () in\n    let to_unify = Predef.type_lazy_t ty in\n    unify_exp_types loc env to_unify ty_expected;\n    let arg = type_expect env e ty in\n    re\n      {\n        exp_desc = Texp_lazy arg;\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance env ty_expected;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_object _ -> 
assert false\n  | Pexp_poly (sbody, sty) ->\n    let ty, cty =\n      match sty with\n      | None -> (repr ty_expected, None)\n      | Some sty ->\n        let sty = Ast_helper.Typ.force_poly sty in\n        let cty = Typetexp.transl_simple_type env false sty in\n        (repr cty.ctyp_type, Some cty)\n    in\n    if sty <> None then\n      unify_exp_types loc env (instance env ty) (instance env ty_expected);\n    let exp =\n      match (expand_head env ty).desc with\n      | Tpoly (ty', []) ->\n        let exp = type_expect env sbody ty' in\n        {exp with exp_type = instance env ty}\n      | Tpoly (ty', tl) ->\n        (* One more level to generalize locally *)\n        begin_def ();\n        let vars, ty'' = instance_poly true tl ty' in\n        let exp = type_expect env sbody ty'' in\n        end_def ();\n        check_univars env false \"method\" exp ty_expected vars;\n        {exp with exp_type = instance env ty}\n      | Tvar _ ->\n        let exp = type_exp env sbody in\n        let exp = {exp with exp_type = newty (Tpoly (exp.exp_type, []))} in\n        unify_exp env exp ty;\n        exp\n      | _ -> assert false\n    in\n    re\n      {\n        exp with\n        exp_extra = (Texp_poly cty, loc, sexp.pexp_attributes) :: exp.exp_extra;\n      }\n  | Pexp_newtype ({txt = name}, sbody) ->\n    let ty = newvar () in\n    (* remember original level *)\n    begin_def ();\n    (* Create a fake abstract type declaration for name. 
*)\n    let level = get_current_level () in\n    let decl =\n      {\n        type_params = [];\n        type_arity = 0;\n        type_kind = Type_abstract;\n        type_private = Public;\n        type_manifest = None;\n        type_variance = [];\n        type_newtype_level = Some (level, level);\n        type_loc = loc;\n        type_attributes = [];\n        type_immediate = false;\n        type_unboxed = unboxed_false_default_false;\n      }\n    in\n    Ident.set_current_time ty.level;\n    let id, new_env = Env.enter_type name decl env in\n    Ctype.init_def (Ident.current_time ());\n\n    let body = type_exp new_env sbody in\n    (* Replace every instance of this type constructor in the resulting\n       type. *)\n    let seen = Hashtbl.create 8 in\n    let rec replace t =\n      if Hashtbl.mem seen t.id then ()\n      else (\n        Hashtbl.add seen t.id ();\n        match t.desc with\n        | Tconstr (Path.Pident id', _, _) when id == id' -> link_type t ty\n        | _ -> Btype.iter_type_expr replace t)\n    in\n    let ety = Subst.type_expr Subst.identity body.exp_type in\n    replace ety;\n    (* back to original level *)\n    end_def ();\n\n    (* lower the levels of the result type *)\n    (* unify_var env ty ety; *)\n\n    (* non-expansive if the body is non-expansive, so we don't introduce\n       any new extra node in the typed AST. 
*)\n    rue\n      {\n        body with\n        exp_loc = loc;\n        exp_type = ety;\n        exp_extra =\n          (Texp_newtype name, loc, sexp.pexp_attributes) :: body.exp_extra;\n      }\n  | Pexp_pack m ->\n    let p, nl =\n      match Ctype.expand_head env (instance env ty_expected) with\n      | {desc = Tpackage (p, nl, _tl)} -> (p, nl)\n      | {desc = Tvar _} -> raise (Error (loc, env, Cannot_infer_signature))\n      | _ -> raise (Error (loc, env, Not_a_packed_module ty_expected))\n    in\n    let modl, tl' = !type_package env m p nl in\n    rue\n      {\n        exp_desc = Texp_pack modl;\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = newty (Tpackage (p, nl, tl'));\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n  | Pexp_open (ovf, lid, e) ->\n    let path, newenv = !type_open ovf env sexp.pexp_loc lid in\n    let exp = type_expect newenv e ty_expected in\n    {\n      exp with\n      exp_extra =\n        (Texp_open (ovf, path, lid, newenv), loc, sexp.pexp_attributes)\n        :: exp.exp_extra;\n    }\n  | Pexp_extension\n      ( {txt = \"ocaml.extension_constructor\" | \"extension_constructor\"; _},\n        payload ) -> (\n    match payload with\n    | PStr\n        [\n          {\n            pstr_desc =\n              Pstr_eval ({pexp_desc = Pexp_construct (lid, None); _}, _);\n          };\n        ] ->\n      let path =\n        match (Typetexp.find_constructor env lid.loc lid.txt).cstr_tag with\n        | Cstr_extension (path, _) -> path\n        | _ -> raise (Error (lid.loc, env, Not_an_extension_constructor))\n      in\n      rue\n        {\n          exp_desc = Texp_extension_constructor (lid, path);\n          exp_loc = loc;\n          exp_extra = [];\n          exp_type = instance_def Predef.type_extension_constructor;\n          exp_attributes = sexp.pexp_attributes;\n          exp_env = env;\n        }\n    | _ -> raise (Error (loc, env, Invalid_extension_constructor_payload)))\n  
| Pexp_extension ext ->\n    raise (Error_forward (Builtin_attributes.error_of_extension ext))\n  | Pexp_unreachable ->\n    re\n      {\n        exp_desc = Texp_unreachable;\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = instance env ty_expected;\n        exp_attributes = sexp.pexp_attributes;\n        exp_env = env;\n      }\n\nand type_function ?in_function loc attrs env ty_expected l caselist =\n  let loc_fun, ty_fun =\n    match in_function with\n    | Some p -> p\n    | None -> (loc, instance env ty_expected)\n  in\n  let separate = Env.has_local_constraints env in\n  if separate then begin_def ();\n  let ty_arg, ty_res =\n    try filter_arrow env (instance env ty_expected) l\n    with Unify _ -> (\n      match expand_head env ty_expected with\n      | {desc = Tarrow _} as ty ->\n        raise (Error (loc, env, Abstract_wrong_label (l, ty)))\n      | _ ->\n        raise\n          (Error (loc_fun, env, Too_many_arguments (in_function <> None, ty_fun))))\n  in\n  let ty_arg =\n    if is_optional l then (\n      let tv = newvar () in\n      (try unify env ty_arg (type_option tv) with Unify _ -> assert false);\n      type_option tv)\n    else ty_arg\n  in\n  if separate then (\n    end_def ();\n    generalize_structure ty_arg;\n    generalize_structure ty_res);\n  let cases, partial =\n    type_cases ~in_function:(loc_fun, ty_fun) env ty_arg ty_res true loc\n      caselist\n  in\n  if is_optional l && not_function env ty_res then\n    Location.prerr_warning (List.hd cases).c_lhs.pat_loc\n      Warnings.Unerasable_optional_argument;\n  let param = name_pattern \"param\" cases in\n  re\n    {\n      exp_desc = Texp_function {arg_label = l; param; cases; partial};\n      exp_loc = loc;\n      exp_extra = [];\n      exp_type = instance env (newgenty (Tarrow (l, ty_arg, ty_res, Cok)));\n      exp_attributes = attrs;\n      exp_env = env;\n    }\n\nand type_label_access env srecord lid =\n  let record = type_exp ~recarg:Allowed env srecord in\n  
let ty_exp = record.exp_type in\n  let opath =\n    try\n      let p0, p, _, _ = extract_concrete_record env ty_exp in\n      Some (p0, p)\n    with Not_found -> None\n  in\n  let labels = Typetexp.find_all_labels env lid.loc lid.txt in\n  let label =\n    wrap_disambiguate \"This expression has\" ty_exp\n      (Label.disambiguate lid env opath)\n      labels\n  in\n  (record, label, opath)\n\n(* Typing format strings for printing or reading.\n   These formats are used by functions in modules Printf, Format, and Scanf.\n   (Handling of * modifiers contributed by Thorsten Ohl.) *)\nand type_label_exp ?type_clash_context create env loc ty_expected\n    (lid, label, sarg) =\n  (* Here also ty_expected may be at generic_level *)\n  begin_def ();\n  let separate = Env.has_local_constraints env in\n  if separate then (\n    begin_def ();\n    begin_def ());\n  let vars, ty_arg, ty_res = instance_label true label in\n  if separate then (\n    end_def ();\n    (* Generalize label information *)\n    generalize_structure ty_arg;\n    generalize_structure ty_res);\n  (try unify env (instance_def ty_res) (instance env ty_expected)\n   with Unify trace ->\n     raise (Error (lid.loc, env, Label_mismatch (lid.txt, trace))));\n  (* Instantiate so that we can generalize internal nodes *)\n  let ty_arg = instance_def ty_arg in\n  if separate then (\n    end_def ();\n    (* Generalize information merged from ty_expected *)\n    generalize_structure ty_arg);\n  if label.lbl_private = Private then\n    if create then raise (Error (loc, env, Private_type ty_expected))\n    else raise (Error (lid.loc, env, Private_label (lid.txt, ty_expected)));\n  let arg =\n    let snap = if vars = [] then None else Some (Btype.snapshot ()) in\n    let arg =\n      type_argument ?type_clash_context env sarg ty_arg (instance env ty_arg)\n    in\n    end_def ();\n    try\n      check_univars env (vars <> []) \"field value\" arg label.lbl_arg vars;\n      arg\n    with exn when not (is_nonexpansive arg) 
-> (\n      try\n        (* Try to retype without propagating ty_arg, cf PR#4862 *)\n        may Btype.backtrack snap;\n        begin_def ();\n        let arg = type_exp env sarg in\n        end_def ();\n        generalize_expansive env arg.exp_type;\n        unify_exp env arg ty_arg;\n        check_univars env false \"field value\" arg label.lbl_arg vars;\n        arg\n      with\n      | Error (_, _, Less_general _) as e -> raise e\n      | _ -> raise exn (* In case of failure return the first error *))\n  in\n  (lid, label, {arg with exp_type = instance env arg.exp_type})\n\nand type_argument ?type_clash_context ?recarg env sarg ty_expected' ty_expected\n    =\n  (* ty_expected' may be generic *)\n  let no_labels ty =\n    let ls, tvar = list_labels env ty in\n    (not tvar) && List.for_all (fun x -> x = Nolabel) ls\n  in\n  let rec is_inferred sexp =\n    match sexp.pexp_desc with\n    | Pexp_ident _ | Pexp_apply _ | Pexp_field _ | Pexp_constraint _\n    | Pexp_coerce _ | Pexp_send _ | Pexp_new _ ->\n      true\n    | Pexp_sequence (_, e) | Pexp_open (_, _, e) -> is_inferred e\n    | Pexp_ifthenelse (_, e1, Some e2) -> is_inferred e1 && is_inferred e2\n    | _ -> false\n  in\n  match expand_head env ty_expected' with\n  | {desc = Tarrow (Nolabel, ty_arg, ty_res, _); level = _}\n    when is_inferred sarg ->\n    (* apply optional arguments when expected type is \"\" *)\n    (* we must be very careful about not breaking the semantics *)\n    let texp = type_exp env sarg in\n    let rec make_args args ty_fun =\n      match (expand_head env ty_fun).desc with\n      | Tarrow (l, ty_arg, ty_fun, _) when is_optional l ->\n        let ty = option_none (instance env ty_arg) sarg.pexp_loc in\n        make_args ((l, Some ty) :: args) ty_fun\n      | Tarrow (Nolabel, _, ty_res', _) ->\n        (List.rev args, ty_fun, no_labels ty_res')\n      | Tvar _ -> (List.rev args, ty_fun, false)\n      | _ -> ([], texp.exp_type, false)\n    in\n    let args, ty_fun', simple_res = 
make_args [] texp.exp_type in\n    let texp = {texp with exp_type = instance env texp.exp_type}\n    and ty_fun = instance env ty_fun' in\n    if not (simple_res || no_labels ty_res) then (\n      unify_exp env texp ty_expected;\n      texp)\n    else (\n      unify_exp env {texp with exp_type = ty_fun} ty_expected;\n      if args = [] then texp\n      else\n        (* eta-expand to avoid side effects *)\n        let var_pair name ty =\n          let id = Ident.create name in\n          ( {\n              pat_desc = Tpat_var (id, mknoloc name);\n              pat_type = ty;\n              pat_extra = [];\n              pat_attributes = [];\n              pat_loc = Location.none;\n              pat_env = env;\n            },\n            {\n              exp_type = ty;\n              exp_loc = Location.none;\n              exp_env = env;\n              exp_extra = [];\n              exp_attributes = [];\n              exp_desc =\n                Texp_ident\n                  ( Path.Pident id,\n                    mknoloc (Longident.Lident name),\n                    {\n                      val_type = ty;\n                      val_kind = Val_reg;\n                      val_attributes = [];\n                      Types.val_loc = Location.none;\n                    } );\n            } )\n        in\n        let eta_pat, eta_var = var_pair \"eta\" ty_arg in\n        let func texp =\n          let e =\n            {\n              texp with\n              exp_type = ty_res;\n              exp_desc = Texp_apply (texp, args @ [(Nolabel, Some eta_var)]);\n            }\n          in\n          let cases = [case eta_pat e] in\n          let param = name_pattern \"param\" cases in\n          {\n            texp with\n            exp_type = ty_fun;\n            exp_desc =\n              Texp_function {arg_label = Nolabel; param; cases; partial = Total};\n          }\n        in\n        Location.prerr_warning texp.exp_loc\n          (Warnings.Eliminated_optional_arguments\n  
           (List.map (fun (l, _) -> Printtyp.string_of_label l) args));\n        (* let-expand to have side effects *)\n        let let_pat, let_var = var_pair \"arg\" texp.exp_type in\n        re\n          {\n            texp with\n            exp_type = ty_fun;\n            exp_desc =\n              Texp_let\n                ( Nonrecursive,\n                  [\n                    {\n                      vb_pat = let_pat;\n                      vb_expr = texp;\n                      vb_attributes = [];\n                      vb_loc = Location.none;\n                    };\n                  ],\n                  func let_var );\n          })\n  | _ ->\n    let texp = type_expect ?type_clash_context ?recarg env sarg ty_expected' in\n    unify_exp ?type_clash_context env texp ty_expected;\n    texp\n\nand is_automatic_curried_application env funct =\n  (* When a curried function is used with uncurried application, treat it as a curried application *)\n  !Config.uncurried = Uncurried\n  &&\n  match (expand_head env funct.exp_type).desc with\n  | Tarrow _ -> true\n  | _ -> false\n\nand type_application ?type_clash_context uncurried env funct (sargs : sargs) :\n    targs * Types.type_expr * bool =\n  (* funct.exp_type may be generic *)\n  let result_type omitted ty_fun =\n    List.fold_left\n      (fun ty_fun (l, ty, lv) -> newty2 lv (Tarrow (l, ty, ty_fun, Cok)))\n      ty_fun omitted\n  in\n  let has_label l ty_fun =\n    let ls, tvar = list_labels env ty_fun in\n    tvar || List.mem l ls\n  in\n  let ignored = ref [] in\n  let has_uncurried_type t =\n    match (expand_head env t).desc with\n    | Tconstr (Pident {name = \"function$\"}, [t; t_arity], _) ->\n      let arity = Ast_uncurried.type_to_arity t_arity in\n      Some (arity, t)\n    | _ -> None\n  in\n  let force_uncurried_type funct =\n    match has_uncurried_type funct.exp_type with\n    | None -> (\n      let arity = List.length sargs in\n      let uncurried_typ =\n        
Ast_uncurried.make_uncurried_type ~env ~arity (newvar ())\n      in\n      match (expand_head env funct.exp_type).desc with\n      | Tvar _ | Tarrow _ -> unify_exp env funct uncurried_typ\n      | _ ->\n        raise\n          (Error\n             ( funct.exp_loc,\n               env,\n               Apply_non_function (expand_head env funct.exp_type) )))\n    | Some _ -> ()\n  in\n  let extract_uncurried_type t =\n    match has_uncurried_type t with\n    | Some (arity, t1) ->\n      if List.length sargs > arity then\n        raise\n          (Error\n             ( funct.exp_loc,\n               env,\n               Uncurried_arity_mismatch (t, arity, List.length sargs) ));\n      (t1, arity)\n    | None -> (t, max_int)\n  in\n  let update_uncurried_arity ~nargs t new_t =\n    match has_uncurried_type t with\n    | Some (arity, _) ->\n      let newarity = arity - nargs in\n      let fully_applied = newarity <= 0 in\n      if uncurried && not fully_applied then\n        raise\n          (Error\n             ( funct.exp_loc,\n               env,\n               Uncurried_arity_mismatch (t, arity, List.length sargs) ));\n      let new_t =\n        if fully_applied then new_t\n        else Ast_uncurried.make_uncurried_type ~env ~arity:newarity new_t\n      in\n      (fully_applied, new_t)\n    | _ -> (false, new_t)\n  in\n  let rec type_unknown_args max_arity ~(args : lazy_args) omitted ty_fun\n      (syntax_args : sargs) : targs * _ =\n    match syntax_args with\n    | [] ->\n      let collect_args () =\n        ( List.map\n            (function\n              | l, None -> (l, None)\n              | l, Some f -> (l, Some (f ())))\n            (List.rev args),\n          instance env (result_type omitted ty_fun) )\n      in\n      if List.length args < max_arity && uncurried then\n        match (expand_head env ty_fun).desc with\n        | Tarrow (Optional l, t1, t2, _) ->\n          ignored := (Optional l, t1, ty_fun.level) :: !ignored;\n          let arg =\n         
   ( Optional l,\n              Some (fun () -> option_none (instance env t1) Location.none) )\n          in\n          type_unknown_args max_arity ~args:(arg :: args) omitted t2 []\n        | _ -> collect_args ()\n      else collect_args ()\n    | [(Nolabel, {pexp_desc = Pexp_construct ({txt = Lident \"()\"}, None)})]\n      when uncurried && omitted = [] && args <> []\n           && List.length args = List.length !ignored ->\n      (* foo(. ) treated as empty application if all args are optional (hence ignored) *)\n      type_unknown_args max_arity ~args omitted ty_fun []\n    | (l1, sarg1) :: sargl ->\n      let ty1, ty2 =\n        let ty_fun = expand_head env ty_fun in\n        let arity_ok = List.length args < max_arity in\n        match ty_fun.desc with\n        | Tvar _ ->\n          let t1 = newvar () and t2 = newvar () in\n          if ty_fun.level >= t1.level && not_identity funct.exp_desc then\n            Location.prerr_warning sarg1.pexp_loc Warnings.Unused_argument;\n          unify env ty_fun (newty (Tarrow (l1, t1, t2, Clink (ref Cunknown))));\n          (t1, t2)\n        | Tarrow (l, t1, t2, _) when Asttypes.same_arg_label l l1 && arity_ok ->\n          (t1, t2)\n        | td -> (\n          let ty_fun =\n            match td with\n            | Tarrow _ -> newty td\n            | _ -> ty_fun\n          in\n          let ty_res = result_type (omitted @ !ignored) ty_fun in\n          match ty_res.desc with\n          | Tarrow _ ->\n            if not arity_ok then\n              raise\n                (Error\n                   (sarg1.pexp_loc, env, Apply_wrong_label (l1, funct.exp_type)))\n            else if not (has_label l1 ty_fun) then\n              raise\n                (Error (sarg1.pexp_loc, env, Apply_wrong_label (l1, ty_res)))\n            else raise (Error (funct.exp_loc, env, Incoherent_label_order))\n          | _ ->\n            raise\n              (Error\n                 ( funct.exp_loc,\n                   env,\n                  
 Apply_non_function (expand_head env funct.exp_type) )))\n      in\n      let optional = is_optional l1 in\n      let arg1 () =\n        let arg1 = type_expect env sarg1 ty1 in\n        if optional then unify_exp env arg1 (type_option (newvar ()));\n        arg1\n      in\n      type_unknown_args max_arity ~args:((l1, Some arg1) :: args) omitted ty2\n        sargl\n  in\n  let rec type_args ?type_clash_context max_arity args omitted ~ty_fun ty_fun0\n      ~(sargs : sargs) =\n    match (expand_head env ty_fun, expand_head env ty_fun0) with\n    | ( {desc = Tarrow (l, ty, ty_fun, com); level = lv},\n        {desc = Tarrow (_, ty0, ty_fun0, _)} )\n      when sargs <> [] && commu_repr com = Cok && List.length args < max_arity\n      ->\n      let name = label_name l and optional = is_optional l in\n      let sargs, omitted, arg =\n        match extract_label name sargs with\n        | None ->\n          if optional && (uncurried || label_assoc Nolabel sargs) then (\n            ignored := (l, ty, lv) :: !ignored;\n            ( sargs,\n              omitted,\n              Some (fun () -> option_none (instance env ty) Location.none) ))\n          else (sargs, (l, ty, lv) :: omitted, None)\n        | Some (l', sarg0, sargs) ->\n          if (not optional) && is_optional l' then\n            Location.prerr_warning sarg0.pexp_loc\n              (Warnings.Nonoptional_label (Printtyp.string_of_label l));\n          ( sargs,\n            omitted,\n            Some\n              (if (not optional) || is_optional l' then fun () ->\n                 type_argument\n                   ?type_clash_context:\n                     (type_clash_context_for_function_argument\n                        type_clash_context sarg0)\n                   env sarg0 ty ty0\n               else fun () ->\n                 option_some\n                   (type_argument ?type_clash_context env sarg0\n                      (extract_option_type env ty)\n                      (extract_option_type env 
ty0))) )\n      in\n      type_args ?type_clash_context max_arity ((l, arg) :: args) omitted ~ty_fun\n        ty_fun0 ~sargs\n    | _ ->\n      type_unknown_args max_arity ~args omitted ty_fun0\n        sargs (* This is the hot path for non-labeled function*)\n  in\n  let () =\n    let ls, tvar = list_labels env funct.exp_type in\n    if not tvar then\n      let labels = Ext_list.filter ls (fun l -> not (is_optional l)) in\n      if\n        Ext_list.same_length labels sargs\n        && List.for_all (fun (l, _) -> l = Nolabel) sargs\n        && List.exists (fun l -> l <> Nolabel) labels\n      then\n        raise\n          (Error\n             ( funct.exp_loc,\n               env,\n               Labels_omitted\n                 (List.map Printtyp.string_of_label\n                    (Ext_list.filter labels (fun x -> x <> Nolabel))) ))\n  in\n  match sargs with\n  (* Special case for ignore: avoid discarding warning *)\n  | [(Nolabel, sarg)] when is_ignore funct env ->\n    let ty_arg, ty_res =\n      filter_arrow env (instance env funct.exp_type) Nolabel\n    in\n    let exp = type_expect env sarg ty_arg in\n    (match (expand_head env exp.exp_type).desc with\n    | Tarrow _ ->\n      Location.prerr_warning exp.exp_loc Warnings.Partial_application\n    | Tvar _ ->\n      Delayed_checks.add_delayed_check (fun () ->\n          check_application_result env false exp)\n    | _ -> ());\n    ([(Nolabel, Some exp)], ty_res, false)\n  | _ ->\n    if uncurried then force_uncurried_type funct;\n    let ty, max_arity = extract_uncurried_type funct.exp_type in\n    let targs, ret_t =\n      type_args ?type_clash_context max_arity [] [] ~ty_fun:ty (instance env ty)\n        ~sargs\n    in\n    let fully_applied, ret_t =\n      update_uncurried_arity funct.exp_type\n        ~nargs:(List.length !ignored + List.length sargs)\n        ret_t\n    in\n    (targs, ret_t, fully_applied)\n\nand type_construct env loc lid sarg ty_expected attrs =\n  let opath =\n    try\n      let p0, 
p, _ = extract_concrete_variant env ty_expected in\n      Some (p0, p)\n    with Not_found -> None\n  in\n  let constrs = Typetexp.find_all_constructors env lid.loc lid.txt in\n  let constr =\n    wrap_disambiguate \"This variant expression is expected to have\" ty_expected\n      (Constructor.disambiguate lid env opath)\n      constrs\n  in\n  Env.mark_constructor Env.Positive env (Longident.last lid.txt) constr;\n  Builtin_attributes.check_deprecated loc constr.cstr_attributes\n    constr.cstr_name;\n  let sargs =\n    match sarg with\n    | None -> []\n    | Some {pexp_desc = Pexp_tuple sel}\n      when constr.cstr_arity > 1 || Builtin_attributes.explicit_arity attrs ->\n      sel\n    | Some se -> [se]\n  in\n  if List.length sargs <> constr.cstr_arity then\n    raise\n      (Error\n         ( loc,\n           env,\n           Constructor_arity_mismatch\n             (lid.txt, constr.cstr_arity, List.length sargs) ));\n  let separate = Env.has_local_constraints env in\n  if separate then (\n    begin_def ();\n    begin_def ());\n  let ty_args, ty_res = instance_constructor constr in\n  let texp =\n    re\n      {\n        exp_desc = Texp_construct (lid, constr, []);\n        exp_loc = loc;\n        exp_extra = [];\n        exp_type = ty_res;\n        exp_attributes = attrs;\n        exp_env = env;\n      }\n  in\n  let type_clash_context = type_clash_context_maybe_option ty_expected ty_res in\n  if separate then (\n    end_def ();\n    generalize_structure ty_res;\n    unify_exp ?type_clash_context env\n      {texp with exp_type = instance_def ty_res}\n      (instance env ty_expected);\n    end_def ();\n    List.iter generalize_structure ty_args;\n    generalize_structure ty_res);\n  let ty_args0, ty_res =\n    match instance_list env (ty_res :: ty_args) with\n    | t :: tl -> (tl, t)\n    | _ -> assert false\n  in\n  let texp = {texp with exp_type = ty_res} in\n  if not separate then\n    unify_exp ?type_clash_context env texp (instance env ty_expected);\n  
let recarg =\n    match constr.cstr_inlined with\n    | None -> Rejected\n    | Some _ -> (\n      match sargs with\n      | [\n       {\n         pexp_desc =\n           ( Pexp_ident _\n           | Pexp_record (_, (Some {pexp_desc = Pexp_ident _} | None)) );\n       };\n      ] ->\n        Required\n      | _ -> raise (Error (loc, env, Inlined_record_expected)))\n  in\n  let args =\n    List.map2\n      (fun e (t, t0) -> type_argument ~recarg env e t t0)\n      sargs\n      (List.combine ty_args ty_args0)\n  in\n  if constr.cstr_private = Private then\n    raise (Error (loc, env, Private_type ty_res));\n  (* NOTE: shouldn't we call \"re\" on this final expression? -- AF *)\n  {texp with exp_desc = Texp_construct (lid, constr, args)}\n\n(* Typing of statements (expressions whose values are discarded) *)\n\nand type_statement env sexp =\n  let loc = (final_subexpression sexp).pexp_loc in\n  begin_def ();\n  let exp = type_exp env sexp in\n  end_def ();\n  let ty = expand_head env exp.exp_type and tv = newvar () in\n  if is_Tvar ty && ty.level > tv.level then\n    Location.prerr_warning loc Warnings.Nonreturning_statement;\n  let expected_ty = instance_def Predef.type_unit in\n  let type_clash_context = type_clash_context_in_statement sexp in\n  unify_exp ?type_clash_context env exp expected_ty;\n  exp\n\n(* Typing of match cases *)\n\nand type_cases ?root_type_clash_context ?in_function env ty_arg ty_res\n    partial_flag loc caselist : _ * Typedtree.partial =\n  (* ty_arg is _fully_ generalized *)\n  let patterns = List.map (fun {pc_lhs = p} -> p) caselist in\n  let contains_polyvars = List.exists contains_polymorphic_variant patterns in\n  let erase_either = contains_polyvars && contains_variant_either ty_arg\n  and has_gadts = List.exists (contains_gadt env) patterns in\n  (*  prerr_endline ( if has_gadts then \"contains gadt\" else \"no gadt\"); *)\n  let ty_arg =\n    if has_gadts || erase_either then correct_levels ty_arg else ty_arg\n  and ty_res, env =\n    
if has_gadts then (correct_levels ty_res, duplicate_ident_types caselist env)\n    else (ty_res, env)\n  in\n  let rec is_var spat =\n    match spat.ppat_desc with\n    | Ppat_any | Ppat_var _ -> true\n    | Ppat_alias (spat, _) -> is_var spat\n    | _ -> false\n  in\n  let needs_exhaust_check =\n    match caselist with\n    | [{pc_rhs = {pexp_desc = Pexp_unreachable}}] -> true\n    | [{pc_lhs}] when is_var pc_lhs -> false\n    | _ -> true\n  in\n  let init_env () =\n    (* raise level for existentials *)\n    begin_def ();\n    Ident.set_current_time (get_current_level ());\n    let lev = Ident.current_time () in\n    Ctype.init_def (lev + 1000);\n    (* up to 1000 existentials *)\n    (lev, Env.add_gadt_instance_level lev env)\n  in\n  let lev, env =\n    if has_gadts then init_env () else (get_current_level (), env)\n  in\n  (* if has_gadts then\n     Format.printf \"lev = %d@.%a@.\" lev Printtyp.raw_type_expr ty_res; *)\n  (* Do we need to propagate polymorphism *)\n  let propagate =\n    has_gadts\n    || (repr ty_arg).level = generic_level\n    ||\n    match caselist with\n    | [{pc_lhs}] when is_var pc_lhs -> false\n    | _ -> true\n  in\n  if propagate then begin_def ();\n  (* propagation of the argument *)\n  let pattern_force = ref [] in\n  (* Format.printf \"@[%i %i@ %a@]@.\" lev (get_current_level())\n     Printtyp.raw_type_expr ty_arg; *)\n  let pat_env_list =\n    List.map\n      (fun {pc_lhs; pc_guard; pc_rhs} ->\n        let loc =\n          let open Location in\n          match pc_guard with\n          | None -> pc_rhs.pexp_loc\n          | Some g -> {pc_rhs.pexp_loc with loc_start = g.pexp_loc.loc_start}\n        in\n        let scope = Some (Annot.Idef loc) in\n        let pat, ext_env, force, unpacks =\n          let partial = if erase_either then Some false else None in\n          let ty_arg = instance ?partial env ty_arg in\n          type_pattern ~lev env pc_lhs scope ty_arg\n        in\n        pattern_force := force @ !pattern_force;\n     
   (pat, (ext_env, unpacks)))\n      caselist\n  in\n  (* Unify all cases (delayed to keep it order-free) *)\n  let ty_arg' = newvar () in\n  let unify_pats ty =\n    List.iter (fun (pat, (ext_env, _)) -> unify_pat ext_env pat ty) pat_env_list\n  in\n  unify_pats ty_arg';\n  (* Check for polymorphic variants to close *)\n  let patl = List.map fst pat_env_list in\n  if List.exists has_variants patl then (\n    Parmatch.pressure_variants env patl;\n    List.iter (iter_pattern finalize_variant) patl);\n  (* `Contaminating' unifications start here *)\n  List.iter (fun f -> f ()) !pattern_force;\n  (* Post-processing and generalization *)\n  if propagate || erase_either then unify_pats (instance env ty_arg);\n  if propagate then (\n    List.iter\n      (iter_pattern (fun {pat_type = t} -> unify_var env t (newvar ())))\n      patl;\n    end_def ();\n    List.iter (iter_pattern (fun {pat_type = t} -> generalize t)) patl);\n  (* type bodies *)\n  let in_function = if List.length caselist = 1 then in_function else None in\n  let cases =\n    List.map2\n      (fun (pat, (ext_env, unpacks)) {pc_lhs; pc_guard; pc_rhs} ->\n        let sexp = wrap_unpacks pc_rhs unpacks in\n        let ty_res' =\n          if contains_gadt env pc_lhs then correct_levels ty_res else ty_res\n        in\n        (* Format.printf \"@[%i %i, ty_res' =@ %a@]@.\" lev (get_current_level())\n           Printtyp.raw_type_expr ty_res'; *)\n        let guard =\n          match pc_guard with\n          | None -> None\n          | Some scond ->\n            Some\n              (type_expect\n                 ?type_clash_context:\n                   (if Option.is_some root_type_clash_context then\n                      Some IfCondition\n                    else None)\n                 ext_env\n                 (wrap_unpacks scond unpacks)\n                 Predef.type_bool)\n        in\n        let exp =\n          type_expect ?type_clash_context:root_type_clash_context ?in_function\n            ext_env sexp 
ty_res'\n        in\n        {\n          c_lhs = pat;\n          c_guard = guard;\n          c_rhs = {exp with exp_type = instance env ty_res'};\n        })\n      pat_env_list caselist\n  in\n  (if has_gadts then\n     let ty_res' = instance env ty_res in\n     List.iter (fun c -> unify_exp env c.c_rhs ty_res') cases);\n  let do_init = has_gadts || needs_exhaust_check in\n  let lev, env = if do_init && not has_gadts then init_env () else (lev, env) in\n  let ty_arg_check =\n    if do_init then\n      (* Hack: use for_saving to copy variables too *)\n      Subst.type_expr (Subst.for_saving Subst.identity) ty_arg\n    else ty_arg\n  in\n  let partial =\n    if partial_flag then check_partial ~lev env ty_arg_check loc cases\n    else Partial\n  in\n  let unused_check () =\n    List.iter (fun (pat, (env, _)) -> check_absent_variant env pat) pat_env_list;\n    check_unused ~lev env (instance env ty_arg_check) cases;\n    Parmatch.check_ambiguous_bindings cases\n  in\n  if contains_polyvars || do_init then\n    Delayed_checks.add_delayed_check unused_check\n  else unused_check ();\n  (* Check for unused cases, do not delay because of gadts *)\n  if do_init then (\n    end_def ();\n    (* Ensure that existential types do not escape *)\n    unify_exp_types loc env (instance env ty_res) (newvar ()));\n  (cases, partial)\n\n(* Typing of let bindings *)\n\nand type_let ?(check = fun s -> Warnings.Unused_var s)\n    ?(check_strict = fun s -> Warnings.Unused_var_strict s) env rec_flag\n    spat_sexp_list scope allow =\n  begin_def ();\n  let is_fake_let =\n    match spat_sexp_list with\n    | [\n     {\n       pvb_expr =\n         {\n           pexp_desc =\n             Pexp_match\n               ({pexp_desc = Pexp_ident {txt = Longident.Lident \"*opt*\"}}, _);\n         };\n     };\n    ] ->\n      true (* the fake let-declaration introduced by fun ?(x = e) -> ... 
*)\n    | _ -> false\n  in\n  let check = if is_fake_let then check_strict else check in\n\n  let spatl =\n    List.map\n      (fun {pvb_pat = spat; pvb_attributes = attrs} -> (attrs, spat))\n      spat_sexp_list\n  in\n  let nvs = List.map (fun _ -> newvar ()) spatl in\n  let pat_list, new_env, force, unpacks =\n    type_pattern_list env spatl scope nvs allow\n  in\n  let attrs_list = List.map fst spatl in\n  let is_recursive = rec_flag = Recursive in\n  (* If recursive, first unify with an approximation of the expression *)\n  if is_recursive then\n    List.iter2\n      (fun pat binding ->\n        let pat =\n          match pat.pat_type.desc with\n          | Tpoly (ty, tl) ->\n            {\n              pat with\n              pat_type = snd (instance_poly ~keep_names:true false tl ty);\n            }\n          | _ -> pat\n        in\n        unify_pat env pat (type_approx env binding.pvb_expr))\n      pat_list spat_sexp_list;\n  (* Polymorphic variant processing *)\n  List.iter\n    (fun pat ->\n      if has_variants pat then (\n        Parmatch.pressure_variants env [pat];\n        iter_pattern finalize_variant pat))\n    pat_list;\n  (* Only bind pattern variables after generalizing *)\n  List.iter (fun f -> f ()) force;\n  let exp_env = if is_recursive then new_env else env in\n\n  let current_slot = ref None in\n  let rec_needed = ref false in\n  let warn_about_unused_bindings =\n    List.exists\n      (fun attrs ->\n        Builtin_attributes.warning_scope ~ppwarning:false attrs (fun () ->\n            Warnings.is_active (check \"\")\n            || Warnings.is_active (check_strict \"\")\n            || (is_recursive && Warnings.is_active Warnings.Unused_rec_flag)))\n      attrs_list\n  in\n  let pat_slot_list =\n    (* Algorithm to detect unused declarations in recursive bindings:\n       - During type checking of the definitions, we capture the 'value_used'\n         events on the bound identifiers and record them in a slot corresponding\n         to 
the current definition (!current_slot).\n         In effect, this creates a dependency graph between definitions.\n\n       - After type checking the definition (!current_slot = None),\n         when one of the bound identifier is effectively used, we trigger\n         again all the events recorded in the corresponding slot.\n         The effect is to traverse the transitive closure of the graph created\n         in the first step.\n\n       We also keep track of whether *all* variables in a given pattern\n       are unused. If this is the case, for local declarations, the issued\n       warning is 26, not 27.\n    *)\n    List.map2\n      (fun attrs pat ->\n        Builtin_attributes.warning_scope ~ppwarning:false attrs (fun () ->\n            if not warn_about_unused_bindings then (pat, None)\n            else\n              let some_used = ref false in\n              (* has one of the identifier of this pattern been used? *)\n              let slot = ref [] in\n              List.iter\n                (fun id ->\n                  let vd = Env.find_value (Path.Pident id) new_env in\n                  (* note: Env.find_value does not trigger the value_used event *)\n                  let name = Ident.name id in\n                  let used = ref false in\n                  if not (name = \"\" || name.[0] = '_' || name.[0] = '#') then\n                    Delayed_checks.add_delayed_check (fun () ->\n                        if not !used then\n                          Location.prerr_warning vd.Types.val_loc\n                            ((if !some_used then check_strict else check) name));\n                  Env.set_value_used_callback name vd (fun () ->\n                      match !current_slot with\n                      | Some slot ->\n                        slot := (name, vd) :: !slot;\n                        rec_needed := true\n                      | None ->\n                        List.iter\n                          (fun (name, vd) -> Env.mark_value_used 
env name vd)\n                          (get_ref slot);\n                        used := true;\n                        some_used := true))\n                (Typedtree.pat_bound_idents pat);\n              (pat, Some slot)))\n      attrs_list pat_list\n  in\n  let exp_list =\n    List.map2\n      (fun {pvb_expr = sexp; pvb_attributes; _} (pat, slot) ->\n        let sexp =\n          if rec_flag = Recursive then wrap_unpacks sexp unpacks else sexp\n        in\n        if is_recursive then current_slot := slot;\n        match pat.pat_type.desc with\n        | Tpoly (ty, tl) ->\n          begin_def ();\n          let vars, ty' = instance_poly ~keep_names:true true tl ty in\n          let exp =\n            Builtin_attributes.warning_scope pvb_attributes (fun () ->\n                type_expect exp_env sexp ty')\n          in\n          end_def ();\n          check_univars env true \"definition\" exp pat.pat_type vars;\n          {exp with exp_type = instance env exp.exp_type}\n        | _ ->\n          Builtin_attributes.warning_scope pvb_attributes (fun () ->\n              type_expect exp_env sexp pat.pat_type))\n      spat_sexp_list pat_slot_list\n  in\n  current_slot := None;\n  (if\n     is_recursive && (not !rec_needed)\n     && Warnings.is_active Warnings.Unused_rec_flag\n   then\n     let {pvb_pat; pvb_attributes} = List.hd spat_sexp_list in\n     (* See PR#6677 *)\n     Builtin_attributes.warning_scope ~ppwarning:false pvb_attributes (fun () ->\n         Location.prerr_warning pvb_pat.ppat_loc Warnings.Unused_rec_flag));\n  List.iter2\n    (fun pat (attrs, exp) ->\n      Builtin_attributes.warning_scope ~ppwarning:false attrs (fun () ->\n          ignore (check_partial env pat.pat_type pat.pat_loc [case pat exp])))\n    pat_list\n    (List.map2 (fun (attrs, _) e -> (attrs, e)) spatl exp_list);\n  end_def ();\n  List.iter2\n    (fun pat exp ->\n      if not (is_nonexpansive exp) then\n        iter_pattern (fun pat -> generalize_expansive env pat.pat_type) 
pat)\n    pat_list exp_list;\n  List.iter\n    (fun pat -> iter_pattern (fun pat -> generalize pat.pat_type) pat)\n    pat_list;\n  let l = List.combine pat_list exp_list in\n  let l =\n    List.map2\n      (fun (p, e) pvb ->\n        {\n          vb_pat = p;\n          vb_expr = e;\n          vb_attributes = pvb.pvb_attributes;\n          vb_loc = pvb.pvb_loc;\n        })\n      l spat_sexp_list\n  in\n  if is_recursive then\n    List.iter\n      (fun {vb_pat = pat} ->\n        match pat.pat_desc with\n        | Tpat_var _ -> ()\n        | Tpat_alias ({pat_desc = Tpat_any}, _, _) -> ()\n        | _ -> raise (Error (pat.pat_loc, env, Illegal_letrec_pat)))\n      l;\n  (l, new_env, unpacks)\n\n(* Typing of toplevel bindings *)\n\nlet type_binding env rec_flag spat_sexp_list scope =\n  Typetexp.reset_type_variables ();\n  let pat_exp_list, new_env, _unpacks =\n    type_let\n      ~check:(fun s -> Warnings.Unused_value_declaration s)\n      ~check_strict:(fun s -> Warnings.Unused_value_declaration s)\n      env rec_flag spat_sexp_list scope false\n  in\n  (pat_exp_list, new_env)\n\nlet type_let env rec_flag spat_sexp_list scope =\n  let pat_exp_list, new_env, _unpacks =\n    type_let env rec_flag spat_sexp_list scope false\n  in\n  (pat_exp_list, new_env)\n\n(* Typing of toplevel expressions *)\n\nlet type_expression env sexp =\n  Typetexp.reset_type_variables ();\n  begin_def ();\n  let exp = type_exp env sexp in\n  (if Warnings.is_active (Bs_toplevel_expression_unit None) then\n     try unify env exp.exp_type (instance_def Predef.type_unit) with\n     | Unify _ ->\n       let buffer = Buffer.create 10 in\n       let formatter = Format.formatter_of_buffer buffer in\n       Printtyp.type_expr formatter exp.exp_type;\n       Format.pp_print_flush formatter ();\n       let return_type = Buffer.contents buffer in\n       Location.prerr_warning sexp.pexp_loc\n         (Bs_toplevel_expression_unit\n            (match sexp.pexp_desc with\n            | Pexp_apply _ -> Some 
(return_type, FunctionCall)\n            | _ -> Some (return_type, Other)))\n     | Tags _ ->\n       Location.prerr_warning sexp.pexp_loc (Bs_toplevel_expression_unit None));\n  end_def ();\n  if not (is_nonexpansive exp) then generalize_expansive env exp.exp_type;\n  generalize exp.exp_type;\n  match sexp.pexp_desc with\n  | Pexp_ident lid ->\n    (* Special case for keeping type variables when looking-up a variable *)\n    let _path, desc = Env.lookup_value lid.txt env in\n    {exp with exp_type = desc.val_type}\n  | _ -> exp\n\n(* Error report *)\n\nlet spellcheck ppf unbound_name valid_names =\n  Misc.did_you_mean ppf (fun () -> Misc.spellcheck valid_names unbound_name)\n\nlet spellcheck_idents ppf unbound valid_idents =\n  spellcheck ppf (Ident.name unbound) (List.map Ident.name valid_idents)\n\nopen Format\nopen Printtyp\n\nlet report_error env ppf = function\n  | Polymorphic_label lid ->\n    fprintf ppf \"@[The record field %a is polymorphic.@ %s@]\" longident lid\n      \"You cannot instantiate it in a pattern.\"\n  | Constructor_arity_mismatch (lid, expected, provided) ->\n    (* modified *)\n    fprintf ppf\n      \"@[This variant constructor, %a, expects %i %s; here, we've %sfound %i.@]\"\n      longident lid expected\n      (if expected == 1 then \"argument\" else \"arguments\")\n      (if provided < expected then \"only \" else \"\")\n      provided\n  | Label_mismatch (lid, trace) ->\n    (* modified *)\n    super_report_unification_error ppf env trace\n      (function\n        | ppf ->\n          fprintf ppf \"The record field %a@ belongs to the type\" longident lid)\n      (function ppf -> fprintf ppf \"but is mixed here with fields of type\")\n  | Pattern_type_clash trace ->\n    (* modified *)\n    super_report_unification_error ppf env trace\n      (function\n        | ppf -> fprintf ppf \"This pattern matches values of type\")\n      (function\n        | ppf ->\n        fprintf ppf \"but a pattern was expected which matches values of type\")\n 
 | Or_pattern_type_clash (id, trace) ->\n    (* modified *)\n    super_report_unification_error ppf env trace\n      (function\n        | ppf ->\n          fprintf ppf\n            \"The variable %s on the left-hand side of this or-pattern has type\"\n            (Ident.name id))\n      (function ppf -> fprintf ppf \"but on the right-hand side it has type\")\n  | Multiply_bound_variable name ->\n    fprintf ppf \"Variable %s is bound several times in this matching\" name\n  | Orpat_vars (id, valid_idents) ->\n    fprintf ppf \"Variable %s must occur on both sides of this | pattern\"\n      (Ident.name id);\n    spellcheck_idents ppf id valid_idents\n  | Expr_type_clash\n      ( (_, {desc = Tarrow _})\n        :: (_, {desc = Tconstr (Pident {name = \"function$\"}, _, _)})\n        :: _,\n        _ ) ->\n    fprintf ppf\n      \"This function is a curried function where an uncurried function is \\\n       expected\"\n  | Expr_type_clash\n      ( ( _,\n          {\n            desc = Tconstr (Pident {name = \"function$\"}, [{desc = Tvar _}; _], _);\n          } )\n        :: (_, {desc = Tarrow _})\n        :: _,\n        _ ) ->\n    fprintf ppf\n      \"This function is an uncurried function where a curried function is \\\n       expected\"\n  | Expr_type_clash\n      ( (_, {desc = Tconstr (Pident {name = \"function$\"}, [_; t_a], _)})\n        :: (_, {desc = Tconstr (Pident {name = \"function$\"}, [_; t_b], _)})\n        :: _,\n        _ )\n    when Ast_uncurried.type_to_arity t_a <> Ast_uncurried.type_to_arity t_b ->\n    let arity_a = Ast_uncurried.type_to_arity t_a |> string_of_int in\n    let arity_b = Ast_uncurried.type_to_arity t_b |> string_of_int in\n    report_arity_mismatch ~arity_a ~arity_b ppf\n  | Expr_type_clash\n      ( ( _,\n          {\n            desc =\n              Tconstr\n                (Pdot (Pdot (Pident {name = \"Js_OO\"}, \"Meth\", _), a, _), _, _);\n          } )\n        :: ( _,\n             {\n               desc =\n                 
Tconstr\n                   (Pdot (Pdot (Pident {name = \"Js_OO\"}, \"Meth\", _), b, _), _, _);\n             } )\n        :: _,\n        _ )\n    when a <> b ->\n    fprintf ppf \"This method has %s but was expected %s\" a b\n  | Expr_type_clash (trace, type_clash_context) ->\n    (* modified *)\n    fprintf ppf \"@[<v>\";\n    print_expr_type_clash ?type_clash_context env trace ppf;\n    fprintf ppf \"@]\"\n  | Apply_non_function typ -> (\n    (* modified *)\n    reset_and_mark_loops typ;\n    match (repr typ).desc with\n    | Tarrow (_, _inputType, return_type, _) ->\n      let rec count_number_of_args count {Types.desc} =\n        match desc with\n        | Tarrow (_, _inputType, return_type, _) ->\n          count_number_of_args (count + 1) return_type\n        | _ -> count\n      in\n      let count_number_of_args = count_number_of_args 1 in\n      let accepts_count = count_number_of_args return_type in\n      fprintf ppf \"@[<v>@[<2>This function has type@ @{<info>%a@}@]\" type_expr\n        typ;\n      fprintf ppf \"@ @[It only accepts %i %s; here, it's called with more.@]@]\"\n        accepts_count\n        (if accepts_count == 1 then \"argument\" else \"arguments\")\n    | _ ->\n      fprintf ppf \"@[<v>@[<2>This expression has type@ %a@]@ %s@]\" type_expr typ\n        \"It is not a function.\")\n  | Apply_wrong_label (l, ty) ->\n    let print_label ppf = function\n      | Nolabel -> fprintf ppf \"without label\"\n      | l -> fprintf ppf \"with label %s\" (prefixed_label_name l)\n    in\n    reset_and_mark_loops ty;\n    fprintf ppf\n      \"@[<v>@[<2>The function applied to this argument has type@ %a@]@.This \\\n       argument cannot be applied %a@]\"\n      type_expr ty print_label l\n  | Label_multiply_defined s ->\n    fprintf ppf \"The record field label %s is defined several times\" s\n  | Labels_missing (labels, might_be_component) ->\n    let print_labels ppf = List.iter (fun lbl -> fprintf ppf \"@ %s\" lbl) in\n    let component_text =\n      
if might_be_component then\n        \" If this is a component, add the missing props.\"\n      else \"\"\n    in\n    fprintf ppf \"@[<hov>Some required record fields are missing:%a.%s@]\"\n      print_labels labels component_text\n  | Label_not_mutable lid ->\n    fprintf ppf \"The record field %a is not mutable\" longident lid\n  | Wrong_name (eorp, ty, kind, p, name, valid_names) ->\n    (* modified *)\n    reset_and_mark_loops ty;\n    if Path.is_constructor_typath p then\n      fprintf ppf\n        \"@[The field %s is not part of the record argument for the %a \\\n         constructor@]\"\n        name Printtyp.path p\n    else (\n      fprintf ppf \"@[<v>\";\n\n      fprintf ppf\n        \"@[<2>The %s @{<error>%s@} does not belong to type @{<info>%a@}@]@,@,\"\n        (label_of_kind kind) name (*kind*) Printtyp.path p;\n\n      fprintf ppf \"@[<2>%s type@ @{<info>%a@}@]\" eorp type_expr ty;\n\n      fprintf ppf \"@]\");\n    spellcheck ppf name valid_names\n  | Name_type_mismatch (kind, lid, tp, tpl) ->\n    let name = label_of_kind kind in\n    report_ambiguous_type_error ppf env tp tpl\n      (function\n        | ppf ->\n          fprintf ppf \"The %s %a@ belongs to the %s type\" name longident lid\n            kind)\n      (function\n        | ppf ->\n          fprintf ppf \"The %s %a@ belongs to one of the following %s types:\"\n            name longident lid kind)\n      (function\n        | ppf ->\n        fprintf ppf \"but a %s was expected belonging to the %s type\" name kind)\n  | Undefined_method (ty, me, valid_methods) -> (\n    reset_and_mark_loops ty;\n    fprintf ppf\n      \"@[<v>@[This expression has type@;<1 2>%a@]@,It has no field %s@]\"\n      type_expr ty me;\n    match valid_methods with\n    | None -> ()\n    | Some valid_methods -> spellcheck ppf me valid_methods)\n  | Not_subtype (tr1, tr2) ->\n    report_subtyping_error ppf env tr1 \"is not a subtype of\" tr2\n  | Coercion_failure (ty, ty', trace, b) ->\n    (* modified *)\n    
super_report_unification_error ppf env trace\n      (function\n        | ppf ->\n          let ty, ty' = Printtyp.prepare_expansion (ty, ty') in\n          fprintf ppf\n            \"This expression cannot be coerced to type@;<1 2>%a;@ it has type\"\n            (Printtyp.type_expansion ty)\n            ty')\n      (function ppf -> fprintf ppf \"but is here used with type\");\n    if b then\n      fprintf ppf \".@.@[<hov>%s@ %s@]\"\n        \"This simple coercion was not fully general.\"\n        \"Consider using a double coercion.\"\n  | Too_many_arguments (in_function, ty) -> (\n    (* modified *)\n    reset_and_mark_loops ty;\n    if in_function then (\n      fprintf ppf \"@[This function expects too many arguments,@ \";\n      fprintf ppf \"it should have type@ %a@]\" type_expr ty)\n    else\n      match ty with\n      | {desc = Tconstr (Pident {name = \"function$\"}, _, _)} ->\n        fprintf ppf \"This expression is expected to have an uncurried function\"\n      | _ ->\n        fprintf ppf \"@[This expression should not be a function,@ \";\n        fprintf ppf \"the expected type is@ %a@]\" type_expr ty)\n  | Abstract_wrong_label (l, ty) ->\n    let label_mark = function\n      | Nolabel -> \"but its first argument is not labelled\"\n      | l ->\n        sprintf \"but its first argument is labelled %s\" (prefixed_label_name l)\n    in\n    reset_and_mark_loops ty;\n    fprintf ppf \"@[<v>@[<2>This function should have type@ %a@]@,%s@]\" type_expr\n      ty (label_mark l)\n  | Scoping_let_module (id, ty) ->\n    reset_and_mark_loops ty;\n    fprintf ppf \"This `let module' expression has type@ %a@ \" type_expr ty;\n    fprintf ppf\n      \"In this type, the locally bound module name %s escapes its scope\" id\n  | Private_type ty ->\n    fprintf ppf \"Cannot create values of the private type %a\" type_expr ty\n  | Private_label (lid, ty) ->\n    fprintf ppf \"Cannot assign field %a of the private type %a\" longident lid\n      type_expr ty\n  | 
Not_a_variant_type lid ->\n    fprintf ppf \"The type %a@ is not a variant type\" longident lid\n  | Incoherent_label_order ->\n    fprintf ppf \"This labeled function is applied to arguments@ \";\n    fprintf ppf \"in an order different from other calls.@ \";\n    fprintf ppf \"This is only allowed when the real type is known.\"\n  | Less_general (kind, trace) ->\n    (* modified *)\n    super_report_unification_error ppf env trace\n      (fun ppf -> fprintf ppf \"This %s has type\" kind)\n      (fun ppf -> fprintf ppf \"which is less general than\")\n  | Modules_not_allowed ->\n    fprintf ppf \"Modules are not allowed in this pattern.\"\n  | Cannot_infer_signature ->\n    fprintf ppf \"The signature for this packaged module couldn't be inferred.\"\n  | Not_a_packed_module ty ->\n    fprintf ppf \"This expression is packed module, but the expected type is@ %a\"\n      type_expr ty\n  | Recursive_local_constraint trace ->\n    (* modified *)\n    super_report_unification_error ppf env trace\n      (function\n        | ppf -> fprintf ppf \"Recursive local constraint when unifying\")\n      (function ppf -> fprintf ppf \"with\")\n  | Unexpected_existential -> fprintf ppf \"Unexpected existential\"\n  | Unqualified_gadt_pattern (tpath, name) ->\n    fprintf ppf \"@[The GADT constructor %s of type %a@ %s.@]\" name path tpath\n      \"must be qualified in this pattern\"\n  | Invalid_interval ->\n    fprintf ppf \"@[Only character intervals are supported in patterns.@]\"\n  | Invalid_for_loop_index ->\n    fprintf ppf \"@[Invalid for-loop index: only variables and _ are allowed.@]\"\n  | No_value_clauses ->\n    fprintf ppf \"None of the patterns in this 'match' expression match values.\"\n  | Exception_pattern_below_toplevel ->\n    fprintf ppf\n      \"@[Exception patterns must be at the top level of a match case.@]\"\n  | Inlined_record_escape ->\n    fprintf ppf\n      \"@[This form is not allowed as the type of the inlined record could \\\n       escape.@]\"\n  | 
Inlined_record_expected ->\n    fprintf ppf \"@[This constructor expects an inlined record argument.@]\"\n  | Unrefuted_pattern pat ->\n    fprintf ppf \"@[%s@ %s@ %a@]\" \"This match case could not be refuted.\"\n      \"Here is an example of a value that would reach it:\" Parmatch.top_pretty\n      pat\n  | Invalid_extension_constructor_payload ->\n    fprintf ppf\n      \"Invalid [%%extension_constructor] payload, a constructor is expected.\"\n  | Not_an_extension_constructor ->\n    fprintf ppf \"This constructor is not an extension constructor.\"\n  | Literal_overflow ty ->\n    fprintf ppf\n      \"Integer literal exceeds the range of representable integers of type %s\"\n      ty\n  | Unknown_literal (n, m) ->\n    fprintf ppf \"Unknown modifier '%c' for literal %s%c\" m n m\n  | Illegal_letrec_pat ->\n    fprintf ppf \"Only variables are allowed as left-hand side of `let rec'\"\n  | Labels_omitted [label] ->\n    fprintf ppf\n      \"Label ~%s was omitted in the application of this labeled function.\" label\n  | Labels_omitted labels ->\n    let labels_string =\n      labels |> List.map (fun label -> \"~\" ^ label) |> String.concat \", \"\n    in\n    fprintf ppf\n      \"Labels %s were omitted in the application of this labeled function.\"\n      labels_string\n  | Empty_record_literal ->\n    fprintf ppf\n      \"Empty record literal {} should be type annotated or used in a record \\\n       context.\"\n  | Uncurried_arity_mismatch (typ, arity, args) ->\n    fprintf ppf \"@[<v>@[<2>This uncurried function has type@ %a@]\" type_expr typ;\n    fprintf ppf\n      \"@ @[It is applied with @{<error>%d@} argument%s but it requires \\\n       @{<info>%d@}.@]@]\"\n      args\n      (if args = 0 then \"\" else \"s\")\n      arity\n  | Field_not_optional (name, typ) ->\n    fprintf ppf \"Field @{<info>%s@} is not optional in type %a. 
Use without ?\"\n      name type_expr typ\n\nlet super_report_error_no_wrap_printing_env = report_error\n\nlet report_error env ppf err =\n  wrap_printing_env env (fun () -> report_error env ppf err)\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, env, err) ->\n      Some (Location.error_of_printer loc (report_error env) err)\n    | Error_forward err -> Some err\n    | _ -> None)\n\n(* drop ?recarg argument from the external API *)\nlet type_expect ?in_function env e ty = type_expect ?in_function env e ty\nlet type_exp env e = type_exp env e\nlet type_argument env e t1 t2 = type_argument env e t1 t2\n"
  },
  {
    "path": "analysis/vendor/ml/typecore.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Type inference for the core language *)\n\nopen Asttypes\nopen Types\nopen Format\n\nval is_nonexpansive : Typedtree.expression -> bool\n\nval type_binding :\n  Env.t ->\n  rec_flag ->\n  Parsetree.value_binding list ->\n  Annot.ident option ->\n  Typedtree.value_binding list * Env.t\nval type_let :\n  Env.t ->\n  rec_flag ->\n  Parsetree.value_binding list ->\n  Annot.ident option ->\n  Typedtree.value_binding list * Env.t\nval type_expression : Env.t -> Parsetree.expression -> Typedtree.expression\nval check_partial :\n  ?lev:int ->\n  Env.t ->\n  type_expr ->\n  Location.t ->\n  Typedtree.case list ->\n  Typedtree.partial\nval type_expect :\n  ?in_function:Location.t * type_expr ->\n  Env.t ->\n  Parsetree.expression ->\n  type_expr ->\n  Typedtree.expression\nval type_exp : Env.t -> Parsetree.expression -> Typedtree.expression\nval type_approx : Env.t -> 
Parsetree.expression -> type_expr\nval type_argument :\n  Env.t ->\n  Parsetree.expression ->\n  type_expr ->\n  type_expr ->\n  Typedtree.expression\n\nval option_some : Typedtree.expression -> Typedtree.expression\nval option_none : type_expr -> Location.t -> Typedtree.expression\nval extract_option_type : Env.t -> type_expr -> type_expr\nval iter_pattern : (Typedtree.pattern -> unit) -> Typedtree.pattern -> unit\nval generalizable : int -> type_expr -> bool\n\nval id_of_pattern : Typedtree.pattern -> Ident.t option\nval name_pattern : string -> Typedtree.case list -> Ident.t\n\nval self_coercion : (Path.t * Location.t list ref) list ref\n\ntype error =\n  | Polymorphic_label of Longident.t\n  | Constructor_arity_mismatch of Longident.t * int * int\n  | Label_mismatch of Longident.t * (type_expr * type_expr) list\n  | Pattern_type_clash of (type_expr * type_expr) list\n  | Or_pattern_type_clash of Ident.t * (type_expr * type_expr) list\n  | Multiply_bound_variable of string\n  | Orpat_vars of Ident.t * Ident.t list\n  | Expr_type_clash of\n      (type_expr * type_expr) list\n      * Error_message_utils.type_clash_context option\n  | Apply_non_function of type_expr\n  | Apply_wrong_label of arg_label * type_expr\n  | Label_multiply_defined of string\n  | Labels_missing of string list * bool\n  | Label_not_mutable of Longident.t\n  | Wrong_name of string * type_expr * string * Path.t * string * string list\n  | Name_type_mismatch of\n      string * Longident.t * (Path.t * Path.t) * (Path.t * Path.t) list\n  | Undefined_method of type_expr * string * string list option\n  | Private_type of type_expr\n  | Private_label of Longident.t * type_expr\n  | Not_subtype of (type_expr * type_expr) list * (type_expr * type_expr) list\n  | Coercion_failure of\n      type_expr * type_expr * (type_expr * type_expr) list * bool\n  | Too_many_arguments of bool * type_expr\n  | Abstract_wrong_label of arg_label * type_expr\n  | Scoping_let_module of string * type_expr\n  | 
Not_a_variant_type of Longident.t\n  | Incoherent_label_order\n  | Less_general of string * (type_expr * type_expr) list\n  | Modules_not_allowed\n  | Cannot_infer_signature\n  | Not_a_packed_module of type_expr\n  | Recursive_local_constraint of (type_expr * type_expr) list\n  | Unexpected_existential\n  | Unqualified_gadt_pattern of Path.t * string\n  | Invalid_interval\n  | Invalid_for_loop_index\n  | No_value_clauses\n  | Exception_pattern_below_toplevel\n  | Inlined_record_escape\n  | Inlined_record_expected\n  | Unrefuted_pattern of Typedtree.pattern\n  | Invalid_extension_constructor_payload\n  | Not_an_extension_constructor\n  | Literal_overflow of string\n  | Unknown_literal of string * char\n  | Illegal_letrec_pat\n  | Labels_omitted of string list\n  | Empty_record_literal\n  | Uncurried_arity_mismatch of type_expr * int * int\n  | Field_not_optional of string * type_expr\nexception Error of Location.t * Env.t * error\nexception Error_forward of Location.error\n\nval super_report_error_no_wrap_printing_env :\n  Env.t -> formatter -> error -> unit\n\nval report_error : Env.t -> formatter -> error -> unit\n(* Deprecated.  Use Location.{error_of_exn, report_error}. 
*)\n\n(* Forward declaration, to be filled in by Typemod.type_module *)\nval type_module : (Env.t -> Parsetree.module_expr -> Typedtree.module_expr) ref\n\n(* Forward declaration, to be filled in by Typemod.type_open *)\nval type_open :\n  (?used_slot:bool ref ->\n  override_flag ->\n  Env.t ->\n  Location.t ->\n  Longident.t loc ->\n  Path.t * Env.t)\n  ref\n\n(* Forward declaration, to be filled in by Typeclass.class_structure *)\nval type_package :\n  (Env.t ->\n  Parsetree.module_expr ->\n  Path.t ->\n  Longident.t list ->\n  Typedtree.module_expr * type_expr list)\n  ref\n\nval create_package_type :\n  Location.t ->\n  Env.t ->\n  Longident.t * (Longident.t * Parsetree.core_type) list ->\n  Path.t * (Longident.t * Typedtree.core_type) list * Types.type_expr\n\nval constant : Parsetree.constant -> (Asttypes.constant, error) result\n"
  },
  {
    "path": "analysis/vendor/ml/typedecl.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*  Xavier Leroy and Jerome Vouillon, projet Cristal, INRIA Rocquencourt  *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(**** Typing of type definitions ****)\n\nopen Misc\nopen Asttypes\nopen Parsetree\nopen Primitive\nopen Types\nopen Typetexp\n\ntype native_repr_kind = Unboxed | Untagged\n\ntype error =\n  | Repeated_parameter\n  | Duplicate_constructor of string\n  | Duplicate_label of string * string option\n  | Recursive_abbrev of string\n  | Cycle_in_def of string * type_expr\n  | Definition_mismatch of type_expr * Includecore.type_mismatch list\n  | Constraint_failed of type_expr * type_expr\n  | Inconsistent_constraint of Env.t * (type_expr * type_expr) list\n  | Type_clash of Env.t * (type_expr * type_expr) list\n  | Parameters_differ of Path.t * type_expr * type_expr\n  | Null_arity_external\n  | Unbound_type_var of type_expr * type_declaration\n  | Cannot_extend_private_type of Path.t\n  | Not_extensible_type of Path.t\n  | Extension_mismatch of Path.t * Includecore.type_mismatch 
list\n  | Rebind_wrong_type of Longident.t * Env.t * (type_expr * type_expr) list\n  | Rebind_mismatch of Longident.t * Path.t * Path.t\n  | Rebind_private of Longident.t\n  | Bad_variance of int * (bool * bool * bool) * (bool * bool * bool)\n  | Unavailable_type_constructor of Path.t\n  | Bad_fixed_type of string\n  | Unbound_type_var_ext of type_expr * extension_constructor\n  | Varying_anonymous\n  | Val_in_structure\n  | Bad_immediate_attribute\n  | Bad_unboxed_attribute of string\n  | Boxed_and_unboxed\n  | Nonrec_gadt\n  | Variant_runtime_representation_mismatch of Variant_coercion.variant_error\n  | Variant_spread_fail of Variant_type_spread.variant_type_spread_error\n\nopen Typedtree\n\nexception Error of Location.t * error\n\n(* Note: do not factor the branches in the following pattern-matching:\n   the records must be constants for the compiler to do sharing on them.\n*)\nlet get_unboxed_from_attributes sdecl =\n  let unboxed = Builtin_attributes.has_unboxed sdecl.ptype_attributes in\n  let boxed = Builtin_attributes.has_boxed sdecl.ptype_attributes in\n  match (boxed, unboxed, !Clflags.unboxed_types) with\n  | true, true, _ -> raise (Error (sdecl.ptype_loc, Boxed_and_unboxed))\n  | true, false, _ -> unboxed_false_default_false\n  | false, true, _ -> unboxed_true_default_false\n  | false, false, false -> unboxed_false_default_true\n  | false, false, true -> unboxed_true_default_true\n\n(* Enter all declared types in the environment as abstract types *)\n\nlet enter_type rec_flag env sdecl id =\n  let needed =\n    match rec_flag with\n    | Asttypes.Nonrecursive ->\n      (match sdecl.ptype_kind with\n      | Ptype_variant scds ->\n        List.iter\n          (fun cd ->\n            if cd.pcd_res <> None then raise (Error (cd.pcd_loc, Nonrec_gadt)))\n          scds\n      | _ -> ());\n      Btype.is_row_name (Ident.name id)\n    | Asttypes.Recursive -> true\n  in\n  if not needed then env\n  else\n    let decl =\n      {\n        type_params = List.map 
(fun _ -> Btype.newgenvar ()) sdecl.ptype_params;\n        type_arity = List.length sdecl.ptype_params;\n        type_kind = Type_abstract;\n        type_private = sdecl.ptype_private;\n        type_manifest =\n          (match sdecl.ptype_manifest with\n          | None -> None\n          | Some _ -> Some (Ctype.newvar ()));\n        type_variance = List.map (fun _ -> Variance.full) sdecl.ptype_params;\n        type_newtype_level = None;\n        type_loc = sdecl.ptype_loc;\n        type_attributes = sdecl.ptype_attributes;\n        type_immediate = false;\n        type_unboxed = unboxed_false_default_false;\n      }\n    in\n    Env.add_type ~check:true id decl env\n\nlet update_type temp_env env id loc =\n  let path = Path.Pident id in\n  let decl = Env.find_type path temp_env in\n  match decl.type_manifest with\n  | None -> ()\n  | Some ty -> (\n    let params = List.map (fun _ -> Ctype.newvar ()) decl.type_params in\n    try Ctype.unify env (Ctype.newconstr path params) ty\n    with Ctype.Unify trace -> raise (Error (loc, Type_clash (env, trace))))\n\n(* We use the Ctype.expand_head_opt version of expand_head to get access\n   to the manifest type of private abbreviations. 
*)\nlet rec get_unboxed_type_representation env ty fuel =\n  if fuel < 0 then None\n  else\n    let ty = Ctype.repr (Ctype.expand_head_opt env ty) in\n    match ty.desc with\n    | Tconstr (p, args, _) -> (\n      match Env.find_type p env with\n      | exception Not_found -> Some ty\n      | {type_unboxed = {unboxed = false}} -> Some ty\n      | {\n       type_params;\n       type_kind =\n         ( Type_record ([{ld_type = ty2; _}], _)\n         | Type_variant [{cd_args = Cstr_tuple [ty2]; _}]\n         | Type_variant [{cd_args = Cstr_record [{ld_type = ty2; _}]; _}] );\n      } ->\n        get_unboxed_type_representation env\n          (Ctype.apply env type_params ty2 args)\n          (fuel - 1)\n      | {type_kind = Type_abstract} ->\n        None\n        (* This case can occur when checking a recursive unboxed type\n           declaration. *)\n      | _ -> assert false (* only the above can be unboxed *))\n    | _ -> Some ty\n\nlet get_unboxed_type_representation env ty =\n  (* Do not give too much fuel: PR#7424 *)\n  get_unboxed_type_representation env ty 100\n\n(* Determine if a type definition defines a fixed type. 
(PW) *)\nlet is_fixed_type sd =\n  let rec has_row_var sty =\n    match sty.ptyp_desc with\n    | Ptyp_alias (sty, _) -> has_row_var sty\n    | Ptyp_class _\n    | Ptyp_object (_, Open)\n    | Ptyp_variant (_, Open, _)\n    | Ptyp_variant (_, Closed, Some _) ->\n      true\n    | _ -> false\n  in\n  match sd.ptype_manifest with\n  | None -> false\n  | Some sty ->\n    sd.ptype_kind = Ptype_abstract\n    && sd.ptype_private = Private && has_row_var sty\n\n(* Set the row variable in a fixed type *)\nlet set_fixed_row env loc p decl =\n  let tm =\n    match decl.type_manifest with\n    | None -> assert false\n    | Some t -> Ctype.expand_head env t\n  in\n  let rv =\n    match tm.desc with\n    | Tvariant row ->\n      let row = Btype.row_repr row in\n      tm.desc <- Tvariant {row with row_fixed = true};\n      if Btype.static_row row then Btype.newgenty Tnil else row.row_more\n    | Tobject (ty, _) -> snd (Ctype.flatten_fields ty)\n    | _ -> raise (Error (loc, Bad_fixed_type \"is not an object or variant\"))\n  in\n  if not (Btype.is_Tvar rv) then\n    raise (Error (loc, Bad_fixed_type \"has no row variable\"));\n  rv.desc <- Tconstr (p, decl.type_params, ref Mnil)\n\n(* Translate one type declaration *)\n\nmodule StringSet = Set.Make (struct\n  type t = string\n  let compare (x : t) y = compare x y\nend)\n\nlet make_params env params =\n  let make_param (sty, v) =\n    try (transl_type_param env sty, v)\n    with Already_bound -> raise (Error (sty.ptyp_loc, Repeated_parameter))\n  in\n  List.map make_param params\n\nlet transl_labels ?record_name env closed lbls =\n  (if !Config.bs_only then\n     match !Builtin_attributes.check_duplicated_labels lbls with\n     | None -> ()\n     | Some {loc; txt = name} ->\n       raise (Error (loc, Duplicate_label (name, record_name)))\n   else\n     let all_labels = ref StringSet.empty in\n     List.iter\n       (fun {pld_name = {txt = name; loc}} ->\n         if StringSet.mem name !all_labels then\n           raise (Error 
(loc, Duplicate_label (name, record_name)));\n         all_labels := StringSet.add name !all_labels)\n       lbls);\n  let mk\n      {\n        pld_name = name;\n        pld_mutable = mut;\n        pld_type = arg;\n        pld_loc = loc;\n        pld_attributes = attrs;\n      } =\n    Builtin_attributes.warning_scope attrs (fun () ->\n        let arg = Ast_helper.Typ.force_poly arg in\n        let cty = transl_simple_type env closed arg in\n        {\n          ld_id = Ident.create name.txt;\n          ld_name = name;\n          ld_mutable = mut;\n          ld_type = cty;\n          ld_loc = loc;\n          ld_attributes = attrs;\n        })\n  in\n  let lbls = List.map mk lbls in\n  let lbls' =\n    List.map\n      (fun ld ->\n        let ty = ld.ld_type.ctyp_type in\n        let ty =\n          match ty.desc with\n          | Tpoly (t, []) -> t\n          | _ -> ty\n        in\n        {\n          Types.ld_id = ld.ld_id;\n          ld_mutable = ld.ld_mutable;\n          ld_type = ty;\n          ld_loc = ld.ld_loc;\n          ld_attributes = ld.ld_attributes;\n        })\n      lbls\n  in\n  (lbls, lbls')\n\nlet transl_constructor_arguments env closed = function\n  | Pcstr_tuple l ->\n    let l = List.map (transl_simple_type env closed) l in\n    (Types.Cstr_tuple (List.map (fun t -> t.ctyp_type) l), Cstr_tuple l)\n  | Pcstr_record l ->\n    let lbls, lbls' = transl_labels env closed l in\n    (Types.Cstr_record lbls', Cstr_record lbls)\n\nlet make_constructor env type_path type_params sargs sret_type =\n  match sret_type with\n  | None ->\n    let args, targs = transl_constructor_arguments env true sargs in\n    (targs, None, args, None, type_params)\n  | Some sret_type ->\n    (* if it's a generalized constructor we must first narrow and\n       then widen so as to not introduce any new constraints *)\n    let z = narrow () in\n    reset_type_variables ();\n    let args, targs = transl_constructor_arguments env false sargs in\n    let tret_type = 
transl_simple_type env false sret_type in\n    let ret_type = tret_type.ctyp_type in\n    let params =\n      match (Ctype.repr ret_type).desc with\n      | Tconstr (p', params, _) when Path.same type_path p' -> params\n      | _ ->\n        raise\n          (Error\n             ( sret_type.ptyp_loc,\n               Constraint_failed\n                 (ret_type, Ctype.newconstr type_path type_params) ))\n    in\n    widen z;\n    (targs, Some tret_type, args, Some ret_type, params)\n\n(* Check that all the variables found in [ty] are in [univ].\n   Because [ty] is the argument to an abstract type, the representation\n   of that abstract type could be any subexpression of [ty], in particular\n   any type variable present in [ty].\n*)\n\nlet transl_declaration ~type_record_as_object env sdecl id =\n  (* Bind type parameters *)\n  reset_type_variables ();\n  Ctype.begin_def ();\n  let tparams = make_params env sdecl.ptype_params in\n  let params = List.map (fun (cty, _) -> cty.ctyp_type) tparams in\n  let cstrs =\n    List.map\n      (fun (sty, sty', loc) ->\n        ( transl_simple_type env false sty,\n          transl_simple_type env false sty',\n          loc ))\n      sdecl.ptype_cstrs\n  in\n  let raw_status = get_unboxed_from_attributes sdecl in\n\n  let check_untagged_variant () =\n    match sdecl.ptype_kind with\n    | Ptype_variant cds ->\n      Ext_list.for_all cds (function\n        | {pcd_args = Pcstr_tuple ([] | [_])} ->\n          (* at most one payload allowed for untagged  variants *)\n          true\n        | {pcd_args = Pcstr_tuple (_ :: _ :: _); pcd_name = {txt = name}} ->\n          Ast_untagged_variants.report_constructor_more_than_one_arg\n            ~loc:sdecl.ptype_loc ~name\n        | {pcd_args = Pcstr_record _} -> true)\n    | _ -> false\n  in\n\n  (if\n     raw_status.unboxed && (not raw_status.default)\n     && not (check_untagged_variant ())\n   then\n     match sdecl.ptype_kind with\n     | Ptype_abstract ->\n       raise (Error 
(sdecl.ptype_loc, Bad_unboxed_attribute \"it is abstract\"))\n     | Ptype_variant _ -> ()\n     | Ptype_record [{pld_mutable = Immutable; _}] -> ()\n     | Ptype_record [{pld_mutable = Mutable; _}] ->\n       raise (Error (sdecl.ptype_loc, Bad_unboxed_attribute \"it is mutable\"))\n     | Ptype_record _ ->\n       raise\n         (Error\n            (sdecl.ptype_loc, Bad_unboxed_attribute \"it has more than one field\"))\n     | Ptype_open ->\n       raise\n         (Error\n            ( sdecl.ptype_loc,\n              Bad_unboxed_attribute \"extensible variant types cannot be unboxed\"\n            )));\n  let unboxed_status =\n    match sdecl.ptype_kind with\n    | Ptype_variant [{pcd_args = Pcstr_tuple []; _}] ->\n      unboxed_false_default_false\n    | Ptype_variant [{pcd_args = Pcstr_tuple _; _}]\n    | Ptype_variant\n        [{pcd_args = Pcstr_record [{pld_mutable = Immutable; _}]; _}]\n    | Ptype_record [{pld_mutable = Immutable; _}] ->\n      raw_status\n    | _ ->\n      (* The type is not unboxable, mark it as boxed *)\n      unboxed_false_default_false\n  in\n  let unbox = unboxed_status.unboxed in\n  let tkind, kind, sdecl =\n    match sdecl.ptype_kind with\n    | Ptype_abstract -> (Ttype_abstract, Type_abstract, sdecl)\n    | Ptype_variant scstrs ->\n      assert (scstrs <> []);\n      (if List.exists (fun cstr -> cstr.pcd_res <> None) scstrs then\n         match cstrs with\n         | [] -> ()\n         | (_, _, loc) :: _ ->\n           Location.prerr_warning loc Warnings.Constraint_on_gadt);\n      let has_optional attrs =\n        Ext_list.exists attrs (fun ({txt}, _) -> txt = \"res.optional\")\n      in\n      let scstrs =\n        Ext_list.map scstrs (fun ({pcd_args} as cstr) ->\n            match pcd_args with\n            | Pcstr_tuple _ -> cstr\n            | Pcstr_record lds ->\n              {\n                cstr with\n                pcd_args =\n                  Pcstr_record\n                    (Ext_list.map lds (fun ld ->\n           
              if has_optional ld.pld_attributes then\n                           let typ = ld.pld_type in\n                           let typ =\n                             {\n                               typ with\n                               ptyp_desc =\n                                 Ptyp_constr\n                                   ( {txt = Lident \"option\"; loc = typ.ptyp_loc},\n                                     [typ] );\n                             }\n                           in\n                           {ld with pld_type = typ}\n                         else ld));\n              })\n      in\n      let all_constrs = ref StringSet.empty in\n      List.iter\n        (fun {pcd_name = {txt = name}} ->\n          if StringSet.mem name !all_constrs then\n            raise (Error (sdecl.ptype_loc, Duplicate_constructor name));\n          all_constrs := StringSet.add name !all_constrs)\n        scstrs;\n      let copy_tag_attr_from_decl attr =\n        let tag_attrs =\n          Ext_list.filter sdecl.ptype_attributes (fun ({txt}, _) ->\n              txt = \"tag\" || txt = Ast_untagged_variants.untagged)\n        in\n        if tag_attrs = [] then attr else tag_attrs @ attr\n      in\n      let constructors_from_variant_spreads = Hashtbl.create 10 in\n      let make_cstr scstr =\n        let name = Ident.create scstr.pcd_name.txt in\n        let targs, tret_type, args, ret_type, _cstr_params =\n          make_constructor env (Path.Pident id) params scstr.pcd_args\n            scstr.pcd_res\n        in\n        if String.starts_with scstr.pcd_name.txt ~prefix:\"...\" then (\n          (* Any constructor starting with \"...\" represents a variant type spread, and\n             will have the spread variant itself as a single argument.\n\n             We pull that variant type out, and then track the type of each of its\n             constructors, so that we can replace our dummy constructors added before\n             type checking  with the realtypes for 
each constructor.\n          *)\n          (match args with\n          | Cstr_tuple [spread_variant] -> (\n            match Ctype.extract_concrete_typedecl env spread_variant with\n            | _, _, {type_kind = Type_variant constructors} ->\n              constructors\n              |> List.iter (fun (c : Types.constructor_declaration) ->\n                     Hashtbl.add constructors_from_variant_spreads c.cd_id.name\n                       c)\n            | _ -> ())\n          | _ -> ());\n          None)\n        else\n          (* Check if this constructor is from a variant spread. If so, we need to replace\n             its type with the right type we've pulled from the type checked spread variant\n             itself. *)\n          let tcstr, cstr =\n            match\n              Hashtbl.find_opt constructors_from_variant_spreads\n                (Ident.name name)\n            with\n            | Some cstr ->\n              let tcstr =\n                {\n                  cd_id = name;\n                  cd_name = scstr.pcd_name;\n                  cd_args =\n                    (match cstr.cd_args with\n                    | Cstr_tuple args ->\n                      Cstr_tuple\n                        (args\n                        |> List.map (fun texpr : Typedtree.core_type ->\n                               {\n                                 ctyp_attributes = cstr.cd_attributes;\n                                 ctyp_loc = cstr.cd_loc;\n                                 ctyp_env = env;\n                                 ctyp_type = texpr;\n                                 ctyp_desc = Ttyp_any;\n                                 (* This is fine because the type checker seems to only look at `ctyp_type` for type checking. 
*)\n                               }))\n                    | Cstr_record lbls ->\n                      Cstr_record\n                        (lbls\n                        |> List.map\n                             (fun\n                               (l : Types.label_declaration)\n                               :\n                               Typedtree.label_declaration\n                             ->\n                               {\n                                 ld_id = l.ld_id;\n                                 ld_name =\n                                   Location.mkloc (Ident.name l.ld_id) l.ld_loc;\n                                 ld_mutable = l.ld_mutable;\n                                 ld_type =\n                                   {\n                                     ctyp_desc = Ttyp_any;\n                                     ctyp_type = l.ld_type;\n                                     ctyp_env = env;\n                                     ctyp_loc = l.ld_loc;\n                                     ctyp_attributes = [];\n                                   };\n                                 ld_loc = l.ld_loc;\n                                 ld_attributes = l.ld_attributes;\n                               })));\n                  cd_res = tret_type;\n                  (* This is also strictly wrong, but is fine because the type checker does not look at this field. 
*)\n                  cd_loc = scstr.pcd_loc;\n                  cd_attributes =\n                    scstr.pcd_attributes |> copy_tag_attr_from_decl;\n                }\n              in\n              (tcstr, cstr)\n            | None ->\n              let tcstr =\n                {\n                  cd_id = name;\n                  cd_name = scstr.pcd_name;\n                  cd_args = targs;\n                  cd_res = tret_type;\n                  cd_loc = scstr.pcd_loc;\n                  cd_attributes =\n                    scstr.pcd_attributes |> copy_tag_attr_from_decl;\n                }\n              in\n              let cstr =\n                {\n                  Types.cd_id = name;\n                  cd_args = args;\n                  cd_res = ret_type;\n                  cd_loc = scstr.pcd_loc;\n                  cd_attributes =\n                    scstr.pcd_attributes |> copy_tag_attr_from_decl;\n                }\n              in\n              (tcstr, cstr)\n          in\n          Some (tcstr, cstr)\n      in\n      let make_cstr scstr =\n        Builtin_attributes.warning_scope scstr.pcd_attributes (fun () ->\n            make_cstr scstr)\n      in\n      let tcstrs, cstrs = List.split (List.filter_map make_cstr scstrs) in\n      let is_untagged_def =\n        Ast_untagged_variants.has_untagged sdecl.ptype_attributes\n      in\n      Ast_untagged_variants.check_well_formed ~env ~is_untagged_def cstrs;\n      (Ttype_variant tcstrs, Type_variant cstrs, sdecl)\n    | Ptype_record lbls_ -> (\n      let has_optional attrs =\n        Ext_list.exists attrs (fun ({txt}, _) -> txt = \"res.optional\")\n      in\n      let optional_labels =\n        Ext_list.filter_map lbls_ (fun lbl ->\n            if has_optional lbl.pld_attributes then Some lbl.pld_name.txt\n            else None)\n      in\n      let lbls =\n        if optional_labels = [] then lbls_\n        else\n          Ext_list.map lbls_ (fun lbl ->\n              let typ = lbl.pld_type in\n 
             let typ =\n                if has_optional lbl.pld_attributes then\n                  {\n                    typ with\n                    ptyp_desc =\n                      Ptyp_constr\n                        ({txt = Lident \"option\"; loc = typ.ptyp_loc}, [typ]);\n                  }\n                else typ\n              in\n              {lbl with pld_type = typ})\n      in\n      let lbls, lbls' =\n        transl_labels ~record_name:sdecl.ptype_name.txt env true lbls\n      in\n      let lbls_opt =\n        match Record_type_spread.has_type_spread lbls with\n        | true ->\n          let rec extract t =\n            match t.desc with\n            | Tpoly (t, []) -> extract t\n            | _ -> Ctype.repr t\n          in\n          let mk_lbl (l : Types.label_declaration)\n              (ld_type : Typedtree.core_type)\n              (type_vars : (string * Types.type_expr) list) :\n              Typedtree.label_declaration =\n            {\n              ld_id = l.ld_id;\n              ld_name = {txt = Ident.name l.ld_id; loc = l.ld_loc};\n              ld_mutable = l.ld_mutable;\n              ld_type =\n                {\n                  ld_type with\n                  ctyp_type =\n                    Record_type_spread.substitute_type_vars type_vars l.ld_type;\n                };\n              ld_loc = l.ld_loc;\n              ld_attributes = l.ld_attributes;\n            }\n          in\n          let rec process_lbls acc lbls lbls' =\n            match (lbls, lbls') with\n            | {ld_name = {txt = \"...\"}; ld_type} :: rest, _ :: rest' -> (\n              match\n                Ctype.extract_concrete_typedecl env (extract ld_type.ctyp_type)\n              with\n              | _p0, _p, {type_kind = Type_record (fields, _repr); type_params}\n                ->\n                let type_vars =\n                  Record_type_spread.extract_type_vars type_params\n                    ld_type.ctyp_type\n                in\n            
    process_lbls\n                  ( fst acc\n                    @ Ext_list.map fields (fun l -> mk_lbl l ld_type type_vars),\n                    snd acc\n                    @ Ext_list.map fields (fun l ->\n                          {\n                            l with\n                            ld_type =\n                              Record_type_spread.substitute_type_vars type_vars\n                                l.ld_type;\n                          }) )\n                  rest rest'\n              | _ -> assert false\n              | exception _ -> None)\n            | lbl :: rest, lbl' :: rest' ->\n              process_lbls (fst acc @ [lbl], snd acc @ [lbl']) rest rest'\n            | _ -> Some acc\n          in\n          process_lbls ([], []) lbls lbls'\n        | false -> Some (lbls, lbls')\n      in\n      let rec check_duplicates loc (lbls : Typedtree.label_declaration list)\n          seen =\n        match lbls with\n        | [] -> ()\n        | lbl :: rest ->\n          let name = lbl.ld_id.name in\n          if StringSet.mem name seen then\n            raise\n              (Error (loc, Duplicate_label (name, Some sdecl.ptype_name.txt)));\n          check_duplicates loc rest (StringSet.add name seen)\n      in\n      match lbls_opt with\n      | Some (lbls, lbls') ->\n        check_duplicates sdecl.ptype_loc lbls StringSet.empty;\n        let optional_labels =\n          Ext_list.filter_map lbls (fun lbl ->\n              if has_optional lbl.ld_attributes then Some lbl.ld_name.txt\n              else None)\n        in\n        ( Ttype_record lbls,\n          Type_record\n            ( lbls',\n              if unbox then Record_unboxed false\n              else if optional_labels <> [] then\n                Record_optional_labels optional_labels\n              else Record_regular ),\n          sdecl )\n      | None ->\n        (* Could not find record type decl for ...t: assume t is an object type and this is syntax ambiguity *)\n        
type_record_as_object := true;\n        let fields =\n          Ext_list.map lbls_ (fun ld ->\n              match ld.pld_name.txt with\n              | \"...\" -> Parsetree.Oinherit ld.pld_type\n              | _ -> Otag (ld.pld_name, ld.pld_attributes, ld.pld_type))\n        in\n        let sdecl =\n          {\n            sdecl with\n            ptype_kind = Ptype_abstract;\n            ptype_manifest =\n              Some (Ast_helper.Typ.object_ ~loc:sdecl.ptype_loc fields Closed);\n          }\n        in\n        (Ttype_abstract, Type_abstract, sdecl))\n    | Ptype_open -> (Ttype_open, Type_open, sdecl)\n  in\n  let tman, man =\n    match sdecl.ptype_manifest with\n    | None -> (None, None)\n    | Some sty ->\n      let no_row = not (is_fixed_type sdecl) in\n      let cty = transl_simple_type env no_row sty in\n      (Some cty, Some cty.ctyp_type)\n  in\n  let decl =\n    {\n      type_params = params;\n      type_arity = List.length params;\n      type_kind = kind;\n      type_private = sdecl.ptype_private;\n      type_manifest = man;\n      type_variance = List.map (fun _ -> Variance.full) params;\n      type_newtype_level = None;\n      type_loc = sdecl.ptype_loc;\n      type_attributes = sdecl.ptype_attributes;\n      type_immediate = false;\n      type_unboxed = unboxed_status;\n    }\n  in\n\n  (* Check constraints *)\n  List.iter\n    (fun (cty, cty', loc) ->\n      let ty = cty.ctyp_type in\n      let ty' = cty'.ctyp_type in\n      try Ctype.unify env ty ty'\n      with Ctype.Unify tr ->\n        raise (Error (loc, Inconsistent_constraint (env, tr))))\n    cstrs;\n  Ctype.end_def ();\n  (* Add abstract row *)\n  (if is_fixed_type sdecl then\n     let p =\n       try Env.lookup_type (Longident.Lident (Ident.name id ^ \"#row\")) env\n       with Not_found -> assert false\n     in\n     set_fixed_row env sdecl.ptype_loc p decl);\n  (* Check for cyclic abbreviations *)\n  (match decl.type_manifest with\n  | None -> ()\n  | Some ty ->\n    if 
Ctype.cyclic_abbrev env id ty then\n      raise (Error (sdecl.ptype_loc, Recursive_abbrev sdecl.ptype_name.txt)));\n  {\n    typ_id = id;\n    typ_name = sdecl.ptype_name;\n    typ_params = tparams;\n    typ_type = decl;\n    typ_cstrs = cstrs;\n    typ_loc = sdecl.ptype_loc;\n    typ_manifest = tman;\n    typ_kind = tkind;\n    typ_private = sdecl.ptype_private;\n    typ_attributes = sdecl.ptype_attributes;\n  }\n\n(* Generalize a type declaration *)\n\nlet generalize_decl decl =\n  List.iter Ctype.generalize decl.type_params;\n  Btype.iter_type_expr_kind Ctype.generalize decl.type_kind;\n  match decl.type_manifest with\n  | None -> ()\n  | Some ty -> Ctype.generalize ty\n\n(* Check that all constraints are enforced *)\n\nmodule TypeSet = Btype.TypeSet\nmodule TypeMap = Btype.TypeMap\n\nlet rec check_constraints_rec env loc visited ty =\n  let ty = Ctype.repr ty in\n  if TypeSet.mem ty !visited then ()\n  else (\n    visited := TypeSet.add ty !visited;\n    match ty.desc with\n    | Tconstr (path, args, _) ->\n      let args' = List.map (fun _ -> Ctype.newvar ()) args in\n      let ty' = Ctype.newconstr path args' in\n      (try Ctype.enforce_constraints env ty' with\n      | Ctype.Unify _ -> assert false\n      | Not_found -> raise (Error (loc, Unavailable_type_constructor path)));\n      if not (Ctype.matches env ty ty') then\n        raise (Error (loc, Constraint_failed (ty, ty')));\n      List.iter (check_constraints_rec env loc visited) args\n    | Tpoly (ty, tl) ->\n      let _, ty = Ctype.instance_poly false tl ty in\n      check_constraints_rec env loc visited ty\n    | _ -> Btype.iter_type_expr (check_constraints_rec env loc visited) ty)\n\nmodule SMap = Map.Make (String)\n\nlet check_constraints_labels env visited l pl =\n  let rec get_loc name = function\n    | [] -> Location.none\n    | pld :: tl ->\n      if name = pld.pld_name.txt then pld.pld_type.ptyp_loc else get_loc name tl\n  in\n  List.iter\n    (fun {Types.ld_id = name; ld_type = ty} ->\n      
check_constraints_rec env (get_loc (Ident.name name) pl) visited ty)\n    l\n\nlet check_constraints ~type_record_as_object env sdecl (_, decl) =\n  let visited = ref TypeSet.empty in\n  (match decl.type_kind with\n  | Type_abstract -> ()\n  | Type_variant l ->\n    let find_pl = function\n      | Ptype_variant pl -> pl\n      | Ptype_record _ | Ptype_abstract | Ptype_open -> assert false\n    in\n    let pl = find_pl sdecl.ptype_kind in\n    let pl_index =\n      let foldf acc x = SMap.add x.pcd_name.txt x acc in\n      List.fold_left foldf SMap.empty pl\n    in\n    List.iter\n      (fun {Types.cd_id = name; cd_args; cd_res} ->\n        let {pcd_args; pcd_res; _} =\n          try SMap.find (Ident.name name) pl_index\n          with Not_found -> assert false\n        in\n        (match (cd_args, pcd_args) with\n        | Cstr_tuple tyl, Pcstr_tuple styl ->\n          List.iter2\n            (fun sty ty -> check_constraints_rec env sty.ptyp_loc visited ty)\n            styl tyl\n        | Cstr_record tyl, Pcstr_record styl ->\n          check_constraints_labels env visited tyl styl\n        | _ -> assert false);\n        match (pcd_res, cd_res) with\n        | Some sr, Some r -> check_constraints_rec env sr.ptyp_loc visited r\n        | _ -> ())\n      l\n  | Type_record (l, _) ->\n    let find_pl = function\n      | Ptype_record pl -> pl\n      | Ptype_variant _ | Ptype_abstract | Ptype_open -> assert false\n    in\n    let pl = find_pl sdecl.ptype_kind in\n    check_constraints_labels env visited l pl\n  | Type_open -> ());\n  match decl.type_manifest with\n  | None -> ()\n  | Some ty ->\n    if not !type_record_as_object then\n      let sty =\n        match sdecl.ptype_manifest with\n        | Some sty -> sty\n        | _ -> assert false\n      in\n      check_constraints_rec env sty.ptyp_loc visited ty\n\n(*\n   If both a variant/record definition and a type equation are given,\n   need to check that the equation refers to a type of the same kind\n   with the 
same constructors and labels.\n*)\nlet check_coherence env loc id decl =\n  match decl with\n  | {\n   type_kind = Type_variant _ | Type_record _ | Type_open;\n   type_manifest = Some ty;\n  } -> (\n    match (Ctype.repr ty).desc with\n    | Tconstr (path, args, _) -> (\n      try\n        let decl' = Env.find_type path env in\n        let err =\n          if List.length args <> List.length decl.type_params then\n            [Includecore.Arity]\n          else if not (Ctype.equal env false args decl.type_params) then\n            [Includecore.Constraint]\n          else\n            Includecore.type_declarations ~loc ~equality:true env\n              (Path.last path) decl' id\n              (Subst.type_declaration\n                 (Subst.add_type id path Subst.identity)\n                 decl)\n        in\n        if err <> [] then raise (Error (loc, Definition_mismatch (ty, err)))\n      with Not_found -> raise (Error (loc, Unavailable_type_constructor path)))\n    | _ -> raise (Error (loc, Definition_mismatch (ty, []))))\n  | _ -> ()\n\nlet check_abbrev env sdecl (id, decl) =\n  check_coherence env sdecl.ptype_loc id decl\n\n(* Check that recursion is well-founded *)\n\nlet check_well_founded env loc path to_check ty =\n  let visited = ref TypeMap.empty in\n  let rec check ty0 parents ty =\n    let ty = Btype.repr ty in\n    if TypeSet.mem ty parents then\n      (*Format.eprintf \"@[%a@]@.\" Printtyp.raw_type_expr ty;*)\n      if\n        match ty0.desc with\n        | Tconstr (p, _, _) -> Path.same p path\n        | _ -> false\n      then raise (Error (loc, Recursive_abbrev (Path.name path)))\n      else raise (Error (loc, Cycle_in_def (Path.name path, ty0)));\n    let fini, parents =\n      try\n        let prev = TypeMap.find ty !visited in\n        if TypeSet.subset parents prev then (true, parents)\n        else (false, TypeSet.union parents prev)\n      with Not_found -> (false, parents)\n    in\n    if fini then ()\n    else\n      let rec_ok =\n        
match ty.desc with\n        | Tconstr (_p, _, _) ->\n          false (*!Clflags.recursive_types && Ctype.is_contractive env p*)\n        | Tobject _ | Tvariant _ -> true\n        | _ -> false (* !Clflags.recursive_types*)\n      in\n      let visited' = TypeMap.add ty parents !visited in\n      let arg_exn =\n        try\n          visited := visited';\n          let parents =\n            if rec_ok then TypeSet.empty else TypeSet.add ty parents\n          in\n          Btype.iter_type_expr (check ty0 parents) ty;\n          None\n        with e ->\n          visited := visited';\n          Some e\n      in\n      match ty.desc with\n      | Tconstr (p, _, _) when arg_exn <> None || to_check p -> (\n        if to_check p then may raise arg_exn\n        else Btype.iter_type_expr (check ty0 TypeSet.empty) ty;\n        try\n          let ty' = Ctype.try_expand_once_opt env ty in\n          let ty0 = if TypeSet.is_empty parents then ty else ty0 in\n          check ty0 (TypeSet.add ty parents) ty'\n        with Ctype.Cannot_expand -> may raise arg_exn)\n      | _ -> may raise arg_exn\n  in\n  let snap = Btype.snapshot () in\n  try Ctype.wrap_trace_gadt_instances env (check ty TypeSet.empty) ty\n  with Ctype.Unify _ ->\n    (* Will be detected by check_recursion *)\n    Btype.backtrack snap\n\nlet check_well_founded_manifest env loc path decl =\n  if decl.type_manifest = None then ()\n  else\n    let args = List.map (fun _ -> Ctype.newvar ()) decl.type_params in\n    check_well_founded env loc path (Path.same path) (Ctype.newconstr path args)\n\nlet check_well_founded_decl env loc path decl to_check =\n  let open Btype in\n  let it =\n    {\n      type_iterators with\n      it_type_expr = (fun _ -> check_well_founded env loc path to_check);\n    }\n  in\n  it.it_type_declaration it (Ctype.instance_declaration decl)\n\n(* Check for ill-defined abbrevs *)\n\nlet check_recursion env loc path decl to_check =\n  (* to_check is true for potentially mutually recursive paths.\n  
   (path, decl) is the type declaration to be checked. *)\n  if decl.type_params = [] then ()\n  else\n    let visited = ref [] in\n\n    let rec check_regular cpath args prev_exp ty =\n      let ty = Ctype.repr ty in\n      if not (List.memq ty !visited) then (\n        visited := ty :: !visited;\n        match ty.desc with\n        | Tconstr (path', args', _) ->\n          (if Path.same path path' then (\n             if not (Ctype.equal env false args args') then\n               raise\n                 (Error\n                    ( loc,\n                      Parameters_differ (cpath, ty, Ctype.newconstr path args)\n                    )))\n           else if\n             (* Attempt to expand a type abbreviation if:\n                 1- [to_check path'] holds\n                    (otherwise the expansion cannot involve [path]);\n                 2- we haven't expanded this type constructor before\n                    (otherwise we could loop if [path'] is itself\n                    a non-regular abbreviation). 
*)\n             to_check path' && not (List.mem path' prev_exp)\n           then\n             try\n               (* Attempt expansion *)\n               let params0, body0, _ = Env.find_type_expansion path' env in\n               let params, body =\n                 Ctype.instance_parameterized_type params0 body0\n               in\n               (try List.iter2 (Ctype.unify env) params args'\n                with Ctype.Unify _ ->\n                  raise\n                    (Error\n                       ( loc,\n                         Constraint_failed (ty, Ctype.newconstr path' params0)\n                       )));\n               check_regular path' args (path' :: prev_exp) body\n             with Not_found -> ());\n          List.iter (check_regular cpath args prev_exp) args'\n        | Tpoly (ty, tl) ->\n          let _, ty = Ctype.instance_poly ~keep_names:true false tl ty in\n          check_regular cpath args prev_exp ty\n        | _ -> Btype.iter_type_expr (check_regular cpath args prev_exp) ty)\n    in\n\n    Misc.may\n      (fun body ->\n        let args, body =\n          Ctype.instance_parameterized_type ~keep_names:true decl.type_params\n            body\n        in\n        check_regular path args [] body)\n      decl.type_manifest\n\nlet check_abbrev_recursion env id_loc_list to_check tdecl =\n  let decl = tdecl.typ_type in\n  let id = tdecl.typ_id in\n  check_recursion env (List.assoc id id_loc_list) (Path.Pident id) decl to_check\n\n(* Compute variance *)\n\nlet get_variance ty visited =\n  try TypeMap.find ty !visited with Not_found -> Variance.null\n\nlet compute_variance env visited vari ty =\n  let rec compute_variance_rec vari ty =\n    (* Format.eprintf \"%a: %x@.\" Printtyp.type_expr ty (Obj.magic vari); *)\n    let ty = Ctype.repr ty in\n    let vari' = get_variance ty visited in\n    if Variance.subset vari vari' then ()\n    else\n      let vari = Variance.union vari vari' in\n      visited := TypeMap.add ty vari !visited;\n      
let compute_same = compute_variance_rec vari in\n      match ty.desc with\n      | Tarrow (_, ty1, ty2, _) ->\n        let open Variance in\n        let v = conjugate vari in\n        let v1 =\n          if mem May_pos v || mem May_neg v then set May_weak true v else v\n        in\n        compute_variance_rec v1 ty1;\n        compute_same ty2\n      | Ttuple tl -> List.iter compute_same tl\n      | Tconstr (path, tl, _) -> (\n        let open Variance in\n        if tl = [] then ()\n        else\n          try\n            let decl = Env.find_type path env in\n            let cvari f = mem f vari in\n            List.iter2\n              (fun ty v ->\n                let cv f = mem f v in\n                let strict =\n                  (cvari Inv && cv Inj) || ((cvari Pos || cvari Neg) && cv Inv)\n                in\n                if strict then compute_variance_rec full ty\n                else\n                  let p1 = inter v vari and n1 = inter v (conjugate vari) in\n                  let v1 =\n                    union\n                      (inter covariant (union p1 (conjugate p1)))\n                      (inter (conjugate covariant) (union n1 (conjugate n1)))\n                  and weak =\n                    (cvari May_weak && (cv May_pos || cv May_neg))\n                    || ((cvari May_pos || cvari May_neg) && cv May_weak)\n                  in\n                  let v2 = set May_weak weak v1 in\n                  compute_variance_rec v2 ty)\n              tl decl.type_variance\n          with Not_found -> List.iter (compute_variance_rec may_inv) tl)\n      | Tobject (ty, _) -> compute_same ty\n      | Tfield (_, _, ty1, ty2) ->\n        compute_same ty1;\n        compute_same ty2\n      | Tsubst ty -> compute_same ty\n      | Tvariant row ->\n        let row = Btype.row_repr row in\n        List.iter\n          (fun (_, f) ->\n            match Btype.row_field_repr f with\n            | Rpresent (Some ty) -> compute_same ty\n            | 
Reither (_, tyl, _, _) ->\n              let open Variance in\n              let upper =\n                List.fold_left\n                  (fun s f -> set f true s)\n                  null\n                  [May_pos; May_neg; May_weak]\n              in\n              let v = inter vari upper in\n              (* cf PR#7269:\n                 if List.length tyl > 1 then upper else inter vari upper *)\n              List.iter (compute_variance_rec v) tyl\n            | _ -> ())\n          row.row_fields;\n        compute_same row.row_more\n      | Tpoly (ty, _) -> compute_same ty\n      | Tvar _ | Tnil | Tlink _ | Tunivar _ -> ()\n      | Tpackage (_, _, tyl) ->\n        let v =\n          Variance.(if mem Pos vari || mem Neg vari then full else may_inv)\n        in\n        List.iter (compute_variance_rec v) tyl\n  in\n  compute_variance_rec vari ty\n\nlet make p n i =\n  let open Variance in\n  set May_pos p (set May_neg n (set May_weak n (set Inj i null)))\n\nlet compute_variance_type env check (required, loc) decl tyl =\n  (* Requirements *)\n  let required =\n    List.map\n      (fun (c, n, i) -> if c || n then (c, n, i) else (true, true, i))\n      required\n  in\n  (* Prepare *)\n  let params = List.map Btype.repr decl.type_params in\n  let tvl = ref TypeMap.empty in\n  (* Compute occurrences in the body *)\n  let open Variance in\n  List.iter\n    (fun (cn, ty) ->\n      compute_variance env tvl (if cn then full else covariant) ty)\n    tyl;\n  if check then (\n    (* Check variance of parameters *)\n    let pos = ref 0 in\n    List.iter2\n      (fun ty (c, n, i) ->\n        incr pos;\n        let var = get_variance ty tvl in\n        let co, cn = get_upper var and ij = mem Inj var in\n        if\n          Btype.is_Tvar ty && ((co && not c) || (cn && not n) || ((not ij) && i))\n        then raise (Error (loc, Bad_variance (!pos, (co, cn, ij), (c, n, i)))))\n      params required;\n    (* Check propagation from constrained parameters *)\n    let args = 
Btype.newgenty (Ttuple params) in\n    let fvl = Ctype.free_variables args in\n    let fvl = Ext_list.filter fvl (fun v -> not (List.memq v params)) in\n    (* If there are no extra variables there is nothing to do *)\n    if fvl = [] then ()\n    else\n      let tvl2 = ref TypeMap.empty in\n      List.iter2\n        (fun ty (p, n, _) ->\n          if Btype.is_Tvar ty then ()\n          else\n            let v =\n              if p then if n then full else covariant else conjugate covariant\n            in\n            compute_variance env tvl2 v ty)\n        params required;\n      let visited = ref TypeSet.empty in\n      let rec check ty =\n        let ty = Ctype.repr ty in\n        if TypeSet.mem ty !visited then ()\n        else\n          let visited' = TypeSet.add ty !visited in\n          visited := visited';\n          let v1 = get_variance ty tvl in\n          let snap = Btype.snapshot () in\n          let v2 =\n            TypeMap.fold\n              (fun t vt v ->\n                if Ctype.equal env false [ty] [t] then union vt v else v)\n              !tvl2 null\n          in\n          Btype.backtrack snap;\n          let c1, n1 = get_upper v1 and c2, n2, _, i2 = get_lower v2 in\n          if (c1 && not c2) || (n1 && not n2) then\n            if List.memq ty fvl then\n              let code = if not i2 then -2 else if c2 || n2 then -1 else -3 in\n              raise\n                (Error\n                   (loc, Bad_variance (code, (c1, n1, false), (c2, n2, false))))\n            else Btype.iter_type_expr check ty\n      in\n      List.iter (fun (_, ty) -> check ty) tyl);\n  List.map2\n    (fun ty (p, n, i) ->\n      let v = get_variance ty tvl in\n      let tr = decl.type_private in\n      (* Use required variance where relevant *)\n      let concr =\n        decl.type_kind <> Type_abstract\n        (*|| tr = Type_new*)\n      in\n      let p, n =\n        if tr = Private || not (Btype.is_Tvar ty) then (p, n) (* set *)\n        else (false, 
false)\n      (* only check *)\n      and i = concr || (i && tr = Private) in\n      let v = union v (make p n i) in\n      let v =\n        if not concr then v\n        else if mem Pos v && mem Neg v then full\n        else if Btype.is_Tvar ty then v\n        else\n          union v\n            (if p then if n then full else covariant else conjugate covariant)\n      in\n      if decl.type_kind = Type_abstract && tr = Public then v\n      else set May_weak (mem May_neg v) v)\n    params required\n\nlet add_false = List.map (fun ty -> (false, ty))\n\n(* A parameter is constrained if it is either instantiated,\n   or it is a variable appearing in another parameter *)\nlet constrained vars ty =\n  match ty.desc with\n  | Tvar _ -> List.exists (fun tl -> List.memq ty tl) vars\n  | _ -> true\n\nlet for_constr = function\n  | Types.Cstr_tuple l -> add_false l\n  | Types.Cstr_record l ->\n    List.map\n      (fun {Types.ld_mutable; ld_type} -> (ld_mutable = Mutable, ld_type))\n      l\n\nlet compute_variance_gadt env check ((required, loc) as rloc) decl\n    (tl, ret_type_opt) =\n  match ret_type_opt with\n  | None ->\n    compute_variance_type env check rloc\n      {decl with type_private = Private}\n      (for_constr tl)\n  | Some ret_type -> (\n    match Ctype.repr ret_type with\n    | {desc = Tconstr (_, tyl, _)} ->\n      (* let tyl = List.map (Ctype.expand_head env) tyl in *)\n      let tyl = List.map Ctype.repr tyl in\n      let fvl = List.map (Ctype.free_variables ?env:None) tyl in\n      let _ =\n        List.fold_left2\n          (fun (fv1, fv2) ty (c, n, _) ->\n            match fv2 with\n            | [] -> assert false\n            | fv :: fv2 ->\n              (* fv1 @ fv2 = free_variables of other parameters *)\n              if (c || n) && constrained (fv1 @ fv2) ty then\n                raise (Error (loc, Varying_anonymous));\n              (fv :: fv1, fv2))\n          ([], fvl) tyl required\n      in\n      compute_variance_type env check rloc\n        
{decl with type_params = tyl; type_private = Private}\n        (for_constr tl)\n    | _ -> assert false)\n\nlet compute_variance_extension env check decl ext rloc =\n  compute_variance_gadt env check rloc\n    {decl with type_params = ext.ext_type_params}\n    (ext.ext_args, ext.ext_ret_type)\n\nlet compute_variance_decl env check decl ((required, _) as rloc) =\n  if\n    (decl.type_kind = Type_abstract || decl.type_kind = Type_open)\n    && decl.type_manifest = None\n  then\n    List.map\n      (fun (c, n, i) ->\n        make (not n) (not c) (decl.type_kind <> Type_abstract || i))\n      required\n  else\n    let mn =\n      match decl.type_manifest with\n      | None -> []\n      | Some ty -> [(false, ty)]\n    in\n    match decl.type_kind with\n    | Type_abstract | Type_open -> compute_variance_type env check rloc decl mn\n    | Type_variant tll -> (\n      if List.for_all (fun c -> c.Types.cd_res = None) tll then\n        compute_variance_type env check rloc decl\n          (mn\n          @ List.flatten (List.map (fun c -> for_constr c.Types.cd_args) tll))\n      else\n        let mn = List.map (fun (_, ty) -> (Types.Cstr_tuple [ty], None)) mn in\n        let tll =\n          mn @ List.map (fun c -> (c.Types.cd_args, c.Types.cd_res)) tll\n        in\n        match List.map (compute_variance_gadt env check rloc decl) tll with\n        | vari :: rem ->\n          let varl = List.fold_left (List.map2 Variance.union) vari rem in\n          List.map\n            Variance.(fun v -> if mem Pos v && mem Neg v then full else v)\n            varl\n        | _ -> assert false)\n    | Type_record (ftl, _) ->\n      compute_variance_type env check rloc decl\n        (mn\n        @ List.map\n            (fun {Types.ld_mutable; ld_type} -> (ld_mutable = Mutable, ld_type))\n            ftl)\n\nlet is_hash id =\n  let s = Ident.name id in\n  String.length s > 0 && s.[0] = '#'\n\nlet marked_as_immediate decl = Builtin_attributes.immediate decl.type_attributes\n\nlet 
compute_immediacy env tdecl =\n  match (tdecl.type_kind, tdecl.type_manifest) with\n  | Type_variant [{cd_args = Cstr_tuple [arg]; _}], _\n  | Type_variant [{cd_args = Cstr_record [{ld_type = arg; _}]; _}], _\n  | Type_record ([{ld_type = arg; _}], _), _\n    when tdecl.type_unboxed.unboxed -> (\n    match get_unboxed_type_representation env arg with\n    | Some argrepr -> not (Ctype.maybe_pointer_type env argrepr)\n    | None -> false)\n  | Type_variant (_ :: _ as cstrs), _ ->\n    not (List.exists (fun c -> c.Types.cd_args <> Types.Cstr_tuple []) cstrs)\n  | Type_abstract, Some typ -> not (Ctype.maybe_pointer_type env typ)\n  | Type_abstract, None -> marked_as_immediate tdecl\n  | _ -> false\n\n(* Computes the fixpoint for the variance and immediacy of type declarations *)\n\nlet rec compute_properties_fixpoint env decls required variances immediacies =\n  let new_decls =\n    List.map2\n      (fun (id, decl) (variance, immediacy) ->\n        (id, {decl with type_variance = variance; type_immediate = immediacy}))\n      decls\n      (List.combine variances immediacies)\n  in\n  let new_env =\n    List.fold_right\n      (fun (id, decl) env -> Env.add_type ~check:true id decl env)\n      new_decls env\n  in\n  let new_variances =\n    List.map2\n      (fun (_id, decl) -> compute_variance_decl new_env false decl)\n      new_decls required\n  in\n  let new_variances =\n    List.map2 (List.map2 Variance.union) new_variances variances\n  in\n  let new_immediacies =\n    List.map (fun (_id, decl) -> compute_immediacy new_env decl) new_decls\n  in\n  if new_variances <> variances || new_immediacies <> immediacies then\n    compute_properties_fixpoint env decls required new_variances new_immediacies\n  else (\n    (* List.iter (fun (id, decl) ->\n       Printf.eprintf \"%s:\" (Ident.name id);\n       List.iter (fun (v : Variance.t) ->\n         Printf.eprintf \" %x\" (Obj.magic v : int))\n         decl.type_variance;\n       prerr_endline \"\")\n       new_decls; *)\n    
List.iter\n      (fun (_, decl) ->\n        if marked_as_immediate decl && not decl.type_immediate then\n          raise (Error (decl.type_loc, Bad_immediate_attribute))\n        else ())\n      new_decls;\n    List.iter2\n      (fun (id, decl) req ->\n        if not (is_hash id) then\n          ignore (compute_variance_decl new_env true decl req))\n      new_decls required;\n    (new_decls, new_env))\n\nlet init_variance (_id, decl) =\n  List.map (fun _ -> Variance.null) decl.type_params\n\nlet add_injectivity =\n  List.map (function\n    | Covariant -> (true, false, false)\n    | Contravariant -> (false, true, false)\n    | Invariant -> (false, false, false))\n\n(* for typeclass.ml *)\nlet compute_variance_decls env cldecls =\n  let decls, required =\n    List.fold_right\n      (fun (obj_id, obj_abbr, _cl_abbr, _clty, _cltydef, ci) (decls, req) ->\n        let variance = List.map snd ci.ci_params in\n        ( (obj_id, obj_abbr) :: decls,\n          (add_injectivity variance, ci.ci_loc) :: req ))\n      cldecls ([], [])\n  in\n  let decls, _ =\n    compute_properties_fixpoint env decls required\n      (List.map init_variance decls)\n      (List.map (fun _ -> false) decls)\n  in\n  List.map2\n    (fun (_, decl) (_, _, cl_abbr, clty, cltydef, _) ->\n      let variance = decl.type_variance in\n      ( decl,\n        {cl_abbr with type_variance = variance},\n        {clty with cty_variance = variance},\n        {cltydef with clty_variance = variance} ))\n    decls cldecls\n\n(* Check multiple declarations of labels/constructors *)\n\nlet check_duplicates sdecl_list =\n  let labels = Hashtbl.create 7 and constrs = Hashtbl.create 7 in\n  List.iter\n    (fun sdecl ->\n      match sdecl.ptype_kind with\n      | Ptype_variant cl ->\n        List.iter\n          (fun pcd ->\n            try\n              let name' = Hashtbl.find constrs pcd.pcd_name.txt in\n              Location.prerr_warning pcd.pcd_loc\n                (Warnings.Duplicate_definitions\n                  
 (\"constructor\", pcd.pcd_name.txt, name', sdecl.ptype_name.txt))\n            with Not_found ->\n              Hashtbl.add constrs pcd.pcd_name.txt sdecl.ptype_name.txt)\n          cl\n      | Ptype_record fl ->\n        List.iter\n          (fun {pld_name = cname; pld_loc = loc} ->\n            try\n              let name' = Hashtbl.find labels cname.txt in\n              if cname.txt <> \"...\" then\n                Location.prerr_warning loc\n                  (Warnings.Duplicate_definitions\n                     (\"label\", cname.txt, name', sdecl.ptype_name.txt))\n            with Not_found ->\n              Hashtbl.add labels cname.txt sdecl.ptype_name.txt)\n          fl\n      | Ptype_abstract -> ()\n      | Ptype_open -> ())\n    sdecl_list\n\n(* Force recursion to go through id for private types*)\nlet name_recursion sdecl id decl =\n  match decl with\n  | {type_kind = Type_abstract; type_manifest = Some ty; type_private = Private}\n    when is_fixed_type sdecl ->\n    let ty = Ctype.repr ty in\n    let ty' = Btype.newty2 ty.level ty.desc in\n    if Ctype.deep_occur ty ty' then (\n      let td = Tconstr (Path.Pident id, decl.type_params, ref Mnil) in\n      Btype.link_type ty (Btype.newty2 ty.level td);\n      {decl with type_manifest = Some ty'})\n    else decl\n  | _ -> decl\n\n(* Translate a set of type declarations, mutually recursive or not *)\nlet transl_type_decl env rec_flag sdecl_list =\n  (* Add dummy types for fixed rows *)\n  let fixed_types = Ext_list.filter sdecl_list is_fixed_type in\n  let sdecl_list =\n    List.map\n      (fun sdecl ->\n        let ptype_name =\n          mkloc (sdecl.ptype_name.txt ^ \"#row\") sdecl.ptype_name.loc\n        in\n        {\n          sdecl with\n          ptype_name;\n          ptype_kind = Ptype_abstract;\n          ptype_manifest = None;\n        })\n      fixed_types\n    @\n    try sdecl_list |> Variant_type_spread.expand_variant_spreads env with\n    | Variant_coercion.VariantConfigurationError\n      
  (VariantError {left_loc} as err) ->\n      raise (Error (left_loc, Variant_runtime_representation_mismatch err))\n    | Variant_type_spread.VariantTypeSpreadError (loc, err) ->\n      raise (Error (loc, Variant_spread_fail err))\n  in\n\n  (* Create identifiers. *)\n  let id_list =\n    List.map (fun sdecl -> Ident.create sdecl.ptype_name.txt) sdecl_list\n  in\n  (*\n     Since we've introduced fresh idents, make sure the definition\n     level is at least the binding time of these events. Otherwise,\n     passing one of the recursively-defined type constrs as argument\n     to an abbreviation may fail.\n  *)\n  Ctype.init_def (Ident.current_time ());\n  Ctype.begin_def ();\n  (* Enter types. *)\n  let temp_env = List.fold_left2 (enter_type rec_flag) env sdecl_list id_list in\n  (* Translate each declaration. *)\n  let current_slot = ref None in\n  let warn_unused = Warnings.is_active (Warnings.Unused_type_declaration \"\") in\n  let id_slots id =\n    match rec_flag with\n    | Asttypes.Recursive when warn_unused ->\n      (* See typecore.ml for a description of the algorithm used\n           to detect unused declarations in a set of recursive definitions. 
*)\n      let slot = ref [] in\n      let td = Env.find_type (Path.Pident id) temp_env in\n      let name = Ident.name id in\n      Env.set_type_used_callback name td (fun old_callback ->\n          match !current_slot with\n          | Some slot -> slot := (name, td) :: !slot\n          | None ->\n            List.iter\n              (fun (name, d) -> Env.mark_type_used env name d)\n              (get_ref slot);\n            old_callback ());\n      (id, Some slot)\n    | Asttypes.Recursive | Asttypes.Nonrecursive -> (id, None)\n  in\n  let type_record_as_object = ref false in\n  let transl_declaration name_sdecl (id, slot) =\n    current_slot := slot;\n    Builtin_attributes.warning_scope name_sdecl.ptype_attributes (fun () ->\n        transl_declaration ~type_record_as_object temp_env name_sdecl id)\n  in\n  let tdecls =\n    List.map2 transl_declaration sdecl_list (List.map id_slots id_list)\n  in\n  let decls = List.map (fun tdecl -> (tdecl.typ_id, tdecl.typ_type)) tdecls in\n  let sdecl_list =\n    Variant_type_spread.expand_dummy_constructor_args sdecl_list decls\n  in\n  current_slot := None;\n  (* Check for duplicates *)\n  check_duplicates sdecl_list;\n  (* Build the final env. *)\n  let newenv =\n    List.fold_right\n      (fun (id, decl) env -> Env.add_type ~check:true id decl env)\n      decls env\n  in\n  (* Update stubs *)\n  (match rec_flag with\n  | Asttypes.Nonrecursive -> ()\n  | Asttypes.Recursive ->\n    List.iter2\n      (fun id sdecl -> update_type temp_env newenv id sdecl.ptype_loc)\n      id_list sdecl_list);\n  (* Generalize type declarations. 
*)\n  Ctype.end_def ();\n  List.iter (fun (_, decl) -> generalize_decl decl) decls;\n  (* Check for ill-formed abbrevs *)\n  let id_loc_list =\n    List.map2 (fun id sdecl -> (id, sdecl.ptype_loc)) id_list sdecl_list\n  in\n  List.iter\n    (fun (id, decl) ->\n      check_well_founded_manifest newenv\n        (List.assoc id id_loc_list)\n        (Path.Pident id) decl)\n    decls;\n  let to_check = function\n    | Path.Pident id -> List.mem_assoc id id_loc_list\n    | _ -> false\n  in\n  List.iter\n    (fun (id, decl) ->\n      check_well_founded_decl newenv\n        (List.assoc id id_loc_list)\n        (Path.Pident id) decl to_check)\n    decls;\n  List.iter (check_abbrev_recursion newenv id_loc_list to_check) tdecls;\n  (* Check that all type variables are closed *)\n  List.iter2\n    (fun sdecl tdecl ->\n      let decl = tdecl.typ_type in\n      match Ctype.closed_type_decl decl with\n      | Some ty -> raise (Error (sdecl.ptype_loc, Unbound_type_var (ty, decl)))\n      | None -> ())\n    sdecl_list tdecls;\n  (* Check that constraints are enforced *)\n  List.iter2 (check_constraints ~type_record_as_object newenv) sdecl_list decls;\n  (* Name recursion *)\n  let decls =\n    List.map2\n      (fun sdecl (id, decl) -> (id, name_recursion sdecl id decl))\n      sdecl_list decls\n  in\n  (* Add variances to the environment *)\n  let required =\n    List.map\n      (fun sdecl ->\n        (add_injectivity (List.map snd sdecl.ptype_params), sdecl.ptype_loc))\n      sdecl_list\n  in\n  let final_decls, final_env =\n    compute_properties_fixpoint env decls required\n      (List.map init_variance decls)\n      (List.map (fun _ -> false) decls)\n  in\n  (* Check re-exportation *)\n  List.iter2 (check_abbrev final_env) sdecl_list final_decls;\n  (* Keep original declaration *)\n  let final_decls =\n    List.map2\n      (fun tdecl (_id2, decl) -> {tdecl with typ_type = decl})\n      tdecls final_decls\n  in\n  (* Done *)\n  (final_decls, final_env)\n\n(* Translating type 
extensions *)\n\nlet transl_extension_constructor env type_path type_params typext_params priv\n    sext =\n  let id = Ident.create sext.pext_name.txt in\n  let args, ret_type, kind =\n    match sext.pext_kind with\n    | Pext_decl (sargs, sret_type) ->\n      let targs, tret_type, args, ret_type, _ =\n        make_constructor env type_path typext_params sargs sret_type\n      in\n      (args, ret_type, Text_decl (targs, tret_type))\n    | Pext_rebind lid ->\n      let cdescr = Typetexp.find_constructor env lid.loc lid.txt in\n      let usage =\n        if cdescr.cstr_private = Private || priv = Public then Env.Positive\n        else Env.Privatize\n      in\n      Env.mark_constructor usage env (Longident.last lid.txt) cdescr;\n      let args, cstr_res = Ctype.instance_constructor cdescr in\n      let res, ret_type =\n        if cdescr.cstr_generalized then\n          let params = Ctype.instance_list env type_params in\n          let res = Ctype.newconstr type_path params in\n          let ret_type = Some (Ctype.newconstr type_path params) in\n          (res, ret_type)\n        else (Ctype.newconstr type_path typext_params, None)\n      in\n      (try Ctype.unify env cstr_res res\n       with Ctype.Unify trace ->\n         raise (Error (lid.loc, Rebind_wrong_type (lid.txt, env, trace))));\n      (* Remove \"_\" names from parameters used in the constructor *)\n      (if not cdescr.cstr_generalized then\n         let vars = Ctype.free_variables (Btype.newgenty (Ttuple args)) in\n         List.iter\n           (function\n             | {desc = Tvar (Some \"_\")} as ty ->\n               if List.memq ty vars then ty.desc <- Tvar None\n             | _ -> ())\n           typext_params);\n      (* Ensure that constructor's type matches the type being extended *)\n      let cstr_type_path, cstr_type_params =\n        match cdescr.cstr_res.desc with\n        | Tconstr (p, _, _) ->\n          let decl = Env.find_type p env in\n          (p, decl.type_params)\n        | _ 
-> assert false\n      in\n      let cstr_types =\n        Btype.newgenty (Tconstr (cstr_type_path, cstr_type_params, ref Mnil))\n        :: cstr_type_params\n      in\n      let ext_types =\n        Btype.newgenty (Tconstr (type_path, type_params, ref Mnil))\n        :: type_params\n      in\n      if not (Ctype.equal env true cstr_types ext_types) then\n        raise\n          (Error (lid.loc, Rebind_mismatch (lid.txt, cstr_type_path, type_path)));\n      (* Disallow rebinding private constructors to non-private *)\n      (match (cdescr.cstr_private, priv) with\n      | Private, Public -> raise (Error (lid.loc, Rebind_private lid.txt))\n      | _ -> ());\n      let path =\n        match cdescr.cstr_tag with\n        | Cstr_extension (path, _) -> path\n        | _ -> assert false\n      in\n      let args =\n        match cdescr.cstr_inlined with\n        | None -> Types.Cstr_tuple args\n        | Some decl ->\n          let tl =\n            match args with\n            | [{desc = Tconstr (_, tl, _)}] -> tl\n            | _ -> assert false\n          in\n          let decl = Ctype.instance_declaration decl in\n          assert (List.length decl.type_params = List.length tl);\n          List.iter2 (Ctype.unify env) decl.type_params tl;\n          let lbls =\n            match decl.type_kind with\n            | Type_record (lbls, Record_extension) -> lbls\n            | _ -> assert false\n          in\n          Types.Cstr_record lbls\n      in\n      (args, ret_type, Text_rebind (path, lid))\n  in\n  let ext =\n    {\n      ext_type_path = type_path;\n      ext_type_params = typext_params;\n      ext_args = args;\n      ext_ret_type = ret_type;\n      ext_private = priv;\n      Types.ext_loc = sext.pext_loc;\n      Types.ext_attributes = sext.pext_attributes;\n    }\n  in\n  {\n    ext_id = id;\n    ext_name = sext.pext_name;\n    ext_type = ext;\n    ext_kind = kind;\n    Typedtree.ext_loc = sext.pext_loc;\n    Typedtree.ext_attributes = sext.pext_attributes;\n  
}\n\nlet transl_extension_constructor env type_path type_params typext_params priv\n    sext =\n  Builtin_attributes.warning_scope sext.pext_attributes (fun () ->\n      transl_extension_constructor env type_path type_params typext_params priv\n        sext)\n\nlet transl_type_extension extend env loc styext =\n  reset_type_variables ();\n  Ctype.begin_def ();\n  let type_path, type_decl =\n    let lid = styext.ptyext_path in\n    Typetexp.find_type env lid.loc lid.txt\n  in\n  (match type_decl.type_kind with\n  | Type_open -> (\n    match type_decl.type_private with\n    | Private when extend -> (\n      match\n        List.find\n          (function\n            | {pext_kind = Pext_decl _} -> true\n            | {pext_kind = Pext_rebind _} -> false)\n          styext.ptyext_constructors\n      with\n      | {pext_loc} ->\n        raise (Error (pext_loc, Cannot_extend_private_type type_path))\n      | exception Not_found -> ())\n    | _ -> ())\n  | _ -> raise (Error (loc, Not_extensible_type type_path)));\n  let type_variance =\n    List.map\n      (fun v ->\n        let co, cn = Variance.get_upper v in\n        (not cn, not co, false))\n      type_decl.type_variance\n  in\n  let err =\n    if type_decl.type_arity <> List.length styext.ptyext_params then\n      [Includecore.Arity]\n    else if\n      List.for_all2\n        (fun (c1, n1, _) (c2, n2, _) -> ((not c2) || c1) && ((not n2) || n1))\n        type_variance\n        (add_injectivity (List.map snd styext.ptyext_params))\n    then []\n    else [Includecore.Variance]\n  in\n  if err <> [] then raise (Error (loc, Extension_mismatch (type_path, err)));\n  let ttype_params = make_params env styext.ptyext_params in\n  let type_params = List.map (fun (cty, _) -> cty.ctyp_type) ttype_params in\n  List.iter2 (Ctype.unify_var env)\n    (Ctype.instance_list env type_decl.type_params)\n    type_params;\n  let constructors =\n    List.map\n      (transl_extension_constructor env type_path type_decl.type_params\n         
type_params styext.ptyext_private)\n      styext.ptyext_constructors\n  in\n  Ctype.end_def ();\n  (* Generalize types *)\n  List.iter Ctype.generalize type_params;\n  List.iter\n    (fun ext ->\n      Btype.iter_type_expr_cstr_args Ctype.generalize ext.ext_type.ext_args;\n      may Ctype.generalize ext.ext_type.ext_ret_type)\n    constructors;\n  (* Check that all type variables are closed *)\n  List.iter\n    (fun ext ->\n      match Ctype.closed_extension_constructor ext.ext_type with\n      | Some ty ->\n        raise (Error (ext.ext_loc, Unbound_type_var_ext (ty, ext.ext_type)))\n      | None -> ())\n    constructors;\n  (* Check variances are correct *)\n  List.iter\n    (fun ext ->\n      ignore\n        (compute_variance_extension env true type_decl ext.ext_type\n           (type_variance, loc)))\n    constructors;\n  (* Add extension constructors to the environment *)\n  let newenv =\n    List.fold_left\n      (fun env ext -> Env.add_extension ~check:true ext.ext_id ext.ext_type env)\n      env constructors\n  in\n  let tyext =\n    {\n      tyext_path = type_path;\n      tyext_txt = styext.ptyext_path;\n      tyext_params = ttype_params;\n      tyext_constructors = constructors;\n      tyext_private = styext.ptyext_private;\n      tyext_attributes = styext.ptyext_attributes;\n    }\n  in\n  (tyext, newenv)\n\nlet transl_type_extension extend env loc styext =\n  Builtin_attributes.warning_scope styext.ptyext_attributes (fun () ->\n      transl_type_extension extend env loc styext)\n\nlet transl_exception env sext =\n  reset_type_variables ();\n  Ctype.begin_def ();\n  let ext =\n    transl_extension_constructor env Predef.path_exn [] [] Asttypes.Public sext\n  in\n  Ctype.end_def ();\n  (* Generalize types *)\n  Btype.iter_type_expr_cstr_args Ctype.generalize ext.ext_type.ext_args;\n  may Ctype.generalize ext.ext_type.ext_ret_type;\n  (* Check that all type variables are closed *)\n  (match Ctype.closed_extension_constructor ext.ext_type with\n  | Some ty 
->\n    raise (Error (ext.ext_loc, Unbound_type_var_ext (ty, ext.ext_type)))\n  | None -> ());\n  let newenv = Env.add_extension ~check:true ext.ext_id ext.ext_type env in\n  (ext, newenv)\n\nlet rec parse_native_repr_attributes env core_type ty =\n  match (core_type.ptyp_desc, (Ctype.repr ty).desc) with\n  | Ptyp_arrow (_, _, ct2), Tarrow (_, _, t2, _) ->\n    let repr_arg = Same_as_ocaml_repr in\n    let repr_args, repr_res = parse_native_repr_attributes env ct2 t2 in\n    (repr_arg :: repr_args, repr_res)\n  | Ptyp_arrow _, _ | _, Tarrow _ -> assert false\n  | _ -> ([], Same_as_ocaml_repr)\n\nlet parse_native_repr_attributes env core_type ty =\n  match (core_type.ptyp_desc, (Ctype.repr ty).desc) with\n  | ( Ptyp_constr\n        ({txt = Lident \"function$\"}, [{ptyp_desc = Ptyp_arrow (_, _, ct2)}; _]),\n      Tconstr\n        (Pident {name = \"function$\"}, [{desc = Tarrow (_, _, t2, _)}; _], _) )\n    ->\n    let repr_args, repr_res = parse_native_repr_attributes env ct2 t2 in\n    let native_repr_args = Same_as_ocaml_repr :: repr_args in\n    (native_repr_args, repr_res)\n  | _ -> parse_native_repr_attributes env core_type ty\n\n(* Translate a value declaration *)\nlet transl_value_decl env loc valdecl =\n  let cty = Typetexp.transl_type_scheme env valdecl.pval_type in\n  let ty = cty.ctyp_type in\n  let v =\n    match valdecl.pval_prim with\n    | [] when Env.is_in_signature env ->\n      {\n        val_type = ty;\n        val_kind = Val_reg;\n        Types.val_loc = loc;\n        val_attributes = valdecl.pval_attributes;\n      }\n    | [] -> raise (Error (valdecl.pval_loc, Val_in_structure))\n    | _ ->\n      let native_repr_args, native_repr_res =\n        let rec scann (attrs : Parsetree.attributes) =\n          match attrs with\n          | ( {txt = \"internal.arity\"; _},\n              PStr\n                [\n                  {\n                    pstr_desc =\n                      Pstr_eval\n                        ( ({pexp_desc = Pexp_constant 
(Pconst_integer (i, _))} :\n                            Parsetree.expression),\n                          _ );\n                  };\n                ] )\n            :: _ ->\n            Some (int_of_string i)\n          | _ :: rest -> scann rest\n          | [] -> None\n        and make n =\n          if n = 0 then [] else Primitive.Same_as_ocaml_repr :: make (n - 1)\n        in\n        match scann valdecl.pval_attributes with\n        | None -> parse_native_repr_attributes env valdecl.pval_type ty\n        | Some x -> (make x, Primitive.Same_as_ocaml_repr)\n      in\n      let prim =\n        Primitive.parse_declaration valdecl ~native_repr_args ~native_repr_res\n      in\n      let prim_native_name = prim.prim_native_name in\n      if\n        prim.prim_arity = 0\n        && (not\n              (String.length prim_native_name >= 20\n              && String.unsafe_get prim_native_name 0 = '\\132'\n              && String.unsafe_get prim_native_name 1 = '\\149'))\n        && (prim.prim_name = \"\"\n           || (prim.prim_name.[0] <> '%' && prim.prim_name.[0] <> '#'))\n      then raise (Error (valdecl.pval_type.ptyp_loc, Null_arity_external));\n      {\n        val_type = ty;\n        val_kind = Val_prim prim;\n        Types.val_loc = loc;\n        val_attributes = valdecl.pval_attributes;\n      }\n  in\n  let id, newenv =\n    Env.enter_value valdecl.pval_name.txt v env ~check:(fun s ->\n        Warnings.Unused_value_declaration s)\n  in\n  let desc =\n    {\n      val_id = id;\n      val_name = valdecl.pval_name;\n      val_desc = cty;\n      val_val = v;\n      val_prim = valdecl.pval_prim;\n      val_loc = valdecl.pval_loc;\n      val_attributes = valdecl.pval_attributes;\n    }\n  in\n  (desc, newenv)\n\nlet transl_value_decl env loc valdecl =\n  Builtin_attributes.warning_scope valdecl.pval_attributes (fun () ->\n      transl_value_decl env loc valdecl)\n\n(* Translate a \"with\" constraint -- much simplified version of\n    transl_type_decl. 
*)\nlet transl_with_constraint env id row_path orig_decl sdecl =\n  Env.mark_type_used env (Ident.name id) orig_decl;\n  reset_type_variables ();\n  Ctype.begin_def ();\n  let tparams = make_params env sdecl.ptype_params in\n  let params = List.map (fun (cty, _) -> cty.ctyp_type) tparams in\n  let orig_decl = Ctype.instance_declaration orig_decl in\n  let arity_ok = List.length params = orig_decl.type_arity in\n  if arity_ok then List.iter2 (Ctype.unify_var env) params orig_decl.type_params;\n  let constraints =\n    List.map\n      (function\n        | ty, ty', loc -> (\n          try\n            let cty = transl_simple_type env false ty in\n            let cty' = transl_simple_type env false ty' in\n            let ty = cty.ctyp_type in\n            let ty' = cty'.ctyp_type in\n            Ctype.unify env ty ty';\n            (cty, cty', loc)\n          with Ctype.Unify tr ->\n            raise (Error (loc, Inconsistent_constraint (env, tr)))))\n      sdecl.ptype_cstrs\n  in\n  let no_row = not (is_fixed_type sdecl) in\n  let tman, man =\n    match sdecl.ptype_manifest with\n    | None -> (None, None)\n    | Some sty ->\n      let cty = transl_simple_type env no_row sty in\n      (Some cty, Some cty.ctyp_type)\n  in\n  let priv =\n    if sdecl.ptype_private = Private then Private\n    else if arity_ok && orig_decl.type_kind <> Type_abstract then\n      orig_decl.type_private\n    else sdecl.ptype_private\n  in\n  if\n    arity_ok\n    && orig_decl.type_kind <> Type_abstract\n    && sdecl.ptype_private = Private\n  then Location.deprecated sdecl.ptype_loc \"spurious use of private\";\n  let type_kind, type_unboxed =\n    if arity_ok && man <> None then (orig_decl.type_kind, orig_decl.type_unboxed)\n    else (Type_abstract, unboxed_false_default_false)\n  in\n  let decl =\n    {\n      type_params = params;\n      type_arity = List.length params;\n      type_kind;\n      type_private = priv;\n      type_manifest = man;\n      type_variance = [];\n      
type_newtype_level = None;\n      type_loc = sdecl.ptype_loc;\n      type_attributes = sdecl.ptype_attributes;\n      type_immediate = false;\n      type_unboxed;\n    }\n  in\n  (match row_path with\n  | None -> ()\n  | Some p -> set_fixed_row env sdecl.ptype_loc p decl);\n  (match Ctype.closed_type_decl decl with\n  | None -> ()\n  | Some ty -> raise (Error (sdecl.ptype_loc, Unbound_type_var (ty, decl))));\n  let decl = name_recursion sdecl id decl in\n  let type_variance =\n    compute_variance_decl env true decl\n      (add_injectivity (List.map snd sdecl.ptype_params), sdecl.ptype_loc)\n  in\n  let type_immediate = compute_immediacy env decl in\n  let decl = {decl with type_variance; type_immediate} in\n  Ctype.end_def ();\n  generalize_decl decl;\n  {\n    typ_id = id;\n    typ_name = sdecl.ptype_name;\n    typ_params = tparams;\n    typ_type = decl;\n    typ_cstrs = constraints;\n    typ_loc = sdecl.ptype_loc;\n    typ_manifest = tman;\n    typ_kind = Ttype_abstract;\n    typ_private = sdecl.ptype_private;\n    typ_attributes = sdecl.ptype_attributes;\n  }\n\n(* Approximate a type declaration: just make all types abstract *)\n\nlet abstract_type_decl arity =\n  let rec make_params n =\n    if n <= 0 then [] else Ctype.newvar () :: make_params (n - 1)\n  in\n  Ctype.begin_def ();\n  let decl =\n    {\n      type_params = make_params arity;\n      type_arity = arity;\n      type_kind = Type_abstract;\n      type_private = Public;\n      type_manifest = None;\n      type_variance = replicate_list Variance.full arity;\n      type_newtype_level = None;\n      type_loc = Location.none;\n      type_attributes = [];\n      type_immediate = false;\n      type_unboxed = unboxed_false_default_false;\n    }\n  in\n  Ctype.end_def ();\n  generalize_decl decl;\n  decl\n\nlet approx_type_decl sdecl_list =\n  List.map\n    (fun sdecl ->\n      ( Ident.create sdecl.ptype_name.txt,\n        abstract_type_decl (List.length sdecl.ptype_params) ))\n    sdecl_list\n\n(* Variant 
of check_abbrev_recursion to check the well-formedness\n   conditions on type abbreviations defined within recursive modules. *)\n\nlet check_recmod_typedecl env loc recmod_ids path decl =\n  (* recmod_ids is the list of recursively-defined module idents.\n     (path, decl) is the type declaration to be checked. *)\n  let to_check path = List.exists (fun id -> Path.isfree id path) recmod_ids in\n  check_well_founded_decl env loc path decl to_check;\n  check_recursion env loc path decl to_check\n\n(**** Error report ****)\n\nopen Format\n\nlet explain_unbound_gen ppf tv tl typ kwd pr =\n  try\n    let ti = List.find (fun ti -> Ctype.deep_occur tv (typ ti)) tl in\n    let ty0 =\n      (* Hack to force aliasing when needed *)\n      Btype.newgenty (Tobject (tv, ref None))\n    in\n    Printtyp.reset_and_mark_loops_list [typ ti; ty0];\n    fprintf ppf \".@.@[<hov2>In %s@ %a@;<1 -2>the variable %a is unbound@]\" kwd\n      pr ti Printtyp.type_expr tv\n  with Not_found -> ()\n\nlet explain_unbound ppf tv tl typ kwd lab =\n  explain_unbound_gen ppf tv tl typ kwd (fun ppf ti ->\n      fprintf ppf \"%s%a\" (lab ti) Printtyp.type_expr (typ ti))\n\nlet explain_unbound_single ppf tv ty =\n  let trivial ty =\n    explain_unbound ppf tv [ty] (fun t -> t) \"type\" (fun _ -> \"\")\n  in\n  match (Ctype.repr ty).desc with\n  | Tobject (fi, _) ->\n    let tl, rv = Ctype.flatten_fields fi in\n    if rv == tv then trivial ty\n    else\n      explain_unbound ppf tv tl\n        (fun (_, _, t) -> t)\n        \"method\"\n        (fun (lab, _, _) -> lab ^ \": \")\n  | Tvariant row ->\n    let row = Btype.row_repr row in\n    if row.row_more == tv then trivial ty\n    else\n      explain_unbound ppf tv row.row_fields\n        (fun (_l, f) ->\n          match Btype.row_field_repr f with\n          | Rpresent (Some t) -> t\n          | Reither (_, [t], _, _) -> t\n          | Reither (_, tl, _, _) -> Btype.newgenty (Ttuple tl)\n          | _ -> Btype.newgenty (Ttuple []))\n        \"case\"\n  
      (fun (lab, _) -> \"`\" ^ lab ^ \" of \")\n  | _ -> trivial ty\n\nlet tys_of_constr_args = function\n  | Types.Cstr_tuple tl -> tl\n  | Types.Cstr_record lbls -> List.map (fun l -> l.Types.ld_type) lbls\n\nlet report_error ppf = function\n  | Repeated_parameter -> fprintf ppf \"A type parameter occurs several times\"\n  | Duplicate_constructor s -> fprintf ppf \"Two constructors are named %s\" s\n  | Duplicate_label (s, None) ->\n    fprintf ppf\n      \"The field @{<info>%s@} is defined several times in this record. Fields \\\n       can only be added once to a record.\"\n      s\n  | Duplicate_label (s, Some record_name) ->\n    fprintf ppf\n      \"The field @{<info>%s@} is defined several times in the record \\\n       @{<info>%s@}. Fields can only be added once to a record.\"\n      s record_name\n  | Recursive_abbrev s -> fprintf ppf \"The type abbreviation %s is cyclic\" s\n  | Cycle_in_def (s, ty) ->\n    Printtyp.reset_and_mark_loops ty;\n    fprintf ppf \"@[<v>The definition of %s contains a cycle:@ %a@]\" s\n      Printtyp.type_expr ty\n  | Definition_mismatch (ty, errs) ->\n    Printtyp.reset_and_mark_loops ty;\n    fprintf ppf \"@[<v>@[<hov>%s@ %s@;<1 2>%a@]%a@]\"\n      \"This variant or record definition\" \"does not match that of type\"\n      Printtyp.type_expr ty\n      (Includecore.report_type_mismatch \"the original\" \"this\" \"definition\")\n      errs\n  | Constraint_failed (ty, ty') ->\n    Printtyp.reset_and_mark_loops ty;\n    Printtyp.mark_loops ty';\n    fprintf ppf \"@[%s@ @[<hv>Type@ %a@ should be an instance of@ %a@]@]\"\n      \"Constraints are not satisfied in this type.\" Printtyp.type_expr ty\n      Printtyp.type_expr ty'\n  | Parameters_differ (path, ty, ty') ->\n    Printtyp.reset_and_mark_loops ty;\n    Printtyp.mark_loops ty';\n    fprintf ppf \"@[<hv>In the definition of %s, type@ %a@ should be@ %a@]\"\n      (Path.name path) Printtyp.type_expr ty Printtyp.type_expr ty'\n  | Inconsistent_constraint (env, trace) ->\n    
fprintf ppf \"The type constraints are not consistent.@.\";\n    Printtyp.report_unification_error ppf env trace\n      (fun ppf -> fprintf ppf \"Type\")\n      (fun ppf -> fprintf ppf \"is not compatible with type\")\n  | Type_clash (env, trace) ->\n    Printtyp.report_unification_error ppf env trace\n      (function\n        | ppf -> fprintf ppf \"This type constructor expands to type\")\n      (function ppf -> fprintf ppf \"but is used here with type\")\n  | Null_arity_external -> fprintf ppf \"External identifiers must be functions\"\n  | Unbound_type_var (ty, decl) -> (\n    fprintf ppf \"A type variable is unbound in this type declaration\";\n    let ty = Ctype.repr ty in\n    match (decl.type_kind, decl.type_manifest) with\n    | Type_variant tl, _ ->\n      explain_unbound_gen ppf ty tl\n        (fun c ->\n          let tl = tys_of_constr_args c.Types.cd_args in\n          Btype.newgenty (Ttuple tl))\n        \"case\"\n        (fun ppf c ->\n          fprintf ppf \"%s of %a\" (Ident.name c.Types.cd_id)\n            Printtyp.constructor_arguments c.Types.cd_args)\n    | Type_record (tl, _), _ ->\n      explain_unbound ppf ty tl\n        (fun l -> l.Types.ld_type)\n        \"field\"\n        (fun l -> Ident.name l.Types.ld_id ^ \": \")\n    | Type_abstract, Some ty' -> explain_unbound_single ppf ty ty'\n    | _ -> ())\n  | Unbound_type_var_ext (ty, ext) ->\n    fprintf ppf \"A type variable is unbound in this extension constructor\";\n    let args = tys_of_constr_args ext.ext_args in\n    explain_unbound ppf ty args (fun c -> c) \"type\" (fun _ -> \"\")\n  | Cannot_extend_private_type path ->\n    fprintf ppf \"@[%s@ %a@]\" \"Cannot extend private type definition\"\n      Printtyp.path path\n  | Not_extensible_type path ->\n    fprintf ppf \"@[%s@ %a@ %s@]\" \"Type definition\" Printtyp.path path\n      \"is not extensible\"\n  | Extension_mismatch (path, errs) ->\n    fprintf ppf \"@[<v>@[<hov>%s@ %s@;<1 2>%s@]%a@]\" \"This extension\"\n      \"does not 
match the definition of type\" (Path.name path)\n      (Includecore.report_type_mismatch \"the type\" \"this extension\" \"definition\")\n      errs\n  | Rebind_wrong_type (lid, env, trace) ->\n    Printtyp.report_unification_error ppf env trace\n      (function\n        | ppf ->\n          fprintf ppf \"The constructor %a@ has type\" Printtyp.longident lid)\n      (function ppf -> fprintf ppf \"but was expected to be of type\")\n  | Rebind_mismatch (lid, p, p') ->\n    fprintf ppf \"@[%s@ %a@ %s@ %s@ %s@ %s@ %s@]\" \"The constructor\"\n      Printtyp.longident lid \"extends type\" (Path.name p)\n      \"whose declaration does not match\" \"the declaration of type\"\n      (Path.name p')\n  | Rebind_private lid ->\n    fprintf ppf \"@[%s@ %a@ %s@]\" \"The constructor\" Printtyp.longident lid\n      \"is private\"\n  | Bad_variance (n, v1, v2) ->\n    let variance (p, n, i) =\n      let inj = if i then \"injective \" else \"\" in\n      match (p, n) with\n      | true, true -> inj ^ \"invariant\"\n      | true, false -> inj ^ \"covariant\"\n      | false, true -> inj ^ \"contravariant\"\n      | false, false -> if inj = \"\" then \"unrestricted\" else inj\n    in\n    let suffix n =\n      let teen = n mod 100 / 10 = 1 in\n      match n mod 10 with\n      | 1 when not teen -> \"st\"\n      | 2 when not teen -> \"nd\"\n      | 3 when not teen -> \"rd\"\n      | _ -> \"th\"\n    in\n    if n = -1 then\n      fprintf ppf \"@[%s@ %s@ It\"\n        \"In this definition, a type variable has a variance that\"\n        \"is not reflected by its occurrence in type parameters.\"\n    else if n = -2 then\n      fprintf ppf \"@[%s@ %s@]\"\n        \"In this definition, a type variable cannot be deduced\"\n        \"from the type parameters.\"\n    else if n = -3 then\n      fprintf ppf \"@[%s@ %s@ It\"\n        \"In this definition, a type variable has a variance that\"\n        \"cannot be deduced from the type parameters.\"\n    else\n      fprintf ppf \"@[%s@ %s@ The %d%s 
type parameter\"\n        \"In this definition, expected parameter\" \"variances are not satisfied.\"\n        n (suffix n);\n    if n <> -2 then\n      fprintf ppf \" was expected to be %s,@ but it is %s.@]\" (variance v2)\n        (variance v1)\n  | Unavailable_type_constructor p ->\n    fprintf ppf \"The definition of type %a@ is unavailable\" Printtyp.path p\n  | Bad_fixed_type r -> fprintf ppf \"This fixed type %s\" r\n  | Varying_anonymous ->\n    fprintf ppf \"@[%s@ %s@ %s@]\" \"In this GADT definition,\"\n      \"the variance of some parameter\" \"cannot be checked\"\n  | Val_in_structure ->\n    fprintf ppf \"Value declarations are only allowed in signatures\"\n  | Bad_immediate_attribute ->\n    fprintf ppf \"@[%s@ %s@]\" \"Types marked with the immediate attribute must be\"\n      \"non-pointer types like int or bool\"\n  | Bad_unboxed_attribute msg ->\n    fprintf ppf \"@[This type cannot be unboxed because@ %s.@]\" msg\n  | Boxed_and_unboxed ->\n    fprintf ppf \"@[A type cannot be boxed and unboxed at the same time.@]\"\n  | Nonrec_gadt ->\n    fprintf ppf \"@[GADT case syntax cannot be used in a 'nonrec' block.@]\"\n  | Variant_runtime_representation_mismatch\n      (Variant_coercion.VariantError\n         {\n           is_spread_context;\n           error = Variant_coercion.Untagged {left_is_unboxed};\n         }) ->\n    let other_variant_text =\n      if is_spread_context then \"the variant where this is spread\"\n      else \"the other variant\"\n    in\n    fprintf ppf \"@[%s.@]\"\n      (\"This variant is \"\n      ^ (if left_is_unboxed then \"unboxed\" else \"not unboxed\")\n      ^ \", but \" ^ other_variant_text\n      ^ \" is not. 
Both variants unboxed configuration must match\")\n  | Variant_runtime_representation_mismatch\n      (Variant_coercion.VariantError\n         {is_spread_context; error = Variant_coercion.TagName _}) ->\n    let other_variant_text =\n      if is_spread_context then \"the variant where this is spread\"\n      else \"the other variant\"\n    in\n    fprintf ppf \"@[%s.@]\"\n      (\"The @tag attribute does not match for this variant and \"\n     ^ other_variant_text\n     ^ \". Both variants must have the same @tag attribute configuration, or no \\\n        @tag attribute at all\")\n  | Variant_spread_fail Variant_type_spread.CouldNotFindType ->\n    fprintf ppf\n      \"@[This type could not be found. It's only possible to spread variants \\\n       that are known as the spread happens. This means for example that you \\\n       can't spread variants in recursive definitions.@]\"\n  | Variant_spread_fail Variant_type_spread.HasTypeParams ->\n    fprintf ppf \"@[Type parameters are not supported in variant type spreads.@]\"\n  | Variant_spread_fail\n      (Variant_type_spread.DuplicateConstructor\n         {variant_with_overlapping_constructor; overlapping_constructor_name})\n    ->\n    fprintf ppf\n      \"@[Variant %s has a constructor named %s, but a constructor named %s \\\n       already exists in the variant it's spread into.@ You cannot spread \\\n       variants with overlapping constructors.@]\"\n      variant_with_overlapping_constructor overlapping_constructor_name\n      overlapping_constructor_name\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, err) -> Some (Location.error_of_printer loc report_error err)\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/typedecl.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Typing of type definitions and primitive definitions *)\n\nopen Types\nopen Format\n\nval transl_type_decl :\n  Env.t ->\n  Asttypes.rec_flag ->\n  Parsetree.type_declaration list ->\n  Typedtree.type_declaration list * Env.t\n\nval transl_exception :\n  Env.t ->\n  Parsetree.extension_constructor ->\n  Typedtree.extension_constructor * Env.t\n\nval transl_type_extension :\n  bool ->\n  Env.t ->\n  Location.t ->\n  Parsetree.type_extension ->\n  Typedtree.type_extension * Env.t\n\nval transl_value_decl :\n  Env.t ->\n  Location.t ->\n  Parsetree.value_description ->\n  Typedtree.value_description * Env.t\n\nval transl_with_constraint :\n  Env.t ->\n  Ident.t ->\n  Path.t option ->\n  Types.type_declaration ->\n  Parsetree.type_declaration ->\n  Typedtree.type_declaration\n\nval abstract_type_decl : int -> type_declaration\nval approx_type_decl :\n  
Parsetree.type_declaration list -> (Ident.t * type_declaration) list\nval check_recmod_typedecl :\n  Env.t -> Location.t -> Ident.t list -> Path.t -> type_declaration -> unit\nval check_coherence : Env.t -> Location.t -> Ident.t -> type_declaration -> unit\n\n(* for fixed types *)\nval is_fixed_type : Parsetree.type_declaration -> bool\n\n(* for typeclass.ml *)\nval compute_variance_decls :\n  Env.t ->\n  (Ident.t\n  * Types.type_declaration\n  * Types.type_declaration\n  * Types.class_declaration\n  * Types.class_type_declaration\n  * 'a Typedtree.class_infos)\n  list ->\n  (Types.type_declaration\n  * Types.type_declaration\n  * Types.class_declaration\n  * Types.class_type_declaration)\n  list\n\n(* for typeopt.ml *)\nval get_unboxed_type_representation : Env.t -> type_expr -> type_expr option\n\ntype native_repr_kind = Unboxed | Untagged\n\ntype error\n\nexception Error of Location.t * error\n\nval report_error : formatter -> error -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/typedtree.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Abstract syntax tree after typing *)\n\nopen Misc\nopen Asttypes\nopen Types\n\n(* Value expressions for the core language *)\n\ntype partial = Partial | Total\n\ntype attribute = Parsetree.attribute\ntype attributes = attribute list\n\ntype pattern = {\n  pat_desc: pattern_desc;\n  pat_loc: Location.t;\n  pat_extra: (pat_extra * Location.t * attribute list) list;\n  pat_type: type_expr;\n  mutable pat_env: Env.t;\n  pat_attributes: attribute list;\n}\n\nand pat_extra =\n  | Tpat_constraint of core_type\n  | Tpat_type of Path.t * Longident.t loc\n  | Tpat_open of Path.t * Longident.t loc * Env.t\n  | Tpat_unpack\n\nand pattern_desc =\n  | Tpat_any\n  | Tpat_var of Ident.t * string loc\n  | Tpat_alias of pattern * Ident.t * string loc\n  | Tpat_constant of constant\n  | Tpat_tuple of pattern list\n  | Tpat_construct of Longident.t loc * constructor_description * pattern 
list\n  | Tpat_variant of label * pattern option * row_desc ref\n  | Tpat_record of\n      (Longident.t loc * label_description * pattern) list * closed_flag\n  | Tpat_array of pattern list\n  | Tpat_or of pattern * pattern * row_desc option\n  | Tpat_lazy of pattern\n\nand expression = {\n  exp_desc: expression_desc;\n  exp_loc: Location.t;\n  exp_extra: (exp_extra * Location.t * attribute list) list;\n  exp_type: type_expr;\n  exp_env: Env.t;\n  exp_attributes: attribute list;\n}\n\nand exp_extra =\n  | Texp_constraint of core_type\n  | Texp_coerce of core_type option * core_type\n  | Texp_open of override_flag * Path.t * Longident.t loc * Env.t\n  | Texp_poly of core_type option\n  | Texp_newtype of string\n\nand expression_desc =\n  | Texp_ident of Path.t * Longident.t loc * Types.value_description\n  | Texp_constant of constant\n  | Texp_let of rec_flag * value_binding list * expression\n  | Texp_function of {\n      arg_label: arg_label;\n      param: Ident.t;\n      cases: case list;\n      partial: partial;\n    }\n  | Texp_apply of expression * (arg_label * expression option) list\n  | Texp_match of expression * case list * case list * partial\n  | Texp_try of expression * case list\n  | Texp_tuple of expression list\n  | Texp_construct of\n      Longident.t loc * constructor_description * expression list\n  | Texp_variant of label * expression option\n  | Texp_record of {\n      fields: (Types.label_description * record_label_definition) array;\n      representation: Types.record_representation;\n      extended_expression: expression option;\n    }\n  | Texp_field of expression * Longident.t loc * label_description\n  | Texp_setfield of\n      expression * Longident.t loc * label_description * expression\n  | Texp_array of expression list\n  | Texp_ifthenelse of expression * expression * expression option\n  | Texp_sequence of expression * expression\n  | Texp_while of expression * expression\n  | Texp_for of\n      Ident.t\n      * Parsetree.pattern\n    
  * expression\n      * expression\n      * direction_flag\n      * expression\n  | Texp_send of expression * meth * expression option\n  | Texp_new of unit\n  | Texp_instvar of unit\n  | Texp_setinstvar of unit\n  | Texp_override of unit\n  | Texp_letmodule of Ident.t * string loc * module_expr * expression\n  | Texp_letexception of extension_constructor * expression\n  | Texp_assert of expression\n  | Texp_lazy of expression\n  | Texp_object of unit\n  | Texp_pack of module_expr\n  | Texp_unreachable\n  | Texp_extension_constructor of Longident.t loc * Path.t\n\nand meth = Tmeth_name of string\n\nand case = {c_lhs: pattern; c_guard: expression option; c_rhs: expression}\n\nand record_label_definition =\n  | Kept of Types.type_expr\n  | Overridden of Longident.t loc * expression\n\n(* Value expressions for the class language *)\n\n(* Value expressions for the module language *)\nand module_expr = {\n  mod_desc: module_expr_desc;\n  mod_loc: Location.t;\n  mod_type: Types.module_type;\n  mod_env: Env.t;\n  mod_attributes: attribute list;\n}\n\nand module_type_constraint =\n  | Tmodtype_implicit\n  | Tmodtype_explicit of module_type\n\nand module_expr_desc =\n  | Tmod_ident of Path.t * Longident.t loc\n  | Tmod_structure of structure\n  | Tmod_functor of Ident.t * string loc * module_type option * module_expr\n  | Tmod_apply of module_expr * module_expr * module_coercion\n  | Tmod_constraint of\n      module_expr * Types.module_type * module_type_constraint * module_coercion\n  | Tmod_unpack of expression * Types.module_type\n\nand structure = {\n  str_items: structure_item list;\n  str_type: Types.signature;\n  str_final_env: Env.t;\n}\n\nand structure_item = {\n  str_desc: structure_item_desc;\n  str_loc: Location.t;\n  str_env: Env.t;\n}\n\nand structure_item_desc =\n  | Tstr_eval of expression * attributes\n  | Tstr_value of rec_flag * value_binding list\n  | Tstr_primitive of value_description\n  | Tstr_type of rec_flag * type_declaration list\n  | Tstr_typext 
of type_extension\n  | Tstr_exception of extension_constructor\n  | Tstr_module of module_binding\n  | Tstr_recmodule of module_binding list\n  | Tstr_modtype of module_type_declaration\n  | Tstr_open of open_description\n  | Tstr_class of unit\n  | Tstr_class_type of (Ident.t * string loc * class_type_declaration) list\n  | Tstr_include of include_declaration\n  | Tstr_attribute of attribute\n\nand module_binding = {\n  mb_id: Ident.t;\n  mb_name: string loc;\n  mb_expr: module_expr;\n  mb_attributes: attribute list;\n  mb_loc: Location.t;\n}\n\nand value_binding = {\n  vb_pat: pattern;\n  vb_expr: expression;\n  vb_attributes: attributes;\n  vb_loc: Location.t;\n}\n\nand module_coercion =\n  | Tcoerce_none\n  | Tcoerce_structure of\n      (int * module_coercion) list\n      * (Ident.t * int * module_coercion) list\n      * string list (* runtime fields *)\n  | Tcoerce_functor of module_coercion * module_coercion\n  | Tcoerce_primitive of primitive_coercion\n  | Tcoerce_alias of Path.t * module_coercion\n\nand module_type = {\n  mty_desc: module_type_desc;\n  mty_type: Types.module_type;\n  mty_env: Env.t;\n  mty_loc: Location.t;\n  mty_attributes: attribute list;\n}\n\nand module_type_desc =\n  | Tmty_ident of Path.t * Longident.t loc\n  | Tmty_signature of signature\n  | Tmty_functor of Ident.t * string loc * module_type option * module_type\n  | Tmty_with of module_type * (Path.t * Longident.t loc * with_constraint) list\n  | Tmty_typeof of module_expr\n  | Tmty_alias of Path.t * Longident.t loc\n\n(* Keep primitive type information for type-based lambda-code specialization *)\nand primitive_coercion = {\n  pc_desc: Primitive.description;\n  pc_type: type_expr;\n  pc_env: Env.t;\n  pc_loc: Location.t;\n  pc_id: Ident.t; (*RE:Added *)\n}\n\nand signature = {\n  sig_items: signature_item list;\n  sig_type: Types.signature;\n  sig_final_env: Env.t;\n}\n\nand signature_item = {\n  sig_desc: signature_item_desc;\n  sig_env: Env.t; (* BINANNOT ADDED *)\n  sig_loc: 
Location.t;\n}\n\nand signature_item_desc =\n  | Tsig_value of value_description\n  | Tsig_type of rec_flag * type_declaration list\n  | Tsig_typext of type_extension\n  | Tsig_exception of extension_constructor\n  | Tsig_module of module_declaration\n  | Tsig_recmodule of module_declaration list\n  | Tsig_modtype of module_type_declaration\n  | Tsig_open of open_description\n  | Tsig_include of include_description\n  | Tsig_class of unit\n  | Tsig_class_type of class_type_declaration list\n  | Tsig_attribute of attribute\n\nand module_declaration = {\n  md_id: Ident.t;\n  md_name: string loc;\n  md_type: module_type;\n  md_attributes: attribute list;\n  md_loc: Location.t;\n}\n\nand module_type_declaration = {\n  mtd_id: Ident.t;\n  mtd_name: string loc;\n  mtd_type: module_type option;\n  mtd_attributes: attribute list;\n  mtd_loc: Location.t;\n}\n\nand open_description = {\n  open_path: Path.t;\n  open_txt: Longident.t loc;\n  open_override: override_flag;\n  open_loc: Location.t;\n  open_attributes: attribute list;\n}\n\nand 'a include_infos = {\n  incl_mod: 'a;\n  incl_type: Types.signature;\n  incl_loc: Location.t;\n  incl_attributes: attribute list;\n}\n\nand include_description = module_type include_infos\n\nand include_declaration = module_expr include_infos\n\nand with_constraint =\n  | Twith_type of type_declaration\n  | Twith_module of Path.t * Longident.t loc\n  | Twith_typesubst of type_declaration\n  | Twith_modsubst of Path.t * Longident.t loc\n\nand core_type = {\n  (* mutable because of [Typeclass.declare_method] *)\n  mutable ctyp_desc: core_type_desc;\n  mutable ctyp_type: type_expr;\n  ctyp_env: Env.t; (* BINANNOT ADDED *)\n  ctyp_loc: Location.t;\n  ctyp_attributes: attribute list;\n}\n\nand core_type_desc =\n  | Ttyp_any\n  | Ttyp_var of string\n  | Ttyp_arrow of arg_label * core_type * core_type\n  | Ttyp_tuple of core_type list\n  | Ttyp_constr of Path.t * Longident.t loc * core_type list\n  | Ttyp_object of object_field list * 
closed_flag\n  | Ttyp_class of Path.t * Longident.t loc * core_type list\n  | Ttyp_alias of core_type * string\n  | Ttyp_variant of row_field list * closed_flag * label list option\n  | Ttyp_poly of string list * core_type\n  | Ttyp_package of package_type\n\nand package_type = {\n  pack_path: Path.t;\n  pack_fields: (Longident.t loc * core_type) list;\n  pack_type: Types.module_type;\n  pack_txt: Longident.t loc;\n}\n\nand row_field =\n  | Ttag of string loc * attributes * bool * core_type list\n  | Tinherit of core_type\n\nand object_field =\n  | OTtag of string loc * attributes * core_type\n  | OTinherit of core_type\n\nand value_description = {\n  val_id: Ident.t;\n  val_name: string loc;\n  val_desc: core_type;\n  val_val: Types.value_description;\n  val_prim: string list;\n  val_loc: Location.t;\n  val_attributes: attribute list;\n}\n\nand type_declaration = {\n  typ_id: Ident.t;\n  typ_name: string loc;\n  typ_params: (core_type * variance) list;\n  typ_type: Types.type_declaration;\n  typ_cstrs: (core_type * core_type * Location.t) list;\n  typ_kind: type_kind;\n  typ_private: private_flag;\n  typ_manifest: core_type option;\n  typ_loc: Location.t;\n  typ_attributes: attribute list;\n}\n\nand type_kind =\n  | Ttype_abstract\n  | Ttype_variant of constructor_declaration list\n  | Ttype_record of label_declaration list\n  | Ttype_open\n\nand label_declaration = {\n  ld_id: Ident.t;\n  ld_name: string loc;\n  ld_mutable: mutable_flag;\n  ld_type: core_type;\n  ld_loc: Location.t;\n  ld_attributes: attribute list;\n}\n\nand constructor_declaration = {\n  cd_id: Ident.t;\n  cd_name: string loc;\n  cd_args: constructor_arguments;\n  cd_res: core_type option;\n  cd_loc: Location.t;\n  cd_attributes: attribute list;\n}\n\nand constructor_arguments =\n  | Cstr_tuple of core_type list\n  | Cstr_record of label_declaration list\n\nand type_extension = {\n  tyext_path: Path.t;\n  tyext_txt: Longident.t loc;\n  tyext_params: (core_type * variance) list;\n  
tyext_constructors: extension_constructor list;\n  tyext_private: private_flag;\n  tyext_attributes: attribute list;\n}\n\nand extension_constructor = {\n  ext_id: Ident.t;\n  ext_name: string loc;\n  ext_type: Types.extension_constructor;\n  ext_kind: extension_constructor_kind;\n  ext_loc: Location.t;\n  ext_attributes: attribute list;\n}\n\nand extension_constructor_kind =\n  | Text_decl of constructor_arguments * core_type option\n  | Text_rebind of Path.t * Longident.t loc\n\nand class_type = {\n  cltyp_desc: class_type_desc;\n  cltyp_type: Types.class_type;\n  cltyp_env: Env.t;\n  cltyp_loc: Location.t;\n  cltyp_attributes: attribute list;\n}\n\nand class_type_desc =\n  | Tcty_constr of Path.t * Longident.t loc * core_type list\n  | Tcty_signature of class_signature\n  | Tcty_arrow of arg_label * core_type * class_type\n  | Tcty_open of override_flag * Path.t * Longident.t loc * Env.t * class_type\n\nand class_signature = {\n  csig_self: core_type;\n  csig_fields: class_type_field list;\n  csig_type: Types.class_signature;\n}\n\nand class_type_field = {\n  ctf_desc: class_type_field_desc;\n  ctf_loc: Location.t;\n  ctf_attributes: attribute list;\n}\n\nand class_type_field_desc =\n  | Tctf_inherit of class_type\n  | Tctf_val of (string * mutable_flag * virtual_flag * core_type)\n  | Tctf_method of (string * private_flag * virtual_flag * core_type)\n  | Tctf_constraint of (core_type * core_type)\n  | Tctf_attribute of attribute\n\nand class_description = class_type class_infos\n\nand class_type_declaration = class_type class_infos\n\nand 'a class_infos = {\n  ci_virt: virtual_flag;\n  ci_params: (core_type * variance) list;\n  ci_id_name: string loc;\n  ci_id_class: Ident.t;\n  ci_id_class_type: Ident.t;\n  ci_id_object: Ident.t;\n  ci_id_typehash: Ident.t;\n  ci_expr: 'a;\n  ci_decl: Types.class_declaration;\n  ci_type_decl: Types.class_type_declaration;\n  ci_loc: Location.t;\n  ci_attributes: attribute list;\n}\n\n(* Auxiliary functions over the a.s.t. 
*)\n\nlet iter_pattern_desc f = function\n  | Tpat_alias (p, _, _) -> f p\n  | Tpat_tuple patl -> List.iter f patl\n  | Tpat_construct (_, _, patl) -> List.iter f patl\n  | Tpat_variant (_, pat, _) -> may f pat\n  | Tpat_record (lbl_pat_list, _) ->\n    List.iter (fun (_, _, pat) -> f pat) lbl_pat_list\n  | Tpat_array patl -> List.iter f patl\n  | Tpat_or (p1, p2, _) ->\n    f p1;\n    f p2\n  | Tpat_lazy p -> f p\n  | Tpat_any | Tpat_var _ | Tpat_constant _ -> ()\n\nlet map_pattern_desc f d =\n  match d with\n  | Tpat_alias (p1, id, s) -> Tpat_alias (f p1, id, s)\n  | Tpat_tuple pats -> Tpat_tuple (List.map f pats)\n  | Tpat_record (lpats, closed) ->\n    Tpat_record (List.map (fun (lid, l, p) -> (lid, l, f p)) lpats, closed)\n  | Tpat_construct (lid, c, pats) -> Tpat_construct (lid, c, List.map f pats)\n  | Tpat_array pats -> Tpat_array (List.map f pats)\n  | Tpat_lazy p1 -> Tpat_lazy (f p1)\n  | Tpat_variant (x1, Some p1, x2) -> Tpat_variant (x1, Some (f p1), x2)\n  | Tpat_or (p1, p2, path) -> Tpat_or (f p1, f p2, path)\n  | Tpat_var _ | Tpat_constant _ | Tpat_any | Tpat_variant (_, None, _) -> d\n\n(* List the identifiers bound by a pattern or a let *)\n\nlet idents = ref ([] : (Ident.t * string loc) list)\n\nlet rec bound_idents pat =\n  match pat.pat_desc with\n  | Tpat_var (id, s) -> idents := (id, s) :: !idents\n  | Tpat_alias (p, id, s) ->\n    bound_idents p;\n    idents := (id, s) :: !idents\n  | Tpat_or (p1, _, _) ->\n    (* Invariant : both arguments binds the same variables *)\n    bound_idents p1\n  | d -> iter_pattern_desc bound_idents d\n\nlet pat_bound_idents pat =\n  idents := [];\n  bound_idents pat;\n  let res = !idents in\n  idents := [];\n  List.map fst res\n\nlet rev_let_bound_idents_with_loc bindings =\n  idents := [];\n  List.iter (fun vb -> bound_idents vb.vb_pat) bindings;\n  let res = !idents in\n  idents := [];\n  res\n\nlet let_bound_idents_with_loc pat_expr_list =\n  List.rev (rev_let_bound_idents_with_loc pat_expr_list)\n\nlet 
rev_let_bound_idents pat = List.map fst (rev_let_bound_idents_with_loc pat)\nlet let_bound_idents pat = List.map fst (let_bound_idents_with_loc pat)\n\nlet alpha_var env id = List.assoc id env\n\nlet rec alpha_pat env p =\n  match p.pat_desc with\n  | Tpat_var (id, s) ->\n    (* note the ``Not_found'' case *)\n    {\n      p with\n      pat_desc =\n        (try Tpat_var (alpha_var env id, s) with Not_found -> Tpat_any);\n    }\n  | Tpat_alias (p1, id, s) -> (\n    let new_p = alpha_pat env p1 in\n    try {p with pat_desc = Tpat_alias (new_p, alpha_var env id, s)}\n    with Not_found -> new_p)\n  | d -> {p with pat_desc = map_pattern_desc (alpha_pat env) d}\n\nlet mkloc = Location.mkloc\nlet mknoloc = Location.mknoloc\n"
  },
  {
    "path": "analysis/vendor/ml/typedtree.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Abstract syntax tree after typing *)\n\n(** By comparison with {!Parsetree}:\n    - Every {!Longindent.t} is accompanied by a resolved {!Path.t}. *)\n\nopen Asttypes\nopen Types\n\n(* Value expressions for the core language *)\n\ntype partial = Partial | Total\n\n(** {1 Extension points} *)\n\ntype attribute = Parsetree.attribute\ntype attributes = attribute list\n\n(** {1 Core language} *)\n\ntype pattern = {\n  pat_desc: pattern_desc;\n  pat_loc: Location.t;\n  pat_extra: (pat_extra * Location.t * attributes) list;\n  pat_type: type_expr;\n  mutable pat_env: Env.t;\n  pat_attributes: attributes;\n}\n\nand pat_extra =\n  | Tpat_constraint of core_type\n      (** P : T          { pat_desc = P\n                           ; pat_extra = (Tpat_constraint T, _, _) :: ... 
}\n         *)\n  | Tpat_type of Path.t * Longident.t loc\n      (** #tconst        { pat_desc = disjunction\n                           ; pat_extra = (Tpat_type (P, \"tconst\"), _, _) :: ...}\n\n                           where [disjunction] is a [Tpat_or _] representing the\n                           branches of [tconst].\n         *)\n  | Tpat_open of Path.t * Longident.t loc * Env.t\n  | Tpat_unpack\n      (** (module P)     { pat_desc  = Tpat_var \"P\"\n                           ; pat_extra = (Tpat_unpack, _, _) :: ... }\n         *)\n\nand pattern_desc =\n  | Tpat_any  (** _ *)\n  | Tpat_var of Ident.t * string loc  (** x *)\n  | Tpat_alias of pattern * Ident.t * string loc  (** P as a *)\n  | Tpat_constant of constant  (** 1, 'a', \"true\", 1.0, 1l, 1L, 1n *)\n  | Tpat_tuple of pattern list\n      (** (P1, ..., Pn)\n\n          Invariant: n >= 2 *)\n  | Tpat_construct of Longident.t loc * constructor_description * pattern list\n      (** C [] C P [P] C (P1, ..., Pn) [P1; ...; Pn] *)\n  | Tpat_variant of label * pattern option * row_desc ref\n      (** `A (None) `A P (Some P)\n\n          See {!Types.row_desc} for an explanation of the last parameter. *)\n  | Tpat_record of\n      (Longident.t loc * label_description * pattern) list * closed_flag\n      (** { l1=P1; ...; ln=Pn }     (flag = Closed)\n            { l1=P1; ...; ln=Pn; _}   (flag = Open)\n\n            Invariant: n > 0\n         *)\n  | Tpat_array of pattern list  (** [| P1; ...; Pn |] *)\n  | Tpat_or of pattern * pattern * row_desc option\n      (** P1 | P2\n\n          [row_desc] = [Some _] when translating [Ppat_type _], [None]\n          otherwise. 
*)\n  | Tpat_lazy of pattern  (** lazy P *)\n\nand expression = {\n  exp_desc: expression_desc;\n  exp_loc: Location.t;\n  exp_extra: (exp_extra * Location.t * attributes) list;\n  exp_type: type_expr;\n  exp_env: Env.t;\n  exp_attributes: attributes;\n}\n\nand exp_extra =\n  | Texp_constraint of core_type  (** E : T *)\n  | Texp_coerce of core_type option * core_type\n      (** E :> T [Texp_coerce (None, T)] E : T0 :> T [Texp_coerce (Some T0, T)]\n      *)\n  | Texp_open of override_flag * Path.t * Longident.t loc * Env.t\n      (** let open[!] M in [Texp_open (!, P, M, env)] where [env] is the\n          environment after opening [P] *)\n  | Texp_poly of core_type option  (** Used for method bodies. *)\n  | Texp_newtype of string  (** fun (type t) -> *)\n\nand expression_desc =\n  | Texp_ident of Path.t * Longident.t loc * Types.value_description\n      (** x M.x *)\n  | Texp_constant of constant  (** 1, 'a', \"true\", 1.0, 1l, 1L, 1n *)\n  | Texp_let of rec_flag * value_binding list * expression\n      (** let P1 = E1 and ... and Pn = EN in E (flag = Nonrecursive) let rec P1\n          = E1 and ... and Pn = EN in E (flag = Recursive) *)\n  | Texp_function of {\n      arg_label: arg_label;\n      param: Ident.t;\n      cases: case list;\n      partial: partial;\n    }\n      (** [Pexp_fun] and [Pexp_function] both translate to [Texp_function]. See\n          {!Parsetree} for more details.\n\n          [param] is the identifier that is to be used to name the parameter of\n          the function.\n\n          partial = [Partial] if the pattern match is partial [Total] otherwise.\n      *)\n  | Texp_apply of expression * (arg_label * expression option) list\n      (** E0 ~l1:E1 ... ~ln:En\n\n          The expression can be None if the expression is abstracted over this\n          argument. 
It currently appears when a label is applied.\n\n          For example: let f x ~y = x + y in f ~y:3\n\n          The resulting typedtree for the application is: Texp_apply (Texp_ident\n          \"f/1037\",\n          [(Nolabel, None); (Labelled \"y\", Some (Texp_constant Const_int 3)) ])\n      *)\n  | Texp_match of expression * case list * case list * partial\n      (** match E0 with | P1 -> E1 | P2 -> E2 | exception P3 -> E3\n\n          [Texp_match (E0, [(P1, E1); (P2, E2)], [(P3, E3)], _)] *)\n  | Texp_try of expression * case list\n      (** try E with P1 -> E1 | ... | PN -> EN *)\n  | Texp_tuple of expression list  (** (E1, ..., EN) *)\n  | Texp_construct of\n      Longident.t loc * constructor_description * expression list\n      (** C [] C E [E] C (E1, ..., En) [E1;...;En] *)\n  | Texp_variant of label * expression option\n  | Texp_record of {\n      fields: (Types.label_description * record_label_definition) array;\n      representation: Types.record_representation;\n      extended_expression: expression option;\n    }\n      (** { l1=P1; ...; ln=Pn }           (extended_expression = None)\n            { E0 with l1=P1; ...; ln=Pn }   (extended_expression = Some E0)\n\n            Invariant: n > 0\n\n            If the type is { l1: t1; l2: t2 }, the expression\n            { E0 with t2=P2 } is represented as\n            Texp_record\n              { fields = [| l1, Kept t1; l2 Override P2 |]; representation;\n                extended_expression = Some E0 }\n        *)\n  | Texp_field of expression * Longident.t loc * label_description\n  | Texp_setfield of\n      expression * Longident.t loc * label_description * expression\n  | Texp_array of expression list\n  | Texp_ifthenelse of expression * expression * expression option\n  | Texp_sequence of expression * expression\n  | Texp_while of expression * expression\n  | Texp_for of\n      Ident.t\n      * Parsetree.pattern\n      * expression\n      * expression\n      * direction_flag\n      * expression\n 
 | Texp_send of expression * meth * expression option\n  | Texp_new of unit\n  | Texp_instvar of unit\n  | Texp_setinstvar of unit\n  | Texp_override of unit\n  | Texp_letmodule of Ident.t * string loc * module_expr * expression\n  | Texp_letexception of extension_constructor * expression\n  | Texp_assert of expression\n  | Texp_lazy of expression\n  | Texp_object of unit\n  | Texp_pack of module_expr\n  | Texp_unreachable\n  | Texp_extension_constructor of Longident.t loc * Path.t\n\nand meth = Tmeth_name of string\n\nand case = {c_lhs: pattern; c_guard: expression option; c_rhs: expression}\n\nand record_label_definition =\n  | Kept of Types.type_expr\n  | Overridden of Longident.t loc * expression\n\n(* Value expressions for the module language *)\nand module_expr = {\n  mod_desc: module_expr_desc;\n  mod_loc: Location.t;\n  mod_type: Types.module_type;\n  mod_env: Env.t;\n  mod_attributes: attributes;\n}\n\n(** Annotations for [Tmod_constraint]. *)\nand module_type_constraint =\n  | Tmodtype_implicit\n      (** The module type constraint has been synthesized during typechecking.\n      *)\n  | Tmodtype_explicit of module_type\n      (** The module type was in the source file. 
*)\n\nand module_expr_desc =\n  | Tmod_ident of Path.t * Longident.t loc\n  | Tmod_structure of structure\n  | Tmod_functor of Ident.t * string loc * module_type option * module_expr\n  | Tmod_apply of module_expr * module_expr * module_coercion\n  | Tmod_constraint of\n      module_expr * Types.module_type * module_type_constraint * module_coercion\n      (** ME (constraint = Tmodtype_implicit) (ME : MT) (constraint =\n          Tmodtype_explicit MT) *)\n  | Tmod_unpack of expression * Types.module_type\n\nand structure = {\n  str_items: structure_item list;\n  str_type: Types.signature;\n  str_final_env: Env.t;\n}\n\nand structure_item = {\n  str_desc: structure_item_desc;\n  str_loc: Location.t;\n  str_env: Env.t;\n}\n\nand structure_item_desc =\n  | Tstr_eval of expression * attributes\n  | Tstr_value of rec_flag * value_binding list\n  | Tstr_primitive of value_description\n  | Tstr_type of rec_flag * type_declaration list\n  | Tstr_typext of type_extension\n  | Tstr_exception of extension_constructor\n  | Tstr_module of module_binding\n  | Tstr_recmodule of module_binding list\n  | Tstr_modtype of module_type_declaration\n  | Tstr_open of open_description\n  | Tstr_class of unit\n  | Tstr_class_type of (Ident.t * string loc * class_type_declaration) list\n  | Tstr_include of include_declaration\n  | Tstr_attribute of attribute\n\nand module_binding = {\n  mb_id: Ident.t;\n  mb_name: string loc;\n  mb_expr: module_expr;\n  mb_attributes: attributes;\n  mb_loc: Location.t;\n}\n\nand value_binding = {\n  vb_pat: pattern;\n  vb_expr: expression;\n  vb_attributes: attributes;\n  vb_loc: Location.t;\n}\n\nand module_coercion =\n  | Tcoerce_none\n  | Tcoerce_structure of\n      (int * module_coercion) list\n      * (Ident.t * int * module_coercion) list\n      * string list (* runtime fields *)\n  | Tcoerce_functor of module_coercion * module_coercion\n  | Tcoerce_primitive of primitive_coercion\n  | Tcoerce_alias of Path.t * module_coercion\n\nand module_type = {\n 
 mty_desc: module_type_desc;\n  mty_type: Types.module_type;\n  mty_env: Env.t;\n  mty_loc: Location.t;\n  mty_attributes: attributes;\n}\n\nand module_type_desc =\n  | Tmty_ident of Path.t * Longident.t loc\n  | Tmty_signature of signature\n  | Tmty_functor of Ident.t * string loc * module_type option * module_type\n  | Tmty_with of module_type * (Path.t * Longident.t loc * with_constraint) list\n  | Tmty_typeof of module_expr\n  | Tmty_alias of Path.t * Longident.t loc\n\nand primitive_coercion = {\n  pc_desc: Primitive.description;\n  pc_type: type_expr;\n  pc_env: Env.t;\n  pc_loc: Location.t;\n  pc_id: Ident.t;\n}\n\nand signature = {\n  sig_items: signature_item list;\n  sig_type: Types.signature;\n  sig_final_env: Env.t;\n}\n\nand signature_item = {\n  sig_desc: signature_item_desc;\n  sig_env: Env.t; (* BINANNOT ADDED *)\n  sig_loc: Location.t;\n}\n\nand signature_item_desc =\n  | Tsig_value of value_description\n  | Tsig_type of rec_flag * type_declaration list\n  | Tsig_typext of type_extension\n  | Tsig_exception of extension_constructor\n  | Tsig_module of module_declaration\n  | Tsig_recmodule of module_declaration list\n  | Tsig_modtype of module_type_declaration\n  | Tsig_open of open_description\n  | Tsig_include of include_description\n  | Tsig_class of unit\n  | Tsig_class_type of class_type_declaration list\n  | Tsig_attribute of attribute\n\nand module_declaration = {\n  md_id: Ident.t;\n  md_name: string loc;\n  md_type: module_type;\n  md_attributes: attributes;\n  md_loc: Location.t;\n}\n\nand module_type_declaration = {\n  mtd_id: Ident.t;\n  mtd_name: string loc;\n  mtd_type: module_type option;\n  mtd_attributes: attributes;\n  mtd_loc: Location.t;\n}\n\nand open_description = {\n  open_path: Path.t;\n  open_txt: Longident.t loc;\n  open_override: override_flag;\n  open_loc: Location.t;\n  open_attributes: attribute list;\n}\n\nand 'a include_infos = {\n  incl_mod: 'a;\n  incl_type: Types.signature;\n  incl_loc: Location.t;\n  
incl_attributes: attribute list;\n}\n\nand include_description = module_type include_infos\n\nand include_declaration = module_expr include_infos\n\nand with_constraint =\n  | Twith_type of type_declaration\n  | Twith_module of Path.t * Longident.t loc\n  | Twith_typesubst of type_declaration\n  | Twith_modsubst of Path.t * Longident.t loc\n\nand core_type = {\n  mutable ctyp_desc: core_type_desc;\n      (** mutable because of [Typeclass.declare_method] *)\n  mutable ctyp_type: type_expr;\n      (** mutable because of [Typeclass.declare_method] *)\n  ctyp_env: Env.t; (* BINANNOT ADDED *)\n  ctyp_loc: Location.t;\n  ctyp_attributes: attributes;\n}\n\nand core_type_desc =\n  | Ttyp_any\n  | Ttyp_var of string\n  | Ttyp_arrow of arg_label * core_type * core_type\n  | Ttyp_tuple of core_type list\n  | Ttyp_constr of Path.t * Longident.t loc * core_type list\n  | Ttyp_object of object_field list * closed_flag\n  | Ttyp_class of Path.t * Longident.t loc * core_type list\n  | Ttyp_alias of core_type * string\n  | Ttyp_variant of row_field list * closed_flag * label list option\n  | Ttyp_poly of string list * core_type\n  | Ttyp_package of package_type\n\nand package_type = {\n  pack_path: Path.t;\n  pack_fields: (Longident.t loc * core_type) list;\n  pack_type: Types.module_type;\n  pack_txt: Longident.t loc;\n}\n\nand row_field =\n  | Ttag of string loc * attributes * bool * core_type list\n  | Tinherit of core_type\n\nand object_field =\n  | OTtag of string loc * attributes * core_type\n  | OTinherit of core_type\n\nand value_description = {\n  val_id: Ident.t;\n  val_name: string loc;\n  val_desc: core_type;\n  val_val: Types.value_description;\n  val_prim: string list;\n  val_loc: Location.t;\n  val_attributes: attributes;\n}\n\nand type_declaration = {\n  typ_id: Ident.t;\n  typ_name: string loc;\n  typ_params: (core_type * variance) list;\n  typ_type: Types.type_declaration;\n  typ_cstrs: (core_type * core_type * Location.t) list;\n  typ_kind: type_kind;\n  
typ_private: private_flag;\n  typ_manifest: core_type option;\n  typ_loc: Location.t;\n  typ_attributes: attributes;\n}\n\nand type_kind =\n  | Ttype_abstract\n  | Ttype_variant of constructor_declaration list\n  | Ttype_record of label_declaration list\n  | Ttype_open\n\nand label_declaration = {\n  ld_id: Ident.t;\n  ld_name: string loc;\n  ld_mutable: mutable_flag;\n  ld_type: core_type;\n  ld_loc: Location.t;\n  ld_attributes: attributes;\n}\n\nand constructor_declaration = {\n  cd_id: Ident.t;\n  cd_name: string loc;\n  cd_args: constructor_arguments;\n  cd_res: core_type option;\n  cd_loc: Location.t;\n  cd_attributes: attributes;\n}\n\nand constructor_arguments =\n  | Cstr_tuple of core_type list\n  | Cstr_record of label_declaration list\n\nand type_extension = {\n  tyext_path: Path.t;\n  tyext_txt: Longident.t loc;\n  tyext_params: (core_type * variance) list;\n  tyext_constructors: extension_constructor list;\n  tyext_private: private_flag;\n  tyext_attributes: attributes;\n}\n\nand extension_constructor = {\n  ext_id: Ident.t;\n  ext_name: string loc;\n  ext_type: Types.extension_constructor;\n  ext_kind: extension_constructor_kind;\n  ext_loc: Location.t;\n  ext_attributes: attributes;\n}\n\nand extension_constructor_kind =\n  | Text_decl of constructor_arguments * core_type option\n  | Text_rebind of Path.t * Longident.t loc\n\nand class_type = {\n  cltyp_desc: class_type_desc;\n  cltyp_type: Types.class_type;\n  cltyp_env: Env.t;\n  cltyp_loc: Location.t;\n  cltyp_attributes: attributes;\n}\n\nand class_type_desc =\n  | Tcty_constr of Path.t * Longident.t loc * core_type list\n  | Tcty_signature of class_signature\n  | Tcty_arrow of arg_label * core_type * class_type\n  | Tcty_open of override_flag * Path.t * Longident.t loc * Env.t * class_type\n\nand class_signature = {\n  csig_self: core_type;\n  csig_fields: class_type_field list;\n  csig_type: Types.class_signature;\n}\n\nand class_type_field = {\n  ctf_desc: class_type_field_desc;\n  ctf_loc: 
Location.t;\n  ctf_attributes: attributes;\n}\n\nand class_type_field_desc =\n  | Tctf_inherit of class_type\n  | Tctf_val of (string * mutable_flag * virtual_flag * core_type)\n  | Tctf_method of (string * private_flag * virtual_flag * core_type)\n  | Tctf_constraint of (core_type * core_type)\n  | Tctf_attribute of attribute\n\nand class_description = class_type class_infos\n\nand class_type_declaration = class_type class_infos\n\nand 'a class_infos = {\n  ci_virt: virtual_flag;\n  ci_params: (core_type * variance) list;\n  ci_id_name: string loc;\n  ci_id_class: Ident.t;\n  ci_id_class_type: Ident.t;\n  ci_id_object: Ident.t;\n  ci_id_typehash: Ident.t;\n  ci_expr: 'a;\n  ci_decl: Types.class_declaration;\n  ci_type_decl: Types.class_type_declaration;\n  ci_loc: Location.t;\n  ci_attributes: attributes;\n}\n\n(* Auxiliary functions over the a.s.t. *)\n\nval iter_pattern_desc : (pattern -> unit) -> pattern_desc -> unit\nval map_pattern_desc : (pattern -> pattern) -> pattern_desc -> pattern_desc\n\nval let_bound_idents : value_binding list -> Ident.t list\nval rev_let_bound_idents : value_binding list -> Ident.t list\n\nval alpha_pat : (Ident.t * Ident.t) list -> pattern -> pattern\n(** Alpha conversion of patterns *)\n\nval mknoloc : 'a -> 'a Asttypes.loc\nval mkloc : 'a -> Location.t -> 'a Asttypes.loc\n\nval pat_bound_idents : pattern -> Ident.t list\n"
  },
  {
    "path": "analysis/vendor/ml/typedtreeIter.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*    Thomas Gazagnaire (OCamlPro), Fabrice Le Fessant (INRIA Saclay)     *)\n(*                                                                        *)\n(*   Copyright 2007 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(*\nTODO:\n - 2012/05/10: Follow camlp4 way of building map and iter using classes\n     and inheritance ?\n*)\n\nopen Asttypes\nopen Typedtree\n\nmodule type IteratorArgument = sig\n  val enter_structure : structure -> unit\n  val enter_value_description : value_description -> unit\n  val enter_type_extension : type_extension -> unit\n  val enter_extension_constructor : extension_constructor -> unit\n  val enter_pattern : pattern -> unit\n  val enter_expression : expression -> unit\n  val enter_package_type : package_type -> unit\n  val enter_signature : signature -> unit\n  val enter_signature_item : signature_item -> unit\n  val enter_module_type_declaration : module_type_declaration -> unit\n  val enter_module_type : module_type -> unit\n  val enter_module_expr : module_expr -> unit\n  val enter_with_constraint : with_constraint -> unit\n  val enter_class_signature : 
class_signature -> unit\n\n  val enter_class_description : class_description -> unit\n  val enter_class_type_declaration : class_type_declaration -> unit\n  val enter_class_type : class_type -> unit\n  val enter_class_type_field : class_type_field -> unit\n  val enter_core_type : core_type -> unit\n  val enter_structure_item : structure_item -> unit\n\n  val leave_structure : structure -> unit\n  val leave_value_description : value_description -> unit\n  val leave_type_extension : type_extension -> unit\n  val leave_extension_constructor : extension_constructor -> unit\n  val leave_pattern : pattern -> unit\n  val leave_expression : expression -> unit\n  val leave_package_type : package_type -> unit\n  val leave_signature : signature -> unit\n  val leave_signature_item : signature_item -> unit\n  val leave_module_type_declaration : module_type_declaration -> unit\n  val leave_module_type : module_type -> unit\n  val leave_module_expr : module_expr -> unit\n  val leave_with_constraint : with_constraint -> unit\n  val leave_class_signature : class_signature -> unit\n\n  val leave_class_description : class_description -> unit\n  val leave_class_type_declaration : class_type_declaration -> unit\n  val leave_class_type : class_type -> unit\n  val leave_class_type_field : class_type_field -> unit\n  val leave_core_type : core_type -> unit\n  val leave_structure_item : structure_item -> unit\n\n  val enter_bindings : rec_flag -> unit\n  val enter_binding : value_binding -> unit\n  val leave_binding : value_binding -> unit\n  val leave_bindings : rec_flag -> unit\n\n  val enter_type_declarations : rec_flag -> unit\n  val enter_type_declaration : type_declaration -> unit\n  val leave_type_declaration : type_declaration -> unit\n  val leave_type_declarations : rec_flag -> unit\nend\n\nmodule MakeIterator (Iter : IteratorArgument) : sig\n  val iter_structure : structure -> unit\n  val iter_signature : signature -> unit\n  val iter_structure_item : structure_item -> unit\n  
val iter_signature_item : signature_item -> unit\n  val iter_expression : expression -> unit\n  val iter_module_type : module_type -> unit\n  val iter_pattern : pattern -> unit\nend = struct\n  let may_iter f v =\n    match v with\n    | None -> ()\n    | Some x -> f x\n\n  let rec iter_structure str =\n    Iter.enter_structure str;\n    List.iter iter_structure_item str.str_items;\n    Iter.leave_structure str\n\n  and iter_binding vb =\n    Iter.enter_binding vb;\n    iter_pattern vb.vb_pat;\n    iter_expression vb.vb_expr;\n    Iter.leave_binding vb\n\n  and iter_bindings rec_flag list =\n    Iter.enter_bindings rec_flag;\n    List.iter iter_binding list;\n    Iter.leave_bindings rec_flag\n\n  and iter_case {c_lhs; c_guard; c_rhs} =\n    iter_pattern c_lhs;\n    may_iter iter_expression c_guard;\n    iter_expression c_rhs\n\n  and iter_cases cases = List.iter iter_case cases\n\n  and iter_structure_item item =\n    Iter.enter_structure_item item;\n    (match item.str_desc with\n    | Tstr_eval (exp, _attrs) -> iter_expression exp\n    | Tstr_value (rec_flag, list) -> iter_bindings rec_flag list\n    | Tstr_primitive vd -> iter_value_description vd\n    | Tstr_type (rf, list) -> iter_type_declarations rf list\n    | Tstr_typext tyext -> iter_type_extension tyext\n    | Tstr_exception ext -> iter_extension_constructor ext\n    | Tstr_module x -> iter_module_binding x\n    | Tstr_recmodule list -> List.iter iter_module_binding list\n    | Tstr_modtype mtd -> iter_module_type_declaration mtd\n    | Tstr_open _ -> ()\n    | Tstr_class () -> ()\n    | Tstr_class_type list ->\n      List.iter (fun (_, _, ct) -> iter_class_type_declaration ct) list\n    | Tstr_include incl -> iter_module_expr incl.incl_mod\n    | Tstr_attribute _ -> ());\n    Iter.leave_structure_item item\n\n  and iter_module_binding x = iter_module_expr x.mb_expr\n\n  and iter_value_description v =\n    Iter.enter_value_description v;\n    iter_core_type v.val_desc;\n    Iter.leave_value_description 
v\n\n  and iter_constructor_arguments = function\n    | Cstr_tuple l -> List.iter iter_core_type l\n    | Cstr_record l -> List.iter (fun ld -> iter_core_type ld.ld_type) l\n\n  and iter_constructor_declaration cd =\n    iter_constructor_arguments cd.cd_args;\n    option iter_core_type cd.cd_res\n\n  and iter_type_parameter (ct, _v) = iter_core_type ct\n\n  and iter_type_declaration decl =\n    Iter.enter_type_declaration decl;\n    List.iter iter_type_parameter decl.typ_params;\n    List.iter\n      (fun (ct1, ct2, _loc) ->\n        iter_core_type ct1;\n        iter_core_type ct2)\n      decl.typ_cstrs;\n    (match decl.typ_kind with\n    | Ttype_abstract -> ()\n    | Ttype_variant list -> List.iter iter_constructor_declaration list\n    | Ttype_record list -> List.iter (fun ld -> iter_core_type ld.ld_type) list\n    | Ttype_open -> ());\n    option iter_core_type decl.typ_manifest;\n    Iter.leave_type_declaration decl\n\n  and iter_type_declarations rec_flag decls =\n    Iter.enter_type_declarations rec_flag;\n    List.iter iter_type_declaration decls;\n    Iter.leave_type_declarations rec_flag\n\n  and iter_extension_constructor ext =\n    Iter.enter_extension_constructor ext;\n    (match ext.ext_kind with\n    | Text_decl (args, ret) ->\n      iter_constructor_arguments args;\n      option iter_core_type ret\n    | Text_rebind _ -> ());\n    Iter.leave_extension_constructor ext\n\n  and iter_type_extension tyext =\n    Iter.enter_type_extension tyext;\n    List.iter iter_type_parameter tyext.tyext_params;\n    List.iter iter_extension_constructor tyext.tyext_constructors;\n    Iter.leave_type_extension tyext\n\n  and iter_pattern pat =\n    Iter.enter_pattern pat;\n    List.iter\n      (fun (cstr, _, _attrs) ->\n        match cstr with\n        | Tpat_type _ -> ()\n        | Tpat_unpack -> ()\n        | Tpat_open _ -> ()\n        | Tpat_constraint ct -> iter_core_type ct)\n      pat.pat_extra;\n    (match pat.pat_desc with\n    | Tpat_any -> ()\n    | Tpat_var 
_ -> ()\n    | Tpat_alias (pat1, _, _) -> iter_pattern pat1\n    | Tpat_constant _ -> ()\n    | Tpat_tuple list -> List.iter iter_pattern list\n    | Tpat_construct (_, _, args) -> List.iter iter_pattern args\n    | Tpat_variant (_, pato, _) -> (\n      match pato with\n      | None -> ()\n      | Some pat -> iter_pattern pat)\n    | Tpat_record (list, _closed) ->\n      List.iter (fun (_, _, pat) -> iter_pattern pat) list\n    | Tpat_array list -> List.iter iter_pattern list\n    | Tpat_or (p1, p2, _) ->\n      iter_pattern p1;\n      iter_pattern p2\n    | Tpat_lazy p -> iter_pattern p);\n    Iter.leave_pattern pat\n\n  and option f x =\n    match x with\n    | None -> ()\n    | Some e -> f e\n\n  and iter_expression exp =\n    Iter.enter_expression exp;\n    List.iter\n      (function\n        | cstr, _, _attrs -> (\n          match cstr with\n          | Texp_constraint ct -> iter_core_type ct\n          | Texp_coerce (cty1, cty2) ->\n            option iter_core_type cty1;\n            iter_core_type cty2\n          | Texp_open _ -> ()\n          | Texp_poly cto -> option iter_core_type cto\n          | Texp_newtype _ -> ()))\n      exp.exp_extra;\n    (match exp.exp_desc with\n    | Texp_ident _ -> ()\n    | Texp_constant _ -> ()\n    | Texp_let (rec_flag, list, exp) ->\n      iter_bindings rec_flag list;\n      iter_expression exp\n    | Texp_function {cases; _} -> iter_cases cases\n    | Texp_apply (exp, list) ->\n      iter_expression exp;\n      List.iter\n        (fun (_label, expo) ->\n          match expo with\n          | None -> ()\n          | Some exp -> iter_expression exp)\n        list\n    | Texp_match (exp, list1, list2, _) ->\n      iter_expression exp;\n      iter_cases list1;\n      iter_cases list2\n    | Texp_try (exp, list) ->\n      iter_expression exp;\n      iter_cases list\n    | Texp_tuple list -> List.iter iter_expression list\n    | Texp_construct (_, _, args) -> List.iter iter_expression args\n    | Texp_variant (_label, expo) -> 
(\n      match expo with\n      | None -> ()\n      | Some exp -> iter_expression exp)\n    | Texp_record {fields; extended_expression; _} -> (\n      Array.iter\n        (function\n          | _, Kept _ -> ()\n          | _, Overridden (_, exp) -> iter_expression exp)\n        fields;\n      match extended_expression with\n      | None -> ()\n      | Some exp -> iter_expression exp)\n    | Texp_field (exp, _, _label) -> iter_expression exp\n    | Texp_setfield (exp1, _, _label, exp2) ->\n      iter_expression exp1;\n      iter_expression exp2\n    | Texp_array list -> List.iter iter_expression list\n    | Texp_ifthenelse (exp1, exp2, expo) -> (\n      iter_expression exp1;\n      iter_expression exp2;\n      match expo with\n      | None -> ()\n      | Some exp -> iter_expression exp)\n    | Texp_sequence (exp1, exp2) ->\n      iter_expression exp1;\n      iter_expression exp2\n    | Texp_while (exp1, exp2) ->\n      iter_expression exp1;\n      iter_expression exp2\n    | Texp_for (_id, _, exp1, exp2, _dir, exp3) ->\n      iter_expression exp1;\n      iter_expression exp2;\n      iter_expression exp3\n    | Texp_send (exp, _meth, expo) -> (\n      iter_expression exp;\n      match expo with\n      | None -> ()\n      | Some exp -> iter_expression exp)\n    | Texp_new _ | Texp_instvar _ | Texp_setinstvar _ | Texp_override _ -> ()\n    | Texp_letmodule (_id, _, mexpr, exp) ->\n      iter_module_expr mexpr;\n      iter_expression exp\n    | Texp_letexception (cd, exp) ->\n      iter_extension_constructor cd;\n      iter_expression exp\n    | Texp_assert exp -> iter_expression exp\n    | Texp_lazy exp -> iter_expression exp\n    | Texp_object () -> ()\n    | Texp_pack mexpr -> iter_module_expr mexpr\n    | Texp_unreachable -> ()\n    | Texp_extension_constructor _ -> ());\n    Iter.leave_expression exp\n\n  and iter_package_type pack =\n    Iter.enter_package_type pack;\n    List.iter (fun (_s, ct) -> iter_core_type ct) pack.pack_fields;\n    Iter.leave_package_type 
pack\n\n  and iter_signature sg =\n    Iter.enter_signature sg;\n    List.iter iter_signature_item sg.sig_items;\n    Iter.leave_signature sg\n\n  and iter_signature_item item =\n    Iter.enter_signature_item item;\n    (match item.sig_desc with\n    | Tsig_value vd -> iter_value_description vd\n    | Tsig_type (rf, list) -> iter_type_declarations rf list\n    | Tsig_exception ext -> iter_extension_constructor ext\n    | Tsig_typext tyext -> iter_type_extension tyext\n    | Tsig_module md -> iter_module_type md.md_type\n    | Tsig_recmodule list ->\n      List.iter (fun md -> iter_module_type md.md_type) list\n    | Tsig_modtype mtd -> iter_module_type_declaration mtd\n    | Tsig_open _ -> ()\n    | Tsig_include incl -> iter_module_type incl.incl_mod\n    | Tsig_class () -> ()\n    | Tsig_class_type list -> List.iter iter_class_type_declaration list\n    | Tsig_attribute _ -> ());\n    Iter.leave_signature_item item\n\n  and iter_module_type_declaration mtd =\n    Iter.enter_module_type_declaration mtd;\n    (match mtd.mtd_type with\n    | None -> ()\n    | Some mtype -> iter_module_type mtype);\n    Iter.leave_module_type_declaration mtd\n\n  and iter_class_type_declaration cd =\n    Iter.enter_class_type_declaration cd;\n    List.iter iter_type_parameter cd.ci_params;\n    iter_class_type cd.ci_expr;\n    Iter.leave_class_type_declaration cd\n\n  and iter_module_type mty =\n    Iter.enter_module_type mty;\n    (match mty.mty_desc with\n    | Tmty_ident _ -> ()\n    | Tmty_alias _ -> ()\n    | Tmty_signature sg -> iter_signature sg\n    | Tmty_functor (_, _, mtype1, mtype2) ->\n      Misc.may iter_module_type mtype1;\n      iter_module_type mtype2\n    | Tmty_with (mtype, list) ->\n      iter_module_type mtype;\n      List.iter (fun (_path, _, withc) -> iter_with_constraint withc) list\n    | Tmty_typeof mexpr -> iter_module_expr mexpr);\n    Iter.leave_module_type mty\n\n  and iter_with_constraint cstr =\n    Iter.enter_with_constraint cstr;\n    (match cstr 
with\n    | Twith_type decl -> iter_type_declaration decl\n    | Twith_module _ -> ()\n    | Twith_typesubst decl -> iter_type_declaration decl\n    | Twith_modsubst _ -> ());\n    Iter.leave_with_constraint cstr\n\n  and iter_module_expr mexpr =\n    Iter.enter_module_expr mexpr;\n    (match mexpr.mod_desc with\n    | Tmod_ident _ -> ()\n    | Tmod_structure st -> iter_structure st\n    | Tmod_functor (_, _, mtype, mexpr) ->\n      Misc.may iter_module_type mtype;\n      iter_module_expr mexpr\n    | Tmod_apply (mexp1, mexp2, _) ->\n      iter_module_expr mexp1;\n      iter_module_expr mexp2\n    | Tmod_constraint (mexpr, _, Tmodtype_implicit, _) -> iter_module_expr mexpr\n    | Tmod_constraint (mexpr, _, Tmodtype_explicit mtype, _) ->\n      iter_module_expr mexpr;\n      iter_module_type mtype\n    | Tmod_unpack (exp, _mty) -> iter_expression exp)\n    (*          iter_module_type mty *);\n    Iter.leave_module_expr mexpr\n\n  and iter_class_type ct =\n    Iter.enter_class_type ct;\n    (match ct.cltyp_desc with\n    | Tcty_signature csg -> iter_class_signature csg\n    | Tcty_constr (_path, _, list) -> List.iter iter_core_type list\n    | Tcty_arrow (_label, ct, cl) ->\n      iter_core_type ct;\n      iter_class_type cl\n    | Tcty_open (_, _, _, _, e) -> iter_class_type e);\n    Iter.leave_class_type ct\n\n  and iter_class_signature cs =\n    Iter.enter_class_signature cs;\n    iter_core_type cs.csig_self;\n    List.iter iter_class_type_field cs.csig_fields;\n    Iter.leave_class_signature cs\n\n  and iter_class_type_field ctf =\n    Iter.enter_class_type_field ctf;\n    (match ctf.ctf_desc with\n    | Tctf_inherit ct -> iter_class_type ct\n    | Tctf_val (_s, _mut, _virt, ct) -> iter_core_type ct\n    | Tctf_method (_s, _priv, _virt, ct) -> iter_core_type ct\n    | Tctf_constraint (ct1, ct2) ->\n      iter_core_type ct1;\n      iter_core_type ct2\n    | Tctf_attribute _ -> ());\n    Iter.leave_class_type_field ctf\n\n  and iter_core_type ct =\n    
Iter.enter_core_type ct;\n    (match ct.ctyp_desc with\n    | Ttyp_any -> ()\n    | Ttyp_var _ -> ()\n    | Ttyp_arrow (_label, ct1, ct2) ->\n      iter_core_type ct1;\n      iter_core_type ct2\n    | Ttyp_tuple list -> List.iter iter_core_type list\n    | Ttyp_constr (_path, _, list) -> List.iter iter_core_type list\n    | Ttyp_object (list, _o) -> List.iter iter_object_field list\n    | Ttyp_class (_path, _, list) -> List.iter iter_core_type list\n    | Ttyp_alias (ct, _s) -> iter_core_type ct\n    | Ttyp_variant (list, _bool, _labels) -> List.iter iter_row_field list\n    | Ttyp_poly (_list, ct) -> iter_core_type ct\n    | Ttyp_package pack -> iter_package_type pack);\n    Iter.leave_core_type ct\n\n  and iter_row_field rf =\n    match rf with\n    | Ttag (_label, _attrs, _bool, list) -> List.iter iter_core_type list\n    | Tinherit ct -> iter_core_type ct\n\n  and iter_object_field ofield =\n    match ofield with\n    | OTtag (_, _, ct) | OTinherit ct -> iter_core_type ct\nend\n\nmodule DefaultIteratorArgument = struct\n  let enter_structure _ = ()\n  let enter_value_description _ = ()\n  let enter_type_extension _ = ()\n  let enter_extension_constructor _ = ()\n  let enter_pattern _ = ()\n  let enter_expression _ = ()\n  let enter_package_type _ = ()\n  let enter_signature _ = ()\n  let enter_signature_item _ = ()\n  let enter_module_type_declaration _ = ()\n  let enter_module_type _ = ()\n  let enter_module_expr _ = ()\n  let enter_with_constraint _ = ()\n  let enter_class_signature _ = ()\n\n  let enter_class_description _ = ()\n  let enter_class_type_declaration _ = ()\n  let enter_class_type _ = ()\n  let enter_class_type_field _ = ()\n  let enter_core_type _ = ()\n  let enter_structure_item _ = ()\n\n  let leave_structure _ = ()\n  let leave_value_description _ = ()\n  let leave_type_extension _ = ()\n  let leave_extension_constructor _ = ()\n  let leave_pattern _ = ()\n  let leave_expression _ = ()\n  let leave_package_type _ = ()\n  let leave_signature 
_ = ()\n  let leave_signature_item _ = ()\n  let leave_module_type_declaration _ = ()\n  let leave_module_type _ = ()\n  let leave_module_expr _ = ()\n  let leave_with_constraint _ = ()\n  let leave_class_signature _ = ()\n\n  let leave_class_description _ = ()\n  let leave_class_type_declaration _ = ()\n  let leave_class_type _ = ()\n  let leave_class_type_field _ = ()\n  let leave_core_type _ = ()\n  let leave_structure_item _ = ()\n\n  let enter_binding _ = ()\n  let leave_binding _ = ()\n\n  let enter_bindings _ = ()\n  let leave_bindings _ = ()\n\n  let enter_type_declaration _ = ()\n  let leave_type_declaration _ = ()\n\n  let enter_type_declarations _ = ()\n  let leave_type_declarations _ = ()\nend\n"
  },
  {
    "path": "analysis/vendor/ml/typedtreeIter.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*    Thomas Gazagnaire (OCamlPro), Fabrice Le Fessant (INRIA Saclay)     *)\n(*                                                                        *)\n(*   Copyright 2007 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Asttypes\nopen Typedtree\n\nmodule type IteratorArgument = sig\n  val enter_structure : structure -> unit\n  val enter_value_description : value_description -> unit\n  val enter_type_extension : type_extension -> unit\n  val enter_extension_constructor : extension_constructor -> unit\n  val enter_pattern : pattern -> unit\n  val enter_expression : expression -> unit\n  val enter_package_type : package_type -> unit\n  val enter_signature : signature -> unit\n  val enter_signature_item : signature_item -> unit\n  val enter_module_type_declaration : module_type_declaration -> unit\n  val enter_module_type : module_type -> unit\n  val enter_module_expr : module_expr -> unit\n  val enter_with_constraint : with_constraint -> unit\n  val enter_class_signature : class_signature -> unit\n  val enter_class_description : class_description -> unit\n  val enter_class_type_declaration 
: class_type_declaration -> unit\n  val enter_class_type : class_type -> unit\n  val enter_class_type_field : class_type_field -> unit\n  val enter_core_type : core_type -> unit\n  val enter_structure_item : structure_item -> unit\n\n  val leave_structure : structure -> unit\n  val leave_value_description : value_description -> unit\n  val leave_type_extension : type_extension -> unit\n  val leave_extension_constructor : extension_constructor -> unit\n  val leave_pattern : pattern -> unit\n  val leave_expression : expression -> unit\n  val leave_package_type : package_type -> unit\n  val leave_signature : signature -> unit\n  val leave_signature_item : signature_item -> unit\n  val leave_module_type_declaration : module_type_declaration -> unit\n  val leave_module_type : module_type -> unit\n  val leave_module_expr : module_expr -> unit\n  val leave_with_constraint : with_constraint -> unit\n  val leave_class_signature : class_signature -> unit\n  val leave_class_description : class_description -> unit\n  val leave_class_type_declaration : class_type_declaration -> unit\n  val leave_class_type : class_type -> unit\n  val leave_class_type_field : class_type_field -> unit\n  val leave_core_type : core_type -> unit\n  val leave_structure_item : structure_item -> unit\n\n  val enter_bindings : rec_flag -> unit\n  val enter_binding : value_binding -> unit\n  val leave_binding : value_binding -> unit\n  val leave_bindings : rec_flag -> unit\n\n  val enter_type_declarations : rec_flag -> unit\n  val enter_type_declaration : type_declaration -> unit\n  val leave_type_declaration : type_declaration -> unit\n  val leave_type_declarations : rec_flag -> unit\nend\n\nmodule MakeIterator : functor (Iter : IteratorArgument) -> sig\n  val iter_structure : structure -> unit\n  val iter_signature : signature -> unit\n  val iter_structure_item : structure_item -> unit\n  val iter_signature_item : signature_item -> unit\n  val iter_expression : expression -> unit\n  val 
iter_module_type : module_type -> unit\n  val iter_pattern : pattern -> unit\nend\n[@@warning \"-67\"]\n\nmodule DefaultIteratorArgument : IteratorArgument\n"
  },
  {
    "path": "analysis/vendor/ml/typedtreeMap.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Fabrice Le Fessant, INRIA Saclay                     *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Typedtree\n\nmodule type MapArgument = sig\n  val enter_structure : structure -> structure\n  val enter_value_description : value_description -> value_description\n  val enter_type_declaration : type_declaration -> type_declaration\n  val enter_type_extension : type_extension -> type_extension\n  val enter_extension_constructor :\n    extension_constructor -> extension_constructor\n  val enter_pattern : pattern -> pattern\n  val enter_expression : expression -> expression\n  val enter_package_type : package_type -> package_type\n  val enter_signature : signature -> signature\n  val enter_signature_item : signature_item -> signature_item\n  val enter_module_type_declaration :\n    module_type_declaration -> module_type_declaration\n  val enter_module_type : module_type -> module_type\n  val enter_module_expr : module_expr -> module_expr\n  val enter_with_constraint : 
with_constraint -> with_constraint\n  val enter_class_signature : class_signature -> class_signature\n\n  val enter_class_description : class_description -> class_description\n  val enter_class_type_declaration :\n    class_type_declaration -> class_type_declaration\n  val enter_class_type : class_type -> class_type\n  val enter_class_type_field : class_type_field -> class_type_field\n  val enter_core_type : core_type -> core_type\n  val enter_structure_item : structure_item -> structure_item\n\n  val leave_structure : structure -> structure\n  val leave_value_description : value_description -> value_description\n  val leave_type_declaration : type_declaration -> type_declaration\n  val leave_type_extension : type_extension -> type_extension\n  val leave_extension_constructor :\n    extension_constructor -> extension_constructor\n  val leave_pattern : pattern -> pattern\n  val leave_expression : expression -> expression\n  val leave_package_type : package_type -> package_type\n  val leave_signature : signature -> signature\n  val leave_signature_item : signature_item -> signature_item\n  val leave_module_type_declaration :\n    module_type_declaration -> module_type_declaration\n  val leave_module_type : module_type -> module_type\n  val leave_module_expr : module_expr -> module_expr\n  val leave_with_constraint : with_constraint -> with_constraint\n  val leave_class_signature : class_signature -> class_signature\n\n  val leave_class_description : class_description -> class_description\n  val leave_class_type_declaration :\n    class_type_declaration -> class_type_declaration\n  val leave_class_type : class_type -> class_type\n  val leave_class_type_field : class_type_field -> class_type_field\n  val leave_core_type : core_type -> core_type\n  val leave_structure_item : structure_item -> structure_item\nend\n\nmodule MakeMap (Map : MapArgument) = struct\n  open Misc\n\n  let rec map_structure str =\n    let str = Map.enter_structure str in\n    let str_items = 
List.map map_structure_item str.str_items in\n    Map.leave_structure {str with str_items}\n\n  and map_binding vb =\n    {\n      vb_pat = map_pattern vb.vb_pat;\n      vb_expr = map_expression vb.vb_expr;\n      vb_attributes = vb.vb_attributes;\n      vb_loc = vb.vb_loc;\n    }\n\n  and map_bindings list = List.map map_binding list\n\n  and map_case {c_lhs; c_guard; c_rhs} =\n    {\n      c_lhs = map_pattern c_lhs;\n      c_guard = may_map map_expression c_guard;\n      c_rhs = map_expression c_rhs;\n    }\n\n  and map_cases list = List.map map_case list\n\n  and map_structure_item item =\n    let item = Map.enter_structure_item item in\n    let str_desc =\n      match item.str_desc with\n      | Tstr_eval (exp, attrs) -> Tstr_eval (map_expression exp, attrs)\n      | Tstr_value (rec_flag, list) -> Tstr_value (rec_flag, map_bindings list)\n      | Tstr_primitive vd -> Tstr_primitive (map_value_description vd)\n      | Tstr_type (rf, list) ->\n        Tstr_type (rf, List.map map_type_declaration list)\n      | Tstr_typext tyext -> Tstr_typext (map_type_extension tyext)\n      | Tstr_exception ext -> Tstr_exception (map_extension_constructor ext)\n      | Tstr_module x -> Tstr_module (map_module_binding x)\n      | Tstr_recmodule list ->\n        let list = List.map map_module_binding list in\n        Tstr_recmodule list\n      | Tstr_modtype mtd -> Tstr_modtype (map_module_type_declaration mtd)\n      | Tstr_open od -> Tstr_open od\n      | Tstr_class () -> assert false\n      | Tstr_class_type list ->\n        let list =\n          List.map\n            (fun (id, name, ct) -> (id, name, map_class_type_declaration ct))\n            list\n        in\n        Tstr_class_type list\n      | Tstr_include incl ->\n        Tstr_include {incl with incl_mod = map_module_expr incl.incl_mod}\n      | Tstr_attribute x -> Tstr_attribute x\n    in\n    Map.leave_structure_item {item with str_desc}\n\n  and map_module_binding x = {x with mb_expr = map_module_expr x.mb_expr}\n\n 
 and map_value_description v =\n    let v = Map.enter_value_description v in\n    let val_desc = map_core_type v.val_desc in\n    Map.leave_value_description {v with val_desc}\n\n  and map_type_declaration decl =\n    let decl = Map.enter_type_declaration decl in\n    let typ_params = List.map map_type_parameter decl.typ_params in\n    let typ_cstrs =\n      List.map\n        (fun (ct1, ct2, loc) -> (map_core_type ct1, map_core_type ct2, loc))\n        decl.typ_cstrs\n    in\n    let typ_kind =\n      match decl.typ_kind with\n      | Ttype_abstract -> Ttype_abstract\n      | Ttype_variant list ->\n        let list = List.map map_constructor_declaration list in\n        Ttype_variant list\n      | Ttype_record list ->\n        let list =\n          List.map (fun ld -> {ld with ld_type = map_core_type ld.ld_type}) list\n        in\n        Ttype_record list\n      | Ttype_open -> Ttype_open\n    in\n    let typ_manifest = may_map map_core_type decl.typ_manifest in\n    Map.leave_type_declaration\n      {decl with typ_params; typ_cstrs; typ_kind; typ_manifest}\n\n  and map_type_parameter (ct, v) = (map_core_type ct, v)\n\n  and map_constructor_arguments = function\n    | Cstr_tuple l -> Cstr_tuple (List.map map_core_type l)\n    | Cstr_record l ->\n      Cstr_record\n        (List.map (fun ld -> {ld with ld_type = map_core_type ld.ld_type}) l)\n\n  and map_constructor_declaration cd =\n    let cd_args = map_constructor_arguments cd.cd_args in\n    {cd with cd_args; cd_res = may_map map_core_type cd.cd_res}\n\n  and map_type_extension tyext =\n    let tyext = Map.enter_type_extension tyext in\n    let tyext_params = List.map map_type_parameter tyext.tyext_params in\n    let tyext_constructors =\n      List.map map_extension_constructor tyext.tyext_constructors\n    in\n    Map.leave_type_extension {tyext with tyext_params; tyext_constructors}\n\n  and map_extension_constructor ext =\n    let ext = Map.enter_extension_constructor ext in\n    let ext_kind =\n      match 
ext.ext_kind with\n      | Text_decl (args, ret) ->\n        let args = map_constructor_arguments args in\n        let ret = may_map map_core_type ret in\n        Text_decl (args, ret)\n      | Text_rebind (p, lid) -> Text_rebind (p, lid)\n    in\n    Map.leave_extension_constructor {ext with ext_kind}\n\n  and map_pattern pat =\n    let pat = Map.enter_pattern pat in\n    let pat_desc =\n      match pat.pat_desc with\n      | Tpat_alias (pat1, p, text) ->\n        let pat1 = map_pattern pat1 in\n        Tpat_alias (pat1, p, text)\n      | Tpat_tuple list -> Tpat_tuple (List.map map_pattern list)\n      | Tpat_construct (lid, cstr_decl, args) ->\n        Tpat_construct (lid, cstr_decl, List.map map_pattern args)\n      | Tpat_variant (label, pato, rowo) ->\n        let pato =\n          match pato with\n          | None -> pato\n          | Some pat -> Some (map_pattern pat)\n        in\n        Tpat_variant (label, pato, rowo)\n      | Tpat_record (list, closed) ->\n        Tpat_record\n          ( List.map\n              (fun (lid, lab_desc, pat) -> (lid, lab_desc, map_pattern pat))\n              list,\n            closed )\n      | Tpat_array list -> Tpat_array (List.map map_pattern list)\n      | Tpat_or (p1, p2, rowo) -> Tpat_or (map_pattern p1, map_pattern p2, rowo)\n      | Tpat_lazy p -> Tpat_lazy (map_pattern p)\n      | Tpat_constant _ | Tpat_any | Tpat_var _ -> pat.pat_desc\n    in\n\n    let pat_extra = List.map map_pat_extra pat.pat_extra in\n    Map.leave_pattern {pat with pat_desc; pat_extra}\n\n  and map_pat_extra pat_extra =\n    match pat_extra with\n    | Tpat_constraint ct, loc, attrs ->\n      (Tpat_constraint (map_core_type ct), loc, attrs)\n    | (Tpat_type _ | Tpat_unpack | Tpat_open _), _, _ -> pat_extra\n\n  and map_expression exp =\n    let exp = Map.enter_expression exp in\n    let exp_desc =\n      match exp.exp_desc with\n      | Texp_ident (_, _, _) | Texp_constant _ -> exp.exp_desc\n      | Texp_let (rec_flag, list, exp) ->\n        
Texp_let (rec_flag, map_bindings list, map_expression exp)\n      | Texp_function {arg_label; param; cases; partial} ->\n        Texp_function {arg_label; param; cases = map_cases cases; partial}\n      | Texp_apply (exp, list) ->\n        Texp_apply\n          ( map_expression exp,\n            List.map\n              (fun (label, expo) ->\n                let expo =\n                  match expo with\n                  | None -> expo\n                  | Some exp -> Some (map_expression exp)\n                in\n                (label, expo))\n              list )\n      | Texp_match (exp, list1, list2, partial) ->\n        Texp_match\n          (map_expression exp, map_cases list1, map_cases list2, partial)\n      | Texp_try (exp, list) -> Texp_try (map_expression exp, map_cases list)\n      | Texp_tuple list -> Texp_tuple (List.map map_expression list)\n      | Texp_construct (lid, cstr_desc, args) ->\n        Texp_construct (lid, cstr_desc, List.map map_expression args)\n      | Texp_variant (label, expo) ->\n        let expo =\n          match expo with\n          | None -> expo\n          | Some exp -> Some (map_expression exp)\n        in\n        Texp_variant (label, expo)\n      | Texp_record {fields; representation; extended_expression} ->\n        let fields =\n          Array.map\n            (function\n              | label, Kept t -> (label, Kept t)\n              | label, Overridden (lid, exp) ->\n                (label, Overridden (lid, map_expression exp)))\n            fields\n        in\n        let extended_expression =\n          match extended_expression with\n          | None -> extended_expression\n          | Some exp -> Some (map_expression exp)\n        in\n        Texp_record {fields; representation; extended_expression}\n      | Texp_field (exp, lid, label) ->\n        Texp_field (map_expression exp, lid, label)\n      | Texp_setfield (exp1, lid, label, exp2) ->\n        Texp_setfield (map_expression exp1, lid, label, map_expression 
exp2)\n      | Texp_array list -> Texp_array (List.map map_expression list)\n      | Texp_ifthenelse (exp1, exp2, expo) ->\n        Texp_ifthenelse\n          ( map_expression exp1,\n            map_expression exp2,\n            match expo with\n            | None -> expo\n            | Some exp -> Some (map_expression exp) )\n      | Texp_sequence (exp1, exp2) ->\n        Texp_sequence (map_expression exp1, map_expression exp2)\n      | Texp_while (exp1, exp2) ->\n        Texp_while (map_expression exp1, map_expression exp2)\n      | Texp_for (id, name, exp1, exp2, dir, exp3) ->\n        Texp_for\n          ( id,\n            name,\n            map_expression exp1,\n            map_expression exp2,\n            dir,\n            map_expression exp3 )\n      | Texp_send (exp, meth, expo) ->\n        Texp_send (map_expression exp, meth, may_map map_expression expo)\n      | Texp_new _ | Texp_instvar _ | Texp_setinstvar _ | Texp_override _ ->\n        assert false\n      | Texp_letmodule (id, name, mexpr, exp) ->\n        Texp_letmodule (id, name, map_module_expr mexpr, map_expression exp)\n      | Texp_letexception (cd, exp) ->\n        Texp_letexception (map_extension_constructor cd, map_expression exp)\n      | Texp_assert exp -> Texp_assert (map_expression exp)\n      | Texp_lazy exp -> Texp_lazy (map_expression exp)\n      | Texp_object () -> Texp_object ()\n      | Texp_pack mexpr -> Texp_pack (map_module_expr mexpr)\n      | Texp_unreachable -> Texp_unreachable\n      | Texp_extension_constructor _ as e -> e\n    in\n    let exp_extra = List.map map_exp_extra exp.exp_extra in\n    Map.leave_expression {exp with exp_desc; exp_extra}\n\n  and map_exp_extra ((desc, loc, attrs) as exp_extra) =\n    match desc with\n    | Texp_constraint ct -> (Texp_constraint (map_core_type ct), loc, attrs)\n    | Texp_coerce (None, ct) ->\n      (Texp_coerce (None, map_core_type ct), loc, attrs)\n    | Texp_coerce (Some ct1, ct2) ->\n      (Texp_coerce (Some (map_core_type ct1), 
map_core_type ct2), loc, attrs)\n    | Texp_poly (Some ct) -> (Texp_poly (Some (map_core_type ct)), loc, attrs)\n    | Texp_newtype _ | Texp_open _ | Texp_poly None -> exp_extra\n\n  and map_package_type pack =\n    let pack = Map.enter_package_type pack in\n    let pack_fields =\n      List.map (fun (s, ct) -> (s, map_core_type ct)) pack.pack_fields\n    in\n    Map.leave_package_type {pack with pack_fields}\n\n  and map_signature sg =\n    let sg = Map.enter_signature sg in\n    let sig_items = List.map map_signature_item sg.sig_items in\n    Map.leave_signature {sg with sig_items}\n\n  and map_signature_item item =\n    let item = Map.enter_signature_item item in\n    let sig_desc =\n      match item.sig_desc with\n      | Tsig_value vd -> Tsig_value (map_value_description vd)\n      | Tsig_type (rf, list) ->\n        Tsig_type (rf, List.map map_type_declaration list)\n      | Tsig_typext tyext -> Tsig_typext (map_type_extension tyext)\n      | Tsig_exception ext -> Tsig_exception (map_extension_constructor ext)\n      | Tsig_module md ->\n        Tsig_module {md with md_type = map_module_type md.md_type}\n      | Tsig_recmodule list ->\n        Tsig_recmodule\n          (List.map\n             (fun md -> {md with md_type = map_module_type md.md_type})\n             list)\n      | Tsig_modtype mtd -> Tsig_modtype (map_module_type_declaration mtd)\n      | Tsig_open _ -> item.sig_desc\n      | Tsig_include incl ->\n        Tsig_include {incl with incl_mod = map_module_type incl.incl_mod}\n      | Tsig_class () -> Tsig_class ()\n      | Tsig_class_type list ->\n        Tsig_class_type (List.map map_class_type_declaration list)\n      | Tsig_attribute _ as x -> x\n    in\n    Map.leave_signature_item {item with sig_desc}\n\n  and map_module_type_declaration mtd =\n    let mtd = Map.enter_module_type_declaration mtd in\n    let mtd = {mtd with mtd_type = may_map map_module_type mtd.mtd_type} in\n    Map.leave_module_type_declaration mtd\n\n  and 
map_class_type_declaration cd =\n    let cd = Map.enter_class_type_declaration cd in\n    let ci_params = List.map map_type_parameter cd.ci_params in\n    let ci_expr = map_class_type cd.ci_expr in\n    Map.leave_class_type_declaration {cd with ci_params; ci_expr}\n\n  and map_module_type mty =\n    let mty = Map.enter_module_type mty in\n    let mty_desc =\n      match mty.mty_desc with\n      | Tmty_ident _ -> mty.mty_desc\n      | Tmty_alias _ -> mty.mty_desc\n      | Tmty_signature sg -> Tmty_signature (map_signature sg)\n      | Tmty_functor (id, name, mtype1, mtype2) ->\n        Tmty_functor\n          (id, name, Misc.may_map map_module_type mtype1, map_module_type mtype2)\n      | Tmty_with (mtype, list) ->\n        Tmty_with\n          ( map_module_type mtype,\n            List.map\n              (fun (path, lid, withc) -> (path, lid, map_with_constraint withc))\n              list )\n      | Tmty_typeof mexpr -> Tmty_typeof (map_module_expr mexpr)\n    in\n    Map.leave_module_type {mty with mty_desc}\n\n  and map_with_constraint cstr =\n    let cstr = Map.enter_with_constraint cstr in\n    let cstr =\n      match cstr with\n      | Twith_type decl -> Twith_type (map_type_declaration decl)\n      | Twith_typesubst decl -> Twith_typesubst (map_type_declaration decl)\n      | Twith_module _ -> cstr\n      | Twith_modsubst _ -> cstr\n    in\n    Map.leave_with_constraint cstr\n\n  and map_module_expr mexpr =\n    let mexpr = Map.enter_module_expr mexpr in\n    let mod_desc =\n      match mexpr.mod_desc with\n      | Tmod_ident _ -> mexpr.mod_desc\n      | Tmod_structure st -> Tmod_structure (map_structure st)\n      | Tmod_functor (id, name, mtype, mexpr) ->\n        Tmod_functor\n          (id, name, Misc.may_map map_module_type mtype, map_module_expr mexpr)\n      | Tmod_apply (mexp1, mexp2, coercion) ->\n        Tmod_apply (map_module_expr mexp1, map_module_expr mexp2, coercion)\n      | Tmod_constraint (mexpr, mod_type, Tmodtype_implicit, coercion) ->\n   
     Tmod_constraint\n          (map_module_expr mexpr, mod_type, Tmodtype_implicit, coercion)\n      | Tmod_constraint (mexpr, mod_type, Tmodtype_explicit mtype, coercion) ->\n        Tmod_constraint\n          ( map_module_expr mexpr,\n            mod_type,\n            Tmodtype_explicit (map_module_type mtype),\n            coercion )\n      | Tmod_unpack (exp, mod_type) -> Tmod_unpack (map_expression exp, mod_type)\n    in\n    Map.leave_module_expr {mexpr with mod_desc}\n\n  and map_class_type ct =\n    let ct = Map.enter_class_type ct in\n    let cltyp_desc =\n      match ct.cltyp_desc with\n      | Tcty_signature csg -> Tcty_signature (map_class_signature csg)\n      | Tcty_constr (path, lid, list) ->\n        Tcty_constr (path, lid, List.map map_core_type list)\n      | Tcty_arrow (label, ct, cl) ->\n        Tcty_arrow (label, map_core_type ct, map_class_type cl)\n      | Tcty_open (ovf, p, lid, env, e) ->\n        Tcty_open (ovf, p, lid, env, map_class_type e)\n    in\n    Map.leave_class_type {ct with cltyp_desc}\n\n  and map_class_signature cs =\n    let cs = Map.enter_class_signature cs in\n    let csig_self = map_core_type cs.csig_self in\n    let csig_fields = List.map map_class_type_field cs.csig_fields in\n    Map.leave_class_signature {cs with csig_self; csig_fields}\n\n  and map_class_type_field ctf =\n    let ctf = Map.enter_class_type_field ctf in\n    let ctf_desc =\n      match ctf.ctf_desc with\n      | Tctf_inherit ct -> Tctf_inherit (map_class_type ct)\n      | Tctf_val (s, mut, virt, ct) -> Tctf_val (s, mut, virt, map_core_type ct)\n      | Tctf_method (s, priv, virt, ct) ->\n        Tctf_method (s, priv, virt, map_core_type ct)\n      | Tctf_constraint (ct1, ct2) ->\n        Tctf_constraint (map_core_type ct1, map_core_type ct2)\n      | Tctf_attribute _ as x -> x\n    in\n    Map.leave_class_type_field {ctf with ctf_desc}\n\n  and map_core_type ct =\n    let ct = Map.enter_core_type ct in\n    let ctyp_desc =\n      match ct.ctyp_desc 
with\n      | Ttyp_any | Ttyp_var _ -> ct.ctyp_desc\n      | Ttyp_arrow (label, ct1, ct2) ->\n        Ttyp_arrow (label, map_core_type ct1, map_core_type ct2)\n      | Ttyp_tuple list -> Ttyp_tuple (List.map map_core_type list)\n      | Ttyp_constr (path, lid, list) ->\n        Ttyp_constr (path, lid, List.map map_core_type list)\n      | Ttyp_object (list, o) -> Ttyp_object (List.map map_object_field list, o)\n      | Ttyp_class (path, lid, list) ->\n        Ttyp_class (path, lid, List.map map_core_type list)\n      | Ttyp_alias (ct, s) -> Ttyp_alias (map_core_type ct, s)\n      | Ttyp_variant (list, bool, labels) ->\n        Ttyp_variant (List.map map_row_field list, bool, labels)\n      | Ttyp_poly (list, ct) -> Ttyp_poly (list, map_core_type ct)\n      | Ttyp_package pack -> Ttyp_package (map_package_type pack)\n    in\n    Map.leave_core_type {ct with ctyp_desc}\n\n  and map_row_field rf =\n    match rf with\n    | Ttag (label, attrs, bool, list) ->\n      Ttag (label, attrs, bool, List.map map_core_type list)\n    | Tinherit ct -> Tinherit (map_core_type ct)\n\n  and map_object_field ofield =\n    match ofield with\n    | OTtag (label, attrs, ct) -> OTtag (label, attrs, map_core_type ct)\n    | OTinherit ct -> OTinherit (map_core_type ct)\nend\n\nmodule DefaultMapArgument = struct\n  let enter_structure t = t\n  let enter_value_description t = t\n  let enter_type_declaration t = t\n  let enter_type_extension t = t\n  let enter_extension_constructor t = t\n  let enter_pattern t = t\n  let enter_expression t = t\n  let enter_package_type t = t\n  let enter_signature t = t\n  let enter_signature_item t = t\n  let enter_module_type_declaration t = t\n  let enter_module_type t = t\n  let enter_module_expr t = t\n  let enter_with_constraint t = t\n  let enter_class_signature t = t\n\n  let enter_class_description t = t\n  let enter_class_type_declaration t = t\n  let enter_class_type t = t\n  let enter_class_type_field t = t\n  let enter_core_type t = t\n  let 
enter_structure_item t = t\n\n  let leave_structure t = t\n  let leave_value_description t = t\n  let leave_type_declaration t = t\n  let leave_type_extension t = t\n  let leave_extension_constructor t = t\n  let leave_pattern t = t\n  let leave_expression t = t\n  let leave_package_type t = t\n  let leave_signature t = t\n  let leave_signature_item t = t\n  let leave_module_type_declaration t = t\n  let leave_module_type t = t\n  let leave_module_expr t = t\n  let leave_with_constraint t = t\n  let leave_class_signature t = t\n\n  let leave_class_description t = t\n  let leave_class_type_declaration t = t\n  let leave_class_type t = t\n  let leave_class_type_field t = t\n  let leave_core_type t = t\n  let leave_structure_item t = t\nend\n"
  },
  {
    "path": "analysis/vendor/ml/typedtreeMap.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*                   Fabrice Le Fessant, INRIA Saclay                     *)\n(*                                                                        *)\n(*   Copyright 2012 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Typedtree\n\nmodule type MapArgument = sig\n  val enter_structure : structure -> structure\n  val enter_value_description : value_description -> value_description\n  val enter_type_declaration : type_declaration -> type_declaration\n  val enter_type_extension : type_extension -> type_extension\n  val enter_extension_constructor :\n    extension_constructor -> extension_constructor\n  val enter_pattern : pattern -> pattern\n  val enter_expression : expression -> expression\n  val enter_package_type : package_type -> package_type\n  val enter_signature : signature -> signature\n  val enter_signature_item : signature_item -> signature_item\n  val enter_module_type_declaration :\n    module_type_declaration -> module_type_declaration\n  val enter_module_type : module_type -> module_type\n  val enter_module_expr : module_expr -> module_expr\n  val enter_with_constraint : 
with_constraint -> with_constraint\n  val enter_class_signature : class_signature -> class_signature\n  val enter_class_description : class_description -> class_description\n  val enter_class_type_declaration :\n    class_type_declaration -> class_type_declaration\n  val enter_class_type : class_type -> class_type\n  val enter_class_type_field : class_type_field -> class_type_field\n  val enter_core_type : core_type -> core_type\n  val enter_structure_item : structure_item -> structure_item\n\n  val leave_structure : structure -> structure\n  val leave_value_description : value_description -> value_description\n  val leave_type_declaration : type_declaration -> type_declaration\n  val leave_type_extension : type_extension -> type_extension\n  val leave_extension_constructor :\n    extension_constructor -> extension_constructor\n  val leave_pattern : pattern -> pattern\n  val leave_expression : expression -> expression\n  val leave_package_type : package_type -> package_type\n  val leave_signature : signature -> signature\n  val leave_signature_item : signature_item -> signature_item\n  val leave_module_type_declaration :\n    module_type_declaration -> module_type_declaration\n  val leave_module_type : module_type -> module_type\n  val leave_module_expr : module_expr -> module_expr\n  val leave_with_constraint : with_constraint -> with_constraint\n  val leave_class_signature : class_signature -> class_signature\n  val leave_class_description : class_description -> class_description\n  val leave_class_type_declaration :\n    class_type_declaration -> class_type_declaration\n  val leave_class_type : class_type -> class_type\n  val leave_class_type_field : class_type_field -> class_type_field\n  val leave_core_type : core_type -> core_type\n  val leave_structure_item : structure_item -> structure_item\nend\n\nmodule MakeMap : functor (Iter : MapArgument) -> sig\n  val map_structure : structure -> structure\n  val map_pattern : pattern -> pattern\n  val 
map_structure_item : structure_item -> structure_item\n  val map_expression : expression -> expression\n\n  val map_signature : signature -> signature\n  val map_signature_item : signature_item -> signature_item\n  val map_module_type : module_type -> module_type\nend\n\nmodule DefaultMapArgument : MapArgument\n"
  },
  {
    "path": "analysis/vendor/ml/typemod.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Misc\nopen Longident\nopen Path\nopen Asttypes\nopen Parsetree\nopen Types\nopen Format\n\ntype error =\n  | Cannot_apply of module_type\n  | Not_included of Includemod.error list\n  | Cannot_eliminate_dependency of module_type\n  | Signature_expected\n  | Structure_expected of module_type\n  | With_no_component of Longident.t\n  | With_mismatch of Longident.t * Includemod.error list\n  | With_makes_applicative_functor_ill_typed of\n      Longident.t * Path.t * Includemod.error list\n  | With_changes_module_alias of Longident.t * Ident.t * Path.t\n  | With_cannot_remove_constrained_type\n  | Repeated_name of string * string * Warnings.loc\n  | Non_generalizable of type_expr\n  | Non_generalizable_module of module_type\n  | Interface_not_compiled of string\n  | Not_allowed_in_functor_body\n  | Not_a_packed_module of type_expr\n  | Incomplete_packed_module of type_expr\n  | 
Scoping_pack of Longident.t * type_expr\n  | Recursive_module_require_explicit_type\n  | Apply_generative\n  | Cannot_scrape_alias of Path.t\n\nexception Error of Location.t * Env.t * error\nexception Error_forward of Location.error\n\nlet rescript_hide_attributes (x : Typedtree.attributes) =\n  match x with\n  | [] -> false\n  | ({txt = \"internal.local\"; _}, _) :: _ -> true\n  | _ :: rest -> Ext_list.exists rest (fun (x, _) -> x.txt = \"internal.local\")\n\nlet rescript_hide (x : Typedtree.structure_item_desc) =\n  match x with\n  | Tstr_module {mb_attributes} -> rescript_hide_attributes mb_attributes\n  | _ -> false\n\nopen Typedtree\n\nlet fst3 (x, _, _) = x\n\nlet rec path_concat head p =\n  match p with\n  | Pident tail -> Pdot (Pident head, Ident.name tail, 0)\n  | Pdot (pre, s, pos) -> Pdot (path_concat head pre, s, pos)\n  | Papply _ -> assert false\n\n(* Extract a signature from a module type *)\n\nlet extract_sig env loc mty =\n  match Env.scrape_alias env mty with\n  | Mty_signature sg -> sg\n  | Mty_alias (_, path) -> raise (Error (loc, env, Cannot_scrape_alias path))\n  | _ -> raise (Error (loc, env, Signature_expected))\n\nlet extract_sig_open env loc mty =\n  match Env.scrape_alias env mty with\n  | Mty_signature sg -> sg\n  | Mty_alias (_, path) -> raise (Error (loc, env, Cannot_scrape_alias path))\n  | mty -> raise (Error (loc, env, Structure_expected mty))\n\n(* Compute the environment after opening a module *)\n\nlet type_open_ ?used_slot ?toplevel ovf env loc lid =\n  let path = Typetexp.lookup_module ~load:true env lid.loc lid.txt in\n  match Env.open_signature ~loc ?used_slot ?toplevel ovf path env with\n  | Some env -> (path, env)\n  | None ->\n    let md = Env.find_module path env in\n    ignore (extract_sig_open env lid.loc md.md_type);\n    assert false\n\nlet type_open ?toplevel env sod =\n  let path, newenv =\n    Builtin_attributes.warning_scope sod.popen_attributes (fun () ->\n        type_open_ ?toplevel sod.popen_override env 
sod.popen_loc sod.popen_lid)\n  in\n  let od =\n    {\n      open_override = sod.popen_override;\n      open_path = path;\n      open_txt = sod.popen_lid;\n      open_attributes = sod.popen_attributes;\n      open_loc = sod.popen_loc;\n    }\n  in\n  (path, newenv, od)\n\n(* Record a module type *)\nlet rm node =\n  Stypes.record (Stypes.Ti_mod node);\n  node\n\n(* Forward declaration, to be filled in by type_module_type_of *)\nlet type_module_type_of_fwd :\n    (Env.t ->\n    Parsetree.module_expr ->\n    Typedtree.module_expr * Types.module_type)\n    ref =\n  ref (fun _env _m -> assert false)\n\n(* Merge one \"with\" constraint in a signature *)\n\nlet rec add_rec_types env = function\n  | Sig_type (id, decl, Trec_next) :: rem ->\n    add_rec_types (Env.add_type ~check:true id decl env) rem\n  | _ -> env\n\nlet check_type_decl env loc id row_id newdecl decl rs rem =\n  let env = Env.add_type ~check:true id newdecl env in\n  let env =\n    match row_id with\n    | None -> env\n    | Some id -> Env.add_type ~check:false id newdecl env\n  in\n  let env = if rs = Trec_not then env else add_rec_types env rem in\n  Includemod.type_declarations ~loc env id newdecl decl;\n  Typedecl.check_coherence env loc id newdecl\n\nlet update_rec_next rs rem =\n  match rs with\n  | Trec_next -> rem\n  | Trec_first | Trec_not -> (\n    match rem with\n    | Sig_type (id, decl, Trec_next) :: rem -> Sig_type (id, decl, rs) :: rem\n    | Sig_module (id, mty, Trec_next) :: rem -> Sig_module (id, mty, rs) :: rem\n    | _ -> rem)\n\nlet make p n i =\n  let open Variance in\n  set May_pos p (set May_neg n (set May_weak n (set Inj i null)))\n\nlet rec iter_path_apply p ~f =\n  match p with\n  | Pident _ -> ()\n  | Pdot (p, _, _) -> iter_path_apply p ~f\n  | Papply (p1, p2) ->\n    iter_path_apply p1 ~f;\n    iter_path_apply p2 ~f;\n    f p1 p2 (* after recursing, so we know both paths are well typed *)\n\nlet path_is_strict_prefix =\n  let rec list_is_strict_prefix l ~prefix =\n    match 
(l, prefix) with\n    | [], [] -> false\n    | _ :: _, [] -> true\n    | [], _ :: _ -> false\n    | s1 :: t1, s2 :: t2 ->\n      String.equal s1 s2 && list_is_strict_prefix t1 ~prefix:t2\n  in\n  fun path ~prefix ->\n    match (Path.flatten path, Path.flatten prefix) with\n    | `Contains_apply, _ | _, `Contains_apply -> false\n    | `Ok (ident1, l1), `Ok (ident2, l2) ->\n      Ident.same ident1 ident2 && list_is_strict_prefix l1 ~prefix:l2\n\nlet iterator_with_env env =\n  let env = ref env in\n  let super = Btype.type_iterators in\n  ( env,\n    {\n      super with\n      Btype.it_signature =\n        (fun self sg ->\n          (* add all items to the env before recursing down, to handle recursive\n             definitions *)\n          let env_before = !env in\n          List.iter (fun i -> env := Env.add_item i !env) sg;\n          super.Btype.it_signature self sg;\n          env := env_before);\n      Btype.it_module_type =\n        (fun self -> function\n          | Mty_functor (param, mty_arg, mty_body) ->\n            may (self.Btype.it_module_type self) mty_arg;\n            let env_before = !env in\n            env :=\n              Env.add_module ~arg:true param (Btype.default_mty mty_arg) !env;\n            self.Btype.it_module_type self mty_body;\n            env := env_before\n          | mty -> super.Btype.it_module_type self mty);\n    } )\n\nlet retype_applicative_functor_type ~loc env funct arg =\n  let mty_functor = (Env.find_module funct env).md_type in\n  let mty_arg = (Env.find_module arg env).md_type in\n  let mty_param =\n    match Env.scrape_alias env mty_functor with\n    | Mty_functor (_, Some mty_param, _) -> mty_param\n    | _ -> assert false (* could trigger due to MPR#7611 *)\n  in\n  let aliasable = not (Env.is_functor_arg arg env) in\n  ignore\n    (Includemod.modtypes ~loc env\n       (Mtype.strengthen ~aliasable env mty_arg arg)\n       mty_param)\n\n(* When doing a deep destructive substitution with type M.N.t := .., we change 
M\n   and M.N and so we have to check that uses of the modules other than just\n   extracting components from them still make sense. There are only two such\n   kinds of uses:\n   - applicative functor types: F(M).t might not be well typed anymore\n   - aliases: module A = M still makes sense but it doesn't mean the same thing\n     anymore, so it's forbidden until it's clear what we should do with it.\n   This function would be called with M.N.t and N.t to check for these uses. *)\nlet check_usage_of_path_of_substituted_item paths env signature ~loc ~lid =\n  let iterator =\n    let env, super = iterator_with_env env in\n    {\n      super with\n      Btype.it_signature_item =\n        (fun self -> function\n          | Sig_module (id, {md_type = Mty_alias (_, aliased_path); _}, _)\n            when List.exists\n                   (fun path -> path_is_strict_prefix path ~prefix:aliased_path)\n                   paths ->\n            let e = With_changes_module_alias (lid.txt, id, aliased_path) in\n            raise (Error (loc, !env, e))\n          | sig_item -> super.Btype.it_signature_item self sig_item);\n      Btype.it_path =\n        (fun referenced_path ->\n          iter_path_apply referenced_path ~f:(fun funct arg ->\n              if\n                List.exists\n                  (fun path -> path_is_strict_prefix path ~prefix:arg)\n                  paths\n              then\n                let env = !env in\n                try retype_applicative_functor_type ~loc env funct arg\n                with Includemod.Error explanation ->\n                  raise\n                    (Error\n                       ( loc,\n                         env,\n                         With_makes_applicative_functor_ill_typed\n                           (lid.txt, referenced_path, explanation) ))));\n    }\n  in\n  iterator.Btype.it_signature iterator signature;\n  Btype.unmark_iterators.Btype.it_signature Btype.unmark_iterators signature\n\nlet type_decl_is_alias 
sdecl =\n  (* assuming no explicit constraint *)\n  match sdecl.ptype_manifest with\n  | Some {ptyp_desc = Ptyp_constr (lid, stl)}\n    when List.length stl = List.length sdecl.ptype_params -> (\n    match\n      List.iter2\n        (fun x (y, _) ->\n          match (x, y) with\n          | {ptyp_desc = Ptyp_var sx}, {ptyp_desc = Ptyp_var sy} when sx = sy ->\n            ()\n          | _, _ -> raise Exit)\n        stl sdecl.ptype_params\n    with\n    | exception Exit -> None\n    | () -> Some lid)\n  | _ -> None\n\nlet params_are_constrained =\n  let rec loop = function\n    | [] -> false\n    | hd :: tl -> (\n      match (Btype.repr hd).desc with\n      | Tvar _ -> List.memq hd tl || loop tl\n      | _ -> true)\n  in\n  loop\n\nlet merge_constraint initial_env loc sg constr =\n  let lid =\n    match constr with\n    | Pwith_type (lid, _)\n    | Pwith_module (lid, _)\n    | Pwith_typesubst (lid, _)\n    | Pwith_modsubst (lid, _) ->\n      lid\n  in\n  let destructive_substitution =\n    match constr with\n    | Pwith_type _ | Pwith_module _ -> false\n    | Pwith_typesubst _ | Pwith_modsubst _ -> true\n  in\n  let real_ids = ref [] in\n  let rec merge env sg namelist row_id =\n    match (sg, namelist, constr) with\n    | [], _, _ -> raise (Error (loc, env, With_no_component lid.txt))\n    | ( Sig_type (id, decl, rs) :: rem,\n        [s],\n        Pwith_type (_, ({ptype_kind = Ptype_abstract} as sdecl)) )\n      when Ident.name id = s && Typedecl.is_fixed_type sdecl ->\n      let decl_row =\n        {\n          type_params = List.map (fun _ -> Btype.newgenvar ()) sdecl.ptype_params;\n          type_arity = List.length sdecl.ptype_params;\n          type_kind = Type_abstract;\n          type_private = Private;\n          type_manifest = None;\n          type_variance =\n            List.map\n              (fun (_, v) ->\n                let c, n =\n                  match v with\n                  | Covariant -> (true, false)\n                  | Contravariant -> 
(false, true)\n                  | Invariant -> (false, false)\n                in\n                make (not n) (not c) false)\n              sdecl.ptype_params;\n          type_loc = sdecl.ptype_loc;\n          type_newtype_level = None;\n          type_attributes = [];\n          type_immediate = false;\n          type_unboxed = unboxed_false_default_false;\n        }\n      and id_row = Ident.create (s ^ \"#row\") in\n      let initial_env = Env.add_type ~check:false id_row decl_row initial_env in\n      let tdecl =\n        Typedecl.transl_with_constraint initial_env id (Some (Pident id_row))\n          decl sdecl\n      in\n      let newdecl = tdecl.typ_type in\n      check_type_decl env sdecl.ptype_loc id row_id newdecl decl rs rem;\n      let decl_row = {decl_row with type_params = newdecl.type_params} in\n      let rs' = if rs = Trec_first then Trec_not else rs in\n      ( (Pident id, lid, Twith_type tdecl),\n        Sig_type (id_row, decl_row, rs') :: Sig_type (id, newdecl, rs) :: rem )\n    | Sig_type (id, decl, rs) :: rem, [s], Pwith_type (_, sdecl)\n      when Ident.name id = s ->\n      let tdecl =\n        Typedecl.transl_with_constraint initial_env id None decl sdecl\n      in\n      let newdecl = tdecl.typ_type in\n      check_type_decl env sdecl.ptype_loc id row_id newdecl decl rs rem;\n      ((Pident id, lid, Twith_type tdecl), Sig_type (id, newdecl, rs) :: rem)\n    | Sig_type (id, _, _) :: rem, [s], (Pwith_type _ | Pwith_typesubst _)\n      when Ident.name id = s ^ \"#row\" ->\n      merge env rem namelist (Some id)\n    | Sig_type (id, decl, rs) :: rem, [s], Pwith_typesubst (_, sdecl)\n      when Ident.name id = s ->\n      (* Check as for a normal with constraint, but discard definition *)\n      let tdecl =\n        Typedecl.transl_with_constraint initial_env id None decl sdecl\n      in\n      let newdecl = tdecl.typ_type in\n      check_type_decl env sdecl.ptype_loc id row_id newdecl decl rs rem;\n      real_ids := [Pident id];\n      
((Pident id, lid, Twith_typesubst tdecl), update_rec_next rs rem)\n    | Sig_module (id, md, rs) :: rem, [s], Pwith_module (_, lid')\n      when Ident.name id = s ->\n      let path, md' = Typetexp.find_module initial_env loc lid'.txt in\n      let md'' = {md' with md_type = Mtype.remove_aliases env md'.md_type} in\n      let newmd = Mtype.strengthen_decl ~aliasable:false env md'' path in\n      ignore (Includemod.modtypes ~loc env newmd.md_type md.md_type);\n      ( (Pident id, lid, Twith_module (path, lid')),\n        Sig_module (id, newmd, rs) :: rem )\n    | Sig_module (id, md, rs) :: rem, [s], Pwith_modsubst (_, lid')\n      when Ident.name id = s ->\n      let path, md' = Typetexp.find_module initial_env loc lid'.txt in\n      let newmd = Mtype.strengthen_decl ~aliasable:false env md' path in\n      ignore (Includemod.modtypes ~loc env newmd.md_type md.md_type);\n      real_ids := [Pident id];\n      ((Pident id, lid, Twith_modsubst (path, lid')), update_rec_next rs rem)\n    | Sig_module (id, md, rs) :: rem, s :: namelist, _ when Ident.name id = s ->\n      let (path, _path_loc, tcstr), newsg =\n        merge env (extract_sig env loc md.md_type) namelist None\n      in\n      let path = path_concat id path in\n      real_ids := path :: !real_ids;\n      let item = Sig_module (id, {md with md_type = Mty_signature newsg}, rs) in\n      ((path, lid, tcstr), item :: rem)\n    | item :: rem, _, _ ->\n      let cstr, items = merge (Env.add_item item env) rem namelist row_id in\n      (cstr, item :: items)\n  in\n  try\n    let names = Longident.flatten lid.txt in\n    let tcstr, sg = merge initial_env sg names None in\n    (if destructive_substitution then\n       match List.rev !real_ids with\n       | [] -> assert false\n       | last :: rest -> (\n         (* The last item is the one that's removed. We don't need to check how\n            it's used since it's replaced by a more specific type/module. 
*)\n         assert (\n           match last with\n           | Pident _ -> true\n           | _ -> false);\n         match rest with\n         | [] -> ()\n         | _ :: _ ->\n           check_usage_of_path_of_substituted_item rest initial_env sg ~loc ~lid\n         ));\n    let sg =\n      match tcstr with\n      | _, _, Twith_typesubst tdecl ->\n        let how_to_extend_subst =\n          let sdecl =\n            match constr with\n            | Pwith_typesubst (_, sdecl) -> sdecl\n            | _ -> assert false\n          in\n          match type_decl_is_alias sdecl with\n          | Some lid ->\n            let replacement =\n              try Env.lookup_type lid.txt initial_env\n              with Not_found -> assert false\n            in\n            fun s path -> Subst.add_type_path path replacement s\n          | None ->\n            let body =\n              match tdecl.typ_type.type_manifest with\n              | None -> assert false\n              | Some x -> x\n            in\n            let params = tdecl.typ_type.type_params in\n            if params_are_constrained params then\n              raise\n                (Error (loc, initial_env, With_cannot_remove_constrained_type));\n            fun s path -> Subst.add_type_function path ~params ~body s\n        in\n        let sub = List.fold_left how_to_extend_subst Subst.identity !real_ids in\n        Subst.signature sub sg\n      | _, _, Twith_modsubst (real_path, _) ->\n        let sub =\n          List.fold_left\n            (fun s path -> Subst.add_module_path path real_path s)\n            Subst.identity !real_ids\n        in\n        Subst.signature sub sg\n      | _ -> sg\n    in\n    (tcstr, sg)\n  with Includemod.Error explanation ->\n    raise (Error (loc, initial_env, With_mismatch (lid.txt, explanation)))\n\n(* Add recursion flags on declarations arising from a mutually recursive\n   block. 
*)\n\nlet map_rec fn decls rem =\n  match decls with\n  | [] -> rem\n  | d1 :: dl -> fn Trec_first d1 :: map_end (fn Trec_next) dl rem\n\nlet map_rec_type ~rec_flag fn decls rem =\n  match decls with\n  | [] -> rem\n  | d1 :: dl ->\n    let first =\n      match rec_flag with\n      | Recursive -> Trec_first\n      | Nonrecursive -> Trec_not\n    in\n    fn first d1 :: map_end (fn Trec_next) dl rem\n\nlet rec map_rec_type_with_row_types ~rec_flag fn decls rem =\n  match decls with\n  | [] -> rem\n  | d1 :: dl ->\n    if Btype.is_row_name (Ident.name d1.typ_id) then\n      fn Trec_not d1 :: map_rec_type_with_row_types ~rec_flag fn dl rem\n    else map_rec_type ~rec_flag fn decls rem\n\n(* Add type extension flags to extension constructors *)\nlet map_ext fn exts rem =\n  match exts with\n  | [] -> rem\n  | d1 :: dl -> fn Text_first d1 :: map_end (fn Text_next) dl rem\n\n(* Auxiliary for translating recursively-defined module types.\n   Return a module type that approximates the shape of the given module\n   type AST.  Retain only module, type, and module type\n   components of signatures.  For types, retain only their arity,\n   making them abstract otherwise. 
*)\n\nlet rec approx_modtype env smty =\n  match smty.pmty_desc with\n  | Pmty_ident lid ->\n    let path, _info = Typetexp.find_modtype env smty.pmty_loc lid.txt in\n    Mty_ident path\n  | Pmty_alias lid ->\n    let path = Typetexp.lookup_module env smty.pmty_loc lid.txt in\n    Mty_alias (Mta_absent, path)\n  | Pmty_signature ssg -> Mty_signature (approx_sig env ssg)\n  | Pmty_functor (param, sarg, sres) ->\n    let arg = may_map (approx_modtype env) sarg in\n    let id, newenv =\n      Env.enter_module ~arg:true param.txt (Btype.default_mty arg) env\n    in\n    let res = approx_modtype newenv sres in\n    Mty_functor (id, arg, res)\n  | Pmty_with (sbody, _constraints) -> approx_modtype env sbody\n  | Pmty_typeof smod ->\n    let _, mty = !type_module_type_of_fwd env smod in\n    mty\n  | Pmty_extension ext ->\n    raise (Error_forward (Builtin_attributes.error_of_extension ext))\n\nand approx_module_declaration env pmd =\n  {\n    Types.md_type = approx_modtype env pmd.pmd_type;\n    md_attributes = pmd.pmd_attributes;\n    md_loc = pmd.pmd_loc;\n  }\n\nand approx_sig env ssg =\n  match ssg with\n  | [] -> []\n  | item :: srem -> (\n    match item.psig_desc with\n    | Psig_type (rec_flag, sdecls) ->\n      let decls = Typedecl.approx_type_decl sdecls in\n      let rem = approx_sig env srem in\n      map_rec_type ~rec_flag\n        (fun rs (id, info) -> Sig_type (id, info, rs))\n        decls rem\n    | Psig_module pmd ->\n      let id = Ident.create pmd.pmd_name.txt in\n      let md = approx_module_declaration env pmd in\n      let newenv = Env.enter_module_declaration id md env in\n      Sig_module (id, md, Trec_not) :: approx_sig newenv srem\n    | Psig_recmodule sdecls ->\n      let decls =\n        List.map\n          (fun pmd ->\n            (Ident.create pmd.pmd_name.txt, approx_module_declaration env pmd))\n          sdecls\n      in\n      let newenv =\n        List.fold_left\n          (fun env (id, md) ->\n            Env.add_module_declaration 
~check:false id md env)\n          env decls\n      in\n      map_rec\n        (fun rs (id, md) -> Sig_module (id, md, rs))\n        decls (approx_sig newenv srem)\n    | Psig_modtype d ->\n      let info = approx_modtype_info env d in\n      let id, newenv = Env.enter_modtype d.pmtd_name.txt info env in\n      Sig_modtype (id, info) :: approx_sig newenv srem\n    | Psig_open sod ->\n      let _path, mty, _od = type_open env sod in\n      approx_sig mty srem\n    | Psig_include sincl ->\n      let smty = sincl.pincl_mod in\n      let mty = approx_modtype env smty in\n      let sg =\n        Subst.signature Subst.identity (extract_sig env smty.pmty_loc mty)\n      in\n      let newenv = Env.add_signature sg env in\n      sg @ approx_sig newenv srem\n    | Psig_class_type sdecls ->\n      let decls = Typeclass.approx_class_declarations env sdecls in\n      let rem = approx_sig env srem in\n      List.flatten\n        (map_rec\n           (fun rs decl ->\n             let open Typeclass in\n             [\n               Sig_class_type (decl.clsty_ty_id, decl.clsty_ty_decl, rs);\n               Sig_type (decl.clsty_obj_id, decl.clsty_obj_abbr, rs);\n               Sig_type (decl.clsty_typesharp_id, decl.clsty_abbr, rs);\n             ])\n           decls [rem])\n    | Psig_class () -> assert false\n    | _ -> approx_sig env srem)\n\nand approx_modtype_info env sinfo =\n  {\n    mtd_type = may_map (approx_modtype env) sinfo.pmtd_type;\n    mtd_attributes = sinfo.pmtd_attributes;\n    mtd_loc = sinfo.pmtd_loc;\n  }\n\nlet approx_modtype env smty =\n  Warnings.without_warnings (fun () -> approx_modtype env smty)\n\n(* Additional validity checks on type definitions arising from\n   recursive modules *)\n\nlet check_recmod_typedecls env sdecls decls =\n  let recmod_ids = List.map fst3 decls in\n  List.iter2\n    (fun pmd (id, _, mty) ->\n      let mty = mty.mty_type in\n      List.iter\n        (fun path ->\n          Typedecl.check_recmod_typedecl env 
pmd.pmd_type.pmty_loc recmod_ids\n            path (Env.find_type path env))\n        (Mtype.type_paths env (Pident id) mty))\n    sdecls decls\n\n(* Auxiliaries for checking uniqueness of names in signatures and structures *)\n\nmodule StringSet = Set.Make (struct\n  type t = string\n  let compare (x : t) y = String.compare x y\nend)\n\nlet check cl loc tbl name =\n  match Hashtbl.find_opt tbl name with\n  | Some repeated_loc ->\n    raise (Error (loc, Env.empty, Repeated_name (cl, name, repeated_loc)))\n  | None -> Hashtbl.add tbl name loc\n\ntype names = {\n  types: (string, Warnings.loc) Hashtbl.t;\n  modules: (string, Warnings.loc) Hashtbl.t;\n  modtypes: (string, Warnings.loc) Hashtbl.t;\n  typexts: (string, Warnings.loc) Hashtbl.t;\n}\n\nlet new_names () =\n  {\n    types = Hashtbl.create 10;\n    modules = Hashtbl.create 10;\n    modtypes = Hashtbl.create 10;\n    typexts = Hashtbl.create 10;\n  }\n\nlet check_name check names name = check names name.loc name.txt\nlet check_type names loc s = check \"type\" loc names.types s\nlet check_module names loc s = check \"module\" loc names.modules s\nlet check_modtype names loc s = check \"module type\" loc names.modtypes s\nlet check_typext names loc s = check \"extension constructor\" loc names.typexts s\n\nlet check_sig_item names loc = function\n  | Sig_type (id, _, _) -> check_type names loc (Ident.name id)\n  | Sig_module (id, _, _) -> check_module names loc (Ident.name id)\n  | Sig_modtype (id, _) -> check_modtype names loc (Ident.name id)\n  | Sig_typext (id, _, _) -> check_typext names loc (Ident.name id)\n  | _ -> ()\n\n(* Simplify multiple specifications of a value or an extension in a signature.\n   (Other signature components, e.g. types, modules, etc, are checked for\n   name uniqueness.)  If multiple specifications with the same name,\n   keep only the last (rightmost) one. 
*)\n\nlet simplify_signature sg =\n  let rec aux = function\n    | [] -> ([], StringSet.empty)\n    | (Sig_value (id, _descr) as component) :: sg ->\n      let ((sg, val_names) as k) = aux sg in\n      let name = Ident.name id in\n      if StringSet.mem name val_names then k\n      else (component :: sg, StringSet.add name val_names)\n    | component :: sg ->\n      let sg, val_names = aux sg in\n      (component :: sg, val_names)\n  in\n  let sg, _ = aux sg in\n  sg\n\n(* Check and translate a module type expression *)\n\nlet transl_modtype_longident loc env lid =\n  let path, _info = Typetexp.find_modtype env loc lid in\n  path\n\nlet transl_module_alias loc env lid = Typetexp.lookup_module env loc lid\n\nlet mkmty desc typ env loc attrs =\n  let mty =\n    {\n      mty_desc = desc;\n      mty_type = typ;\n      mty_loc = loc;\n      mty_env = env;\n      mty_attributes = attrs;\n    }\n  in\n  Cmt_format.add_saved_type (Cmt_format.Partial_module_type mty);\n  mty\n\nlet mksig desc env loc =\n  let sg = {sig_desc = desc; sig_loc = loc; sig_env = env} in\n  Cmt_format.add_saved_type (Cmt_format.Partial_signature_item sg);\n  sg\n\n(* let signature sg = List.map (fun item -> item.sig_type) sg *)\n\nlet rec transl_modtype env smty =\n  Builtin_attributes.warning_scope smty.pmty_attributes (fun () ->\n      transl_modtype_aux env smty)\n\nand transl_modtype_aux env smty =\n  let loc = smty.pmty_loc in\n  match smty.pmty_desc with\n  | Pmty_ident lid ->\n    let path = transl_modtype_longident loc env lid.txt in\n    mkmty (Tmty_ident (path, lid)) (Mty_ident path) env loc smty.pmty_attributes\n  | Pmty_alias lid ->\n    let path = transl_module_alias loc env lid.txt in\n    mkmty\n      (Tmty_alias (path, lid))\n      (Mty_alias (Mta_absent, path))\n      env loc smty.pmty_attributes\n  | Pmty_signature ssg ->\n    let sg = transl_signature env ssg in\n    mkmty (Tmty_signature sg) (Mty_signature sg.sig_type) env loc\n      smty.pmty_attributes\n  | Pmty_functor 
(param, sarg, sres) ->\n    let arg = Misc.may_map (transl_modtype env) sarg in\n    let ty_arg = Misc.may_map (fun m -> m.mty_type) arg in\n    let id, newenv =\n      Env.enter_module ~arg:true param.txt (Btype.default_mty ty_arg) env\n    in\n    Ctype.init_def (Ident.current_time ());\n    (* PR#6513 *)\n    let res = transl_modtype newenv sres in\n    mkmty\n      (Tmty_functor (id, param, arg, res))\n      (Mty_functor (id, ty_arg, res.mty_type))\n      env loc smty.pmty_attributes\n  | Pmty_with (sbody, constraints) ->\n    let body = transl_modtype env sbody in\n    let init_sg = extract_sig env sbody.pmty_loc body.mty_type in\n    let rev_tcstrs, final_sg =\n      List.fold_left\n        (fun (rev_tcstrs, sg) sdecl ->\n          let tcstr, sg = merge_constraint env smty.pmty_loc sg sdecl in\n          (tcstr :: rev_tcstrs, sg))\n        ([], init_sg) constraints\n    in\n    mkmty\n      (Tmty_with (body, List.rev rev_tcstrs))\n      (Mtype.freshen (Mty_signature final_sg))\n      env loc smty.pmty_attributes\n  | Pmty_typeof smod ->\n    let env = Env.in_signature false env in\n    let tmty, mty = !type_module_type_of_fwd env smod in\n    mkmty (Tmty_typeof tmty) mty env loc smty.pmty_attributes\n  | Pmty_extension ext ->\n    raise (Error_forward (Builtin_attributes.error_of_extension ext))\n\nand transl_signature env sg =\n  let names = new_names () in\n  let rec transl_sig env sg =\n    Ctype.init_def (Ident.current_time ());\n    match sg with\n    | [] -> ([], [], env)\n    | item :: srem -> (\n      let loc = item.psig_loc in\n      match item.psig_desc with\n      | Psig_value sdesc ->\n        let tdesc, newenv =\n          Typedecl.transl_value_decl env item.psig_loc sdesc\n        in\n        let trem, rem, final_env = transl_sig newenv srem in\n        ( mksig (Tsig_value tdesc) env loc :: trem,\n          Sig_value (tdesc.val_id, tdesc.val_val) :: rem,\n          final_env )\n      | Psig_type (rec_flag, sdecls) ->\n        List.iter\n         
 (fun decl -> check_name check_type names decl.ptype_name)\n          sdecls;\n        let decls, newenv = Typedecl.transl_type_decl env rec_flag sdecls in\n        let trem, rem, final_env = transl_sig newenv srem in\n        ( mksig (Tsig_type (rec_flag, decls)) env loc :: trem,\n          map_rec_type_with_row_types ~rec_flag\n            (fun rs td -> Sig_type (td.typ_id, td.typ_type, rs))\n            decls rem,\n          final_env )\n      | Psig_typext styext ->\n        List.iter\n          (fun pext -> check_name check_typext names pext.pext_name)\n          styext.ptyext_constructors;\n        let tyext, newenv =\n          Typedecl.transl_type_extension false env item.psig_loc styext\n        in\n        let trem, rem, final_env = transl_sig newenv srem in\n        let constructors = tyext.tyext_constructors in\n        ( mksig (Tsig_typext tyext) env loc :: trem,\n          map_ext\n            (fun es ext -> Sig_typext (ext.ext_id, ext.ext_type, es))\n            constructors rem,\n          final_env )\n      | Psig_exception sext ->\n        check_name check_typext names sext.pext_name;\n        let ext, newenv = Typedecl.transl_exception env sext in\n        let trem, rem, final_env = transl_sig newenv srem in\n        ( mksig (Tsig_exception ext) env loc :: trem,\n          Sig_typext (ext.ext_id, ext.ext_type, Text_exception) :: rem,\n          final_env )\n      | Psig_module pmd ->\n        check_name check_module names pmd.pmd_name;\n        let id = Ident.create pmd.pmd_name.txt in\n        let tmty =\n          Builtin_attributes.warning_scope pmd.pmd_attributes (fun () ->\n              transl_modtype env pmd.pmd_type)\n        in\n        let md =\n          {\n            md_type = tmty.mty_type;\n            md_attributes = pmd.pmd_attributes;\n            md_loc = pmd.pmd_loc;\n          }\n        in\n        let newenv = Env.enter_module_declaration id md env in\n        let trem, rem, final_env = transl_sig newenv srem in\n        ( 
mksig\n            (Tsig_module\n               {\n                 md_id = id;\n                 md_name = pmd.pmd_name;\n                 md_type = tmty;\n                 md_loc = pmd.pmd_loc;\n                 md_attributes = pmd.pmd_attributes;\n               })\n            env loc\n          :: trem,\n          Sig_module (id, md, Trec_not) :: rem,\n          final_env )\n      | Psig_recmodule sdecls ->\n        List.iter (fun pmd -> check_name check_module names pmd.pmd_name) sdecls;\n        let decls, newenv = transl_recmodule_modtypes env sdecls in\n        let trem, rem, final_env = transl_sig newenv srem in\n        ( mksig (Tsig_recmodule decls) env loc :: trem,\n          map_rec\n            (fun rs md ->\n              let d =\n                {\n                  Types.md_type = md.md_type.mty_type;\n                  md_attributes = md.md_attributes;\n                  md_loc = md.md_loc;\n                }\n              in\n              Sig_module (md.md_id, d, rs))\n            decls rem,\n          final_env )\n      | Psig_modtype pmtd ->\n        let newenv, mtd, sg = transl_modtype_decl names env pmtd in\n        let trem, rem, final_env = transl_sig newenv srem in\n        (mksig (Tsig_modtype mtd) env loc :: trem, sg :: rem, final_env)\n      | Psig_open sod ->\n        let _path, newenv, od = type_open env sod in\n        let trem, rem, final_env = transl_sig newenv srem in\n        (mksig (Tsig_open od) env loc :: trem, rem, final_env)\n      | Psig_include sincl ->\n        let smty = sincl.pincl_mod in\n        let tmty =\n          Builtin_attributes.warning_scope sincl.pincl_attributes (fun () ->\n              transl_modtype env smty)\n        in\n        let mty = tmty.mty_type in\n        let sg =\n          Subst.signature Subst.identity (extract_sig env smty.pmty_loc mty)\n        in\n        List.iter (check_sig_item names item.psig_loc) sg;\n        let newenv = Env.add_signature sg env in\n        let incl =\n          
{\n            incl_mod = tmty;\n            incl_type = sg;\n            incl_attributes = sincl.pincl_attributes;\n            incl_loc = sincl.pincl_loc;\n          }\n        in\n        let trem, rem, final_env = transl_sig newenv srem in\n        (mksig (Tsig_include incl) env loc :: trem, sg @ rem, final_env)\n      | Psig_class _ -> assert false\n      | Psig_class_type cl ->\n        List.iter (fun {pci_name} -> check_name check_type names pci_name) cl;\n        let classes, newenv = Typeclass.class_type_declarations env cl in\n        let trem, rem, final_env = transl_sig newenv srem in\n        ( mksig\n            (Tsig_class_type\n               (List.map (fun decl -> decl.Typeclass.clsty_info) classes))\n            env loc\n          :: trem,\n          List.flatten\n            (map_rec\n               (fun rs decl ->\n                 let open Typeclass in\n                 [\n                   Sig_class_type (decl.clsty_ty_id, decl.clsty_ty_decl, rs);\n                   Sig_type (decl.clsty_obj_id, decl.clsty_obj_abbr, rs);\n                   Sig_type (decl.clsty_typesharp_id, decl.clsty_abbr, rs);\n                 ])\n               classes [rem]),\n          final_env )\n      | Psig_attribute x ->\n        Builtin_attributes.warning_attribute x;\n        let trem, rem, final_env = transl_sig env srem in\n        (mksig (Tsig_attribute x) env loc :: trem, rem, final_env)\n      | Psig_extension (ext, _attrs) ->\n        raise (Error_forward (Builtin_attributes.error_of_extension ext)))\n  in\n  let previous_saved_types = Cmt_format.get_saved_types () in\n  Builtin_attributes.warning_scope [] (fun () ->\n      let trem, rem, final_env = transl_sig (Env.in_signature true env) sg in\n      let rem = simplify_signature rem in\n      let sg = {sig_items = trem; sig_type = rem; sig_final_env = final_env} in\n      Cmt_format.set_saved_types\n        (Cmt_format.Partial_signature sg :: previous_saved_types);\n      sg)\n\nand transl_modtype_decl 
names env pmtd =\n  Builtin_attributes.warning_scope pmtd.pmtd_attributes (fun () ->\n      transl_modtype_decl_aux names env pmtd)\n\nand transl_modtype_decl_aux names env\n    {pmtd_name; pmtd_type; pmtd_attributes; pmtd_loc} =\n  check_name check_modtype names pmtd_name;\n  let tmty = Misc.may_map (transl_modtype env) pmtd_type in\n  let decl =\n    {\n      Types.mtd_type = may_map (fun t -> t.mty_type) tmty;\n      mtd_attributes = pmtd_attributes;\n      mtd_loc = pmtd_loc;\n    }\n  in\n  let id, newenv = Env.enter_modtype pmtd_name.txt decl env in\n  let mtd =\n    {\n      mtd_id = id;\n      mtd_name = pmtd_name;\n      mtd_type = tmty;\n      mtd_attributes = pmtd_attributes;\n      mtd_loc = pmtd_loc;\n    }\n  in\n  (newenv, mtd, Sig_modtype (id, decl))\n\nand transl_recmodule_modtypes env sdecls =\n  let make_env curr =\n    List.fold_left\n      (fun env (id, _, mty) -> Env.add_module ~arg:true id mty env)\n      env curr\n  in\n  let make_env2 curr =\n    List.fold_left\n      (fun env (id, _, mty) -> Env.add_module ~arg:true id mty.mty_type env)\n      env curr\n  in\n  let transition env_c curr =\n    List.map2\n      (fun pmd (id, id_loc, _mty) ->\n        let tmty =\n          Builtin_attributes.warning_scope pmd.pmd_attributes (fun () ->\n              transl_modtype env_c pmd.pmd_type)\n        in\n        (id, id_loc, tmty))\n      sdecls curr\n  in\n  let ids = List.map (fun x -> Ident.create x.pmd_name.txt) sdecls in\n  let approx_env =\n    (*\n       cf #5965\n       We use a dummy module type in order to detect a reference to one\n       of the module being defined during the call to approx_modtype.\n       It will be detected in Env.lookup_module.\n    *)\n    List.fold_left\n      (fun env id ->\n        let dummy = Mty_ident (Path.Pident (Ident.create \"#recmod#\")) in\n        Env.add_module ~arg:true id dummy env)\n      env ids\n  in\n  Ctype.init_def (Ident.current_time ());\n  (* PR#7082 *)\n  let init =\n    List.map2\n      
(fun id pmd -> (id, pmd.pmd_name, approx_modtype approx_env pmd.pmd_type))\n      ids sdecls\n  in\n  let env0 = make_env init in\n  let dcl1 = Warnings.without_warnings (fun () -> transition env0 init) in\n  let env1 = make_env2 dcl1 in\n  check_recmod_typedecls env1 sdecls dcl1;\n  let dcl2 = transition env1 dcl1 in\n  (*\n  List.iter\n    (fun (id, mty) ->\n      Format.printf \"%a: %a@.\" Printtyp.ident id Printtyp.modtype mty)\n    dcl2;\n*)\n  let env2 = make_env2 dcl2 in\n  check_recmod_typedecls env2 sdecls dcl2;\n  let dcl2 =\n    List.map2\n      (fun pmd (id, id_loc, mty) ->\n        {\n          md_id = id;\n          md_name = id_loc;\n          md_type = mty;\n          md_loc = pmd.pmd_loc;\n          md_attributes = pmd.pmd_attributes;\n        })\n      sdecls dcl2\n  in\n  (dcl2, env2)\n\n(* Try to convert a module expression to a module path. *)\n\nexception Not_a_path\n\nlet rec path_of_module mexp =\n  match mexp.mod_desc with\n  | Tmod_ident (p, _) -> p\n  | Tmod_apply (funct, arg, _coercion) when !Clflags.applicative_functors ->\n    Papply (path_of_module funct, path_of_module arg)\n  | Tmod_constraint (mexp, _, _, _) -> path_of_module mexp\n  | _ -> raise Not_a_path\n\nlet path_of_module mexp =\n  try Some (path_of_module mexp) with Not_a_path -> None\n\n(* Check that all core type schemes in a structure are closed *)\n\nlet rec closed_modtype env = function\n  | Mty_ident _ -> true\n  | Mty_alias _ -> true\n  | Mty_signature sg ->\n    let env = Env.add_signature sg env in\n    List.for_all (closed_signature_item env) sg\n  | Mty_functor (id, param, body) ->\n    let env = Env.add_module ~arg:true id (Btype.default_mty param) env in\n    closed_modtype env body\n\nand closed_signature_item env = function\n  | Sig_value (_id, desc) -> Ctype.closed_schema env desc.val_type\n  | Sig_module (_id, md, _) -> closed_modtype env md.md_type\n  | _ -> true\n\nlet check_nongen_scheme env sig_item =\n  match sig_item with\n  | Sig_value (_id, vd) ->\n 
   if not (Ctype.closed_schema env vd.val_type) then\n      raise (Error (vd.val_loc, env, Non_generalizable vd.val_type))\n  | Sig_module (_id, md, _) ->\n    if not (closed_modtype env md.md_type) then\n      raise (Error (md.md_loc, env, Non_generalizable_module md.md_type))\n  | _ -> ()\n\nlet check_nongen_schemes env sg = List.iter (check_nongen_scheme env) sg\n\n(* Helpers for typing recursive modules *)\n\nlet anchor_submodule name anchor =\n  match anchor with\n  | None -> None\n  | Some p -> Some (Pdot (p, name, nopos))\nlet anchor_recmodule id = Some (Pident id)\n\nlet enrich_type_decls anchor decls oldenv newenv =\n  match anchor with\n  | None -> newenv\n  | Some p ->\n    List.fold_left\n      (fun e info ->\n        let id = info.typ_id in\n        let info' =\n          Mtype.enrich_typedecl oldenv\n            (Pdot (p, Ident.name id, nopos))\n            info.typ_type\n        in\n        Env.add_type ~check:true id info' e)\n      oldenv decls\n\nlet enrich_module_type anchor name mty env =\n  match anchor with\n  | None -> mty\n  | Some p -> Mtype.enrich_modtype env (Pdot (p, name, nopos)) mty\n\nlet check_recmodule_inclusion env bindings =\n  (* PR#4450, PR#4470: consider\n        module rec X : DECL = MOD  where MOD has inferred type ACTUAL\n     The \"natural\" typing condition\n        E, X: ACTUAL |- ACTUAL <: DECL\n     leads to circularities through manifest types.\n     Instead, we \"unroll away\" the potential circularities a finite number\n     of times.  The (weaker) condition we implement is:\n        E, X: DECL,\n           X1: ACTUAL,\n           X2: ACTUAL{X <- X1}/X1\n           ...\n           Xn: ACTUAL{X <- X(n-1)}/X(n-1)\n        |- ACTUAL{X <- Xn}/Xn <: DECL{X <- Xn}\n     so that manifest types rooted at X(n+1) are expanded in terms of X(n),\n     avoiding circularities.  The strengthenings ensure that\n     Xn.t = X(n-1).t = ... 
= X2.t = X1.t.\n     N can be chosen arbitrarily; larger values of N result in more\n     recursive definitions being accepted.  A good choice appears to be\n     the number of mutually recursive declarations. *)\n  let subst_and_strengthen env s id mty =\n    Mtype.strengthen ~aliasable:false env (Subst.modtype s mty)\n      (Subst.module_path s (Pident id))\n  in\n\n  let rec check_incl first_time n env s =\n    if n > 0 then\n      (* Generate fresh names Y_i for the rec. bound module idents X_i *)\n      let bindings1 =\n        List.map\n          (fun (id, _, _mty_decl, _modl, mty_actual, _attrs, _loc) ->\n            (id, Ident.rename id, mty_actual))\n          bindings\n      in\n      (* Enter the Y_i in the environment with their actual types substituted\n         by the input substitution s *)\n      let env' =\n        List.fold_left\n          (fun env (id, id', mty_actual) ->\n            let mty_actual' =\n              if first_time then mty_actual\n              else subst_and_strengthen env s id mty_actual\n            in\n            Env.add_module ~arg:false id' mty_actual' env)\n          env bindings1\n      in\n      (* Build the output substitution Y_i <- X_i *)\n      let s' =\n        List.fold_left\n          (fun s (id, id', _mty_actual) -> Subst.add_module id (Pident id') s)\n          Subst.identity bindings1\n      in\n      (* Recurse with env' and s' *)\n      check_incl false (n - 1) env' s'\n    else\n      (* Base case: check inclusion of s(mty_actual) in s(mty_decl)\n         and insert coercion if needed *)\n      let check_inclusion (id, id_loc, mty_decl, modl, mty_actual, attrs, loc) =\n        let mty_decl' = Subst.modtype s mty_decl.mty_type\n        and mty_actual' = subst_and_strengthen env s id mty_actual in\n        let coercion =\n          try Includemod.modtypes ~loc:modl.mod_loc env mty_actual' mty_decl'\n          with Includemod.Error msg ->\n            raise (Error (modl.mod_loc, env, Not_included msg))\n       
 in\n        let modl' =\n          {\n            mod_desc =\n              Tmod_constraint\n                (modl, mty_decl.mty_type, Tmodtype_explicit mty_decl, coercion);\n            mod_type = mty_decl.mty_type;\n            mod_env = env;\n            mod_loc = modl.mod_loc;\n            mod_attributes = [];\n          }\n        in\n        {\n          mb_id = id;\n          mb_name = id_loc;\n          mb_expr = modl';\n          mb_attributes = attrs;\n          mb_loc = loc;\n        }\n      in\n      List.map check_inclusion bindings\n  in\n  check_incl true (List.length bindings) env Subst.identity\n\n(* Helper for unpack *)\n\nlet rec package_constraints env loc mty constrs =\n  if constrs = [] then mty\n  else\n    let sg = extract_sig env loc mty in\n    let sg' =\n      List.map\n        (function\n          | Sig_type (id, ({type_params = []} as td), rs)\n            when List.mem_assoc [Ident.name id] constrs ->\n            let ty = List.assoc [Ident.name id] constrs in\n            Sig_type (id, {td with type_manifest = Some ty}, rs)\n          | Sig_module (id, md, rs) ->\n            let rec aux = function\n              | (m :: (_ :: _ as l), t) :: rest when m = Ident.name id ->\n                (l, t) :: aux rest\n              | _ :: rest -> aux rest\n              | [] -> []\n            in\n            let md =\n              {\n                md with\n                md_type = package_constraints env loc md.md_type (aux constrs);\n              }\n            in\n            Sig_module (id, md, rs)\n          | item -> item)\n        sg\n    in\n    Mty_signature sg'\n\nlet modtype_of_package env loc p nl tl =\n  try\n    match (Env.find_modtype p env).mtd_type with\n    | Some mty when nl <> [] ->\n      package_constraints env loc mty\n        (List.combine (List.map Longident.flatten nl) tl)\n    | _ ->\n      if nl = [] then Mty_ident p\n      else raise (Error (loc, env, Signature_expected))\n  with Not_found ->\n    let error = 
Typetexp.Unbound_modtype (Ctype.lid_of_path p) in\n    raise (Typetexp.Error (loc, env, error))\n\nlet package_subtype env p1 nl1 tl1 p2 nl2 tl2 =\n  let mkmty p nl tl =\n    let ntl =\n      Ext_list.filter (List.combine nl tl) (fun (_n, t) ->\n          Ctype.free_variables t = [])\n    in\n    let nl, tl = List.split ntl in\n    modtype_of_package env Location.none p nl tl\n  in\n  let mty1 = mkmty p1 nl1 tl1 and mty2 = mkmty p2 nl2 tl2 in\n  try Includemod.modtypes ~loc:Location.none env mty1 mty2 = Tcoerce_none\n  with Includemod.Error _msg -> false\n(* raise(Error(Location.none, env, Not_included msg)) *)\n\nlet () = Ctype.package_subtype := package_subtype\n\nlet wrap_constraint env arg mty explicit =\n  let coercion =\n    try Includemod.modtypes ~loc:arg.mod_loc env arg.mod_type mty\n    with Includemod.Error msg ->\n      raise (Error (arg.mod_loc, env, Not_included msg))\n  in\n  {\n    mod_desc = Tmod_constraint (arg, mty, explicit, coercion);\n    mod_type = mty;\n    mod_env = env;\n    mod_attributes = [];\n    mod_loc = arg.mod_loc;\n  }\n\n(* Type a module value expression *)\n\nlet rec type_module ?(alias = false) sttn funct_body anchor env smod =\n  Builtin_attributes.warning_scope smod.pmod_attributes (fun () ->\n      type_module_aux ~alias sttn funct_body anchor env smod)\n\nand type_module_aux ~alias sttn funct_body anchor env smod =\n  match smod.pmod_desc with\n  | Pmod_ident lid ->\n    let path =\n      Typetexp.lookup_module ~load:(not alias) env smod.pmod_loc lid.txt\n    in\n    let md =\n      {\n        mod_desc = Tmod_ident (path, lid);\n        mod_type = Mty_alias (Mta_absent, path);\n        mod_env = env;\n        mod_attributes = smod.pmod_attributes;\n        mod_loc = smod.pmod_loc;\n      }\n    in\n    let aliasable = not (Env.is_functor_arg path env) in\n    let md =\n      if alias && aliasable then md\n      else\n        match (Env.find_module path env).md_type with\n        | Mty_alias (_, p1) when not alias ->\n       
   let p1 = Env.normalize_path (Some smod.pmod_loc) env p1 in\n          let mty = Includemod.expand_module_alias env [] p1 in\n          {\n            md with\n            mod_desc =\n              Tmod_constraint\n                (md, mty, Tmodtype_implicit, Tcoerce_alias (p1, Tcoerce_none));\n            mod_type =\n              (if sttn then Mtype.strengthen ~aliasable:true env mty p1 else mty);\n          }\n        | mty ->\n          let mty =\n            if sttn then Mtype.strengthen ~aliasable env mty path else mty\n          in\n          {md with mod_type = mty}\n    in\n    rm md\n  | Pmod_structure sstr ->\n    let str, sg, _finalenv =\n      type_structure funct_body anchor env sstr smod.pmod_loc\n    in\n    let md =\n      rm\n        {\n          mod_desc = Tmod_structure str;\n          mod_type = Mty_signature sg;\n          mod_env = env;\n          mod_attributes = smod.pmod_attributes;\n          mod_loc = smod.pmod_loc;\n        }\n    in\n    let sg' = simplify_signature sg in\n    if List.length sg' = List.length sg then md\n    else\n      wrap_constraint\n        (Env.implicit_coercion env)\n        md (Mty_signature sg') Tmodtype_implicit\n  | Pmod_functor (name, smty, sbody) ->\n    let mty = may_map (transl_modtype env) smty in\n    let ty_arg = may_map (fun m -> m.mty_type) mty in\n    let (id, newenv), funct_body =\n      match ty_arg with\n      | None -> ((Ident.create \"*\", env), false)\n      | Some mty -> (Env.enter_module ~arg:true name.txt mty env, true)\n    in\n    Ctype.init_def (Ident.current_time ());\n    (* PR#6981 *)\n    let body = type_module sttn funct_body None newenv sbody in\n    rm\n      {\n        mod_desc = Tmod_functor (id, name, mty, body);\n        mod_type = Mty_functor (id, ty_arg, body.mod_type);\n        mod_env = env;\n        mod_attributes = smod.pmod_attributes;\n        mod_loc = smod.pmod_loc;\n      }\n  | Pmod_apply (sfunct, sarg) -> (\n    let arg = type_module true funct_body None env 
sarg in\n    let path = path_of_module arg in\n    let funct = type_module (sttn && path <> None) funct_body None env sfunct in\n    match Env.scrape_alias env funct.mod_type with\n    | Mty_functor (param, mty_param, mty_res) as mty_functor ->\n      let generative, mty_param =\n        (mty_param = None, Btype.default_mty mty_param)\n      in\n      if generative then (\n        if sarg.pmod_desc <> Pmod_structure [] then\n          raise (Error (sfunct.pmod_loc, env, Apply_generative));\n        if funct_body && Mtype.contains_type env funct.mod_type then\n          raise (Error (smod.pmod_loc, env, Not_allowed_in_functor_body)));\n      let coercion =\n        try Includemod.modtypes ~loc:sarg.pmod_loc env arg.mod_type mty_param\n        with Includemod.Error msg ->\n          raise (Error (sarg.pmod_loc, env, Not_included msg))\n      in\n      let mty_appl =\n        match path with\n        | Some path ->\n          Subst.modtype (Subst.add_module param path Subst.identity) mty_res\n        | None -> (\n          if generative then mty_res\n          else\n            try\n              Mtype.nondep_supertype\n                (Env.add_module ~arg:true param arg.mod_type env)\n                param mty_res\n            with Not_found ->\n              raise\n                (Error\n                   (smod.pmod_loc, env, Cannot_eliminate_dependency mty_functor))\n          )\n      in\n      rm\n        {\n          mod_desc = Tmod_apply (funct, arg, coercion);\n          mod_type = mty_appl;\n          mod_env = env;\n          mod_attributes = smod.pmod_attributes;\n          mod_loc = smod.pmod_loc;\n        }\n    | Mty_alias (_, path) ->\n      raise (Error (sfunct.pmod_loc, env, Cannot_scrape_alias path))\n    | _ -> raise (Error (sfunct.pmod_loc, env, Cannot_apply funct.mod_type)))\n  | Pmod_constraint (sarg, smty) ->\n    let arg = type_module ~alias true funct_body anchor env sarg in\n    let mty = transl_modtype env smty in\n    rm\n      {\n        
(wrap_constraint env arg mty.mty_type (Tmodtype_explicit mty)) with\n        mod_loc = smod.pmod_loc;\n        mod_attributes = smod.pmod_attributes;\n      }\n  | Pmod_unpack sexp ->\n    let exp = Typecore.type_exp env sexp in\n    let mty =\n      match Ctype.expand_head env exp.exp_type with\n      | {desc = Tpackage (p, nl, tl)} ->\n        if List.exists (fun t -> Ctype.free_variables t <> []) tl then\n          raise\n            (Error (smod.pmod_loc, env, Incomplete_packed_module exp.exp_type));\n        modtype_of_package env smod.pmod_loc p nl tl\n      | {desc = Tvar _} ->\n        raise\n          (Typecore.Error (smod.pmod_loc, env, Typecore.Cannot_infer_signature))\n      | _ ->\n        raise (Error (smod.pmod_loc, env, Not_a_packed_module exp.exp_type))\n    in\n    if funct_body && Mtype.contains_type env mty then\n      raise (Error (smod.pmod_loc, env, Not_allowed_in_functor_body));\n    rm\n      {\n        mod_desc = Tmod_unpack (exp, mty);\n        mod_type = mty;\n        mod_env = env;\n        mod_attributes = smod.pmod_attributes;\n        mod_loc = smod.pmod_loc;\n      }\n  | Pmod_extension ext ->\n    raise (Error_forward (Builtin_attributes.error_of_extension ext))\n\nand type_structure ?(toplevel = false) funct_body anchor env sstr scope =\n  let names = new_names () in\n\n  let type_str_item env srem {pstr_loc = loc; pstr_desc = desc} =\n    match desc with\n    | Pstr_eval (sexpr, attrs) ->\n      let expr =\n        Builtin_attributes.warning_scope attrs (fun () ->\n            Typecore.type_expression env sexpr)\n      in\n      (Tstr_eval (expr, attrs), [], env)\n    | Pstr_value (rec_flag, sdefs) ->\n      let scope =\n        match rec_flag with\n        | Recursive ->\n          Some\n            (Annot.Idef {scope with Location.loc_start = loc.Location.loc_start})\n        | Nonrecursive ->\n          let start =\n            match srem with\n            | [] -> loc.Location.loc_end\n            | {pstr_loc = loc2} :: _ -> 
loc2.Location.loc_start\n          in\n          Some (Annot.Idef {scope with Location.loc_start = start})\n      in\n      let defs, newenv = Typecore.type_binding env rec_flag sdefs scope in\n      let () =\n        if rec_flag = Recursive then Rec_check.check_recursive_bindings defs\n      in\n      (* Note: Env.find_value does not trigger the value_used event. Values\n         will be marked as being used during the signature inclusion test. *)\n      ( Tstr_value (rec_flag, defs),\n        List.map\n          (fun id -> Sig_value (id, Env.find_value (Pident id) newenv))\n          (let_bound_idents defs),\n        newenv )\n    | Pstr_primitive sdesc ->\n      let desc, newenv = Typedecl.transl_value_decl env loc sdesc in\n      (Tstr_primitive desc, [Sig_value (desc.val_id, desc.val_val)], newenv)\n    | Pstr_type (rec_flag, sdecls) ->\n      List.iter (fun decl -> check_name check_type names decl.ptype_name) sdecls;\n      let decls, newenv = Typedecl.transl_type_decl env rec_flag sdecls in\n      ( Tstr_type (rec_flag, decls),\n        map_rec_type_with_row_types ~rec_flag\n          (fun rs info -> Sig_type (info.typ_id, info.typ_type, rs))\n          decls [],\n        enrich_type_decls anchor decls env newenv )\n    | Pstr_typext styext ->\n      List.iter\n        (fun pext -> check_name check_typext names pext.pext_name)\n        styext.ptyext_constructors;\n      let tyext, newenv = Typedecl.transl_type_extension true env loc styext in\n      ( Tstr_typext tyext,\n        map_ext\n          (fun es ext -> Sig_typext (ext.ext_id, ext.ext_type, es))\n          tyext.tyext_constructors [],\n        newenv )\n    | Pstr_exception sext ->\n      check_name check_typext names sext.pext_name;\n      let ext, newenv = Typedecl.transl_exception env sext in\n      ( Tstr_exception ext,\n        [Sig_typext (ext.ext_id, ext.ext_type, Text_exception)],\n        newenv )\n    | Pstr_module\n        {pmb_name = name; pmb_expr = smodl; pmb_attributes = attrs; 
pmb_loc} ->\n      check_name check_module names name;\n      let id = Ident.create name.txt in\n      (* create early for PR#6752 *)\n      let modl =\n        Builtin_attributes.warning_scope attrs (fun () ->\n            type_module ~alias:true true funct_body\n              (anchor_submodule name.txt anchor)\n              env smodl)\n      in\n      let md =\n        {\n          md_type = enrich_module_type anchor name.txt modl.mod_type env;\n          md_attributes = attrs;\n          md_loc = pmb_loc;\n        }\n      in\n      (*prerr_endline (Ident.unique_toplevel_name id);*)\n      Mtype.lower_nongen (Ident.binding_time id - 1) md.md_type;\n      let newenv = Env.enter_module_declaration id md env in\n      ( Tstr_module\n          {\n            mb_id = id;\n            mb_name = name;\n            mb_expr = modl;\n            mb_attributes = attrs;\n            mb_loc = pmb_loc;\n          },\n        [\n          Sig_module\n            ( id,\n              {md_type = modl.mod_type; md_attributes = attrs; md_loc = pmb_loc},\n              Trec_not );\n        ],\n        newenv )\n    | Pstr_recmodule sbind ->\n      let sbind =\n        List.map\n          (function\n            | {\n                pmb_name = name;\n                pmb_expr = {pmod_desc = Pmod_constraint (expr, typ)};\n                pmb_attributes = attrs;\n                pmb_loc = loc;\n              } ->\n              (name, typ, expr, attrs, loc)\n            | mb ->\n              raise\n                (Error\n                   ( mb.pmb_expr.pmod_loc,\n                     env,\n                     Recursive_module_require_explicit_type )))\n          sbind\n      in\n      List.iter\n        (fun (name, _, _, _, _) -> check_name check_module names name)\n        sbind;\n      let decls, newenv =\n        transl_recmodule_modtypes env\n          (List.map\n             (fun (name, smty, _smodl, attrs, loc) ->\n               {\n                 pmd_name = name;\n         
        pmd_type = smty;\n                 pmd_attributes = attrs;\n                 pmd_loc = loc;\n               })\n             sbind)\n      in\n      let bindings1 =\n        List.map2\n          (fun {md_id = id; md_type = mty} (name, _, smodl, attrs, loc) ->\n            let modl =\n              Builtin_attributes.warning_scope attrs (fun () ->\n                  type_module true funct_body (anchor_recmodule id) newenv smodl)\n            in\n            let mty' =\n              enrich_module_type anchor (Ident.name id) modl.mod_type newenv\n            in\n            (id, name, mty, modl, mty', attrs, loc))\n          decls sbind\n      in\n      let newenv =\n        (* allow aliasing recursive modules from outside *)\n        List.fold_left\n          (fun env md ->\n            let mdecl =\n              {\n                md_type = md.md_type.mty_type;\n                md_attributes = md.md_attributes;\n                md_loc = md.md_loc;\n              }\n            in\n            Env.add_module_declaration ~check:true md.md_id mdecl env)\n          env decls\n      in\n      let bindings2 = check_recmodule_inclusion newenv bindings1 in\n      ( Tstr_recmodule bindings2,\n        map_rec\n          (fun rs mb ->\n            Sig_module\n              ( mb.mb_id,\n                {\n                  md_type = mb.mb_expr.mod_type;\n                  md_attributes = mb.mb_attributes;\n                  md_loc = mb.mb_loc;\n                },\n                rs ))\n          bindings2 [],\n        newenv )\n    | Pstr_modtype pmtd ->\n      (* check that it is non-abstract *)\n      let newenv, mtd, sg = transl_modtype_decl names env pmtd in\n      (Tstr_modtype mtd, [sg], newenv)\n    | Pstr_open sod ->\n      let _path, newenv, od = type_open ~toplevel env sod in\n      (Tstr_open od, [], newenv)\n    | Pstr_class () -> assert false\n    | Pstr_class_type cl ->\n      List.iter (fun {pci_name} -> check_name check_type names pci_name) cl;\n      
let classes, new_env = Typeclass.class_type_declarations env cl in\n      ( Tstr_class_type\n          (List.map\n             (fun cl ->\n               ( cl.Typeclass.clsty_ty_id,\n                 cl.Typeclass.clsty_id_loc,\n                 cl.Typeclass.clsty_info ))\n             classes),\n        (* TODO: check with Jacques why this is here\n                  Tstr_type\n                    (List.map (fun (_, _, i, d, _, _) -> (i, d)) classes) ::\n                  Tstr_type\n                    (List.map (fun (_, _, _, _, i, d) -> (i, d)) classes) :: *)\n        List.flatten\n          (map_rec\n             (fun rs decl ->\n               let open Typeclass in\n               [\n                 Sig_class_type (decl.clsty_ty_id, decl.clsty_ty_decl, rs);\n                 Sig_type (decl.clsty_obj_id, decl.clsty_obj_abbr, rs);\n                 Sig_type (decl.clsty_typesharp_id, decl.clsty_abbr, rs);\n               ])\n             classes []),\n        new_env )\n    | Pstr_include sincl ->\n      let smodl = sincl.pincl_mod in\n      let modl =\n        Builtin_attributes.warning_scope sincl.pincl_attributes (fun () ->\n            type_module true funct_body None env smodl)\n      in\n      (* Rename all identifiers bound by this signature to avoid clashes *)\n      let sg =\n        Subst.signature Subst.identity\n          (extract_sig_open env smodl.pmod_loc modl.mod_type)\n      in\n      List.iter (check_sig_item names loc) sg;\n      let new_env = Env.add_signature sg env in\n      let incl =\n        {\n          incl_mod = modl;\n          incl_type = sg;\n          incl_attributes = sincl.pincl_attributes;\n          incl_loc = sincl.pincl_loc;\n        }\n      in\n      (Tstr_include incl, sg, new_env)\n    | Pstr_extension (ext, _attrs) ->\n      raise (Error_forward (Builtin_attributes.error_of_extension ext))\n    | Pstr_attribute x ->\n      Builtin_attributes.warning_attribute x;\n      (Tstr_attribute x, [], env)\n  in\n  let rec 
type_struct env sstr =\n    Ctype.init_def (Ident.current_time ());\n    match sstr with\n    | [] -> ([], [], env)\n    | pstr :: srem ->\n      let previous_saved_types = Cmt_format.get_saved_types () in\n      let desc, sg, new_env = type_str_item env srem pstr in\n      let str = {str_desc = desc; str_loc = pstr.pstr_loc; str_env = env} in\n      Cmt_format.set_saved_types\n        (Cmt_format.Partial_structure_item str :: previous_saved_types);\n      let str_rem, sig_rem, final_env = type_struct new_env srem in\n      let new_sg = if rescript_hide desc then sig_rem else sg @ sig_rem in\n      (str :: str_rem, new_sg, final_env)\n  in\n  if !Clflags.annotations then\n    (* moved to genannot *)\n    List.iter\n      (function\n        | {pstr_loc = l} -> Stypes.record_phrase l)\n      sstr;\n  let previous_saved_types = Cmt_format.get_saved_types () in\n  let run () =\n    let items, sg, final_env = type_struct env sstr in\n    let str = {str_items = items; str_type = sg; str_final_env = final_env} in\n    Cmt_format.set_saved_types\n      (Cmt_format.Partial_structure str :: previous_saved_types);\n    (str, sg, final_env)\n  in\n  if toplevel then run () else Builtin_attributes.warning_scope [] run\n\nlet type_toplevel_phrase env s =\n  type_structure ~toplevel:true false None env s Location.none\n\nlet type_module_alias = type_module ~alias:true true false None\nlet type_module = type_module true false None\nlet type_structure = type_structure false None\n\n(* Normalize types in a signature *)\n\nlet rec normalize_modtype env = function\n  | Mty_ident _ | Mty_alias _ -> ()\n  | Mty_signature sg -> normalize_signature env sg\n  | Mty_functor (_id, _param, body) -> normalize_modtype env body\n\nand normalize_signature env = List.iter (normalize_signature_item env)\n\nand normalize_signature_item env = function\n  | Sig_value (_id, desc) -> Ctype.normalize_type env desc.val_type\n  | Sig_module (_id, md, _) -> normalize_modtype env md.md_type\n  | _ -> 
()\n\n(* Extract the module type of a module expression *)\n\nlet type_module_type_of env smod =\n  let tmty =\n    match smod.pmod_desc with\n    | Pmod_ident lid ->\n      (* turn off strengthening in this case *)\n      let path, md = Typetexp.find_module env smod.pmod_loc lid.txt in\n      rm\n        {\n          mod_desc = Tmod_ident (path, lid);\n          mod_type = md.md_type;\n          mod_env = env;\n          mod_attributes = smod.pmod_attributes;\n          mod_loc = smod.pmod_loc;\n        }\n    | _ -> type_module env smod\n  in\n  let mty = tmty.mod_type in\n  (* PR#6307: expand aliases at root and submodules *)\n  let mty = Mtype.remove_aliases env mty in\n  (* PR#5036: must not contain non-generalized type variables *)\n  if not (closed_modtype env mty) then\n    raise (Error (smod.pmod_loc, env, Non_generalizable_module mty));\n  (tmty, mty)\n\n(* For Typecore *)\n\nlet type_package env m p nl =\n  (* Same as Pexp_letmodule *)\n  (* remember original level *)\n  let lv = Ctype.get_current_level () in\n  Ctype.begin_def ();\n  Ident.set_current_time lv;\n  let context = Typetexp.narrow () in\n  let modl = type_module env m in\n  Ctype.init_def (Ident.current_time ());\n  Typetexp.widen context;\n  let mp, env =\n    match modl.mod_desc with\n    | Tmod_ident (mp, _) -> (mp, env)\n    | Tmod_constraint ({mod_desc = Tmod_ident (mp, _)}, _, Tmodtype_implicit, _)\n      ->\n      (mp, env) (* PR#6982 *)\n    | _ ->\n      let id, new_env = Env.enter_module ~arg:true \"%M\" modl.mod_type env in\n      (Pident id, new_env)\n  in\n  let rec mkpath mp = function\n    | Lident name -> Pdot (mp, name, nopos)\n    | Ldot (m, name) -> Pdot (mkpath mp m, name, nopos)\n    | _ -> assert false\n  in\n  let tl' =\n    List.map\n      (fun name -> Btype.newgenty (Tconstr (mkpath mp name, [], ref Mnil)))\n      (* beware of interactions with Printtyp and short-path:\n         mp.name may have an arity > 0, cf. 
PR#7534 *)\n      nl\n  in\n  (* go back to original level *)\n  Ctype.end_def ();\n  if nl = [] then (wrap_constraint env modl (Mty_ident p) Tmodtype_implicit, [])\n  else\n    let mty = modtype_of_package env modl.mod_loc p nl tl' in\n    List.iter2\n      (fun n ty ->\n        try Ctype.unify env ty (Ctype.newvar ())\n        with Ctype.Unify _ ->\n          raise (Error (m.pmod_loc, env, Scoping_pack (n, ty))))\n      nl tl';\n    (wrap_constraint env modl mty Tmodtype_implicit, tl')\n\n(* Fill in the forward declarations *)\nlet () =\n  Typecore.type_module := type_module_alias;\n  Typetexp.transl_modtype_longident := transl_modtype_longident;\n  Typetexp.transl_modtype := transl_modtype;\n  Typecore.type_open := type_open_ ?toplevel:None;\n  Typecore.type_package := type_package;\n  type_module_type_of_fwd := type_module_type_of\n\n(* Typecheck an implementation file *)\n\nlet type_implementation_more ?check_exists sourcefile outputprefix modulename\n    initial_env ast =\n  Cmt_format.clear ();\n  try\n    Delayed_checks.reset_delayed_checks ();\n    let str, sg, finalenv =\n      type_structure initial_env ast (Location.in_file sourcefile)\n    in\n    let simple_sg = simplify_signature sg in\n    let sourceintf =\n      Filename.remove_extension sourcefile ^ !Config.interface_suffix\n    in\n    let mli_status = !Clflags.assume_no_mli in\n    if mli_status = Clflags.Mli_exists then (\n      let intf_file =\n        try find_in_path_uncap !Config.load_path (modulename ^ \".cmi\")\n        with Not_found ->\n          raise\n            (Error\n               ( Location.in_file sourcefile,\n                 Env.empty,\n                 Interface_not_compiled sourceintf ))\n      in\n      let dclsig = Env.read_signature modulename intf_file in\n      let coercion =\n        Includemod.compunit initial_env sourcefile sg intf_file dclsig\n      in\n      Delayed_checks.force_delayed_checks ();\n      (* It is important to run these checks after the inclusion 
test above,\n         so that value declarations which are not used internally but exported\n         are not reported as being unused. *)\n      Cmt_format.save_cmt (outputprefix ^ \".cmt\") modulename\n        (Cmt_format.Implementation str) (Some sourcefile) initial_env None;\n      (str, coercion, finalenv, dclsig)\n      (* identifier is useless might read from serialized cmi files*))\n    else\n      let coercion =\n        Includemod.compunit initial_env sourcefile sg \"(inferred signature)\"\n          simple_sg\n      in\n      check_nongen_schemes finalenv simple_sg;\n      normalize_signature finalenv simple_sg;\n      Delayed_checks.force_delayed_checks ();\n      (* See comment above. Here the target signature contains all\n         the value being exported. We can still capture unused\n         declarations like \"let x = true;; let x = 1;;\", because in this\n         case, the inferred signature contains only the last declaration. *)\n      (if not !Clflags.dont_write_files then\n         let deprecated = Builtin_attributes.deprecated_of_str ast in\n         let cmi =\n           Env.save_signature ?check_exists ~deprecated simple_sg modulename\n             (outputprefix ^ \".cmi\")\n         in\n         Cmt_format.save_cmt (outputprefix ^ \".cmt\") modulename\n           (Cmt_format.Implementation str) (Some sourcefile) initial_env\n           (Some cmi));\n      (str, coercion, finalenv, simple_sg)\n  with e ->\n    Cmt_format.save_cmt (outputprefix ^ \".cmt\") modulename\n      (Cmt_format.Partial_implementation\n         (Array.of_list (Cmt_format.get_saved_types ())))\n      (Some sourcefile) initial_env None;\n    raise e\n\nlet type_implementation sourcefile outputprefix modulename initial_env ast =\n  let a, b, _, _ =\n    type_implementation_more sourcefile outputprefix modulename initial_env ast\n  in\n  (a, b)\n\nlet save_signature modname tsg outputprefix source_file initial_env cmi =\n  Cmt_format.save_cmt (outputprefix ^ \".cmti\") 
modname\n    (Cmt_format.Interface tsg) (Some source_file) initial_env (Some cmi)\n\n(* \"Packaging\" of several compilation units into one unit\n   having them as sub-modules. *)\n\n(* Error report *)\n\nopen Printtyp\n\nlet non_generalizable_msg ppf print_fallback_msg =\n  fprintf ppf\n    \"%a@,\\\n     @,\\\n     @[This happens when the type system senses there's a \\\n     mutation/side-effect,@ in combination with a polymorphic value.@,\\\n     @{<info>Using or annotating that value usually solves it.@}@]\"\n    print_fallback_msg ()\n\nlet report_error ppf = function\n  | Cannot_apply mty ->\n    fprintf ppf \"@[This module is not a functor; it has type@ %a@]\" modtype mty\n  | Not_included errs ->\n    fprintf ppf \"@[<v>Signature mismatch:@ %a@]\" Includemod.report_error errs\n  | Cannot_eliminate_dependency mty ->\n    fprintf ppf\n      \"@[This functor has type@ %a@ The parameter cannot be eliminated in the \\\n       result type.@  Please bind the argument to a module identifier.@]\"\n      modtype mty\n  | Signature_expected -> fprintf ppf \"This module type is not a signature\"\n  | Structure_expected mty ->\n    fprintf ppf \"@[This module is not a structure; it has type@ %a\" modtype mty\n  | With_no_component lid ->\n    fprintf ppf\n      \"@[The signature constrained by `with' has no component named %a@]\"\n      longident lid\n  | With_mismatch (lid, explanation) ->\n    fprintf ppf\n      \"@[<v>@[In this `with' constraint, the new definition of %a@ does not \\\n       match its original definition@ in the constrained signature:@]@ %a@]\"\n      longident lid Includemod.report_error explanation\n  | With_makes_applicative_functor_ill_typed (lid, path, explanation) ->\n    fprintf ppf\n      \"@[<v>@[This `with' constraint on %a makes the applicative functor @ \\\n       type %s ill-typed in the constrained signature:@]@ %a@]\"\n      longident lid (Path.name path) Includemod.report_error explanation\n  | With_changes_module_alias (lid, id, 
path) ->\n    fprintf ppf\n      \"@[<v>@[This `with' constraint on %a changes %s, which is aliased @ in \\\n       the constrained signature (as %s)@].@]\"\n      longident lid (Path.name path) (Ident.name id)\n  | With_cannot_remove_constrained_type ->\n    fprintf ppf\n      \"@[<v>Destructive substitutions are not supported for constrained @ \\\n       types (other than when replacing a type constructor with @ a type \\\n       constructor with the same arguments).@]\"\n  | Repeated_name (kind, name, repeated_loc) ->\n    fprintf ppf\n      \"@[Multiple definition of the %s name %s @ at @{<loc>%a@}@ @ Names must \\\n       be unique in a given structure or signature.@]\"\n      kind name Location.print_loc repeated_loc\n  | Non_generalizable typ ->\n    (* modified *)\n    fprintf ppf \"@[<v>\";\n    non_generalizable_msg ppf (fun ppf () ->\n        fprintf ppf\n          \"@[This expression's type contains type variables that cannot be \\\n           generalized:@,\\\n           @{<error>%a@}@]\"\n          type_scheme typ);\n    fprintf ppf \"@]\"\n  | Non_generalizable_module mty ->\n    (* modified *)\n    fprintf ppf \"@[<v>\";\n    non_generalizable_msg ppf (fun ppf () ->\n        fprintf ppf\n          \"@[The type of this module contains type variables that cannot be \\\n           generalized:@,\\\n           @{<error>%a@}@]\"\n          modtype mty);\n    fprintf ppf \"@]\"\n  | Interface_not_compiled intf_name ->\n    fprintf ppf \"@[Could not find the .cmi file for interface@ %a.@]\"\n      Location.print_filename intf_name\n  | Not_allowed_in_functor_body ->\n    fprintf ppf \"@[This expression creates fresh types.@ %s@]\"\n      \"It is not allowed inside applicative functors.\"\n  | Not_a_packed_module ty ->\n    fprintf ppf \"This expression is not a packed module. 
It has type@ %a\"\n      type_expr ty\n  | Incomplete_packed_module ty ->\n    fprintf ppf \"The type of this packed module contains variables:@ %a\"\n      type_expr ty\n  | Scoping_pack (lid, ty) ->\n    fprintf ppf \"The type %a in this module cannot be exported.@ \" longident lid;\n    fprintf ppf \"Its type contains local dependencies:@ %a\" type_expr ty\n  | Recursive_module_require_explicit_type ->\n    fprintf ppf \"Recursive modules require an explicit module type.\"\n  | Apply_generative ->\n    fprintf ppf \"This is a generative functor. It can only be applied to ()\"\n  | Cannot_scrape_alias p ->\n    fprintf ppf \"This is an alias for module %a, which is missing\" path p\n\nlet super_report_error_no_wrap_printing_env = report_error\n\nlet report_error env ppf err =\n  Printtyp.wrap_printing_env env (fun () -> report_error ppf err)\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, env, err) ->\n      Some (Location.error_of_printer loc (report_error env) err)\n    | Error_forward err -> Some err\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/typemod.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** Type-checking of the module language and typed ast plugin hooks *)\n\nopen Types\nopen Format\n\nval type_module : Env.t -> Parsetree.module_expr -> Typedtree.module_expr\nval type_structure :\n  Env.t ->\n  Parsetree.structure ->\n  Location.t ->\n  Typedtree.structure * Types.signature * Env.t\nval type_toplevel_phrase :\n  Env.t -> Parsetree.structure -> Typedtree.structure * Types.signature * Env.t\n\nval rescript_hide : Typedtree.structure_item_desc -> bool\n\nval type_implementation_more :\n  ?check_exists:unit ->\n  string ->\n  string ->\n  string ->\n  Env.t ->\n  Parsetree.structure ->\n  Typedtree.structure * Typedtree.module_coercion * Env.t * Types.signature\n\nval type_implementation :\n  string ->\n  string ->\n  string ->\n  Env.t ->\n  Parsetree.structure ->\n  Typedtree.structure * Typedtree.module_coercion\n\nval transl_signature : Env.t -> 
Parsetree.signature -> Typedtree.signature\nval check_nongen_schemes : Env.t -> Types.signature -> unit\nval type_open_ :\n  ?used_slot:bool ref ->\n  ?toplevel:bool ->\n  Asttypes.override_flag ->\n  Env.t ->\n  Location.t ->\n  Longident.t Asttypes.loc ->\n  Path.t * Env.t\nval simplify_signature : signature -> signature\n\nval path_of_module : Typedtree.module_expr -> Path.t option\n\nval save_signature :\n  string ->\n  Typedtree.signature ->\n  string ->\n  string ->\n  Env.t ->\n  Cmi_format.cmi_infos ->\n  unit\n\ntype error =\n  | Cannot_apply of module_type\n  | Not_included of Includemod.error list\n  | Cannot_eliminate_dependency of module_type\n  | Signature_expected\n  | Structure_expected of module_type\n  | With_no_component of Longident.t\n  | With_mismatch of Longident.t * Includemod.error list\n  | With_makes_applicative_functor_ill_typed of\n      Longident.t * Path.t * Includemod.error list\n  | With_changes_module_alias of Longident.t * Ident.t * Path.t\n  | With_cannot_remove_constrained_type\n  | Repeated_name of string * string * Warnings.loc\n  | Non_generalizable of type_expr\n  | Non_generalizable_module of module_type\n  | Interface_not_compiled of string\n  | Not_allowed_in_functor_body\n  | Not_a_packed_module of type_expr\n  | Incomplete_packed_module of type_expr\n  | Scoping_pack of Longident.t * type_expr\n  | Recursive_module_require_explicit_type\n  | Apply_generative\n  | Cannot_scrape_alias of Path.t\n\nexception Error of Location.t * Env.t * error\nexception Error_forward of Location.error\n\nval super_report_error_no_wrap_printing_env : formatter -> error -> unit\n\nval report_error : Env.t -> formatter -> error -> unit\n"
  },
  {
    "path": "analysis/vendor/ml/typeopt.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1998 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Auxiliaries for type-based optimizations, e.g. 
array kinds *)\n\nopen Types\nopen Asttypes\nopen Typedtree\nopen Lambda\n\nlet scrape_ty env ty =\n  let ty = Ctype.expand_head_opt env (Ctype.correct_levels ty) in\n  match ty.desc with\n  | Tconstr (p, _, _) -> (\n    match Env.find_type p env with\n    | {type_unboxed = {unboxed = true; _}; _} -> (\n      match Typedecl.get_unboxed_type_representation env ty with\n      | None -> ty\n      | Some ty2 -> ty2)\n    | _ -> ty\n    | exception Not_found -> ty)\n  | _ -> ty\n\nlet scrape env ty = (scrape_ty env ty).desc\n\n(** [Types.constructor_description] records the type at the definition type so\n    for ['a option] it will always be [Tvar] *)\nlet rec type_cannot_contain_undefined (typ : Types.type_expr) (env : Env.t) =\n  match scrape env typ with\n  | Tconstr (p, _, _) -> (\n    (* all built in types could not inhabit none-like values:\n       int, char, float, bool, unit, exn, array, list, nativeint,\n       int32, int64, lazy_t, bytes\n    *)\n    match Predef.type_is_builtin_path_but_option p with\n    | For_sure_yes -> true\n    | For_sure_no -> false\n    | NA -> (\n      let untagged = ref false in\n      match\n        let decl = Env.find_type p env in\n        let () =\n          if Ast_untagged_variants.has_untagged decl.type_attributes then\n            untagged := true\n        in\n        decl.type_kind\n      with\n      | exception _ -> false\n      | Type_abstract | Type_open -> false\n      | Type_record _ -> true\n      | Type_variant\n          ( [\n              {cd_id = {name = \"None\"}; cd_args = Cstr_tuple []};\n              {cd_id = {name = \"Some\"}; cd_args = Cstr_tuple [_]};\n            ]\n          | [\n              {cd_id = {name = \"Some\"}; cd_args = Cstr_tuple [_]};\n              {cd_id = {name = \"None\"}; cd_args = Cstr_tuple []};\n            ]\n          | [{cd_id = {name = \"()\"}; cd_args = Cstr_tuple []}] ) ->\n        false (* conservative *)\n      | Type_variant cdecls ->\n        Ext_list.for_all cdecls (fun cd 
->\n            if Ast_untagged_variants.has_undefined_literal cd.cd_attributes then\n              false\n            else if !untagged then\n              match cd.cd_args with\n              | Cstr_tuple [t] ->\n                Ast_untagged_variants.type_is_builtin_object t\n                || type_cannot_contain_undefined t env\n              | Cstr_tuple [] -> true\n              | Cstr_tuple (_ :: _ :: _) ->\n                true (* Not actually possible for untagged *)\n              | Cstr_record [{ld_type = t}] ->\n                Ast_untagged_variants.type_is_builtin_object t\n                || type_cannot_contain_undefined t env\n              | Cstr_record ([] | _ :: _ :: _) -> true\n            else true)))\n  | Ttuple _ | Tvariant _ | Tpackage _ | Tarrow _ -> true\n  | Tfield _ | Tpoly _ | Tunivar _ | Tlink _ | Tsubst _ | Tnil | Tvar _\n  | Tobject _ ->\n    false\n\nlet is_function_type env ty =\n  match scrape env ty with\n  | Tarrow (_, lhs, rhs, _) -> Some (lhs, rhs)\n  | _ -> None\n\nlet is_base_type env ty base_ty_path =\n  match scrape env ty with\n  | Tconstr (p, _, _) -> Path.same p base_ty_path\n  | _ -> false\n\nlet maybe_pointer_type env ty =\n  if Ctype.maybe_pointer_type env ty then Pointer else Immediate\n\nlet maybe_pointer exp = maybe_pointer_type exp.exp_env exp.exp_type\n\ntype classification =\n  | Int\n  | Float\n  | Lazy\n  | Addr (* anything except a float or a lazy *)\n  | Any\n\nlet classify env ty =\n  let ty = scrape_ty env ty in\n  if maybe_pointer_type env ty = Immediate then Int\n  else\n    match ty.desc with\n    | Tvar _ | Tunivar _ -> Any\n    | Tconstr (p, _args, _abbrev) -> (\n      if Path.same p Predef.path_float then Float\n      else if Path.same p Predef.path_lazy_t then Lazy\n      else if\n        Path.same p Predef.path_string\n        || Path.same p Predef.path_bytes\n        || Path.same p Predef.path_array\n        || Path.same p Predef.path_int64\n      then Addr\n      else\n        try\n          
match (Env.find_type p env).type_kind with\n          | Type_abstract -> Any\n          | Type_record _ | Type_variant _ | Type_open -> Addr\n        with Not_found ->\n          (* This can happen due to e.g. missing -I options,\n             causing some .cmi files to be unavailable.\n             Maybe we should emit a warning. *)\n          Any)\n    | Tarrow _ | Ttuple _ | Tpackage _ | Tobject _ | Tnil | Tvariant _ -> Addr\n    | Tlink _ | Tsubst _ | Tpoly _ | Tfield _ -> assert false\n\n(** Whether a forward block is needed for a lazy thunk on a value, i.e. if the\n    value can be represented as a float/forward/lazy *)\nlet lazy_val_requires_forward env ty =\n  match classify env ty with\n  | Any | Lazy -> true\n  | Float (*-> Config.flat_float_array*) | Addr | Int -> false\n\n(** The compilation of the expression [lazy e] depends on the form of e:\n    constants, floats and identifiers are optimized. The optimization must be\n    taken into account when determining whether a recursive binding is safe. *)\nlet classify_lazy_argument :\n    Typedtree.expression ->\n    [ `Constant_or_function\n    | `Float\n    | `Identifier of [`Forward_value | `Other]\n    | `Other ] =\n fun e ->\n  match e.exp_desc with\n  | Texp_constant\n      ( Const_int _ | Const_char _ | Const_string _ | Const_int32 _\n      | Const_int64 _ | Const_bigint _ )\n  | Texp_function _\n  | Texp_construct (_, {cstr_arity = 0}, _) ->\n    `Constant_or_function\n  | Texp_constant (Const_float _) -> `Float\n  | Texp_ident _ when lazy_val_requires_forward e.exp_env e.exp_type ->\n    `Identifier `Forward_value\n  | Texp_ident _ -> `Identifier `Other\n  | _ -> `Other\n"
  },
  {
    "path": "analysis/vendor/ml/typeopt.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1998 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Auxiliaries for type-based optimizations, e.g. array kinds *)\n\nval is_function_type :\n  Env.t -> Types.type_expr -> (Types.type_expr * Types.type_expr) option\nval is_base_type : Env.t -> Types.type_expr -> Path.t -> bool\n\nval maybe_pointer_type : Env.t -> Types.type_expr -> Lambda.immediate_or_pointer\nval maybe_pointer : Typedtree.expression -> Lambda.immediate_or_pointer\n\nval classify_lazy_argument :\n  Typedtree.expression ->\n  [ `Constant_or_function\n  | `Float\n  | `Identifier of [`Forward_value | `Other]\n  | `Other ]\n\nval type_cannot_contain_undefined : Types.type_expr -> Env.t -> bool\n"
  },
  {
    "path": "analysis/vendor/ml/types.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Representation of types and declarations *)\n\nopen Asttypes\n\n(* Type expressions for the core language *)\n\ntype type_expr = {mutable desc: type_desc; mutable level: int; id: int}\n\nand type_desc =\n  | Tvar of string option\n  | Tarrow of arg_label * type_expr * type_expr * commutable\n  | Ttuple of type_expr list\n  | Tconstr of Path.t * type_expr list * abbrev_memo ref\n  | Tobject of type_expr * (Path.t * type_expr list) option ref\n  | Tfield of string * field_kind * type_expr * type_expr\n  | Tnil\n  | Tlink of type_expr\n  | Tsubst of type_expr (* for copying *)\n  | Tvariant of row_desc\n  | Tunivar of string option\n  | Tpoly of type_expr * type_expr list\n  | Tpackage of Path.t * Longident.t list * type_expr list\n\nand row_desc = {\n  row_fields: (label * row_field) list;\n  row_more: type_expr;\n  row_bound: unit;\n  row_closed: bool;\n  row_fixed: bool;\n  
row_name: (Path.t * type_expr list) option;\n}\n\nand row_field =\n  | Rpresent of type_expr option\n  | Reither of bool * type_expr list * bool * row_field option ref\n  (* 1st true denotes a constant constructor *)\n  (* 2nd true denotes a tag in a pattern matching, and\n     is erased later *)\n  | Rabsent\n\nand abbrev_memo =\n  | Mnil\n  | Mcons of private_flag * Path.t * type_expr * type_expr * abbrev_memo\n  | Mlink of abbrev_memo ref\n\nand field_kind = Fvar of field_kind option ref | Fpresent | Fabsent\n\nand commutable = Cok | Cunknown | Clink of commutable ref\n\nmodule TypeOps = struct\n  type t = type_expr\n  let compare t1 t2 = t1.id - t2.id\n  let hash t = t.id\n  let equal t1 t2 = t1 == t2\nend\n\n(* Maps of methods and instance variables *)\n\nmodule OrderedString = struct\n  type t = string\n  let compare (x : t) y = compare x y\nend\nmodule Meths = Map.Make (OrderedString)\nmodule Vars = Meths\n\n(* Value descriptions *)\n\ntype value_description = {\n  val_type: type_expr; (* Type of the value *)\n  val_kind: value_kind;\n  val_loc: Location.t;\n  val_attributes: Parsetree.attributes;\n}\n\nand value_kind =\n  | Val_reg (* Regular value *)\n  | Val_prim of Primitive.description (* Primitive *)\n\n(* Variance *)\n\nmodule Variance = struct\n  type t = int\n  type f = May_pos | May_neg | May_weak | Inj | Pos | Neg | Inv\n  let single = function\n    | May_pos -> 1\n    | May_neg -> 2\n    | May_weak -> 4\n    | Inj -> 8\n    | Pos -> 16\n    | Neg -> 32\n    | Inv -> 64\n  let union v1 v2 = v1 lor v2\n  let inter v1 v2 = v1 land v2\n  let subset v1 v2 = v1 land v2 = v1\n  let set x b v = if b then v lor single x else v land lnot (single x)\n  let mem x = subset (single x)\n  let null = 0\n  let may_inv = 7\n  let full = 127\n  let covariant = single May_pos lor single Pos lor single Inj\n  let swap f1 f2 v =\n    let v' = set f1 (mem f2 v) v in\n    set f2 (mem f1 v) v'\n  let conjugate v = swap May_pos May_neg (swap Pos Neg v)\n  let get_upper v 
= (mem May_pos v, mem May_neg v)\n  let get_lower v = (mem Pos v, mem Neg v, mem Inv v, mem Inj v)\nend\n\n(* Type definitions *)\n\ntype type_declaration = {\n  type_params: type_expr list;\n  type_arity: int;\n  type_kind: type_kind;\n  type_private: private_flag;\n  type_manifest: type_expr option;\n  type_variance: Variance.t list;\n  type_newtype_level: (int * int) option;\n  type_loc: Location.t;\n  type_attributes: Parsetree.attributes;\n  type_immediate: bool;\n  type_unboxed: unboxed_status;\n}\n\nand type_kind =\n  | Type_abstract\n  | Type_record of label_declaration list * record_representation\n  | Type_variant of constructor_declaration list\n  | Type_open\n\nand record_representation =\n  | Record_regular (* All fields are boxed / tagged *)\n  | Record_float_unused (* Was: all fields are floats. Now: unused *)\n  | Record_unboxed of bool (* Unboxed single-field record, inlined or not *)\n  | Record_inlined of\n      (* Inlined record *)\n       {\n      tag: int;\n      name: string;\n      num_nonconsts: int;\n      optional_labels: string list;\n      attrs: Parsetree.attributes;\n    }\n  | Record_extension (* Inlined record under extension *)\n  | Record_optional_labels of string list (* List of optional labels *)\n\nand label_declaration = {\n  ld_id: Ident.t;\n  ld_mutable: mutable_flag;\n  ld_type: type_expr;\n  ld_loc: Location.t;\n  ld_attributes: Parsetree.attributes;\n}\n\nand constructor_declaration = {\n  cd_id: Ident.t;\n  cd_args: constructor_arguments;\n  cd_res: type_expr option;\n  cd_loc: Location.t;\n  cd_attributes: Parsetree.attributes;\n}\n\nand constructor_arguments =\n  | Cstr_tuple of type_expr list\n  | Cstr_record of label_declaration list\n\nand unboxed_status = {\n  unboxed: bool;\n  default: bool; (* False if the unboxed field was set from an attribute. 
*)\n}\n\nlet unboxed_false_default_false = {unboxed = false; default = false}\nlet unboxed_false_default_true = {unboxed = false; default = true}\nlet unboxed_true_default_false = {unboxed = true; default = false}\nlet unboxed_true_default_true = {unboxed = true; default = true}\n\ntype extension_constructor = {\n  ext_type_path: Path.t;\n  ext_type_params: type_expr list;\n  ext_args: constructor_arguments;\n  ext_ret_type: type_expr option;\n  ext_private: private_flag;\n  ext_loc: Location.t;\n  ext_attributes: Parsetree.attributes;\n}\n\nand type_transparence =\n  | Type_public (* unrestricted expansion *)\n  | Type_new (* \"new\" type *)\n  | Type_private (* private type *)\n\n(* Type expressions for the class language *)\n\nmodule Concr = Set.Make (OrderedString)\n\ntype class_type =\n  | Cty_constr of Path.t * type_expr list * class_type\n  | Cty_signature of class_signature\n  | Cty_arrow of arg_label * type_expr * class_type\n\nand class_signature = {\n  csig_self: type_expr;\n  csig_vars: (Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr) Vars.t;\n  csig_concr: Concr.t;\n  csig_inher: (Path.t * type_expr list) list;\n}\n\ntype class_declaration = {\n  cty_params: type_expr list;\n  mutable cty_type: class_type;\n  cty_path: Path.t;\n  cty_new: type_expr option;\n  cty_variance: Variance.t list;\n  cty_loc: Location.t;\n  cty_attributes: Parsetree.attributes;\n}\n\ntype class_type_declaration = {\n  clty_params: type_expr list;\n  clty_type: class_type;\n  clty_path: Path.t;\n  clty_variance: Variance.t list;\n  clty_loc: Location.t;\n  clty_attributes: Parsetree.attributes;\n}\n\n(* Type expressions for the module language *)\n\ntype module_type =\n  | Mty_ident of Path.t\n  | Mty_signature of signature\n  | Mty_functor of Ident.t * module_type option * module_type\n  | Mty_alias of alias_presence * Path.t\n\nand alias_presence = Mta_present | Mta_absent\n\nand signature = signature_item list\n\nand signature_item =\n  | Sig_value of Ident.t * 
value_description\n  | Sig_type of Ident.t * type_declaration * rec_status\n  | Sig_typext of Ident.t * extension_constructor * ext_status\n  | Sig_module of Ident.t * module_declaration * rec_status\n  | Sig_modtype of Ident.t * modtype_declaration\n  | Sig_class of unit\n  | Sig_class_type of Ident.t * class_type_declaration * rec_status\n\nand module_declaration = {\n  md_type: module_type;\n  md_attributes: Parsetree.attributes;\n  md_loc: Location.t;\n}\n\nand modtype_declaration = {\n  mtd_type: module_type option; (* Note: abstract *)\n  mtd_attributes: Parsetree.attributes;\n  mtd_loc: Location.t;\n}\n\nand rec_status =\n  | Trec_not (* first in a nonrecursive group *)\n  | Trec_first (* first in a recursive group *)\n  | Trec_next (* not first in a recursive/nonrecursive group *)\n\nand ext_status =\n  | Text_first (* first constructor of an extension *)\n  | Text_next (* not first constructor of an extension *)\n  | Text_exception (* an exception *)\n\n(* Constructor and record label descriptions inserted held in typing\n   environments *)\n\ntype constructor_description = {\n  cstr_name: string; (* Constructor name *)\n  cstr_res: type_expr; (* Type of the result *)\n  cstr_existentials: type_expr list; (* list of existentials *)\n  cstr_args: type_expr list; (* Type of the arguments *)\n  cstr_arity: int; (* Number of arguments *)\n  cstr_tag: constructor_tag; (* Tag for heap blocks *)\n  cstr_consts: int; (* Number of constant constructors *)\n  cstr_nonconsts: int; (* Number of non-const constructors *)\n  cstr_normal: int; (* Number of non generalized constrs *)\n  cstr_generalized: bool; (* Constrained return type? *)\n  cstr_private: private_flag; (* Read-only constructor? 
*)\n  cstr_loc: Location.t;\n  cstr_attributes: Parsetree.attributes;\n  cstr_inlined: type_declaration option;\n}\n\nand constructor_tag =\n  | Cstr_constant of int (* Constant constructor (an int) *)\n  | Cstr_block of int (* Regular constructor (a block) *)\n  | Cstr_unboxed (* Constructor of an unboxed type *)\n  | Cstr_extension of Path.t * bool\n(* Extension constructor\n   true if a constant false if a block*)\n\nlet equal_tag t1 t2 =\n  match (t1, t2) with\n  | Cstr_constant i1, Cstr_constant i2 -> i2 = i1\n  | Cstr_block i1, Cstr_block i2 -> i2 = i1\n  | Cstr_unboxed, Cstr_unboxed -> true\n  | Cstr_extension (path1, b1), Cstr_extension (path2, b2) ->\n    Path.same path1 path2 && b1 = b2\n  | (Cstr_constant _ | Cstr_block _ | Cstr_unboxed | Cstr_extension _), _ ->\n    false\n\nlet may_equal_constr c1 c2 =\n  match (c1.cstr_tag, c2.cstr_tag) with\n  | Cstr_extension _, Cstr_extension _ -> c1.cstr_arity = c2.cstr_arity\n  | tag1, tag2 -> equal_tag tag1 tag2\n\ntype label_description = {\n  lbl_name: string; (* Short name *)\n  lbl_res: type_expr; (* Type of the result *)\n  lbl_arg: type_expr; (* Type of the argument *)\n  lbl_mut: mutable_flag; (* Is this a mutable field? *)\n  lbl_pos: int; (* Position in block *)\n  lbl_all: label_description array; (* All the labels in this type *)\n  lbl_repres: record_representation; (* Representation for this record *)\n  lbl_private: private_flag; (* Read-only field? 
*)\n  lbl_loc: Location.t;\n  lbl_attributes: Parsetree.attributes;\n}\nlet same_record_representation x y =\n  match x with\n  | Record_regular -> y = Record_regular\n  | Record_float_unused -> y = Record_float_unused\n  | Record_optional_labels lbls -> (\n    match y with\n    | Record_optional_labels lbls2 -> lbls = lbls2\n    | _ -> false)\n  | Record_inlined {tag; name; num_nonconsts; optional_labels} -> (\n    match y with\n    | Record_inlined y ->\n      tag = y.tag && name = y.name\n      && num_nonconsts = y.num_nonconsts\n      && optional_labels = y.optional_labels\n    | _ -> false)\n  | Record_extension -> y = Record_extension\n  | Record_unboxed x -> (\n    match y with\n    | Record_unboxed y -> x = y\n    | _ -> false)\n"
  },
  {
    "path": "analysis/vendor/ml/types.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(** {0 Representation of types and declarations} *)\n\n(** [Types] defines the representation of types and declarations (that is, the\n    content of module signatures).\n\n    CMI files are made of marshalled types. *)\n\nopen Asttypes\n(** Asttypes exposes basic definitions shared both by Parsetree and Types. *)\n\ntype type_expr = {mutable desc: type_desc; mutable level: int; id: int}\n(** Type expressions for the core language.\n\n    The [type_desc] variant defines all the possible type expressions one can\n    find in OCaml. [type_expr] wraps this with some annotations.\n\n    The [level] field tracks the level of polymorphism associated to a type,\n    guiding the generalization algorithm. Put shortly, when referring to a type\n    in a given environment, both the type and the environment have a level. 
If\n    the type has an higher level, then it can be considered fully polymorphic\n    (type variables will be printed as ['a]), otherwise it'll be weakly\n    polymorphic, or non generalized (type variables printed as ['_a]). See\n    [http://okmij.org/ftp/ML/generalization.html] for more information.\n\n    Note about [type_declaration]: one should not make the confusion between\n    [type_expr] and [type_declaration].\n\n    [type_declaration] refers specifically to the [type] construct in OCaml\n    language, where you create and name a new type or type alias.\n\n    [type_expr] is used when you refer to existing types, e.g. when annotating\n    the expected type of a value.\n\n    Also, as the type system of OCaml is generative, a [type_declaration] can\n    have the side-effect of introducing a new type constructor, different from\n    all other known types. Whereas [type_expr] is a pure construct which allows\n    referring to existing types.\n\n    Note on mutability: TBD. *)\n\nand type_desc =\n  | Tvar of string option\n      (** [Tvar (Some \"a\")] ==> ['a] or ['_a] [Tvar None] ==> [_] *)\n  | Tarrow of arg_label * type_expr * type_expr * commutable\n      (** [Tarrow (Nolabel,      e1, e2, c)] ==> [e1    -> e2]\n          [Tarrow (Labelled \"l\", e1, e2, c)] ==> [l:e1  -> e2]\n          [Tarrow (Optional \"l\", e1, e2, c)] ==> [?l:e1 -> e2]\n\n          See [commutable] for the last argument. *)\n  | Ttuple of type_expr list  (** [Ttuple [t1;...;tn]] ==> [(t1 * ... * tn)] *)\n  | Tconstr of Path.t * type_expr list * abbrev_memo ref\n      (** [Tconstr (`A.B.t', [t1;...;tn], _)] ==> [(t1,...,tn) A.B.t] The last\n          parameter keep tracks of known expansions, see [abbrev_memo]. 
*)\n  | Tobject of type_expr * (Path.t * type_expr list) option ref\n      (** [Tobject (`f1:t1;...;fn: tn', `None')] ==> [< f1: t1; ...; fn: tn >]\n          f1, fn are represented as a linked list of types using Tfield and Tnil\n          constructors.\n\n          [Tobject (_, `Some (`A.ct', [t1;...;tn]')] ==> [(t1, ..., tn) A.ct].\n          where A.ct is the type of some class.\n\n          There are also special cases for so-called \"class-types\", cf.\n          [Typeclass] and [Ctype.set_object_name]:\n\n          [Tobject (Tfield(_,_,...(Tfield(_,_,rv)...), Some(`A.#ct`,\n           [rv;t1;...;tn])] ==> [(t1, ..., tn) #A.ct]\n          [Tobject (_, Some(`A.#ct`, [Tnil;t1;...;tn])] ==> [(t1, ..., tn) A.ct]\n\n          where [rv] is the hidden row variable. *)\n  | Tfield of string * field_kind * type_expr * type_expr\n      (** [Tfield (\"foo\", Fpresent, t, ts)] ==> [<...; foo : t; ts>] *)\n  | Tnil  (** [Tnil] ==> [<...; >] *)\n  | Tlink of type_expr  (** Indirection used by unification engine. *)\n  | Tsubst of type_expr (* for copying *)\n      (** [Tsubst] is used temporarily to store information in low-level\n          functions manipulating representation of types, such as instantiation\n          or copy. This constructor should not appear outside of these cases. *)\n  | Tvariant of row_desc\n      (** Representation of polymorphic variants, see [row_desc]. *)\n  | Tunivar of string option\n      (** Occurrence of a type variable introduced by a forall quantifier /\n          [Tpoly]. *)\n  | Tpoly of type_expr * type_expr list\n      (** [Tpoly (ty,tyl)] ==> ['a1... 'an. ty], where 'a1 ... 'an are names\n          given to types in tyl and occurrences of those types in ty. *)\n  | Tpackage of Path.t * Longident.t list * type_expr list\n      (** Type of a first-class module (a.k.a package). 
*)\n\nand row_desc = {\n  row_fields: (label * row_field) list;\n  row_more: type_expr;\n  row_bound: unit; (* kept for compatibility *)\n  row_closed: bool;\n  row_fixed: bool;\n  row_name: (Path.t * type_expr list) option;\n}\n(** [  `X | `Y ]       (row_closed = true)\n    [< `X | `Y ]       (row_closed = true)\n    [> `X | `Y ]       (row_closed = false)\n    [< `X | `Y > `X ]  (row_closed = true)\n\n    type t = [> `X ] as 'a      (row_more = Tvar a)\n    type t = private [> `X ]    (row_more = Tconstr (t#row, [], ref Mnil)\n\n    And for:\n\n        let f = function `X -> `X -> | `Y -> `X\n\n    the type of \"f\" will be a [Tarrow] whose lhs will (basically) be:\n\n        Tvariant { row_fields = [(\"X\", _)];\n                   row_more   =\n                     Tvariant { row_fields = [(\"Y\", _)];\n                                row_more   =\n                                  Tvariant { row_fields = [];\n                                             row_more   = _;\n                                             _ };\n                                _ };\n                   _\n                 }\n\n*)\n\nand row_field =\n  | Rpresent of type_expr option\n  | Reither of bool * type_expr list * bool * row_field option ref\n  (* 1st true denotes a constant constructor *)\n  (* 2nd true denotes a tag in a pattern matching, and\n     is erased later *)\n  | Rabsent\n\n(** [abbrev_memo] allows one to keep track of different expansions of a type\n    alias. This is done for performance purposes.\n\n    For instance, when defining [type 'a pair = 'a * 'a], when one refers to an\n    ['a pair], it is just a shortcut for the ['a * 'a] type. This expansion will\n    be stored in the [abbrev_memo] of the corresponding [Tconstr] node.\n\n    In practice, [abbrev_memo] behaves like list of expansions with a mutable\n    tail.\n\n    Note on marshalling: [abbrev_memo] must not appear in saved types. 
[Btype],\n    with [cleanup_abbrev] and [memo], takes care of tracking and removing\n    abbreviations. *)\nand abbrev_memo =\n  | Mnil  (** No known abbreviation *)\n  | Mcons of private_flag * Path.t * type_expr * type_expr * abbrev_memo\n      (** Found one abbreviation. A valid abbreviation should be at least as\n          visible and reachable by the same path. The first expression is the\n          abbreviation and the second the expansion. *)\n  | Mlink of abbrev_memo ref\n      (** Abbreviations can be found after this indirection *)\n\nand field_kind = Fvar of field_kind option ref | Fpresent | Fabsent\n\n(** [commutable] is a flag appended to every arrow type.\n\n    When typing an application, if the type of the functional is known, its type\n    is instantiated with [Cok] arrows, otherwise as [Clink (ref Cunknown)].\n\n    When the type is not known, the application will be used to infer the actual\n    type. This is fragile in presence of labels where there is no principal\n    type.\n\n    Two incompatible applications relying on [Cunknown] arrows will trigger an\n    error.\n\n    let f g = g ~a:() ~b:(); g ~b:() ~a:();\n\n    Error: This function is applied to arguments in an order different from\n    other calls. This is only allowed when the real type is known. 
*)\nand commutable = Cok | Cunknown | Clink of commutable ref\n\nmodule TypeOps : sig\n  type t = type_expr\n  val compare : t -> t -> int\n  val equal : t -> t -> bool\n  val hash : t -> int\nend\n\n(* Maps of methods and instance variables *)\n\nmodule Meths : Map.S with type key = string\nmodule Vars : Map.S with type key = string\n\n(* Value descriptions *)\n\ntype value_description = {\n  val_type: type_expr; (* Type of the value *)\n  val_kind: value_kind;\n  val_loc: Location.t;\n  val_attributes: Parsetree.attributes;\n}\n\nand value_kind =\n  | Val_reg (* Regular value *)\n  | Val_prim of Primitive.description (* Primitive *)\n\n(* Variance *)\n\nmodule Variance : sig\n  type t\n  type f = May_pos | May_neg | May_weak | Inj | Pos | Neg | Inv\n  val null : t (* no occurrence *)\n  val full : t (* strictly invariant *)\n  val covariant : t (* strictly covariant *)\n  val may_inv : t (* maybe invariant *)\n  val union : t -> t -> t\n  val inter : t -> t -> t\n  val subset : t -> t -> bool\n  val set : f -> bool -> t -> t\n  val mem : f -> t -> bool\n  val conjugate : t -> t (* exchange positive and negative *)\n  val get_upper : t -> bool * bool (* may_pos, may_neg   *)\n  val get_lower : t -> bool * bool * bool * bool (* pos, neg, inv, inj *)\nend\n\n(* Type definitions *)\n\ntype type_declaration = {\n  type_params: type_expr list;\n  type_arity: int;\n  type_kind: type_kind;\n  type_private: private_flag;\n  type_manifest: type_expr option;\n  type_variance: Variance.t list;\n  (* covariant, contravariant, weakly contravariant, injective *)\n  type_newtype_level: (int * int) option;\n  (* definition level * expansion level *)\n  type_loc: Location.t;\n  type_attributes: Parsetree.attributes;\n  type_immediate: bool; (* true iff type should not be a pointer *)\n  type_unboxed: unboxed_status;\n}\n\nand type_kind =\n  | Type_abstract\n  | Type_record of label_declaration list * record_representation\n  | Type_variant of constructor_declaration list\n  | 
Type_open\n\nand record_representation =\n  | Record_regular (* All fields are boxed / tagged *)\n  | Record_float_unused (* Was: all fields are floats. Now: unused *)\n  | Record_unboxed of bool (* Unboxed single-field record, inlined or not *)\n  | Record_inlined of\n      (* Inlined record *)\n       {\n      tag: int;\n      name: string;\n      num_nonconsts: int;\n      optional_labels: string list;\n      attrs: Parsetree.attributes;\n    }\n  | Record_extension (* Inlined record under extension *)\n  | Record_optional_labels of string list (* List of optional labels *)\n\nand label_declaration = {\n  ld_id: Ident.t;\n  ld_mutable: mutable_flag;\n  ld_type: type_expr;\n  ld_loc: Location.t;\n  ld_attributes: Parsetree.attributes;\n}\n\nand constructor_declaration = {\n  cd_id: Ident.t;\n  cd_args: constructor_arguments;\n  cd_res: type_expr option;\n  cd_loc: Location.t;\n  cd_attributes: Parsetree.attributes;\n}\n\nand constructor_arguments =\n  | Cstr_tuple of type_expr list\n  | Cstr_record of label_declaration list\n\nand unboxed_status =\n  private\n  (* This type must be private in order to ensure perfect sharing of the\n     four possible values. Otherwise, ocamlc.byte and ocamlc.opt produce\n     different executables. *) {\n  unboxed: bool;\n  default: bool; (* True for unannotated unboxable types. 
*)\n}\n\nval unboxed_false_default_false : unboxed_status\nval unboxed_false_default_true : unboxed_status\nval unboxed_true_default_false : unboxed_status\nval unboxed_true_default_true : unboxed_status\n\ntype extension_constructor = {\n  ext_type_path: Path.t;\n  ext_type_params: type_expr list;\n  ext_args: constructor_arguments;\n  ext_ret_type: type_expr option;\n  ext_private: private_flag;\n  ext_loc: Location.t;\n  ext_attributes: Parsetree.attributes;\n}\n\nand type_transparence =\n  | Type_public (* unrestricted expansion *)\n  | Type_new (* \"new\" type *)\n  | Type_private (* private type *)\n\n(* Type expressions for the class language *)\n\nmodule Concr : Set.S with type elt = string\n\ntype class_type =\n  | Cty_constr of Path.t * type_expr list * class_type\n  | Cty_signature of class_signature\n  | Cty_arrow of arg_label * type_expr * class_type\n\nand class_signature = {\n  csig_self: type_expr;\n  csig_vars: (Asttypes.mutable_flag * Asttypes.virtual_flag * type_expr) Vars.t;\n  csig_concr: Concr.t;\n  csig_inher: (Path.t * type_expr list) list;\n}\n\ntype class_declaration = {\n  cty_params: type_expr list;\n  mutable cty_type: class_type;\n  cty_path: Path.t;\n  cty_new: type_expr option;\n  cty_variance: Variance.t list;\n  cty_loc: Location.t;\n  cty_attributes: Parsetree.attributes;\n}\n\ntype class_type_declaration = {\n  clty_params: type_expr list;\n  clty_type: class_type;\n  clty_path: Path.t;\n  clty_variance: Variance.t list;\n  clty_loc: Location.t;\n  clty_attributes: Parsetree.attributes;\n}\n\n(* Type expressions for the module language *)\n\ntype module_type =\n  | Mty_ident of Path.t\n  | Mty_signature of signature\n  | Mty_functor of Ident.t * module_type option * module_type\n  | Mty_alias of alias_presence * Path.t\n\nand alias_presence = Mta_present | Mta_absent\n\nand signature = signature_item list\n\nand signature_item =\n  | Sig_value of Ident.t * value_description\n  | Sig_type of Ident.t * type_declaration * 
rec_status\n  | Sig_typext of Ident.t * extension_constructor * ext_status\n  | Sig_module of Ident.t * module_declaration * rec_status\n  | Sig_modtype of Ident.t * modtype_declaration\n  | Sig_class of unit\n  | Sig_class_type of Ident.t * class_type_declaration * rec_status\n\nand module_declaration = {\n  md_type: module_type;\n  md_attributes: Parsetree.attributes;\n  md_loc: Location.t;\n}\n\nand modtype_declaration = {\n  mtd_type: module_type option; (* None: abstract *)\n  mtd_attributes: Parsetree.attributes;\n  mtd_loc: Location.t;\n}\n\nand rec_status =\n  | Trec_not (* first in a nonrecursive group *)\n  | Trec_first (* first in a recursive group *)\n  | Trec_next (* not first in a recursive/nonrecursive group *)\n\nand ext_status =\n  | Text_first (* first constructor in an extension *)\n  | Text_next (* not first constructor in an extension *)\n  | Text_exception\n\n(* Constructor and record label descriptions inserted held in typing\n   environments *)\n\ntype constructor_description = {\n  cstr_name: string; (* Constructor name *)\n  cstr_res: type_expr; (* Type of the result *)\n  cstr_existentials: type_expr list; (* list of existentials *)\n  cstr_args: type_expr list; (* Type of the arguments *)\n  cstr_arity: int; (* Number of arguments *)\n  cstr_tag: constructor_tag; (* Tag for heap blocks *)\n  cstr_consts: int; (* Number of constant constructors *)\n  cstr_nonconsts: int; (* Number of non-const constructors *)\n  cstr_normal: int; (* Number of non generalized constrs *)\n  cstr_generalized: bool; (* Constrained return type? *)\n  cstr_private: private_flag; (* Read-only constructor? 
*)\n  cstr_loc: Location.t;\n  cstr_attributes: Parsetree.attributes;\n  cstr_inlined: type_declaration option;\n}\n\nand constructor_tag =\n  | Cstr_constant of int (* Constant constructor (an int) *)\n  | Cstr_block of int (* Regular constructor (a block) *)\n  | Cstr_unboxed (* Constructor of an unboxed type *)\n  | Cstr_extension of Path.t * bool\n(* Extension constructor\n   true if a constant false if a block*)\n\n(* Constructors are the same *)\nval equal_tag : constructor_tag -> constructor_tag -> bool\n\n(* Constructors may be the same, given potential rebinding *)\nval may_equal_constr :\n  constructor_description -> constructor_description -> bool\n\ntype label_description = {\n  lbl_name: string; (* Short name *)\n  lbl_res: type_expr; (* Type of the result *)\n  lbl_arg: type_expr; (* Type of the argument *)\n  lbl_mut: mutable_flag; (* Is this a mutable field? *)\n  lbl_pos: int; (* Position in block *)\n  lbl_all: label_description array; (* All the labels in this type *)\n  lbl_repres: record_representation; (* Representation for this record *)\n  lbl_private: private_flag; (* Read-only field? *)\n  lbl_loc: Location.t;\n  lbl_attributes: Parsetree.attributes;\n}\n\nval same_record_representation :\n  record_representation -> record_representation -> bool\n"
  },
  {
    "path": "analysis/vendor/ml/typetexp.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* typetexp.ml,v 1.34.4.9 2002/01/07 08:39:16 garrigue Exp *)\n\n(* Typechecking of type expressions for the core language *)\n\nopen Asttypes\nopen Misc\nopen Parsetree\nopen Typedtree\nopen Types\nopen Ctype\n\nexception Already_bound\n\ntype error =\n  | Unbound_type_variable of string\n  | Unbound_type_constructor of Longident.t\n  | Unbound_type_constructor_2 of Path.t\n  | Type_arity_mismatch of Longident.t * int * int\n  | Bound_type_variable of string\n  | Recursive_type\n  | Unbound_row_variable of Longident.t\n  | Type_mismatch of (type_expr * type_expr) list\n  | Alias_type_mismatch of (type_expr * type_expr) list\n  | Present_has_conjunction of string\n  | Present_has_no_type of string\n  | Constructor_mismatch of type_expr * type_expr\n  | Not_a_variant of type_expr\n  | Variant_tags of string * string\n  | Invalid_variable_name of string\n  | Cannot_quantify of 
string * type_expr\n  | Multiple_constraints_on_type of Longident.t\n  | Method_mismatch of string * type_expr * type_expr\n  | Unbound_value of Longident.t\n  | Unbound_constructor of Longident.t\n  | Unbound_label of Longident.t\n  | Unbound_module of Longident.t\n  | Unbound_class of Longident.t\n  | Unbound_modtype of Longident.t\n  | Unbound_cltype of Longident.t\n  | Ill_typed_functor_application of Longident.t\n  | Illegal_reference_to_recursive_module\n  | Access_functor_as_structure of Longident.t\n  | Apply_structure_as_functor of Longident.t\n  | Cannot_scrape_alias of Longident.t * Path.t\n  | Opened_object of Path.t option\n  | Not_an_object of type_expr\n\nexception Error of Location.t * Env.t * error\nexception Error_forward of Location.error\n\ntype variable_context = int * (string, type_expr) Tbl.t\n\n(* Local definitions *)\n\nlet instance_list = Ctype.instance_list Env.empty\n\n(* Narrowing unbound identifier errors. *)\n\nlet rec narrow_unbound_lid_error : 'a. _ -> _ -> _ -> _ -> 'a =\n fun env loc lid make_error ->\n  let check_module mlid =\n    try ignore (Env.lookup_module ~load:true mlid env) with\n    | Not_found ->\n      narrow_unbound_lid_error env loc mlid (fun lid -> Unbound_module lid)\n    | Env.Recmodule ->\n      raise (Error (loc, env, Illegal_reference_to_recursive_module))\n  in\n  (match lid with\n  | Longident.Lident _ -> ()\n  | Longident.Ldot (mlid, _) -> (\n    check_module mlid;\n    let md = Env.find_module (Env.lookup_module ~load:true mlid env) env in\n    match Env.scrape_alias env md.md_type with\n    | Mty_functor _ ->\n      raise (Error (loc, env, Access_functor_as_structure mlid))\n    | Mty_alias (_, p) ->\n      raise (Error (loc, env, Cannot_scrape_alias (mlid, p)))\n    | _ -> ())\n  | Longident.Lapply (flid, mlid) -> (\n    check_module flid;\n    let fmd = Env.find_module (Env.lookup_module ~load:true flid env) env in\n    (match Env.scrape_alias env fmd.md_type with\n    | Mty_signature _ ->\n      raise 
(Error (loc, env, Apply_structure_as_functor flid))\n    | Mty_alias (_, p) ->\n      raise (Error (loc, env, Cannot_scrape_alias (flid, p)))\n    | _ -> ());\n    check_module mlid;\n    let mmd = Env.find_module (Env.lookup_module ~load:true mlid env) env in\n    match Env.scrape_alias env mmd.md_type with\n    | Mty_alias (_, p) ->\n      raise (Error (loc, env, Cannot_scrape_alias (mlid, p)))\n    | _ -> raise (Error (loc, env, Ill_typed_functor_application lid))));\n  raise (Error (loc, env, make_error lid))\n\nlet find_component (lookup : ?loc:_ -> _) make_error env loc lid =\n  try\n    match lid with\n    | Longident.Ldot (Longident.Lident \"*predef*\", s) ->\n      lookup ~loc (Longident.Lident s) Env.initial_safe_string\n    | _ -> lookup ~loc lid env\n  with\n  | Not_found -> narrow_unbound_lid_error env loc lid make_error\n  | Env.Recmodule ->\n    raise (Error (loc, env, Illegal_reference_to_recursive_module))\n\nlet find_type env loc lid =\n  let path =\n    find_component Env.lookup_type\n      (fun lid -> Unbound_type_constructor lid)\n      env loc lid\n  in\n  let decl = Env.find_type path env in\n  Builtin_attributes.check_deprecated loc decl.type_attributes (Path.name path);\n  (path, decl)\n\nlet find_constructor =\n  find_component Env.lookup_constructor (fun lid -> Unbound_constructor lid)\nlet find_all_constructors =\n  find_component Env.lookup_all_constructors (fun lid ->\n      Unbound_constructor lid)\nlet find_label = find_component Env.lookup_label (fun lid -> Unbound_label lid)\nlet find_all_labels =\n  find_component Env.lookup_all_labels (fun lid -> Unbound_label lid)\n\nlet find_class env loc lid =\n  let ((path, decl) as r) =\n    find_component Env.lookup_class (fun lid -> Unbound_class lid) env loc lid\n  in\n  Builtin_attributes.check_deprecated loc decl.cty_attributes (Path.name path);\n  r\n\nlet find_value env loc lid =\n  Env.check_value_name (Longident.last lid) loc;\n  let ((path, decl) as r) =\n    find_component 
Env.lookup_value (fun lid -> Unbound_value lid) env loc lid\n  in\n  Builtin_attributes.check_deprecated loc decl.val_attributes (Path.name path);\n  r\n\nlet lookup_module ?(load = false) env loc lid =\n  find_component\n    (fun ?loc lid env -> Env.lookup_module ~load ?loc lid env)\n    (fun lid -> Unbound_module lid)\n    env loc lid\n\nlet find_module env loc lid =\n  let path = lookup_module ~load:true env loc lid in\n  let decl = Env.find_module path env in\n  (* No need to check for deprecated here, this is done in Env. *)\n  (path, decl)\n\nlet find_modtype env loc lid =\n  let ((path, decl) as r) =\n    find_component Env.lookup_modtype\n      (fun lid -> Unbound_modtype lid)\n      env loc lid\n  in\n  Builtin_attributes.check_deprecated loc decl.mtd_attributes (Path.name path);\n  r\n\nlet find_class_type env loc lid =\n  let ((path, decl) as r) =\n    find_component Env.lookup_cltype (fun lid -> Unbound_cltype lid) env loc lid\n  in\n  Builtin_attributes.check_deprecated loc decl.clty_attributes (Path.name path);\n  r\n\nlet unbound_constructor_error env lid =\n  narrow_unbound_lid_error env lid.loc lid.txt (fun lid ->\n      Unbound_constructor lid)\n\nlet unbound_label_error env lid =\n  narrow_unbound_lid_error env lid.loc lid.txt (fun lid -> Unbound_label lid)\n\n(* Support for first-class modules. 
*)\n\nlet transl_modtype_longident = ref (fun _ -> assert false)\nlet transl_modtype = ref (fun _ -> assert false)\n\nlet create_package_mty fake loc env (p, l) =\n  let l =\n    List.sort\n      (fun (s1, _t1) (s2, _t2) ->\n        if s1.txt = s2.txt then\n          raise (Error (loc, env, Multiple_constraints_on_type s1.txt));\n        compare s1.txt s2.txt)\n      l\n  in\n  ( l,\n    List.fold_left\n      (fun mty (s, t) ->\n        let d =\n          {\n            ptype_name = mkloc (Longident.last s.txt) s.loc;\n            ptype_params = [];\n            ptype_cstrs = [];\n            ptype_kind = Ptype_abstract;\n            ptype_private = Asttypes.Public;\n            ptype_manifest = (if fake then None else Some t);\n            ptype_attributes = [];\n            ptype_loc = loc;\n          }\n        in\n        Ast_helper.Mty.mk ~loc\n          (Pmty_with (mty, [Pwith_type ({txt = s.txt; loc}, d)])))\n      (Ast_helper.Mty.mk ~loc (Pmty_ident p))\n      l )\n\n(* Translation of type expressions *)\n\nlet type_variables = ref (Tbl.empty : (string, type_expr) Tbl.t)\nlet univars = ref ([] : (string * type_expr) list)\nlet pre_univars = ref ([] : type_expr list)\nlet used_variables = ref (Tbl.empty : (string, type_expr * Location.t) Tbl.t)\n\nlet reset_type_variables () =\n  reset_global_level ();\n  Ctype.reset_reified_var_counter ();\n  type_variables := Tbl.empty\n\nlet narrow () = (increase_global_level (), !type_variables)\n\nlet widen (gl, tv) =\n  restore_global_level gl;\n  type_variables := tv\n\nlet strict_ident c = c = '_' || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')\n\nlet validate_name = function\n  | None -> None\n  | Some name as s -> if name <> \"\" && strict_ident name.[0] then s else None\n\nlet new_global_var ?name () = new_global_var ?name:(validate_name name) ()\nlet newvar ?name () = newvar ?name:(validate_name name) ()\n\nlet type_variable loc name =\n  try Tbl.find name !type_variables\n  with Not_found ->\n    raise 
(Error (loc, Env.empty, Unbound_type_variable (\"'\" ^ name)))\n\nlet transl_type_param env styp =\n  let loc = styp.ptyp_loc in\n  match styp.ptyp_desc with\n  | Ptyp_any ->\n    let ty = new_global_var ~name:\"_\" () in\n    {\n      ctyp_desc = Ttyp_any;\n      ctyp_type = ty;\n      ctyp_env = env;\n      ctyp_loc = loc;\n      ctyp_attributes = styp.ptyp_attributes;\n    }\n  | Ptyp_var name ->\n    let ty =\n      try\n        if name <> \"\" && name.[0] = '_' then\n          raise (Error (loc, Env.empty, Invalid_variable_name (\"'\" ^ name)));\n        ignore (Tbl.find name !type_variables);\n        raise Already_bound\n      with Not_found ->\n        let v = new_global_var ~name () in\n        type_variables := Tbl.add name v !type_variables;\n        v\n    in\n    {\n      ctyp_desc = Ttyp_var name;\n      ctyp_type = ty;\n      ctyp_env = env;\n      ctyp_loc = loc;\n      ctyp_attributes = styp.ptyp_attributes;\n    }\n  | _ -> assert false\n\nlet transl_type_param env styp =\n  (* Currently useless, since type parameters cannot hold attributes\n     (but this could easily be lifted in the future). 
*)\n  Builtin_attributes.warning_scope styp.ptyp_attributes (fun () ->\n      transl_type_param env styp)\n\nlet new_pre_univar ?name () =\n  let v = newvar ?name () in\n  pre_univars := v :: !pre_univars;\n  v\n\nlet rec swap_list = function\n  | x :: y :: l -> y :: x :: swap_list l\n  | l -> l\n\ntype policy = Fixed | Extensible | Univars\n\nlet rec transl_type env policy styp =\n  Builtin_attributes.warning_scope styp.ptyp_attributes (fun () ->\n      transl_type_aux env policy styp)\n\nand transl_type_aux env policy styp =\n  let loc = styp.ptyp_loc in\n  let ctyp ctyp_desc ctyp_type =\n    {\n      ctyp_desc;\n      ctyp_type;\n      ctyp_env = env;\n      ctyp_loc = loc;\n      ctyp_attributes = styp.ptyp_attributes;\n    }\n  in\n  match styp.ptyp_desc with\n  | Ptyp_any ->\n    let ty =\n      if policy = Univars then new_pre_univar ()\n      else if policy = Fixed then\n        raise (Error (styp.ptyp_loc, env, Unbound_type_variable \"_\"))\n      else newvar ()\n    in\n    ctyp Ttyp_any ty\n  | Ptyp_var name ->\n    let ty =\n      if name <> \"\" && name.[0] = '_' then\n        raise (Error (styp.ptyp_loc, env, Invalid_variable_name (\"'\" ^ name)));\n      try instance env (List.assoc name !univars)\n      with Not_found -> (\n        try instance env (fst (Tbl.find name !used_variables))\n        with Not_found ->\n          let v =\n            if policy = Univars then new_pre_univar ~name ()\n            else newvar ~name ()\n          in\n          used_variables := Tbl.add name (v, styp.ptyp_loc) !used_variables;\n          v)\n    in\n    ctyp (Ttyp_var name) ty\n  | Ptyp_arrow (l, st1, st2) ->\n    let cty1 = transl_type env policy st1 in\n    let cty2 = transl_type env policy st2 in\n    let ty1 = cty1.ctyp_type in\n    let ty1 =\n      if Btype.is_optional l then\n        newty (Tconstr (Predef.path_option, [ty1], ref Mnil))\n      else ty1\n    in\n    let ty = newty (Tarrow (l, ty1, cty2.ctyp_type, Cok)) in\n    ctyp (Ttyp_arrow (l, cty1, 
cty2)) ty\n  | Ptyp_tuple stl ->\n    assert (List.length stl >= 2);\n    let ctys = List.map (transl_type env policy) stl in\n    let ty = newty (Ttuple (List.map (fun ctyp -> ctyp.ctyp_type) ctys)) in\n    ctyp (Ttyp_tuple ctys) ty\n  | Ptyp_constr (lid, stl) ->\n    let path, decl = find_type env lid.loc lid.txt in\n    let stl =\n      match stl with\n      | [({ptyp_desc = Ptyp_any} as t)] when decl.type_arity > 1 ->\n        List.map (fun _ -> t) decl.type_params\n      | _ -> stl\n    in\n    if List.length stl <> decl.type_arity then\n      raise\n        (Error\n           ( styp.ptyp_loc,\n             env,\n             Type_arity_mismatch (lid.txt, decl.type_arity, List.length stl) ));\n    let args = List.map (transl_type env policy) stl in\n    let params = instance_list decl.type_params in\n    let unify_param =\n      match decl.type_manifest with\n      | None -> unify_var\n      | Some ty ->\n        if (repr ty).level = Btype.generic_level then unify_var else unify\n    in\n    List.iter2\n      (fun (sty, cty) ty' ->\n        try unify_param env ty' cty.ctyp_type\n        with Unify trace ->\n          raise (Error (sty.ptyp_loc, env, Type_mismatch (swap_list trace))))\n      (List.combine stl args) params;\n    let constr = newconstr path (List.map (fun ctyp -> ctyp.ctyp_type) args) in\n    (try Ctype.enforce_constraints env constr\n     with Unify trace ->\n       raise (Error (styp.ptyp_loc, env, Type_mismatch trace)));\n    ctyp (Ttyp_constr (path, lid, args)) constr\n  | Ptyp_object (fields, o) ->\n    let ty, fields = transl_fields env policy o fields in\n    ctyp (Ttyp_object (fields, o)) (newobj ty)\n  | Ptyp_class (lid, stl) ->\n    let path, decl, _is_variant =\n      try\n        let path = Env.lookup_type lid.txt env in\n        let decl = Env.find_type path env in\n        let rec check decl =\n          match decl.type_manifest with\n          | None -> raise Not_found\n          | Some ty -> (\n            match (repr ty).desc 
with\n            | Tvariant row when Btype.static_row row -> ()\n            | Tconstr (path, _, _) -> check (Env.find_type path env)\n            | _ -> raise Not_found)\n        in\n        check decl;\n        Location.deprecated styp.ptyp_loc\n          \"old syntax for polymorphic variant type\";\n        (path, decl, true)\n      with Not_found -> (\n        try\n          let lid2 =\n            match lid.txt with\n            | Longident.Lident s -> Longident.Lident (\"#\" ^ s)\n            | Longident.Ldot (r, s) -> Longident.Ldot (r, \"#\" ^ s)\n            | Longident.Lapply (_, _) -> fatal_error \"Typetexp.transl_type\"\n          in\n          let path = Env.lookup_type lid2 env in\n          let decl = Env.find_type path env in\n          (path, decl, false)\n        with Not_found ->\n          ignore (find_class env lid.loc lid.txt);\n          assert false)\n    in\n    if List.length stl <> decl.type_arity then\n      raise\n        (Error\n           ( styp.ptyp_loc,\n             env,\n             Type_arity_mismatch (lid.txt, decl.type_arity, List.length stl) ));\n    let args = List.map (transl_type env policy) stl in\n    let params = instance_list decl.type_params in\n    List.iter2\n      (fun (sty, cty) ty' ->\n        try unify_var env ty' cty.ctyp_type\n        with Unify trace ->\n          raise (Error (sty.ptyp_loc, env, Type_mismatch (swap_list trace))))\n      (List.combine stl args) params;\n    let ty_args = List.map (fun ctyp -> ctyp.ctyp_type) args in\n    let ty =\n      try Ctype.expand_head env (newconstr path ty_args)\n      with Unify trace ->\n        raise (Error (styp.ptyp_loc, env, Type_mismatch trace))\n    in\n    let ty =\n      match ty.desc with\n      | Tvariant row ->\n        let row = Btype.row_repr row in\n        let fields =\n          List.map\n            (fun (l, f) ->\n              ( l,\n                match Btype.row_field_repr f with\n                | Rpresent (Some ty) -> Reither (false, [ty], 
false, ref None)\n                | Rpresent None -> Reither (true, [], false, ref None)\n                | _ -> f ))\n            row.row_fields\n        in\n        let row =\n          {\n            row_closed = true;\n            row_fields = fields;\n            row_bound = ();\n            row_name = Some (path, ty_args);\n            row_fixed = false;\n            row_more = newvar ();\n          }\n        in\n        let static = Btype.static_row row in\n        let row =\n          if static then {row with row_more = newty Tnil}\n          else if policy <> Univars then row\n          else {row with row_more = new_pre_univar ()}\n        in\n        newty (Tvariant row)\n      | Tobject (fi, _) ->\n        let _, tv = flatten_fields fi in\n        if policy = Univars then pre_univars := tv :: !pre_univars;\n        ty\n      | _ -> assert false\n    in\n    ctyp (Ttyp_class (path, lid, args)) ty\n  | Ptyp_alias (st, alias) ->\n    let cty =\n      try\n        let t =\n          try List.assoc alias !univars\n          with Not_found ->\n            instance env (fst (Tbl.find alias !used_variables))\n        in\n        let ty = transl_type env policy st in\n        (try unify_var env t ty.ctyp_type\n         with Unify trace ->\n           let trace = swap_list trace in\n           raise (Error (styp.ptyp_loc, env, Alias_type_mismatch trace)));\n        ty\n      with Not_found ->\n        let t = newvar () in\n        used_variables := Tbl.add alias (t, styp.ptyp_loc) !used_variables;\n        let ty = transl_type env policy st in\n        (try unify_var env t ty.ctyp_type\n         with Unify trace ->\n           let trace = swap_list trace in\n           raise (Error (styp.ptyp_loc, env, Alias_type_mismatch trace)));\n        let t = instance env t in\n        let px = Btype.proxy t in\n        (match px.desc with\n        | Tvar None ->\n          Btype.log_type px;\n          px.desc <- Tvar (Some alias)\n        | Tunivar None ->\n          
Btype.log_type px;\n          px.desc <- Tunivar (Some alias)\n        | _ -> ());\n        {ty with ctyp_type = t}\n    in\n    ctyp (Ttyp_alias (cty, alias)) cty.ctyp_type\n  | Ptyp_variant (fields, closed, present) ->\n    let name = ref None in\n    let mkfield l f =\n      newty\n        (Tvariant\n           {\n             row_fields = [(l, f)];\n             row_more = newvar ();\n             row_bound = ();\n             row_closed = true;\n             row_fixed = false;\n             row_name = None;\n           })\n    in\n    let hfields = Hashtbl.create 17 in\n    let collection_detect = Hashtbl.create 17 in\n    let add_typed_field loc l f =\n      (if not !Config.bs_only then\n         let h = Btype.hash_variant l in\n         if Hashtbl.mem collection_detect h then (\n           let l' = Hashtbl.find collection_detect h in\n           (* Check for tag conflicts *)\n           if l <> l' then\n             raise (Error (styp.ptyp_loc, env, Variant_tags (l, l'))))\n         else Hashtbl.add collection_detect h l);\n      try\n        let _, f' = Hashtbl.find hfields l in\n        let ty = mkfield l f and ty' = mkfield l f' in\n        if equal env false [ty] [ty'] then ()\n        else\n          try unify env ty ty'\n          with Unify _trace ->\n            raise (Error (loc, env, Constructor_mismatch (ty, ty')))\n      with Not_found -> Hashtbl.add hfields l (l, f)\n    in\n    let add_field = function\n      | Rtag (l, attrs, c, stl) ->\n        name := None;\n        let tl =\n          Builtin_attributes.warning_scope attrs (fun () ->\n              List.map (transl_type env policy) stl)\n        in\n        let f =\n          match present with\n          | Some present when not (List.mem l.txt present) ->\n            let ty_tl = List.map (fun cty -> cty.ctyp_type) tl in\n            Reither (c, ty_tl, false, ref None)\n          | _ -> (\n            if List.length stl > 1 || (c && stl <> []) then\n              raise (Error 
(styp.ptyp_loc, env, Present_has_conjunction l.txt));\n            match tl with\n            | [] -> Rpresent None\n            | st :: _ -> Rpresent (Some st.ctyp_type))\n        in\n        add_typed_field styp.ptyp_loc l.txt f;\n        Ttag (l, attrs, c, tl)\n      | Rinherit sty ->\n        let cty = transl_type env policy sty in\n        let ty = cty.ctyp_type in\n        let nm =\n          match repr cty.ctyp_type with\n          | {desc = Tconstr (p, tl, _)} -> Some (p, tl)\n          | _ -> None\n        in\n        (* Set name if there are no fields yet *)\n        if Hashtbl.length hfields <> 0 then name := None else name := nm;\n        let fl =\n          match (expand_head env cty.ctyp_type, nm) with\n          | {desc = Tvariant row}, _ when Btype.static_row row ->\n            let row = Btype.row_repr row in\n            row.row_fields\n          | {desc = Tvar _}, Some (p, _) ->\n            raise (Error (sty.ptyp_loc, env, Unbound_type_constructor_2 p))\n          | _ -> raise (Error (sty.ptyp_loc, env, Not_a_variant ty))\n        in\n        List.iter\n          (fun (l, f) ->\n            let f =\n              match present with\n              | Some present when not (List.mem l present) -> (\n                match f with\n                | Rpresent (Some ty) -> Reither (false, [ty], false, ref None)\n                | Rpresent None -> Reither (true, [], false, ref None)\n                | _ -> assert false)\n              | _ -> f\n            in\n            add_typed_field sty.ptyp_loc l f)\n          fl;\n        Tinherit cty\n    in\n    let tfields = List.map add_field fields in\n    let fields = Hashtbl.fold (fun _ p l -> p :: l) hfields [] in\n    (match present with\n    | None -> ()\n    | Some present ->\n      List.iter\n        (fun l ->\n          if not (List.mem_assoc l fields) then\n            raise (Error (styp.ptyp_loc, env, Present_has_no_type l)))\n        present);\n    let row =\n      {\n        row_fields = List.rev 
fields;\n        row_more = newvar ();\n        row_bound = ();\n        row_closed = closed = Closed;\n        row_fixed = false;\n        row_name = !name;\n      }\n    in\n    let static = Btype.static_row row in\n    let row =\n      if static then {row with row_more = newty Tnil}\n      else if policy <> Univars then row\n      else {row with row_more = new_pre_univar ()}\n    in\n    let ty = newty (Tvariant row) in\n    ctyp (Ttyp_variant (tfields, closed, present)) ty\n  | Ptyp_poly (vars, st) ->\n    let vars = List.map (fun v -> v.txt) vars in\n    begin_def ();\n    let new_univars = List.map (fun name -> (name, newvar ~name ())) vars in\n    let old_univars = !univars in\n    univars := new_univars @ !univars;\n    let cty = transl_type env policy st in\n    let ty = cty.ctyp_type in\n    univars := old_univars;\n    end_def ();\n    generalize ty;\n    let ty_list =\n      List.fold_left\n        (fun tyl (name, ty1) ->\n          let v = Btype.proxy ty1 in\n          if deep_occur v ty then\n            match v.desc with\n            | Tvar name when v.level = Btype.generic_level ->\n              v.desc <- Tunivar name;\n              v :: tyl\n            | _ -> raise (Error (styp.ptyp_loc, env, Cannot_quantify (name, v)))\n          else tyl)\n        [] new_univars\n    in\n    let ty' = Btype.newgenty (Tpoly (ty, List.rev ty_list)) in\n    unify_var env (newvar ()) ty';\n    ctyp (Ttyp_poly (vars, cty)) ty'\n  | Ptyp_package (p, l) ->\n    let l, mty = create_package_mty true styp.ptyp_loc env (p, l) in\n    let z = narrow () in\n    let mty = !transl_modtype env mty in\n    widen z;\n    let ptys = List.map (fun (s, pty) -> (s, transl_type env policy pty)) l in\n    let path = !transl_modtype_longident styp.ptyp_loc env p.txt in\n    let ty =\n      newty\n        (Tpackage\n           ( path,\n             List.map (fun (s, _pty) -> s.txt) l,\n             List.map (fun (_, cty) -> cty.ctyp_type) ptys ))\n    in\n    ctyp\n      
(Ttyp_package\n         {\n           pack_path = path;\n           pack_type = mty.mty_type;\n           pack_fields = ptys;\n           pack_txt = p;\n         })\n      ty\n  | Ptyp_extension ext ->\n    raise (Error_forward (Builtin_attributes.error_of_extension ext))\n\nand transl_poly_type env policy t =\n  transl_type env policy (Ast_helper.Typ.force_poly t)\n\nand transl_fields env policy o fields =\n  let hfields = Hashtbl.create 17 in\n  let add_typed_field loc l ty =\n    try\n      let ty' = Hashtbl.find hfields l in\n      if equal env false [ty] [ty'] then ()\n      else\n        try unify env ty ty'\n        with Unify _trace ->\n          raise (Error (loc, env, Method_mismatch (l, ty, ty')))\n    with Not_found -> Hashtbl.add hfields l ty\n  in\n  let add_field = function\n    | Otag (s, a, ty1) ->\n      let ty1 =\n        Builtin_attributes.warning_scope a (fun () ->\n            transl_poly_type env policy ty1)\n      in\n      let field = OTtag (s, a, ty1) in\n      add_typed_field ty1.ctyp_loc s.txt ty1.ctyp_type;\n      field\n    | Oinherit sty -> (\n      let cty = transl_type env policy sty in\n      let nm =\n        match repr cty.ctyp_type with\n        | {desc = Tconstr (p, _, _)} -> Some p\n        | _ -> None\n      in\n      let t = expand_head env cty.ctyp_type in\n      match (t, nm) with\n      | {desc = Tobject ({desc = (Tfield _ | Tnil) as tf}, _)}, _ ->\n        if opened_object t then\n          raise (Error (sty.ptyp_loc, env, Opened_object nm));\n        let rec iter_add = function\n          | Tfield (s, _k, ty1, ty2) ->\n            add_typed_field sty.ptyp_loc s ty1;\n            iter_add ty2.desc\n          | Tnil -> ()\n          | _ -> assert false\n        in\n        iter_add tf;\n        OTinherit cty\n      | {desc = Tvar _}, Some p ->\n        raise (Error (sty.ptyp_loc, env, Unbound_type_constructor_2 p))\n      | _ -> raise (Error (sty.ptyp_loc, env, Not_an_object t)))\n  in\n  let object_fields = List.map 
add_field fields in\n  let fields = Hashtbl.fold (fun s ty l -> (s, ty) :: l) hfields [] in\n  let ty_init =\n    match (o, policy) with\n    | Closed, _ -> newty Tnil\n    | Open, Univars -> new_pre_univar ()\n    | Open, _ -> newvar ()\n  in\n  let ty =\n    List.fold_left\n      (fun ty (s, ty') -> newty (Tfield (s, Fpresent, ty', ty)))\n      ty_init fields\n  in\n  (ty, object_fields)\n\n(* Make the rows \"fixed\" in this type, to make universal check easier *)\nlet rec make_fixed_univars ty =\n  let ty = repr ty in\n  if ty.level >= Btype.lowest_level then (\n    Btype.mark_type_node ty;\n    match ty.desc with\n    | Tvariant row ->\n      let row = Btype.row_repr row in\n      if Btype.is_Tunivar (Btype.row_more row) then\n        ty.desc <-\n          Tvariant\n            {\n              row with\n              row_fixed = true;\n              row_fields =\n                List.map\n                  (fun ((s, f) as p) ->\n                    match Btype.row_field_repr f with\n                    | Reither (c, tl, _m, r) -> (s, Reither (c, tl, true, r))\n                    | _ -> p)\n                  row.row_fields;\n            };\n      Btype.iter_row make_fixed_univars row\n    | _ -> Btype.iter_type_expr make_fixed_univars ty)\n\nlet make_fixed_univars ty =\n  make_fixed_univars ty;\n  Btype.unmark_type ty\n\nlet create_package_mty = create_package_mty false\n\nlet globalize_used_variables env fixed =\n  let r = ref [] in\n  Tbl.iter\n    (fun name (ty, loc) ->\n      let v = new_global_var () in\n      let snap = Btype.snapshot () in\n      if\n        try\n          unify env v ty;\n          true\n        with _ ->\n          Btype.backtrack snap;\n          false\n      then (\n        try r := (loc, v, Tbl.find name !type_variables) :: !r\n        with Not_found ->\n          if fixed && Btype.is_Tvar (repr ty) then\n            raise (Error (loc, env, Unbound_type_variable (\"'\" ^ name)));\n          let v2 = new_global_var () in\n          
r := (loc, v, v2) :: !r;\n          type_variables := Tbl.add name v2 !type_variables))\n    !used_variables;\n  used_variables := Tbl.empty;\n  fun () ->\n    List.iter\n      (function\n        | loc, t1, t2 -> (\n          try unify env t1 t2\n          with Unify trace -> raise (Error (loc, env, Type_mismatch trace))))\n      !r\n\nlet transl_simple_type env fixed styp =\n  univars := [];\n  used_variables := Tbl.empty;\n  let typ = transl_type env (if fixed then Fixed else Extensible) styp in\n  globalize_used_variables env fixed ();\n  make_fixed_univars typ.ctyp_type;\n  typ\n\nlet transl_simple_type_univars env styp =\n  univars := [];\n  used_variables := Tbl.empty;\n  pre_univars := [];\n  begin_def ();\n  let typ = transl_type env Univars styp in\n  (* Only keep already global variables in used_variables *)\n  let new_variables = !used_variables in\n  used_variables := Tbl.empty;\n  Tbl.iter\n    (fun name p ->\n      if Tbl.mem name !type_variables then\n        used_variables := Tbl.add name p !used_variables)\n    new_variables;\n  globalize_used_variables env false ();\n  end_def ();\n  generalize typ.ctyp_type;\n  let univs =\n    List.fold_left\n      (fun acc v ->\n        let v = repr v in\n        match v.desc with\n        | Tvar name when v.level = Btype.generic_level ->\n          v.desc <- Tunivar name;\n          v :: acc\n        | _ -> acc)\n      [] !pre_univars\n  in\n  make_fixed_univars typ.ctyp_type;\n  {\n    typ with\n    ctyp_type = instance env (Btype.newgenty (Tpoly (typ.ctyp_type, univs)));\n  }\n\nlet transl_simple_type_delayed env styp =\n  univars := [];\n  used_variables := Tbl.empty;\n  let typ = transl_type env Extensible styp in\n  make_fixed_univars typ.ctyp_type;\n  (typ, globalize_used_variables env false)\n\nlet transl_type_scheme env styp =\n  reset_type_variables ();\n  begin_def ();\n  let typ = transl_simple_type env false styp in\n  end_def ();\n  generalize typ.ctyp_type;\n  typ\n\n(* Error report *)\n\nopen 
Format\nopen Printtyp\n\nlet did_you_mean ppf choices : bool =\n  (* flush now to get the error report early, in the (unheard of) case\n     where the linear search would take a bit of time; in the worst\n     case, the user has seen the error, she can interrupt the process\n     before the spell-checking terminates. *)\n  Format.fprintf ppf \"@?\";\n  match choices () with\n  | [] -> false\n  | last :: rev_rest ->\n    Format.fprintf ppf \"@[<v 2>@,@,@{<info>Hint: Did you mean %s%s%s?@}@]\"\n      (String.concat \", \" (List.rev rev_rest))\n      (if rev_rest = [] then \"\" else \" or \")\n      last;\n    true\n\nlet super_spellcheck ppf fold env lid =\n  let choices path name : string list =\n    let env : string list = fold (fun x _ _ xs -> x :: xs) path env [] in\n    Misc.spellcheck env name\n  in\n  match lid with\n  | Longident.Lapply _ -> false\n  | Longident.Lident s -> did_you_mean ppf (fun _ -> choices None s)\n  | Longident.Ldot (r, s) -> did_you_mean ppf (fun _ -> choices (Some r) s)\n\nlet spellcheck ppf fold env lid =\n  let choices ~path name =\n    let env = fold (fun x xs -> x :: xs) path env [] in\n    Misc.spellcheck env name\n  in\n  match lid with\n  | Longident.Lapply _ -> ()\n  | Longident.Lident s -> Misc.did_you_mean ppf (fun () -> choices ~path:None s)\n  | Longident.Ldot (r, s) ->\n    Misc.did_you_mean ppf (fun () -> choices ~path:(Some r) s)\n\nlet fold_descr fold get_name f = fold (fun descr acc -> f (get_name descr) acc)\nlet fold_simple fold4 f = fold4 (fun name _path _descr acc -> f name acc)\n\nlet fold_constructors = fold_descr Env.fold_constructors (fun d -> d.cstr_name)\nlet fold_labels = fold_descr Env.fold_labels (fun d -> d.lbl_name)\nlet fold_classs = fold_simple Env.fold_classs\nlet fold_modtypes = fold_simple Env.fold_modtypes\nlet fold_cltypes = fold_simple Env.fold_cltypes\n\nlet report_error env ppf = function\n  | Unbound_type_variable name ->\n    (* we don't use \"spellcheck\" here: the function that raises this\n  
     error seems not to be called anywhere, so it's unclear how it\n       should be handled *)\n    fprintf ppf \"Unbound type parameter %s@.\" name\n  | Unbound_type_constructor lid ->\n    (* modified *)\n    Format.fprintf ppf \"@[<v>This type constructor, `%a`, can't be found.@ \"\n      Printtyp.longident lid;\n    let has_candidate = super_spellcheck ppf Env.fold_types env lid in\n    if !Config.syntax_kind = `rescript && not has_candidate then\n      Format.fprintf ppf\n        \"If you wanted to write a recursive type, don't forget the `rec` in \\\n         `type rec`@]\"\n  | Unbound_type_constructor_2 p ->\n    fprintf ppf \"The type constructor@ %a@ is not yet completely defined\" path p\n  | Type_arity_mismatch (lid, expected, provided) ->\n    if expected == 0 then\n      fprintf ppf\n        \"@[The type %a is not generic so expects no arguments,@ but is here \\\n         applied to %i argument(s).@ Have you tried removing the angular \\\n         brackets `<` and `>` and the@ arguments within them and just writing \\\n         `%a` instead? 
@]\"\n        longident lid provided longident lid\n    else\n      fprintf ppf\n        \"@[The type constructor %a@ expects %i argument(s),@ but is here \\\n         applied to %i argument(s)@]\"\n        longident lid expected provided\n  | Bound_type_variable name ->\n    fprintf ppf \"Already bound type parameter '%s\" name\n  | Recursive_type -> fprintf ppf \"This type is recursive\"\n  | Unbound_row_variable lid ->\n    (* we don't use \"spellcheck\" here: this error is not raised\n       anywhere so it's unclear how it should be handled *)\n    fprintf ppf \"Unbound row variable in #%a\" longident lid\n  | Type_mismatch trace ->\n    Printtyp.report_unification_error ppf Env.empty trace\n      (function\n        | ppf -> fprintf ppf \"This type\")\n      (function ppf -> fprintf ppf \"should be an instance of type\")\n  | Alias_type_mismatch trace ->\n    Printtyp.report_unification_error ppf Env.empty trace\n      (function\n        | ppf -> fprintf ppf \"This alias is bound to type\")\n      (function ppf -> fprintf ppf \"but is used as an instance of type\")\n  | Present_has_conjunction l ->\n    fprintf ppf \"The present constructor %s has a conjunctive type\" l\n  | Present_has_no_type l ->\n    fprintf ppf \"The present constructor %s has no type\" l\n  | Constructor_mismatch (ty, ty') ->\n    wrap_printing_env env (fun () ->\n        Printtyp.reset_and_mark_loops_list [ty; ty'];\n        fprintf ppf \"@[<hov>%s %a@ %s@ %a@]\"\n          \"This variant type contains a constructor\" Printtyp.type_expr ty\n          \"which should be\" Printtyp.type_expr ty')\n  | Not_a_variant ty -> (\n    Printtyp.reset_and_mark_loops ty;\n    fprintf ppf \"@[The type %a@ does not expand to a polymorphic variant type@]\"\n      Printtyp.type_expr ty;\n    match ty.desc with\n    | Tvar (Some s) ->\n      (* PR#7012: help the user that wrote 'Foo instead of `Foo *)\n      Misc.did_you_mean ppf (fun () -> [\"`\" ^ s])\n    | _ -> ())\n  | Variant_tags (lab1, lab2) ->\n  
  fprintf ppf \"@[Variant tags %s@ and %s have the same hash value.@ %s@]\"\n      (!Printtyp.print_res_poly_identifier lab1)\n      (!Printtyp.print_res_poly_identifier lab2)\n      \"Change one of them.\"\n  | Invalid_variable_name name ->\n    fprintf ppf \"The type variable name %s is not allowed in programs\" name\n  | Cannot_quantify (name, v) ->\n    fprintf ppf\n      \"@[<hov>The universal type variable '%s cannot be generalized:@ %s.@]\"\n      name\n      (if Btype.is_Tvar v then \"it escapes its scope\"\n       else if Btype.is_Tunivar v then \"it is already bound to another variable\"\n       else \"it is not a variable\")\n  | Multiple_constraints_on_type s ->\n    fprintf ppf \"Multiple constraints for type %a\" longident s\n  | Method_mismatch (l, ty, ty') ->\n    wrap_printing_env env (fun () ->\n        Printtyp.reset_and_mark_loops_list [ty; ty'];\n        fprintf ppf \"@[<hov>Method '%s' has type %a,@ which should be %a@]\" l\n          Printtyp.type_expr ty Printtyp.type_expr ty')\n  | Unbound_value lid ->\n    (* modified *)\n    (match lid with\n    | Ldot (outer, inner) ->\n      Format.fprintf ppf \"The value %s can't be found in %a\" inner\n        Printtyp.longident outer\n    | other_ident ->\n      Format.fprintf ppf \"The value %a can't be found\" Printtyp.longident\n        other_ident);\n    super_spellcheck ppf Env.fold_values env lid |> ignore\n  | Unbound_module lid ->\n    (* modified *)\n    (match lid with\n    | Lident \"Str\" ->\n      Format.fprintf ppf\n        \"@[@{<info>The module or file %a can't be found.@}@,\\\n         @,\\\n         Are you trying to use the standard library's Str?@ If you're \\\n         compiling to JavaScript,@ use @{<info>Js.Re@} instead.@ Otherwise, \\\n         add str.cma to your ocamlc/ocamlopt command.@]\"\n        Printtyp.longident lid\n    | lid ->\n      Format.fprintf ppf\n        \"@[<v>@{<info>The module or file %a can't be found.@}@,\\\n         @[<v 2>- If it's a third-party 
dependency:@,\\\n         - Did you add it to the \\\"bs-dependencies\\\" or \\\n         \\\"bs-dev-dependencies\\\" in bsconfig.json?@]@,\\\n         - Did you include the file's directory to the \\\"sources\\\" in \\\n         bsconfig.json?@,\"\n        Printtyp.longident lid);\n    super_spellcheck ppf Env.fold_modules env lid |> ignore\n  | Unbound_constructor lid ->\n    (* modified *)\n    Format.fprintf ppf\n      \"@[<v>@{<info>The variant constructor %a can't be found.@}@,\\\n       @,\\\n       @[<v 2>- If it's defined in another module or file, bring it into scope \\\n       by:@,\\\n       @[- Prefixing it with said module name:@ @{<info>TheModule.%a@}@]@,\\\n       @[- Or specifying its type:@ @{<info>let theValue: TheModule.theType = \\\n       %a@}@]@]@,\\\n       - @[Constructors and modules are both capitalized.@ Did you want the \\\n       latter?@ Then instead of @{<dim>let foo = Bar@}, try @{<info>module Foo \\\n       = Bar@}.@]@]\"\n      Printtyp.longident lid Printtyp.longident lid Printtyp.longident lid;\n    spellcheck ppf fold_constructors env lid\n  | Unbound_label lid ->\n    (* modified *)\n    Format.fprintf ppf\n      \"@[<v>@{<info>The record field %a can't be found.@}@,\\\n       @,\\\n       If it's defined in another module or file, bring it into scope by:@,\\\n       @[- Prefixing it with said module name:@ @{<info>TheModule.%a@}@]@,\\\n       @[- Or specifying its type:@ @{<info>let theValue: TheModule.theType = \\\n       {%a: VALUE}@}@]@]\"\n      Printtyp.longident lid Printtyp.longident lid Printtyp.longident lid;\n    spellcheck ppf fold_labels env lid\n  | Unbound_class lid ->\n    fprintf ppf \"Unbound class %a\" longident lid;\n    spellcheck ppf fold_classs env lid\n  | Unbound_modtype lid ->\n    fprintf ppf \"Unbound module type %a\" longident lid;\n    spellcheck ppf fold_modtypes env lid\n  | Unbound_cltype lid ->\n    fprintf ppf \"Unbound class type %a\" longident lid;\n    spellcheck ppf fold_cltypes env lid\n 
 | Ill_typed_functor_application lid ->\n    fprintf ppf \"Ill-typed functor application %a\" longident lid\n  | Illegal_reference_to_recursive_module ->\n    fprintf ppf \"Illegal recursive module reference\"\n  | Access_functor_as_structure lid ->\n    fprintf ppf \"The module %a is a functor, not a structure\" longident lid\n  | Apply_structure_as_functor lid ->\n    fprintf ppf \"The module %a is a structure, not a functor\" longident lid\n  | Cannot_scrape_alias (lid, p) ->\n    fprintf ppf \"The module %a is an alias for module %a, which is missing\"\n      longident lid path p\n  | Opened_object nm ->\n    fprintf ppf \"Illegal open object type%a\"\n      (fun ppf -> function\n        | Some p -> fprintf ppf \"@ %a\" path p\n        | None -> fprintf ppf \"\")\n      nm\n  | Not_an_object ty ->\n    Printtyp.reset_and_mark_loops ty;\n    fprintf ppf \"@[The type %a@ is not an object type@]\" Printtyp.type_expr ty\n\nlet () =\n  Location.register_error_of_exn (function\n    | Error (loc, env, err) ->\n      Some (Location.error_of_printer loc (report_error env) err)\n    | Error_forward err -> Some err\n    | _ -> None)\n"
  },
  {
    "path": "analysis/vendor/ml/typetexp.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*             Xavier Leroy, projet Cristal, INRIA Rocquencourt           *)\n(*                                                                        *)\n(*   Copyright 1996 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\n(* Typechecking of type expressions for the core language *)\n\nopen Types\n\nval transl_simple_type :\n  Env.t -> bool -> Parsetree.core_type -> Typedtree.core_type\nval transl_simple_type_univars :\n  Env.t -> Parsetree.core_type -> Typedtree.core_type\nval transl_simple_type_delayed :\n  Env.t -> Parsetree.core_type -> Typedtree.core_type * (unit -> unit)\n(* Translate a type, but leave type variables unbound. Returns\n   the type and a function that binds the type variable. 
*)\n\nval transl_type_scheme : Env.t -> Parsetree.core_type -> Typedtree.core_type\nval reset_type_variables : unit -> unit\nval type_variable : Location.t -> string -> type_expr\nval transl_type_param : Env.t -> Parsetree.core_type -> Typedtree.core_type\n\ntype variable_context\nval narrow : unit -> variable_context\nval widen : variable_context -> unit\n\nexception Already_bound\n\ntype error =\n  | Unbound_type_variable of string\n  | Unbound_type_constructor of Longident.t\n  | Unbound_type_constructor_2 of Path.t\n  | Type_arity_mismatch of Longident.t * int * int\n  | Bound_type_variable of string\n  | Recursive_type\n  | Unbound_row_variable of Longident.t\n  | Type_mismatch of (type_expr * type_expr) list\n  | Alias_type_mismatch of (type_expr * type_expr) list\n  | Present_has_conjunction of string\n  | Present_has_no_type of string\n  | Constructor_mismatch of type_expr * type_expr\n  | Not_a_variant of type_expr\n  | Variant_tags of string * string\n  | Invalid_variable_name of string\n  | Cannot_quantify of string * type_expr\n  | Multiple_constraints_on_type of Longident.t\n  | Method_mismatch of string * type_expr * type_expr\n  | Unbound_value of Longident.t\n  | Unbound_constructor of Longident.t\n  | Unbound_label of Longident.t\n  | Unbound_module of Longident.t\n  | Unbound_class of Longident.t\n  | Unbound_modtype of Longident.t\n  | Unbound_cltype of Longident.t\n  | Ill_typed_functor_application of Longident.t\n  | Illegal_reference_to_recursive_module\n  | Access_functor_as_structure of Longident.t\n  | Apply_structure_as_functor of Longident.t\n  | Cannot_scrape_alias of Longident.t * Path.t\n  | Opened_object of Path.t option\n  | Not_an_object of type_expr\n\nexception Error of Location.t * Env.t * error\n\nval report_error : Env.t -> Format.formatter -> error -> unit\n\n(* Support for first-class modules. 
*)\nval transl_modtype_longident :\n  (* from Typemod *)\n  (Location.t -> Env.t -> Longident.t -> Path.t) ref\nval transl_modtype :\n  (* from Typemod *)\n  (Env.t -> Parsetree.module_type -> Typedtree.module_type) ref\nval create_package_mty :\n  Location.t ->\n  Env.t ->\n  Parsetree.package_type ->\n  (Longident.t Asttypes.loc * Parsetree.core_type) list * Parsetree.module_type\n\nval find_type : Env.t -> Location.t -> Longident.t -> Path.t * type_declaration\nval find_constructor :\n  Env.t -> Location.t -> Longident.t -> constructor_description\nval find_all_constructors :\n  Env.t ->\n  Location.t ->\n  Longident.t ->\n  (constructor_description * (unit -> unit)) list\nval find_label : Env.t -> Location.t -> Longident.t -> label_description\nval find_all_labels :\n  Env.t ->\n  Location.t ->\n  Longident.t ->\n  (label_description * (unit -> unit)) list\nval find_value :\n  Env.t -> Location.t -> Longident.t -> Path.t * value_description\nval find_class :\n  Env.t -> Location.t -> Longident.t -> Path.t * class_declaration\nval find_module :\n  Env.t -> Location.t -> Longident.t -> Path.t * module_declaration\nval lookup_module : ?load:bool -> Env.t -> Location.t -> Longident.t -> Path.t\nval find_modtype :\n  Env.t -> Location.t -> Longident.t -> Path.t * modtype_declaration\nval find_class_type :\n  Env.t -> Location.t -> Longident.t -> Path.t * class_type_declaration\n\nval unbound_constructor_error : Env.t -> Longident.t Location.loc -> 'a\nval unbound_label_error : Env.t -> Longident.t Location.loc -> 'a\n\nval spellcheck :\n  Format.formatter ->\n  (('a -> 'a list -> 'a list) ->\n  Longident.t option ->\n  'b ->\n  'c list ->\n  string list) ->\n  'b ->\n  Longident.t ->\n  unit\n"
  },
  {
    "path": "analysis/vendor/ml/untypeast.ml",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*    Thomas Gazagnaire (OCamlPro), Fabrice Le Fessant (INRIA Saclay)     *)\n(*                                                                        *)\n(*   Copyright 2007 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Longident\nopen Asttypes\nopen Parsetree\nopen Ast_helper\n\nmodule T = Typedtree\n\ntype mapper = {\n  attribute: mapper -> T.attribute -> attribute;\n  attributes: mapper -> T.attribute list -> attribute list;\n  case: mapper -> T.case -> case;\n  cases: mapper -> T.case list -> case list;\n  class_signature: mapper -> T.class_signature -> class_signature;\n  class_type: mapper -> T.class_type -> class_type;\n  class_type_declaration:\n    mapper -> T.class_type_declaration -> class_type_declaration;\n  class_type_field: mapper -> T.class_type_field -> class_type_field;\n  constructor_declaration:\n    mapper -> T.constructor_declaration -> constructor_declaration;\n  expr: mapper -> T.expression -> expression;\n  extension_constructor:\n    mapper -> T.extension_constructor -> extension_constructor;\n  include_declaration: mapper -> T.include_declaration -> 
include_declaration;\n  include_description: mapper -> T.include_description -> include_description;\n  label_declaration: mapper -> T.label_declaration -> label_declaration;\n  location: mapper -> Location.t -> Location.t;\n  module_binding: mapper -> T.module_binding -> module_binding;\n  module_declaration: mapper -> T.module_declaration -> module_declaration;\n  module_expr: mapper -> T.module_expr -> module_expr;\n  module_type: mapper -> T.module_type -> module_type;\n  module_type_declaration:\n    mapper -> T.module_type_declaration -> module_type_declaration;\n  package_type: mapper -> T.package_type -> package_type;\n  open_description: mapper -> T.open_description -> open_description;\n  pat: mapper -> T.pattern -> pattern;\n  row_field: mapper -> T.row_field -> row_field;\n  object_field: mapper -> T.object_field -> object_field;\n  signature: mapper -> T.signature -> signature;\n  signature_item: mapper -> T.signature_item -> signature_item;\n  structure: mapper -> T.structure -> structure;\n  structure_item: mapper -> T.structure_item -> structure_item;\n  typ: mapper -> T.core_type -> core_type;\n  type_declaration: mapper -> T.type_declaration -> type_declaration;\n  type_extension: mapper -> T.type_extension -> type_extension;\n  type_kind: mapper -> T.type_kind -> type_kind;\n  value_binding: mapper -> T.value_binding -> value_binding;\n  value_description: mapper -> T.value_description -> value_description;\n  with_constraint:\n    mapper ->\n    Path.t * Longident.t Location.loc * T.with_constraint ->\n    with_constraint;\n}\n\nopen T\n\n(*\nSome notes:\n\n   * For Pexp_function, we cannot go back to the exact original version\n   when there is a default argument, because the default argument is\n   translated in the typer. 
The code, if printed, will not be parsable because\n   new generated identifiers are not correct.\n\n   * For Pexp_apply, it is unclear whether arguments are reordered, especially\n    when there are optional arguments.\n\n*)\n\n(** Utility functions. *)\n\nlet map_opt f = function\n  | None -> None\n  | Some e -> Some (f e)\n\nlet rec lident_of_path = function\n  | Path.Pident id -> Longident.Lident (Ident.name id)\n  | Path.Pdot (p, s, _) -> Longident.Ldot (lident_of_path p, s)\n  | Path.Papply (p1, p2) ->\n    Longident.Lapply (lident_of_path p1, lident_of_path p2)\n\nlet map_loc sub {loc; txt} = {loc = sub.location sub loc; txt}\n\n(** Try a name [$name$0], check if it's free, if not, increment and repeat. *)\nlet fresh_name s env =\n  let rec aux i =\n    let name = s ^ string_of_int i in\n    try\n      let _ = Env.lookup_value (Lident name) env in\n      name\n    with Not_found -> aux (i + 1)\n  in\n  aux 0\n\n(** Mapping functions. *)\n\nlet constant = function\n  | Const_char c -> Pconst_char c\n  | Const_string (s, d) -> Pconst_string (s, d)\n  | Const_int i -> Pconst_integer (string_of_int i, None)\n  | Const_int32 i -> Pconst_integer (Int32.to_string i, Some 'l')\n  | Const_int64 i -> Pconst_integer (Int64.to_string i, Some 'L')\n  | Const_bigint (sign, i) ->\n    Pconst_integer (Bigint_utils.to_string sign i, Some 'n')\n  | Const_float f -> Pconst_float (f, None)\n\nlet attribute sub (s, p) = (map_loc sub s, p)\nlet attributes sub l = List.map (sub.attribute sub) l\n\nlet structure sub str = List.map (sub.structure_item sub) str.str_items\n\nlet open_description sub od =\n  let loc = sub.location sub od.open_loc in\n  let attrs = sub.attributes sub od.open_attributes in\n  Opn.mk ~loc ~attrs ~override:od.open_override (map_loc sub od.open_txt)\n\nlet structure_item sub item =\n  let loc = sub.location sub item.str_loc in\n  let desc =\n    match item.str_desc with\n    | Tstr_eval (exp, attrs) -> Pstr_eval (sub.expr sub exp, attrs)\n    | Tstr_value 
(rec_flag, list) ->\n      Pstr_value (rec_flag, List.map (sub.value_binding sub) list)\n    | Tstr_primitive vd -> Pstr_primitive (sub.value_description sub vd)\n    | Tstr_type (rec_flag, list) ->\n      Pstr_type (rec_flag, List.map (sub.type_declaration sub) list)\n    | Tstr_typext tyext -> Pstr_typext (sub.type_extension sub tyext)\n    | Tstr_exception ext -> Pstr_exception (sub.extension_constructor sub ext)\n    | Tstr_module mb -> Pstr_module (sub.module_binding sub mb)\n    | Tstr_recmodule list ->\n      Pstr_recmodule (List.map (sub.module_binding sub) list)\n    | Tstr_modtype mtd -> Pstr_modtype (sub.module_type_declaration sub mtd)\n    | Tstr_open od -> Pstr_open (sub.open_description sub od)\n    | Tstr_class _list -> Pstr_class ()\n    | Tstr_class_type list ->\n      Pstr_class_type\n        (List.map\n           (fun (_id, _name, ct) -> sub.class_type_declaration sub ct)\n           list)\n    | Tstr_include incl -> Pstr_include (sub.include_declaration sub incl)\n    | Tstr_attribute x -> Pstr_attribute x\n  in\n  Str.mk ~loc desc\n\nlet value_description sub v =\n  let loc = sub.location sub v.val_loc in\n  let attrs = sub.attributes sub v.val_attributes in\n  Val.mk ~loc ~attrs ~prim:v.val_prim (map_loc sub v.val_name)\n    (sub.typ sub v.val_desc)\n\nlet module_binding sub mb =\n  let loc = sub.location sub mb.mb_loc in\n  let attrs = sub.attributes sub mb.mb_attributes in\n  Mb.mk ~loc ~attrs (map_loc sub mb.mb_name) (sub.module_expr sub mb.mb_expr)\n\nlet type_parameter sub (ct, v) = (sub.typ sub ct, v)\n\nlet type_declaration sub decl =\n  let loc = sub.location sub decl.typ_loc in\n  let attrs = sub.attributes sub decl.typ_attributes in\n  Type.mk ~loc ~attrs\n    ~params:(List.map (type_parameter sub) decl.typ_params)\n    ~cstrs:\n      (List.map\n         (fun (ct1, ct2, loc) ->\n           (sub.typ sub ct1, sub.typ sub ct2, sub.location sub loc))\n         decl.typ_cstrs)\n    ~kind:(sub.type_kind sub decl.typ_kind)\n    
~priv:decl.typ_private\n    ?manifest:(map_opt (sub.typ sub) decl.typ_manifest)\n    (map_loc sub decl.typ_name)\n\nlet type_kind sub tk =\n  match tk with\n  | Ttype_abstract -> Ptype_abstract\n  | Ttype_variant list ->\n    Ptype_variant (List.map (sub.constructor_declaration sub) list)\n  | Ttype_record list ->\n    Ptype_record (List.map (sub.label_declaration sub) list)\n  | Ttype_open -> Ptype_open\n\nlet constructor_arguments sub = function\n  | Cstr_tuple l -> Pcstr_tuple (List.map (sub.typ sub) l)\n  | Cstr_record l -> Pcstr_record (List.map (sub.label_declaration sub) l)\n\nlet constructor_declaration sub cd =\n  let loc = sub.location sub cd.cd_loc in\n  let attrs = sub.attributes sub cd.cd_attributes in\n  Type.constructor ~loc ~attrs\n    ~args:(constructor_arguments sub cd.cd_args)\n    ?res:(map_opt (sub.typ sub) cd.cd_res)\n    (map_loc sub cd.cd_name)\n\nlet label_declaration sub ld =\n  let loc = sub.location sub ld.ld_loc in\n  let attrs = sub.attributes sub ld.ld_attributes in\n  Type.field ~loc ~attrs ~mut:ld.ld_mutable (map_loc sub ld.ld_name)\n    (sub.typ sub ld.ld_type)\n\nlet type_extension sub tyext =\n  let attrs = sub.attributes sub tyext.tyext_attributes in\n  Te.mk ~attrs\n    ~params:(List.map (type_parameter sub) tyext.tyext_params)\n    ~priv:tyext.tyext_private\n    (map_loc sub tyext.tyext_txt)\n    (List.map (sub.extension_constructor sub) tyext.tyext_constructors)\n\nlet extension_constructor sub ext =\n  let loc = sub.location sub ext.ext_loc in\n  let attrs = sub.attributes sub ext.ext_attributes in\n  Te.constructor ~loc ~attrs (map_loc sub ext.ext_name)\n    (match ext.ext_kind with\n    | Text_decl (args, ret) ->\n      Pext_decl (constructor_arguments sub args, map_opt (sub.typ sub) ret)\n    | Text_rebind (_p, lid) -> Pext_rebind (map_loc sub lid))\n\nlet pattern sub pat =\n  let loc = sub.location sub pat.pat_loc in\n  (* todo: fix attributes on extras *)\n  let attrs = sub.attributes sub pat.pat_attributes in\n  let 
desc =\n    match pat with\n    | {pat_extra = [(Tpat_unpack, _, _attrs)]; pat_desc = Tpat_var (_, name); _}\n      ->\n      Ppat_unpack name\n    | {pat_extra = [(Tpat_type (_path, lid), _, _attrs)]; _} ->\n      Ppat_type (map_loc sub lid)\n    | {pat_extra = (Tpat_constraint ct, _, _attrs) :: rem; _} ->\n      Ppat_constraint (sub.pat sub {pat with pat_extra = rem}, sub.typ sub ct)\n    | _ -> (\n      match pat.pat_desc with\n      | Tpat_any -> Ppat_any\n      | Tpat_var (id, name) -> (\n        match (Ident.name id).[0] with\n        | 'A' .. 'Z' -> Ppat_unpack name\n        | _ -> Ppat_var name)\n      (* We transform (_ as x) in x if _ and x have the same location.\n         The compiler transforms (x:t) into (_ as x : t).\n         This avoids transforming a warning 27 into a 26.\n      *)\n      | Tpat_alias ({pat_desc = Tpat_any; pat_loc}, _id, name)\n        when pat_loc = pat.pat_loc ->\n        Ppat_var name\n      | Tpat_alias (pat, _id, name) -> Ppat_alias (sub.pat sub pat, name)\n      | Tpat_constant cst -> Ppat_constant (constant cst)\n      | Tpat_tuple list -> Ppat_tuple (List.map (sub.pat sub) list)\n      | Tpat_construct (lid, _, args) ->\n        Ppat_construct\n          ( map_loc sub lid,\n            match args with\n            | [] -> None\n            | [arg] -> Some (sub.pat sub arg)\n            | args -> Some (Pat.tuple ~loc (List.map (sub.pat sub) args)) )\n      | Tpat_variant (label, pato, _) ->\n        Ppat_variant (label, map_opt (sub.pat sub) pato)\n      | Tpat_record (list, closed) ->\n        Ppat_record\n          ( List.map\n              (fun (lid, _, pat) -> (map_loc sub lid, sub.pat sub pat))\n              list,\n            closed )\n      | Tpat_array list -> Ppat_array (List.map (sub.pat sub) list)\n      | Tpat_or (p1, p2, _) -> Ppat_or (sub.pat sub p1, sub.pat sub p2)\n      | Tpat_lazy p -> Ppat_lazy (sub.pat sub p))\n  in\n  Pat.mk ~loc ~attrs desc\n\nlet exp_extra sub (extra, loc, attrs) sexp =\n  let loc = 
sub.location sub loc in\n  let attrs = sub.attributes sub attrs in\n  let desc =\n    match extra with\n    | Texp_coerce (cty1, cty2) ->\n      Pexp_coerce (sexp, map_opt (sub.typ sub) cty1, sub.typ sub cty2)\n    | Texp_constraint cty -> Pexp_constraint (sexp, sub.typ sub cty)\n    | Texp_open (ovf, _path, lid, _) -> Pexp_open (ovf, map_loc sub lid, sexp)\n    | Texp_poly cto -> Pexp_poly (sexp, map_opt (sub.typ sub) cto)\n    | Texp_newtype s -> Pexp_newtype (mkloc s loc, sexp)\n  in\n  Exp.mk ~loc ~attrs desc\n\nlet cases sub l = List.map (sub.case sub) l\n\nlet case sub {c_lhs; c_guard; c_rhs} =\n  {\n    pc_lhs = sub.pat sub c_lhs;\n    pc_guard = map_opt (sub.expr sub) c_guard;\n    pc_rhs = sub.expr sub c_rhs;\n  }\n\nlet value_binding sub vb =\n  let loc = sub.location sub vb.vb_loc in\n  let attrs = sub.attributes sub vb.vb_attributes in\n  Vb.mk ~loc ~attrs (sub.pat sub vb.vb_pat) (sub.expr sub vb.vb_expr)\n\nlet expression sub exp =\n  let loc = sub.location sub exp.exp_loc in\n  let attrs = sub.attributes sub exp.exp_attributes in\n  let desc =\n    match exp.exp_desc with\n    | Texp_ident (_path, lid, _) -> Pexp_ident (map_loc sub lid)\n    | Texp_constant cst -> Pexp_constant (constant cst)\n    | Texp_let (rec_flag, list, exp) ->\n      Pexp_let\n        (rec_flag, List.map (sub.value_binding sub) list, sub.expr sub exp)\n    (* Pexp_function can't have a label, so we split in 3 cases. *)\n    (* One case, no guard: It's a fun. *)\n    | Texp_function\n        {arg_label; cases = [{c_lhs = p; c_guard = None; c_rhs = e}]; _} ->\n      Pexp_fun (arg_label, None, sub.pat sub p, sub.expr sub e)\n    (* No label: it's a function. 
*)\n    | Texp_function {arg_label = Nolabel; cases; _} ->\n      Pexp_function (sub.cases sub cases)\n    (* Mix of both, we generate `fun ~label:$name$ -> match $name$ with ...` *)\n    | Texp_function {arg_label = (Labelled s | Optional s) as label; cases; _}\n      ->\n      let name = fresh_name s exp.exp_env in\n      Pexp_fun\n        ( label,\n          None,\n          Pat.var ~loc {loc; txt = name},\n          Exp.match_ ~loc\n            (Exp.ident ~loc {loc; txt = Lident name})\n            (sub.cases sub cases) )\n    | Texp_apply (exp, list) ->\n      Pexp_apply\n        ( sub.expr sub exp,\n          List.fold_right\n            (fun (label, expo) list ->\n              match expo with\n              | None -> list\n              | Some exp -> (label, sub.expr sub exp) :: list)\n            list [] )\n    | Texp_match (exp, cases, exn_cases, _) ->\n      let merged_cases =\n        sub.cases sub cases\n        @ List.map\n            (fun c ->\n              let uc = sub.case sub c in\n              let pat = {uc.pc_lhs with ppat_desc = Ppat_exception uc.pc_lhs} in\n              {uc with pc_lhs = pat})\n            exn_cases\n      in\n      Pexp_match (sub.expr sub exp, merged_cases)\n    | Texp_try (exp, cases) -> Pexp_try (sub.expr sub exp, sub.cases sub cases)\n    | Texp_tuple list -> Pexp_tuple (List.map (sub.expr sub) list)\n    | Texp_construct (lid, _, args) ->\n      Pexp_construct\n        ( map_loc sub lid,\n          match args with\n          | [] -> None\n          | [arg] -> Some (sub.expr sub arg)\n          | args -> Some (Exp.tuple ~loc (List.map (sub.expr sub) args)) )\n    | Texp_variant (label, expo) ->\n      Pexp_variant (label, map_opt (sub.expr sub) expo)\n    | Texp_record {fields; extended_expression; _} ->\n      let list =\n        Array.fold_left\n          (fun l -> function\n            | _, Kept _ -> l\n            | _, Overridden (lid, exp) -> (lid, sub.expr sub exp) :: l)\n          [] fields\n      in\n      
Pexp_record (list, map_opt (sub.expr sub) extended_expression)\n    | Texp_field (exp, lid, _label) ->\n      Pexp_field (sub.expr sub exp, map_loc sub lid)\n    | Texp_setfield (exp1, lid, _label, exp2) ->\n      Pexp_setfield (sub.expr sub exp1, map_loc sub lid, sub.expr sub exp2)\n    | Texp_array list -> Pexp_array (List.map (sub.expr sub) list)\n    | Texp_ifthenelse (exp1, exp2, expo) ->\n      Pexp_ifthenelse\n        (sub.expr sub exp1, sub.expr sub exp2, map_opt (sub.expr sub) expo)\n    | Texp_sequence (exp1, exp2) ->\n      Pexp_sequence (sub.expr sub exp1, sub.expr sub exp2)\n    | Texp_while (exp1, exp2) ->\n      Pexp_while (sub.expr sub exp1, sub.expr sub exp2)\n    | Texp_for (_id, name, exp1, exp2, dir, exp3) ->\n      Pexp_for\n        (name, sub.expr sub exp1, sub.expr sub exp2, dir, sub.expr sub exp3)\n    | Texp_send (exp, meth, _) ->\n      Pexp_send\n        ( sub.expr sub exp,\n          match meth with\n          | Tmeth_name name -> mkloc name loc )\n    | Texp_new _ | Texp_instvar _ | Texp_setinstvar _ | Texp_override _ ->\n      assert false\n    | Texp_letmodule (_id, name, mexpr, exp) ->\n      Pexp_letmodule (name, sub.module_expr sub mexpr, sub.expr sub exp)\n    | Texp_letexception (ext, exp) ->\n      Pexp_letexception (sub.extension_constructor sub ext, sub.expr sub exp)\n    | Texp_assert exp -> Pexp_assert (sub.expr sub exp)\n    | Texp_lazy exp -> Pexp_lazy (sub.expr sub exp)\n    | Texp_object () -> assert false\n    | Texp_pack mexpr -> Pexp_pack (sub.module_expr sub mexpr)\n    | Texp_unreachable -> Pexp_unreachable\n    | Texp_extension_constructor (lid, _) ->\n      Pexp_extension\n        ( {txt = \"ocaml.extension_constructor\"; loc},\n          PStr [Str.eval ~loc (Exp.construct ~loc (map_loc sub lid) None)] )\n  in\n  List.fold_right (exp_extra sub) exp.exp_extra (Exp.mk ~loc ~attrs desc)\n\nlet package_type sub pack =\n  ( map_loc sub pack.pack_txt,\n    List.map (fun (s, ct) -> (s, sub.typ sub ct)) pack.pack_fields 
)\n\nlet module_type_declaration sub mtd =\n  let loc = sub.location sub mtd.mtd_loc in\n  let attrs = sub.attributes sub mtd.mtd_attributes in\n  Mtd.mk ~loc ~attrs\n    ?typ:(map_opt (sub.module_type sub) mtd.mtd_type)\n    (map_loc sub mtd.mtd_name)\n\nlet signature sub sg = List.map (sub.signature_item sub) sg.sig_items\n\nlet signature_item sub item =\n  let loc = sub.location sub item.sig_loc in\n  let desc =\n    match item.sig_desc with\n    | Tsig_value v -> Psig_value (sub.value_description sub v)\n    | Tsig_type (rec_flag, list) ->\n      Psig_type (rec_flag, List.map (sub.type_declaration sub) list)\n    | Tsig_typext tyext -> Psig_typext (sub.type_extension sub tyext)\n    | Tsig_exception ext -> Psig_exception (sub.extension_constructor sub ext)\n    | Tsig_module md -> Psig_module (sub.module_declaration sub md)\n    | Tsig_recmodule list ->\n      Psig_recmodule (List.map (sub.module_declaration sub) list)\n    | Tsig_modtype mtd -> Psig_modtype (sub.module_type_declaration sub mtd)\n    | Tsig_open od -> Psig_open (sub.open_description sub od)\n    | Tsig_include incl -> Psig_include (sub.include_description sub incl)\n    | Tsig_class () -> Psig_class ()\n    | Tsig_class_type list ->\n      Psig_class_type (List.map (sub.class_type_declaration sub) list)\n    | Tsig_attribute x -> Psig_attribute x\n  in\n  Sig.mk ~loc desc\n\nlet module_declaration sub md =\n  let loc = sub.location sub md.md_loc in\n  let attrs = sub.attributes sub md.md_attributes in\n  Md.mk ~loc ~attrs (map_loc sub md.md_name) (sub.module_type sub md.md_type)\n\nlet include_infos f sub incl =\n  let loc = sub.location sub incl.incl_loc in\n  let attrs = sub.attributes sub incl.incl_attributes in\n  Incl.mk ~loc ~attrs (f sub incl.incl_mod)\n\nlet include_declaration sub = include_infos sub.module_expr sub\nlet include_description sub = include_infos sub.module_type sub\n\nlet class_infos f sub ci =\n  let loc = sub.location sub ci.ci_loc in\n  let attrs = sub.attributes sub 
ci.ci_attributes in\n  Ci.mk ~loc ~attrs ~virt:ci.ci_virt\n    ~params:(List.map (type_parameter sub) ci.ci_params)\n    (map_loc sub ci.ci_id_name)\n    (f sub ci.ci_expr)\n\nlet class_type_declaration sub = class_infos sub.class_type sub\n\nlet module_type sub mty =\n  let loc = sub.location sub mty.mty_loc in\n  let attrs = sub.attributes sub mty.mty_attributes in\n  let desc =\n    match mty.mty_desc with\n    | Tmty_ident (_path, lid) -> Pmty_ident (map_loc sub lid)\n    | Tmty_alias (_path, lid) -> Pmty_alias (map_loc sub lid)\n    | Tmty_signature sg -> Pmty_signature (sub.signature sub sg)\n    | Tmty_functor (_id, name, mtype1, mtype2) ->\n      Pmty_functor\n        (name, map_opt (sub.module_type sub) mtype1, sub.module_type sub mtype2)\n    | Tmty_with (mtype, list) ->\n      Pmty_with\n        (sub.module_type sub mtype, List.map (sub.with_constraint sub) list)\n    | Tmty_typeof mexpr -> Pmty_typeof (sub.module_expr sub mexpr)\n  in\n  Mty.mk ~loc ~attrs desc\n\nlet with_constraint sub (_path, lid, cstr) =\n  match cstr with\n  | Twith_type decl ->\n    Pwith_type (map_loc sub lid, sub.type_declaration sub decl)\n  | Twith_module (_path, lid2) ->\n    Pwith_module (map_loc sub lid, map_loc sub lid2)\n  | Twith_typesubst decl ->\n    Pwith_typesubst (map_loc sub lid, sub.type_declaration sub decl)\n  | Twith_modsubst (_path, lid2) ->\n    Pwith_modsubst (map_loc sub lid, map_loc sub lid2)\n\nlet module_expr sub mexpr =\n  let loc = sub.location sub mexpr.mod_loc in\n  let attrs = sub.attributes sub mexpr.mod_attributes in\n  match mexpr.mod_desc with\n  | Tmod_constraint (m, _, Tmodtype_implicit, _) -> sub.module_expr sub m\n  | _ ->\n    let desc =\n      match mexpr.mod_desc with\n      | Tmod_ident (_p, lid) -> Pmod_ident (map_loc sub lid)\n      | Tmod_structure st -> Pmod_structure (sub.structure sub st)\n      | Tmod_functor (_id, name, mtype, mexpr) ->\n        Pmod_functor\n          ( name,\n            Misc.may_map (sub.module_type sub) 
mtype,\n            sub.module_expr sub mexpr )\n      | Tmod_apply (mexp1, mexp2, _) ->\n        Pmod_apply (sub.module_expr sub mexp1, sub.module_expr sub mexp2)\n      | Tmod_constraint (mexpr, _, Tmodtype_explicit mtype, _) ->\n        Pmod_constraint (sub.module_expr sub mexpr, sub.module_type sub mtype)\n      | Tmod_constraint (_mexpr, _, Tmodtype_implicit, _) -> assert false\n      | Tmod_unpack (exp, _pack) -> Pmod_unpack (sub.expr sub exp)\n      (* TODO , sub.package_type sub pack) *)\n    in\n    Mod.mk ~loc ~attrs desc\n\nlet class_type sub ct =\n  let loc = sub.location sub ct.cltyp_loc in\n  let attrs = sub.attributes sub ct.cltyp_attributes in\n  let desc =\n    match ct.cltyp_desc with\n    | Tcty_signature csg -> Pcty_signature (sub.class_signature sub csg)\n    | Tcty_constr (_path, lid, list) ->\n      Pcty_constr (map_loc sub lid, List.map (sub.typ sub) list)\n    | Tcty_arrow (label, ct, cl) ->\n      Pcty_arrow (label, sub.typ sub ct, sub.class_type sub cl)\n    | Tcty_open (ovf, _p, lid, _env, e) ->\n      Pcty_open (ovf, lid, sub.class_type sub e)\n  in\n  Cty.mk ~loc ~attrs desc\n\nlet class_signature sub cs =\n  {\n    pcsig_self = sub.typ sub cs.csig_self;\n    pcsig_fields = List.map (sub.class_type_field sub) cs.csig_fields;\n  }\n\nlet class_type_field sub ctf =\n  let loc = sub.location sub ctf.ctf_loc in\n  let attrs = sub.attributes sub ctf.ctf_attributes in\n  let desc =\n    match ctf.ctf_desc with\n    | Tctf_inherit ct -> Pctf_inherit (sub.class_type sub ct)\n    | Tctf_val (s, mut, virt, ct) ->\n      Pctf_val (mkloc s loc, mut, virt, sub.typ sub ct)\n    | Tctf_method (s, priv, virt, ct) ->\n      Pctf_method (mkloc s loc, priv, virt, sub.typ sub ct)\n    | Tctf_constraint (ct1, ct2) ->\n      Pctf_constraint (sub.typ sub ct1, sub.typ sub ct2)\n    | Tctf_attribute x -> Pctf_attribute x\n  in\n  Ctf.mk ~loc ~attrs desc\n\nlet core_type sub ct =\n  let loc = sub.location sub ct.ctyp_loc in\n  let attrs = sub.attributes sub 
ct.ctyp_attributes in\n  let desc =\n    match ct.ctyp_desc with\n    | Ttyp_any -> Ptyp_any\n    | Ttyp_var s -> Ptyp_var s\n    | Ttyp_arrow (label, ct1, ct2) ->\n      Ptyp_arrow (label, sub.typ sub ct1, sub.typ sub ct2)\n    | Ttyp_tuple list -> Ptyp_tuple (List.map (sub.typ sub) list)\n    | Ttyp_constr (_path, lid, list) ->\n      Ptyp_constr (map_loc sub lid, List.map (sub.typ sub) list)\n    | Ttyp_object (list, o) ->\n      Ptyp_object (List.map (sub.object_field sub) list, o)\n    | Ttyp_class (_path, lid, list) ->\n      Ptyp_class (map_loc sub lid, List.map (sub.typ sub) list)\n    | Ttyp_alias (ct, s) -> Ptyp_alias (sub.typ sub ct, s)\n    | Ttyp_variant (list, bool, labels) ->\n      Ptyp_variant (List.map (sub.row_field sub) list, bool, labels)\n    | Ttyp_poly (list, ct) ->\n      let list = List.map (fun v -> mkloc v loc) list in\n      Ptyp_poly (list, sub.typ sub ct)\n    | Ttyp_package pack -> Ptyp_package (sub.package_type sub pack)\n  in\n  Typ.mk ~loc ~attrs desc\n\nlet row_field sub rf =\n  match rf with\n  | Ttag (label, attrs, bool, list) ->\n    Rtag (label, sub.attributes sub attrs, bool, List.map (sub.typ sub) list)\n  | Tinherit ct -> Rinherit (sub.typ sub ct)\n\nlet object_field sub ofield =\n  match ofield with\n  | OTtag (label, attrs, ct) ->\n    Otag (label, sub.attributes sub attrs, sub.typ sub ct)\n  | OTinherit ct -> Oinherit (sub.typ sub ct)\n\nlet location _sub l = l\n\nlet default_mapper =\n  {\n    attribute;\n    attributes;\n    structure;\n    structure_item;\n    module_expr;\n    signature;\n    signature_item;\n    module_type;\n    with_constraint;\n    class_type;\n    class_type_field;\n    class_signature;\n    class_type_declaration;\n    type_declaration;\n    type_kind;\n    typ = core_type;\n    type_extension;\n    extension_constructor;\n    value_description;\n    pat = pattern;\n    expr = expression;\n    module_declaration;\n    module_type_declaration;\n    module_binding;\n    package_type;\n    
open_description;\n    include_description;\n    include_declaration;\n    value_binding;\n    constructor_declaration;\n    label_declaration;\n    cases;\n    case;\n    location;\n    row_field;\n    object_field;\n  }\n\nlet untype_structure ?(mapper = default_mapper) structure =\n  mapper.structure mapper structure\n\nlet untype_signature ?(mapper = default_mapper) signature =\n  mapper.signature mapper signature\n"
  },
  {
    "path": "analysis/vendor/ml/untypeast.mli",
    "content": "(**************************************************************************)\n(*                                                                        *)\n(*                                 OCaml                                  *)\n(*                                                                        *)\n(*      Thomas Gazagnaire (OCamlPro), Fabrice Le Fessant (INRIA Saclay)   *)\n(*                                                                        *)\n(*   Copyright 2007 Institut National de Recherche en Informatique et     *)\n(*     en Automatique.                                                    *)\n(*                                                                        *)\n(*   All rights reserved.  This file is distributed under the terms of    *)\n(*   the GNU Lesser General Public License version 2.1, with the          *)\n(*   special exception on linking described in the file LICENSE.          *)\n(*                                                                        *)\n(**************************************************************************)\n\nopen Parsetree\n\nval lident_of_path : Path.t -> Longident.t\n\ntype mapper = {\n  attribute: mapper -> Typedtree.attribute -> attribute;\n  attributes: mapper -> Typedtree.attribute list -> attribute list;\n  case: mapper -> Typedtree.case -> case;\n  cases: mapper -> Typedtree.case list -> case list;\n  class_signature: mapper -> Typedtree.class_signature -> class_signature;\n  class_type: mapper -> Typedtree.class_type -> class_type;\n  class_type_declaration:\n    mapper -> Typedtree.class_type_declaration -> class_type_declaration;\n  class_type_field: mapper -> Typedtree.class_type_field -> class_type_field;\n  constructor_declaration:\n    mapper -> Typedtree.constructor_declaration -> constructor_declaration;\n  expr: mapper -> Typedtree.expression -> expression;\n  extension_constructor:\n    mapper -> Typedtree.extension_constructor -> extension_constructor;\n  
include_declaration:\n    mapper -> Typedtree.include_declaration -> include_declaration;\n  include_description:\n    mapper -> Typedtree.include_description -> include_description;\n  label_declaration: mapper -> Typedtree.label_declaration -> label_declaration;\n  location: mapper -> Location.t -> Location.t;\n  module_binding: mapper -> Typedtree.module_binding -> module_binding;\n  module_declaration:\n    mapper -> Typedtree.module_declaration -> module_declaration;\n  module_expr: mapper -> Typedtree.module_expr -> module_expr;\n  module_type: mapper -> Typedtree.module_type -> module_type;\n  module_type_declaration:\n    mapper -> Typedtree.module_type_declaration -> module_type_declaration;\n  package_type: mapper -> Typedtree.package_type -> package_type;\n  open_description: mapper -> Typedtree.open_description -> open_description;\n  pat: mapper -> Typedtree.pattern -> pattern;\n  row_field: mapper -> Typedtree.row_field -> row_field;\n  object_field: mapper -> Typedtree.object_field -> object_field;\n  signature: mapper -> Typedtree.signature -> signature;\n  signature_item: mapper -> Typedtree.signature_item -> signature_item;\n  structure: mapper -> Typedtree.structure -> structure;\n  structure_item: mapper -> Typedtree.structure_item -> structure_item;\n  typ: mapper -> Typedtree.core_type -> core_type;\n  type_declaration: mapper -> Typedtree.type_declaration -> type_declaration;\n  type_extension: mapper -> Typedtree.type_extension -> type_extension;\n  type_kind: mapper -> Typedtree.type_kind -> type_kind;\n  value_binding: mapper -> Typedtree.value_binding -> value_binding;\n  value_description: mapper -> Typedtree.value_description -> value_description;\n  with_constraint:\n    mapper ->\n    Path.t * Longident.t Location.loc * Typedtree.with_constraint ->\n    with_constraint;\n}\n\nval default_mapper : mapper\n\nval untype_structure : ?mapper:mapper -> Typedtree.structure -> structure\nval untype_signature : ?mapper:mapper -> 
Typedtree.signature -> signature\n\nval constant : Asttypes.constant -> Parsetree.constant\n"
  },
  {
    "path": "analysis/vendor/ml/variant_coercion.ml",
    "content": "(* TODO: Improve error messages? Say why we can't coerce. *)\n\n(* Right now we only allow coercing to primitives string/int/float *)\nlet can_coerce_primitive (path : Path.t) =\n  Path.same path Predef.path_string\n  || Path.same path Predef.path_int\n  || Path.same path Predef.path_float\n  || Path.same path Predef.path_bigint\n\nlet check_paths_same p1 p2 target_path =\n  Path.same p1 target_path && Path.same p2 target_path\n\nlet variant_has_catch_all_case\n    (constructors : Types.constructor_declaration list) path_is_same =\n  let has_catch_all_string_case (c : Types.constructor_declaration) =\n    let args = c.cd_args in\n    match args with\n    | Cstr_tuple [{desc = Tconstr (p, [], _)}] -> path_is_same p\n    | _ -> false\n  in\n\n  constructors |> List.exists has_catch_all_string_case\n\nlet variant_has_relevant_primitive_catch_all\n    (constructors : Types.constructor_declaration list) =\n  variant_has_catch_all_case constructors can_coerce_primitive\n\n(* Checks if every case of the variant has the same runtime representation as the target type. 
*)\nlet variant_has_same_runtime_representation_as_target ~(target_path : Path.t)\n    ~unboxed (constructors : Types.constructor_declaration list) =\n  (* Helper function to check if a constructor has the same runtime representation as the target type *)\n  let has_same_runtime_representation (c : Types.constructor_declaration) =\n    let args = c.cd_args in\n    let as_payload = Ast_untagged_variants.process_tag_type c.cd_attributes in\n\n    match args with\n    | Cstr_tuple [{desc = Tconstr (p, [], _)}] when unboxed ->\n      let path_same = check_paths_same p target_path in\n      (* unboxed String(string) :> string *)\n      path_same Predef.path_string\n      (* unboxed Number(float) :> float *)\n      || path_same Predef.path_float\n      ||\n      (* unboxed BigInt(bigint) :> bigint *)\n      path_same Predef.path_bigint\n    | Cstr_tuple [] -> (\n      (* Check that @as payloads match with the target path to coerce to.\n           No @as means the default encoding, which is string *)\n      match as_payload with\n      | None | Some (String _) -> Path.same target_path Predef.path_string\n      | Some (Int _) -> Path.same target_path Predef.path_int\n      | Some (Float _) -> Path.same target_path Predef.path_float\n      | Some (BigInt _) -> Path.same target_path Predef.path_bigint\n      | Some (Null | Undefined | Bool _ | Untagged _) -> false)\n    | _ -> false\n  in\n\n  List.for_all has_same_runtime_representation constructors\n\nlet can_try_coerce_variant_to_primitive\n    ((_, p, typedecl) : Path.t * Path.t * Types.type_declaration) =\n  match typedecl with\n  | {type_kind = Type_variant constructors; type_params = []; type_attributes}\n    when Path.name p <> \"bool\" ->\n    (* bool is represented as a variant internally, so we need to account for that *)\n    Some (constructors, type_attributes |> Ast_untagged_variants.has_untagged)\n  | _ -> None\n\nlet can_try_coerce_variant_to_primitive_opt p =\n  match p with\n  | None -> None\n  | Some p -> 
can_try_coerce_variant_to_primitive p\n\nlet variant_representation_matches (c1_attrs : Parsetree.attributes)\n    (c2_attrs : Parsetree.attributes) =\n  match\n    ( Ast_untagged_variants.process_tag_type c1_attrs,\n      Ast_untagged_variants.process_tag_type c2_attrs )\n  with\n  | None, None -> true\n  | Some s1, Some s2 when s1 = s2 -> true\n  | _ -> false\n\ntype variant_configuration_error =\n  | Untagged of {left_is_unboxed: bool}\n  | TagName of {left_tag: string option; right_tag: string option}\n\ntype variant_error =\n  | VariantError of {\n      left_loc: Location.t;\n      right_loc: Location.t;\n      error: variant_configuration_error;\n      is_spread_context: bool;\n    }\n\nexception VariantConfigurationError of variant_error\n\nlet variant_configuration_can_be_coerced (a1 : Parsetree.attributes)\n    (a2 : Parsetree.attributes) =\n  let unboxed =\n    match\n      ( Ast_untagged_variants.process_untagged a1,\n        Ast_untagged_variants.process_untagged a2 )\n    with\n    | true, true | false, false -> true\n    | _ -> false\n  in\n  if not unboxed then false\n  else\n    let tag =\n      match\n        ( Ast_untagged_variants.process_tag_name a1,\n          Ast_untagged_variants.process_tag_name a2 )\n      with\n      | Some tag1, Some tag2 when tag1 = tag2 -> true\n      | None, None -> true\n      | _ -> false\n    in\n    if not tag then false else true\n\nlet variant_configuration_can_be_coerced_raises ~is_spread_context ~left_loc\n    ~right_loc ~(left_attributes : Parsetree.attributes)\n    ~(right_attributes : Parsetree.attributes) =\n  (match\n     ( Ast_untagged_variants.process_untagged left_attributes,\n       Ast_untagged_variants.process_untagged right_attributes )\n   with\n  | true, true | false, false -> ()\n  | left, _right ->\n    raise\n      (VariantConfigurationError\n         (VariantError\n            {\n              is_spread_context;\n              left_loc;\n              right_loc;\n              error = Untagged 
{left_is_unboxed = left};\n            })));\n\n  match\n    ( Ast_untagged_variants.process_tag_name left_attributes,\n      Ast_untagged_variants.process_tag_name right_attributes )\n  with\n  | Some host_tag, Some spread_tag when host_tag = spread_tag -> ()\n  | None, None -> ()\n  | left_tag, right_tag ->\n    raise\n      (VariantConfigurationError\n         (VariantError\n            {\n              is_spread_context;\n              left_loc;\n              right_loc;\n              error = TagName {left_tag; right_tag};\n            }))\n"
  },
  {
    "path": "analysis/vendor/ml/variant_type_spread.ml",
    "content": "let mk_constructor_comes_from_spread_attr () : Parsetree.attribute =\n  (Location.mknoloc \"res.constructor_from_spread\", PStr [])\n\ntype variant_type_spread_error =\n  | CouldNotFindType\n  | HasTypeParams\n  | DuplicateConstructor of {\n      variant_with_overlapping_constructor: string;\n      overlapping_constructor_name: string;\n    }\n\nexception VariantTypeSpreadError of Location.t * variant_type_spread_error\n\n(* Spreads in variants are parsed as constructors named \"...\", with a single payload that's an identifier\n   pointing to the type that's spread. We need to expand those constructors as soon as we can, before type\n   checking. So, here we look for constructors named \"...\", look up their type, and add the constructors that\n   type itself has.\n*)\n\nlet map_constructors ~(sdecl : Parsetree.type_declaration) ~all_constructors env\n    (c : Parsetree.constructor_declaration) =\n  match c with\n  | {\n   pcd_name = {txt = \"...\"};\n   pcd_args = Pcstr_tuple [{ptyp_loc; ptyp_desc = Ptyp_constr (loc, _)}];\n  } -> (\n    (* This is a variant type spread constructor. Look up its type *)\n    let _, type_decl =\n      try Typetexp.find_type env ptyp_loc loc.txt\n      with _ -> raise (VariantTypeSpreadError (loc.loc, CouldNotFindType))\n    in\n\n    match type_decl with\n    | {type_kind = Type_variant cstrs; type_attributes; type_params} ->\n      if List.length type_params > 0 then\n        raise (VariantTypeSpreadError (loc.loc, HasTypeParams));\n\n      Variant_coercion.variant_configuration_can_be_coerced_raises\n        ~is_spread_context:true ~left_loc:loc.loc\n        ~left_attributes:type_attributes\n        ~right_attributes:sdecl.ptype_attributes ~right_loc:sdecl.ptype_loc;\n      (* We add back the spread constructor here so the type checker\n         helps us resolve its type (we'll obviously filter this out\n         at a later stage). 
We also append the type identifier so we\n         can have multiple spreads, since each constructor name needs\n         to be unique. *)\n      let variant_name = Longident.flatten loc.txt |> String.concat \".\" in\n      let spread_constructor_name = \"...\" ^ variant_name in\n      {c with pcd_name = {c.pcd_name with txt = spread_constructor_name}}\n      :: (cstrs\n         |> List.map\n              (fun\n                (cstr : Types.constructor_declaration)\n                :\n                Parsetree.constructor_declaration\n              ->\n                match Hashtbl.find_opt all_constructors cstr.cd_id.name with\n                | Some _ ->\n                  raise\n                    (VariantTypeSpreadError\n                       ( loc.loc,\n                         DuplicateConstructor\n                           {\n                             overlapping_constructor_name = cstr.cd_id.name;\n                             variant_with_overlapping_constructor = variant_name;\n                           } ))\n                | None ->\n                  Hashtbl.add all_constructors cstr.cd_id.name ();\n                  {\n                    (* This will mark this constructor as originating from a variant type spread.\n                       We use that hint to fill in the real, typed constructor arguments (if any)\n                       at a later stage when that information is available. *)\n                    pcd_attributes =\n                      mk_constructor_comes_from_spread_attr ()\n                      :: cstr.cd_attributes;\n                    pcd_loc = cstr.cd_loc;\n                    pcd_res = None;\n                    (* It's important that we _don't_ fill in pcd_args here, since we have no way to produce\n                       a valid set of args for the parsetree at this stage. 
Inserting dummies here instead\n                       of later means that our dummies would end up being typechecked, and we don't want that.\n\n                       We'll fill in the correct arg types in the type checked version of this constructor later. *)\n                    pcd_args = Pcstr_tuple [];\n                    pcd_name = Location.mkloc cstr.cd_id.name cstr.cd_loc;\n                  }))\n    | _ -> [c])\n  | _ ->\n    Hashtbl.add all_constructors c.pcd_name.txt ();\n    [c]\n\nlet expand_variant_spreads (env : Env.t)\n    (sdecl_list : Parsetree.type_declaration list) =\n  sdecl_list\n  |> List.map (fun (sdecl : Parsetree.type_declaration) ->\n         match sdecl with\n         | {ptype_kind = Ptype_variant constructors} ->\n           let has_spread = ref false in\n           let all_constructors = Hashtbl.create (List.length constructors) in\n           constructors\n           |> List.iter (fun (c : Parsetree.constructor_declaration) ->\n                  if c.pcd_name.txt = \"...\" then has_spread := true\n                  else Hashtbl.add all_constructors c.pcd_name.txt ());\n           if !has_spread = false then sdecl\n           else\n             {\n               sdecl with\n               ptype_kind =\n                 Ptype_variant\n                   (constructors\n                   |> List.map (map_constructors ~all_constructors ~sdecl env)\n                   |> List.concat);\n             }\n         | _ -> sdecl)\n\nlet constructor_is_from_spread (attrs : Parsetree.attributes) =\n  attrs\n  |> List.exists (fun (a : Parsetree.attribute) ->\n         match a with\n         | {txt = \"res.constructor_from_spread\"}, PStr [] -> true\n         | _ -> false)\n\nlet remove_is_spread_attribute (attr : Parsetree.attribute) =\n  match attr with\n  | {txt = \"res.constructor_from_spread\"}, PStr [] -> false\n  | _ -> false\n\n(* Add dummy arguments of the right length to constructors that comes\n   from spreads, and that has arguments. 
*)\nlet expand_dummy_constructor_args (sdecl_list : Parsetree.type_declaration list)\n    (decls : (Ident.t * Types.type_declaration) list) =\n  List.map2\n    (fun sdecl (_, decl) ->\n      match (sdecl, decl) with\n      | ( {Parsetree.ptype_kind = Ptype_variant c1},\n          {Types.type_kind = Type_variant c2} ) ->\n        {\n          sdecl with\n          ptype_kind =\n            Ptype_variant\n              (c1\n              |> List.map (fun (c : Parsetree.constructor_declaration) ->\n                     if constructor_is_from_spread c.pcd_attributes then\n                       match\n                         c2\n                         |> List.find_opt\n                              (fun (cc : Types.constructor_declaration) ->\n                                Ident.name cc.cd_id = c.pcd_name.txt)\n                       with\n                       | None -> c\n                       | Some constructor -> (\n                         match constructor with\n                         | {cd_args = Cstr_record lbls} ->\n                           {\n                             c with\n                             pcd_attributes =\n                               c.pcd_attributes\n                               |> List.filter remove_is_spread_attribute;\n                             pcd_args =\n                               Pcstr_record\n                                 (lbls\n                                 |> List.map\n                                      (fun (l : Types.label_declaration) ->\n                                        {\n                                          Parsetree.pld_name = c.pcd_name;\n                                          pld_mutable = l.ld_mutable;\n                                          pld_loc = l.ld_loc;\n                                          pld_attributes = [];\n                                          pld_type =\n                                            {\n                                              
ptyp_desc = Ptyp_any;\n                                              ptyp_loc = l.ld_loc;\n                                              ptyp_attributes = [];\n                                            };\n                                        }));\n                           }\n                         | {cd_args = Cstr_tuple args} ->\n                           {\n                             c with\n                             pcd_attributes =\n                               c.pcd_attributes\n                               |> List.filter remove_is_spread_attribute;\n                             pcd_args =\n                               Pcstr_tuple\n                                 (args\n                                 |> List.map (fun _t ->\n                                        {\n                                          Parsetree.ptyp_loc = c.pcd_loc;\n                                          ptyp_attributes = [];\n                                          ptyp_desc = Ptyp_any;\n                                        }));\n                           })\n                     else c));\n        }\n      | _ -> sdecl)\n    sdecl_list decls\n"
  },
  {
    "path": "analysis/vendor/res_syntax/dune",
    "content": "(library\n (name syntax)\n (wrapped false)\n (flags\n  (:standard -w +a-4-42-40-9-48-70))\n (libraries ml))\n"
  },
  {
    "path": "analysis/vendor/res_syntax/jsx_common.ml",
    "content": "open Asttypes\nopen Parsetree\n\ntype jsx_config = {\n  mutable version: int;\n  mutable module_: string;\n  mutable mode: string;\n  mutable nested_modules: string list;\n  mutable has_component: bool;\n}\n\n(* Helper method to look up the [@react.component] attribute *)\nlet has_attr (loc, _) =\n  match loc.txt with\n  | \"react.component\" | \"jsx.component\" -> true\n  | _ -> false\n\n(* Iterate over the attributes and try to find the [@react.component] attribute *)\nlet has_attr_on_binding {pvb_attributes} =\n  List.find_opt has_attr pvb_attributes <> None\n\nlet core_type_of_attrs attributes =\n  List.find_map\n    (fun ({txt}, payload) ->\n      match (txt, payload) with\n      | (\"react.component\" | \"jsx.component\"), PTyp core_type -> Some core_type\n      | _ -> None)\n    attributes\n\nlet typ_vars_of_core_type {ptyp_desc} =\n  match ptyp_desc with\n  | Ptyp_constr (_, core_types) ->\n    List.filter\n      (fun {ptyp_desc} ->\n        match ptyp_desc with\n        | Ptyp_var _ -> true\n        | _ -> false)\n      core_types\n  | _ -> []\n\nlet raise_error ~loc msg = Location.raise_errorf ~loc msg\n\nlet raise_error_multiple_component ~loc =\n  raise_error ~loc\n    \"Only one component definition is allowed for each module. 
Move to a \\\n     submodule or other file if necessary.\"\n\nlet optional_attr = ({txt = \"res.optional\"; loc = Location.none}, PStr [])\n\nlet extract_uncurried typ =\n  if Ast_uncurried.core_type_is_uncurried_fun typ then\n    let _arity, t = Ast_uncurried.core_type_extract_uncurried_fun typ in\n    t\n  else typ\n\nlet remove_arity binding =\n  let rec remove_arity_record expr =\n    match expr.pexp_desc with\n    | _ when Ast_uncurried.expr_is_uncurried_fun expr ->\n      Ast_uncurried.expr_extract_uncurried_fun expr\n    | Pexp_newtype (label, e) ->\n      {expr with pexp_desc = Pexp_newtype (label, remove_arity_record e)}\n    | Pexp_apply (forward_ref, [(label, e)]) ->\n      {\n        expr with\n        pexp_desc = Pexp_apply (forward_ref, [(label, remove_arity_record e)]);\n      }\n    | _ -> expr\n  in\n  {binding with pvb_expr = remove_arity_record binding.pvb_expr}\n\nlet async_component ~async expr =\n  if async then\n    let open Ast_helper in\n    Exp.apply\n      (Exp.ident\n         {\n           loc = Location.none;\n           txt = Ldot (Lident \"JsxPPXReactSupport\", \"asyncComponent\");\n         })\n      [(Nolabel, expr)]\n  else expr\n"
  },
  {
    "path": "analysis/vendor/res_syntax/jsx_ppx.ml",
    "content": "open Ast_mapper\nopen Asttypes\nopen Parsetree\nopen Longident\n\nlet get_payload_fields payload =\n  match payload with\n  | PStr\n      ({\n         pstr_desc =\n           Pstr_eval ({pexp_desc = Pexp_record (record_fields, None)}, _);\n       }\n      :: _rest) ->\n    record_fields\n  | _ -> []\n\ntype config_key = Int | String\n\nlet get_jsx_config_by_key ~key ~type_ record_fields =\n  let values =\n    List.filter_map\n      (fun ((lid, expr) : Longident.t Location.loc * expression) ->\n        match (type_, lid, expr) with\n        | ( Int,\n            {txt = Lident k},\n            {pexp_desc = Pexp_constant (Pconst_integer (value, None))} )\n          when k = key ->\n          Some value\n        | ( String,\n            {txt = Lident k},\n            (* accept both normal strings and \"js\" strings *)\n            {pexp_desc = Pexp_constant (Pconst_string (value, _))} )\n          when k = key ->\n          Some value\n        | _ -> None)\n      record_fields\n  in\n  match values with\n  | [] -> None\n  | [v] | v :: _ -> Some v\n\nlet get_int ~key fields =\n  match fields |> get_jsx_config_by_key ~key ~type_:Int with\n  | None -> None\n  | Some s -> int_of_string_opt s\n\nlet get_string ~key fields = fields |> get_jsx_config_by_key ~key ~type_:String\n\nlet update_config config payload =\n  let fields = get_payload_fields payload in\n  let module_raw = get_string ~key:\"module_\" fields in\n  let is_generic =\n    match module_raw |> Option.map (fun m -> String.lowercase_ascii m) with\n    | Some \"react\" | None -> false\n    | Some _ -> true\n  in\n  (match (is_generic, get_int ~key:\"version\" fields) with\n  | true, _ -> config.Jsx_common.version <- 4\n  | false, Some i -> config.Jsx_common.version <- i\n  | _ -> ());\n  (match module_raw with\n  | None -> ()\n  | Some s -> config.module_ <- s);\n  match (is_generic, get_string ~key:\"mode\" fields) with\n  | true, _ -> config.mode <- \"automatic\"\n  | false, Some s -> 
config.mode <- s\n  | _ -> ()\n\nlet is_jsx_config_attr ((loc, _) : attribute) = loc.txt = \"jsxConfig\"\n\nlet process_config_attribute attribute config =\n  if is_jsx_config_attr attribute then update_config config (snd attribute)\n\nlet get_mapper ~config =\n  let ( expr3,\n        module_binding3,\n        transform_signature_item3,\n        transform_structure_item3 ) =\n    Reactjs_jsx_v3.jsx_mapper ~config\n  in\n  let ( expr4,\n        module_binding4,\n        transform_signature_item4,\n        transform_structure_item4 ) =\n    Jsx_v4.jsx_mapper ~config\n  in\n\n  let expr mapper e =\n    match config.version with\n    | 3 -> expr3 mapper e\n    | 4 -> expr4 mapper e\n    | _ -> default_mapper.expr mapper e\n  in\n  let module_binding mapper mb =\n    match config.version with\n    | 3 -> module_binding3 mapper mb\n    | 4 -> module_binding4 mapper mb\n    | _ -> default_mapper.module_binding mapper mb\n  in\n  let save_config () =\n    {\n      config with\n      version = config.version;\n      module_ = config.module_;\n      mode = config.mode;\n      has_component = config.has_component;\n    }\n  in\n  let restore_config old_config =\n    config.version <- old_config.Jsx_common.version;\n    config.module_ <- old_config.module_;\n    config.mode <- old_config.mode;\n    config.has_component <- old_config.has_component\n  in\n  let signature mapper items =\n    let old_config = save_config () in\n    config.has_component <- false;\n    let result =\n      List.map\n        (fun item ->\n          (match item.psig_desc with\n          | Psig_attribute attr -> process_config_attribute attr config\n          | _ -> ());\n          let item = default_mapper.signature_item mapper item in\n          if config.version = 3 then transform_signature_item3 item\n          else if config.version = 4 then transform_signature_item4 item\n          else [item])\n        items\n      |> List.flatten\n    in\n    restore_config old_config;\n    result\n  in\n  let 
structure mapper items =\n    let old_config = save_config () in\n    config.has_component <- false;\n    let result =\n      List.map\n        (fun item ->\n          (match item.pstr_desc with\n          | Pstr_attribute attr -> process_config_attribute attr config\n          | _ -> ());\n          let item = default_mapper.structure_item mapper item in\n          if config.version = 3 then transform_structure_item3 item\n          else if config.version = 4 then transform_structure_item4 item\n          else [item])\n        items\n      |> List.flatten\n    in\n    restore_config old_config;\n    result\n  in\n\n  {default_mapper with expr; module_binding; signature; structure}\n\nlet rewrite_implementation ~jsx_version ~jsx_module ~jsx_mode\n    (code : Parsetree.structure) : Parsetree.structure =\n  let config =\n    {\n      Jsx_common.version = jsx_version;\n      module_ = jsx_module;\n      mode = jsx_mode;\n      nested_modules = [];\n      has_component = false;\n    }\n  in\n  let mapper = get_mapper ~config in\n  mapper.structure mapper code\n\nlet rewrite_signature ~jsx_version ~jsx_module ~jsx_mode\n    (code : Parsetree.signature) : Parsetree.signature =\n  let config =\n    {\n      Jsx_common.version = jsx_version;\n      module_ = jsx_module;\n      mode = jsx_mode;\n      nested_modules = [];\n      has_component = false;\n    }\n  in\n  let mapper = get_mapper ~config in\n  mapper.signature mapper code\n"
  },
  {
    "path": "analysis/vendor/res_syntax/jsx_ppx.mli",
    "content": "(*\n  This is the module that handles turning Reason JSX' agnostic function call into\n  a ReasonReact-specific function call. Aka, this is a macro, using OCaml's ppx\n  facilities; https://whitequark.org/blog/2014/04/16/a-guide-to-extension-\n  points-in-ocaml/\n  You wouldn't use this file directly; it's used by ReScript's\n  rescript.json. Specifically, there's a field called `react-jsx` inside the\n  field `reason`, which enables this ppx through some internal call in bsb\n*)\n\nval rewrite_implementation :\n  jsx_version:int ->\n  jsx_module:string ->\n  jsx_mode:string ->\n  Parsetree.structure ->\n  Parsetree.structure\n\nval rewrite_signature :\n  jsx_version:int ->\n  jsx_module:string ->\n  jsx_mode:string ->\n  Parsetree.signature ->\n  Parsetree.signature\n"
  },
  {
    "path": "analysis/vendor/res_syntax/jsx_v4.ml",
    "content": "open! Ast_helper\nopen Ast_mapper\nopen Asttypes\nopen Parsetree\nopen Longident\n\nlet module_access_name config value =\n  String.capitalize_ascii config.Jsx_common.module_ ^ \".\" ^ value\n  |> Longident.parse\n\nlet nolabel = Nolabel\n\nlet labelled str = Labelled str\n\nlet is_optional str =\n  match str with\n  | Optional _ -> true\n  | _ -> false\n\nlet is_labelled str =\n  match str with\n  | Labelled _ -> true\n  | _ -> false\n\nlet is_forward_ref = function\n  | {pexp_desc = Pexp_ident {txt = Ldot (Lident \"React\", \"forwardRef\")}} -> true\n  | _ -> false\n\nlet get_label str =\n  match str with\n  | Optional str | Labelled str -> str\n  | Nolabel -> \"\"\n\nlet optional_attrs = [Jsx_common.optional_attr]\n\nlet constant_string ~loc str =\n  Ast_helper.Exp.constant ~loc (Pconst_string (str, None))\n\n(* {} empty record *)\nlet empty_record ~loc = Exp.record ~loc [] None\n\nlet unit_expr ~loc = Exp.construct ~loc (Location.mkloc (Lident \"()\") loc) None\n\nlet safe_type_from_value value_str =\n  let value_str = get_label value_str in\n  if value_str = \"\" || (value_str.[0] [@doesNotRaise]) <> '_' then value_str\n  else \"T\" ^ value_str\n\nlet ref_type_var loc = Typ.var ~loc \"ref\"\n\nlet ref_type loc =\n  Typ.constr ~loc\n    {loc; txt = Ldot (Ldot (Lident \"Js\", \"Nullable\"), \"t\")}\n    [ref_type_var loc]\n\ntype 'a children = ListLiteral of 'a | Exact of 'a\n\n(* if children is a list, convert it to an array while mapping each element. 
If not, just map over it, as usual *)\nlet transform_children_if_list_upper ~mapper the_list =\n  let rec transformChildren_ the_list accum =\n    (* not in the sense of converting a list to an array; convert the AST\n       reprensentation of a list to the AST reprensentation of an array *)\n    match the_list with\n    | {pexp_desc = Pexp_construct ({txt = Lident \"[]\"}, None)} -> (\n      match accum with\n      | [single_element] -> Exact single_element\n      | accum -> ListLiteral (Exp.array (List.rev accum)))\n    | {\n     pexp_desc =\n       Pexp_construct\n         ({txt = Lident \"::\"}, Some {pexp_desc = Pexp_tuple [v; acc]});\n    } ->\n      transformChildren_ acc (mapper.expr mapper v :: accum)\n    | not_a_list -> Exact (mapper.expr mapper not_a_list)\n  in\n  transformChildren_ the_list []\n\nlet transform_children_if_list ~mapper the_list =\n  let rec transformChildren_ the_list accum =\n    (* not in the sense of converting a list to an array; convert the AST\n       reprensentation of a list to the AST reprensentation of an array *)\n    match the_list with\n    | {pexp_desc = Pexp_construct ({txt = Lident \"[]\"}, None)} ->\n      Exp.array (List.rev accum)\n    | {\n     pexp_desc =\n       Pexp_construct\n         ({txt = Lident \"::\"}, Some {pexp_desc = Pexp_tuple [v; acc]});\n    } ->\n      transformChildren_ acc (mapper.expr mapper v :: accum)\n    | not_a_list -> mapper.expr mapper not_a_list\n  in\n  transformChildren_ the_list []\n\nlet extract_children ?(remove_last_position_unit = false) ~loc\n    props_and_children =\n  let rec allButLast_ lst acc =\n    match lst with\n    | [] -> []\n    | [(Nolabel, {pexp_desc = Pexp_construct ({txt = Lident \"()\"}, None)})] ->\n      acc\n    | (Nolabel, {pexp_loc}) :: _rest ->\n      Jsx_common.raise_error ~loc:pexp_loc\n        \"JSX: found non-labelled argument before the last position\"\n    | arg :: rest -> allButLast_ rest (arg :: acc)\n  in\n  let all_but_last lst = allButLast_ lst [] 
|> List.rev in\n  match\n    List.partition\n      (fun (label, _) -> label = labelled \"children\")\n      props_and_children\n  with\n  | [], props ->\n    (* no children provided? Place a placeholder list *)\n    ( Exp.construct {loc = Location.none; txt = Lident \"[]\"} None,\n      if remove_last_position_unit then all_but_last props else props )\n  | [(_, children_expr)], props ->\n    ( children_expr,\n      if remove_last_position_unit then all_but_last props else props )\n  | _ ->\n    Jsx_common.raise_error ~loc\n      \"JSX: somehow there's more than one `children` label\"\n\nlet merlin_focus = ({loc = Location.none; txt = \"merlin.focus\"}, PStr [])\n\n(* Helper method to filter out any attribute that isn't [@react.component] *)\nlet other_attrs_pure (loc, _) =\n  match loc.txt with\n  | \"react.component\" | \"jsx.component\" -> false\n  | _ -> true\n\n(* Finds the name of the variable the binding is assigned to, otherwise raises Invalid_argument *)\nlet rec get_fn_name binding =\n  match binding with\n  | {ppat_desc = Ppat_var {txt}} -> txt\n  | {ppat_desc = Ppat_constraint (pat, _)} -> get_fn_name pat\n  | {ppat_loc} ->\n    Jsx_common.raise_error ~loc:ppat_loc\n      \"JSX component calls cannot be destructured.\"\n\nlet make_new_binding binding expression new_name =\n  match binding with\n  | {pvb_pat = {ppat_desc = Ppat_var ppat_var} as pvb_pat} ->\n    {\n      binding with\n      pvb_pat =\n        {pvb_pat with ppat_desc = Ppat_var {ppat_var with txt = new_name}};\n      pvb_expr = expression;\n      pvb_attributes = [merlin_focus];\n    }\n  | {pvb_loc} ->\n    Jsx_common.raise_error ~loc:pvb_loc\n      \"JSX component calls cannot be destructured.\"\n\n(* Lookup the filename from the location information on the AST node and turn it into a valid module identifier *)\nlet filename_from_loc (pstr_loc : Location.t) =\n  let file_name =\n    match pstr_loc.loc_start.pos_fname with\n    | \"\" -> !Location.input_name\n    | file_name -> file_name\n 
 in\n  let file_name =\n    try Filename.chop_extension (Filename.basename file_name)\n    with Invalid_argument _ -> file_name\n  in\n  let file_name = String.capitalize_ascii file_name in\n  file_name\n\n(* Build a string representation of a module name with segments separated by $ *)\nlet make_module_name file_name nested_modules fn_name =\n  let full_module_name =\n    match (file_name, nested_modules, fn_name) with\n    (* TODO: is this even reachable? It seems like the fileName always exists *)\n    | \"\", nested_modules, \"make\" -> nested_modules\n    | \"\", nested_modules, fn_name -> List.rev (fn_name :: nested_modules)\n    | file_name, nested_modules, \"make\" -> file_name :: List.rev nested_modules\n    | file_name, nested_modules, fn_name ->\n      file_name :: List.rev (fn_name :: nested_modules)\n  in\n  let full_module_name = String.concat \"$\" full_module_name in\n  full_module_name\n\n(*\n  AST node builders\n  These functions help us build AST nodes that are needed when transforming a [@react.component] into a\n  constructor and a props external\n  *)\n\n(* make record from props and spread props if exists *)\nlet record_from_props ~loc ~remove_key call_arguments =\n  let spread_props_label = \"_spreadProps\" in\n  let rec remove_last_position_unit_aux props acc =\n    match props with\n    | [] -> acc\n    | [(Nolabel, {pexp_desc = Pexp_construct ({txt = Lident \"()\"}, None)})] ->\n      acc\n    | (Nolabel, {pexp_loc}) :: _rest ->\n      Jsx_common.raise_error ~loc:pexp_loc\n        \"JSX: found non-labelled argument before the last position\"\n    | ((Labelled txt, {pexp_loc}) as prop) :: rest\n    | ((Optional txt, {pexp_loc}) as prop) :: rest ->\n      if txt = spread_props_label then\n        match acc with\n        | [] -> remove_last_position_unit_aux rest (prop :: acc)\n        | _ ->\n          Jsx_common.raise_error ~loc:pexp_loc\n            \"JSX: use {...p} {x: v} not {x: v} {...p} \\n\\\n            \\     multiple spreads 
{...p} {...p} not allowed.\"\n      else remove_last_position_unit_aux rest (prop :: acc)\n  in\n  let props, props_to_spread =\n    remove_last_position_unit_aux call_arguments []\n    |> List.rev\n    |> List.partition (fun (label, _) -> label <> labelled \"_spreadProps\")\n  in\n  let props =\n    if remove_key then\n      props |> List.filter (fun (arg_label, _) -> \"key\" <> get_label arg_label)\n    else props\n  in\n\n  let process_prop (arg_label, ({pexp_loc} as pexpr)) =\n    (* In case filed label is \"key\" only then change expression to option *)\n    let id = get_label arg_label in\n    if is_optional arg_label then\n      ( {txt = Lident id; loc = pexp_loc},\n        {pexpr with pexp_attributes = optional_attrs} )\n    else ({txt = Lident id; loc = pexp_loc}, pexpr)\n  in\n  let fields = props |> List.map process_prop in\n  let spread_fields =\n    props_to_spread |> List.map (fun (_, expression) -> expression)\n  in\n  match (fields, spread_fields) with\n  | [], [spread_props] | [], spread_props :: _ -> spread_props\n  | _, [] ->\n    {\n      pexp_desc = Pexp_record (fields, None);\n      pexp_loc = loc;\n      pexp_attributes = [];\n    }\n  | _, [spread_props]\n  (* take the first spreadProps only *)\n  | _, spread_props :: _ ->\n    {\n      pexp_desc = Pexp_record (fields, Some spread_props);\n      pexp_loc = loc;\n      pexp_attributes = [];\n    }\n\n(* make type params for make fn arguments *)\n(* let make = ({id, name, children}: props<'id, 'name, 'children>) *)\nlet make_props_type_params_tvar named_type_list =\n  named_type_list\n  |> List.filter_map (fun (_isOptional, label, _, loc, _interiorType) ->\n         if label = \"key\" then None\n         else Some (Typ.var ~loc @@ safe_type_from_value (Labelled label)))\n\nlet strip_option core_type =\n  match core_type with\n  | {ptyp_desc = Ptyp_constr ({txt = Lident \"option\"}, core_types)} ->\n    List.nth_opt core_types 0 [@doesNotRaise]\n  | _ -> Some core_type\n\nlet strip_js_nullable 
core_type =\n  match core_type with\n  | {\n   ptyp_desc =\n     Ptyp_constr ({txt = Ldot (Ldot (Lident \"Js\", \"Nullable\"), \"t\")}, core_types);\n  } ->\n    List.nth_opt core_types 0 [@doesNotRaise]\n  | _ -> Some core_type\n\n(* Make type params of the props type *)\n(* (Sig) let make: React.componentLike<props<string>, React.element> *)\n(* (Str) let make = ({x, _}: props<'x>) => body *)\n(* (Str) external make: React.componentLike<props< .. >, React.element> = \"default\" *)\nlet make_props_type_params ?(strip_explicit_option = false)\n    ?(strip_explicit_js_nullable_of_ref = false) named_type_list =\n  named_type_list\n  |> List.filter_map (fun (is_optional, label, _, loc, interior_type) ->\n         if label = \"key\" then None\n           (* TODO: Worth thinking how about \"ref_\" or \"_ref\" usages *)\n         else if label = \"ref\" then\n           (*\n                If ref has a type annotation then use it, else 'ref.\n                For example, if JSX ppx is used for React Native, type would be different.\n             *)\n           match interior_type with\n           | {ptyp_desc = Ptyp_any} -> Some (ref_type_var loc)\n           | _ ->\n             (* Strip explicit Js.Nullable.t in case of forwardRef *)\n             if strip_explicit_js_nullable_of_ref then\n               strip_js_nullable interior_type\n             else Some interior_type\n           (* Strip the explicit option type in implementation *)\n           (* let make = (~x: option<string>=?) => ... 
*)\n         else if is_optional && strip_explicit_option then\n           strip_option interior_type\n         else Some interior_type)\n\nlet make_label_decls named_type_list =\n  let rec check_duplicated_label l =\n    let rec mem_label ((_, (la : string), _, _, _) as x) = function\n      | [] -> false\n      | (_, (lb : string), _, _, _) :: l -> lb = la || mem_label x l\n    in\n    match l with\n    | [] -> ()\n    | hd :: tl ->\n      if mem_label hd tl then\n        let _, label, _, loc, _ = hd in\n        Jsx_common.raise_error ~loc \"JSX: found the duplicated prop `%s`\" label\n      else check_duplicated_label tl\n  in\n  let () = named_type_list |> List.rev |> check_duplicated_label in\n\n  named_type_list\n  |> List.map (fun (is_optional, label, attrs, loc, interior_type) ->\n         if label = \"key\" then\n           Type.field ~loc ~attrs:(optional_attrs @ attrs) {txt = label; loc}\n             interior_type\n         else if is_optional then\n           Type.field ~loc ~attrs:(optional_attrs @ attrs) {txt = label; loc}\n             (Typ.var @@ safe_type_from_value @@ Labelled label)\n         else\n           Type.field ~loc ~attrs {txt = label; loc}\n             (Typ.var @@ safe_type_from_value @@ Labelled label))\n\nlet make_type_decls props_name loc named_type_list =\n  let label_decl_list = make_label_decls named_type_list in\n  (* 'id, 'className, ... *)\n  let params =\n    make_props_type_params_tvar named_type_list\n    |> List.map (fun core_type -> (core_type, Invariant))\n  in\n  [\n    Type.mk ~loc ~params {txt = props_name; loc}\n      ~kind:(Ptype_record label_decl_list);\n  ]\n\nlet make_type_decls_with_core_type props_name loc core_type typ_vars =\n  [\n    Type.mk ~loc {txt = props_name; loc} ~kind:Ptype_abstract\n      ~params:(typ_vars |> List.map (fun v -> (v, Invariant)))\n      ~manifest:core_type;\n  ]\n\n(* type props<'x, 'y, ...> = { x: 'x, y?: 'y, ... 
} *)\nlet make_props_record_type ~core_type_of_attr ~typ_vars_of_core_type props_name\n    loc named_type_list =\n  Str.type_ Nonrecursive\n    (match core_type_of_attr with\n    | None -> make_type_decls props_name loc named_type_list\n    | Some core_type ->\n      make_type_decls_with_core_type props_name loc core_type\n        typ_vars_of_core_type)\n\n(* type props<'x, 'y, ...> = { x: 'x, y?: 'y, ... } *)\nlet make_props_record_type_sig ~core_type_of_attr ~typ_vars_of_core_type\n    props_name loc named_type_list =\n  Sig.type_ Nonrecursive\n    (match core_type_of_attr with\n    | None -> make_type_decls props_name loc named_type_list\n    | Some core_type ->\n      make_type_decls_with_core_type props_name loc core_type\n        typ_vars_of_core_type)\n\nlet transform_uppercase_call3 ~config module_path mapper jsx_expr_loc\n    call_expr_loc attrs call_arguments =\n  let children, args_with_labels =\n    extract_children ~remove_last_position_unit:true ~loc:jsx_expr_loc\n      call_arguments\n  in\n  let args_for_make = args_with_labels in\n  let children_expr = transform_children_if_list_upper ~mapper children in\n  let recursively_transformed_args_for_make =\n    args_for_make\n    |> List.map (fun (label, expression) ->\n           (label, mapper.expr mapper expression))\n  in\n  let children_arg = ref None in\n  let args =\n    recursively_transformed_args_for_make\n    @\n    match children_expr with\n    | Exact children -> [(labelled \"children\", children)]\n    | ListLiteral {pexp_desc = Pexp_array list} when list = [] -> []\n    | ListLiteral expression -> (\n      (* this is a hack to support react components that introspect into their children *)\n      children_arg := Some expression;\n      match config.Jsx_common.mode with\n      | \"automatic\" ->\n        [\n          ( labelled \"children\",\n            Exp.apply\n              (Exp.ident\n                 {txt = module_access_name config \"array\"; loc = Location.none})\n              
[(Nolabel, expression)] );\n        ]\n      | _ ->\n        [\n          ( labelled \"children\",\n            Exp.ident {loc = Location.none; txt = Ldot (Lident \"React\", \"null\")}\n          );\n        ])\n  in\n\n  let is_cap str = String.capitalize_ascii str = str in\n  let ident ~suffix =\n    match module_path with\n    | Lident _ -> Ldot (module_path, suffix)\n    | Ldot (_modulePath, value) as full_path when is_cap value ->\n      Ldot (full_path, suffix)\n    | module_path -> module_path\n  in\n  let is_empty_record {pexp_desc} =\n    match pexp_desc with\n    | Pexp_record (label_decls, _) when List.length label_decls = 0 -> true\n    | _ -> false\n  in\n\n  (* handle key, ref, children *)\n  (* React.createElement(Component.make, props, ...children) *)\n  let record = record_from_props ~loc:jsx_expr_loc ~remove_key:true args in\n  let props =\n    if is_empty_record record then empty_record ~loc:jsx_expr_loc else record\n  in\n  let key_prop =\n    args |> List.filter (fun (arg_label, _) -> \"key\" = get_label arg_label)\n  in\n  let make_i_d =\n    Exp.ident ~loc:call_expr_loc\n      {txt = ident ~suffix:\"make\"; loc = call_expr_loc}\n  in\n  match config.mode with\n  (* The new jsx transform *)\n  | \"automatic\" ->\n    let jsx_expr, key_and_unit =\n      match (!children_arg, key_prop) with\n      | None, key :: _ ->\n        ( Exp.ident\n            {loc = Location.none; txt = module_access_name config \"jsxKeyed\"},\n          [key; (nolabel, unit_expr ~loc:Location.none)] )\n      | None, [] ->\n        ( Exp.ident {loc = Location.none; txt = module_access_name config \"jsx\"},\n          [] )\n      | Some _, key :: _ ->\n        ( Exp.ident\n            {loc = Location.none; txt = module_access_name config \"jsxsKeyed\"},\n          [key; (nolabel, unit_expr ~loc:Location.none)] )\n      | Some _, [] ->\n        ( Exp.ident {loc = Location.none; txt = module_access_name config \"jsxs\"},\n          [] )\n    in\n    Exp.apply 
~loc:jsx_expr_loc ~attrs jsx_expr\n      ([(nolabel, make_i_d); (nolabel, props)] @ key_and_unit)\n  | _ -> (\n    match (!children_arg, key_prop) with\n    | None, key :: _ ->\n      Exp.apply ~loc:jsx_expr_loc ~attrs\n        (Exp.ident\n           {\n             loc = Location.none;\n             txt = Ldot (Lident \"JsxPPXReactSupport\", \"createElementWithKey\");\n           })\n        [key; (nolabel, make_i_d); (nolabel, props)]\n    | None, [] ->\n      Exp.apply ~loc:jsx_expr_loc ~attrs\n        (Exp.ident\n           {loc = Location.none; txt = Ldot (Lident \"React\", \"createElement\")})\n        [(nolabel, make_i_d); (nolabel, props)]\n    | Some children, key :: _ ->\n      Exp.apply ~loc:jsx_expr_loc ~attrs\n        (Exp.ident\n           {\n             loc = Location.none;\n             txt =\n               Ldot (Lident \"JsxPPXReactSupport\", \"createElementVariadicWithKey\");\n           })\n        [key; (nolabel, make_i_d); (nolabel, props); (nolabel, children)]\n    | Some children, [] ->\n      Exp.apply ~loc:jsx_expr_loc ~attrs\n        (Exp.ident\n           {\n             loc = Location.none;\n             txt = Ldot (Lident \"React\", \"createElementVariadic\");\n           })\n        [(nolabel, make_i_d); (nolabel, props); (nolabel, children)])\n\nlet transform_lowercase_call3 ~config mapper jsx_expr_loc call_expr_loc attrs\n    call_arguments id =\n  let component_name_expr = constant_string ~loc:call_expr_loc id in\n  match config.Jsx_common.mode with\n  (* the new jsx transform *)\n  | \"automatic\" ->\n    let element_binding =\n      match config.module_ |> String.lowercase_ascii with\n      | \"react\" -> Lident \"ReactDOM\"\n      | _generic -> module_access_name config \"Elements\"\n    in\n\n    let children, non_children_props =\n      extract_children ~remove_last_position_unit:true ~loc:jsx_expr_loc\n        call_arguments\n    in\n    let args_for_make = non_children_props in\n    let children_expr = 
transform_children_if_list_upper ~mapper children in\n    let recursively_transformed_args_for_make =\n      args_for_make\n      |> List.map (fun (label, expression) ->\n             (label, mapper.expr mapper expression))\n    in\n    let children_arg = ref None in\n    let args =\n      recursively_transformed_args_for_make\n      @\n      match children_expr with\n      | Exact children ->\n        [\n          ( labelled \"children\",\n            Exp.apply ~attrs:optional_attrs\n              (Exp.ident\n                 {\n                   txt = Ldot (element_binding, \"someElement\");\n                   loc = Location.none;\n                 })\n              [(Nolabel, children)] );\n        ]\n      | ListLiteral {pexp_desc = Pexp_array list} when list = [] -> []\n      | ListLiteral expression ->\n        (* this is a hack to support react components that introspect into their children *)\n        children_arg := Some expression;\n        [\n          ( labelled \"children\",\n            Exp.apply\n              (Exp.ident\n                 {txt = module_access_name config \"array\"; loc = Location.none})\n              [(Nolabel, expression)] );\n        ]\n    in\n    let is_empty_record {pexp_desc} =\n      match pexp_desc with\n      | Pexp_record (label_decls, _) when List.length label_decls = 0 -> true\n      | _ -> false\n    in\n    let record = record_from_props ~loc:jsx_expr_loc ~remove_key:true args in\n    let props =\n      if is_empty_record record then empty_record ~loc:jsx_expr_loc else record\n    in\n    let key_prop =\n      args |> List.filter (fun (arg_label, _) -> \"key\" = get_label arg_label)\n    in\n    let jsx_expr, key_and_unit =\n      match (!children_arg, key_prop) with\n      | None, key :: _ ->\n        ( Exp.ident\n            {loc = Location.none; txt = Ldot (element_binding, \"jsxKeyed\")},\n          [key; (nolabel, unit_expr ~loc:Location.none)] )\n      | None, [] ->\n        ( Exp.ident {loc = Location.none; 
txt = Ldot (element_binding, \"jsx\")},\n          [] )\n      | Some _, key :: _ ->\n        ( Exp.ident\n            {loc = Location.none; txt = Ldot (element_binding, \"jsxsKeyed\")},\n          [key; (nolabel, unit_expr ~loc:Location.none)] )\n      | Some _, [] ->\n        ( Exp.ident {loc = Location.none; txt = Ldot (element_binding, \"jsxs\")},\n          [] )\n    in\n    Exp.apply ~loc:jsx_expr_loc ~attrs jsx_expr\n      ([(nolabel, component_name_expr); (nolabel, props)] @ key_and_unit)\n  | _ ->\n    let children, non_children_props =\n      extract_children ~loc:jsx_expr_loc call_arguments\n    in\n    let children_expr = transform_children_if_list ~mapper children in\n    let create_element_call =\n      match children with\n      (* [@JSX] div(~children=[a]), coming from <div> a </div> *)\n      | {\n       pexp_desc =\n         ( Pexp_construct ({txt = Lident \"::\"}, Some {pexp_desc = Pexp_tuple _})\n         | Pexp_construct ({txt = Lident \"[]\"}, None) );\n      } ->\n        \"createDOMElementVariadic\"\n      (* [@JSX] div(~children= value), coming from <div> ...(value) </div> *)\n      | {pexp_loc} ->\n        Jsx_common.raise_error ~loc:pexp_loc\n          \"A spread as a DOM element's children don't make sense written \\\n           together. 
You can simply remove the spread.\"\n    in\n    let args =\n      match non_children_props with\n      | [_justTheUnitArgumentAtEnd] ->\n        [\n          (* \"div\" *)\n          (nolabel, component_name_expr);\n          (* [|moreCreateElementCallsHere|] *)\n          (nolabel, children_expr);\n        ]\n      | non_empty_props ->\n        let props_record =\n          record_from_props ~loc:Location.none ~remove_key:false non_empty_props\n        in\n        [\n          (* \"div\" *)\n          (nolabel, component_name_expr);\n          (* ReactDOM.domProps(~className=blabla, ~foo=bar, ()) *)\n          (labelled \"props\", props_record);\n          (* [|moreCreateElementCallsHere|] *)\n          (nolabel, children_expr);\n        ]\n    in\n    Exp.apply ~loc:jsx_expr_loc ~attrs\n      (* ReactDOM.createElement *)\n      (Exp.ident\n         {\n           loc = Location.none;\n           txt = Ldot (Lident \"ReactDOM\", create_element_call);\n         })\n      args\n\nlet rec recursively_transform_named_args_for_make expr args newtypes core_type =\n  match expr.pexp_desc with\n  (* TODO: make this show up with a loc. *)\n  | Pexp_fun (Labelled \"key\", _, _, _) | Pexp_fun (Optional \"key\", _, _, _) ->\n    Jsx_common.raise_error ~loc:expr.pexp_loc\n      \"Key cannot be accessed inside of a component. Don't worry - you can \\\n       always key a component from its parent!\"\n  | Pexp_fun (Labelled \"ref\", _, _, _) | Pexp_fun (Optional \"ref\", _, _, _) ->\n    Jsx_common.raise_error ~loc:expr.pexp_loc\n      \"Ref cannot be passed as a normal prop. 
Please use `forwardRef` API \\\n       instead.\"\n  | Pexp_fun (arg, default, pattern, expression)\n    when is_optional arg || is_labelled arg ->\n    let () =\n      match (is_optional arg, pattern, default) with\n      | true, {ppat_desc = Ppat_constraint (_, {ptyp_desc})}, None -> (\n        match ptyp_desc with\n        | Ptyp_constr ({txt = Lident \"option\"}, [_]) -> ()\n        | _ ->\n          let current_type =\n            match ptyp_desc with\n            | Ptyp_constr ({txt}, []) ->\n              String.concat \".\" (Longident.flatten txt)\n            | Ptyp_constr ({txt}, _innerTypeArgs) ->\n              String.concat \".\" (Longident.flatten txt) ^ \"(...)\"\n            | _ -> \"...\"\n          in\n          Location.prerr_warning pattern.ppat_loc\n            (Preprocessor\n               (Printf.sprintf\n                  \"React: optional argument annotations must have explicit \\\n                   `option`. Did you mean `option<%s>=?`?\"\n                  current_type)))\n      | _ -> ()\n    in\n    let alias =\n      match pattern with\n      | {\n       ppat_desc =\n         ( Ppat_alias (_, {txt})\n         | Ppat_var {txt}\n         | Ppat_constraint ({ppat_desc = Ppat_var {txt}}, _) );\n      } ->\n        txt\n      | {ppat_desc = Ppat_any} -> \"_\"\n      | _ -> get_label arg\n    in\n    let type_ =\n      match pattern with\n      | {ppat_desc = Ppat_constraint (_, {ptyp_desc = Ptyp_package _})} -> None\n      | {ppat_desc = Ppat_constraint (_, type_)} -> Some type_\n      | _ -> None\n    in\n\n    recursively_transform_named_args_for_make expression\n      ((arg, default, pattern, alias, pattern.ppat_loc, type_) :: args)\n      newtypes core_type\n  | Pexp_fun\n      ( Nolabel,\n        _,\n        {ppat_desc = Ppat_construct ({txt = Lident \"()\"}, _) | Ppat_any},\n        _expression ) ->\n    (args, newtypes, core_type)\n  | Pexp_fun\n      ( Nolabel,\n        _,\n        ({\n           ppat_desc =\n             Ppat_var 
{txt} | Ppat_constraint ({ppat_desc = Ppat_var {txt}}, _);\n         } as pattern),\n        _expression ) ->\n    if txt = \"ref\" then\n      let type_ =\n        match pattern with\n        | {ppat_desc = Ppat_constraint (_, type_)} -> Some type_\n        | _ -> None\n      in\n      (* The ref arguement of forwardRef should be optional *)\n      ( (Optional \"ref\", None, pattern, txt, pattern.ppat_loc, type_) :: args,\n        newtypes,\n        core_type )\n    else (args, newtypes, core_type)\n  | Pexp_fun (Nolabel, _, pattern, _expression) ->\n    Location.raise_errorf ~loc:pattern.ppat_loc\n      \"React: react.component refs only support plain arguments and type \\\n       annotations.\"\n  | Pexp_newtype (label, expression) ->\n    recursively_transform_named_args_for_make expression args\n      (label :: newtypes) core_type\n  | Pexp_constraint (expression, core_type) ->\n    recursively_transform_named_args_for_make expression args newtypes\n      (Some core_type)\n  | _ -> (args, newtypes, core_type)\n\nlet arg_to_type types\n    ((name, default, {ppat_attributes = attrs}, _alias, loc, type_) :\n      arg_label * expression option * pattern * label * 'loc * core_type option)\n    =\n  match (type_, name, default) with\n  | Some type_, name, _ when is_optional name ->\n    (true, get_label name, attrs, loc, type_) :: types\n  | Some type_, name, _ -> (false, get_label name, attrs, loc, type_) :: types\n  | None, name, _ when is_optional name ->\n    (true, get_label name, attrs, loc, Typ.any ~loc ()) :: types\n  | None, name, _ when is_labelled name ->\n    (false, get_label name, attrs, loc, Typ.any ~loc ()) :: types\n  | _ -> types\n\nlet has_default_value name_arg_list =\n  name_arg_list\n  |> List.exists (fun (name, default, _, _, _, _) ->\n         Option.is_some default && is_optional name)\n\nlet arg_to_concrete_type types (name, attrs, loc, type_) =\n  match name with\n  | name when is_labelled name ->\n    (false, get_label name, attrs, loc, 
type_) :: types\n  | name when is_optional name ->\n    (true, get_label name, attrs, loc, type_) :: types\n  | _ -> types\n\nlet check_string_int_attribute_iter =\n  let attribute _ ({txt; loc}, _) =\n    if txt = \"string\" || txt = \"int\" then\n      Jsx_common.raise_error ~loc\n        \"@string and @int attributes not supported. See \\\n         https://github.com/rescript-lang/rescript-compiler/issues/5724\"\n  in\n\n  {Ast_iterator.default_iterator with attribute}\n\nlet check_multiple_components ~config ~loc =\n  (* If there is another component, throw error *)\n  if config.Jsx_common.has_component then\n    Jsx_common.raise_error_multiple_component ~loc\n  else config.has_component <- true\n\nlet modified_binding_old binding =\n  let expression = binding.pvb_expr in\n  (* TODO: there is a long-tail of unsupported features inside of blocks - Pexp_letmodule , Pexp_letexception , Pexp_ifthenelse *)\n  let rec spelunk_for_fun_expression expression =\n    match expression with\n    (* let make = (~prop) => ... *)\n    | {pexp_desc = Pexp_fun _} | {pexp_desc = Pexp_newtype _} -> expression\n    (* let make = {let foo = bar in (~prop) => ...} *)\n    | {pexp_desc = Pexp_let (_recursive, _vbs, return_expression)} ->\n      (* here's where we spelunk! *)\n      spelunk_for_fun_expression return_expression\n    (* let make = React.forwardRef((~prop) => ...) 
*)\n    | {\n     pexp_desc =\n       Pexp_apply (_wrapperExpression, [(Nolabel, inner_function_expression)]);\n    } ->\n      spelunk_for_fun_expression inner_function_expression\n    | {\n     pexp_desc = Pexp_sequence (_wrapperExpression, inner_function_expression);\n    } ->\n      spelunk_for_fun_expression inner_function_expression\n    | {pexp_desc = Pexp_constraint (inner_function_expression, _typ)} ->\n      spelunk_for_fun_expression inner_function_expression\n    | {pexp_loc} ->\n      Jsx_common.raise_error ~loc:pexp_loc\n        \"JSX component calls can only be on function definitions or component \\\n         wrappers (forwardRef, memo).\"\n  in\n  spelunk_for_fun_expression expression\n\nlet modified_binding ~binding_loc ~binding_pat_loc ~fn_name binding =\n  let has_application = ref false in\n  let wrap_expression_with_binding expression_fn expression =\n    Vb.mk ~loc:binding_loc ~attrs:binding.pvb_attributes\n      (Pat.var ~loc:binding_pat_loc {loc = binding_pat_loc; txt = fn_name})\n      (expression_fn expression)\n  in\n  let expression = binding.pvb_expr in\n  (* TODO: there is a long-tail of unsupported features inside of blocks - Pexp_letmodule , Pexp_letexception , Pexp_ifthenelse *)\n  let rec spelunk_for_fun_expression expression =\n    match expression with\n    (* let make = (~prop) => ... with no final unit *)\n    | {\n     pexp_desc =\n       Pexp_fun\n         ( ((Labelled _ | Optional _) as label),\n           default,\n           pattern,\n           ({pexp_desc = Pexp_fun _} as internal_expression) );\n    } ->\n      let wrap, has_forward_ref, exp =\n        spelunk_for_fun_expression internal_expression\n      in\n      ( wrap,\n        has_forward_ref,\n        {expression with pexp_desc = Pexp_fun (label, default, pattern, exp)} )\n    (* let make = (()) => ... *)\n    (* let make = (_) => ... 
*)\n    | {\n     pexp_desc =\n       Pexp_fun\n         ( Nolabel,\n           _default,\n           {ppat_desc = Ppat_construct ({txt = Lident \"()\"}, _) | Ppat_any},\n           _internalExpression );\n    } ->\n      ((fun a -> a), false, expression)\n    (* let make = (~prop) => ... *)\n    | {\n     pexp_desc =\n       Pexp_fun\n         ((Labelled _ | Optional _), _default, _pattern, _internalExpression);\n    } ->\n      ((fun a -> a), false, expression)\n    (* let make = (prop) => ... *)\n    | {pexp_desc = Pexp_fun (_nolabel, _default, pattern, _internalExpression)}\n      ->\n      if !has_application then ((fun a -> a), false, expression)\n      else\n        Location.raise_errorf ~loc:pattern.ppat_loc\n          \"React: props need to be labelled arguments.\\n\\\n          \\  If you are working with refs be sure to wrap with React.forwardRef.\\n\\\n          \\  If your component doesn't have any props use () or _ instead of a \\\n           name.\"\n    (* let make = {let foo = bar in (~prop) => ...} *)\n    | {pexp_desc = Pexp_let (recursive, vbs, internal_expression)} ->\n      (* here's where we spelunk! *)\n      let wrap, has_forward_ref, exp =\n        spelunk_for_fun_expression internal_expression\n      in\n      ( wrap,\n        has_forward_ref,\n        {expression with pexp_desc = Pexp_let (recursive, vbs, exp)} )\n    (* let make = React.forwardRef((~prop) => ...) 
*)\n    | {\n     pexp_desc =\n       Pexp_apply (wrapper_expression, [(Nolabel, internal_expression)]);\n    } ->\n      let () = has_application := true in\n      let _, _, exp = spelunk_for_fun_expression internal_expression in\n      let has_forward_ref = is_forward_ref wrapper_expression in\n      ( (fun exp -> Exp.apply wrapper_expression [(nolabel, exp)]),\n        has_forward_ref,\n        exp )\n    | {pexp_desc = Pexp_sequence (wrapper_expression, internal_expression)} ->\n      let wrap, has_forward_ref, exp =\n        spelunk_for_fun_expression internal_expression\n      in\n      ( wrap,\n        has_forward_ref,\n        {expression with pexp_desc = Pexp_sequence (wrapper_expression, exp)} )\n    | e -> ((fun a -> a), false, e)\n  in\n  let wrap_expression, has_forward_ref, expression =\n    spelunk_for_fun_expression expression\n  in\n  (wrap_expression_with_binding wrap_expression, has_forward_ref, expression)\n\nlet vb_match ~expr (name, default, _, alias, loc, _) =\n  let label = get_label name in\n  match default with\n  | Some default ->\n    let value_binding =\n      Vb.mk\n        (Pat.var (Location.mkloc alias loc))\n        (Exp.match_\n           (Exp.ident {txt = Lident (\"__\" ^ alias); loc = Location.none})\n           [\n             Exp.case\n               (Pat.construct\n                  (Location.mknoloc @@ Lident \"Some\")\n                  (Some (Pat.var (Location.mknoloc label))))\n               (Exp.ident (Location.mknoloc @@ Lident label));\n             Exp.case\n               (Pat.construct (Location.mknoloc @@ Lident \"None\") None)\n               default;\n           ])\n    in\n    Exp.let_ Nonrecursive [value_binding] expr\n  | None -> expr\n\nlet vb_match_expr named_arg_list expr =\n  let rec aux named_arg_list =\n    match named_arg_list with\n    | [] -> expr\n    | named_arg :: rest -> vb_match named_arg ~expr:(aux rest)\n  in\n  aux (List.rev named_arg_list)\n\nlet map_binding ~config ~empty_loc ~pstr_loc 
~file_name ~rec_flag binding =\n  if Jsx_common.has_attr_on_binding binding then (\n    check_multiple_components ~config ~loc:pstr_loc;\n    let binding = Jsx_common.remove_arity binding in\n    let core_type_of_attr =\n      Jsx_common.core_type_of_attrs binding.pvb_attributes\n    in\n    let typ_vars_of_core_type =\n      core_type_of_attr\n      |> Option.map Jsx_common.typ_vars_of_core_type\n      |> Option.value ~default:[]\n    in\n    let binding_loc = binding.pvb_loc in\n    let binding_pat_loc = binding.pvb_pat.ppat_loc in\n    let binding =\n      {\n        binding with\n        pvb_pat = {binding.pvb_pat with ppat_loc = empty_loc};\n        pvb_loc = empty_loc;\n        pvb_attributes = binding.pvb_attributes |> List.filter other_attrs_pure;\n      }\n    in\n    let fn_name = get_fn_name binding.pvb_pat in\n    let internal_fn_name = fn_name ^ \"$Internal\" in\n    let full_module_name =\n      make_module_name file_name config.nested_modules fn_name\n    in\n    let binding_wrapper, has_forward_ref, expression =\n      modified_binding ~binding_loc ~binding_pat_loc ~fn_name binding\n    in\n    let is_async =\n      Ext_list.find_first binding.pvb_expr.pexp_attributes Ast_async.is_async\n      |> Option.is_some\n    in\n    (* do stuff here! *)\n    let named_arg_list, newtypes, _typeConstraints =\n      recursively_transform_named_args_for_make\n        (modified_binding_old binding)\n        [] [] None\n    in\n    let named_type_list = List.fold_left arg_to_type [] named_arg_list in\n    (* type props = { ... 
} *)\n    let props_record_type =\n      make_props_record_type ~core_type_of_attr ~typ_vars_of_core_type \"props\"\n        pstr_loc named_type_list\n    in\n    let inner_expression =\n      Exp.apply\n        (Exp.ident\n           (Location.mknoloc\n           @@ Lident\n                (match rec_flag with\n                | Recursive -> internal_fn_name\n                | Nonrecursive -> fn_name)))\n        ([(Nolabel, Exp.ident (Location.mknoloc @@ Lident \"props\"))]\n        @\n        match has_forward_ref with\n        | true -> [(Nolabel, Exp.ident (Location.mknoloc @@ Lident \"ref\"))]\n        | false -> [])\n    in\n    let make_props_pattern = function\n      | [] -> Pat.var @@ Location.mknoloc \"props\"\n      | _ ->\n        Pat.constraint_\n          (Pat.var @@ Location.mknoloc \"props\")\n          (Typ.constr (Location.mknoloc @@ Lident \"props\") [Typ.any ()])\n    in\n    let inner_expression =\n      Jsx_common.async_component ~async:is_async inner_expression\n    in\n    let full_expression =\n      (* React component name should start with uppercase letter *)\n      (* let make = { let \\\"App\" = props => make(props); \\\"App\" } *)\n      (* let make = React.forwardRef({\n           let \\\"App\" = (props, ref) => make({...props, ref: @optional (Js.Nullabel.toOption(ref))})\n         })*)\n      Exp.fun_ nolabel None\n        (match core_type_of_attr with\n        | None -> make_props_pattern named_type_list\n        | Some _ -> make_props_pattern typ_vars_of_core_type)\n        (if has_forward_ref then\n           Exp.fun_ nolabel None\n             (Pat.var @@ Location.mknoloc \"ref\")\n             inner_expression\n         else inner_expression)\n    in\n    let full_expression =\n      if !Config.uncurried = Uncurried then\n        full_expression\n        |> Ast_uncurried.uncurried_fun ~loc:full_expression.pexp_loc\n             ~arity:(if has_forward_ref then 2 else 1)\n      else full_expression\n    in\n    let full_expression 
=\n      match full_module_name with\n      | \"\" -> full_expression\n      | txt ->\n        Exp.let_ Nonrecursive\n          [\n            Vb.mk ~loc:empty_loc\n              (Pat.var ~loc:empty_loc {loc = empty_loc; txt})\n              full_expression;\n          ]\n          (Exp.ident ~loc:pstr_loc {loc = empty_loc; txt = Lident txt})\n    in\n    let rec strip_constraint_unpack ~label pattern =\n      match pattern with\n      | {ppat_desc = Ppat_constraint (_, {ptyp_desc = Ptyp_package _})} ->\n        pattern\n      | {ppat_desc = Ppat_constraint (pattern, _)} ->\n        strip_constraint_unpack ~label pattern\n      | _ -> pattern\n    in\n    let safe_pattern_label pattern =\n      match pattern with\n      | {ppat_desc = Ppat_var {txt; loc}} ->\n        {pattern with ppat_desc = Ppat_var {txt = \"__\" ^ txt; loc}}\n      | {ppat_desc = Ppat_alias (p, {txt; loc})} ->\n        {pattern with ppat_desc = Ppat_alias (p, {txt = \"__\" ^ txt; loc})}\n      | _ -> pattern\n    in\n    let rec returned_expression patterns_with_label patterns_with_nolabel\n        ({pexp_desc} as expr) =\n      match pexp_desc with\n      | Pexp_newtype (_, expr) ->\n        returned_expression patterns_with_label patterns_with_nolabel expr\n      | Pexp_constraint (expr, _) ->\n        returned_expression patterns_with_label patterns_with_nolabel expr\n      | Pexp_fun\n          ( _arg_label,\n            _default,\n            {ppat_desc = Ppat_construct ({txt = Lident \"()\"}, _)},\n            expr ) ->\n        (patterns_with_label, patterns_with_nolabel, expr)\n      | Pexp_fun (arg_label, default, ({ppat_loc; ppat_desc} as pattern), expr)\n        -> (\n        let pattern_without_constraint =\n          strip_constraint_unpack ~label:(get_label arg_label) pattern\n        in\n        (*\n           If prop has the default value as Ident, it will get a build error\n           when the referenced Ident value and the prop have the same name.\n           So we add a \"__\" 
to label to resolve the build error.\n        *)\n        let pattern_with_safe_label =\n          match default with\n          | Some _ -> safe_pattern_label pattern_without_constraint\n          | _ -> pattern_without_constraint\n        in\n        if is_labelled arg_label || is_optional arg_label then\n          returned_expression\n            (( {loc = ppat_loc; txt = Lident (get_label arg_label)},\n               {\n                 pattern_with_safe_label with\n                 ppat_attributes =\n                   (if is_optional arg_label then optional_attrs else [])\n                   @ pattern.ppat_attributes;\n               } )\n            :: patterns_with_label)\n            patterns_with_nolabel expr\n        else\n          (* Special case of nolabel arg \"ref\" in forwardRef fn *)\n          (* let make = React.forwardRef(ref => body) *)\n          match ppat_desc with\n          | Ppat_var {txt} | Ppat_constraint ({ppat_desc = Ppat_var {txt}}, _)\n            ->\n            returned_expression patterns_with_label\n              (( {loc = ppat_loc; txt = Lident txt},\n                 {\n                   pattern with\n                   ppat_attributes = optional_attrs @ pattern.ppat_attributes;\n                 } )\n              :: patterns_with_nolabel)\n              expr\n          | _ ->\n            returned_expression patterns_with_label patterns_with_nolabel expr)\n      | _ -> (patterns_with_label, patterns_with_nolabel, expr)\n    in\n    let patterns_with_label, patterns_with_nolabel, expression =\n      returned_expression [] [] expression\n    in\n    (* add pattern matching for optional prop value *)\n    let expression =\n      if has_default_value named_arg_list then\n        vb_match_expr named_arg_list expression\n      else expression\n    in\n    (* (ref) => expr *)\n    let expression =\n      List.fold_left\n        (fun expr (_, pattern) ->\n          let pattern =\n            match pattern.ppat_desc with\n          
  | Ppat_var {txt} when txt = \"ref\" ->\n              Pat.constraint_ pattern (ref_type Location.none)\n            | _ -> pattern\n          in\n          Exp.fun_ Nolabel None pattern expr)\n        expression patterns_with_nolabel\n    in\n    (* ({a, b, _}: props<'a, 'b>) *)\n    let record_pattern =\n      match patterns_with_label with\n      | [] -> Pat.any ()\n      | _ -> Pat.record (List.rev patterns_with_label) Open\n    in\n    let expression =\n      Exp.fun_ Nolabel None\n        (Pat.constraint_ record_pattern\n           (Typ.constr ~loc:empty_loc\n              {txt = Lident \"props\"; loc = empty_loc}\n              (match core_type_of_attr with\n              | None ->\n                make_props_type_params ~strip_explicit_option:true\n                  ~strip_explicit_js_nullable_of_ref:has_forward_ref\n                  named_type_list\n              | Some _ -> (\n                match typ_vars_of_core_type with\n                | [] -> []\n                | _ -> [Typ.any ()]))))\n        expression\n    in\n    let expression = Ast_async.add_async_attribute ~async:is_async expression in\n    let expression =\n      (* Add new tupes (type a,b,c) to make's definition *)\n      newtypes\n      |> List.fold_left (fun e newtype -> Exp.newtype newtype e) expression\n    in\n    (* let make = ({id, name, ...}: props<'id, 'name, ...>) => { ... 
} *)\n    let binding, new_binding =\n      match rec_flag with\n      | Recursive ->\n        ( binding_wrapper\n            (Exp.let_ ~loc:empty_loc Nonrecursive\n               [make_new_binding binding expression internal_fn_name]\n               (Exp.let_ ~loc:empty_loc Nonrecursive\n                  [\n                    Vb.mk\n                      (Pat.var {loc = empty_loc; txt = fn_name})\n                      full_expression;\n                  ]\n                  (Exp.ident {loc = empty_loc; txt = Lident fn_name}))),\n          None )\n      | Nonrecursive ->\n        ( {\n            binding with\n            pvb_expr = expression;\n            pvb_pat = Pat.var {txt = fn_name; loc = Location.none};\n          },\n          Some (binding_wrapper full_expression) )\n    in\n    (Some props_record_type, binding, new_binding))\n  else (None, binding, None)\n\nlet transform_structure_item ~config item =\n  match item with\n  (* external *)\n  | {\n      pstr_loc;\n      pstr_desc =\n        Pstr_primitive ({pval_attributes; pval_type} as value_description);\n    } as pstr -> (\n    match List.filter Jsx_common.has_attr pval_attributes with\n    | [] -> [item]\n    | [_] ->\n      check_multiple_components ~config ~loc:pstr_loc;\n      check_string_int_attribute_iter.structure_item\n        check_string_int_attribute_iter item;\n      let pval_type = Jsx_common.extract_uncurried pval_type in\n      let core_type_of_attr = Jsx_common.core_type_of_attrs pval_attributes in\n      let typ_vars_of_core_type =\n        core_type_of_attr\n        |> Option.map Jsx_common.typ_vars_of_core_type\n        |> Option.value ~default:[]\n      in\n      let rec get_prop_types types\n          ({ptyp_loc; ptyp_desc; ptyp_attributes} as full_type) =\n        match ptyp_desc with\n        | Ptyp_arrow (name, type_, ({ptyp_desc = Ptyp_arrow _} as rest))\n          when is_labelled name || is_optional name ->\n          get_prop_types\n            ((name, ptyp_attributes, 
ptyp_loc, type_) :: types)\n            rest\n        | Ptyp_arrow (Nolabel, _type, rest) -> get_prop_types types rest\n        | Ptyp_arrow (name, type_, return_value)\n          when is_labelled name || is_optional name ->\n          ( return_value,\n            (name, ptyp_attributes, return_value.ptyp_loc, type_) :: types )\n        | _ -> (full_type, types)\n      in\n      let inner_type, prop_types = get_prop_types [] pval_type in\n      let named_type_list = List.fold_left arg_to_concrete_type [] prop_types in\n      let ret_props_type =\n        Typ.constr ~loc:pstr_loc\n          (Location.mkloc (Lident \"props\") pstr_loc)\n          (match core_type_of_attr with\n          | None -> make_props_type_params named_type_list\n          | Some _ -> (\n            match typ_vars_of_core_type with\n            | [] -> []\n            | _ -> [Typ.any ()]))\n      in\n      (* type props<'x, 'y> = { x: 'x, y?: 'y, ... } *)\n      let props_record_type =\n        make_props_record_type ~core_type_of_attr ~typ_vars_of_core_type \"props\"\n          pstr_loc named_type_list\n      in\n      (* can't be an arrow because it will defensively uncurry *)\n      let new_external_type =\n        Ptyp_constr\n          ( {loc = pstr_loc; txt = module_access_name config \"componentLike\"},\n            [ret_props_type; inner_type] )\n      in\n      let new_structure =\n        {\n          pstr with\n          pstr_desc =\n            Pstr_primitive\n              {\n                value_description with\n                pval_type = {pval_type with ptyp_desc = new_external_type};\n                pval_attributes = List.filter other_attrs_pure pval_attributes;\n              };\n        }\n      in\n      [props_record_type; new_structure]\n    | _ ->\n      Jsx_common.raise_error ~loc:pstr_loc\n        \"Only one JSX component call can exist on a component at one time\")\n  (* let component = ... 
*)\n  | {pstr_loc; pstr_desc = Pstr_value (rec_flag, value_bindings)} -> (\n    let file_name = filename_from_loc pstr_loc in\n    let empty_loc = Location.in_file file_name in\n    let process_binding binding (new_items, bindings, new_bindings) =\n      let new_item, binding, new_binding =\n        map_binding ~config ~empty_loc ~pstr_loc ~file_name ~rec_flag binding\n      in\n      let new_items =\n        match new_item with\n        | Some item -> item :: new_items\n        | None -> new_items\n      in\n      let new_bindings =\n        match new_binding with\n        | Some new_binding -> new_binding :: new_bindings\n        | None -> new_bindings\n      in\n      (new_items, binding :: bindings, new_bindings)\n    in\n    let new_items, bindings, new_bindings =\n      List.fold_right process_binding value_bindings ([], [], [])\n    in\n    new_items\n    @ [{pstr_loc; pstr_desc = Pstr_value (rec_flag, bindings)}]\n    @\n    match new_bindings with\n    | [] -> []\n    | new_bindings ->\n      [{pstr_loc = empty_loc; pstr_desc = Pstr_value (rec_flag, new_bindings)}])\n  | _ -> [item]\n\nlet transform_signature_item ~config item =\n  match item with\n  | {\n      psig_loc;\n      psig_desc = Psig_value ({pval_attributes; pval_type} as psig_desc);\n    } as psig -> (\n    match List.filter Jsx_common.has_attr pval_attributes with\n    | [] -> [item]\n    | [_] ->\n      check_multiple_components ~config ~loc:psig_loc;\n      let pval_type = Jsx_common.extract_uncurried pval_type in\n      check_string_int_attribute_iter.signature_item\n        check_string_int_attribute_iter item;\n      let core_type_of_attr = Jsx_common.core_type_of_attrs pval_attributes in\n      let typ_vars_of_core_type =\n        core_type_of_attr\n        |> Option.map Jsx_common.typ_vars_of_core_type\n        |> Option.value ~default:[]\n      in\n      let rec get_prop_types types ({ptyp_loc; ptyp_desc} as full_type) =\n        match ptyp_desc with\n        | Ptyp_arrow\n            
( name,\n              ({ptyp_attributes = attrs} as type_),\n              ({ptyp_desc = Ptyp_arrow _} as rest) )\n          when is_optional name || is_labelled name ->\n          get_prop_types ((name, attrs, ptyp_loc, type_) :: types) rest\n        | Ptyp_arrow\n            (Nolabel, {ptyp_desc = Ptyp_constr ({txt = Lident \"unit\"}, _)}, rest)\n          ->\n          get_prop_types types rest\n        | Ptyp_arrow (Nolabel, _type, rest) -> get_prop_types types rest\n        | Ptyp_arrow (name, ({ptyp_attributes = attrs} as type_), return_value)\n          when is_optional name || is_labelled name ->\n          (return_value, (name, attrs, return_value.ptyp_loc, type_) :: types)\n        | _ -> (full_type, types)\n      in\n      let inner_type, prop_types = get_prop_types [] pval_type in\n      let named_type_list = List.fold_left arg_to_concrete_type [] prop_types in\n      let ret_props_type =\n        Typ.constr\n          (Location.mkloc (Lident \"props\") psig_loc)\n          (match core_type_of_attr with\n          | None -> make_props_type_params named_type_list\n          | Some _ -> (\n            match typ_vars_of_core_type with\n            | [] -> []\n            | _ -> [Typ.any ()]))\n      in\n      let props_record_type =\n        make_props_record_type_sig ~core_type_of_attr ~typ_vars_of_core_type\n          \"props\" psig_loc named_type_list\n      in\n      (* can't be an arrow because it will defensively uncurry *)\n      let new_external_type =\n        Ptyp_constr\n          ( {loc = psig_loc; txt = module_access_name config \"componentLike\"},\n            [ret_props_type; inner_type] )\n      in\n      let new_structure =\n        {\n          psig with\n          psig_desc =\n            Psig_value\n              {\n                psig_desc with\n                pval_type = {pval_type with ptyp_desc = new_external_type};\n                pval_attributes = List.filter other_attrs_pure pval_attributes;\n              };\n        }\n     
 in\n      [props_record_type; new_structure]\n    | _ ->\n      Jsx_common.raise_error ~loc:psig_loc\n        \"Only one JSX component call can exist on a component at one time\")\n  | _ -> [item]\n\nlet transform_jsx_call ~config mapper call_expression call_arguments\n    jsx_expr_loc attrs =\n  match call_expression.pexp_desc with\n  | Pexp_ident caller -> (\n    match caller with\n    | {txt = Lident \"createElement\"; loc} ->\n      Jsx_common.raise_error ~loc\n        \"JSX: `createElement` should be preceeded by a module name.\"\n    (* Foo.createElement(~prop1=foo, ~prop2=bar, ~children=[], ()) *)\n    | {loc; txt = Ldot (module_path, (\"createElement\" | \"make\"))} ->\n      transform_uppercase_call3 ~config module_path mapper jsx_expr_loc loc\n        attrs call_arguments\n    (* div(~prop1=foo, ~prop2=bar, ~children=[bla], ()) *)\n    (* turn that into\n       ReactDOM.createElement(~props=ReactDOM.props(~props1=foo, ~props2=bar, ()), [|bla|]) *)\n    | {loc; txt = Lident id} ->\n      transform_lowercase_call3 ~config mapper jsx_expr_loc loc attrs\n        call_arguments id\n    | {txt = Ldot (_, anything_not_create_element_or_make); loc} ->\n      Jsx_common.raise_error ~loc\n        \"JSX: the JSX attribute should be attached to a \\\n         `YourModuleName.createElement` or `YourModuleName.make` call. We saw \\\n         `%s` instead\"\n        anything_not_create_element_or_make\n    | {txt = Lapply _; loc} ->\n      (* don't think there's ever a case where this is reached *)\n      Jsx_common.raise_error ~loc\n        \"JSX: encountered a weird case while processing the code. Please \\\n         report this!\")\n  | _ ->\n    Jsx_common.raise_error ~loc:call_expression.pexp_loc\n      \"JSX: `createElement` should be preceeded by a simple, direct module \\\n       name.\"\n\nlet expr ~config mapper expression =\n  match expression with\n  (* Does the function application have the @JSX attribute? 
*)\n  | {\n   pexp_desc = Pexp_apply (call_expression, call_arguments);\n   pexp_attributes;\n   pexp_loc;\n  } -> (\n    let jsx_attribute, non_jsx_attributes =\n      List.partition\n        (fun (attribute, _) -> attribute.txt = \"JSX\")\n        pexp_attributes\n    in\n    match (jsx_attribute, non_jsx_attributes) with\n    (* no JSX attribute *)\n    | [], _ -> default_mapper.expr mapper expression\n    | _, non_jsx_attributes ->\n      transform_jsx_call ~config mapper call_expression call_arguments pexp_loc\n        non_jsx_attributes)\n  (* is it a list with jsx attribute? Reason <>foo</> desugars to [@JSX][foo]*)\n  | {\n      pexp_desc =\n        ( Pexp_construct\n            ({txt = Lident \"::\"; loc}, Some {pexp_desc = Pexp_tuple _})\n        | Pexp_construct ({txt = Lident \"[]\"; loc}, None) );\n      pexp_attributes;\n    } as list_items -> (\n    let jsx_attribute, non_jsx_attributes =\n      List.partition\n        (fun (attribute, _) -> attribute.txt = \"JSX\")\n        pexp_attributes\n    in\n    match (jsx_attribute, non_jsx_attributes) with\n    (* no JSX attribute *)\n    | [], _ -> default_mapper.expr mapper expression\n    | _, non_jsx_attributes ->\n      let loc = {loc with loc_ghost = true} in\n      let fragment =\n        match config.mode with\n        | \"automatic\" ->\n          Exp.ident ~loc {loc; txt = module_access_name config \"jsxFragment\"}\n        | \"classic\" | _ ->\n          Exp.ident ~loc {loc; txt = Ldot (Lident \"React\", \"fragment\")}\n      in\n      let children_expr = transform_children_if_list ~mapper list_items in\n      let record_of_children children =\n        Exp.record [(Location.mknoloc (Lident \"children\"), children)] None\n      in\n      let apply_jsx_array expr =\n        Exp.apply\n          (Exp.ident\n             {txt = module_access_name config \"array\"; loc = Location.none})\n          [(Nolabel, expr)]\n      in\n      let count_of_children = function\n        | {pexp_desc = Pexp_array 
children} -> List.length children\n        | _ -> 0\n      in\n      let transform_children_to_props children_expr =\n        match children_expr with\n        | {pexp_desc = Pexp_array children} -> (\n          match children with\n          | [] -> empty_record ~loc:Location.none\n          | [child] -> record_of_children child\n          | _ -> (\n            match config.mode with\n            | \"automatic\" -> record_of_children @@ apply_jsx_array children_expr\n            | \"classic\" | _ -> empty_record ~loc:Location.none))\n        | _ -> (\n          match config.mode with\n          | \"automatic\" -> record_of_children @@ apply_jsx_array children_expr\n          | \"classic\" | _ -> empty_record ~loc:Location.none)\n      in\n      let args =\n        (nolabel, fragment)\n        :: (nolabel, transform_children_to_props children_expr)\n        ::\n        (match config.mode with\n        | \"classic\" when count_of_children children_expr > 1 ->\n          [(nolabel, children_expr)]\n        | _ -> [])\n      in\n      Exp.apply\n        ~loc (* throw away the [@JSX] attribute and keep the others, if any *)\n        ~attrs:non_jsx_attributes\n        (* ReactDOM.createElement *)\n        (match config.mode with\n        | \"automatic\" ->\n          if count_of_children children_expr > 1 then\n            Exp.ident ~loc {loc; txt = module_access_name config \"jsxs\"}\n          else Exp.ident ~loc {loc; txt = module_access_name config \"jsx\"}\n        | \"classic\" | _ ->\n          if count_of_children children_expr > 1 then\n            Exp.ident ~loc\n              {loc; txt = Ldot (Lident \"React\", \"createElementVariadic\")}\n          else\n            Exp.ident ~loc {loc; txt = Ldot (Lident \"React\", \"createElement\")})\n        args)\n  (* Delegate to the default mapper, a deep identity traversal *)\n  | e -> default_mapper.expr mapper e\n\nlet module_binding ~(config : Jsx_common.jsx_config) mapper module_binding =\n  config.nested_modules 
<- module_binding.pmb_name.txt :: config.nested_modules;\n  let mapped = default_mapper.module_binding mapper module_binding in\n  let () =\n    match config.nested_modules with\n    | _ :: rest -> config.nested_modules <- rest\n    | [] -> ()\n  in\n  mapped\n\n(* TODO: some line number might still be wrong *)\nlet jsx_mapper ~config =\n  let expr = expr ~config in\n  let module_binding = module_binding ~config in\n  let transform_structure_item = transform_structure_item ~config in\n  let transform_signature_item = transform_signature_item ~config in\n  (expr, module_binding, transform_signature_item, transform_structure_item)\n"
  },
  {
    "path": "analysis/vendor/res_syntax/reactjs_jsx_v3.ml",
    "content": "open Ast_helper\nopen Ast_mapper\nopen Asttypes\nopen Parsetree\nopen Longident\n\nlet nolabel = Nolabel\n\nlet labelled str = Labelled str\n\nlet optional str = Optional str\n\nlet is_optional str =\n  match str with\n  | Optional _ -> true\n  | _ -> false\n\nlet is_labelled str =\n  match str with\n  | Labelled _ -> true\n  | _ -> false\n\nlet get_label str =\n  match str with\n  | Optional str | Labelled str -> str\n  | Nolabel -> \"\"\n\nlet option_ident = Lident \"option\"\n\nlet constant_string ~loc str =\n  Ast_helper.Exp.constant ~loc (Pconst_string (str, None))\n\nlet safe_type_from_value value_str =\n  let value_str = get_label value_str in\n  if value_str = \"\" || (value_str.[0] [@doesNotRaise]) <> '_' then value_str\n  else \"T\" ^ value_str\n\nlet key_type loc =\n  Typ.constr ~loc {loc; txt = option_ident}\n    [Typ.constr ~loc {loc; txt = Lident \"string\"} []]\n\ntype 'a children = ListLiteral of 'a | Exact of 'a\n\ntype component_config = {props_name: string}\n\n(* if children is a list, convert it to an array while mapping each element. 
If not, just map over it, as usual *)\nlet transform_children_if_list_upper ~loc ~mapper the_list =\n  let rec transformChildren_ the_list accum =\n    (* not in the sense of converting a list to an array; convert the AST\n       reprensentation of a list to the AST reprensentation of an array *)\n    match the_list with\n    | {pexp_desc = Pexp_construct ({txt = Lident \"[]\"}, None)} -> (\n      match accum with\n      | [single_element] -> Exact single_element\n      | accum -> ListLiteral (Exp.array ~loc (List.rev accum)))\n    | {\n     pexp_desc =\n       Pexp_construct\n         ({txt = Lident \"::\"}, Some {pexp_desc = Pexp_tuple [v; acc]});\n    } ->\n      transformChildren_ acc (mapper.expr mapper v :: accum)\n    | not_a_list -> Exact (mapper.expr mapper not_a_list)\n  in\n  transformChildren_ the_list []\n\nlet transform_children_if_list ~loc ~mapper the_list =\n  let rec transformChildren_ the_list accum =\n    (* not in the sense of converting a list to an array; convert the AST\n       reprensentation of a list to the AST reprensentation of an array *)\n    match the_list with\n    | {pexp_desc = Pexp_construct ({txt = Lident \"[]\"}, None)} ->\n      Exp.array ~loc (List.rev accum)\n    | {\n     pexp_desc =\n       Pexp_construct\n         ({txt = Lident \"::\"}, Some {pexp_desc = Pexp_tuple [v; acc]});\n    } ->\n      transformChildren_ acc (mapper.expr mapper v :: accum)\n    | not_a_list -> mapper.expr mapper not_a_list\n  in\n  transformChildren_ the_list []\n\nlet extract_children ?(remove_last_position_unit = false) ~loc\n    props_and_children =\n  let rec allButLast_ lst acc =\n    match lst with\n    | [] -> []\n    | [(Nolabel, {pexp_desc = Pexp_construct ({txt = Lident \"()\"}, None)})] ->\n      acc\n    | (Nolabel, {pexp_loc}) :: _rest ->\n      Jsx_common.raise_error ~loc:pexp_loc\n        \"JSX: found non-labelled argument before the last position\"\n    | arg :: rest -> allButLast_ rest (arg :: acc)\n  in\n  let all_but_last lst = 
allButLast_ lst [] |> List.rev in\n  match\n    List.partition\n      (fun (label, _) -> label = labelled \"children\")\n      props_and_children\n  with\n  | [], props ->\n    (* no children provided? Place a placeholder list *)\n    ( Exp.construct ~loc {loc; txt = Lident \"[]\"} None,\n      if remove_last_position_unit then all_but_last props else props )\n  | [(_, children_expr)], props ->\n    ( children_expr,\n      if remove_last_position_unit then all_but_last props else props )\n  | _ ->\n    Jsx_common.raise_error ~loc\n      \"JSX: somehow there's more than one `children` label\"\n\nlet unerasable_ignore loc =\n  ( {loc; txt = \"warning\"},\n    PStr [Str.eval (Exp.constant (Pconst_string (\"-16\", None)))] )\n\nlet merlin_focus = ({loc = Location.none; txt = \"merlin.focus\"}, PStr [])\n\n(* Helper method to filter out any attribute that isn't [@react.component] *)\nlet other_attrs_pure (loc, _) = loc.txt <> \"react.component\"\n\n(* Finds the name of the variable the binding is assigned to, otherwise raises Invalid_argument *)\nlet rec get_fn_name binding =\n  match binding with\n  | {ppat_desc = Ppat_var {txt}} -> txt\n  | {ppat_desc = Ppat_constraint (pat, _)} -> get_fn_name pat\n  | {ppat_loc} ->\n    Jsx_common.raise_error ~loc:ppat_loc\n      \"react.component calls cannot be destructured.\"\n\nlet make_new_binding binding expression new_name =\n  match binding with\n  | {pvb_pat = {ppat_desc = Ppat_var ppat_var} as pvb_pat} ->\n    {\n      binding with\n      pvb_pat =\n        {pvb_pat with ppat_desc = Ppat_var {ppat_var with txt = new_name}};\n      pvb_expr = expression;\n      pvb_attributes = [merlin_focus];\n    }\n  | {pvb_loc} ->\n    Jsx_common.raise_error ~loc:pvb_loc\n      \"react.component calls cannot be destructured.\"\n\n(* Lookup the value of `props` otherwise raise Invalid_argument error *)\nlet get_props_name_value _acc (loc, exp) =\n  match (loc, exp) with\n  | {txt = Lident \"props\"}, {pexp_desc = Pexp_ident {txt = Lident 
str}} ->\n    {props_name = str}\n  | {txt; loc}, _ ->\n    Jsx_common.raise_error ~loc\n      \"react.component only accepts props as an option, given: { %s }\"\n      (Longident.last txt)\n\n(* Lookup the `props` record or string as part of [@react.component] and store the name for use when rewriting *)\nlet get_props_attr payload =\n  let default_props = {props_name = \"Props\"} in\n  match payload with\n  | Some\n      (PStr\n         ({\n            pstr_desc =\n              Pstr_eval ({pexp_desc = Pexp_record (record_fields, None)}, _);\n          }\n         :: _rest)) ->\n    List.fold_left get_props_name_value default_props record_fields\n  | Some\n      (PStr\n         ({\n            pstr_desc =\n              Pstr_eval ({pexp_desc = Pexp_ident {txt = Lident \"props\"}}, _);\n          }\n         :: _rest)) ->\n    {props_name = \"props\"}\n  | Some (PStr ({pstr_desc = Pstr_eval (_, _); pstr_loc} :: _rest)) ->\n    Jsx_common.raise_error ~loc:pstr_loc\n      \"react.component accepts a record config with props as an options.\"\n  | _ -> default_props\n\n(* Plucks the label, loc, and type_ from an AST node *)\nlet pluck_label_default_loc_type (label, default, _, _, loc, type_) =\n  (label, default, loc, type_)\n\n(* Lookup the filename from the location information on the AST node and turn it into a valid module identifier *)\nlet filename_from_loc (pstr_loc : Location.t) =\n  let file_name =\n    match pstr_loc.loc_start.pos_fname with\n    | \"\" -> !Location.input_name\n    | file_name -> file_name\n  in\n  let file_name =\n    try Filename.chop_extension (Filename.basename file_name)\n    with Invalid_argument _ -> file_name\n  in\n  let file_name = String.capitalize_ascii file_name in\n  file_name\n\n(* Build a string representation of a module name with segments separated by $ *)\nlet make_module_name file_name nested_modules fn_name =\n  let full_module_name =\n    match (file_name, nested_modules, fn_name) with\n    (* TODO: is this even 
reachable? It seems like the fileName always exists *)\n    | \"\", nested_modules, \"make\" -> nested_modules\n    | \"\", nested_modules, fn_name -> List.rev (fn_name :: nested_modules)\n    | file_name, nested_modules, \"make\" -> file_name :: List.rev nested_modules\n    | file_name, nested_modules, fn_name ->\n      file_name :: List.rev (fn_name :: nested_modules)\n  in\n  let full_module_name = String.concat \"$\" full_module_name in\n  full_module_name\n\n(*\n  AST node builders\n  These functions help us build AST nodes that are needed when transforming a [@react.component] into a\n  constructor and a props external\n*)\n\n(* Build an AST node representing all named args for the `external` definition for a component's props *)\nlet rec recursively_make_named_args_for_external list args =\n  match list with\n  | (label, default, loc, interior_type) :: tl ->\n    recursively_make_named_args_for_external tl\n      (Typ.arrow ~loc label\n         (match (label, interior_type, default) with\n         (* ~foo=1 *)\n         | label, None, Some _ ->\n           {\n             ptyp_desc = Ptyp_var (safe_type_from_value label);\n             ptyp_loc = loc;\n             ptyp_attributes = [];\n           }\n         (* ~foo: int=1 *)\n         | _label, Some type_, Some _ -> type_\n         (* ~foo: option<int>=? *)\n         | ( label,\n             Some {ptyp_desc = Ptyp_constr ({txt = Lident \"option\"}, [type_])},\n             _ )\n         | ( label,\n             Some\n               {\n                 ptyp_desc =\n                   Ptyp_constr\n                     ({txt = Ldot (Lident \"*predef*\", \"option\")}, [type_]);\n               },\n             _ )\n         (* ~foo: int=? - note this isnt valid. but we want to get a type error *)\n         | label, Some type_, _\n           when is_optional label ->\n           type_\n         (* ~foo=? 
*)\n         | label, None, _ when is_optional label ->\n           {\n             ptyp_desc = Ptyp_var (safe_type_from_value label);\n             ptyp_loc = loc;\n             ptyp_attributes = [];\n           }\n         (* ~foo *)\n         | label, None, _ ->\n           {\n             ptyp_desc = Ptyp_var (safe_type_from_value label);\n             ptyp_loc = loc;\n             ptyp_attributes = [];\n           }\n         | _label, Some type_, _ -> type_)\n         args)\n  | [] -> args\n\n(* Build an AST node for the [@obj] representing props for a component *)\nlet make_props_value fn_name loc named_arg_list_with_key_and_ref props_type =\n  let props_name = fn_name ^ \"Props\" in\n  {\n    pval_name = {txt = props_name; loc};\n    pval_type =\n      recursively_make_named_args_for_external named_arg_list_with_key_and_ref\n        (Typ.arrow nolabel\n           {\n             ptyp_desc = Ptyp_constr ({txt = Lident \"unit\"; loc}, []);\n             ptyp_loc = loc;\n             ptyp_attributes = [];\n           }\n           props_type);\n    pval_prim = [\"\"];\n    pval_attributes = [({txt = \"obj\"; loc}, PStr [])];\n    pval_loc = loc;\n  }\n\n(* Build an AST node representing an `external` with the definition of the [@obj] *)\nlet make_props_external fn_name loc named_arg_list_with_key_and_ref props_type =\n  {\n    pstr_loc = loc;\n    pstr_desc =\n      Pstr_primitive\n        (make_props_value fn_name loc named_arg_list_with_key_and_ref props_type);\n  }\n\n(* Build an AST node for the signature of the `external` definition *)\nlet make_props_external_sig fn_name loc named_arg_list_with_key_and_ref\n    props_type =\n  {\n    psig_loc = loc;\n    psig_desc =\n      Psig_value\n        (make_props_value fn_name loc named_arg_list_with_key_and_ref props_type);\n  }\n\n(* Build an AST node for the props name when converted to an object inside the function signature  *)\nlet make_props_name ~loc name =\n  {ppat_desc = Ppat_var {txt = name; loc}; 
ppat_loc = loc; ppat_attributes = []}\n\nlet make_object_field loc (str, attrs, type_) =\n  Otag ({loc; txt = str}, attrs, type_)\n\n(* Build an AST node representing a \"closed\" object representing a component's props *)\nlet make_props_type ~loc named_type_list =\n  Typ.mk ~loc\n    (Ptyp_object (List.map (make_object_field loc) named_type_list, Closed))\n\n(* Builds an AST node for the entire `external` definition of props *)\nlet make_external_decl fn_name loc named_arg_list_with_key_and_ref\n    named_type_list =\n  make_props_external fn_name loc\n    (List.map pluck_label_default_loc_type named_arg_list_with_key_and_ref)\n    (make_props_type ~loc named_type_list)\n\nlet newtype_to_var newtype type_ =\n  let var_desc = Ptyp_var (\"type-\" ^ newtype) in\n  let typ (mapper : Ast_mapper.mapper) typ =\n    match typ.ptyp_desc with\n    | Ptyp_constr ({txt = Lident name}, _) when name = newtype ->\n      {typ with ptyp_desc = var_desc}\n    | _ -> Ast_mapper.default_mapper.typ mapper typ\n  in\n  let mapper = {Ast_mapper.default_mapper with typ} in\n  mapper.typ mapper type_\n\n(* TODO: some line number might still be wrong *)\nlet jsx_mapper ~config =\n  let transform_uppercase_call3 module_path mapper loc attrs _ call_arguments =\n    let children, args_with_labels =\n      extract_children ~loc ~remove_last_position_unit:true call_arguments\n    in\n    let args_for_make = args_with_labels in\n    let children_expr =\n      transform_children_if_list_upper ~loc ~mapper children\n    in\n    let recursively_transformed_args_for_make =\n      args_for_make\n      |> List.map (fun (label, expression) ->\n             (label, mapper.expr mapper expression))\n    in\n    let children_arg = ref None in\n    let args =\n      recursively_transformed_args_for_make\n      @ (match children_expr with\n        | Exact children -> [(labelled \"children\", children)]\n        | ListLiteral {pexp_desc = Pexp_array list} when list = [] -> []\n        | ListLiteral 
expression ->\n          (* this is a hack to support react components that introspect into their children *)\n          children_arg := Some expression;\n          [\n            ( labelled \"children\",\n              Exp.ident ~loc {loc; txt = Ldot (Lident \"React\", \"null\")} );\n          ])\n      @ [(nolabel, Exp.construct ~loc {loc; txt = Lident \"()\"} None)]\n    in\n    let is_cap str = String.capitalize_ascii str = str in\n    let ident =\n      match module_path with\n      | Lident _ -> Ldot (module_path, \"make\")\n      | Ldot (_modulePath, value) as full_path when is_cap value ->\n        Ldot (full_path, \"make\")\n      | module_path -> module_path\n    in\n    let props_ident =\n      match ident with\n      | Lident path -> Lident (path ^ \"Props\")\n      | Ldot (ident, path) -> Ldot (ident, path ^ \"Props\")\n      | _ ->\n        Jsx_common.raise_error ~loc\n          \"JSX name can't be the result of function applications\"\n    in\n    let props =\n      Exp.apply ~attrs ~loc (Exp.ident ~loc {loc; txt = props_ident}) args\n    in\n    (* handle key, ref, children *)\n    (* React.createElement(Component.make, props, ...children) *)\n    match !children_arg with\n    | None ->\n      Exp.apply ~loc ~attrs\n        (Exp.ident ~loc {loc; txt = Ldot (Lident \"React\", \"createElement\")})\n        [(nolabel, Exp.ident ~loc {txt = ident; loc}); (nolabel, props)]\n    | Some children ->\n      Exp.apply ~loc ~attrs\n        (Exp.ident ~loc\n           {loc; txt = Ldot (Lident \"React\", \"createElementVariadic\")})\n        [\n          (nolabel, Exp.ident ~loc {txt = ident; loc});\n          (nolabel, props);\n          (nolabel, children);\n        ]\n  in\n\n  let transform_lowercase_call3 mapper loc attrs call_arguments id =\n    let children, non_children_props = extract_children ~loc call_arguments in\n    let component_name_expr = constant_string ~loc id in\n    let children_expr = transform_children_if_list ~loc ~mapper children in\n    
let create_element_call =\n      match children with\n      (* [@JSX] div(~children=[a]), coming from <div> a </div> *)\n      | {\n       pexp_desc =\n         ( Pexp_construct ({txt = Lident \"::\"}, Some {pexp_desc = Pexp_tuple _})\n         | Pexp_construct ({txt = Lident \"[]\"}, None) );\n      } ->\n        \"createDOMElementVariadic\"\n      (* [@JSX] div(~children= value), coming from <div> ...(value) </div> *)\n      | {pexp_loc} ->\n        Jsx_common.raise_error ~loc:pexp_loc\n          \"A spread as a DOM element's children don't make sense written \\\n           together. You can simply remove the spread.\"\n    in\n    let args =\n      match non_children_props with\n      | [_justTheUnitArgumentAtEnd] ->\n        [\n          (* \"div\" *)\n          (nolabel, component_name_expr);\n          (* [|moreCreateElementCallsHere|] *)\n          (nolabel, children_expr);\n        ]\n      | non_empty_props ->\n        let props_call =\n          Exp.apply ~loc\n            (Exp.ident ~loc {loc; txt = Ldot (Lident \"ReactDOMRe\", \"domProps\")})\n            (non_empty_props\n            |> List.map (fun (label, expression) ->\n                   (label, mapper.expr mapper expression)))\n        in\n        [\n          (* \"div\" *)\n          (nolabel, component_name_expr);\n          (* ReactDOMRe.props(~className=blabla, ~foo=bar, ()) *)\n          (labelled \"props\", props_call);\n          (* [|moreCreateElementCallsHere|] *)\n          (nolabel, children_expr);\n        ]\n    in\n    Exp.apply\n      ~loc (* throw away the [@JSX] attribute and keep the others, if any *)\n      ~attrs\n      (* ReactDOMRe.createElement *)\n      (Exp.ident ~loc\n         {loc; txt = Ldot (Lident \"ReactDOMRe\", create_element_call)})\n      args\n  in\n\n  let rec recursively_transform_named_args_for_make expr args newtypes =\n    match expr.pexp_desc with\n    (* TODO: make this show up with a loc. 
*)\n    | Pexp_fun (Labelled \"key\", _, _, _) | Pexp_fun (Optional \"key\", _, _, _) ->\n      Jsx_common.raise_error ~loc:expr.pexp_loc\n        \"Key cannot be accessed inside of a component. Don't worry - you can \\\n         always key a component from its parent!\"\n    | Pexp_fun (Labelled \"ref\", _, _, _) | Pexp_fun (Optional \"ref\", _, _, _) ->\n      Jsx_common.raise_error ~loc:expr.pexp_loc\n        \"Ref cannot be passed as a normal prop. Either give the prop a \\\n         different name or use the `forwardRef` API instead.\"\n    | Pexp_fun (arg, default, pattern, expression)\n      when is_optional arg || is_labelled arg ->\n      let () =\n        match (is_optional arg, pattern, default) with\n        | true, {ppat_desc = Ppat_constraint (_, {ptyp_desc})}, None -> (\n          match ptyp_desc with\n          | Ptyp_constr ({txt = Lident \"option\"}, [_]) -> ()\n          | _ ->\n            let current_type =\n              match ptyp_desc with\n              | Ptyp_constr ({txt}, []) ->\n                String.concat \".\" (Longident.flatten txt)\n              | Ptyp_constr ({txt}, _innerTypeArgs) ->\n                String.concat \".\" (Longident.flatten txt) ^ \"(...)\"\n              | _ -> \"...\"\n            in\n            Location.prerr_warning pattern.ppat_loc\n              (Preprocessor\n                 (Printf.sprintf\n                    \"React: optional argument annotations must have explicit \\\n                     `option`. 
Did you mean `option<%s>=?`?\"\n                    current_type)))\n        | _ -> ()\n      in\n      let alias =\n        match pattern with\n        | {ppat_desc = Ppat_alias (_, {txt}) | Ppat_var {txt}} -> txt\n        | {ppat_desc = Ppat_any} -> \"_\"\n        | _ -> get_label arg\n      in\n      let type_ =\n        match pattern with\n        | {ppat_desc = Ppat_constraint (_, type_)} -> Some type_\n        | _ -> None\n      in\n\n      recursively_transform_named_args_for_make expression\n        ((arg, default, pattern, alias, pattern.ppat_loc, type_) :: args)\n        newtypes\n    | Pexp_fun\n        ( Nolabel,\n          _,\n          {ppat_desc = Ppat_construct ({txt = Lident \"()\"}, _) | Ppat_any},\n          _expression ) ->\n      (args, newtypes, None)\n    | Pexp_fun\n        ( Nolabel,\n          _,\n          {\n            ppat_desc =\n              Ppat_var {txt} | Ppat_constraint ({ppat_desc = Ppat_var {txt}}, _);\n          },\n          _expression ) ->\n      (args, newtypes, Some txt)\n    | Pexp_fun (Nolabel, _, pattern, _expression) ->\n      Location.raise_errorf ~loc:pattern.ppat_loc\n        \"React: react.component refs only support plain arguments and type \\\n         annotations.\"\n    | Pexp_newtype (label, expression) ->\n      recursively_transform_named_args_for_make expression args\n        (label :: newtypes)\n    | Pexp_constraint (expression, _typ) ->\n      recursively_transform_named_args_for_make expression args newtypes\n    | _ -> (args, newtypes, None)\n  in\n\n  let arg_to_type types (name, default, _noLabelName, _alias, loc, type_) =\n    match (type_, name, default) with\n    | Some {ptyp_desc = Ptyp_constr ({txt = Lident \"option\"}, [type_])}, name, _\n      when is_optional name ->\n      ( get_label name,\n        [],\n        {\n          type_ with\n          ptyp_desc =\n            Ptyp_constr ({loc = type_.ptyp_loc; txt = option_ident}, [type_]);\n        } )\n      :: types\n    | Some type_, name, 
Some _default ->\n      ( get_label name,\n        [],\n        {\n          ptyp_desc = Ptyp_constr ({loc; txt = option_ident}, [type_]);\n          ptyp_loc = loc;\n          ptyp_attributes = [];\n        } )\n      :: types\n    | Some type_, name, _ -> (get_label name, [], type_) :: types\n    | None, name, _ when is_optional name ->\n      ( get_label name,\n        [],\n        {\n          ptyp_desc =\n            Ptyp_constr\n              ( {loc; txt = option_ident},\n                [\n                  {\n                    ptyp_desc = Ptyp_var (safe_type_from_value name);\n                    ptyp_loc = loc;\n                    ptyp_attributes = [];\n                  };\n                ] );\n          ptyp_loc = loc;\n          ptyp_attributes = [];\n        } )\n      :: types\n    | None, name, _ when is_labelled name ->\n      ( get_label name,\n        [],\n        {\n          ptyp_desc = Ptyp_var (safe_type_from_value name);\n          ptyp_loc = loc;\n          ptyp_attributes = [];\n        } )\n      :: types\n    | _ -> types\n  in\n\n  let arg_to_concrete_type types (name, loc, type_) =\n    match name with\n    | name when is_labelled name -> (get_label name, [], type_) :: types\n    | name when is_optional name ->\n      (get_label name, [], Typ.constr ~loc {loc; txt = option_ident} [type_])\n      :: types\n    | _ -> types\n  in\n\n  let nested_modules = ref [] in\n  let transform_structure_item item =\n    match item with\n    (* external *)\n    | {\n        pstr_loc;\n        pstr_desc =\n          Pstr_primitive\n            ({pval_name = {txt = fn_name}; pval_attributes; pval_type} as\n             value_description);\n      } as pstr -> (\n      match List.filter Jsx_common.has_attr pval_attributes with\n      | [] -> [item]\n      | [_] ->\n        let pval_type = Jsx_common.extract_uncurried pval_type in\n        let rec get_prop_types types ({ptyp_loc; ptyp_desc} as full_type) =\n          match ptyp_desc with\n          | 
Ptyp_arrow (name, type_, ({ptyp_desc = Ptyp_arrow _} as rest))\n            when is_labelled name || is_optional name ->\n            get_prop_types ((name, ptyp_loc, type_) :: types) rest\n          | Ptyp_arrow (Nolabel, _type, rest) -> get_prop_types types rest\n          | Ptyp_arrow (name, type_, return_value)\n            when is_labelled name || is_optional name ->\n            (return_value, (name, return_value.ptyp_loc, type_) :: types)\n          | _ -> (full_type, types)\n        in\n        let inner_type, prop_types = get_prop_types [] pval_type in\n        let named_type_list =\n          List.fold_left arg_to_concrete_type [] prop_types\n        in\n        let pluck_label_and_loc (label, loc, type_) =\n          (label, None (* default *), loc, Some type_)\n        in\n        let ret_props_type = make_props_type ~loc:pstr_loc named_type_list in\n        let external_props_decl =\n          make_props_external fn_name pstr_loc\n            ((optional \"key\", None, pstr_loc, Some (key_type pstr_loc))\n            :: List.map pluck_label_and_loc prop_types)\n            ret_props_type\n        in\n        (* can't be an arrow because it will defensively uncurry *)\n        let new_external_type =\n          Ptyp_constr\n            ( {loc = pstr_loc; txt = Ldot (Lident \"React\", \"componentLike\")},\n              [ret_props_type; inner_type] )\n        in\n        let new_structure =\n          {\n            pstr with\n            pstr_desc =\n              Pstr_primitive\n                {\n                  value_description with\n                  pval_type = {pval_type with ptyp_desc = new_external_type};\n                  pval_attributes = List.filter other_attrs_pure pval_attributes;\n                };\n          }\n        in\n        [external_props_decl; new_structure]\n      | _ ->\n        Jsx_common.raise_error ~loc:pstr_loc\n          \"Only one react.component call can exist on a component at one time\")\n    (* let component = ... 
*)\n    | {pstr_loc; pstr_desc = Pstr_value (rec_flag, value_bindings)} -> (\n      let file_name = filename_from_loc pstr_loc in\n      let empty_loc = Location.in_file file_name in\n      let map_binding binding =\n        if Jsx_common.has_attr_on_binding binding then\n          let binding = Jsx_common.remove_arity binding in\n          let binding_loc = binding.pvb_loc in\n          let binding_pat_loc = binding.pvb_pat.ppat_loc in\n          let binding =\n            {\n              binding with\n              pvb_pat = {binding.pvb_pat with ppat_loc = empty_loc};\n              pvb_loc = empty_loc;\n            }\n          in\n          let fn_name = get_fn_name binding.pvb_pat in\n          let internal_fn_name = fn_name ^ \"$Internal\" in\n          let full_module_name =\n            make_module_name file_name !nested_modules fn_name\n          in\n          let modified_binding_old binding =\n            let expression = binding.pvb_expr in\n            (* TODO: there is a long-tail of unsupported features inside of blocks - Pexp_letmodule , Pexp_letexception , Pexp_ifthenelse *)\n            let rec spelunk_for_fun_expression expression =\n              match expression with\n              (* let make = (~prop) => ... *)\n              | {pexp_desc = Pexp_fun _} | {pexp_desc = Pexp_newtype _} ->\n                expression\n              (* let make = {let foo = bar in (~prop) => ...} *)\n              | {pexp_desc = Pexp_let (_recursive, _vbs, return_expression)} ->\n                (* here's where we spelunk! *)\n                spelunk_for_fun_expression return_expression\n              (* let make = React.forwardRef((~prop) => ...) 
*)\n              | {\n               pexp_desc =\n                 Pexp_apply\n                   (_wrapperExpression, [(Nolabel, inner_function_expression)]);\n              } ->\n                spelunk_for_fun_expression inner_function_expression\n              | {\n               pexp_desc =\n                 Pexp_sequence (_wrapperExpression, inner_function_expression);\n              } ->\n                spelunk_for_fun_expression inner_function_expression\n              | {pexp_desc = Pexp_constraint (inner_function_expression, _typ)}\n                ->\n                spelunk_for_fun_expression inner_function_expression\n              | {pexp_loc} ->\n                Jsx_common.raise_error ~loc:pexp_loc\n                  \"react.component calls can only be on function definitions \\\n                   or component wrappers (forwardRef, memo).\"\n            in\n            spelunk_for_fun_expression expression\n          in\n          let modified_binding binding =\n            let has_application = ref false in\n            let wrap_expression_with_binding expression_fn expression =\n              Vb.mk ~loc:binding_loc\n                ~attrs:(List.filter other_attrs_pure binding.pvb_attributes)\n                (Pat.var ~loc:binding_pat_loc\n                   {loc = binding_pat_loc; txt = fn_name})\n                (expression_fn expression)\n            in\n            let expression = binding.pvb_expr in\n            let unerasable_ignore_exp exp =\n              {\n                exp with\n                pexp_attributes =\n                  unerasable_ignore empty_loc :: exp.pexp_attributes;\n              }\n            in\n            (* TODO: there is a long-tail of unsupported features inside of blocks - Pexp_letmodule , Pexp_letexception , Pexp_ifthenelse *)\n            let rec spelunk_for_fun_expression expression =\n              match expression with\n              (* let make = (~prop) => ... 
with no final unit *)\n              | {\n               pexp_desc =\n                 Pexp_fun\n                   ( ((Labelled _ | Optional _) as label),\n                     default,\n                     pattern,\n                     ({pexp_desc = Pexp_fun _} as internal_expression) );\n              } ->\n                let wrap, has_unit, exp =\n                  spelunk_for_fun_expression internal_expression\n                in\n                ( wrap,\n                  has_unit,\n                  unerasable_ignore_exp\n                    {\n                      expression with\n                      pexp_desc = Pexp_fun (label, default, pattern, exp);\n                    } )\n              (* let make = (()) => ... *)\n              (* let make = (_) => ... *)\n              | {\n               pexp_desc =\n                 Pexp_fun\n                   ( Nolabel,\n                     _default,\n                     {\n                       ppat_desc =\n                         Ppat_construct ({txt = Lident \"()\"}, _) | Ppat_any;\n                     },\n                     _internalExpression );\n              } ->\n                ((fun a -> a), true, expression)\n              (* let make = (~prop) => ... *)\n              | {\n               pexp_desc =\n                 Pexp_fun\n                   ( (Labelled _ | Optional _),\n                     _default,\n                     _pattern,\n                     _internalExpression );\n              } ->\n                ((fun a -> a), false, unerasable_ignore_exp expression)\n              (* let make = (prop) => ... 
*)\n              | {\n               pexp_desc =\n                 Pexp_fun (_nolabel, _default, pattern, _internalExpression);\n              } ->\n                if has_application.contents then\n                  ((fun a -> a), false, unerasable_ignore_exp expression)\n                else\n                  Location.raise_errorf ~loc:pattern.ppat_loc\n                    \"React: props need to be labelled arguments.\\n\\\n                    \\  If you are working with refs be sure to wrap with \\\n                     React.forwardRef.\\n\\\n                    \\  If your component doesn't have any props use () or _ \\\n                     instead of a name.\"\n              (* let make = {let foo = bar in (~prop) => ...} *)\n              | {pexp_desc = Pexp_let (recursive, vbs, internal_expression)} ->\n                (* here's where we spelunk! *)\n                let wrap, has_unit, exp =\n                  spelunk_for_fun_expression internal_expression\n                in\n                ( wrap,\n                  has_unit,\n                  {expression with pexp_desc = Pexp_let (recursive, vbs, exp)}\n                )\n              (* let make = React.forwardRef((~prop) => ...) 
*)\n              | {\n               pexp_desc =\n                 Pexp_apply\n                   (wrapper_expression, [(Nolabel, internal_expression)]);\n              } ->\n                let () = has_application := true in\n                let _, has_unit, exp =\n                  spelunk_for_fun_expression internal_expression\n                in\n                ( (fun exp -> Exp.apply wrapper_expression [(nolabel, exp)]),\n                  has_unit,\n                  exp )\n              | {\n               pexp_desc =\n                 Pexp_sequence (wrapper_expression, internal_expression);\n              } ->\n                let wrap, has_unit, exp =\n                  spelunk_for_fun_expression internal_expression\n                in\n                ( wrap,\n                  has_unit,\n                  {\n                    expression with\n                    pexp_desc = Pexp_sequence (wrapper_expression, exp);\n                  } )\n              | e -> ((fun a -> a), false, e)\n            in\n            let wrap_expression, has_unit, expression =\n              spelunk_for_fun_expression expression\n            in\n            (wrap_expression_with_binding wrap_expression, has_unit, expression)\n          in\n          let binding_wrapper, has_unit, expression =\n            modified_binding binding\n          in\n          let react_component_attribute =\n            try Some (List.find Jsx_common.has_attr binding.pvb_attributes)\n            with Not_found -> None\n          in\n          let _attr_loc, payload =\n            match react_component_attribute with\n            | Some (loc, payload) -> (loc.loc, Some payload)\n            | None -> (empty_loc, None)\n          in\n          let props = get_props_attr payload in\n          (* do stuff here! 
*)\n          let named_arg_list, newtypes, forward_ref =\n            recursively_transform_named_args_for_make\n              (modified_binding_old binding)\n              [] []\n          in\n          let named_arg_list_with_key_and_ref =\n            ( optional \"key\",\n              None,\n              Pat.var {txt = \"key\"; loc = empty_loc},\n              \"key\",\n              empty_loc,\n              Some (key_type empty_loc) )\n            :: named_arg_list\n          in\n          let named_arg_list_with_key_and_ref =\n            match forward_ref with\n            | Some _ ->\n              ( optional \"ref\",\n                None,\n                Pat.var {txt = \"key\"; loc = empty_loc},\n                \"ref\",\n                empty_loc,\n                None )\n              :: named_arg_list_with_key_and_ref\n            | None -> named_arg_list_with_key_and_ref\n          in\n          let named_arg_list_with_key_and_ref_for_new =\n            match forward_ref with\n            | Some txt ->\n              named_arg_list\n              @ [\n                  ( nolabel,\n                    None,\n                    Pat.var {txt; loc = empty_loc},\n                    txt,\n                    empty_loc,\n                    None );\n                ]\n            | None -> named_arg_list\n          in\n          let pluck_arg (label, _, _, alias, loc, _) =\n            let label_string =\n              match label with\n              | label when is_optional label || is_labelled label ->\n                get_label label\n              | _ -> \"\"\n            in\n            ( label,\n              match label_string with\n              | \"\" -> Exp.ident ~loc {txt = Lident alias; loc}\n              | label_string ->\n                Exp.apply ~loc\n                  (Exp.ident ~loc {txt = Lident \"##\"; loc})\n                  [\n                    ( nolabel,\n                      Exp.ident ~loc {txt = Lident props.props_name; 
loc} );\n                    (nolabel, Exp.ident ~loc {txt = Lident label_string; loc});\n                  ] )\n          in\n          let named_type_list = List.fold_left arg_to_type [] named_arg_list in\n          let loc = empty_loc in\n          let external_args =\n            (* translate newtypes to type variables *)\n            List.fold_left\n              (fun args newtype ->\n                List.map\n                  (fun (a, b, c, d, e, maybe_typ) ->\n                    match maybe_typ with\n                    | Some typ ->\n                      (a, b, c, d, e, Some (newtype_to_var newtype.txt typ))\n                    | None -> (a, b, c, d, e, None))\n                  args)\n              named_arg_list_with_key_and_ref newtypes\n          in\n          let external_types =\n            (* translate newtypes to type variables *)\n            List.fold_left\n              (fun args newtype ->\n                List.map\n                  (fun (a, b, typ) -> (a, b, newtype_to_var newtype.txt typ))\n                  args)\n              named_type_list newtypes\n          in\n          let external_decl =\n            make_external_decl fn_name loc external_args external_types\n          in\n          let inner_expression_args =\n            List.map pluck_arg named_arg_list_with_key_and_ref_for_new\n            @\n            if has_unit then\n              [(Nolabel, Exp.construct {loc; txt = Lident \"()\"} None)]\n            else []\n          in\n          let inner_expression =\n            Exp.apply\n              (Exp.ident\n                 {\n                   loc;\n                   txt =\n                     Lident\n                       (match rec_flag with\n                       | Recursive -> internal_fn_name\n                       | Nonrecursive -> fn_name);\n                 })\n              inner_expression_args\n          in\n          let inner_expression_with_ref =\n            match forward_ref with\n            | 
Some txt ->\n              {\n                inner_expression with\n                pexp_desc =\n                  Pexp_fun\n                    ( nolabel,\n                      None,\n                      {\n                        ppat_desc = Ppat_var {txt; loc = empty_loc};\n                        ppat_loc = empty_loc;\n                        ppat_attributes = [];\n                      },\n                      inner_expression );\n              }\n            | None -> inner_expression\n          in\n          let full_expression =\n            Exp.fun_ nolabel None\n              {\n                ppat_desc =\n                  Ppat_constraint\n                    ( make_props_name ~loc:empty_loc props.props_name,\n                      make_props_type ~loc:empty_loc external_types );\n                ppat_loc = empty_loc;\n                ppat_attributes = [];\n              }\n              inner_expression_with_ref\n          in\n          let full_expression =\n            if !Config.uncurried = Uncurried then\n              full_expression\n              |> Ast_uncurried.uncurried_fun ~loc:full_expression.pexp_loc\n                   ~arity:1\n            else full_expression\n          in\n          let full_expression =\n            match full_module_name with\n            | \"\" -> full_expression\n            | txt ->\n              Exp.let_ Nonrecursive\n                [\n                  Vb.mk ~loc:empty_loc\n                    (Pat.var ~loc:empty_loc {loc = empty_loc; txt})\n                    full_expression;\n                ]\n                (Exp.ident ~loc:empty_loc {loc = empty_loc; txt = Lident txt})\n          in\n          let bindings, new_binding =\n            match rec_flag with\n            | Recursive ->\n              ( [\n                  binding_wrapper\n                    (Exp.let_ ~loc:empty_loc Recursive\n                       [\n                         make_new_binding binding expression internal_fn_name;\n      
                   Vb.mk\n                           (Pat.var {loc = empty_loc; txt = fn_name})\n                           full_expression;\n                       ]\n                       (Exp.ident {loc = empty_loc; txt = Lident fn_name}));\n                ],\n                None )\n            | Nonrecursive ->\n              ( [{binding with pvb_expr = expression}],\n                Some (binding_wrapper full_expression) )\n          in\n          (Some external_decl, bindings, new_binding)\n        else (None, [binding], None)\n      in\n      let structures_and_binding = List.map map_binding value_bindings in\n      let other_structures (extern, binding, new_binding)\n          (externs, bindings, new_bindings) =\n        let externs =\n          match extern with\n          | Some extern -> extern :: externs\n          | None -> externs\n        in\n        let new_bindings =\n          match new_binding with\n          | Some new_binding -> new_binding :: new_bindings\n          | None -> new_bindings\n        in\n        (externs, binding @ bindings, new_bindings)\n      in\n      let externs, bindings, new_bindings =\n        List.fold_right other_structures structures_and_binding ([], [], [])\n      in\n      externs\n      @ [{pstr_loc; pstr_desc = Pstr_value (rec_flag, bindings)}]\n      @\n      match new_bindings with\n      | [] -> []\n      | new_bindings ->\n        [\n          {pstr_loc = empty_loc; pstr_desc = Pstr_value (rec_flag, new_bindings)};\n        ])\n    | _ -> [item]\n  in\n\n  let transform_signature_item item =\n    match item with\n    | {\n        psig_loc;\n        psig_desc =\n          Psig_value\n            ({pval_name = {txt = fn_name}; pval_attributes; pval_type} as\n             psig_desc);\n      } as psig -> (\n      match List.filter Jsx_common.has_attr pval_attributes with\n      | [] -> [item]\n      | [_] ->\n        let pval_type = Jsx_common.extract_uncurried pval_type in\n        let rec get_prop_types types 
({ptyp_loc; ptyp_desc} as full_type) =\n          match ptyp_desc with\n          | Ptyp_arrow (name, type_, ({ptyp_desc = Ptyp_arrow _} as rest))\n            when is_optional name || is_labelled name ->\n            get_prop_types ((name, ptyp_loc, type_) :: types) rest\n          | Ptyp_arrow (Nolabel, _type, rest) -> get_prop_types types rest\n          | Ptyp_arrow (name, type_, return_value)\n            when is_optional name || is_labelled name ->\n            (return_value, (name, return_value.ptyp_loc, type_) :: types)\n          | _ -> (full_type, types)\n        in\n        let inner_type, prop_types = get_prop_types [] pval_type in\n        let named_type_list =\n          List.fold_left arg_to_concrete_type [] prop_types\n        in\n        let pluck_label_and_loc (label, loc, type_) =\n          (label, None, loc, Some type_)\n        in\n        let ret_props_type = make_props_type ~loc:psig_loc named_type_list in\n        let external_props_decl =\n          make_props_external_sig fn_name psig_loc\n            ((optional \"key\", None, psig_loc, Some (key_type psig_loc))\n            :: List.map pluck_label_and_loc prop_types)\n            ret_props_type\n        in\n        (* can't be an arrow because it will defensively uncurry *)\n        let new_external_type =\n          Ptyp_constr\n            ( {loc = psig_loc; txt = Ldot (Lident \"React\", \"componentLike\")},\n              [ret_props_type; inner_type] )\n        in\n        let new_structure =\n          {\n            psig with\n            psig_desc =\n              Psig_value\n                {\n                  psig_desc with\n                  pval_type = {pval_type with ptyp_desc = new_external_type};\n                  pval_attributes = List.filter other_attrs_pure pval_attributes;\n                };\n          }\n        in\n        [external_props_decl; new_structure]\n      | _ ->\n        Jsx_common.raise_error ~loc:psig_loc\n          \"Only one react.component call can 
exist on a component at one time\")\n    | _ -> [item]\n  in\n\n  let transform_jsx_call mapper call_expression call_arguments attrs =\n    match call_expression.pexp_desc with\n    | Pexp_ident caller -> (\n      match caller with\n      | {txt = Lident \"createElement\"; loc} ->\n        Jsx_common.raise_error ~loc\n          \"JSX: `createElement` should be preceeded by a module name.\"\n      (* Foo.createElement(~prop1=foo, ~prop2=bar, ~children=[], ()) *)\n      | {loc; txt = Ldot (module_path, (\"createElement\" | \"make\"))} -> (\n        match config.Jsx_common.version with\n        | 3 ->\n          transform_uppercase_call3 module_path mapper loc attrs call_expression\n            call_arguments\n        | _ -> Jsx_common.raise_error ~loc \"JSX: the JSX version must be  3\")\n      (* div(~prop1=foo, ~prop2=bar, ~children=[bla], ()) *)\n      (* turn that into\n         ReactDOMRe.createElement(~props=ReactDOMRe.props(~props1=foo, ~props2=bar, ()), [|bla|]) *)\n      | {loc; txt = Lident id} -> (\n        match config.version with\n        | 3 -> transform_lowercase_call3 mapper loc attrs call_arguments id\n        | _ -> Jsx_common.raise_error ~loc \"JSX: the JSX version must be 3\")\n      | {txt = Ldot (_, anything_not_create_element_or_make); loc} ->\n        Jsx_common.raise_error ~loc\n          \"JSX: the JSX attribute should be attached to a \\\n           `YourModuleName.createElement` or `YourModuleName.make` call. We \\\n           saw `%s` instead\"\n          anything_not_create_element_or_make\n      | {txt = Lapply _; loc} ->\n        (* don't think there's ever a case where this is reached *)\n        Jsx_common.raise_error ~loc\n          \"JSX: encountered a weird case while processing the code. 
Please \\\n           report this!\")\n    | _ ->\n      Jsx_common.raise_error ~loc:call_expression.pexp_loc\n        \"JSX: `createElement` should be preceeded by a simple, direct module \\\n         name.\"\n  in\n\n  let expr mapper expression =\n    match expression with\n    (* Does the function application have the @JSX attribute? *)\n    | {\n     pexp_desc = Pexp_apply (call_expression, call_arguments);\n     pexp_attributes;\n    } -> (\n      let jsx_attribute, non_jsx_attributes =\n        List.partition\n          (fun (attribute, _) -> attribute.txt = \"JSX\")\n          pexp_attributes\n      in\n      match (jsx_attribute, non_jsx_attributes) with\n      (* no JSX attribute *)\n      | [], _ -> default_mapper.expr mapper expression\n      | _, non_jsx_attributes ->\n        transform_jsx_call mapper call_expression call_arguments\n          non_jsx_attributes)\n    (* is it a list with jsx attribute? Reason <>foo</> desugars to [@JSX][foo]*)\n    | {\n        pexp_desc =\n          ( Pexp_construct\n              ({txt = Lident \"::\"; loc}, Some {pexp_desc = Pexp_tuple _})\n          | Pexp_construct ({txt = Lident \"[]\"; loc}, None) );\n        pexp_attributes;\n      } as list_items -> (\n      let jsx_attribute, non_jsx_attributes =\n        List.partition\n          (fun (attribute, _) -> attribute.txt = \"JSX\")\n          pexp_attributes\n      in\n      match (jsx_attribute, non_jsx_attributes) with\n      (* no JSX attribute *)\n      | [], _ -> default_mapper.expr mapper expression\n      | _, non_jsx_attributes ->\n        let loc = {loc with loc_ghost = true} in\n        let fragment =\n          Exp.ident ~loc {loc; txt = Ldot (Lident \"ReasonReact\", \"fragment\")}\n        in\n        let children_expr =\n          transform_children_if_list ~loc ~mapper list_items\n        in\n        let args =\n          [\n            (* \"div\" *)\n            (nolabel, fragment);\n            (* [|moreCreateElementCallsHere|] *)\n            
(nolabel, children_expr);\n          ]\n        in\n        Exp.apply\n          ~loc (* throw away the [@JSX] attribute and keep the others, if any *)\n          ~attrs:non_jsx_attributes\n          (* ReactDOMRe.createElement *)\n          (Exp.ident ~loc\n             {loc; txt = Ldot (Lident \"ReactDOMRe\", \"createElement\")})\n          args)\n    (* Delegate to the default mapper, a deep identity traversal *)\n    | e -> default_mapper.expr mapper e\n  in\n\n  let module_binding mapper module_binding =\n    let _ = nested_modules := module_binding.pmb_name.txt :: !nested_modules in\n    let mapped = default_mapper.module_binding mapper module_binding in\n    let () =\n      match !nested_modules with\n      | _ :: rest -> nested_modules := rest\n      | [] -> ()\n    in\n    mapped\n  in\n  (expr, module_binding, transform_signature_item, transform_structure_item)\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_ast_conversion.ml",
    "content": "let concat_longidents l1 l2 =\n  let parts1 = Longident.flatten l1 in\n  let parts2 = Longident.flatten l2 in\n  match List.concat [parts1; parts2] |> Longident.unflatten with\n  | Some longident -> longident\n  | None -> l2\n\n(* TODO: support nested open's ? *)\nlet rec rewrite_ppat_open longident_open pat =\n  match pat.Parsetree.ppat_desc with\n  | Ppat_array (first :: rest) ->\n    (* Color.[Red, Blue, Green] -> [Color.Red, Blue, Green] *)\n    {\n      pat with\n      ppat_desc = Ppat_array (rewrite_ppat_open longident_open first :: rest);\n    }\n  | Ppat_tuple (first :: rest) ->\n    (* Color.(Red, Blue, Green) -> (Color.Red, Blue, Green) *)\n    {\n      pat with\n      ppat_desc = Ppat_tuple (rewrite_ppat_open longident_open first :: rest);\n    }\n  | Ppat_construct\n      ( ({txt = Longident.Lident \"::\"} as list_constructor),\n        Some ({ppat_desc = Ppat_tuple (pat :: rest)} as element) ) ->\n    (* Color.(list[Red, Blue, Green]) -> list[Color.Red, Blue, Green] *)\n    {\n      pat with\n      ppat_desc =\n        Ppat_construct\n          ( list_constructor,\n            Some\n              {\n                element with\n                ppat_desc =\n                  Ppat_tuple (rewrite_ppat_open longident_open pat :: rest);\n              } );\n    }\n  | Ppat_construct (({txt = constructor} as longident_loc), opt_pattern) ->\n    (* Foo.(Bar(a)) -> Foo.Bar(a) *)\n    {\n      pat with\n      ppat_desc =\n        Ppat_construct\n          ( {\n              longident_loc with\n              txt = concat_longidents longident_open constructor;\n            },\n            opt_pattern );\n    }\n  | Ppat_record ((({txt = lbl} as longident_loc), first_pat) :: rest, flag) ->\n    (* Foo.{x} -> {Foo.x: x} *)\n    let first_row =\n      ( {longident_loc with txt = concat_longidents longident_open lbl},\n        first_pat )\n    in\n    {pat with ppat_desc = Ppat_record (first_row :: rest, flag)}\n  | Ppat_or (pat1, pat2) ->\n    {\n   
   pat with\n      ppat_desc =\n        Ppat_or\n          ( rewrite_ppat_open longident_open pat1,\n            rewrite_ppat_open longident_open pat2 );\n    }\n  | Ppat_constraint (pattern, typ) ->\n    {\n      pat with\n      ppat_desc = Ppat_constraint (rewrite_ppat_open longident_open pattern, typ);\n    }\n  | Ppat_type ({txt = constructor} as longident_loc) ->\n    {\n      pat with\n      ppat_desc =\n        Ppat_type\n          {\n            longident_loc with\n            txt = concat_longidents longident_open constructor;\n          };\n    }\n  | Ppat_lazy p ->\n    {pat with ppat_desc = Ppat_lazy (rewrite_ppat_open longident_open p)}\n  | Ppat_exception p ->\n    {pat with ppat_desc = Ppat_exception (rewrite_ppat_open longident_open p)}\n  | _ -> pat\n\nlet escape_template_literal s =\n  let len = String.length s in\n  let b = Buffer.create len in\n  let i = ref 0 in\n  while !i < len do\n    let c = (String.get [@doesNotRaise]) s !i in\n    if c = '`' then (\n      Buffer.add_char b '\\\\';\n      Buffer.add_char b '`';\n      incr i)\n    else if c = '$' then\n      if !i + 1 < len then (\n        let c2 = (String.get [@doesNotRaise]) s (!i + 1) in\n        if c2 = '{' then (\n          Buffer.add_char b '\\\\';\n          Buffer.add_char b '$';\n          Buffer.add_char b '{')\n        else (\n          Buffer.add_char b c;\n          Buffer.add_char b c2);\n        i := !i + 2)\n      else (\n        Buffer.add_char b c;\n        incr i)\n    else if c = '\\\\' then (\n      Buffer.add_char b '\\\\';\n      Buffer.add_char b '\\\\';\n      incr i)\n    else (\n      Buffer.add_char b c;\n      incr i)\n  done;\n  Buffer.contents b\n\nlet escape_string_contents s =\n  let len = String.length s in\n  let b = Buffer.create len in\n\n  let i = ref 0 in\n\n  while !i < len do\n    let c = String.unsafe_get s !i in\n    if c = '\\\\' then (\n      incr i;\n      Buffer.add_char b c;\n      let c = String.unsafe_get s !i in\n      if !i < len then\n   
     let () = Buffer.add_char b c in\n        incr i\n      else ())\n    else if c = '\"' then (\n      Buffer.add_char b '\\\\';\n      Buffer.add_char b c;\n      incr i)\n    else (\n      Buffer.add_char b c;\n      incr i)\n  done;\n  Buffer.contents b\n\nlet looks_like_recursive_type_declaration type_declaration =\n  let open Parsetree in\n  let name = type_declaration.ptype_name.txt in\n  let rec check_kind kind =\n    match kind with\n    | Ptype_abstract | Ptype_open -> false\n    | Ptype_variant constructor_declarations ->\n      List.exists check_constructor_declaration constructor_declarations\n    | Ptype_record label_declarations ->\n      List.exists check_label_declaration label_declarations\n  and check_constructor_declaration constr_decl =\n    check_constructor_arguments constr_decl.pcd_args\n    ||\n    match constr_decl.pcd_res with\n    | Some typexpr -> check_typ_expr typexpr\n    | None -> false\n  and check_label_declaration label_declaration =\n    check_typ_expr label_declaration.pld_type\n  and check_constructor_arguments constr_arg =\n    match constr_arg with\n    | Pcstr_tuple types -> List.exists check_typ_expr types\n    | Pcstr_record label_declarations ->\n      List.exists check_label_declaration label_declarations\n  and check_typ_expr typ =\n    match typ.ptyp_desc with\n    | Ptyp_any -> false\n    | Ptyp_var _ -> false\n    | Ptyp_object (fields, _) -> List.exists check_object_field fields\n    | Ptyp_class _ -> false\n    | Ptyp_package _ -> false\n    | Ptyp_extension _ -> false\n    | Ptyp_arrow (_lbl, typ1, typ2) ->\n      check_typ_expr typ1 || check_typ_expr typ2\n    | Ptyp_tuple types -> List.exists check_typ_expr types\n    | Ptyp_constr ({txt = longident}, types) ->\n      (match longident with\n      | Lident ident -> ident = name\n      | _ -> false)\n      || List.exists check_typ_expr types\n    | Ptyp_alias (typ, _) -> check_typ_expr typ\n    | Ptyp_variant (row_fields, _, _) -> List.exists check_row_fields 
row_fields\n    | Ptyp_poly (_, typ) -> check_typ_expr typ\n  and check_object_field field =\n    match field with\n    | Otag (_label, _attrs, typ) -> check_typ_expr typ\n    | Oinherit typ -> check_typ_expr typ\n  and check_row_fields row_field =\n    match row_field with\n    | Rtag (_, _, _, types) -> List.exists check_typ_expr types\n    | Rinherit typexpr -> check_typ_expr typexpr\n  and check_manifest manifest =\n    match manifest with\n    | Some typ -> check_typ_expr typ\n    | None -> false\n  in\n  check_kind type_declaration.ptype_kind\n  || check_manifest type_declaration.ptype_manifest\n\nlet filter_reason_raw_literal attrs =\n  List.filter\n    (fun attr ->\n      match attr with\n      | {Location.txt = \"reason.raw_literal\"}, _ -> false\n      | _ -> true)\n    attrs\n\nlet string_literal_mapper string_data =\n  let is_same_location l1 l2 =\n    let open Location in\n    l1.loc_start.pos_cnum == l2.loc_start.pos_cnum\n  in\n  let remaining_string_data = string_data in\n  let open Ast_mapper in\n  {\n    default_mapper with\n    expr =\n      (fun mapper expr ->\n        match expr.pexp_desc with\n        | Pexp_constant (Pconst_string (_txt, None)) -> (\n          match\n            List.find_opt\n              (fun (_stringData, string_loc) ->\n                is_same_location string_loc expr.pexp_loc)\n              remaining_string_data\n          with\n          | Some (string_data, _) ->\n            let string_data =\n              let attr =\n                List.find_opt\n                  (fun attr ->\n                    match attr with\n                    | {Location.txt = \"reason.raw_literal\"}, _ -> true\n                    | _ -> false)\n                  expr.pexp_attributes\n              in\n              match attr with\n              | Some\n                  ( _,\n                    PStr\n                      [\n                        {\n                          pstr_desc =\n                            Pstr_eval\n       
                       ( {\n                                  pexp_desc =\n                                    Pexp_constant (Pconst_string (raw, _));\n                                },\n                                _ );\n                        };\n                      ] ) ->\n                raw\n              | _ ->\n                (String.sub [@doesNotRaise]) string_data 1\n                  (String.length string_data - 2)\n            in\n            {\n              expr with\n              pexp_attributes = filter_reason_raw_literal expr.pexp_attributes;\n              pexp_desc = Pexp_constant (Pconst_string (string_data, None));\n            }\n          | None -> default_mapper.expr mapper expr)\n        | _ -> default_mapper.expr mapper expr);\n  }\n\nlet has_uncurried_attribute attrs =\n  List.exists\n    (fun attr ->\n      match attr with\n      | {Asttypes.txt = \"bs\"}, Parsetree.PStr [] -> true\n      | _ -> false)\n    attrs\n\nlet template_literal_attr = (Location.mknoloc \"res.template\", Parsetree.PStr [])\n\nlet normalize =\n  let open Ast_mapper in\n  {\n    default_mapper with\n    attributes =\n      (fun mapper attrs ->\n        attrs\n        |> List.filter (fun attr ->\n               match attr with\n               | ( {\n                     Location.txt =\n                       ( \"reason.preserve_braces\" | \"explicit_arity\"\n                       | \"implicity_arity\" );\n                   },\n                   _ ) ->\n                 false\n               | _ -> true)\n        |> default_mapper.attributes mapper);\n    pat =\n      (fun mapper p ->\n        match p.ppat_desc with\n        | Ppat_open ({txt = longident_open}, pattern) ->\n          let p = rewrite_ppat_open longident_open pattern in\n          default_mapper.pat mapper p\n        | Ppat_constant (Pconst_string (txt, tag)) ->\n          let new_tag =\n            match tag with\n            (* transform {|abc|} into {js|abc|js}, because `template string` 
is interpreted as {js||js} *)\n            | Some \"\" -> Some \"js\"\n            | tag -> tag\n          in\n          let s =\n            Parsetree.Pconst_string (escape_template_literal txt, new_tag)\n          in\n          {\n            p with\n            ppat_attributes =\n              template_literal_attr\n              :: mapper.attributes mapper p.ppat_attributes;\n            ppat_desc = Ppat_constant s;\n          }\n        | _ -> default_mapper.pat mapper p);\n    typ =\n      (fun mapper typ ->\n        match typ.ptyp_desc with\n        | Ptyp_constr\n            ({txt = Longident.Ldot (Longident.Lident \"Js\", \"t\")}, [arg]) ->\n          (* Js.t({\"a\": b}) -> {\"a\": b}\n             Since compiler >9.0.1 objects don't need Js.t wrapping anymore *)\n          mapper.typ mapper arg\n        | _ -> default_mapper.typ mapper typ);\n    expr =\n      (fun mapper expr ->\n        match expr.pexp_desc with\n        | Pexp_constant (Pconst_string (txt, None)) ->\n          let raw = escape_string_contents txt in\n          let s = Parsetree.Pconst_string (raw, None) in\n          {expr with pexp_desc = Pexp_constant s}\n        | Pexp_constant (Pconst_string (txt, tag)) ->\n          let new_tag =\n            match tag with\n            (* transform {|abc|} into {js|abc|js}, we want to preserve unicode by default *)\n            | Some \"\" -> Some \"js\"\n            | tag -> tag\n          in\n          let s =\n            Parsetree.Pconst_string (escape_template_literal txt, new_tag)\n          in\n          {\n            expr with\n            pexp_attributes =\n              template_literal_attr\n              :: mapper.attributes mapper expr.pexp_attributes;\n            pexp_desc = Pexp_constant s;\n          }\n        | Pexp_apply\n            ( call_expr,\n              [\n                ( Nolabel,\n                  ({\n                     pexp_desc =\n                       Pexp_construct ({txt = Longident.Lident \"()\"}, None);\n 
                    pexp_attributes = [];\n                   } as unit_expr) );\n              ] )\n          when has_uncurried_attribute expr.pexp_attributes ->\n          {\n            expr with\n            pexp_attributes = mapper.attributes mapper expr.pexp_attributes;\n            pexp_desc =\n              Pexp_apply\n                ( call_expr,\n                  [\n                    ( Nolabel,\n                      {\n                        unit_expr with\n                        pexp_loc = {unit_expr.pexp_loc with loc_ghost = true};\n                      } );\n                  ] );\n          }\n        | Pexp_function cases ->\n          let loc =\n            match (cases, List.rev cases) with\n            | first :: _, last :: _ ->\n              {\n                first.pc_lhs.ppat_loc with\n                loc_end = last.pc_rhs.pexp_loc.loc_end;\n              }\n            | _ -> Location.none\n          in\n          let var =\n            {\n              Parsetree.ppat_loc = Location.none;\n              ppat_attributes = [];\n              ppat_desc = Ppat_var (Location.mknoloc \"x\");\n            }\n          in\n          {\n            pexp_loc = loc;\n            pexp_attributes = [];\n            pexp_desc =\n              Pexp_fun\n                ( Asttypes.Nolabel,\n                  None,\n                  var,\n                  {\n                    pexp_loc = loc;\n                    pexp_attributes = [];\n                    pexp_desc =\n                      Pexp_match\n                        ( {\n                            pexp_loc = Location.none;\n                            pexp_attributes = [];\n                            pexp_desc =\n                              Pexp_ident\n                                (Location.mknoloc (Longident.Lident \"x\"));\n                          },\n                          mapper.cases mapper cases );\n                  } );\n          }\n        | Pexp_apply\n            ( 
{pexp_desc = Pexp_ident {txt = Longident.Lident \"!\"}},\n              [(Asttypes.Nolabel, operand)] ) ->\n          (* turn `!foo` into `foo.contents` *)\n          {\n            pexp_loc = expr.pexp_loc;\n            pexp_attributes = expr.pexp_attributes;\n            pexp_desc =\n              Pexp_field\n                ( mapper.expr mapper operand,\n                  Location.mknoloc (Longident.Lident \"contents\") );\n          }\n        | Pexp_apply\n            ( {pexp_desc = Pexp_ident {txt = Longident.Lident \"##\"}},\n              [\n                (Asttypes.Nolabel, lhs);\n                ( Nolabel,\n                  {\n                    pexp_desc =\n                      ( Pexp_constant (Pconst_string (txt, None))\n                      | Pexp_ident {txt = Longident.Lident txt} );\n                    pexp_loc = label_loc;\n                  } );\n              ] ) ->\n          let label = Location.mkloc txt label_loc in\n          {\n            pexp_loc = expr.pexp_loc;\n            pexp_attributes = expr.pexp_attributes;\n            pexp_desc = Pexp_send (mapper.expr mapper lhs, label);\n          }\n        | Pexp_match\n            ( condition,\n              [\n                {\n                  pc_lhs =\n                    {\n                      ppat_desc =\n                        Ppat_construct ({txt = Longident.Lident \"true\"}, None);\n                    };\n                  pc_rhs = then_expr;\n                };\n                {\n                  pc_lhs =\n                    {\n                      ppat_desc =\n                        Ppat_construct ({txt = Longident.Lident \"false\"}, None);\n                    };\n                  pc_rhs = else_expr;\n                };\n              ] ) ->\n          let ternary_marker =\n            (Location.mknoloc \"res.ternary\", Parsetree.PStr [])\n          in\n          {\n            Parsetree.pexp_loc = expr.pexp_loc;\n            pexp_desc =\n              
Pexp_ifthenelse\n                ( mapper.expr mapper condition,\n                  mapper.expr mapper then_expr,\n                  Some (mapper.expr mapper else_expr) );\n            pexp_attributes = ternary_marker :: expr.pexp_attributes;\n          }\n        | _ -> default_mapper.expr mapper expr);\n    structure_item =\n      (fun mapper structure_item ->\n        match structure_item.pstr_desc with\n        (* heuristic: if we have multiple type declarations, mark them recursive *)\n        | Pstr_type ((Recursive as rec_flag), type_declarations) ->\n          let flag =\n            match type_declarations with\n            | [td] ->\n              if looks_like_recursive_type_declaration td then\n                Asttypes.Recursive\n              else Asttypes.Nonrecursive\n            | _ -> rec_flag\n          in\n          {\n            structure_item with\n            pstr_desc =\n              Pstr_type\n                ( flag,\n                  List.map\n                    (fun type_declaration ->\n                      default_mapper.type_declaration mapper type_declaration)\n                    type_declarations );\n          }\n        | _ -> default_mapper.structure_item mapper structure_item);\n    signature_item =\n      (fun mapper signature_item ->\n        match signature_item.psig_desc with\n        (* heuristic: if we have multiple type declarations, mark them recursive *)\n        | Psig_type ((Recursive as rec_flag), type_declarations) ->\n          let flag =\n            match type_declarations with\n            | [td] ->\n              if looks_like_recursive_type_declaration td then\n                Asttypes.Recursive\n              else Asttypes.Nonrecursive\n            | _ -> rec_flag\n          in\n          {\n            signature_item with\n            psig_desc =\n              Psig_type\n                ( flag,\n                  List.map\n                    (fun type_declaration ->\n                      
default_mapper.type_declaration mapper type_declaration)\n                    type_declarations );\n          }\n        | _ -> default_mapper.signature_item mapper signature_item);\n    value_binding =\n      (fun mapper vb ->\n        match vb with\n        | {\n         pvb_pat = {ppat_desc = Ppat_var _} as pat;\n         pvb_expr =\n           {pexp_loc = expr_loc; pexp_desc = Pexp_constraint (expr, typ)};\n        }\n          when expr_loc.loc_ghost ->\n          (* let t: t = (expr : t) -> let t: t = expr *)\n          let typ = default_mapper.typ mapper typ in\n          let pat = default_mapper.pat mapper pat in\n          let expr = mapper.expr mapper expr in\n          let new_pattern =\n            {\n              Parsetree.ppat_loc =\n                {pat.ppat_loc with loc_end = typ.ptyp_loc.loc_end};\n              ppat_attributes = [];\n              ppat_desc = Ppat_constraint (pat, typ);\n            }\n          in\n          {\n            vb with\n            pvb_pat = new_pattern;\n            pvb_expr = expr;\n            pvb_attributes = default_mapper.attributes mapper vb.pvb_attributes;\n          }\n        | {\n         pvb_pat =\n           {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly ([], _)})};\n         pvb_expr =\n           {pexp_loc = expr_loc; pexp_desc = Pexp_constraint (expr, typ)};\n        }\n          when expr_loc.loc_ghost ->\n          (* let t: . 
t = (expr : t) -> let t: t = expr *)\n          let typ = default_mapper.typ mapper typ in\n          let pat = default_mapper.pat mapper pat in\n          let expr = mapper.expr mapper expr in\n          let new_pattern =\n            {\n              Parsetree.ppat_loc =\n                {pat.ppat_loc with loc_end = typ.ptyp_loc.loc_end};\n              ppat_attributes = [];\n              ppat_desc = Ppat_constraint (pat, typ);\n            }\n          in\n          {\n            vb with\n            pvb_pat = new_pattern;\n            pvb_expr = expr;\n            pvb_attributes = default_mapper.attributes mapper vb.pvb_attributes;\n          }\n        | _ -> default_mapper.value_binding mapper vb);\n  }\n\nlet structure s = normalize.Ast_mapper.structure normalize s\nlet signature s = normalize.Ast_mapper.signature normalize s\n\nlet replace_string_literal_structure string_data structure =\n  let mapper = string_literal_mapper string_data in\n  mapper.Ast_mapper.structure mapper structure\n\nlet replace_string_literal_signature string_data signature =\n  let mapper = string_literal_mapper string_data in\n  mapper.Ast_mapper.signature mapper signature\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_ast_conversion.mli",
    "content": "(* The purpose of this module is to convert a parsetree coming from the reason\n * or ocaml parser, into something consumable by the rescript printer. *)\n\n(* Ocaml/Reason parser interprets string literals: i.e. escape sequences and unicode.\n * For printing purposes you want to preserve the original string.\n * Example: \"😎\" is interpreted as \"\\240\\159\\152\\142\"\n * The purpose of this routine is to place the original string back in\n * the parsetree for printing purposes. Unicode and escape sequences\n * shouldn't be mangled when *)\nval replace_string_literal_structure :\n  (string * Location.t) list -> Parsetree.structure -> Parsetree.structure\nval replace_string_literal_signature :\n  (string * Location.t) list -> Parsetree.signature -> Parsetree.signature\n\n(* transform parts of the parsetree into a suitable parsetree suitable\n * for printing. Example: convert reason ternaries into rescript ternaries *)\nval structure : Parsetree.structure -> Parsetree.structure\nval signature : Parsetree.signature -> Parsetree.signature\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_ast_debugger.ml",
    "content": "module Doc = Res_doc\nmodule CommentTable = Res_comments_table\n\nlet print_engine =\n  Res_driver.\n    {\n      print_implementation =\n        (fun ~width:_ ~filename:_ ~comments:_ structure ->\n          Printast.implementation Format.std_formatter structure);\n      print_interface =\n        (fun ~width:_ ~filename:_ ~comments:_ signature ->\n          Printast.interface Format.std_formatter signature);\n    }\n\nmodule Sexp : sig\n  type t\n\n  val atom : string -> t\n  val list : t list -> t\n  val to_string : t -> string\nend = struct\n  type t = Atom of string | List of t list\n\n  let atom s = Atom s\n  let list l = List l\n\n  let rec to_doc t =\n    match t with\n    | Atom s -> Doc.text s\n    | List [] -> Doc.text \"()\"\n    | List [sexpr] -> Doc.concat [Doc.lparen; to_doc sexpr; Doc.rparen]\n    | List (hd :: tail) ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.lparen;\n             to_doc hd;\n             Doc.indent\n               (Doc.concat\n                  [Doc.line; Doc.join ~sep:Doc.line (List.map to_doc tail)]);\n             Doc.rparen;\n           ])\n\n  let to_string sexpr =\n    let doc = to_doc sexpr in\n    Doc.to_string ~width:80 doc\nend\n\nmodule SexpAst = struct\n  open Parsetree\n\n  let map_empty ~f items =\n    match items with\n    | [] -> [Sexp.list []]\n    | items -> List.map f items\n\n  let string txt =\n    Sexp.atom (\"\\\"\" ^ Ext_ident.unwrap_uppercase_exotic txt ^ \"\\\"\")\n\n  let char c = Sexp.atom (\"'\" ^ Char.escaped c ^ \"'\")\n\n  let opt_char oc =\n    match oc with\n    | None -> Sexp.atom \"None\"\n    | Some c -> Sexp.list [Sexp.atom \"Some\"; char c]\n\n  let longident l =\n    let rec loop l =\n      match l with\n      | Longident.Lident ident -> Sexp.list [Sexp.atom \"Lident\"; string ident]\n      | Longident.Ldot (lident, txt) ->\n        Sexp.list [Sexp.atom \"Ldot\"; loop lident; string txt]\n      | Longident.Lapply (l1, l2) ->\n        Sexp.list 
[Sexp.atom \"Lapply\"; loop l1; loop l2]\n    in\n    Sexp.list [Sexp.atom \"longident\"; loop l]\n\n  let closed_flag flag =\n    match flag with\n    | Asttypes.Closed -> Sexp.atom \"Closed\"\n    | Open -> Sexp.atom \"Open\"\n\n  let direction_flag flag =\n    match flag with\n    | Asttypes.Upto -> Sexp.atom \"Upto\"\n    | Downto -> Sexp.atom \"Downto\"\n\n  let rec_flag flag =\n    match flag with\n    | Asttypes.Recursive -> Sexp.atom \"Recursive\"\n    | Nonrecursive -> Sexp.atom \"Nonrecursive\"\n\n  let override_flag flag =\n    match flag with\n    | Asttypes.Override -> Sexp.atom \"Override\"\n    | Fresh -> Sexp.atom \"Fresh\"\n\n  let private_flag flag =\n    match flag with\n    | Asttypes.Public -> Sexp.atom \"Public\"\n    | Private -> Sexp.atom \"Private\"\n\n  let mutable_flag flag =\n    match flag with\n    | Asttypes.Immutable -> Sexp.atom \"Immutable\"\n    | Mutable -> Sexp.atom \"Mutable\"\n\n  let variance v =\n    match v with\n    | Asttypes.Covariant -> Sexp.atom \"Covariant\"\n    | Contravariant -> Sexp.atom \"Contravariant\"\n    | Invariant -> Sexp.atom \"Invariant\"\n\n  let arg_label lbl =\n    match lbl with\n    | Asttypes.Nolabel -> Sexp.atom \"Nolabel\"\n    | Labelled txt -> Sexp.list [Sexp.atom \"Labelled\"; string txt]\n    | Optional txt -> Sexp.list [Sexp.atom \"Optional\"; string txt]\n\n  let constant c =\n    let sexpr =\n      match c with\n      | Pconst_integer (txt, tag) ->\n        Sexp.list [Sexp.atom \"Pconst_integer\"; string txt; opt_char tag]\n      | Pconst_char _ -> Sexp.list [Sexp.atom \"Pconst_char\"]\n      | Pconst_string (_, Some \"INTERNAL_RES_CHAR_CONTENTS\") ->\n        Sexp.list [Sexp.atom \"Pconst_char\"]\n      | Pconst_string (txt, tag) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pconst_string\";\n            string txt;\n            (match tag with\n            | Some txt -> Sexp.list [Sexp.atom \"Some\"; string txt]\n            | None -> Sexp.atom \"None\");\n          ]\n     
 | Pconst_float (txt, tag) ->\n        Sexp.list [Sexp.atom \"Pconst_float\"; string txt; opt_char tag]\n    in\n    Sexp.list [Sexp.atom \"constant\"; sexpr]\n\n  let rec structure s =\n    Sexp.list (Sexp.atom \"structure\" :: List.map structure_item s)\n\n  and structure_item si =\n    let desc =\n      match si.pstr_desc with\n      | Pstr_eval (expr, attrs) ->\n        Sexp.list [Sexp.atom \"Pstr_eval\"; expression expr; attributes attrs]\n      | Pstr_value (flag, vbs) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pstr_value\";\n            rec_flag flag;\n            Sexp.list (map_empty ~f:value_binding vbs);\n          ]\n      | Pstr_primitive vd ->\n        Sexp.list [Sexp.atom \"Pstr_primitive\"; value_description vd]\n      | Pstr_type (flag, tds) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pstr_type\";\n            rec_flag flag;\n            Sexp.list (map_empty ~f:type_declaration tds);\n          ]\n      | Pstr_typext typext ->\n        Sexp.list [Sexp.atom \"Pstr_type\"; type_extension typext]\n      | Pstr_exception ec ->\n        Sexp.list [Sexp.atom \"Pstr_exception\"; extension_constructor ec]\n      | Pstr_module mb -> Sexp.list [Sexp.atom \"Pstr_module\"; module_binding mb]\n      | Pstr_recmodule mbs ->\n        Sexp.list\n          [\n            Sexp.atom \"Pstr_recmodule\";\n            Sexp.list (map_empty ~f:module_binding mbs);\n          ]\n      | Pstr_modtype mod_typ_decl ->\n        Sexp.list\n          [Sexp.atom \"Pstr_modtype\"; module_type_declaration mod_typ_decl]\n      | Pstr_open open_desc ->\n        Sexp.list [Sexp.atom \"Pstr_open\"; open_description open_desc]\n      | Pstr_class _ -> Sexp.atom \"Pstr_class\"\n      | Pstr_class_type _ -> Sexp.atom \"Pstr_class_type\"\n      | Pstr_include id ->\n        Sexp.list [Sexp.atom \"Pstr_include\"; include_declaration id]\n      | Pstr_attribute attr ->\n        Sexp.list [Sexp.atom \"Pstr_attribute\"; attribute attr]\n      | Pstr_extension 
(ext, attrs) ->\n        Sexp.list [Sexp.atom \"Pstr_extension\"; extension ext; attributes attrs]\n    in\n    Sexp.list [Sexp.atom \"structure_item\"; desc]\n\n  and include_declaration id =\n    Sexp.list\n      [\n        Sexp.atom \"include_declaration\";\n        module_expression id.pincl_mod;\n        attributes id.pincl_attributes;\n      ]\n\n  and open_description od =\n    Sexp.list\n      [\n        Sexp.atom \"open_description\";\n        longident od.popen_lid.Asttypes.txt;\n        attributes od.popen_attributes;\n      ]\n\n  and module_type_declaration mtd =\n    Sexp.list\n      [\n        Sexp.atom \"module_type_declaration\";\n        string mtd.pmtd_name.Asttypes.txt;\n        (match mtd.pmtd_type with\n        | None -> Sexp.atom \"None\"\n        | Some mod_type -> Sexp.list [Sexp.atom \"Some\"; module_type mod_type]);\n        attributes mtd.pmtd_attributes;\n      ]\n\n  and module_binding mb =\n    Sexp.list\n      [\n        Sexp.atom \"module_binding\";\n        string mb.pmb_name.Asttypes.txt;\n        module_expression mb.pmb_expr;\n        attributes mb.pmb_attributes;\n      ]\n\n  and module_expression me =\n    let desc =\n      match me.pmod_desc with\n      | Pmod_ident mod_name ->\n        Sexp.list [Sexp.atom \"Pmod_ident\"; longident mod_name.Asttypes.txt]\n      | Pmod_structure s -> Sexp.list [Sexp.atom \"Pmod_structure\"; structure s]\n      | Pmod_functor (lbl, opt_mod_type, mod_expr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pmod_functor\";\n            string lbl.Asttypes.txt;\n            (match opt_mod_type with\n            | None -> Sexp.atom \"None\"\n            | Some mod_type ->\n              Sexp.list [Sexp.atom \"Some\"; module_type mod_type]);\n            module_expression mod_expr;\n          ]\n      | Pmod_apply (call_mod_expr, mod_expr_arg) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pmod_apply\";\n            module_expression call_mod_expr;\n            
module_expression mod_expr_arg;\n          ]\n      | Pmod_constraint (mod_expr, mod_type) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pmod_constraint\";\n            module_expression mod_expr;\n            module_type mod_type;\n          ]\n      | Pmod_unpack expr -> Sexp.list [Sexp.atom \"Pmod_unpack\"; expression expr]\n      | Pmod_extension ext ->\n        Sexp.list [Sexp.atom \"Pmod_extension\"; extension ext]\n    in\n    Sexp.list [Sexp.atom \"module_expr\"; desc; attributes me.pmod_attributes]\n\n  and module_type mt =\n    let desc =\n      match mt.pmty_desc with\n      | Pmty_ident longident_loc ->\n        Sexp.list [Sexp.atom \"Pmty_ident\"; longident longident_loc.Asttypes.txt]\n      | Pmty_signature s -> Sexp.list [Sexp.atom \"Pmty_signature\"; signature s]\n      | Pmty_functor (lbl, opt_mod_type, mod_type) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pmty_functor\";\n            string lbl.Asttypes.txt;\n            (match opt_mod_type with\n            | None -> Sexp.atom \"None\"\n            | Some mod_type ->\n              Sexp.list [Sexp.atom \"Some\"; module_type mod_type]);\n            module_type mod_type;\n          ]\n      | Pmty_alias longident_loc ->\n        Sexp.list [Sexp.atom \"Pmty_alias\"; longident longident_loc.Asttypes.txt]\n      | Pmty_extension ext ->\n        Sexp.list [Sexp.atom \"Pmty_extension\"; extension ext]\n      | Pmty_typeof mod_expr ->\n        Sexp.list [Sexp.atom \"Pmty_typeof\"; module_expression mod_expr]\n      | Pmty_with (mod_type, with_constraints) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pmty_with\";\n            module_type mod_type;\n            Sexp.list (map_empty ~f:with_constraint with_constraints);\n          ]\n    in\n    Sexp.list [Sexp.atom \"module_type\"; desc; attributes mt.pmty_attributes]\n\n  and with_constraint wc =\n    match wc with\n    | Pwith_type (longident_loc, td) ->\n      Sexp.list\n        [\n          Sexp.atom 
\"Pmty_with\";\n          longident longident_loc.Asttypes.txt;\n          type_declaration td;\n        ]\n    | Pwith_module (l1, l2) ->\n      Sexp.list\n        [\n          Sexp.atom \"Pwith_module\";\n          longident l1.Asttypes.txt;\n          longident l2.Asttypes.txt;\n        ]\n    | Pwith_typesubst (longident_loc, td) ->\n      Sexp.list\n        [\n          Sexp.atom \"Pwith_typesubst\";\n          longident longident_loc.Asttypes.txt;\n          type_declaration td;\n        ]\n    | Pwith_modsubst (l1, l2) ->\n      Sexp.list\n        [\n          Sexp.atom \"Pwith_modsubst\";\n          longident l1.Asttypes.txt;\n          longident l2.Asttypes.txt;\n        ]\n\n  and signature s =\n    Sexp.list (Sexp.atom \"signature\" :: List.map signature_item s)\n\n  and signature_item si =\n    let descr =\n      match si.psig_desc with\n      | Psig_value vd ->\n        Sexp.list [Sexp.atom \"Psig_value\"; value_description vd]\n      | Psig_type (flag, type_declarations) ->\n        Sexp.list\n          [\n            Sexp.atom \"Psig_type\";\n            rec_flag flag;\n            Sexp.list (map_empty ~f:type_declaration type_declarations);\n          ]\n      | Psig_typext typ_ext ->\n        Sexp.list [Sexp.atom \"Psig_typext\"; type_extension typ_ext]\n      | Psig_exception ext_constr ->\n        Sexp.list [Sexp.atom \"Psig_exception\"; extension_constructor ext_constr]\n      | Psig_module mod_decl ->\n        Sexp.list [Sexp.atom \"Psig_module\"; module_declaration mod_decl]\n      | Psig_recmodule mod_decls ->\n        Sexp.list\n          [\n            Sexp.atom \"Psig_recmodule\";\n            Sexp.list (map_empty ~f:module_declaration mod_decls);\n          ]\n      | Psig_modtype mod_typ_decl ->\n        Sexp.list\n          [Sexp.atom \"Psig_modtype\"; module_type_declaration mod_typ_decl]\n      | Psig_open open_desc ->\n        Sexp.list [Sexp.atom \"Psig_open\"; open_description open_desc]\n      | Psig_include incl_decl ->\n        
Sexp.list [Sexp.atom \"Psig_include\"; include_description incl_decl]\n      | Psig_class _ -> Sexp.list [Sexp.atom \"Psig_class\"]\n      | Psig_class_type _ -> Sexp.list [Sexp.atom \"Psig_class_type\"]\n      | Psig_attribute attr ->\n        Sexp.list [Sexp.atom \"Psig_attribute\"; attribute attr]\n      | Psig_extension (ext, attrs) ->\n        Sexp.list [Sexp.atom \"Psig_extension\"; extension ext; attributes attrs]\n    in\n    Sexp.list [Sexp.atom \"signature_item\"; descr]\n\n  and include_description id =\n    Sexp.list\n      [\n        Sexp.atom \"include_description\";\n        module_type id.pincl_mod;\n        attributes id.pincl_attributes;\n      ]\n\n  and module_declaration md =\n    Sexp.list\n      [\n        Sexp.atom \"module_declaration\";\n        string md.pmd_name.Asttypes.txt;\n        module_type md.pmd_type;\n        attributes md.pmd_attributes;\n      ]\n\n  and value_binding vb =\n    Sexp.list\n      [\n        Sexp.atom \"value_binding\";\n        pattern vb.pvb_pat;\n        expression vb.pvb_expr;\n        attributes vb.pvb_attributes;\n      ]\n\n  and value_description vd =\n    Sexp.list\n      [\n        Sexp.atom \"value_description\";\n        string vd.pval_name.Asttypes.txt;\n        core_type vd.pval_type;\n        Sexp.list (map_empty ~f:string vd.pval_prim);\n        attributes vd.pval_attributes;\n      ]\n\n  and type_declaration td =\n    Sexp.list\n      [\n        Sexp.atom \"type_declaration\";\n        string td.ptype_name.Asttypes.txt;\n        Sexp.list\n          [\n            Sexp.atom \"ptype_params\";\n            Sexp.list\n              (map_empty\n                 ~f:(fun (typexpr, var) ->\n                   Sexp.list [core_type typexpr; variance var])\n                 td.ptype_params);\n          ];\n        Sexp.list\n          [\n            Sexp.atom \"ptype_cstrs\";\n            Sexp.list\n              (map_empty\n                 ~f:(fun (typ1, typ2, _loc) ->\n                   Sexp.list 
[core_type typ1; core_type typ2])\n                 td.ptype_cstrs);\n          ];\n        Sexp.list [Sexp.atom \"ptype_kind\"; type_kind td.ptype_kind];\n        Sexp.list\n          [\n            Sexp.atom \"ptype_manifest\";\n            (match td.ptype_manifest with\n            | None -> Sexp.atom \"None\"\n            | Some typ -> Sexp.list [Sexp.atom \"Some\"; core_type typ]);\n          ];\n        Sexp.list [Sexp.atom \"ptype_private\"; private_flag td.ptype_private];\n        attributes td.ptype_attributes;\n      ]\n\n  and extension_constructor ec =\n    Sexp.list\n      [\n        Sexp.atom \"extension_constructor\";\n        string ec.pext_name.Asttypes.txt;\n        extension_constructor_kind ec.pext_kind;\n        attributes ec.pext_attributes;\n      ]\n\n  and extension_constructor_kind kind =\n    match kind with\n    | Pext_decl (args, opt_typ_expr) ->\n      Sexp.list\n        [\n          Sexp.atom \"Pext_decl\";\n          constructor_arguments args;\n          (match opt_typ_expr with\n          | None -> Sexp.atom \"None\"\n          | Some typ -> Sexp.list [Sexp.atom \"Some\"; core_type typ]);\n        ]\n    | Pext_rebind longident_loc ->\n      Sexp.list [Sexp.atom \"Pext_rebind\"; longident longident_loc.Asttypes.txt]\n\n  and type_extension te =\n    Sexp.list\n      [\n        Sexp.atom \"type_extension\";\n        Sexp.list\n          [Sexp.atom \"ptyext_path\"; longident te.ptyext_path.Asttypes.txt];\n        Sexp.list\n          [\n            Sexp.atom \"ptyext_parms\";\n            Sexp.list\n              (map_empty\n                 ~f:(fun (typexpr, var) ->\n                   Sexp.list [core_type typexpr; variance var])\n                 te.ptyext_params);\n          ];\n        Sexp.list\n          [\n            Sexp.atom \"ptyext_constructors\";\n            Sexp.list\n              (map_empty ~f:extension_constructor te.ptyext_constructors);\n          ];\n        Sexp.list [Sexp.atom \"ptyext_private\"; private_flag 
te.ptyext_private];\n        attributes te.ptyext_attributes;\n      ]\n\n  and type_kind kind =\n    match kind with\n    | Ptype_abstract -> Sexp.atom \"Ptype_abstract\"\n    | Ptype_variant constr_decls ->\n      Sexp.list\n        [\n          Sexp.atom \"Ptype_variant\";\n          Sexp.list (map_empty ~f:constructor_declaration constr_decls);\n        ]\n    | Ptype_record lbl_decls ->\n      Sexp.list\n        [\n          Sexp.atom \"Ptype_record\";\n          Sexp.list (map_empty ~f:label_declaration lbl_decls);\n        ]\n    | Ptype_open -> Sexp.atom \"Ptype_open\"\n\n  and constructor_declaration cd =\n    Sexp.list\n      [\n        Sexp.atom \"constructor_declaration\";\n        string cd.pcd_name.Asttypes.txt;\n        Sexp.list [Sexp.atom \"pcd_args\"; constructor_arguments cd.pcd_args];\n        Sexp.list\n          [\n            Sexp.atom \"pcd_res\";\n            (match cd.pcd_res with\n            | None -> Sexp.atom \"None\"\n            | Some typ -> Sexp.list [Sexp.atom \"Some\"; core_type typ]);\n          ];\n        attributes cd.pcd_attributes;\n      ]\n\n  and constructor_arguments args =\n    match args with\n    | Pcstr_tuple types ->\n      Sexp.list\n        [Sexp.atom \"Pcstr_tuple\"; Sexp.list (map_empty ~f:core_type types)]\n    | Pcstr_record lds ->\n      Sexp.list\n        [\n          Sexp.atom \"Pcstr_record\";\n          Sexp.list (map_empty ~f:label_declaration lds);\n        ]\n\n  and label_declaration ld =\n    Sexp.list\n      [\n        Sexp.atom \"label_declaration\";\n        string ld.pld_name.Asttypes.txt;\n        mutable_flag ld.pld_mutable;\n        core_type ld.pld_type;\n        attributes ld.pld_attributes;\n      ]\n\n  and expression expr =\n    let desc =\n      match expr.pexp_desc with\n      | Pexp_ident longident_loc ->\n        Sexp.list [Sexp.atom \"Pexp_ident\"; longident longident_loc.Asttypes.txt]\n      | Pexp_constant c -> Sexp.list [Sexp.atom \"Pexp_constant\"; constant c]\n      | Pexp_let 
(flag, vbs, expr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_let\";\n            rec_flag flag;\n            Sexp.list (map_empty ~f:value_binding vbs);\n            expression expr;\n          ]\n      | Pexp_function cases ->\n        Sexp.list\n          [Sexp.atom \"Pexp_function\"; Sexp.list (map_empty ~f:case cases)]\n      | Pexp_fun (arg_lbl, expr_opt, pat, expr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_fun\";\n            arg_label arg_lbl;\n            (match expr_opt with\n            | None -> Sexp.atom \"None\"\n            | Some expr -> Sexp.list [Sexp.atom \"Some\"; expression expr]);\n            pattern pat;\n            expression expr;\n          ]\n      | Pexp_apply (expr, args) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_apply\";\n            expression expr;\n            Sexp.list\n              (map_empty\n                 ~f:(fun (arg_lbl, expr) ->\n                   Sexp.list [arg_label arg_lbl; expression expr])\n                 args);\n          ]\n      | Pexp_match (expr, cases) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_match\";\n            expression expr;\n            Sexp.list (map_empty ~f:case cases);\n          ]\n      | Pexp_try (expr, cases) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_try\";\n            expression expr;\n            Sexp.list (map_empty ~f:case cases);\n          ]\n      | Pexp_tuple exprs ->\n        Sexp.list\n          [Sexp.atom \"Pexp_tuple\"; Sexp.list (map_empty ~f:expression exprs)]\n      | Pexp_construct (longident_loc, expr_opt) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_construct\";\n            longident longident_loc.Asttypes.txt;\n            (match expr_opt with\n            | None -> Sexp.atom \"None\"\n            | Some expr -> Sexp.list [Sexp.atom \"Some\"; expression expr]);\n          ]\n      | Pexp_variant (lbl, expr_opt) ->\n        Sexp.list\n      
    [\n            Sexp.atom \"Pexp_variant\";\n            string lbl;\n            (match expr_opt with\n            | None -> Sexp.atom \"None\"\n            | Some expr -> Sexp.list [Sexp.atom \"Some\"; expression expr]);\n          ]\n      | Pexp_record (rows, opt_expr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_record\";\n            Sexp.list\n              (map_empty\n                 ~f:(fun (longident_loc, expr) ->\n                   Sexp.list\n                     [longident longident_loc.Asttypes.txt; expression expr])\n                 rows);\n            (match opt_expr with\n            | None -> Sexp.atom \"None\"\n            | Some expr -> Sexp.list [Sexp.atom \"Some\"; expression expr]);\n          ]\n      | Pexp_field (expr, longident_loc) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_field\";\n            expression expr;\n            longident longident_loc.Asttypes.txt;\n          ]\n      | Pexp_setfield (expr1, longident_loc, expr2) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_setfield\";\n            expression expr1;\n            longident longident_loc.Asttypes.txt;\n            expression expr2;\n          ]\n      | Pexp_array exprs ->\n        Sexp.list\n          [Sexp.atom \"Pexp_array\"; Sexp.list (map_empty ~f:expression exprs)]\n      | Pexp_ifthenelse (expr1, expr2, opt_expr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_ifthenelse\";\n            expression expr1;\n            expression expr2;\n            (match opt_expr with\n            | None -> Sexp.atom \"None\"\n            | Some expr -> Sexp.list [Sexp.atom \"Some\"; expression expr]);\n          ]\n      | Pexp_sequence (expr1, expr2) ->\n        Sexp.list\n          [Sexp.atom \"Pexp_sequence\"; expression expr1; expression expr2]\n      | Pexp_while (expr1, expr2) ->\n        Sexp.list [Sexp.atom \"Pexp_while\"; expression expr1; expression expr2]\n      | Pexp_for (pat, e1, e2, 
flag, e3) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_for\";\n            pattern pat;\n            expression e1;\n            expression e2;\n            direction_flag flag;\n            expression e3;\n          ]\n      | Pexp_constraint (expr, typexpr) ->\n        Sexp.list\n          [Sexp.atom \"Pexp_constraint\"; expression expr; core_type typexpr]\n      | Pexp_coerce (expr, opt_typ, typexpr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_coerce\";\n            expression expr;\n            (match opt_typ with\n            | None -> Sexp.atom \"None\"\n            | Some typ -> Sexp.list [Sexp.atom \"Some\"; core_type typ]);\n            core_type typexpr;\n          ]\n      | Pexp_send _ -> Sexp.list [Sexp.atom \"Pexp_send\"]\n      | Pexp_new _ -> Sexp.list [Sexp.atom \"Pexp_new\"]\n      | Pexp_setinstvar _ -> Sexp.list [Sexp.atom \"Pexp_setinstvar\"]\n      | Pexp_override _ -> Sexp.list [Sexp.atom \"Pexp_override\"]\n      | Pexp_letmodule (mod_name, mod_expr, expr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_letmodule\";\n            string mod_name.Asttypes.txt;\n            module_expression mod_expr;\n            expression expr;\n          ]\n      | Pexp_letexception (ext_constr, expr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_letexception\";\n            extension_constructor ext_constr;\n            expression expr;\n          ]\n      | Pexp_assert expr -> Sexp.list [Sexp.atom \"Pexp_assert\"; expression expr]\n      | Pexp_lazy expr -> Sexp.list [Sexp.atom \"Pexp_lazy\"; expression expr]\n      | Pexp_poly _ -> Sexp.list [Sexp.atom \"Pexp_poly\"]\n      | Pexp_object _ -> Sexp.list [Sexp.atom \"Pexp_object\"]\n      | Pexp_newtype (lbl, expr) ->\n        Sexp.list\n          [Sexp.atom \"Pexp_newtype\"; string lbl.Asttypes.txt; expression expr]\n      | Pexp_pack mod_expr ->\n        Sexp.list [Sexp.atom \"Pexp_pack\"; module_expression mod_expr]\n      | 
Pexp_open (flag, longident_loc, expr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Pexp_open\";\n            override_flag flag;\n            longident longident_loc.Asttypes.txt;\n            expression expr;\n          ]\n      | Pexp_extension ext ->\n        Sexp.list [Sexp.atom \"Pexp_extension\"; extension ext]\n      | Pexp_unreachable -> Sexp.atom \"Pexp_unreachable\"\n    in\n    Sexp.list [Sexp.atom \"expression\"; desc]\n\n  and case c =\n    Sexp.list\n      [\n        Sexp.atom \"case\";\n        Sexp.list [Sexp.atom \"pc_lhs\"; pattern c.pc_lhs];\n        Sexp.list\n          [\n            Sexp.atom \"pc_guard\";\n            (match c.pc_guard with\n            | None -> Sexp.atom \"None\"\n            | Some expr -> Sexp.list [Sexp.atom \"Some\"; expression expr]);\n          ];\n        Sexp.list [Sexp.atom \"pc_rhs\"; expression c.pc_rhs];\n      ]\n\n  and pattern p =\n    let descr =\n      match p.ppat_desc with\n      | Ppat_any -> Sexp.atom \"Ppat_any\"\n      | Ppat_var var ->\n        Sexp.list [Sexp.atom \"Ppat_var\"; string var.Location.txt]\n      | Ppat_alias (p, alias) ->\n        Sexp.list [Sexp.atom \"Ppat_alias\"; pattern p; string alias.txt]\n      | Ppat_constant c -> Sexp.list [Sexp.atom \"Ppat_constant\"; constant c]\n      | Ppat_interval (lo, hi) ->\n        Sexp.list [Sexp.atom \"Ppat_interval\"; constant lo; constant hi]\n      | Ppat_tuple patterns ->\n        Sexp.list\n          [Sexp.atom \"Ppat_tuple\"; Sexp.list (map_empty ~f:pattern patterns)]\n      | Ppat_construct (longident_loc, opt_pattern) ->\n        Sexp.list\n          [\n            Sexp.atom \"Ppat_construct\";\n            longident longident_loc.Location.txt;\n            (match opt_pattern with\n            | None -> Sexp.atom \"None\"\n            | Some p -> Sexp.list [Sexp.atom \"some\"; pattern p]);\n          ]\n      | Ppat_variant (lbl, opt_pattern) ->\n        Sexp.list\n          [\n            Sexp.atom \"Ppat_variant\";\n        
    string lbl;\n            (match opt_pattern with\n            | None -> Sexp.atom \"None\"\n            | Some p -> Sexp.list [Sexp.atom \"Some\"; pattern p]);\n          ]\n      | Ppat_record (rows, flag) ->\n        Sexp.list\n          [\n            Sexp.atom \"Ppat_record\";\n            closed_flag flag;\n            Sexp.list\n              (map_empty\n                 ~f:(fun (longident_loc, p) ->\n                   Sexp.list [longident longident_loc.Location.txt; pattern p])\n                 rows);\n          ]\n      | Ppat_array patterns ->\n        Sexp.list\n          [Sexp.atom \"Ppat_array\"; Sexp.list (map_empty ~f:pattern patterns)]\n      | Ppat_or (p1, p2) ->\n        Sexp.list [Sexp.atom \"Ppat_or\"; pattern p1; pattern p2]\n      | Ppat_constraint (p, typexpr) ->\n        Sexp.list [Sexp.atom \"Ppat_constraint\"; pattern p; core_type typexpr]\n      | Ppat_type longident_loc ->\n        Sexp.list [Sexp.atom \"Ppat_type\"; longident longident_loc.Location.txt]\n      | Ppat_lazy p -> Sexp.list [Sexp.atom \"Ppat_lazy\"; pattern p]\n      | Ppat_unpack string_loc ->\n        Sexp.list [Sexp.atom \"Ppat_unpack\"; string string_loc.Location.txt]\n      | Ppat_exception p -> Sexp.list [Sexp.atom \"Ppat_exception\"; pattern p]\n      | Ppat_extension ext ->\n        Sexp.list [Sexp.atom \"Ppat_extension\"; extension ext]\n      | Ppat_open (longident_loc, p) ->\n        Sexp.list\n          [\n            Sexp.atom \"Ppat_open\";\n            longident longident_loc.Location.txt;\n            pattern p;\n          ]\n    in\n    Sexp.list [Sexp.atom \"pattern\"; descr]\n\n  and object_field field =\n    match field with\n    | Otag (lbl_loc, attrs, typexpr) ->\n      Sexp.list\n        [\n          Sexp.atom \"Otag\";\n          string lbl_loc.txt;\n          attributes attrs;\n          core_type typexpr;\n        ]\n    | Oinherit typexpr -> Sexp.list [Sexp.atom \"Oinherit\"; core_type typexpr]\n\n  and row_field field =\n    match field 
with\n    | Rtag (label_loc, attrs, truth, types) ->\n      Sexp.list\n        [\n          Sexp.atom \"Rtag\";\n          string label_loc.txt;\n          attributes attrs;\n          Sexp.atom (if truth then \"true\" else \"false\");\n          Sexp.list (map_empty ~f:core_type types);\n        ]\n    | Rinherit typexpr -> Sexp.list [Sexp.atom \"Rinherit\"; core_type typexpr]\n\n  and package_type (mod_name_loc, package_constraints) =\n    Sexp.list\n      [\n        Sexp.atom \"package_type\";\n        longident mod_name_loc.Asttypes.txt;\n        Sexp.list\n          (map_empty\n             ~f:(fun (mod_name_loc, typexpr) ->\n               Sexp.list\n                 [longident mod_name_loc.Asttypes.txt; core_type typexpr])\n             package_constraints);\n      ]\n\n  and core_type typexpr =\n    let desc =\n      match typexpr.ptyp_desc with\n      | Ptyp_any -> Sexp.atom \"Ptyp_any\"\n      | Ptyp_var var -> Sexp.list [Sexp.atom \"Ptyp_var\"; string var]\n      | Ptyp_arrow (arg_lbl, typ1, typ2) ->\n        Sexp.list\n          [\n            Sexp.atom \"Ptyp_arrow\";\n            arg_label arg_lbl;\n            core_type typ1;\n            core_type typ2;\n          ]\n      | Ptyp_tuple types ->\n        Sexp.list\n          [Sexp.atom \"Ptyp_tuple\"; Sexp.list (map_empty ~f:core_type types)]\n      | Ptyp_constr (longident_loc, types) ->\n        Sexp.list\n          [\n            Sexp.atom \"Ptyp_constr\";\n            longident longident_loc.txt;\n            Sexp.list (map_empty ~f:core_type types);\n          ]\n      | Ptyp_alias (typexpr, alias) ->\n        Sexp.list [Sexp.atom \"Ptyp_alias\"; core_type typexpr; string alias]\n      | Ptyp_object (fields, flag) ->\n        Sexp.list\n          [\n            Sexp.atom \"Ptyp_object\";\n            closed_flag flag;\n            Sexp.list (map_empty ~f:object_field fields);\n          ]\n      | Ptyp_class (longident_loc, types) ->\n        Sexp.list\n          [\n            Sexp.atom 
\"Ptyp_class\";\n            longident longident_loc.Location.txt;\n            Sexp.list (map_empty ~f:core_type types);\n          ]\n      | Ptyp_variant (fields, flag, opt_labels) ->\n        Sexp.list\n          [\n            Sexp.atom \"Ptyp_variant\";\n            Sexp.list (map_empty ~f:row_field fields);\n            closed_flag flag;\n            (match opt_labels with\n            | None -> Sexp.atom \"None\"\n            | Some lbls -> Sexp.list (map_empty ~f:string lbls));\n          ]\n      | Ptyp_poly (lbls, typexpr) ->\n        Sexp.list\n          [\n            Sexp.atom \"Ptyp_poly\";\n            Sexp.list (map_empty ~f:(fun lbl -> string lbl.Asttypes.txt) lbls);\n            core_type typexpr;\n          ]\n      | Ptyp_package package ->\n        Sexp.list [Sexp.atom \"Ptyp_package\"; package_type package]\n      | Ptyp_extension ext ->\n        Sexp.list [Sexp.atom \"Ptyp_extension\"; extension ext]\n    in\n    Sexp.list [Sexp.atom \"core_type\"; desc]\n\n  and payload p =\n    match p with\n    | PStr s -> Sexp.list (Sexp.atom \"PStr\" :: map_empty ~f:structure_item s)\n    | PSig s -> Sexp.list [Sexp.atom \"PSig\"; signature s]\n    | PTyp ct -> Sexp.list [Sexp.atom \"PTyp\"; core_type ct]\n    | PPat (pat, opt_expr) ->\n      Sexp.list\n        [\n          Sexp.atom \"PPat\";\n          pattern pat;\n          (match opt_expr with\n          | Some expr -> Sexp.list [Sexp.atom \"Some\"; expression expr]\n          | None -> Sexp.atom \"None\");\n        ]\n\n  and attribute (string_loc, p) =\n    Sexp.list\n      [Sexp.atom \"attribute\"; Sexp.atom string_loc.Asttypes.txt; payload p]\n\n  and extension (string_loc, p) =\n    Sexp.list\n      [Sexp.atom \"extension\"; Sexp.atom string_loc.Asttypes.txt; payload p]\n\n  and attributes attrs =\n    let sexprs = map_empty ~f:attribute attrs in\n    Sexp.list (Sexp.atom \"attributes\" :: sexprs)\n\n  let print_engine =\n    Res_driver.\n      {\n        print_implementation =\n          (fun 
~width:_ ~filename:_ ~comments:_ parsetree ->\n            parsetree |> structure |> Sexp.to_string |> print_string);\n        print_interface =\n          (fun ~width:_ ~filename:_ ~comments:_ parsetree ->\n            parsetree |> signature |> Sexp.to_string |> print_string);\n      }\nend\n\nlet sexp_print_engine = SexpAst.print_engine\n\nlet comments_print_engine =\n  {\n    Res_driver.print_implementation =\n      (fun ~width:_ ~filename:_ ~comments s ->\n        let cmt_tbl = CommentTable.make () in\n        CommentTable.walk_structure s cmt_tbl comments;\n        CommentTable.log cmt_tbl);\n    print_interface =\n      (fun ~width:_ ~filename:_ ~comments s ->\n        let cmt_tbl = CommentTable.make () in\n        CommentTable.walk_signature s cmt_tbl comments;\n        CommentTable.log cmt_tbl);\n  }\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_ast_debugger.mli",
    "content": "val print_engine : Res_driver.print_engine\nval sexp_print_engine : Res_driver.print_engine\nval comments_print_engine : Res_driver.print_engine\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_cli.ml",
    "content": "(*\n  This CLI isn't used apart for this repo's testing purposes. The syntax\n  itself is used by ReScript's compiler programmatically through various other apis.\n*)\n\n(*\n  This is OCaml's Misc.ml's Color module. More specifically, this is\n  ReScript's OCaml fork's Misc.ml's Color module:\n  https://github.com/rescript-lang/ocaml/blob/92e58bedced8d7e3e177677800a38922327ab860/utils/misc.ml#L540\n\n  The syntax's printing's coloring logic depends on:\n    1. a global mutable variable that's set in the compiler: Misc.Color.color_enabled\n    2. the colors tags supported by Misc.Color, e.g. style_of_tag, which Format\n      tags like @{<error>hello@} use\n    3. etc.\n\n  When this syntax is programmatically used inside ReScript, the various\n  Format tags like <error> and <dim> get properly colored depending on the\n  above points.\n\n  But when used by this cli file, that coloring logic doesn't render properly\n  because we're compiling against vanilla OCaml 4.06 instead of ReScript's\n  OCaml fork. For example, the vanilla compiler doesn't support the `dim`\n  color (grey). 
So we emulate the right coloring logic by copy pasting how our\n  forked OCaml compiler does it.\n*)\nmodule Color = struct\n  (* use ANSI color codes, see https://en.wikipedia.org/wiki/ANSI_escape_code *)\n  type color =\n    | Black [@live]\n    | Red\n    | Green [@live]\n    | Yellow\n    | Blue [@live]\n    | Magenta\n    | Cyan\n    | White [@live]\n\n  type style =\n    | FG of color (* foreground *)\n    | BG of color [@live] (* background *)\n    | Bold\n    | Reset\n    | Dim\n\n  let ansi_of_color = function\n    | Black -> \"0\"\n    | Red -> \"1\"\n    | Green -> \"2\"\n    | Yellow -> \"3\"\n    | Blue -> \"4\"\n    | Magenta -> \"5\"\n    | Cyan -> \"6\"\n    | White -> \"7\"\n\n  let code_of_style = function\n    | FG c -> \"3\" ^ ansi_of_color c\n    | BG c -> \"4\" ^ ansi_of_color c\n    | Bold -> \"1\"\n    | Reset -> \"0\"\n    | Dim -> \"2\"\n\n  let ansi_of_style_l l =\n    let s =\n      match l with\n      | [] -> code_of_style Reset\n      | [s] -> code_of_style s\n      | _ -> String.concat \";\" (List.map code_of_style l)\n    in\n    \"\\x1b[\" ^ s ^ \"m\"\n\n  type styles = {error: style list; warning: style list; loc: style list}\n\n  let default_styles =\n    {warning = [Bold; FG Magenta]; error = [Bold; FG Red]; loc = [Bold]}\n\n  let cur_styles = ref default_styles\n\n  (* let get_styles () = !cur_styles *)\n  (* let set_styles s = cur_styles := s *)\n\n  (* map a tag to a style, if the tag is known.\n     @raise Not_found otherwise *)\n  let style_of_tag s =\n    match s with\n    | Format.String_tag \"error\" -> !cur_styles.error\n    | Format.String_tag \"warning\" -> !cur_styles.warning\n    | Format.String_tag \"loc\" -> !cur_styles.loc\n    | Format.String_tag \"info\" -> [Bold; FG Yellow]\n    | Format.String_tag \"dim\" -> [Dim]\n    | Format.String_tag \"filename\" -> [FG Cyan]\n    | _ -> raise Not_found\n  [@@raises Not_found]\n\n  let color_enabled = ref true\n\n  (* either prints the tag of [s] or delegates to [or_else] 
*)\n  let mark_open_tag ~or_else s =\n    try\n      let style = style_of_tag s in\n      if !color_enabled then ansi_of_style_l style else \"\"\n    with Not_found -> or_else s\n\n  let mark_close_tag ~or_else s =\n    try\n      let _ = style_of_tag s in\n      if !color_enabled then ansi_of_style_l [Reset] else \"\"\n    with Not_found -> or_else s\n\n  (* add color handling to formatter [ppf] *)\n  let set_color_tag_handling ppf =\n    let open Format in\n    let functions = pp_get_formatter_stag_functions ppf () in\n    let functions' =\n      {\n        functions with\n        mark_open_stag = mark_open_tag ~or_else:functions.mark_open_stag;\n        mark_close_stag = mark_close_tag ~or_else:functions.mark_close_stag;\n      }\n    in\n    pp_set_mark_tags ppf true;\n    (* enable tags *)\n    pp_set_formatter_stag_functions ppf functions';\n    (* also setup margins *)\n    pp_set_margin ppf (pp_get_margin std_formatter ());\n    ()\n\n  external isatty : out_channel -> bool = \"caml_sys_isatty\"\n\n  (* reasonable heuristic on whether colors should be enabled *)\n  let should_enable_color () =\n    let term = try Sys.getenv \"TERM\" with Not_found -> \"\" in\n    term <> \"dumb\" && term <> \"\" && isatty stderr\n\n  type setting = Auto [@live] | Always [@live] | Never [@live]\n\n  let setup =\n    let first = ref true in\n    (* initialize only once *)\n    let formatter_l =\n      [Format.std_formatter; Format.err_formatter; Format.str_formatter]\n    in\n    fun o ->\n      if !first then (\n        first := false;\n        Format.set_mark_tags true;\n        List.iter set_color_tag_handling formatter_l;\n        color_enabled :=\n          match o with\n          | Some Always -> true\n          | Some Auto -> should_enable_color ()\n          | Some Never -> false\n          | None -> should_enable_color ());\n      ()\nend\n\n(* command line flags *)\nmodule ResClflags : sig\n  val recover : bool ref\n  val print : string ref\n  val width : int ref\n  
val origin : string ref\n  val file : string ref\n  val interface : bool ref\n  val jsx_version : int ref\n  val jsx_module : string ref\n  val jsx_mode : string ref\n  val typechecker : bool ref\n\n  val parse : unit -> unit\nend = struct\n  let recover = ref false\n  let width = ref 100\n\n  let print = ref \"res\"\n  let origin = ref \"\"\n  let interface = ref false\n  let jsx_version = ref (-1)\n  let jsx_module = ref \"react\"\n  let jsx_mode = ref \"automatic\"\n  let file = ref \"\"\n  let typechecker = ref false\n\n  let usage =\n    \"\\n\\\n     **This command line is for the repo developer's testing purpose only. DO \\\n     NOT use it in production**!\\n\\n\"\n    ^ \"Usage:\\n  res_parser <options> <file>\\n\\n\" ^ \"Examples:\\n\"\n    ^ \"  res_parser myFile.res\\n\"\n    ^ \"  res_parser -parse ml -print res myFile.ml\\n\"\n    ^ \"  res_parser -parse res -print binary -interface myFile.resi\\n\\n\"\n    ^ \"Options are:\"\n\n  let spec =\n    [\n      (\"-recover\", Arg.Unit (fun () -> recover := true), \"Emit partial ast\");\n      ( \"-parse\",\n        Arg.String (fun txt -> origin := txt),\n        \"Parse ml or res. Default: res\" );\n      ( \"-print\",\n        Arg.String (fun txt -> print := txt),\n        \"Print either binary, ml, ast, sexp, comments or res. Default: res\" );\n      ( \"-width\",\n        Arg.Int (fun w -> width := w),\n        \"Specify the line length for the printer (formatter)\" );\n      ( \"-interface\",\n        Arg.Unit (fun () -> interface := true),\n        \"Parse as interface\" );\n      ( \"-jsx-version\",\n        Arg.Int (fun i -> jsx_version := i),\n        \"Apply a specific built-in ppx before parsing, none or 3, 4. Default: \\\n         none\" );\n      ( \"-jsx-module\",\n        Arg.String (fun txt -> jsx_module := txt),\n        \"Specify the jsx module. 
Default: react\" );\n      ( \"-jsx-mode\",\n        Arg.String (fun txt -> jsx_mode := txt),\n        \"Specify the jsx mode, classic or automatic. Default: automatic\" );\n      ( \"-typechecker\",\n        Arg.Unit (fun () -> typechecker := true),\n        \"Parses the ast as it would be passed to the typechecker and not the \\\n         printer\" );\n    ]\n\n  let parse () = Arg.parse spec (fun f -> file := f) usage\nend\n\nmodule CliArgProcessor = struct\n  type backend = Parser : 'diagnostics Res_driver.parsing_engine -> backend\n  [@@unboxed]\n\n  let process_file ~is_interface ~width ~recover ~origin ~target ~jsx_version\n      ~jsx_module ~jsx_mode ~typechecker filename =\n    let len = String.length filename in\n    let process_interface =\n      is_interface\n      || (len > 0 && (String.get [@doesNotRaise]) filename (len - 1) = 'i')\n    in\n    let parsing_engine =\n      match origin with\n      | \"ml\" -> Parser Res_driver_ml_parser.parsing_engine\n      | \"res\" -> Parser Res_driver.parsing_engine\n      | \"\" -> (\n        match Filename.extension filename with\n        | \".ml\" | \".mli\" -> Parser Res_driver_ml_parser.parsing_engine\n        | _ -> Parser Res_driver.parsing_engine)\n      | origin ->\n        print_endline\n          (\"-parse needs to be either ml or res. You provided \" ^ origin);\n        exit 1\n    in\n    let print_engine =\n      match target with\n      | \"binary\" -> Res_driver_binary.print_engine\n      | \"ml\" -> Res_driver_ml_parser.print_engine\n      | \"ast\" -> Res_ast_debugger.print_engine\n      | \"sexp\" -> Res_ast_debugger.sexp_print_engine\n      | \"comments\" -> Res_ast_debugger.comments_print_engine\n      | \"res\" -> Res_driver.print_engine\n      | target ->\n        print_endline\n          (\"-print needs to be either binary, ml, ast, sexp, comments or res. 
\\\n            You provided \" ^ target);\n        exit 1\n    in\n\n    let for_printer =\n      match target with\n      | (\"res\" | \"sexp\") when not typechecker -> true\n      | _ -> false\n    in\n\n    let (Parser backend) = parsing_engine in\n    (* This is the whole purpose of the Color module above *)\n    Color.setup None;\n    if process_interface then\n      let parse_result = backend.parse_interface ~for_printer ~filename in\n      if parse_result.invalid then (\n        backend.string_of_diagnostics ~source:parse_result.source\n          ~filename:parse_result.filename parse_result.diagnostics;\n        if recover then\n          print_engine.print_interface ~width ~filename\n            ~comments:parse_result.comments parse_result.parsetree\n        else exit 1)\n      else\n        let parsetree =\n          Jsx_ppx.rewrite_signature ~jsx_version ~jsx_module ~jsx_mode\n            parse_result.parsetree\n        in\n        print_engine.print_interface ~width ~filename\n          ~comments:parse_result.comments parsetree\n    else\n      let parse_result = backend.parse_implementation ~for_printer ~filename in\n      if parse_result.invalid then (\n        backend.string_of_diagnostics ~source:parse_result.source\n          ~filename:parse_result.filename parse_result.diagnostics;\n        if recover then\n          print_engine.print_implementation ~width ~filename\n            ~comments:parse_result.comments parse_result.parsetree\n        else exit 1)\n      else\n        let parsetree =\n          Jsx_ppx.rewrite_implementation ~jsx_version ~jsx_module ~jsx_mode\n            parse_result.parsetree\n        in\n        print_engine.print_implementation ~width ~filename\n          ~comments:parse_result.comments parsetree\n  [@@raises exit]\nend\n\n(*let () =\n    if not !Sys.interactive then (\n      ResClflags.parse ();\n      CliArgProcessor.process_file ~is_interface:!ResClflags.interface\n        ~width:!ResClflags.width 
~recover:!ResClflags.recover\n        ~target:!ResClflags.print ~origin:!ResClflags.origin\n        ~jsx_version:!ResClflags.jsx_version ~jsx_module:!ResClflags.jsx_module\n        ~jsx_mode:!ResClflags.jsx_mode ~typechecker:!ResClflags.typechecker\n        !ResClflags.file)\n  [@@raises exit]*)\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_comment.ml",
    "content": "type style = SingleLine | MultiLine | DocComment | ModuleComment\n\nlet style_to_string s =\n  match s with\n  | SingleLine -> \"SingleLine\"\n  | MultiLine -> \"MultiLine\"\n  | DocComment -> \"DocComment\"\n  | ModuleComment -> \"ModuleComment\"\n\ntype t = {\n  txt: string;\n  style: style;\n  loc: Location.t;\n  mutable prev_tok_end_pos: Lexing.position;\n}\n\nlet loc t = t.loc\nlet txt t = t.txt\nlet prev_tok_end_pos t = t.prev_tok_end_pos\n\nlet set_prev_tok_end_pos t pos = t.prev_tok_end_pos <- pos\n\nlet is_single_line_comment t = t.style = SingleLine\n\nlet is_doc_comment t = t.style = DocComment\n\nlet is_module_comment t = t.style = ModuleComment\n\nlet to_string t =\n  let {Location.loc_start; loc_end} = t.loc in\n  Format.sprintf \"(txt: %s\\nstyle: %s\\nlocation: %d,%d-%d,%d)\" t.txt\n    (style_to_string t.style) loc_start.pos_lnum\n    (loc_start.pos_cnum - loc_start.pos_bol)\n    loc_end.pos_lnum\n    (loc_end.pos_cnum - loc_end.pos_bol)\n\nlet make_single_line_comment ~loc txt =\n  {txt; loc; style = SingleLine; prev_tok_end_pos = Lexing.dummy_pos}\n\nlet make_multi_line_comment ~loc ~doc_comment ~standalone txt =\n  {\n    txt;\n    loc;\n    style =\n      (if doc_comment then if standalone then ModuleComment else DocComment\n       else MultiLine);\n    prev_tok_end_pos = Lexing.dummy_pos;\n  }\n\nlet from_ocaml_comment ~loc ~txt ~prev_tok_end_pos =\n  {txt; loc; style = MultiLine; prev_tok_end_pos}\n\nlet trim_spaces s =\n  let len = String.length s in\n  if len = 0 then s\n  else if String.unsafe_get s 0 = ' ' || String.unsafe_get s (len - 1) = ' '\n  then (\n    let i = ref 0 in\n    while !i < len && String.unsafe_get s !i = ' ' do\n      incr i\n    done;\n    let j = ref (len - 1) in\n    while !j >= !i && String.unsafe_get s !j = ' ' do\n      decr j\n    done;\n    if !j >= !i then (String.sub [@doesNotRaise]) s !i (!j - !i + 1) else \"\")\n  else s\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_comment.mli",
    "content": "type t\n\nval to_string : t -> string\n\nval loc : t -> Location.t\nval txt : t -> string\nval prev_tok_end_pos : t -> Lexing.position\n\nval set_prev_tok_end_pos : t -> Lexing.position -> unit\n\nval is_doc_comment : t -> bool\n\nval is_module_comment : t -> bool\n\nval is_single_line_comment : t -> bool\n\nval make_single_line_comment : loc:Location.t -> string -> t\nval make_multi_line_comment :\n  loc:Location.t -> doc_comment:bool -> standalone:bool -> string -> t\nval from_ocaml_comment :\n  loc:Location.t -> txt:string -> prev_tok_end_pos:Lexing.position -> t\nval trim_spaces : string -> string\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_comments_table.ml",
    "content": "module Comment = Res_comment\nmodule Doc = Res_doc\nmodule ParsetreeViewer = Res_parsetree_viewer\n\ntype t = {\n  leading: (Location.t, Comment.t list) Hashtbl.t;\n  inside: (Location.t, Comment.t list) Hashtbl.t;\n  trailing: (Location.t, Comment.t list) Hashtbl.t;\n}\n\nlet make () =\n  {\n    leading = Hashtbl.create 100;\n    inside = Hashtbl.create 100;\n    trailing = Hashtbl.create 100;\n  }\n\nlet copy tbl =\n  {\n    leading = Hashtbl.copy tbl.leading;\n    inside = Hashtbl.copy tbl.inside;\n    trailing = Hashtbl.copy tbl.trailing;\n  }\n\nlet empty = make ()\n\nlet print_entries tbl =\n  let open Location in\n  Hashtbl.fold\n    (fun (k : Location.t) (v : Comment.t list) acc ->\n      let loc =\n        Doc.concat\n          [\n            Doc.lbracket;\n            Doc.text (string_of_int k.loc_start.pos_lnum);\n            Doc.text \":\";\n            Doc.text\n              (string_of_int (k.loc_start.pos_cnum - k.loc_start.pos_bol));\n            Doc.text \"-\";\n            Doc.text (string_of_int k.loc_end.pos_lnum);\n            Doc.text \":\";\n            Doc.text (string_of_int (k.loc_end.pos_cnum - k.loc_end.pos_bol));\n            Doc.rbracket;\n          ]\n      in\n      let doc =\n        Doc.breakable_group ~force_break:true\n          (Doc.concat\n             [\n               loc;\n               Doc.indent\n                 (Doc.concat\n                    [\n                      Doc.line;\n                      Doc.join\n                        ~sep:(Doc.concat [Doc.comma; Doc.line])\n                        (List.map (fun c -> Doc.text (Comment.txt c)) v);\n                    ]);\n               Doc.line;\n             ])\n      in\n      doc :: acc)\n    tbl []\n\nlet log t =\n  let leading_stuff = print_entries t.leading in\n  let trailing_stuff = print_entries t.trailing in\n  let stuff_inside = print_entries t.inside in\n  Doc.breakable_group ~force_break:true\n    (Doc.concat\n       [\n         Doc.text 
\"leading comments:\";\n         Doc.indent (Doc.concat [Doc.line; Doc.concat leading_stuff]);\n         Doc.line;\n         Doc.text \"comments inside:\";\n         Doc.indent (Doc.concat [Doc.line; Doc.concat stuff_inside]);\n         Doc.line;\n         Doc.text \"trailing comments:\";\n         Doc.indent (Doc.concat [Doc.line; Doc.concat trailing_stuff]);\n         Doc.line;\n       ])\n  |> Doc.to_string ~width:80 |> print_endline\n\nlet attach tbl loc comments =\n  match comments with\n  | [] -> ()\n  | comments -> Hashtbl.replace tbl loc comments\n\nlet partition_by_loc comments loc =\n  let rec loop (leading, inside, trailing) comments =\n    let open Location in\n    match comments with\n    | comment :: rest ->\n      let cmt_loc = Comment.loc comment in\n      if cmt_loc.loc_end.pos_cnum <= loc.loc_start.pos_cnum then\n        loop (comment :: leading, inside, trailing) rest\n      else if cmt_loc.loc_start.pos_cnum >= loc.loc_end.pos_cnum then\n        loop (leading, inside, comment :: trailing) rest\n      else loop (leading, comment :: inside, trailing) rest\n    | [] -> (List.rev leading, List.rev inside, List.rev trailing)\n  in\n  loop ([], [], []) comments\n\nlet partition_leading_trailing comments loc =\n  let rec loop (leading, trailing) comments =\n    let open Location in\n    match comments with\n    | comment :: rest ->\n      let cmt_loc = Comment.loc comment in\n      if cmt_loc.loc_end.pos_cnum <= loc.loc_start.pos_cnum then\n        loop (comment :: leading, trailing) rest\n      else loop (leading, comment :: trailing) rest\n    | [] -> (List.rev leading, List.rev trailing)\n  in\n  loop ([], []) comments\n\nlet partition_by_on_same_line loc comments =\n  let rec loop (on_same_line, on_other_line) comments =\n    let open Location in\n    match comments with\n    | [] -> (List.rev on_same_line, List.rev on_other_line)\n    | comment :: rest ->\n      let cmt_loc = Comment.loc comment in\n      if cmt_loc.loc_start.pos_lnum == 
loc.loc_end.pos_lnum then\n        loop (comment :: on_same_line, on_other_line) rest\n      else loop (on_same_line, comment :: on_other_line) rest\n  in\n  loop ([], []) comments\n\nlet partition_adjacent_trailing loc1 comments =\n  let open Location in\n  let open Lexing in\n  let rec loop ~prev_end_pos after_loc1 comments =\n    match comments with\n    | [] -> (List.rev after_loc1, [])\n    | comment :: rest as comments ->\n      let cmt_prev_end_pos = Comment.prev_tok_end_pos comment in\n      if prev_end_pos.Lexing.pos_cnum == cmt_prev_end_pos.pos_cnum then\n        let comment_end = (Comment.loc comment).loc_end in\n        loop ~prev_end_pos:comment_end (comment :: after_loc1) rest\n      else (List.rev after_loc1, comments)\n  in\n  loop ~prev_end_pos:loc1.loc_end [] comments\n\nlet rec collect_list_patterns acc pattern =\n  let open Parsetree in\n  match pattern.ppat_desc with\n  | Ppat_construct\n      ({txt = Longident.Lident \"::\"}, Some {ppat_desc = Ppat_tuple [pat; rest]})\n    ->\n    collect_list_patterns (pat :: acc) rest\n  | Ppat_construct ({txt = Longident.Lident \"[]\"}, None) -> List.rev acc\n  | _ -> List.rev (pattern :: acc)\n\nlet rec collect_list_exprs acc expr =\n  let open Parsetree in\n  match expr.pexp_desc with\n  | Pexp_construct\n      ({txt = Longident.Lident \"::\"}, Some {pexp_desc = Pexp_tuple [expr; rest]})\n    ->\n    collect_list_exprs (expr :: acc) rest\n  | Pexp_construct ({txt = Longident.Lident \"[]\"}, _) -> List.rev acc\n  | _ -> List.rev (expr :: acc)\n\n(* TODO: use ParsetreeViewer *)\nlet arrow_type ct =\n  let open Parsetree in\n  let rec process attrs_before acc typ =\n    match typ with\n    | {\n     ptyp_desc = Ptyp_arrow ((Nolabel as lbl), typ1, typ2);\n     ptyp_attributes = [];\n    } ->\n      let arg = ([], lbl, typ1) in\n      process attrs_before (arg :: acc) typ2\n    | {\n     ptyp_desc = Ptyp_arrow ((Nolabel as lbl), typ1, typ2);\n     ptyp_attributes = [({txt = \"bs\"}, _)] as attrs;\n    } ->\n   
   let arg = (attrs, lbl, typ1) in\n      process attrs_before (arg :: acc) typ2\n    | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = _attrs}\n      as return_type ->\n      let args = List.rev acc in\n      (attrs_before, args, return_type)\n    | {\n     ptyp_desc = Ptyp_arrow (((Labelled _ | Optional _) as lbl), typ1, typ2);\n     ptyp_attributes = attrs;\n    } ->\n      let arg = (attrs, lbl, typ1) in\n      process attrs_before (arg :: acc) typ2\n    | typ -> (attrs_before, List.rev acc, typ)\n  in\n  match ct with\n  | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = attrs} as\n    typ ->\n    process attrs [] {typ with ptyp_attributes = []}\n  | typ -> process [] [] typ\n\n(* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? *)\nlet mod_expr_apply mod_expr =\n  let rec loop acc mod_expr =\n    match mod_expr with\n    | {Parsetree.pmod_desc = Pmod_apply (next, arg)} -> loop (arg :: acc) next\n    | _ -> mod_expr :: acc\n  in\n  loop [] mod_expr\n\n(* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? 
*)\nlet mod_expr_functor mod_expr =\n  let rec loop acc mod_expr =\n    match mod_expr with\n    | {\n     Parsetree.pmod_desc = Pmod_functor (lbl, mod_type, return_mod_expr);\n     pmod_attributes = attrs;\n    } ->\n      let param = (attrs, lbl, mod_type) in\n      loop (param :: acc) return_mod_expr\n    | return_mod_expr -> (List.rev acc, return_mod_expr)\n  in\n  loop [] mod_expr\n\nlet functor_type modtype =\n  let rec process acc modtype =\n    match modtype with\n    | {\n     Parsetree.pmty_desc = Pmty_functor (lbl, arg_type, return_type);\n     pmty_attributes = attrs;\n    } ->\n      let arg = (attrs, lbl, arg_type) in\n      process (arg :: acc) return_type\n    | mod_type -> (List.rev acc, mod_type)\n  in\n  process [] modtype\n\nlet fun_expr expr =\n  let open Parsetree in\n  (* Turns (type t, type u, type z) into \"type t u z\" *)\n  let rec collect_new_types acc return_expr =\n    match return_expr with\n    | {pexp_desc = Pexp_newtype (string_loc, return_expr); pexp_attributes = []}\n      ->\n      collect_new_types (string_loc :: acc) return_expr\n    | return_expr ->\n      let loc =\n        match (acc, List.rev acc) with\n        | _startLoc :: _, end_loc :: _ ->\n          {end_loc.loc with loc_end = end_loc.loc.loc_end}\n        | _ -> Location.none\n      in\n      let txt =\n        List.fold_right\n          (fun curr acc -> acc ^ \" \" ^ curr.Location.txt)\n          acc \"type\"\n      in\n      (Location.mkloc txt loc, return_expr)\n  in\n  (* For simplicity reason Pexp_newtype gets converted to a Nolabel parameter,\n   * otherwise this function would need to return a variant:\n   * | NormalParamater(...)\n   * | NewType(...)\n   * This complicates printing with an extra variant/boxing/allocation for a code-path\n   * that is not often used. 
Lets just keep it simple for now *)\n  let rec collect attrs_before acc expr =\n    match expr with\n    | {\n     pexp_desc = Pexp_fun (lbl, default_expr, pattern, return_expr);\n     pexp_attributes = [];\n    } ->\n      let parameter = ([], lbl, default_expr, pattern) in\n      collect attrs_before (parameter :: acc) return_expr\n    | {pexp_desc = Pexp_newtype (string_loc, rest); pexp_attributes = attrs} ->\n      let var, return_expr = collect_new_types [string_loc] rest in\n      let parameter =\n        ( attrs,\n          Asttypes.Nolabel,\n          None,\n          Ast_helper.Pat.var ~loc:string_loc.loc var )\n      in\n      collect attrs_before (parameter :: acc) return_expr\n    | {\n     pexp_desc = Pexp_fun (lbl, default_expr, pattern, return_expr);\n     pexp_attributes = [({txt = \"bs\"}, _)] as attrs;\n    } ->\n      let parameter = (attrs, lbl, default_expr, pattern) in\n      collect attrs_before (parameter :: acc) return_expr\n    | {\n     pexp_desc =\n       Pexp_fun\n         (((Labelled _ | Optional _) as lbl), default_expr, pattern, return_expr);\n     pexp_attributes = attrs;\n    } ->\n      let parameter = (attrs, lbl, default_expr, pattern) in\n      collect attrs_before (parameter :: acc) return_expr\n    | expr -> (attrs_before, List.rev acc, expr)\n  in\n  match expr with\n  | {\n      pexp_desc = Pexp_fun (Nolabel, _defaultExpr, _pattern, _returnExpr);\n      pexp_attributes = attrs;\n    } as expr ->\n    collect attrs [] {expr with pexp_attributes = []}\n  | expr -> collect [] [] expr\n\nlet rec is_block_expr expr =\n  let open Parsetree in\n  match expr.pexp_desc with\n  | Pexp_letmodule _ | Pexp_letexception _ | Pexp_let _ | Pexp_open _\n  | Pexp_sequence _ ->\n    true\n  | Pexp_apply (call_expr, _) when is_block_expr call_expr -> true\n  | Pexp_constraint (expr, _) when is_block_expr expr -> true\n  | Pexp_field (expr, _) when is_block_expr expr -> true\n  | Pexp_setfield (expr, _, _) when is_block_expr expr -> true\n  | _ 
-> false\n\nlet is_if_then_else_expr expr =\n  let open Parsetree in\n  match expr.pexp_desc with\n  | Pexp_ifthenelse _ -> true\n  | _ -> false\n\ntype node =\n  | Case of Parsetree.case\n  | CoreType of Parsetree.core_type\n  | ExprArgument of Parsetree.expression\n  | Expression of Parsetree.expression\n  | ExprRecordRow of Longident.t Asttypes.loc * Parsetree.expression\n  | ExtensionConstructor of Parsetree.extension_constructor\n  | LabelDeclaration of Parsetree.label_declaration\n  | ModuleBinding of Parsetree.module_binding\n  | ModuleDeclaration of Parsetree.module_declaration\n  | ModuleExpr of Parsetree.module_expr\n  | ObjectField of Parsetree.object_field\n  | PackageConstraint of Longident.t Asttypes.loc * Parsetree.core_type\n  | Pattern of Parsetree.pattern\n  | PatternRecordRow of Longident.t Asttypes.loc * Parsetree.pattern\n  | RowField of Parsetree.row_field\n  | SignatureItem of Parsetree.signature_item\n  | StructureItem of Parsetree.structure_item\n  | TypeDeclaration of Parsetree.type_declaration\n  | ValueBinding of Parsetree.value_binding\n\nlet get_loc node =\n  let open Parsetree in\n  match node with\n  | Case case ->\n    {\n      case.pc_lhs.ppat_loc with\n      loc_end =\n        (match ParsetreeViewer.process_braces_attr case.pc_rhs with\n        | None, _ -> case.pc_rhs.pexp_loc.loc_end\n        | Some ({loc}, _), _ -> loc.Location.loc_end);\n    }\n  | CoreType ct -> ct.ptyp_loc\n  | ExprArgument expr -> (\n    match expr.Parsetree.pexp_attributes with\n    | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _attrs ->\n      {loc with loc_end = expr.pexp_loc.loc_end}\n    | _ -> expr.pexp_loc)\n  | Expression e -> (\n    match e.pexp_attributes with\n    | ({txt = \"res.braces\" | \"ns.braces\"; loc}, _) :: _ -> loc\n    | _ -> e.pexp_loc)\n  | ExprRecordRow (li, e) -> {li.loc with loc_end = e.pexp_loc.loc_end}\n  | ExtensionConstructor ec -> ec.pext_loc\n  | LabelDeclaration ld -> ld.pld_loc\n  | ModuleBinding mb -> mb.pmb_loc\n  
| ModuleDeclaration md -> md.pmd_loc\n  | ModuleExpr me -> me.pmod_loc\n  | ObjectField field -> (\n    match field with\n    | Parsetree.Otag (lbl, _, typ) ->\n      {lbl.loc with loc_end = typ.ptyp_loc.loc_end}\n    | _ -> Location.none)\n  | PackageConstraint (li, te) -> {li.loc with loc_end = te.ptyp_loc.loc_end}\n  | Pattern p -> p.ppat_loc\n  | PatternRecordRow (li, p) -> {li.loc with loc_end = p.ppat_loc.loc_end}\n  | RowField rf -> (\n    match rf with\n    | Parsetree.Rtag ({loc}, _, _, _) -> loc\n    | Rinherit {ptyp_loc} -> ptyp_loc)\n  | SignatureItem si -> si.psig_loc\n  | StructureItem si -> si.pstr_loc\n  | TypeDeclaration td -> td.ptype_loc\n  | ValueBinding vb -> vb.pvb_loc\n\nlet rec walk_structure s t comments =\n  match s with\n  | _ when comments = [] -> ()\n  | [] -> attach t.inside Location.none comments\n  | s -> walk_list (s |> List.map (fun si -> StructureItem si)) t comments\n\nand walk_structure_item si t comments =\n  match si.Parsetree.pstr_desc with\n  | _ when comments = [] -> ()\n  | Pstr_primitive value_description ->\n    walk_value_description value_description t comments\n  | Pstr_open open_description ->\n    walk_open_description open_description t comments\n  | Pstr_value (_, value_bindings) ->\n    walk_value_bindings value_bindings t comments\n  | Pstr_type (_, type_declarations) ->\n    walk_type_declarations type_declarations t comments\n  | Pstr_eval (expr, _) -> walk_expression expr t comments\n  | Pstr_module module_binding -> walk_module_binding module_binding t comments\n  | Pstr_recmodule module_bindings ->\n    walk_list\n      (module_bindings |> List.map (fun mb -> ModuleBinding mb))\n      t comments\n  | Pstr_modtype mod_typ_decl ->\n    walk_module_type_declaration mod_typ_decl t comments\n  | Pstr_attribute attribute -> walk_attribute attribute t comments\n  | Pstr_extension (extension, _) -> walk_extension extension t comments\n  | Pstr_include include_declaration ->\n    walk_include_declaration 
include_declaration t comments\n  | Pstr_exception extension_constructor ->\n    walk_extension_constructor extension_constructor t comments\n  | Pstr_typext type_extension -> walk_type_extension type_extension t comments\n  | Pstr_class_type _ | Pstr_class _ -> ()\n\nand walk_value_description vd t comments =\n  let leading, trailing =\n    partition_leading_trailing comments vd.pval_name.loc\n  in\n  attach t.leading vd.pval_name.loc leading;\n  let after_name, rest =\n    partition_adjacent_trailing vd.pval_name.loc trailing\n  in\n  attach t.trailing vd.pval_name.loc after_name;\n  let before, inside, after = partition_by_loc rest vd.pval_type.ptyp_loc in\n  attach t.leading vd.pval_type.ptyp_loc before;\n  walk_core_type vd.pval_type t inside;\n  attach t.trailing vd.pval_type.ptyp_loc after\n\nand walk_type_extension te t comments =\n  let leading, trailing =\n    partition_leading_trailing comments te.ptyext_path.loc\n  in\n  attach t.leading te.ptyext_path.loc leading;\n  let after_path, rest =\n    partition_adjacent_trailing te.ptyext_path.loc trailing\n  in\n  attach t.trailing te.ptyext_path.loc after_path;\n\n  (* type params *)\n  let rest =\n    match te.ptyext_params with\n    | [] -> rest\n    | type_params ->\n      visit_list_but_continue_with_remaining_comments\n        ~get_loc:(fun (typexpr, _variance) -> typexpr.Parsetree.ptyp_loc)\n        ~walk_node:walk_type_param ~newline_delimited:false type_params t rest\n  in\n  walk_list\n    (te.ptyext_constructors |> List.map (fun ec -> ExtensionConstructor ec))\n    t rest\n\nand walk_include_declaration incl_decl t comments =\n  let before, inside, after =\n    partition_by_loc comments incl_decl.pincl_mod.pmod_loc\n  in\n  attach t.leading incl_decl.pincl_mod.pmod_loc before;\n  walk_module_expr incl_decl.pincl_mod t inside;\n  attach t.trailing incl_decl.pincl_mod.pmod_loc after\n\nand walk_module_type_declaration mtd t comments =\n  let leading, trailing =\n    partition_leading_trailing 
comments mtd.pmtd_name.loc\n  in\n  attach t.leading mtd.pmtd_name.loc leading;\n  match mtd.pmtd_type with\n  | None -> attach t.trailing mtd.pmtd_name.loc trailing\n  | Some mod_type ->\n    let after_name, rest =\n      partition_adjacent_trailing mtd.pmtd_name.loc trailing\n    in\n    attach t.trailing mtd.pmtd_name.loc after_name;\n    let before, inside, after = partition_by_loc rest mod_type.pmty_loc in\n    attach t.leading mod_type.pmty_loc before;\n    walk_mod_type mod_type t inside;\n    attach t.trailing mod_type.pmty_loc after\n\nand walk_module_binding mb t comments =\n  let leading, trailing = partition_leading_trailing comments mb.pmb_name.loc in\n  attach t.leading mb.pmb_name.loc leading;\n  let after_name, rest = partition_adjacent_trailing mb.pmb_name.loc trailing in\n  attach t.trailing mb.pmb_name.loc after_name;\n  let leading, inside, trailing = partition_by_loc rest mb.pmb_expr.pmod_loc in\n  (match mb.pmb_expr.pmod_desc with\n  | Pmod_constraint _ ->\n    walk_module_expr mb.pmb_expr t (List.concat [leading; inside])\n  | _ ->\n    attach t.leading mb.pmb_expr.pmod_loc leading;\n    walk_module_expr mb.pmb_expr t inside);\n  attach t.trailing mb.pmb_expr.pmod_loc trailing\n\nand walk_signature signature t comments =\n  match signature with\n  | _ when comments = [] -> ()\n  | [] -> attach t.inside Location.none comments\n  | _s ->\n    walk_list (signature |> List.map (fun si -> SignatureItem si)) t comments\n\nand walk_signature_item (si : Parsetree.signature_item) t comments =\n  match si.psig_desc with\n  | _ when comments = [] -> ()\n  | Psig_value value_description ->\n    walk_value_description value_description t comments\n  | Psig_type (_, type_declarations) ->\n    walk_type_declarations type_declarations t comments\n  | Psig_typext type_extension -> walk_type_extension type_extension t comments\n  | Psig_exception extension_constructor ->\n    walk_extension_constructor extension_constructor t comments\n  | Psig_module 
module_declaration ->\n    walk_module_declaration module_declaration t comments\n  | Psig_recmodule module_declarations ->\n    walk_list\n      (module_declarations |> List.map (fun md -> ModuleDeclaration md))\n      t comments\n  | Psig_modtype module_type_declaration ->\n    walk_module_type_declaration module_type_declaration t comments\n  | Psig_open open_description ->\n    walk_open_description open_description t comments\n  | Psig_include include_description ->\n    walk_include_description include_description t comments\n  | Psig_attribute attribute -> walk_attribute attribute t comments\n  | Psig_extension (extension, _) -> walk_extension extension t comments\n  | Psig_class _ | Psig_class_type _ -> ()\n\nand walk_include_description id t comments =\n  let before, inside, after = partition_by_loc comments id.pincl_mod.pmty_loc in\n  attach t.leading id.pincl_mod.pmty_loc before;\n  walk_mod_type id.pincl_mod t inside;\n  attach t.trailing id.pincl_mod.pmty_loc after\n\nand walk_module_declaration md t comments =\n  let leading, trailing = partition_leading_trailing comments md.pmd_name.loc in\n  attach t.leading md.pmd_name.loc leading;\n  let after_name, rest = partition_adjacent_trailing md.pmd_name.loc trailing in\n  attach t.trailing md.pmd_name.loc after_name;\n  let leading, inside, trailing = partition_by_loc rest md.pmd_type.pmty_loc in\n  attach t.leading md.pmd_type.pmty_loc leading;\n  walk_mod_type md.pmd_type t inside;\n  attach t.trailing md.pmd_type.pmty_loc trailing\n\nand walk_node node tbl comments =\n  match node with\n  | Case c -> walk_case c tbl comments\n  | CoreType ct -> walk_core_type ct tbl comments\n  | ExprArgument ea -> walk_expr_argument ea tbl comments\n  | Expression e -> walk_expression e tbl comments\n  | ExprRecordRow (ri, e) -> walk_expr_record_row (ri, e) tbl comments\n  | ExtensionConstructor ec -> walk_extension_constructor ec tbl comments\n  | LabelDeclaration ld -> walk_label_declaration ld tbl comments\n  | 
ModuleBinding mb -> walk_module_binding mb tbl comments\n  | ModuleDeclaration md -> walk_module_declaration md tbl comments\n  | ModuleExpr me -> walk_module_expr me tbl comments\n  | ObjectField f -> walk_object_field f tbl comments\n  | PackageConstraint (li, te) -> walk_package_constraint (li, te) tbl comments\n  | Pattern p -> walk_pattern p tbl comments\n  | PatternRecordRow (li, p) -> walk_pattern_record_row (li, p) tbl comments\n  | RowField rf -> walk_row_field rf tbl comments\n  | SignatureItem si -> walk_signature_item si tbl comments\n  | StructureItem si -> walk_structure_item si tbl comments\n  | TypeDeclaration td -> walk_type_declaration td tbl comments\n  | ValueBinding vb -> walk_value_binding vb tbl comments\n\nand walk_list : ?prev_loc:Location.t -> node list -> t -> Comment.t list -> unit\n    =\n fun ?prev_loc l t comments ->\n  match l with\n  | _ when comments = [] -> ()\n  | [] -> (\n    match prev_loc with\n    | Some loc -> attach t.trailing loc comments\n    | None -> ())\n  | node :: rest ->\n    let curr_loc = get_loc node in\n    let leading, inside, trailing = partition_by_loc comments curr_loc in\n    (match prev_loc with\n    | None ->\n      (* first node, all leading comments attach here *)\n      attach t.leading curr_loc leading\n    | Some prev_loc ->\n      (* Same line *)\n      if prev_loc.loc_end.pos_lnum == curr_loc.loc_start.pos_lnum then (\n        let after_prev, before_curr =\n          partition_adjacent_trailing prev_loc leading\n        in\n        attach t.trailing prev_loc after_prev;\n        attach t.leading curr_loc before_curr)\n      else\n        let on_same_line_as_prev, after_prev =\n          partition_by_on_same_line prev_loc leading\n        in\n        attach t.trailing prev_loc on_same_line_as_prev;\n        let leading, _inside, _trailing =\n          partition_by_loc after_prev curr_loc\n        in\n        attach t.leading curr_loc leading);\n    walk_node node t inside;\n    walk_list 
~prev_loc:curr_loc rest t trailing\n\n(* The parsetree doesn't always contain location info about the opening or\n * closing token of a \"list-of-things\". This routine visits the whole list,\n * but returns any remaining comments that likely fall after the whole list. *)\nand visit_list_but_continue_with_remaining_comments :\n    'node.\n    ?prev_loc:Location.t ->\n    newline_delimited:bool ->\n    get_loc:('node -> Location.t) ->\n    walk_node:('node -> t -> Comment.t list -> unit) ->\n    'node list ->\n    t ->\n    Comment.t list ->\n    Comment.t list =\n fun ?prev_loc ~newline_delimited ~get_loc ~walk_node l t comments ->\n  let open Location in\n  match l with\n  | _ when comments = [] -> []\n  | [] -> (\n    match prev_loc with\n    | Some loc ->\n      let after_prev, rest =\n        if newline_delimited then partition_by_on_same_line loc comments\n        else partition_adjacent_trailing loc comments\n      in\n      attach t.trailing loc after_prev;\n      rest\n    | None -> comments)\n  | node :: rest ->\n    let curr_loc = get_loc node in\n    let leading, inside, trailing = partition_by_loc comments curr_loc in\n    let () =\n      match prev_loc with\n      | None ->\n        (* first node, all leading comments attach here *)\n        attach t.leading curr_loc leading;\n        ()\n      | Some prev_loc ->\n        (* Same line *)\n        if prev_loc.loc_end.pos_lnum == curr_loc.loc_start.pos_lnum then\n          let after_prev, before_curr =\n            partition_adjacent_trailing prev_loc leading\n          in\n          let () = attach t.trailing prev_loc after_prev in\n          let () = attach t.leading curr_loc before_curr in\n          ()\n        else\n          let on_same_line_as_prev, after_prev =\n            partition_by_on_same_line prev_loc leading\n          in\n          let () = attach t.trailing prev_loc on_same_line_as_prev in\n          let leading, _inside, _trailing =\n            partition_by_loc after_prev curr_loc\n   
       in\n          let () = attach t.leading curr_loc leading in\n          ()\n    in\n    walk_node node t inside;\n    visit_list_but_continue_with_remaining_comments ~prev_loc:curr_loc ~get_loc\n      ~walk_node ~newline_delimited rest t trailing\n\nand walk_value_bindings vbs t comments =\n  walk_list (vbs |> List.map (fun vb -> ValueBinding vb)) t comments\n\nand walk_open_description open_description t comments =\n  let loc = open_description.popen_lid.loc in\n  let leading, trailing = partition_leading_trailing comments loc in\n  attach t.leading loc leading;\n  attach t.trailing loc trailing\n\nand walk_type_declarations type_declarations t comments =\n  walk_list\n    (type_declarations |> List.map (fun td -> TypeDeclaration td))\n    t comments\n\nand walk_type_param (typexpr, _variance) t comments =\n  walk_core_type typexpr t comments\n\nand walk_type_declaration (td : Parsetree.type_declaration) t comments =\n  let before_name, rest =\n    partition_leading_trailing comments td.ptype_name.loc\n  in\n  attach t.leading td.ptype_name.loc before_name;\n\n  let after_name, rest = partition_adjacent_trailing td.ptype_name.loc rest in\n  attach t.trailing td.ptype_name.loc after_name;\n\n  (* type params *)\n  let rest =\n    match td.ptype_params with\n    | [] -> rest\n    | type_params ->\n      visit_list_but_continue_with_remaining_comments\n        ~get_loc:(fun (typexpr, _variance) -> typexpr.Parsetree.ptyp_loc)\n        ~walk_node:walk_type_param ~newline_delimited:false type_params t rest\n  in\n\n  (* manifest:  = typexpr *)\n  let rest =\n    match td.ptype_manifest with\n    | Some typexpr ->\n      let before_typ, inside_typ, after_typ =\n        partition_by_loc rest typexpr.ptyp_loc\n      in\n      attach t.leading typexpr.ptyp_loc before_typ;\n      walk_core_type typexpr t inside_typ;\n      let after_typ, rest =\n        partition_adjacent_trailing typexpr.Parsetree.ptyp_loc after_typ\n      in\n      attach t.trailing typexpr.ptyp_loc 
after_typ;\n      rest\n    | None -> rest\n  in\n\n  let rest =\n    match td.ptype_kind with\n    | Ptype_abstract | Ptype_open -> rest\n    | Ptype_record label_declarations ->\n      let () =\n        if label_declarations = [] then attach t.inside td.ptype_loc rest\n        else\n          walk_list\n            (label_declarations |> List.map (fun ld -> LabelDeclaration ld))\n            t rest\n      in\n      []\n    | Ptype_variant constructor_declarations ->\n      walk_constructor_declarations constructor_declarations t rest\n  in\n  attach t.trailing td.ptype_loc rest\n\nand walk_label_declarations lds t comments =\n  visit_list_but_continue_with_remaining_comments\n    ~get_loc:(fun ld -> ld.Parsetree.pld_loc)\n    ~walk_node:walk_label_declaration ~newline_delimited:false lds t comments\n\nand walk_label_declaration ld t comments =\n  let before_name, rest = partition_leading_trailing comments ld.pld_name.loc in\n  attach t.leading ld.pld_name.loc before_name;\n  let after_name, rest = partition_adjacent_trailing ld.pld_name.loc rest in\n  attach t.trailing ld.pld_name.loc after_name;\n  let before_typ, inside_typ, after_typ =\n    partition_by_loc rest ld.pld_type.ptyp_loc\n  in\n  attach t.leading ld.pld_type.ptyp_loc before_typ;\n  walk_core_type ld.pld_type t inside_typ;\n  attach t.trailing ld.pld_type.ptyp_loc after_typ\n\nand walk_constructor_declarations cds t comments =\n  visit_list_but_continue_with_remaining_comments\n    ~get_loc:(fun cd -> cd.Parsetree.pcd_loc)\n    ~walk_node:walk_constructor_declaration ~newline_delimited:false cds t\n    comments\n\nand walk_constructor_declaration cd t comments =\n  let before_name, rest = partition_leading_trailing comments cd.pcd_name.loc in\n  attach t.leading cd.pcd_name.loc before_name;\n  let after_name, rest = partition_adjacent_trailing cd.pcd_name.loc rest in\n  attach t.trailing cd.pcd_name.loc after_name;\n  let rest = walk_constructor_arguments cd.pcd_args t rest in\n\n  let rest =\n    
match cd.pcd_res with\n    | Some typexpr ->\n      let before_typ, inside_typ, after_typ =\n        partition_by_loc rest typexpr.ptyp_loc\n      in\n      attach t.leading typexpr.ptyp_loc before_typ;\n      walk_core_type typexpr t inside_typ;\n      let after_typ, rest =\n        partition_adjacent_trailing typexpr.Parsetree.ptyp_loc after_typ\n      in\n      attach t.trailing typexpr.ptyp_loc after_typ;\n      rest\n    | None -> rest\n  in\n  attach t.trailing cd.pcd_loc rest\n\nand walk_constructor_arguments args t comments =\n  match args with\n  | Pcstr_tuple typexprs ->\n    visit_list_but_continue_with_remaining_comments\n      ~get_loc:(fun n -> n.Parsetree.ptyp_loc)\n      ~walk_node:walk_core_type ~newline_delimited:false typexprs t comments\n  | Pcstr_record label_declarations ->\n    walk_label_declarations label_declarations t comments\n\nand walk_value_binding vb t comments =\n  let open Location in\n  let vb =\n    let open Parsetree in\n    match (vb.pvb_pat, vb.pvb_expr) with\n    | ( {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly ([], t)})},\n        {pexp_desc = Pexp_constraint (expr, _typ)} ) ->\n      {\n        vb with\n        pvb_pat =\n          Ast_helper.Pat.constraint_\n            ~loc:{pat.ppat_loc with loc_end = t.Parsetree.ptyp_loc.loc_end}\n            pat t;\n        pvb_expr = expr;\n      }\n    | ( {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly (_ :: _, t)})},\n        {pexp_desc = Pexp_fun _} ) ->\n      {\n        vb with\n        pvb_pat =\n          {\n            vb.pvb_pat with\n            ppat_loc = {pat.ppat_loc with loc_end = t.ptyp_loc.loc_end};\n          };\n      }\n    | ( ({\n           ppat_desc =\n             Ppat_constraint (pat, ({ptyp_desc = Ptyp_poly (_ :: _, t)} as typ));\n         } as constrained_pattern),\n        {pexp_desc = Pexp_newtype (_, {pexp_desc = Pexp_constraint (expr, _)})}\n      ) ->\n      (*\n       * The location of the Ptyp_poly on the pattern is the whole 
thing.\n       * let x:\n       *   type t. (int, int) => int =\n       *   (a, b) => {\n       *     // comment\n       *     a + b\n       *   }\n       *)\n      {\n        vb with\n        pvb_pat =\n          {\n            constrained_pattern with\n            ppat_desc = Ppat_constraint (pat, typ);\n            ppat_loc =\n              {constrained_pattern.ppat_loc with loc_end = t.ptyp_loc.loc_end};\n          };\n        pvb_expr = expr;\n      }\n    | _ -> vb\n  in\n  let pattern_loc = vb.Parsetree.pvb_pat.ppat_loc in\n  let expr_loc = vb.Parsetree.pvb_expr.pexp_loc in\n  let expr = vb.pvb_expr in\n\n  let leading, inside, trailing = partition_by_loc comments pattern_loc in\n\n  (* everything before start of pattern can only be leading on the pattern:\n   *   let |* before *| a = 1 *)\n  attach t.leading pattern_loc leading;\n  walk_pattern vb.Parsetree.pvb_pat t inside;\n  let after_pat, surrounding_expr =\n    partition_adjacent_trailing pattern_loc trailing\n  in\n  attach t.trailing pattern_loc after_pat;\n  let before_expr, inside_expr, after_expr =\n    partition_by_loc surrounding_expr expr_loc\n  in\n  if is_block_expr expr then\n    walk_expression expr t (List.concat [before_expr; inside_expr; after_expr])\n  else (\n    attach t.leading expr_loc before_expr;\n    walk_expression expr t inside_expr;\n    attach t.trailing expr_loc after_expr)\n\nand walk_expression expr t comments =\n  let open Location in\n  match expr.Parsetree.pexp_desc with\n  | _ when comments = [] -> ()\n  | Pexp_constant _ ->\n    let leading, trailing = partition_leading_trailing comments expr.pexp_loc in\n    attach t.leading expr.pexp_loc leading;\n    attach t.trailing expr.pexp_loc trailing\n  | Pexp_ident longident ->\n    let leading, trailing = partition_leading_trailing comments longident.loc in\n    attach t.leading longident.loc leading;\n    attach t.trailing longident.loc trailing\n  | Pexp_let\n      ( _recFlag,\n        value_bindings,\n        {pexp_desc 
= Pexp_construct ({txt = Longident.Lident \"()\"}, None)} ) ->\n    walk_value_bindings value_bindings t comments\n  | Pexp_let (_recFlag, value_bindings, expr2) ->\n    let comments =\n      visit_list_but_continue_with_remaining_comments\n        ~get_loc:(fun n ->\n          if n.Parsetree.pvb_pat.ppat_loc.loc_ghost then n.pvb_expr.pexp_loc\n          else n.Parsetree.pvb_loc)\n        ~walk_node:walk_value_binding ~newline_delimited:true value_bindings t\n        comments\n    in\n    if is_block_expr expr2 then walk_expression expr2 t comments\n    else\n      let leading, inside, trailing =\n        partition_by_loc comments expr2.pexp_loc\n      in\n      attach t.leading expr2.pexp_loc leading;\n      walk_expression expr2 t inside;\n      attach t.trailing expr2.pexp_loc trailing\n  | Pexp_sequence (expr1, expr2) ->\n    let leading, inside, trailing = partition_by_loc comments expr1.pexp_loc in\n    let comments =\n      if is_block_expr expr1 then (\n        let after_expr, comments =\n          partition_by_on_same_line expr1.pexp_loc trailing\n        in\n        walk_expression expr1 t (List.concat [leading; inside; after_expr]);\n        comments)\n      else (\n        attach t.leading expr1.pexp_loc leading;\n        walk_expression expr1 t inside;\n        let after_expr, comments =\n          partition_by_on_same_line expr1.pexp_loc trailing\n        in\n        attach t.trailing expr1.pexp_loc after_expr;\n        comments)\n    in\n    if is_block_expr expr2 then walk_expression expr2 t comments\n    else\n      let leading, inside, trailing =\n        partition_by_loc comments expr2.pexp_loc\n      in\n      attach t.leading expr2.pexp_loc leading;\n      walk_expression expr2 t inside;\n      attach t.trailing expr2.pexp_loc trailing\n  | Pexp_open (_override, longident, expr2) ->\n    let leading, comments = partition_leading_trailing comments expr.pexp_loc in\n    attach t.leading\n      {expr.pexp_loc with loc_end = 
longident.loc.loc_end}\n      leading;\n    let leading, trailing = partition_leading_trailing comments longident.loc in\n    attach t.leading longident.loc leading;\n    let after_longident, rest =\n      partition_by_on_same_line longident.loc trailing\n    in\n    attach t.trailing longident.loc after_longident;\n    if is_block_expr expr2 then walk_expression expr2 t rest\n    else\n      let leading, inside, trailing = partition_by_loc rest expr2.pexp_loc in\n      attach t.leading expr2.pexp_loc leading;\n      walk_expression expr2 t inside;\n      attach t.trailing expr2.pexp_loc trailing\n  | Pexp_extension\n      ( {txt = \"obj\"},\n        PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_record (rows, _)}, [])}]\n      ) ->\n    walk_list\n      (rows |> List.map (fun (li, e) -> ExprRecordRow (li, e)))\n      t comments\n  | Pexp_extension extension -> walk_extension extension t comments\n  | Pexp_letexception (extension_constructor, expr2) ->\n    let leading, comments = partition_leading_trailing comments expr.pexp_loc in\n    attach t.leading\n      {expr.pexp_loc with loc_end = extension_constructor.pext_loc.loc_end}\n      leading;\n    let leading, inside, trailing =\n      partition_by_loc comments extension_constructor.pext_loc\n    in\n    attach t.leading extension_constructor.pext_loc leading;\n    walk_extension_constructor extension_constructor t inside;\n    let after_ext_constr, rest =\n      partition_by_on_same_line extension_constructor.pext_loc trailing\n    in\n    attach t.trailing extension_constructor.pext_loc after_ext_constr;\n    if is_block_expr expr2 then walk_expression expr2 t rest\n    else\n      let leading, inside, trailing = partition_by_loc rest expr2.pexp_loc in\n      attach t.leading expr2.pexp_loc leading;\n      walk_expression expr2 t inside;\n      attach t.trailing expr2.pexp_loc trailing\n  | Pexp_letmodule (string_loc, mod_expr, expr2) ->\n    let leading, comments = partition_leading_trailing comments 
expr.pexp_loc in\n    attach t.leading\n      {expr.pexp_loc with loc_end = mod_expr.pmod_loc.loc_end}\n      leading;\n    let leading, trailing =\n      partition_leading_trailing comments string_loc.loc\n    in\n    attach t.leading string_loc.loc leading;\n    let after_string, rest =\n      partition_adjacent_trailing string_loc.loc trailing\n    in\n    attach t.trailing string_loc.loc after_string;\n    let before_mod_expr, inside_mod_expr, after_mod_expr =\n      partition_by_loc rest mod_expr.pmod_loc\n    in\n    attach t.leading mod_expr.pmod_loc before_mod_expr;\n    walk_module_expr mod_expr t inside_mod_expr;\n    let after_mod_expr, rest =\n      partition_by_on_same_line mod_expr.pmod_loc after_mod_expr\n    in\n    attach t.trailing mod_expr.pmod_loc after_mod_expr;\n    if is_block_expr expr2 then walk_expression expr2 t rest\n    else\n      let leading, inside, trailing = partition_by_loc rest expr2.pexp_loc in\n      attach t.leading expr2.pexp_loc leading;\n      walk_expression expr2 t inside;\n      attach t.trailing expr2.pexp_loc trailing\n  | Pexp_assert expr | Pexp_lazy expr ->\n    if is_block_expr expr then walk_expression expr t comments\n    else\n      let leading, inside, trailing = partition_by_loc comments expr.pexp_loc in\n      attach t.leading expr.pexp_loc leading;\n      walk_expression expr t inside;\n      attach t.trailing expr.pexp_loc trailing\n  | Pexp_coerce (expr, opt_typexpr, typexpr) ->\n    let leading, inside, trailing = partition_by_loc comments expr.pexp_loc in\n    attach t.leading expr.pexp_loc leading;\n    walk_expression expr t inside;\n    let after_expr, rest = partition_adjacent_trailing expr.pexp_loc trailing in\n    attach t.trailing expr.pexp_loc after_expr;\n    let rest =\n      match opt_typexpr with\n      | Some typexpr ->\n        let leading, inside, trailing =\n          partition_by_loc comments typexpr.ptyp_loc\n        in\n        attach t.leading typexpr.ptyp_loc leading;\n        
walk_core_type typexpr t inside;\n        let after_typ, rest =\n          partition_adjacent_trailing typexpr.ptyp_loc trailing\n        in\n        attach t.trailing typexpr.ptyp_loc after_typ;\n        rest\n      | None -> rest\n    in\n    let leading, inside, trailing = partition_by_loc rest typexpr.ptyp_loc in\n    attach t.leading typexpr.ptyp_loc leading;\n    walk_core_type typexpr t inside;\n    attach t.trailing typexpr.ptyp_loc trailing\n  | Pexp_constraint (expr, typexpr) ->\n    let leading, inside, trailing = partition_by_loc comments expr.pexp_loc in\n    attach t.leading expr.pexp_loc leading;\n    walk_expression expr t inside;\n    let after_expr, rest = partition_adjacent_trailing expr.pexp_loc trailing in\n    attach t.trailing expr.pexp_loc after_expr;\n    let leading, inside, trailing = partition_by_loc rest typexpr.ptyp_loc in\n    attach t.leading typexpr.ptyp_loc leading;\n    walk_core_type typexpr t inside;\n    attach t.trailing typexpr.ptyp_loc trailing\n  | Pexp_tuple []\n  | Pexp_array []\n  | Pexp_construct ({txt = Longident.Lident \"[]\"}, _) ->\n    attach t.inside expr.pexp_loc comments\n  | Pexp_construct ({txt = Longident.Lident \"::\"}, _) ->\n    walk_list\n      (collect_list_exprs [] expr |> List.map (fun e -> Expression e))\n      t comments\n  | Pexp_construct (longident, args) -> (\n    let leading, trailing = partition_leading_trailing comments longident.loc in\n    attach t.leading longident.loc leading;\n    match args with\n    | Some expr ->\n      let after_longident, rest =\n        partition_adjacent_trailing longident.loc trailing\n      in\n      attach t.trailing longident.loc after_longident;\n      walk_expression expr t rest\n    | None -> attach t.trailing longident.loc trailing)\n  | Pexp_variant (_label, None) -> ()\n  | Pexp_variant (_label, Some expr) -> walk_expression expr t comments\n  | Pexp_array exprs | Pexp_tuple exprs ->\n    walk_list (exprs |> List.map (fun e -> Expression e)) t comments\n  
| Pexp_record (rows, spread_expr) ->\n    if rows = [] then attach t.inside expr.pexp_loc comments\n    else\n      let comments =\n        match spread_expr with\n        | None -> comments\n        | Some expr ->\n          let leading, inside, trailing =\n            partition_by_loc comments expr.pexp_loc\n          in\n          attach t.leading expr.pexp_loc leading;\n          walk_expression expr t inside;\n          let after_expr, rest =\n            partition_adjacent_trailing expr.pexp_loc trailing\n          in\n          attach t.trailing expr.pexp_loc after_expr;\n          rest\n      in\n      walk_list\n        (rows |> List.map (fun (li, e) -> ExprRecordRow (li, e)))\n        t comments\n  | Pexp_field (expr, longident) ->\n    let leading, inside, trailing = partition_by_loc comments expr.pexp_loc in\n    let trailing =\n      if is_block_expr expr then (\n        let after_expr, rest =\n          partition_adjacent_trailing expr.pexp_loc trailing\n        in\n        walk_expression expr t (List.concat [leading; inside; after_expr]);\n        rest)\n      else (\n        attach t.leading expr.pexp_loc leading;\n        walk_expression expr t inside;\n        trailing)\n    in\n    let after_expr, rest = partition_adjacent_trailing expr.pexp_loc trailing in\n    attach t.trailing expr.pexp_loc after_expr;\n    let leading, trailing = partition_leading_trailing rest longident.loc in\n    attach t.leading longident.loc leading;\n    attach t.trailing longident.loc trailing\n  | Pexp_setfield (expr1, longident, expr2) ->\n    let leading, inside, trailing = partition_by_loc comments expr1.pexp_loc in\n    let rest =\n      if is_block_expr expr1 then (\n        let after_expr, rest =\n          partition_adjacent_trailing expr1.pexp_loc trailing\n        in\n        walk_expression expr1 t (List.concat [leading; inside; after_expr]);\n        rest)\n      else\n        let after_expr, rest =\n          partition_adjacent_trailing expr1.pexp_loc 
trailing\n        in\n        attach t.leading expr1.pexp_loc leading;\n        walk_expression expr1 t inside;\n        attach t.trailing expr1.pexp_loc after_expr;\n        rest\n    in\n    let before_longident, after_longident =\n      partition_leading_trailing rest longident.loc\n    in\n    attach t.leading longident.loc before_longident;\n    let after_longident, rest =\n      partition_adjacent_trailing longident.loc after_longident\n    in\n    attach t.trailing longident.loc after_longident;\n    if is_block_expr expr2 then walk_expression expr2 t rest\n    else\n      let leading, inside, trailing = partition_by_loc rest expr2.pexp_loc in\n      attach t.leading expr2.pexp_loc leading;\n      walk_expression expr2 t inside;\n      attach t.trailing expr2.pexp_loc trailing\n  | Pexp_ifthenelse (if_expr, then_expr, else_expr) -> (\n    let leading, rest = partition_leading_trailing comments expr.pexp_loc in\n    attach t.leading expr.pexp_loc leading;\n    let leading, inside, trailing = partition_by_loc rest if_expr.pexp_loc in\n    let comments =\n      if is_block_expr if_expr then (\n        let after_expr, comments =\n          partition_adjacent_trailing if_expr.pexp_loc trailing\n        in\n        walk_expression if_expr t (List.concat [leading; inside; after_expr]);\n        comments)\n      else (\n        attach t.leading if_expr.pexp_loc leading;\n        walk_expression if_expr t inside;\n        let after_expr, comments =\n          partition_adjacent_trailing if_expr.pexp_loc trailing\n        in\n        attach t.trailing if_expr.pexp_loc after_expr;\n        comments)\n    in\n    let leading, inside, trailing =\n      partition_by_loc comments then_expr.pexp_loc\n    in\n    let comments =\n      if is_block_expr then_expr then (\n        let after_expr, trailing =\n          partition_adjacent_trailing then_expr.pexp_loc trailing\n        in\n        walk_expression then_expr t (List.concat [leading; inside; after_expr]);\n        
trailing)\n      else (\n        attach t.leading then_expr.pexp_loc leading;\n        walk_expression then_expr t inside;\n        let after_expr, comments =\n          partition_adjacent_trailing then_expr.pexp_loc trailing\n        in\n        attach t.trailing then_expr.pexp_loc after_expr;\n        comments)\n    in\n    match else_expr with\n    | None -> ()\n    | Some expr ->\n      if is_block_expr expr || is_if_then_else_expr expr then\n        walk_expression expr t comments\n      else\n        let leading, inside, trailing =\n          partition_by_loc comments expr.pexp_loc\n        in\n        attach t.leading expr.pexp_loc leading;\n        walk_expression expr t inside;\n        attach t.trailing expr.pexp_loc trailing)\n  | Pexp_while (expr1, expr2) ->\n    let leading, inside, trailing = partition_by_loc comments expr1.pexp_loc in\n    let rest =\n      if is_block_expr expr1 then (\n        let after_expr, rest =\n          partition_adjacent_trailing expr1.pexp_loc trailing\n        in\n        walk_expression expr1 t (List.concat [leading; inside; after_expr]);\n        rest)\n      else (\n        attach t.leading expr1.pexp_loc leading;\n        walk_expression expr1 t inside;\n        let after_expr, rest =\n          partition_adjacent_trailing expr1.pexp_loc trailing\n        in\n        attach t.trailing expr1.pexp_loc after_expr;\n        rest)\n    in\n    if is_block_expr expr2 then walk_expression expr2 t rest\n    else\n      let leading, inside, trailing = partition_by_loc rest expr2.pexp_loc in\n      attach t.leading expr2.pexp_loc leading;\n      walk_expression expr2 t inside;\n      attach t.trailing expr2.pexp_loc trailing\n  | Pexp_for (pat, expr1, expr2, _, expr3) ->\n    let leading, inside, trailing = partition_by_loc comments pat.ppat_loc in\n    attach t.leading pat.ppat_loc leading;\n    walk_pattern pat t inside;\n    let after_pat, rest = partition_adjacent_trailing pat.ppat_loc trailing in\n    attach t.trailing 
pat.ppat_loc after_pat;\n    let leading, inside, trailing = partition_by_loc rest expr1.pexp_loc in\n    attach t.leading expr1.pexp_loc leading;\n    walk_expression expr1 t inside;\n    let after_expr, rest =\n      partition_adjacent_trailing expr1.pexp_loc trailing\n    in\n    attach t.trailing expr1.pexp_loc after_expr;\n    let leading, inside, trailing = partition_by_loc rest expr2.pexp_loc in\n    attach t.leading expr2.pexp_loc leading;\n    walk_expression expr2 t inside;\n    let after_expr, rest =\n      partition_adjacent_trailing expr2.pexp_loc trailing\n    in\n    attach t.trailing expr2.pexp_loc after_expr;\n    if is_block_expr expr3 then walk_expression expr3 t rest\n    else\n      let leading, inside, trailing = partition_by_loc rest expr3.pexp_loc in\n      attach t.leading expr3.pexp_loc leading;\n      walk_expression expr3 t inside;\n      attach t.trailing expr3.pexp_loc trailing\n  | Pexp_pack mod_expr ->\n    let before, inside, after = partition_by_loc comments mod_expr.pmod_loc in\n    attach t.leading mod_expr.pmod_loc before;\n    walk_module_expr mod_expr t inside;\n    attach t.trailing mod_expr.pmod_loc after\n  | Pexp_match (expr1, [case; else_branch])\n    when Res_parsetree_viewer.has_if_let_attribute expr.pexp_attributes ->\n    let before, inside, after =\n      partition_by_loc comments case.pc_lhs.ppat_loc\n    in\n    attach t.leading case.pc_lhs.ppat_loc before;\n    walk_pattern case.pc_lhs t inside;\n    let after_pat, rest =\n      partition_adjacent_trailing case.pc_lhs.ppat_loc after\n    in\n    attach t.trailing case.pc_lhs.ppat_loc after_pat;\n    let before, inside, after = partition_by_loc rest expr1.pexp_loc in\n    attach t.leading expr1.pexp_loc before;\n    walk_expression expr1 t inside;\n    let after_expr, rest = partition_adjacent_trailing expr1.pexp_loc after in\n    attach t.trailing expr1.pexp_loc after_expr;\n    let before, inside, after = partition_by_loc rest case.pc_rhs.pexp_loc in\n    let 
after =\n      if is_block_expr case.pc_rhs then (\n        let after_expr, rest =\n          partition_adjacent_trailing case.pc_rhs.pexp_loc after\n        in\n        walk_expression case.pc_rhs t (List.concat [before; inside; after_expr]);\n        rest)\n      else (\n        attach t.leading case.pc_rhs.pexp_loc before;\n        walk_expression case.pc_rhs t inside;\n        after)\n    in\n    let after_expr, rest =\n      partition_adjacent_trailing case.pc_rhs.pexp_loc after\n    in\n    attach t.trailing case.pc_rhs.pexp_loc after_expr;\n    let before, inside, after =\n      partition_by_loc rest else_branch.pc_rhs.pexp_loc\n    in\n    let after =\n      if is_block_expr else_branch.pc_rhs then (\n        let after_expr, rest =\n          partition_adjacent_trailing else_branch.pc_rhs.pexp_loc after\n        in\n        walk_expression else_branch.pc_rhs t\n          (List.concat [before; inside; after_expr]);\n        rest)\n      else (\n        attach t.leading else_branch.pc_rhs.pexp_loc before;\n        walk_expression else_branch.pc_rhs t inside;\n        after)\n    in\n    attach t.trailing else_branch.pc_rhs.pexp_loc after\n  | Pexp_match (expr, cases) | Pexp_try (expr, cases) ->\n    let before, inside, after = partition_by_loc comments expr.pexp_loc in\n    let after =\n      if is_block_expr expr then (\n        let after_expr, rest =\n          partition_adjacent_trailing expr.pexp_loc after\n        in\n        walk_expression expr t (List.concat [before; inside; after_expr]);\n        rest)\n      else (\n        attach t.leading expr.pexp_loc before;\n        walk_expression expr t inside;\n        after)\n    in\n    let after_expr, rest = partition_adjacent_trailing expr.pexp_loc after in\n    attach t.trailing expr.pexp_loc after_expr;\n    walk_list (cases |> List.map (fun case -> Case case)) t rest\n    (* unary expression: todo use parsetreeviewer *)\n  | Pexp_apply\n      ( {\n          pexp_desc =\n            Pexp_ident\n        
      {\n                txt =\n                  Longident.Lident (\"~+\" | \"~+.\" | \"~-\" | \"~-.\" | \"not\" | \"!\");\n              };\n        },\n        [(Nolabel, arg_expr)] ) ->\n    let before, inside, after = partition_by_loc comments arg_expr.pexp_loc in\n    attach t.leading arg_expr.pexp_loc before;\n    walk_expression arg_expr t inside;\n    attach t.trailing arg_expr.pexp_loc after\n  (* binary expression *)\n  | Pexp_apply\n      ( {\n          pexp_desc =\n            Pexp_ident\n              {\n                txt =\n                  Longident.Lident\n                    ( \":=\" | \"||\" | \"&&\" | \"=\" | \"==\" | \"<\" | \">\" | \"!=\" | \"!==\"\n                    | \"<=\" | \">=\" | \"|>\" | \"+\" | \"+.\" | \"-\" | \"-.\" | \"++\" | \"^\"\n                    | \"*\" | \"*.\" | \"/\" | \"/.\" | \"**\" | \"|.\" | \"|.u\" | \"<>\" );\n              };\n        },\n        [(Nolabel, operand1); (Nolabel, operand2)] ) ->\n    let before, inside, after = partition_by_loc comments operand1.pexp_loc in\n    attach t.leading operand1.pexp_loc before;\n    walk_expression operand1 t inside;\n    let after_operand1, rest =\n      partition_adjacent_trailing operand1.pexp_loc after\n    in\n    attach t.trailing operand1.pexp_loc after_operand1;\n    let before, inside, after = partition_by_loc rest operand2.pexp_loc in\n    attach t.leading operand2.pexp_loc before;\n    walk_expression operand2 t inside;\n    (* (List.concat [inside; after]); *)\n    attach t.trailing operand2.pexp_loc after\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident \"Array\", \"get\")}},\n        [(Nolabel, parent_expr); (Nolabel, member_expr)] ) ->\n    walk_list [Expression parent_expr; Expression member_expr] t comments\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident \"Array\", \"set\")}},\n        [(Nolabel, parent_expr); (Nolabel, member_expr); (Nolabel, target_expr)]\n      ) ->\n    walk_list\n      
[Expression parent_expr; Expression member_expr; Expression target_expr]\n      t comments\n  | Pexp_apply (call_expr, arguments) ->\n    let before, inside, after = partition_by_loc comments call_expr.pexp_loc in\n    let after =\n      if is_block_expr call_expr then (\n        let after_expr, rest =\n          partition_adjacent_trailing call_expr.pexp_loc after\n        in\n        walk_expression call_expr t (List.concat [before; inside; after_expr]);\n        rest)\n      else (\n        attach t.leading call_expr.pexp_loc before;\n        walk_expression call_expr t inside;\n        after)\n    in\n    if ParsetreeViewer.is_jsx_expression expr then (\n      let props =\n        arguments\n        |> List.filter (fun (label, _) ->\n               match label with\n               | Asttypes.Labelled \"children\" -> false\n               | Asttypes.Nolabel -> false\n               | _ -> true)\n      in\n      let maybe_children =\n        arguments\n        |> List.find_opt (fun (label, _) ->\n               label = Asttypes.Labelled \"children\")\n      in\n      match maybe_children with\n      (* There is no need to deal with this situation as the children cannot be NONE *)\n      | None -> ()\n      | Some (_, children) ->\n        let leading, inside, _ = partition_by_loc after children.pexp_loc in\n        if props = [] then\n          (* All comments inside a tag are trailing comments of the tag if there are no props\n             <A\n             // comment\n             // comment\n             />\n          *)\n          let after_expr, _ =\n            partition_adjacent_trailing call_expr.pexp_loc after\n          in\n          attach t.trailing call_expr.pexp_loc after_expr\n        else\n          walk_list (props |> List.map (fun (_, e) -> ExprArgument e)) t leading;\n        walk_expression children t inside)\n    else\n      let after_expr, rest =\n        partition_adjacent_trailing call_expr.pexp_loc after\n      in\n      attach t.trailing 
call_expr.pexp_loc after_expr;\n      walk_list (arguments |> List.map (fun (_, e) -> ExprArgument e)) t rest\n  | Pexp_fun (_, _, _, _) | Pexp_newtype _ -> (\n    let _, parameters, return_expr = fun_expr expr in\n    let comments =\n      visit_list_but_continue_with_remaining_comments ~newline_delimited:false\n        ~walk_node:walk_expr_pararameter\n        ~get_loc:(fun (_attrs, _argLbl, expr_opt, pattern) ->\n          let open Parsetree in\n          let start_pos =\n            match pattern.ppat_attributes with\n            | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _attrs ->\n              loc.loc_start\n            | _ -> pattern.ppat_loc.loc_start\n          in\n          match expr_opt with\n          | None -> {pattern.ppat_loc with loc_start = start_pos}\n          | Some expr ->\n            {\n              pattern.ppat_loc with\n              loc_start = start_pos;\n              loc_end = expr.pexp_loc.loc_end;\n            })\n        parameters t comments\n    in\n    match return_expr.pexp_desc with\n    | Pexp_constraint (expr, typ)\n      when expr.pexp_loc.loc_start.pos_cnum >= typ.ptyp_loc.loc_end.pos_cnum ->\n      let leading, inside, trailing = partition_by_loc comments typ.ptyp_loc in\n      attach t.leading typ.ptyp_loc leading;\n      walk_core_type typ t inside;\n      let after_typ, comments =\n        partition_adjacent_trailing typ.ptyp_loc trailing\n      in\n      attach t.trailing typ.ptyp_loc after_typ;\n      if is_block_expr expr then walk_expression expr t comments\n      else\n        let leading, inside, trailing =\n          partition_by_loc comments expr.pexp_loc\n        in\n        attach t.leading expr.pexp_loc leading;\n        walk_expression expr t inside;\n        attach t.trailing expr.pexp_loc trailing\n    | _ ->\n      if is_block_expr return_expr then walk_expression return_expr t comments\n      else\n        let leading, inside, trailing =\n          partition_by_loc comments 
return_expr.pexp_loc\n        in\n        attach t.leading return_expr.pexp_loc leading;\n        walk_expression return_expr t inside;\n        attach t.trailing return_expr.pexp_loc trailing)\n  | _ -> ()\n\nand walk_expr_pararameter (_attrs, _argLbl, expr_opt, pattern) t comments =\n  let leading, inside, trailing = partition_by_loc comments pattern.ppat_loc in\n  attach t.leading pattern.ppat_loc leading;\n  walk_pattern pattern t inside;\n  match expr_opt with\n  | Some expr ->\n    let _afterPat, rest =\n      partition_adjacent_trailing pattern.ppat_loc trailing\n    in\n    attach t.trailing pattern.ppat_loc trailing;\n    if is_block_expr expr then walk_expression expr t rest\n    else\n      let leading, inside, trailing = partition_by_loc rest expr.pexp_loc in\n      attach t.leading expr.pexp_loc leading;\n      walk_expression expr t inside;\n      attach t.trailing expr.pexp_loc trailing\n  | None -> attach t.trailing pattern.ppat_loc trailing\n\nand walk_expr_argument expr t comments =\n  match expr.Parsetree.pexp_attributes with\n  | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _attrs ->\n    let leading, trailing = partition_leading_trailing comments loc in\n    attach t.leading loc leading;\n    let after_label, rest = partition_adjacent_trailing loc trailing in\n    attach t.trailing loc after_label;\n    let before, inside, after = partition_by_loc rest expr.pexp_loc in\n    attach t.leading expr.pexp_loc before;\n    walk_expression expr t inside;\n    attach t.trailing expr.pexp_loc after\n  | _ ->\n    let before, inside, after = partition_by_loc comments expr.pexp_loc in\n    attach t.leading expr.pexp_loc before;\n    walk_expression expr t inside;\n    attach t.trailing expr.pexp_loc after\n\nand walk_case (case : Parsetree.case) t comments =\n  let before, inside, after = partition_by_loc comments case.pc_lhs.ppat_loc in\n  (* cases don't have a location on their own, leading comments should go\n   * after the bar on the pattern *)\n 
 walk_pattern case.pc_lhs t (List.concat [before; inside]);\n  let after_pat, rest =\n    partition_adjacent_trailing case.pc_lhs.ppat_loc after\n  in\n  attach t.trailing case.pc_lhs.ppat_loc after_pat;\n  let comments =\n    match case.pc_guard with\n    | Some expr ->\n      let before, inside, after = partition_by_loc rest expr.pexp_loc in\n      let after_expr, rest = partition_adjacent_trailing expr.pexp_loc after in\n      if is_block_expr expr then\n        walk_expression expr t (List.concat [before; inside; after_expr])\n      else (\n        attach t.leading expr.pexp_loc before;\n        walk_expression expr t inside;\n        attach t.trailing expr.pexp_loc after_expr);\n      rest\n    | None -> rest\n  in\n  if is_block_expr case.pc_rhs then walk_expression case.pc_rhs t comments\n  else\n    let before, inside, after =\n      partition_by_loc comments case.pc_rhs.pexp_loc\n    in\n    attach t.leading case.pc_rhs.pexp_loc before;\n    walk_expression case.pc_rhs t inside;\n    attach t.trailing case.pc_rhs.pexp_loc after\n\nand walk_expr_record_row (longident, expr) t comments =\n  let before_longident, after_longident =\n    partition_leading_trailing comments longident.loc\n  in\n  attach t.leading longident.loc before_longident;\n  let after_longident, rest =\n    partition_adjacent_trailing longident.loc after_longident\n  in\n  attach t.trailing longident.loc after_longident;\n  let leading, inside, trailing = partition_by_loc rest expr.pexp_loc in\n  attach t.leading expr.pexp_loc leading;\n  walk_expression expr t inside;\n  attach t.trailing expr.pexp_loc trailing\n\nand walk_extension_constructor ext_constr t comments =\n  let leading, trailing =\n    partition_leading_trailing comments ext_constr.pext_name.loc\n  in\n  attach t.leading ext_constr.pext_name.loc leading;\n  let after_name, rest =\n    partition_adjacent_trailing ext_constr.pext_name.loc trailing\n  in\n  attach t.trailing ext_constr.pext_name.loc after_name;\n  
walk_extension_constructor_kind ext_constr.pext_kind t rest\n\nand walk_extension_constructor_kind kind t comments =\n  match kind with\n  | Pext_rebind longident ->\n    let leading, trailing = partition_leading_trailing comments longident.loc in\n    attach t.leading longident.loc leading;\n    attach t.trailing longident.loc trailing\n  | Pext_decl (constructor_arguments, maybe_typ_expr) -> (\n    let rest = walk_constructor_arguments constructor_arguments t comments in\n    match maybe_typ_expr with\n    | None -> ()\n    | Some typexpr ->\n      let before, inside, after = partition_by_loc rest typexpr.ptyp_loc in\n      attach t.leading typexpr.ptyp_loc before;\n      walk_core_type typexpr t inside;\n      attach t.trailing typexpr.ptyp_loc after)\n\nand walk_module_expr mod_expr t comments =\n  match mod_expr.pmod_desc with\n  | Pmod_ident longident ->\n    let before, after = partition_leading_trailing comments longident.loc in\n    attach t.leading longident.loc before;\n    attach t.trailing longident.loc after\n  | Pmod_structure [] -> attach t.inside mod_expr.pmod_loc comments\n  | Pmod_structure structure -> walk_structure structure t comments\n  | Pmod_extension extension -> walk_extension extension t comments\n  | Pmod_unpack expr ->\n    let before, inside, after = partition_by_loc comments expr.pexp_loc in\n    attach t.leading expr.pexp_loc before;\n    walk_expression expr t inside;\n    attach t.trailing expr.pexp_loc after\n  | Pmod_constraint (modexpr, modtype) ->\n    if modtype.pmty_loc.loc_start >= modexpr.pmod_loc.loc_end then (\n      let before, inside, after = partition_by_loc comments modexpr.pmod_loc in\n      attach t.leading modexpr.pmod_loc before;\n      walk_module_expr modexpr t inside;\n      let after, rest = partition_adjacent_trailing modexpr.pmod_loc after in\n      attach t.trailing modexpr.pmod_loc after;\n      let before, inside, after = partition_by_loc rest modtype.pmty_loc in\n      attach t.leading modtype.pmty_loc 
before;\n      walk_mod_type modtype t inside;\n      attach t.trailing modtype.pmty_loc after)\n    else\n      let before, inside, after = partition_by_loc comments modtype.pmty_loc in\n      attach t.leading modtype.pmty_loc before;\n      walk_mod_type modtype t inside;\n      let after, rest = partition_adjacent_trailing modtype.pmty_loc after in\n      attach t.trailing modtype.pmty_loc after;\n      let before, inside, after = partition_by_loc rest modexpr.pmod_loc in\n      attach t.leading modexpr.pmod_loc before;\n      walk_module_expr modexpr t inside;\n      attach t.trailing modexpr.pmod_loc after\n  | Pmod_apply (_callModExpr, _argModExpr) ->\n    let mod_exprs = mod_expr_apply mod_expr in\n    walk_list (mod_exprs |> List.map (fun me -> ModuleExpr me)) t comments\n  | Pmod_functor _ -> (\n    let parameters, return_mod_expr = mod_expr_functor mod_expr in\n    let comments =\n      visit_list_but_continue_with_remaining_comments\n        ~get_loc:(fun (_, lbl, mod_type_option) ->\n          match mod_type_option with\n          | None -> lbl.Asttypes.loc\n          | Some mod_type ->\n            {lbl.loc with loc_end = mod_type.Parsetree.pmty_loc.loc_end})\n        ~walk_node:walk_mod_expr_parameter ~newline_delimited:false parameters t\n        comments\n    in\n    match return_mod_expr.pmod_desc with\n    | Pmod_constraint (mod_expr, mod_type)\n      when mod_type.pmty_loc.loc_end.pos_cnum\n           <= mod_expr.pmod_loc.loc_start.pos_cnum ->\n      let before, inside, after = partition_by_loc comments mod_type.pmty_loc in\n      attach t.leading mod_type.pmty_loc before;\n      walk_mod_type mod_type t inside;\n      let after, rest = partition_adjacent_trailing mod_type.pmty_loc after in\n      attach t.trailing mod_type.pmty_loc after;\n      let before, inside, after = partition_by_loc rest mod_expr.pmod_loc in\n      attach t.leading mod_expr.pmod_loc before;\n      walk_module_expr mod_expr t inside;\n      attach t.trailing 
mod_expr.pmod_loc after\n    | _ ->\n      let before, inside, after =\n        partition_by_loc comments return_mod_expr.pmod_loc\n      in\n      attach t.leading return_mod_expr.pmod_loc before;\n      walk_module_expr return_mod_expr t inside;\n      attach t.trailing return_mod_expr.pmod_loc after)\n\nand walk_mod_expr_parameter parameter t comments =\n  let _attrs, lbl, mod_type_option = parameter in\n  let leading, trailing = partition_leading_trailing comments lbl.loc in\n  attach t.leading lbl.loc leading;\n  match mod_type_option with\n  | None -> attach t.trailing lbl.loc trailing\n  | Some mod_type ->\n    let after_lbl, rest = partition_adjacent_trailing lbl.loc trailing in\n    attach t.trailing lbl.loc after_lbl;\n    let before, inside, after = partition_by_loc rest mod_type.pmty_loc in\n    attach t.leading mod_type.pmty_loc before;\n    walk_mod_type mod_type t inside;\n    attach t.trailing mod_type.pmty_loc after\n\nand walk_mod_type mod_type t comments =\n  match mod_type.pmty_desc with\n  | Pmty_ident longident | Pmty_alias longident ->\n    let leading, trailing = partition_leading_trailing comments longident.loc in\n    attach t.leading longident.loc leading;\n    attach t.trailing longident.loc trailing\n  | Pmty_signature [] -> attach t.inside mod_type.pmty_loc comments\n  | Pmty_signature signature -> walk_signature signature t comments\n  | Pmty_extension extension -> walk_extension extension t comments\n  | Pmty_typeof mod_expr ->\n    let before, inside, after = partition_by_loc comments mod_expr.pmod_loc in\n    attach t.leading mod_expr.pmod_loc before;\n    walk_module_expr mod_expr t inside;\n    attach t.trailing mod_expr.pmod_loc after\n  | Pmty_with (mod_type, _withConstraints) ->\n    let before, inside, after = partition_by_loc comments mod_type.pmty_loc in\n    attach t.leading mod_type.pmty_loc before;\n    walk_mod_type mod_type t inside;\n    attach t.trailing mod_type.pmty_loc after\n    (* TODO: withConstraints*)\n  | 
Pmty_functor _ ->\n    let parameters, return_mod_type = functor_type mod_type in\n    let comments =\n      visit_list_but_continue_with_remaining_comments\n        ~get_loc:(fun (_, lbl, mod_type_option) ->\n          match mod_type_option with\n          | None -> lbl.Asttypes.loc\n          | Some mod_type ->\n            if lbl.txt = \"_\" then mod_type.Parsetree.pmty_loc\n            else {lbl.loc with loc_end = mod_type.Parsetree.pmty_loc.loc_end})\n        ~walk_node:walk_mod_type_parameter ~newline_delimited:false parameters t\n        comments\n    in\n    let before, inside, after =\n      partition_by_loc comments return_mod_type.pmty_loc\n    in\n    attach t.leading return_mod_type.pmty_loc before;\n    walk_mod_type return_mod_type t inside;\n    attach t.trailing return_mod_type.pmty_loc after\n\nand walk_mod_type_parameter (_, lbl, mod_type_option) t comments =\n  let leading, trailing = partition_leading_trailing comments lbl.loc in\n  attach t.leading lbl.loc leading;\n  match mod_type_option with\n  | None -> attach t.trailing lbl.loc trailing\n  | Some mod_type ->\n    let after_lbl, rest = partition_adjacent_trailing lbl.loc trailing in\n    attach t.trailing lbl.loc after_lbl;\n    let before, inside, after = partition_by_loc rest mod_type.pmty_loc in\n    attach t.leading mod_type.pmty_loc before;\n    walk_mod_type mod_type t inside;\n    attach t.trailing mod_type.pmty_loc after\n\nand walk_pattern pat t comments =\n  let open Location in\n  match pat.Parsetree.ppat_desc with\n  | _ when comments = [] -> ()\n  | Ppat_alias (pat, alias) ->\n    let leading, inside, trailing = partition_by_loc comments pat.ppat_loc in\n    attach t.leading pat.ppat_loc leading;\n    walk_pattern pat t inside;\n    let after_pat, rest = partition_adjacent_trailing pat.ppat_loc trailing in\n    attach t.leading pat.ppat_loc leading;\n    attach t.trailing pat.ppat_loc after_pat;\n    let before_alias, after_alias = partition_leading_trailing rest alias.loc 
in\n    attach t.leading alias.loc before_alias;\n    attach t.trailing alias.loc after_alias\n  | Ppat_tuple []\n  | Ppat_array []\n  | Ppat_construct ({txt = Longident.Lident \"()\"}, _)\n  | Ppat_construct ({txt = Longident.Lident \"[]\"}, _) ->\n    attach t.inside pat.ppat_loc comments\n  | Ppat_array patterns ->\n    walk_list (patterns |> List.map (fun p -> Pattern p)) t comments\n  | Ppat_tuple patterns ->\n    walk_list (patterns |> List.map (fun p -> Pattern p)) t comments\n  | Ppat_construct ({txt = Longident.Lident \"::\"}, _) ->\n    walk_list\n      (collect_list_patterns [] pat |> List.map (fun p -> Pattern p))\n      t comments\n  | Ppat_construct (constr, None) ->\n    let before_constr, after_constr =\n      partition_leading_trailing comments constr.loc\n    in\n    attach t.leading constr.loc before_constr;\n    attach t.trailing constr.loc after_constr\n  | Ppat_construct (constr, Some pat) ->\n    let leading, trailing = partition_leading_trailing comments constr.loc in\n    attach t.leading constr.loc leading;\n    let after_constructor, rest =\n      partition_adjacent_trailing constr.loc trailing\n    in\n    attach t.trailing constr.loc after_constructor;\n    let leading, inside, trailing = partition_by_loc rest pat.ppat_loc in\n    attach t.leading pat.ppat_loc leading;\n    walk_pattern pat t inside;\n    attach t.trailing pat.ppat_loc trailing\n  | Ppat_variant (_label, None) -> ()\n  | Ppat_variant (_label, Some pat) -> walk_pattern pat t comments\n  | Ppat_type _ -> ()\n  | Ppat_record (record_rows, _) ->\n    walk_list\n      (record_rows |> List.map (fun (li, p) -> PatternRecordRow (li, p)))\n      t comments\n  | Ppat_or _ ->\n    walk_list\n      (Res_parsetree_viewer.collect_or_pattern_chain pat\n      |> List.map (fun pat -> Pattern pat))\n      t comments\n  | Ppat_constraint (pattern, typ) ->\n    let before_pattern, inside_pattern, after_pattern =\n      partition_by_loc comments pattern.ppat_loc\n    in\n    attach 
t.leading pattern.ppat_loc before_pattern;\n    walk_pattern pattern t inside_pattern;\n    let after_pattern, rest =\n      partition_adjacent_trailing pattern.ppat_loc after_pattern\n    in\n    attach t.trailing pattern.ppat_loc after_pattern;\n    let before_typ, inside_typ, after_typ =\n      partition_by_loc rest typ.ptyp_loc\n    in\n    attach t.leading typ.ptyp_loc before_typ;\n    walk_core_type typ t inside_typ;\n    attach t.trailing typ.ptyp_loc after_typ\n  | Ppat_lazy pattern | Ppat_exception pattern ->\n    let leading, inside, trailing =\n      partition_by_loc comments pattern.ppat_loc\n    in\n    attach t.leading pattern.ppat_loc leading;\n    walk_pattern pattern t inside;\n    attach t.trailing pattern.ppat_loc trailing\n  | Ppat_unpack string_loc ->\n    let leading, trailing =\n      partition_leading_trailing comments string_loc.loc\n    in\n    attach t.leading string_loc.loc leading;\n    attach t.trailing string_loc.loc trailing\n  | Ppat_extension extension -> walk_extension extension t comments\n  | _ -> ()\n\n(* name: firstName *)\nand walk_pattern_record_row row t comments =\n  match row with\n  (* punned {x}*)\n  | ( {Location.txt = Longident.Lident ident; loc = longident_loc},\n      {Parsetree.ppat_desc = Ppat_var {txt; _}} )\n    when ident = txt ->\n    let before_lbl, after_lbl =\n      partition_leading_trailing comments longident_loc\n    in\n    attach t.leading longident_loc before_lbl;\n    attach t.trailing longident_loc after_lbl\n  | longident, pattern ->\n    let before_lbl, after_lbl =\n      partition_leading_trailing comments longident.loc\n    in\n    attach t.leading longident.loc before_lbl;\n    let after_lbl, rest = partition_adjacent_trailing longident.loc after_lbl in\n    attach t.trailing longident.loc after_lbl;\n    let leading, inside, trailing = partition_by_loc rest pattern.ppat_loc in\n    attach t.leading pattern.ppat_loc leading;\n    walk_pattern pattern t inside;\n    attach t.trailing 
pattern.ppat_loc trailing\n\nand walk_row_field (row_field : Parsetree.row_field) t comments =\n  match row_field with\n  | Parsetree.Rtag ({loc}, _, _, _) ->\n    let before, after = partition_leading_trailing comments loc in\n    attach t.leading loc before;\n    attach t.trailing loc after\n  | Rinherit _ -> ()\n\nand walk_core_type typ t comments =\n  match typ.Parsetree.ptyp_desc with\n  | _ when comments = [] -> ()\n  | Ptyp_tuple typexprs ->\n    walk_list (typexprs |> List.map (fun ct -> CoreType ct)) t comments\n  | Ptyp_extension extension -> walk_extension extension t comments\n  | Ptyp_package package_type -> walk_package_type package_type t comments\n  | Ptyp_alias (typexpr, _alias) ->\n    let before_typ, inside_typ, after_typ =\n      partition_by_loc comments typexpr.ptyp_loc\n    in\n    attach t.leading typexpr.ptyp_loc before_typ;\n    walk_core_type typexpr t inside_typ;\n    attach t.trailing typexpr.ptyp_loc after_typ\n  | Ptyp_poly (strings, typexpr) ->\n    let comments =\n      visit_list_but_continue_with_remaining_comments\n        ~get_loc:(fun n -> n.Asttypes.loc)\n        ~walk_node:(fun longident t comments ->\n          let before_longident, after_longident =\n            partition_leading_trailing comments longident.loc\n          in\n          attach t.leading longident.loc before_longident;\n          attach t.trailing longident.loc after_longident)\n        ~newline_delimited:false strings t comments\n    in\n    let before_typ, inside_typ, after_typ =\n      partition_by_loc comments typexpr.ptyp_loc\n    in\n    attach t.leading typexpr.ptyp_loc before_typ;\n    walk_core_type typexpr t inside_typ;\n    attach t.trailing typexpr.ptyp_loc after_typ\n  | Ptyp_variant (row_fields, _, _) ->\n    walk_list (row_fields |> List.map (fun rf -> RowField rf)) t comments\n  | Ptyp_constr\n      ({txt = Lident \"function$\"}, [({ptyp_desc = Ptyp_arrow _} as desc); _]) ->\n    walk_core_type desc t comments\n  | Ptyp_constr (longident, 
typexprs) ->\n    let before_longident, _afterLongident =\n      partition_leading_trailing comments longident.loc\n    in\n    let after_longident, rest =\n      partition_adjacent_trailing longident.loc comments\n    in\n    attach t.leading longident.loc before_longident;\n    attach t.trailing longident.loc after_longident;\n    walk_list (typexprs |> List.map (fun ct -> CoreType ct)) t rest\n  | Ptyp_arrow _ ->\n    let _, parameters, typexpr = arrow_type typ in\n    let comments = walk_type_parameters parameters t comments in\n    let before_typ, inside_typ, after_typ =\n      partition_by_loc comments typexpr.ptyp_loc\n    in\n    attach t.leading typexpr.ptyp_loc before_typ;\n    walk_core_type typexpr t inside_typ;\n    attach t.trailing typexpr.ptyp_loc after_typ\n  | Ptyp_object (fields, _) -> walk_typ_object_fields fields t comments\n  | _ -> ()\n\nand walk_typ_object_fields fields t comments =\n  walk_list (fields |> List.map (fun f -> ObjectField f)) t comments\n\nand walk_object_field field t comments =\n  match field with\n  | Otag (lbl, _, typexpr) ->\n    let before_lbl, after_lbl = partition_leading_trailing comments lbl.loc in\n    attach t.leading lbl.loc before_lbl;\n    let after_lbl, rest = partition_adjacent_trailing lbl.loc after_lbl in\n    attach t.trailing lbl.loc after_lbl;\n    let before_typ, inside_typ, after_typ =\n      partition_by_loc rest typexpr.ptyp_loc\n    in\n    attach t.leading typexpr.ptyp_loc before_typ;\n    walk_core_type typexpr t inside_typ;\n    attach t.trailing typexpr.ptyp_loc after_typ\n  | _ -> ()\n\nand walk_type_parameters type_parameters t comments =\n  visit_list_but_continue_with_remaining_comments\n    ~get_loc:(fun (_, _, typexpr) ->\n      match typexpr.Parsetree.ptyp_attributes with\n      | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _attrs ->\n        {loc with loc_end = typexpr.ptyp_loc.loc_end}\n      | _ -> typexpr.ptyp_loc)\n    ~walk_node:walk_type_parameter ~newline_delimited:false 
type_parameters t\n    comments\n\nand walk_type_parameter (_attrs, _lbl, typexpr) t comments =\n  let before_typ, inside_typ, after_typ =\n    partition_by_loc comments typexpr.ptyp_loc\n  in\n  attach t.leading typexpr.ptyp_loc before_typ;\n  walk_core_type typexpr t inside_typ;\n  attach t.trailing typexpr.ptyp_loc after_typ\n\nand walk_package_type package_type t comments =\n  let longident, package_constraints = package_type in\n  let before_longident, after_longident =\n    partition_leading_trailing comments longident.loc\n  in\n  attach t.leading longident.loc before_longident;\n  let after_longident, rest =\n    partition_adjacent_trailing longident.loc after_longident\n  in\n  attach t.trailing longident.loc after_longident;\n  walk_package_constraints package_constraints t rest\n\nand walk_package_constraints package_constraints t comments =\n  walk_list\n    (package_constraints\n    |> List.map (fun (li, te) -> PackageConstraint (li, te)))\n    t comments\n\nand walk_package_constraint package_constraint t comments =\n  let longident, typexpr = package_constraint in\n  let before_longident, after_longident =\n    partition_leading_trailing comments longident.loc\n  in\n  attach t.leading longident.loc before_longident;\n  let after_longident, rest =\n    partition_adjacent_trailing longident.loc after_longident\n  in\n  attach t.trailing longident.loc after_longident;\n  let before_typ, inside_typ, after_typ =\n    partition_by_loc rest typexpr.ptyp_loc\n  in\n  attach t.leading typexpr.ptyp_loc before_typ;\n  walk_core_type typexpr t inside_typ;\n  attach t.trailing typexpr.ptyp_loc after_typ\n\nand walk_extension extension t comments =\n  let id, payload = extension in\n  let before_id, after_id = partition_leading_trailing comments id.loc in\n  attach t.leading id.loc before_id;\n  let after_id, rest = partition_adjacent_trailing id.loc after_id in\n  attach t.trailing id.loc after_id;\n  walk_payload payload t rest\n\nand walk_attribute (id, 
payload) t comments =\n  let before_id, after_id = partition_leading_trailing comments id.loc in\n  attach t.leading id.loc before_id;\n  let after_id, rest = partition_adjacent_trailing id.loc after_id in\n  attach t.trailing id.loc after_id;\n  walk_payload payload t rest\n\nand walk_payload payload t comments =\n  match payload with\n  | PStr s -> walk_structure s t comments\n  | _ -> ()\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_core.ml",
    "content": "module Doc = Res_doc\nmodule Grammar = Res_grammar\nmodule Token = Res_token\nmodule Diagnostics = Res_diagnostics\nmodule CommentTable = Res_comments_table\nmodule ResPrinter = Res_printer\nmodule Scanner = Res_scanner\nmodule Parser = Res_parser\n\nmodule LoopProgress = struct\n  let list_rest list =\n    match list with\n    | [] -> assert false\n    | _ :: rest -> rest\nend\n\nlet mk_loc start_loc end_loc =\n  Location.{loc_start = start_loc; loc_end = end_loc; loc_ghost = false}\n\nmodule Recover = struct\n  let default_expr () =\n    let id = Location.mknoloc \"rescript.exprhole\" in\n    Ast_helper.Exp.mk (Pexp_extension (id, PStr []))\n\n  let default_type () =\n    let id = Location.mknoloc \"rescript.typehole\" in\n    Ast_helper.Typ.extension (id, PStr [])\n\n  let default_pattern () =\n    let id = Location.mknoloc \"rescript.patternhole\" in\n    Ast_helper.Pat.extension (id, PStr [])\n\n  let default_module_expr () = Ast_helper.Mod.structure []\n  let default_module_type () = Ast_helper.Mty.signature []\n\n  let default_signature_item =\n    let id = Location.mknoloc \"rescript.sigitemhole\" in\n    Ast_helper.Sig.extension (id, PStr [])\n\n  let recover_equal_greater p =\n    Parser.expect EqualGreater p;\n    match p.Parser.token with\n    | MinusGreater -> Parser.next p\n    | _ -> ()\n\n  let should_abort_list_parse p =\n    let rec check breadcrumbs =\n      match breadcrumbs with\n      | [] -> false\n      | (grammar, _) :: rest ->\n        if Grammar.is_part_of_list grammar p.Parser.token then true\n        else check rest\n    in\n    check p.breadcrumbs\nend\n\nmodule ErrorMessages = struct\n  let list_pattern_spread =\n    \"List pattern matches only supports one `...` spread, at the end.\\n\\\n     Explanation: a list spread at the tail is efficient, but a spread in the \\\n     middle would create new lists; out of performance concern, our pattern \\\n     matching currently guarantees to never create new intermediate 
data.\"\n\n  let record_pattern_spread =\n    \"Record's `...` spread is not supported in pattern matches.\\n\\\n     Explanation: you can't collect a subset of a record's field into its own \\\n     record, since a record needs an explicit declaration and that subset \\\n     wouldn't have one.\\n\\\n     Solution: you need to pull out each field you want explicitly.\"\n  (* let recordPatternUnderscore = \"Record patterns only support one `_`, at the end.\" *)\n  [@@live]\n\n  let array_pattern_spread =\n    \"Array's `...` spread is not supported in pattern matches.\\n\\\n     Explanation: such spread would create a subarray; out of performance \\\n     concern, our pattern matching currently guarantees to never create new \\\n     intermediate data.\\n\\\n     Solution: if it's to validate the first few elements, use a `when` clause \\\n     + Array size check + `get` checks on the current pattern. If it's to \\\n     obtain a subarray, use `Array.sub` or `Belt.Array.slice`.\"\n\n  let record_expr_spread =\n    \"Records can only have one `...` spread, at the beginning.\\n\\\n     Explanation: since records have a known, fixed shape, a spread like `{a, \\\n     ...b}` wouldn't make sense, as `b` would override every field of `a` \\\n     anyway.\"\n\n  let variant_ident =\n    \"A polymorphic variant (e.g. #id) must start with an alphabetical letter \\\n     or be a number (e.g. 
#742)\"\n\n  let experimental_if_let expr =\n    let switch_expr = {expr with Parsetree.pexp_attributes = []} in\n    Doc.concat\n      [\n        Doc.text \"If-let is currently highly experimental.\";\n        Doc.line;\n        Doc.text \"Use a regular `switch` with pattern matching instead:\";\n        Doc.concat\n          [\n            Doc.hard_line;\n            Doc.hard_line;\n            ResPrinter.print_expression switch_expr CommentTable.empty;\n          ];\n      ]\n    |> Doc.to_string ~width:80\n\n  let type_param =\n    \"A type param consists of a singlequote followed by a name like `'a` or \\\n     `'A`\"\n  let type_var =\n    \"A type variable consists of a singlequote followed by a name like `'a` or \\\n     `'A`\"\n\n  let attribute_without_node (attr : Parsetree.attribute) =\n    let {Asttypes.txt = attr_name}, _ = attr in\n    \"Did you forget to attach `\" ^ attr_name\n    ^ \"` to an item?\\n  Standalone attributes start with `@@` like: `@@\"\n    ^ attr_name ^ \"`\"\n\n  let type_declaration_name_longident longident =\n    \"A type declaration's name cannot contain a module access. Did you mean `\"\n    ^ Longident.last longident ^ \"`?\"\n\n  let tuple_single_element = \"A tuple needs at least two elements\"\n\n  let missing_tilde_labeled_parameter name =\n    if name = \"\" then \"A labeled parameter starts with a `~`.\"\n    else \"A labeled parameter starts with a `~`. Did you mean: `~\" ^ name ^ \"`?\"\n\n  let string_interpolation_in_pattern =\n    \"String interpolation is not supported in pattern matching.\"\n\n  let spread_in_record_declaration =\n    \"A record type declaration doesn't support the ... spread. Only an object \\\n     (with quoted field names) does.\"\n\n  let object_quoted_field_name name =\n    \"An object type declaration needs quoted field names. 
Did you mean \\\"\"\n    ^ name ^ \"\\\"?\"\n\n  let forbidden_inline_record_declaration =\n    \"An inline record type declaration is only allowed in a variant \\\n     constructor's declaration\"\n\n  let poly_var_int_with_suffix number =\n    \"A numeric polymorphic variant cannot be followed by a letter. Did you \\\n     mean `#\" ^ number ^ \"`?\"\nend\n\nmodule InExternal = struct\n  let status = ref false\nend\n\nlet jsx_attr = (Location.mknoloc \"JSX\", Parsetree.PStr [])\nlet uncurried_app_attr = (Location.mknoloc \"res.uapp\", Parsetree.PStr [])\nlet ternary_attr = (Location.mknoloc \"res.ternary\", Parsetree.PStr [])\nlet if_let_attr = (Location.mknoloc \"res.iflet\", Parsetree.PStr [])\nlet optional_attr = (Location.mknoloc \"res.optional\", Parsetree.PStr [])\nlet make_await_attr loc = (Location.mkloc \"res.await\" loc, Parsetree.PStr [])\nlet make_async_attr loc = (Location.mkloc \"res.async\" loc, Parsetree.PStr [])\n\nlet make_expression_optional ~optional (e : Parsetree.expression) =\n  if optional then {e with pexp_attributes = optional_attr :: e.pexp_attributes}\n  else e\nlet make_pattern_optional ~optional (p : Parsetree.pattern) =\n  if optional then {p with ppat_attributes = optional_attr :: p.ppat_attributes}\n  else p\n\nlet suppress_fragile_match_warning_attr =\n  ( Location.mknoloc \"warning\",\n    Parsetree.PStr\n      [\n        Ast_helper.Str.eval\n          (Ast_helper.Exp.constant (Pconst_string (\"-4\", None)));\n      ] )\nlet make_braces_attr loc = (Location.mkloc \"res.braces\" loc, Parsetree.PStr [])\nlet template_literal_attr = (Location.mknoloc \"res.template\", Parsetree.PStr [])\n\nlet tagged_template_literal_attr =\n  (Location.mknoloc \"res.taggedTemplate\", Parsetree.PStr [])\n\nlet spread_attr = (Location.mknoloc \"res.spread\", Parsetree.PStr [])\n\ntype argument = {\n  dotted: bool;\n  label: Asttypes.arg_label;\n  expr: Parsetree.expression;\n}\n\ntype type_parameter = {\n  dotted: bool;\n  attrs: Ast_helper.attrs;\n 
 label: Asttypes.arg_label;\n  typ: Parsetree.core_type;\n  start_pos: Lexing.position;\n}\n\ntype typ_def_or_ext =\n  | TypeDef of {\n      rec_flag: Asttypes.rec_flag;\n      types: Parsetree.type_declaration list;\n    }\n  | TypeExt of Parsetree.type_extension\n\ntype labelled_parameter =\n  | TermParameter of {\n      dotted: bool;\n      attrs: Parsetree.attributes;\n      label: Asttypes.arg_label;\n      expr: Parsetree.expression option;\n      pat: Parsetree.pattern;\n      pos: Lexing.position;\n    }\n  | TypeParameter of {\n      dotted: bool;\n      attrs: Parsetree.attributes;\n      locs: string Location.loc list;\n      pos: Lexing.position;\n    }\n\ntype record_pattern_item =\n  | PatUnderscore\n  | PatField of (Ast_helper.lid * Parsetree.pattern)\n\ntype context = OrdinaryExpr | TernaryTrueBranchExpr | WhenExpr\n\nlet get_closing_token = function\n  | Token.Lparen -> Token.Rparen\n  | Lbrace -> Rbrace\n  | Lbracket -> Rbracket\n  | List -> Rbrace\n  | LessThan -> GreaterThan\n  | _ -> assert false\n\nlet rec go_to_closing closing_token state =\n  match (state.Parser.token, closing_token) with\n  | Rparen, Token.Rparen\n  | Rbrace, Rbrace\n  | Rbracket, Rbracket\n  | GreaterThan, GreaterThan ->\n    Parser.next state;\n    ()\n  | ((Token.Lbracket | Lparen | Lbrace | List | LessThan) as t), _ ->\n    Parser.next state;\n    go_to_closing (get_closing_token t) state;\n    go_to_closing closing_token state\n  | (Rparen | Token.Rbrace | Rbracket | Eof), _ ->\n    () (* TODO: how do report errors here? 
*)\n  | _ ->\n    Parser.next state;\n    go_to_closing closing_token state\n\n(* Madness *)\nlet is_es6_arrow_expression ~in_ternary p =\n  Parser.lookahead p (fun state ->\n      let async =\n        match state.Parser.token with\n        | Lident \"async\" ->\n          Parser.next state;\n          true\n        | _ -> false\n      in\n      match state.Parser.token with\n      | Lident _ | Underscore -> (\n        Parser.next state;\n        match state.Parser.token with\n        (* Don't think that this valid\n         * Imagine: let x = (a: int)\n         * This is a parenthesized expression with a type constraint, wait for\n         * the arrow *)\n        (* | Colon when not inTernary -> true *)\n        | EqualGreater -> true\n        | _ -> false)\n      | Lparen -> (\n        let prev_end_pos = state.prev_end_pos in\n        Parser.next state;\n        match state.token with\n        (* arrived at `()` here *)\n        | Rparen -> (\n          Parser.next state;\n          match state.Parser.token with\n          (* arrived at `() :` here *)\n          | Colon when not in_ternary -> (\n            Parser.next state;\n            match state.Parser.token with\n            (* arrived at `() :typ` here *)\n            | Lident _ -> (\n              Parser.next state;\n              (match state.Parser.token with\n              (* arrived at `() :typ<` here *)\n              | LessThan ->\n                Parser.next state;\n                go_to_closing GreaterThan state\n              | _ -> ());\n              match state.Parser.token with\n              (* arrived at `() :typ =>` or `() :typ<'a,'b> =>` here *)\n              | EqualGreater -> true\n              | _ -> false)\n            | _ -> true)\n          | EqualGreater -> true\n          | _ -> false)\n        | Dot (* uncurried *) -> true\n        | Tilde when not async -> true\n        | Backtick ->\n          false\n          (* (` always indicates the start of an expr, can't be es6 parameter 
*)\n        | _ -> (\n          go_to_closing Rparen state;\n          match state.Parser.token with\n          | EqualGreater -> true\n          (* | Lbrace TODO: detect missing =>, is this possible? *)\n          | Colon when not in_ternary -> true\n          | Rparen ->\n            (* imagine having something as :\n             * switch colour {\n             * | Red\n             *    when l == l'\n             *    || (&Clflags.classic && (l == Nolabel && !is_optional(l'))) => (t1, t2)\n             * We'll arrive at the outer rparen just before the =>.\n             * This is not an es6 arrow.\n             *)\n            false\n          | _ -> (\n            Parser.next_unsafe state;\n            (* error recovery, peek at the next token,\n             * (elements, providerId] => {\n             *  in the example above, we have an unbalanced ] here\n             *)\n            match state.Parser.token with\n            | EqualGreater\n              when state.start_pos.pos_lnum == prev_end_pos.pos_lnum ->\n              true\n            | _ -> false)))\n      | _ -> false)\n\nlet is_es6_arrow_functor p =\n  Parser.lookahead p (fun state ->\n      match state.Parser.token with\n      (* | Uident _ | Underscore -> *)\n      (* Parser.next state; *)\n      (* begin match state.Parser.token with *)\n      (* | EqualGreater -> true *)\n      (* | _ -> false *)\n      (* end *)\n      | Lparen -> (\n        Parser.next state;\n        match state.token with\n        | Rparen -> (\n          Parser.next state;\n          match state.token with\n          | Colon | EqualGreater -> true\n          | _ -> false)\n        | _ -> (\n          go_to_closing Rparen state;\n          match state.Parser.token with\n          | EqualGreater | Lbrace -> true\n          | Colon -> true\n          | _ -> false))\n      | _ -> false)\n\nlet is_es6_arrow_type p =\n  Parser.lookahead p (fun state ->\n      match state.Parser.token with\n      | Lparen -> (\n        
Parser.next state;\n        match state.Parser.token with\n        | Rparen -> (\n          Parser.next state;\n          match state.Parser.token with\n          | EqualGreater -> true\n          | _ -> false)\n        | Tilde | Dot -> true\n        | _ -> (\n          go_to_closing Rparen state;\n          match state.Parser.token with\n          | EqualGreater -> true\n          | _ -> false))\n      | Tilde -> true\n      | _ -> false)\n\nlet build_longident words =\n  match List.rev words with\n  | [] -> assert false\n  | hd :: tl -> List.fold_left (fun p s -> Longident.Ldot (p, s)) (Lident hd) tl\n\nlet make_infix_operator (p : Parser.t) token start_pos end_pos =\n  let stringified_token =\n    if token = Token.MinusGreater then\n      if p.uncurried_config = Legacy then \"|.\" else \"|.u\"\n    else if token = Token.PlusPlus then \"^\"\n    else if token = Token.BangEqual then \"<>\"\n    else if token = Token.BangEqualEqual then \"!=\"\n    else if token = Token.Equal then (\n      (* TODO: could have a totally different meaning like x->fooSet(y)*)\n      Parser.err ~start_pos ~end_pos p\n        (Diagnostics.message \"Did you mean `==` here?\");\n      \"=\")\n    else if token = Token.EqualEqual then \"=\"\n    else if token = Token.EqualEqualEqual then \"==\"\n    else Token.to_string token\n  in\n  let loc = mk_loc start_pos end_pos in\n  let operator = Location.mkloc (Longident.Lident stringified_token) loc in\n  Ast_helper.Exp.ident ~loc operator\n\nlet negate_string s =\n  if String.length s > 0 && (s.[0] [@doesNotRaise]) = '-' then\n    (String.sub [@doesNotRaise]) s 1 (String.length s - 1)\n  else \"-\" ^ s\n\nlet make_unary_expr start_pos token_end token operand =\n  match (token, operand.Parsetree.pexp_desc) with\n  | (Token.Plus | PlusDot), Pexp_constant (Pconst_integer _ | Pconst_float _) ->\n    operand\n  | Minus, Pexp_constant (Pconst_integer (n, m)) ->\n    {\n      operand with\n      pexp_desc = Pexp_constant (Pconst_integer 
(negate_string n, m));\n    }\n  | (Minus | MinusDot), Pexp_constant (Pconst_float (n, m)) ->\n    {operand with pexp_desc = Pexp_constant (Pconst_float (negate_string n, m))}\n  | (Token.Plus | PlusDot | Minus | MinusDot), _ ->\n    let token_loc = mk_loc start_pos token_end in\n    let operator = \"~\" ^ Token.to_string token in\n    Ast_helper.Exp.apply\n      ~loc:(mk_loc start_pos operand.Parsetree.pexp_loc.loc_end)\n      (Ast_helper.Exp.ident ~loc:token_loc\n         (Location.mkloc (Longident.Lident operator) token_loc))\n      [(Nolabel, operand)]\n  | Token.Bang, _ ->\n    let token_loc = mk_loc start_pos token_end in\n    Ast_helper.Exp.apply\n      ~loc:(mk_loc start_pos operand.Parsetree.pexp_loc.loc_end)\n      (Ast_helper.Exp.ident ~loc:token_loc\n         (Location.mkloc (Longident.Lident \"not\") token_loc))\n      [(Nolabel, operand)]\n  | _ -> operand\n\nlet make_list_expression loc seq ext_opt =\n  let rec handle_seq = function\n    | [] -> (\n      match ext_opt with\n      | Some ext -> ext\n      | None ->\n        let loc = {loc with Location.loc_ghost = true} in\n        let nil = Location.mkloc (Longident.Lident \"[]\") loc in\n        Ast_helper.Exp.construct ~loc nil None)\n    | e1 :: el ->\n      let exp_el = handle_seq el in\n      let loc =\n        mk_loc e1.Parsetree.pexp_loc.Location.loc_start exp_el.pexp_loc.loc_end\n      in\n      let arg = Ast_helper.Exp.tuple ~loc [e1; exp_el] in\n      Ast_helper.Exp.construct ~loc\n        (Location.mkloc (Longident.Lident \"::\") loc)\n        (Some arg)\n  in\n  let expr = handle_seq seq in\n  {expr with pexp_loc = loc}\n\nlet make_list_pattern loc seq ext_opt =\n  let rec handle_seq = function\n    | [] ->\n      let base_case =\n        match ext_opt with\n        | Some ext -> ext\n        | None ->\n          let loc = {loc with Location.loc_ghost = true} in\n          let nil = {Location.txt = Longident.Lident \"[]\"; loc} in\n          Ast_helper.Pat.construct ~loc nil None\n      
in\n      base_case\n    | p1 :: pl ->\n      let pat_pl = handle_seq pl in\n      let loc =\n        mk_loc p1.Parsetree.ppat_loc.loc_start pat_pl.ppat_loc.loc_end\n      in\n      let arg = Ast_helper.Pat.mk ~loc (Ppat_tuple [p1; pat_pl]) in\n      Ast_helper.Pat.mk ~loc\n        (Ppat_construct (Location.mkloc (Longident.Lident \"::\") loc, Some arg))\n  in\n  handle_seq seq\n\n(* TODO: diagnostic reporting *)\nlet lident_of_path longident =\n  match Longident.flatten longident |> List.rev with\n  | [] -> \"\"\n  | ident :: _ -> ident\n\nlet make_newtypes ~attrs ~loc newtypes exp =\n  let expr =\n    List.fold_right\n      (fun newtype exp -> Ast_helper.Exp.mk ~loc (Pexp_newtype (newtype, exp)))\n      newtypes exp\n  in\n  {expr with pexp_attributes = attrs}\n\n(* locally abstract types syntax sugar\n * Transforms\n *  let f: type t u v. = (foo : list</t, u, v/>) => ...\n * into\n *  let f = (type t u v. foo : list</t, u, v/>) => ...\n *)\nlet wrap_type_annotation ~loc newtypes core_type body =\n  let exp =\n    make_newtypes ~attrs:[] ~loc newtypes\n      (Ast_helper.Exp.constraint_ ~loc body core_type)\n  in\n  let typ =\n    Ast_helper.Typ.poly ~loc newtypes\n      (Ast_helper.Typ.varify_constructors newtypes core_type)\n  in\n  (exp, typ)\n\n(** * process the occurrence of _ in the arguments of a function application *\n    replace _ with a new variable, currently __x, in the arguments * return a\n    wrapping function that wraps ((__x) => ...) 
around an expression * e.g.\n    foo(_, 3) becomes (__x) => foo(__x, 3) *)\nlet process_underscore_application (p : Parser.t) args =\n  let exp_question = ref None in\n  let hidden_var = \"__x\" in\n  let check_arg ((lab, exp) as arg) =\n    match exp.Parsetree.pexp_desc with\n    | Pexp_ident ({txt = Lident \"_\"} as id) ->\n      let new_id = Location.mkloc (Longident.Lident hidden_var) id.loc in\n      let new_exp = Ast_helper.Exp.mk (Pexp_ident new_id) ~loc:exp.pexp_loc in\n      exp_question := Some new_exp;\n      (lab, new_exp)\n    | _ -> arg\n  in\n  let args = List.map check_arg args in\n  let wrap (exp_apply : Parsetree.expression) =\n    match !exp_question with\n    | Some {pexp_loc = loc} ->\n      let pattern =\n        Ast_helper.Pat.mk\n          (Ppat_var (Location.mkloc hidden_var loc))\n          ~loc:Location.none\n      in\n      let fun_expr = Ast_helper.Exp.fun_ ~loc Nolabel None pattern exp_apply in\n      if p.uncurried_config = Legacy then fun_expr\n      else Ast_uncurried.uncurried_fun ~loc ~arity:1 fun_expr\n    | None -> exp_apply\n  in\n  (args, wrap)\n\n(* Transform A.a into a. For use with punned record fields as in {A.a, b}. 
*)\nlet remove_module_name_from_punned_field_value exp =\n  match exp.Parsetree.pexp_desc with\n  | Pexp_ident path_ident ->\n    {\n      exp with\n      pexp_desc =\n        Pexp_ident\n          {path_ident with txt = Lident (Longident.last path_ident.txt)};\n    }\n  | _ -> exp\n\nlet rec parse_lident p =\n  let recover_lident p =\n    if\n      Token.is_keyword p.Parser.token\n      && p.Parser.prev_end_pos.pos_lnum == p.start_pos.pos_lnum\n    then (\n      Parser.err p (Diagnostics.lident p.Parser.token);\n      Parser.next p;\n      None)\n    else\n      let rec loop p =\n        if (not (Recover.should_abort_list_parse p)) && p.token <> Eof then (\n          Parser.next p;\n          loop p)\n      in\n      Parser.err p (Diagnostics.lident p.Parser.token);\n      Parser.next p;\n      loop p;\n      match p.Parser.token with\n      | Lident _ -> Some ()\n      | _ -> None\n  in\n  let start_pos = p.Parser.start_pos in\n  match p.Parser.token with\n  | Lident ident ->\n    Parser.next p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    (ident, loc)\n  | Eof ->\n    Parser.err ~start_pos p\n      (Diagnostics.unexpected p.Parser.token p.breadcrumbs);\n    (\"_\", mk_loc start_pos p.prev_end_pos)\n  | _ -> (\n    match recover_lident p with\n    | Some () -> parse_lident p\n    | None -> (\"_\", mk_loc start_pos p.prev_end_pos))\n\nlet parse_ident ~msg ~start_pos p =\n  match p.Parser.token with\n  | Lident ident | Uident ident ->\n    Parser.next p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    (ident, loc)\n  | token\n    when Token.is_keyword token\n         && p.prev_end_pos.pos_lnum == p.start_pos.pos_lnum ->\n    let token_txt = Token.to_string token in\n    let msg =\n      \"`\" ^ token_txt\n      ^ \"` is a reserved keyword. 
Keywords need to be escaped: \\\\\\\"\" ^ token_txt\n      ^ \"\\\"\"\n    in\n    Parser.err ~start_pos p (Diagnostics.message msg);\n    Parser.next p;\n    (token_txt, mk_loc start_pos p.prev_end_pos)\n  | _token ->\n    Parser.err ~start_pos p (Diagnostics.message msg);\n    Parser.next p;\n    (\"\", mk_loc start_pos p.prev_end_pos)\n\nlet parse_hash_ident ~start_pos p =\n  Parser.expect Hash p;\n  match p.token with\n  | String text ->\n    Parser.next p;\n    (text, mk_loc start_pos p.prev_end_pos)\n  | Int {i; suffix} ->\n    let () =\n      match suffix with\n      | Some _ ->\n        Parser.err p\n          (Diagnostics.message (ErrorMessages.poly_var_int_with_suffix i))\n      | None -> ()\n    in\n    Parser.next p;\n    (i, mk_loc start_pos p.prev_end_pos)\n  | Eof ->\n    Parser.err ~start_pos p (Diagnostics.unexpected p.token p.breadcrumbs);\n    (\"\", mk_loc start_pos p.prev_end_pos)\n  | _ -> parse_ident ~start_pos ~msg:ErrorMessages.variant_ident p\n\n(* Ldot (Ldot (Lident \"Foo\", \"Bar\"), \"baz\") *)\nlet parse_value_path p =\n  let start_pos = p.Parser.start_pos in\n  let rec aux p path =\n    let start_pos = p.Parser.start_pos in\n    let token = p.token in\n\n    Parser.next p;\n    if p.Parser.token = Dot then (\n      Parser.expect Dot p;\n\n      match p.Parser.token with\n      | Lident ident -> Longident.Ldot (path, ident)\n      | Uident uident -> aux p (Ldot (path, uident))\n      | token ->\n        Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n        Longident.Ldot (path, \"_\"))\n    else (\n      Parser.err p ~start_pos ~end_pos:p.prev_end_pos (Diagnostics.lident token);\n      path)\n  in\n  let ident =\n    match p.Parser.token with\n    | Lident ident ->\n      Parser.next p;\n      Longident.Lident ident\n    | Uident ident ->\n      let res = aux p (Lident ident) in\n      Parser.next_unsafe p;\n      res\n    | token ->\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      
Parser.next_unsafe p;\n      Longident.Lident \"_\"\n  in\n  Location.mkloc ident (mk_loc start_pos p.prev_end_pos)\n\nlet parse_value_path_after_dot p =\n  let start_pos = p.Parser.start_pos in\n  match p.Parser.token with\n  | Lident _ | Uident _ -> parse_value_path p\n  | token ->\n    Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n    Location.mkloc (Longident.Lident \"_\") (mk_loc start_pos p.prev_end_pos)\n\nlet parse_value_path_tail p start_pos ident =\n  let rec loop p path =\n    match p.Parser.token with\n    | Lident ident ->\n      Parser.next p;\n      Location.mkloc\n        (Longident.Ldot (path, ident))\n        (mk_loc start_pos p.prev_end_pos)\n    | Uident ident ->\n      Parser.next p;\n      Parser.expect Dot p;\n      loop p (Longident.Ldot (path, ident))\n    | token ->\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      Location.mkloc\n        (Longident.Ldot (path, \"_\"))\n        (mk_loc start_pos p.prev_end_pos)\n  in\n  loop p ident\n\nlet parse_module_long_ident_tail ~lowercase p start_pos ident =\n  let rec loop p acc =\n    match p.Parser.token with\n    | Lident ident when lowercase ->\n      Parser.next p;\n      let lident = Longident.Ldot (acc, ident) in\n      Location.mkloc lident (mk_loc start_pos p.prev_end_pos)\n    | Uident ident -> (\n      Parser.next p;\n      let end_pos = p.prev_end_pos in\n      let lident = Longident.Ldot (acc, ident) in\n      match p.Parser.token with\n      | Dot ->\n        Parser.next p;\n        loop p lident\n      | _ -> Location.mkloc lident (mk_loc start_pos end_pos))\n    | t ->\n      Parser.err p (Diagnostics.uident t);\n      Location.mkloc\n        (Longident.Ldot (acc, \"_\"))\n        (mk_loc start_pos p.prev_end_pos)\n  in\n  loop p ident\n\n(* Parses module identifiers:\n     Foo\n     Foo.Bar *)\nlet parse_module_long_ident ~lowercase p =\n  (* Parser.leaveBreadcrumb p Reporting.ModuleLongIdent; *)\n  let start_pos = p.Parser.start_pos in\n  let 
module_ident =\n    match p.Parser.token with\n    | Lident ident when lowercase ->\n      let loc = mk_loc start_pos p.end_pos in\n      let lident = Longident.Lident ident in\n      Parser.next p;\n      Location.mkloc lident loc\n    | Uident ident -> (\n      let lident = Longident.Lident ident in\n      let end_pos = p.end_pos in\n      Parser.next p;\n      match p.Parser.token with\n      | Dot ->\n        Parser.next p;\n        parse_module_long_ident_tail ~lowercase p start_pos lident\n      | _ -> Location.mkloc lident (mk_loc start_pos end_pos))\n    | t ->\n      Parser.err p (Diagnostics.uident t);\n      Location.mkloc (Longident.Lident \"_\") (mk_loc start_pos p.prev_end_pos)\n  in\n  (* Parser.eatBreadcrumb p; *)\n  module_ident\n\nlet verify_jsx_opening_closing_name p name_expr =\n  let closing =\n    match p.Parser.token with\n    | Lident lident ->\n      Parser.next p;\n      Longident.Lident lident\n    | Uident _ -> (parse_module_long_ident ~lowercase:true p).txt\n    | _ -> Longident.Lident \"\"\n  in\n  match name_expr.Parsetree.pexp_desc with\n  | Pexp_ident opening_ident ->\n    let opening =\n      let without_create_element =\n        Longident.flatten opening_ident.txt\n        |> List.filter (fun s -> s <> \"createElement\")\n      in\n      match Longident.unflatten without_create_element with\n      | Some li -> li\n      | None -> Longident.Lident \"\"\n    in\n    opening = closing\n  | _ -> assert false\n\nlet string_of_pexp_ident name_expr =\n  match name_expr.Parsetree.pexp_desc with\n  | Pexp_ident opening_ident ->\n    Longident.flatten opening_ident.txt\n    |> List.filter (fun s -> s <> \"createElement\")\n    |> String.concat \".\"\n  | _ -> \"\"\n\n(* open-def ::=\n *   | open module-path\n *   | open! 
module-path *)\nlet parse_open_description ~attrs p =\n  Parser.leave_breadcrumb p Grammar.OpenDescription;\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Open p;\n  let override =\n    if Parser.optional p Token.Bang then Asttypes.Override else Asttypes.Fresh\n  in\n  let modident = parse_module_long_ident ~lowercase:false p in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Parser.eat_breadcrumb p;\n  Ast_helper.Opn.mk ~loc ~attrs ~override modident\n\n(* constant\t::=\tinteger-literal   *)\n(* ∣\t float-literal   *)\n(* ∣\t string-literal   *)\nlet parse_constant p =\n  let is_negative =\n    match p.Parser.token with\n    | Token.Minus ->\n      Parser.next p;\n      true\n    | Plus ->\n      Parser.next p;\n      false\n    | _ -> false\n  in\n  let constant =\n    match p.Parser.token with\n    | Int {i; suffix} ->\n      (* Only decimal literal is allowed for bigint *)\n      if suffix = Some 'n' && not (Bigint_utils.is_valid i) then\n        Parser.err p\n          (Diagnostics.message\n             \"Invalid bigint literal. Only decimal literal is allowed for \\\n              bigint.\");\n      let int_txt = if is_negative then \"-\" ^ i else i in\n      Parsetree.Pconst_integer (int_txt, suffix)\n    | Float {f; suffix} ->\n      let float_txt = if is_negative then \"-\" ^ f else f in\n      Parsetree.Pconst_float (float_txt, suffix)\n    | String s ->\n      Pconst_string (s, if p.mode = ParseForTypeChecker then Some \"js\" else None)\n    | Codepoint {c; original} ->\n      if p.mode = ParseForTypeChecker then Pconst_char c\n      else\n        (* Pconst_char char does not have enough information for formatting.\n         * When parsing for the printer, we encode the char contents as a string\n         * with a special prefix. 
*)\n        Pconst_string (original, Some \"INTERNAL_RES_CHAR_CONTENTS\")\n    | token ->\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      Pconst_string (\"\", None)\n  in\n  Parser.next_unsafe p;\n  constant\n\nlet parse_template_constant ~prefix (p : Parser.t) =\n  (* Arrived at the ` char *)\n  let start_pos = p.start_pos in\n  Parser.next_template_literal_token p;\n  match p.token with\n  | TemplateTail (txt, _) ->\n    Parser.next p;\n    Parsetree.Pconst_string (txt, prefix)\n  | _ ->\n    let rec skip_tokens () =\n      if p.token <> Eof then (\n        Parser.next p;\n        match p.token with\n        | Backtick ->\n          Parser.next p;\n          ()\n        | _ -> skip_tokens ())\n    in\n    skip_tokens ();\n    Parser.err ~start_pos ~end_pos:p.prev_end_pos p\n      (Diagnostics.message ErrorMessages.string_interpolation_in_pattern);\n    Pconst_string (\"\", None)\n\nlet parse_comma_delimited_region p ~grammar ~closing ~f =\n  Parser.leave_breadcrumb p grammar;\n  let rec loop nodes =\n    match f p with\n    | Some node -> (\n      match p.Parser.token with\n      | Comma ->\n        Parser.next p;\n        loop (node :: nodes)\n      | token when token = closing || token = Eof -> List.rev (node :: nodes)\n      | _ when Grammar.is_list_element grammar p.token ->\n        (* missing comma between nodes in the region and the current token\n         * looks like the start of something valid in the current region.\n         * Example:\n         *   type student<'extraInfo> = {\n         *     name: string,\n         *     age: int\n         *     otherInfo: 'extraInfo\n         *   }\n         * There is a missing comma between `int` and `otherInfo`.\n         * `otherInfo` looks like a valid start of the record declaration.\n         * We report the error here and then continue parsing the region.\n         *)\n        Parser.expect Comma p;\n        loop (node :: nodes)\n      | _ ->\n        if\n          not\n            
(p.token = Eof || p.token = closing\n            || Recover.should_abort_list_parse p)\n        then Parser.expect Comma p;\n        if p.token = Semicolon then Parser.next p;\n        loop (node :: nodes))\n    | None ->\n      if p.token = Eof || p.token = closing || Recover.should_abort_list_parse p\n      then List.rev nodes\n      else (\n        Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs);\n        Parser.next p;\n        loop nodes)\n  in\n  let nodes = loop [] in\n  Parser.eat_breadcrumb p;\n  nodes\n\nlet parse_comma_delimited_reversed_list p ~grammar ~closing ~f =\n  Parser.leave_breadcrumb p grammar;\n  let rec loop nodes =\n    match f p with\n    | Some node -> (\n      match p.Parser.token with\n      | Comma ->\n        Parser.next p;\n        loop (node :: nodes)\n      | token when token = closing || token = Eof -> node :: nodes\n      | _ when Grammar.is_list_element grammar p.token ->\n        (* missing comma between nodes in the region and the current token\n         * looks like the start of something valid in the current region.\n         * Example:\n         *   type student<'extraInfo> = {\n         *     name: string,\n         *     age: int\n         *     otherInfo: 'extraInfo\n         *   }\n         * There is a missing comma between `int` and `otherInfo`.\n         * `otherInfo` looks like a valid start of the record declaration.\n         * We report the error here and then continue parsing the region.\n         *)\n        Parser.expect Comma p;\n        loop (node :: nodes)\n      | _ ->\n        if\n          not\n            (p.token = Eof || p.token = closing\n            || Recover.should_abort_list_parse p)\n        then Parser.expect Comma p;\n        if p.token = Semicolon then Parser.next p;\n        loop (node :: nodes))\n    | None ->\n      if p.token = Eof || p.token = closing || Recover.should_abort_list_parse p\n      then nodes\n      else (\n        Parser.err p (Diagnostics.unexpected p.token 
p.breadcrumbs);\n        Parser.next p;\n        loop nodes)\n  in\n  let nodes = loop [] in\n  Parser.eat_breadcrumb p;\n  nodes\n\nlet parse_delimited_region p ~grammar ~closing ~f =\n  Parser.leave_breadcrumb p grammar;\n  let rec loop nodes =\n    match f p with\n    | Some node -> loop (node :: nodes)\n    | None ->\n      if\n        p.Parser.token = Token.Eof || p.token = closing\n        || Recover.should_abort_list_parse p\n      then List.rev nodes\n      else (\n        Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs);\n        Parser.next p;\n        loop nodes)\n  in\n  let nodes = loop [] in\n  Parser.eat_breadcrumb p;\n  nodes\n\nlet parse_region p ~grammar ~f =\n  Parser.leave_breadcrumb p grammar;\n  let rec loop nodes =\n    match f p with\n    | Some node -> loop (node :: nodes)\n    | None ->\n      if p.Parser.token = Token.Eof || Recover.should_abort_list_parse p then\n        List.rev nodes\n      else (\n        Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs);\n        Parser.next p;\n        loop nodes)\n  in\n  let nodes = loop [] in\n  Parser.eat_breadcrumb p;\n  nodes\n\n(* let-binding\t::=\tpattern =  expr   *)\n(* ∣\t value-name  { parameter }  [: typexpr]  [:> typexpr] =  expr   *)\n(* ∣\t value-name :  poly-typexpr =  expr   *)\n\n(* pattern\t::=\tvalue-name   *)\n(* ∣\t _   *)\n(* ∣\t constant   *)\n(* ∣\t pattern as  value-name   *)\n(* ∣\t ( pattern )   *)\n(* ∣\t ( pattern :  typexpr )   *)\n(* ∣\t pattern |  pattern   *)\n(* ∣\t constr  pattern   *)\n(* ∣\t #variant variant-pattern *)\n(* ∣\t #...type  *)\n(* ∣\t / pattern  { , pattern }+  /   *)\n(* ∣\t { field  [: typexpr]  [= pattern] { ; field  [: typexpr]  [= pattern] }  [; _ ] [ ; ] }   *)\n(* ∣\t [ pattern  { ; pattern }  [ ; ] ]   *)\n(* ∣\t pattern ::  pattern   *)\n(* ∣\t [| pattern  { ; pattern }  [ ; ] |]   *)\n(* ∣\t char-literal ..  
char-literal *)\n(*\t∣\t exception pattern  *)\nlet rec parse_pattern ?(alias = true) ?(or_ = true) p =\n  let start_pos = p.Parser.start_pos in\n  let attrs = parse_attributes p in\n  let pat =\n    match p.Parser.token with\n    | (True | False) as token ->\n      let end_pos = p.end_pos in\n      Parser.next p;\n      let loc = mk_loc start_pos end_pos in\n      Ast_helper.Pat.construct ~loc\n        (Location.mkloc (Longident.Lident (Token.to_string token)) loc)\n        None\n    | Int _ | String _ | Float _ | Codepoint _ | Minus | Plus -> (\n      let c = parse_constant p in\n      match p.token with\n      | DotDot ->\n        Parser.next p;\n        let c2 = parse_constant p in\n        Ast_helper.Pat.interval ~loc:(mk_loc start_pos p.prev_end_pos) c c2\n      | _ -> Ast_helper.Pat.constant ~loc:(mk_loc start_pos p.prev_end_pos) c)\n    | Backtick ->\n      let constant = parse_template_constant ~prefix:(Some \"js\") p in\n      Ast_helper.Pat.constant ~attrs:[template_literal_attr]\n        ~loc:(mk_loc start_pos p.prev_end_pos)\n        constant\n    | Lparen -> (\n      Parser.next p;\n      match p.token with\n      | Rparen ->\n        Parser.next p;\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let lid = Location.mkloc (Longident.Lident \"()\") loc in\n        Ast_helper.Pat.construct ~loc lid None\n      | _ -> (\n        let pat = parse_constrained_pattern p in\n        match p.token with\n        | Comma ->\n          Parser.next p;\n          parse_tuple_pattern ~attrs ~first:pat ~start_pos p\n        | _ ->\n          Parser.expect Rparen p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          {\n            pat with\n            ppat_loc = loc;\n            ppat_attributes = attrs @ pat.Parsetree.ppat_attributes;\n          }))\n    | Lbracket -> parse_array_pattern ~attrs p\n    | Lbrace -> parse_record_pattern ~attrs p\n    | Underscore ->\n      let end_pos = p.end_pos in\n      let loc = mk_loc start_pos 
end_pos in\n      Parser.next p;\n      Ast_helper.Pat.any ~loc ~attrs ()\n    | Lident ident -> (\n      let end_pos = p.end_pos in\n      let loc = mk_loc start_pos end_pos in\n      Parser.next p;\n      match p.token with\n      | Backtick ->\n        let constant = parse_template_constant ~prefix:(Some ident) p in\n        Ast_helper.Pat.constant ~loc:(mk_loc start_pos p.prev_end_pos) constant\n      | _ -> Ast_helper.Pat.var ~loc ~attrs (Location.mkloc ident loc))\n    | Uident _ -> (\n      let constr = parse_module_long_ident ~lowercase:false p in\n      match p.Parser.token with\n      | Lparen -> parse_constructor_pattern_args p constr start_pos attrs\n      | _ -> Ast_helper.Pat.construct ~loc:constr.loc ~attrs constr None)\n    | Hash -> (\n      Parser.next p;\n      if p.Parser.token == DotDotDot then (\n        Parser.next p;\n        let ident = parse_value_path p in\n        let loc = mk_loc start_pos ident.loc.loc_end in\n        Ast_helper.Pat.type_ ~loc ~attrs ident)\n      else\n        let ident, loc =\n          match p.token with\n          | String text ->\n            Parser.next p;\n            (text, mk_loc start_pos p.prev_end_pos)\n          | Int {i; suffix} ->\n            let () =\n              match suffix with\n              | Some _ ->\n                Parser.err p\n                  (Diagnostics.message\n                     (ErrorMessages.poly_var_int_with_suffix i))\n              | None -> ()\n            in\n            Parser.next p;\n            (i, mk_loc start_pos p.prev_end_pos)\n          | Eof ->\n            Parser.err ~start_pos p\n              (Diagnostics.unexpected p.token p.breadcrumbs);\n            (\"\", mk_loc start_pos p.prev_end_pos)\n          | _ -> parse_ident ~msg:ErrorMessages.variant_ident ~start_pos p\n        in\n        match p.Parser.token with\n        | Lparen -> parse_variant_pattern_args p ident start_pos attrs\n        | _ -> Ast_helper.Pat.variant ~loc ~attrs ident None)\n    | Exception 
->\n      Parser.next p;\n      let pat = parse_pattern ~alias:false ~or_:false p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Pat.exception_ ~loc ~attrs pat\n    | List ->\n      Parser.next p;\n      parse_list_pattern ~start_pos ~attrs p\n    | Module -> parse_module_pattern ~attrs p\n    | Percent ->\n      let extension = parse_extension p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Pat.extension ~loc ~attrs extension\n    | Eof ->\n      Parser.err p (Diagnostics.unexpected p.Parser.token p.breadcrumbs);\n      Recover.default_pattern ()\n    | token -> (\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      match\n        skip_tokens_and_maybe_retry p\n          ~is_start_of_grammar:Grammar.is_atomic_pattern_start\n      with\n      | None -> Recover.default_pattern ()\n      | Some () -> parse_pattern p)\n  in\n  let pat = if alias then parse_alias_pattern ~attrs pat p else pat in\n  if or_ then parse_or_pattern pat p else pat\n\nand skip_tokens_and_maybe_retry p ~is_start_of_grammar =\n  if\n    Token.is_keyword p.Parser.token\n    && p.Parser.prev_end_pos.pos_lnum == p.start_pos.pos_lnum\n  then (\n    Parser.next p;\n    None)\n  else if Recover.should_abort_list_parse p then\n    if is_start_of_grammar p.Parser.token then (\n      Parser.next p;\n      Some ())\n    else None\n  else (\n    Parser.next p;\n    let rec loop p =\n      if not (Recover.should_abort_list_parse p) then (\n        Parser.next p;\n        loop p)\n    in\n    loop p;\n    if is_start_of_grammar p.Parser.token then Some () else None)\n\n(* alias ::= pattern as lident *)\nand parse_alias_pattern ~attrs pattern p =\n  match p.Parser.token with\n  | As ->\n    Parser.next p;\n    let name, loc = parse_lident p in\n    let name = Location.mkloc name loc in\n    Ast_helper.Pat.alias\n      ~loc:{pattern.ppat_loc with loc_end = p.prev_end_pos}\n      ~attrs pattern name\n  | _ -> pattern\n\n(* or ::= pattern 
| pattern\n * precedence: Red | Blue | Green is interpreted as (Red | Blue) | Green *)\nand parse_or_pattern pattern1 p =\n  let rec loop pattern1 =\n    match p.Parser.token with\n    | Bar ->\n      Parser.next p;\n      let pattern2 = parse_pattern ~or_:false p in\n      let loc =\n        {pattern1.Parsetree.ppat_loc with loc_end = pattern2.ppat_loc.loc_end}\n      in\n      loop (Ast_helper.Pat.or_ ~loc pattern1 pattern2)\n    | _ -> pattern1\n  in\n  loop pattern1\n\nand parse_non_spread_pattern ~msg p =\n  let () =\n    match p.Parser.token with\n    | DotDotDot ->\n      Parser.err p (Diagnostics.message msg);\n      Parser.next p\n    | _ -> ()\n  in\n  match p.Parser.token with\n  | token when Grammar.is_pattern_start token -> (\n    let pat = parse_pattern p in\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      let typ = parse_typ_expr p in\n      let loc = mk_loc pat.ppat_loc.loc_start typ.Parsetree.ptyp_loc.loc_end in\n      Some (Ast_helper.Pat.constraint_ ~loc pat typ)\n    | _ -> Some pat)\n  | _ -> None\n\nand parse_constrained_pattern p =\n  let pat = parse_pattern p in\n  match p.Parser.token with\n  | Colon ->\n    Parser.next p;\n    let typ = parse_typ_expr p in\n    let loc = mk_loc pat.ppat_loc.loc_start typ.Parsetree.ptyp_loc.loc_end in\n    Ast_helper.Pat.constraint_ ~loc pat typ\n  | _ -> pat\n\nand parse_constrained_pattern_region p =\n  match p.Parser.token with\n  | token when Grammar.is_pattern_start token ->\n    Some (parse_constrained_pattern p)\n  | _ -> None\n\nand parse_optional_label p =\n  match p.Parser.token with\n  | Question ->\n    Parser.next p;\n    true\n  | _ -> false\n\n(* field ::=\n *   | longident\n *   | longident : pattern\n *   | longident as lident\n *\n *  row ::=\n *\t | field ,\n *\t | field , _\n *\t | field , _,\n *)\nand parse_record_pattern_row_field ~attrs p =\n  let label = parse_value_path p in\n  let pattern =\n    match p.Parser.token with\n    | Colon ->\n      Parser.next 
p;\n      let optional = parse_optional_label p in\n      let pat = parse_pattern p in\n      make_pattern_optional ~optional pat\n    | _ ->\n      Ast_helper.Pat.var ~loc:label.loc ~attrs\n        (Location.mkloc (Longident.last label.txt) label.loc)\n  in\n  (label, pattern)\n\n(* TODO: there are better representations than PatField|Underscore ? *)\nand parse_record_pattern_row p =\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | DotDotDot ->\n    Parser.next p;\n    Some (true, PatField (parse_record_pattern_row_field ~attrs p))\n  | Uident _ | Lident _ ->\n    Some (false, PatField (parse_record_pattern_row_field ~attrs p))\n  | Question -> (\n    Parser.next p;\n    match p.token with\n    | Uident _ | Lident _ ->\n      let lid, pat = parse_record_pattern_row_field ~attrs p in\n      Some (false, PatField (lid, make_pattern_optional ~optional:true pat))\n    | _ -> None)\n  | Underscore ->\n    Parser.next p;\n    Some (false, PatUnderscore)\n  | _ -> None\n\nand parse_record_pattern ~attrs p =\n  let start_pos = p.start_pos in\n  Parser.expect Lbrace p;\n  let raw_fields =\n    parse_comma_delimited_reversed_list p ~grammar:PatternRecord ~closing:Rbrace\n      ~f:parse_record_pattern_row\n  in\n  Parser.expect Rbrace p;\n  let fields, closed_flag =\n    let raw_fields, flag =\n      match raw_fields with\n      | (_hasSpread, PatUnderscore) :: rest -> (rest, Asttypes.Open)\n      | raw_fields -> (raw_fields, Asttypes.Closed)\n    in\n    List.fold_left\n      (fun (fields, flag) curr ->\n        let has_spread, field = curr in\n        match field with\n        | PatField field ->\n          (if has_spread then\n             let _, pattern = field in\n             Parser.err ~start_pos:pattern.Parsetree.ppat_loc.loc_start p\n               (Diagnostics.message ErrorMessages.record_pattern_spread));\n          (field :: fields, flag)\n        | PatUnderscore -> (fields, flag))\n      ([], flag) raw_fields\n  in\n  let loc = mk_loc 
start_pos p.prev_end_pos in\n  Ast_helper.Pat.record ~loc ~attrs fields closed_flag\n\nand parse_tuple_pattern ~attrs ~first ~start_pos p =\n  let patterns =\n    first\n    :: parse_comma_delimited_region p ~grammar:Grammar.PatternList\n         ~closing:Rparen ~f:parse_constrained_pattern_region\n  in\n  Parser.expect Rparen p;\n  let () =\n    match patterns with\n    | [_] ->\n      Parser.err ~start_pos ~end_pos:p.prev_end_pos p\n        (Diagnostics.message ErrorMessages.tuple_single_element)\n    | _ -> ()\n  in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Pat.tuple ~loc ~attrs patterns\n\nand parse_pattern_region p =\n  match p.Parser.token with\n  | DotDotDot ->\n    Parser.next p;\n    Some (true, parse_constrained_pattern p)\n  | token when Grammar.is_pattern_start token ->\n    Some (false, parse_constrained_pattern p)\n  | _ -> None\n\nand parse_module_pattern ~attrs p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Module p;\n  Parser.expect Lparen p;\n  let uident =\n    match p.token with\n    | Uident uident ->\n      let loc = mk_loc p.start_pos p.end_pos in\n      Parser.next p;\n      Location.mkloc uident loc\n    | _ ->\n      (* TODO: error recovery *)\n      Location.mknoloc \"_\"\n  in\n  match p.token with\n  | Colon ->\n    let colon_start = p.Parser.start_pos in\n    Parser.next p;\n    let package_typ_attrs = parse_attributes p in\n    let package_type =\n      parse_package_type ~start_pos:colon_start ~attrs:package_typ_attrs p\n    in\n    Parser.expect Rparen p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    let unpack = Ast_helper.Pat.unpack ~loc:uident.loc uident in\n    Ast_helper.Pat.constraint_ ~loc ~attrs unpack package_type\n  | _ ->\n    Parser.expect Rparen p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Pat.unpack ~loc ~attrs uident\n\nand parse_list_pattern ~start_pos ~attrs p =\n  let list_patterns =\n    parse_comma_delimited_reversed_list p 
~grammar:Grammar.PatternOcamlList\n      ~closing:Rbrace ~f:parse_pattern_region\n  in\n  Parser.expect Rbrace p;\n  let loc = mk_loc start_pos p.prev_end_pos in\n  let filter_spread (has_spread, pattern) =\n    if has_spread then (\n      Parser.err ~start_pos:pattern.Parsetree.ppat_loc.loc_start p\n        (Diagnostics.message ErrorMessages.list_pattern_spread);\n      pattern)\n    else pattern\n  in\n  match list_patterns with\n  | (true, pattern) :: patterns ->\n    let patterns = patterns |> List.map filter_spread |> List.rev in\n    let pat = make_list_pattern loc patterns (Some pattern) in\n    {pat with ppat_loc = loc; ppat_attributes = attrs}\n  | patterns ->\n    let patterns = patterns |> List.map filter_spread |> List.rev in\n    let pat = make_list_pattern loc patterns None in\n    {pat with ppat_loc = loc; ppat_attributes = attrs}\n\nand parse_array_pattern ~attrs p =\n  let start_pos = p.start_pos in\n  Parser.expect Lbracket p;\n  let patterns =\n    parse_comma_delimited_region p ~grammar:Grammar.PatternList\n      ~closing:Rbracket\n      ~f:(parse_non_spread_pattern ~msg:ErrorMessages.array_pattern_spread)\n  in\n  Parser.expect Rbracket p;\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Pat.array ~loc ~attrs patterns\n\nand parse_constructor_pattern_args p constr start_pos attrs =\n  let lparen = p.start_pos in\n  Parser.expect Lparen p;\n  let args =\n    parse_comma_delimited_region p ~grammar:Grammar.PatternList ~closing:Rparen\n      ~f:parse_constrained_pattern_region\n  in\n  Parser.expect Rparen p;\n  let args =\n    match args with\n    | [] ->\n      let loc = mk_loc lparen p.prev_end_pos in\n      Some\n        (Ast_helper.Pat.construct ~loc\n           (Location.mkloc (Longident.Lident \"()\") loc)\n           None)\n    | [({ppat_desc = Ppat_tuple _} as pat)] as patterns ->\n      if p.mode = ParseForTypeChecker then\n        (* Some(1, 2) for type-checker *)\n        Some pat\n      else\n        (* Some((1, 2)) for 
printer *)\n        Some (Ast_helper.Pat.tuple ~loc:(mk_loc lparen p.end_pos) patterns)\n    | [pattern] -> Some pattern\n    | patterns ->\n      Some (Ast_helper.Pat.tuple ~loc:(mk_loc lparen p.end_pos) patterns)\n  in\n  Ast_helper.Pat.construct\n    ~loc:(mk_loc start_pos p.prev_end_pos)\n    ~attrs constr args\n\nand parse_variant_pattern_args p ident start_pos attrs =\n  let lparen = p.start_pos in\n  Parser.expect Lparen p;\n  let patterns =\n    parse_comma_delimited_region p ~grammar:Grammar.PatternList ~closing:Rparen\n      ~f:parse_constrained_pattern_region\n  in\n  let args =\n    match patterns with\n    | [] ->\n      let loc = mk_loc lparen p.prev_end_pos in\n      Some\n        (Ast_helper.Pat.construct ~loc\n           (Location.mkloc (Longident.Lident \"()\") loc)\n           None)\n    | [({ppat_desc = Ppat_tuple _} as pat)] as patterns ->\n      if p.mode = ParseForTypeChecker then\n        (* #ident(1, 2) for type-checker *)\n        Some pat\n      else\n        (* #ident((1, 2)) for printer *)\n        Some (Ast_helper.Pat.tuple ~loc:(mk_loc lparen p.end_pos) patterns)\n    | [pattern] -> Some pattern\n    | patterns ->\n      Some (Ast_helper.Pat.tuple ~loc:(mk_loc lparen p.end_pos) patterns)\n  in\n  Parser.expect Rparen p;\n  Ast_helper.Pat.variant\n    ~loc:(mk_loc start_pos p.prev_end_pos)\n    ~attrs ident args\n\nand parse_expr ?(context = OrdinaryExpr) p =\n  let expr = parse_operand_expr ~context p in\n  let expr = parse_binary_expr ~context ~a:expr p 1 in\n  parse_ternary_expr expr p\n\n(* expr ? 
expr : expr *)\nand parse_ternary_expr left_operand p =\n  match p.Parser.token with\n  | Question ->\n    Parser.leave_breadcrumb p Grammar.Ternary;\n    Parser.next p;\n    let true_branch = parse_expr ~context:TernaryTrueBranchExpr p in\n    Parser.expect Colon p;\n    let false_branch = parse_expr p in\n    Parser.eat_breadcrumb p;\n    let loc =\n      {\n        left_operand.Parsetree.pexp_loc with\n        loc_start = left_operand.pexp_loc.loc_start;\n        loc_end = false_branch.Parsetree.pexp_loc.loc_end;\n      }\n    in\n    Ast_helper.Exp.ifthenelse ~attrs:[ternary_attr] ~loc left_operand\n      true_branch (Some false_branch)\n  | _ -> left_operand\n\nand parse_es6_arrow_expression ?(arrow_attrs = []) ?(arrow_start_pos = None)\n    ?context ?parameters p =\n  let start_pos = p.Parser.start_pos in\n  Parser.leave_breadcrumb p Grammar.Es6ArrowExpr;\n  (* Parsing function parameters and attributes:\n     1. Basically, attributes outside of `(...)` are added to the function, except\n     the uncurried attribute `(.)` is added to the function. e.g. async, uncurried\n\n     2. 
Attributes inside `(...)` are added to the arguments regardless of whether\n     labeled, optional or nolabeled *)\n  let parameters =\n    match parameters with\n    | Some params -> params\n    | None -> parse_parameters p\n  in\n  let parameters =\n    let update_attrs attrs = arrow_attrs @ attrs in\n    let update_pos pos =\n      match arrow_start_pos with\n      | Some start_pos -> start_pos\n      | None -> pos\n    in\n    match parameters with\n    | TermParameter p :: rest ->\n      TermParameter\n        {p with attrs = update_attrs p.attrs; pos = update_pos p.pos}\n      :: rest\n    | TypeParameter p :: rest ->\n      TypeParameter\n        {p with attrs = update_attrs p.attrs; pos = update_pos p.pos}\n      :: rest\n    | [] -> parameters\n  in\n  let parameters =\n    (* Propagate any dots from type parameters to the first term *)\n    let rec loop ~dot_in_type params =\n      match params with\n      | (TypeParameter {dotted} as p) :: _ ->\n        let rest = LoopProgress.list_rest params in\n        (* Tell termination checker about progress *)\n        p :: loop ~dot_in_type:(dot_in_type || dotted) rest\n      | TermParameter term_param :: rest ->\n        TermParameter\n          {term_param with dotted = dot_in_type || term_param.dotted}\n        :: rest\n      | [] -> []\n    in\n    loop ~dot_in_type:false parameters\n  in\n  let return_type =\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      Some (parse_typ_expr ~es6_arrow:false p)\n    | _ -> None\n  in\n  Parser.expect EqualGreater p;\n  let body =\n    let expr = parse_expr ?context p in\n    match return_type with\n    | Some typ ->\n      Ast_helper.Exp.constraint_\n        ~loc:(mk_loc expr.pexp_loc.loc_start typ.Parsetree.ptyp_loc.loc_end)\n        expr typ\n    | None -> expr\n  in\n  Parser.eat_breadcrumb p;\n  let end_pos = p.prev_end_pos in\n  let term_parameters =\n    parameters\n    |> List.filter (function\n         | TermParameter _ -> true\n         
| TypeParameter _ -> false)\n  in\n  let body_needs_braces =\n    let is_fun =\n      match body.pexp_desc with\n      | Pexp_fun _ -> true\n      | _ -> false\n    in\n    match term_parameters with\n    | TermParameter {dotted} :: _\n      when p.uncurried_config |> Res_uncurried.from_dotted ~dotted && is_fun ->\n      true\n    | TermParameter _ :: rest when p.uncurried_config = Legacy && is_fun ->\n      rest\n      |> List.exists (function\n           | TermParameter {dotted} -> dotted\n           | _ -> false)\n    | _ -> false\n  in\n  let body =\n    if body_needs_braces then\n      {\n        body with\n        pexp_attributes = make_braces_attr body.pexp_loc :: body.pexp_attributes;\n      }\n    else body\n  in\n  let _paramNum, arrow_expr, _arity =\n    List.fold_right\n      (fun parameter (term_param_num, expr, arity) ->\n        match parameter with\n        | TermParameter\n            {\n              dotted;\n              attrs;\n              label = lbl;\n              expr = default_expr;\n              pat;\n              pos = start_pos;\n            } ->\n          let loc = mk_loc start_pos end_pos in\n          let fun_expr =\n            Ast_helper.Exp.fun_ ~loc ~attrs lbl default_expr pat expr\n          in\n          let uncurried =\n            p.uncurried_config |> Res_uncurried.from_dotted ~dotted\n          in\n          if uncurried && (term_param_num = 1 || p.uncurried_config = Legacy)\n          then\n            ( term_param_num - 1,\n              Ast_uncurried.uncurried_fun ~loc ~arity fun_expr,\n              1 )\n          else (term_param_num - 1, fun_expr, arity + 1)\n        | TypeParameter {dotted = _; attrs; locs = newtypes; pos = start_pos} ->\n          ( term_param_num,\n            make_newtypes ~attrs ~loc:(mk_loc start_pos end_pos) newtypes expr,\n            arity ))\n      parameters\n      (List.length term_parameters, body, 1)\n  in\n  {arrow_expr with pexp_loc = {arrow_expr.pexp_loc with loc_start = 
start_pos}}\n\n(*\n * dotted_parameter ::=\n *   | . parameter\n *\n * parameter ::=\n *   | pattern\n *   | pattern : type\n *   | ~ labelName\n *   | ~ labelName as pattern\n *   | ~ labelName as pattern : type\n *   | ~ labelName = expr\n *   | ~ labelName as pattern = expr\n *   | ~ labelName as pattern : type = expr\n *   | ~ labelName = ?\n *   | ~ labelName as pattern = ?\n *   | ~ labelName as pattern : type = ?\n *\n * labelName ::= lident\n *)\nand parse_parameter p =\n  if\n    p.Parser.token = Token.Typ || p.token = Tilde || p.token = Dot\n    || Grammar.is_pattern_start p.token\n  then\n    let start_pos = p.Parser.start_pos in\n    let dotted = Parser.optional p Token.Dot in\n    let attrs = parse_attributes p in\n    if p.Parser.token = Typ then (\n      Parser.next p;\n      let lidents = parse_lident_list p in\n      Some (TypeParameter {dotted; attrs; locs = lidents; pos = start_pos}))\n    else\n      let attrs, lbl, pat =\n        match p.Parser.token with\n        | Tilde -> (\n          Parser.next p;\n          let lbl_name, loc = parse_lident p in\n          let prop_loc_attr =\n            (Location.mkloc \"res.namedArgLoc\" loc, Parsetree.PStr [])\n          in\n          match p.Parser.token with\n          | Comma | Equal | Rparen ->\n            let loc = mk_loc start_pos p.prev_end_pos in\n            ( [],\n              Asttypes.Labelled lbl_name,\n              Ast_helper.Pat.var ~attrs:(prop_loc_attr :: attrs) ~loc\n                (Location.mkloc lbl_name loc) )\n          | Colon ->\n            let lbl_end = p.prev_end_pos in\n            Parser.next p;\n            let typ = parse_typ_expr p in\n            let loc = mk_loc start_pos lbl_end in\n            let pat =\n              let pat = Ast_helper.Pat.var ~loc (Location.mkloc lbl_name loc) in\n              let loc = mk_loc start_pos p.prev_end_pos in\n              Ast_helper.Pat.constraint_ ~attrs:(prop_loc_attr :: attrs) ~loc\n                pat typ\n            in\n   
         ([], Asttypes.Labelled lbl_name, pat)\n          | As ->\n            Parser.next p;\n            let pat =\n              let pat = parse_constrained_pattern p in\n              {\n                pat with\n                ppat_attributes = (prop_loc_attr :: attrs) @ pat.ppat_attributes;\n              }\n            in\n            ([], Asttypes.Labelled lbl_name, pat)\n          | t ->\n            Parser.err p (Diagnostics.unexpected t p.breadcrumbs);\n            let loc = mk_loc start_pos p.prev_end_pos in\n            ( [],\n              Asttypes.Labelled lbl_name,\n              Ast_helper.Pat.var ~attrs:(prop_loc_attr :: attrs) ~loc\n                (Location.mkloc lbl_name loc) ))\n        | _ ->\n          let pattern = parse_constrained_pattern p in\n          let attrs = List.concat [pattern.ppat_attributes; attrs] in\n          ([], Asttypes.Nolabel, {pattern with ppat_attributes = attrs})\n      in\n      match p.Parser.token with\n      | Equal -> (\n        Parser.next p;\n        let lbl =\n          match lbl with\n          | Asttypes.Labelled lbl_name -> Asttypes.Optional lbl_name\n          | Asttypes.Nolabel ->\n            let lbl_name =\n              match pat.ppat_desc with\n              | Ppat_var var -> var.txt\n              | _ -> \"\"\n            in\n            Parser.err ~start_pos ~end_pos:p.prev_end_pos p\n              (Diagnostics.message\n                 (ErrorMessages.missing_tilde_labeled_parameter lbl_name));\n            Asttypes.Optional lbl_name\n          | lbl -> lbl\n        in\n        match p.Parser.token with\n        | Question ->\n          Parser.next p;\n          Some\n            (TermParameter\n               {dotted; attrs; label = lbl; expr = None; pat; pos = start_pos})\n        | _ ->\n          let expr = parse_constrained_or_coerced_expr p in\n          Some\n            (TermParameter\n               {\n                 dotted;\n                 attrs;\n                 label = lbl;\n     
            expr = Some expr;\n                 pat;\n                 pos = start_pos;\n               }))\n      | _ ->\n        Some\n          (TermParameter\n             {dotted; attrs; label = lbl; expr = None; pat; pos = start_pos})\n  else None\n\nand parse_parameter_list p =\n  let parameters =\n    parse_comma_delimited_region ~grammar:Grammar.ParameterList\n      ~f:parse_parameter ~closing:Rparen p\n  in\n  Parser.expect Rparen p;\n  parameters\n\n(* parameters ::=\n *   | _\n *   | lident\n *   | ()\n *   | (.)\n *   | ( parameter {, parameter} [,] )\n *)\nand parse_parameters p =\n  let start_pos = p.Parser.start_pos in\n  match p.Parser.token with\n  | Lident ident ->\n    Parser.next p;\n    let loc = mk_loc start_pos p.Parser.prev_end_pos in\n    [\n      TermParameter\n        {\n          dotted = false;\n          attrs = [];\n          label = Asttypes.Nolabel;\n          expr = None;\n          pat = Ast_helper.Pat.var ~loc (Location.mkloc ident loc);\n          pos = start_pos;\n        };\n    ]\n  | Underscore ->\n    Parser.next p;\n    let loc = mk_loc start_pos p.Parser.prev_end_pos in\n    [\n      TermParameter\n        {\n          dotted = false;\n          attrs = [];\n          label = Asttypes.Nolabel;\n          expr = None;\n          pat = Ast_helper.Pat.any ~loc ();\n          pos = start_pos;\n        };\n    ]\n  | Lparen -> (\n    Parser.next p;\n    match p.Parser.token with\n    | Rparen ->\n      Parser.next p;\n      let loc = mk_loc start_pos p.Parser.prev_end_pos in\n      let unit_pattern =\n        Ast_helper.Pat.construct ~loc\n          (Location.mkloc (Longident.Lident \"()\") loc)\n          None\n      in\n      [\n        TermParameter\n          {\n            dotted = false;\n            attrs = [];\n            label = Asttypes.Nolabel;\n            expr = None;\n            pat = unit_pattern;\n            pos = start_pos;\n          };\n      ]\n    | Dot -> (\n      Parser.next p;\n      match p.token 
with\n      | Rparen ->\n        Parser.next p;\n        let loc = mk_loc start_pos p.Parser.prev_end_pos in\n        let unit_pattern =\n          Ast_helper.Pat.construct ~loc\n            (Location.mkloc (Longident.Lident \"()\") loc)\n            None\n        in\n        [\n          TermParameter\n            {\n              dotted = true;\n              attrs = [];\n              label = Asttypes.Nolabel;\n              expr = None;\n              pat = unit_pattern;\n              pos = start_pos;\n            };\n        ]\n      | _ -> (\n        match parse_parameter_list p with\n        | TermParameter p :: rest ->\n          TermParameter {p with dotted = true; pos = start_pos} :: rest\n        | TypeParameter p :: rest ->\n          TypeParameter {p with dotted = true; pos = start_pos} :: rest\n        | parameters -> parameters))\n    | _ -> parse_parameter_list p)\n  | token ->\n    Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n    []\n\nand parse_coerced_expr ~(expr : Parsetree.expression) p =\n  Parser.expect ColonGreaterThan p;\n  let typ = parse_typ_expr p in\n  let loc = mk_loc expr.pexp_loc.loc_start p.prev_end_pos in\n  Ast_helper.Exp.coerce ~loc expr None typ\n\nand parse_constrained_or_coerced_expr p =\n  let expr = parse_expr p in\n  match p.Parser.token with\n  | ColonGreaterThan -> parse_coerced_expr ~expr p\n  | Colon -> (\n    Parser.next p;\n    match p.token with\n    | _ -> (\n      let typ = parse_typ_expr p in\n      let loc = mk_loc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in\n      let expr = Ast_helper.Exp.constraint_ ~loc expr typ in\n      match p.token with\n      | ColonGreaterThan -> parse_coerced_expr ~expr p\n      | _ -> expr))\n  | _ -> expr\n\nand parse_constrained_expr_region p =\n  match p.Parser.token with\n  | token when Grammar.is_expr_start token -> (\n    let expr = parse_expr p in\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      let typ = parse_typ_expr p in\n      
let loc = mk_loc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in\n      Some (Ast_helper.Exp.constraint_ ~loc expr typ)\n    | _ -> Some expr)\n  | _ -> None\n\n(* Atomic expressions represent unambiguous expressions.\n * This means that regardless of the context, these expressions\n * are always interpreted correctly. *)\nand parse_atomic_expr p =\n  Parser.leave_breadcrumb p Grammar.ExprOperand;\n  let start_pos = p.Parser.start_pos in\n  let expr =\n    match p.Parser.token with\n    | (True | False) as token ->\n      Parser.next p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Exp.construct ~loc\n        (Location.mkloc (Longident.Lident (Token.to_string token)) loc)\n        None\n    | Int _ | String _ | Float _ | Codepoint _ ->\n      let c = parse_constant p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Exp.constant ~loc c\n    | Backtick ->\n      let expr = parse_template_expr p in\n      {expr with pexp_loc = mk_loc start_pos p.prev_end_pos}\n    | Uident _ | Lident _ -> parse_value_or_constructor p\n    | Hash -> parse_poly_variant_expr p\n    | Lparen -> (\n      Parser.next p;\n      match p.Parser.token with\n      | Rparen ->\n        Parser.next p;\n        let loc = mk_loc start_pos p.prev_end_pos in\n        Ast_helper.Exp.construct ~loc\n          (Location.mkloc (Longident.Lident \"()\") loc)\n          None\n      | _t -> (\n        let expr = parse_constrained_or_coerced_expr p in\n        match p.token with\n        | Comma ->\n          Parser.next p;\n          parse_tuple_expr ~start_pos ~first:expr p\n        | _ ->\n          Parser.expect Rparen p;\n          expr\n        (* {expr with pexp_loc = mkLoc startPos p.prevEndPos}\n         * What does this location mean here? It means that when there's\n         * a parenthesized we keep the location here for whitespace interleaving.\n         * Without the closing paren in the location there will always be an extra\n         * line. 
For now we don't include it, because it does weird things\n         * with for comments. *)))\n    | List ->\n      Parser.next p;\n      parse_list_expr ~start_pos p\n    | Module ->\n      Parser.next p;\n      parse_first_class_module_expr ~start_pos p\n    | Lbracket -> parse_array_exp p\n    | Lbrace -> parse_braced_or_record_expr p\n    | LessThan -> parse_jsx p\n    | Percent ->\n      let extension = parse_extension p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Exp.extension ~loc extension\n    | Underscore as token ->\n      (* This case is for error recovery. Not sure if it's the correct place *)\n      Parser.err p (Diagnostics.lident token);\n      Parser.next p;\n      Recover.default_expr ()\n    | Eof ->\n      Parser.err ~start_pos:p.prev_end_pos p\n        (Diagnostics.unexpected p.Parser.token p.breadcrumbs);\n      Recover.default_expr ()\n    | token -> (\n      let err_pos = p.prev_end_pos in\n      Parser.err ~start_pos:err_pos p\n        (Diagnostics.unexpected token p.breadcrumbs);\n      match\n        skip_tokens_and_maybe_retry p\n          ~is_start_of_grammar:Grammar.is_atomic_expr_start\n      with\n      | None -> Recover.default_expr ()\n      | Some () -> parse_atomic_expr p)\n  in\n  Parser.eat_breadcrumb p;\n  expr\n\n(* module(module-expr)\n * module(module-expr : package-type) *)\nand parse_first_class_module_expr ~start_pos p =\n  Parser.expect Lparen p;\n\n  let mod_expr = parse_module_expr p in\n  let mod_end_loc = p.prev_end_pos in\n  match p.Parser.token with\n  | Colon ->\n    let colon_start = p.Parser.start_pos in\n    Parser.next p;\n    let attrs = parse_attributes p in\n    let package_type = parse_package_type ~start_pos:colon_start ~attrs p in\n    Parser.expect Rparen p;\n    let loc = mk_loc start_pos mod_end_loc in\n    let first_class_module = Ast_helper.Exp.pack ~loc mod_expr in\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Exp.constraint_ ~loc first_class_module 
package_type\n  | _ ->\n    Parser.expect Rparen p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Exp.pack ~loc mod_expr\n\nand parse_bracket_access p expr start_pos =\n  Parser.leave_breadcrumb p Grammar.ExprArrayAccess;\n  let lbracket = p.start_pos in\n  Parser.expect Lbracket p;\n  let string_start = p.start_pos in\n  match p.Parser.token with\n  | String s -> (\n    Parser.next p;\n    let string_end = p.prev_end_pos in\n    Parser.expect Rbracket p;\n    Parser.eat_breadcrumb p;\n    let rbracket = p.prev_end_pos in\n    let e =\n      let ident_loc = mk_loc string_start string_end in\n      let loc = mk_loc start_pos rbracket in\n      Ast_helper.Exp.send ~loc expr (Location.mkloc s ident_loc)\n    in\n    let e = parse_primary_expr ~operand:e p in\n    let equal_start = p.start_pos in\n    match p.token with\n    | Equal ->\n      Parser.next p;\n      let equal_end = p.prev_end_pos in\n      let rhs_expr = parse_expr p in\n      let loc = mk_loc start_pos rhs_expr.pexp_loc.loc_end in\n      let operator_loc = mk_loc equal_start equal_end in\n      Ast_helper.Exp.apply ~loc\n        (Ast_helper.Exp.ident ~loc:operator_loc\n           (Location.mkloc (Longident.Lident \"#=\") operator_loc))\n        [(Nolabel, e); (Nolabel, rhs_expr)]\n    | _ -> e)\n  | _ -> (\n    let access_expr = parse_constrained_or_coerced_expr p in\n    Parser.expect Rbracket p;\n    Parser.eat_breadcrumb p;\n    let rbracket = p.prev_end_pos in\n    let array_loc = mk_loc lbracket rbracket in\n    match p.token with\n    | Equal ->\n      Parser.leave_breadcrumb p ExprArrayMutation;\n      Parser.next p;\n      let rhs_expr = parse_expr p in\n      let array_set =\n        Location.mkloc (Longident.Ldot (Lident \"Array\", \"set\")) array_loc\n      in\n      let end_pos = p.prev_end_pos in\n      let array_set =\n        Ast_helper.Exp.apply ~loc:(mk_loc start_pos end_pos)\n          (Ast_helper.Exp.ident ~loc:array_loc array_set)\n          [(Nolabel, expr); 
(Nolabel, access_expr); (Nolabel, rhs_expr)]\n      in\n      Parser.eat_breadcrumb p;\n      array_set\n    | _ ->\n      let end_pos = p.prev_end_pos in\n      let e =\n        Ast_helper.Exp.apply ~loc:(mk_loc start_pos end_pos)\n          (Ast_helper.Exp.ident ~loc:array_loc\n             (Location.mkloc (Longident.Ldot (Lident \"Array\", \"get\")) array_loc))\n          [(Nolabel, expr); (Nolabel, access_expr)]\n      in\n      parse_primary_expr ~operand:e p)\n\n(* * A primary expression represents\n *  - atomic-expr\n *  - john.age\n *  - array[0]\n *  - applyFunctionTo(arg1, arg2)\n *\n *  The \"operand\" represents the expression that is operated on\n *)\nand parse_primary_expr ~operand ?(no_call = false) p =\n  let start_pos = operand.pexp_loc.loc_start in\n  let rec loop p expr =\n    match p.Parser.token with\n    | Dot -> (\n      Parser.next p;\n      let lident = parse_value_path_after_dot p in\n      match p.Parser.token with\n      | Equal when no_call = false ->\n        Parser.leave_breadcrumb p Grammar.ExprSetField;\n        Parser.next p;\n        let target_expr = parse_expr p in\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let setfield = Ast_helper.Exp.setfield ~loc expr lident target_expr in\n        Parser.eat_breadcrumb p;\n        setfield\n      | _ ->\n        let end_pos = p.prev_end_pos in\n        let loc = mk_loc start_pos end_pos in\n        loop p (Ast_helper.Exp.field ~loc expr lident))\n    | Lbracket\n      when no_call = false && p.prev_end_pos.pos_lnum == p.start_pos.pos_lnum ->\n      parse_bracket_access p expr start_pos\n    | Lparen\n      when no_call = false && p.prev_end_pos.pos_lnum == p.start_pos.pos_lnum ->\n      loop p (parse_call_expr p expr)\n    | Backtick\n      when no_call = false && p.prev_end_pos.pos_lnum == p.start_pos.pos_lnum\n      -> (\n      match expr.pexp_desc with\n      | Pexp_ident long_ident -> parse_template_expr ~prefix:long_ident p\n      | _ ->\n        Parser.err 
~start_pos:expr.pexp_loc.loc_start\n          ~end_pos:expr.pexp_loc.loc_end p\n          (Diagnostics.message\n             \"Tagged template literals are currently restricted to names like: \\\n              json`null`.\");\n        parse_template_expr p)\n    | _ -> expr\n  in\n  loop p operand\n\n(* a unary expression is an expression with only one operand and\n * unary operator. Examples:\n *   -1\n *   !condition\n *   -. 1.6\n *)\nand parse_unary_expr p =\n  let start_pos = p.Parser.start_pos in\n  match p.Parser.token with\n  | (Minus | MinusDot | Plus | PlusDot | Bang) as token ->\n    Parser.leave_breadcrumb p Grammar.ExprUnary;\n    let token_end = p.end_pos in\n    Parser.next p;\n    let operand = parse_unary_expr p in\n    let unary_expr = make_unary_expr start_pos token_end token operand in\n    Parser.eat_breadcrumb p;\n    unary_expr\n  | _ -> parse_primary_expr ~operand:(parse_atomic_expr p) p\n\n(* Represents an \"operand\" in a binary expression.\n * If you have `a + b`, `a` and `b` both represent\n * the operands of the binary expression with opeartor `+` *)\nand parse_operand_expr ~context p =\n  let start_pos = p.Parser.start_pos in\n  let attrs = ref (parse_attributes p) in\n  let expr =\n    match p.Parser.token with\n    | Assert ->\n      Parser.next p;\n      let expr = parse_expr p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Exp.assert_ ~loc expr\n    | Lident \"async\"\n    (* we need to be careful when we're in a ternary true branch:\n       `condition ? 
ternary-true-branch : false-branch`\n       Arrow expressions could be of the form: `async (): int => stuff()`\n       But if we're in a ternary, the `:` of the ternary takes precedence\n    *)\n      when is_es6_arrow_expression\n             ~in_ternary:(context = TernaryTrueBranchExpr)\n             p ->\n      let arrow_attrs = !attrs in\n      let () = attrs := [] in\n      parse_async_arrow_expression ~arrow_attrs p\n    | Await -> parse_await_expression p\n    | Try -> parse_try_expression p\n    | If -> parse_if_or_if_let_expression p\n    | For -> parse_for_expression p\n    | While -> parse_while_expression p\n    | Switch -> parse_switch_expression p\n    | _ ->\n      if\n        context != WhenExpr\n        && is_es6_arrow_expression\n             ~in_ternary:(context = TernaryTrueBranchExpr)\n             p\n      then\n        let arrow_attrs = !attrs in\n        let () = attrs := [] in\n        parse_es6_arrow_expression ~arrow_attrs ~context p\n      else parse_unary_expr p\n  in\n  (* let endPos = p.Parser.prevEndPos in *)\n  {\n    expr with\n    pexp_attributes = List.concat [expr.Parsetree.pexp_attributes; !attrs];\n    (* pexp_loc = mkLoc startPos endPos *)\n  }\n\n(* a binary expression is an expression that combines two expressions with an\n * operator. Examples:\n *    a + b\n *    f(x) |> g(y)\n *)\nand parse_binary_expr ?(context = OrdinaryExpr) ?a p prec =\n  let a =\n    match a with\n    | Some e -> e\n    | None -> parse_operand_expr ~context p\n  in\n  let rec loop a =\n    let token = p.Parser.token in\n    let token_prec =\n      match token with\n      (* Can the minus be interpreted as a binary operator? 
Or is it a unary?\n       * let w = {\n       *   x\n       *   -10\n       * }\n       * vs\n       * let w = {\n       *   width\n       *   - gap\n       * }\n       *\n       * First case is unary, second is a binary operator.\n       * See Scanner.isBinaryOp *)\n      | (Minus | MinusDot | LessThan)\n        when (not\n                (Scanner.is_binary_op p.scanner.src p.start_pos.pos_cnum\n                   p.end_pos.pos_cnum))\n             && p.start_pos.pos_lnum > p.prev_end_pos.pos_lnum ->\n        -1\n      | token -> Token.precedence token\n    in\n    if token_prec < prec then a\n    else (\n      Parser.leave_breadcrumb p (Grammar.ExprBinaryAfterOp token);\n      let start_pos = p.start_pos in\n      Parser.next p;\n      let end_pos = p.prev_end_pos in\n      let token_prec =\n        (* exponentiation operator is right-associative *)\n        if token = Exponentiation then token_prec else token_prec + 1\n      in\n      let b = parse_binary_expr ~context p token_prec in\n      let loc = mk_loc a.Parsetree.pexp_loc.loc_start b.pexp_loc.loc_end in\n      let expr =\n        match (token, b.pexp_desc) with\n        | BarGreater, Pexp_apply (fun_expr, args)\n          when p.uncurried_config = Uncurried ->\n          {b with pexp_desc = Pexp_apply (fun_expr, args @ [(Nolabel, a)])}\n        | BarGreater, _ when p.uncurried_config = Uncurried ->\n          Ast_helper.Exp.apply ~loc b [(Nolabel, a)]\n        | _ ->\n          Ast_helper.Exp.apply ~loc\n            (make_infix_operator p token start_pos end_pos)\n            [(Nolabel, a); (Nolabel, b)]\n      in\n      Parser.eat_breadcrumb p;\n      loop expr)\n  in\n  loop a\n\n(* If we even need this, determines if < might be the start of jsx. 
Not 100% complete *)\n(* and isStartOfJsx p = *)\n(* Parser.lookahead p (fun p -> *)\n(* match p.Parser.token with *)\n(* | LessThan -> *)\n(* Parser.next p; *)\n(* begin match p.token with *)\n(* | GreaterThan (* <> *) -> true *)\n(* | Lident _ | Uident _ | List -> *)\n(* ignore (parseJsxName p); *)\n(* begin match p.token with *)\n(* | GreaterThan (* <div> *) -> true *)\n(* | Question (*<Component ? *) -> true *)\n(* | Lident _ | List -> *)\n(* Parser.next p; *)\n(* begin match p.token with *)\n(* | Equal (* <Component handleClick= *) -> true *)\n(* | _ -> false (* TODO *) *)\n(* end *)\n(* | Forwardslash (* <Component / *)-> *)\n(* Parser.next p; *)\n(* begin match p.token with *)\n(* | GreaterThan (* <Component /> *) -> true *)\n(* | _ -> false *)\n(* end *)\n(* | _ -> *)\n(* false *)\n(* end *)\n(* | _ -> false *)\n(* end *)\n(* | _ -> false *)\n(* ) *)\n\nand parse_template_expr ?prefix p =\n  let part_prefix =\n    (* we could stop treating js and j prefix as something special\n       for json, we would first need to remove @as(json`true`) feature *)\n    match prefix with\n    | Some {txt = Longident.Lident ((\"js\" | \"j\" | \"json\") as prefix); _} ->\n      Some prefix\n    | Some _ -> None\n    | None -> Some \"js\"\n  in\n  let start_pos = p.Parser.start_pos in\n\n  let parse_parts p =\n    let rec aux acc =\n      let start_pos = p.Parser.start_pos in\n      Parser.next_template_literal_token p;\n      match p.token with\n      | TemplateTail (txt, last_pos) ->\n        Parser.next p;\n        let loc = mk_loc start_pos last_pos in\n        let str =\n          Ast_helper.Exp.constant ~attrs:[template_literal_attr] ~loc\n            (Pconst_string (txt, part_prefix))\n        in\n        List.rev ((str, None) :: acc)\n      | TemplatePart (txt, last_pos) ->\n        Parser.next p;\n        let loc = mk_loc start_pos last_pos in\n        let expr = parse_expr_block p in\n        let str =\n          Ast_helper.Exp.constant 
~attrs:[template_literal_attr] ~loc\n            (Pconst_string (txt, part_prefix))\n        in\n        aux ((str, Some expr) :: acc)\n      | token ->\n        Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n        []\n    in\n    aux []\n  in\n  let parts = parse_parts p in\n  let strings = List.map fst parts in\n  let values = Ext_list.filter_map parts snd in\n  let end_pos = p.Parser.end_pos in\n\n  let gen_tagged_template_call lident =\n    let ident =\n      Ast_helper.Exp.ident ~attrs:[] ~loc:Location.none\n        (Location.mknoloc lident)\n    in\n    let strings_array =\n      Ast_helper.Exp.array ~attrs:[] ~loc:Location.none strings\n    in\n    let values_array =\n      Ast_helper.Exp.array ~attrs:[] ~loc:Location.none values\n    in\n    Ast_helper.Exp.apply\n      ~attrs:[tagged_template_literal_attr]\n      ~loc:(mk_loc start_pos end_pos) ident\n      [(Nolabel, strings_array); (Nolabel, values_array)]\n  in\n\n  let hidden_operator =\n    let op = Location.mknoloc (Longident.Lident \"^\") in\n    Ast_helper.Exp.ident op\n  in\n  let concat (e1 : Parsetree.expression) (e2 : Parsetree.expression) =\n    let loc = mk_loc e1.pexp_loc.loc_start e2.pexp_loc.loc_end in\n    Ast_helper.Exp.apply ~attrs:[template_literal_attr] ~loc hidden_operator\n      [(Nolabel, e1); (Nolabel, e2)]\n  in\n  let gen_interpolated_string () =\n    let subparts =\n      List.flatten\n        (List.map\n           (fun part ->\n             match part with\n             | s, Some v -> [s; v]\n             | s, None -> [s])\n           parts)\n    in\n    let expr_option =\n      List.fold_left\n        (fun acc subpart ->\n          Some\n            (match acc with\n            | Some expr -> concat expr subpart\n            | None -> subpart))\n        None subparts\n    in\n    match expr_option with\n    | Some expr -> expr\n    | None -> Ast_helper.Exp.constant (Pconst_string (\"\", None))\n  in\n\n  match prefix with\n  | Some {txt = Longident.Lident 
(\"js\" | \"j\" | \"json\"); _} | None ->\n    gen_interpolated_string ()\n  | Some {txt = lident} -> gen_tagged_template_call lident\n\n(* Overparse: let f = a : int => a + 1, is it (a : int) => or (a): int =>\n * Also overparse constraints:\n *  let x = {\n *    let a = 1\n *    a + pi: int\n *  }\n *\n *  We want to give a nice error message in these cases\n *)\nand over_parse_constrained_or_coerced_or_arrow_expression p expr =\n  match p.Parser.token with\n  | ColonGreaterThan -> parse_coerced_expr ~expr p\n  | Colon -> (\n    Parser.next p;\n    let typ = parse_typ_expr ~es6_arrow:false p in\n    match p.Parser.token with\n    | EqualGreater ->\n      Parser.next p;\n      let body = parse_expr p in\n      let pat =\n        match expr.pexp_desc with\n        | Pexp_ident longident ->\n          Ast_helper.Pat.var ~loc:expr.pexp_loc\n            (Location.mkloc\n               (Longident.flatten longident.txt |> String.concat \".\")\n               longident.loc)\n        (* TODO: can we convert more expressions to patterns?*)\n        | _ ->\n          Ast_helper.Pat.var ~loc:expr.pexp_loc\n            (Location.mkloc \"pattern\" expr.pexp_loc)\n      in\n      let arrow1 =\n        Ast_helper.Exp.fun_\n          ~loc:(mk_loc expr.pexp_loc.loc_start body.pexp_loc.loc_end)\n          Asttypes.Nolabel None pat\n          (Ast_helper.Exp.constraint_ body typ)\n      in\n      let arrow2 =\n        Ast_helper.Exp.fun_\n          ~loc:(mk_loc expr.pexp_loc.loc_start body.pexp_loc.loc_end)\n          Asttypes.Nolabel None\n          (Ast_helper.Pat.constraint_ pat typ)\n          body\n      in\n      let msg =\n        Doc.breakable_group ~force_break:true\n          (Doc.concat\n             [\n               Doc.text\n                 \"Did you mean to annotate the parameter type or the return \\\n                  type?\";\n               Doc.indent\n                 (Doc.concat\n                    [\n                      Doc.line;\n                      
Doc.text \"1) \";\n                      ResPrinter.print_expression arrow1 CommentTable.empty;\n                      Doc.line;\n                      Doc.text \"2) \";\n                      ResPrinter.print_expression arrow2 CommentTable.empty;\n                    ]);\n             ])\n        |> Doc.to_string ~width:80\n      in\n      Parser.err ~start_pos:expr.pexp_loc.loc_start\n        ~end_pos:body.pexp_loc.loc_end p (Diagnostics.message msg);\n      arrow1\n    | _ ->\n      let loc = mk_loc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in\n      let expr = Ast_helper.Exp.constraint_ ~loc expr typ in\n      let () =\n        Parser.err ~start_pos:expr.pexp_loc.loc_start\n          ~end_pos:typ.ptyp_loc.loc_end p\n          (Diagnostics.message\n             (Doc.breakable_group ~force_break:true\n                (Doc.concat\n                   [\n                     Doc.text\n                       \"Expressions with type constraints need to be wrapped \\\n                        in parens:\";\n                     Doc.indent\n                       (Doc.concat\n                          [\n                            Doc.line;\n                            ResPrinter.add_parens\n                              (ResPrinter.print_expression expr\n                                 CommentTable.empty);\n                          ]);\n                   ])\n             |> Doc.to_string ~width:80))\n      in\n      expr)\n  | _ -> expr\n\nand parse_let_binding_body ~start_pos ~attrs p =\n  Parser.begin_region p;\n  Parser.leave_breadcrumb p Grammar.LetBinding;\n  let pat, exp =\n    Parser.leave_breadcrumb p Grammar.Pattern;\n    let pat = parse_pattern p in\n    Parser.eat_breadcrumb p;\n    match p.Parser.token with\n    | Colon -> (\n      Parser.next p;\n      match p.token with\n      | Typ ->\n        (* locally abstract types *)\n        Parser.next p;\n        let newtypes = parse_lident_list p in\n        Parser.expect Dot p;\n        let typ = 
parse_typ_expr p in\n        Parser.expect Equal p;\n        let expr = parse_expr p in\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let exp, poly = wrap_type_annotation ~loc newtypes typ expr in\n        let pat = Ast_helper.Pat.constraint_ ~loc pat poly in\n        (pat, exp)\n      | _ ->\n        let poly_type = parse_poly_type_expr p in\n        let loc =\n          {pat.ppat_loc with loc_end = poly_type.Parsetree.ptyp_loc.loc_end}\n        in\n        let pat = Ast_helper.Pat.constraint_ ~loc pat poly_type in\n        Parser.expect Token.Equal p;\n        let exp = parse_expr p in\n        let exp = over_parse_constrained_or_coerced_or_arrow_expression p exp in\n        (pat, exp))\n    | _ ->\n      Parser.expect Token.Equal p;\n      let exp =\n        over_parse_constrained_or_coerced_or_arrow_expression p (parse_expr p)\n      in\n      (pat, exp)\n  in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  let vb = Ast_helper.Vb.mk ~loc ~attrs pat exp in\n  Parser.eat_breadcrumb p;\n  Parser.end_region p;\n  vb\n\n(* TODO: find a better way? 
Is it possible?\n * let a = 1\n * @attr\n * and b = 2\n *\n * The problem is that without semi we need a lookahead to determine\n * if the attr is on the letbinding or the start of a new thing\n *\n * let a = 1\n * @attr\n * let b = 1\n *\n * Here @attr should attach to something \"new\": `let b = 1`\n * The parser state is forked, which is quite expensive…\n *)\nand parse_attributes_and_binding (p : Parser.t) =\n  let err = p.scanner.err in\n  let ch = p.scanner.ch in\n  let offset = p.scanner.offset in\n  let offset16 = p.scanner.offset16 in\n  let line_offset = p.scanner.line_offset in\n  let lnum = p.scanner.lnum in\n  let mode = p.scanner.mode in\n  let token = p.token in\n  let start_pos = p.start_pos in\n  let end_pos = p.end_pos in\n  let prev_end_pos = p.prev_end_pos in\n  let breadcrumbs = p.breadcrumbs in\n  let errors = p.errors in\n  let diagnostics = p.diagnostics in\n  let comments = p.comments in\n\n  match p.Parser.token with\n  | At -> (\n    let attrs = parse_attributes p in\n    match p.Parser.token with\n    | And -> attrs\n    | _ ->\n      p.scanner.err <- err;\n      p.scanner.ch <- ch;\n      p.scanner.offset <- offset;\n      p.scanner.offset16 <- offset16;\n      p.scanner.line_offset <- line_offset;\n      p.scanner.lnum <- lnum;\n      p.scanner.mode <- mode;\n      p.token <- token;\n      p.start_pos <- start_pos;\n      p.end_pos <- end_pos;\n      p.prev_end_pos <- prev_end_pos;\n      p.breadcrumbs <- breadcrumbs;\n      p.errors <- errors;\n      p.diagnostics <- diagnostics;\n      p.comments <- comments;\n      [])\n  | _ -> []\n\n(* definition\t::=\tlet [rec] let-binding  { and let-binding }   *)\nand parse_let_bindings ~attrs ~start_pos p =\n  Parser.optional p Let |> ignore;\n  let rec_flag =\n    if Parser.optional p Token.Rec then Asttypes.Recursive\n    else Asttypes.Nonrecursive\n  in\n  let first = parse_let_binding_body ~start_pos ~attrs p in\n\n  let rec loop p bindings =\n    let start_pos = p.Parser.start_pos in\n    
let attrs = parse_attributes_and_binding p in\n    match p.Parser.token with\n    | And ->\n      Parser.next p;\n      ignore (Parser.optional p Let);\n      (* overparse for fault tolerance *)\n      let let_binding = parse_let_binding_body ~start_pos ~attrs p in\n      loop p (let_binding :: bindings)\n    | _ -> List.rev bindings\n  in\n  (rec_flag, loop p [first])\n\n(*\n * div -> div\n * Foo -> Foo.createElement\n * Foo.Bar -> Foo.Bar.createElement\n *)\nand parse_jsx_name p =\n  let longident =\n    match p.Parser.token with\n    | Lident ident ->\n      let ident_start = p.start_pos in\n      let ident_end = p.end_pos in\n      Parser.next p;\n      let loc = mk_loc ident_start ident_end in\n      Location.mkloc (Longident.Lident ident) loc\n    | Uident _ ->\n      let longident = parse_module_long_ident ~lowercase:true p in\n      Location.mkloc\n        (Longident.Ldot (longident.txt, \"createElement\"))\n        longident.loc\n    | _ ->\n      let msg =\n        \"A jsx name must be a lowercase or uppercase name, like: div in <div \\\n         /> or Navbar in <Navbar />\"\n      in\n      Parser.err p (Diagnostics.message msg);\n      Location.mknoloc (Longident.Lident \"_\")\n  in\n  Ast_helper.Exp.ident ~loc:longident.loc longident\n\nand parse_jsx_opening_or_self_closing_element ~start_pos p =\n  let jsx_start_pos = p.Parser.start_pos in\n  let name = parse_jsx_name p in\n  let jsx_props = parse_jsx_props p in\n  let children =\n    match p.Parser.token with\n    | Forwardslash ->\n      (* <foo a=b /> *)\n      let children_start_pos = p.Parser.start_pos in\n      Parser.next p;\n      let children_end_pos = p.Parser.start_pos in\n      Scanner.pop_mode p.scanner Jsx;\n      Parser.expect GreaterThan p;\n      let loc = mk_loc children_start_pos children_end_pos in\n      make_list_expression loc [] None (* no children *)\n    | GreaterThan -> (\n      (* <foo a=b> bar </foo> *)\n      let children_start_pos = p.Parser.start_pos in\n      
Parser.next p;\n      let spread, children = parse_jsx_children p in\n      let children_end_pos = p.Parser.start_pos in\n      let () =\n        match p.token with\n        | LessThanSlash -> Parser.next p\n        | LessThan ->\n          Parser.next p;\n          Parser.expect Forwardslash p\n        | token when Grammar.is_structure_item_start token -> ()\n        | _ -> Parser.expect LessThanSlash p\n      in\n      match p.Parser.token with\n      | (Lident _ | Uident _) when verify_jsx_opening_closing_name p name -> (\n        Scanner.pop_mode p.scanner Jsx;\n        Parser.expect GreaterThan p;\n        let loc = mk_loc children_start_pos children_end_pos in\n        match (spread, children) with\n        | true, child :: _ -> child\n        | _ -> make_list_expression loc children None)\n      | token -> (\n        Scanner.pop_mode p.scanner Jsx;\n        let () =\n          if Grammar.is_structure_item_start token then\n            let closing = \"</\" ^ string_of_pexp_ident name ^ \">\" in\n            let msg = Diagnostics.message (\"Missing \" ^ closing) in\n            Parser.err ~start_pos ~end_pos:p.prev_end_pos p msg\n          else\n            let opening = \"</\" ^ string_of_pexp_ident name ^ \">\" in\n            let msg =\n              \"Closing jsx name should be the same as the opening name. 
Did \\\n               you mean \" ^ opening ^ \" ?\"\n            in\n            Parser.err ~start_pos ~end_pos:p.prev_end_pos p\n              (Diagnostics.message msg);\n            Parser.expect GreaterThan p\n        in\n        let loc = mk_loc children_start_pos children_end_pos in\n        match (spread, children) with\n        | true, child :: _ -> child\n        | _ -> make_list_expression loc children None))\n    | token ->\n      Scanner.pop_mode p.scanner Jsx;\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      make_list_expression Location.none [] None\n  in\n  let jsx_end_pos = p.prev_end_pos in\n  let loc = mk_loc jsx_start_pos jsx_end_pos in\n  Ast_helper.Exp.apply ~loc name\n    (List.concat\n       [\n         jsx_props;\n         [\n           (Asttypes.Labelled \"children\", children);\n           ( Asttypes.Nolabel,\n             Ast_helper.Exp.construct\n               (Location.mknoloc (Longident.Lident \"()\"))\n               None );\n         ];\n       ])\n\n(*\n *  jsx ::=\n *    | <> jsx-children </>\n *    | <element-name {jsx-prop} />\n *    | <element-name {jsx-prop}> jsx-children </element-name>\n *\n *  jsx-children ::= primary-expr*          * => 0 or more\n *)\nand parse_jsx p =\n  Scanner.set_jsx_mode p.Parser.scanner;\n  Parser.leave_breadcrumb p Grammar.Jsx;\n  let start_pos = p.Parser.start_pos in\n  Parser.expect LessThan p;\n  let jsx_expr =\n    match p.Parser.token with\n    | Lident _ | Uident _ ->\n      parse_jsx_opening_or_self_closing_element ~start_pos p\n    | GreaterThan ->\n      (* fragment: <> foo </> *)\n      parse_jsx_fragment p\n    | _ -> parse_jsx_name p\n  in\n  Parser.eat_breadcrumb p;\n  {jsx_expr with pexp_attributes = [jsx_attr]}\n\n(*\n * jsx-fragment ::=\n *  | <> </>\n *  | <> jsx-children </>\n *)\nand parse_jsx_fragment p =\n  let children_start_pos = p.Parser.start_pos in\n  Parser.expect GreaterThan p;\n  let _spread, children = parse_jsx_children p in\n  let 
children_end_pos = p.Parser.start_pos in\n  if p.token = LessThan then p.token <- Scanner.reconsider_less_than p.scanner;\n  Parser.expect LessThanSlash p;\n  Scanner.pop_mode p.scanner Jsx;\n  Parser.expect GreaterThan p;\n  let loc = mk_loc children_start_pos children_end_pos in\n  make_list_expression loc children None\n\n(*\n * jsx-prop ::=\n *   |  lident\n *   | ?lident\n *   |  lident =  jsx_expr\n *   |  lident = ?jsx_expr\n *   |  {...jsx_expr}\n *)\nand parse_jsx_prop p =\n  match p.Parser.token with\n  | Question | Lident _ -> (\n    let optional = Parser.optional p Question in\n    let name, loc = parse_lident p in\n    let prop_loc_attr =\n      (Location.mkloc \"res.namedArgLoc\" loc, Parsetree.PStr [])\n    in\n    (* optional punning: <foo ?a /> *)\n    if optional then\n      Some\n        ( Asttypes.Optional name,\n          Ast_helper.Exp.ident ~attrs:[prop_loc_attr] ~loc\n            (Location.mkloc (Longident.Lident name) loc) )\n    else\n      match p.Parser.token with\n      | Equal ->\n        Parser.next p;\n        (* no punning *)\n        let optional = Parser.optional p Question in\n        Scanner.pop_mode p.scanner Jsx;\n        let attr_expr =\n          let e = parse_primary_expr ~operand:(parse_atomic_expr p) p in\n          {e with pexp_attributes = prop_loc_attr :: e.pexp_attributes}\n        in\n        let label =\n          if optional then Asttypes.Optional name else Asttypes.Labelled name\n        in\n        Some (label, attr_expr)\n      | _ ->\n        let attr_expr =\n          Ast_helper.Exp.ident ~loc ~attrs:[prop_loc_attr]\n            (Location.mkloc (Longident.Lident name) loc)\n        in\n        let label =\n          if optional then Asttypes.Optional name else Asttypes.Labelled name\n        in\n        Some (label, attr_expr))\n  (* {...props} *)\n  | Lbrace -> (\n    Scanner.pop_mode p.scanner Jsx;\n    Parser.next p;\n    match p.Parser.token with\n    | DotDotDot -> (\n      Scanner.pop_mode p.scanner 
Jsx;\n      Parser.next p;\n      let loc = mk_loc p.Parser.start_pos p.prev_end_pos in\n      let prop_loc_attr =\n        (Location.mkloc \"res.namedArgLoc\" loc, Parsetree.PStr [])\n      in\n      let attr_expr =\n        let e = parse_primary_expr ~operand:(parse_expr p) p in\n        {e with pexp_attributes = prop_loc_attr :: e.pexp_attributes}\n      in\n      (* using label \"spreadProps\" to distinguish from others *)\n      let label = Asttypes.Labelled \"_spreadProps\" in\n      match p.Parser.token with\n      | Rbrace ->\n        Parser.next p;\n        Scanner.set_jsx_mode p.scanner;\n        Some (label, attr_expr)\n      | _ -> None)\n    | _ -> None)\n  | _ -> None\n\nand parse_jsx_props p =\n  parse_region ~grammar:Grammar.JsxAttribute ~f:parse_jsx_prop p\n\nand parse_jsx_children p =\n  Scanner.pop_mode p.scanner Jsx;\n  let rec loop p children =\n    match p.Parser.token with\n    | Token.Eof | LessThanSlash -> children\n    | LessThan ->\n      (* Imagine: <div> <Navbar /> <\n       * is `<` the start of a jsx-child? <div …\n       * or is it the start of a closing tag?  
</div>\n       * reconsiderLessThan peeks at the next token and\n       * determines the correct token to disambiguate *)\n      let token = Scanner.reconsider_less_than p.scanner in\n      if token = LessThan then\n        let child =\n          parse_primary_expr ~operand:(parse_atomic_expr p) ~no_call:true p\n        in\n        loop p (child :: children)\n      else\n        (* LessThanSlash *)\n        let () = p.token <- token in\n        children\n    | token when Grammar.is_jsx_child_start token ->\n      let child =\n        parse_primary_expr ~operand:(parse_atomic_expr p) ~no_call:true p\n      in\n      loop p (child :: children)\n    | _ -> children\n  in\n  let spread, children =\n    match p.Parser.token with\n    | DotDotDot ->\n      Parser.next p;\n      (true, [parse_primary_expr ~operand:(parse_atomic_expr p) ~no_call:true p])\n    | _ ->\n      let children = List.rev (loop p []) in\n      (false, children)\n  in\n  Scanner.set_jsx_mode p.scanner;\n  (spread, children)\n\nand parse_braced_or_record_expr p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Lbrace p;\n  match p.Parser.token with\n  | Rbrace ->\n    Parser.next p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Exp.record ~loc [] None\n  | DotDotDot ->\n    (* beginning of record spread, parse record *)\n    Parser.next p;\n    let spread_expr = parse_constrained_or_coerced_expr p in\n    Parser.expect Comma p;\n    let expr = parse_record_expr ~start_pos ~spread:(Some spread_expr) [] p in\n    Parser.expect Rbrace p;\n    expr\n  | String s -> (\n    let field =\n      let loc = mk_loc p.start_pos p.end_pos in\n      Parser.next p;\n      Location.mkloc (Longident.Lident s) loc\n    in\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      let field_expr = parse_expr p in\n      Parser.optional p Comma |> ignore;\n      let expr =\n        parse_record_expr_with_string_keys ~start_pos (field, field_expr) p\n      in\n      
Parser.expect Rbrace p;\n      expr\n    | _ -> (\n      let tag = if p.mode = ParseForTypeChecker then Some \"js\" else None in\n      let constant =\n        Ast_helper.Exp.constant ~loc:field.loc\n          (Parsetree.Pconst_string (s, tag))\n      in\n      let a = parse_primary_expr ~operand:constant p in\n      let e = parse_binary_expr ~a p 1 in\n      let e = parse_ternary_expr e p in\n      match p.Parser.token with\n      | Semicolon ->\n        let expr = parse_expr_block ~first:e p in\n        Parser.expect Rbrace p;\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let braces = make_braces_attr loc in\n        {\n          expr with\n          Parsetree.pexp_attributes = braces :: expr.Parsetree.pexp_attributes;\n        }\n      | Rbrace ->\n        Parser.next p;\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let braces = make_braces_attr loc in\n        {e with pexp_attributes = braces :: e.pexp_attributes}\n      | _ ->\n        let expr = parse_expr_block ~first:e p in\n        Parser.expect Rbrace p;\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let braces = make_braces_attr loc in\n        {expr with pexp_attributes = braces :: expr.pexp_attributes}))\n  | Question ->\n    let expr = parse_record_expr ~start_pos [] p in\n    Parser.expect Rbrace p;\n    expr\n  (*\n    The branch below takes care of the \"braced\" expression {async}.\n    The big reason that we need all these branches is that {x} isn't a record with a punned field x, but a braced expression… There's lots of \"ambiguity\" between a record with a single punned field and a braced expression…\n    What is {x}?\n      1) record {x: x}\n      2) expression x which happens to wrapped in braces\n    Due to historical reasons, we always follow 2\n  *)\n  | Lident \"async\" when is_es6_arrow_expression ~in_ternary:false p ->\n    let expr = parse_async_arrow_expression p in\n    let expr = parse_expr_block ~first:expr p in\n    Parser.expect 
Rbrace p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    let braces = make_braces_attr loc in\n    {expr with pexp_attributes = braces :: expr.pexp_attributes}\n  | Uident _ | Lident _ -> (\n    let start_token = p.token in\n    let value_or_constructor = parse_value_or_constructor p in\n    match value_or_constructor.pexp_desc with\n    | Pexp_ident path_ident -> (\n      let ident_end_pos = p.prev_end_pos in\n      match p.Parser.token with\n      | Comma ->\n        Parser.next p;\n        let value_or_constructor =\n          match start_token with\n          | Uident _ ->\n            remove_module_name_from_punned_field_value value_or_constructor\n          | _ -> value_or_constructor\n        in\n        let expr =\n          parse_record_expr ~start_pos [(path_ident, value_or_constructor)] p\n        in\n        Parser.expect Rbrace p;\n        expr\n      | Colon -> (\n        Parser.next p;\n        let optional = parse_optional_label p in\n        let field_expr = parse_expr p in\n        let field_expr = make_expression_optional ~optional field_expr in\n        match p.token with\n        | Rbrace ->\n          Parser.next p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          Ast_helper.Exp.record ~loc [(path_ident, field_expr)] None\n        | _ ->\n          Parser.expect Comma p;\n          let expr =\n            parse_record_expr ~start_pos [(path_ident, field_expr)] p\n          in\n          Parser.expect Rbrace p;\n          expr)\n      (* error case *)\n      | Lident _ ->\n        if p.prev_end_pos.pos_lnum < p.start_pos.pos_lnum then (\n          Parser.expect Comma p;\n          let expr =\n            parse_record_expr ~start_pos [(path_ident, value_or_constructor)] p\n          in\n          Parser.expect Rbrace p;\n          expr)\n        else (\n          Parser.expect Colon p;\n          let expr =\n            parse_record_expr ~start_pos [(path_ident, value_or_constructor)] p\n          in\n          
Parser.expect Rbrace p;\n          expr)\n      | Semicolon ->\n        let expr =\n          parse_expr_block ~first:(Ast_helper.Exp.ident path_ident) p\n        in\n        Parser.expect Rbrace p;\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let braces = make_braces_attr loc in\n        {expr with pexp_attributes = braces :: expr.pexp_attributes}\n      | Rbrace ->\n        Parser.next p;\n        let expr = Ast_helper.Exp.ident ~loc:path_ident.loc path_ident in\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let braces = make_braces_attr loc in\n        {expr with pexp_attributes = braces :: expr.pexp_attributes}\n      | EqualGreater -> (\n        let loc = mk_loc start_pos ident_end_pos in\n        let ident = Location.mkloc (Longident.last path_ident.txt) loc in\n        let a =\n          parse_es6_arrow_expression\n            ~parameters:\n              [\n                TermParameter\n                  {\n                    dotted = false;\n                    attrs = [];\n                    label = Asttypes.Nolabel;\n                    expr = None;\n                    pat = Ast_helper.Pat.var ~loc:ident.loc ident;\n                    pos = start_pos;\n                  };\n              ]\n            p\n        in\n        let e = parse_binary_expr ~a p 1 in\n        let e = parse_ternary_expr e p in\n        match p.Parser.token with\n        | Semicolon ->\n          let expr = parse_expr_block ~first:e p in\n          Parser.expect Rbrace p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          let braces = make_braces_attr loc in\n          {expr with pexp_attributes = braces :: expr.pexp_attributes}\n        | Rbrace ->\n          Parser.next p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          let braces = make_braces_attr loc in\n          {e with pexp_attributes = braces :: e.pexp_attributes}\n        | _ ->\n          let expr = parse_expr_block ~first:e p in\n          
Parser.expect Rbrace p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          let braces = make_braces_attr loc in\n          {expr with pexp_attributes = braces :: expr.pexp_attributes})\n      | _ -> (\n        Parser.leave_breadcrumb p Grammar.ExprBlock;\n        let a =\n          parse_primary_expr\n            ~operand:(Ast_helper.Exp.ident ~loc:path_ident.loc path_ident)\n            p\n        in\n        let e = parse_binary_expr ~a p 1 in\n        let e = parse_ternary_expr e p in\n        Parser.eat_breadcrumb p;\n        match p.Parser.token with\n        | Semicolon ->\n          let expr = parse_expr_block ~first:e p in\n          Parser.expect Rbrace p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          let braces = make_braces_attr loc in\n          {expr with pexp_attributes = braces :: expr.pexp_attributes}\n        | Rbrace ->\n          Parser.next p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          let braces = make_braces_attr loc in\n          {e with pexp_attributes = braces :: e.pexp_attributes}\n        | _ ->\n          let expr = parse_expr_block ~first:e p in\n          Parser.expect Rbrace p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          let braces = make_braces_attr loc in\n          {expr with pexp_attributes = braces :: expr.pexp_attributes}))\n    | _ -> (\n      Parser.leave_breadcrumb p Grammar.ExprBlock;\n      let a = parse_primary_expr ~operand:value_or_constructor p in\n      let e = parse_binary_expr ~a p 1 in\n      let e = parse_ternary_expr e p in\n      Parser.eat_breadcrumb p;\n      match p.Parser.token with\n      | Semicolon ->\n        let expr = parse_expr_block ~first:e p in\n        Parser.expect Rbrace p;\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let braces = make_braces_attr loc in\n        {expr with pexp_attributes = braces :: expr.pexp_attributes}\n      | Rbrace ->\n        Parser.next p;\n        let loc = mk_loc 
start_pos p.prev_end_pos in\n        let braces = make_braces_attr loc in\n        {e with pexp_attributes = braces :: e.pexp_attributes}\n      | _ ->\n        let expr = parse_expr_block ~first:e p in\n        Parser.expect Rbrace p;\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let braces = make_braces_attr loc in\n        {expr with pexp_attributes = braces :: expr.pexp_attributes}))\n  | _ ->\n    let expr = parse_expr_block p in\n    Parser.expect Rbrace p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    let braces = make_braces_attr loc in\n    {expr with pexp_attributes = braces :: expr.pexp_attributes}\n\nand parse_record_expr_row_with_string_key p =\n  match p.Parser.token with\n  | String s -> (\n    let loc = mk_loc p.start_pos p.end_pos in\n    Parser.next p;\n    let field = Location.mkloc (Longident.Lident s) loc in\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      let field_expr = parse_expr p in\n      Some (field, field_expr)\n    | _ -> Some (field, Ast_helper.Exp.ident ~loc:field.loc field))\n  | _ -> None\n\nand parse_record_expr_row p =\n  let attrs = parse_attributes p in\n  let () =\n    match p.Parser.token with\n    | Token.DotDotDot ->\n      Parser.err p (Diagnostics.message ErrorMessages.record_expr_spread);\n      Parser.next p\n    | _ -> ()\n  in\n  match p.Parser.token with\n  | Lident _ | Uident _ -> (\n    let start_token = p.token in\n    let field = parse_value_path p in\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      let optional = parse_optional_label p in\n      let field_expr = parse_expr p in\n      let field_expr = make_expression_optional ~optional field_expr in\n      Some (field, field_expr)\n    | _ ->\n      let value = Ast_helper.Exp.ident ~loc:field.loc ~attrs field in\n      let value =\n        match start_token with\n        | Uident _ -> remove_module_name_from_punned_field_value value\n        | _ -> value\n      in\n      Some 
(field, value))\n  | Question -> (\n    Parser.next p;\n    match p.Parser.token with\n    | Lident _ | Uident _ ->\n      let start_token = p.token in\n      let field = parse_value_path p in\n      let value = Ast_helper.Exp.ident ~loc:field.loc ~attrs field in\n      let value =\n        match start_token with\n        | Uident _ -> remove_module_name_from_punned_field_value value\n        | _ -> value\n      in\n      Some (field, make_expression_optional ~optional:true value)\n    | _ -> None)\n  | _ -> None\n\nand parse_record_expr_with_string_keys ~start_pos first_row p =\n  let rows =\n    first_row\n    :: parse_comma_delimited_region ~grammar:Grammar.RecordRowsStringKey\n         ~closing:Rbrace ~f:parse_record_expr_row_with_string_key p\n  in\n  let loc = mk_loc start_pos p.end_pos in\n  let record_str_expr =\n    Ast_helper.Str.eval ~loc (Ast_helper.Exp.record ~loc rows None)\n  in\n  Ast_helper.Exp.extension ~loc\n    (Location.mkloc \"obj\" loc, Parsetree.PStr [record_str_expr])\n\nand parse_record_expr ~start_pos ?(spread = None) rows p =\n  let exprs =\n    parse_comma_delimited_region ~grammar:Grammar.RecordRows ~closing:Rbrace\n      ~f:parse_record_expr_row p\n  in\n  let rows = List.concat [rows; exprs] in\n  let () =\n    match rows with\n    | [] ->\n      let msg = \"Record spread needs at least one field that's updated\" in\n      Parser.err p (Diagnostics.message msg)\n    | _rows -> ()\n  in\n  let loc = mk_loc start_pos p.end_pos in\n  Ast_helper.Exp.record ~loc rows spread\n\nand parse_newline_or_semicolon_expr_block p =\n  match p.Parser.token with\n  | Semicolon -> Parser.next p\n  | token when Grammar.is_block_expr_start token ->\n    if p.prev_end_pos.pos_lnum < p.start_pos.pos_lnum then ()\n    else\n      Parser.err ~start_pos:p.prev_end_pos ~end_pos:p.end_pos p\n        (Diagnostics.message\n           \"consecutive expressions on a line must be separated by ';' or a \\\n            newline\")\n  | _ -> ()\n\nand 
parse_expr_block_item p =\n  let start_pos = p.Parser.start_pos in\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | Module -> (\n    Parser.next p;\n    match p.token with\n    | Lparen ->\n      let expr = parse_first_class_module_expr ~start_pos p in\n      let a = parse_primary_expr ~operand:expr p in\n      let expr = parse_binary_expr ~a p 1 in\n      parse_ternary_expr expr p\n    | _ ->\n      let name =\n        match p.Parser.token with\n        | Uident ident ->\n          let loc = mk_loc p.start_pos p.end_pos in\n          Parser.next p;\n          Location.mkloc ident loc\n        | t ->\n          Parser.err p (Diagnostics.uident t);\n          Location.mknoloc \"_\"\n      in\n      let body = parse_module_binding_body p in\n      parse_newline_or_semicolon_expr_block p;\n      let expr = parse_expr_block p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Exp.letmodule ~loc name body expr)\n  | Exception ->\n    let extension_constructor = parse_exception_def ~attrs p in\n    parse_newline_or_semicolon_expr_block p;\n    let block_expr = parse_expr_block p in\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Exp.letexception ~loc extension_constructor block_expr\n  | Open ->\n    let od = parse_open_description ~attrs p in\n    parse_newline_or_semicolon_expr_block p;\n    let block_expr = parse_expr_block p in\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Exp.open_ ~loc od.popen_override od.popen_lid block_expr\n  | Let ->\n    let rec_flag, let_bindings = parse_let_bindings ~attrs ~start_pos p in\n    parse_newline_or_semicolon_expr_block p;\n    let next =\n      if Grammar.is_block_expr_start p.Parser.token then parse_expr_block p\n      else\n        let loc = mk_loc p.start_pos p.end_pos in\n        Ast_helper.Exp.construct ~loc\n          (Location.mkloc (Longident.Lident \"()\") loc)\n          None\n    in\n    let loc = mk_loc start_pos p.prev_end_pos in\n    
Ast_helper.Exp.let_ ~loc rec_flag let_bindings next\n  | _ ->\n    let e1 =\n      let expr = parse_expr p in\n      {expr with pexp_attributes = List.concat [attrs; expr.pexp_attributes]}\n    in\n    parse_newline_or_semicolon_expr_block p;\n    if Grammar.is_block_expr_start p.Parser.token then\n      let e2 = parse_expr_block p in\n      let loc = {e1.pexp_loc with loc_end = e2.pexp_loc.loc_end} in\n      Ast_helper.Exp.sequence ~loc e1 e2\n    else e1\n\n(* blockExpr ::= expr\n *            |  expr          ;\n *            |  expr          ; blockExpr\n *            |  module    ... ; blockExpr\n *            |  open      ... ; blockExpr\n *            |  exception ... ; blockExpr\n *            |  let       ...\n *            |  let       ... ;\n *            |  let       ... ; blockExpr\n *\n *  note: semi should be made optional\n *  a block of expression is always\n *)\nand parse_expr_block ?first p =\n  Parser.leave_breadcrumb p Grammar.ExprBlock;\n  let item =\n    match first with\n    | Some e -> e\n    | None -> parse_expr_block_item p\n  in\n  parse_newline_or_semicolon_expr_block p;\n  let block_expr =\n    if Grammar.is_block_expr_start p.Parser.token then\n      let next = parse_expr_block_item p in\n      let loc = {item.pexp_loc with loc_end = next.pexp_loc.loc_end} in\n      Ast_helper.Exp.sequence ~loc item next\n    else item\n  in\n  Parser.eat_breadcrumb p;\n  over_parse_constrained_or_coerced_or_arrow_expression p block_expr\n\nand parse_async_arrow_expression ?(arrow_attrs = []) p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect (Lident \"async\") p;\n  let async_attr = make_async_attr (mk_loc start_pos p.prev_end_pos) in\n  parse_es6_arrow_expression\n    ~arrow_attrs:(async_attr :: arrow_attrs)\n    ~arrow_start_pos:(Some start_pos) p\n\nand parse_await_expression p =\n  let await_loc = mk_loc p.Parser.start_pos p.end_pos in\n  let await_attr = make_await_attr await_loc in\n  Parser.expect Await p;\n  let token_prec = 
Token.precedence MinusGreater in\n  let expr = parse_binary_expr ~context:OrdinaryExpr p token_prec in\n  {\n    expr with\n    pexp_attributes = await_attr :: expr.pexp_attributes;\n    pexp_loc = {expr.pexp_loc with loc_start = await_loc.loc_start};\n  }\n\nand parse_try_expression p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Try p;\n  let expr = parse_expr ~context:WhenExpr p in\n  Parser.expect Res_token.catch p;\n  Parser.expect Lbrace p;\n  let cases = parse_pattern_matching p in\n  Parser.expect Rbrace p;\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Exp.try_ ~loc expr cases\n\nand parse_if_condition p =\n  Parser.leave_breadcrumb p Grammar.IfCondition;\n  (* doesn't make sense to try es6 arrow here? *)\n  let condition_expr = parse_expr ~context:WhenExpr p in\n  Parser.eat_breadcrumb p;\n  condition_expr\n\nand parse_then_branch p =\n  Parser.leave_breadcrumb p IfBranch;\n  Parser.expect Lbrace p;\n  let then_expr = parse_expr_block p in\n  Parser.expect Rbrace p;\n  Parser.eat_breadcrumb p;\n  then_expr\n\nand parse_else_branch p =\n  Parser.expect Lbrace p;\n  let block_expr = parse_expr_block p in\n  Parser.expect Rbrace p;\n  block_expr\n\nand parse_if_expr start_pos p =\n  let condition_expr = parse_if_condition p in\n  let then_expr = parse_then_branch p in\n  let else_expr =\n    match p.Parser.token with\n    | Else ->\n      Parser.end_region p;\n      Parser.leave_breadcrumb p Grammar.ElseBranch;\n      Parser.next p;\n      Parser.begin_region p;\n      let else_expr =\n        match p.token with\n        | If -> parse_if_or_if_let_expression p\n        | _ -> parse_else_branch p\n      in\n      Parser.eat_breadcrumb p;\n      Parser.end_region p;\n      Some else_expr\n    | _ ->\n      Parser.end_region p;\n      None\n  in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Exp.ifthenelse ~loc condition_expr then_expr else_expr\n\nand parse_if_let_expr start_pos p =\n  let pattern = parse_pattern p 
in\n  Parser.expect Equal p;\n  let condition_expr = parse_if_condition p in\n  let then_expr = parse_then_branch p in\n  let else_expr =\n    match p.Parser.token with\n    | Else ->\n      Parser.end_region p;\n      Parser.leave_breadcrumb p Grammar.ElseBranch;\n      Parser.next p;\n      Parser.begin_region p;\n      let else_expr =\n        match p.token with\n        | If -> parse_if_or_if_let_expression p\n        | _ -> parse_else_branch p\n      in\n      Parser.eat_breadcrumb p;\n      Parser.end_region p;\n      else_expr\n    | _ ->\n      Parser.end_region p;\n      let start_pos = p.Parser.start_pos in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Exp.construct ~loc\n        (Location.mkloc (Longident.Lident \"()\") loc)\n        None\n  in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Exp.match_\n    ~attrs:[if_let_attr; suppress_fragile_match_warning_attr]\n    ~loc condition_expr\n    [\n      Ast_helper.Exp.case pattern then_expr;\n      Ast_helper.Exp.case (Ast_helper.Pat.any ()) else_expr;\n    ]\n\nand parse_if_or_if_let_expression p =\n  Parser.begin_region p;\n  Parser.leave_breadcrumb p Grammar.ExprIf;\n  let start_pos = p.Parser.start_pos in\n  Parser.expect If p;\n  let expr =\n    match p.Parser.token with\n    | Let ->\n      Parser.next p;\n      let if_let_expr = parse_if_let_expr start_pos p in\n      Parser.err ~start_pos:if_let_expr.pexp_loc.loc_start\n        ~end_pos:if_let_expr.pexp_loc.loc_end p\n        (Diagnostics.message (ErrorMessages.experimental_if_let if_let_expr));\n      if_let_expr\n    | _ -> parse_if_expr start_pos p\n  in\n  Parser.eat_breadcrumb p;\n  expr\n\nand parse_for_rest has_opening_paren pattern start_pos p =\n  Parser.expect In p;\n  let e1 = parse_expr p in\n  let direction =\n    match p.Parser.token with\n    | Lident \"to\" -> Asttypes.Upto\n    | Lident \"downto\" -> Asttypes.Downto\n    | token ->\n      Parser.err p (Diagnostics.unexpected token 
p.breadcrumbs);\n      Asttypes.Upto\n  in\n  if p.Parser.token = Eof then\n    Parser.err ~start_pos:p.start_pos p\n      (Diagnostics.unexpected p.Parser.token p.breadcrumbs)\n  else Parser.next p;\n  let e2 = parse_expr ~context:WhenExpr p in\n  if has_opening_paren then Parser.expect Rparen p;\n  Parser.expect Lbrace p;\n  let body_expr = parse_expr_block p in\n  Parser.expect Rbrace p;\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Exp.for_ ~loc pattern e1 e2 direction body_expr\n\nand parse_for_expression p =\n  let start_pos = p.Parser.start_pos in\n  Parser.leave_breadcrumb p Grammar.ExprFor;\n  Parser.expect For p;\n  Parser.begin_region p;\n  let for_expr =\n    match p.token with\n    | Lparen -> (\n      let lparen = p.start_pos in\n      Parser.next p;\n      match p.token with\n      | Rparen ->\n        Parser.next p;\n        let unit_pattern =\n          let loc = mk_loc lparen p.prev_end_pos in\n          let lid = Location.mkloc (Longident.Lident \"()\") loc in\n          Ast_helper.Pat.construct lid None\n        in\n        parse_for_rest false\n          (parse_alias_pattern ~attrs:[] unit_pattern p)\n          start_pos p\n      | _ -> (\n        Parser.leave_breadcrumb p Grammar.Pattern;\n        let pat = parse_pattern p in\n        Parser.eat_breadcrumb p;\n        match p.token with\n        | Comma ->\n          Parser.next p;\n          let tuple_pattern =\n            parse_tuple_pattern ~attrs:[] ~start_pos:lparen ~first:pat p\n          in\n          let pattern = parse_alias_pattern ~attrs:[] tuple_pattern p in\n          parse_for_rest false pattern start_pos p\n        | _ -> parse_for_rest true pat start_pos p))\n    | _ ->\n      Parser.leave_breadcrumb p Grammar.Pattern;\n      let pat = parse_pattern p in\n      Parser.eat_breadcrumb p;\n      parse_for_rest false pat start_pos p\n  in\n  Parser.eat_breadcrumb p;\n  Parser.end_region p;\n  for_expr\n\nand parse_while_expression p =\n  let start_pos = 
p.Parser.start_pos in\n  Parser.expect While p;\n  let expr1 = parse_expr ~context:WhenExpr p in\n  Parser.expect Lbrace p;\n  let expr2 = parse_expr_block p in\n  Parser.expect Rbrace p;\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Exp.while_ ~loc expr1 expr2\n\nand parse_pattern_guard p =\n  match p.Parser.token with\n  | When | If ->\n    Parser.next p;\n    Some (parse_expr ~context:WhenExpr p)\n  | _ -> None\n\nand parse_pattern_match_case p =\n  Parser.begin_region p;\n  Parser.leave_breadcrumb p Grammar.PatternMatchCase;\n  match p.Parser.token with\n  | Token.Bar ->\n    Parser.next p;\n    Parser.leave_breadcrumb p Grammar.Pattern;\n    let lhs = parse_pattern p in\n    Parser.eat_breadcrumb p;\n    let guard = parse_pattern_guard p in\n    let () =\n      match p.token with\n      | EqualGreater -> Parser.next p\n      | _ -> Recover.recover_equal_greater p\n    in\n    let rhs = parse_expr_block p in\n    Parser.end_region p;\n    Parser.eat_breadcrumb p;\n    Some (Ast_helper.Exp.case lhs ?guard rhs)\n  | _ ->\n    Parser.end_region p;\n    Parser.eat_breadcrumb p;\n    None\n\nand parse_pattern_matching p =\n  let cases =\n    parse_delimited_region ~grammar:Grammar.PatternMatching ~closing:Rbrace\n      ~f:parse_pattern_match_case p\n  in\n  let () =\n    match cases with\n    | [] ->\n      Parser.err ~start_pos:p.prev_end_pos p\n        (Diagnostics.message \"Pattern matching needs at least one case\")\n    | _ -> ()\n  in\n  cases\n\nand parse_switch_expression p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Switch p;\n  let switch_expr = parse_expr ~context:WhenExpr p in\n  Parser.expect Lbrace p;\n  let cases = parse_pattern_matching p in\n  Parser.expect Rbrace p;\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Exp.match_ ~loc switch_expr cases\n\n(*\n * argument ::=\n *   | _                            (* syntax sugar *)\n *   | expr\n *   | expr : type\n *   | ~ label-name\n *   | ~ label-name\n *   
| ~ label-name ?\n *   | ~ label-name =   expr\n *   | ~ label-name =   _           (* syntax sugar *)\n *   | ~ label-name =   expr : type\n *   | ~ label-name = ? expr\n *   | ~ label-name = ? _           (* syntax sugar *)\n *   | ~ label-name = ? expr : type\n *\n *  dotted_argument ::=\n *   | . argument\n *)\nand parse_argument p : argument option =\n  if\n    p.Parser.token = Token.Tilde\n    || p.token = Dot || p.token = Underscore\n    || Grammar.is_expr_start p.token\n  then\n    match p.Parser.token with\n    | Dot -> (\n      let dotted = true in\n      Parser.next p;\n      match p.token with\n      (* apply(.) *)\n      | Rparen ->\n        let unit_expr =\n          Ast_helper.Exp.construct\n            (Location.mknoloc (Longident.Lident \"()\"))\n            None\n        in\n        Some {dotted; label = Asttypes.Nolabel; expr = unit_expr}\n      | _ -> parse_argument2 p ~dotted)\n    | _ -> parse_argument2 p ~dotted:false\n  else None\n\nand parse_argument2 p ~dotted : argument option =\n  match p.Parser.token with\n  (* foo(_), do not confuse with foo(_ => x), TODO: performance *)\n  | Underscore when not (is_es6_arrow_expression ~in_ternary:false p) ->\n    let loc = mk_loc p.start_pos p.end_pos in\n    Parser.next p;\n    let expr =\n      Ast_helper.Exp.ident ~loc (Location.mkloc (Longident.Lident \"_\") loc)\n    in\n    Some {dotted; label = Nolabel; expr}\n  | Tilde -> (\n    Parser.next p;\n    (* TODO: nesting of pattern matches not intuitive for error recovery *)\n    match p.Parser.token with\n    | Lident ident -> (\n      let start_pos = p.start_pos in\n      Parser.next p;\n      let end_pos = p.prev_end_pos in\n      let loc = mk_loc start_pos end_pos in\n      let prop_loc_attr =\n        (Location.mkloc \"res.namedArgLoc\" loc, Parsetree.PStr [])\n      in\n      let ident_expr =\n        Ast_helper.Exp.ident ~attrs:[prop_loc_attr] ~loc\n          (Location.mkloc (Longident.Lident ident) loc)\n      in\n      match p.Parser.token 
with\n      | Question ->\n        Parser.next p;\n        Some {dotted; label = Optional ident; expr = ident_expr}\n      | Equal ->\n        Parser.next p;\n        let label =\n          match p.Parser.token with\n          | Question ->\n            Parser.next p;\n            Asttypes.Optional ident\n          | _ -> Labelled ident\n        in\n        let expr =\n          match p.Parser.token with\n          | Underscore when not (is_es6_arrow_expression ~in_ternary:false p) ->\n            let loc = mk_loc p.start_pos p.end_pos in\n            Parser.next p;\n            Ast_helper.Exp.ident ~loc\n              (Location.mkloc (Longident.Lident \"_\") loc)\n          | _ ->\n            let expr = parse_constrained_or_coerced_expr p in\n            {expr with pexp_attributes = prop_loc_attr :: expr.pexp_attributes}\n        in\n        Some {dotted; label; expr}\n      | Colon ->\n        Parser.next p;\n        let typ = parse_typ_expr p in\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let expr =\n          Ast_helper.Exp.constraint_ ~attrs:[prop_loc_attr] ~loc ident_expr typ\n        in\n        Some {dotted; label = Labelled ident; expr}\n      | _ -> Some {dotted; label = Labelled ident; expr = ident_expr})\n    | t ->\n      Parser.err p (Diagnostics.lident t);\n      Some {dotted; label = Nolabel; expr = Recover.default_expr ()})\n  | _ ->\n    Some {dotted; label = Nolabel; expr = parse_constrained_or_coerced_expr p}\n\nand parse_call_expr p fun_expr =\n  Parser.expect Lparen p;\n  let start_pos = p.Parser.start_pos in\n  Parser.leave_breadcrumb p Grammar.ExprCall;\n  let args =\n    parse_comma_delimited_region ~grammar:Grammar.ArgumentList ~closing:Rparen\n      ~f:parse_argument p\n  in\n  let res_partial_attr =\n    let loc = mk_loc start_pos p.prev_end_pos in\n    (Location.mkloc \"res.partial\" loc, Parsetree.PStr [])\n  in\n  let is_partial =\n    match p.token with\n    | DotDotDot when args <> [] ->\n      Parser.next p;\n   
   true\n    | _ -> false\n  in\n  Parser.expect Rparen p;\n  let args =\n    match args with\n    | [] ->\n      let loc = mk_loc start_pos p.prev_end_pos in\n      (* No args -> unit sugar: `foo()` *)\n      [\n        {\n          dotted = false;\n          label = Nolabel;\n          expr =\n            Ast_helper.Exp.construct ~loc\n              (Location.mkloc (Longident.Lident \"()\") loc)\n              None;\n        };\n      ]\n    | [\n     {\n       dotted = true;\n       label = Nolabel;\n       expr =\n         {\n           pexp_desc = Pexp_construct ({txt = Longident.Lident \"()\"}, None);\n           pexp_loc = loc;\n           pexp_attributes = [];\n         } as expr;\n     };\n    ]\n      when (not loc.loc_ghost) && p.mode = ParseForTypeChecker && not is_partial\n      ->\n      (*  Since there is no syntax space for arity zero vs arity one,\n       *  we expand\n       *    `fn(. ())` into\n       *    `fn(. {let __res_unit = (); __res_unit})`\n       *  when the parsetree is intended for type checking\n       *\n       *  Note:\n       *    `fn(.)` is treated as zero arity application.\n       *  The invisible unit expression here has loc_ghost === true\n       *\n       *  Related: https://github.com/rescript-lang/syntax/issues/138\n       *)\n      [\n        {\n          dotted = true;\n          label = Nolabel;\n          expr =\n            Ast_helper.Exp.let_ Asttypes.Nonrecursive\n              [\n                Ast_helper.Vb.mk\n                  (Ast_helper.Pat.var (Location.mknoloc \"__res_unit\"))\n                  expr;\n              ]\n              (Ast_helper.Exp.ident\n                 (Location.mknoloc (Longident.Lident \"__res_unit\")));\n        };\n      ]\n    | args -> args\n  in\n  let loc = {fun_expr.pexp_loc with loc_end = p.prev_end_pos} in\n  let args =\n    match args with\n    | {dotted = d; label = lbl; expr} :: args ->\n      let group (grp, acc) {dotted; label = lbl; expr} =\n        let _d, grp = grp 
in\n        if dotted == true then ((true, [(lbl, expr)]), (_d, List.rev grp) :: acc)\n        else ((_d, (lbl, expr) :: grp), acc)\n      in\n      let (_d, grp), acc = List.fold_left group ((d, [(lbl, expr)]), []) args in\n      List.rev ((_d, List.rev grp) :: acc)\n    | [] -> []\n  in\n  let apply =\n    Ext_list.fold_left args fun_expr (fun call_body group ->\n        let dotted, args = group in\n        let args, wrap = process_underscore_application p args in\n        let exp =\n          let uncurried =\n            p.uncurried_config |> Res_uncurried.from_dotted ~dotted\n          in\n          let attrs = if uncurried then [uncurried_app_attr] else [] in\n          let attrs = if is_partial then res_partial_attr :: attrs else attrs in\n          Ast_helper.Exp.apply ~loc ~attrs call_body args\n        in\n        wrap exp)\n  in\n\n  Parser.eat_breadcrumb p;\n  apply\n\nand parse_value_or_constructor p =\n  let start_pos = p.Parser.start_pos in\n  let rec aux p acc =\n    match p.Parser.token with\n    | Uident ident -> (\n      let end_pos_lident = p.end_pos in\n      Parser.next p;\n      match p.Parser.token with\n      | Dot ->\n        Parser.next p;\n        aux p (ident :: acc)\n      | Lparen when p.prev_end_pos.pos_lnum == p.start_pos.pos_lnum ->\n        let lparen = p.start_pos in\n        let args = parse_constructor_args p in\n        let rparen = p.prev_end_pos in\n        let lident = build_longident (ident :: acc) in\n        let tail =\n          match args with\n          | [] -> None\n          | [({Parsetree.pexp_desc = Pexp_tuple _} as arg)] as args ->\n            let loc = mk_loc lparen rparen in\n            if p.mode = ParseForTypeChecker then\n              (* Some(1, 2) for type-checker *)\n              Some arg\n            else\n              (* Some((1, 2)) for printer *)\n              Some (Ast_helper.Exp.tuple ~loc args)\n          | [arg] -> Some arg\n          | args ->\n            let loc = mk_loc lparen rparen in\n   
         Some (Ast_helper.Exp.tuple ~loc args)\n        in\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let ident_loc = mk_loc start_pos end_pos_lident in\n        Ast_helper.Exp.construct ~loc (Location.mkloc lident ident_loc) tail\n      | _ ->\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let lident = build_longident (ident :: acc) in\n        Ast_helper.Exp.construct ~loc (Location.mkloc lident loc) None)\n    | Lident ident ->\n      Parser.next p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      let lident = build_longident (ident :: acc) in\n      Ast_helper.Exp.ident ~loc (Location.mkloc lident loc)\n    | token ->\n      if acc = [] then (\n        Parser.next_unsafe p;\n        Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n        Recover.default_expr ())\n      else\n        let loc = mk_loc start_pos p.prev_end_pos in\n        Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n        let lident = build_longident (\"_\" :: acc) in\n        Ast_helper.Exp.ident ~loc (Location.mkloc lident loc)\n  in\n  aux p []\n\nand parse_poly_variant_expr p =\n  let start_pos = p.start_pos in\n  let ident, _loc = parse_hash_ident ~start_pos p in\n  match p.Parser.token with\n  | Lparen when p.prev_end_pos.pos_lnum == p.start_pos.pos_lnum ->\n    let lparen = p.start_pos in\n    let args = parse_constructor_args p in\n    let rparen = p.prev_end_pos in\n    let loc_paren = mk_loc lparen rparen in\n    let tail =\n      match args with\n      | [] -> None\n      | [({Parsetree.pexp_desc = Pexp_tuple _} as expr)] as args ->\n        if p.mode = ParseForTypeChecker then\n          (* #a(1, 2) for type-checker *)\n          Some expr\n        else\n          (* #a((1, 2)) for type-checker *)\n          Some (Ast_helper.Exp.tuple ~loc:loc_paren args)\n      | [arg] -> Some arg\n      | args ->\n        (* #a((1, 2)) for printer *)\n        Some (Ast_helper.Exp.tuple ~loc:loc_paren args)\n    in\n    let loc 
= mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Exp.variant ~loc ident tail\n  | _ ->\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Exp.variant ~loc ident None\n\nand parse_constructor_args p =\n  let lparen = p.Parser.start_pos in\n  Parser.expect Lparen p;\n  let args =\n    parse_comma_delimited_region ~grammar:Grammar.ExprList\n      ~f:parse_constrained_expr_region ~closing:Rparen p\n  in\n  Parser.expect Rparen p;\n  match args with\n  | [] ->\n    let loc = mk_loc lparen p.prev_end_pos in\n    [\n      Ast_helper.Exp.construct ~loc\n        (Location.mkloc (Longident.Lident \"()\") loc)\n        None;\n    ]\n  | args -> args\n\nand parse_tuple_expr ~first ~start_pos p =\n  let exprs =\n    first\n    :: parse_comma_delimited_region p ~grammar:Grammar.ExprList ~closing:Rparen\n         ~f:parse_constrained_expr_region\n  in\n  Parser.expect Rparen p;\n  let () =\n    match exprs with\n    | [_] ->\n      Parser.err ~start_pos ~end_pos:p.prev_end_pos p\n        (Diagnostics.message ErrorMessages.tuple_single_element)\n    | _ -> ()\n  in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Exp.tuple ~loc exprs\n\nand parse_spread_expr_region_with_loc p =\n  let start_pos = p.Parser.prev_end_pos in\n  match p.Parser.token with\n  | DotDotDot ->\n    Parser.next p;\n    let expr = parse_constrained_or_coerced_expr p in\n    Some (true, expr, start_pos, p.prev_end_pos)\n  | token when Grammar.is_expr_start token ->\n    Some (false, parse_constrained_or_coerced_expr p, start_pos, p.prev_end_pos)\n  | _ -> None\n\nand parse_list_expr ~start_pos p =\n  let split_by_spread exprs =\n    List.fold_left\n      (fun acc curr ->\n        match (curr, acc) with\n        | (true, expr, start_pos, end_pos), _ ->\n          (* find a spread expression, prepend a new sublist *)\n          ([], Some expr, start_pos, end_pos) :: acc\n        | ( (false, expr, start_pos, _endPos),\n            (no_spreads, spread, _accStartPos, acc_end_pos) :: 
acc ) ->\n          (* find a non-spread expression, and the accumulated is not empty,\n           * prepend to the first sublist, and update the loc of the first sublist *)\n          (expr :: no_spreads, spread, start_pos, acc_end_pos) :: acc\n        | (false, expr, start_pos, end_pos), [] ->\n          (* find a non-spread expression, and the accumulated is empty *)\n          [([expr], None, start_pos, end_pos)])\n      [] exprs\n  in\n  let make_sub_expr = function\n    | exprs, Some spread, start_pos, end_pos ->\n      make_list_expression (mk_loc start_pos end_pos) exprs (Some spread)\n    | exprs, None, start_pos, end_pos ->\n      make_list_expression (mk_loc start_pos end_pos) exprs None\n  in\n  let list_exprs_rev =\n    parse_comma_delimited_reversed_list p ~grammar:Grammar.ListExpr\n      ~closing:Rbrace ~f:parse_spread_expr_region_with_loc\n  in\n  Parser.expect Rbrace p;\n  let loc = mk_loc start_pos p.prev_end_pos in\n  match split_by_spread list_exprs_rev with\n  | [] -> make_list_expression loc [] None\n  | [(exprs, Some spread, _, _)] -> make_list_expression loc exprs (Some spread)\n  | [(exprs, None, _, _)] -> make_list_expression loc exprs None\n  | exprs ->\n    let list_exprs = List.map make_sub_expr exprs in\n    Ast_helper.Exp.apply ~loc\n      (Ast_helper.Exp.ident ~loc ~attrs:[spread_attr]\n         (Location.mkloc\n            (Longident.Ldot\n               (Longident.Ldot (Longident.Lident \"Belt\", \"List\"), \"concatMany\"))\n            loc))\n      [(Asttypes.Nolabel, Ast_helper.Exp.array ~loc list_exprs)]\n\nand parse_array_exp p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Lbracket p;\n  let split_by_spread exprs =\n    List.fold_left\n      (fun acc curr ->\n        match (curr, acc) with\n        | (true, expr, start_pos, end_pos), _ ->\n          (* find a spread expression, prepend a new sublist *)\n          ([], Some expr, start_pos, end_pos) :: acc\n        | ( (false, expr, start_pos, _endPos),\n           
 (no_spreads, spread, _accStartPos, acc_end_pos) :: acc ) ->\n          (* find a non-spread expression, and the accumulated is not empty,\n           * prepend to the first sublist, and update the loc of the first sublist *)\n          (expr :: no_spreads, spread, start_pos, acc_end_pos) :: acc\n        | (false, expr, start_pos, end_pos), [] ->\n          (* find a non-spread expression, and the accumulated is empty *)\n          [([expr], None, start_pos, end_pos)])\n      [] exprs\n  in\n  let list_exprs_rev =\n    parse_comma_delimited_reversed_list p ~grammar:Grammar.ExprList\n      ~closing:Rbracket ~f:parse_spread_expr_region_with_loc\n  in\n  Parser.expect Rbracket p;\n  let loc = mk_loc start_pos p.prev_end_pos in\n  let collect_exprs = function\n    | [], Some spread, _startPos, _endPos -> [spread]\n    | exprs, Some spread, _startPos, _endPos ->\n      let els = Ast_helper.Exp.array ~loc exprs in\n      [els; spread]\n    | exprs, None, _startPos, _endPos ->\n      let els = Ast_helper.Exp.array ~loc exprs in\n      [els]\n  in\n  match split_by_spread list_exprs_rev with\n  | [] -> Ast_helper.Exp.array ~loc:(mk_loc start_pos p.prev_end_pos) []\n  | [(exprs, None, _, _)] ->\n    Ast_helper.Exp.array ~loc:(mk_loc start_pos p.prev_end_pos) exprs\n  | exprs ->\n    let xs = List.map collect_exprs exprs in\n    let list_exprs =\n      List.fold_right\n        (fun exprs1 acc ->\n          List.fold_right (fun expr1 acc1 -> expr1 :: acc1) exprs1 acc)\n        xs []\n    in\n    Ast_helper.Exp.apply ~loc\n      (Ast_helper.Exp.ident ~loc ~attrs:[spread_attr]\n         (Location.mkloc\n            (Longident.Ldot\n               (Longident.Ldot (Longident.Lident \"Belt\", \"Array\"), \"concatMany\"))\n            loc))\n      [(Asttypes.Nolabel, Ast_helper.Exp.array ~loc list_exprs)]\n\n(* TODO: check attributes in the case of poly type vars,\n * might be context dependend: parseFieldDeclaration (see ocaml) *)\nand parse_poly_type_expr p =\n  let start_pos = 
p.Parser.start_pos in\n  match p.Parser.token with\n  | SingleQuote -> (\n    let vars = parse_type_var_list p in\n    match vars with\n    | _v1 :: _v2 :: _ ->\n      Parser.expect Dot p;\n      let typ = parse_typ_expr p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Typ.poly ~loc vars typ\n    | [var] -> (\n      match p.Parser.token with\n      | Dot ->\n        Parser.next p;\n        let typ = parse_typ_expr p in\n        let loc = mk_loc start_pos p.prev_end_pos in\n        Ast_helper.Typ.poly ~loc vars typ\n      | EqualGreater ->\n        Parser.next p;\n        let typ = Ast_helper.Typ.var ~loc:var.loc var.txt in\n        let return_type = parse_typ_expr ~alias:false p in\n        let loc = mk_loc typ.Parsetree.ptyp_loc.loc_start p.prev_end_pos in\n        let t_fun =\n          Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ return_type\n        in\n        if p.uncurried_config = Legacy then t_fun\n        else Ast_uncurried.uncurried_type ~loc ~arity:1 t_fun\n      | _ -> Ast_helper.Typ.var ~loc:var.loc var.txt)\n    | _ -> assert false)\n  | _ -> parse_typ_expr p\n\n(* 'a 'b 'c *)\nand parse_type_var_list p =\n  let rec loop p vars =\n    match p.Parser.token with\n    | SingleQuote ->\n      Parser.next p;\n      let lident, loc = parse_lident p in\n      let var = Location.mkloc lident loc in\n      loop p (var :: vars)\n    | _ -> List.rev vars\n  in\n  loop p []\n\nand parse_lident_list p =\n  let rec loop p ls =\n    match p.Parser.token with\n    | Lident lident ->\n      let loc = mk_loc p.start_pos p.end_pos in\n      Parser.next p;\n      loop p (Location.mkloc lident loc :: ls)\n    | _ -> List.rev ls\n  in\n  loop p []\n\nand parse_atomic_typ_expr ~attrs p =\n  Parser.leave_breadcrumb p Grammar.AtomicTypExpr;\n  let start_pos = p.Parser.start_pos in\n  let typ =\n    match p.Parser.token with\n    | SingleQuote ->\n      Parser.next p;\n      let ident, loc =\n        if p.Parser.token = Eof then (\n          
Parser.err ~start_pos:p.start_pos p\n            (Diagnostics.unexpected p.Parser.token p.breadcrumbs);\n          (\"\", mk_loc p.start_pos p.prev_end_pos))\n        else parse_ident ~msg:ErrorMessages.type_var ~start_pos:p.start_pos p\n      in\n      Ast_helper.Typ.var ~loc ~attrs ident\n    | Underscore ->\n      let end_pos = p.end_pos in\n      Parser.next p;\n      Ast_helper.Typ.any ~loc:(mk_loc start_pos end_pos) ~attrs ()\n    | Lparen -> (\n      Parser.next p;\n      match p.Parser.token with\n      | Rparen ->\n        Parser.next p;\n        let loc = mk_loc start_pos p.prev_end_pos in\n        let unit_constr = Location.mkloc (Longident.Lident \"unit\") loc in\n        Ast_helper.Typ.constr ~attrs unit_constr []\n      | _ -> (\n        let t = parse_typ_expr p in\n        match p.token with\n        | Comma ->\n          Parser.next p;\n          parse_tuple_type ~attrs ~first:t ~start_pos p\n        | _ ->\n          Parser.expect Rparen p;\n          {\n            t with\n            ptyp_loc = mk_loc start_pos p.prev_end_pos;\n            ptyp_attributes = List.concat [attrs; t.ptyp_attributes];\n          }))\n    | Lbracket -> parse_polymorphic_variant_type ~attrs p\n    | Uident _ | Lident _ ->\n      let constr = parse_value_path p in\n      let args = parse_type_constructor_args ~constr_name:constr p in\n      Ast_helper.Typ.constr\n        ~loc:(mk_loc start_pos p.prev_end_pos)\n        ~attrs constr args\n    | Module ->\n      Parser.next p;\n      Parser.expect Lparen p;\n      let package_type = parse_package_type ~start_pos ~attrs p in\n      Parser.expect Rparen p;\n      {package_type with ptyp_loc = mk_loc start_pos p.prev_end_pos}\n    | Percent ->\n      let extension = parse_extension p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Typ.extension ~attrs ~loc extension\n    | Lbrace -> parse_record_or_object_type ~attrs p\n    | Eof ->\n      Parser.err p (Diagnostics.unexpected p.Parser.token 
p.breadcrumbs);\n      Recover.default_type ()\n    | token -> (\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      match\n        skip_tokens_and_maybe_retry p\n          ~is_start_of_grammar:Grammar.is_atomic_typ_expr_start\n      with\n      | Some () -> parse_atomic_typ_expr ~attrs p\n      | None ->\n        Parser.err ~start_pos:p.prev_end_pos p\n          (Diagnostics.unexpected token p.breadcrumbs);\n        Recover.default_type ())\n  in\n  Parser.eat_breadcrumb p;\n  typ\n\n(* package-type\t::=\n    | modtype-path\n    ∣ modtype-path with package-constraint  { and package-constraint }\n*)\nand parse_package_type ~start_pos ~attrs p =\n  let mod_type_path = parse_module_long_ident ~lowercase:true p in\n  match p.Parser.token with\n  | Lident \"with\" ->\n    Parser.next p;\n    let constraints = parse_package_constraints p in\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Typ.package ~loc ~attrs mod_type_path constraints\n  | _ ->\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Typ.package ~loc ~attrs mod_type_path []\n\n(* package-constraint  { and package-constraint } *)\nand parse_package_constraints p =\n  let first =\n    Parser.expect Typ p;\n    let type_constr = parse_value_path p in\n    Parser.expect Equal p;\n    let typ = parse_typ_expr p in\n    (type_constr, typ)\n  in\n  let rest =\n    parse_region ~grammar:Grammar.PackageConstraint ~f:parse_package_constraint\n      p\n  in\n  first :: rest\n\n(* and type typeconstr = typexpr *)\nand parse_package_constraint p =\n  match p.Parser.token with\n  | And ->\n    Parser.next p;\n    Parser.expect Typ p;\n    let type_constr = parse_value_path p in\n    Parser.expect Equal p;\n    let typ = parse_typ_expr p in\n    Some (type_constr, typ)\n  | _ -> None\n\nand parse_record_or_object_type ~attrs p =\n  (* for inline record in constructor *)\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Lbrace p;\n  let closed_flag =\n    match 
p.token with\n    | DotDot ->\n      Parser.next p;\n      Asttypes.Open\n    | Dot ->\n      Parser.next p;\n      Asttypes.Closed\n    | _ -> Asttypes.Closed\n  in\n  let () =\n    match p.token with\n    | Lident _ ->\n      Parser.err p\n        (Diagnostics.message ErrorMessages.forbidden_inline_record_declaration)\n    | _ -> ()\n  in\n  let fields =\n    parse_comma_delimited_region ~grammar:Grammar.StringFieldDeclarations\n      ~closing:Rbrace ~f:parse_string_field_declaration p\n  in\n  Parser.expect Rbrace p;\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Typ.object_ ~loc ~attrs fields closed_flag\n\n(* TODO: check associativity in combination with attributes *)\nand parse_type_alias p typ =\n  match p.Parser.token with\n  | As ->\n    Parser.next p;\n    Parser.expect SingleQuote p;\n    let ident, _loc = parse_lident p in\n    (* TODO: how do we parse attributes here? *)\n    Ast_helper.Typ.alias\n      ~loc:(mk_loc typ.Parsetree.ptyp_loc.loc_start p.prev_end_pos)\n      typ ident\n  | _ -> typ\n\n(* type_parameter ::=\n *  | type_expr\n *  | ~ident: type_expr\n *  | ~ident: type_expr=?\n *\n * note:\n *  | attrs ~ident: type_expr    -> attrs are on the arrow\n *  | attrs type_expr            -> attrs are here part of the type_expr\n *\n * dotted_type_parameter ::=\n *  | . 
type_parameter\n *)\nand parse_type_parameter p =\n  let doc_attr : Parsetree.attributes =\n    match p.Parser.token with\n    | DocComment (loc, s) ->\n      Parser.next p;\n      [doc_comment_to_attribute loc s]\n    | _ -> []\n  in\n  if\n    p.Parser.token = Token.Tilde\n    || p.token = Dot\n    || Grammar.is_typ_expr_start p.token\n  then\n    let start_pos = p.Parser.start_pos in\n    let dotted = Parser.optional p Dot in\n    let attrs = doc_attr @ parse_attributes p in\n    match p.Parser.token with\n    | Tilde -> (\n      Parser.next p;\n      let name, loc = parse_lident p in\n      let lbl_loc_attr =\n        (Location.mkloc \"res.namedArgLoc\" loc, Parsetree.PStr [])\n      in\n      Parser.expect ~grammar:Grammar.TypeExpression Colon p;\n      let typ =\n        let typ = parse_typ_expr p in\n        {typ with ptyp_attributes = lbl_loc_attr :: typ.ptyp_attributes}\n      in\n      match p.Parser.token with\n      | Equal ->\n        Parser.next p;\n        Parser.expect Question p;\n        Some {dotted; attrs; label = Optional name; typ; start_pos}\n      | _ -> Some {dotted; attrs; label = Labelled name; typ; start_pos})\n    | Lident _ -> (\n      let name, loc = parse_lident p in\n      match p.token with\n      | Colon -> (\n        let () =\n          let error =\n            Diagnostics.message\n              (ErrorMessages.missing_tilde_labeled_parameter name)\n          in\n          Parser.err ~start_pos:loc.loc_start ~end_pos:loc.loc_end p error\n        in\n        Parser.next p;\n        let typ = parse_typ_expr p in\n        match p.Parser.token with\n        | Equal ->\n          Parser.next p;\n          Parser.expect Question p;\n          Some {dotted; attrs; label = Optional name; typ; start_pos}\n        | _ -> Some {dotted; attrs; label = Labelled name; typ; start_pos})\n      | _ ->\n        let constr = Location.mkloc (Longident.Lident name) loc in\n        let args = parse_type_constructor_args ~constr_name:constr p in\n       
 let typ =\n          Ast_helper.Typ.constr\n            ~loc:(mk_loc start_pos p.prev_end_pos)\n            ~attrs constr args\n        in\n\n        let typ = parse_arrow_type_rest ~es6_arrow:true ~start_pos typ p in\n        let typ = parse_type_alias p typ in\n        Some {dotted; attrs = []; label = Nolabel; typ; start_pos})\n    | _ ->\n      let typ = parse_typ_expr p in\n      let typ_with_attributes =\n        {typ with ptyp_attributes = List.concat [attrs; typ.ptyp_attributes]}\n      in\n      Some\n        {\n          dotted;\n          attrs = [];\n          label = Nolabel;\n          typ = typ_with_attributes;\n          start_pos;\n        }\n  else None\n\n(* (int, ~x:string, float) *)\nand parse_type_parameters p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Lparen p;\n  match p.Parser.token with\n  | Rparen ->\n    Parser.next p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    let unit_constr = Location.mkloc (Longident.Lident \"unit\") loc in\n    let typ = Ast_helper.Typ.constr unit_constr [] in\n    [{dotted = false; attrs = []; label = Nolabel; typ; start_pos}]\n  | _ ->\n    let params =\n      parse_comma_delimited_region ~grammar:Grammar.TypeParameters\n        ~closing:Rparen ~f:parse_type_parameter p\n    in\n    Parser.expect Rparen p;\n    params\n\nand parse_es6_arrow_type ~attrs p =\n  let start_pos = p.Parser.start_pos in\n  match p.Parser.token with\n  | Tilde ->\n    Parser.next p;\n    let name, loc = parse_lident p in\n    let lbl_loc_attr =\n      (Location.mkloc \"res.namedArgLoc\" loc, Parsetree.PStr [])\n    in\n    Parser.expect ~grammar:Grammar.TypeExpression Colon p;\n    let typ =\n      let typ = parse_typ_expr ~alias:false ~es6_arrow:false p in\n      {typ with ptyp_attributes = lbl_loc_attr :: typ.ptyp_attributes}\n    in\n    let arg =\n      match p.Parser.token with\n      | Equal ->\n        Parser.next p;\n        Parser.expect Question p;\n        Asttypes.Optional name\n      | _ -> 
Asttypes.Labelled name\n    in\n    Parser.expect EqualGreater p;\n    let return_type = parse_typ_expr ~alias:false p in\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Typ.arrow ~loc ~attrs arg typ return_type\n  | DocComment _ -> assert false\n  | _ ->\n    let parameters = parse_type_parameters p in\n    Parser.expect EqualGreater p;\n    let return_type = parse_typ_expr ~alias:false p in\n    let end_pos = p.prev_end_pos in\n    let return_type_arity =\n      match parameters with\n      | _ when p.uncurried_config <> Legacy -> 0\n      | _ ->\n        if parameters |> List.exists (function {dotted; typ = _} -> dotted)\n        then 0\n        else\n          let _, args, _ = Res_parsetree_viewer.arrow_type return_type in\n          List.length args\n    in\n    let _paramNum, typ, _arity =\n      List.fold_right\n        (fun {dotted; attrs; label = arg_lbl; typ; start_pos}\n             (param_num, t, arity) ->\n          let uncurried =\n            p.uncurried_config |> Res_uncurried.from_dotted ~dotted\n          in\n          let loc = mk_loc start_pos end_pos in\n          let arity =\n            (* Workaround for ~lbl: @as(json`false`) _, which changes the arity *)\n            match arg_lbl with\n            | Labelled _s ->\n              let typ_is_any =\n                match typ.ptyp_desc with\n                | Ptyp_any -> true\n                | _ -> false\n              in\n              let has_as =\n                Ext_list.exists typ.ptyp_attributes (fun (x, _) -> x.txt = \"as\")\n              in\n              if !InExternal.status && typ_is_any && has_as then arity - 1\n              else arity\n            | _ -> arity\n          in\n          let t_arg = Ast_helper.Typ.arrow ~loc ~attrs arg_lbl typ t in\n          if uncurried && (param_num = 1 || p.uncurried_config = Legacy) then\n            (param_num - 1, Ast_uncurried.uncurried_type ~loc ~arity t_arg, 1)\n          else (param_num - 1, t_arg, arity + 1))\n        
parameters\n        (List.length parameters, return_type, return_type_arity + 1)\n    in\n    {\n      typ with\n      ptyp_attributes = List.concat [typ.ptyp_attributes; attrs];\n      ptyp_loc = mk_loc start_pos p.prev_end_pos;\n    }\n\n(*\n * typexpr ::=\n *  | 'ident\n *  | _\n *  | (typexpr)\n *  | typexpr => typexpr            --> es6 arrow\n *  | (typexpr, typexpr) => typexpr --> es6 arrow\n *  | /typexpr, typexpr, typexpr/  --> tuple\n *  | typeconstr\n *  | typeconstr<typexpr>\n *  | typeconstr<typexpr, typexpr,>\n *  | typexpr as 'ident\n *  | %attr-id                      --> extension\n *  | %attr-id(payload)             --> extension\n *\n * typeconstr ::=\n *  | lident\n *  | uident.lident\n *  | uident.uident.lident     --> long module path\n *)\nand parse_typ_expr ?attrs ?(es6_arrow = true) ?(alias = true) p =\n  (* Parser.leaveBreadcrumb p Grammar.TypeExpression; *)\n  let start_pos = p.Parser.start_pos in\n  let attrs =\n    match attrs with\n    | Some attrs -> attrs\n    | None -> parse_attributes p\n  in\n  let typ =\n    if es6_arrow && is_es6_arrow_type p then parse_es6_arrow_type ~attrs p\n    else\n      let typ = parse_atomic_typ_expr ~attrs p in\n      parse_arrow_type_rest ~es6_arrow ~start_pos typ p\n  in\n  let typ = if alias then parse_type_alias p typ else typ in\n  (* Parser.eatBreadcrumb p; *)\n  typ\n\nand parse_arrow_type_rest ~es6_arrow ~start_pos typ p =\n  match p.Parser.token with\n  | (EqualGreater | MinusGreater) as token when es6_arrow == true ->\n    (* error recovery *)\n    if token = MinusGreater then Parser.expect EqualGreater p;\n    Parser.next p;\n    let return_type = parse_typ_expr ~alias:false p in\n    let loc = mk_loc start_pos p.prev_end_pos in\n    let arrow_typ =\n      Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ return_type\n    in\n    if p.uncurried_config = Legacy then arrow_typ\n    else Ast_uncurried.uncurried_type ~loc ~arity:1 arrow_typ\n  | _ -> typ\n\nand parse_typ_expr_region p =\n  if 
Grammar.is_typ_expr_start p.Parser.token then Some (parse_typ_expr p)\n  else None\n\nand parse_tuple_type ~attrs ~first ~start_pos p =\n  let typexprs =\n    first\n    :: parse_comma_delimited_region ~grammar:Grammar.TypExprList ~closing:Rparen\n         ~f:parse_typ_expr_region p\n  in\n  Parser.expect Rparen p;\n  let () =\n    match typexprs with\n    | [_] ->\n      Parser.err ~start_pos ~end_pos:p.prev_end_pos p\n        (Diagnostics.message ErrorMessages.tuple_single_element)\n    | _ -> ()\n  in\n  let tuple_loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Typ.tuple ~attrs ~loc:tuple_loc typexprs\n\nand parse_type_constructor_arg_region p =\n  if Grammar.is_typ_expr_start p.Parser.token then Some (parse_typ_expr p)\n  else if p.token = LessThan then (\n    Parser.next p;\n    parse_type_constructor_arg_region p)\n  else None\n\n(* Js.Nullable.value<'a> *)\nand parse_type_constructor_args ~constr_name p =\n  let opening = p.Parser.token in\n  let opening_start_pos = p.start_pos in\n  match opening with\n  | LessThan | Lparen ->\n    Scanner.set_diamond_mode p.scanner;\n    Parser.next p;\n    let type_args =\n      (* TODO: change Grammar.TypExprList to TypArgList!!! Why did I wrote this? 
*)\n      parse_comma_delimited_region ~grammar:Grammar.TypExprList\n        ~closing:GreaterThan ~f:parse_type_constructor_arg_region p\n    in\n    let () =\n      match p.token with\n      | Rparen when opening = Token.Lparen ->\n        let typ = Ast_helper.Typ.constr constr_name type_args in\n        let msg =\n          Doc.breakable_group ~force_break:true\n            (Doc.concat\n               [\n                 Doc.text \"Type parameters require angle brackets:\";\n                 Doc.indent\n                   (Doc.concat\n                      [\n                        Doc.line;\n                        ResPrinter.print_typ_expr typ CommentTable.empty;\n                      ]);\n               ])\n          |> Doc.to_string ~width:80\n        in\n        Parser.err ~start_pos:opening_start_pos p (Diagnostics.message msg);\n        Parser.next p\n      | _ -> Parser.expect GreaterThan p\n    in\n    Scanner.pop_mode p.scanner Diamond;\n    type_args\n  | _ -> []\n\n(* string-field-decl ::=\n *  | string: poly-typexpr\n *  | attributes string-field-decl *)\nand parse_string_field_declaration p =\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | String name ->\n    let name_start_pos = p.start_pos in\n    let name_end_pos = p.end_pos in\n    Parser.next p;\n    let field_name = Location.mkloc name (mk_loc name_start_pos name_end_pos) in\n    Parser.expect ~grammar:Grammar.TypeExpression Colon p;\n    let typ = parse_poly_type_expr p in\n    Some (Parsetree.Otag (field_name, attrs, typ))\n  | DotDotDot ->\n    Parser.next p;\n    let typ = parse_typ_expr p in\n    Some (Parsetree.Oinherit typ)\n  | Lident name ->\n    let name_loc = mk_loc p.start_pos p.end_pos in\n    Parser.err p\n      (Diagnostics.message (ErrorMessages.object_quoted_field_name name));\n    Parser.next p;\n    let field_name = Location.mkloc name name_loc in\n    Parser.expect ~grammar:Grammar.TypeExpression Colon p;\n    let typ = parse_poly_type_expr p in\n   
 Some (Parsetree.Otag (field_name, attrs, typ))\n  | _token -> None\n\n(* field-decl\t::=\n *  | [mutable] field-name : poly-typexpr\n *  | attributes field-decl *)\nand parse_field_declaration p =\n  let start_pos = p.Parser.start_pos in\n  let attrs = parse_attributes p in\n  let mut =\n    if Parser.optional p Token.Mutable then Asttypes.Mutable\n    else Asttypes.Immutable\n  in\n  let lident, loc =\n    match p.token with\n    | _ -> parse_lident p\n  in\n  let optional = parse_optional_label p in\n  let name = Location.mkloc lident loc in\n  let typ =\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      parse_poly_type_expr p\n    | _ ->\n      Ast_helper.Typ.constr ~loc:name.loc {name with txt = Lident name.txt} []\n  in\n  let loc = mk_loc start_pos typ.ptyp_loc.loc_end in\n  (optional, Ast_helper.Type.field ~attrs ~loc ~mut name typ)\n\nand parse_field_declaration_region ?found_object_field p =\n  let start_pos = p.Parser.start_pos in\n  let attrs = parse_attributes p in\n  let mut =\n    if Parser.optional p Token.Mutable then Asttypes.Mutable\n    else Asttypes.Immutable\n  in\n  match p.token with\n  | DotDotDot ->\n    Parser.next p;\n    let name = Location.mkloc \"...\" (mk_loc start_pos p.prev_end_pos) in\n    let typ = parse_poly_type_expr p in\n    let loc = mk_loc start_pos typ.ptyp_loc.loc_end in\n    Some (Ast_helper.Type.field ~attrs ~loc ~mut name typ)\n  | String s when found_object_field <> None ->\n    Option.get found_object_field := true;\n    Parser.next p;\n    let name = Location.mkloc s (mk_loc start_pos p.prev_end_pos) in\n    Parser.expect Colon p;\n    let typ = parse_poly_type_expr p in\n    let loc = mk_loc start_pos typ.ptyp_loc.loc_end in\n    Some (Ast_helper.Type.field ~attrs ~loc ~mut name typ)\n  | Lident _ ->\n    let lident, loc = parse_lident p in\n    let name = Location.mkloc lident loc in\n    let optional = parse_optional_label p in\n    let typ =\n      match p.Parser.token with\n      | 
Colon ->\n        Parser.next p;\n        parse_poly_type_expr p\n      | _ ->\n        Ast_helper.Typ.constr ~loc:name.loc ~attrs\n          {name with txt = Lident name.txt}\n          []\n    in\n    let loc = mk_loc start_pos typ.ptyp_loc.loc_end in\n    let attrs = if optional then optional_attr :: attrs else attrs in\n    Some (Ast_helper.Type.field ~attrs ~loc ~mut name typ)\n  | _ ->\n    if attrs <> [] then\n      Parser.err ~start_pos p\n        (Diagnostics.message\n           \"Attributes and doc comments can only be used at the beginning of a \\\n            field declaration\");\n    if mut = Mutable then\n      Parser.err ~start_pos p\n        (Diagnostics.message\n           \"The `mutable` qualifier can only be used at the beginning of a \\\n            field declaration\");\n    None\n\n(* record-decl ::=\n *  | { field-decl }\n *  | { field-decl, field-decl }\n *  | { field-decl, field-decl, field-decl, }\n *)\nand parse_record_declaration p =\n  Parser.leave_breadcrumb p Grammar.RecordDecl;\n  Parser.expect Lbrace p;\n  let rows =\n    parse_comma_delimited_region ~grammar:Grammar.RecordDecl ~closing:Rbrace\n      ~f:parse_field_declaration_region p\n  in\n  Parser.expect Rbrace p;\n  Parser.eat_breadcrumb p;\n  rows\n\n(* constr-args ::=\n *  | (typexpr)\n *  | (typexpr, typexpr)\n *  | (typexpr, typexpr, typexpr,)\n *  | (record-decl)\n *\n * TODO: should we overparse inline-records in every position?\n * Give a good error message afterwards?\n *)\nand parse_constr_decl_args p =\n  let constr_args =\n    match p.Parser.token with\n    | Lparen -> (\n      Parser.next p;\n      (* TODO: this could use some cleanup/stratification *)\n      match p.Parser.token with\n      | Lbrace -> (\n        Parser.next p;\n        let start_pos = p.Parser.start_pos in\n        match p.Parser.token with\n        | DotDot | Dot ->\n          let closed_flag =\n            match p.token with\n            | DotDot ->\n              Parser.next p;\n              
Asttypes.Open\n            | Dot ->\n              Parser.next p;\n              Asttypes.Closed\n            | _ -> Asttypes.Closed\n          in\n          let fields =\n            parse_comma_delimited_region\n              ~grammar:Grammar.StringFieldDeclarations ~closing:Rbrace\n              ~f:parse_string_field_declaration p\n          in\n          Parser.expect Rbrace p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          let typ = Ast_helper.Typ.object_ ~loc ~attrs:[] fields closed_flag in\n          Parser.optional p Comma |> ignore;\n          let more_args =\n            parse_comma_delimited_region ~grammar:Grammar.TypExprList\n              ~closing:Rparen ~f:parse_typ_expr_region p\n          in\n          Parser.expect Rparen p;\n          Parsetree.Pcstr_tuple (typ :: more_args)\n        | DotDotDot ->\n          let dotdotdot_start = p.start_pos in\n          let dotdotdot_end = p.end_pos in\n          (* start of object type spreading, e.g. `User({...a, \"u\": int})` *)\n          Parser.next p;\n          let typ = parse_typ_expr p in\n          let () =\n            match p.token with\n            | Rbrace ->\n              (* {...x}, spread without extra fields *)\n              Parser.next p\n            | _ -> Parser.expect Comma p\n          in\n          let () =\n            match p.token with\n            | Lident _ ->\n              Parser.err ~start_pos:dotdotdot_start ~end_pos:dotdotdot_end p\n                (Diagnostics.message ErrorMessages.spread_in_record_declaration)\n            | _ -> ()\n          in\n          let fields =\n            Parsetree.Oinherit typ\n            :: parse_comma_delimited_region\n                 ~grammar:Grammar.StringFieldDeclarations ~closing:Rbrace\n                 ~f:parse_string_field_declaration p\n          in\n          Parser.expect Rbrace p;\n          let loc = mk_loc start_pos p.prev_end_pos in\n          let typ =\n            Ast_helper.Typ.object_ ~loc fields 
Asttypes.Closed\n            |> parse_type_alias p\n          in\n          let typ = parse_arrow_type_rest ~es6_arrow:true ~start_pos typ p in\n          Parser.optional p Comma |> ignore;\n          let more_args =\n            parse_comma_delimited_region ~grammar:Grammar.TypExprList\n              ~closing:Rparen ~f:parse_typ_expr_region p\n          in\n          Parser.expect Rparen p;\n          Parsetree.Pcstr_tuple (typ :: more_args)\n        | _ -> (\n          let attrs = parse_attributes p in\n          match p.Parser.token with\n          | String _ ->\n            let closed_flag = Asttypes.Closed in\n            let fields =\n              match attrs with\n              | [] ->\n                parse_comma_delimited_region\n                  ~grammar:Grammar.StringFieldDeclarations ~closing:Rbrace\n                  ~f:parse_string_field_declaration p\n              | attrs ->\n                let first =\n                  Parser.leave_breadcrumb p Grammar.StringFieldDeclarations;\n                  let field =\n                    match parse_string_field_declaration p with\n                    | Some field -> field\n                    | None -> assert false\n                  in\n                  (* parse comma after first *)\n                  let () =\n                    match p.Parser.token with\n                    | Rbrace | Eof -> ()\n                    | Comma -> Parser.next p\n                    | _ -> Parser.expect Comma p\n                  in\n                  Parser.eat_breadcrumb p;\n                  match field with\n                  | Parsetree.Otag (label, _, ct) ->\n                    Parsetree.Otag (label, attrs, ct)\n                  | Oinherit ct -> Oinherit ct\n                in\n                first\n                :: parse_comma_delimited_region\n                     ~grammar:Grammar.StringFieldDeclarations ~closing:Rbrace\n                     ~f:parse_string_field_declaration p\n            in\n            
Parser.expect Rbrace p;\n            let loc = mk_loc start_pos p.prev_end_pos in\n            let typ =\n              Ast_helper.Typ.object_ ~loc ~attrs:[] fields closed_flag\n              |> parse_type_alias p\n            in\n            let typ = parse_arrow_type_rest ~es6_arrow:true ~start_pos typ p in\n            Parser.optional p Comma |> ignore;\n            let more_args =\n              parse_comma_delimited_region ~grammar:Grammar.TypExprList\n                ~closing:Rparen ~f:parse_typ_expr_region p\n            in\n            Parser.expect Rparen p;\n            Parsetree.Pcstr_tuple (typ :: more_args)\n          | _ ->\n            let fields =\n              match attrs with\n              | [] ->\n                parse_comma_delimited_region ~grammar:Grammar.FieldDeclarations\n                  ~closing:Rbrace ~f:parse_field_declaration_region p\n              | attrs ->\n                let first =\n                  let optional, field = parse_field_declaration p in\n                  let attrs =\n                    if optional then optional_attr :: attrs else attrs\n                  in\n                  {field with Parsetree.pld_attributes = attrs}\n                in\n                if p.token = Rbrace then [first]\n                else (\n                  Parser.expect Comma p;\n                  first\n                  :: parse_comma_delimited_region\n                       ~grammar:Grammar.FieldDeclarations ~closing:Rbrace\n                       ~f:parse_field_declaration_region p)\n            in\n            Parser.expect Rbrace p;\n            Parser.optional p Comma |> ignore;\n            Parser.expect Rparen p;\n            Parsetree.Pcstr_record fields))\n      | _ ->\n        let args =\n          parse_comma_delimited_region ~grammar:Grammar.TypExprList\n            ~closing:Rparen ~f:parse_typ_expr_region p\n        in\n        Parser.expect Rparen p;\n        Parsetree.Pcstr_tuple args)\n    | _ -> Pcstr_tuple []\n  
in\n  let res =\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      Some (parse_typ_expr p)\n    | _ -> None\n  in\n  (constr_args, res)\n\n(* constr-decl ::=\n *  | constr-name\n *  | attrs constr-name\n *  | constr-name const-args\n *  | attrs constr-name const-args *)\nand parse_type_constructor_declaration_with_bar p =\n  match p.Parser.token with\n  | Bar ->\n    let start_pos = p.Parser.start_pos in\n    Parser.next p;\n    Some (parse_type_constructor_declaration ~start_pos p)\n  | _ -> None\n\nand parse_type_constructor_declaration ~start_pos p =\n  Parser.leave_breadcrumb p Grammar.ConstructorDeclaration;\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | DotDotDot ->\n    Parser.next p;\n    let name = Location.mkloc \"...\" (mk_loc start_pos p.prev_end_pos) in\n    let typ = parse_poly_type_expr p in\n    let loc = mk_loc start_pos typ.ptyp_loc.loc_end in\n    Ast_helper.Type.constructor ~loc ~attrs ~args:(Pcstr_tuple [typ]) name\n  | Uident uident ->\n    let uident_loc = mk_loc p.start_pos p.end_pos in\n    Parser.next p;\n    let args, res = parse_constr_decl_args p in\n    Parser.eat_breadcrumb p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Type.constructor ~loc ~attrs ?res ~args\n      (Location.mkloc uident uident_loc)\n  | t ->\n    Parser.err p (Diagnostics.uident t);\n    Ast_helper.Type.constructor (Location.mknoloc \"_\")\n\n(* [|] constr-decl  { | constr-decl }   *)\nand parse_type_constructor_declarations ?first p =\n  let first_constr_decl =\n    match first with\n    | None ->\n      let start_pos = p.Parser.start_pos in\n      ignore (Parser.optional p Token.Bar);\n      parse_type_constructor_declaration ~start_pos p\n    | Some first_constr_decl -> first_constr_decl\n  in\n  first_constr_decl\n  :: parse_region ~grammar:Grammar.ConstructorDeclaration\n       ~f:parse_type_constructor_declaration_with_bar p\n\n(*\n * type-representation ::=\n *  ∣\t = [ | ] constr-decl  { 
| constr-decl }\n *  ∣\t = private [ | ] constr-decl  { | constr-decl }\n *  |  = |\n *  ∣\t = private |\n *  ∣\t = record-decl\n *  ∣\t = private record-decl\n *  |  = ..\n *)\nand parse_type_representation p =\n  Parser.leave_breadcrumb p Grammar.TypeRepresentation;\n  (* = consumed *)\n  let private_flag =\n    if Parser.optional p Token.Private then Asttypes.Private\n    else Asttypes.Public\n  in\n  let kind =\n    match p.Parser.token with\n    | Bar | Uident _ ->\n      Parsetree.Ptype_variant (parse_type_constructor_declarations p)\n    | Lbrace -> Parsetree.Ptype_record (parse_record_declaration p)\n    | DotDot ->\n      Parser.next p;\n      Ptype_open\n    | token ->\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      (* TODO: I have no idea if this is even remotely a good idea *)\n      Parsetree.Ptype_variant []\n  in\n  Parser.eat_breadcrumb p;\n  (private_flag, kind)\n\n(* type-param\t::=\n *  | variance 'lident\n *  | variance 'uident\n *  | variance _\n *\n * variance ::=\n *   | +\n *   | -\n *   | (* empty *)\n *)\nand parse_type_param p =\n  let variance =\n    match p.Parser.token with\n    | Plus ->\n      Parser.next p;\n      Asttypes.Covariant\n    | Minus ->\n      Parser.next p;\n      Contravariant\n    | _ -> Invariant\n  in\n  match p.Parser.token with\n  | SingleQuote ->\n    Parser.next p;\n    let ident, loc =\n      if p.Parser.token = Eof then (\n        Parser.err ~start_pos:p.start_pos p\n          (Diagnostics.unexpected p.Parser.token p.breadcrumbs);\n        (\"\", mk_loc p.start_pos p.prev_end_pos))\n      else parse_ident ~msg:ErrorMessages.type_param ~start_pos:p.start_pos p\n    in\n    Some (Ast_helper.Typ.var ~loc ident, variance)\n  | Underscore ->\n    let loc = mk_loc p.start_pos p.end_pos in\n    Parser.next p;\n    Some (Ast_helper.Typ.any ~loc (), variance)\n  | (Uident _ | Lident _) as token ->\n    Parser.err p\n      (Diagnostics.message\n         (\"Type params start with a singlequote: 
'\" ^ Token.to_string token));\n    let ident, loc =\n      parse_ident ~msg:ErrorMessages.type_param ~start_pos:p.start_pos p\n    in\n    Some (Ast_helper.Typ.var ~loc ident, variance)\n  | _token -> None\n\n(* type-params\t::=\n *  | <type-param>\n *  ∣\t<type-param, type-param>\n *  ∣\t<type-param, type-param, type-param>\n *  ∣\t<type-param, type-param, type-param,>\n *\n *  TODO: when we have pretty-printer show an error\n *  with the actual code corrected. *)\nand parse_type_params ~parent p =\n  let opening = p.Parser.token in\n  match opening with\n  | (LessThan | Lparen) when p.start_pos.pos_lnum == p.prev_end_pos.pos_lnum ->\n    Scanner.set_diamond_mode p.scanner;\n    let opening_start_pos = p.start_pos in\n    Parser.leave_breadcrumb p Grammar.TypeParams;\n    Parser.next p;\n    let params =\n      parse_comma_delimited_region ~grammar:Grammar.TypeParams\n        ~closing:GreaterThan ~f:parse_type_param p\n    in\n    let () =\n      match p.token with\n      | Rparen when opening = Token.Lparen ->\n        let msg =\n          Doc.breakable_group ~force_break:true\n            (Doc.concat\n               [\n                 Doc.text \"Type parameters require angle brackets:\";\n                 Doc.indent\n                   (Doc.concat\n                      [\n                        Doc.line;\n                        Doc.concat\n                          [\n                            ResPrinter.print_longident parent.Location.txt;\n                            ResPrinter.print_type_params params\n                              CommentTable.empty;\n                          ];\n                      ]);\n               ])\n          |> Doc.to_string ~width:80\n        in\n        Parser.err ~start_pos:opening_start_pos p (Diagnostics.message msg);\n        Parser.next p\n      | _ -> Parser.expect GreaterThan p\n    in\n    Scanner.pop_mode p.scanner Diamond;\n    Parser.eat_breadcrumb p;\n    params\n  | _ -> []\n\n(* 
type-constraint\t::=\tconstraint ' ident =  typexpr *)\nand parse_type_constraint p =\n  let start_pos = p.Parser.start_pos in\n  match p.Parser.token with\n  | Token.Constraint -> (\n    Parser.next p;\n    Parser.expect SingleQuote p;\n    match p.Parser.token with\n    | Lident ident ->\n      let ident_loc = mk_loc start_pos p.end_pos in\n      Parser.next p;\n      Parser.expect Equal p;\n      let typ = parse_typ_expr p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Some (Ast_helper.Typ.var ~loc:ident_loc ident, typ, loc)\n    | t ->\n      Parser.err p (Diagnostics.lident t);\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Some (Ast_helper.Typ.any (), parse_typ_expr p, loc))\n  | _ -> None\n\n(* type-constraints ::=\n *  | (* empty *)\n *  | type-constraint\n *  | type-constraint type-constraint\n *  | type-constraint type-constraint type-constraint (* 0 or more *)\n *)\nand parse_type_constraints p =\n  parse_region ~grammar:Grammar.TypeConstraint ~f:parse_type_constraint p\n\nand parse_type_equation_or_constr_decl p =\n  let uident_start_pos = p.Parser.start_pos in\n  match p.Parser.token with\n  | Uident uident -> (\n    Parser.next p;\n    match p.Parser.token with\n    | Dot -> (\n      Parser.next p;\n      let type_constr =\n        parse_value_path_tail p uident_start_pos (Longident.Lident uident)\n      in\n      let loc = mk_loc uident_start_pos p.prev_end_pos in\n      let typ =\n        parse_type_alias p\n          (Ast_helper.Typ.constr ~loc type_constr\n             (parse_type_constructor_args ~constr_name:type_constr p))\n      in\n      match p.token with\n      | Equal ->\n        Parser.next p;\n        let priv, kind = parse_type_representation p in\n        (Some typ, priv, kind)\n      | EqualGreater ->\n        Parser.next p;\n        let return_type = parse_typ_expr ~alias:false p in\n        let loc = mk_loc uident_start_pos p.prev_end_pos in\n        let arrow_type =\n          Ast_helper.Typ.arrow ~loc 
Asttypes.Nolabel typ return_type\n        in\n        let uncurried = p.uncurried_config <> Legacy in\n        let arrow_type =\n          if uncurried then\n            Ast_uncurried.uncurried_type ~loc ~arity:1 arrow_type\n          else arrow_type\n        in\n        let typ = parse_type_alias p arrow_type in\n        (Some typ, Asttypes.Public, Parsetree.Ptype_abstract)\n      | _ -> (Some typ, Asttypes.Public, Parsetree.Ptype_abstract))\n    | _ ->\n      let uident_end_pos = p.prev_end_pos in\n      let args, res = parse_constr_decl_args p in\n      let first =\n        Some\n          (let uident_loc = mk_loc uident_start_pos uident_end_pos in\n           Ast_helper.Type.constructor\n             ~loc:(mk_loc uident_start_pos p.prev_end_pos)\n             ?res ~args\n             (Location.mkloc uident uident_loc))\n      in\n      ( None,\n        Asttypes.Public,\n        Parsetree.Ptype_variant (parse_type_constructor_declarations p ?first)\n      ))\n  | t ->\n    Parser.err p (Diagnostics.uident t);\n    (* TODO: is this a good idea? 
*)\n    (None, Asttypes.Public, Parsetree.Ptype_abstract)\n\nand parse_record_or_object_decl p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Lbrace p;\n  match p.Parser.token with\n  | DotDot | Dot ->\n    let closed_flag =\n      match p.token with\n      | DotDot ->\n        Parser.next p;\n        Asttypes.Open\n      | Dot ->\n        Parser.next p;\n        Asttypes.Closed\n      | _ -> Asttypes.Closed\n    in\n    let fields =\n      parse_comma_delimited_region ~grammar:Grammar.StringFieldDeclarations\n        ~closing:Rbrace ~f:parse_string_field_declaration p\n    in\n    Parser.expect Rbrace p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    let typ =\n      Ast_helper.Typ.object_ ~loc ~attrs:[] fields closed_flag\n      |> parse_type_alias p\n    in\n    let typ = parse_arrow_type_rest ~es6_arrow:true ~start_pos typ p in\n    (Some typ, Asttypes.Public, Parsetree.Ptype_abstract)\n  | DotDotDot -> (\n    let dotdotdot_start = p.start_pos in\n    let dotdotdot_end = p.end_pos in\n    (* start of object type spreading, e.g. 
`type u = {...a, \"u\": int}` *)\n    Parser.next p;\n    let typ = parse_typ_expr p in\n    match p.token with\n    | Rbrace ->\n      (* {...x}, spread without extra fields *)\n      Parser.next p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      let dot_field =\n        Ast_helper.Type.field ~loc\n          {txt = \"...\"; loc = mk_loc dotdotdot_start dotdotdot_end}\n          typ\n      in\n      let kind = Parsetree.Ptype_record [dot_field] in\n      (None, Public, kind)\n    | _ ->\n      Parser.expect Comma p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      let dot_field =\n        Ast_helper.Type.field ~loc\n          {txt = \"...\"; loc = mk_loc dotdotdot_start dotdotdot_end}\n          typ\n      in\n      let found_object_field = ref false in\n      let fields =\n        parse_comma_delimited_region ~grammar:Grammar.RecordDecl ~closing:Rbrace\n          ~f:(parse_field_declaration_region ~found_object_field)\n          p\n      in\n      Parser.expect Rbrace p;\n      if !found_object_field then\n        let fields =\n          Ext_list.map fields (fun ld ->\n              match ld.pld_name.txt with\n              | \"...\" -> Parsetree.Oinherit ld.pld_type\n              | _ -> Otag (ld.pld_name, ld.pld_attributes, ld.pld_type))\n        in\n        let dot_field = Parsetree.Oinherit typ in\n        let typ_obj = Ast_helper.Typ.object_ (dot_field :: fields) Closed in\n        let typ_obj = parse_type_alias p typ_obj in\n        let typ_obj =\n          parse_arrow_type_rest ~es6_arrow:true ~start_pos typ_obj p\n        in\n        (Some typ_obj, Public, Ptype_abstract)\n      else\n        let kind = Parsetree.Ptype_record (dot_field :: fields) in\n        (None, Public, kind))\n  | _ -> (\n    let attrs = parse_attributes p in\n    match p.Parser.token with\n    | String _ ->\n      let closed_flag = Asttypes.Closed in\n      let fields =\n        match attrs with\n        | [] ->\n          parse_comma_delimited_region 
~grammar:Grammar.StringFieldDeclarations\n            ~closing:Rbrace ~f:parse_string_field_declaration p\n        | attrs ->\n          let first =\n            Parser.leave_breadcrumb p Grammar.StringFieldDeclarations;\n            let field =\n              match parse_string_field_declaration p with\n              | Some field -> field\n              | None -> assert false\n            in\n            (* parse comma after first *)\n            let () =\n              match p.Parser.token with\n              | Rbrace | Eof -> ()\n              | Comma -> Parser.next p\n              | _ -> Parser.expect Comma p\n            in\n            Parser.eat_breadcrumb p;\n            match field with\n            | Parsetree.Otag (label, _, ct) -> Parsetree.Otag (label, attrs, ct)\n            | Oinherit ct -> Oinherit ct\n          in\n          first\n          :: parse_comma_delimited_region\n               ~grammar:Grammar.StringFieldDeclarations ~closing:Rbrace\n               ~f:parse_string_field_declaration p\n      in\n      Parser.expect Rbrace p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      let typ =\n        Ast_helper.Typ.object_ ~loc ~attrs:[] fields closed_flag\n        |> parse_type_alias p\n      in\n      let typ = parse_arrow_type_rest ~es6_arrow:true ~start_pos typ p in\n      (Some typ, Asttypes.Public, Parsetree.Ptype_abstract)\n    | _ ->\n      Parser.leave_breadcrumb p Grammar.RecordDecl;\n      let fields =\n        (* XXX *)\n        match attrs with\n        | [] ->\n          parse_comma_delimited_region ~grammar:Grammar.FieldDeclarations\n            ~closing:Rbrace ~f:parse_field_declaration_region p\n        | attr :: _ as attrs ->\n          let first =\n            let optional, field = parse_field_declaration p in\n            let attrs = if optional then optional_attr :: attrs else attrs in\n            Parser.optional p Comma |> ignore;\n            {\n              field with\n              Parsetree.pld_attributes = 
attrs;\n              pld_loc =\n                {\n                  field.Parsetree.pld_loc with\n                  loc_start = (attr |> fst).loc.loc_start;\n                };\n            }\n          in\n          first\n          :: parse_comma_delimited_region ~grammar:Grammar.FieldDeclarations\n               ~closing:Rbrace ~f:parse_field_declaration_region p\n      in\n      Parser.expect Rbrace p;\n      Parser.eat_breadcrumb p;\n      (None, Asttypes.Public, Parsetree.Ptype_record fields))\n\nand parse_private_eq_or_repr p =\n  Parser.expect Private p;\n  match p.Parser.token with\n  | Lbrace ->\n    let manifest, _, kind = parse_record_or_object_decl p in\n    (manifest, Asttypes.Private, kind)\n  | Uident _ ->\n    let manifest, _, kind = parse_type_equation_or_constr_decl p in\n    (manifest, Asttypes.Private, kind)\n  | Bar | DotDot ->\n    let _, kind = parse_type_representation p in\n    (None, Asttypes.Private, kind)\n  | t when Grammar.is_typ_expr_start t ->\n    (Some (parse_typ_expr p), Asttypes.Private, Parsetree.Ptype_abstract)\n  | _ ->\n    let _, kind = parse_type_representation p in\n    (None, Asttypes.Private, kind)\n\n(*\n  polymorphic-variant-type\t::=\n                            | [ tag-spec-first  { | tag-spec } ]\n                            | [> [ tag-spec ]  { | tag-spec } ]\n                            | [< [|] tag-spec-full  { | tag-spec-full }  [ > { `tag-name }+ ] ]\n\n            tag-spec-first\t::=\t`tag-name  [ of typexpr ]\n                            |\t[ typexpr ] |  tag-spec\n\n                  tag-spec\t::=\t`tag-name  [ of typexpr ]\n                            |\ttypexpr\n\n              tag-spec-full\t::=\t`tag-name  [ of [&] typexpr  { & typexpr } ]\n                             |\ttypexpr\n*)\nand parse_polymorphic_variant_type ~attrs p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Lbracket p;\n  match p.token with\n  | GreaterThan ->\n    Parser.next p;\n    let row_fields =\n      match p.token 
with\n      | Rbracket -> []\n      | Bar -> parse_tag_specs p\n      | _ ->\n        let row_field = parse_tag_spec p in\n        row_field :: parse_tag_specs p\n    in\n    let variant =\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Typ.variant ~attrs ~loc row_fields Open None\n    in\n    Parser.expect Rbracket p;\n    variant\n  | LessThan ->\n    Parser.next p;\n    Parser.optional p Bar |> ignore;\n    let row_field = parse_tag_spec_full p in\n    let row_fields = parse_tag_spec_fulls p in\n    let tag_names = parse_tag_names p in\n    let variant =\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Typ.variant ~attrs ~loc (row_field :: row_fields) Closed\n        (Some tag_names)\n    in\n    Parser.expect Rbracket p;\n    variant\n  | _ ->\n    let row_fields1 = parse_tag_spec_first p in\n    let row_fields2 = parse_tag_specs p in\n    let variant =\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Typ.variant ~attrs ~loc (row_fields1 @ row_fields2) Closed None\n    in\n    Parser.expect Rbracket p;\n    variant\n\nand parse_tag_name p =\n  match p.Parser.token with\n  | Hash ->\n    let ident, _loc = parse_hash_ident ~start_pos:p.start_pos p in\n    Some ident\n  | _ -> None\n\nand parse_tag_names p =\n  if p.Parser.token == GreaterThan then (\n    Parser.next p;\n    parse_region p ~grammar:Grammar.TagNames ~f:parse_tag_name)\n  else []\n\nand parse_tag_spec_fulls p =\n  match p.Parser.token with\n  | Rbracket -> []\n  | GreaterThan -> []\n  | Bar ->\n    Parser.next p;\n    let row_field = parse_tag_spec_full p in\n    row_field :: parse_tag_spec_fulls p\n  | _ -> []\n\nand parse_tag_spec_full p =\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | Hash -> parse_polymorphic_variant_type_spec_hash ~attrs ~full:true p\n  | _ ->\n    let typ = parse_typ_expr ~attrs p in\n    Parsetree.Rinherit typ\n\nand parse_tag_specs p =\n  match p.Parser.token with\n  | Bar ->\n    
Parser.next p;\n    let row_field = parse_tag_spec p in\n    row_field :: parse_tag_specs p\n  | _ -> []\n\nand parse_tag_spec p =\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | Hash -> parse_polymorphic_variant_type_spec_hash ~attrs ~full:false p\n  | _ ->\n    let typ = parse_typ_expr ~attrs p in\n    Parsetree.Rinherit typ\n\nand parse_tag_spec_first p =\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | Bar ->\n    Parser.next p;\n    [parse_tag_spec p]\n  | Hash -> [parse_polymorphic_variant_type_spec_hash ~attrs ~full:false p]\n  | _ -> (\n    let typ = parse_typ_expr ~attrs p in\n    match p.token with\n    | Rbracket ->\n      (* example: [ListStyleType.t] *)\n      [Parsetree.Rinherit typ]\n    | _ ->\n      Parser.expect Bar p;\n      [Parsetree.Rinherit typ; parse_tag_spec p])\n\nand parse_polymorphic_variant_type_spec_hash ~attrs ~full p :\n    Parsetree.row_field =\n  let start_pos = p.Parser.start_pos in\n  let ident, loc = parse_hash_ident ~start_pos p in\n  let rec loop p =\n    match p.Parser.token with\n    | Band when full ->\n      Parser.next p;\n      let row_field = parse_polymorphic_variant_type_args p in\n      row_field :: loop p\n    | _ -> []\n  in\n  let first_tuple, tag_contains_a_constant_empty_constructor =\n    match p.Parser.token with\n    | Band when full ->\n      Parser.next p;\n      ([parse_polymorphic_variant_type_args p], true)\n    | Lparen -> ([parse_polymorphic_variant_type_args p], false)\n    | _ -> ([], true)\n  in\n  let tuples = first_tuple @ loop p in\n  Parsetree.Rtag\n    ( Location.mkloc ident loc,\n      attrs,\n      tag_contains_a_constant_empty_constructor,\n      tuples )\n\nand parse_polymorphic_variant_type_args p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Lparen p;\n  let args =\n    parse_comma_delimited_region ~grammar:Grammar.TypExprList ~closing:Rparen\n      ~f:parse_typ_expr_region p\n  in\n  Parser.expect Rparen p;\n  let attrs = [] in\n 
 let loc = mk_loc start_pos p.prev_end_pos in\n  match args with\n  | [({ptyp_desc = Ptyp_tuple _} as typ)] as types ->\n    if p.mode = ParseForTypeChecker then typ\n    else Ast_helper.Typ.tuple ~loc ~attrs types\n  | [typ] -> typ\n  | types -> Ast_helper.Typ.tuple ~loc ~attrs types\n\nand parse_type_equation_and_representation p =\n  match p.Parser.token with\n  | (Equal | Bar) as token -> (\n    if token = Bar then Parser.expect Equal p;\n    Parser.next p;\n    match p.Parser.token with\n    | Uident _ -> parse_type_equation_or_constr_decl p\n    | Lbrace -> parse_record_or_object_decl p\n    | Private -> parse_private_eq_or_repr p\n    | Bar | DotDot ->\n      let priv, kind = parse_type_representation p in\n      (None, priv, kind)\n    | _ -> (\n      let manifest = Some (parse_typ_expr p) in\n      match p.Parser.token with\n      | Equal ->\n        Parser.next p;\n        let priv, kind = parse_type_representation p in\n        (manifest, priv, kind)\n      | _ -> (manifest, Public, Parsetree.Ptype_abstract)))\n  | _ -> (None, Public, Parsetree.Ptype_abstract)\n\n(* type-definition\t::=\ttype [rec] typedef  { and typedef }\n * typedef\t::=\ttypeconstr-name [type-params] type-information\n * type-information\t::=\t[type-equation]  [type-representation]  { type-constraint }\n * type-equation\t::=\t= typexpr *)\nand parse_type_def ~attrs ~start_pos p =\n  Parser.leave_breadcrumb p Grammar.TypeDef;\n  (* let attrs = match attrs with | Some attrs -> attrs | None -> parseAttributes p in *)\n  Parser.leave_breadcrumb p Grammar.TypeConstrName;\n  let name, loc = parse_lident p in\n  let type_constr_name = Location.mkloc name loc in\n  Parser.eat_breadcrumb p;\n  let params =\n    let constr_name = Location.mkloc (Longident.Lident name) loc in\n    parse_type_params ~parent:constr_name p\n  in\n  let type_def =\n    let manifest, priv, kind = parse_type_equation_and_representation p in\n    let cstrs = parse_type_constraints p in\n    let loc = mk_loc start_pos 
p.prev_end_pos in\n    Ast_helper.Type.mk ~loc ~attrs ~priv ~kind ~params ~cstrs ?manifest\n      type_constr_name\n  in\n  Parser.eat_breadcrumb p;\n  type_def\n\nand parse_type_extension ~params ~attrs ~name p =\n  Parser.expect PlusEqual p;\n  let priv =\n    if Parser.optional p Token.Private then Asttypes.Private\n    else Asttypes.Public\n  in\n  let constr_start = p.Parser.start_pos in\n  Parser.optional p Bar |> ignore;\n  let first =\n    let attrs, name, kind =\n      match p.Parser.token with\n      | Bar ->\n        Parser.next p;\n        parse_constr_def ~parse_attrs:true p\n      | _ -> parse_constr_def ~parse_attrs:true p\n    in\n    let loc = mk_loc constr_start p.prev_end_pos in\n    Ast_helper.Te.constructor ~loc ~attrs name kind\n  in\n  let rec loop p cs =\n    match p.Parser.token with\n    | Bar ->\n      let start_pos = p.Parser.start_pos in\n      Parser.next p;\n      let attrs, name, kind = parse_constr_def ~parse_attrs:true p in\n      let ext_constr =\n        Ast_helper.Te.constructor ~attrs\n          ~loc:(mk_loc start_pos p.prev_end_pos)\n          name kind\n      in\n      loop p (ext_constr :: cs)\n    | _ -> List.rev cs\n  in\n  let constructors = loop p [first] in\n  Ast_helper.Te.mk ~attrs ~params ~priv name constructors\n\nand parse_type_definitions ~attrs ~name ~params ~start_pos p =\n  let type_def =\n    let manifest, priv, kind = parse_type_equation_and_representation p in\n    let cstrs = parse_type_constraints p in\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Type.mk ~loc ~attrs ~priv ~kind ~params ~cstrs ?manifest\n      {name with txt = lident_of_path name.Location.txt}\n  in\n  let rec loop p defs =\n    let start_pos = p.Parser.start_pos in\n    let attrs = parse_attributes_and_binding p in\n    match p.Parser.token with\n    | And ->\n      Parser.next p;\n      let type_def = parse_type_def ~attrs ~start_pos p in\n      loop p (type_def :: defs)\n    | _ -> List.rev defs\n  in\n  loop p 
[type_def]\n\n(* TODO: decide if we really want type extensions (eg. type x += Blue)\n * It adds quite a bit of complexity that can be avoided,\n * implemented for now. Needed to get a feel for the complexities of\n * this territory of the grammar *)\nand parse_type_definition_or_extension ~attrs p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Token.Typ p;\n  let rec_flag =\n    match p.token with\n    | Rec ->\n      Parser.next p;\n      Asttypes.Recursive\n    | Lident \"nonrec\" ->\n      Parser.next p;\n      Asttypes.Nonrecursive\n    | _ -> Asttypes.Nonrecursive\n  in\n  let name = parse_value_path p in\n  let params = parse_type_params ~parent:name p in\n  match p.Parser.token with\n  | PlusEqual -> TypeExt (parse_type_extension ~params ~attrs ~name p)\n  | _ ->\n    (* shape of type name should be Lident, i.e. `t` is accepted. `User.t` not *)\n    let () =\n      match name.Location.txt with\n      | Lident _ -> ()\n      | longident ->\n        Parser.err ~start_pos:name.loc.loc_start ~end_pos:name.loc.loc_end p\n          (longident |> ErrorMessages.type_declaration_name_longident\n         |> Diagnostics.message)\n    in\n    let type_defs = parse_type_definitions ~attrs ~name ~params ~start_pos p in\n    TypeDef {rec_flag; types = type_defs}\n\n(* external value-name : typexp = external-declaration *)\nand parse_external_def ~attrs ~start_pos p =\n  let in_external = !InExternal.status in\n  InExternal.status := true;\n  Parser.leave_breadcrumb p Grammar.External;\n  Parser.expect Token.External p;\n  let name, loc = parse_lident p in\n  let name = Location.mkloc name loc in\n  Parser.expect ~grammar:Grammar.TypeExpression Colon p;\n  let typ_expr = parse_typ_expr p in\n  let equal_start = p.start_pos in\n  let equal_end = p.end_pos in\n  Parser.expect Equal p;\n  let prim =\n    match p.token with\n    | String s ->\n      Parser.next p;\n      [s]\n    | _ ->\n      Parser.err ~start_pos:equal_start ~end_pos:equal_end p\n        
(Diagnostics.message\n           (\"An external requires the name of the JS value you're referring \\\n             to, like \\\"\" ^ name.txt ^ \"\\\".\"));\n      []\n  in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  let vb = Ast_helper.Val.mk ~loc ~attrs ~prim name typ_expr in\n  Parser.eat_breadcrumb p;\n  InExternal.status := in_external;\n  vb\n\n(* constr-def ::=\n *  | constr-decl\n *  | constr-name = constr\n *\n *  constr-decl ::= constr-name constr-args\n *  constr-name ::= uident\n *  constr      ::= path-uident *)\nand parse_constr_def ~parse_attrs p =\n  let attrs = if parse_attrs then parse_attributes p else [] in\n  let name =\n    match p.Parser.token with\n    | Uident name ->\n      let loc = mk_loc p.start_pos p.end_pos in\n      Parser.next p;\n      Location.mkloc name loc\n    | t ->\n      Parser.err p (Diagnostics.uident t);\n      Location.mknoloc \"_\"\n  in\n  let kind =\n    match p.Parser.token with\n    | Lparen ->\n      let args, res = parse_constr_decl_args p in\n      Parsetree.Pext_decl (args, res)\n    | Equal ->\n      Parser.next p;\n      let longident = parse_module_long_ident ~lowercase:false p in\n      Parsetree.Pext_rebind longident\n    | Colon ->\n      Parser.next p;\n      let typ = parse_typ_expr p in\n      Parsetree.Pext_decl (Pcstr_tuple [], Some typ)\n    | _ -> Parsetree.Pext_decl (Pcstr_tuple [], None)\n  in\n  (attrs, name, kind)\n\n(*\n * exception-definition\t::=\n *  | exception constr-decl\n *  ∣\texception constr-name = constr\n *\n *  constr-name ::= uident\n *  constr ::= long_uident *)\nand parse_exception_def ~attrs p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Token.Exception p;\n  let _, name, kind = parse_constr_def ~parse_attrs:false p in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Te.constructor ~loc ~attrs name kind\n\nand parse_newline_or_semicolon_structure p =\n  match p.Parser.token with\n  | Semicolon -> Parser.next p\n  | token when 
Grammar.is_structure_item_start token ->\n    if p.prev_end_pos.pos_lnum < p.start_pos.pos_lnum then ()\n    else\n      Parser.err ~start_pos:p.prev_end_pos ~end_pos:p.end_pos p\n        (Diagnostics.message\n           \"consecutive statements on a line must be separated by ';' or a \\\n            newline\")\n  | _ -> ()\n\nand parse_structure_item_region p =\n  let start_pos = p.Parser.start_pos in\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | Open ->\n    let open_description = parse_open_description ~attrs p in\n    parse_newline_or_semicolon_structure p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Str.open_ ~loc open_description)\n  | Let ->\n    let rec_flag, let_bindings = parse_let_bindings ~attrs ~start_pos p in\n    parse_newline_or_semicolon_structure p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Str.value ~loc rec_flag let_bindings)\n  | Typ -> (\n    Parser.begin_region p;\n    match parse_type_definition_or_extension ~attrs p with\n    | TypeDef {rec_flag; types} ->\n      parse_newline_or_semicolon_structure p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Parser.end_region p;\n      Some (Ast_helper.Str.type_ ~loc rec_flag types)\n    | TypeExt ext ->\n      parse_newline_or_semicolon_structure p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Parser.end_region p;\n      Some (Ast_helper.Str.type_extension ~loc ext))\n  | External ->\n    let external_def = parse_external_def ~attrs ~start_pos p in\n    parse_newline_or_semicolon_structure p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Str.primitive ~loc external_def)\n  | Exception ->\n    let exception_def = parse_exception_def ~attrs p in\n    parse_newline_or_semicolon_structure p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Str.exception_ ~loc exception_def)\n  | Include ->\n    let include_statement = parse_include_statement ~attrs 
p in\n    parse_newline_or_semicolon_structure p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Str.include_ ~loc include_statement)\n  | Module ->\n    Parser.begin_region p;\n    let structure_item =\n      parse_module_or_module_type_impl_or_pack_expr ~attrs p\n    in\n    parse_newline_or_semicolon_structure p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Parser.end_region p;\n    Some {structure_item with pstr_loc = loc}\n  | ModuleComment (loc, s) ->\n    Parser.next p;\n    Some\n      (Ast_helper.Str.attribute ~loc\n         ( {txt = \"res.doc\"; loc},\n           PStr\n             [\n               Ast_helper.Str.eval ~loc\n                 (Ast_helper.Exp.constant ~loc (Pconst_string (s, None)));\n             ] ))\n  | AtAt ->\n    let attr = parse_standalone_attribute p in\n    parse_newline_or_semicolon_structure p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Str.attribute ~loc attr)\n  | PercentPercent ->\n    let extension = parse_extension ~module_language:true p in\n    parse_newline_or_semicolon_structure p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Str.extension ~attrs ~loc extension)\n  | token when Grammar.is_expr_start token ->\n    let prev_end_pos = p.Parser.end_pos in\n    let exp = parse_expr p in\n    parse_newline_or_semicolon_structure p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Parser.check_progress ~prev_end_pos\n      ~result:(Ast_helper.Str.eval ~loc ~attrs exp)\n      p\n  | _ -> (\n    match attrs with\n    | (({Asttypes.loc = attr_loc}, _) as attr) :: _ ->\n      Parser.err ~start_pos:attr_loc.loc_start ~end_pos:attr_loc.loc_end p\n        (Diagnostics.message (ErrorMessages.attribute_without_node attr));\n      let expr = parse_expr p in\n      Some\n        (Ast_helper.Str.eval\n           ~loc:(mk_loc p.start_pos p.prev_end_pos)\n           ~attrs expr)\n    | _ -> None)\n[@@progress Parser.next, Parser.expect, 
LoopProgress.list_rest]\n\n(* include-statement ::= include module-expr *)\nand parse_include_statement ~attrs p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Token.Include p;\n  let mod_expr = parse_module_expr p in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Incl.mk ~loc ~attrs mod_expr\n\nand parse_atomic_module_expr p =\n  let start_pos = p.Parser.start_pos in\n  match p.Parser.token with\n  | Uident _ident ->\n    let longident = parse_module_long_ident ~lowercase:false p in\n    Ast_helper.Mod.ident ~loc:longident.loc longident\n  | Lbrace ->\n    Parser.next p;\n    let structure =\n      Ast_helper.Mod.structure\n        (parse_delimited_region ~grammar:Grammar.Structure ~closing:Rbrace\n           ~f:parse_structure_item_region p)\n    in\n    Parser.expect Rbrace p;\n    let end_pos = p.prev_end_pos in\n    {structure with pmod_loc = mk_loc start_pos end_pos}\n  | Lparen ->\n    Parser.next p;\n    let mod_expr =\n      match p.token with\n      | Rparen ->\n        Ast_helper.Mod.structure ~loc:(mk_loc start_pos p.prev_end_pos) []\n      | _ -> parse_constrained_mod_expr p\n    in\n    Parser.expect Rparen p;\n    mod_expr\n  | Lident \"unpack\" -> (\n    (* TODO: should this be made a keyword?? 
*)\n    Parser.next p;\n    Parser.expect Lparen p;\n    let expr = parse_expr p in\n    match p.Parser.token with\n    | Colon ->\n      let colon_start = p.Parser.start_pos in\n      Parser.next p;\n      let attrs = parse_attributes p in\n      let package_type = parse_package_type ~start_pos:colon_start ~attrs p in\n      Parser.expect Rparen p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      let constraint_expr = Ast_helper.Exp.constraint_ ~loc expr package_type in\n      Ast_helper.Mod.unpack ~loc constraint_expr\n    | _ ->\n      Parser.expect Rparen p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Mod.unpack ~loc expr)\n  | Percent ->\n    let extension = parse_extension p in\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Ast_helper.Mod.extension ~loc extension\n  | token ->\n    Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n    Recover.default_module_expr ()\n\nand parse_primary_mod_expr p =\n  let start_pos = p.Parser.start_pos in\n  let mod_expr = parse_atomic_module_expr p in\n  let rec loop p mod_expr =\n    match p.Parser.token with\n    | Lparen when p.prev_end_pos.pos_lnum == p.start_pos.pos_lnum ->\n      loop p (parse_module_application p mod_expr)\n    | _ -> mod_expr\n  in\n  let mod_expr = loop p mod_expr in\n  {mod_expr with pmod_loc = mk_loc start_pos p.prev_end_pos}\n\n(*\n * functor-arg ::=\n *  | uident : modtype\n *  | _ : modtype\n *  | modtype           --> \"punning\" for _ : modtype\n *  | attributes functor-arg\n *)\nand parse_functor_arg p =\n  let start_pos = p.Parser.start_pos in\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | Uident ident -> (\n    Parser.next p;\n    let uident_end_pos = p.prev_end_pos in\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      let module_type = parse_module_type p in\n      let loc = mk_loc start_pos uident_end_pos in\n      let arg_name = Location.mkloc ident loc in\n      Some (attrs, arg_name, 
Some module_type, start_pos)\n    | Dot ->\n      Parser.next p;\n      let module_type =\n        let module_long_ident =\n          parse_module_long_ident_tail ~lowercase:false p start_pos\n            (Longident.Lident ident)\n        in\n        Ast_helper.Mty.ident ~loc:module_long_ident.loc module_long_ident\n      in\n      let arg_name = Location.mknoloc \"_\" in\n      Some (attrs, arg_name, Some module_type, start_pos)\n    | _ ->\n      let loc = mk_loc start_pos uident_end_pos in\n      let mod_ident = Location.mkloc (Longident.Lident ident) loc in\n      let module_type = Ast_helper.Mty.ident ~loc mod_ident in\n      let arg_name = Location.mknoloc \"_\" in\n      Some (attrs, arg_name, Some module_type, start_pos))\n  | Underscore ->\n    Parser.next p;\n    let arg_name = Location.mkloc \"_\" (mk_loc start_pos p.prev_end_pos) in\n    Parser.expect Colon p;\n    let module_type = parse_module_type p in\n    Some (attrs, arg_name, Some module_type, start_pos)\n  | Lparen ->\n    Parser.next p;\n    Parser.expect Rparen p;\n    let arg_name = Location.mkloc \"*\" (mk_loc start_pos p.prev_end_pos) in\n    Some (attrs, arg_name, None, start_pos)\n  | _ -> None\n\nand parse_functor_args p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Lparen p;\n  let args =\n    parse_comma_delimited_region ~grammar:Grammar.FunctorArgs ~closing:Rparen\n      ~f:parse_functor_arg p\n  in\n  Parser.expect Rparen p;\n  match args with\n  | [] ->\n    [\n      ([], Location.mkloc \"*\" (mk_loc start_pos p.prev_end_pos), None, start_pos);\n    ]\n  | args -> args\n\nand parse_functor_module_expr p =\n  let start_pos = p.Parser.start_pos in\n  let args = parse_functor_args p in\n  let return_type =\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      Some (parse_module_type ~es6_arrow:false p)\n    | _ -> None\n  in\n  Parser.expect EqualGreater p;\n  let rhs_module_expr =\n    let mod_expr = parse_module_expr p in\n    match return_type 
with\n    | Some mod_type ->\n      Ast_helper.Mod.constraint_\n        ~loc:\n          (mk_loc mod_expr.pmod_loc.loc_start\n             mod_type.Parsetree.pmty_loc.loc_end)\n        mod_expr mod_type\n    | None -> mod_expr\n  in\n  let end_pos = p.prev_end_pos in\n  let mod_expr =\n    List.fold_right\n      (fun (attrs, name, module_type, start_pos) acc ->\n        Ast_helper.Mod.functor_ ~loc:(mk_loc start_pos end_pos) ~attrs name\n          module_type acc)\n      args rhs_module_expr\n  in\n  {mod_expr with pmod_loc = mk_loc start_pos end_pos}\n\n(* module-expr\t::=\n *  | module-path\n *  ∣\t{ structure-items }\n *  ∣\tfunctorArgs =>  module-expr\n *  ∣\tmodule-expr(module-expr)\n *  ∣\t( module-expr )\n *  ∣\t( module-expr : module-type )\n *  | extension\n *  | attributes module-expr *)\nand parse_module_expr p =\n  let has_await, loc_await =\n    let start_pos = p.start_pos in\n    match p.Parser.token with\n    | Await ->\n      Parser.expect Await p;\n      let end_pos = p.end_pos in\n      (true, mk_loc start_pos end_pos)\n    | _ -> (false, mk_loc start_pos start_pos)\n  in\n  let attrs = parse_attributes p in\n  let attrs =\n    if has_await then\n      (({txt = \"res.await\"; loc = loc_await}, PStr []) : Parsetree.attribute)\n      :: attrs\n    else attrs\n  in\n  let mod_expr =\n    if is_es6_arrow_functor p then parse_functor_module_expr p\n    else parse_primary_mod_expr p\n  in\n  {\n    mod_expr with\n    pmod_attributes = List.concat [mod_expr.pmod_attributes; attrs];\n  }\n\nand parse_constrained_mod_expr p =\n  let mod_expr = parse_module_expr p in\n  match p.Parser.token with\n  | Colon ->\n    Parser.next p;\n    let mod_type = parse_module_type p in\n    let loc = mk_loc mod_expr.pmod_loc.loc_start mod_type.pmty_loc.loc_end in\n    Ast_helper.Mod.constraint_ ~loc mod_expr mod_type\n  | _ -> mod_expr\n\nand parse_constrained_mod_expr_region p =\n  if Grammar.is_mod_expr_start p.Parser.token then\n    Some (parse_constrained_mod_expr 
p)\n  else None\n\nand parse_module_application p mod_expr =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Lparen p;\n  let args =\n    parse_comma_delimited_region ~grammar:Grammar.ModExprList ~closing:Rparen\n      ~f:parse_constrained_mod_expr_region p\n  in\n  Parser.expect Rparen p;\n  let args =\n    match args with\n    | [] ->\n      let loc = mk_loc start_pos p.prev_end_pos in\n      [Ast_helper.Mod.structure ~loc []]\n    | args -> args\n  in\n  List.fold_left\n    (fun mod_expr arg ->\n      Ast_helper.Mod.apply\n        ~loc:\n          (mk_loc mod_expr.Parsetree.pmod_loc.loc_start\n             arg.Parsetree.pmod_loc.loc_end)\n        mod_expr arg)\n    mod_expr args\n\nand parse_module_or_module_type_impl_or_pack_expr ~attrs p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Module p;\n  match p.Parser.token with\n  | Typ -> parse_module_type_impl ~attrs start_pos p\n  | Lparen ->\n    let expr = parse_first_class_module_expr ~start_pos p in\n    let a = parse_primary_expr ~operand:expr p in\n    let expr = parse_binary_expr ~a p 1 in\n    let expr = parse_ternary_expr expr p in\n    Ast_helper.Str.eval ~attrs expr\n  | _ -> parse_maybe_rec_module_binding ~attrs ~start_pos p\n\nand parse_module_type_impl ~attrs start_pos p =\n  Parser.expect Typ p;\n  let name_start = p.Parser.start_pos in\n  let name =\n    match p.Parser.token with\n    | Lident ident ->\n      Parser.next p;\n      let loc = mk_loc name_start p.prev_end_pos in\n      Location.mkloc ident loc\n    | Uident ident ->\n      Parser.next p;\n      let loc = mk_loc name_start p.prev_end_pos in\n      Location.mkloc ident loc\n    | t ->\n      Parser.err p (Diagnostics.uident t);\n      Location.mknoloc \"_\"\n  in\n  Parser.expect Equal p;\n  let module_type = parse_module_type p in\n  let module_type_declaration =\n    Ast_helper.Mtd.mk ~attrs\n      ~loc:(mk_loc name_start p.prev_end_pos)\n      ~typ:module_type name\n  in\n  let loc = mk_loc start_pos 
p.prev_end_pos in\n  Ast_helper.Str.modtype ~loc module_type_declaration\n\n(* definition\t::=\n   ∣\t module rec module-name :  module-type =  module-expr   { and module-name\n   :  module-type =  module-expr } *)\nand parse_maybe_rec_module_binding ~attrs ~start_pos p =\n  match p.Parser.token with\n  | Token.Rec ->\n    Parser.next p;\n    Ast_helper.Str.rec_module (parse_module_bindings ~start_pos ~attrs p)\n  | _ ->\n    Ast_helper.Str.module_\n      (parse_module_binding ~attrs ~start_pos:p.Parser.start_pos p)\n\nand parse_module_binding ~attrs ~start_pos p =\n  let name =\n    match p.Parser.token with\n    | Uident ident ->\n      let start_pos = p.Parser.start_pos in\n      Parser.next p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Location.mkloc ident loc\n    | t ->\n      Parser.err p (Diagnostics.uident t);\n      Location.mknoloc \"_\"\n  in\n  let body = parse_module_binding_body p in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Mb.mk ~attrs ~loc name body\n\nand parse_module_binding_body p =\n  (* TODO: make required with good error message when rec module binding *)\n  let return_mod_type =\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      Some (parse_module_type p)\n    | _ -> None\n  in\n  Parser.expect Equal p;\n  let mod_expr = parse_module_expr p in\n  match return_mod_type with\n  | Some mod_type ->\n    Ast_helper.Mod.constraint_\n      ~loc:(mk_loc mod_type.pmty_loc.loc_start mod_expr.pmod_loc.loc_end)\n      mod_expr mod_type\n  | None -> mod_expr\n\n(* module-name :  module-type =  module-expr\n * { and module-name :  module-type =  module-expr } *)\nand parse_module_bindings ~attrs ~start_pos p =\n  let rec loop p acc =\n    let start_pos = p.Parser.start_pos in\n    let doc_attr : Parsetree.attributes =\n      match p.Parser.token with\n      | DocComment (loc, s) ->\n        Parser.next p;\n        [doc_comment_to_attribute loc s]\n      | _ -> []\n    in\n    let attrs = 
doc_attr @ parse_attributes_and_binding p in\n    match p.Parser.token with\n    | And ->\n      Parser.next p;\n      ignore (Parser.optional p Module);\n      (* over-parse for fault-tolerance *)\n      let mod_binding = parse_module_binding ~attrs ~start_pos p in\n      loop p (mod_binding :: acc)\n    | _ -> List.rev acc\n  in\n  let first = parse_module_binding ~attrs ~start_pos p in\n  loop p [first]\n\nand parse_atomic_module_type p =\n  let start_pos = p.Parser.start_pos in\n  let module_type =\n    match p.Parser.token with\n    | Uident _ | Lident _ ->\n      (* Ocaml allows module types to end with lowercase: module Foo : bar = { ... }\n       * lets go with uppercase terminal for now *)\n      let module_long_ident = parse_module_long_ident ~lowercase:true p in\n      Ast_helper.Mty.ident ~loc:module_long_ident.loc module_long_ident\n    | Lparen ->\n      Parser.next p;\n      let mty = parse_module_type p in\n      Parser.expect Rparen p;\n      {mty with pmty_loc = mk_loc start_pos p.prev_end_pos}\n    | Lbrace ->\n      Parser.next p;\n      let spec =\n        parse_delimited_region ~grammar:Grammar.Signature ~closing:Rbrace\n          ~f:parse_signature_item_region p\n      in\n      Parser.expect Rbrace p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Mty.signature ~loc spec\n    | Module ->\n      (* TODO: check if this is still atomic when implementing first class modules*)\n      parse_module_type_of p\n    | Percent ->\n      let extension = parse_extension p in\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Ast_helper.Mty.extension ~loc extension\n    | token ->\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      Recover.default_module_type ()\n  in\n  let module_type_loc = mk_loc start_pos p.prev_end_pos in\n  {module_type with pmty_loc = module_type_loc}\n\nand parse_functor_module_type p =\n  let start_pos = p.Parser.start_pos in\n  let args = parse_functor_args p in\n  
Parser.expect EqualGreater p;\n  let rhs = parse_module_type p in\n  let end_pos = p.prev_end_pos in\n  let mod_type =\n    List.fold_right\n      (fun (attrs, name, module_type, start_pos) acc ->\n        Ast_helper.Mty.functor_ ~loc:(mk_loc start_pos end_pos) ~attrs name\n          module_type acc)\n      args rhs\n  in\n  {mod_type with pmty_loc = mk_loc start_pos end_pos}\n\n(* Module types are the module-level equivalent of type expressions: they\n * specify the general shape and type properties of modules.\n *\n * module-type ::=\n *  | modtype-path\n *  | { signature }\n *  | ( module-type )               --> parenthesized module-type\n *  | functor-args => module-type   --> functor\n *  | module-type => module-type    --> functor\n *  | module type of module-expr\n *  | attributes module-type\n *  | module-type with-mod-constraints\n *  | extension\n *)\nand parse_module_type ?(es6_arrow = true) ?(with_ = true) p =\n  let attrs = parse_attributes p in\n  let modty =\n    if es6_arrow && is_es6_arrow_functor p then parse_functor_module_type p\n    else\n      let modty = parse_atomic_module_type p in\n      match p.Parser.token with\n      | EqualGreater when es6_arrow == true ->\n        Parser.next p;\n        let rhs = parse_module_type ~with_:false p in\n        let str = Location.mknoloc \"_\" in\n        let loc = mk_loc modty.pmty_loc.loc_start p.prev_end_pos in\n        Ast_helper.Mty.functor_ ~loc str (Some modty) rhs\n      | _ -> modty\n  in\n  let module_type =\n    {modty with pmty_attributes = List.concat [modty.pmty_attributes; attrs]}\n  in\n  if with_ then parse_with_constraints module_type p else module_type\n\nand parse_with_constraints module_type p =\n  match p.Parser.token with\n  | Lident \"with\" ->\n    Parser.next p;\n    let first = parse_with_constraint p in\n    let rec loop p acc =\n      match p.Parser.token with\n      | And ->\n        Parser.next p;\n        loop p (parse_with_constraint p :: acc)\n      | _ -> List.rev 
acc\n    in\n    let constraints = loop p [first] in\n    let loc = mk_loc module_type.pmty_loc.loc_start p.prev_end_pos in\n    Ast_helper.Mty.with_ ~loc module_type constraints\n  | _ -> module_type\n\n(* mod-constraint\t::=\n *  |  type typeconstr<type-params> type-equation type-constraints?\n *  ∣\t type typeconstr-name<type-params> := typexpr\n *  ∣\t module module-path = extended-module-path\n *  ∣\t module module-path :=  extended-module-path\n *\n *  TODO: split this up into multiple functions, better errors *)\nand parse_with_constraint p =\n  match p.Parser.token with\n  | Module -> (\n    Parser.next p;\n    let module_path = parse_module_long_ident ~lowercase:false p in\n    match p.Parser.token with\n    | ColonEqual ->\n      Parser.next p;\n      let lident = parse_module_long_ident ~lowercase:false p in\n      Parsetree.Pwith_modsubst (module_path, lident)\n    | Equal ->\n      Parser.next p;\n      let lident = parse_module_long_ident ~lowercase:false p in\n      Parsetree.Pwith_module (module_path, lident)\n    | token ->\n      (* TODO: revisit *)\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      let lident = parse_module_long_ident ~lowercase:false p in\n      Parsetree.Pwith_modsubst (module_path, lident))\n  | Typ -> (\n    Parser.next p;\n    let type_constr = parse_value_path p in\n    let params = parse_type_params ~parent:type_constr p in\n    match p.Parser.token with\n    | ColonEqual ->\n      Parser.next p;\n      let typ_expr = parse_typ_expr p in\n      Parsetree.Pwith_typesubst\n        ( type_constr,\n          Ast_helper.Type.mk ~loc:type_constr.loc ~params ~manifest:typ_expr\n            (Location.mkloc (Longident.last type_constr.txt) type_constr.loc) )\n    | Equal ->\n      Parser.next p;\n      let typ_expr = parse_typ_expr p in\n      let type_constraints = parse_type_constraints p in\n      Parsetree.Pwith_type\n        ( type_constr,\n          Ast_helper.Type.mk ~loc:type_constr.loc ~params 
~manifest:typ_expr\n            ~cstrs:type_constraints\n            (Location.mkloc (Longident.last type_constr.txt) type_constr.loc) )\n    | token ->\n      (* TODO: revisit *)\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      let typ_expr = parse_typ_expr p in\n      let type_constraints = parse_type_constraints p in\n      Parsetree.Pwith_type\n        ( type_constr,\n          Ast_helper.Type.mk ~loc:type_constr.loc ~params ~manifest:typ_expr\n            ~cstrs:type_constraints\n            (Location.mkloc (Longident.last type_constr.txt) type_constr.loc) ))\n  | token ->\n    (* TODO: implement recovery strategy *)\n    Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n    Parsetree.Pwith_type\n      ( Location.mknoloc (Longident.Lident \"\"),\n        Ast_helper.Type.mk ~params:[] ~manifest:(Recover.default_type ())\n          ~cstrs:[] (Location.mknoloc \"\") )\n\nand parse_module_type_of p =\n  let start_pos = p.Parser.start_pos in\n  Parser.expect Module p;\n  Parser.expect Typ p;\n  Parser.expect Of p;\n  let module_expr = parse_module_expr p in\n  Ast_helper.Mty.typeof_ ~loc:(mk_loc start_pos p.prev_end_pos) module_expr\n\nand parse_newline_or_semicolon_signature p =\n  match p.Parser.token with\n  | Semicolon -> Parser.next p\n  | token when Grammar.is_signature_item_start token ->\n    if p.prev_end_pos.pos_lnum < p.start_pos.pos_lnum then ()\n    else\n      Parser.err ~start_pos:p.prev_end_pos ~end_pos:p.end_pos p\n        (Diagnostics.message\n           \"consecutive specifications on a line must be separated by ';' or a \\\n            newline\")\n  | _ -> ()\n\nand parse_signature_item_region p =\n  let start_pos = p.Parser.start_pos in\n  let attrs = parse_attributes p in\n  match p.Parser.token with\n  | Let ->\n    Parser.begin_region p;\n    let value_desc = parse_sign_let_desc ~attrs p in\n    parse_newline_or_semicolon_signature p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Parser.end_region 
p;\n    Some (Ast_helper.Sig.value ~loc value_desc)\n  | Typ -> (\n    Parser.begin_region p;\n    match parse_type_definition_or_extension ~attrs p with\n    | TypeDef {rec_flag; types} ->\n      parse_newline_or_semicolon_signature p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Parser.end_region p;\n      Some (Ast_helper.Sig.type_ ~loc rec_flag types)\n    | TypeExt ext ->\n      parse_newline_or_semicolon_signature p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Parser.end_region p;\n      Some (Ast_helper.Sig.type_extension ~loc ext))\n  | External ->\n    let external_def = parse_external_def ~attrs ~start_pos p in\n    parse_newline_or_semicolon_signature p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Sig.value ~loc external_def)\n  | Exception ->\n    let exception_def = parse_exception_def ~attrs p in\n    parse_newline_or_semicolon_signature p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Sig.exception_ ~loc exception_def)\n  | Open ->\n    let open_description = parse_open_description ~attrs p in\n    parse_newline_or_semicolon_signature p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Sig.open_ ~loc open_description)\n  | Include ->\n    Parser.next p;\n    let module_type = parse_module_type p in\n    let include_description =\n      Ast_helper.Incl.mk\n        ~loc:(mk_loc start_pos p.prev_end_pos)\n        ~attrs module_type\n    in\n    parse_newline_or_semicolon_signature p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Sig.include_ ~loc include_description)\n  | Module -> (\n    Parser.begin_region p;\n    Parser.next p;\n    match p.Parser.token with\n    | Uident _ ->\n      let mod_decl = parse_module_declaration_or_alias ~attrs p in\n      parse_newline_or_semicolon_signature p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Parser.end_region p;\n      Some (Ast_helper.Sig.module_ ~loc mod_decl)\n  
  | Rec ->\n      let rec_module = parse_rec_module_spec ~attrs ~start_pos p in\n      parse_newline_or_semicolon_signature p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Parser.end_region p;\n      Some (Ast_helper.Sig.rec_module ~loc rec_module)\n    | Typ ->\n      let mod_type_decl = parse_module_type_declaration ~attrs ~start_pos p in\n      Parser.end_region p;\n      Some mod_type_decl\n    | _t ->\n      let mod_decl = parse_module_declaration_or_alias ~attrs p in\n      parse_newline_or_semicolon_signature p;\n      let loc = mk_loc start_pos p.prev_end_pos in\n      Parser.end_region p;\n      Some (Ast_helper.Sig.module_ ~loc mod_decl))\n  | AtAt ->\n    let attr = parse_standalone_attribute p in\n    parse_newline_or_semicolon_signature p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Sig.attribute ~loc attr)\n  | ModuleComment (loc, s) ->\n    Parser.next p;\n    Some\n      (Ast_helper.Sig.attribute ~loc\n         ( {txt = \"res.doc\"; loc},\n           PStr\n             [\n               Ast_helper.Str.eval ~loc\n                 (Ast_helper.Exp.constant ~loc (Pconst_string (s, None)));\n             ] ))\n  | PercentPercent ->\n    let extension = parse_extension ~module_language:true p in\n    parse_newline_or_semicolon_signature p;\n    let loc = mk_loc start_pos p.prev_end_pos in\n    Some (Ast_helper.Sig.extension ~attrs ~loc extension)\n  | _ -> (\n    match attrs with\n    | (({Asttypes.loc = attr_loc}, _) as attr) :: _ ->\n      Parser.err ~start_pos:attr_loc.loc_start ~end_pos:attr_loc.loc_end p\n        (Diagnostics.message (ErrorMessages.attribute_without_node attr));\n      Some Recover.default_signature_item\n    | _ -> None)\n[@@progress Parser.next, Parser.expect, LoopProgress.list_rest]\n\n(* module rec module-name :  module-type  { and module-name:  module-type } *)\nand parse_rec_module_spec ~attrs ~start_pos p =\n  Parser.expect Rec p;\n  let rec loop p spec =\n    let start_pos = 
p.Parser.start_pos in\n    let attrs = parse_attributes_and_binding p in\n    match p.Parser.token with\n    | And ->\n      (* TODO: give a good error message when with constraint, no parens\n       * and ASet: (Set.S with type elt = A.t)\n       * and BTree: (Btree.S with type elt = A.t)\n       * Without parens, the `and` signals the start of another\n       * `with-constraint`\n       *)\n      Parser.expect And p;\n      let decl = parse_rec_module_declaration ~attrs ~start_pos p in\n      loop p (decl :: spec)\n    | _ -> List.rev spec\n  in\n  let first = parse_rec_module_declaration ~attrs ~start_pos p in\n  loop p [first]\n\n(* module-name : module-type *)\nand parse_rec_module_declaration ~attrs ~start_pos p =\n  let name =\n    match p.Parser.token with\n    | Uident mod_name ->\n      let loc = mk_loc p.start_pos p.end_pos in\n      Parser.next p;\n      Location.mkloc mod_name loc\n    | t ->\n      Parser.err p (Diagnostics.uident t);\n      Location.mknoloc \"_\"\n  in\n  Parser.expect Colon p;\n  let mod_type = parse_module_type p in\n  Ast_helper.Md.mk ~loc:(mk_loc start_pos p.prev_end_pos) ~attrs name mod_type\n\nand parse_module_declaration_or_alias ~attrs p =\n  let start_pos = p.Parser.start_pos in\n  let module_name =\n    match p.Parser.token with\n    | Uident ident ->\n      let loc = mk_loc p.Parser.start_pos p.end_pos in\n      Parser.next p;\n      Location.mkloc ident loc\n    | t ->\n      Parser.err p (Diagnostics.uident t);\n      Location.mknoloc \"_\"\n  in\n  let body =\n    match p.Parser.token with\n    | Colon ->\n      Parser.next p;\n      parse_module_type p\n    | Equal ->\n      Parser.next p;\n      let lident = parse_module_long_ident ~lowercase:false p in\n      Ast_helper.Mty.alias lident\n    | token ->\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      Recover.default_module_type ()\n  in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Md.mk ~loc ~attrs module_name body\n\nand 
parse_module_type_declaration ~attrs ~start_pos p =\n  Parser.expect Typ p;\n  let module_name =\n    match p.Parser.token with\n    | Uident ident ->\n      let loc = mk_loc p.start_pos p.end_pos in\n      Parser.next p;\n      Location.mkloc ident loc\n    | Lident ident ->\n      let loc = mk_loc p.start_pos p.end_pos in\n      Parser.next p;\n      Location.mkloc ident loc\n    | t ->\n      Parser.err p (Diagnostics.uident t);\n      Location.mknoloc \"_\"\n  in\n  let typ =\n    match p.Parser.token with\n    | Equal ->\n      Parser.next p;\n      Some (parse_module_type p)\n    | _ -> None\n  in\n  let module_decl = Ast_helper.Mtd.mk ~attrs ?typ module_name in\n  Ast_helper.Sig.modtype ~loc:(mk_loc start_pos p.prev_end_pos) module_decl\n\nand parse_sign_let_desc ~attrs p =\n  let start_pos = p.Parser.start_pos in\n  Parser.optional p Let |> ignore;\n  let name, loc = parse_lident p in\n  let name = Location.mkloc name loc in\n  Parser.expect Colon p;\n  let typ_expr = parse_poly_type_expr p in\n  let loc = mk_loc start_pos p.prev_end_pos in\n  Ast_helper.Val.mk ~loc ~attrs name typ_expr\n\n(* attr-id\t::=\tlowercase-ident\n   ∣\t  capitalized-ident\n   ∣\t  attr-id .  
attr-id *)\nand parse_attribute_id ~start_pos p =\n  let rec loop p acc =\n    match p.Parser.token with\n    | Lident ident | Uident ident -> (\n      Parser.next p;\n      let id = acc ^ ident in\n      match p.Parser.token with\n      | Dot ->\n        Parser.next p;\n        loop p (id ^ \".\")\n      | _ -> id)\n    | token when Token.is_keyword token -> (\n      Parser.next p;\n      let id = acc ^ Token.to_string token in\n      match p.Parser.token with\n      | Dot ->\n        Parser.next p;\n        loop p (id ^ \".\")\n      | _ -> id)\n    | token ->\n      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);\n      acc\n  in\n  let id = loop p \"\" in\n  let end_pos = p.prev_end_pos in\n  Location.mkloc id (mk_loc start_pos end_pos)\n\n(*\n * payload ::=  empty\n *          |  ( structure-item )\n *\n * TODO: what about multiple structure items?\n * @attr({let x = 1; let x = 2})\n *\n * Also what about type-expressions and specifications?\n * @attr(:myType) ???\n *)\nand parse_payload p =\n  match p.Parser.token with\n  | Lparen when p.start_pos.pos_cnum = p.prev_end_pos.pos_cnum -> (\n    Parser.leave_breadcrumb p Grammar.AttributePayload;\n    Parser.next p;\n    match p.token with\n    | Colon ->\n      Parser.next p;\n      let payload =\n        if Grammar.is_signature_item_start p.token then\n          Parsetree.PSig\n            (parse_delimited_region ~grammar:Grammar.Signature ~closing:Rparen\n               ~f:parse_signature_item_region p)\n        else Parsetree.PTyp (parse_typ_expr p)\n      in\n      Parser.expect Rparen p;\n      Parser.eat_breadcrumb p;\n      payload\n    | Question ->\n      Parser.next p;\n      let pattern = parse_pattern p in\n      let expr =\n        match p.token with\n        | When | If ->\n          Parser.next p;\n          Some (parse_expr p)\n        | _ -> None\n      in\n      Parser.expect Rparen p;\n      Parser.eat_breadcrumb p;\n      Parsetree.PPat (pattern, expr)\n    | _ ->\n      let items 
=\n        parse_delimited_region ~grammar:Grammar.Structure ~closing:Rparen\n          ~f:parse_structure_item_region p\n      in\n      Parser.expect Rparen p;\n      Parser.eat_breadcrumb p;\n      Parsetree.PStr items)\n  | _ -> Parsetree.PStr []\n\n(* type attribute = string loc * payload *)\nand parse_attribute p =\n  match p.Parser.token with\n  | At ->\n    let start_pos = p.start_pos in\n    Parser.next p;\n    let attr_id = parse_attribute_id ~start_pos p in\n    let payload = parse_payload p in\n    Some (attr_id, payload)\n  | DocComment (loc, s) ->\n    Parser.next p;\n    Some (doc_comment_to_attribute loc s)\n  | _ -> None\n\nand doc_comment_to_attribute loc s : Parsetree.attribute =\n  ( {txt = \"res.doc\"; loc},\n    PStr\n      [\n        Ast_helper.Str.eval ~loc\n          (Ast_helper.Exp.constant ~loc (Pconst_string (s, None)));\n      ] )\n\nand parse_attributes p =\n  parse_region p ~grammar:Grammar.Attribute ~f:parse_attribute\n\n(*\n * standalone-attribute ::=\n *  | @@ atribute-id\n *  | @@ attribute-id ( structure-item )\n *)\nand parse_standalone_attribute p =\n  let start_pos = p.start_pos in\n  Parser.expect AtAt p;\n  let attr_id = parse_attribute_id ~start_pos p in\n  let attr_id =\n    match attr_id.txt with\n    | \"uncurried.swap\" ->\n      p.uncurried_config <- Config.Swap;\n      attr_id\n    | \"uncurried\" ->\n      p.uncurried_config <- Config.Uncurried;\n      attr_id\n    | _ -> attr_id\n  in\n  let payload = parse_payload p in\n  (attr_id, payload)\n\n(* extension\t::=\t% attr-id  attr-payload\n *              | %% attr-id(\n *  expr\t::=\t ...\n *    ∣\t extension\n *\n *  typexpr\t::=\t ...\n *    ∣\t extension\n *\n *  pattern\t::=\t ...\n *    ∣\t extension\n *\n *  module-expr\t::=\t ...\n *    ∣\t extension\n *\n *  module-type\t::=\t ...\n *    ∣\t extension\n *\n *  class-expr\t::=\t ...\n *    ∣\t extension\n *\n *  class-type\t::=\t ...\n *    ∣\t extension\n *\n *\n * item extension nodes usable in structures 
and signature\n *\n * item-extension ::= %% attr-id\n *                  | %% attr-id(structure-item)\n *\n *  attr-payload ::= structure-item\n *\n *  ~moduleLanguage represents whether we're on the module level or not\n *)\nand parse_extension ?(module_language = false) p =\n  let start_pos = p.Parser.start_pos in\n  if module_language then Parser.expect PercentPercent p\n  else Parser.expect Percent p;\n  let attr_id = parse_attribute_id ~start_pos p in\n  let payload = parse_payload p in\n  (attr_id, payload)\n\n(* module signature on the file level *)\nlet parse_specification p : Parsetree.signature =\n  parse_region p ~grammar:Grammar.Specification ~f:parse_signature_item_region\n\n(* module structure on the file level *)\nlet parse_implementation p : Parsetree.structure =\n  parse_region p ~grammar:Grammar.Implementation ~f:parse_structure_item_region\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_core.mli",
    "content": "val parse_implementation : Res_parser.t -> Parsetree.structure\nval parse_specification : Res_parser.t -> Parsetree.signature\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_diagnostics.ml",
    "content": "module Grammar = Res_grammar\nmodule Token = Res_token\n\ntype category =\n  | Unexpected of {token: Token.t; context: (Grammar.t * Lexing.position) list}\n  | Expected of {\n      context: Grammar.t option;\n      pos: Lexing.position; (* prev token end*)\n      token: Token.t;\n    }\n  | Message of string\n  | Uident of Token.t\n  | Lident of Token.t\n  | UnclosedString\n  | UnclosedTemplate\n  | UnclosedComment\n  | UnknownUchar of Char.t\n\ntype t = {\n  start_pos: Lexing.position;\n  end_pos: Lexing.position;\n  category: category;\n}\n\ntype report = t list\n\nlet get_start_pos t = t.start_pos\nlet get_end_pos t = t.end_pos\n\nlet default_unexpected token =\n  \"I'm not sure what to parse here when looking at \\\"\" ^ Token.to_string token\n  ^ \"\\\".\"\n\nlet reserved_keyword token =\n  let token_txt = Token.to_string token in\n  \"`\" ^ token_txt ^ \"` is a reserved keyword. Keywords need to be escaped: \\\\\\\"\"\n  ^ token_txt ^ \"\\\"\"\n\nlet explain t =\n  match t.category with\n  | Uident current_token -> (\n    match current_token with\n    | Lident lident ->\n      let guess = String.capitalize_ascii lident in\n      \"Did you mean `\" ^ guess ^ \"` instead of `\" ^ lident ^ \"`?\"\n    | t when Token.is_keyword t ->\n      let token = Token.to_string t in\n      \"`\" ^ token ^ \"` is a reserved keyword.\"\n    | _ ->\n      \"At this point, I'm looking for an uppercased name like `Belt` or `Array`\"\n    )\n  | Lident current_token -> (\n    match current_token with\n    | Uident uident ->\n      let guess = String.uncapitalize_ascii uident in\n      \"Did you mean `\" ^ guess ^ \"` instead of `\" ^ uident ^ \"`?\"\n    | t when Token.is_keyword t ->\n      let token = Token.to_string t in\n      \"`\" ^ token ^ \"` is a reserved keyword. 
Keywords need to be escaped: \\\\\\\"\"\n      ^ token ^ \"\\\"\"\n    | Underscore -> \"`_` isn't a valid name.\"\n    | _ -> \"I'm expecting a lowercase name like `user or `age`\")\n  | Message txt -> txt\n  | UnclosedString -> \"This string is missing a double quote at the end\"\n  | UnclosedTemplate ->\n    \"Did you forget to close this template expression with a backtick?\"\n  | UnclosedComment -> \"This comment seems to be missing a closing `*/`\"\n  | UnknownUchar uchar -> (\n    match uchar with\n    | '^' ->\n      \"Not sure what to do with this character.\\n\"\n      ^ \"  If you're trying to dereference a mutable value, use \\\n         `myValue.contents` instead.\\n\"\n      ^ \"  To concatenate strings, use `\\\"a\\\" ++ \\\"b\\\"` instead.\"\n    | _ -> \"Not sure what to do with this character.\")\n  | Expected {context; token = t} ->\n    let hint =\n      match context with\n      | Some grammar -> \" It signals the start of \" ^ Grammar.to_string grammar\n      | None -> \"\"\n    in\n    \"Did you forget a `\" ^ Token.to_string t ^ \"` here?\" ^ hint\n  | Unexpected {token = t; context = breadcrumbs} -> (\n    let name = Token.to_string t in\n    match breadcrumbs with\n    | (AtomicTypExpr, _) :: breadcrumbs -> (\n      match (breadcrumbs, t) with\n      | ( ((StringFieldDeclarations | FieldDeclarations), _) :: _,\n          (String _ | At | Rbrace | Comma | Eof) ) ->\n        \"I'm missing a type here\"\n      | _, t when Grammar.is_structure_item_start t || t = Eof ->\n        \"Missing a type here\"\n      | _ -> default_unexpected t)\n    | (ExprOperand, _) :: breadcrumbs -> (\n      match (breadcrumbs, t) with\n      | (ExprBlock, _) :: _, Rbrace ->\n        \"It seems that this expression block is empty\"\n      | (ExprBlock, _) :: _, Bar ->\n        (* Pattern matching *)\n        \"Looks like there might be an expression missing here\"\n      | (ExprSetField, _) :: _, _ ->\n        \"It seems that this record field mutation misses an 
expression\"\n      | (ExprArrayMutation, _) :: _, _ ->\n        \"Seems that an expression is missing, with what do I mutate the array?\"\n      | ((ExprBinaryAfterOp _ | ExprUnary), _) :: _, _ ->\n        \"Did you forget to write an expression here?\"\n      | (Grammar.LetBinding, _) :: _, _ ->\n        \"This let-binding misses an expression\"\n      | _ :: _, (Rbracket | Rbrace | Eof) -> \"Missing expression\"\n      | _ -> \"I'm not sure what to parse here when looking at \\\"\" ^ name ^ \"\\\".\"\n      )\n    | (TypeParam, _) :: _ -> (\n      match t with\n      | Lident ident ->\n        \"Did you mean '\" ^ ident ^ \"? A Type parameter starts with a quote.\"\n      | _ -> \"I'm not sure what to parse here when looking at \\\"\" ^ name ^ \"\\\".\"\n      )\n    | (Pattern, _) :: breadcrumbs -> (\n      match (t, breadcrumbs) with\n      | Equal, (LetBinding, _) :: _ ->\n        \"I was expecting a name for this let-binding. Example: `let message = \\\n         \\\"hello\\\"`\"\n      | In, (ExprFor, _) :: _ ->\n        \"A for-loop has the following form: `for i in 0 to 10`. Did you forget \\\n         to supply a name before `in`?\"\n      | EqualGreater, (PatternMatchCase, _) :: _ ->\n        \"I was expecting a pattern to match on before the `=>`\"\n      | token, _ when Token.is_keyword t -> reserved_keyword token\n      | token, _ -> default_unexpected token)\n    | _ ->\n      (* TODO: match on circumstance to verify Lident needed ? *)\n      if Token.is_keyword t then\n        \"`\" ^ name\n        ^ \"` is a reserved keyword. 
Keywords need to be escaped: \\\\\\\"\"\n        ^ Token.to_string t ^ \"\\\"\"\n      else \"I'm not sure what to parse here when looking at \\\"\" ^ name ^ \"\\\".\")\n\nlet make ~start_pos ~end_pos category = {start_pos; end_pos; category}\n\nlet print_report diagnostics src =\n  let rec print diagnostics src =\n    match diagnostics with\n    | [] -> ()\n    | d :: rest ->\n      Location.report_error ~src:(Some src) Format.err_formatter\n        Location.\n          {\n            loc =\n              {loc_start = d.start_pos; loc_end = d.end_pos; loc_ghost = false};\n            msg = explain d;\n            sub = [];\n            if_highlight = \"\";\n          };\n      (match rest with\n      | [] -> ()\n      | _ -> Format.fprintf Format.err_formatter \"@.\");\n      print rest src\n  in\n  Format.fprintf Format.err_formatter \"@[<v>\";\n  print (List.rev diagnostics) src;\n  Format.fprintf Format.err_formatter \"@]@.\"\n\nlet unexpected token context = Unexpected {token; context}\n\nlet expected ?grammar pos token = Expected {context = grammar; pos; token}\n\nlet uident current_token = Uident current_token\nlet lident current_token = Lident current_token\nlet unclosed_string = UnclosedString\nlet unclosed_comment = UnclosedComment\nlet unclosed_template = UnclosedTemplate\nlet unknown_uchar code = UnknownUchar code\nlet message txt = Message txt\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_diagnostics.mli",
    "content": "module Token = Res_token\nmodule Grammar = Res_grammar\n\ntype t\ntype category\ntype report\n\nval get_start_pos : t -> Lexing.position [@@live] (* for playground *)\nval get_end_pos : t -> Lexing.position [@@live] (* for playground *)\n\nval explain : t -> string [@@live] (* for playground *)\n\nval unexpected : Token.t -> (Grammar.t * Lexing.position) list -> category\nval expected : ?grammar:Grammar.t -> Lexing.position -> Token.t -> category\nval uident : Token.t -> category\nval lident : Token.t -> category\nval unclosed_string : category\nval unclosed_template : category\nval unclosed_comment : category\nval unknown_uchar : Char.t -> category\nval message : string -> category\n\nval make : start_pos:Lexing.position -> end_pos:Lexing.position -> category -> t\n\nval print_report : t list -> string -> unit\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_doc.ml",
    "content": "module MiniBuffer = Res_minibuffer\n\ntype mode = Break | Flat\n\ntype line_style =\n  | Classic (* fits? -> replace with space *)\n  | Soft (* fits? -> replaced with nothing *)\n  | Hard\n  (* always included, forces breaks in parents *)\n  (* always included, forces breaks in parents, but doesn't increase indentation\n     use case: template literals, multiline string content *)\n  | Literal\n\ntype t =\n  | Nil\n  | Text of string\n  | Concat of t list\n  | Indent of t\n  | IfBreaks of {yes: t; no: t; mutable broken: bool}\n    (* when broken is true, treat as the yes branch *)\n  | LineSuffix of t\n  | LineBreak of line_style\n  | Group of {mutable should_break: bool; doc: t}\n  | CustomLayout of t list\n  | BreakParent\n\nlet nil = Nil\nlet line = LineBreak Classic\nlet hard_line = LineBreak Hard\nlet soft_line = LineBreak Soft\nlet literal_line = LineBreak Literal\nlet text s = Text s\n\n(* Optimization. We eagerly collapse and reduce whatever allocation we can *)\nlet rec _concat acc l =\n  match l with\n  | Text s1 :: Text s2 :: rest -> Text (s1 ^ s2) :: _concat acc rest\n  | Nil :: rest -> _concat acc rest\n  | Concat l2 :: rest ->\n    _concat (_concat acc rest) l2 (* notice the order here *)\n  | x :: rest ->\n    let rest1 = _concat acc rest in\n    if rest1 == rest then l else x :: rest1\n  | [] -> acc\n\nlet concat l = Concat (_concat [] l)\n\nlet indent d = Indent d\nlet if_breaks t f = IfBreaks {yes = t; no = f; broken = false}\nlet line_suffix d = LineSuffix d\nlet group d = Group {should_break = false; doc = d}\nlet breakable_group ~force_break d = Group {should_break = force_break; doc = d}\nlet custom_layout gs = CustomLayout gs\nlet break_parent = BreakParent\n\nlet space = Text \" \"\nlet comma = Text \",\"\nlet dot = Text \".\"\nlet dotdot = Text \"..\"\nlet dotdotdot = Text \"...\"\nlet less_than = Text \"<\"\nlet greater_than = Text \">\"\nlet lbrace = Text \"{\"\nlet rbrace = Text \"}\"\nlet lparen = Text \"(\"\nlet rparen 
= Text \")\"\nlet lbracket = Text \"[\"\nlet rbracket = Text \"]\"\nlet question = Text \"?\"\nlet tilde = Text \"~\"\nlet equal = Text \"=\"\nlet trailing_comma = if_breaks comma nil\nlet double_quote = Text \"\\\"\"\n\nlet propagate_forced_breaks doc =\n  let rec walk doc =\n    match doc with\n    | Text _ | Nil | LineSuffix _ -> false\n    | BreakParent -> true\n    | LineBreak (Hard | Literal) -> true\n    | LineBreak (Classic | Soft) -> false\n    | Indent children ->\n      let child_forces_break = walk children in\n      child_forces_break\n    | IfBreaks ({yes = true_doc; no = false_doc} as ib) ->\n      let false_force_break = walk false_doc in\n      if false_force_break then (\n        let _ = walk true_doc in\n        ib.broken <- true;\n        true)\n      else\n        let force_break = walk true_doc in\n        force_break\n    | Group ({should_break = force_break; doc = children} as gr) ->\n      let child_forces_break = walk children in\n      let should_break = force_break || child_forces_break in\n      gr.should_break <- should_break;\n      should_break\n    | Concat children ->\n      List.fold_left\n        (fun force_break child ->\n          let child_forces_break = walk child in\n          force_break || child_forces_break)\n        false children\n    | CustomLayout children ->\n      (* When using CustomLayout, we don't want to propagate forced breaks\n       * from the children up. By definition it picks the first layout that fits\n       * otherwise it takes the last of the list.\n       * However we do want to propagate forced breaks in the sublayouts. They\n       * might need to be broken. 
We just don't propagate them any higher here *)\n      let _ = walk (Concat children) in\n      false\n  in\n  let _ = walk doc in\n  ()\n\n(* See documentation in interface file *)\nlet rec will_break doc =\n  match doc with\n  | LineBreak (Hard | Literal) | BreakParent | Group {should_break = true} ->\n    true\n  | Group {doc} | Indent doc | CustomLayout (doc :: _) -> will_break doc\n  | Concat docs -> List.exists will_break docs\n  | IfBreaks {yes; no} -> will_break yes || will_break no\n  | _ -> false\n\nlet join ~sep docs =\n  let rec loop acc sep docs =\n    match docs with\n    | [] -> List.rev acc\n    | [x] -> List.rev (x :: acc)\n    | x :: xs -> loop (sep :: x :: acc) sep xs\n  in\n  concat (loop [] sep docs)\n\nlet join_with_sep docs_with_sep =\n  let rec loop acc docs =\n    match docs with\n    | [] -> List.rev acc\n    | [(x, _sep)] -> List.rev (x :: acc)\n    | (x, sep) :: xs -> loop (sep :: x :: acc) xs\n  in\n  concat (loop [] docs_with_sep)\n\nlet fits w stack =\n  let width = ref w in\n  let result = ref None in\n\n  let rec calculate indent mode doc =\n    match (mode, doc) with\n    | _ when result.contents != None -> ()\n    | _ when width.contents < 0 -> result := Some false\n    | _, Nil | _, LineSuffix _ | _, BreakParent -> ()\n    | _, Text txt -> width := width.contents - String.length txt\n    | _, Indent doc -> calculate (indent + 2) mode doc\n    | Flat, LineBreak Hard | Flat, LineBreak Literal -> result := Some true\n    | Flat, LineBreak Classic -> width := width.contents - 1\n    | Flat, LineBreak Soft -> ()\n    | Break, LineBreak _ -> result := Some true\n    | _, Group {should_break = true; doc} -> calculate indent Break doc\n    | _, Group {doc} -> calculate indent mode doc\n    | _, IfBreaks {yes = break_doc; broken = true} ->\n      calculate indent mode break_doc\n    | Break, IfBreaks {yes = break_doc} -> calculate indent mode break_doc\n    | Flat, IfBreaks {no = flat_doc} -> calculate indent mode flat_doc\n    | _, 
Concat docs -> calculate_concat indent mode docs\n    | _, CustomLayout (hd :: _) ->\n      (* TODO: if we have nested custom layouts, what we should do here? *)\n      calculate indent mode hd\n    | _, CustomLayout [] -> ()\n  and calculate_concat indent mode docs =\n    if result.contents == None then\n      match docs with\n      | [] -> ()\n      | doc :: rest ->\n        calculate indent mode doc;\n        calculate_concat indent mode rest\n  in\n  let rec calculate_all stack =\n    match (result.contents, stack) with\n    | Some r, _ -> r\n    | None, [] -> !width >= 0\n    | None, (indent, mode, doc) :: rest ->\n      calculate indent mode doc;\n      calculate_all rest\n  in\n  calculate_all stack\n\nlet to_string ~width doc =\n  propagate_forced_breaks doc;\n  let buffer = MiniBuffer.create 1000 in\n\n  let rec process ~pos line_suffices stack =\n    match stack with\n    | ((ind, mode, doc) as cmd) :: rest -> (\n      match doc with\n      | Nil | BreakParent -> process ~pos line_suffices rest\n      | Text txt ->\n        MiniBuffer.add_string buffer txt;\n        process ~pos:(String.length txt + pos) line_suffices rest\n      | LineSuffix doc -> process ~pos ((ind, mode, doc) :: line_suffices) rest\n      | Concat docs ->\n        let ops = List.map (fun doc -> (ind, mode, doc)) docs in\n        process ~pos line_suffices (List.append ops rest)\n      | Indent doc -> process ~pos line_suffices ((ind + 2, mode, doc) :: rest)\n      | IfBreaks {yes = break_doc; broken = true} ->\n        process ~pos line_suffices ((ind, mode, break_doc) :: rest)\n      | IfBreaks {yes = break_doc; no = flat_doc} ->\n        if mode = Break then\n          process ~pos line_suffices ((ind, mode, break_doc) :: rest)\n        else process ~pos line_suffices ((ind, mode, flat_doc) :: rest)\n      | LineBreak line_style ->\n        if mode = Break then\n          match line_suffices with\n          | [] ->\n            if line_style = Literal then (\n              
MiniBuffer.add_char buffer '\\n';\n              process ~pos:0 [] rest)\n            else (\n              MiniBuffer.flush_newline buffer;\n              MiniBuffer.add_string buffer (String.make ind ' ' [@doesNotRaise]);\n              process ~pos:ind [] rest)\n          | _docs ->\n            process ~pos:ind []\n              (List.concat [List.rev line_suffices; cmd :: rest])\n        else\n          (* mode = Flat *)\n          let pos =\n            match line_style with\n            | Classic ->\n              MiniBuffer.add_string buffer \" \";\n              pos + 1\n            | Hard ->\n              MiniBuffer.flush_newline buffer;\n              0\n            | Literal ->\n              MiniBuffer.add_char buffer '\\n';\n              0\n            | Soft -> pos\n          in\n          process ~pos line_suffices rest\n      | Group {should_break; doc} ->\n        if should_break || not (fits (width - pos) ((ind, Flat, doc) :: rest))\n        then process ~pos line_suffices ((ind, Break, doc) :: rest)\n        else process ~pos line_suffices ((ind, Flat, doc) :: rest)\n      | CustomLayout docs ->\n        let rec find_group_that_fits groups =\n          match groups with\n          | [] -> Nil\n          | [last_group] -> last_group\n          | doc :: docs ->\n            if fits (width - pos) ((ind, Flat, doc) :: rest) then doc\n            else find_group_that_fits docs\n        in\n        let doc = find_group_that_fits docs in\n        process ~pos line_suffices ((ind, Flat, doc) :: rest))\n    | [] -> (\n      match line_suffices with\n      | [] -> ()\n      | suffices -> process ~pos:0 [] (List.rev suffices))\n  in\n  process ~pos:0 [] [(0, Flat, doc)];\n  MiniBuffer.contents buffer\n\nlet debug t =\n  let rec to_doc = function\n    | Nil -> text \"nil\"\n    | BreakParent -> text \"breakparent\"\n    | Text txt -> text (\"text(\\\"\" ^ txt ^ \"\\\")\")\n    | LineSuffix doc ->\n      group\n        (concat\n           [\n             
text \"linesuffix(\";\n             indent (concat [line; to_doc doc]);\n             line;\n             text \")\";\n           ])\n    | Concat [] -> text \"concat()\"\n    | Concat docs ->\n      group\n        (concat\n           [\n             text \"concat(\";\n             indent\n               (concat\n                  [\n                    line;\n                    join ~sep:(concat [text \",\"; line]) (List.map to_doc docs);\n                  ]);\n             line;\n             text \")\";\n           ])\n    | CustomLayout docs ->\n      group\n        (concat\n           [\n             text \"customLayout(\";\n             indent\n               (concat\n                  [\n                    line;\n                    join ~sep:(concat [text \",\"; line]) (List.map to_doc docs);\n                  ]);\n             line;\n             text \")\";\n           ])\n    | Indent doc ->\n      concat [text \"indent(\"; soft_line; to_doc doc; soft_line; text \")\"]\n    | IfBreaks {yes = true_doc; broken = true} -> to_doc true_doc\n    | IfBreaks {yes = true_doc; no = false_doc} ->\n      group\n        (concat\n           [\n             text \"ifBreaks(\";\n             indent\n               (concat\n                  [\n                    line;\n                    to_doc true_doc;\n                    concat [text \",\"; line];\n                    to_doc false_doc;\n                  ]);\n             line;\n             text \")\";\n           ])\n    | LineBreak break ->\n      let break_txt =\n        match break with\n        | Classic -> \"Classic\"\n        | Soft -> \"Soft\"\n        | Hard -> \"Hard\"\n        | Literal -> \"Liteal\"\n      in\n      text (\"LineBreak(\" ^ break_txt ^ \")\")\n    | Group {should_break; doc} ->\n      group\n        (concat\n           [\n             text \"Group(\";\n             indent\n               (concat\n                  [\n                    line;\n                    text 
(\"{shouldBreak: \" ^ string_of_bool should_break ^ \"}\");\n                    concat [text \",\"; line];\n                    to_doc doc;\n                  ]);\n             line;\n             text \")\";\n           ])\n  in\n  let doc = to_doc t in\n  to_string ~width:10 doc |> print_endline\n[@@live]\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_doc.mli",
    "content": "type t\n\nval nil : t\nval line : t\nval hard_line : t\nval soft_line : t\nval literal_line : t\nval text : string -> t\nval concat : t list -> t\nval indent : t -> t\nval if_breaks : t -> t -> t\nval line_suffix : t -> t\nval group : t -> t\nval breakable_group : force_break:bool -> t -> t\n\n(* `customLayout docs` will pick the layout that fits from `docs`.\n * This is a very expensive computation as every layout from the list\n * will be checked until one fits. *)\nval custom_layout : t list -> t\nval break_parent : t\nval join : sep:t -> t list -> t\n\n(* [(doc1, sep1); (doc2,sep2)] joins as doc1 sep1 doc2 *)\nval join_with_sep : (t * t) list -> t\n\nval space : t\nval comma : t\nval dot : t\nval dotdot : t\nval dotdotdot : t\nval less_than : t\nval greater_than : t\nval lbrace : t\nval rbrace : t\nval lparen : t\nval rparen : t\nval lbracket : t\nval rbracket : t\nval question : t\nval tilde : t\nval equal : t\nval trailing_comma : t\nval double_quote : t [@@live]\n\n(*\n * `willBreak doc` checks whether `doc` contains forced line breaks.\n * This is more or less a \"workaround\" to make the parent of a `customLayout` break.\n * Forced breaks are not propagated through `customLayout`; otherwise we would always\n * get the last layout the algorithm tries…\n * This might result into some weird layouts:\n *  [fn(x => {\n *     let _ = x\n *   }), fn(y => {\n *     let _ = y\n *   }), fn(z => {\n *     let _ = z\n *   })]\n *  The `[` and `]` would be a lot better broken out.\n *  Although the layout of `fn(x => {...})` is correct, we need to break its parent (the array).\n *  `willBreak` can be used in this scenario to check if the `fn…` contains any forced breaks.\n *  The consumer can then manually insert a `breakParent` doc, to manually propagate the\n *  force breaks from bottom to top.\n *)\nval will_break : t -> bool\n\nval to_string : width:int -> t -> string\nval debug : t -> unit [@@live]\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_driver.ml",
    "content": "module IO = Res_io\n\ntype ('ast, 'diagnostics) parse_result = {\n  filename: string; [@live]\n  source: string;\n  parsetree: 'ast;\n  diagnostics: 'diagnostics;\n  invalid: bool;\n  comments: Res_comment.t list;\n}\n\ntype 'diagnostics parsing_engine = {\n  parse_implementation:\n    for_printer:bool ->\n    filename:string ->\n    (Parsetree.structure, 'diagnostics) parse_result;\n  parse_interface:\n    for_printer:bool ->\n    filename:string ->\n    (Parsetree.signature, 'diagnostics) parse_result;\n  string_of_diagnostics:\n    source:string -> filename:string -> 'diagnostics -> unit;\n}\n\ntype print_engine = {\n  print_implementation:\n    width:int ->\n    filename:string ->\n    comments:Res_comment.t list ->\n    Parsetree.structure ->\n    unit;\n  print_interface:\n    width:int ->\n    filename:string ->\n    comments:Res_comment.t list ->\n    Parsetree.signature ->\n    unit;\n}\n\nlet setup ~filename ~for_printer () =\n  let src = IO.read_file ~filename in\n  let mode = if for_printer then Res_parser.Default else ParseForTypeChecker in\n  Res_parser.make ~mode src filename\n\nlet setup_from_source ~display_filename ~source ~for_printer () =\n  let mode = if for_printer then Res_parser.Default else ParseForTypeChecker in\n  Res_parser.make ~mode source display_filename\n\nlet parsing_engine =\n  {\n    parse_implementation =\n      (fun ~for_printer ~filename ->\n        let engine = setup ~filename ~for_printer () in\n        let structure = Res_core.parse_implementation engine in\n        let invalid, diagnostics =\n          match engine.diagnostics with\n          | [] as diagnostics -> (false, diagnostics)\n          | _ as diagnostics -> (true, diagnostics)\n        in\n        {\n          filename = engine.scanner.filename;\n          source = engine.scanner.src;\n          parsetree = structure;\n          diagnostics;\n          invalid;\n          comments = List.rev engine.comments;\n        });\n    parse_interface =\n  
    (fun ~for_printer ~filename ->\n        let engine = setup ~filename ~for_printer () in\n        let signature = Res_core.parse_specification engine in\n        let invalid, diagnostics =\n          match engine.diagnostics with\n          | [] as diagnostics -> (false, diagnostics)\n          | _ as diagnostics -> (true, diagnostics)\n        in\n        {\n          filename = engine.scanner.filename;\n          source = engine.scanner.src;\n          parsetree = signature;\n          diagnostics;\n          invalid;\n          comments = List.rev engine.comments;\n        });\n    string_of_diagnostics =\n      (fun ~source ~filename:_ diagnostics ->\n        Res_diagnostics.print_report diagnostics source);\n  }\n\nlet parse_implementation_from_source ~for_printer ~display_filename ~source =\n  let engine = setup_from_source ~display_filename ~source ~for_printer () in\n  let structure = Res_core.parse_implementation engine in\n  let invalid, diagnostics =\n    match engine.diagnostics with\n    | [] as diagnostics -> (false, diagnostics)\n    | _ as diagnostics -> (true, diagnostics)\n  in\n  {\n    filename = engine.scanner.filename;\n    source = engine.scanner.src;\n    parsetree = structure;\n    diagnostics;\n    invalid;\n    comments = List.rev engine.comments;\n  }\n\nlet parse_interface_from_source ~for_printer ~display_filename ~source =\n  let engine = setup_from_source ~display_filename ~source ~for_printer () in\n  let signature = Res_core.parse_specification engine in\n  let invalid, diagnostics =\n    match engine.diagnostics with\n    | [] as diagnostics -> (false, diagnostics)\n    | _ as diagnostics -> (true, diagnostics)\n  in\n  {\n    filename = engine.scanner.filename;\n    source = engine.scanner.src;\n    parsetree = signature;\n    diagnostics;\n    invalid;\n    comments = List.rev engine.comments;\n  }\n\nlet print_engine =\n  {\n    print_implementation =\n      (fun ~width ~filename:_ ~comments structure ->\n        
print_string\n          (Res_printer.print_implementation ~width structure ~comments));\n    print_interface =\n      (fun ~width ~filename:_ ~comments signature ->\n        print_string (Res_printer.print_interface ~width signature ~comments));\n  }\n\nlet parse_implementation ?(ignore_parse_errors = false) sourcefile =\n  Location.input_name := sourcefile;\n  let parse_result =\n    parsing_engine.parse_implementation ~for_printer:false ~filename:sourcefile\n  in\n  if parse_result.invalid then (\n    Res_diagnostics.print_report parse_result.diagnostics parse_result.source;\n    if not ignore_parse_errors then exit 1);\n  parse_result.parsetree\n[@@raises exit]\n\nlet parse_interface ?(ignore_parse_errors = false) sourcefile =\n  Location.input_name := sourcefile;\n  let parse_result =\n    parsing_engine.parse_interface ~for_printer:false ~filename:sourcefile\n  in\n  if parse_result.invalid then (\n    Res_diagnostics.print_report parse_result.diagnostics parse_result.source;\n    if not ignore_parse_errors then exit 1);\n  parse_result.parsetree\n[@@raises exit]\n\n(* suppress unused optional arg *)\nlet _ =\n fun s ->\n  ( parse_implementation ~ignore_parse_errors:false s,\n    parse_interface ~ignore_parse_errors:false s )\n[@@raises exit]\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_driver.mli",
    "content": "type ('ast, 'diagnostics) parse_result = {\n  filename: string; [@live]\n  source: string;\n  parsetree: 'ast;\n  diagnostics: 'diagnostics;\n  invalid: bool;\n  comments: Res_comment.t list;\n}\n\ntype 'diagnostics parsing_engine = {\n  parse_implementation:\n    for_printer:bool ->\n    filename:string ->\n    (Parsetree.structure, 'diagnostics) parse_result;\n  parse_interface:\n    for_printer:bool ->\n    filename:string ->\n    (Parsetree.signature, 'diagnostics) parse_result;\n  string_of_diagnostics:\n    source:string -> filename:string -> 'diagnostics -> unit;\n}\n\nval parse_implementation_from_source :\n  for_printer:bool ->\n  display_filename:string ->\n  source:string ->\n  (Parsetree.structure, Res_diagnostics.t list) parse_result\n[@@live]\n\nval parse_interface_from_source :\n  for_printer:bool ->\n  display_filename:string ->\n  source:string ->\n  (Parsetree.signature, Res_diagnostics.t list) parse_result\n[@@live]\n\ntype print_engine = {\n  print_implementation:\n    width:int ->\n    filename:string ->\n    comments:Res_comment.t list ->\n    Parsetree.structure ->\n    unit;\n  print_interface:\n    width:int ->\n    filename:string ->\n    comments:Res_comment.t list ->\n    Parsetree.signature ->\n    unit;\n}\n\nval parsing_engine : Res_diagnostics.t list parsing_engine\n\nval print_engine : print_engine\n\n(* ReScript implementation parsing compatible with ocaml pparse driver. Used by the compiler. *)\nval parse_implementation :\n  ?ignore_parse_errors:bool -> string -> Parsetree.structure\n[@@live] [@@raises Location.Error]\n\n(* ReScript interface parsing compatible with ocaml pparse driver. Used by the compiler *)\nval parse_interface : ?ignore_parse_errors:bool -> string -> Parsetree.signature\n[@@live] [@@raises Location.Error]\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_driver_binary.ml",
    "content": "let print_engine =\n  Res_driver.\n    {\n      print_implementation =\n        (fun ~width:_ ~filename ~comments:_ structure ->\n          output_string stdout Config.ast_impl_magic_number;\n          output_value stdout filename;\n          output_value stdout structure);\n      print_interface =\n        (fun ~width:_ ~filename ~comments:_ signature ->\n          output_string stdout Config.ast_intf_magic_number;\n          output_value stdout filename;\n          output_value stdout signature);\n    }\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_driver_binary.mli",
    "content": "val print_engine : Res_driver.print_engine\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_driver_ml_parser.ml",
    "content": "module OcamlParser = Parser\nmodule IO = Res_io\n\nlet setup ~filename =\n  if String.length filename > 0 then (\n    Location.input_name := filename;\n    IO.read_file ~filename |> Lexing.from_string)\n  else Lexing.from_channel stdin\n\nlet extract_ocaml_concrete_syntax filename =\n  let lexbuf =\n    if String.length filename > 0 then\n      IO.read_file ~filename |> Lexing.from_string\n    else Lexing.from_channel stdin\n  in\n  let string_locs = ref [] in\n  let comment_data = ref [] in\n  let rec next (prev_tok_end_pos : Lexing.position) () =\n    let token = Lexer.token_with_comments lexbuf in\n    match token with\n    | OcamlParser.COMMENT (txt, loc) ->\n      let comment =\n        Res_comment.from_ocaml_comment ~loc ~prev_tok_end_pos ~txt\n      in\n      comment_data := comment :: !comment_data;\n      next loc.Location.loc_end ()\n    | OcamlParser.STRING (_txt, None) ->\n      let open Location in\n      let loc =\n        {\n          loc_start = lexbuf.lex_start_p;\n          loc_end = lexbuf.Lexing.lex_curr_p;\n          loc_ghost = false;\n        }\n      in\n      let len = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum in\n      let txt =\n        Bytes.to_string\n          ((Bytes.sub [@doesNotRaise]) lexbuf.Lexing.lex_buffer\n             loc.loc_start.pos_cnum len)\n      in\n      string_locs := (txt, loc) :: !string_locs;\n      next lexbuf.Lexing.lex_curr_p ()\n    | OcamlParser.EOF -> ()\n    | _ -> next lexbuf.Lexing.lex_curr_p ()\n  in\n  next lexbuf.Lexing.lex_start_p ();\n  (List.rev !string_locs, List.rev !comment_data)\n\nlet parsing_engine =\n  {\n    Res_driver.parse_implementation =\n      (fun ~for_printer:_ ~filename ->\n        let lexbuf = setup ~filename in\n        let string_data, comments =\n          extract_ocaml_concrete_syntax !Location.input_name\n        in\n        let structure =\n          Parse.implementation lexbuf\n          |> Res_ast_conversion.replace_string_literal_structure string_data\n   
       |> Res_ast_conversion.structure\n        in\n        {\n          filename = !Location.input_name;\n          source = Bytes.to_string lexbuf.lex_buffer;\n          parsetree = structure;\n          diagnostics = ();\n          invalid = false;\n          comments;\n        });\n    parse_interface =\n      (fun ~for_printer:_ ~filename ->\n        let lexbuf = setup ~filename in\n        let string_data, comments =\n          extract_ocaml_concrete_syntax !Location.input_name\n        in\n        let signature =\n          Parse.interface lexbuf\n          |> Res_ast_conversion.replace_string_literal_signature string_data\n          |> Res_ast_conversion.signature\n        in\n        {\n          filename = !Location.input_name;\n          source = Bytes.to_string lexbuf.lex_buffer;\n          parsetree = signature;\n          diagnostics = ();\n          invalid = false;\n          comments;\n        });\n    string_of_diagnostics = (fun ~source:_ ~filename:_ _diagnostics -> ());\n  }\n\nlet print_engine =\n  Res_driver.\n    {\n      print_implementation =\n        (fun ~width:_ ~filename:_ ~comments:_ structure ->\n          Pprintast.structure Format.std_formatter structure);\n      print_interface =\n        (fun ~width:_ ~filename:_ ~comments:_ signature ->\n          Pprintast.signature Format.std_formatter signature);\n    }\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_driver_ml_parser.mli",
    "content": "(* This module represents a general interface to parse marshalled reason ast *)\n\n(* extracts comments and the original string data from an ocaml file *)\nval extract_ocaml_concrete_syntax :\n  string -> (string * Location.t) list * Res_comment.t list\n[@@live]\n\nval parsing_engine : unit Res_driver.parsing_engine\n\nval print_engine : Res_driver.print_engine\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_grammar.ml",
    "content": "module Token = Res_token\n\ntype t =\n  | OpenDescription (* open Belt *)\n  | ModuleLongIdent (* Foo or Foo.Bar *) [@live]\n  | Ternary (* condExpr ? trueExpr : falseExpr *)\n  | Es6ArrowExpr\n  | Jsx\n  | JsxAttribute\n  | JsxChild [@live]\n  | ExprOperand\n  | ExprUnary\n  | ExprSetField\n  | ExprBinaryAfterOp of Token.t\n  | ExprBlock\n  | ExprCall\n  | ExprList\n  | ExprArrayAccess\n  | ExprArrayMutation\n  | ExprIf\n  | ExprFor\n  | IfCondition\n  | IfBranch\n  | ElseBranch\n  | TypeExpression\n  | External\n  | PatternMatching\n  | PatternMatchCase\n  | LetBinding\n  | PatternList\n  | PatternOcamlList\n  | PatternRecord\n  | TypeDef\n  | TypeConstrName\n  | TypeParams\n  | TypeParam [@live]\n  | PackageConstraint\n  | TypeRepresentation\n  | RecordDecl\n  | ConstructorDeclaration\n  | ParameterList\n  | StringFieldDeclarations\n  | FieldDeclarations\n  | TypExprList\n  | FunctorArgs\n  | ModExprList\n  | TypeParameters\n  | RecordRows\n  | RecordRowsStringKey\n  | ArgumentList\n  | Signature\n  | Specification\n  | Structure\n  | Implementation\n  | Attribute\n  | TypeConstraint\n  | AtomicTypExpr\n  | ListExpr\n  | Pattern\n  | AttributePayload\n  | TagNames\n\nlet to_string = function\n  | OpenDescription -> \"an open description\"\n  | ModuleLongIdent -> \"a module path\"\n  | Ternary -> \"a ternary expression\"\n  | Es6ArrowExpr -> \"an es6 arrow function\"\n  | Jsx -> \"a jsx expression\"\n  | JsxAttribute -> \"a jsx attribute\"\n  | ExprOperand -> \"a basic expression\"\n  | ExprUnary -> \"a unary expression\"\n  | ExprBinaryAfterOp op ->\n    \"an expression after the operator \\\"\" ^ Token.to_string op ^ \"\\\"\"\n  | ExprIf -> \"an if expression\"\n  | IfCondition -> \"the condition of an if expression\"\n  | IfBranch -> \"the true-branch of an if expression\"\n  | ElseBranch -> \"the else-branch of an if expression\"\n  | TypeExpression -> \"a type\"\n  | External -> \"an external\"\n  | PatternMatching -> \"the cases of a pattern 
match\"\n  | ExprBlock -> \"a block with expressions\"\n  | ExprSetField -> \"a record field mutation\"\n  | ExprCall -> \"a function application\"\n  | ExprArrayAccess -> \"an array access expression\"\n  | ExprArrayMutation -> \"an array mutation\"\n  | LetBinding -> \"a let binding\"\n  | TypeDef -> \"a type definition\"\n  | TypeParams -> \"type parameters\"\n  | TypeParam -> \"a type parameter\"\n  | TypeConstrName -> \"a type-constructor name\"\n  | TypeRepresentation -> \"a type representation\"\n  | RecordDecl -> \"a record declaration\"\n  | PatternMatchCase -> \"a pattern match case\"\n  | ConstructorDeclaration -> \"a constructor declaration\"\n  | ExprList -> \"multiple expressions\"\n  | PatternList -> \"multiple patterns\"\n  | PatternOcamlList -> \"a list pattern\"\n  | PatternRecord -> \"a record pattern\"\n  | ParameterList -> \"parameters\"\n  | StringFieldDeclarations -> \"string field declarations\"\n  | FieldDeclarations -> \"field declarations\"\n  | TypExprList -> \"list of types\"\n  | FunctorArgs -> \"functor arguments\"\n  | ModExprList -> \"list of module expressions\"\n  | TypeParameters -> \"list of type parameters\"\n  | RecordRows -> \"rows of a record\"\n  | RecordRowsStringKey -> \"rows of a record with string keys\"\n  | ArgumentList -> \"arguments\"\n  | Signature -> \"signature\"\n  | Specification -> \"specification\"\n  | Structure -> \"structure\"\n  | Implementation -> \"implementation\"\n  | Attribute -> \"an attribute\"\n  | TypeConstraint -> \"constraints on a type\"\n  | AtomicTypExpr -> \"a type\"\n  | ListExpr -> \"an ocaml list expr\"\n  | PackageConstraint -> \"a package constraint\"\n  | JsxChild -> \"jsx child\"\n  | Pattern -> \"pattern\"\n  | ExprFor -> \"a for expression\"\n  | AttributePayload -> \"an attribute payload\"\n  | TagNames -> \"tag names\"\n\nlet is_signature_item_start = function\n  | Token.At | Let | Typ | External | Exception | Open | Include | Module | AtAt\n  | PercentPercent ->\n    true\n  | _ 
-> false\n\nlet is_atomic_pattern_start = function\n  | Token.Int _ | String _ | Codepoint _ | Backtick | Lparen | Lbracket | Lbrace\n  | Underscore | Lident _ | Uident _ | List | Exception | Percent ->\n    true\n  | _ -> false\n\nlet is_atomic_expr_start = function\n  | Token.True | False | Int _ | String _ | Float _ | Codepoint _ | Backtick\n  | Uident _ | Lident _ | Hash | Lparen | List | Lbracket | Lbrace | LessThan\n  | Module | Percent ->\n    true\n  | _ -> false\n\nlet is_atomic_typ_expr_start = function\n  | Token.SingleQuote | Underscore | Lparen | Lbrace | Uident _ | Lident _\n  | Percent ->\n    true\n  | _ -> false\n\nlet is_expr_start = function\n  | Token.Assert | At | Await | Backtick | Bang | Codepoint _ | False | Float _\n  | For | Hash | If | Int _ | Lbrace | Lbracket | LessThan | Lident _ | List\n  | Lparen | Minus | MinusDot | Module | Percent | Plus | PlusDot | String _\n  | Switch | True | Try | Uident _ | Underscore (* _ => doThings() *)\n  | While ->\n    true\n  | _ -> false\n\nlet is_jsx_attribute_start = function\n  | Token.Lident _ | Question | Lbrace -> true\n  | _ -> false\n\nlet is_structure_item_start = function\n  | Token.Open | Let | Typ | External | Exception | Include | Module | AtAt\n  | PercentPercent | At ->\n    true\n  | t when is_expr_start t -> true\n  | _ -> false\n\nlet is_pattern_start = function\n  | Token.Int _ | Float _ | String _ | Codepoint _ | Backtick | True | False\n  | Minus | Plus | Lparen | Lbracket | Lbrace | List | Underscore | Lident _\n  | Uident _ | Hash | Exception | Percent | Module | At ->\n    true\n  | _ -> false\n\nlet is_parameter_start = function\n  | Token.Typ | Tilde | Dot -> true\n  | token when is_pattern_start token -> true\n  | _ -> false\n\n(* TODO: overparse Uident ? *)\nlet is_string_field_decl_start = function\n  | Token.String _ | Lident _ | At | DotDotDot -> true\n  | _ -> false\n\n(* TODO: overparse Uident ? 
*)\nlet is_field_decl_start = function\n  | Token.At | Mutable | Lident _ -> true\n  (* recovery, TODO: this is not ideal… *)\n  | Uident _ -> true\n  | t when Token.is_keyword t -> true\n  | _ -> false\n\nlet is_record_decl_start = function\n  | Token.At | Mutable | Lident _ | DotDotDot | String _ -> true\n  | _ -> false\n\nlet is_typ_expr_start = function\n  | Token.At | SingleQuote | Underscore | Lparen | Lbracket | Uident _\n  | Lident _ | Module | Percent | Lbrace ->\n    true\n  | _ -> false\n\nlet is_type_parameter_start = function\n  | Token.Tilde | Dot -> true\n  | token when is_typ_expr_start token -> true\n  | _ -> false\n\nlet is_type_param_start = function\n  | Token.Plus | Minus | SingleQuote | Underscore -> true\n  | _ -> false\n\nlet is_functor_arg_start = function\n  | Token.At | Uident _ | Underscore | Percent | Lbrace | Lparen -> true\n  | _ -> false\n\nlet is_mod_expr_start = function\n  | Token.At | Percent | Uident _ | Lbrace | Lparen | Lident \"unpack\" | Await ->\n    true\n  | _ -> false\n\nlet is_record_row_start = function\n  | Token.DotDotDot -> true\n  | Token.Uident _ | Lident _ -> true\n  (* TODO *)\n  | t when Token.is_keyword t -> true\n  | _ -> false\n\nlet is_record_row_string_key_start = function\n  | Token.String _ -> true\n  | _ -> false\n\nlet is_argument_start = function\n  | Token.Tilde | Dot | Underscore -> true\n  | t when is_expr_start t -> true\n  | _ -> false\n\nlet is_pattern_match_start = function\n  | Token.Bar -> true\n  | t when is_pattern_start t -> true\n  | _ -> false\n\nlet is_pattern_ocaml_list_start = function\n  | Token.DotDotDot -> true\n  | t when is_pattern_start t -> true\n  | _ -> false\n\nlet is_pattern_record_item_start = function\n  | Token.DotDotDot | Uident _ | Lident _ | Underscore -> true\n  | _ -> false\n\nlet is_attribute_start = function\n  | Token.At -> true\n  | _ -> false\n\nlet is_jsx_child_start = is_atomic_expr_start\n\nlet is_block_expr_start = function\n  | Token.Assert | At | Await | 
Backtick | Bang | Codepoint _ | Exception\n  | False | Float _ | For | Forwardslash | Hash | If | Int _ | Lbrace | Lbracket\n  | LessThan | Let | Lident _ | List | Lparen | Minus | MinusDot | Module | Open\n  | Percent | Plus | PlusDot | String _ | Switch | True | Try | Uident _\n  | Underscore | While ->\n    true\n  | _ -> false\n\nlet is_list_element grammar token =\n  match grammar with\n  | ExprList -> token = Token.DotDotDot || is_expr_start token\n  | ListExpr -> token = DotDotDot || is_expr_start token\n  | PatternList -> token = DotDotDot || is_pattern_start token\n  | ParameterList -> is_parameter_start token\n  | StringFieldDeclarations -> is_string_field_decl_start token\n  | FieldDeclarations -> is_field_decl_start token\n  | RecordDecl -> is_record_decl_start token\n  | TypExprList -> is_typ_expr_start token || token = Token.LessThan\n  | TypeParams -> is_type_param_start token\n  | FunctorArgs -> is_functor_arg_start token\n  | ModExprList -> is_mod_expr_start token\n  | TypeParameters -> is_type_parameter_start token\n  | RecordRows -> is_record_row_start token\n  | RecordRowsStringKey -> is_record_row_string_key_start token\n  | ArgumentList -> is_argument_start token\n  | Signature | Specification -> is_signature_item_start token\n  | Structure | Implementation -> is_structure_item_start token\n  | PatternMatching -> is_pattern_match_start token\n  | PatternOcamlList -> is_pattern_ocaml_list_start token\n  | PatternRecord -> is_pattern_record_item_start token\n  | Attribute -> is_attribute_start token\n  | TypeConstraint -> token = Constraint\n  | PackageConstraint -> token = And\n  | ConstructorDeclaration -> token = Bar\n  | JsxAttribute -> is_jsx_attribute_start token\n  | AttributePayload -> token = Lparen\n  | TagNames -> token = Hash\n  | _ -> false\n\nlet is_list_terminator grammar token =\n  match (grammar, token) with\n  | _, Token.Eof\n  | ExprList, (Rparen | Forwardslash | Rbracket)\n  | ListExpr, Rparen\n  | ArgumentList, (Rparen | 
DotDotDot)\n  | TypExprList, (Rparen | Forwardslash | GreaterThan | Equal)\n  | ModExprList, Rparen\n  | ( (PatternList | PatternOcamlList | PatternRecord),\n      ( Forwardslash | Rbracket | Rparen | EqualGreater (* pattern matching => *)\n      | In (* for expressions *)\n      | Equal (* let {x} = foo *) ) )\n  | ExprBlock, Rbrace\n  | (Structure | Signature), Rbrace\n  | TypeParams, Rparen\n  | ParameterList, (EqualGreater | Lbrace)\n  | JsxAttribute, (Forwardslash | GreaterThan)\n  | StringFieldDeclarations, Rbrace ->\n    true\n  | Attribute, token when token <> At -> true\n  | TypeConstraint, token when token <> Constraint -> true\n  | PackageConstraint, token when token <> And -> true\n  | ConstructorDeclaration, token when token <> Bar -> true\n  | AttributePayload, Rparen -> true\n  | TagNames, Rbracket -> true\n  | _ -> false\n\nlet is_part_of_list grammar token =\n  is_list_element grammar token || is_list_terminator grammar token\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_io.ml",
    "content": "let read_file ~filename =\n  let chan = open_in_bin filename in\n  let content =\n    try really_input_string chan (in_channel_length chan)\n    with End_of_file -> \"\"\n  in\n  close_in_noerr chan;\n  content\n\nlet write_file ~filename ~contents:txt =\n  let chan = open_out_bin filename in\n  output_string chan txt;\n  close_out chan\n[@@raises Sys_error]\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_io.mli",
    "content": "(* utilities to read and write to/from files or stdin *)\n\n(* reads the contents of \"filename\" into a string *)\nval read_file : filename:string -> string\n\n(* writes \"content\" into file with name \"filename\" *)\nval write_file : filename:string -> contents:string -> unit\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_minibuffer.ml",
    "content": "type t = {mutable buffer: bytes; mutable position: int; mutable length: int}\n\nlet create n =\n  let n = if n < 1 then 1 else n in\n  let s = (Bytes.create [@doesNotRaise]) n in\n  {buffer = s; position = 0; length = n}\n\nlet contents b = (Bytes.sub_string [@doesNotRaise]) b.buffer 0 b.position\n\n(* Can't be called directly, don't add to the interface *)\nlet resize_internal b more =\n  let len = b.length in\n  let new_len = ref len in\n  while b.position + more > !new_len do\n    new_len := 2 * !new_len\n  done;\n  if !new_len > Sys.max_string_length then\n    if b.position + more <= Sys.max_string_length then\n      new_len := Sys.max_string_length;\n  let new_buffer = (Bytes.create [@doesNotRaise]) !new_len in\n  (* PR#6148: let's keep using [blit] rather than [unsafe_blit] in\n     this tricky function that is slow anyway. *)\n  Bytes.blit b.buffer 0 new_buffer 0 b.position [@doesNotRaise];\n  b.buffer <- new_buffer;\n  b.length <- !new_len\n\nlet add_char b c =\n  let pos = b.position in\n  if pos >= b.length then resize_internal b 1;\n  Bytes.unsafe_set b.buffer pos c;\n  b.position <- pos + 1\n\nlet add_string b s =\n  let len = String.length s in\n  let new_position = b.position + len in\n  if new_position > b.length then resize_internal b len;\n  Bytes.blit_string s 0 b.buffer b.position len [@doesNotRaise];\n  b.position <- new_position\n\n(* adds newline and trims all preceding whitespace *)\nlet flush_newline b =\n  let position = ref b.position in\n  while Bytes.unsafe_get b.buffer (!position - 1) = ' ' && !position >= 0 do\n    position := !position - 1\n  done;\n  b.position <- !position;\n  add_char b '\\n'\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_minibuffer.mli",
    "content": "type t\nval add_char : t -> char -> unit\nval add_string : t -> string -> unit\nval contents : t -> string\nval create : int -> t\nval flush_newline : t -> unit\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_multi_printer.ml",
    "content": "let default_print_width = 100\n\n(* Look at rescript.json (or bsconfig.json) to set Uncurried or Legacy mode if it contains \"uncurried\": false *)\nlet get_uncurried_from_config ~filename =\n  let rec find_config ~dir =\n    let config = Filename.concat dir \"rescript.json\" in\n    if Sys.file_exists config then Some (Res_io.read_file ~filename:config)\n    else\n      let config = Filename.concat dir \"bsconfig.json\" in\n      if Sys.file_exists config then Some (Res_io.read_file ~filename:config)\n      else\n        let parent = Filename.dirname dir in\n        if parent = dir then None else find_config ~dir:parent\n  in\n  let rec find_from_node_modules ~dir =\n    let parent = Filename.dirname dir in\n    if Filename.basename dir = \"node_modules\" then\n      let config = Filename.concat parent \"rescript.json\" in\n      if Sys.file_exists config then Some (Res_io.read_file ~filename:config)\n      else\n        let config = Filename.concat parent \"bsconfig.json\" in\n        if Sys.file_exists config then Some (Res_io.read_file ~filename:config)\n        else None\n    else if parent = dir then None\n    else find_from_node_modules ~dir:parent\n  in\n  let dir =\n    if Filename.is_relative filename then\n      Filename.dirname (Filename.concat (Sys.getcwd ()) filename)\n    else Filename.dirname filename\n  in\n  let config () =\n    match find_config ~dir with\n    | None ->\n      (* The editor calls format on a temporary file. 
So bsconfig can't be found.\n         This looks outside the node_modules containing the bsc binary *)\n      let dir = (Filename.dirname Sys.argv.(0) [@doesNotRaise]) in\n      find_from_node_modules ~dir\n    | x -> x\n  in\n  match config () with\n  | exception _ -> ()\n  | None -> ()\n  | Some config ->\n    let lines = config |> String.split_on_char '\\n' in\n    let is_legacy_uncurried =\n      lines\n      |> List.exists (fun line ->\n             let is_uncurried_option = ref false in\n             let is_option_falsy = ref false in\n             let words = line |> String.split_on_char ' ' in\n             words\n             |> List.iter (fun word ->\n                    match word with\n                    | \"\\\"uncurried\\\"\" | \"\\\"uncurried\\\":\" ->\n                      is_uncurried_option := true\n                    | \"\\\"uncurried\\\":false\" | \"\\\"uncurried\\\":false,\" ->\n                      is_uncurried_option := true;\n                      is_option_falsy := true\n                    | \"false\" | \":false\" | \"false,\" | \":false,\" ->\n                      is_option_falsy := true\n                    | _ -> ());\n             !is_uncurried_option && !is_option_falsy)\n    in\n    if not is_legacy_uncurried then Config.uncurried := Uncurried\n\n(* print res files to res syntax *)\nlet print_res ~ignore_parse_errors ~is_interface ~filename =\n  get_uncurried_from_config ~filename;\n  if is_interface then (\n    let parse_result =\n      Res_driver.parsing_engine.parse_interface ~for_printer:true ~filename\n    in\n    if parse_result.invalid then (\n      Res_diagnostics.print_report parse_result.diagnostics parse_result.source;\n      if not ignore_parse_errors then exit 1);\n    Res_printer.print_interface ~width:default_print_width\n      ~comments:parse_result.comments parse_result.parsetree)\n  else\n    let parse_result =\n      Res_driver.parsing_engine.parse_implementation ~for_printer:true ~filename\n    in\n    if 
parse_result.invalid then (\n      Res_diagnostics.print_report parse_result.diagnostics parse_result.source;\n      if not ignore_parse_errors then exit 1);\n    Res_printer.print_implementation ~width:default_print_width\n      ~comments:parse_result.comments parse_result.parsetree\n[@@raises exit]\n\n(* print ocaml files to res syntax *)\nlet print_ml ~is_interface ~filename =\n  if is_interface then\n    let parse_result =\n      Res_driver_ml_parser.parsing_engine.parse_interface ~for_printer:true\n        ~filename\n    in\n    Res_printer.print_interface ~width:default_print_width\n      ~comments:parse_result.comments parse_result.parsetree\n  else\n    let parse_result =\n      Res_driver_ml_parser.parsing_engine.parse_implementation ~for_printer:true\n        ~filename\n    in\n    Res_printer.print_implementation ~width:default_print_width\n      ~comments:parse_result.comments parse_result.parsetree\n\n(* print the given file named input to from \"language\" to res, general interface exposed by the compiler *)\nlet print ?(ignore_parse_errors = false) language ~input =\n  let is_interface =\n    let len = String.length input in\n    len > 0 && String.unsafe_get input (len - 1) = 'i'\n  in\n  match language with\n  | `res -> print_res ~ignore_parse_errors ~is_interface ~filename:input\n  | `ml -> print_ml ~is_interface ~filename:input\n[@@raises exit]\n\n(* suppress unused optional arg *)\nlet _ = fun s -> print ~ignore_parse_errors:false s [@@raises exit]\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_multi_printer.mli",
    "content": "(* Interface to print source code from different languages to res.\n * Takes a filename called \"input\" and returns the corresponding formatted res syntax *)\nval print : ?ignore_parse_errors:bool -> [`ml | `res] -> input:string -> string\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_outcome_printer.ml",
    "content": "(* For the curious: the outcome printer is a printer to print data\n * from the outcometree.mli file in the ocaml compiler.\n * The outcome tree is used by:\n *  - ocaml's toplevel/repl, print results/errors\n *  - super errors, print nice errors\n *  - editor tooling, e.g. show type on hover\n *\n * In general it represent messages to show results or errors to the user. *)\n\nmodule Doc = Res_doc\nmodule Printer = Res_printer\n\n(* ReScript doesn't have parenthesized identifiers.\n * We don't support custom operators. *)\nlet parenthesized_ident _name = true\n\n(* TODO: better allocation strategy for the buffer *)\nlet escape_string_contents s =\n  let len = String.length s in\n  let b = Buffer.create len in\n  for i = 0 to len - 1 do\n    let c = (String.get [@doesNotRaise]) s i in\n    if c = '\\008' then (\n      Buffer.add_char b '\\\\';\n      Buffer.add_char b 'b')\n    else if c = '\\009' then (\n      Buffer.add_char b '\\\\';\n      Buffer.add_char b 't')\n    else if c = '\\010' then (\n      Buffer.add_char b '\\\\';\n      Buffer.add_char b 'n')\n    else if c = '\\013' then (\n      Buffer.add_char b '\\\\';\n      Buffer.add_char b 'r')\n    else if c = '\\034' then (\n      Buffer.add_char b '\\\\';\n      Buffer.add_char b '\"')\n    else if c = '\\092' then (\n      Buffer.add_char b '\\\\';\n      Buffer.add_char b '\\\\')\n    else Buffer.add_char b c\n  done;\n  Buffer.contents b\n\n(* let rec print_ident fmt ident = match ident with\n   | Outcometree.Oide_ident s -> Format.pp_print_string fmt s\n   | Oide_dot (id, s) ->\n     print_ident fmt id;\n     Format.pp_print_char fmt '.';\n     Format.pp_print_string fmt s\n   | Oide_apply (id1, id2) ->\n     print_ident fmt id1;\n     Format.pp_print_char fmt '(';\n     print_ident fmt id2;\n     Format.pp_print_char fmt ')' *)\n\nlet rec print_out_ident_doc ?(allow_uident = true)\n    (ident : Outcometree.out_ident) =\n  match ident with\n  | Oide_ident s -> Printer.print_ident_like 
~allow_uident s\n  | Oide_dot (ident, s) ->\n    Doc.concat [print_out_ident_doc ident; Doc.dot; Doc.text s]\n  | Oide_apply (call, arg) ->\n    Doc.concat\n      [\n        print_out_ident_doc call; Doc.lparen; print_out_ident_doc arg; Doc.rparen;\n      ]\n\nlet print_out_attribute_doc (out_attribute : Outcometree.out_attribute) =\n  Doc.concat [Doc.text \"@\"; Doc.text out_attribute.oattr_name]\n\nlet print_out_attributes_doc (attrs : Outcometree.out_attribute list) =\n  match attrs with\n  | [] -> Doc.nil\n  | attrs ->\n    Doc.concat\n      [\n        Doc.group\n          (Doc.join ~sep:Doc.line (List.map print_out_attribute_doc attrs));\n        Doc.line;\n      ]\n\nlet rec collect_arrow_args (out_type : Outcometree.out_type) args =\n  match out_type with\n  | Otyp_arrow (label, arg_type, return_type) ->\n    let arg = (label, arg_type) in\n    collect_arrow_args return_type (arg :: args)\n  | _ as return_type -> (List.rev args, return_type)\n\nlet rec collect_functor_args (out_module_type : Outcometree.out_module_type)\n    args =\n  match out_module_type with\n  | Omty_functor (lbl, opt_mod_type, return_mod_type) ->\n    let arg = (lbl, opt_mod_type) in\n    collect_functor_args return_mod_type (arg :: args)\n  | _ -> (List.rev args, out_module_type)\n\nlet rec print_out_type_doc (out_type : Outcometree.out_type) =\n  match out_type with\n  | Otyp_abstract | Otyp_open -> Doc.nil\n  | Otyp_variant (non_gen, out_variant, closed, labels) ->\n    (* bool * out_variant * bool * (string list) option *)\n    let opening =\n      match (closed, labels) with\n      | true, None -> (* [#A | #B] *) Doc.soft_line\n      | false, None ->\n        (* [> #A | #B] *)\n        Doc.concat [Doc.greater_than; Doc.line]\n      | true, Some [] ->\n        (* [< #A | #B] *)\n        Doc.concat [Doc.less_than; Doc.line]\n      | true, Some _ ->\n        (* [< #A | #B > #X #Y ] *)\n        Doc.concat [Doc.less_than; Doc.line]\n      | false, Some _ ->\n        (* impossible!? 
ocaml seems to print ?, see oprint.ml in 4.06 *)\n        Doc.concat [Doc.text \"?\"; Doc.line]\n    in\n    Doc.group\n      (Doc.concat\n         [\n           (if non_gen then Doc.text \"_\" else Doc.nil);\n           Doc.lbracket;\n           Doc.indent (Doc.concat [opening; print_out_variant out_variant]);\n           (match labels with\n           | None | Some [] -> Doc.nil\n           | Some tags ->\n             Doc.group\n               (Doc.concat\n                  [\n                    Doc.space;\n                    Doc.join ~sep:Doc.space\n                      (List.map\n                         (fun lbl ->\n                           Printer.print_ident_like ~allow_uident:true lbl)\n                         tags);\n                  ]));\n           Doc.soft_line;\n           Doc.rbracket;\n         ])\n  | Otyp_alias (typ, alias_txt) ->\n    Doc.concat\n      [\n        Doc.lparen;\n        print_out_type_doc typ;\n        Doc.text \" as '\";\n        Doc.text alias_txt;\n        Doc.rparen;\n      ]\n  | Otyp_constr (Oide_dot (Oide_dot (Oide_ident \"Js\", \"Fn\"), \"arity0\"), [typ])\n    ->\n    (* Compatibility with compiler up to v10.x *)\n    Doc.concat [Doc.text \"(. ()) => \"; print_out_type_doc typ]\n  | Otyp_constr\n      ( Oide_dot (Oide_dot (Oide_ident \"Js\", \"Fn\"), _),\n        [(Otyp_arrow _ as arrow_type)] ) ->\n    (* Compatibility with compiler up to v10.x *)\n    print_out_arrow_type ~uncurried:true arrow_type\n  | Otyp_constr (Oide_ident \"function$\", [(Otyp_arrow _ as arrow_type); _arity])\n    ->\n    (* function$<(int, int) => int, [#2]> -> (. 
int, int) => int *)\n    print_out_arrow_type ~uncurried:true arrow_type\n  | Otyp_constr (Oide_ident \"function$\", [Otyp_var _; _arity]) ->\n    (* function$<'a, arity> -> _ => _ *)\n    print_out_type_doc (Otyp_stuff \"_ => _\")\n  | Otyp_constr (out_ident, []) ->\n    print_out_ident_doc ~allow_uident:false out_ident\n  | Otyp_manifest (typ1, typ2) ->\n    Doc.concat\n      [print_out_type_doc typ1; Doc.text \" = \"; print_out_type_doc typ2]\n  | Otyp_record record -> print_record_declaration_doc ~inline:true record\n  | Otyp_stuff txt -> Doc.text txt\n  | Otyp_var (ng, s) ->\n    Doc.concat [Doc.text (\"'\" ^ if ng then \"_\" else \"\"); Doc.text s]\n  | Otyp_object (fields, rest) -> print_object_fields fields rest\n  | Otyp_class _ -> Doc.nil\n  | Otyp_attribute (typ, attribute) ->\n    Doc.group\n      (Doc.concat\n         [print_out_attribute_doc attribute; Doc.line; print_out_type_doc typ])\n  (* example: Red | Blue | Green | CustomColour(float, float, float) *)\n  | Otyp_sum constructors -> print_out_constructors_doc constructors\n  (* example: {\"name\": string, \"age\": int} *)\n  | Otyp_constr (Oide_dot (Oide_ident \"Js\", \"t\"), [Otyp_object (fields, rest)])\n    ->\n    print_object_fields fields rest\n  (* example: node<root, 'value> *)\n  | Otyp_constr (out_ident, args) ->\n    let args_doc =\n      match args with\n      | [] -> Doc.nil\n      | args ->\n        Doc.concat\n          [\n            Doc.less_than;\n            Doc.indent\n              (Doc.concat\n                 [\n                   Doc.soft_line;\n                   Doc.join\n                     ~sep:(Doc.concat [Doc.comma; Doc.line])\n                     (List.map print_out_type_doc args);\n                 ]);\n            Doc.trailing_comma;\n            Doc.soft_line;\n            Doc.greater_than;\n          ]\n    in\n    Doc.group (Doc.concat [print_out_ident_doc out_ident; args_doc])\n  | Otyp_tuple tuple_args ->\n    Doc.group\n      (Doc.concat\n         [\n      
     Doc.lparen;\n           Doc.indent\n             (Doc.concat\n                [\n                  Doc.soft_line;\n                  Doc.join\n                    ~sep:(Doc.concat [Doc.comma; Doc.line])\n                    (List.map print_out_type_doc tuple_args);\n                ]);\n           Doc.trailing_comma;\n           Doc.soft_line;\n           Doc.rparen;\n         ])\n  | Otyp_poly (vars, out_type) ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.join ~sep:Doc.space\n             (List.map (fun var -> Doc.text (\"'\" ^ var)) vars);\n           Doc.dot;\n           Doc.space;\n           print_out_type_doc out_type;\n         ])\n  | Otyp_arrow _ as typ -> print_out_arrow_type ~uncurried:false typ\n  | Otyp_module (mod_name, string_list, out_types) ->\n    let package_type_doc =\n      match (string_list, out_types) with\n      | [], [] -> Doc.nil\n      | labels, types ->\n        let i = ref 0 in\n        let package =\n          Doc.join ~sep:Doc.line\n            ((List.map2 [@doesNotRaise])\n               (fun lbl typ ->\n                 Doc.concat\n                   [\n                     Doc.text\n                       (if i.contents > 0 then \"and type \" else \"with type \");\n                     Doc.text lbl;\n                     Doc.text \" = \";\n                     print_out_type_doc typ;\n                   ])\n               labels types)\n        in\n        Doc.indent (Doc.concat [Doc.line; package])\n    in\n    Doc.concat\n      [\n        Doc.text \"module\";\n        Doc.lparen;\n        Doc.text mod_name;\n        package_type_doc;\n        Doc.rparen;\n      ]\n\nand print_out_arrow_type ~uncurried typ =\n  let uncurried = Res_uncurried.get_dotted ~uncurried !Config.uncurried in\n  let typ_args, typ = collect_arrow_args typ [] in\n  let args =\n    Doc.join\n      ~sep:(Doc.concat [Doc.comma; Doc.line])\n      (List.map\n         (fun (lbl, typ) ->\n           let lbl_len = String.length lbl in\n       
    if lbl_len = 0 then print_out_type_doc typ\n           else\n             let lbl, optional_indicator =\n               (* the ocaml compiler hardcodes the optional label inside the string of the label in printtyp.ml *)\n               match String.unsafe_get lbl 0 with\n               | '?' ->\n                 ( (String.sub [@doesNotRaise]) lbl 1 (lbl_len - 1),\n                   Doc.text \"=?\" )\n               | _ -> (lbl, Doc.nil)\n             in\n             Doc.group\n               (Doc.concat\n                  [\n                    Doc.text (\"~\" ^ lbl ^ \": \");\n                    print_out_type_doc typ;\n                    optional_indicator;\n                  ]))\n         typ_args)\n  in\n  let args_doc =\n    let needs_parens =\n      match typ_args with\n      | _ when uncurried -> true\n      | [\n       ( _,\n         ( Otyp_tuple _ | Otyp_arrow _\n         | Otyp_constr (Oide_ident \"function$\", [Otyp_arrow _; _]) ) );\n      ] ->\n        true\n      (* single argument should not be wrapped *)\n      | [(\"\", _)] -> false\n      | _ -> true\n    in\n    if needs_parens then\n      Doc.group\n        (Doc.concat\n           [\n             (if uncurried then Doc.text \"(. 
\" else Doc.lparen);\n             Doc.indent (Doc.concat [Doc.soft_line; args]);\n             Doc.trailing_comma;\n             Doc.soft_line;\n             Doc.rparen;\n           ])\n    else args\n  in\n  Doc.concat [args_doc; Doc.text \" => \"; print_out_type_doc typ]\n\nand print_out_variant variant =\n  match variant with\n  | Ovar_fields fields ->\n    (* (string * bool * out_type list) list *)\n    Doc.join ~sep:Doc.line\n      ((*\n        * [< | #T([< u2]) & ([< u2]) & ([< u1])]  --> no ampersand\n        * [< | #S & ([< u2]) & ([< u2]) & ([< u1])] --> ampersand\n        *)\n       List.mapi\n         (fun i (name, ampersand, types) ->\n           let needs_parens =\n             match types with\n             | [Outcometree.Otyp_tuple _] -> false\n             | _ -> true\n           in\n           Doc.concat\n             [\n               (if i > 0 then Doc.text \"| \"\n                else Doc.if_breaks (Doc.text \"| \") Doc.nil);\n               Doc.group\n                 (Doc.concat\n                    [\n                      Doc.text \"#\";\n                      Printer.print_poly_var_ident name;\n                      (match types with\n                      | [] -> Doc.nil\n                      | types ->\n                        Doc.concat\n                          [\n                            (if ampersand then Doc.text \" & \" else Doc.nil);\n                            Doc.indent\n                              (Doc.concat\n                                 [\n                                   Doc.join\n                                     ~sep:(Doc.concat [Doc.text \" &\"; Doc.line])\n                                     (List.map\n                                        (fun typ ->\n                                          let out_type_doc =\n                                            print_out_type_doc typ\n                                          in\n                                          if needs_parens then\n              
                              Doc.concat\n                                              [\n                                                Doc.lparen;\n                                                out_type_doc;\n                                                Doc.rparen;\n                                              ]\n                                          else out_type_doc)\n                                        types);\n                                 ]);\n                          ]);\n                    ]);\n             ])\n         fields)\n  | Ovar_typ typ -> print_out_type_doc typ\n\nand print_object_fields fields rest =\n  let dots =\n    match rest with\n    | Some non_gen -> Doc.text ((if non_gen then \"_\" else \"\") ^ \"..\")\n    | None -> if fields = [] then Doc.dot else Doc.nil\n  in\n  Doc.group\n    (Doc.concat\n       [\n         Doc.lbrace;\n         dots;\n         Doc.indent\n           (Doc.concat\n              [\n                Doc.soft_line;\n                Doc.join\n                  ~sep:(Doc.concat [Doc.comma; Doc.line])\n                  (List.map\n                     (fun (lbl, out_type) ->\n                       Doc.group\n                         (Doc.concat\n                            [\n                              Doc.text (\"\\\"\" ^ lbl ^ \"\\\": \");\n                              print_out_type_doc out_type;\n                            ]))\n                     fields);\n              ]);\n         Doc.trailing_comma;\n         Doc.soft_line;\n         Doc.rbrace;\n       ])\n\nand print_out_constructors_doc constructors =\n  Doc.group\n    (Doc.indent\n       (Doc.concat\n          [\n            Doc.soft_line;\n            Doc.join ~sep:Doc.line\n              (List.mapi\n                 (fun i constructor ->\n                   Doc.concat\n                     [\n                       (if i > 0 then Doc.text \"| \"\n                        else Doc.if_breaks (Doc.text \"| \") Doc.nil);\n               
        print_out_constructor_doc constructor;\n                     ])\n                 constructors);\n          ]))\n\nand print_out_constructor_doc (name, args, gadt) =\n  let gadt_doc =\n    match gadt with\n    | Some out_type -> Doc.concat [Doc.text \": \"; print_out_type_doc out_type]\n    | None -> Doc.nil\n  in\n  let args_doc =\n    match args with\n    | [] -> Doc.nil\n    | [Otyp_record record] ->\n      (* inline records\n       *   | Root({\n       *      mutable value: 'value,\n       *      mutable updatedTime: float,\n       *    })\n       *)\n      Doc.concat\n        [\n          Doc.lparen;\n          Doc.indent (print_record_declaration_doc ~inline:true record);\n          Doc.rparen;\n        ]\n    | _types ->\n      Doc.indent\n        (Doc.concat\n           [\n             Doc.lparen;\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.comma; Doc.line])\n                      (List.map print_out_type_doc args);\n                  ]);\n             Doc.trailing_comma;\n             Doc.soft_line;\n             Doc.rparen;\n           ])\n  in\n  Doc.group (Doc.concat [Doc.text name; args_doc; gadt_doc])\n\nand print_record_decl_row_doc (name, mut, opt, arg) =\n  Doc.group\n    (Doc.concat\n       [\n         (if mut then Doc.text \"mutable \" else Doc.nil);\n         Printer.print_ident_like ~allow_uident:false name;\n         (if opt then Doc.text \"?\" else Doc.nil);\n         Doc.text \": \";\n         print_out_type_doc arg;\n       ])\n\nand print_record_declaration_doc ~inline rows =\n  let content =\n    Doc.concat\n      [\n        Doc.lbrace;\n        Doc.indent\n          (Doc.concat\n             [\n               Doc.soft_line;\n               Doc.join\n                 ~sep:(Doc.concat [Doc.comma; Doc.line])\n                 (List.map print_record_decl_row_doc rows);\n             ]);\n        
Doc.trailing_comma;\n        Doc.soft_line;\n        Doc.rbrace;\n      ]\n  in\n  if not inline then Doc.group content else content\n\nlet print_out_type fmt out_type =\n  Format.pp_print_string fmt\n    (Doc.to_string ~width:80 (print_out_type_doc out_type))\n\nlet print_type_parameter_doc (typ, (co, cn)) =\n  Doc.concat\n    [\n      (if not cn then Doc.text \"+\" else if not co then Doc.text \"-\" else Doc.nil);\n      (if typ = \"_\" then Doc.text \"_\" else Doc.text (\"'\" ^ typ));\n    ]\n\nlet rec print_out_sig_item_doc ?(print_name_as_is = false)\n    (out_sig_item : Outcometree.out_sig_item) =\n  match out_sig_item with\n  | Osig_class _ | Osig_class_type _ -> Doc.nil\n  | Osig_ellipsis -> Doc.dotdotdot\n  | Osig_value value_decl ->\n    Doc.group\n      (Doc.concat\n         [\n           print_out_attributes_doc value_decl.oval_attributes;\n           Doc.text\n             (match value_decl.oval_prims with\n             | [] -> \"let \"\n             | _ -> \"external \");\n           Doc.text value_decl.oval_name;\n           Doc.text \":\";\n           Doc.space;\n           print_out_type_doc value_decl.oval_type;\n           (match value_decl.oval_prims with\n           | [] -> Doc.nil\n           | primitives ->\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.text \" =\";\n                    Doc.line;\n                    Doc.group\n                      (Doc.join ~sep:Doc.line\n                         (List.map\n                            (fun prim ->\n                              let prim =\n                                if\n                                  prim <> \"\"\n                                  && (prim.[0] [@doesNotRaise]) = '\\132'\n                                then \"#rescript-external\"\n                                else prim\n                              in\n                              (* not display those garbage '\\132' is a magic number for marshal *)\n          
                    Doc.text (\"\\\"\" ^ prim ^ \"\\\"\"))\n                            primitives));\n                  ]));\n         ])\n  | Osig_typext (out_extension_constructor, _outExtStatus) ->\n    print_out_extension_constructor_doc out_extension_constructor\n  | Osig_modtype (mod_name, Omty_signature []) ->\n    Doc.concat [Doc.text \"module type \"; Doc.text mod_name]\n  | Osig_modtype (mod_name, out_module_type) ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.text \"module type \";\n           Doc.text mod_name;\n           Doc.text \" = \";\n           print_out_module_type_doc out_module_type;\n         ])\n  | Osig_module (mod_name, Omty_alias ident, _) ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.text \"module \";\n           Doc.text mod_name;\n           Doc.text \" =\";\n           Doc.line;\n           print_out_ident_doc ident;\n         ])\n  | Osig_module (mod_name, out_mod_type, out_rec_status) ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.text\n             (match out_rec_status with\n             | Orec_not -> \"module \"\n             | Orec_first -> \"module rec \"\n             | Orec_next -> \"and \");\n           Doc.text mod_name;\n           Doc.text \": \";\n           print_out_module_type_doc out_mod_type;\n         ])\n  | Osig_type (out_type_decl, out_rec_status) ->\n    (* TODO: manifest ? 
*)\n    let attrs =\n      match (out_type_decl.otype_immediate, out_type_decl.otype_unboxed) with\n      | false, false -> Doc.nil\n      | true, false -> Doc.concat [Doc.text \"@immediate\"; Doc.line]\n      | false, true -> Doc.concat [Doc.text \"@unboxed\"; Doc.line]\n      | true, true -> Doc.concat [Doc.text \"@immediate @unboxed\"; Doc.line]\n    in\n    let kw =\n      Doc.text\n        (match out_rec_status with\n        | Orec_not -> \"type \"\n        | Orec_first -> \"type rec \"\n        | Orec_next -> \"and \")\n    in\n    let type_params =\n      match out_type_decl.otype_params with\n      | [] -> Doc.nil\n      | _params ->\n        Doc.group\n          (Doc.concat\n             [\n               Doc.less_than;\n               Doc.indent\n                 (Doc.concat\n                    [\n                      Doc.soft_line;\n                      Doc.join\n                        ~sep:(Doc.concat [Doc.comma; Doc.line])\n                        (List.map print_type_parameter_doc\n                           out_type_decl.otype_params);\n                    ]);\n               Doc.trailing_comma;\n               Doc.soft_line;\n               Doc.greater_than;\n             ])\n    in\n    let private_doc =\n      match out_type_decl.otype_private with\n      | Asttypes.Private -> Doc.text \"private \"\n      | Public -> Doc.nil\n    in\n    let kind =\n      match out_type_decl.otype_type with\n      | Otyp_open -> Doc.concat [Doc.text \" = \"; private_doc; Doc.text \"..\"]\n      | Otyp_abstract -> Doc.nil\n      | Otyp_record record ->\n        Doc.concat\n          [\n            Doc.text \" = \";\n            private_doc;\n            print_record_declaration_doc ~inline:false record;\n          ]\n      | typ -> Doc.concat [Doc.text \" = \"; print_out_type_doc typ]\n    in\n    let constraints =\n      match out_type_decl.otype_cstrs with\n      | [] -> Doc.nil\n      | _ ->\n        Doc.group\n          (Doc.indent\n             
(Doc.concat\n                [\n                  Doc.hard_line;\n                  Doc.join ~sep:Doc.line\n                    (List.map\n                       (fun (typ1, typ2) ->\n                         Doc.group\n                           (Doc.concat\n                              [\n                                Doc.text \"constraint \";\n                                print_out_type_doc typ1;\n                                Doc.text \" =\";\n                                Doc.space;\n                                print_out_type_doc typ2;\n                              ]))\n                       out_type_decl.otype_cstrs);\n                ]))\n    in\n    Doc.group\n      (Doc.concat\n         [\n           attrs;\n           Doc.group\n             (Doc.concat\n                [\n                  attrs;\n                  kw;\n                  (if print_name_as_is then Doc.text out_type_decl.otype_name\n                   else\n                     Printer.print_ident_like ~allow_uident:false\n                       out_type_decl.otype_name);\n                  type_params;\n                  kind;\n                ]);\n           constraints;\n         ])\n\nand print_out_module_type_doc (out_mod_type : Outcometree.out_module_type) =\n  match out_mod_type with\n  | Omty_abstract -> Doc.nil\n  | Omty_ident ident -> print_out_ident_doc ident\n  (* example: module Increment = (M: X_int) => X_int *)\n  | Omty_functor _ ->\n    let args, return_mod_type = collect_functor_args out_mod_type [] in\n    let args_doc =\n      match args with\n      | [(_, None)] -> Doc.text \"()\"\n      | args ->\n        Doc.group\n          (Doc.concat\n             [\n               Doc.lparen;\n               Doc.indent\n                 (Doc.concat\n                    [\n                      Doc.soft_line;\n                      Doc.join\n                        ~sep:(Doc.concat [Doc.comma; Doc.line])\n                        (List.map\n                          
 (fun (lbl, opt_mod_type) ->\n                             Doc.group\n                               (Doc.concat\n                                  [\n                                    Doc.text lbl;\n                                    (match opt_mod_type with\n                                    | None -> Doc.nil\n                                    | Some mod_type ->\n                                      Doc.concat\n                                        [\n                                          Doc.text \": \";\n                                          print_out_module_type_doc mod_type;\n                                        ]);\n                                  ]))\n                           args);\n                    ]);\n               Doc.trailing_comma;\n               Doc.soft_line;\n               Doc.rparen;\n             ])\n    in\n    Doc.group\n      (Doc.concat\n         [args_doc; Doc.text \" => \"; print_out_module_type_doc return_mod_type])\n  | Omty_signature [] -> Doc.nil\n  | Omty_signature signature ->\n    Doc.breakable_group ~force_break:true\n      (Doc.concat\n         [\n           Doc.lbrace;\n           Doc.indent (Doc.concat [Doc.line; print_out_signature_doc signature]);\n           Doc.soft_line;\n           Doc.rbrace;\n         ])\n  | Omty_alias _ident -> Doc.nil\n\nand print_out_signature_doc (signature : Outcometree.out_sig_item list) =\n  let rec loop signature acc =\n    match signature with\n    | [] -> List.rev acc\n    | Outcometree.Osig_typext (ext, Oext_first) :: items ->\n      (* Gather together the extension constructors *)\n      let rec gather_extensions acc items =\n        match items with\n        | Outcometree.Osig_typext (ext, Oext_next) :: items ->\n          gather_extensions\n            ((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc)\n            items\n        | _ -> (List.rev acc, items)\n      in\n      let exts, items =\n        gather_extensions\n          [(ext.oext_name, 
ext.oext_args, ext.oext_ret_type)]\n          items\n      in\n      let te =\n        {\n          Outcometree.otyext_name = ext.oext_type_name;\n          otyext_params = ext.oext_type_params;\n          otyext_constructors = exts;\n          otyext_private = ext.oext_private;\n        }\n      in\n      let doc = print_out_type_extension_doc te in\n      loop items (doc :: acc)\n    | item :: items ->\n      let doc = print_out_sig_item_doc ~print_name_as_is:false item in\n      loop items (doc :: acc)\n  in\n  match loop signature [] with\n  | [doc] -> doc\n  | docs -> Doc.breakable_group ~force_break:true (Doc.join ~sep:Doc.line docs)\n\nand print_out_extension_constructor_doc\n    (out_ext : Outcometree.out_extension_constructor) =\n  let type_params =\n    match out_ext.oext_type_params with\n    | [] -> Doc.nil\n    | params ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.less_than;\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.comma; Doc.line])\n                      (List.map\n                         (fun ty ->\n                           Doc.text (if ty = \"_\" then ty else \"'\" ^ ty))\n                         params);\n                  ]);\n             Doc.soft_line;\n             Doc.greater_than;\n           ])\n  in\n\n  Doc.group\n    (Doc.concat\n       [\n         Doc.text \"type \";\n         Printer.print_ident_like ~allow_uident:false out_ext.oext_type_name;\n         type_params;\n         Doc.text \" += \";\n         Doc.line;\n         (if out_ext.oext_private = Asttypes.Private then Doc.text \"private \"\n          else Doc.nil);\n         print_out_constructor_doc\n           (out_ext.oext_name, out_ext.oext_args, out_ext.oext_ret_type);\n       ])\n\nand print_out_type_extension_doc\n    (type_extension : Outcometree.out_type_extension) =\n  let type_params =\n    match 
type_extension.otyext_params with\n    | [] -> Doc.nil\n    | params ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.less_than;\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.comma; Doc.line])\n                      (List.map\n                         (fun ty ->\n                           Doc.text (if ty = \"_\" then ty else \"'\" ^ ty))\n                         params);\n                  ]);\n             Doc.soft_line;\n             Doc.greater_than;\n           ])\n  in\n\n  Doc.group\n    (Doc.concat\n       [\n         Doc.text \"type \";\n         Printer.print_ident_like ~allow_uident:false type_extension.otyext_name;\n         type_params;\n         Doc.text \" += \";\n         (if type_extension.otyext_private = Asttypes.Private then\n            Doc.text \"private \"\n          else Doc.nil);\n         print_out_constructors_doc type_extension.otyext_constructors;\n       ])\n\nlet print_out_sig_item fmt out_sig_item =\n  Format.pp_print_string fmt\n    (Doc.to_string ~width:80 (print_out_sig_item_doc out_sig_item))\n\nlet print_out_signature fmt signature =\n  Format.pp_print_string fmt\n    (Doc.to_string ~width:80 (print_out_signature_doc signature))\n\nlet valid_float_lexeme s =\n  let l = String.length s in\n  let rec loop i =\n    if i >= l then s ^ \".\"\n    else\n      match s.[i] [@doesNotRaise] with\n      | '0' .. 
'9' | '-' -> loop (i + 1)\n      | _ -> s\n  in\n  loop 0\n\nlet float_repres f =\n  match classify_float f with\n  | FP_nan -> \"nan\"\n  | FP_infinite -> if f < 0.0 then \"neg_infinity\" else \"infinity\"\n  | _ ->\n    let float_val =\n      let s1 = Printf.sprintf \"%.12g\" f in\n      if f = (float_of_string [@doesNotRaise]) s1 then s1\n      else\n        let s2 = Printf.sprintf \"%.15g\" f in\n        if f = (float_of_string [@doesNotRaise]) s2 then s2\n        else Printf.sprintf \"%.18g\" f\n    in\n    valid_float_lexeme float_val\n\nlet rec print_out_value_doc (out_value : Outcometree.out_value) =\n  match out_value with\n  | Oval_array out_values ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.lbracket;\n           Doc.indent\n             (Doc.concat\n                [\n                  Doc.soft_line;\n                  Doc.join\n                    ~sep:(Doc.concat [Doc.comma; Doc.line])\n                    (List.map print_out_value_doc out_values);\n                ]);\n           Doc.trailing_comma;\n           Doc.soft_line;\n           Doc.rbracket;\n         ])\n  | Oval_char c -> Doc.text (\"'\" ^ Char.escaped c ^ \"'\")\n  | Oval_constr (out_ident, out_values) ->\n    Doc.group\n      (Doc.concat\n         [\n           print_out_ident_doc out_ident;\n           Doc.lparen;\n           Doc.indent\n             (Doc.concat\n                [\n                  Doc.soft_line;\n                  Doc.join\n                    ~sep:(Doc.concat [Doc.comma; Doc.line])\n                    (List.map print_out_value_doc out_values);\n                ]);\n           Doc.trailing_comma;\n           Doc.soft_line;\n           Doc.rparen;\n         ])\n  | Oval_ellipsis -> Doc.text \"...\"\n  | Oval_int i -> Doc.text (Format.sprintf \"%i\" i)\n  | Oval_int32 i -> Doc.text (Format.sprintf \"%lil\" i)\n  | Oval_int64 i -> Doc.text (Format.sprintf \"%LiL\" i)\n  | Oval_nativeint i -> Doc.text (Format.sprintf \"%nin\" i)\n  | Oval_float f -> 
Doc.text (float_repres f)\n  | Oval_list out_values ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.text \"list[\";\n           Doc.indent\n             (Doc.concat\n                [\n                  Doc.soft_line;\n                  Doc.join\n                    ~sep:(Doc.concat [Doc.comma; Doc.line])\n                    (List.map print_out_value_doc out_values);\n                ]);\n           Doc.trailing_comma;\n           Doc.soft_line;\n           Doc.rbracket;\n         ])\n  | Oval_printer fn ->\n    let fmt = Format.str_formatter in\n    fn fmt;\n    let str = Format.flush_str_formatter () in\n    Doc.text str\n  | Oval_record rows ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.lparen;\n           Doc.indent\n             (Doc.concat\n                [\n                  Doc.soft_line;\n                  Doc.join\n                    ~sep:(Doc.concat [Doc.comma; Doc.line])\n                    (List.map\n                       (fun (out_ident, out_value) ->\n                         Doc.group\n                           (Doc.concat\n                              [\n                                print_out_ident_doc out_ident;\n                                Doc.text \": \";\n                                print_out_value_doc out_value;\n                              ]))\n                       rows);\n                ]);\n           Doc.trailing_comma;\n           Doc.soft_line;\n           Doc.rparen;\n         ])\n  | Oval_string (txt, _sizeToPrint, _kind) ->\n    Doc.text (escape_string_contents txt)\n  | Oval_stuff txt -> Doc.text txt\n  | Oval_tuple out_values ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.lparen;\n           Doc.indent\n             (Doc.concat\n                [\n                  Doc.soft_line;\n                  Doc.join\n                    ~sep:(Doc.concat [Doc.comma; Doc.line])\n                    (List.map print_out_value_doc out_values);\n                ]);\n      
     Doc.trailing_comma;\n           Doc.soft_line;\n           Doc.rparen;\n         ])\n  (* Not supported by ReScript *)\n  | Oval_variant _ -> Doc.nil\n\nlet print_out_exception_doc exc out_value =\n  match exc with\n  | Sys.Break -> Doc.text \"Interrupted.\"\n  | Out_of_memory -> Doc.text \"Out of memory during evaluation.\"\n  | Stack_overflow ->\n    Doc.text \"Stack overflow during evaluation (looping recursion?).\"\n  | _ ->\n    Doc.group\n      (Doc.indent\n         (Doc.concat\n            [Doc.text \"Exception:\"; Doc.line; print_out_value_doc out_value]))\n\nlet print_out_phrase_signature signature =\n  let rec loop signature acc =\n    match signature with\n    | [] -> List.rev acc\n    | (Outcometree.Osig_typext (ext, Oext_first), None) :: signature ->\n      (* Gather together extension constructors *)\n      let rec gather_extensions acc items =\n        match items with\n        | (Outcometree.Osig_typext (ext, Oext_next), None) :: items ->\n          gather_extensions\n            ((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc)\n            items\n        | _ -> (List.rev acc, items)\n      in\n      let exts, signature =\n        gather_extensions\n          [(ext.oext_name, ext.oext_args, ext.oext_ret_type)]\n          signature\n      in\n      let te =\n        {\n          Outcometree.otyext_name = ext.oext_type_name;\n          otyext_params = ext.oext_type_params;\n          otyext_constructors = exts;\n          otyext_private = ext.oext_private;\n        }\n      in\n      let doc = print_out_type_extension_doc te in\n      loop signature (doc :: acc)\n    | (sig_item, opt_out_value) :: signature ->\n      let doc =\n        match opt_out_value with\n        | None -> print_out_sig_item_doc sig_item\n        | Some out_value ->\n          Doc.group\n            (Doc.concat\n               [\n                 print_out_sig_item_doc sig_item;\n                 Doc.text \" = \";\n                 print_out_value_doc out_value;\n 
              ])\n      in\n      loop signature (doc :: acc)\n  in\n  Doc.breakable_group ~force_break:true\n    (Doc.join ~sep:Doc.line (loop signature []))\n\nlet print_out_phrase_doc (out_phrase : Outcometree.out_phrase) =\n  match out_phrase with\n  | Ophr_eval (out_value, out_type) ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.text \"- : \";\n           print_out_type_doc out_type;\n           Doc.text \" =\";\n           Doc.indent (Doc.concat [Doc.line; print_out_value_doc out_value]);\n         ])\n  | Ophr_signature [] -> Doc.nil\n  | Ophr_signature signature -> print_out_phrase_signature signature\n  | Ophr_exception (exc, out_value) -> print_out_exception_doc exc out_value\n\nlet print_out_phrase fmt out_phrase =\n  Format.pp_print_string fmt\n    (Doc.to_string ~width:80 (print_out_phrase_doc out_phrase))\n\nlet print_out_module_type fmt out_module_type =\n  Format.pp_print_string fmt\n    (Doc.to_string ~width:80 (print_out_module_type_doc out_module_type))\n\nlet print_out_type_extension fmt type_extension =\n  Format.pp_print_string fmt\n    (Doc.to_string ~width:80 (print_out_type_extension_doc type_extension))\n\nlet print_out_value fmt out_value =\n  Format.pp_print_string fmt\n    (Doc.to_string ~width:80 (print_out_value_doc out_value))\n\n(* Not supported in ReScript *)\n(* Oprint.out_class_type *)\nlet setup =\n  lazy\n    (Oprint.out_value := print_out_value;\n     Oprint.out_type := print_out_type;\n     Oprint.out_module_type := print_out_module_type;\n     Oprint.out_sig_item := print_out_sig_item;\n     Oprint.out_signature := print_out_signature;\n     Oprint.out_type_extension := print_out_type_extension;\n     Oprint.out_phrase := print_out_phrase)\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_outcome_printer.mli",
    "content": "(* For the curious: the outcome printer is a printer to print data\n * from the outcometree.mli file in the ocaml compiler.\n * The outcome tree is used by:\n *  - ocaml's toplevel/repl, print results/errors\n *  - super errors, print nice errors\n *  - editor tooling, e.g. show type on hover\n *\n * In general it represent messages to show results or errors to the user. *)\n\nval parenthesized_ident : string -> bool [@@live]\n\nval setup : unit lazy_t [@@live]\n\n(* Needed for e.g. the playground to print typedtree data *)\nval print_out_type_doc : Outcometree.out_type -> Res_doc.t [@@live]\nval print_out_sig_item_doc :\n  ?print_name_as_is:bool -> Outcometree.out_sig_item -> Res_doc.t\n[@@live]\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_parens.ml",
    "content": "module ParsetreeViewer = Res_parsetree_viewer\ntype kind = Parenthesized | Braced of Location.t | Nothing\n\nlet expr expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | _ -> (\n    match expr with\n    | {\n     Parsetree.pexp_desc =\n       Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n    } ->\n      Nothing\n    | {pexp_desc = Pexp_constraint _} -> Parenthesized\n    | _ -> Nothing)\n\nlet expr_record_row_rhs e =\n  let kind = expr e in\n  match kind with\n  | Nothing when Res_parsetree_viewer.has_optional_attribute e.pexp_attributes\n    -> (\n    match e.pexp_desc with\n    | Pexp_ifthenelse _ | Pexp_fun _ -> Parenthesized\n    | _ -> kind)\n  | _ -> kind\n\nlet call_expr expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | _ -> (\n    match expr with\n    | {Parsetree.pexp_attributes = attrs}\n      when match ParsetreeViewer.filter_parsing_attrs attrs with\n           | _ :: _ -> true\n           | [] -> false ->\n      Parenthesized\n    | _\n      when ParsetreeViewer.is_unary_expression expr\n           || ParsetreeViewer.is_binary_expression expr ->\n      Parenthesized\n    | {\n     Parsetree.pexp_desc =\n       Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n    } ->\n      Nothing\n    | {pexp_desc = Pexp_fun _}\n      when ParsetreeViewer.is_underscore_apply_sugar expr ->\n      Nothing\n    | {\n     pexp_desc =\n       ( Pexp_lazy _ | Pexp_assert _ | Pexp_fun _ | Pexp_newtype _\n       | Pexp_function _ | Pexp_constraint _ | Pexp_setfield _ | Pexp_match _\n       | Pexp_try _ | Pexp_while _ | Pexp_for _ | Pexp_ifthenelse _ );\n    } ->\n      Parenthesized\n    | _ when Ast_uncurried.expr_is_uncurried_fun expr -> Parenthesized\n    | _ when 
ParsetreeViewer.has_await_attribute expr.pexp_attributes ->\n      Parenthesized\n    | _ -> Nothing)\n\nlet structure_expr expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | None -> (\n    match expr with\n    | _\n      when ParsetreeViewer.has_attributes expr.pexp_attributes\n           && not (ParsetreeViewer.is_jsx_expression expr) ->\n      Parenthesized\n    | {\n     Parsetree.pexp_desc =\n       Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n    } ->\n      Nothing\n    | {pexp_desc = Pexp_constraint _} -> Parenthesized\n    | _ -> Nothing)\n\nlet unary_expr_operand expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | None -> (\n    match expr with\n    | {Parsetree.pexp_attributes = attrs}\n      when match ParsetreeViewer.filter_parsing_attrs attrs with\n           | _ :: _ -> true\n           | [] -> false ->\n      Parenthesized\n    | expr\n      when ParsetreeViewer.is_unary_expression expr\n           || ParsetreeViewer.is_binary_expression expr ->\n      Parenthesized\n    | {\n     pexp_desc =\n       Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n    } ->\n      Nothing\n    | {pexp_desc = Pexp_fun _}\n      when ParsetreeViewer.is_underscore_apply_sugar expr ->\n      Nothing\n    | {\n     pexp_desc =\n       ( Pexp_lazy _ | Pexp_assert _ | Pexp_fun _ | Pexp_newtype _\n       | Pexp_function _ | Pexp_constraint _ | Pexp_setfield _\n       | Pexp_extension _ (* readability? 
maybe remove *) | Pexp_match _\n       | Pexp_try _ | Pexp_while _ | Pexp_for _ | Pexp_ifthenelse _ );\n    } ->\n      Parenthesized\n    | _ when ParsetreeViewer.has_await_attribute expr.pexp_attributes ->\n      Parenthesized\n    | _ -> Nothing)\n\nlet binary_expr_operand ~is_lhs expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | None -> (\n    match expr with\n    | {\n     Parsetree.pexp_desc =\n       Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n    } ->\n      Nothing\n    | {pexp_desc = Pexp_fun _}\n      when ParsetreeViewer.is_underscore_apply_sugar expr ->\n      Nothing\n    | {\n     pexp_desc =\n       Pexp_constraint _ | Pexp_fun _ | Pexp_function _ | Pexp_newtype _;\n    } ->\n      Parenthesized\n    | _ when Ast_uncurried.expr_is_uncurried_fun expr -> Parenthesized\n    | expr when ParsetreeViewer.is_binary_expression expr -> Parenthesized\n    | expr when ParsetreeViewer.is_ternary_expr expr -> Parenthesized\n    | {pexp_desc = Pexp_lazy _ | Pexp_assert _} when is_lhs -> Parenthesized\n    | _ when ParsetreeViewer.has_await_attribute expr.pexp_attributes ->\n      Parenthesized\n    | {Parsetree.pexp_attributes = attrs} ->\n      if ParsetreeViewer.has_printable_attributes attrs then Parenthesized\n      else Nothing)\n\nlet sub_binary_expr_operand parent_operator child_operator =\n  let prec_parent = ParsetreeViewer.operator_precedence parent_operator in\n  let prec_child = ParsetreeViewer.operator_precedence child_operator in\n  prec_parent > prec_child\n  || prec_parent == prec_child\n     && not\n          (ParsetreeViewer.flattenable_operators parent_operator child_operator)\n  ||\n  (* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… *)\n  (parent_operator = \"||\" && child_operator = \"&&\")\n\nlet rhs_binary_expr_operand parent_operator rhs =\n  match 
rhs.Parsetree.pexp_desc with\n  | Parsetree.Pexp_apply\n      ( {\n          pexp_attributes = [];\n          pexp_desc =\n            Pexp_ident {txt = Longident.Lident operator; loc = operator_loc};\n        },\n        [(_, _left); (_, _right)] )\n    when ParsetreeViewer.is_binary_operator operator\n         && not (operator_loc.loc_ghost && operator = \"^\") ->\n    let prec_parent = ParsetreeViewer.operator_precedence parent_operator in\n    let prec_child = ParsetreeViewer.operator_precedence operator in\n    prec_parent == prec_child\n  | _ -> false\n\nlet flatten_operand_rhs parent_operator rhs =\n  match rhs.Parsetree.pexp_desc with\n  | Parsetree.Pexp_apply\n      ( {\n          pexp_desc =\n            Pexp_ident {txt = Longident.Lident operator; loc = operator_loc};\n        },\n        [(_, _left); (_, _right)] )\n    when ParsetreeViewer.is_binary_operator operator\n         && not (operator_loc.loc_ghost && operator = \"^\") ->\n    let prec_parent = ParsetreeViewer.operator_precedence parent_operator in\n    let prec_child = ParsetreeViewer.operator_precedence operator in\n    prec_parent >= prec_child || rhs.pexp_attributes <> []\n  | Pexp_construct ({txt = Lident \"Function$\"}, Some _) -> true\n  | Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}) ->\n    false\n  | Pexp_fun _ when ParsetreeViewer.is_underscore_apply_sugar rhs -> false\n  | Pexp_fun _ | Pexp_newtype _ | Pexp_setfield _ | Pexp_constraint _ -> true\n  | _ when ParsetreeViewer.is_ternary_expr rhs -> true\n  | _ -> false\n\nlet binary_operator_inside_await_needs_parens operator =\n  ParsetreeViewer.operator_precedence operator\n  < ParsetreeViewer.operator_precedence \"|.\"\n\nlet lazy_or_assert_or_await_expr_rhs ?(in_await = false) expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | None -> (\n    match expr with\n    | 
{Parsetree.pexp_attributes = attrs}\n      when match ParsetreeViewer.filter_parsing_attrs attrs with\n           | _ :: _ -> true\n           | [] -> false ->\n      Parenthesized\n    | {\n     pexp_desc =\n       Pexp_apply ({pexp_desc = Pexp_ident {txt = Longident.Lident operator}}, _);\n    }\n      when ParsetreeViewer.is_binary_expression expr ->\n      if in_await && not (binary_operator_inside_await_needs_parens operator)\n      then Nothing\n      else Parenthesized\n    | {\n     pexp_desc =\n       Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n    } ->\n      Nothing\n    | {pexp_desc = Pexp_fun _}\n      when ParsetreeViewer.is_underscore_apply_sugar expr ->\n      Nothing\n    | {\n     pexp_desc =\n       ( Pexp_lazy _ | Pexp_assert _ | Pexp_fun _ | Pexp_newtype _\n       | Pexp_function _ | Pexp_constraint _ | Pexp_setfield _ | Pexp_match _\n       | Pexp_try _ | Pexp_while _ | Pexp_for _ | Pexp_ifthenelse _ );\n    } ->\n      Parenthesized\n    | _\n      when (not in_await)\n           && ParsetreeViewer.has_await_attribute expr.pexp_attributes ->\n      Parenthesized\n    | _ -> Nothing)\n\nlet is_negative_constant constant =\n  let is_neg txt =\n    let len = String.length txt in\n    len > 0 && (String.get [@doesNotRaise]) txt 0 = '-'\n  in\n  match constant with\n  | (Parsetree.Pconst_integer (i, _) | Pconst_float (i, _)) when is_neg i ->\n    true\n  | _ -> false\n\nlet field_expr expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | None -> (\n    match expr with\n    | {Parsetree.pexp_attributes = attrs}\n      when match ParsetreeViewer.filter_parsing_attrs attrs with\n           | _ :: _ -> true\n           | [] -> false ->\n      Parenthesized\n    | expr\n      when ParsetreeViewer.is_binary_expression expr\n           || ParsetreeViewer.is_unary_expression expr ->\n      Parenthesized\n    | 
{\n     pexp_desc =\n       Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n    } ->\n      Nothing\n    | {pexp_desc = Pexp_constant c} when is_negative_constant c -> Parenthesized\n    | {pexp_desc = Pexp_fun _}\n      when ParsetreeViewer.is_underscore_apply_sugar expr ->\n      Nothing\n    | {\n     pexp_desc =\n       ( Pexp_lazy _ | Pexp_assert _\n       | Pexp_extension _ (* %extension.x vs (%extension).x *) | Pexp_fun _\n       | Pexp_newtype _ | Pexp_function _ | Pexp_constraint _ | Pexp_setfield _\n       | Pexp_match _ | Pexp_try _ | Pexp_while _ | Pexp_for _\n       | Pexp_ifthenelse _ );\n    } ->\n      Parenthesized\n    | _ when ParsetreeViewer.has_await_attribute expr.pexp_attributes ->\n      Parenthesized\n    | _ -> Nothing)\n\nlet set_field_expr_rhs expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | None -> (\n    match expr with\n    | {\n     Parsetree.pexp_desc =\n       Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n    } ->\n      Nothing\n    | {pexp_desc = Pexp_constraint _} -> Parenthesized\n    | _ -> Nothing)\n\nlet ternary_operand expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | None -> (\n    match expr with\n    | {\n     Parsetree.pexp_desc =\n       Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n    } ->\n      Nothing\n    | {pexp_desc = Pexp_constraint _} -> Parenthesized\n    | _ when Res_parsetree_viewer.is_fun_newtype expr -> (\n      let _uncurried, _attrsOnArrow, _parameters, return_expr =\n        ParsetreeViewer.fun_expr expr\n      in\n      match return_expr.pexp_desc with\n      | Pexp_constraint _ -> Parenthesized\n      | _ -> Nothing)\n    | _ -> Nothing)\n\nlet starts_with_minus txt =\n  let len = 
String.length txt in\n  if len == 0 then false\n  else\n    let s = (String.get [@doesNotRaise]) txt 0 in\n    s = '-'\n\nlet jsx_prop_expr expr =\n  match expr.Parsetree.pexp_desc with\n  | Parsetree.Pexp_let _ | Pexp_sequence _ | Pexp_letexception _\n  | Pexp_letmodule _ | Pexp_open _ ->\n    Nothing\n  | _ -> (\n    let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n    match opt_braces with\n    | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n    | None -> (\n      match expr with\n      | {\n       Parsetree.pexp_desc =\n         Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _));\n       pexp_attributes = [];\n      }\n        when starts_with_minus x ->\n        Parenthesized\n      | _ when ParsetreeViewer.has_await_attribute expr.pexp_attributes ->\n        Parenthesized\n      | {\n       Parsetree.pexp_desc =\n         ( Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _\n         | Pexp_variant _ | Pexp_array _ | Pexp_pack _ | Pexp_record _\n         | Pexp_extension _ | Pexp_letmodule _ | Pexp_letexception _\n         | Pexp_open _ | Pexp_sequence _ | Pexp_let _ | Pexp_tuple _ );\n       pexp_attributes = [];\n      } ->\n        Nothing\n      | {\n       Parsetree.pexp_desc =\n         Pexp_constraint\n           ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n       pexp_attributes = [];\n      } ->\n        Nothing\n      | _ -> Parenthesized))\n\nlet jsx_child_expr expr =\n  match expr.Parsetree.pexp_desc with\n  | Parsetree.Pexp_let _ | Pexp_sequence _ | Pexp_letexception _\n  | Pexp_letmodule _ | Pexp_open _ ->\n    Nothing\n  | _ -> (\n    let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n    match opt_braces with\n    | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n    | _ -> (\n      match expr with\n      | {\n       Parsetree.pexp_desc =\n         Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _));\n       pexp_attributes = [];\n      }\n    
    when starts_with_minus x ->\n        Parenthesized\n      | _ when ParsetreeViewer.has_await_attribute expr.pexp_attributes ->\n        Parenthesized\n      | {\n       Parsetree.pexp_desc =\n         ( Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _\n         | Pexp_variant _ | Pexp_array _ | Pexp_pack _ | Pexp_record _\n         | Pexp_extension _ | Pexp_letmodule _ | Pexp_letexception _\n         | Pexp_open _ | Pexp_sequence _ | Pexp_let _ );\n       pexp_attributes = [];\n      } ->\n        Nothing\n      | {\n       Parsetree.pexp_desc =\n         Pexp_constraint\n           ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _});\n       pexp_attributes = [];\n      } ->\n        Nothing\n      | expr when ParsetreeViewer.is_jsx_expression expr -> Nothing\n      | _ -> Parenthesized))\n\nlet binary_expr expr =\n  let opt_braces, _ = ParsetreeViewer.process_braces_attr expr in\n  match opt_braces with\n  | Some ({Location.loc = braces_loc}, _) -> Braced braces_loc\n  | None -> (\n    match expr with\n    | {Parsetree.pexp_attributes = _ :: _} as expr\n      when ParsetreeViewer.is_binary_expression expr ->\n      Parenthesized\n    | _ -> Nothing)\n\nlet mod_type_functor_return mod_type =\n  match mod_type with\n  | {Parsetree.pmty_desc = Pmty_with _} -> true\n  | _ -> false\n\n(* Add parens for readability:\n     module type Functor = SetLike => Set with type t = A.t\n   This is actually:\n     module type Functor = (SetLike => Set) with type t = A.t\n*)\nlet mod_type_with_operand mod_type =\n  match mod_type with\n  | {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true\n  | _ -> false\n\nlet mod_expr_functor_constraint mod_type =\n  match mod_type with\n  | {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true\n  | _ -> false\n\nlet braced_expr expr =\n  match expr.Parsetree.pexp_desc with\n  | Pexp_constraint ({pexp_desc = Pexp_pack _}, {ptyp_desc = Ptyp_package _}) ->\n    false\n  | Pexp_constraint _ -> true\n  | _ 
-> false\n\nlet include_mod_expr mod_expr =\n  match mod_expr.Parsetree.pmod_desc with\n  | Parsetree.Pmod_constraint _ -> true\n  | _ -> false\n\nlet arrow_return_typ_expr typ_expr =\n  match typ_expr.Parsetree.ptyp_desc with\n  | Parsetree.Ptyp_arrow _ -> true\n  | _ when Ast_uncurried.core_type_is_uncurried_fun typ_expr -> true\n  | _ -> false\n\nlet pattern_record_row_rhs (pattern : Parsetree.pattern) =\n  match pattern.ppat_desc with\n  | Ppat_constraint ({ppat_desc = Ppat_unpack _}, {ptyp_desc = Ptyp_package _})\n    ->\n    false\n  | Ppat_constraint _ -> true\n  | _ -> false\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_parens.mli",
    "content": "type kind = Parenthesized | Braced of Location.t | Nothing\n\nval expr : Parsetree.expression -> kind\nval structure_expr : Parsetree.expression -> kind\n\nval unary_expr_operand : Parsetree.expression -> kind\n\nval binary_expr_operand : is_lhs:bool -> Parsetree.expression -> kind\nval sub_binary_expr_operand : string -> string -> bool\nval rhs_binary_expr_operand : string -> Parsetree.expression -> bool\nval flatten_operand_rhs : string -> Parsetree.expression -> bool\n\nval binary_operator_inside_await_needs_parens : string -> bool\nval lazy_or_assert_or_await_expr_rhs :\n  ?in_await:bool -> Parsetree.expression -> kind\n\nval field_expr : Parsetree.expression -> kind\n\nval set_field_expr_rhs : Parsetree.expression -> kind\n\nval ternary_operand : Parsetree.expression -> kind\n\nval jsx_prop_expr : Parsetree.expression -> kind\nval jsx_child_expr : Parsetree.expression -> kind\n\nval binary_expr : Parsetree.expression -> kind\nval mod_type_functor_return : Parsetree.module_type -> bool\nval mod_type_with_operand : Parsetree.module_type -> bool\nval mod_expr_functor_constraint : Parsetree.module_type -> bool\n\nval braced_expr : Parsetree.expression -> bool\nval call_expr : Parsetree.expression -> kind\n\nval include_mod_expr : Parsetree.module_expr -> bool\n\nval arrow_return_typ_expr : Parsetree.core_type -> bool\n\nval pattern_record_row_rhs : Parsetree.pattern -> bool\n\nval expr_record_row_rhs : Parsetree.expression -> kind\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_parser.ml",
    "content": "module Scanner = Res_scanner\nmodule Diagnostics = Res_diagnostics\nmodule Token = Res_token\nmodule Grammar = Res_grammar\nmodule Reporting = Res_reporting\n\nmodule Comment = Res_comment\n\ntype mode = ParseForTypeChecker | Default\n\ntype region_status = Report | Silent\n\ntype t = {\n  mode: mode;\n  mutable scanner: Scanner.t;\n  mutable token: Token.t;\n  mutable start_pos: Lexing.position;\n  mutable end_pos: Lexing.position;\n  mutable prev_end_pos: Lexing.position;\n  mutable breadcrumbs: (Grammar.t * Lexing.position) list;\n  mutable errors: Reporting.parse_error list;\n  mutable diagnostics: Diagnostics.t list;\n  mutable comments: Comment.t list;\n  mutable regions: region_status ref list;\n  mutable uncurried_config: Config.uncurried;\n}\n\nlet err ?start_pos ?end_pos p error =\n  match p.regions with\n  | ({contents = Report} as region) :: _ ->\n    let d =\n      Diagnostics.make\n        ~start_pos:\n          (match start_pos with\n          | Some pos -> pos\n          | None -> p.start_pos)\n        ~end_pos:\n          (match end_pos with\n          | Some pos -> pos\n          | None -> p.end_pos)\n        error\n    in\n    p.diagnostics <- d :: p.diagnostics;\n    region := Silent\n  | _ -> ()\n\nlet begin_region p = p.regions <- ref Report :: p.regions\nlet end_region p =\n  match p.regions with\n  | [] -> ()\n  | _ :: rest -> p.regions <- rest\n\nlet doc_comment_to_attribute_token comment =\n  let txt = Comment.txt comment in\n  let loc = Comment.loc comment in\n  Token.DocComment (loc, txt)\n\nlet module_comment_to_attribute_token comment =\n  let txt = Comment.txt comment in\n  let loc = Comment.loc comment in\n  Token.ModuleComment (loc, txt)\n\n(* Advance to the next non-comment token and store any encountered comment\n * in the parser's state. 
Every comment contains the end position of its\n * previous token to facilite comment interleaving *)\nlet rec next ?prev_end_pos p =\n  if p.token = Eof then assert false;\n  let prev_end_pos =\n    match prev_end_pos with\n    | Some pos -> pos\n    | None -> p.end_pos\n  in\n  let start_pos, end_pos, token = Scanner.scan p.scanner in\n  match token with\n  | Comment c ->\n    if Comment.is_doc_comment c then (\n      p.token <- doc_comment_to_attribute_token c;\n      p.prev_end_pos <- prev_end_pos;\n      p.start_pos <- start_pos;\n      p.end_pos <- end_pos)\n    else if Comment.is_module_comment c then (\n      p.token <- module_comment_to_attribute_token c;\n      p.prev_end_pos <- prev_end_pos;\n      p.start_pos <- start_pos;\n      p.end_pos <- end_pos)\n    else (\n      Comment.set_prev_tok_end_pos c p.end_pos;\n      p.comments <- c :: p.comments;\n      p.prev_end_pos <- p.end_pos;\n      p.end_pos <- end_pos;\n      next ~prev_end_pos p)\n  | _ ->\n    p.token <- token;\n    p.prev_end_pos <- prev_end_pos;\n    p.start_pos <- start_pos;\n    p.end_pos <- end_pos\n\nlet next_unsafe p = if p.token <> Eof then next p\n\nlet next_template_literal_token p =\n  let start_pos, end_pos, token =\n    Scanner.scan_template_literal_token p.scanner\n  in\n  p.token <- token;\n  p.prev_end_pos <- p.end_pos;\n  p.start_pos <- start_pos;\n  p.end_pos <- end_pos\n\nlet check_progress ~prev_end_pos ~result p =\n  if p.end_pos == prev_end_pos then None else Some result\n\nlet make ?(mode = ParseForTypeChecker) src filename =\n  let scanner = Scanner.make ~filename src in\n  let parser_state =\n    {\n      mode;\n      scanner;\n      token = Token.Semicolon;\n      start_pos = Lexing.dummy_pos;\n      prev_end_pos = Lexing.dummy_pos;\n      end_pos = Lexing.dummy_pos;\n      breadcrumbs = [];\n      errors = [];\n      diagnostics = [];\n      comments = [];\n      regions = [ref Report];\n      uncurried_config = !Config.uncurried;\n    }\n  in\n  
parser_state.scanner.err <-\n    (fun ~start_pos ~end_pos error ->\n      let diagnostic = Diagnostics.make ~start_pos ~end_pos error in\n      parser_state.diagnostics <- diagnostic :: parser_state.diagnostics);\n  next parser_state;\n  parser_state\n\nlet leave_breadcrumb p circumstance =\n  let crumb = (circumstance, p.start_pos) in\n  p.breadcrumbs <- crumb :: p.breadcrumbs\n\nlet eat_breadcrumb p =\n  match p.breadcrumbs with\n  | [] -> ()\n  | _ :: crumbs -> p.breadcrumbs <- crumbs\n\nlet optional p token =\n  if p.token = token then\n    let () = next p in\n    true\n  else false\n\nlet expect ?grammar token p =\n  if p.token = token then next p\n  else\n    let error = Diagnostics.expected ?grammar p.prev_end_pos token in\n    err ~start_pos:p.prev_end_pos p error\n\n(* Don't use immutable copies here, it trashes certain heuristics\n * in the ocaml compiler, resulting in massive slowdowns of the parser *)\nlet lookahead p callback =\n  let err = p.scanner.err in\n  let ch = p.scanner.ch in\n  let offset = p.scanner.offset in\n  let offset16 = p.scanner.offset16 in\n  let line_offset = p.scanner.line_offset in\n  let lnum = p.scanner.lnum in\n  let mode = p.scanner.mode in\n  let token = p.token in\n  let start_pos = p.start_pos in\n  let end_pos = p.end_pos in\n  let prev_end_pos = p.prev_end_pos in\n  let breadcrumbs = p.breadcrumbs in\n  let errors = p.errors in\n  let diagnostics = p.diagnostics in\n  let comments = p.comments in\n  let uncurried_config = p.uncurried_config in\n\n  let res = callback p in\n\n  p.scanner.err <- err;\n  p.scanner.ch <- ch;\n  p.scanner.offset <- offset;\n  p.scanner.offset16 <- offset16;\n  p.scanner.line_offset <- line_offset;\n  p.scanner.lnum <- lnum;\n  p.scanner.mode <- mode;\n  p.token <- token;\n  p.start_pos <- start_pos;\n  p.end_pos <- end_pos;\n  p.prev_end_pos <- prev_end_pos;\n  p.breadcrumbs <- breadcrumbs;\n  p.errors <- errors;\n  p.diagnostics <- diagnostics;\n  p.comments <- comments;\n  
p.uncurried_config <- uncurried_config;\n\n  res\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_parser.mli",
    "content": "module Scanner = Res_scanner\nmodule Token = Res_token\nmodule Grammar = Res_grammar\nmodule Reporting = Res_reporting\nmodule Diagnostics = Res_diagnostics\nmodule Comment = Res_comment\n\ntype mode = ParseForTypeChecker | Default\n\ntype region_status = Report | Silent\n\ntype t = {\n  mode: mode;\n  mutable scanner: Scanner.t;\n  mutable token: Token.t;\n  mutable start_pos: Lexing.position;\n  mutable end_pos: Lexing.position;\n  mutable prev_end_pos: Lexing.position;\n  mutable breadcrumbs: (Grammar.t * Lexing.position) list;\n  mutable errors: Reporting.parse_error list;\n  mutable diagnostics: Diagnostics.t list;\n  mutable comments: Comment.t list;\n  mutable regions: region_status ref list;\n  mutable uncurried_config: Config.uncurried;\n}\n\nval make : ?mode:mode -> string -> string -> t\n\nval expect : ?grammar:Grammar.t -> Token.t -> t -> unit\nval optional : t -> Token.t -> bool\nval next : ?prev_end_pos:Lexing.position -> t -> unit\nval next_unsafe : t -> unit (* Does not assert on Eof, makes no progress *)\nval next_template_literal_token : t -> unit\nval lookahead : t -> (t -> 'a) -> 'a\nval err :\n  ?start_pos:Lexing.position ->\n  ?end_pos:Lexing.position ->\n  t ->\n  Diagnostics.category ->\n  unit\n\nval leave_breadcrumb : t -> Grammar.t -> unit\nval eat_breadcrumb : t -> unit\n\nval begin_region : t -> unit\nval end_region : t -> unit\n\nval check_progress : prev_end_pos:Lexing.position -> result:'a -> t -> 'a option\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_parsetree_viewer.ml",
    "content": "open Parsetree\n\nlet arrow_type ?(arity = max_int) ct =\n  let rec process attrs_before acc typ arity =\n    match typ with\n    | typ when arity <= 0 -> (attrs_before, List.rev acc, typ)\n    | {\n     ptyp_desc = Ptyp_arrow ((Nolabel as lbl), typ1, typ2);\n     ptyp_attributes = [];\n    } ->\n      let arg = ([], lbl, typ1) in\n      process attrs_before (arg :: acc) typ2 (arity - 1)\n    | {\n     ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2);\n     ptyp_attributes = [({txt = \"bs\"}, _)];\n    } ->\n      (* stop here, the uncurried attribute always indicates the beginning of an arrow function\n       * e.g. `(. int) => (. int)` instead of `(. int, . int)` *)\n      (attrs_before, List.rev acc, typ)\n    | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = _attrs}\n      as return_type ->\n      let args = List.rev acc in\n      (attrs_before, args, return_type)\n    | {\n     ptyp_desc = Ptyp_arrow (((Labelled _ | Optional _) as lbl), typ1, typ2);\n     ptyp_attributes = attrs;\n    } ->\n      let arg = (attrs, lbl, typ1) in\n      process attrs_before (arg :: acc) typ2 (arity - 1)\n    | typ -> (attrs_before, List.rev acc, typ)\n  in\n  match ct with\n  | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = attrs} as\n    typ ->\n    process attrs [] {typ with ptyp_attributes = []} arity\n  | typ -> process [] [] typ arity\n\nlet functor_type modtype =\n  let rec process acc modtype =\n    match modtype with\n    | {\n     pmty_desc = Pmty_functor (lbl, arg_type, return_type);\n     pmty_attributes = attrs;\n    } ->\n      let arg = (attrs, lbl, arg_type) in\n      process (arg :: acc) return_type\n    | mod_type -> (List.rev acc, mod_type)\n  in\n  process [] modtype\n\nlet process_bs_attribute attrs =\n  let rec process bs_spotted acc attrs =\n    match attrs with\n    | [] -> (bs_spotted, List.rev acc)\n    | ({Location.txt = \"bs\"}, _) :: rest -> process true acc rest\n    | attr :: rest -> process 
bs_spotted (attr :: acc) rest\n  in\n  process false [] attrs\n\nlet process_uncurried_app_attribute attrs =\n  let rec process uncurried_app acc attrs =\n    match attrs with\n    | [] -> (uncurried_app, List.rev acc)\n    | ( {\n          Location.txt =\n            \"bs\" (* still support @bs to convert .ml files *) | \"res.uapp\";\n        },\n        _ )\n      :: rest ->\n      process true acc rest\n    | attr :: rest -> process uncurried_app (attr :: acc) rest\n  in\n  process false [] attrs\n\nlet process_partial_app_attribute attrs =\n  let rec process partial_app acc attrs =\n    match attrs with\n    | [] -> (partial_app, List.rev acc)\n    | ({Location.txt = \"res.partial\"}, _) :: rest -> process true acc rest\n    | attr :: rest -> process partial_app (attr :: acc) rest\n  in\n  process false [] attrs\n\ntype function_attributes_info = {\n  async: bool;\n  bs: bool;\n  attributes: Parsetree.attributes;\n}\n\nlet process_function_attributes attrs =\n  let rec process async bs acc attrs =\n    match attrs with\n    | [] -> {async; bs; attributes = List.rev acc}\n    | ({Location.txt = \"bs\"}, _) :: rest -> process async true acc rest\n    | ({Location.txt = \"res.async\"}, _) :: rest -> process true bs acc rest\n    | attr :: rest -> process async bs (attr :: acc) rest\n  in\n  process false false [] attrs\n\nlet has_await_attribute attrs =\n  List.exists\n    (function\n      | {Location.txt = \"res.await\"}, _ -> true\n      | _ -> false)\n    attrs\n\nlet collect_array_expressions expr =\n  match expr.pexp_desc with\n  | Pexp_array exprs -> (exprs, None)\n  | _ -> ([], Some expr)\n\nlet collect_list_expressions expr =\n  let rec collect acc expr =\n    match expr.pexp_desc with\n    | Pexp_construct ({txt = Longident.Lident \"[]\"}, _) -> (List.rev acc, None)\n    | Pexp_construct\n        ( {txt = Longident.Lident \"::\"},\n          Some {pexp_desc = Pexp_tuple (hd :: [tail])} ) ->\n      collect (hd :: acc) tail\n    | _ -> (List.rev acc, Some 
expr)\n  in\n  collect [] expr\n\n(* (__x) => f(a, __x, c) -----> f(a, _, c)  *)\nlet rewrite_underscore_apply expr =\n  let expr_fun =\n    if Ast_uncurried.expr_is_uncurried_fun expr then\n      Ast_uncurried.expr_extract_uncurried_fun expr\n    else expr\n  in\n  match expr_fun.pexp_desc with\n  | Pexp_fun\n      ( Nolabel,\n        None,\n        {ppat_desc = Ppat_var {txt = \"__x\"}},\n        ({pexp_desc = Pexp_apply (call_expr, args)} as e) ) ->\n    let new_args =\n      List.map\n        (fun arg ->\n          match arg with\n          | ( lbl,\n              ({pexp_desc = Pexp_ident ({txt = Longident.Lident \"__x\"} as lid)}\n               as arg_expr) ) ->\n            ( lbl,\n              {\n                arg_expr with\n                pexp_desc = Pexp_ident {lid with txt = Longident.Lident \"_\"};\n              } )\n          | arg -> arg)\n        args\n    in\n    {e with pexp_desc = Pexp_apply (call_expr, new_args)}\n  | _ -> expr\n\ntype fun_param_kind =\n  | Parameter of {\n      attrs: Parsetree.attributes;\n      lbl: Asttypes.arg_label;\n      default_expr: Parsetree.expression option;\n      pat: Parsetree.pattern;\n    }\n  | NewTypes of {attrs: Parsetree.attributes; locs: string Asttypes.loc list}\n\nlet fun_expr expr =\n  (* Turns (type t, type u, type z) into \"type t u z\" *)\n  let rec collect_new_types acc return_expr =\n    match return_expr with\n    | {pexp_desc = Pexp_newtype (string_loc, return_expr); pexp_attributes = []}\n      ->\n      collect_new_types (string_loc :: acc) return_expr\n    | return_expr -> (List.rev acc, return_expr)\n  in\n  let rec collect ~uncurried ~n_fun attrs_before acc expr =\n    match expr with\n    | {\n     pexp_desc =\n       Pexp_fun\n         ( Nolabel,\n           None,\n           {ppat_desc = Ppat_var {txt = \"__x\"}},\n           {pexp_desc = Pexp_apply _} );\n    } ->\n      (uncurried, attrs_before, List.rev acc, rewrite_underscore_apply expr)\n    | {pexp_desc = Pexp_newtype 
(string_loc, rest); pexp_attributes = attrs} ->\n      let string_locs, return_expr = collect_new_types [string_loc] rest in\n      let param = NewTypes {attrs; locs = string_locs} in\n      collect ~uncurried ~n_fun attrs_before (param :: acc) return_expr\n    | {\n     pexp_desc = Pexp_fun (lbl, default_expr, pattern, return_expr);\n     pexp_attributes = [];\n    } ->\n      let parameter =\n        Parameter {attrs = []; lbl; default_expr; pat = pattern}\n      in\n      collect ~uncurried ~n_fun:(n_fun + 1) attrs_before (parameter :: acc)\n        return_expr\n    (* If a fun has an attribute, then it stops here and makes currying.\n       i.e attributes outside of (...), uncurried `(.)` and `async` make currying *)\n    | {pexp_desc = Pexp_fun _} -> (uncurried, attrs_before, List.rev acc, expr)\n    | expr when n_fun = 0 && Ast_uncurried.expr_is_uncurried_fun expr ->\n      let expr = Ast_uncurried.expr_extract_uncurried_fun expr in\n      collect ~uncurried:true ~n_fun attrs_before acc expr\n    | expr -> (uncurried, attrs_before, List.rev acc, expr)\n  in\n  match expr with\n  | {pexp_desc = Pexp_fun _ | Pexp_newtype _} ->\n    collect ~uncurried:false ~n_fun:0 expr.pexp_attributes []\n      {expr with pexp_attributes = []}\n  | _ when Ast_uncurried.expr_is_uncurried_fun expr ->\n    let expr = Ast_uncurried.expr_extract_uncurried_fun expr in\n    collect ~uncurried:true ~n_fun:0 expr.pexp_attributes []\n      {expr with pexp_attributes = []}\n  | _ -> collect ~uncurried:false ~n_fun:0 [] [] expr\n\nlet process_braces_attr expr =\n  match expr.pexp_attributes with\n  | (({txt = \"res.braces\" | \"ns.braces\"}, _) as attr) :: attrs ->\n    (Some attr, {expr with pexp_attributes = attrs})\n  | _ -> (None, expr)\n\nlet filter_parsing_attrs attrs =\n  List.filter\n    (fun attr ->\n      match attr with\n      | ( {\n            Location.txt =\n              ( \"bs\" | \"res.uapp\" | \"res.arity\" | \"res.braces\" | \"ns.braces\"\n              | \"res.iflet\" 
| \"res.namedArgLoc\" | \"res.optional\" | \"res.ternary\"\n              | \"res.async\" | \"res.await\" | \"res.template\"\n              | \"res.taggedTemplate\" );\n          },\n          _ ) ->\n        false\n      | _ -> true)\n    attrs\n\nlet is_block_expr expr =\n  match expr.pexp_desc with\n  | Pexp_letmodule _ | Pexp_letexception _ | Pexp_let _ | Pexp_open _\n  | Pexp_sequence _ ->\n    true\n  | _ -> false\n\nlet is_braced_expr expr =\n  match process_braces_attr expr with\n  | Some _, _ -> true\n  | _ -> false\n\nlet is_multiline_text txt =\n  let len = String.length txt in\n  let rec check i =\n    if i >= len then false\n    else\n      let c = String.unsafe_get txt i in\n      match c with\n      | '\\010' | '\\013' -> true\n      | '\\\\' -> if i + 2 = len then false else check (i + 2)\n      | _ -> check (i + 1)\n  in\n  check 0\n\nlet is_huggable_expression expr =\n  match expr.pexp_desc with\n  | Pexp_array _ | Pexp_tuple _\n  | Pexp_constant (Pconst_string (_, Some _))\n  | Pexp_construct ({txt = Longident.Lident (\"::\" | \"[]\")}, _)\n  | Pexp_extension ({txt = \"obj\"}, _)\n  | Pexp_record _ ->\n    true\n  | _ when is_block_expr expr -> true\n  | _ when is_braced_expr expr -> true\n  | Pexp_constant (Pconst_string (txt, None)) when is_multiline_text txt -> true\n  | _ -> false\n\nlet is_huggable_rhs expr =\n  match expr.pexp_desc with\n  | Pexp_array _ | Pexp_tuple _\n  | Pexp_extension ({txt = \"obj\"}, _)\n  | Pexp_record _ ->\n    true\n  | _ when is_braced_expr expr -> true\n  | _ -> false\n\nlet is_huggable_pattern pattern =\n  match pattern.ppat_desc with\n  | Ppat_array _ | Ppat_tuple _ | Ppat_record _ | Ppat_variant _\n  | Ppat_construct _ ->\n    true\n  | _ -> false\n\nlet operator_precedence operator =\n  match operator with\n  | \":=\" -> 1\n  | \"||\" -> 2\n  | \"&&\" -> 3\n  | \"=\" | \"==\" | \"<\" | \">\" | \"!=\" | \"<>\" | \"!==\" | \"<=\" | \">=\" | \"|>\" -> 4\n  | \"+\" | \"+.\" | \"-\" | \"-.\" | \"^\" -> 5\n  | 
\"*\" | \"*.\" | \"/\" | \"/.\" -> 6\n  | \"**\" -> 7\n  | \"#\" | \"##\" | \"|.\" | \"|.u\" -> 8\n  | _ -> 0\n\nlet is_unary_operator operator =\n  match operator with\n  | \"~+\" | \"~+.\" | \"~-\" | \"~-.\" | \"not\" -> true\n  | _ -> false\n\nlet is_unary_expression expr =\n  match expr.pexp_desc with\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},\n        [(Nolabel, _arg)] )\n    when is_unary_operator operator ->\n    true\n  | _ -> false\n\n(* TODO: tweak this to check for ghost ^ as template literal *)\nlet is_binary_operator operator =\n  match operator with\n  | \":=\" | \"||\" | \"&&\" | \"=\" | \"==\" | \"<\" | \">\" | \"!=\" | \"!==\" | \"<=\" | \">=\"\n  | \"|>\" | \"+\" | \"+.\" | \"-\" | \"-.\" | \"^\" | \"*\" | \"*.\" | \"/\" | \"/.\" | \"**\" | \"|.\"\n  | \"|.u\" | \"<>\" ->\n    true\n  | _ -> false\n\nlet is_binary_expression expr =\n  match expr.pexp_desc with\n  | Pexp_apply\n      ( {\n          pexp_desc =\n            Pexp_ident {txt = Longident.Lident operator; loc = operator_loc};\n        },\n        [(Nolabel, _operand1); (Nolabel, _operand2)] )\n    when is_binary_operator operator\n         && not (operator_loc.loc_ghost && operator = \"^\")\n         (* template literal *) ->\n    true\n  | _ -> false\n\nlet is_equality_operator operator =\n  match operator with\n  | \"=\" | \"==\" | \"<>\" | \"!=\" -> true\n  | _ -> false\n\nlet is_rhs_binary_operator operator =\n  match operator with\n  | \"**\" -> true\n  | _ -> false\n\nlet flattenable_operators parent_operator child_operator =\n  let prec_parent = operator_precedence parent_operator in\n  let prec_child = operator_precedence child_operator in\n  if prec_parent == prec_child then\n    not\n      (is_equality_operator parent_operator\n      && is_equality_operator child_operator)\n  else false\n\nlet rec has_if_let_attribute attrs =\n  match attrs with\n  | [] -> false\n  | ({Location.txt = \"res.iflet\"}, _) :: _ -> true\n  | _ :: attrs -> 
has_if_let_attribute attrs\n\nlet is_if_let_expr expr =\n  match expr with\n  | {pexp_attributes = attrs; pexp_desc = Pexp_match _}\n    when has_if_let_attribute attrs ->\n    true\n  | _ -> false\n\nlet rec has_optional_attribute attrs =\n  match attrs with\n  | [] -> false\n  | ({Location.txt = \"ns.optional\" | \"res.optional\"}, _) :: _ -> true\n  | _ :: attrs -> has_optional_attribute attrs\n\nlet has_attributes attrs =\n  List.exists\n    (fun attr ->\n      match attr with\n      | ( {\n            Location.txt =\n              ( \"bs\" | \"res.uapp\" | \"res.arity\" | \"res.braces\" | \"ns.braces\"\n              | \"res.iflet\" | \"res.ternary\" | \"res.async\" | \"res.await\"\n              | \"res.template\" );\n          },\n          _ ) ->\n        false\n      (* Remove the fragile pattern warning for iflet expressions *)\n      | ( {Location.txt = \"warning\"},\n          PStr\n            [\n              {\n                pstr_desc =\n                  Pstr_eval\n                    ({pexp_desc = Pexp_constant (Pconst_string (\"-4\", None))}, _);\n              };\n            ] ) ->\n        not (has_if_let_attribute attrs)\n      | _ -> true)\n    attrs\n\nlet is_array_access expr =\n  match expr.pexp_desc with\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident \"Array\", \"get\")}},\n        [(Nolabel, _parentExpr); (Nolabel, _memberExpr)] ) ->\n    true\n  | _ -> false\n\ntype if_condition_kind =\n  | If of Parsetree.expression\n  | IfLet of Parsetree.pattern * Parsetree.expression\n\nlet collect_if_expressions expr =\n  let rec collect acc expr =\n    let expr_loc = expr.pexp_loc in\n    match expr.pexp_desc with\n    | Pexp_ifthenelse (if_expr, then_expr, Some else_expr) ->\n      collect ((expr_loc, If if_expr, then_expr) :: acc) else_expr\n    | Pexp_ifthenelse (if_expr, then_expr, (None as else_expr)) ->\n      let ifs = List.rev ((expr_loc, If if_expr, then_expr) :: acc) in\n      (ifs, else_expr)\n    | 
Pexp_match\n        ( condition,\n          [\n            {pc_lhs = pattern; pc_guard = None; pc_rhs = then_expr};\n            {\n              pc_rhs =\n                {pexp_desc = Pexp_construct ({txt = Longident.Lident \"()\"}, _)};\n            };\n          ] )\n      when is_if_let_expr expr ->\n      let ifs =\n        List.rev ((expr_loc, IfLet (pattern, condition), then_expr) :: acc)\n      in\n      (ifs, None)\n    | Pexp_match\n        ( condition,\n          [\n            {pc_lhs = pattern; pc_guard = None; pc_rhs = then_expr};\n            {pc_rhs = else_expr};\n          ] )\n      when is_if_let_expr expr ->\n      collect\n        ((expr_loc, IfLet (pattern, condition), then_expr) :: acc)\n        else_expr\n    | _ -> (List.rev acc, Some expr)\n  in\n  collect [] expr\n\nlet rec has_ternary_attribute attrs =\n  match attrs with\n  | [] -> false\n  | ({Location.txt = \"res.ternary\"}, _) :: _ -> true\n  | _ :: attrs -> has_ternary_attribute attrs\n\nlet is_ternary_expr expr =\n  match expr with\n  | {pexp_attributes = attrs; pexp_desc = Pexp_ifthenelse _}\n    when has_ternary_attribute attrs ->\n    true\n  | _ -> false\n\nlet collect_ternary_parts expr =\n  let rec collect acc expr =\n    match expr with\n    | {\n     pexp_attributes = attrs;\n     pexp_desc = Pexp_ifthenelse (condition, consequent, Some alternate);\n    }\n      when has_ternary_attribute attrs ->\n      collect ((condition, consequent) :: acc) alternate\n    | alternate -> (List.rev acc, alternate)\n  in\n  collect [] expr\n\nlet parameters_should_hug parameters =\n  match parameters with\n  | [Parameter {attrs = []; lbl = Asttypes.Nolabel; default_expr = None; pat}]\n    when is_huggable_pattern pat ->\n    true\n  | _ -> false\n\nlet filter_ternary_attributes attrs =\n  List.filter\n    (fun attr ->\n      match attr with\n      | {Location.txt = \"res.ternary\"}, _ -> false\n      | _ -> true)\n    attrs\n\nlet filter_fragile_match_attributes attrs =\n  List.filter\n    
(fun attr ->\n      match attr with\n      | ( {Location.txt = \"warning\"},\n          PStr\n            [\n              {\n                pstr_desc =\n                  Pstr_eval\n                    ({pexp_desc = Pexp_constant (Pconst_string (\"-4\", _))}, _);\n              };\n            ] ) ->\n        false\n      | _ -> true)\n    attrs\n\nlet is_jsx_expression expr =\n  let rec loop attrs =\n    match attrs with\n    | [] -> false\n    | ({Location.txt = \"JSX\"}, _) :: _ -> true\n    | _ :: attrs -> loop attrs\n  in\n  match expr.pexp_desc with\n  | Pexp_apply _ -> loop expr.Parsetree.pexp_attributes\n  | _ -> false\n\nlet has_jsx_attribute attributes =\n  let rec loop attrs =\n    match attrs with\n    | [] -> false\n    | ({Location.txt = \"JSX\"}, _) :: _ -> true\n    | _ :: attrs -> loop attrs\n  in\n  loop attributes\n\nlet should_indent_binary_expr expr =\n  let same_precedence_sub_expression operator sub_expression =\n    match sub_expression with\n    | {\n     pexp_desc =\n       Pexp_apply\n         ( {pexp_desc = Pexp_ident {txt = Longident.Lident sub_operator}},\n           [(Nolabel, _lhs); (Nolabel, _rhs)] );\n    }\n      when is_binary_operator sub_operator ->\n      flattenable_operators operator sub_operator\n    | _ -> true\n  in\n  match expr with\n  | {\n   pexp_desc =\n     Pexp_apply\n       ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},\n         [(Nolabel, lhs); (Nolabel, _rhs)] );\n  }\n    when is_binary_operator operator ->\n    is_equality_operator operator\n    || (not (same_precedence_sub_expression operator lhs))\n    || operator = \":=\"\n  | _ -> false\n\nlet should_inline_rhs_binary_expr rhs =\n  match rhs.pexp_desc with\n  | Parsetree.Pexp_constant _ | Pexp_let _ | Pexp_letmodule _\n  | Pexp_letexception _ | Pexp_sequence _ | Pexp_open _ | Pexp_ifthenelse _\n  | Pexp_for _ | Pexp_while _ | Pexp_try _ | Pexp_array _ | Pexp_record _ ->\n    true\n  | _ -> false\n\nlet is_printable_attribute attr =\n  
match attr with\n  | ( {\n        Location.txt =\n          ( \"bs\" | \"res.uapp\" | \"res.arity\" | \"res.iflet\" | \"res.braces\"\n          | \"ns.braces\" | \"JSX\" | \"res.async\" | \"res.await\" | \"res.template\"\n          | \"res.ternary\" );\n      },\n      _ ) ->\n    false\n  | _ -> true\n\nlet has_printable_attributes attrs = List.exists is_printable_attribute attrs\n\nlet filter_printable_attributes attrs = List.filter is_printable_attribute attrs\n\nlet partition_printable_attributes attrs =\n  List.partition is_printable_attribute attrs\n\nlet is_fun_newtype expr =\n  match expr.pexp_desc with\n  | Pexp_fun _ | Pexp_newtype _ -> true\n  | _ -> Ast_uncurried.expr_is_uncurried_fun expr\n\nlet requires_special_callback_printing_last_arg args =\n  let rec loop args =\n    match args with\n    | [] -> false\n    | [(_, expr)] when is_fun_newtype expr -> true\n    | (_, expr) :: _ when is_fun_newtype expr -> false\n    | _ :: rest -> loop rest\n  in\n  loop args\n\nlet requires_special_callback_printing_first_arg args =\n  let rec loop args =\n    match args with\n    | [] -> true\n    | (_, expr) :: _ when is_fun_newtype expr -> false\n    | _ :: rest -> loop rest\n  in\n  match args with\n  | [(_, expr)] when is_fun_newtype expr -> false\n  | (_, expr) :: rest when is_fun_newtype expr -> loop rest\n  | _ -> false\n\nlet mod_expr_apply mod_expr =\n  let rec loop acc mod_expr =\n    match mod_expr with\n    | {pmod_desc = Pmod_apply (next, arg)} -> loop (arg :: acc) next\n    | _ -> (acc, mod_expr)\n  in\n  loop [] mod_expr\n\nlet mod_expr_functor mod_expr =\n  let rec loop acc mod_expr =\n    match mod_expr with\n    | {\n     pmod_desc = Pmod_functor (lbl, mod_type, return_mod_expr);\n     pmod_attributes = attrs;\n    } ->\n      let param = (attrs, lbl, mod_type) in\n      loop (param :: acc) return_mod_expr\n    | return_mod_expr -> (List.rev acc, return_mod_expr)\n  in\n  loop [] mod_expr\n\nlet rec collect_patterns_from_list_construct acc pattern 
=\n  let open Parsetree in\n  match pattern.ppat_desc with\n  | Ppat_construct\n      ({txt = Longident.Lident \"::\"}, Some {ppat_desc = Ppat_tuple [pat; rest]})\n    ->\n    collect_patterns_from_list_construct (pat :: acc) rest\n  | _ -> (List.rev acc, pattern)\n\nlet has_template_literal_attr attrs =\n  List.exists\n    (fun attr ->\n      match attr with\n      | {Location.txt = \"res.template\"}, _ -> true\n      | _ -> false)\n    attrs\n\nlet has_tagged_template_literal_attr attrs =\n  List.exists\n    (fun attr ->\n      match attr with\n      | {Location.txt = \"res.taggedTemplate\"}, _ -> true\n      | _ -> false)\n    attrs\n\nlet is_template_literal expr =\n  match expr.pexp_desc with\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Lident \"^\"}},\n        [(Nolabel, _); (Nolabel, _)] )\n    when has_template_literal_attr expr.pexp_attributes ->\n    true\n  | Pexp_constant (Pconst_string (_, Some \"\")) -> true\n  | Pexp_constant _ when has_template_literal_attr expr.pexp_attributes -> true\n  | _ -> false\n\nlet is_tagged_template_literal expr =\n  match expr with\n  | {pexp_desc = Pexp_apply _; pexp_attributes = attrs} ->\n    has_tagged_template_literal_attr attrs\n  | _ -> false\n\nlet has_spread_attr attrs =\n  List.exists\n    (fun attr ->\n      match attr with\n      | {Location.txt = \"res.spread\"}, _ -> true\n      | _ -> false)\n    attrs\n\nlet is_spread_belt_list_concat expr =\n  match expr.pexp_desc with\n  | Pexp_ident\n      {\n        txt =\n          Longident.Ldot\n            (Longident.Ldot (Longident.Lident \"Belt\", \"List\"), \"concatMany\");\n      } ->\n    has_spread_attr expr.pexp_attributes\n  | _ -> false\n\nlet is_spread_belt_array_concat expr =\n  match expr.pexp_desc with\n  | Pexp_ident\n      {\n        txt =\n          Longident.Ldot\n            (Longident.Ldot (Longident.Lident \"Belt\", \"Array\"), \"concatMany\");\n      } ->\n    has_spread_attr expr.pexp_attributes\n  | _ -> false\n\n(* Blue 
| Red | Green -> [Blue; Red; Green] *)\nlet collect_or_pattern_chain pat =\n  let rec loop pattern chain =\n    match pattern.ppat_desc with\n    | Ppat_or (left, right) -> loop left (right :: chain)\n    | _ -> pattern :: chain\n  in\n  loop pat []\n\nlet is_single_pipe_expr expr =\n  (* handles:\n   *   x\n   *   ->Js.Dict.get(\"wm-property\")\n   *   ->Option.flatMap(Js.Json.decodeString)\n   *   ->Option.flatMap(x =>\n   *     switch x {\n   *     | \"like-of\" => Some(#like)\n   *     | \"repost-of\" => Some(#repost)\n   *     | _ => None\n   *     }\n   *   )\n   *)\n  let is_pipe_expr expr =\n    match expr.pexp_desc with\n    | Pexp_apply\n        ( {pexp_desc = Pexp_ident {txt = Longident.Lident (\"|.\" | \"|.u\" | \"|>\")}},\n          [(Nolabel, _operand1); (Nolabel, _operand2)] ) ->\n      true\n    | _ -> false\n  in\n  match expr.pexp_desc with\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Lident (\"|.\" | \"|.u\" | \"|>\")}},\n        [(Nolabel, operand1); (Nolabel, _operand2)] )\n    when not (is_pipe_expr operand1) ->\n    true\n  | _ -> false\n\nlet is_underscore_apply_sugar expr =\n  match expr.pexp_desc with\n  | Pexp_fun\n      ( Nolabel,\n        None,\n        {ppat_desc = Ppat_var {txt = \"__x\"}},\n        {pexp_desc = Pexp_apply _} ) ->\n    true\n  | _ -> false\n\nlet is_rewritten_underscore_apply_sugar expr =\n  match expr.pexp_desc with\n  | Pexp_ident {txt = Longident.Lident \"_\"} -> true\n  | _ -> false\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_parsetree_viewer.mli",
    "content": "(* Restructures a nested tree of arrow types into its args & returnType\n * The parsetree contains: a => b => c => d, for printing purposes\n * we restructure the tree into (a, b, c) and its returnType d *)\nval arrow_type :\n  ?arity:int ->\n  Parsetree.core_type ->\n  Parsetree.attributes\n  * (Parsetree.attributes * Asttypes.arg_label * Parsetree.core_type) list\n  * Parsetree.core_type\n\nval functor_type :\n  Parsetree.module_type ->\n  (Parsetree.attributes * string Asttypes.loc * Parsetree.module_type option)\n  list\n  * Parsetree.module_type\n\n(* filters @bs out of the provided attributes *)\nval process_bs_attribute : Parsetree.attributes -> bool * Parsetree.attributes\n\nval process_uncurried_app_attribute :\n  Parsetree.attributes -> bool * Parsetree.attributes\n\nval process_partial_app_attribute :\n  Parsetree.attributes -> bool * Parsetree.attributes\n\ntype function_attributes_info = {\n  async: bool;\n  bs: bool;\n  attributes: Parsetree.attributes;\n}\n\n(* determines whether a function is async and/or uncurried based on the given attributes *)\nval process_function_attributes :\n  Parsetree.attributes -> function_attributes_info\n\nval has_await_attribute : Parsetree.attributes -> bool\n\ntype if_condition_kind =\n  | If of Parsetree.expression\n  | IfLet of Parsetree.pattern * Parsetree.expression\n\n(* if ... else if ... else ... is represented as nested expressions: if ... else { if ... 
}\n * The purpose of this function is to flatten nested ifs into one sequence.\n * Basically compute: ([if, else if, else if, else if], else) *)\nval collect_if_expressions :\n  Parsetree.expression ->\n  (Location.t * if_condition_kind * Parsetree.expression) list\n  * Parsetree.expression option\n\nval collect_array_expressions :\n  Parsetree.expression ->\n  Parsetree.expression list * Parsetree.expression option\n\nval collect_list_expressions :\n  Parsetree.expression ->\n  Parsetree.expression list * Parsetree.expression option\n\ntype fun_param_kind =\n  | Parameter of {\n      attrs: Parsetree.attributes;\n      lbl: Asttypes.arg_label;\n      default_expr: Parsetree.expression option;\n      pat: Parsetree.pattern;\n    }\n  | NewTypes of {attrs: Parsetree.attributes; locs: string Asttypes.loc list}\n\nval fun_expr :\n  Parsetree.expression ->\n  bool * Parsetree.attributes * fun_param_kind list * Parsetree.expression\n\n(* example:\n *  `makeCoordinate({\n *    x: 1,\n *    y: 2,\n *  })`\n *  Notice howe `({` and `})` \"hug\" or stick to each other *)\nval is_huggable_expression : Parsetree.expression -> bool\n\nval is_huggable_pattern : Parsetree.pattern -> bool\n\nval is_huggable_rhs : Parsetree.expression -> bool\n\nval operator_precedence : string -> int\n\nval is_unary_expression : Parsetree.expression -> bool\nval is_binary_operator : string -> bool\nval is_binary_expression : Parsetree.expression -> bool\nval is_rhs_binary_operator : string -> bool\n\nval flattenable_operators : string -> string -> bool\n\nval has_attributes : Parsetree.attributes -> bool\n\nval is_array_access : Parsetree.expression -> bool\nval is_ternary_expr : Parsetree.expression -> bool\nval is_if_let_expr : Parsetree.expression -> bool\n\nval collect_ternary_parts :\n  Parsetree.expression ->\n  (Parsetree.expression * Parsetree.expression) list * Parsetree.expression\n\nval parameters_should_hug : fun_param_kind list -> bool\n\nval filter_ternary_attributes : 
Parsetree.attributes -> Parsetree.attributes\nval filter_fragile_match_attributes :\n  Parsetree.attributes -> Parsetree.attributes\n\nval is_jsx_expression : Parsetree.expression -> bool\nval has_jsx_attribute : Parsetree.attributes -> bool\nval has_optional_attribute : Parsetree.attributes -> bool\n\nval should_indent_binary_expr : Parsetree.expression -> bool\nval should_inline_rhs_binary_expr : Parsetree.expression -> bool\nval has_printable_attributes : Parsetree.attributes -> bool\nval filter_printable_attributes : Parsetree.attributes -> Parsetree.attributes\nval partition_printable_attributes :\n  Parsetree.attributes -> Parsetree.attributes * Parsetree.attributes\n\nval requires_special_callback_printing_last_arg :\n  (Asttypes.arg_label * Parsetree.expression) list -> bool\nval requires_special_callback_printing_first_arg :\n  (Asttypes.arg_label * Parsetree.expression) list -> bool\n\nval mod_expr_apply :\n  Parsetree.module_expr -> Parsetree.module_expr list * Parsetree.module_expr\n\n(* Collection of utilities to view the ast in a more a convenient form,\n * allowing for easier processing.\n * Example: given a ptyp_arrow type, what are its arguments and what is the\n * returnType? 
*)\n\nval mod_expr_functor :\n  Parsetree.module_expr ->\n  (Parsetree.attributes * string Asttypes.loc * Parsetree.module_type option)\n  list\n  * Parsetree.module_expr\n\nval collect_patterns_from_list_construct :\n  Parsetree.pattern list ->\n  Parsetree.pattern ->\n  Parsetree.pattern list * Parsetree.pattern\n\nval is_block_expr : Parsetree.expression -> bool\n\nval is_template_literal : Parsetree.expression -> bool\nval is_tagged_template_literal : Parsetree.expression -> bool\nval has_template_literal_attr : Parsetree.attributes -> bool\n\nval is_spread_belt_list_concat : Parsetree.expression -> bool\n\nval is_spread_belt_array_concat : Parsetree.expression -> bool\n\nval collect_or_pattern_chain : Parsetree.pattern -> Parsetree.pattern list\n\nval process_braces_attr :\n  Parsetree.expression -> Parsetree.attribute option * Parsetree.expression\n\nval filter_parsing_attrs : Parsetree.attributes -> Parsetree.attributes\n\nval is_braced_expr : Parsetree.expression -> bool\n\nval is_single_pipe_expr : Parsetree.expression -> bool\n\n(* (__x) => f(a, __x, c) -----> f(a, _, c)  *)\nval rewrite_underscore_apply : Parsetree.expression -> Parsetree.expression\n\n(* (__x) => f(a, __x, c) -----> f(a, _, c)  *)\nval is_underscore_apply_sugar : Parsetree.expression -> bool\n\nval has_if_let_attribute : Parsetree.attributes -> bool\n\nval is_rewritten_underscore_apply_sugar : Parsetree.expression -> bool\n\nval is_fun_newtype : Parsetree.expression -> bool\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_printer.ml",
    "content": "module Doc = Res_doc\nmodule CommentTable = Res_comments_table\nmodule Comment = Res_comment\nmodule Token = Res_token\nmodule Parens = Res_parens\nmodule ParsetreeViewer = Res_parsetree_viewer\n\ntype callback_style =\n  (* regular arrow function, example: `let f = x => x + 1` *)\n  | NoCallback\n  (* `Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument))` *)\n  | FitsOnOneLine\n  (* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) =>\n   *   MyModuleBlah.toList(argument)\n   * )\n   *)\n  | ArgumentsFitOnOneLine\n\nlet add_parens doc =\n  Doc.group\n    (Doc.concat\n       [\n         Doc.lparen;\n         Doc.indent (Doc.concat [Doc.soft_line; doc]);\n         Doc.soft_line;\n         Doc.rparen;\n       ])\n\nlet add_braces doc =\n  Doc.group\n    (Doc.concat\n       [\n         Doc.lbrace;\n         Doc.indent (Doc.concat [Doc.soft_line; doc]);\n         Doc.soft_line;\n         Doc.rbrace;\n       ])\n\nlet add_async doc = Doc.concat [Doc.text \"async \"; doc]\n\nlet get_first_leading_comment tbl loc =\n  match Hashtbl.find tbl.CommentTable.leading loc with\n  | comment :: _ -> Some comment\n  | [] -> None\n  | exception Not_found -> None\n\n(* Checks if `loc` has a leading line comment, i.e. 
`// comment above`*)\nlet has_leading_line_comment tbl loc =\n  match get_first_leading_comment tbl loc with\n  | Some comment -> Comment.is_single_line_comment comment\n  | None -> false\n\nlet has_comment_below tbl loc =\n  match Hashtbl.find tbl.CommentTable.trailing loc with\n  | comment :: _ ->\n    let comment_loc = Comment.loc comment in\n    comment_loc.Location.loc_start.pos_lnum > loc.Location.loc_end.pos_lnum\n  | [] -> false\n  | exception Not_found -> false\n\nlet has_nested_jsx_or_more_than_one_child expr =\n  let rec loop in_recursion expr =\n    match expr.Parsetree.pexp_desc with\n    | Pexp_construct\n        ({txt = Longident.Lident \"::\"}, Some {pexp_desc = Pexp_tuple [hd; tail]})\n      ->\n      if in_recursion || ParsetreeViewer.is_jsx_expression hd then true\n      else loop true tail\n    | _ -> false\n  in\n  loop false expr\n\nlet has_comments_inside tbl loc =\n  match Hashtbl.find_opt tbl.CommentTable.inside loc with\n  | None -> false\n  | _ -> true\n\nlet has_trailing_comments tbl loc =\n  match Hashtbl.find_opt tbl.CommentTable.trailing loc with\n  | None -> false\n  | _ -> true\n\nlet print_multiline_comment_content txt =\n  (* Turns\n   *         |* first line\n   *  * second line\n   *      * third line *|\n   * Into\n   * |* first line\n   *  * second line\n   *  * third line *|\n   *\n   * What makes a comment suitable for this kind of indentation?\n   *  ->  multiple lines + every line starts with a star\n   *)\n  let rec indent_stars lines acc =\n    match lines with\n    | [] -> Doc.nil\n    | [last_line] ->\n      let line = String.trim last_line in\n      let doc = Doc.text (\" \" ^ line) in\n      let trailing_space = if line = \"\" then Doc.nil else Doc.space in\n      List.rev (trailing_space :: doc :: acc) |> Doc.concat\n    | line :: lines ->\n      let line = String.trim line in\n      if line != \"\" && String.unsafe_get line 0 == '*' then\n        let doc = Doc.text (\" \" ^ line) in\n        indent_stars lines 
(Doc.hard_line :: doc :: acc)\n      else\n        let trailing_space =\n          let len = String.length txt in\n          if len > 0 && String.unsafe_get txt (len - 1) = ' ' then Doc.space\n          else Doc.nil\n        in\n        let content = Comment.trim_spaces txt in\n        Doc.concat [Doc.text content; trailing_space]\n  in\n  let lines = String.split_on_char '\\n' txt in\n  match lines with\n  | [] -> Doc.text \"/* */\"\n  | [line] ->\n    Doc.concat\n      [Doc.text \"/* \"; Doc.text (Comment.trim_spaces line); Doc.text \" */\"]\n  | first :: rest ->\n    let first_line = Comment.trim_spaces first in\n    Doc.concat\n      [\n        Doc.text \"/*\";\n        (match first_line with\n        | \"\" | \"*\" -> Doc.nil\n        | _ -> Doc.space);\n        indent_stars rest [Doc.hard_line; Doc.text first_line];\n        Doc.text \"*/\";\n      ]\n\nlet print_trailing_comment (prev_loc : Location.t) (node_loc : Location.t)\n    comment =\n  let single_line = Comment.is_single_line_comment comment in\n  let content =\n    let txt = Comment.txt comment in\n    if single_line then Doc.text (\"//\" ^ txt)\n    else print_multiline_comment_content txt\n  in\n  let diff =\n    let cmt_start = (Comment.loc comment).loc_start in\n    cmt_start.pos_lnum - prev_loc.loc_end.pos_lnum\n  in\n  let is_below =\n    (Comment.loc comment).loc_start.pos_lnum > node_loc.loc_end.pos_lnum\n  in\n  if diff > 0 || is_below then\n    Doc.concat\n      [\n        Doc.break_parent;\n        Doc.line_suffix\n          (Doc.concat\n             [\n               Doc.hard_line;\n               (if diff > 1 then Doc.hard_line else Doc.nil);\n               content;\n             ]);\n      ]\n  else if not single_line then Doc.concat [Doc.space; content]\n  else Doc.line_suffix (Doc.concat [Doc.space; content])\n\nlet print_leading_comment ?next_comment comment =\n  let single_line = Comment.is_single_line_comment comment in\n  let content =\n    let txt = Comment.txt comment in\n    
if single_line then Doc.text (\"//\" ^ txt)\n    else print_multiline_comment_content txt\n  in\n  let separator =\n    Doc.concat\n      [\n        (if single_line then Doc.concat [Doc.hard_line; Doc.break_parent]\n         else Doc.nil);\n        (match next_comment with\n        | Some next ->\n          let next_loc = Comment.loc next in\n          let curr_loc = Comment.loc comment in\n          let diff =\n            next_loc.Location.loc_start.pos_lnum\n            - curr_loc.Location.loc_end.pos_lnum\n          in\n          let next_single_line = Comment.is_single_line_comment next in\n          if single_line && next_single_line then\n            if diff > 1 then Doc.hard_line else Doc.nil\n          else if single_line && not next_single_line then\n            if diff > 1 then Doc.hard_line else Doc.nil\n          else if diff > 1 then Doc.concat [Doc.hard_line; Doc.hard_line]\n          else if diff == 1 then Doc.hard_line\n          else Doc.space\n        | None -> Doc.nil);\n      ]\n  in\n  Doc.concat [content; separator]\n\n(* This function is used for printing comments inside an empty block *)\nlet print_comments_inside cmt_tbl loc =\n  let print_comment comment =\n    let single_line = Comment.is_single_line_comment comment in\n    let txt = Comment.txt comment in\n    if single_line then Doc.text (\"//\" ^ txt)\n    else print_multiline_comment_content txt\n  in\n  let force_break =\n    loc.Location.loc_start.pos_lnum <> loc.Location.loc_end.pos_lnum\n  in\n  let rec loop acc comments =\n    match comments with\n    | [] -> Doc.nil\n    | [comment] ->\n      let cmt_doc = print_comment comment in\n      let cmts_doc = Doc.concat (Doc.soft_line :: List.rev (cmt_doc :: acc)) in\n      let doc =\n        Doc.breakable_group ~force_break\n          (Doc.concat\n             [Doc.if_breaks (Doc.indent cmts_doc) cmts_doc; Doc.soft_line])\n      in\n      doc\n    | comment :: rest ->\n      let cmt_doc = Doc.concat [print_comment comment; Doc.line] 
in\n      loop (cmt_doc :: acc) rest\n  in\n  match Hashtbl.find cmt_tbl.CommentTable.inside loc with\n  | exception Not_found -> Doc.nil\n  | comments ->\n    Hashtbl.remove cmt_tbl.inside loc;\n    loop [] comments\n\n(* This function is used for printing comments inside an empty file *)\nlet print_comments_inside_file cmt_tbl =\n  let rec loop acc comments =\n    match comments with\n    | [] -> Doc.nil\n    | [comment] ->\n      let cmt_doc = print_leading_comment comment in\n      let doc =\n        Doc.group (Doc.concat [Doc.concat (List.rev (cmt_doc :: acc))])\n      in\n      doc\n    | comment :: (next_comment :: _comments as rest) ->\n      let cmt_doc = print_leading_comment ~next_comment comment in\n      loop (cmt_doc :: acc) rest\n  in\n  match Hashtbl.find cmt_tbl.CommentTable.inside Location.none with\n  | exception Not_found -> Doc.nil\n  | comments ->\n    Hashtbl.remove cmt_tbl.inside Location.none;\n    Doc.group (loop [] comments)\n\nlet print_leading_comments node tbl loc =\n  let rec loop acc comments =\n    match comments with\n    | [] -> node\n    | [comment] ->\n      let cmt_doc = print_leading_comment comment in\n      let diff =\n        loc.Location.loc_start.pos_lnum\n        - (Comment.loc comment).Location.loc_end.pos_lnum\n      in\n      let separator =\n        if Comment.is_single_line_comment comment then\n          if diff > 1 then Doc.hard_line else Doc.nil\n        else if diff == 0 then Doc.space\n        else if diff > 1 then Doc.concat [Doc.hard_line; Doc.hard_line]\n        else Doc.hard_line\n      in\n      let doc =\n        Doc.group\n          (Doc.concat [Doc.concat (List.rev (cmt_doc :: acc)); separator; node])\n      in\n      doc\n    | comment :: (next_comment :: _comments as rest) ->\n      let cmt_doc = print_leading_comment ~next_comment comment in\n      loop (cmt_doc :: acc) rest\n  in\n  match Hashtbl.find tbl loc with\n  | exception Not_found -> node\n  | comments ->\n    (* Remove comments from tbl: 
Some ast nodes have the same location.\n     * We only want to print comments once *)\n    Hashtbl.remove tbl loc;\n    loop [] comments\n\nlet print_trailing_comments node tbl loc =\n  let rec loop prev acc comments =\n    match comments with\n    | [] -> Doc.concat (List.rev acc)\n    | comment :: comments ->\n      let cmt_doc = print_trailing_comment prev loc comment in\n      loop (Comment.loc comment) (cmt_doc :: acc) comments\n  in\n  match Hashtbl.find tbl loc with\n  | exception Not_found -> node\n  | [] -> node\n  | _first :: _ as comments ->\n    (* Remove comments from tbl: Some ast nodes have the same location.\n     * We only want to print comments once *)\n    Hashtbl.remove tbl loc;\n    let cmts_doc = loop loc [] comments in\n    Doc.concat [node; cmts_doc]\n\nlet print_comments doc (tbl : CommentTable.t) loc =\n  let doc_with_leading_comments = print_leading_comments doc tbl.leading loc in\n  print_trailing_comments doc_with_leading_comments tbl.trailing loc\n\nlet print_list ~get_loc ~nodes ~print ?(force_break = false) t =\n  let rec loop (prev_loc : Location.t) acc nodes =\n    match nodes with\n    | [] -> (prev_loc, Doc.concat (List.rev acc))\n    | node :: nodes ->\n      let loc = get_loc node in\n      let start_pos =\n        match get_first_leading_comment t loc with\n        | None -> loc.loc_start\n        | Some comment -> (Comment.loc comment).loc_start\n      in\n      let sep =\n        if start_pos.pos_lnum - prev_loc.loc_end.pos_lnum > 1 then\n          Doc.concat [Doc.hard_line; Doc.hard_line]\n        else Doc.hard_line\n      in\n      let doc = print_comments (print node t) t loc in\n      loop loc (doc :: sep :: acc) nodes\n  in\n  match nodes with\n  | [] -> Doc.nil\n  | node :: nodes ->\n    let first_loc = get_loc node in\n    let doc = print_comments (print node t) t first_loc in\n    let last_loc, docs = loop first_loc [doc] nodes in\n    let force_break =\n      force_break || first_loc.loc_start.pos_lnum != 
last_loc.loc_end.pos_lnum\n    in\n    Doc.breakable_group ~force_break docs\n\nlet print_listi ~get_loc ~nodes ~print ?(force_break = false) t =\n  let rec loop i (prev_loc : Location.t) acc nodes =\n    match nodes with\n    | [] -> (prev_loc, Doc.concat (List.rev acc))\n    | node :: nodes ->\n      let loc = get_loc node in\n      let start_pos =\n        match get_first_leading_comment t loc with\n        | None -> loc.loc_start\n        | Some comment -> (Comment.loc comment).loc_start\n      in\n      let sep =\n        if start_pos.pos_lnum - prev_loc.loc_end.pos_lnum > 1 then\n          Doc.concat [Doc.hard_line; Doc.hard_line]\n        else Doc.line\n      in\n      let doc = print_comments (print node t i) t loc in\n      loop (i + 1) loc (doc :: sep :: acc) nodes\n  in\n  match nodes with\n  | [] -> Doc.nil\n  | node :: nodes ->\n    let first_loc = get_loc node in\n    let doc = print_comments (print node t 0) t first_loc in\n    let last_loc, docs = loop 1 first_loc [doc] nodes in\n    let force_break =\n      force_break || first_loc.loc_start.pos_lnum != last_loc.loc_end.pos_lnum\n    in\n    Doc.breakable_group ~force_break docs\n\nlet rec print_longident_aux accu = function\n  | Longident.Lident s -> Doc.text s :: accu\n  | Ldot (lid, s) -> print_longident_aux (Doc.text s :: accu) lid\n  | Lapply (lid1, lid2) ->\n    let d1 = Doc.join ~sep:Doc.dot (print_longident_aux [] lid1) in\n    let d2 = Doc.join ~sep:Doc.dot (print_longident_aux [] lid2) in\n    Doc.concat [d1; Doc.lparen; d2; Doc.rparen] :: accu\n\nlet print_longident = function\n  | Longident.Lident txt -> Doc.text txt\n  | lid -> Doc.join ~sep:Doc.dot (print_longident_aux [] lid)\n\ntype identifier_style = ExoticIdent | NormalIdent\n\nlet classify_ident_content ?(allow_uident = false) ?(allow_hyphen = false) txt =\n  if Token.is_keyword_txt txt then ExoticIdent\n  else\n    let len = String.length txt in\n    let rec loop i =\n      if i == len then NormalIdent\n      else if i == 0 
then\n        match String.unsafe_get txt i with\n        | 'A' .. 'Z' when allow_uident -> loop (i + 1)\n        | 'a' .. 'z' | '_' -> loop (i + 1)\n        | '-' when allow_hyphen -> loop (i + 1)\n        | _ -> ExoticIdent\n      else\n        match String.unsafe_get txt i with\n        | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '\\'' | '_' -> loop (i + 1)\n        | '-' when allow_hyphen -> loop (i + 1)\n        | _ -> ExoticIdent\n    in\n    loop 0\n\nlet print_ident_like ?allow_uident ?allow_hyphen txt =\n  let txt = Ext_ident.unwrap_uppercase_exotic txt in\n  match classify_ident_content ?allow_uident ?allow_hyphen txt with\n  | ExoticIdent -> Doc.concat [Doc.text \"\\\\\\\"\"; Doc.text txt; Doc.text \"\\\"\"]\n  | NormalIdent -> Doc.text txt\n\nlet rec unsafe_for_all_range s ~start ~finish p =\n  start > finish\n  || p (String.unsafe_get s start)\n     && unsafe_for_all_range s ~start:(start + 1) ~finish p\n\nlet for_all_from s start p =\n  let len = String.length s in\n  unsafe_for_all_range s ~start ~finish:(len - 1) p\n\n(* See https://github.com/rescript-lang/rescript-compiler/blob/726cfa534314b586e5b5734471bc2023ad99ebd9/jscomp/ext/ext_string.ml#L510 *)\nlet is_valid_numeric_polyvar_number (x : string) =\n  let len = String.length x in\n  len > 0\n  &&\n  let a = Char.code (String.unsafe_get x 0) in\n  a <= 57\n  &&\n  if len > 1 then\n    a > 48\n    && for_all_from x 1 (function\n         | '0' .. 
'9' -> true\n         | _ -> false)\n  else a >= 48\n\n(* Exotic identifiers in poly-vars have a \"lighter\" syntax: #\"ease-in\" *)\nlet print_poly_var_ident txt =\n  (* numeric poly-vars don't need quotes: #644 *)\n  if is_valid_numeric_polyvar_number txt then Doc.text txt\n  else\n    let txt = Ext_ident.unwrap_uppercase_exotic txt in\n    match classify_ident_content ~allow_uident:true txt with\n    | ExoticIdent -> Doc.concat [Doc.text \"\\\"\"; Doc.text txt; Doc.text \"\\\"\"]\n    | NormalIdent -> (\n      match txt with\n      | \"\" -> Doc.concat [Doc.text \"\\\"\"; Doc.text txt; Doc.text \"\\\"\"]\n      | _ -> Doc.text txt)\n\nlet polyvar_ident_to_string poly_var_ident =\n  Doc.concat [Doc.text \"#\"; print_poly_var_ident poly_var_ident]\n  |> Doc.to_string ~width:80\n\nlet print_lident l =\n  let flat_lid_opt lid =\n    let rec flat accu = function\n      | Longident.Lident s -> Some (s :: accu)\n      | Ldot (lid, s) -> flat (s :: accu) lid\n      | Lapply (_, _) -> None\n    in\n    flat [] lid\n  in\n  match l with\n  | Longident.Lident txt -> print_ident_like txt\n  | Longident.Ldot (path, txt) ->\n    let doc =\n      match flat_lid_opt path with\n      | Some txts ->\n        Doc.concat\n          [\n            Doc.join ~sep:Doc.dot (List.map Doc.text txts);\n            Doc.dot;\n            print_ident_like txt;\n          ]\n      | None -> Doc.text \"printLident: Longident.Lapply is not supported\"\n    in\n    doc\n  | Lapply (_, _) -> Doc.text \"printLident: Longident.Lapply is not supported\"\n\nlet print_longident_location l cmt_tbl =\n  let doc = print_longident l.Location.txt in\n  print_comments doc cmt_tbl l.loc\n\n(* Module.SubModule.x *)\nlet print_lident_path path cmt_tbl =\n  let doc = print_lident path.Location.txt in\n  print_comments doc cmt_tbl path.loc\n\n(* Module.SubModule.x or Module.SubModule.X *)\nlet print_ident_path path cmt_tbl =\n  let doc = print_lident path.Location.txt in\n  print_comments doc cmt_tbl 
path.loc\n\nlet print_string_loc sloc cmt_tbl =\n  let doc = print_ident_like sloc.Location.txt in\n  print_comments doc cmt_tbl sloc.loc\n\nlet print_string_contents txt =\n  let lines = String.split_on_char '\\n' txt in\n  Doc.join ~sep:Doc.literal_line (List.map Doc.text lines)\n\nlet print_constant ?(template_literal = false) c =\n  match c with\n  | Parsetree.Pconst_integer (s, suffix) -> (\n    match suffix with\n    | Some c -> Doc.text (s ^ Char.escaped c)\n    | None -> Doc.text s)\n  | Pconst_string (txt, None) ->\n    Doc.concat [Doc.text \"\\\"\"; print_string_contents txt; Doc.text \"\\\"\"]\n  | Pconst_string (txt, Some prefix) ->\n    if prefix = \"INTERNAL_RES_CHAR_CONTENTS\" then\n      Doc.concat [Doc.text \"'\"; Doc.text txt; Doc.text \"'\"]\n    else\n      let lquote, rquote =\n        if template_literal then (\"`\", \"`\") else (\"\\\"\", \"\\\"\")\n      in\n      Doc.concat\n        [\n          (if prefix = \"js\" then Doc.nil else Doc.text prefix);\n          Doc.text lquote;\n          print_string_contents txt;\n          Doc.text rquote;\n        ]\n  | Pconst_float (s, _) -> Doc.text s\n  | Pconst_char c ->\n    let str =\n      match Char.unsafe_chr c with\n      | '\\'' -> \"\\\\'\"\n      | '\\\\' -> \"\\\\\\\\\"\n      | '\\n' -> \"\\\\n\"\n      | '\\t' -> \"\\\\t\"\n      | '\\r' -> \"\\\\r\"\n      | '\\b' -> \"\\\\b\"\n      | ' ' .. 
'~' as c ->\n        let s = (Bytes.create [@doesNotRaise]) 1 in\n        Bytes.unsafe_set s 0 c;\n        Bytes.unsafe_to_string s\n      | _ -> Res_utf8.encode_code_point c\n    in\n    Doc.text (\"'\" ^ str ^ \"'\")\n\nlet print_optional_label attrs =\n  if Res_parsetree_viewer.has_optional_attribute attrs then Doc.text \"?\"\n  else Doc.nil\n\nmodule State = struct\n  let custom_layout_threshold = 2\n\n  type t = {custom_layout: int; mutable uncurried_config: Config.uncurried}\n\n  let init () = {custom_layout = 0; uncurried_config = !Config.uncurried}\n\n  let next_custom_layout t = {t with custom_layout = t.custom_layout + 1}\n\n  let should_break_callback t = t.custom_layout > custom_layout_threshold\nend\n\nlet rec print_structure ~state (s : Parsetree.structure) t =\n  match s with\n  | [] -> print_comments_inside_file t\n  | structure ->\n    print_list\n      ~get_loc:(fun s -> s.Parsetree.pstr_loc)\n      ~nodes:structure\n      ~print:(print_structure_item ~state)\n      t\n\nand print_structure_item ~state (si : Parsetree.structure_item) cmt_tbl =\n  match si.pstr_desc with\n  | Pstr_value (rec_flag, value_bindings) ->\n    let rec_flag =\n      match rec_flag with\n      | Asttypes.Nonrecursive -> Doc.nil\n      | Asttypes.Recursive -> Doc.text \"rec \"\n    in\n    print_value_bindings ~state ~rec_flag value_bindings cmt_tbl\n  | Pstr_type (rec_flag, type_declarations) ->\n    let rec_flag =\n      match rec_flag with\n      | Asttypes.Nonrecursive -> Doc.nil\n      | Asttypes.Recursive -> Doc.text \"rec \"\n    in\n    print_type_declarations ~state ~rec_flag type_declarations cmt_tbl\n  | Pstr_primitive value_description ->\n    print_value_description ~state value_description cmt_tbl\n  | Pstr_eval (expr, attrs) ->\n    let expr_doc =\n      let doc = print_expression_with_comments ~state expr cmt_tbl in\n      match Parens.structure_expr expr with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc expr 
braces\n      | Nothing -> doc\n    in\n    Doc.concat [print_attributes ~state attrs cmt_tbl; expr_doc]\n  | Pstr_attribute attr ->\n    fst (print_attribute ~state ~standalone:true attr cmt_tbl)\n  | Pstr_extension (extension, attrs) ->\n    Doc.concat\n      [\n        print_attributes ~state attrs cmt_tbl;\n        Doc.concat\n          [print_extension ~state ~at_module_lvl:true extension cmt_tbl];\n      ]\n  | Pstr_include include_declaration ->\n    print_include_declaration ~state include_declaration cmt_tbl\n  | Pstr_open open_description ->\n    print_open_description ~state open_description cmt_tbl\n  | Pstr_modtype mod_type_decl ->\n    print_module_type_declaration ~state mod_type_decl cmt_tbl\n  | Pstr_module module_binding ->\n    print_module_binding ~state ~is_rec:false module_binding cmt_tbl 0\n  | Pstr_recmodule module_bindings ->\n    print_listi\n      ~get_loc:(fun mb -> mb.Parsetree.pmb_loc)\n      ~nodes:module_bindings\n      ~print:(print_module_binding ~state ~is_rec:true)\n      cmt_tbl\n  | Pstr_exception extension_constructor ->\n    print_exception_def ~state extension_constructor cmt_tbl\n  | Pstr_typext type_extension ->\n    print_type_extension ~state type_extension cmt_tbl\n  | Pstr_class _ | Pstr_class_type _ -> Doc.nil\n\nand print_type_extension ~state (te : Parsetree.type_extension) cmt_tbl =\n  let prefix = Doc.text \"type \" in\n  let name = print_lident_path te.ptyext_path cmt_tbl in\n  let type_params = print_type_params ~state te.ptyext_params cmt_tbl in\n  let extension_constructors =\n    let ecs = te.ptyext_constructors in\n    let force_break =\n      match (ecs, List.rev ecs) with\n      | first :: _, last :: _ ->\n        first.pext_loc.loc_start.pos_lnum > te.ptyext_path.loc.loc_end.pos_lnum\n        || first.pext_loc.loc_start.pos_lnum < last.pext_loc.loc_end.pos_lnum\n      | _ -> false\n    in\n    let private_flag =\n      match te.ptyext_private with\n      | Asttypes.Private -> Doc.concat [Doc.text 
\"private\"; Doc.line]\n      | Public -> Doc.nil\n    in\n    let rows =\n      print_listi\n        ~get_loc:(fun n -> n.Parsetree.pext_loc)\n        ~print:(print_extension_constructor ~state)\n        ~nodes:ecs ~force_break cmt_tbl\n    in\n    Doc.breakable_group ~force_break\n      (Doc.indent\n         (Doc.concat\n            [\n              Doc.line;\n              private_flag;\n              rows;\n              (* Doc.join ~sep:Doc.line ( *)\n              (* List.mapi printExtensionConstructor ecs *)\n              (* ) *)\n            ]))\n  in\n  Doc.group\n    (Doc.concat\n       [\n         print_attributes ~state ~loc:te.ptyext_path.loc te.ptyext_attributes\n           cmt_tbl;\n         prefix;\n         name;\n         type_params;\n         Doc.text \" +=\";\n         extension_constructors;\n       ])\n\nand print_module_binding ~state ~is_rec module_binding cmt_tbl i =\n  let prefix =\n    if i = 0 then\n      Doc.concat\n        [Doc.text \"module \"; (if is_rec then Doc.text \"rec \" else Doc.nil)]\n    else Doc.text \"and \"\n  in\n  let mod_expr_doc, mod_constraint_doc =\n    match module_binding.pmb_expr with\n    | {pmod_desc = Pmod_constraint (mod_expr, mod_type)}\n      when not\n             (ParsetreeViewer.has_await_attribute\n                module_binding.pmb_expr.pmod_attributes) ->\n      ( print_mod_expr ~state mod_expr cmt_tbl,\n        Doc.concat [Doc.text \": \"; print_mod_type ~state mod_type cmt_tbl] )\n    | mod_expr -> (print_mod_expr ~state mod_expr cmt_tbl, Doc.nil)\n  in\n  let mod_name =\n    let doc = Doc.text module_binding.pmb_name.Location.txt in\n    print_comments doc cmt_tbl module_binding.pmb_name.loc\n  in\n  let doc =\n    Doc.concat\n      [\n        print_attributes ~state ~loc:module_binding.pmb_name.loc\n          module_binding.pmb_attributes cmt_tbl;\n        prefix;\n        mod_name;\n        mod_constraint_doc;\n        Doc.text \" = \";\n        mod_expr_doc;\n      ]\n  in\n  print_comments 
doc cmt_tbl module_binding.pmb_loc\n\nand print_module_type_declaration ~state\n    (mod_type_decl : Parsetree.module_type_declaration) cmt_tbl =\n  let mod_name =\n    let doc = Doc.text mod_type_decl.pmtd_name.txt in\n    print_comments doc cmt_tbl mod_type_decl.pmtd_name.loc\n  in\n  Doc.concat\n    [\n      print_attributes ~state mod_type_decl.pmtd_attributes cmt_tbl;\n      Doc.text \"module type \";\n      mod_name;\n      (match mod_type_decl.pmtd_type with\n      | None -> Doc.nil\n      | Some mod_type ->\n        Doc.concat [Doc.text \" = \"; print_mod_type ~state mod_type cmt_tbl]);\n    ]\n\nand print_mod_type ~state mod_type cmt_tbl =\n  let mod_type_doc =\n    match mod_type.pmty_desc with\n    | Parsetree.Pmty_ident longident ->\n      Doc.concat\n        [\n          print_attributes ~state ~loc:longident.loc mod_type.pmty_attributes\n            cmt_tbl;\n          print_longident_location longident cmt_tbl;\n        ]\n    | Pmty_signature [] ->\n      if has_comments_inside cmt_tbl mod_type.pmty_loc then\n        let doc = print_comments_inside cmt_tbl mod_type.pmty_loc in\n        Doc.concat [Doc.lbrace; doc; Doc.rbrace]\n      else\n        let should_break =\n          mod_type.pmty_loc.loc_start.pos_lnum\n          < mod_type.pmty_loc.loc_end.pos_lnum\n        in\n        Doc.breakable_group ~force_break:should_break\n          (Doc.concat [Doc.lbrace; Doc.soft_line; Doc.soft_line; Doc.rbrace])\n    | Pmty_signature signature ->\n      let signature_doc =\n        Doc.breakable_group ~force_break:true\n          (Doc.concat\n             [\n               Doc.lbrace;\n               Doc.indent\n                 (Doc.concat\n                    [Doc.line; print_signature ~state signature cmt_tbl]);\n               Doc.line;\n               Doc.rbrace;\n             ])\n      in\n      Doc.concat\n        [\n          print_attributes ~state mod_type.pmty_attributes cmt_tbl; signature_doc;\n        ]\n    | Pmty_functor _ ->\n      let 
parameters, return_type = ParsetreeViewer.functor_type mod_type in\n      let parameters_doc =\n        match parameters with\n        | [] -> Doc.nil\n        | [(attrs, {Location.txt = \"_\"; loc}, Some mod_type)] ->\n          let cmt_loc =\n            {loc with loc_end = mod_type.Parsetree.pmty_loc.loc_end}\n          in\n          let attrs = print_attributes ~state attrs cmt_tbl in\n          let doc =\n            Doc.concat [attrs; print_mod_type ~state mod_type cmt_tbl]\n          in\n          print_comments doc cmt_tbl cmt_loc\n        | params ->\n          Doc.group\n            (Doc.concat\n               [\n                 Doc.lparen;\n                 Doc.indent\n                   (Doc.concat\n                      [\n                        Doc.soft_line;\n                        Doc.join\n                          ~sep:(Doc.concat [Doc.comma; Doc.line])\n                          (List.map\n                             (fun (attrs, lbl, mod_type) ->\n                               let cmt_loc =\n                                 match mod_type with\n                                 | None -> lbl.Asttypes.loc\n                                 | Some mod_type ->\n                                   {\n                                     lbl.Asttypes.loc with\n                                     loc_end =\n                                       mod_type.Parsetree.pmty_loc.loc_end;\n                                   }\n                               in\n                               let attrs =\n                                 print_attributes ~state attrs cmt_tbl\n                               in\n                               let lbl_doc =\n                                 if lbl.Location.txt = \"_\" || lbl.txt = \"*\" then\n                                   Doc.nil\n                                 else\n                                   let doc = Doc.text lbl.txt in\n                                   print_comments doc cmt_tbl lbl.loc\n 
                              in\n                               let doc =\n                                 Doc.concat\n                                   [\n                                     attrs;\n                                     lbl_doc;\n                                     (match mod_type with\n                                     | None -> Doc.nil\n                                     | Some mod_type ->\n                                       Doc.concat\n                                         [\n                                           (if lbl.txt = \"_\" then Doc.nil\n                                            else Doc.text \": \");\n                                           print_mod_type ~state mod_type\n                                             cmt_tbl;\n                                         ]);\n                                   ]\n                               in\n                               print_comments doc cmt_tbl cmt_loc)\n                             params);\n                      ]);\n                 Doc.trailing_comma;\n                 Doc.soft_line;\n                 Doc.rparen;\n               ])\n      in\n      let return_doc =\n        let doc = print_mod_type ~state return_type cmt_tbl in\n        if Parens.mod_type_functor_return return_type then add_parens doc\n        else doc\n      in\n      Doc.group\n        (Doc.concat\n           [\n             parameters_doc;\n             Doc.group (Doc.concat [Doc.text \" =>\"; Doc.line; return_doc]);\n           ])\n    | Pmty_typeof mod_expr ->\n      Doc.concat\n        [Doc.text \"module type of \"; print_mod_expr ~state mod_expr cmt_tbl]\n    | Pmty_extension extension ->\n      print_extension ~state ~at_module_lvl:false extension cmt_tbl\n    | Pmty_alias longident ->\n      Doc.concat\n        [Doc.text \"module \"; print_longident_location longident cmt_tbl]\n    | Pmty_with (mod_type, with_constraints) ->\n      let operand =\n        let doc = 
print_mod_type ~state mod_type cmt_tbl in\n        if Parens.mod_type_with_operand mod_type then add_parens doc else doc\n      in\n      Doc.group\n        (Doc.concat\n           [\n             operand;\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.line;\n                    print_with_constraints ~state with_constraints cmt_tbl;\n                  ]);\n           ])\n  in\n  let attrs_already_printed =\n    match mod_type.pmty_desc with\n    | Pmty_functor _ | Pmty_signature _ | Pmty_ident _ -> true\n    | _ -> false\n  in\n  let doc =\n    Doc.concat\n      [\n        (if attrs_already_printed then Doc.nil\n         else print_attributes ~state mod_type.pmty_attributes cmt_tbl);\n        mod_type_doc;\n      ]\n  in\n  print_comments doc cmt_tbl mod_type.pmty_loc\n\nand print_with_constraints ~state with_constraints cmt_tbl =\n  let rows =\n    List.mapi\n      (fun i with_constraint ->\n        Doc.group\n          (Doc.concat\n             [\n               (if i == 0 then Doc.text \"with \" else Doc.text \"and \");\n               print_with_constraint ~state with_constraint cmt_tbl;\n             ]))\n      with_constraints\n  in\n  Doc.join ~sep:Doc.line rows\n\nand print_with_constraint ~state (with_constraint : Parsetree.with_constraint)\n    cmt_tbl =\n  match with_constraint with\n  (* with type X.t = ... 
*)\n  | Pwith_type (longident, type_declaration) ->\n    Doc.group\n      (print_type_declaration ~state\n         ~name:(print_lident_path longident cmt_tbl)\n         ~equal_sign:\"=\" ~rec_flag:Doc.nil 0 type_declaration CommentTable.empty)\n  (* with module X.Y = Z *)\n  | Pwith_module ({txt = longident1}, {txt = longident2}) ->\n    Doc.concat\n      [\n        Doc.text \"module \";\n        print_longident longident1;\n        Doc.text \" =\";\n        Doc.indent (Doc.concat [Doc.line; print_longident longident2]);\n      ]\n  (* with type X.t := ..., same format as [Pwith_type] *)\n  | Pwith_typesubst (longident, type_declaration) ->\n    Doc.group\n      (print_type_declaration ~state\n         ~name:(print_lident_path longident cmt_tbl)\n         ~equal_sign:\":=\" ~rec_flag:Doc.nil 0 type_declaration\n         CommentTable.empty)\n  | Pwith_modsubst ({txt = longident1}, {txt = longident2}) ->\n    Doc.concat\n      [\n        Doc.text \"module \";\n        print_longident longident1;\n        Doc.text \" :=\";\n        Doc.indent (Doc.concat [Doc.line; print_longident longident2]);\n      ]\n\nand print_signature ~state signature cmt_tbl =\n  match signature with\n  | [] -> print_comments_inside_file cmt_tbl\n  | signature ->\n    print_list\n      ~get_loc:(fun s -> s.Parsetree.psig_loc)\n      ~nodes:signature\n      ~print:(print_signature_item ~state)\n      cmt_tbl\n\nand print_signature_item ~state (si : Parsetree.signature_item) cmt_tbl =\n  match si.psig_desc with\n  | Parsetree.Psig_value value_description ->\n    print_value_description ~state value_description cmt_tbl\n  | Psig_type (rec_flag, type_declarations) ->\n    let rec_flag =\n      match rec_flag with\n      | Asttypes.Nonrecursive -> Doc.nil\n      | Asttypes.Recursive -> Doc.text \"rec \"\n    in\n    print_type_declarations ~state ~rec_flag type_declarations cmt_tbl\n  | Psig_typext type_extension ->\n    print_type_extension ~state type_extension cmt_tbl\n  | Psig_exception 
extension_constructor ->\n    print_exception_def ~state extension_constructor cmt_tbl\n  | Psig_module module_declaration ->\n    print_module_declaration ~state module_declaration cmt_tbl\n  | Psig_recmodule module_declarations ->\n    print_rec_module_declarations ~state module_declarations cmt_tbl\n  | Psig_modtype mod_type_decl ->\n    print_module_type_declaration ~state mod_type_decl cmt_tbl\n  | Psig_open open_description ->\n    print_open_description ~state open_description cmt_tbl\n  | Psig_include include_description ->\n    print_include_description ~state include_description cmt_tbl\n  | Psig_attribute attr ->\n    fst (print_attribute ~state ~standalone:true attr cmt_tbl)\n  | Psig_extension (extension, attrs) ->\n    Doc.concat\n      [\n        print_attributes ~state attrs cmt_tbl;\n        Doc.concat\n          [print_extension ~state ~at_module_lvl:true extension cmt_tbl];\n      ]\n  | Psig_class _ | Psig_class_type _ -> Doc.nil\n\nand print_rec_module_declarations ~state module_declarations cmt_tbl =\n  print_listi\n    ~get_loc:(fun n -> n.Parsetree.pmd_loc)\n    ~nodes:module_declarations\n    ~print:(print_rec_module_declaration ~state)\n    cmt_tbl\n\nand print_rec_module_declaration ~state md cmt_tbl i =\n  let body =\n    match md.pmd_type.pmty_desc with\n    | Parsetree.Pmty_alias longident ->\n      Doc.concat [Doc.text \" = \"; print_longident_location longident cmt_tbl]\n    | _ ->\n      let needs_parens =\n        match md.pmd_type.pmty_desc with\n        | Pmty_with _ -> true\n        | _ -> false\n      in\n      let mod_type_doc =\n        let doc = print_mod_type ~state md.pmd_type cmt_tbl in\n        if needs_parens then add_parens doc else doc\n      in\n      Doc.concat [Doc.text \": \"; mod_type_doc]\n  in\n  let prefix = if i < 1 then \"module rec \" else \"and \" in\n  Doc.concat\n    [\n      print_attributes ~state ~loc:md.pmd_name.loc md.pmd_attributes cmt_tbl;\n      Doc.text prefix;\n      print_comments (Doc.text 
md.pmd_name.txt) cmt_tbl md.pmd_name.loc;\n      body;\n    ]\n\nand print_module_declaration ~state (md : Parsetree.module_declaration) cmt_tbl\n    =\n  let body =\n    match md.pmd_type.pmty_desc with\n    | Parsetree.Pmty_alias longident ->\n      Doc.concat [Doc.text \" = \"; print_longident_location longident cmt_tbl]\n    | _ -> Doc.concat [Doc.text \": \"; print_mod_type ~state md.pmd_type cmt_tbl]\n  in\n  Doc.concat\n    [\n      print_attributes ~state ~loc:md.pmd_name.loc md.pmd_attributes cmt_tbl;\n      Doc.text \"module \";\n      print_comments (Doc.text md.pmd_name.txt) cmt_tbl md.pmd_name.loc;\n      body;\n    ]\n\nand print_open_description ~state\n    (open_description : Parsetree.open_description) cmt_tbl =\n  Doc.concat\n    [\n      print_attributes ~state open_description.popen_attributes cmt_tbl;\n      Doc.text \"open\";\n      (match open_description.popen_override with\n      | Asttypes.Fresh -> Doc.space\n      | Asttypes.Override -> Doc.text \"! \");\n      print_longident_location open_description.popen_lid cmt_tbl;\n    ]\n\nand print_include_description ~state\n    (include_description : Parsetree.include_description) cmt_tbl =\n  Doc.concat\n    [\n      print_attributes ~state include_description.pincl_attributes cmt_tbl;\n      Doc.text \"include \";\n      print_mod_type ~state include_description.pincl_mod cmt_tbl;\n    ]\n\nand print_include_declaration ~state\n    (include_declaration : Parsetree.include_declaration) cmt_tbl =\n  Doc.concat\n    [\n      print_attributes ~state include_declaration.pincl_attributes cmt_tbl;\n      Doc.text \"include \";\n      (let include_doc =\n         print_mod_expr ~state include_declaration.pincl_mod cmt_tbl\n       in\n       if Parens.include_mod_expr include_declaration.pincl_mod then\n         add_parens include_doc\n       else include_doc);\n    ]\n\nand print_value_bindings ~state ~rec_flag (vbs : Parsetree.value_binding list)\n    cmt_tbl =\n  print_listi\n    ~get_loc:(fun vb 
-> vb.Parsetree.pvb_loc)\n    ~nodes:vbs\n    ~print:(print_value_binding ~state ~rec_flag)\n    cmt_tbl\n\nand print_value_description ~state value_description cmt_tbl =\n  let is_external =\n    match value_description.pval_prim with\n    | [] -> false\n    | _ -> true\n  in\n  let attrs =\n    print_attributes ~state ~loc:value_description.pval_name.loc\n      value_description.pval_attributes cmt_tbl\n  in\n  let header = if is_external then \"external \" else \"let \" in\n  Doc.group\n    (Doc.concat\n       [\n         attrs;\n         Doc.text header;\n         print_comments\n           (print_ident_like value_description.pval_name.txt)\n           cmt_tbl value_description.pval_name.loc;\n         Doc.text \": \";\n         print_typ_expr ~state value_description.pval_type cmt_tbl;\n         (if is_external then\n            Doc.group\n              (Doc.concat\n                 [\n                   Doc.text \" =\";\n                   Doc.indent\n                     (Doc.concat\n                        [\n                          Doc.line;\n                          Doc.join ~sep:Doc.line\n                            (List.map\n                               (fun s ->\n                                 Doc.concat\n                                   [Doc.text \"\\\"\"; Doc.text s; Doc.text \"\\\"\"])\n                               value_description.pval_prim);\n                        ]);\n                 ])\n          else Doc.nil);\n       ])\n\nand print_type_declarations ~state ~rec_flag type_declarations cmt_tbl =\n  print_listi\n    ~get_loc:(fun n -> n.Parsetree.ptype_loc)\n    ~nodes:type_declarations\n    ~print:(print_type_declaration2 ~state ~rec_flag)\n    cmt_tbl\n\n(*\n * type_declaration = {\n *    ptype_name: string loc;\n *    ptype_params: (core_type * variance) list;\n *          (* ('a1,...'an) t; None represents  _*)\n *    ptype_cstrs: (core_type * core_type * Location.t) list;\n *          (* ... constraint T1=T1'  ... 
constraint Tn=Tn' *)\n *    ptype_kind: type_kind;\n *    ptype_private: private_flag;   (* = private ... *)\n *    ptype_manifest: core_type option;  (* = T *)\n *    ptype_attributes: attributes;   (* ... [@@id1] [@@id2] *)\n *    ptype_loc: Location.t;\n * }\n *\n *\n *  type t                     (abstract, no manifest)\n *  type t = T0                (abstract, manifest=T0)\n *  type t = C of T | ...      (variant,  no manifest)\n *  type t = T0 = C of T | ... (variant,  manifest=T0)\n *  type t = {l: T; ...}       (record,   no manifest)\n *  type t = T0 = {l : T; ...} (record,   manifest=T0)\n *  type t = ..                (open,     no manifest)\n *\n *\n * and type_kind =\n *  | Ptype_abstract\n *  | Ptype_variant of constructor_declaration list\n *        (* Invariant: non-empty list *)\n *  | Ptype_record of label_declaration list\n *        (* Invariant: non-empty list *)\n *  | Ptype_open\n *)\nand print_type_declaration ~state ~name ~equal_sign ~rec_flag i\n    (td : Parsetree.type_declaration) cmt_tbl =\n  let attrs =\n    print_attributes ~state ~loc:td.ptype_loc td.ptype_attributes cmt_tbl\n  in\n  let prefix =\n    if i > 0 then Doc.text \"and \" else Doc.concat [Doc.text \"type \"; rec_flag]\n  in\n  let type_name = name in\n  let type_params = print_type_params ~state td.ptype_params cmt_tbl in\n  let manifest_and_kind =\n    match td.ptype_kind with\n    | Ptype_abstract -> (\n      match td.ptype_manifest with\n      | None -> Doc.nil\n      | Some typ ->\n        Doc.concat\n          [\n            Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n            print_private_flag td.ptype_private;\n            print_typ_expr ~state typ cmt_tbl;\n          ])\n    | Ptype_open ->\n      Doc.concat\n        [\n          Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n          print_private_flag td.ptype_private;\n          Doc.text \"..\";\n        ]\n    | Ptype_record lds ->\n      let manifest =\n        match 
td.ptype_manifest with\n        | None -> Doc.nil\n        | Some typ ->\n          Doc.concat\n            [\n              Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n              print_typ_expr ~state typ cmt_tbl;\n            ]\n      in\n      Doc.concat\n        [\n          manifest;\n          Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n          print_private_flag td.ptype_private;\n          print_record_declaration ~state lds cmt_tbl;\n        ]\n    | Ptype_variant cds ->\n      let manifest =\n        match td.ptype_manifest with\n        | None -> Doc.nil\n        | Some typ ->\n          Doc.concat\n            [\n              Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n              print_typ_expr ~state typ cmt_tbl;\n            ]\n      in\n      Doc.concat\n        [\n          manifest;\n          Doc.concat [Doc.space; Doc.text equal_sign];\n          print_constructor_declarations ~state ~private_flag:td.ptype_private\n            cds cmt_tbl;\n        ]\n  in\n  let constraints = print_type_definition_constraints ~state td.ptype_cstrs in\n  Doc.group\n    (Doc.concat\n       [attrs; prefix; type_name; type_params; manifest_and_kind; constraints])\n\nand print_type_declaration2 ~state ~rec_flag (td : Parsetree.type_declaration)\n    cmt_tbl i =\n  let name =\n    let doc = print_ident_like td.Parsetree.ptype_name.txt in\n    print_comments doc cmt_tbl td.ptype_name.loc\n  in\n  let equal_sign = \"=\" in\n  let attrs =\n    print_attributes ~state ~loc:td.ptype_loc td.ptype_attributes cmt_tbl\n  in\n  let prefix =\n    if i > 0 then Doc.text \"and \" else Doc.concat [Doc.text \"type \"; rec_flag]\n  in\n  let type_name = name in\n  let type_params = print_type_params ~state td.ptype_params cmt_tbl in\n  let manifest_and_kind =\n    match td.ptype_kind with\n    | Ptype_abstract -> (\n      match td.ptype_manifest with\n      | None -> Doc.nil\n      | Some typ ->\n        Doc.concat\n          [\n        
    Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n            print_private_flag td.ptype_private;\n            print_typ_expr ~state typ cmt_tbl;\n          ])\n    | Ptype_open ->\n      Doc.concat\n        [\n          Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n          print_private_flag td.ptype_private;\n          Doc.text \"..\";\n        ]\n    | Ptype_record lds ->\n      if lds = [] then\n        Doc.concat\n          [\n            Doc.space;\n            Doc.text equal_sign;\n            Doc.space;\n            Doc.lbrace;\n            print_comments_inside cmt_tbl td.ptype_loc;\n            Doc.rbrace;\n          ]\n      else\n        let manifest =\n          match td.ptype_manifest with\n          | None -> Doc.nil\n          | Some typ ->\n            Doc.concat\n              [\n                Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n                print_typ_expr ~state typ cmt_tbl;\n              ]\n        in\n        Doc.concat\n          [\n            manifest;\n            Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n            print_private_flag td.ptype_private;\n            print_record_declaration ~state lds cmt_tbl;\n          ]\n    | Ptype_variant cds ->\n      let manifest =\n        match td.ptype_manifest with\n        | None -> Doc.nil\n        | Some typ ->\n          Doc.concat\n            [\n              Doc.concat [Doc.space; Doc.text equal_sign; Doc.space];\n              print_typ_expr ~state typ cmt_tbl;\n            ]\n      in\n      Doc.concat\n        [\n          manifest;\n          Doc.concat [Doc.space; Doc.text equal_sign];\n          print_constructor_declarations ~state ~private_flag:td.ptype_private\n            cds cmt_tbl;\n        ]\n  in\n  let constraints = print_type_definition_constraints ~state td.ptype_cstrs in\n  Doc.group\n    (Doc.concat\n       [attrs; prefix; type_name; type_params; manifest_and_kind; constraints])\n\nand 
print_type_definition_constraints ~state cstrs =\n  match cstrs with\n  | [] -> Doc.nil\n  | cstrs ->\n    Doc.indent\n      (Doc.group\n         (Doc.concat\n            [\n              Doc.line;\n              Doc.group\n                (Doc.join ~sep:Doc.line\n                   (List.map (print_type_definition_constraint ~state) cstrs));\n            ]))\n\nand print_type_definition_constraint ~state\n    ((typ1, typ2, _loc) :\n      Parsetree.core_type * Parsetree.core_type * Location.t) =\n  Doc.concat\n    [\n      Doc.text \"constraint \";\n      print_typ_expr ~state typ1 CommentTable.empty;\n      Doc.text \" = \";\n      print_typ_expr ~state typ2 CommentTable.empty;\n    ]\n\nand print_private_flag (flag : Asttypes.private_flag) =\n  match flag with\n  | Private -> Doc.text \"private \"\n  | Public -> Doc.nil\n\nand print_type_params ~state type_params cmt_tbl =\n  match type_params with\n  | [] -> Doc.nil\n  | type_params ->\n    Doc.group\n      (Doc.concat\n         [\n           Doc.less_than;\n           Doc.indent\n             (Doc.concat\n                [\n                  Doc.soft_line;\n                  Doc.join\n                    ~sep:(Doc.concat [Doc.comma; Doc.line])\n                    (List.map\n                       (fun type_param ->\n                         let doc = print_type_param ~state type_param cmt_tbl in\n                         print_comments doc cmt_tbl\n                           (fst type_param).Parsetree.ptyp_loc)\n                       type_params);\n                ]);\n           Doc.trailing_comma;\n           Doc.soft_line;\n           Doc.greater_than;\n         ])\n\nand print_type_param ~state (param : Parsetree.core_type * Asttypes.variance)\n    cmt_tbl =\n  let typ, variance = param in\n  let printed_variance =\n    match variance with\n    | Covariant -> Doc.text \"+\"\n    | Contravariant -> Doc.text \"-\"\n    | Invariant -> Doc.nil\n  in\n  Doc.concat [printed_variance; print_typ_expr ~state typ 
cmt_tbl]\n\nand print_record_declaration ~state (lds : Parsetree.label_declaration list)\n    cmt_tbl =\n  let force_break =\n    match (lds, List.rev lds) with\n    | first :: _, last :: _ ->\n      first.pld_loc.loc_start.pos_lnum < last.pld_loc.loc_end.pos_lnum\n    | _ -> false\n  in\n  Doc.breakable_group ~force_break\n    (Doc.concat\n       [\n         Doc.lbrace;\n         Doc.indent\n           (Doc.concat\n              [\n                Doc.soft_line;\n                Doc.join\n                  ~sep:(Doc.concat [Doc.comma; Doc.line])\n                  (List.map\n                     (fun ld ->\n                       let doc = print_label_declaration ~state ld cmt_tbl in\n                       print_comments doc cmt_tbl ld.Parsetree.pld_loc)\n                     lds);\n              ]);\n         Doc.trailing_comma;\n         Doc.soft_line;\n         Doc.rbrace;\n       ])\n\nand print_constructor_declarations ~state ~private_flag\n    (cds : Parsetree.constructor_declaration list) cmt_tbl =\n  let force_break =\n    match (cds, List.rev cds) with\n    | first :: _, last :: _ ->\n      first.pcd_loc.loc_start.pos_lnum < last.pcd_loc.loc_end.pos_lnum\n    | _ -> false\n  in\n  let private_flag =\n    match private_flag with\n    | Asttypes.Private -> Doc.concat [Doc.text \"private\"; Doc.line]\n    | Public -> Doc.nil\n  in\n  let rows =\n    print_listi\n      ~get_loc:(fun cd -> cd.Parsetree.pcd_loc)\n      ~nodes:cds\n      ~print:(fun cd cmt_tbl i ->\n        let doc = print_constructor_declaration2 ~state i cd cmt_tbl in\n        print_comments doc cmt_tbl cd.Parsetree.pcd_loc)\n      ~force_break cmt_tbl\n  in\n  Doc.breakable_group ~force_break\n    (Doc.indent (Doc.concat [Doc.line; private_flag; rows]))\n\nand print_constructor_declaration2 ~state i\n    (cd : Parsetree.constructor_declaration) cmt_tbl =\n  let attrs = print_attributes ~state cd.pcd_attributes cmt_tbl in\n  let is_dot_dot_dot = cd.pcd_name.txt = \"...\" in\n  let bar =\n    
if i > 0 || cd.pcd_attributes <> [] || is_dot_dot_dot then Doc.text \"| \"\n    else Doc.if_breaks (Doc.text \"| \") Doc.nil\n  in\n  let constr_name =\n    let doc = Doc.text cd.pcd_name.txt in\n    print_comments doc cmt_tbl cd.pcd_name.loc\n  in\n  let constr_args =\n    print_constructor_arguments ~is_dot_dot_dot ~state ~indent:true cd.pcd_args\n      cmt_tbl\n  in\n  let gadt =\n    match cd.pcd_res with\n    | None -> Doc.nil\n    | Some typ ->\n      Doc.indent (Doc.concat [Doc.text \": \"; print_typ_expr ~state typ cmt_tbl])\n  in\n  Doc.concat\n    [\n      bar;\n      Doc.group\n        (Doc.concat\n           [\n             attrs;\n             (* TODO: fix parsing of attributes, so when can print them above the bar? *)\n             constr_name;\n             constr_args;\n             gadt;\n           ]);\n    ]\n\nand print_constructor_arguments ?(is_dot_dot_dot = false) ~state ~indent\n    (cd_args : Parsetree.constructor_arguments) cmt_tbl =\n  match cd_args with\n  | Pcstr_tuple [] -> Doc.nil\n  | Pcstr_tuple types ->\n    let args =\n      Doc.concat\n        [\n          (if is_dot_dot_dot then Doc.nil else Doc.lparen);\n          Doc.indent\n            (Doc.concat\n               [\n                 Doc.soft_line;\n                 Doc.join\n                   ~sep:(Doc.concat [Doc.comma; Doc.line])\n                   (List.map\n                      (fun typexpr -> print_typ_expr ~state typexpr cmt_tbl)\n                      types);\n               ]);\n          Doc.trailing_comma;\n          Doc.soft_line;\n          (if is_dot_dot_dot then Doc.nil else Doc.rparen);\n        ]\n    in\n    Doc.group (if indent then Doc.indent args else args)\n  | Pcstr_record lds ->\n    let args =\n      Doc.concat\n        [\n          Doc.lparen;\n          (* manually inline the printRecordDeclaration, gives better layout *)\n          Doc.lbrace;\n          Doc.indent\n            (Doc.concat\n               [\n                 Doc.soft_line;\n      
           Doc.join\n                   ~sep:(Doc.concat [Doc.comma; Doc.line])\n                   (List.map\n                      (fun ld ->\n                        let doc = print_label_declaration ~state ld cmt_tbl in\n                        print_comments doc cmt_tbl ld.Parsetree.pld_loc)\n                      lds);\n               ]);\n          Doc.trailing_comma;\n          Doc.soft_line;\n          Doc.rbrace;\n          Doc.rparen;\n        ]\n    in\n    if indent then Doc.indent args else args\n\nand print_label_declaration ~state (ld : Parsetree.label_declaration) cmt_tbl =\n  let attrs =\n    print_attributes ~state ~loc:ld.pld_name.loc ld.pld_attributes cmt_tbl\n  in\n  let mutable_flag =\n    match ld.pld_mutable with\n    | Mutable -> Doc.text \"mutable \"\n    | Immutable -> Doc.nil\n  in\n  let name, is_dot =\n    let doc, is_dot =\n      if ld.pld_name.txt = \"...\" then (Doc.text ld.pld_name.txt, true)\n      else (print_ident_like ld.pld_name.txt, false)\n    in\n    (print_comments doc cmt_tbl ld.pld_name.loc, is_dot)\n  in\n  let optional = print_optional_label ld.pld_attributes in\n  Doc.group\n    (Doc.concat\n       [\n         attrs;\n         mutable_flag;\n         name;\n         optional;\n         (if is_dot then Doc.nil else Doc.text \": \");\n         print_typ_expr ~state ld.pld_type cmt_tbl;\n       ])\n\nand print_typ_expr ~(state : State.t) (typ_expr : Parsetree.core_type) cmt_tbl =\n  let print_arrow ~uncurried ?(arity = max_int) typ_expr =\n    let attrs_before, args, return_type =\n      ParsetreeViewer.arrow_type ~arity typ_expr\n    in\n    let dotted, attrs_before =\n      let dotted =\n        state.uncurried_config |> Res_uncurried.get_dotted ~uncurried\n      in\n      (* Converting .ml code to .res requires processing uncurried attributes *)\n      let has_bs, attrs = ParsetreeViewer.process_bs_attribute attrs_before in\n      (dotted || has_bs, attrs)\n    in\n    let return_type_needs_parens =\n      match 
return_type.ptyp_desc with\n      | Ptyp_alias _ -> true\n      | _ -> false\n    in\n    let return_doc =\n      let doc = print_typ_expr ~state return_type cmt_tbl in\n      if return_type_needs_parens then Doc.concat [Doc.lparen; doc; Doc.rparen]\n      else doc\n    in\n    match args with\n    | [] -> Doc.nil\n    | [([], Nolabel, n)] when not dotted ->\n      let has_attrs_before = not (attrs_before = []) in\n      let attrs =\n        if has_attrs_before then\n          print_attributes ~state ~inline:true attrs_before cmt_tbl\n        else Doc.nil\n      in\n      let typ_doc =\n        let doc = print_typ_expr ~state n cmt_tbl in\n        match n.ptyp_desc with\n        | Ptyp_arrow _ | Ptyp_tuple _ | Ptyp_alias _ -> add_parens doc\n        | _ when Ast_uncurried.core_type_is_uncurried_fun n -> add_parens doc\n        | _ -> doc\n      in\n      Doc.group\n        (Doc.concat\n           [\n             Doc.group attrs;\n             Doc.group\n               (if has_attrs_before then\n                  Doc.concat\n                    [\n                      Doc.lparen;\n                      Doc.indent\n                        (Doc.concat\n                           [Doc.soft_line; typ_doc; Doc.text \" => \"; return_doc]);\n                      Doc.soft_line;\n                      Doc.rparen;\n                    ]\n                else Doc.concat [typ_doc; Doc.text \" => \"; return_doc]);\n           ])\n    | args ->\n      let attrs = print_attributes ~state ~inline:true attrs_before cmt_tbl in\n      let rendered_args =\n        Doc.concat\n          [\n            attrs;\n            Doc.text \"(\";\n            Doc.indent\n              (Doc.concat\n                 [\n                   Doc.soft_line;\n                   (if dotted then Doc.concat [Doc.dot; Doc.space] else Doc.nil);\n                   Doc.join\n                     ~sep:(Doc.concat [Doc.comma; Doc.line])\n                     (List.map\n                        (fun tp -> 
print_type_parameter ~state tp cmt_tbl)\n                        args);\n                 ]);\n            Doc.trailing_comma;\n            Doc.soft_line;\n            Doc.text \")\";\n          ]\n      in\n      Doc.group (Doc.concat [rendered_args; Doc.text \" => \"; return_doc])\n  in\n  let rendered_type =\n    match typ_expr.ptyp_desc with\n    | Ptyp_any -> Doc.text \"_\"\n    | Ptyp_var var ->\n      Doc.concat [Doc.text \"'\"; print_ident_like ~allow_uident:true var]\n    | Ptyp_extension extension ->\n      print_extension ~state ~at_module_lvl:false extension cmt_tbl\n    | Ptyp_alias (typ, alias) ->\n      let typ =\n        (* Technically type t = (string, float) => unit as 'x, doesn't require\n         * parens around the arrow expression. This is very confusing though.\n         * Is the \"as\" part of \"unit\" or \"(string, float) => unit\". By printing\n         * parens we guide the user towards its meaning.*)\n        let needs_parens =\n          match typ.ptyp_desc with\n          | Ptyp_arrow _ -> true\n          | _ when Ast_uncurried.core_type_is_uncurried_fun typ -> true\n          | _ -> false\n        in\n        let doc = print_typ_expr ~state typ cmt_tbl in\n        if needs_parens then Doc.concat [Doc.lparen; doc; Doc.rparen] else doc\n      in\n      Doc.concat\n        [\n          typ; Doc.text \" as \"; Doc.concat [Doc.text \"'\"; print_ident_like alias];\n        ]\n    (* object printings *)\n    | Ptyp_object (fields, open_flag) ->\n      print_object ~state ~inline:false fields open_flag cmt_tbl\n    | Ptyp_arrow _ -> print_arrow ~uncurried:false typ_expr\n    | Ptyp_constr _ when Ast_uncurried.core_type_is_uncurried_fun typ_expr ->\n      let arity, t_arg =\n        Ast_uncurried.core_type_extract_uncurried_fun typ_expr\n      in\n      print_arrow ~uncurried:true ~arity t_arg\n    | Ptyp_constr\n        (longident_loc, [{ptyp_desc = Ptyp_object (fields, open_flag)}]) ->\n      (* for foo<{\"a\": b}>, when the object is long 
and needs a line break, we\n         want the <{ and }> to stay hugged together *)\n      let constr_name = print_lident_path longident_loc cmt_tbl in\n      Doc.concat\n        [\n          constr_name;\n          Doc.less_than;\n          print_object ~state ~inline:true fields open_flag cmt_tbl;\n          Doc.greater_than;\n        ]\n    | Ptyp_constr (longident_loc, [{ptyp_desc = Parsetree.Ptyp_tuple tuple}]) ->\n      let constr_name = print_lident_path longident_loc cmt_tbl in\n      Doc.group\n        (Doc.concat\n           [\n             constr_name;\n             Doc.less_than;\n             print_tuple_type ~state ~inline:true tuple cmt_tbl;\n             Doc.greater_than;\n           ])\n    | Ptyp_constr (longident_loc, constr_args) -> (\n      let constr_name = print_lident_path longident_loc cmt_tbl in\n      match constr_args with\n      | [] -> constr_name\n      | _args ->\n        Doc.group\n          (Doc.concat\n             [\n               constr_name;\n               Doc.less_than;\n               Doc.indent\n                 (Doc.concat\n                    [\n                      Doc.soft_line;\n                      Doc.join\n                        ~sep:(Doc.concat [Doc.comma; Doc.line])\n                        (List.map\n                           (fun typexpr ->\n                             print_typ_expr ~state typexpr cmt_tbl)\n                           constr_args);\n                    ]);\n               Doc.trailing_comma;\n               Doc.soft_line;\n               Doc.greater_than;\n             ]))\n    | Ptyp_tuple types -> print_tuple_type ~state ~inline:false types cmt_tbl\n    | Ptyp_poly ([], typ) -> print_typ_expr ~state typ cmt_tbl\n    | Ptyp_poly (string_locs, typ) ->\n      Doc.concat\n        [\n          Doc.join ~sep:Doc.space\n            (List.map\n               (fun {Location.txt; loc} ->\n                 let doc = Doc.concat [Doc.text \"'\"; Doc.text txt] in\n                 print_comments doc 
cmt_tbl loc)\n               string_locs);\n          Doc.dot;\n          Doc.space;\n          print_typ_expr ~state typ cmt_tbl;\n        ]\n    | Ptyp_package package_type ->\n      print_package_type ~state ~print_module_keyword_and_parens:true\n        package_type cmt_tbl\n    | Ptyp_class _ -> Doc.text \"classes are not supported in types\"\n    | Ptyp_variant (row_fields, closed_flag, labels_opt) ->\n      let force_break =\n        typ_expr.ptyp_loc.Location.loc_start.pos_lnum\n        < typ_expr.ptyp_loc.loc_end.pos_lnum\n      in\n      let print_row_field = function\n        | Parsetree.Rtag ({txt; loc}, attrs, true, []) ->\n          let doc =\n            Doc.group\n              (Doc.concat\n                 [\n                   print_attributes ~state attrs cmt_tbl;\n                   Doc.concat [Doc.text \"#\"; print_poly_var_ident txt];\n                 ])\n          in\n          print_comments doc cmt_tbl loc\n        | Rtag ({txt}, attrs, truth, types) ->\n          let do_type t =\n            match t.Parsetree.ptyp_desc with\n            | Ptyp_tuple _ -> print_typ_expr ~state t cmt_tbl\n            | _ ->\n              Doc.concat\n                [Doc.lparen; print_typ_expr ~state t cmt_tbl; Doc.rparen]\n          in\n          let printed_types = List.map do_type types in\n          let cases =\n            Doc.join ~sep:(Doc.concat [Doc.line; Doc.text \"& \"]) printed_types\n          in\n          let cases =\n            if truth then Doc.concat [Doc.line; Doc.text \"& \"; cases] else cases\n          in\n          Doc.group\n            (Doc.concat\n               [\n                 print_attributes ~state attrs cmt_tbl;\n                 Doc.concat [Doc.text \"#\"; print_poly_var_ident txt];\n                 cases;\n               ])\n        | Rinherit core_type -> print_typ_expr ~state core_type cmt_tbl\n      in\n      let docs = List.map print_row_field row_fields in\n      let cases = Doc.join ~sep:(Doc.concat [Doc.line; 
Doc.text \"| \"]) docs in\n      let cases =\n        if docs = [] then cases\n        else Doc.concat [Doc.if_breaks (Doc.text \"| \") Doc.nil; cases]\n      in\n      let opening_symbol =\n        if closed_flag = Open then Doc.concat [Doc.greater_than; Doc.line]\n        else if labels_opt = None then Doc.soft_line\n        else Doc.concat [Doc.less_than; Doc.line]\n      in\n      let labels =\n        match labels_opt with\n        | None | Some [] -> Doc.nil\n        | Some labels ->\n          Doc.concat\n            (List.map\n               (fun label ->\n                 Doc.concat [Doc.line; Doc.text \"#\"; print_poly_var_ident label])\n               labels)\n      in\n      let closing_symbol =\n        match labels_opt with\n        | None | Some [] -> Doc.nil\n        | _ -> Doc.text \" >\"\n      in\n      Doc.breakable_group ~force_break\n        (Doc.concat\n           [\n             Doc.lbracket;\n             Doc.indent\n               (Doc.concat [opening_symbol; cases; closing_symbol; labels]);\n             Doc.soft_line;\n             Doc.rbracket;\n           ])\n  in\n  let should_print_its_own_attributes =\n    match typ_expr.ptyp_desc with\n    | Ptyp_arrow _ (* es6 arrow types print their own attributes *) -> true\n    | _ -> false\n  in\n  let doc =\n    match typ_expr.ptyp_attributes with\n    | _ :: _ as attrs when not should_print_its_own_attributes ->\n      Doc.group\n        (Doc.concat [print_attributes ~state attrs cmt_tbl; rendered_type])\n    | _ -> rendered_type\n  in\n  print_comments doc cmt_tbl typ_expr.ptyp_loc\n\nand print_object ~state ~inline fields open_flag cmt_tbl =\n  let doc =\n    match fields with\n    | [] ->\n      Doc.concat\n        [\n          Doc.lbrace;\n          (match open_flag with\n          | Asttypes.Closed -> Doc.dot\n          | Open -> Doc.dotdot);\n          Doc.rbrace;\n        ]\n    | fields ->\n      Doc.concat\n        [\n          Doc.lbrace;\n          (match open_flag with\n          
| Asttypes.Closed -> Doc.nil\n          | Open -> (\n            match fields with\n            (* handle `type t = {.. ...objType, \"x\": int}`\n             * .. and ... should have a space in between *)\n            | Oinherit _ :: _ -> Doc.text \".. \"\n            | _ -> Doc.dotdot));\n          Doc.indent\n            (Doc.concat\n               [\n                 Doc.soft_line;\n                 Doc.join\n                   ~sep:(Doc.concat [Doc.comma; Doc.line])\n                   (List.map\n                      (fun field -> print_object_field ~state field cmt_tbl)\n                      fields);\n               ]);\n          Doc.trailing_comma;\n          Doc.soft_line;\n          Doc.rbrace;\n        ]\n  in\n  if inline then doc else Doc.group doc\n\nand print_tuple_type ~state ~inline (types : Parsetree.core_type list) cmt_tbl =\n  let tuple =\n    Doc.concat\n      [\n        Doc.lparen;\n        Doc.indent\n          (Doc.concat\n             [\n               Doc.soft_line;\n               Doc.join\n                 ~sep:(Doc.concat [Doc.comma; Doc.line])\n                 (List.map\n                    (fun typexpr -> print_typ_expr ~state typexpr cmt_tbl)\n                    types);\n             ]);\n        Doc.trailing_comma;\n        Doc.soft_line;\n        Doc.rparen;\n      ]\n  in\n  if inline == false then Doc.group tuple else tuple\n\nand print_object_field ~state (field : Parsetree.object_field) cmt_tbl =\n  match field with\n  | Otag (label_loc, attrs, typ) ->\n    let lbl =\n      let doc = Doc.text (\"\\\"\" ^ label_loc.txt ^ \"\\\"\") in\n      print_comments doc cmt_tbl label_loc.loc\n    in\n    let doc =\n      Doc.concat\n        [\n          print_attributes ~state ~loc:label_loc.loc attrs cmt_tbl;\n          lbl;\n          Doc.text \": \";\n          print_typ_expr ~state typ cmt_tbl;\n        ]\n    in\n    let cmt_loc = {label_loc.loc with loc_end = typ.ptyp_loc.loc_end} in\n    print_comments doc cmt_tbl cmt_loc\n  | 
Oinherit typexpr ->\n    Doc.concat [Doc.dotdotdot; print_typ_expr ~state typexpr cmt_tbl]\n\n(* es6 arrow type arg\n * type t = (~foo: string, ~bar: float=?, unit) => unit\n * i.e. ~foo: string, ~bar: float *)\nand print_type_parameter ~state (attrs, lbl, typ) cmt_tbl =\n  (* Converting .ml code to .res requires processing uncurried attributes *)\n  let has_bs, attrs = ParsetreeViewer.process_bs_attribute attrs in\n  let dotted = if has_bs then Doc.concat [Doc.dot; Doc.space] else Doc.nil in\n  let attrs = print_attributes ~state attrs cmt_tbl in\n  let label =\n    match lbl with\n    | Asttypes.Nolabel -> Doc.nil\n    | Labelled lbl ->\n      Doc.concat [Doc.text \"~\"; print_ident_like lbl; Doc.text \": \"]\n    | Optional lbl ->\n      Doc.concat [Doc.text \"~\"; print_ident_like lbl; Doc.text \": \"]\n  in\n  let optional_indicator =\n    match lbl with\n    | Asttypes.Nolabel | Labelled _ -> Doc.nil\n    | Optional _lbl -> Doc.text \"=?\"\n  in\n  let loc, typ =\n    match typ.ptyp_attributes with\n    | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: attrs ->\n      ( {loc with loc_end = typ.ptyp_loc.loc_end},\n        {typ with ptyp_attributes = attrs} )\n    | _ -> (typ.ptyp_loc, typ)\n  in\n  let doc =\n    Doc.group\n      (Doc.concat\n         [\n           dotted;\n           attrs;\n           label;\n           print_typ_expr ~state typ cmt_tbl;\n           optional_indicator;\n         ])\n  in\n  print_comments doc cmt_tbl loc\n\nand print_value_binding ~state ~rec_flag (vb : Parsetree.value_binding) cmt_tbl\n    i =\n  let attrs =\n    print_attributes ~state ~loc:vb.pvb_pat.ppat_loc vb.pvb_attributes cmt_tbl\n  in\n  let header =\n    if i == 0 then Doc.concat [Doc.text \"let \"; rec_flag] else Doc.text \"and \"\n  in\n  match vb with\n  | {\n   pvb_pat =\n     {\n       ppat_desc =\n         Ppat_constraint (pattern, ({ptyp_desc = Ptyp_poly _} as pat_typ));\n     };\n   pvb_expr = {pexp_desc = Pexp_newtype _} as expr;\n  } -> (\n    let 
_uncurried, _attrs, parameters, return_expr =\n      ParsetreeViewer.fun_expr expr\n    in\n    let abstract_type =\n      match parameters with\n      | [NewTypes {locs = vars}] ->\n        Doc.concat\n          [\n            Doc.text \"type \";\n            Doc.join ~sep:Doc.space\n              (List.map (fun var -> Doc.text var.Asttypes.txt) vars);\n            Doc.dot;\n          ]\n      | _ -> Doc.nil\n    in\n    match return_expr.pexp_desc with\n    | Pexp_constraint (expr, typ) ->\n      Doc.group\n        (Doc.concat\n           [\n             attrs;\n             header;\n             print_pattern ~state pattern cmt_tbl;\n             Doc.text \":\";\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.line;\n                    abstract_type;\n                    Doc.space;\n                    print_typ_expr ~state typ cmt_tbl;\n                    Doc.text \" =\";\n                    Doc.concat\n                      [\n                        Doc.line;\n                        print_expression_with_comments ~state expr cmt_tbl;\n                      ];\n                  ]);\n           ])\n    | _ ->\n      (* Example:\n       * let cancel_and_collect_callbacks:\n       *   'a 'u 'c. 
(list<packed_callbacks>, promise<'a, 'u, 'c>) => list<packed_callbacks> =         *  (type x, callbacks_accumulator, p: promise<_, _, c>)\n       *)\n      Doc.group\n        (Doc.concat\n           [\n             attrs;\n             header;\n             print_pattern ~state pattern cmt_tbl;\n             Doc.text \":\";\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.line;\n                    abstract_type;\n                    Doc.space;\n                    print_typ_expr ~state pat_typ cmt_tbl;\n                    Doc.text \" =\";\n                    Doc.concat\n                      [\n                        Doc.line;\n                        print_expression_with_comments ~state expr cmt_tbl;\n                      ];\n                  ]);\n           ]))\n  | _ ->\n    let opt_braces, expr = ParsetreeViewer.process_braces_attr vb.pvb_expr in\n    let printed_expr =\n      let doc = print_expression_with_comments ~state vb.pvb_expr cmt_tbl in\n      match Parens.expr vb.pvb_expr with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc expr braces\n      | Nothing -> doc\n    in\n    let pattern_doc = print_pattern ~state vb.pvb_pat cmt_tbl in\n    (*\n     * we want to optimize the layout of one pipe:\n     *   let tbl = data->Js.Array2.reduce((map, curr) => {\n     *     ...\n     *   })\n     * important is that we don't do this for multiple pipes:\n     *   let decoratorTags =\n     *     items\n     *     ->Js.Array2.filter(items => {items.category === Decorators})\n     *     ->Belt.Array.map(...)\n     * Multiple pipes chained together lend themselves more towards the last layout.\n     *)\n    if ParsetreeViewer.is_single_pipe_expr vb.pvb_expr then\n      Doc.custom_layout\n        [\n          Doc.group\n            (Doc.concat\n               [\n                 attrs;\n                 header;\n                 pattern_doc;\n                 Doc.text \" 
=\";\n                 Doc.space;\n                 printed_expr;\n               ]);\n          Doc.group\n            (Doc.concat\n               [\n                 attrs;\n                 header;\n                 pattern_doc;\n                 Doc.text \" =\";\n                 Doc.indent (Doc.concat [Doc.line; printed_expr]);\n               ]);\n        ]\n    else\n      let should_indent =\n        match opt_braces with\n        | Some _ -> false\n        | _ -> (\n          ParsetreeViewer.is_binary_expression expr\n          ||\n          match vb.pvb_expr with\n          | {\n           pexp_attributes = [({Location.txt = \"res.ternary\"}, _)];\n           pexp_desc = Pexp_ifthenelse (if_expr, _, _);\n          } ->\n            ParsetreeViewer.is_binary_expression if_expr\n            || ParsetreeViewer.has_attributes if_expr.pexp_attributes\n          | {pexp_desc = Pexp_newtype _} -> false\n          | {pexp_attributes = [({Location.txt = \"res.taggedTemplate\"}, _)]} ->\n            false\n          | e ->\n            ParsetreeViewer.has_attributes e.pexp_attributes\n            || ParsetreeViewer.is_array_access e)\n      in\n      Doc.group\n        (Doc.concat\n           [\n             attrs;\n             header;\n             pattern_doc;\n             Doc.text \" =\";\n             (if should_indent then\n                Doc.indent (Doc.concat [Doc.line; printed_expr])\n              else Doc.concat [Doc.space; printed_expr]);\n           ])\n\nand print_package_type ~state ~print_module_keyword_and_parens\n    (package_type : Parsetree.package_type) cmt_tbl =\n  let doc =\n    match package_type with\n    | longident_loc, [] ->\n      Doc.group (Doc.concat [print_longident_location longident_loc cmt_tbl])\n    | longident_loc, package_constraints ->\n      Doc.group\n        (Doc.concat\n           [\n             print_longident_location longident_loc cmt_tbl;\n             print_package_constraints ~state package_constraints cmt_tbl;\n  
           Doc.soft_line;\n           ])\n  in\n  if print_module_keyword_and_parens then\n    Doc.concat [Doc.text \"module(\"; doc; Doc.rparen]\n  else doc\n\nand print_package_constraints ~state package_constraints cmt_tbl =\n  Doc.concat\n    [\n      Doc.text \" with\";\n      Doc.indent\n        (Doc.concat\n           [\n             Doc.line;\n             Doc.join ~sep:Doc.line\n               (List.mapi\n                  (fun i pc ->\n                    let longident, typexpr = pc in\n                    let cmt_loc =\n                      {\n                        longident.Asttypes.loc with\n                        loc_end = typexpr.Parsetree.ptyp_loc.loc_end;\n                      }\n                    in\n                    let doc = print_package_constraint ~state i cmt_tbl pc in\n                    print_comments doc cmt_tbl cmt_loc)\n                  package_constraints);\n           ]);\n    ]\n\nand print_package_constraint ~state i cmt_tbl (longident_loc, typ) =\n  let prefix = if i == 0 then Doc.text \"type \" else Doc.text \"and type \" in\n  Doc.concat\n    [\n      prefix;\n      print_longident_location longident_loc cmt_tbl;\n      Doc.text \" = \";\n      print_typ_expr ~state typ cmt_tbl;\n    ]\n\nand print_extension ~state ~at_module_lvl (string_loc, payload) cmt_tbl =\n  let txt = string_loc.Location.txt in\n  let ext_name =\n    let doc =\n      Doc.concat\n        [\n          Doc.text \"%\";\n          (if at_module_lvl then Doc.text \"%\" else Doc.nil);\n          Doc.text txt;\n        ]\n    in\n    print_comments doc cmt_tbl string_loc.Location.loc\n  in\n  Doc.group (Doc.concat [ext_name; print_payload ~state payload cmt_tbl])\n\nand print_pattern ~state (p : Parsetree.pattern) cmt_tbl =\n  let pattern_without_attributes =\n    match p.ppat_desc with\n    | Ppat_any -> Doc.text \"_\"\n    | Ppat_var var -> print_ident_like var.txt\n    | Ppat_constant c ->\n      let template_literal =\n        
ParsetreeViewer.has_template_literal_attr p.ppat_attributes\n      in\n      print_constant ~template_literal c\n    | Ppat_tuple patterns ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.lparen;\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                      (List.map\n                         (fun pat -> print_pattern ~state pat cmt_tbl)\n                         patterns);\n                  ]);\n             Doc.trailing_comma;\n             Doc.soft_line;\n             Doc.rparen;\n           ])\n    | Ppat_array [] ->\n      Doc.concat\n        [Doc.lbracket; print_comments_inside cmt_tbl p.ppat_loc; Doc.rbracket]\n    | Ppat_array patterns ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.text \"[\";\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                      (List.map\n                         (fun pat -> print_pattern ~state pat cmt_tbl)\n                         patterns);\n                  ]);\n             Doc.trailing_comma;\n             Doc.soft_line;\n             Doc.text \"]\";\n           ])\n    | Ppat_construct ({txt = Longident.Lident \"()\"}, _) ->\n      Doc.concat\n        [Doc.lparen; print_comments_inside cmt_tbl p.ppat_loc; Doc.rparen]\n    | Ppat_construct ({txt = Longident.Lident \"[]\"}, _) ->\n      Doc.concat\n        [Doc.text \"list{\"; print_comments_inside cmt_tbl p.ppat_loc; Doc.rbrace]\n    | Ppat_construct ({txt = Longident.Lident \"::\"}, _) ->\n      let patterns, tail =\n        ParsetreeViewer.collect_patterns_from_list_construct [] p\n      in\n      let should_hug =\n        match (patterns, tail) with\n        | [pat], {ppat_desc = 
Ppat_construct ({txt = Longident.Lident \"[]\"}, _)}\n          when ParsetreeViewer.is_huggable_pattern pat ->\n          true\n        | _ -> false\n      in\n      let children =\n        Doc.concat\n          [\n            (if should_hug then Doc.nil else Doc.soft_line);\n            Doc.join\n              ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n              (List.map (fun pat -> print_pattern ~state pat cmt_tbl) patterns);\n            (match tail.Parsetree.ppat_desc with\n            | Ppat_construct ({txt = Longident.Lident \"[]\"}, _) -> Doc.nil\n            | _ ->\n              let doc =\n                Doc.concat [Doc.text \"...\"; print_pattern ~state tail cmt_tbl]\n              in\n              let tail = print_comments doc cmt_tbl tail.ppat_loc in\n              Doc.concat [Doc.text \",\"; Doc.line; tail]);\n          ]\n      in\n      Doc.group\n        (Doc.concat\n           [\n             Doc.text \"list{\";\n             (if should_hug then children\n              else\n                Doc.concat\n                  [\n                    Doc.indent children;\n                    Doc.if_breaks (Doc.text \",\") Doc.nil;\n                    Doc.soft_line;\n                  ]);\n             Doc.rbrace;\n           ])\n    | Ppat_construct (constr_name, constructor_args) ->\n      let constr_name = print_longident_location constr_name cmt_tbl in\n      let args_doc =\n        match constructor_args with\n        | None -> Doc.nil\n        | Some\n            {\n              ppat_loc;\n              ppat_desc = Ppat_construct ({txt = Longident.Lident \"()\"}, _);\n            } ->\n          Doc.concat\n            [Doc.lparen; print_comments_inside cmt_tbl ppat_loc; Doc.rparen]\n        | Some {ppat_desc = Ppat_tuple []; ppat_loc = loc} ->\n          Doc.concat [Doc.lparen; print_comments_inside cmt_tbl loc; Doc.rparen]\n        (* Some((1, 2) *)\n        | Some {ppat_desc = Ppat_tuple [({ppat_desc = Ppat_tuple _} as arg)]} ->\n      
    Doc.concat [Doc.lparen; print_pattern ~state arg cmt_tbl; Doc.rparen]\n        | Some {ppat_desc = Ppat_tuple patterns} ->\n          Doc.concat\n            [\n              Doc.lparen;\n              Doc.indent\n                (Doc.concat\n                   [\n                     Doc.soft_line;\n                     Doc.join\n                       ~sep:(Doc.concat [Doc.comma; Doc.line])\n                       (List.map\n                          (fun pat -> print_pattern ~state pat cmt_tbl)\n                          patterns);\n                   ]);\n              Doc.trailing_comma;\n              Doc.soft_line;\n              Doc.rparen;\n            ]\n        | Some arg ->\n          let arg_doc = print_pattern ~state arg cmt_tbl in\n          let should_hug = ParsetreeViewer.is_huggable_pattern arg in\n          Doc.concat\n            [\n              Doc.lparen;\n              (if should_hug then arg_doc\n               else\n                 Doc.concat\n                   [\n                     Doc.indent (Doc.concat [Doc.soft_line; arg_doc]);\n                     Doc.trailing_comma;\n                     Doc.soft_line;\n                   ]);\n              Doc.rparen;\n            ]\n      in\n      Doc.group (Doc.concat [constr_name; args_doc])\n    | Ppat_variant (label, None) ->\n      Doc.concat [Doc.text \"#\"; print_poly_var_ident label]\n    | Ppat_variant (label, variant_args) ->\n      let variant_name =\n        Doc.concat [Doc.text \"#\"; print_poly_var_ident label]\n      in\n      let args_doc =\n        match variant_args with\n        | None -> Doc.nil\n        | Some {ppat_desc = Ppat_construct ({txt = Longident.Lident \"()\"}, _)}\n          ->\n          Doc.text \"()\"\n        | Some {ppat_desc = Ppat_tuple []; ppat_loc = loc} ->\n          Doc.concat [Doc.lparen; print_comments_inside cmt_tbl loc; Doc.rparen]\n        (* Some((1, 2) *)\n        | Some {ppat_desc = Ppat_tuple [({ppat_desc = Ppat_tuple _} as arg)]} ->\n   
       Doc.concat [Doc.lparen; print_pattern ~state arg cmt_tbl; Doc.rparen]\n        | Some {ppat_desc = Ppat_tuple patterns} ->\n          Doc.concat\n            [\n              Doc.lparen;\n              Doc.indent\n                (Doc.concat\n                   [\n                     Doc.soft_line;\n                     Doc.join\n                       ~sep:(Doc.concat [Doc.comma; Doc.line])\n                       (List.map\n                          (fun pat -> print_pattern ~state pat cmt_tbl)\n                          patterns);\n                   ]);\n              Doc.trailing_comma;\n              Doc.soft_line;\n              Doc.rparen;\n            ]\n        | Some arg ->\n          let arg_doc = print_pattern ~state arg cmt_tbl in\n          let should_hug = ParsetreeViewer.is_huggable_pattern arg in\n          Doc.concat\n            [\n              Doc.lparen;\n              (if should_hug then arg_doc\n               else\n                 Doc.concat\n                   [\n                     Doc.indent (Doc.concat [Doc.soft_line; arg_doc]);\n                     Doc.trailing_comma;\n                     Doc.soft_line;\n                   ]);\n              Doc.rparen;\n            ]\n      in\n      Doc.group (Doc.concat [variant_name; args_doc])\n    | Ppat_type ident ->\n      Doc.concat [Doc.text \"#...\"; print_ident_path ident cmt_tbl]\n    | Ppat_record (rows, open_flag) ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.lbrace;\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                      (List.map\n                         (fun row ->\n                           print_pattern_record_row ~state row cmt_tbl)\n                         rows);\n                    (match open_flag with\n                    | Open -> Doc.concat [Doc.text \",\"; 
Doc.line; Doc.text \"_\"]\n                    | Closed -> Doc.nil);\n                  ]);\n             Doc.if_breaks (Doc.text \",\") Doc.nil;\n             Doc.soft_line;\n             Doc.rbrace;\n           ])\n    | Ppat_exception p ->\n      let needs_parens =\n        match p.ppat_desc with\n        | Ppat_or (_, _) | Ppat_alias (_, _) -> true\n        | _ -> false\n      in\n      let pat =\n        let p = print_pattern ~state p cmt_tbl in\n        if needs_parens then Doc.concat [Doc.text \"(\"; p; Doc.text \")\"] else p\n      in\n      Doc.group (Doc.concat [Doc.text \"exception\"; Doc.line; pat])\n    | Ppat_or _ ->\n      (* Blue | Red | Green -> [Blue; Red; Green] *)\n      let or_chain = ParsetreeViewer.collect_or_pattern_chain p in\n      let docs =\n        List.mapi\n          (fun i pat ->\n            let pattern_doc = print_pattern ~state pat cmt_tbl in\n            Doc.concat\n              [\n                (if i == 0 then Doc.nil else Doc.concat [Doc.line; Doc.text \"| \"]);\n                (match pat.ppat_desc with\n                (* (Blue | Red) | (Green | Black) | White *)\n                | Ppat_or _ -> add_parens pattern_doc\n                | _ -> pattern_doc);\n              ])\n          or_chain\n      in\n      let is_spread_over_multiple_lines =\n        match (or_chain, List.rev or_chain) with\n        | first :: _, last :: _ ->\n          first.ppat_loc.loc_start.pos_lnum < last.ppat_loc.loc_end.pos_lnum\n        | _ -> false\n      in\n      Doc.breakable_group ~force_break:is_spread_over_multiple_lines\n        (Doc.concat docs)\n    | Ppat_extension ext ->\n      print_extension ~state ~at_module_lvl:false ext cmt_tbl\n    | Ppat_lazy p ->\n      let needs_parens =\n        match p.ppat_desc with\n        | Ppat_or (_, _) | Ppat_alias (_, _) -> true\n        | _ -> false\n      in\n      let pat =\n        let p = print_pattern ~state p cmt_tbl in\n        if needs_parens then Doc.concat [Doc.text \"(\"; p; Doc.text 
\")\"] else p\n      in\n      Doc.concat [Doc.text \"lazy \"; pat]\n    | Ppat_alias (p, alias_loc) ->\n      let needs_parens =\n        match p.ppat_desc with\n        | Ppat_or (_, _) | Ppat_alias (_, _) -> true\n        | _ -> false\n      in\n      let rendered_pattern =\n        let p = print_pattern ~state p cmt_tbl in\n        if needs_parens then Doc.concat [Doc.text \"(\"; p; Doc.text \")\"] else p\n      in\n      Doc.concat\n        [rendered_pattern; Doc.text \" as \"; print_string_loc alias_loc cmt_tbl]\n    (* Note: module(P : S) is represented as *)\n    (* Ppat_constraint(Ppat_unpack, Ptyp_package) *)\n    | Ppat_constraint\n        ( {ppat_desc = Ppat_unpack string_loc},\n          {ptyp_desc = Ptyp_package package_type; ptyp_loc} ) ->\n      Doc.concat\n        [\n          Doc.text \"module(\";\n          print_comments (Doc.text string_loc.txt) cmt_tbl string_loc.loc;\n          Doc.text \": \";\n          print_comments\n            (print_package_type ~state ~print_module_keyword_and_parens:false\n               package_type cmt_tbl)\n            cmt_tbl ptyp_loc;\n          Doc.rparen;\n        ]\n    | Ppat_constraint (pattern, typ) ->\n      Doc.concat\n        [\n          print_pattern ~state pattern cmt_tbl;\n          Doc.text \": \";\n          print_typ_expr ~state typ cmt_tbl;\n        ]\n    (* Note: module(P : S) is represented as *)\n    (* Ppat_constraint(Ppat_unpack, Ptyp_package) *)\n    | Ppat_unpack string_loc ->\n      Doc.concat\n        [\n          Doc.text \"module(\";\n          print_comments (Doc.text string_loc.txt) cmt_tbl string_loc.loc;\n          Doc.rparen;\n        ]\n    | Ppat_interval (a, b) ->\n      Doc.concat [print_constant a; Doc.text \" .. 
\"; print_constant b]\n    | Ppat_open _ -> Doc.nil\n  in\n  let doc =\n    match p.ppat_attributes with\n    | [] -> pattern_without_attributes\n    | attrs ->\n      Doc.group\n        (Doc.concat\n           [print_attributes ~state attrs cmt_tbl; pattern_without_attributes])\n  in\n  print_comments doc cmt_tbl p.ppat_loc\n\nand print_pattern_record_row ~state row cmt_tbl =\n  match row with\n  (* punned {x}*)\n  | ( ({Location.txt = Longident.Lident ident} as longident),\n      {Parsetree.ppat_desc = Ppat_var {txt; _}; ppat_attributes} )\n    when ident = txt ->\n    Doc.concat\n      [\n        print_optional_label ppat_attributes;\n        print_attributes ~state ppat_attributes cmt_tbl;\n        print_lident_path longident cmt_tbl;\n      ]\n  | longident, pattern ->\n    let loc_for_comments =\n      {longident.loc with loc_end = pattern.Parsetree.ppat_loc.loc_end}\n    in\n    let rhs_doc =\n      let doc = print_pattern ~state pattern cmt_tbl in\n      let doc =\n        if Parens.pattern_record_row_rhs pattern then add_parens doc else doc\n      in\n      Doc.concat [print_optional_label pattern.ppat_attributes; doc]\n    in\n    let doc =\n      Doc.group\n        (Doc.concat\n           [\n             print_lident_path longident cmt_tbl;\n             Doc.text \":\";\n             (if ParsetreeViewer.is_huggable_pattern pattern then\n                Doc.concat [Doc.space; rhs_doc]\n              else Doc.indent (Doc.concat [Doc.line; rhs_doc]));\n           ])\n    in\n    print_comments doc cmt_tbl loc_for_comments\n\nand print_expression_with_comments ~state expr cmt_tbl : Doc.t =\n  let doc = print_expression ~state expr cmt_tbl in\n  print_comments doc cmt_tbl expr.Parsetree.pexp_loc\n\nand print_if_chain ~state pexp_attributes ifs else_expr cmt_tbl =\n  let if_docs =\n    Doc.join ~sep:Doc.space\n      (List.mapi\n         (fun i (outer_loc, if_expr, then_expr) ->\n           let if_txt = if i > 0 then Doc.text \"else if \" else Doc.text \"if \" 
in\n           let doc =\n             match if_expr with\n             | ParsetreeViewer.If if_expr ->\n               let condition =\n                 if ParsetreeViewer.is_block_expr if_expr then\n                   print_expression_block ~state ~braces:true if_expr cmt_tbl\n                 else\n                   let doc =\n                     print_expression_with_comments ~state if_expr cmt_tbl\n                   in\n                   match Parens.expr if_expr with\n                   | Parens.Parenthesized -> add_parens doc\n                   | Braced braces -> print_braces doc if_expr braces\n                   | Nothing -> Doc.if_breaks (add_parens doc) doc\n               in\n               Doc.concat\n                 [\n                   if_txt;\n                   Doc.group condition;\n                   Doc.space;\n                   (let then_expr =\n                      match ParsetreeViewer.process_braces_attr then_expr with\n                      (* This case only happens when coming from Reason, we strip braces *)\n                      | Some _, expr -> expr\n                      | _ -> then_expr\n                    in\n                    print_expression_block ~state ~braces:true then_expr cmt_tbl);\n                 ]\n             | IfLet (pattern, condition_expr) ->\n               let condition_doc =\n                 let doc =\n                   print_expression_with_comments ~state condition_expr cmt_tbl\n                 in\n                 match Parens.expr condition_expr with\n                 | Parens.Parenthesized -> add_parens doc\n                 | Braced braces -> print_braces doc condition_expr braces\n                 | Nothing -> doc\n               in\n               Doc.concat\n                 [\n                   if_txt;\n                   Doc.text \"let \";\n                   print_pattern ~state pattern cmt_tbl;\n                   Doc.text \" = \";\n                   condition_doc;\n                   
Doc.space;\n                   print_expression_block ~state ~braces:true then_expr cmt_tbl;\n                 ]\n           in\n           print_leading_comments doc cmt_tbl.leading outer_loc)\n         ifs)\n  in\n  let else_doc =\n    match else_expr with\n    | None -> Doc.nil\n    | Some expr ->\n      Doc.concat\n        [\n          Doc.text \" else \";\n          print_expression_block ~state ~braces:true expr cmt_tbl;\n        ]\n  in\n  let attrs = ParsetreeViewer.filter_fragile_match_attributes pexp_attributes in\n  Doc.concat [print_attributes ~state attrs cmt_tbl; if_docs; else_doc]\n\nand print_expression ~state (e : Parsetree.expression) cmt_tbl =\n  let print_arrow e =\n    let uncurried, attrs_on_arrow, parameters, return_expr =\n      ParsetreeViewer.fun_expr e\n    in\n    let ParsetreeViewer.{async; bs; attributes = attrs} =\n      ParsetreeViewer.process_function_attributes attrs_on_arrow\n    in\n    let uncurried = uncurried || bs in\n    let return_expr, typ_constraint =\n      match return_expr.pexp_desc with\n      | Pexp_constraint (expr, typ) ->\n        ( {\n            expr with\n            pexp_attributes =\n              List.concat [expr.pexp_attributes; return_expr.pexp_attributes];\n          },\n          Some typ )\n      | _ -> (return_expr, None)\n    in\n    let has_constraint =\n      match typ_constraint with\n      | Some _ -> true\n      | None -> false\n    in\n    let parameters_doc =\n      print_expr_fun_parameters ~state ~in_callback:NoCallback ~uncurried ~async\n        ~has_constraint parameters cmt_tbl\n    in\n    let return_expr_doc =\n      let opt_braces, _ = ParsetreeViewer.process_braces_attr return_expr in\n      let should_inline =\n        match (return_expr.pexp_desc, opt_braces) with\n        | _, Some _ -> true\n        | ( ( Pexp_array _ | Pexp_tuple _\n            | Pexp_construct (_, Some _)\n            | Pexp_record _ ),\n            _ ) ->\n          true\n        | _ -> false\n      in\n      
let should_indent =\n        match return_expr.pexp_desc with\n        | Pexp_sequence _ | Pexp_let _ | Pexp_letmodule _ | Pexp_letexception _\n        | Pexp_open _ ->\n          false\n        | _ -> true\n      in\n      let return_doc =\n        let doc = print_expression_with_comments ~state return_expr cmt_tbl in\n        match Parens.expr return_expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc return_expr braces\n        | Nothing -> doc\n      in\n      if should_inline then Doc.concat [Doc.space; return_doc]\n      else\n        Doc.group\n          (if should_indent then Doc.indent (Doc.concat [Doc.line; return_doc])\n           else Doc.concat [Doc.space; return_doc])\n    in\n    let typ_constraint_doc =\n      match typ_constraint with\n      | Some typ ->\n        let typ_doc =\n          let doc = print_typ_expr ~state typ cmt_tbl in\n          if Parens.arrow_return_typ_expr typ then add_parens doc else doc\n        in\n        Doc.concat [Doc.text \": \"; typ_doc]\n      | _ -> Doc.nil\n    in\n    let attrs = print_attributes ~state attrs cmt_tbl in\n    Doc.group\n      (Doc.concat\n         [\n           attrs;\n           parameters_doc;\n           typ_constraint_doc;\n           Doc.text \" =>\";\n           return_expr_doc;\n         ])\n  in\n  let uncurried = Ast_uncurried.expr_is_uncurried_fun e in\n  let e_fun =\n    if uncurried then Ast_uncurried.expr_extract_uncurried_fun e else e\n  in\n  let printed_expression =\n    match e_fun.pexp_desc with\n    | Pexp_fun\n        ( Nolabel,\n          None,\n          {ppat_desc = Ppat_var {txt = \"__x\"}},\n          {pexp_desc = Pexp_apply _} )\n    | Pexp_construct\n        ( {txt = Lident \"Function$\"},\n          Some\n            {\n              pexp_desc =\n                Pexp_fun\n                  ( Nolabel,\n                    None,\n                    {ppat_desc = Ppat_var {txt = \"__x\"}},\n                    {pexp_desc = 
Pexp_apply _} );\n            } ) ->\n      (* (__x) => f(a, __x, c) -----> f(a, _, c)  *)\n      print_expression_with_comments ~state\n        (ParsetreeViewer.rewrite_underscore_apply e_fun)\n        cmt_tbl\n    | Pexp_fun _ | Pexp_newtype _ -> print_arrow e\n    | Parsetree.Pexp_constant c ->\n      print_constant ~template_literal:(ParsetreeViewer.is_template_literal e) c\n    | Pexp_construct _ when ParsetreeViewer.has_jsx_attribute e.pexp_attributes\n      ->\n      print_jsx_fragment ~state e cmt_tbl\n    | Pexp_construct ({txt = Longident.Lident \"()\"}, _) -> Doc.text \"()\"\n    | Pexp_construct ({txt = Longident.Lident \"[]\"}, _) ->\n      Doc.concat\n        [Doc.text \"list{\"; print_comments_inside cmt_tbl e.pexp_loc; Doc.rbrace]\n    | Pexp_construct ({txt = Longident.Lident \"::\"}, _) ->\n      let expressions, spread = ParsetreeViewer.collect_list_expressions e in\n      let spread_doc =\n        match spread with\n        | Some expr ->\n          Doc.concat\n            [\n              Doc.text \",\";\n              Doc.line;\n              Doc.dotdotdot;\n              (let doc = print_expression_with_comments ~state expr cmt_tbl in\n               match Parens.expr expr with\n               | Parens.Parenthesized -> add_parens doc\n               | Braced braces -> print_braces doc expr braces\n               | Nothing -> doc);\n            ]\n        | None -> Doc.nil\n      in\n      Doc.group\n        (Doc.concat\n           [\n             Doc.text \"list{\";\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                      (List.map\n                         (fun expr ->\n                           let doc =\n                             print_expression_with_comments ~state expr cmt_tbl\n                           in\n                           match Parens.expr expr 
with\n                           | Parens.Parenthesized -> add_parens doc\n                           | Braced braces -> print_braces doc expr braces\n                           | Nothing -> doc)\n                         expressions);\n                    spread_doc;\n                  ]);\n             Doc.trailing_comma;\n             Doc.soft_line;\n             Doc.rbrace;\n           ])\n    | Pexp_construct (longident_loc, args) ->\n      let constr = print_longident_location longident_loc cmt_tbl in\n      let args =\n        match args with\n        | None -> Doc.nil\n        | Some {pexp_desc = Pexp_construct ({txt = Longident.Lident \"()\"}, _)}\n          ->\n          Doc.text \"()\"\n        (* Some((1, 2)) *)\n        | Some {pexp_desc = Pexp_tuple [({pexp_desc = Pexp_tuple _} as arg)]} ->\n          Doc.concat\n            [\n              Doc.lparen;\n              (let doc = print_expression_with_comments ~state arg cmt_tbl in\n               match Parens.expr arg with\n               | Parens.Parenthesized -> add_parens doc\n               | Braced braces -> print_braces doc arg braces\n               | Nothing -> doc);\n              Doc.rparen;\n            ]\n        | Some {pexp_desc = Pexp_tuple args} ->\n          Doc.concat\n            [\n              Doc.lparen;\n              Doc.indent\n                (Doc.concat\n                   [\n                     Doc.soft_line;\n                     Doc.join\n                       ~sep:(Doc.concat [Doc.comma; Doc.line])\n                       (List.map\n                          (fun expr ->\n                            let doc =\n                              print_expression_with_comments ~state expr cmt_tbl\n                            in\n                            match Parens.expr expr with\n                            | Parens.Parenthesized -> add_parens doc\n                            | Braced braces -> print_braces doc expr braces\n                            | Nothing -> 
doc)\n                          args);\n                   ]);\n              Doc.trailing_comma;\n              Doc.soft_line;\n              Doc.rparen;\n            ]\n        | Some arg ->\n          let arg_doc =\n            let doc = print_expression_with_comments ~state arg cmt_tbl in\n            match Parens.expr arg with\n            | Parens.Parenthesized -> add_parens doc\n            | Braced braces -> print_braces doc arg braces\n            | Nothing -> doc\n          in\n          let should_hug = ParsetreeViewer.is_huggable_expression arg in\n          Doc.concat\n            [\n              Doc.lparen;\n              (if should_hug then arg_doc\n               else\n                 Doc.concat\n                   [\n                     Doc.indent (Doc.concat [Doc.soft_line; arg_doc]);\n                     Doc.trailing_comma;\n                     Doc.soft_line;\n                   ]);\n              Doc.rparen;\n            ]\n      in\n      Doc.group (Doc.concat [constr; args])\n    | Pexp_ident path -> print_lident_path path cmt_tbl\n    | Pexp_tuple exprs ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.lparen;\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                      (List.map\n                         (fun expr ->\n                           let doc =\n                             print_expression_with_comments ~state expr cmt_tbl\n                           in\n                           match Parens.expr expr with\n                           | Parens.Parenthesized -> add_parens doc\n                           | Braced braces -> print_braces doc expr braces\n                           | Nothing -> doc)\n                         exprs);\n                  ]);\n             Doc.if_breaks (Doc.text \",\") Doc.nil;\n             Doc.soft_line;\n   
          Doc.rparen;\n           ])\n    | Pexp_array [] ->\n      Doc.concat\n        [Doc.lbracket; print_comments_inside cmt_tbl e.pexp_loc; Doc.rbracket]\n    | Pexp_array exprs ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.lbracket;\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                      (List.map\n                         (fun expr ->\n                           let doc =\n                             print_expression_with_comments ~state expr cmt_tbl\n                           in\n                           match Parens.expr expr with\n                           | Parens.Parenthesized -> add_parens doc\n                           | Braced braces -> print_braces doc expr braces\n                           | Nothing -> doc)\n                         exprs);\n                  ]);\n             Doc.trailing_comma;\n             Doc.soft_line;\n             Doc.rbracket;\n           ])\n    | Pexp_variant (label, args) ->\n      let variant_name =\n        Doc.concat [Doc.text \"#\"; print_poly_var_ident label]\n      in\n      let args =\n        match args with\n        | None -> Doc.nil\n        | Some {pexp_desc = Pexp_construct ({txt = Longident.Lident \"()\"}, _)}\n          ->\n          Doc.text \"()\"\n        (* #poly((1, 2) *)\n        | Some {pexp_desc = Pexp_tuple [({pexp_desc = Pexp_tuple _} as arg)]} ->\n          Doc.concat\n            [\n              Doc.lparen;\n              (let doc = print_expression_with_comments ~state arg cmt_tbl in\n               match Parens.expr arg with\n               | Parens.Parenthesized -> add_parens doc\n               | Braced braces -> print_braces doc arg braces\n               | Nothing -> doc);\n              Doc.rparen;\n            ]\n        | Some {pexp_desc = Pexp_tuple args} ->\n          
Doc.concat\n            [\n              Doc.lparen;\n              Doc.indent\n                (Doc.concat\n                   [\n                     Doc.soft_line;\n                     Doc.join\n                       ~sep:(Doc.concat [Doc.comma; Doc.line])\n                       (List.map\n                          (fun expr ->\n                            let doc =\n                              print_expression_with_comments ~state expr cmt_tbl\n                            in\n                            match Parens.expr expr with\n                            | Parens.Parenthesized -> add_parens doc\n                            | Braced braces -> print_braces doc expr braces\n                            | Nothing -> doc)\n                          args);\n                   ]);\n              Doc.trailing_comma;\n              Doc.soft_line;\n              Doc.rparen;\n            ]\n        | Some arg ->\n          let arg_doc =\n            let doc = print_expression_with_comments ~state arg cmt_tbl in\n            match Parens.expr arg with\n            | Parens.Parenthesized -> add_parens doc\n            | Braced braces -> print_braces doc arg braces\n            | Nothing -> doc\n          in\n          let should_hug = ParsetreeViewer.is_huggable_expression arg in\n          Doc.concat\n            [\n              Doc.lparen;\n              (if should_hug then arg_doc\n               else\n                 Doc.concat\n                   [\n                     Doc.indent (Doc.concat [Doc.soft_line; arg_doc]);\n                     Doc.trailing_comma;\n                     Doc.soft_line;\n                   ]);\n              Doc.rparen;\n            ]\n      in\n      Doc.group (Doc.concat [variant_name; args])\n    | Pexp_record (rows, spread_expr) ->\n      if rows = [] then\n        Doc.concat\n          [Doc.lbrace; print_comments_inside cmt_tbl e.pexp_loc; Doc.rbrace]\n      else\n        let spread =\n          match spread_expr with\n        
  | None -> Doc.nil\n          | Some ({pexp_desc} as expr) ->\n            let doc =\n              match pexp_desc with\n              | Pexp_ident {txt = expr} -> print_lident expr\n              | _ -> print_expression ~state expr cmt_tbl\n            in\n            let doc_with_spread =\n              Doc.concat\n                [\n                  Doc.dotdotdot;\n                  (match Parens.expr expr with\n                  | Parens.Parenthesized -> add_parens doc\n                  | Braced braces -> print_braces doc expr braces\n                  | Nothing -> doc);\n                ]\n            in\n            Doc.concat\n              [\n                print_comments doc_with_spread cmt_tbl expr.Parsetree.pexp_loc;\n                Doc.comma;\n                Doc.line;\n              ]\n        in\n        (* If the record is written over multiple lines, break automatically\n         * `let x = {a: 1, b: 3}` -> same line, break when line-width exceeded\n         * `let x = {\n         *   a: 1,\n         *   b: 2,\n         *  }` -> record is written on multiple lines, break the group *)\n        let force_break =\n          e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum\n        in\n        let punning_allowed =\n          match (spread_expr, rows) with\n          | None, [_] -> false (* disallow punning for single-element records *)\n          | _ -> true\n        in\n        Doc.breakable_group ~force_break\n          (Doc.concat\n             [\n               Doc.lbrace;\n               Doc.indent\n                 (Doc.concat\n                    [\n                      Doc.soft_line;\n                      spread;\n                      Doc.join\n                        ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                        (List.map\n                           (fun row ->\n                             print_expression_record_row ~state row cmt_tbl\n                               punning_allowed)\n               
            rows);\n                    ]);\n               Doc.trailing_comma;\n               Doc.soft_line;\n               Doc.rbrace;\n             ])\n    | Pexp_extension extension -> (\n      match extension with\n      | ( {txt = \"obj\"},\n          PStr\n            [\n              {\n                pstr_loc = loc;\n                pstr_desc = Pstr_eval ({pexp_desc = Pexp_record (rows, _)}, []);\n              };\n            ] ) ->\n        (* If the object is written over multiple lines, break automatically\n         * `let x = {\"a\": 1, \"b\": 3}` -> same line, break when line-width exceeded\n         * `let x = {\n         *   \"a\": 1,\n         *   \"b\": 2,\n         *  }` -> object is written on multiple lines, break the group *)\n        let force_break = loc.loc_start.pos_lnum < loc.loc_end.pos_lnum in\n        Doc.breakable_group ~force_break\n          (Doc.concat\n             [\n               Doc.lbrace;\n               Doc.indent\n                 (Doc.concat\n                    [\n                      Doc.soft_line;\n                      Doc.join\n                        ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                        (List.map\n                           (fun row -> print_bs_object_row ~state row cmt_tbl)\n                           rows);\n                    ]);\n               Doc.trailing_comma;\n               Doc.soft_line;\n               Doc.rbrace;\n             ])\n      | extension ->\n        print_extension ~state ~at_module_lvl:false extension cmt_tbl)\n    | Pexp_apply (e, [(Nolabel, {pexp_desc = Pexp_array sub_lists})])\n      when ParsetreeViewer.is_spread_belt_array_concat e ->\n      print_belt_array_concat_apply ~state sub_lists cmt_tbl\n    | Pexp_apply (e, [(Nolabel, {pexp_desc = Pexp_array sub_lists})])\n      when ParsetreeViewer.is_spread_belt_list_concat e ->\n      print_belt_list_concat_apply ~state sub_lists cmt_tbl\n    | Pexp_apply (call_expr, args) ->\n      if 
ParsetreeViewer.is_unary_expression e then\n        print_unary_expression ~state e cmt_tbl\n      else if ParsetreeViewer.is_template_literal e then\n        print_template_literal ~state e cmt_tbl\n      else if ParsetreeViewer.is_tagged_template_literal e then\n        print_tagged_template_literal ~state call_expr args cmt_tbl\n      else if ParsetreeViewer.is_binary_expression e then\n        print_binary_expression ~state e cmt_tbl\n      else print_pexp_apply ~state e cmt_tbl\n    | Pexp_unreachable -> Doc.dot\n    | Pexp_field (expr, longident_loc) ->\n      let lhs =\n        let doc = print_expression_with_comments ~state expr cmt_tbl in\n        match Parens.field_expr expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc expr braces\n        | Nothing -> doc\n      in\n      Doc.concat [lhs; Doc.dot; print_lident_path longident_loc cmt_tbl]\n    | Pexp_setfield (expr1, longident_loc, expr2) ->\n      print_set_field_expr ~state e.pexp_attributes expr1 longident_loc expr2\n        e.pexp_loc cmt_tbl\n    | Pexp_ifthenelse (_ifExpr, _thenExpr, _elseExpr)\n      when ParsetreeViewer.is_ternary_expr e ->\n      let parts, alternate = ParsetreeViewer.collect_ternary_parts e in\n      let ternary_doc =\n        match parts with\n        | (condition1, consequent1) :: rest ->\n          Doc.group\n            (Doc.concat\n               [\n                 print_ternary_operand ~state condition1 cmt_tbl;\n                 Doc.indent\n                   (Doc.concat\n                      [\n                        Doc.line;\n                        Doc.indent\n                          (Doc.concat\n                             [\n                               Doc.text \"? 
\";\n                               print_ternary_operand ~state consequent1 cmt_tbl;\n                             ]);\n                        Doc.concat\n                          (List.map\n                             (fun (condition, consequent) ->\n                               Doc.concat\n                                 [\n                                   Doc.line;\n                                   Doc.text \": \";\n                                   print_ternary_operand ~state condition\n                                     cmt_tbl;\n                                   Doc.line;\n                                   Doc.text \"? \";\n                                   print_ternary_operand ~state consequent\n                                     cmt_tbl;\n                                 ])\n                             rest);\n                        Doc.line;\n                        Doc.text \": \";\n                        Doc.indent\n                          (print_ternary_operand ~state alternate cmt_tbl);\n                      ]);\n               ])\n        | _ -> Doc.nil\n      in\n      let attrs = ParsetreeViewer.filter_ternary_attributes e.pexp_attributes in\n      let needs_parens =\n        match ParsetreeViewer.filter_parsing_attrs attrs with\n        | [] -> false\n        | _ -> true\n      in\n      Doc.concat\n        [\n          print_attributes ~state attrs cmt_tbl;\n          (if needs_parens then add_parens ternary_doc else ternary_doc);\n        ]\n    | Pexp_ifthenelse (_ifExpr, _thenExpr, _elseExpr) ->\n      let ifs, else_expr = ParsetreeViewer.collect_if_expressions e in\n      print_if_chain ~state e.pexp_attributes ifs else_expr cmt_tbl\n    | Pexp_while (expr1, expr2) ->\n      let condition =\n        let doc = print_expression_with_comments ~state expr1 cmt_tbl in\n        match Parens.expr expr1 with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc expr1 braces\n        | 
Nothing -> doc\n      in\n      Doc.breakable_group ~force_break:true\n        (Doc.concat\n           [\n             Doc.text \"while \";\n             (if ParsetreeViewer.is_block_expr expr1 then condition\n              else Doc.group (Doc.if_breaks (add_parens condition) condition));\n             Doc.space;\n             print_expression_block ~state ~braces:true expr2 cmt_tbl;\n           ])\n    | Pexp_for (pattern, from_expr, to_expr, direction_flag, body) ->\n      Doc.breakable_group ~force_break:true\n        (Doc.concat\n           [\n             Doc.text \"for \";\n             print_pattern ~state pattern cmt_tbl;\n             Doc.text \" in \";\n             (let doc =\n                print_expression_with_comments ~state from_expr cmt_tbl\n              in\n              match Parens.expr from_expr with\n              | Parens.Parenthesized -> add_parens doc\n              | Braced braces -> print_braces doc from_expr braces\n              | Nothing -> doc);\n             print_direction_flag direction_flag;\n             (let doc = print_expression_with_comments ~state to_expr cmt_tbl in\n              match Parens.expr to_expr with\n              | Parens.Parenthesized -> add_parens doc\n              | Braced braces -> print_braces doc to_expr braces\n              | Nothing -> doc);\n             Doc.space;\n             print_expression_block ~state ~braces:true body cmt_tbl;\n           ])\n    | Pexp_constraint\n        ( {pexp_desc = Pexp_pack mod_expr},\n          {ptyp_desc = Ptyp_package package_type; ptyp_loc} ) ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.text \"module(\";\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    print_mod_expr ~state mod_expr cmt_tbl;\n                    Doc.text \": \";\n                    print_comments\n                      (print_package_type ~state\n                         
~print_module_keyword_and_parens:false package_type\n                         cmt_tbl)\n                      cmt_tbl ptyp_loc;\n                  ]);\n             Doc.soft_line;\n             Doc.rparen;\n           ])\n    | Pexp_constraint (expr, typ) ->\n      let expr_doc =\n        let doc = print_expression_with_comments ~state expr cmt_tbl in\n        match Parens.expr expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc expr braces\n        | Nothing -> doc\n      in\n      Doc.concat [expr_doc; Doc.text \": \"; print_typ_expr ~state typ cmt_tbl]\n    | Pexp_letmodule ({txt = _modName}, _modExpr, _expr) ->\n      print_expression_block ~state ~braces:true e cmt_tbl\n    | Pexp_letexception (_extensionConstructor, _expr) ->\n      print_expression_block ~state ~braces:true e cmt_tbl\n    | Pexp_assert expr ->\n      let expr = print_expression_with_comments ~state expr cmt_tbl in\n      Doc.concat [Doc.text \"assert(\"; expr; Doc.text \")\"]\n    | Pexp_lazy expr ->\n      let rhs =\n        let doc = print_expression_with_comments ~state expr cmt_tbl in\n        match Parens.lazy_or_assert_or_await_expr_rhs expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc expr braces\n        | Nothing -> doc\n      in\n      Doc.group (Doc.concat [Doc.text \"lazy \"; rhs])\n    | Pexp_open (_overrideFlag, _longidentLoc, _expr) ->\n      print_expression_block ~state ~braces:true e cmt_tbl\n    | Pexp_pack mod_expr ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.text \"module(\";\n             Doc.indent\n               (Doc.concat\n                  [Doc.soft_line; print_mod_expr ~state mod_expr cmt_tbl]);\n             Doc.soft_line;\n             Doc.rparen;\n           ])\n    | Pexp_sequence _ -> print_expression_block ~state ~braces:true e cmt_tbl\n    | Pexp_let _ -> print_expression_block ~state ~braces:true e cmt_tbl\n    | Pexp_try 
(expr, cases) ->\n      let expr_doc =\n        let doc = print_expression_with_comments ~state expr cmt_tbl in\n        match Parens.expr expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc expr braces\n        | Nothing -> doc\n      in\n      Doc.concat\n        [\n          Doc.text \"try \";\n          expr_doc;\n          Doc.text \" catch \";\n          print_cases ~state cases cmt_tbl;\n        ]\n    | Pexp_match (_, [_; _]) when ParsetreeViewer.is_if_let_expr e ->\n      let ifs, else_expr = ParsetreeViewer.collect_if_expressions e in\n      print_if_chain ~state e.pexp_attributes ifs else_expr cmt_tbl\n    | Pexp_match (expr, cases) ->\n      let expr_doc =\n        let doc = print_expression_with_comments ~state expr cmt_tbl in\n        match Parens.expr expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc expr braces\n        | Nothing -> doc\n      in\n      Doc.concat\n        [\n          Doc.text \"switch \";\n          expr_doc;\n          Doc.space;\n          print_cases ~state cases cmt_tbl;\n        ]\n    | Pexp_function cases ->\n      Doc.concat [Doc.text \"x => switch x \"; print_cases ~state cases cmt_tbl]\n    | Pexp_coerce (expr, typ_opt, typ) ->\n      let doc_expr = print_expression_with_comments ~state expr cmt_tbl in\n      let doc_typ = print_typ_expr ~state typ cmt_tbl in\n      let of_type =\n        match typ_opt with\n        | None -> Doc.nil\n        | Some typ1 ->\n          Doc.concat [Doc.text \": \"; print_typ_expr ~state typ1 cmt_tbl]\n      in\n      Doc.concat\n        [Doc.lparen; doc_expr; of_type; Doc.text \" :> \"; doc_typ; Doc.rparen]\n    | Pexp_send (parent_expr, label) ->\n      let parent_doc =\n        let doc = print_expression_with_comments ~state parent_expr cmt_tbl in\n        match Parens.unary_expr_operand parent_expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> 
print_braces doc parent_expr braces\n        | Nothing -> doc\n      in\n      let member =\n        let member_doc =\n          print_comments (Doc.text label.txt) cmt_tbl label.loc\n        in\n        Doc.concat [Doc.text \"\\\"\"; member_doc; Doc.text \"\\\"\"]\n      in\n      Doc.group (Doc.concat [parent_doc; Doc.lbracket; member; Doc.rbracket])\n    | Pexp_new _ -> Doc.text \"Pexp_new not implemented in printer\"\n    | Pexp_setinstvar _ -> Doc.text \"Pexp_setinstvar not implemented in printer\"\n    | Pexp_override _ -> Doc.text \"Pexp_override not implemented in printer\"\n    | Pexp_poly _ -> Doc.text \"Pexp_poly not implemented in printer\"\n    | Pexp_object _ -> Doc.text \"Pexp_object not implemented in printer\"\n  in\n  let expr_with_await =\n    if ParsetreeViewer.has_await_attribute e.pexp_attributes then\n      let rhs =\n        match\n          Parens.lazy_or_assert_or_await_expr_rhs ~in_await:true\n            {\n              e with\n              pexp_attributes =\n                List.filter\n                  (function\n                    | {Location.txt = \"res.braces\" | \"ns.braces\"}, _ -> false\n                    | _ -> true)\n                  e.pexp_attributes;\n            }\n        with\n        | Parens.Parenthesized -> add_parens printed_expression\n        | Braced braces -> print_braces printed_expression e braces\n        | Nothing -> printed_expression\n      in\n      Doc.concat [Doc.text \"await \"; rhs]\n    else printed_expression\n  in\n  let should_print_its_own_attributes =\n    match e.pexp_desc with\n    | Pexp_apply _ | Pexp_fun _ | Pexp_newtype _ | Pexp_setfield _\n    | Pexp_ifthenelse _ ->\n      true\n    | Pexp_match _ when ParsetreeViewer.is_if_let_expr e -> true\n    | Pexp_construct _ when ParsetreeViewer.has_jsx_attribute e.pexp_attributes\n      ->\n      true\n    | _ -> false\n  in\n  match e.pexp_attributes with\n  | [] -> expr_with_await\n  | attrs when not should_print_its_own_attributes ->\n    
Doc.group\n      (Doc.concat [print_attributes ~state attrs cmt_tbl; expr_with_await])\n  | _ -> expr_with_await\n\nand print_pexp_fun ~state ~in_callback e cmt_tbl =\n  let uncurried, attrs_on_arrow, parameters, return_expr =\n    ParsetreeViewer.fun_expr e\n  in\n  let ParsetreeViewer.{async; bs; attributes = attrs} =\n    ParsetreeViewer.process_function_attributes attrs_on_arrow\n  in\n  let uncurried = bs || uncurried in\n  let return_expr, typ_constraint =\n    match return_expr.pexp_desc with\n    | Pexp_constraint (expr, typ) ->\n      ( {\n          expr with\n          pexp_attributes =\n            List.concat [expr.pexp_attributes; return_expr.pexp_attributes];\n        },\n        Some typ )\n    | _ -> (return_expr, None)\n  in\n  let parameters_doc =\n    print_expr_fun_parameters ~state ~in_callback ~async ~uncurried\n      ~has_constraint:\n        (match typ_constraint with\n        | Some _ -> true\n        | None -> false)\n      parameters cmt_tbl\n  in\n  let return_should_indent =\n    match return_expr.pexp_desc with\n    | Pexp_sequence _ | Pexp_let _ | Pexp_letmodule _ | Pexp_letexception _\n    | Pexp_open _ ->\n      false\n    | _ -> true\n  in\n  let return_expr_doc =\n    let opt_braces, _ = ParsetreeViewer.process_braces_attr return_expr in\n    let should_inline =\n      match (return_expr.pexp_desc, opt_braces) with\n      | _, Some _ -> true\n      | ( ( Pexp_array _ | Pexp_tuple _\n          | Pexp_construct (_, Some _)\n          | Pexp_record _ ),\n          _ ) ->\n        true\n      | _ -> false\n    in\n    let return_doc =\n      let doc = print_expression_with_comments ~state return_expr cmt_tbl in\n      match Parens.expr return_expr with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc return_expr braces\n      | Nothing -> doc\n    in\n    if should_inline then Doc.concat [Doc.space; return_doc]\n    else\n      Doc.group\n        (if return_should_indent then\n           
Doc.concat\n             [\n               Doc.indent (Doc.concat [Doc.line; return_doc]);\n               (match in_callback with\n               | FitsOnOneLine | ArgumentsFitOnOneLine -> Doc.soft_line\n               | _ -> Doc.nil);\n             ]\n         else Doc.concat [Doc.space; return_doc])\n  in\n  let typ_constraint_doc =\n    match typ_constraint with\n    | Some typ -> Doc.concat [Doc.text \": \"; print_typ_expr ~state typ cmt_tbl]\n    | _ -> Doc.nil\n  in\n  Doc.concat\n    [\n      print_attributes ~state attrs cmt_tbl;\n      parameters_doc;\n      typ_constraint_doc;\n      Doc.text \" =>\";\n      return_expr_doc;\n    ]\n\nand print_ternary_operand ~state expr cmt_tbl =\n  let doc = print_expression_with_comments ~state expr cmt_tbl in\n  match Parens.ternary_operand expr with\n  | Parens.Parenthesized -> add_parens doc\n  | Braced braces -> print_braces doc expr braces\n  | Nothing -> doc\n\nand print_set_field_expr ~state attrs lhs longident_loc rhs loc cmt_tbl =\n  let rhs_doc =\n    let doc = print_expression_with_comments ~state rhs cmt_tbl in\n    match Parens.set_field_expr_rhs rhs with\n    | Parens.Parenthesized -> add_parens doc\n    | Braced braces -> print_braces doc rhs braces\n    | Nothing -> doc\n  in\n  let lhs_doc =\n    let doc = print_expression_with_comments ~state lhs cmt_tbl in\n    match Parens.field_expr lhs with\n    | Parens.Parenthesized -> add_parens doc\n    | Braced braces -> print_braces doc lhs braces\n    | Nothing -> doc\n  in\n  let should_indent = ParsetreeViewer.is_binary_expression rhs in\n  let doc =\n    Doc.group\n      (Doc.concat\n         [\n           lhs_doc;\n           Doc.dot;\n           print_lident_path longident_loc cmt_tbl;\n           Doc.text \" =\";\n           (if should_indent then\n              Doc.group (Doc.indent (Doc.concat [Doc.line; rhs_doc]))\n            else Doc.concat [Doc.space; rhs_doc]);\n         ])\n  in\n  let doc =\n    match attrs with\n    | [] -> doc\n    | 
attrs ->\n      Doc.group (Doc.concat [print_attributes ~state attrs cmt_tbl; doc])\n  in\n  print_comments doc cmt_tbl loc\n\nand print_template_literal ~state expr cmt_tbl =\n  let tag = ref \"js\" in\n  let rec walk_expr expr =\n    let open Parsetree in\n    match expr.pexp_desc with\n    | Pexp_apply\n        ( {pexp_desc = Pexp_ident {txt = Longident.Lident \"^\"}},\n          [(Nolabel, arg1); (Nolabel, arg2)] ) ->\n      let lhs = walk_expr arg1 in\n      let rhs = walk_expr arg2 in\n      Doc.concat [lhs; rhs]\n    | Pexp_constant (Pconst_string (txt, Some prefix)) ->\n      tag := prefix;\n      print_string_contents txt\n    | _ ->\n      let doc = print_expression_with_comments ~state expr cmt_tbl in\n      let doc =\n        match Parens.expr expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc expr braces\n        | Nothing -> doc\n      in\n      Doc.group (Doc.concat [Doc.text \"${\"; Doc.indent doc; Doc.rbrace])\n  in\n  let content = walk_expr expr in\n  Doc.concat\n    [\n      (if !tag = \"js\" then Doc.nil else Doc.text !tag);\n      Doc.text \"`\";\n      content;\n      Doc.text \"`\";\n    ]\n\nand print_tagged_template_literal ~state call_expr args cmt_tbl =\n  let strings_list, values_list =\n    match args with\n    | [\n     (_, {Parsetree.pexp_desc = Pexp_array strings});\n     (_, {Parsetree.pexp_desc = Pexp_array values});\n    ] ->\n      (strings, values)\n    | _ -> assert false\n  in\n\n  let strings =\n    List.map\n      (fun x ->\n        match x with\n        | {Parsetree.pexp_desc = Pexp_constant (Pconst_string (txt, _))} ->\n          print_string_contents txt\n        | _ -> assert false)\n      strings_list\n  in\n\n  let values =\n    List.map\n      (fun x ->\n        Doc.concat\n          [\n            Doc.text \"${\";\n            print_expression_with_comments ~state x cmt_tbl;\n            Doc.text \"}\";\n          ])\n      values_list\n  in\n\n  let process 
strings values =\n    let rec aux acc = function\n      | [], [] -> acc\n      | a_head :: a_rest, b -> aux (Doc.concat [acc; a_head]) (b, a_rest)\n      | _ -> assert false\n    in\n    aux Doc.nil (strings, values)\n  in\n\n  let content : Doc.t = process strings values in\n\n  let tag = print_expression_with_comments ~state call_expr cmt_tbl in\n  Doc.concat [tag; Doc.text \"`\"; content; Doc.text \"`\"]\n\nand print_unary_expression ~state expr cmt_tbl =\n  let print_unary_operator op =\n    Doc.text\n      (match op with\n      | \"~+\" -> \"+\"\n      | \"~+.\" -> \"+.\"\n      | \"~-\" -> \"-\"\n      | \"~-.\" -> \"-.\"\n      | \"not\" -> \"!\"\n      | _ -> assert false)\n  in\n  match expr.pexp_desc with\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},\n        [(Nolabel, operand)] ) ->\n    let printed_operand =\n      let doc = print_expression_with_comments ~state operand cmt_tbl in\n      match Parens.unary_expr_operand operand with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc operand braces\n      | Nothing -> doc\n    in\n    let doc = Doc.concat [print_unary_operator operator; printed_operand] in\n    print_comments doc cmt_tbl expr.pexp_loc\n  | _ -> assert false\n\nand print_binary_expression ~state (expr : Parsetree.expression) cmt_tbl =\n  let print_binary_operator ~inline_rhs operator =\n    let operator_txt =\n      match operator with\n      | \"|.\" | \"|.u\" -> \"->\"\n      | \"^\" -> \"++\"\n      | \"=\" -> \"==\"\n      | \"==\" -> \"===\"\n      | \"<>\" -> \"!=\"\n      | \"!=\" -> \"!==\"\n      | txt -> txt\n    in\n    let spacing_before_operator =\n      if operator = \"|.\" || operator = \"|.u\" then Doc.soft_line\n      else if operator = \"|>\" then Doc.line\n      else Doc.space\n    in\n    let spacing_after_operator =\n      if operator = \"|.\" || operator = \"|.u\" then Doc.nil\n      else if operator = \"|>\" then Doc.space\n      else if 
inline_rhs then Doc.space\n      else Doc.line\n    in\n    Doc.concat\n      [spacing_before_operator; Doc.text operator_txt; spacing_after_operator]\n  in\n  let print_operand ~is_lhs ~is_multiline expr parent_operator =\n    let rec flatten ~is_lhs ~is_multiline expr parent_operator =\n      if ParsetreeViewer.is_binary_expression expr then\n        match expr with\n        | {\n         pexp_desc =\n           Pexp_apply\n             ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},\n               [(_, left); (_, right)] );\n        } ->\n          if\n            ParsetreeViewer.flattenable_operators parent_operator operator\n            && not (ParsetreeViewer.has_attributes expr.pexp_attributes)\n          then\n            let left_printed =\n              flatten ~is_lhs:true ~is_multiline left operator\n            in\n            let right_printed =\n              let right_printeable_attrs, right_internal_attrs =\n                ParsetreeViewer.partition_printable_attributes\n                  right.pexp_attributes\n              in\n              let doc =\n                print_expression_with_comments ~state\n                  {right with pexp_attributes = right_internal_attrs}\n                  cmt_tbl\n              in\n              let doc =\n                if Parens.flatten_operand_rhs parent_operator right then\n                  Doc.concat [Doc.lparen; doc; Doc.rparen]\n                else doc\n              in\n              let doc =\n                Doc.concat\n                  [print_attributes ~state right_printeable_attrs cmt_tbl; doc]\n              in\n              match right_printeable_attrs with\n              | [] -> doc\n              | _ -> add_parens doc\n            in\n            let is_await =\n              ParsetreeViewer.has_await_attribute expr.pexp_attributes\n            in\n            let doc =\n              if is_await then\n                let parens =\n                  
Res_parens.binary_operator_inside_await_needs_parens operator\n                in\n                Doc.concat\n                  [\n                    Doc.lparen;\n                    Doc.text \"await \";\n                    (if parens then Doc.lparen else Doc.nil);\n                    left_printed;\n                    print_binary_operator ~inline_rhs:false operator;\n                    right_printed;\n                    (if parens then Doc.rparen else Doc.nil);\n                    Doc.rparen;\n                  ]\n              else\n                match operator with\n                | (\"|.\" | \"|.u\") when is_multiline ->\n                  (* If the pipe-chain is written over multiple lines, break automatically\n                   * `let x = a->b->c -> same line, break when line-width exceeded\n                   * `let x = a->\n                   *   b->c` -> pipe-chain is written on multiple lines, break the group *)\n                  Doc.breakable_group ~force_break:true\n                    (Doc.concat\n                       [\n                         left_printed;\n                         print_binary_operator ~inline_rhs:false operator;\n                         right_printed;\n                       ])\n                | _ ->\n                  Doc.concat\n                    [\n                      left_printed;\n                      print_binary_operator ~inline_rhs:false operator;\n                      right_printed;\n                    ]\n            in\n\n            let doc =\n              if (not is_lhs) && Parens.rhs_binary_expr_operand operator expr\n              then Doc.concat [Doc.lparen; doc; Doc.rparen]\n              else doc\n            in\n            print_comments doc cmt_tbl expr.pexp_loc\n          else\n            let printeable_attrs, internal_attrs =\n              ParsetreeViewer.partition_printable_attributes\n                expr.pexp_attributes\n            in\n            let doc =\n              
print_expression_with_comments ~state\n                {expr with pexp_attributes = internal_attrs}\n                cmt_tbl\n            in\n            let doc =\n              if\n                Parens.sub_binary_expr_operand parent_operator operator\n                || printeable_attrs <> []\n                   && (ParsetreeViewer.is_binary_expression expr\n                      || ParsetreeViewer.is_ternary_expr expr)\n              then Doc.concat [Doc.lparen; doc; Doc.rparen]\n              else doc\n            in\n            Doc.concat [print_attributes ~state printeable_attrs cmt_tbl; doc]\n        | _ -> assert false\n      else\n        match expr.pexp_desc with\n        | Pexp_apply\n            ( {pexp_desc = Pexp_ident {txt = Longident.Lident \"^\"; loc}},\n              [(Nolabel, _); (Nolabel, _)] )\n          when loc.loc_ghost ->\n          let doc = print_template_literal ~state expr cmt_tbl in\n          print_comments doc cmt_tbl expr.Parsetree.pexp_loc\n        | Pexp_setfield (lhs, field, rhs) ->\n          let doc =\n            print_set_field_expr ~state expr.pexp_attributes lhs field rhs\n              expr.pexp_loc cmt_tbl\n          in\n          if is_lhs then add_parens doc else doc\n        | Pexp_apply\n            ( {pexp_desc = Pexp_ident {txt = Longident.Lident \"#=\"}},\n              [(Nolabel, lhs); (Nolabel, rhs)] ) ->\n          let rhs_doc = print_expression_with_comments ~state rhs cmt_tbl in\n          let lhs_doc = print_expression_with_comments ~state lhs cmt_tbl in\n          (* TODO: unify indentation of \"=\" *)\n          let should_indent = ParsetreeViewer.is_binary_expression rhs in\n          let doc =\n            Doc.group\n              (Doc.concat\n                 [\n                   lhs_doc;\n                   Doc.text \" =\";\n                   (if should_indent then\n                      Doc.group (Doc.indent (Doc.concat [Doc.line; rhs_doc]))\n                    else Doc.concat [Doc.space; 
rhs_doc]);\n                 ])\n          in\n          let doc =\n            match expr.pexp_attributes with\n            | [] -> doc\n            | attrs ->\n              Doc.group\n                (Doc.concat [print_attributes ~state attrs cmt_tbl; doc])\n          in\n          if is_lhs then add_parens doc else doc\n        | _ -> (\n          let doc = print_expression_with_comments ~state expr cmt_tbl in\n          match Parens.binary_expr_operand ~is_lhs expr with\n          | Parens.Parenthesized -> add_parens doc\n          | Braced braces -> print_braces doc expr braces\n          | Nothing -> doc)\n    in\n    flatten ~is_lhs ~is_multiline expr parent_operator\n  in\n  match expr.pexp_desc with\n  | Pexp_apply\n      ( {\n          pexp_desc =\n            Pexp_ident {txt = Longident.Lident ((\"|.\" | \"|.u\" | \"|>\") as op)};\n        },\n        [(Nolabel, lhs); (Nolabel, rhs)] )\n    when not\n           (ParsetreeViewer.is_binary_expression lhs\n           || ParsetreeViewer.is_binary_expression rhs\n           || print_attributes ~state expr.pexp_attributes cmt_tbl <> Doc.nil)\n    ->\n    let lhs_has_comment_below = has_comment_below cmt_tbl lhs.pexp_loc in\n    let lhs_doc = print_operand ~is_lhs:true ~is_multiline:false lhs op in\n    let rhs_doc = print_operand ~is_lhs:false ~is_multiline:false rhs op in\n    Doc.group\n      (Doc.concat\n         [\n           print_attributes ~state expr.pexp_attributes cmt_tbl;\n           lhs_doc;\n           (match (lhs_has_comment_below, op) with\n           | true, (\"|.\" | \"|.u\") -> Doc.concat [Doc.soft_line; Doc.text \"->\"]\n           | false, (\"|.\" | \"|.u\") -> Doc.text \"->\"\n           | true, \"|>\" -> Doc.concat [Doc.line; Doc.text \"|> \"]\n           | false, \"|>\" -> Doc.text \" |> \"\n           | _ -> Doc.nil);\n           rhs_doc;\n         ])\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},\n        [(Nolabel, lhs); (Nolabel, rhs)] ) ->\n    
let is_multiline =\n      lhs.pexp_loc.loc_start.pos_lnum < rhs.pexp_loc.loc_start.pos_lnum\n    in\n\n    let right =\n      let operator_with_rhs =\n        let rhs_doc =\n          print_operand\n            ~is_lhs:(ParsetreeViewer.is_rhs_binary_operator operator)\n            ~is_multiline rhs operator\n        in\n        Doc.concat\n          [\n            print_binary_operator\n              ~inline_rhs:(ParsetreeViewer.should_inline_rhs_binary_expr rhs)\n              operator;\n            rhs_doc;\n          ]\n      in\n      if ParsetreeViewer.should_indent_binary_expr expr then\n        Doc.group (Doc.indent operator_with_rhs)\n      else operator_with_rhs\n    in\n    let doc =\n      Doc.group\n        (Doc.concat\n           [\n             print_operand\n               ~is_lhs:(not @@ ParsetreeViewer.is_rhs_binary_operator operator)\n               ~is_multiline lhs operator;\n             right;\n           ])\n    in\n    Doc.group\n      (Doc.concat\n         [\n           print_attributes ~state expr.pexp_attributes cmt_tbl;\n           (match\n              Parens.binary_expr\n                {\n                  expr with\n                  pexp_attributes =\n                    ParsetreeViewer.filter_printable_attributes\n                      expr.pexp_attributes;\n                }\n            with\n           | Braced braces_loc -> print_braces doc expr braces_loc\n           | Parenthesized -> add_parens doc\n           | Nothing -> doc);\n         ])\n  | _ -> Doc.nil\n\nand print_belt_array_concat_apply ~state sub_lists cmt_tbl =\n  let make_spread_doc comma_before_spread = function\n    | Some expr ->\n      Doc.concat\n        [\n          comma_before_spread;\n          Doc.dotdotdot;\n          (let doc = print_expression_with_comments ~state expr cmt_tbl in\n           match Parens.expr expr with\n           | Parens.Parenthesized -> add_parens doc\n           | Braced braces -> print_braces doc expr braces\n           | 
Nothing -> doc);\n        ]\n    | None -> Doc.nil\n  in\n  let make_sub_list_doc (expressions, spread) =\n    let comma_before_spread =\n      match expressions with\n      | [] -> Doc.nil\n      | _ -> Doc.concat [Doc.text \",\"; Doc.line]\n    in\n    let spread_doc = make_spread_doc comma_before_spread spread in\n    Doc.concat\n      [\n        Doc.join\n          ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n          (List.map\n             (fun expr ->\n               let doc = print_expression_with_comments ~state expr cmt_tbl in\n               match Parens.expr expr with\n               | Parens.Parenthesized -> add_parens doc\n               | Braced braces -> print_braces doc expr braces\n               | Nothing -> doc)\n             expressions);\n        spread_doc;\n      ]\n  in\n  Doc.group\n    (Doc.concat\n       [\n         Doc.lbracket;\n         Doc.indent\n           (Doc.concat\n              [\n                Doc.soft_line;\n                Doc.join\n                  ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                  (List.map make_sub_list_doc\n                     (List.map ParsetreeViewer.collect_array_expressions\n                        sub_lists));\n              ]);\n         Doc.trailing_comma;\n         Doc.soft_line;\n         Doc.rbracket;\n       ])\n\nand print_belt_list_concat_apply ~state sub_lists cmt_tbl =\n  let make_spread_doc comma_before_spread = function\n    | Some expr ->\n      Doc.concat\n        [\n          comma_before_spread;\n          Doc.dotdotdot;\n          (let doc = print_expression_with_comments ~state expr cmt_tbl in\n           match Parens.expr expr with\n           | Parens.Parenthesized -> add_parens doc\n           | Braced braces -> print_braces doc expr braces\n           | Nothing -> doc);\n        ]\n    | None -> Doc.nil\n  in\n  let make_sub_list_doc (expressions, spread) =\n    let comma_before_spread =\n      match expressions with\n      | [] -> Doc.nil\n      | _ -> 
Doc.concat [Doc.text \",\"; Doc.line]\n    in\n    let spread_doc = make_spread_doc comma_before_spread spread in\n    Doc.concat\n      [\n        Doc.join\n          ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n          (List.map\n             (fun expr ->\n               let doc = print_expression_with_comments ~state expr cmt_tbl in\n               match Parens.expr expr with\n               | Parens.Parenthesized -> add_parens doc\n               | Braced braces -> print_braces doc expr braces\n               | Nothing -> doc)\n             expressions);\n        spread_doc;\n      ]\n  in\n  Doc.group\n    (Doc.concat\n       [\n         Doc.text \"list{\";\n         Doc.indent\n           (Doc.concat\n              [\n                Doc.soft_line;\n                Doc.join\n                  ~sep:(Doc.concat [Doc.text \",\"; Doc.line])\n                  (List.map make_sub_list_doc\n                     (List.map ParsetreeViewer.collect_list_expressions\n                        sub_lists));\n              ]);\n         Doc.trailing_comma;\n         Doc.soft_line;\n         Doc.rbrace;\n       ])\n\n(* callExpr(arg1, arg2) *)\nand print_pexp_apply ~state expr cmt_tbl =\n  match expr.pexp_desc with\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Lident \"##\"}},\n        [(Nolabel, parent_expr); (Nolabel, member_expr)] ) ->\n    let parent_doc =\n      let doc = print_expression_with_comments ~state parent_expr cmt_tbl in\n      match Parens.unary_expr_operand parent_expr with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc parent_expr braces\n      | Nothing -> doc\n    in\n    let member =\n      let member_doc =\n        match member_expr.pexp_desc with\n        | Pexp_ident lident ->\n          print_comments\n            (print_longident lident.txt)\n            cmt_tbl member_expr.pexp_loc\n        | _ -> print_expression_with_comments ~state member_expr cmt_tbl\n      in\n      Doc.concat 
[Doc.text \"\\\"\"; member_doc; Doc.text \"\\\"\"]\n    in\n    Doc.group\n      (Doc.concat\n         [\n           print_attributes ~state expr.pexp_attributes cmt_tbl;\n           parent_doc;\n           Doc.lbracket;\n           member;\n           Doc.rbracket;\n         ])\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Lident \"#=\"}},\n        [(Nolabel, lhs); (Nolabel, rhs)] ) -> (\n    let rhs_doc =\n      let doc = print_expression_with_comments ~state rhs cmt_tbl in\n      match Parens.expr rhs with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc rhs braces\n      | Nothing -> doc\n    in\n    (* TODO: unify indentation of \"=\" *)\n    let should_indent =\n      (not (ParsetreeViewer.is_braced_expr rhs))\n      && ParsetreeViewer.is_binary_expression rhs\n    in\n    let doc =\n      Doc.group\n        (Doc.concat\n           [\n             print_expression_with_comments ~state lhs cmt_tbl;\n             Doc.text \" =\";\n             (if should_indent then\n                Doc.group (Doc.indent (Doc.concat [Doc.line; rhs_doc]))\n              else Doc.concat [Doc.space; rhs_doc]);\n           ])\n    in\n    match expr.pexp_attributes with\n    | [] -> doc\n    | attrs ->\n      Doc.group (Doc.concat [print_attributes ~state attrs cmt_tbl; doc]))\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident \"Array\", \"get\")}},\n        [(Nolabel, parent_expr); (Nolabel, member_expr)] )\n    when not (ParsetreeViewer.is_rewritten_underscore_apply_sugar parent_expr)\n    ->\n    (* Don't print the Array.get(_, 0) sugar a.k.a. 
(__x) => Array.get(__x, 0) as _[0] *)\n    let member =\n      let member_doc =\n        let doc = print_expression_with_comments ~state member_expr cmt_tbl in\n        match Parens.expr member_expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc member_expr braces\n        | Nothing -> doc\n      in\n      let should_inline =\n        match member_expr.pexp_desc with\n        | Pexp_constant _ | Pexp_ident _ -> true\n        | _ -> false\n      in\n      if should_inline then member_doc\n      else\n        Doc.concat\n          [Doc.indent (Doc.concat [Doc.soft_line; member_doc]); Doc.soft_line]\n    in\n    let parent_doc =\n      let doc = print_expression_with_comments ~state parent_expr cmt_tbl in\n      match Parens.unary_expr_operand parent_expr with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc parent_expr braces\n      | Nothing -> doc\n    in\n    Doc.group\n      (Doc.concat\n         [\n           print_attributes ~state expr.pexp_attributes cmt_tbl;\n           parent_doc;\n           Doc.lbracket;\n           member;\n           Doc.rbracket;\n         ])\n  | Pexp_apply\n      ( {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident \"Array\", \"set\")}},\n        [(Nolabel, parent_expr); (Nolabel, member_expr); (Nolabel, target_expr)]\n      ) ->\n    let member =\n      let member_doc =\n        let doc = print_expression_with_comments ~state member_expr cmt_tbl in\n        match Parens.expr member_expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc member_expr braces\n        | Nothing -> doc\n      in\n      let should_inline =\n        match member_expr.pexp_desc with\n        | Pexp_constant _ | Pexp_ident _ -> true\n        | _ -> false\n      in\n      if should_inline then member_doc\n      else\n        Doc.concat\n          [Doc.indent (Doc.concat [Doc.soft_line; member_doc]); 
Doc.soft_line]\n    in\n    let should_indent_target_expr =\n      if ParsetreeViewer.is_braced_expr target_expr then false\n      else\n        ParsetreeViewer.is_binary_expression target_expr\n        ||\n        match target_expr with\n        | {\n         pexp_attributes = [({Location.txt = \"res.ternary\"}, _)];\n         pexp_desc = Pexp_ifthenelse (if_expr, _, _);\n        } ->\n          ParsetreeViewer.is_binary_expression if_expr\n          || ParsetreeViewer.has_attributes if_expr.pexp_attributes\n        | {pexp_desc = Pexp_newtype _} -> false\n        | e ->\n          ParsetreeViewer.has_attributes e.pexp_attributes\n          || ParsetreeViewer.is_array_access e\n    in\n    let target_expr =\n      let doc = print_expression_with_comments ~state target_expr cmt_tbl in\n      match Parens.expr target_expr with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc target_expr braces\n      | Nothing -> doc\n    in\n    let parent_doc =\n      let doc = print_expression_with_comments ~state parent_expr cmt_tbl in\n      match Parens.unary_expr_operand parent_expr with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc parent_expr braces\n      | Nothing -> doc\n    in\n    Doc.group\n      (Doc.concat\n         [\n           print_attributes ~state expr.pexp_attributes cmt_tbl;\n           parent_doc;\n           Doc.lbracket;\n           member;\n           Doc.rbracket;\n           Doc.text \" =\";\n           (if should_indent_target_expr then\n              Doc.indent (Doc.concat [Doc.line; target_expr])\n            else Doc.concat [Doc.space; target_expr]);\n         ])\n  (* TODO: cleanup, are those branches even remotely performant? 
*)\n  | Pexp_apply ({pexp_desc = Pexp_ident lident}, args)\n    when ParsetreeViewer.is_jsx_expression expr ->\n    print_jsx_expression ~state lident args cmt_tbl\n  | Pexp_apply (call_expr, args) ->\n    let args =\n      List.map\n        (fun (lbl, arg) -> (lbl, ParsetreeViewer.rewrite_underscore_apply arg))\n        args\n    in\n    let uncurried, attrs =\n      ParsetreeViewer.process_uncurried_app_attribute expr.pexp_attributes\n    in\n    let partial, attrs = ParsetreeViewer.process_partial_app_attribute attrs in\n    let args =\n      if partial then\n        let dummy = Ast_helper.Exp.constant (Ast_helper.Const.int 0) in\n        args @ [(Asttypes.Labelled \"...\", dummy)]\n      else args\n    in\n    let dotted =\n      state.uncurried_config |> Res_uncurried.get_dotted ~uncurried\n    in\n    let call_expr_doc =\n      let doc = print_expression_with_comments ~state call_expr cmt_tbl in\n      match Parens.call_expr call_expr with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc call_expr braces\n      | Nothing -> doc\n    in\n    if ParsetreeViewer.requires_special_callback_printing_first_arg args then\n      let args_doc =\n        print_arguments_with_callback_in_first_position ~dotted ~state args\n          cmt_tbl\n      in\n      Doc.concat\n        [print_attributes ~state attrs cmt_tbl; call_expr_doc; args_doc]\n    else if ParsetreeViewer.requires_special_callback_printing_last_arg args\n    then\n      let args_doc =\n        print_arguments_with_callback_in_last_position ~state ~dotted args\n          cmt_tbl\n      in\n      (*\n       * Fixes the following layout (the `[` and `]` should break):\n       *   [fn(x => {\n       *     let _ = x\n       *   }), fn(y => {\n       *     let _ = y\n       *   }), fn(z => {\n       *     let _ = z\n       *   })]\n       * See `Doc.willBreak documentation in interface file for more context.\n       * Context:\n       *  
https://github.com/rescript-lang/syntax/issues/111\n       *  https://github.com/rescript-lang/syntax/issues/166\n       *)\n      let maybe_break_parent =\n        if Doc.will_break args_doc then Doc.break_parent else Doc.nil\n      in\n      Doc.concat\n        [\n          maybe_break_parent;\n          print_attributes ~state attrs cmt_tbl;\n          call_expr_doc;\n          args_doc;\n        ]\n    else\n      let args_doc = print_arguments ~state ~dotted ~partial args cmt_tbl in\n      Doc.concat\n        [print_attributes ~state attrs cmt_tbl; call_expr_doc; args_doc]\n  | _ -> assert false\n\nand print_jsx_expression ~state lident args cmt_tbl =\n  let name = print_jsx_name lident in\n  let formatted_props, children = print_jsx_props ~state args cmt_tbl in\n  (* <div className=\"test\" /> *)\n  let has_children =\n    match children with\n    | Some\n        {\n          Parsetree.pexp_desc =\n            Pexp_construct ({txt = Longident.Lident \"[]\"}, None);\n        } ->\n      false\n    | None -> false\n    | _ -> true\n  in\n  let is_self_closing =\n    match children with\n    | Some\n        {\n          Parsetree.pexp_desc =\n            Pexp_construct ({txt = Longident.Lident \"[]\"}, None);\n          pexp_loc = loc;\n        } ->\n      not (has_comments_inside cmt_tbl loc)\n    | _ -> false\n  in\n  let print_children children =\n    let line_sep =\n      match children with\n      | Some expr ->\n        if has_nested_jsx_or_more_than_one_child expr then Doc.hard_line\n        else Doc.line\n      | None -> Doc.line\n    in\n    Doc.concat\n      [\n        Doc.indent\n          (Doc.concat\n             [\n               Doc.line;\n               (match children with\n               | Some children_expression ->\n                 print_jsx_children ~state children_expression ~sep:line_sep\n                   cmt_tbl\n               | None -> Doc.nil);\n             ]);\n        line_sep;\n      ]\n  in\n  Doc.group\n    (Doc.concat\n       
[\n         Doc.group\n           (Doc.concat\n              [\n                print_comments\n                  (Doc.concat [Doc.less_than; name])\n                  cmt_tbl lident.Asttypes.loc;\n                formatted_props;\n                (match children with\n                | Some\n                    {\n                      Parsetree.pexp_desc =\n                        Pexp_construct ({txt = Longident.Lident \"[]\"}, None);\n                    }\n                  when is_self_closing ->\n                  Doc.text \"/>\"\n                | _ ->\n                  (* if tag A has trailing comments then put > on the next line\n                     <A\n                     // comments\n                     >\n                     </A>\n                  *)\n                  if has_trailing_comments cmt_tbl lident.Asttypes.loc then\n                    Doc.concat [Doc.soft_line; Doc.greater_than]\n                  else Doc.greater_than);\n              ]);\n         (if is_self_closing then Doc.nil\n          else\n            Doc.concat\n              [\n                (if has_children then print_children children\n                 else\n                   match children with\n                   | Some\n                       {\n                         Parsetree.pexp_desc =\n                           Pexp_construct ({txt = Longident.Lident \"[]\"}, None);\n                         pexp_loc = loc;\n                       } ->\n                     print_comments_inside cmt_tbl loc\n                   | _ -> Doc.nil);\n                Doc.text \"</\";\n                name;\n                Doc.greater_than;\n              ]);\n       ])\n\nand print_jsx_fragment ~state expr cmt_tbl =\n  let opening = Doc.text \"<>\" in\n  let closing = Doc.text \"</>\" in\n  let line_sep =\n    if has_nested_jsx_or_more_than_one_child expr then Doc.hard_line\n    else Doc.line\n  in\n  Doc.group\n    (Doc.concat\n       [\n         opening;\n         (match 
expr.pexp_desc with\n         | Pexp_construct ({txt = Longident.Lident \"[]\"}, None) -> Doc.nil\n         | _ ->\n           Doc.indent\n             (Doc.concat\n                [Doc.line; print_jsx_children ~state expr ~sep:line_sep cmt_tbl]));\n         line_sep;\n         closing;\n       ])\n\nand print_jsx_children ~state (children_expr : Parsetree.expression) ~sep\n    cmt_tbl =\n  match children_expr.pexp_desc with\n  | Pexp_construct ({txt = Longident.Lident \"::\"}, _) ->\n    let children, _ = ParsetreeViewer.collect_list_expressions children_expr in\n    Doc.group\n      (Doc.join ~sep\n         (List.map\n            (fun (expr : Parsetree.expression) ->\n              let leading_line_comment_present =\n                has_leading_line_comment cmt_tbl expr.pexp_loc\n              in\n              let expr_doc =\n                print_expression_with_comments ~state expr cmt_tbl\n              in\n              let add_parens_or_braces expr_doc =\n                (* {(20: int)} make sure that we also protect the expression inside *)\n                let inner_doc =\n                  if Parens.braced_expr expr then add_parens expr_doc\n                  else expr_doc\n                in\n                if leading_line_comment_present then add_braces inner_doc\n                else Doc.concat [Doc.lbrace; inner_doc; Doc.rbrace]\n              in\n              match Parens.jsx_child_expr expr with\n              | Nothing -> expr_doc\n              | Parenthesized -> add_parens_or_braces expr_doc\n              | Braced braces_loc ->\n                print_comments\n                  (add_parens_or_braces expr_doc)\n                  cmt_tbl braces_loc)\n            children))\n  | _ ->\n    let leading_line_comment_present =\n      has_leading_line_comment cmt_tbl children_expr.pexp_loc\n    in\n    let expr_doc =\n      print_expression_with_comments ~state children_expr cmt_tbl\n    in\n    Doc.concat\n      [\n        Doc.dotdotdot;\n        
(match Parens.jsx_child_expr children_expr with\n        | Parenthesized | Braced _ ->\n          let inner_doc =\n            if Parens.braced_expr children_expr then add_parens expr_doc\n            else expr_doc\n          in\n          if leading_line_comment_present then add_braces inner_doc\n          else Doc.concat [Doc.lbrace; inner_doc; Doc.rbrace]\n        | Nothing -> expr_doc);\n      ]\n\nand print_jsx_props ~state args cmt_tbl : Doc.t * Parsetree.expression option =\n  (* This function was introduced because we have different formatting behavior for self-closing tags and other tags\n     we always put /> on a new line for self-closing tag when it breaks\n     <A\n      a=\"\"\n     />\n\n     <A\n     a=\"\">\n      <B />\n     </A>\n     we should remove this function once the format is unified\n  *)\n  let is_self_closing children =\n    match children with\n    | {\n     Parsetree.pexp_desc = Pexp_construct ({txt = Longident.Lident \"[]\"}, None);\n     pexp_loc = loc;\n    } ->\n      not (has_comments_inside cmt_tbl loc)\n    | _ -> false\n  in\n  let rec loop props args =\n    match args with\n    | [] -> (Doc.nil, None)\n    | [\n     (Asttypes.Labelled \"children\", children);\n     ( Asttypes.Nolabel,\n       {\n         Parsetree.pexp_desc =\n           Pexp_construct ({txt = Longident.Lident \"()\"}, None);\n       } );\n    ] ->\n      let doc = if is_self_closing children then Doc.line else Doc.nil in\n      (doc, Some children)\n    | ((_, expr) as last_prop)\n      :: [\n           (Asttypes.Labelled \"children\", children);\n           ( Asttypes.Nolabel,\n             {\n               Parsetree.pexp_desc =\n                 Pexp_construct ({txt = Longident.Lident \"()\"}, None);\n             } );\n         ] ->\n      let loc =\n        match expr.Parsetree.pexp_attributes with\n        | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _attrs ->\n          {loc with loc_end = expr.pexp_loc.loc_end}\n        | _ -> expr.pexp_loc\n 
     in\n      let trailing_comments_present = has_trailing_comments cmt_tbl loc in\n      let prop_doc = print_jsx_prop ~state last_prop cmt_tbl in\n      let formatted_props =\n        Doc.concat\n          [\n            Doc.indent\n              (Doc.concat\n                 [\n                   Doc.line;\n                   Doc.group\n                     (Doc.join ~sep:Doc.line (prop_doc :: props |> List.rev));\n                 ]);\n            (* print > on new line if the last prop has trailing comments *)\n            (match (is_self_closing children, trailing_comments_present) with\n            (* we always put /> on a new line when a self-closing tag breaks *)\n            | true, _ -> Doc.line\n            | false, true -> Doc.soft_line\n            | false, false -> Doc.nil);\n          ]\n      in\n      (formatted_props, Some children)\n    | arg :: args ->\n      let prop_doc = print_jsx_prop ~state arg cmt_tbl in\n      loop (prop_doc :: props) args\n  in\n  loop [] args\n\nand print_jsx_prop ~state arg cmt_tbl =\n  match arg with\n  | ( ((Asttypes.Labelled lbl_txt | Optional lbl_txt) as lbl),\n      {\n        Parsetree.pexp_attributes =\n          [({Location.txt = \"res.namedArgLoc\"; loc = arg_loc}, _)];\n        pexp_desc = Pexp_ident {txt = Longident.Lident ident};\n      } )\n    when lbl_txt = ident (* jsx punning *) -> (\n    match lbl with\n    | Nolabel -> Doc.nil\n    | Labelled _lbl -> print_comments (print_ident_like ident) cmt_tbl arg_loc\n    | Optional _lbl ->\n      let doc = Doc.concat [Doc.question; print_ident_like ident] in\n      print_comments doc cmt_tbl arg_loc)\n  | ( ((Asttypes.Labelled lbl_txt | Optional lbl_txt) as lbl),\n      {\n        Parsetree.pexp_attributes = [];\n        pexp_desc = Pexp_ident {txt = Longident.Lident ident};\n      } )\n    when lbl_txt = ident (* jsx punning when printing from Reason *) -> (\n    match lbl with\n    | Nolabel -> Doc.nil\n    | Labelled _lbl -> print_ident_like ident\n    | 
Optional _lbl -> Doc.concat [Doc.question; print_ident_like ident])\n  | Asttypes.Labelled \"_spreadProps\", expr ->\n    let doc = print_expression_with_comments ~state expr cmt_tbl in\n    Doc.concat [Doc.lbrace; Doc.dotdotdot; doc; Doc.rbrace]\n  | lbl, expr ->\n    let arg_loc, expr =\n      match expr.pexp_attributes with\n      | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: attrs ->\n        (loc, {expr with pexp_attributes = attrs})\n      | _ -> (Location.none, expr)\n    in\n    let lbl_doc =\n      match lbl with\n      | Asttypes.Labelled lbl ->\n        let lbl = print_comments (print_ident_like lbl) cmt_tbl arg_loc in\n        Doc.concat [lbl; Doc.equal]\n      | Asttypes.Optional lbl ->\n        let lbl = print_comments (print_ident_like lbl) cmt_tbl arg_loc in\n        Doc.concat [lbl; Doc.equal; Doc.question]\n      | Nolabel -> Doc.nil\n    in\n    let expr_doc =\n      let leading_line_comment_present =\n        has_leading_line_comment cmt_tbl expr.pexp_loc\n      in\n      let doc = print_expression_with_comments ~state expr cmt_tbl in\n      match Parens.jsx_prop_expr expr with\n      | Parenthesized | Braced _ ->\n        (* {(20: int)} make sure that we also protect the expression inside *)\n        let inner_doc =\n          if Parens.braced_expr expr then add_parens doc else doc\n        in\n        if leading_line_comment_present then add_braces inner_doc\n        else Doc.concat [Doc.lbrace; inner_doc; Doc.rbrace]\n      | _ -> doc\n    in\n    let full_loc = {arg_loc with loc_end = expr.pexp_loc.loc_end} in\n    print_comments (Doc.concat [lbl_doc; expr_doc]) cmt_tbl full_loc\n\n(* div -> div.\n * Navabar.createElement -> Navbar\n * Staff.Users.createElement -> Staff.Users *)\nand print_jsx_name {txt = lident} =\n  let print_ident = print_ident_like ~allow_uident:true ~allow_hyphen:true in\n  let rec flatten acc lident =\n    match lident with\n    | Longident.Lident txt -> print_ident txt :: acc\n    | Ldot (lident, 
\"createElement\") -> flatten acc lident\n    | Ldot (lident, txt) -> flatten (print_ident txt :: acc) lident\n    | _ -> acc\n  in\n  match lident with\n  | Longident.Lident txt -> print_ident txt\n  | _ as lident ->\n    let segments = flatten [] lident in\n    Doc.join ~sep:Doc.dot segments\n\nand print_arguments_with_callback_in_first_position ~dotted ~state args cmt_tbl\n    =\n  (* Because the same subtree gets printed twice, we need to copy the cmtTbl.\n   * consumed comments need to be marked not-consumed and reprinted…\n   * Cheng's different comment algorithm will solve this. *)\n  let state = State.next_custom_layout state in\n  let cmt_tbl_copy = CommentTable.copy cmt_tbl in\n  let callback, printed_args =\n    match args with\n    | (lbl, expr) :: args ->\n      let lbl_doc =\n        match lbl with\n        | Asttypes.Nolabel -> Doc.nil\n        | Asttypes.Labelled txt ->\n          Doc.concat [Doc.tilde; print_ident_like txt; Doc.equal]\n        | Asttypes.Optional txt ->\n          Doc.concat [Doc.tilde; print_ident_like txt; Doc.equal; Doc.question]\n      in\n      let callback =\n        Doc.concat\n          [\n            lbl_doc;\n            print_pexp_fun ~state ~in_callback:FitsOnOneLine expr cmt_tbl;\n          ]\n      in\n      let callback = lazy (print_comments callback cmt_tbl expr.pexp_loc) in\n      let printed_args =\n        lazy\n          (Doc.join\n             ~sep:(Doc.concat [Doc.comma; Doc.line])\n             (List.map (fun arg -> print_argument ~state arg cmt_tbl) args))\n      in\n      (callback, printed_args)\n    | _ -> assert false\n  in\n\n  (* Thing.map((arg1, arg2) => MyModuleBlah.toList(argument), foo) *)\n  (* Thing.map((arg1, arg2) => {\n   *   MyModuleBlah.toList(argument)\n   * }, longArgumet, veryLooooongArgument)\n   *)\n  let fits_on_one_line =\n    lazy\n      (Doc.concat\n         [\n           (if dotted then Doc.text \"(. 
\" else Doc.lparen);\n           Lazy.force callback;\n           Doc.comma;\n           Doc.line;\n           Lazy.force printed_args;\n           Doc.rparen;\n         ])\n  in\n\n  (* Thing.map(\n   *   (param1, parm2) => doStuff(param1, parm2),\n   *   arg1,\n   *   arg2,\n   *   arg3,\n   * )\n   *)\n  let break_all_args =\n    lazy (print_arguments ~state ~dotted args cmt_tbl_copy)\n  in\n\n  (* Sometimes one of the non-callback arguments will break.\n   * There might be a single line comment in there, or a multiline string etc.\n   * showDialog(\n   *   ~onConfirm={() => ()},\n   *   `\n   *   Do you really want to leave this workspace?\n   *   Some more text with detailed explanations...\n   *   `,\n   *   ~danger=true,\n   *   // comment   --> here a single line comment\n   *   ~confirmText=\"Yes, I am sure!\",\n   *  )\n   * In this case, we always want the arguments broken over multiple lines,\n   * like a normal function call.\n   *)\n  if state |> State.should_break_callback then Lazy.force break_all_args\n  else if Doc.will_break (Lazy.force printed_args) then\n    Lazy.force break_all_args\n  else\n    Doc.custom_layout [Lazy.force fits_on_one_line; Lazy.force break_all_args]\n\nand print_arguments_with_callback_in_last_position ~state ~dotted args cmt_tbl =\n  (* Because the same subtree gets printed twice, we need to copy the cmtTbl.\n   * consumed comments need to be marked not-consumed and reprinted…\n   * Cheng's different comment algorithm will solve this. 
*)\n  let state = state |> State.next_custom_layout in\n  let cmt_tbl_copy = CommentTable.copy cmt_tbl in\n  let cmt_tbl_copy2 = CommentTable.copy cmt_tbl in\n  let rec loop acc args =\n    match args with\n    | [] -> (lazy Doc.nil, lazy Doc.nil, lazy Doc.nil)\n    | [(lbl, expr)] ->\n      let lbl_doc =\n        match lbl with\n        | Asttypes.Nolabel -> Doc.nil\n        | Asttypes.Labelled txt ->\n          Doc.concat [Doc.tilde; print_ident_like txt; Doc.equal]\n        | Asttypes.Optional txt ->\n          Doc.concat [Doc.tilde; print_ident_like txt; Doc.equal; Doc.question]\n      in\n      let callback_fits_on_one_line =\n        lazy\n          (let pexp_fun_doc =\n             print_pexp_fun ~state ~in_callback:FitsOnOneLine expr cmt_tbl\n           in\n           let doc = Doc.concat [lbl_doc; pexp_fun_doc] in\n           print_comments doc cmt_tbl expr.pexp_loc)\n      in\n      let callback_arguments_fits_on_one_line =\n        lazy\n          (let pexp_fun_doc =\n             print_pexp_fun ~state ~in_callback:ArgumentsFitOnOneLine expr\n               cmt_tbl_copy\n           in\n           let doc = Doc.concat [lbl_doc; pexp_fun_doc] in\n           print_comments doc cmt_tbl_copy expr.pexp_loc)\n      in\n      ( lazy (Doc.concat (List.rev acc)),\n        callback_fits_on_one_line,\n        callback_arguments_fits_on_one_line )\n    | arg :: args ->\n      let arg_doc = print_argument ~state arg cmt_tbl in\n      loop (Doc.line :: Doc.comma :: arg_doc :: acc) args\n  in\n  let printed_args, callback, callback2 = loop [] args in\n\n  (* Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument)) *)\n  let fits_on_one_line =\n    lazy\n      (Doc.concat\n         [\n           (if dotted then Doc.text \"(.\" else Doc.lparen);\n           Lazy.force printed_args;\n           Lazy.force callback;\n           Doc.rparen;\n         ])\n  in\n\n  (* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) =>\n   *   MyModuleBlah.toList(argument)\n   * 
)\n   *)\n  let arugments_fit_on_one_line =\n    lazy\n      (Doc.concat\n         [\n           (if dotted then Doc.text \"(.\" else Doc.lparen);\n           Lazy.force printed_args;\n           Doc.breakable_group ~force_break:true (Lazy.force callback2);\n           Doc.rparen;\n         ])\n  in\n\n  (* Thing.map(\n   *   arg1,\n   *   arg2,\n   *   arg3,\n   *   (param1, parm2) => doStuff(param1, parm2)\n   * )\n   *)\n  let break_all_args =\n    lazy (print_arguments ~state ~dotted args cmt_tbl_copy2)\n  in\n\n  (* Sometimes one of the non-callback arguments will break.\n   * There might be a single line comment in there, or a multiline string etc.\n   * showDialog(\n   *   `\n   *   Do you really want to leave this workspace?\n   *   Some more text with detailed explanations...\n   *   `,\n   *   ~danger=true,\n   *   // comment   --> here a single line comment\n   *   ~confirmText=\"Yes, I am sure!\",\n   *   ~onConfirm={() => ()},\n   *  )\n   * In this case, we always want the arguments broken over multiple lines,\n   * like a normal function call.\n   *)\n  if state |> State.should_break_callback then Lazy.force break_all_args\n  else if Doc.will_break (Lazy.force printed_args) then\n    Lazy.force break_all_args\n  else\n    Doc.custom_layout\n      [\n        Lazy.force fits_on_one_line;\n        Lazy.force arugments_fit_on_one_line;\n        Lazy.force break_all_args;\n      ]\n\nand print_arguments ~state ~dotted ?(partial = false)\n    (args : (Asttypes.arg_label * Parsetree.expression) list) cmt_tbl =\n  match args with\n  | [\n   ( Nolabel,\n     {\n       pexp_desc = Pexp_construct ({txt = Longident.Lident \"()\"}, _);\n       pexp_loc = loc;\n     } );\n  ] -> (\n    (* See \"parseCallExpr\", ghost unit expression is used the implement\n     * arity zero vs arity one syntax.\n     * Related: https://github.com/rescript-lang/syntax/issues/138 *)\n    match (dotted, loc.loc_ghost) with\n    | true, true -> Doc.text \"(.)\" (* arity zero *)\n    | 
true, false -> Doc.text \"(. ())\" (* arity one *)\n    | _ -> Doc.text \"()\")\n  | [(Nolabel, arg)] when ParsetreeViewer.is_huggable_expression arg ->\n    let arg_doc =\n      let doc = print_expression_with_comments ~state arg cmt_tbl in\n      match Parens.expr arg with\n      | Parens.Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc arg braces\n      | Nothing -> doc\n    in\n    Doc.concat\n      [(if dotted then Doc.text \"(. \" else Doc.lparen); arg_doc; Doc.rparen]\n  | args ->\n    Doc.group\n      (Doc.concat\n         [\n           (if dotted then Doc.text \"(.\" else Doc.lparen);\n           Doc.indent\n             (Doc.concat\n                [\n                  (if dotted then Doc.line else Doc.soft_line);\n                  Doc.join\n                    ~sep:(Doc.concat [Doc.comma; Doc.line])\n                    (List.map\n                       (fun arg -> print_argument ~state arg cmt_tbl)\n                       args);\n                ]);\n           (if partial then Doc.nil else Doc.trailing_comma);\n           Doc.soft_line;\n           Doc.rparen;\n         ])\n\n(*\n * argument ::=\n *   | _                            (* syntax sugar *)\n *   | expr\n *   | expr : type\n *   | ~ label-name\n *   | ~ label-name\n *   | ~ label-name ?\n *   | ~ label-name =   expr\n *   | ~ label-name =   _           (* syntax sugar *)\n *   | ~ label-name =   expr : type\n *   | ~ label-name = ? expr\n *   | ~ label-name = ? _           (* syntax sugar *)\n *   | ~ label-name = ? 
expr : type *)\nand print_argument ~state (arg_lbl, arg) cmt_tbl =\n  match (arg_lbl, arg) with\n  (* ~a (punned)*)\n  | ( Labelled lbl,\n      ({\n         pexp_desc = Pexp_ident {txt = Longident.Lident name};\n         pexp_attributes = [] | [({Location.txt = \"res.namedArgLoc\"}, _)];\n       } as arg_expr) )\n    when lbl = name && not (ParsetreeViewer.is_braced_expr arg_expr) ->\n    let loc =\n      match arg.pexp_attributes with\n      | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _ -> loc\n      | _ -> arg.pexp_loc\n    in\n    let doc = Doc.concat [Doc.tilde; print_ident_like lbl] in\n    print_comments doc cmt_tbl loc\n  (* ~a: int (punned)*)\n  | ( Labelled lbl,\n      {\n        pexp_desc =\n          Pexp_constraint\n            ( ({pexp_desc = Pexp_ident {txt = Longident.Lident name}} as arg_expr),\n              typ );\n        pexp_loc;\n        pexp_attributes =\n          ([] | [({Location.txt = \"res.namedArgLoc\"}, _)]) as attrs;\n      } )\n    when lbl = name && not (ParsetreeViewer.is_braced_expr arg_expr) ->\n    let loc =\n      match attrs with\n      | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _ ->\n        {loc with loc_end = pexp_loc.loc_end}\n      | _ -> arg.pexp_loc\n    in\n    let doc =\n      Doc.concat\n        [\n          Doc.tilde;\n          print_ident_like lbl;\n          Doc.text \": \";\n          print_typ_expr ~state typ cmt_tbl;\n        ]\n    in\n    print_comments doc cmt_tbl loc\n  (* ~a? 
(optional lbl punned)*)\n  | ( Optional lbl,\n      {\n        pexp_desc = Pexp_ident {txt = Longident.Lident name};\n        pexp_attributes = [] | [({Location.txt = \"res.namedArgLoc\"}, _)];\n      } )\n    when lbl = name ->\n    let loc =\n      match arg.pexp_attributes with\n      | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _ -> loc\n      | _ -> arg.pexp_loc\n    in\n    let doc = Doc.concat [Doc.tilde; print_ident_like lbl; Doc.question] in\n    print_comments doc cmt_tbl loc\n  | _lbl, expr ->\n    let arg_loc, expr =\n      match expr.pexp_attributes with\n      | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: attrs ->\n        (loc, {expr with pexp_attributes = attrs})\n      | _ -> (expr.pexp_loc, expr)\n    in\n    let printed_lbl, dotdotdot =\n      match arg_lbl with\n      | Nolabel -> (Doc.nil, false)\n      | Labelled \"...\" ->\n        let doc = Doc.text \"...\" in\n        (print_comments doc cmt_tbl arg_loc, true)\n      | Labelled lbl ->\n        let doc = Doc.concat [Doc.tilde; print_ident_like lbl; Doc.equal] in\n        (print_comments doc cmt_tbl arg_loc, false)\n      | Optional lbl ->\n        let doc =\n          Doc.concat [Doc.tilde; print_ident_like lbl; Doc.equal; Doc.question]\n        in\n        (print_comments doc cmt_tbl arg_loc, false)\n    in\n    let printed_expr =\n      let doc = print_expression_with_comments ~state expr cmt_tbl in\n      match Parens.expr expr with\n      | Parenthesized -> add_parens doc\n      | Braced braces -> print_braces doc expr braces\n      | Nothing -> doc\n    in\n    let loc = {arg_loc with loc_end = expr.pexp_loc.loc_end} in\n    let doc =\n      if dotdotdot then printed_lbl else Doc.concat [printed_lbl; printed_expr]\n    in\n    print_comments doc cmt_tbl loc\n\nand print_cases ~state (cases : Parsetree.case list) cmt_tbl =\n  Doc.breakable_group ~force_break:true\n    (Doc.concat\n       [\n         Doc.lbrace;\n         Doc.concat\n           [\n             Doc.line;\n     
        print_list\n               ~get_loc:(fun n ->\n                 {\n                   n.Parsetree.pc_lhs.ppat_loc with\n                   loc_end =\n                     (match ParsetreeViewer.process_braces_attr n.pc_rhs with\n                     | None, _ -> n.pc_rhs.pexp_loc.loc_end\n                     | Some ({loc}, _), _ -> loc.Location.loc_end);\n                 })\n               ~print:(print_case ~state) ~nodes:cases cmt_tbl;\n           ];\n         Doc.line;\n         Doc.rbrace;\n       ])\n\nand print_case ~state (case : Parsetree.case) cmt_tbl =\n  let rhs =\n    match case.pc_rhs.pexp_desc with\n    | Pexp_let _ | Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _\n    | Pexp_sequence _ ->\n      print_expression_block ~state\n        ~braces:(ParsetreeViewer.is_braced_expr case.pc_rhs)\n        case.pc_rhs cmt_tbl\n    | _ -> (\n      let doc = print_expression_with_comments ~state case.pc_rhs cmt_tbl in\n      match Parens.expr case.pc_rhs with\n      | Parenthesized -> add_parens doc\n      | _ -> doc)\n  in\n\n  let guard =\n    match case.pc_guard with\n    | None -> Doc.nil\n    | Some expr ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.line;\n             Doc.text \"if \";\n             print_expression_with_comments ~state expr cmt_tbl;\n           ])\n  in\n  let should_inline_rhs =\n    match case.pc_rhs.pexp_desc with\n    | Pexp_construct ({txt = Longident.Lident (\"()\" | \"true\" | \"false\")}, _)\n    | Pexp_constant _ | Pexp_ident _ ->\n      true\n    | _ when ParsetreeViewer.is_huggable_rhs case.pc_rhs -> true\n    | _ -> false\n  in\n  let should_indent_pattern =\n    match case.pc_lhs.ppat_desc with\n    | Ppat_or _ -> false\n    | _ -> true\n  in\n  let pattern_doc =\n    let doc = print_pattern ~state case.pc_lhs cmt_tbl in\n    match case.pc_lhs.ppat_desc with\n    | Ppat_constraint _ -> add_parens doc\n    | _ -> doc\n  in\n  let content =\n    Doc.concat\n      [\n        (if 
should_indent_pattern then Doc.indent pattern_doc else pattern_doc);\n        Doc.indent guard;\n        Doc.text \" =>\";\n        Doc.indent\n          (Doc.concat\n             [(if should_inline_rhs then Doc.space else Doc.line); rhs]);\n      ]\n  in\n  Doc.group (Doc.concat [Doc.text \"| \"; content])\n\nand print_expr_fun_parameters ~state ~in_callback ~async ~uncurried\n    ~has_constraint parameters cmt_tbl =\n  let dotted = state.uncurried_config |> Res_uncurried.get_dotted ~uncurried in\n  match parameters with\n  (* let f = _ => () *)\n  | [\n   ParsetreeViewer.Parameter\n     {\n       attrs = [];\n       lbl = Asttypes.Nolabel;\n       default_expr = None;\n       pat = {Parsetree.ppat_desc = Ppat_any; ppat_loc};\n     };\n  ]\n    when not dotted ->\n    let any =\n      let doc = if has_constraint then Doc.text \"(_)\" else Doc.text \"_\" in\n      print_comments doc cmt_tbl ppat_loc\n    in\n    if async then add_async any else any\n  (* let f = a => () *)\n  | [\n   ParsetreeViewer.Parameter\n     {\n       attrs = [];\n       lbl = Asttypes.Nolabel;\n       default_expr = None;\n       pat =\n         {\n           Parsetree.ppat_desc = Ppat_var string_loc;\n           Parsetree.ppat_attributes = attrs;\n         };\n     };\n  ]\n    when not dotted ->\n    let txt_doc =\n      let var = print_ident_like string_loc.txt in\n      let var =\n        match attrs with\n        | [] -> if has_constraint then add_parens var else var\n        | attrs ->\n          let attrs = print_attributes ~state attrs cmt_tbl in\n          add_parens (Doc.concat [attrs; var])\n      in\n      if async then add_async var else var\n    in\n    print_comments txt_doc cmt_tbl string_loc.loc\n  (* let f = () => () *)\n  | [\n   ParsetreeViewer.Parameter\n     {\n       attrs = [];\n       lbl = Asttypes.Nolabel;\n       default_expr = None;\n       pat =\n         {ppat_desc = Ppat_construct ({txt = Longident.Lident \"()\"; loc}, None)};\n     };\n  ]\n    when not 
dotted ->\n    let doc =\n      let lparen_rparen = Doc.text \"()\" in\n      if async then add_async lparen_rparen else lparen_rparen\n    in\n    print_comments doc cmt_tbl loc\n  (* let f = (~greeting, ~from as hometown, ~x=?) => () *)\n  | parameters ->\n    let in_callback =\n      match in_callback with\n      | FitsOnOneLine -> true\n      | _ -> false\n    in\n    let maybe_async_lparen =\n      let lparen = if dotted then Doc.text \"(. \" else Doc.lparen in\n      if async then add_async lparen else lparen\n    in\n    let should_hug = ParsetreeViewer.parameters_should_hug parameters in\n    let printed_paramaters =\n      Doc.concat\n        [\n          (if should_hug || in_callback then Doc.nil else Doc.soft_line);\n          Doc.join\n            ~sep:(Doc.concat [Doc.comma; Doc.line])\n            (List.map\n               (fun p -> print_exp_fun_parameter ~state p cmt_tbl)\n               parameters);\n        ]\n    in\n    Doc.group\n      (Doc.concat\n         [\n           maybe_async_lparen;\n           (if should_hug || in_callback then printed_paramaters\n            else\n              Doc.concat\n                [\n                  Doc.indent printed_paramaters;\n                  Doc.trailing_comma;\n                  Doc.soft_line;\n                ]);\n           Doc.rparen;\n         ])\n\nand print_exp_fun_parameter ~state parameter cmt_tbl =\n  match parameter with\n  | ParsetreeViewer.NewTypes {attrs; locs = lbls} ->\n    Doc.group\n      (Doc.concat\n         [\n           print_attributes ~state attrs cmt_tbl;\n           Doc.text \"type \";\n           (* XX *)\n           Doc.join ~sep:Doc.space\n             (List.map\n                (fun lbl ->\n                  print_comments\n                    (print_ident_like lbl.Asttypes.txt)\n                    cmt_tbl lbl.Asttypes.loc)\n                lbls);\n         ])\n  | Parameter {attrs; lbl; default_expr; pat = pattern} ->\n    let has_bs, attrs = 
ParsetreeViewer.process_bs_attribute attrs in\n    let dotted = if has_bs then Doc.concat [Doc.dot; Doc.space] else Doc.nil in\n    let attrs = print_attributes ~state attrs cmt_tbl in\n    (* =defaultValue *)\n    let default_expr_doc =\n      match default_expr with\n      | Some expr ->\n        Doc.concat\n          [Doc.text \"=\"; print_expression_with_comments ~state expr cmt_tbl]\n      | None -> Doc.nil\n    in\n    (* ~from as hometown\n     * ~from                   ->  punning *)\n    let label_with_pattern =\n      match (lbl, pattern) with\n      | Asttypes.Nolabel, pattern -> print_pattern ~state pattern cmt_tbl\n      | ( (Asttypes.Labelled lbl | Optional lbl),\n          {ppat_desc = Ppat_var string_loc; ppat_attributes} )\n        when lbl = string_loc.txt ->\n        (* ~d *)\n        Doc.concat\n          [\n            print_attributes ~state ppat_attributes cmt_tbl;\n            Doc.text \"~\";\n            print_ident_like lbl;\n          ]\n      | ( (Asttypes.Labelled lbl | Optional lbl),\n          {\n            ppat_desc = Ppat_constraint ({ppat_desc = Ppat_var {txt}}, typ);\n            ppat_attributes;\n          } )\n        when lbl = txt ->\n        (* ~d: e *)\n        Doc.concat\n          [\n            print_attributes ~state ppat_attributes cmt_tbl;\n            Doc.text \"~\";\n            print_ident_like lbl;\n            Doc.text \": \";\n            print_typ_expr ~state typ cmt_tbl;\n          ]\n      | (Asttypes.Labelled lbl | Optional lbl), pattern ->\n        (* ~b as c *)\n        Doc.concat\n          [\n            Doc.text \"~\";\n            print_ident_like lbl;\n            Doc.text \" as \";\n            print_pattern ~state pattern cmt_tbl;\n          ]\n    in\n    let optional_label_suffix =\n      match (lbl, default_expr) with\n      | Asttypes.Optional _, None -> Doc.text \"=?\"\n      | _ -> Doc.nil\n    in\n    let doc =\n      Doc.group\n        (Doc.concat\n           [\n             dotted;\n        
     attrs;\n             label_with_pattern;\n             default_expr_doc;\n             optional_label_suffix;\n           ])\n    in\n    let cmt_loc =\n      match default_expr with\n      | None -> (\n        match pattern.ppat_attributes with\n        | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _ ->\n          {loc with loc_end = pattern.ppat_loc.loc_end}\n        | _ -> pattern.ppat_loc)\n      | Some expr ->\n        let start_pos =\n          match pattern.ppat_attributes with\n          | ({Location.txt = \"res.namedArgLoc\"; loc}, _) :: _ -> loc.loc_start\n          | _ -> pattern.ppat_loc.loc_start\n        in\n        {\n          pattern.ppat_loc with\n          loc_start = start_pos;\n          loc_end = expr.pexp_loc.loc_end;\n        }\n    in\n    print_comments doc cmt_tbl cmt_loc\n\nand print_expression_block ~state ~braces expr cmt_tbl =\n  let rec collect_rows acc expr =\n    match expr.Parsetree.pexp_desc with\n    | Parsetree.Pexp_letmodule (mod_name, mod_expr, expr2) ->\n      let name =\n        let doc = Doc.text mod_name.txt in\n        print_comments doc cmt_tbl mod_name.loc\n      in\n      let name, mod_expr =\n        match mod_expr.pmod_desc with\n        | Pmod_constraint (mod_expr2, mod_type)\n          when not\n                 (ParsetreeViewer.has_await_attribute mod_expr.pmod_attributes)\n          ->\n          let name =\n            Doc.concat\n              [name; Doc.text \": \"; print_mod_type ~state mod_type cmt_tbl]\n          in\n          (name, mod_expr2)\n        | _ -> (name, mod_expr)\n      in\n      let let_module_doc =\n        Doc.concat\n          [\n            Doc.text \"module \";\n            name;\n            Doc.text \" = \";\n            print_mod_expr ~state mod_expr cmt_tbl;\n          ]\n      in\n      let loc = {expr.pexp_loc with loc_end = mod_expr.pmod_loc.loc_end} in\n      collect_rows ((loc, let_module_doc) :: acc) expr2\n    | Pexp_letexception (extension_constructor, expr2) ->\n 
     let loc =\n        let loc =\n          {expr.pexp_loc with loc_end = extension_constructor.pext_loc.loc_end}\n        in\n        match get_first_leading_comment cmt_tbl loc with\n        | None -> loc\n        | Some comment ->\n          let cmt_loc = Comment.loc comment in\n          {cmt_loc with loc_end = loc.loc_end}\n      in\n      let let_exception_doc =\n        print_exception_def ~state extension_constructor cmt_tbl\n      in\n      collect_rows ((loc, let_exception_doc) :: acc) expr2\n    | Pexp_open (override_flag, longident_loc, expr2) ->\n      let open_doc =\n        Doc.concat\n          [\n            Doc.text \"open\";\n            print_override_flag override_flag;\n            Doc.space;\n            print_longident_location longident_loc cmt_tbl;\n          ]\n      in\n      let loc = {expr.pexp_loc with loc_end = longident_loc.loc.loc_end} in\n      collect_rows ((loc, open_doc) :: acc) expr2\n    | Pexp_sequence (expr1, expr2) ->\n      let expr_doc =\n        let doc = print_expression ~state expr1 cmt_tbl in\n        match Parens.expr expr1 with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc expr1 braces\n        | Nothing -> doc\n      in\n      let loc = expr1.pexp_loc in\n      collect_rows ((loc, expr_doc) :: acc) expr2\n    | Pexp_let (rec_flag, value_bindings, expr2) -> (\n      let loc =\n        let loc =\n          match (value_bindings, List.rev value_bindings) with\n          | vb :: _, last_vb :: _ ->\n            {vb.pvb_loc with loc_end = last_vb.pvb_loc.loc_end}\n          | _ -> Location.none\n        in\n        match get_first_leading_comment cmt_tbl loc with\n        | None -> loc\n        | Some comment ->\n          let cmt_loc = Comment.loc comment in\n          {cmt_loc with loc_end = loc.loc_end}\n      in\n      let rec_flag =\n        match rec_flag with\n        | Asttypes.Nonrecursive -> Doc.nil\n        | Asttypes.Recursive -> Doc.text \"rec \"\n      in\n 
     let let_doc =\n        print_value_bindings ~state ~rec_flag value_bindings cmt_tbl\n      in\n      (* let () = {\n       *   let () = foo()\n       *   ()\n       * }\n       * We don't need to print the () on the last line of the block\n       *)\n      match expr2.pexp_desc with\n      | Pexp_construct ({txt = Longident.Lident \"()\"}, _) ->\n        List.rev ((loc, let_doc) :: acc)\n      | _ -> collect_rows ((loc, let_doc) :: acc) expr2)\n    | _ ->\n      let expr_doc =\n        let doc = print_expression ~state expr cmt_tbl in\n        match Parens.expr expr with\n        | Parens.Parenthesized -> add_parens doc\n        | Braced braces -> print_braces doc expr braces\n        | Nothing -> doc\n      in\n      List.rev ((expr.pexp_loc, expr_doc) :: acc)\n  in\n  let rows = collect_rows [] expr in\n  let block =\n    print_list ~get_loc:fst ~nodes:rows\n      ~print:(fun (_, doc) _ -> doc)\n      ~force_break:true cmt_tbl\n  in\n  Doc.breakable_group ~force_break:true\n    (if braces then\n       Doc.concat\n         [\n           Doc.lbrace;\n           Doc.indent (Doc.concat [Doc.line; block]);\n           Doc.line;\n           Doc.rbrace;\n         ]\n     else block)\n\n(*\n * // user types:\n * let f = (a, b) => { a + b }\n *\n * // printer: everything is on one line\n * let f = (a, b) => { a + b }\n *\n * // user types: over multiple lines\n * let f = (a, b) => {\n *   a + b\n * }\n *\n * // printer: over multiple lines\n * let f = (a, b) => {\n *   a + b\n * }\n *)\nand print_braces doc expr braces_loc =\n  let over_multiple_lines =\n    let open Location in\n    braces_loc.loc_end.pos_lnum > braces_loc.loc_start.pos_lnum\n  in\n  match expr.Parsetree.pexp_desc with\n  | Pexp_letmodule _ | Pexp_letexception _ | Pexp_let _ | Pexp_open _\n  | Pexp_sequence _ ->\n    (* already has braces *)\n    doc\n  | _ ->\n    Doc.breakable_group ~force_break:over_multiple_lines\n      (Doc.concat\n         [\n           Doc.lbrace;\n           Doc.indent\n     
        (Doc.concat\n                [\n                  Doc.soft_line;\n                  (if Parens.braced_expr expr then add_parens doc else doc);\n                ]);\n           Doc.soft_line;\n           Doc.rbrace;\n         ])\n\nand print_override_flag override_flag =\n  match override_flag with\n  | Asttypes.Override -> Doc.text \"!\"\n  | Fresh -> Doc.nil\n\nand print_direction_flag flag =\n  match flag with\n  | Asttypes.Downto -> Doc.text \" downto \"\n  | Asttypes.Upto -> Doc.text \" to \"\n\nand print_expression_record_row ~state (lbl, expr) cmt_tbl punning_allowed =\n  let cmt_loc = {lbl.loc with loc_end = expr.pexp_loc.loc_end} in\n  let doc =\n    Doc.group\n      (match expr.pexp_desc with\n      | Pexp_ident {txt = Lident key; loc = _keyLoc}\n        when punning_allowed && Longident.last lbl.txt = key ->\n        (* print punned field *)\n        Doc.concat\n          [\n            print_attributes ~state expr.pexp_attributes cmt_tbl;\n            print_optional_label expr.pexp_attributes;\n            print_lident_path lbl cmt_tbl;\n          ]\n      | _ ->\n        Doc.concat\n          [\n            print_lident_path lbl cmt_tbl;\n            Doc.text \": \";\n            print_optional_label expr.pexp_attributes;\n            (let doc = print_expression_with_comments ~state expr cmt_tbl in\n             match Parens.expr_record_row_rhs expr with\n             | Parens.Parenthesized -> add_parens doc\n             | Braced braces -> print_braces doc expr braces\n             | Nothing -> doc);\n          ])\n  in\n  print_comments doc cmt_tbl cmt_loc\n\nand print_bs_object_row ~state (lbl, expr) cmt_tbl =\n  let cmt_loc = {lbl.loc with loc_end = expr.pexp_loc.loc_end} in\n  let lbl_doc =\n    let doc =\n      Doc.concat [Doc.text \"\\\"\"; print_longident lbl.txt; Doc.text \"\\\"\"]\n    in\n    print_comments doc cmt_tbl lbl.loc\n  in\n  let doc =\n    Doc.concat\n      [\n        lbl_doc;\n        Doc.text \": \";\n        (let doc = 
print_expression_with_comments ~state expr cmt_tbl in\n         match Parens.expr expr with\n         | Parens.Parenthesized -> add_parens doc\n         | Braced braces -> print_braces doc expr braces\n         | Nothing -> doc);\n      ]\n  in\n  print_comments doc cmt_tbl cmt_loc\n\n(* The optional loc indicates whether we need to print the attributes in\n * relation to some location. In practise this means the following:\n *  `@attr type t = string` -> on the same line, print on the same line\n *  `@attr\n *   type t = string` -> attr is on prev line, print the attributes\n *   with a line break between, we respect the users' original layout *)\nand print_attributes ?loc ?(inline = false) ~state\n    (attrs : Parsetree.attributes) cmt_tbl =\n  match ParsetreeViewer.filter_parsing_attrs attrs with\n  | [] -> Doc.nil\n  | attrs ->\n    let line_break =\n      match loc with\n      | None -> Doc.line\n      | Some loc -> (\n        match List.rev attrs with\n        | ({loc = first_loc}, _) :: _\n          when loc.loc_start.pos_lnum > first_loc.loc_end.pos_lnum ->\n          Doc.hard_line\n        | _ -> Doc.line)\n    in\n    Doc.concat\n      [\n        Doc.group\n          (Doc.join_with_sep\n             (List.map (fun attr -> print_attribute ~state attr cmt_tbl) attrs));\n        (if inline then Doc.space else line_break);\n      ]\n\nand print_payload ~state (payload : Parsetree.payload) cmt_tbl =\n  match payload with\n  | PStr [] -> Doc.nil\n  | PStr [{pstr_desc = Pstr_eval (expr, attrs)}] ->\n    let expr_doc = print_expression_with_comments ~state expr cmt_tbl in\n    let needs_parens =\n      match attrs with\n      | [] -> false\n      | _ -> true\n    in\n    let should_hug = ParsetreeViewer.is_huggable_expression expr in\n    if should_hug then\n      Doc.concat\n        [\n          Doc.lparen;\n          print_attributes ~state attrs cmt_tbl;\n          (if needs_parens then add_parens expr_doc else expr_doc);\n          Doc.rparen;\n        ]\n    
else\n      Doc.concat\n        [\n          Doc.lparen;\n          Doc.indent\n            (Doc.concat\n               [\n                 Doc.soft_line;\n                 print_attributes ~state attrs cmt_tbl;\n                 (if needs_parens then add_parens expr_doc else expr_doc);\n               ]);\n          Doc.soft_line;\n          Doc.rparen;\n        ]\n  | PStr [({pstr_desc = Pstr_value (_recFlag, _bindings)} as si)] ->\n    add_parens (print_structure_item ~state si cmt_tbl)\n  | PStr structure -> add_parens (print_structure ~state structure cmt_tbl)\n  | PTyp typ ->\n    Doc.concat\n      [\n        Doc.lparen;\n        Doc.text \":\";\n        Doc.indent (Doc.concat [Doc.line; print_typ_expr ~state typ cmt_tbl]);\n        Doc.soft_line;\n        Doc.rparen;\n      ]\n  | PPat (pat, opt_expr) ->\n    let when_doc =\n      match opt_expr with\n      | Some expr ->\n        Doc.concat\n          [\n            Doc.line;\n            Doc.text \"if \";\n            print_expression_with_comments ~state expr cmt_tbl;\n          ]\n      | None -> Doc.nil\n    in\n    Doc.concat\n      [\n        Doc.lparen;\n        Doc.indent\n          (Doc.concat\n             [\n               Doc.soft_line;\n               Doc.text \"? 
\";\n               print_pattern ~state pat cmt_tbl;\n               when_doc;\n             ]);\n        Doc.soft_line;\n        Doc.rparen;\n      ]\n  | PSig signature ->\n    Doc.concat\n      [\n        Doc.lparen;\n        Doc.text \":\";\n        Doc.indent\n          (Doc.concat [Doc.line; print_signature ~state signature cmt_tbl]);\n        Doc.soft_line;\n        Doc.rparen;\n      ]\n\nand print_attribute ?(standalone = false) ~state\n    ((id, payload) : Parsetree.attribute) cmt_tbl =\n  match (id, payload) with\n  | ( {txt = \"res.doc\"},\n      PStr\n        [\n          {\n            pstr_desc =\n              Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (txt, _))}, _);\n          };\n        ] ) ->\n    ( Doc.concat\n        [\n          Doc.text (if standalone then \"/***\" else \"/**\");\n          Doc.text txt;\n          Doc.text \"*/\";\n        ],\n      Doc.hard_line )\n  | _ ->\n    let id =\n      match id.txt with\n      | \"uncurried.swap\" ->\n        state.uncurried_config <- Config.Swap;\n        id\n      | \"uncurried\" ->\n        state.uncurried_config <- Config.Uncurried;\n        id\n      | _ -> id\n    in\n    ( Doc.group\n        (Doc.concat\n           [\n             Doc.text (if standalone then \"@@\" else \"@\");\n             Doc.text id.txt;\n             print_payload ~state payload cmt_tbl;\n           ]),\n      Doc.line )\n\nand print_mod_expr ~state mod_expr cmt_tbl =\n  let doc =\n    match mod_expr.pmod_desc with\n    | Pmod_ident longident_loc -> print_longident_location longident_loc cmt_tbl\n    | Pmod_structure [] ->\n      let should_break =\n        mod_expr.pmod_loc.loc_start.pos_lnum\n        < mod_expr.pmod_loc.loc_end.pos_lnum\n      in\n      Doc.breakable_group ~force_break:should_break\n        (Doc.concat\n           [\n             Doc.lbrace;\n             print_comments_inside cmt_tbl mod_expr.pmod_loc;\n             Doc.rbrace;\n           ])\n    | Pmod_structure structure ->\n      
Doc.breakable_group ~force_break:true\n        (Doc.concat\n           [\n             Doc.lbrace;\n             Doc.indent\n               (Doc.concat\n                  [Doc.soft_line; print_structure ~state structure cmt_tbl]);\n             Doc.soft_line;\n             Doc.rbrace;\n           ])\n    | Pmod_unpack expr ->\n      let should_hug =\n        match expr.pexp_desc with\n        | Pexp_let _ -> true\n        | Pexp_constraint\n            ({pexp_desc = Pexp_let _}, {ptyp_desc = Ptyp_package _packageType})\n          ->\n          true\n        | _ -> false\n      in\n      let expr, module_constraint =\n        match expr.pexp_desc with\n        | Pexp_constraint\n            (expr, {ptyp_desc = Ptyp_package package_type; ptyp_loc}) ->\n          let package_doc =\n            let doc =\n              print_package_type ~state ~print_module_keyword_and_parens:false\n                package_type cmt_tbl\n            in\n            print_comments doc cmt_tbl ptyp_loc\n          in\n          let type_doc =\n            Doc.group\n              (Doc.concat\n                 [Doc.text \":\"; Doc.indent (Doc.concat [Doc.line; package_doc])])\n          in\n          (expr, type_doc)\n        | _ -> (expr, Doc.nil)\n      in\n      let unpack_doc =\n        Doc.group\n          (Doc.concat\n             [\n               print_expression_with_comments ~state expr cmt_tbl;\n               module_constraint;\n             ])\n      in\n      Doc.group\n        (Doc.concat\n           [\n             Doc.text \"unpack(\";\n             (if should_hug then unpack_doc\n              else\n                Doc.concat\n                  [\n                    Doc.indent (Doc.concat [Doc.soft_line; unpack_doc]);\n                    Doc.soft_line;\n                  ]);\n             Doc.rparen;\n           ])\n    | Pmod_extension extension ->\n      print_extension ~state ~at_module_lvl:false extension cmt_tbl\n    | Pmod_apply _ ->\n      let args, call_expr = 
ParsetreeViewer.mod_expr_apply mod_expr in\n      let is_unit_sugar =\n        match args with\n        | [{pmod_desc = Pmod_structure []}] -> true\n        | _ -> false\n      in\n      let should_hug =\n        match args with\n        | [{pmod_desc = Pmod_structure _}] -> true\n        | _ -> false\n      in\n      Doc.group\n        (Doc.concat\n           [\n             print_mod_expr ~state call_expr cmt_tbl;\n             (if is_unit_sugar then\n                print_mod_apply_arg ~state\n                  (List.hd args [@doesNotRaise])\n                  cmt_tbl\n              else\n                Doc.concat\n                  [\n                    Doc.lparen;\n                    (if should_hug then\n                       print_mod_apply_arg ~state\n                         (List.hd args [@doesNotRaise])\n                         cmt_tbl\n                     else\n                       Doc.indent\n                         (Doc.concat\n                            [\n                              Doc.soft_line;\n                              Doc.join\n                                ~sep:(Doc.concat [Doc.comma; Doc.line])\n                                (List.map\n                                   (fun mod_arg ->\n                                     print_mod_apply_arg ~state mod_arg cmt_tbl)\n                                   args);\n                            ]));\n                    (if not should_hug then\n                       Doc.concat [Doc.trailing_comma; Doc.soft_line]\n                     else Doc.nil);\n                    Doc.rparen;\n                  ]);\n           ])\n    | Pmod_constraint (mod_expr, mod_type) ->\n      Doc.concat\n        [\n          print_mod_expr ~state mod_expr cmt_tbl;\n          Doc.text \": \";\n          print_mod_type ~state mod_type cmt_tbl;\n        ]\n    | Pmod_functor _ -> print_mod_functor ~state mod_expr cmt_tbl\n  in\n  let doc =\n    if ParsetreeViewer.has_await_attribute 
mod_expr.pmod_attributes then\n      match mod_expr.pmod_desc with\n      | Pmod_constraint _ ->\n        Doc.concat [Doc.text \"await \"; Doc.lparen; doc; Doc.rparen]\n      | _ -> Doc.concat [Doc.text \"await \"; doc]\n    else doc\n  in\n  print_comments doc cmt_tbl mod_expr.pmod_loc\n\nand print_mod_functor ~state mod_expr cmt_tbl =\n  let parameters, return_mod_expr = ParsetreeViewer.mod_expr_functor mod_expr in\n  (* let shouldInline = match returnModExpr.pmod_desc with *)\n  (* | Pmod_structure _ | Pmod_ident _ -> true *)\n  (* | Pmod_constraint ({pmod_desc = Pmod_structure _}, _) -> true *)\n  (* | _ -> false *)\n  (* in *)\n  let return_constraint, return_mod_expr =\n    match return_mod_expr.pmod_desc with\n    | Pmod_constraint (mod_expr, mod_type) ->\n      let constraint_doc =\n        let doc = print_mod_type ~state mod_type cmt_tbl in\n        if Parens.mod_expr_functor_constraint mod_type then add_parens doc\n        else doc\n      in\n      let mod_constraint = Doc.concat [Doc.text \": \"; constraint_doc] in\n      (mod_constraint, print_mod_expr ~state mod_expr cmt_tbl)\n    | _ -> (Doc.nil, print_mod_expr ~state return_mod_expr cmt_tbl)\n  in\n  let parameters_doc =\n    match parameters with\n    | [(attrs, {txt = \"*\"}, None)] ->\n      Doc.group\n        (Doc.concat [print_attributes ~state attrs cmt_tbl; Doc.text \"()\"])\n    | [([], {txt = lbl}, None)] -> Doc.text lbl\n    | parameters ->\n      Doc.group\n        (Doc.concat\n           [\n             Doc.lparen;\n             Doc.indent\n               (Doc.concat\n                  [\n                    Doc.soft_line;\n                    Doc.join\n                      ~sep:(Doc.concat [Doc.comma; Doc.line])\n                      (List.map\n                         (fun param ->\n                           print_mod_functor_param ~state param cmt_tbl)\n                         parameters);\n                  ]);\n             Doc.trailing_comma;\n             Doc.soft_line;\n       
      Doc.rparen;\n           ])\n  in\n  Doc.group\n    (Doc.concat\n       [parameters_doc; return_constraint; Doc.text \" => \"; return_mod_expr])\n\nand print_mod_functor_param ~state (attrs, lbl, opt_mod_type) cmt_tbl =\n  let cmt_loc =\n    match opt_mod_type with\n    | None -> lbl.Asttypes.loc\n    | Some mod_type ->\n      {lbl.loc with loc_end = mod_type.Parsetree.pmty_loc.loc_end}\n  in\n  let attrs = print_attributes ~state attrs cmt_tbl in\n  let lbl_doc =\n    let doc = if lbl.txt = \"*\" then Doc.text \"()\" else Doc.text lbl.txt in\n    print_comments doc cmt_tbl lbl.loc\n  in\n  let doc =\n    Doc.group\n      (Doc.concat\n         [\n           attrs;\n           lbl_doc;\n           (match opt_mod_type with\n           | None -> Doc.nil\n           | Some mod_type ->\n             Doc.concat [Doc.text \": \"; print_mod_type ~state mod_type cmt_tbl]);\n         ])\n  in\n  print_comments doc cmt_tbl cmt_loc\n\nand print_mod_apply_arg ~state mod_expr cmt_tbl =\n  match mod_expr.pmod_desc with\n  | Pmod_structure [] -> Doc.text \"()\"\n  | _ -> print_mod_expr ~state mod_expr cmt_tbl\n\nand print_exception_def ~state (constr : Parsetree.extension_constructor)\n    cmt_tbl =\n  let kind =\n    match constr.pext_kind with\n    | Pext_rebind longident ->\n      Doc.indent\n        (Doc.concat\n           [Doc.text \" =\"; Doc.line; print_longident_location longident cmt_tbl])\n    | Pext_decl (Pcstr_tuple [], None) -> Doc.nil\n    | Pext_decl (args, gadt) ->\n      let gadt_doc =\n        match gadt with\n        | Some typ ->\n          Doc.concat [Doc.text \": \"; print_typ_expr ~state typ cmt_tbl]\n        | None -> Doc.nil\n      in\n      Doc.concat\n        [\n          print_constructor_arguments ~state ~indent:false args cmt_tbl; gadt_doc;\n        ]\n  in\n  let name =\n    print_comments (Doc.text constr.pext_name.txt) cmt_tbl constr.pext_name.loc\n  in\n  let doc =\n    Doc.group\n      (Doc.concat\n         [\n           print_attributes 
~state constr.pext_attributes cmt_tbl;\n           Doc.text \"exception \";\n           name;\n           kind;\n         ])\n  in\n  print_comments doc cmt_tbl constr.pext_loc\n\nand print_extension_constructor ~state\n    (constr : Parsetree.extension_constructor) cmt_tbl i =\n  let attrs = print_attributes ~state constr.pext_attributes cmt_tbl in\n  let bar =\n    if i > 0 then Doc.text \"| \" else Doc.if_breaks (Doc.text \"| \") Doc.nil\n  in\n  let kind =\n    match constr.pext_kind with\n    | Pext_rebind longident ->\n      Doc.indent\n        (Doc.concat\n           [Doc.text \" =\"; Doc.line; print_longident_location longident cmt_tbl])\n    | Pext_decl (Pcstr_tuple [], None) -> Doc.nil\n    | Pext_decl (args, gadt) ->\n      let gadt_doc =\n        match gadt with\n        | Some typ ->\n          Doc.concat [Doc.text \": \"; print_typ_expr ~state typ cmt_tbl]\n        | None -> Doc.nil\n      in\n      Doc.concat\n        [\n          print_constructor_arguments ~state ~indent:false args cmt_tbl; gadt_doc;\n        ]\n  in\n  let name =\n    print_comments (Doc.text constr.pext_name.txt) cmt_tbl constr.pext_name.loc\n  in\n  Doc.concat [bar; Doc.group (Doc.concat [attrs; name; kind])]\n\nlet print_type_params params = print_type_params ~state:(State.init ()) params\nlet print_typ_expr t = print_typ_expr ~state:(State.init ()) t\nlet print_expression e = print_expression ~state:(State.init ()) e\nlet print_pattern p = print_pattern ~state:(State.init ()) p\n\nlet print_implementation ~width (s : Parsetree.structure) ~comments =\n  let cmt_tbl = CommentTable.make () in\n  CommentTable.walk_structure s cmt_tbl comments;\n  (* CommentTable.log cmtTbl; *)\n  let doc = print_structure ~state:(State.init ()) s cmt_tbl in\n  (* Doc.debug doc; *)\n  Doc.to_string ~width doc ^ \"\\n\"\n\nlet print_interface ~width (s : Parsetree.signature) ~comments =\n  let cmt_tbl = CommentTable.make () in\n  CommentTable.walk_signature s cmt_tbl comments;\n  Doc.to_string 
~width (print_signature ~state:(State.init ()) s cmt_tbl) ^ \"\\n\"\n\nlet print_structure = print_structure ~state:(State.init ())\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_printer.mli",
    "content": "val print_type_params :\n  (Parsetree.core_type * Asttypes.variance) list ->\n  Res_comments_table.t ->\n  Res_doc.t\n\nval print_longident : Longident.t -> Res_doc.t\n\nval print_typ_expr : Parsetree.core_type -> Res_comments_table.t -> Res_doc.t\n\nval add_parens : Res_doc.t -> Res_doc.t\n\nval print_expression : Parsetree.expression -> Res_comments_table.t -> Res_doc.t\n\nval print_pattern : Parsetree.pattern -> Res_comments_table.t -> Res_doc.t\n[@@live]\n\nval print_structure : Parsetree.structure -> Res_comments_table.t -> Res_doc.t\n[@@live]\n\nval print_implementation :\n  width:int -> Parsetree.structure -> comments:Res_comment.t list -> string\nval print_interface :\n  width:int -> Parsetree.signature -> comments:Res_comment.t list -> string\n\nval print_ident_like :\n  ?allow_uident:bool -> ?allow_hyphen:bool -> string -> Res_doc.t\n\nval print_poly_var_ident : string -> Res_doc.t\n\nval polyvar_ident_to_string : string -> string [@@live]\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_reporting.ml",
    "content": "module Token = Res_token\nmodule Grammar = Res_grammar\n\ntype problem =\n  | Unexpected of Token.t [@live]\n  | Expected of {\n      token: Token.t;\n      pos: Lexing.position;\n      context: Grammar.t option;\n    } [@live]\n  | Message of string [@live]\n  | Uident [@live]\n  | Lident [@live]\n  | Unbalanced of Token.t [@live]\n\ntype parse_error = Lexing.position * problem\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_scanner.ml",
    "content": "module Diagnostics = Res_diagnostics\nmodule Token = Res_token\nmodule Comment = Res_comment\n\ntype mode = Jsx | Diamond\n\n(* We hide the implementation detail of the scanner reading character. Our char\n   will also contain the special -1 value to indicate end-of-file. This isn't\n   ideal; we should clean this up *)\nlet hacky_eof_char = Char.unsafe_chr (-1)\ntype char_encoding = Char.t\n\ntype t = {\n  filename: string;\n  src: string;\n  mutable err:\n    start_pos:Lexing.position ->\n    end_pos:Lexing.position ->\n    Diagnostics.category ->\n    unit;\n  mutable ch: char_encoding; (* current character *)\n  mutable offset: int; (* current byte offset *)\n  mutable offset16: int;\n      (* current number of utf16 code units since line start *)\n  mutable line_offset: int; (* current line offset *)\n  mutable lnum: int; (* current line number *)\n  mutable mode: mode list;\n}\n\nlet set_diamond_mode scanner = scanner.mode <- Diamond :: scanner.mode\n\nlet set_jsx_mode scanner = scanner.mode <- Jsx :: scanner.mode\n\nlet pop_mode scanner mode =\n  match scanner.mode with\n  | m :: ms when m = mode -> scanner.mode <- ms\n  | _ -> ()\n\nlet in_diamond_mode scanner =\n  match scanner.mode with\n  | Diamond :: _ -> true\n  | _ -> false\n\nlet in_jsx_mode scanner =\n  match scanner.mode with\n  | Jsx :: _ -> true\n  | _ -> false\n\nlet position scanner =\n  Lexing.\n    {\n      pos_fname = scanner.filename;\n      (* line number *)\n      pos_lnum = scanner.lnum;\n      (* offset of the beginning of the line (number\n         of bytes between the beginning of the scanner and the beginning\n         of the line) *)\n      pos_bol = scanner.line_offset;\n      (* [pos_cnum - pos_bol]  is the number of utf16 code units since line start *)\n      pos_cnum = scanner.line_offset + scanner.offset16;\n    }\n\n(* Small debugging util\n   ❯ echo 'let msg = \"hello\"' | ./lib/rescript.exe\n   let msg = \"hello\"\n   ^-^ let 0-3\n   let msg = \"hello\"\n     
  ^-^ msg 4-7\n   let msg = \"hello\"\n           ^ = 8-9\n   let msg = \"hello\"\n             ^-----^ string \"hello\" 10-17\n   let msg = \"hello\"\n                     ^ eof 18-18\n   let msg = \"hello\"\n*)\nlet _printDebug ~start_pos ~end_pos scanner token =\n  let open Lexing in\n  print_string scanner.src;\n  print_string ((String.make [@doesNotRaise]) start_pos.pos_cnum ' ');\n  print_char '^';\n  (match end_pos.pos_cnum - start_pos.pos_cnum with\n  | 0 -> if token = Token.Eof then () else assert false\n  | 1 -> ()\n  | n ->\n    print_string ((String.make [@doesNotRaise]) (n - 2) '-');\n    print_char '^');\n  print_char ' ';\n  print_string (Res_token.to_string token);\n  print_char ' ';\n  print_int start_pos.pos_cnum;\n  print_char '-';\n  print_int end_pos.pos_cnum;\n  print_endline \"\"\n[@@live]\n\nlet next scanner =\n  let next_offset = scanner.offset + 1 in\n  let utf16len =\n    match Ext_utf8.classify scanner.ch with\n    | Single _ | Invalid -> 1\n    | Leading (n, _) -> ( (((n + 1) / 2) [@doesNotRaise]))\n    | Cont _ -> 0\n  in\n  let newline =\n    scanner.ch = '\\n'\n    (* What about CRLF (\\r + \\n) on windows?\n       \\r\\n will always be terminated by a \\n\n       -> we can just bump the line count on \\n *)\n  in\n  if newline then (\n    scanner.line_offset <- next_offset;\n    scanner.offset16 <- 0;\n    scanner.lnum <- scanner.lnum + 1)\n  else scanner.offset16 <- scanner.offset16 + utf16len;\n  if next_offset < String.length scanner.src then (\n    scanner.offset <- next_offset;\n    scanner.ch <- String.unsafe_get scanner.src next_offset)\n  else (\n    scanner.offset <- String.length scanner.src;\n    scanner.offset16 <- scanner.offset - scanner.line_offset;\n    scanner.ch <- hacky_eof_char)\n\nlet next2 scanner =\n  next scanner;\n  next scanner\n\nlet next3 scanner =\n  next scanner;\n  next scanner;\n  next scanner\n\nlet peek scanner =\n  if scanner.offset + 1 < String.length scanner.src then\n    String.unsafe_get 
scanner.src (scanner.offset + 1)\n  else hacky_eof_char\n\nlet peek2 scanner =\n  if scanner.offset + 2 < String.length scanner.src then\n    String.unsafe_get scanner.src (scanner.offset + 2)\n  else hacky_eof_char\n\nlet peek3 scanner =\n  if scanner.offset + 3 < String.length scanner.src then\n    String.unsafe_get scanner.src (scanner.offset + 3)\n  else hacky_eof_char\n\nlet make ~filename src =\n  {\n    filename;\n    src;\n    err = (fun ~start_pos:_ ~end_pos:_ _ -> ());\n    ch = (if src = \"\" then hacky_eof_char else String.unsafe_get src 0);\n    offset = 0;\n    offset16 = 0;\n    line_offset = 0;\n    lnum = 1;\n    mode = [];\n  }\n\n(* generic helpers *)\n\nlet is_whitespace ch =\n  match ch with\n  | ' ' | '\\t' | '\\n' | '\\r' -> true\n  | _ -> false\n\nlet rec skip_whitespace scanner =\n  if is_whitespace scanner.ch then (\n    next scanner;\n    skip_whitespace scanner)\n\nlet digit_value ch =\n  match ch with\n  | '0' .. '9' -> Char.code ch - 48\n  | 'a' .. 'f' -> Char.code ch - Char.code 'a' + 10\n  | 'A' .. 'F' -> Char.code ch + 32 - Char.code 'a' + 10\n  | _ -> 16 (* larger than any legal value *)\n\n(* scanning helpers *)\n\nlet scan_identifier scanner =\n  let start_off = scanner.offset in\n  let rec skip_good_chars scanner =\n    match (scanner.ch, in_jsx_mode scanner) with\n    | ('A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '_' | '\\''), false ->\n      next scanner;\n      skip_good_chars scanner\n    | ('A' .. 'Z' | 'a' .. 'z' | '0' .. 
'9' | '_' | '\\'' | '-'), true ->\n      next scanner;\n      skip_good_chars scanner\n    | _ -> ()\n  in\n  skip_good_chars scanner;\n  let str =\n    (String.sub [@doesNotRaise]) scanner.src start_off\n      (scanner.offset - start_off)\n  in\n  if '{' == scanner.ch && str = \"list\" then (\n    next scanner;\n    (* TODO: this isn't great *)\n    Token.lookup_keyword \"list{\")\n  else Token.lookup_keyword str\n\nlet scan_digits scanner ~base =\n  if base <= 10 then\n    let rec loop scanner =\n      match scanner.ch with\n      | '0' .. '9' | '_' ->\n        next scanner;\n        loop scanner\n      | _ -> ()\n    in\n    loop scanner\n  else\n    let rec loop scanner =\n      match scanner.ch with\n      (* hex *)\n      | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' | '_' ->\n        next scanner;\n        loop scanner\n      | _ -> ()\n    in\n    loop scanner\n\n(* float: (0…9) { 0…9∣ _ } [. { 0…9∣ _ }] [(e∣ E) [+∣ -] (0…9) { 0…9∣ _ }]   *)\nlet scan_number scanner =\n  let start_off = scanner.offset in\n\n  (* integer part *)\n  let base =\n    match scanner.ch with\n    | '0' -> (\n      match peek scanner with\n      | 'x' | 'X' ->\n        next2 scanner;\n        16\n      | 'o' | 'O' ->\n        next2 scanner;\n        8\n      | 'b' | 'B' ->\n        next2 scanner;\n        2\n      | _ ->\n        next scanner;\n        8)\n    | _ -> 10\n  in\n  scan_digits scanner ~base;\n\n  (* *)\n  let is_float =\n    if '.' 
== scanner.ch then (\n      next scanner;\n      scan_digits scanner ~base;\n      true)\n    else false\n  in\n\n  (* exponent part *)\n  let is_float =\n    match scanner.ch with\n    | 'e' | 'E' | 'p' | 'P' ->\n      (match peek scanner with\n      | '+' | '-' -> next2 scanner\n      | _ -> next scanner);\n      scan_digits scanner ~base;\n      true\n    | _ -> is_float\n  in\n  let literal =\n    (String.sub [@doesNotRaise]) scanner.src start_off\n      (scanner.offset - start_off)\n  in\n\n  (* suffix *)\n  let suffix =\n    match scanner.ch with\n    | ('g' .. 'z' | 'G' .. 'Z') as ch ->\n      next scanner;\n      Some ch\n    | _ -> None\n  in\n  if is_float then Token.Float {f = literal; suffix}\n  else Token.Int {i = literal; suffix}\n\nlet scan_exotic_identifier scanner =\n  let start_pos = position scanner in\n  let start_off = scanner.offset in\n\n  next2 scanner;\n\n  let rec scan () =\n    match scanner.ch with\n    | '\"' -> next scanner\n    | '\\n' | '\\r' ->\n      (* line break *)\n      let end_pos = position scanner in\n      scanner.err ~start_pos ~end_pos\n        (Diagnostics.message \"A quoted identifier can't contain line breaks.\");\n      next scanner\n    | ch when ch == hacky_eof_char ->\n      let end_pos = position scanner in\n      scanner.err ~start_pos ~end_pos\n        (Diagnostics.message \"Did you forget a \\\" here?\")\n    | _ ->\n      next scanner;\n      scan ()\n  in\n  scan ();\n\n  let ident =\n    (String.sub [@doesNotRaise]) scanner.src start_off\n      (scanner.offset - start_off)\n  in\n  let name = Ext_ident.unwrap_uppercase_exotic ident in\n  if name = String.empty then (\n    let end_pos = position scanner in\n    scanner.err ~start_pos ~end_pos\n      (Diagnostics.message \"A quoted identifier can't be empty string.\");\n    Token.Lident ident)\n  else if Ext_ident.is_uident name then Token.Lident ident\n    (* Exotic ident with uppercase letter should be encoded to avoid confusing in OCaml parsetree *)\n  else 
Token.Lident name\n\nlet scan_string_escape_sequence ~start_pos scanner =\n  let scan ~n ~base ~max =\n    let rec loop n x =\n      if n == 0 then x\n      else\n        let d = digit_value scanner.ch in\n        if d >= base then (\n          let pos = position scanner in\n          let msg =\n            if scanner.ch == hacky_eof_char then \"unclosed escape sequence\"\n            else \"unknown escape sequence\"\n          in\n          scanner.err ~start_pos ~end_pos:pos (Diagnostics.message msg);\n          -1)\n        else\n          let () = next scanner in\n          loop (n - 1) ((x * base) + d)\n    in\n    let x = loop n 0 in\n    if x > max || (0xD800 <= x && x < 0xE000) then\n      let pos = position scanner in\n      let msg = \"escape sequence is invalid unicode code point\" in\n      scanner.err ~start_pos ~end_pos:pos (Diagnostics.message msg)\n  in\n  match scanner.ch with\n  (* \\ already consumed *)\n  | 'n' | 't' | 'b' | 'r' | '\\\\' | ' ' | '\\'' | '\"' -> next scanner\n  | '0'\n    when let c = peek scanner in\n         c < '0' || c > '9' ->\n    (* Allow \\0 *)\n    next scanner\n  | '0' .. '9' -> scan ~n:3 ~base:10 ~max:255\n  | 'x' ->\n    (* hex *)\n    next scanner;\n    scan ~n:2 ~base:16 ~max:255\n  | 'u' -> (\n    next scanner;\n    match scanner.ch with\n    | '{' -> (\n      (* unicode code point escape sequence: '\\u{7A}', one or more hex digits *)\n      next scanner;\n      let x = ref 0 in\n      while\n        match scanner.ch with\n        | '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' -> true\n        | _ -> false\n      do\n        x := (!x * 16) + digit_value scanner.ch;\n        next scanner\n      done;\n      (* consume '}' in '\\u{7A}' *)\n      match scanner.ch with\n      | '}' -> next scanner\n      | _ -> ())\n    | _ -> scan ~n:4 ~base:16 ~max:Res_utf8.max)\n  | _ ->\n    (* unknown escape sequence\n     * TODO: we should warn the user here. 
Let's not make it a hard error for now, for reason compat *)\n    (*\n      let pos = position scanner in\n      let msg =\n        if ch == -1 then \"unclosed escape sequence\"\n        else \"unknown escape sequence\"\n      in\n      scanner.err ~startPos ~endPos:pos (Diagnostics.message msg)\n     *)\n    ()\n\nlet scan_string scanner =\n  (* assumption: we've just matched a quote *)\n  let start_pos_with_quote = position scanner in\n  next scanner;\n\n  (* If the text needs changing, a buffer is used *)\n  let buf = Buffer.create 0 in\n  let first_char_offset = scanner.offset in\n  let last_offset_in_buf = ref first_char_offset in\n\n  let bring_buf_up_to_date ~start_offset =\n    let str_up_to_now =\n      (String.sub scanner.src !last_offset_in_buf\n         (start_offset - !last_offset_in_buf) [@doesNotRaise])\n    in\n    Buffer.add_string buf str_up_to_now;\n    last_offset_in_buf := start_offset\n  in\n\n  let result ~first_char_offset ~last_char_offset =\n    if Buffer.length buf = 0 then\n      (String.sub [@doesNotRaise]) scanner.src first_char_offset\n        (last_char_offset - first_char_offset)\n    else (\n      bring_buf_up_to_date ~start_offset:last_char_offset;\n      Buffer.contents buf)\n  in\n\n  let rec scan () =\n    match scanner.ch with\n    | '\"' ->\n      let last_char_offset = scanner.offset in\n      next scanner;\n      result ~first_char_offset ~last_char_offset\n    | '\\\\' ->\n      let start_pos = position scanner in\n      let start_offset = scanner.offset + 1 in\n      next scanner;\n      scan_string_escape_sequence ~start_pos scanner;\n      let end_offset = scanner.offset in\n      convert_octal_to_hex ~start_offset ~end_offset\n    | ch when ch == hacky_eof_char ->\n      let end_pos = position scanner in\n      scanner.err ~start_pos:start_pos_with_quote ~end_pos\n        Diagnostics.unclosed_string;\n      let last_char_offset = scanner.offset in\n      result ~first_char_offset ~last_char_offset\n    | _ ->\n      
next scanner;\n      scan ()\n  and convert_octal_to_hex ~start_offset ~end_offset =\n    let len = end_offset - start_offset in\n    let is_digit = function\n      | '0' .. '9' -> true\n      | _ -> false\n    in\n    let txt = scanner.src in\n    let is_numeric_escape =\n      len = 3\n      && (is_digit txt.[start_offset] [@doesNotRaise])\n      && (is_digit txt.[start_offset + 1] [@doesNotRaise])\n      && (is_digit txt.[start_offset + 2] [@doesNotRaise])\n    in\n    if is_numeric_escape then (\n      let str_decimal = (String.sub txt start_offset 3 [@doesNotRaise]) in\n      bring_buf_up_to_date ~start_offset;\n      let str_hex = Res_string.convert_decimal_to_hex ~str_decimal in\n      last_offset_in_buf := start_offset + 3;\n      Buffer.add_string buf str_hex;\n      scan ())\n    else scan ()\n  in\n  Token.String (scan ())\n\nlet scan_escape scanner =\n  (* '\\' consumed *)\n  let offset = scanner.offset - 1 in\n  let convert_number scanner ~n ~base =\n    let x = ref 0 in\n    for _ = n downto 1 do\n      let d = digit_value scanner.ch in\n      x := (!x * base) + d;\n      next scanner\n    done;\n    let c = !x in\n    if Res_utf8.is_valid_code_point c then c else Res_utf8.repl\n  in\n  let codepoint =\n    match scanner.ch with\n    | '0' .. '9' -> convert_number scanner ~n:3 ~base:10\n    | 'b' ->\n      next scanner;\n      8\n    | 'n' ->\n      next scanner;\n      10\n    | 'r' ->\n      next scanner;\n      13\n    | 't' ->\n      next scanner;\n      009\n    | 'x' ->\n      next scanner;\n      convert_number scanner ~n:2 ~base:16\n    | 'o' ->\n      next scanner;\n      convert_number scanner ~n:3 ~base:8\n    | 'u' -> (\n      next scanner;\n      match scanner.ch with\n      | '{' ->\n        (* unicode code point escape sequence: '\\u{7A}', one or more hex digits *)\n        next scanner;\n        let x = ref 0 in\n        while\n          match scanner.ch with\n          | '0' .. '9' | 'a' .. 'f' | 'A' .. 
'F' -> true\n          | _ -> false\n        do\n          x := (!x * 16) + digit_value scanner.ch;\n          next scanner\n        done;\n        (* consume '}' in '\\u{7A}' *)\n        (match scanner.ch with\n        | '}' -> next scanner\n        | _ -> ());\n        let c = !x in\n        if Res_utf8.is_valid_code_point c then c else Res_utf8.repl\n      | _ ->\n        (* unicode escape sequence: '\\u007A', exactly 4 hex digits *)\n        convert_number scanner ~n:4 ~base:16)\n    | ch ->\n      next scanner;\n      Char.code ch\n  in\n  let contents =\n    (String.sub [@doesNotRaise]) scanner.src offset (scanner.offset - offset)\n  in\n  next scanner;\n  (* Consume \\' *)\n  (* TODO: do we know it's \\' ? *)\n  Token.Codepoint {c = codepoint; original = contents}\n\nlet scan_single_line_comment scanner =\n  let start_off = scanner.offset in\n  let start_pos = position scanner in\n  let rec skip scanner =\n    match scanner.ch with\n    | '\\n' | '\\r' -> ()\n    | ch when ch == hacky_eof_char -> ()\n    | _ ->\n      next scanner;\n      skip scanner\n  in\n  skip scanner;\n  let end_pos = position scanner in\n  Token.Comment\n    (Comment.make_single_line_comment\n       ~loc:\n         Location.{loc_start = start_pos; loc_end = end_pos; loc_ghost = false}\n       ((String.sub [@doesNotRaise]) scanner.src start_off\n          (scanner.offset - start_off)))\n\nlet scan_multi_line_comment scanner =\n  (* assumption: we're only ever using this helper in `scan` after detecting a comment *)\n  let doc_comment =\n    peek2 scanner = '*' && peek3 scanner <> '/'\n    (* no /**/ *)\n  in\n  let standalone =\n    doc_comment && peek3 scanner = '*'\n    (* /*** *)\n  in\n  let content_start_off =\n    scanner.offset + if doc_comment then if standalone then 4 else 3 else 2\n  in\n  let start_pos = position scanner in\n  let rec scan ~depth =\n    (* invariant: depth > 0 right after this match. 
See assumption *)\n    match (scanner.ch, peek scanner) with\n    | '/', '*' ->\n      next2 scanner;\n      scan ~depth:(depth + 1)\n    | '*', '/' ->\n      next2 scanner;\n      if depth > 1 then scan ~depth:(depth - 1)\n    | ch, _ when ch == hacky_eof_char ->\n      let end_pos = position scanner in\n      scanner.err ~start_pos ~end_pos Diagnostics.unclosed_comment\n    | _ ->\n      next scanner;\n      scan ~depth\n  in\n  scan ~depth:0;\n  let length = scanner.offset - 2 - content_start_off in\n  let length = if length < 0 (* in case of EOF *) then 0 else length in\n  Token.Comment\n    (Comment.make_multi_line_comment ~doc_comment ~standalone\n       ~loc:\n         Location.\n           {\n             loc_start = start_pos;\n             loc_end = position scanner;\n             loc_ghost = false;\n           }\n       ((String.sub [@doesNotRaise]) scanner.src content_start_off length))\n\nlet scan_template_literal_token scanner =\n  let start_off = scanner.offset in\n\n  (* if starting } here, consume it *)\n  if scanner.ch == '}' then next scanner;\n\n  let start_pos = position scanner in\n\n  let rec scan () =\n    let last_pos = position scanner in\n    match scanner.ch with\n    | '`' ->\n      next scanner;\n      let contents =\n        (String.sub [@doesNotRaise]) scanner.src start_off\n          (scanner.offset - 1 - start_off)\n      in\n      Token.TemplateTail (contents, last_pos)\n    | '$' -> (\n      match peek scanner with\n      | '{' ->\n        next2 scanner;\n        let contents =\n          (String.sub [@doesNotRaise]) scanner.src start_off\n            (scanner.offset - 2 - start_off)\n        in\n        Token.TemplatePart (contents, last_pos)\n      | _ ->\n        next scanner;\n        scan ())\n    | '\\\\' -> (\n      match peek scanner with\n      | '`' | '\\\\' | '$' | '\\n' | '\\r' ->\n        (* line break *)\n        next2 scanner;\n        scan ()\n      | _ ->\n        next scanner;\n        scan ())\n    | ch when ch 
= hacky_eof_char ->\n      let end_pos = position scanner in\n      scanner.err ~start_pos ~end_pos Diagnostics.unclosed_template;\n      let contents =\n        (String.sub [@doesNotRaise]) scanner.src start_off\n          (max (scanner.offset - 1 - start_off) 0)\n      in\n      Token.TemplateTail (contents, last_pos)\n    | _ ->\n      next scanner;\n      scan ()\n  in\n  let token = scan () in\n  let end_pos = position scanner in\n  (start_pos, end_pos, token)\n\nlet rec scan scanner =\n  skip_whitespace scanner;\n  let start_pos = position scanner in\n\n  let token =\n    match scanner.ch with\n    (* peeking 0 char *)\n    | 'A' .. 'Z' | 'a' .. 'z' -> scan_identifier scanner\n    | '0' .. '9' -> scan_number scanner\n    | '`' ->\n      next scanner;\n      Token.Backtick\n    | '~' ->\n      next scanner;\n      Token.Tilde\n    | '?' ->\n      next scanner;\n      Token.Question\n    | ';' ->\n      next scanner;\n      Token.Semicolon\n    | '(' ->\n      next scanner;\n      Token.Lparen\n    | ')' ->\n      next scanner;\n      Token.Rparen\n    | '[' ->\n      next scanner;\n      Token.Lbracket\n    | ']' ->\n      next scanner;\n      Token.Rbracket\n    | '{' ->\n      next scanner;\n      Token.Lbrace\n    | '}' ->\n      next scanner;\n      Token.Rbrace\n    | ',' ->\n      next scanner;\n      Token.Comma\n    | '\"' -> scan_string scanner\n    (* peeking 1 char *)\n    | '_' -> (\n      match peek scanner with\n      | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' | '_' -> scan_identifier scanner\n      | _ ->\n        next scanner;\n        Token.Underscore)\n    | '#' -> (\n      match peek scanner with\n      | '=' ->\n        next2 scanner;\n        Token.HashEqual\n      | _ ->\n        next scanner;\n        Token.Hash)\n    | '*' -> (\n      match peek scanner with\n      | '*' ->\n        next2 scanner;\n        Token.Exponentiation\n      | '.' 
->\n        next2 scanner;\n        Token.AsteriskDot\n      | _ ->\n        next scanner;\n        Token.Asterisk)\n    | '@' -> (\n      match peek scanner with\n      | '@' ->\n        next2 scanner;\n        Token.AtAt\n      | _ ->\n        next scanner;\n        Token.At)\n    | '%' -> (\n      match peek scanner with\n      | '%' ->\n        next2 scanner;\n        Token.PercentPercent\n      | _ ->\n        next scanner;\n        Token.Percent)\n    | '|' -> (\n      match peek scanner with\n      | '|' ->\n        next2 scanner;\n        Token.Lor\n      | '>' ->\n        next2 scanner;\n        Token.BarGreater\n      | _ ->\n        next scanner;\n        Token.Bar)\n    | '&' -> (\n      match peek scanner with\n      | '&' ->\n        next2 scanner;\n        Token.Land\n      | _ ->\n        next scanner;\n        Token.Band)\n    | ':' -> (\n      match peek scanner with\n      | '=' ->\n        next2 scanner;\n        Token.ColonEqual\n      | '>' ->\n        next2 scanner;\n        Token.ColonGreaterThan\n      | _ ->\n        next scanner;\n        Token.Colon)\n    | '\\\\' -> scan_exotic_identifier scanner\n    | '/' -> (\n      match peek scanner with\n      | '/' ->\n        next2 scanner;\n        scan_single_line_comment scanner\n      | '*' -> scan_multi_line_comment scanner\n      | '.' ->\n        next2 scanner;\n        Token.ForwardslashDot\n      | _ ->\n        next scanner;\n        Token.Forwardslash)\n    | '-' -> (\n      match peek scanner with\n      | '.' ->\n        next2 scanner;\n        Token.MinusDot\n      | '>' ->\n        next2 scanner;\n        Token.MinusGreater\n      | _ ->\n        next scanner;\n        Token.Minus)\n    | '+' -> (\n      match peek scanner with\n      | '.' 
->\n        next2 scanner;\n        Token.PlusDot\n      | '+' ->\n        next2 scanner;\n        Token.PlusPlus\n      | '=' ->\n        next2 scanner;\n        Token.PlusEqual\n      | _ ->\n        next scanner;\n        Token.Plus)\n    | '>' -> (\n      match peek scanner with\n      | '=' when not (in_diamond_mode scanner) ->\n        next2 scanner;\n        Token.GreaterEqual\n      | _ ->\n        next scanner;\n        Token.GreaterThan)\n    | '<' when not (in_jsx_mode scanner) -> (\n      match peek scanner with\n      | '=' ->\n        next2 scanner;\n        Token.LessEqual\n      | _ ->\n        next scanner;\n        Token.LessThan)\n    (* special handling for JSX < *)\n    | '<' -> (\n      (* Imagine the following: <div><\n       * < indicates the start of a new jsx-element, the parser expects\n       * the name of a new element after the <\n       * Example: <div> <div\n       * But what if we have a / here: example </ in  <div></div>\n       * This signals a closing element. To simulate the two-token lookahead,\n       * the </ is emitted as a single new token LessThanSlash *)\n      next scanner;\n      skip_whitespace scanner;\n      match scanner.ch with\n      | '/' ->\n        next scanner;\n        Token.LessThanSlash\n      | '=' ->\n        next scanner;\n        Token.LessEqual\n      | _ -> Token.LessThan)\n    (* peeking 2 chars *)\n    | '.' -> (\n      match (peek scanner, peek2 scanner) with\n      | '.', '.' ->\n        next3 scanner;\n        Token.DotDotDot\n      | '.', _ ->\n        next2 scanner;\n        Token.DotDot\n      | _ ->\n        next scanner;\n        Token.Dot)\n    | '\\'' -> (\n      match (peek scanner, peek2 scanner) with\n      | '\\\\', '\"' ->\n        (* careful with this one! 
We're next-ing _once_ (not twice),\n           then relying on matching on the quote *)\n        next scanner;\n        SingleQuote\n      | '\\\\', _ ->\n        next2 scanner;\n        scan_escape scanner\n      | ch, '\\'' ->\n        let offset = scanner.offset + 1 in\n        next3 scanner;\n        Token.Codepoint\n          {\n            c = Char.code ch;\n            original = (String.sub [@doesNotRaise]) scanner.src offset 1;\n          }\n      | ch, _ ->\n        next scanner;\n        let offset = scanner.offset in\n        let offset16 = scanner.offset16 in\n        let codepoint, length =\n          Res_utf8.decode_code_point scanner.offset scanner.src\n            (String.length scanner.src)\n        in\n        for _ = 0 to length - 1 do\n          next scanner\n        done;\n        if scanner.ch = '\\'' then (\n          let contents =\n            (String.sub [@doesNotRaise]) scanner.src offset length\n          in\n          next scanner;\n          Token.Codepoint {c = codepoint; original = contents})\n        else (\n          scanner.ch <- ch;\n          scanner.offset <- offset;\n          scanner.offset16 <- offset16;\n          SingleQuote))\n    | '!' 
-> (\n      match (peek scanner, peek2 scanner) with\n      | '=', '=' ->\n        next3 scanner;\n        Token.BangEqualEqual\n      | '=', _ ->\n        next2 scanner;\n        Token.BangEqual\n      | _ ->\n        next scanner;\n        Token.Bang)\n    | '=' -> (\n      match (peek scanner, peek2 scanner) with\n      | '=', '=' ->\n        next3 scanner;\n        Token.EqualEqualEqual\n      | '=', _ ->\n        next2 scanner;\n        Token.EqualEqual\n      | '>', _ ->\n        next2 scanner;\n        Token.EqualGreater\n      | _ ->\n        next scanner;\n        Token.Equal)\n    (* special cases *)\n    | ch when ch == hacky_eof_char ->\n      next scanner;\n      Token.Eof\n    | ch ->\n      (* if we arrive here, we're dealing with an unknown character,\n       * report the error and continue scanning… *)\n      next scanner;\n      let end_pos = position scanner in\n      scanner.err ~start_pos ~end_pos (Diagnostics.unknown_uchar ch);\n      let _, _, token = scan scanner in\n      token\n  in\n  let end_pos = position scanner in\n  (* _printDebug ~startPos ~endPos scanner token; *)\n  (start_pos, end_pos, token)\n\n(* misc helpers used elsewhere *)\n\n(* Imagine: <div> <Navbar /> <\n * is `<` the start of a jsx-child? <div …\n * or is it the start of a closing tag?  
</div>\n * reconsiderLessThan peeks at the next token and\n * determines the correct token to disambiguate *)\nlet reconsider_less_than scanner =\n  (* < consumed *)\n  skip_whitespace scanner;\n  if scanner.ch == '/' then\n    let () = next scanner in\n    Token.LessThanSlash\n  else Token.LessThan\n\n(* If an operator has whitespace around both sides, it's a binary operator *)\n(* TODO: this helper seems out of place *)\nlet is_binary_op src start_cnum end_cnum =\n  if start_cnum == 0 then false\n  else (\n    (* we're gonna put some assertions and invariant checks here because this is\n       used outside of the scanner's normal invariant assumptions *)\n    assert (end_cnum >= 0);\n    assert (start_cnum > 0 && start_cnum < String.length src);\n    let left_ok = is_whitespace (String.unsafe_get src (start_cnum - 1)) in\n    (* we need some stronger confidence that endCnum is ok *)\n    let right_ok =\n      end_cnum >= String.length src\n      || is_whitespace (String.unsafe_get src end_cnum)\n    in\n    left_ok && right_ok)\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_scanner.mli",
    "content": "type mode = Jsx | Diamond\n\ntype char_encoding\n\ntype t = {\n  filename: string;\n  src: string;\n  mutable err:\n    start_pos:Lexing.position ->\n    end_pos:Lexing.position ->\n    Res_diagnostics.category ->\n    unit;\n  mutable ch: char_encoding; (* current character *)\n  mutable offset: int; (* current byte offset *)\n  mutable offset16: int;\n      (* current number of utf16 code units since line start *)\n  mutable line_offset: int; (* current line offset *)\n  mutable lnum: int; (* current line number *)\n  mutable mode: mode list;\n}\n\nval make : filename:string -> string -> t\n\n(* TODO: make this a record *)\nval scan : t -> Lexing.position * Lexing.position * Res_token.t\n\nval is_binary_op : string -> int -> int -> bool\n\nval set_jsx_mode : t -> unit\nval set_diamond_mode : t -> unit\nval pop_mode : t -> mode -> unit\n\nval reconsider_less_than : t -> Res_token.t\n\nval scan_template_literal_token :\n  t -> Lexing.position * Lexing.position * Res_token.t\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_string.ml",
    "content": "let hex_table =\n  [| '0'; '1'; '2'; '3'; '4'; '5'; '6'; '7'; '8'; '9'; 'a'; 'b'; 'c'; 'd'; 'e'; 'f'; |]\n  [@ocamlformat \"disable\"]\n\nlet convert_decimal_to_hex ~str_decimal =\n  try\n    let int_num = int_of_string str_decimal in\n    let c1 = Array.get hex_table (int_num lsr 4) in\n    let c2 = Array.get hex_table (int_num land 15) in\n    \"x\" ^ String.concat \"\" [String.make 1 c1; String.make 1 c2]\n  with Invalid_argument _ | Failure _ -> str_decimal\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_token.ml",
    "content": "module Comment = Res_comment\n\ntype t =\n  | Await\n  | Open\n  | True\n  | False\n  | Codepoint of {c: int; original: string}\n  | Int of {i: string; suffix: char option}\n  | Float of {f: string; suffix: char option}\n  | String of string\n  | Lident of string\n  | Uident of string\n  | As\n  | Dot\n  | DotDot\n  | DotDotDot\n  | Bang\n  | Semicolon\n  | Let\n  | And\n  | Rec\n  | Underscore\n  | SingleQuote\n  | Equal\n  | EqualEqual\n  | EqualEqualEqual\n  | Bar\n  | Lparen\n  | Rparen\n  | Lbracket\n  | Rbracket\n  | Lbrace\n  | Rbrace\n  | Colon\n  | Comma\n  | Eof\n  | Exception\n  | Backslash [@live]\n  | Forwardslash\n  | ForwardslashDot\n  | Asterisk\n  | AsteriskDot\n  | Exponentiation\n  | Minus\n  | MinusDot\n  | Plus\n  | PlusDot\n  | PlusPlus\n  | PlusEqual\n  | ColonGreaterThan\n  | GreaterThan\n  | LessThan\n  | LessThanSlash\n  | Hash\n  | HashEqual\n  | Assert\n  | Tilde\n  | Question\n  | If\n  | Else\n  | For\n  | In\n  | While\n  | Switch\n  | When\n  | EqualGreater\n  | MinusGreater\n  | External\n  | Typ\n  | Private\n  | Mutable\n  | Constraint\n  | Include\n  | Module\n  | Of\n  | Land\n  | Lor\n  | Band (* Bitwise and: & *)\n  | BangEqual\n  | BangEqualEqual\n  | LessEqual\n  | GreaterEqual\n  | ColonEqual\n  | At\n  | AtAt\n  | Percent\n  | PercentPercent\n  | Comment of Comment.t\n  | List\n  | TemplateTail of string * Lexing.position\n  | TemplatePart of string * Lexing.position\n  | Backtick\n  | BarGreater\n  | Try\n  | DocComment of Location.t * string\n  | ModuleComment of Location.t * string\n\nlet precedence = function\n  | HashEqual | ColonEqual -> 1\n  | Lor -> 2\n  | Land -> 3\n  | Equal | EqualEqual | EqualEqualEqual | LessThan | GreaterThan | BangEqual\n  | BangEqualEqual | LessEqual | GreaterEqual | BarGreater ->\n    4\n  | Plus | PlusDot | Minus | MinusDot | PlusPlus -> 5\n  | Asterisk | AsteriskDot | Forwardslash | ForwardslashDot -> 6\n  | Exponentiation -> 7\n  | MinusGreater -> 8\n  | Dot -> 9\n  | _ 
-> 0\n\nlet to_string = function\n  | Await -> \"await\"\n  | Open -> \"open\"\n  | True -> \"true\"\n  | False -> \"false\"\n  | Codepoint {original} -> \"codepoint '\" ^ original ^ \"'\"\n  | String s -> \"string \\\"\" ^ s ^ \"\\\"\"\n  | Lident str -> str\n  | Uident str -> str\n  | Dot -> \".\"\n  | DotDot -> \"..\"\n  | DotDotDot -> \"...\"\n  | Int {i} -> \"int \" ^ i\n  | Float {f} -> \"Float: \" ^ f\n  | Bang -> \"!\"\n  | Semicolon -> \";\"\n  | Let -> \"let\"\n  | And -> \"and\"\n  | Rec -> \"rec\"\n  | Underscore -> \"_\"\n  | SingleQuote -> \"'\"\n  | Equal -> \"=\"\n  | EqualEqual -> \"==\"\n  | EqualEqualEqual -> \"===\"\n  | Eof -> \"eof\"\n  | Bar -> \"|\"\n  | As -> \"as\"\n  | Lparen -> \"(\"\n  | Rparen -> \")\"\n  | Lbracket -> \"[\"\n  | Rbracket -> \"]\"\n  | Lbrace -> \"{\"\n  | Rbrace -> \"}\"\n  | ColonGreaterThan -> \":>\"\n  | Colon -> \":\"\n  | Comma -> \",\"\n  | Minus -> \"-\"\n  | MinusDot -> \"-.\"\n  | Plus -> \"+\"\n  | PlusDot -> \"+.\"\n  | PlusPlus -> \"++\"\n  | PlusEqual -> \"+=\"\n  | Backslash -> \"\\\\\"\n  | Forwardslash -> \"/\"\n  | ForwardslashDot -> \"/.\"\n  | Exception -> \"exception\"\n  | Hash -> \"#\"\n  | HashEqual -> \"#=\"\n  | GreaterThan -> \">\"\n  | LessThan -> \"<\"\n  | LessThanSlash -> \"</\"\n  | Asterisk -> \"*\"\n  | AsteriskDot -> \"*.\"\n  | Exponentiation -> \"**\"\n  | Assert -> \"assert\"\n  | Tilde -> \"tilde\"\n  | Question -> \"?\"\n  | If -> \"if\"\n  | Else -> \"else\"\n  | For -> \"for\"\n  | In -> \"in\"\n  | While -> \"while\"\n  | Switch -> \"switch\"\n  | When -> \"when\"\n  | EqualGreater -> \"=>\"\n  | MinusGreater -> \"->\"\n  | External -> \"external\"\n  | Typ -> \"type\"\n  | Private -> \"private\"\n  | Constraint -> \"constraint\"\n  | Mutable -> \"mutable\"\n  | Include -> \"include\"\n  | Module -> \"module\"\n  | Of -> \"of\"\n  | Lor -> \"||\"\n  | Band -> \"&\"\n  | Land -> \"&&\"\n  | BangEqual -> \"!=\"\n  | BangEqualEqual -> \"!==\"\n  | GreaterEqual -> \">=\"\n  | 
LessEqual -> \"<=\"\n  | ColonEqual -> \":=\"\n  | At -> \"@\"\n  | AtAt -> \"@@\"\n  | Percent -> \"%\"\n  | PercentPercent -> \"%%\"\n  | Comment c -> \"Comment\" ^ Comment.to_string c\n  | List -> \"list{\"\n  | TemplatePart (text, _) -> text ^ \"${\"\n  | TemplateTail (text, _) -> \"TemplateTail(\" ^ text ^ \")\"\n  | Backtick -> \"`\"\n  | BarGreater -> \"|>\"\n  | Try -> \"try\"\n  | DocComment (_loc, s) -> \"DocComment \" ^ s\n  | ModuleComment (_loc, s) -> \"ModuleComment \" ^ s\n\nlet keyword_table = function\n  | \"and\" -> And\n  | \"as\" -> As\n  | \"assert\" -> Assert\n  | \"await\" -> Await\n  | \"constraint\" -> Constraint\n  | \"else\" -> Else\n  | \"exception\" -> Exception\n  | \"external\" -> External\n  | \"false\" -> False\n  | \"for\" -> For\n  | \"if\" -> If\n  | \"in\" -> In\n  | \"include\" -> Include\n  | \"let\" -> Let\n  | \"list{\" -> List\n  | \"module\" -> Module\n  | \"mutable\" -> Mutable\n  | \"of\" -> Of\n  | \"open\" -> Open\n  | \"private\" -> Private\n  | \"rec\" -> Rec\n  | \"switch\" -> Switch\n  | \"true\" -> True\n  | \"try\" -> Try\n  | \"type\" -> Typ\n  | \"when\" -> When\n  | \"while\" -> While\n  | _ -> raise Not_found\n[@@raises Not_found]\n\nlet is_keyword = function\n  | Await | And | As | Assert | Constraint | Else | Exception | External | False\n  | For | If | In | Include | Land | Let | List | Lor | Module | Mutable | Of\n  | Open | Private | Rec | Switch | True | Try | Typ | When | While ->\n    true\n  | _ -> false\n\nlet lookup_keyword str =\n  try keyword_table str\n  with Not_found -> (\n    match str.[0] [@doesNotRaise] with\n    | 'A' .. 'Z' -> Uident str\n    | _ -> Lident str)\n\nlet is_keyword_txt str =\n  try\n    let _ = keyword_table str in\n    true\n  with Not_found -> false\n\nlet catch = Lident \"catch\"\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_uncurried.ml",
    "content": "(* For parsing *)\nlet from_dotted ~dotted = function\n  | Config.Legacy -> dotted\n  | Swap -> not dotted\n  | Uncurried -> true\n\n(* For printing *)\nlet get_dotted ~uncurried = function\n  | Config.Legacy -> uncurried\n  | Swap -> not uncurried\n  | Uncurried -> false\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_utf8.ml",
    "content": "(* https://tools.ietf.org/html/rfc3629#section-10 *)\n(* let bom = 0xFEFF *)\n\nlet repl = 0xFFFD\n\n(* let min = 0x0000 *)\nlet max = 0x10FFFF\n\nlet surrogate_min = 0xD800\nlet surrogate_max = 0xDFFF\n\n(*\n * Char. number range  |        UTF-8 octet sequence\n *       (hexadecimal)    |              (binary)\n *    --------------------+---------------------------------------------\n *    0000 0000-0000 007F | 0xxxxxxx\n *    0000 0080-0000 07FF | 110xxxxx 10xxxxxx\n *    0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx\n *    0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx\n *)\nlet h2 = 0b1100_0000\nlet h3 = 0b1110_0000\nlet h4 = 0b1111_0000\n\nlet cont_mask = 0b0011_1111\n\ntype category = {low: int; high: int; size: int}\n\nlet locb = 0b1000_0000\nlet hicb = 0b1011_1111\n\nlet category_table = [|\n  (* 0 *) {low = -1; high= -1; size= 1}; (* invalid *)\n  (* 1 *) {low = 1; high= -1; size= 1}; (* ascii *)\n  (* 2 *) {low = locb; high= hicb; size= 2};\n  (* 3 *) {low = 0xA0; high= hicb; size= 3};\n  (* 4 *) {low = locb; high= hicb; size= 3};\n  (* 5 *) {low = locb; high= 0x9F; size= 3};\n  (* 6 *) {low = 0x90; high= hicb;  size= 4};\n  (* 7 *) {low = locb; high= hicb; size= 4};\n  (* 8 *) {low = locb; high= 0x8F; size= 4};\n|] [@@ocamlformat \"disable\"]\n\nlet categories = [|\n  1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1;\n  1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1;\n  1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1;\n  1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1;\n  1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1;\n  1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1;\n  1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1;\n  1; 1; 1; 1; 1; 1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1 ;1;\n\n  0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;\n  0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;\n  0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;\n  0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;0; 0; 0; 0;\n  (* surrogate range U+D800 - U+DFFFF = 55296 - 917503 *)\n  0; 0; 2; 
2;2; 2; 2; 2;2; 2; 2; 2;2; 2; 2; 2;\n   2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2; 2;\n   3; 4; 4; 4; 4; 4; 4; 4; 4; 4; 4; 4; 4; 5; 4; 4;\n  6; 7; 7 ;7; 8; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0; 0;\n|] [@@ocamlformat \"disable\"]\n\nlet decode_code_point i s len =\n  if len < 1 then (repl, 1)\n  else\n    let first = int_of_char (String.unsafe_get s i) in\n    if first < 128 then (first, 1)\n    else\n      let index = Array.unsafe_get categories first in\n      if index = 0 then (repl, 1)\n      else\n        let cat = Array.unsafe_get category_table index in\n        if len < i + cat.size then (repl, 1)\n        else if cat.size == 2 then\n          let c1 = int_of_char (String.unsafe_get s (i + 1)) in\n          if c1 < cat.low || cat.high < c1 then (repl, 1)\n          else\n            let i1 = c1 land 0b00111111 in\n            let i0 = (first land 0b00011111) lsl 6 in\n            let uc = i0 lor i1 in\n            (uc, 2)\n        else if cat.size == 3 then\n          let c1 = int_of_char (String.unsafe_get s (i + 1)) in\n          let c2 = int_of_char (String.unsafe_get s (i + 2)) in\n          if c1 < cat.low || cat.high < c1 || c2 < locb || hicb < c2 then\n            (repl, 1)\n          else\n            let i0 = (first land 0b00001111) lsl 12 in\n            let i1 = (c1 land 0b00111111) lsl 6 in\n            let i2 = c2 land 0b00111111 in\n            let uc = i0 lor i1 lor i2 in\n            (uc, 3)\n        else\n          let c1 = int_of_char (String.unsafe_get s (i + 1)) in\n          let c2 = int_of_char (String.unsafe_get s (i + 2)) in\n          let c3 = int_of_char (String.unsafe_get s (i + 3)) in\n          if\n            c1 < cat.low || cat.high < c1 || c2 < locb || hicb < c2 || c3 < locb\n            || hicb < c3\n          then (repl, 1)\n          else\n            let i1 = (c1 land 0x3f) lsl 12 in\n            let i2 = (c2 land 0x3f) lsl 6 in\n            let i3 = c3 land 0x3f in\n            let i0 = (first land 0x07) lsl 18 in\n            
let uc = i0 lor i3 lor i2 lor i1 in\n            (uc, 4)\n\nlet encode_code_point c =\n  if c <= 127 then (\n    let bytes = (Bytes.create [@doesNotRaise]) 1 in\n    Bytes.unsafe_set bytes 0 (Char.unsafe_chr c);\n    Bytes.unsafe_to_string bytes)\n  else if c <= 2047 then (\n    let bytes = (Bytes.create [@doesNotRaise]) 2 in\n    Bytes.unsafe_set bytes 0 (Char.unsafe_chr (h2 lor (c lsr 6)));\n    Bytes.unsafe_set bytes 1\n      (Char.unsafe_chr (0b1000_0000 lor (c land cont_mask)));\n    Bytes.unsafe_to_string bytes)\n  else if c <= 65535 then (\n    let bytes = (Bytes.create [@doesNotRaise]) 3 in\n    Bytes.unsafe_set bytes 0 (Char.unsafe_chr (h3 lor (c lsr 12)));\n    Bytes.unsafe_set bytes 1\n      (Char.unsafe_chr (0b1000_0000 lor ((c lsr 6) land cont_mask)));\n    Bytes.unsafe_set bytes 2\n      (Char.unsafe_chr (0b1000_0000 lor (c land cont_mask)));\n    Bytes.unsafe_to_string bytes)\n  else\n    (* if c <= max then *)\n    let bytes = (Bytes.create [@doesNotRaise]) 4 in\n    Bytes.unsafe_set bytes 0 (Char.unsafe_chr (h4 lor (c lsr 18)));\n    Bytes.unsafe_set bytes 1\n      (Char.unsafe_chr (0b1000_0000 lor ((c lsr 12) land cont_mask)));\n    Bytes.unsafe_set bytes 2\n      (Char.unsafe_chr (0b1000_0000 lor ((c lsr 6) land cont_mask)));\n    Bytes.unsafe_set bytes 3\n      (Char.unsafe_chr (0b1000_0000 lor (c land cont_mask)));\n    Bytes.unsafe_to_string bytes\n\nlet is_valid_code_point c =\n  (0 <= c && c < surrogate_min) || (surrogate_max < c && c <= max)\n"
  },
  {
    "path": "analysis/vendor/res_syntax/res_utf8.mli",
    "content": "val repl : int\n\nval max : int\n\nval decode_code_point : int -> string -> int -> int * int\n\nval encode_code_point : int -> string\n\nval is_valid_code_point : int -> bool\n"
  },
  {
    "path": "analysis.opam",
    "content": "# This file is generated by dune, edit dune-project instead\nopam-version: \"2.0\"\nsynopsis: \"ReScript Analysis\"\nmaintainer: [\"ReScript Team\"]\nauthors: [\"ReScript Team\"]\nhomepage: \"https://github.com/rescript-lang/rescript-vscode\"\nbug-reports: \"https://github.com/rescript-lang/rescript-vscode/issues\"\ndepends: [\n  \"ocaml\" {>= \"4.10\"}\n  \"cppo\" {= \"1.6.9\"}\n  \"dune\"\n]\nbuild: [\n  [\"dune\" \"subst\"] {pinned}\n  [\n    \"dune\"\n    \"build\"\n    \"-p\"\n    name\n    \"-j\"\n    jobs\n    \"@install\"\n    \"@runtest\" {with-test}\n    \"@doc\" {with-doc}\n  ]\n]\n"
  },
  {
    "path": "client/.vscode-test.mjs",
    "content": "import { defineConfig } from \"@vscode/test-cli\";\nimport * as path from \"path\";\n\nexport default defineConfig([\n  {\n    label: \"example-project\",\n    files: \"out/client/src/test/suite/exampleProject.test.js\",\n    version: \"stable\",\n    extensionDevelopmentPath: path.resolve(import.meta.dirname, \"..\"),\n    workspaceFolder: path.resolve(\n      import.meta.dirname,\n      \"../analysis/examples/example-project\",\n    ),\n    mocha: {\n      ui: \"tdd\",\n      timeout: 60000,\n    },\n    launchArgs: [\"--disable-extensions\"],\n  },\n  {\n    label: \"monorepo-root\",\n    files: \"out/client/src/test/suite/monorepoRoot.test.js\",\n    version: \"stable\",\n    extensionDevelopmentPath: path.resolve(import.meta.dirname, \"..\"),\n    workspaceFolder: path.resolve(\n      import.meta.dirname,\n      \"../analysis/examples/monorepo-project\",\n    ),\n    mocha: {\n      ui: \"tdd\",\n      timeout: 60000,\n    },\n    launchArgs: [\"--disable-extensions\"],\n  },\n  {\n    label: \"monorepo-subpackage\",\n    files: \"out/client/src/test/suite/monorepoSubpackage.test.js\",\n    version: \"stable\",\n    extensionDevelopmentPath: path.resolve(import.meta.dirname, \"..\"),\n    workspaceFolder: path.resolve(\n      import.meta.dirname,\n      \"../analysis/examples/monorepo-project/packages/app\",\n    ),\n    mocha: {\n      ui: \"tdd\",\n      timeout: 60000,\n    },\n    launchArgs: [\"--disable-extensions\"],\n  },\n  {\n    label: \"rescript9-project\",\n    files: \"out/client/src/test/suite/rescript9.test.js\",\n    version: \"stable\",\n    extensionDevelopmentPath: path.resolve(import.meta.dirname, \"..\"),\n    workspaceFolder: path.resolve(\n      import.meta.dirname,\n      \"../analysis/examples/rescript9-project\",\n    ),\n    mocha: {\n      ui: \"tdd\",\n      timeout: 60000,\n    },\n    launchArgs: [\"--disable-extensions\"],\n  },\n]);\n"
  },
  {
    "path": "client/package.json",
    "content": "{\n  \"name\": \"rescript-vscode-client\",\n  \"private\": true,\n  \"version\": \"0.0.0\",\n  \"description\": \"ReScript's language-client\",\n  \"keywords\": [],\n  \"author\": \"ReScript Team\",\n  \"license\": \"MIT\",\n  \"scripts\": {\n    \"test\": \"vscode-test\"\n  },\n  \"dependencies\": {\n    \"vscode-languageclient\": \"8.1.0-next.5\"\n  },\n  \"devDependencies\": {\n    \"@types/mocha\": \"^10.0.10\",\n    \"@vscode/test-cli\": \"^0.0.12\",\n    \"@vscode/test-electron\": \"^2.5.2\"\n  }\n}\n"
  },
  {
    "path": "client/src/commands/code_analysis.ts",
    "content": "import * as cp from \"child_process\";\nimport * as fs from \"fs\";\nimport * as path from \"path\";\nimport * as semver from \"semver\";\nimport {\n  window,\n  DiagnosticCollection,\n  Diagnostic,\n  Range,\n  Position,\n  DiagnosticSeverity,\n  Uri,\n  CodeAction,\n  CodeActionKind,\n  WorkspaceEdit,\n  OutputChannel,\n  StatusBarItem,\n} from \"vscode\";\nimport { NormalizedPath, normalizePath } from \"../utils\";\nimport {\n  findBinary,\n  getMonorepoRootFromBinaryPath,\n} from \"../../../shared/src/findBinary\";\nimport { findProjectRootOfFile } from \"../../../shared/src/projectRoots\";\n\n// Reanalyze server constants (matches rescript monorepo)\nconst REANALYZE_SOCKET_FILENAME = \".rescript-reanalyze.sock\";\nconst REANALYZE_SERVER_MIN_VERSION = \"12.1.0\";\n\n// Server state per monorepo root\nexport interface ReanalyzeServerState {\n  process: cp.ChildProcess | null;\n  monorepoRoot: string;\n  socketPath: string;\n  startedByUs: boolean;\n  outputChannel: OutputChannel | null;\n}\n\n// Map from monorepo root to server state\nexport const reanalyzeServers: Map<string, ReanalyzeServerState> = new Map();\n\n// Check if ReScript version supports reanalyze-server\nconst supportsReanalyzeServer = async (\n  monorepoRootPath: string | null,\n): Promise<boolean> => {\n  if (monorepoRootPath === null) return false;\n\n  try {\n    const rescriptDir = path.join(monorepoRootPath, \"node_modules\", \"rescript\");\n    const packageJsonPath = path.join(rescriptDir, \"package.json\");\n    const packageJson = JSON.parse(\n      await fs.promises.readFile(packageJsonPath, \"utf-8\"),\n    );\n    const version = packageJson.version;\n\n    return (\n      semver.valid(version) != null &&\n      semver.gte(version, REANALYZE_SERVER_MIN_VERSION)\n    );\n  } catch {\n    return false;\n  }\n};\n\n// Get socket path for a monorepo root\nconst getSocketPath = (monorepoRoot: string): string => {\n  return path.join(monorepoRoot, 
REANALYZE_SOCKET_FILENAME);\n};\n\n// Check if server is running (socket file exists)\nconst isServerRunning = (monorepoRoot: string): boolean => {\n  const socketPath = getSocketPath(monorepoRoot);\n  return fs.existsSync(socketPath);\n};\n\n// Start reanalyze server for a monorepo.\n// Note: This should only be called after supportsReanalyzeServer() returns true,\n// which ensures ReScript >= 12.1.0 where the reanalyze-server subcommand exists.\nexport const startReanalyzeServer = async (\n  monorepoRoot: string,\n  binaryPath: string,\n  clientOutputChannel?: OutputChannel,\n): Promise<ReanalyzeServerState | null> => {\n  // Check if already running (either by us or externally)\n  if (isServerRunning(monorepoRoot)) {\n    // Check if we have a record of starting it\n    const existing = reanalyzeServers.get(monorepoRoot);\n    if (existing) {\n      existing.outputChannel?.appendLine(\n        \"[info] Server already running (started by us)\",\n      );\n      return existing;\n    }\n    // Server running but not started by us - just record it\n    clientOutputChannel?.appendLine(\n      `[info] Found existing reanalyze-server for ${path.basename(monorepoRoot)} (not started by extension)`,\n    );\n    const state: ReanalyzeServerState = {\n      process: null,\n      monorepoRoot,\n      socketPath: getSocketPath(monorepoRoot),\n      startedByUs: false,\n      outputChannel: null,\n    };\n    reanalyzeServers.set(monorepoRoot, state);\n    return state;\n  }\n\n  // Create output channel for server logs\n  const outputChannel = window.createOutputChannel(\n    `ReScript Reanalyze Server (${path.basename(monorepoRoot)})`,\n  );\n\n  outputChannel.appendLine(\n    `[info] Starting reanalyze-server in ${monorepoRoot}`,\n  );\n\n  // Start the server\n  const serverProcess = cp.spawn(binaryPath, [\"reanalyze-server\"], {\n    cwd: monorepoRoot,\n    stdio: [\"ignore\", \"pipe\", \"pipe\"],\n  });\n\n  if (serverProcess.pid == null) {\n    
outputChannel.appendLine(\"[error] Failed to start reanalyze-server\");\n    return null;\n  }\n\n  const state: ReanalyzeServerState = {\n    process: serverProcess,\n    monorepoRoot,\n    socketPath: getSocketPath(monorepoRoot),\n    startedByUs: true,\n    outputChannel,\n  };\n\n  // Log stdout and stderr to output channel\n  serverProcess.stdout?.on(\"data\", (data) => {\n    outputChannel.appendLine(`[stdout] ${data.toString().trim()}`);\n  });\n\n  serverProcess.stderr?.on(\"data\", (data) => {\n    outputChannel.appendLine(`[stderr] ${data.toString().trim()}`);\n  });\n\n  serverProcess.on(\"error\", (err) => {\n    outputChannel.appendLine(`[error] Server error: ${err.message}`);\n  });\n\n  serverProcess.on(\"exit\", (code, signal) => {\n    outputChannel.appendLine(\n      `[info] Server exited with code ${code}, signal ${signal}`,\n    );\n    reanalyzeServers.delete(monorepoRoot);\n  });\n\n  reanalyzeServers.set(monorepoRoot, state);\n\n  // Wait briefly for socket file to be created (up to 3 seconds)\n  for (let i = 0; i < 30; i++) {\n    if (isServerRunning(monorepoRoot)) {\n      outputChannel.appendLine(`[info] Server socket ready`);\n      return state;\n    }\n    await new Promise((resolve) => setTimeout(resolve, 100));\n  }\n\n  outputChannel.appendLine(\n    \"[warn] Server started but socket not found after 3 seconds\",\n  );\n  return state;\n};\n\n// Clean up socket file if it exists\nconst cleanupSocketFile = (socketPath: string): void => {\n  try {\n    if (fs.existsSync(socketPath)) {\n      fs.unlinkSync(socketPath);\n    }\n  } catch {\n    // Ignore errors during cleanup\n  }\n};\n\n// Stop reanalyze server for a monorepo (only if we started it)\nexport const stopReanalyzeServer = (\n  monorepoRoot: string | null,\n  clientOutputChannel?: OutputChannel,\n): void => {\n  if (monorepoRoot == null) return;\n\n  const state = reanalyzeServers.get(monorepoRoot);\n  if (!state) return;\n\n  // Only kill the process if we started it\n  if 
(state.startedByUs && state.process != null) {\n    state.process.kill();\n    state.outputChannel?.appendLine(\"[info] Server stopped by extension\");\n    // Clean up socket file to prevent stale socket issues\n    cleanupSocketFile(state.socketPath);\n  } else if (!state.startedByUs) {\n    clientOutputChannel?.appendLine(\n      `[info] Leaving external reanalyze-server running for ${path.basename(monorepoRoot)}`,\n    );\n  }\n\n  reanalyzeServers.delete(monorepoRoot);\n};\n\n// Stop all servers we started\nexport const stopAllReanalyzeServers = (): void => {\n  for (const [_monorepoRoot, state] of reanalyzeServers) {\n    if (state.startedByUs && state.process != null) {\n      state.process.kill();\n      state.outputChannel?.appendLine(\"[info] Server stopped by extension\");\n      // Clean up socket file to prevent stale socket issues\n      cleanupSocketFile(state.socketPath);\n    }\n  }\n  reanalyzeServers.clear();\n};\n\n// Show server log for a monorepo\n// Returns true if the output channel was shown, false otherwise\n// This is an async function because it may need to find the binary to derive monorepo root\nexport const showReanalyzeServerLog = async (\n  monorepoRoot: string | null,\n): Promise<boolean> => {\n  if (monorepoRoot == null) {\n    // Try to find any running server\n    const firstServer = reanalyzeServers.values().next().value;\n    if (firstServer?.outputChannel) {\n      firstServer.outputChannel.show();\n      return true;\n    } else {\n      window.showInformationMessage(\n        \"No reanalyze server is currently running.\",\n      );\n      return false;\n    }\n  }\n\n  // First try direct lookup\n  let state = reanalyzeServers.get(monorepoRoot);\n  if (state?.outputChannel) {\n    state.outputChannel.show();\n    return true;\n  }\n\n  // If not found, try to derive monorepo root from binary path\n  // (the server is registered under monorepo root, not subpackage root)\n  const binaryPath = await findBinary({\n    
projectRootPath: monorepoRoot,\n    binary: \"rescript-tools.exe\",\n  });\n  if (binaryPath != null) {\n    const derivedMonorepoRoot = getMonorepoRootFromBinaryPath(binaryPath);\n    if (derivedMonorepoRoot != null && derivedMonorepoRoot !== monorepoRoot) {\n      state = reanalyzeServers.get(derivedMonorepoRoot);\n      if (state?.outputChannel) {\n        state.outputChannel.show();\n        return true;\n      }\n    }\n  }\n\n  window.showInformationMessage(\n    `No reanalyze server log available for ${path.basename(monorepoRoot)}`,\n  );\n  return false;\n};\n\nexport let statusBarItem = {\n  setToStopText: (codeAnalysisRunningStatusBarItem: StatusBarItem) => {\n    codeAnalysisRunningStatusBarItem.text = \"$(debug-stop) Stop Code Analyzer\";\n    codeAnalysisRunningStatusBarItem.tooltip = null;\n  },\n  setToRunningText: (codeAnalysisRunningStatusBarItem: StatusBarItem) => {\n    codeAnalysisRunningStatusBarItem.text =\n      \"$(loading~spin) Running code analysis...\";\n    codeAnalysisRunningStatusBarItem.tooltip = null;\n  },\n  setToFailed: (codeAnalysisRunningStatusBarItem: StatusBarItem) => {\n    codeAnalysisRunningStatusBarItem.text = \"$(alert) Failed\";\n    codeAnalysisRunningStatusBarItem.tooltip =\n      \"Something went wrong when running the code analysis.\";\n  },\n};\n\nexport type DiagnosticsResultCodeActionsMap = Map<\n  string,\n  { range: Range; codeAction: CodeAction }[]\n>;\n\nexport type DiagnosticsResultFormat = Array<{\n  name: string;\n  kind: string;\n  file: string;\n  range: [number, number, number, number];\n  message: string;\n  annotate?: {\n    line: number;\n    character: number;\n    text: string;\n    action: string;\n  };\n}>;\n\nenum ClassifiedMessage {\n  Removable,\n  Default,\n}\n\nlet classifyMessage = (msg: string) => {\n  if (\n    msg.endsWith(\" is never used\") ||\n    msg.endsWith(\" is never used and could have side effects\") ||\n    msg.endsWith(\" has no side effects and can be removed\")\n  ) {\n    
return ClassifiedMessage.Removable;\n  }\n\n  return ClassifiedMessage.Default;\n};\n\nlet resultsToDiagnostics = (\n  results: DiagnosticsResultFormat,\n  diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap,\n): {\n  diagnosticsMap: Map<string, Diagnostic[]>;\n} => {\n  let diagnosticsMap: Map<string, Diagnostic[]> = new Map();\n\n  results.forEach((item) => {\n    {\n      let startPos: Position, endPos: Position;\n      let [startLine, startCharacter, endLine, endCharacter] = item.range;\n\n      // Detect if this diagnostic is for the entire file. If so, reanalyze will\n      // say that the issue is on line -1. This code below ensures\n      // that the full file is highlighted, if that's the case.\n      if (startLine < 0 || endLine < 0) {\n        startPos = new Position(0, 0);\n        endPos = new Position(99999, 0);\n      } else {\n        startPos = new Position(startLine, startCharacter);\n        endPos = new Position(endLine, endCharacter);\n      }\n\n      let issueLocationRange = new Range(startPos, endPos);\n      let diagnosticText = item.message.trim();\n\n      let diagnostic = new Diagnostic(\n        issueLocationRange,\n        diagnosticText,\n        DiagnosticSeverity.Warning,\n      );\n\n      // Don't show reports about optional arguments.\n      if (item.name.toLowerCase().includes(\"unused argument\")) {\n        return;\n      }\n\n      if (diagnosticsMap.has(item.file)) {\n        diagnosticsMap.get(item.file).push(diagnostic);\n      } else {\n        diagnosticsMap.set(item.file, [diagnostic]);\n      }\n\n      // If reanalyze suggests a fix, we'll set that up as a refactor code\n      // action in VSCode. This way, it'll be easy to suppress the issue\n      // reported if wanted. 
We also save the range of the issue, so we can\n      // leverage that to make looking up the code actions for each cursor\n      // position very cheap.\n      if (item.annotate != null) {\n        {\n          let { line, character, text, action } = item.annotate;\n          let codeAction = new CodeAction(action);\n          codeAction.kind = CodeActionKind.RefactorRewrite;\n\n          let codeActionEdit = new WorkspaceEdit();\n\n          codeActionEdit.replace(\n            Uri.parse(item.file),\n            // Make sure the full line is replaced\n\n            new Range(\n              new Position(line, character),\n              new Position(line, character),\n            ),\n            // reanalyze seems to add two extra spaces at the start of the line\n            // content to replace.\n            text,\n          );\n\n          codeAction.edit = codeActionEdit;\n\n          if (diagnosticsResultCodeActions.has(item.file)) {\n            diagnosticsResultCodeActions\n              .get(item.file)\n              .push({ range: issueLocationRange, codeAction });\n          } else {\n            diagnosticsResultCodeActions.set(item.file, [\n              { range: issueLocationRange, codeAction },\n            ]);\n          }\n        }\n      }\n\n      // This heuristic below helps only target dead code that can be removed\n      // safely by just removing its text.\n      if (classifyMessage(item.message) === ClassifiedMessage.Removable) {\n        {\n          let codeAction = new CodeAction(\"Remove unused\");\n          codeAction.kind = CodeActionKind.RefactorRewrite;\n\n          let codeActionEdit = new WorkspaceEdit();\n\n          codeActionEdit.replace(\n            Uri.parse(item.file),\n            new Range(\n              new Position(item.range[0], item.range[1]),\n              new Position(item.range[2], item.range[3]),\n            ),\n            \"\",\n          );\n\n          codeAction.command = {\n            command: 
\"rescript-vscode.clear_diagnostic\",\n            title: \"Clear diagnostic\",\n            arguments: [diagnostic],\n          };\n\n          codeAction.edit = codeActionEdit;\n\n          if (diagnosticsResultCodeActions.has(item.file)) {\n            diagnosticsResultCodeActions\n              .get(item.file)\n              .push({ range: issueLocationRange, codeAction });\n          } else {\n            diagnosticsResultCodeActions.set(item.file, [\n              { range: issueLocationRange, codeAction },\n            ]);\n          }\n        }\n      }\n    }\n  });\n\n  return {\n    diagnosticsMap,\n  };\n};\n\n// Returns the monorepo root path if a reanalyze server was started, null otherwise.\n// This allows the caller to track which server to stop later.\nexport const runCodeAnalysisWithReanalyze = async (\n  diagnosticsCollection: DiagnosticCollection,\n  diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap,\n  outputChannel: OutputChannel,\n  codeAnalysisRunningStatusBarItem: StatusBarItem,\n): Promise<string | null> => {\n  let currentDocument = window.activeTextEditor?.document;\n  if (!currentDocument) {\n    window.showErrorMessage(\"No active document found.\");\n    return null;\n  }\n\n  let projectRootPath: NormalizedPath | null = normalizePath(\n    findProjectRootOfFile(currentDocument.uri.fsPath),\n  );\n\n  // findBinary walks up the directory tree to find node_modules/rescript,\n  // so it works correctly for monorepos (finds the workspace root's binary)\n  // Note: rescript-tools.exe (with reanalyze command) is only available in ReScript 12+\n  const binaryPath: string | null = await findBinary({\n    projectRootPath,\n    binary: \"rescript-tools.exe\",\n  });\n\n  if (binaryPath === null) {\n    outputChannel.appendLine(\n      `[error] rescript-tools.exe not found for project root: ${projectRootPath}. 
Code analysis requires ReScript 12 or later.`,\n    );\n    window.showErrorMessage(\n      \"Code analysis requires ReScript 12 or later (rescript-tools.exe not found).\",\n    );\n    return null;\n  }\n\n  // Derive monorepo root from binary path - the directory containing node_modules\n  // This handles monorepos correctly since findBinary walks up to find the binary\n  const monorepoRootPath: NormalizedPath | null = normalizePath(\n    getMonorepoRootFromBinaryPath(binaryPath),\n  );\n\n  if (monorepoRootPath === null) {\n    outputChannel.appendLine(\n      `[error] Could not determine workspace root from binary path: ${binaryPath}`,\n    );\n    window.showErrorMessage(\"Could not determine workspace root.\");\n    return null;\n  }\n\n  // Check if we should use reanalyze-server (ReScript >= 12.1.0)\n  const useServer = await supportsReanalyzeServer(monorepoRootPath);\n\n  if (useServer && monorepoRootPath) {\n    // Ensure server is running from workspace root\n    const serverState = await startReanalyzeServer(\n      monorepoRootPath,\n      binaryPath,\n      outputChannel,\n    );\n    if (serverState) {\n      outputChannel.appendLine(\n        `[info] Using reanalyze-server for ${path.basename(monorepoRootPath)}`,\n      );\n    }\n  }\n\n  statusBarItem.setToRunningText(codeAnalysisRunningStatusBarItem);\n\n  let opts = [\"reanalyze\", \"-json\"];\n  let p = cp.spawn(binaryPath, opts, { cwd: monorepoRootPath });\n\n  if (p.stdout == null) {\n    outputChannel.appendLine(\n      `[error] Failed to spawn reanalyze process: stdout is null. 
Binary: ${binaryPath}, cwd: ${monorepoRootPath}`,\n    );\n    statusBarItem.setToFailed(codeAnalysisRunningStatusBarItem);\n    window.showErrorMessage(\"Failed to start code analysis process.\");\n    return null;\n  }\n\n  let data = \"\";\n\n  p.stdout.on(\"data\", (d) => {\n    data += d;\n  });\n\n  p.stderr?.on(\"data\", (e) => {\n    // Sometimes the compiler artifacts has been corrupted in some way, and\n    // reanalyze will spit out a \"End_of_file\" exception. The solution is to\n    // clean and rebuild the ReScript project, which we can tell the user about\n    // here.\n    if (e.includes(\"End_of_file\")) {\n      window.showErrorMessage(\n        `Something went wrong trying to run reanalyze. Please try cleaning and rebuilding your ReScript project.`,\n      );\n    } else {\n      window.showErrorMessage(\n        `Something went wrong trying to run reanalyze: '${e}'`,\n      );\n    }\n  });\n\n  p.on(\"close\", () => {\n    diagnosticsResultCodeActions.clear();\n\n    let json: DiagnosticsResultFormat | null = null;\n\n    try {\n      json = JSON.parse(data);\n    } catch (e) {\n      window\n        .showErrorMessage(\n          `Something went wrong when running the code analyzer.`,\n          \"See details in error log\",\n        )\n        .then((_choice) => {\n          outputChannel.show();\n        });\n\n      outputChannel.appendLine(\"\\n\\n>>>>\");\n      outputChannel.appendLine(\n        \"Parsing JSON from reanalyze failed. The raw, invalid JSON can be reproduced by following the instructions below. 
Please run that command and report the issue + failing JSON on the extension bug tracker: https://github.com/rescript-lang/rescript-vscode/issues\",\n      );\n      outputChannel.appendLine(\n        `> To reproduce, run \"${binaryPath} ${opts.join(\n          \" \",\n        )}\" in directory: \"${monorepoRootPath}\"`,\n      );\n      outputChannel.appendLine(\"\\n\");\n    }\n\n    if (json == null) {\n      // If reanalyze failed for some reason we'll clear the diagnostics.\n      diagnosticsCollection.clear();\n      statusBarItem.setToFailed(codeAnalysisRunningStatusBarItem);\n      return;\n    }\n\n    let { diagnosticsMap } = resultsToDiagnostics(\n      json,\n      diagnosticsResultCodeActions,\n    );\n\n    // This smoothens the experience of the diagnostics updating a bit by\n    // clearing only the visible diagnostics that has been fixed after the\n    // updated diagnostics has been applied.\n    diagnosticsCollection.forEach((uri, _) => {\n      if (!diagnosticsMap.has(uri.fsPath)) {\n        diagnosticsCollection.delete(uri);\n      }\n    });\n\n    diagnosticsMap.forEach((diagnostics, filePath) => {\n      diagnosticsCollection.set(Uri.parse(filePath), diagnostics);\n    });\n\n    statusBarItem.setToStopText(codeAnalysisRunningStatusBarItem);\n  });\n\n  // Return the monorepo root so the caller can track which server to stop\n  return monorepoRootPath;\n};\n"
  },
  {
    "path": "client/src/commands/create_interface.ts",
    "content": "import * as fs from \"fs\";\nimport * as p from \"vscode-languageserver-protocol\";\nimport { LanguageClient, RequestType } from \"vscode-languageclient/node\";\nimport { window } from \"vscode\";\n\nexport const createInterfaceRequest = new RequestType<\n  p.TextDocumentIdentifier,\n  p.TextDocumentIdentifier,\n  void\n>(\"textDocument/createInterface\");\n\nexport const createInterface = (client: LanguageClient) => {\n  if (!client) {\n    return window.showInformationMessage(\"Language server not running\");\n  }\n\n  const editor = window.activeTextEditor;\n\n  if (!editor) {\n    return window.showInformationMessage(\"No active editor\");\n  }\n\n  if (fs.existsSync(editor.document.uri.fsPath + \"i\")) {\n    return window\n      .showInformationMessage(\n        \"Interface file already exists. Do you want to overwrite it?\",\n        \"Yes\",\n        \"No\",\n      )\n      .then((answer) => {\n        if (answer === \"Yes\") {\n          client.sendRequest(createInterfaceRequest, {\n            uri: editor.document.uri.toString(),\n          });\n        }\n      });\n  }\n\n  client.sendRequest(createInterfaceRequest, {\n    uri: editor.document.uri.toString(),\n  });\n};\n"
  },
  {
    "path": "client/src/commands/dump_debug.ts",
    "content": "import * as cp from \"child_process\";\nimport * as fs from \"fs\";\nimport {\n  ExtensionContext,\n  StatusBarItem,\n  Uri,\n  ViewColumn,\n  window,\n} from \"vscode\";\nimport {\n  createFileInTempDir,\n  findProjectRootOfFileInDir,\n  getBinaryPath,\n  NormalizedPath,\n} from \"../utils\";\nimport * as path from \"path\";\n\n// Maps to Cli.ml\nconst debugCommands = [\n  { command: \"dumpAst\" as const, title: \"Dump the AST\" },\n  { command: \"completion\" as const, title: \"Completion\" },\n  { command: \"definition\" as const, title: \"Definition\" },\n  { command: \"typeDefinition\" as const, title: \"Type Definition\" },\n  { command: \"documentSymbol\" as const, title: \"Document Symbol\" },\n  { command: \"hover\" as const, title: \"Hover\" },\n  { command: \"signatureHelp\" as const, title: \"Signature Help\" },\n  { command: \"inlayHint\" as const, title: \"Inlay Hint\" },\n  { command: \"codeLens\" as const, title: \"Code Lens\" },\n  { command: \"extractDocs\" as const, title: \"Extract Docs\" },\n  { command: \"codeAction\" as const, title: \"Code Action\" },\n  { command: \"codemod\" as const, title: \"Code Mod\" },\n  { command: \"diagnosticSyntax\" as const, title: \"Diagnostic Syntax\" },\n  { command: \"references\" as const, title: \"References\" },\n  { command: \"rename\" as const, title: \"Rename\" },\n  { command: \"semanticTokens\" as const, title: \"Semantic Tokens\" },\n  { command: \"createInterface\" as const, title: \"Create Interface\" },\n  { command: \"format\" as const, title: \"Format\" },\n];\n\nconst logLevels = [\"verbose\" as const, \"regular\" as const, \"off\" as const];\n\nfunction runDebugDump({\n  binaryPath,\n  cwd,\n  cliOptions,\n}: {\n  binaryPath: string;\n  cwd: string;\n  cliOptions: string[];\n}): Promise<string | null> {\n  return new Promise((resolve) => {\n    let opts = [...cliOptions];\n    window.showInformationMessage(JSON.stringify(opts));\n    let p = cp.spawn(binaryPath, opts, {\n      
cwd,\n    });\n\n    if (p.stdout == null) {\n      window.showErrorMessage(\"Something went wrong.\");\n      resolve(null);\n      return;\n    }\n\n    let data = \"\";\n\n    p.stdout.on(\"data\", (d) => {\n      data += d;\n    });\n\n    p.stderr?.on(\"data\", (e) => {\n      window.showErrorMessage(\n        `Something went wrong trying to run debug dump: '${e}'`,\n      );\n      resolve(e.toString());\n    });\n\n    p.on(\"close\", () => {\n      resolve(data);\n    });\n  });\n}\n\nfunction runBsc({\n  cwd,\n  cliOptions,\n}: {\n  cwd: string;\n  cliOptions: string[];\n}): Promise<string | null> {\n  return new Promise((resolve) => {\n    let opts = [\"bsc\", ...cliOptions];\n    let p = cp.spawn(\"npx\", opts, {\n      cwd,\n    });\n\n    if (p.stdout == null) {\n      window.showErrorMessage(\"Something went wrong.\");\n      resolve(null);\n      return;\n    }\n\n    let data = \"\";\n\n    p.stdout.on(\"data\", (d) => {\n      data += d;\n    });\n\n    p.stderr?.on(\"data\", (e) => {\n      data += e.toString();\n    });\n\n    p.on(\"close\", () => {\n      resolve(data);\n    });\n  });\n}\n\nlet rerunCommand: null | (() => Promise<void>) = null;\n\nexport const dumpDebugRetrigger = () => {\n  if (rerunCommand != null) {\n    rerunCommand();\n  }\n};\n\nexport const dumpDebug = async (\n  context: ExtensionContext,\n  statusBarItem: StatusBarItem,\n) => {\n  const editor = window.activeTextEditor;\n\n  if (!editor) {\n    return window.showInformationMessage(\"No active editor\");\n  }\n\n  const { line, character } = editor.selection.active;\n  const { line: endLine, character: endChar } = editor.selection.end;\n  const filePath = editor.document.uri.fsPath;\n\n  let projectRootPath: NormalizedPath | null =\n    findProjectRootOfFileInDir(filePath);\n  const binaryPath = getBinaryPath(\n    \"rescript-editor-analysis.exe\",\n    projectRootPath,\n  );\n  if (binaryPath === null) {\n    window.showErrorMessage(\"Binary executable not 
found.\");\n    return;\n  }\n\n  const callTypeTitle = await window.showQuickPick(\n    debugCommands.map((d) => d.title),\n    {\n      title: \"Select call type\",\n    },\n  );\n  const callType = debugCommands.find((d) => d.title === callTypeTitle);\n\n  if (callType == null) {\n    window.showErrorMessage(`Debug call type not found: \"${callTypeTitle}\"`);\n    return null;\n  }\n\n  let logLevel = \"verbose\";\n\n  if (![\"dumpAst\"].includes(callType.command)) {\n    logLevel = await window.showQuickPick(logLevels, {\n      title: \"Select log level\",\n    });\n  }\n\n  const outputFile = createFileInTempDir(callType.title, \".txt\");\n  const document = window.activeTextEditor.document;\n\n  const runCommand = async () => {\n    const extension = path.extname(filePath);\n    const currentFile = createFileInTempDir(\"current_file\", extension);\n\n    fs.writeFileSync(currentFile, document.getText());\n\n    if ([\"dumpAst\"].includes(callType.command)) {\n      switch (callType.command) {\n        case \"dumpAst\":\n          const res = await runBsc({\n            cwd: path.dirname(filePath),\n            cliOptions: [\n              \"-dparsetree\",\n              \"-only-parse\",\n              \"-ignore-parse-errors\",\n              \"-bs-loc\",\n              currentFile,\n            ],\n          });\n          fs.writeFileSync(outputFile, `Pos: ${line}:${character}\\n\\n${res}`);\n          return;\n      }\n\n      fs.rmSync(currentFile);\n    }\n\n    const opts: string[] = [\"debug-dump\", logLevel, callType.command];\n\n    switch (callType.command) {\n      case \"completion\": {\n        opts.push(filePath, line.toString(), character.toString(), currentFile);\n        break;\n      }\n      case \"definition\": {\n        opts.push(filePath, line.toString(), character.toString());\n        break;\n      }\n      case \"typeDefinition\": {\n        opts.push(filePath, line.toString(), character.toString());\n        break;\n      }\n      
case \"documentSymbol\": {\n        opts.push(filePath);\n        break;\n      }\n      case \"hover\": {\n        opts.push(filePath, line.toString(), character.toString(), currentFile);\n        break;\n      }\n      case \"signatureHelp\": {\n        opts.push(filePath, line.toString(), character.toString(), currentFile);\n        break;\n      }\n      case \"inlayHint\": {\n        window.showErrorMessage(\"Not implemented yet.\");\n        return null;\n      }\n      case \"codeLens\": {\n        opts.push(filePath);\n        break;\n      }\n      case \"extractDocs\": {\n        opts.push(filePath);\n        break;\n      }\n      case \"codeAction\": {\n        opts.push(\n          filePath,\n          line.toString(),\n          character.toString(),\n          endLine.toString(),\n          endChar.toString(),\n          currentFile,\n        );\n        break;\n      }\n      case \"codemod\": {\n        opts.push(\n          currentFile,\n          line.toString(),\n          character.toString(),\n          \"add-missing-cases\", // TODO: Make selectable\n        );\n        break;\n      }\n      case \"diagnosticSyntax\": {\n        opts.push(currentFile);\n        break;\n      }\n      case \"references\": {\n        opts.push(filePath, line.toString(), character.toString());\n        break;\n      }\n      case \"semanticTokens\": {\n        opts.push(currentFile);\n        break;\n      }\n      case \"createInterface\": {\n        window.showErrorMessage(\"Not implemented yet.\");\n        return null;\n      }\n      case \"format\": {\n        opts.push(currentFile);\n        break;\n      }\n      default:\n        window.showErrorMessage(`\"${callType.title}\" is not implemented yet.`);\n        return null;\n    }\n\n    const res = await runDebugDump({\n      binaryPath,\n      cwd: path.dirname(filePath),\n      cliOptions: opts,\n    });\n\n    fs.writeFileSync(outputFile, res);\n    fs.rmSync(currentFile);\n  };\n\n  rerunCommand = 
runCommand;\n\n  await runCommand();\n\n  await window.showTextDocument(Uri.parse(outputFile), {\n    viewColumn: ViewColumn.Beside,\n  });\n\n  statusBarItem.show();\n  statusBarItem.text = \"$(debug-restart) Rerun command\";\n  statusBarItem.command = \"rescript-vscode.debug-dump-retrigger\";\n\n  const unwatch = fs.watch(outputFile, (event, _) => {\n    if (event === \"rename\") {\n      fs.rmSync(outputFile);\n      statusBarItem.hide();\n      rerunCommand = null;\n    }\n  });\n\n  context.subscriptions.push({ dispose: () => unwatch.close() });\n};\n"
  },
  {
    "path": "client/src/commands/open_compiled.ts",
    "content": "import * as fs from \"fs\";\nimport * as p from \"vscode-languageserver-protocol\";\nimport { window, Uri, ViewColumn } from \"vscode\";\nimport { LanguageClient, RequestType } from \"vscode-languageclient/node\";\n\nlet openCompiledFileRequest = new RequestType<\n  p.TextDocumentIdentifier,\n  p.TextDocumentIdentifier,\n  void\n>(\"textDocument/openCompiled\");\n\nexport const openCompiled = (client: LanguageClient) => {\n  if (!client) {\n    return window.showInformationMessage(\"Language server not running\");\n  }\n\n  const editor = window.activeTextEditor;\n\n  if (!editor) {\n    return window.showInformationMessage(\"No active editor\");\n  }\n\n  if (!fs.existsSync(editor.document.uri.fsPath)) {\n    return window.showInformationMessage(\"Compiled file does not exist\");\n  }\n\n  client\n    .sendRequest(openCompiledFileRequest, {\n      uri: editor.document.uri.toString(),\n    })\n    .then((response) => {\n      const document = Uri.parse(response.uri);\n\n      return window.showTextDocument(document, {\n        viewColumn: ViewColumn.Beside,\n      });\n    });\n};\n"
  },
  {
    "path": "client/src/commands/paste_as_rescript_json.ts",
    "content": "import { env, window, Position, Selection, TextDocument } from \"vscode\";\n\nconst INDENT_SIZE = 2;\nconst INDENT_UNIT = \" \".repeat(INDENT_SIZE);\n\nconst indent = (level: number) => INDENT_UNIT.repeat(level);\n\nconst isLikelyJson = (text: string): boolean => {\n  const trimmed = text.trim();\n  if (trimmed.length === 0) {\n    return false;\n  }\n  const first = trimmed[0];\n  return first === \"{\" || first === \"[\";\n};\n\nconst ensureFloatString = (value: number): string => {\n  const raw = Number.isFinite(value) ? String(value) : \"0\";\n  if (raw.includes(\".\") || raw.includes(\"e\") || raw.includes(\"E\")) {\n    return raw;\n  }\n  return `${raw}.`;\n};\n\nconst formatJsonValue = (value: unknown, level = 0): string => {\n  if (value === null) {\n    return \"JSON.Null\";\n  }\n\n  switch (typeof value) {\n    case \"string\":\n      return `JSON.String(${JSON.stringify(value)})`;\n    case \"number\":\n      return `JSON.Number(${ensureFloatString(value)})`;\n    case \"boolean\":\n      return `JSON.Boolean(${value})`;\n    case \"object\":\n      if (Array.isArray(value)) {\n        return formatArray(value, level);\n      }\n      return formatObject(value as Record<string, unknown>, level);\n    default:\n      return \"JSON.Null\";\n  }\n};\n\nconst formatObject = (\n  value: Record<string, unknown>,\n  level: number,\n): string => {\n  const entries = Object.entries(value);\n  if (entries.length === 0) {\n    return \"JSON.Object(dict{})\";\n  }\n  const nextLevel = level + 1;\n  const lines = entries.map(\n    ([key, val]) =>\n      `${indent(nextLevel)}${JSON.stringify(key)}: ${formatJsonValue(\n        val,\n        nextLevel,\n      )}`,\n  );\n  return `JSON.Object(dict{\\n${lines.join(\",\\n\")}\\n${indent(level)}})`;\n};\n\nconst formatArray = (values: unknown[], level: number): string => {\n  if (values.length === 0) {\n    return \"JSON.Array([])\";\n  }\n  const nextLevel = level + 1;\n  const lines = values.map(\n    
(item) => `${indent(nextLevel)}${formatJsonValue(item, nextLevel)}`,\n  );\n  return `JSON.Array([\\n${lines.join(\",\\n\")}\\n${indent(level)}])`;\n};\n\nexport type JsonConversionResult =\n  | { kind: \"success\"; formatted: string }\n  | { kind: \"notJson\" }\n  | { kind: \"error\"; errorMessage: string };\n\nexport const convertPlainTextToJsonT = (text: string): JsonConversionResult => {\n  if (!isLikelyJson(text)) {\n    return { kind: \"notJson\" };\n  }\n\n  try {\n    const parsed = JSON.parse(text);\n    // Only convert objects and arrays, not primitive values\n    if (typeof parsed !== \"object\" || parsed === null) {\n      return { kind: \"notJson\" };\n    }\n    return { kind: \"success\", formatted: formatJsonValue(parsed) };\n  } catch {\n    return {\n      kind: \"error\",\n      errorMessage: \"Clipboard JSON could not be parsed.\",\n    };\n  }\n};\n\nexport const getBaseIndent = (\n  document: TextDocument,\n  position: Position,\n): string => {\n  const linePrefix = document\n    .lineAt(position)\n    .text.slice(0, position.character);\n  return /^\\s*$/.test(linePrefix) ? linePrefix : \"\";\n};\n\nexport const applyBaseIndent = (formatted: string, baseIndent: string) => {\n  if (baseIndent.length === 0) {\n    return formatted;\n  }\n\n  return formatted\n    .split(\"\\n\")\n    .map((line, index) => (index === 0 ? 
line : `${baseIndent}${line}`))\n    .join(\"\\n\");\n};\n\nexport const buildInsertionText = (\n  document: TextDocument,\n  position: Position,\n  formatted: string,\n) => {\n  const baseIndent = getBaseIndent(document, position);\n  return applyBaseIndent(formatted, baseIndent);\n};\n\nconst computeEndPosition = (\n  insertionStart: Position,\n  indentedText: string,\n): Position => {\n  const lines = indentedText.split(\"\\n\");\n  if (lines.length === 1) {\n    return insertionStart.translate(0, lines[0].length);\n  }\n  return new Position(\n    insertionStart.line + lines.length - 1,\n    lines[lines.length - 1].length,\n  );\n};\n\nexport const pasteAsRescriptJson = async () => {\n  const editor = window.activeTextEditor;\n  if (!editor) {\n    window.showInformationMessage(\n      \"No active editor to paste the ReScript JSON into.\",\n    );\n    return;\n  }\n\n  const clipboardText = await env.clipboard.readText();\n  const conversion = convertPlainTextToJsonT(clipboardText);\n\n  if (conversion.kind === \"notJson\") {\n    window.showInformationMessage(\"Clipboard does not appear to contain JSON.\");\n    return;\n  }\n\n  if (conversion.kind === \"error\") {\n    window.showErrorMessage(\"Clipboard JSON could not be parsed.\");\n    return;\n  }\n\n  const formatted = conversion.formatted;\n  const selection = editor.selection;\n  const indentedText = buildInsertionText(\n    editor.document,\n    selection.start,\n    formatted,\n  );\n  const insertionStart = selection.start;\n  const didEdit = await editor.edit((editBuilder) => {\n    editBuilder.replace(selection, indentedText);\n  });\n\n  if (didEdit) {\n    const endPosition = computeEndPosition(insertionStart, indentedText);\n    editor.selection = new Selection(endPosition, endPosition);\n  }\n};\n"
  },
  {
    "path": "client/src/commands/paste_as_rescript_jsx.ts",
    "content": "import { env, window, Position, Selection } from \"vscode\";\n\nimport { buildInsertionText } from \"./paste_as_rescript_json\";\nimport { transformJsx } from \"./transform-jsx\";\n\nexport type JsxConversionResult =\n  | { kind: \"success\"; formatted: string }\n  | { kind: \"empty\" }\n  | { kind: \"error\"; errorMessage: string };\n\nexport const convertPlainTextToRescriptJsx = (\n  text: string,\n): JsxConversionResult => {\n  if (text.trim().length === 0) {\n    return { kind: \"empty\" };\n  }\n\n  try {\n    // If you ever need to fix a bug in transformJsx,\n    // please do so in https://github.com/nojaf/vanilla-jsx-to-rescript-jsx/blob/main/index.ts\n    // and then copy the changes to transform-jsx.ts\n    const formatted = transformJsx(text);\n    return { kind: \"success\", formatted };\n  } catch (error) {\n    const errorMessage =\n      error instanceof Error ? error.message : \"Unknown conversion error.\";\n    return {\n      kind: \"error\",\n      errorMessage,\n    };\n  }\n};\n\nconst computeEndPosition = (\n  insertionStart: Position,\n  indentedText: string,\n): Position => {\n  const lines = indentedText.split(\"\\n\");\n  if (lines.length === 1) {\n    return insertionStart.translate(0, lines[0].length);\n  }\n  return new Position(\n    insertionStart.line + lines.length - 1,\n    lines[lines.length - 1].length,\n  );\n};\n\nexport const pasteAsRescriptJsx = async () => {\n  const editor = window.activeTextEditor;\n  if (!editor) {\n    window.showInformationMessage(\n      \"No active editor to paste the ReScript JSX into.\",\n    );\n    return;\n  }\n\n  const clipboardText = await env.clipboard.readText();\n  const conversion = convertPlainTextToRescriptJsx(clipboardText);\n\n  if (conversion.kind === \"empty\") {\n    window.showInformationMessage(\n      \"Clipboard does not appear to contain any JSX content.\",\n    );\n    return;\n  }\n\n  if (conversion.kind === \"error\") {\n    window.showErrorMessage(\n      
`Clipboard JSX could not be transformed: ${conversion.errorMessage}`,\n    );\n    return;\n  }\n\n  const formatted = conversion.formatted;\n  const selection = editor.selection;\n  const indentedText = buildInsertionText(\n    editor.document,\n    selection.start,\n    formatted,\n  );\n  const insertionStart = selection.start;\n  const didEdit = await editor.edit((editBuilder) => {\n    editBuilder.replace(selection, indentedText);\n  });\n\n  if (didEdit) {\n    const endPosition = computeEndPosition(insertionStart, indentedText);\n    editor.selection = new Selection(endPosition, endPosition);\n  }\n};\n"
  },
  {
    "path": "client/src/commands/switch_impl_intf.ts",
    "content": "import * as fs from \"fs\";\nimport { LanguageClient } from \"vscode-languageclient/node\";\nimport { window } from \"vscode\";\nimport { createInterfaceRequest } from \"./create_interface\";\n\nexport const switchImplIntf = async (client: LanguageClient) => {\n  if (!client) {\n    return window.showInformationMessage(\"Language server not running\");\n  }\n\n  const editor = window.activeTextEditor;\n\n  if (!editor) {\n    return window.showInformationMessage(\"No active editor\");\n  }\n\n  const isIntf = editor.document.uri.path.endsWith(\".resi\");\n  const isImpl = editor.document.uri.path.endsWith(\".res\");\n\n  if (!(isIntf || isImpl)) {\n    await window.showInformationMessage(\n      \"This command only can run on *.res or *.resi files.\",\n    );\n    return;\n  }\n\n  if (isIntf) {\n    // *.res\n    const newUri = editor.document.uri.with({\n      path: editor.document.uri.path.slice(0, -1),\n    });\n    await window.showTextDocument(newUri, { preview: false });\n    return;\n  }\n\n  if (!fs.existsSync(editor.document.uri.fsPath + \"i\")) {\n    // if interface doesn't exist, ask the user before creating.\n    const selection = await window.showInformationMessage(\n      \"Do you want to create an interface *.resi?\",\n      ...[\"No\", \"Yes\"],\n    );\n\n    if (selection !== \"Yes\") return;\n\n    // create interface\n    await client.sendRequest(createInterfaceRequest, {\n      uri: editor.document.uri.toString(),\n    });\n  }\n\n  // *.resi\n  const newUri = editor.document.uri.with({\n    path: editor.document.uri.path + \"i\",\n  });\n  await window.showTextDocument(newUri, { preview: false });\n  return;\n};\n"
  },
  {
    "path": "client/src/commands/transform-jsx.ts",
    "content": "import { parseSync, type Node } from \"oxc-parser\";\nimport { walk } from \"oxc-walker\";\nimport MagicString from \"magic-string\";\n\nconst integerRegex = /^-?\\d+$/;\nconst floatRegex = /^-?\\d+(\\.\\d+)?$/;\n\nconst rescriptKeywords = new Set([\"type\", \"open\", \"as\", \"in\"]);\n\ntype Rule<T extends Node = Node> = {\n  match: (node: T, parent: Node | null) => boolean;\n  transform: (node: T, parent: Node | null, magicString: MagicString) => void;\n  stopAfterMatch?: boolean; // If true, stop applying further rules after this one matches\n};\n\n// Single quotes to double quotes\nconst singleQuotesToDouble: Rule<Node> = {\n  match: (node) =>\n    node.type === \"JSXAttribute\" &&\n    node.value?.type === \"Literal\" &&\n    typeof node.value.raw === \"string\" &&\n    node.value.raw.startsWith(\"'\"),\n  transform: (node, _, magicString) => {\n    const attr = node as Extract<Node, { type: \"JSXAttribute\" }>;\n    const value = attr.value as Extract<typeof attr.value, { type: \"Literal\" }>;\n    magicString.update(value.start, value.end, `\"${value.raw!.slice(1, -1)}\"`);\n  },\n};\n\n// SVG width/height numeric to string\nconst svgWidthHeightToString: Rule<Node> = {\n  match: (node, parent) =>\n    node.type === \"JSXAttribute\" &&\n    parent?.type === \"JSXOpeningElement\" &&\n    parent.name.type === \"JSXIdentifier\" &&\n    parent.name.name.toLowerCase() === \"svg\" &&\n    node.name.type === \"JSXIdentifier\" &&\n    (node.name.name === \"width\" || node.name.name === \"height\") &&\n    node.value?.type === \"JSXExpressionContainer\" &&\n    node.value.expression?.type === \"Literal\" &&\n    typeof node.value.expression.value === \"number\",\n  transform: (node, _, magicString) => {\n    const attr = node as Extract<Node, { type: \"JSXAttribute\" }>;\n    const value = attr.value as Extract<\n      typeof attr.value,\n      { type: \"JSXExpressionContainer\" }\n    >;\n    const expression = value.expression as Extract<\n      
typeof value.expression,\n      { type: \"Literal\" }\n    >;\n    const numericValue = String(expression.value);\n    magicString.update(value.start, value.end, `\"${numericValue}\"`);\n  },\n};\n\n// Rescript keywords get underscore suffix\nconst rescriptKeywordUnderscore: Rule<Node> = {\n  match: (node) =>\n    node.type === \"JSXAttribute\" &&\n    node.name.type === \"JSXIdentifier\" &&\n    rescriptKeywords.has(node.name.name),\n  transform: (node, _, magicString) => {\n    const attr = node as Extract<Node, { type: \"JSXAttribute\" }>;\n    magicString.appendRight(attr.name.end, \"_\");\n  },\n};\n\n// aria- attributes to camelCase\nconst ariaToCamelCase: Rule<Node> = {\n  match: (node) =>\n    node.type === \"JSXAttribute\" &&\n    node.name.type === \"JSXIdentifier\" &&\n    typeof node.name.name === \"string\" &&\n    node.name.name.startsWith(\"aria-\"),\n  transform: (node, _, magicString) => {\n    const attr = node as Extract<Node, { type: \"JSXAttribute\" }>;\n    const name = attr.name.name as string;\n    magicString.update(\n      attr.name.start + 4,\n      attr.name.start + 6,\n      name[5]?.toUpperCase() || \"\",\n    );\n  },\n};\n\n// data-testid to dataTestId\nconst dataTestIdToCamelCase: Rule<Node> = {\n  match: (node) =>\n    node.type === \"JSXAttribute\" &&\n    node.name.type === \"JSXIdentifier\" &&\n    node.name.name === \"data-testid\",\n  transform: (node, _, magicString) => {\n    const attr = node as Extract<Node, { type: \"JSXAttribute\" }>;\n    magicString.update(attr.name.start, attr.name.end, \"dataTestId\");\n  },\n};\n\n// Null values become =true\nconst nullValueToTrue: Rule<Node> = {\n  match: (node) => node.type === \"JSXAttribute\" && node.value === null,\n  transform: (node, _, magicString) => {\n    magicString.appendRight(node.end, \"=true\");\n  },\n};\n\n// Integer text nodes\nconst integerTextNode: Rule<Node> = {\n  match: (node) =>\n    node.type === \"JSXText\" &&\n    typeof node.raw === \"string\" &&\n    
integerRegex.test(node.raw.trim()),\n  transform: (node, _, magicString) => {\n    magicString.prependLeft(node.start, \"{React.int(\");\n    magicString.appendRight(node.end, \")}\");\n  },\n  stopAfterMatch: true,\n};\n\n// Float text nodes\nconst floatTextNode: Rule<Node> = {\n  match: (node) =>\n    node.type === \"JSXText\" &&\n    typeof node.raw === \"string\" &&\n    floatRegex.test(node.raw.trim()),\n  transform: (node, _, magicString) => {\n    magicString.prependLeft(node.start, \"{React.float(\");\n    magicString.appendRight(node.end, \")}\");\n  },\n  stopAfterMatch: true,\n};\n\n// String text nodes\nconst stringTextNode: Rule<Node> = {\n  match: (node) =>\n    node.type === \"JSXText\" &&\n    typeof node.value === \"string\" &&\n    node.value.trim() !== \"\",\n  transform: (node, _, magicString) => {\n    magicString.prependLeft(node.start, '{React.string(\"');\n    magicString.appendRight(node.end, '\")}');\n  },\n  stopAfterMatch: true,\n};\n\nconst rules: Rule<Node>[] = [\n  singleQuotesToDouble,\n  svgWidthHeightToString,\n  rescriptKeywordUnderscore,\n  ariaToCamelCase,\n  dataTestIdToCamelCase,\n  nullValueToTrue,\n  integerTextNode,\n  floatTextNode,\n  stringTextNode,\n];\n\nfunction applyRules(\n  node: Node,\n  parent: Node | null,\n  rules: Rule<Node>[],\n  magicString: MagicString,\n): void {\n  for (const rule of rules) {\n    if (rule.match(node, parent)) {\n      rule.transform(node, parent, magicString);\n      if (rule.stopAfterMatch) {\n        break;\n      }\n    }\n  }\n}\n\nexport function transformJsx(input: string): string {\n  const magicString = new MagicString(input);\n  const parseResult = parseSync(\"clipboard-input.tsx\", input, {\n    astType: \"ts\",\n    lang: \"tsx\",\n  });\n\n  walk(parseResult.program, {\n    enter: (node: Node, parent: Node | null) => {\n      applyRules(node, parent, rules, magicString);\n    },\n  });\n\n  return magicString.toString();\n}\n\nexport default transformJsx;\n"
  },
  {
    "path": "client/src/commands.ts",
    "content": "import { DiagnosticCollection, OutputChannel, StatusBarItem } from \"vscode\";\n\nimport {\n  DiagnosticsResultCodeActionsMap,\n  runCodeAnalysisWithReanalyze,\n  reanalyzeServers,\n  stopReanalyzeServer,\n  stopAllReanalyzeServers,\n  showReanalyzeServerLog,\n} from \"./commands/code_analysis\";\n\nexport {\n  reanalyzeServers,\n  stopReanalyzeServer,\n  stopAllReanalyzeServers,\n  showReanalyzeServerLog,\n};\n\nexport { createInterface } from \"./commands/create_interface\";\nexport { openCompiled } from \"./commands/open_compiled\";\nexport { switchImplIntf } from \"./commands/switch_impl_intf\";\nexport { dumpDebug, dumpDebugRetrigger } from \"./commands/dump_debug\";\nexport { pasteAsRescriptJson } from \"./commands/paste_as_rescript_json\";\nexport { pasteAsRescriptJsx } from \"./commands/paste_as_rescript_jsx\";\n\n// Returns the monorepo root path if a reanalyze server was started, null otherwise.\nexport const codeAnalysisWithReanalyze = (\n  diagnosticsCollection: DiagnosticCollection,\n  diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap,\n  outputChannel: OutputChannel,\n  codeAnalysisRunningStatusBarItem: StatusBarItem,\n): Promise<string | null> => {\n  return runCodeAnalysisWithReanalyze(\n    diagnosticsCollection,\n    diagnosticsResultCodeActions,\n    outputChannel,\n    codeAnalysisRunningStatusBarItem,\n  );\n};\n"
  },
  {
    "path": "client/src/extension.ts",
    "content": "import * as path from \"path\";\nimport {\n  workspace,\n  ExtensionContext,\n  commands,\n  languages,\n  window,\n  StatusBarAlignment,\n  Uri,\n  Range,\n  Position,\n  CodeAction,\n  WorkspaceEdit,\n  CodeActionKind,\n  Diagnostic,\n  ViewColumn,\n} from \"vscode\";\nimport { ThemeColor } from \"vscode\";\n\nimport {\n  LanguageClient,\n  LanguageClientOptions,\n  ServerOptions,\n  State,\n  TransportKind,\n} from \"vscode-languageclient/node\";\n\nimport * as customCommands from \"./commands\";\nimport {\n  DiagnosticsResultCodeActionsMap,\n  statusBarItem,\n} from \"./commands/code_analysis\";\nimport { pasteAsRescriptJson } from \"./commands/paste_as_rescript_json\";\nimport { pasteAsRescriptJsx } from \"./commands/paste_as_rescript_jsx\";\nimport { findProjectRootOfFile } from \"../../shared/src/projectRoots\";\n\nlet client: LanguageClient;\n\n// let taskProvider = tasks.registerTaskProvider('Run ReScript build', {\n// \tprovideTasks: () => {\n// \t\t// if (!rakePromise) {\n// \t\t// \trakePromise = getRakeTasks();\n// \t\t// }\n// \t\t// return rakePromise;\n\n// \t\t// taskDefinition: TaskDefinition,\n// \t\t// scope: WorkspaceFolder | TaskScope.Global | TaskScope.Workspace,\n// \t\t// name: string,\n// \t\t// source: string,\n// \t\t// execution ?: ProcessExecution | ShellExecution | CustomExecution,\n// \t\t// problemMatchers ?: string | string[]\n// \t\treturn [\n// \t\t\tnew Task(\n// \t\t\t\t{\n// \t\t\t\t\ttype: 'bsb',\n// \t\t\t\t},\n// \t\t\t\tTaskScope.Workspace,\n// \t\t\t\t// definition.task,\n// \t\t\t\t'build and watch',\n// \t\t\t\t'bsb',\n// \t\t\t\tnew ShellExecution(\n// \t\t\t\t\t// `./node_modules/.bin/bsb -make-world -w`\n// \t\t\t\t\t`pwd`\n// \t\t\t\t),\n// \t\t\t\t\"Hello\"\n// \t\t\t)\n// \t\t]\n// \t},\n// \tresolveTask(_task: Task): Task | undefined {\n// \t\t// const task = _task.definition.task;\n// \t\t// // A Rake task consists of a task and an optional file as specified in RakeTaskDefinition\n// \t\t// // 
Make sure that this looks like a Rake task by checking that there is a task.\n// \t\t// if (task) {\n// \t\t// \t// resolveTask requires that the same definition object be used.\n// \t\t// \tconst definition: RakeTaskDefinition = <any>_task.definition;\n// \t\t// \treturn new Task(\n// \t\t// \t\tdefinition,\n// \t\t// \t\tdefinition.task,\n// \t\t// \t\t'rake',\n// \t\t// \t\tnew vscode.ShellExecution(`rake ${definition.task}`)\n// \t\t// \t);\n// \t\t// }\n// \t\treturn undefined;\n// \t}\n// });\n\nexport function activate(context: ExtensionContext) {\n  let outputChannel = window.createOutputChannel(\n    \"ReScript Language Server\",\n    \"rescript\",\n  );\n\n  function createLanguageClient() {\n    // The server is implemented in node\n    let serverModule = context.asAbsolutePath(\n      path.join(\"server\", \"out\", \"cli.js\"),\n    );\n    // The debug options for the server\n    // --inspect=6009: runs the server in Node's Inspector mode so VS Code can attach to the server for debugging\n    let debugOptions = { execArgv: [\"--nolazy\", \"--inspect=6009\"] };\n\n    // If the extension is launched in debug mode then the debug server options are used\n    // Otherwise the run options are used\n    let serverOptions: ServerOptions = {\n      run: {\n        module: serverModule,\n        args: [\"--node-ipc\"],\n        transport: TransportKind.ipc,\n      },\n      debug: {\n        module: serverModule,\n        args: [\"--node-ipc\"],\n        transport: TransportKind.ipc,\n        options: debugOptions,\n      },\n    };\n\n    // Options to control the language client\n    let clientOptions: LanguageClientOptions = {\n      documentSelector: [{ scheme: \"file\", language: \"rescript\" }],\n      // We'll send the initial configuration in here, but this might be\n      // problematic because every consumer of the LS will need to mimic this.\n      // We'll leave it like this for now, but might be worth revisiting later on.\n      
initializationOptions: {\n        extensionConfiguration: workspace.getConfiguration(\"rescript.settings\"),\n\n        // Keep this in sync with the `extensionClientCapabilities` type in the\n        // server.\n        extensionClientCapabilities: {\n          supportsMarkdownLinks: true,\n        },\n      },\n      outputChannel,\n      markdown: {\n        isTrusted: true,\n      },\n    };\n\n    const client = new LanguageClient(\n      \"ReScriptLSP\",\n      \"ReScript Language Server\",\n      serverOptions,\n      clientOptions,\n    );\n\n    // This sets up a listener that, if we're in code analysis mode, triggers\n    // code analysis as the LS server reports that ReScript compilation has\n    // finished. This is needed because code analysis must wait until\n    // compilation has finished, and the most reliable source for that is the LS\n    // server, that already keeps track of when the compiler finishes in order to\n    // other provide fresh diagnostics.\n    context.subscriptions.push(\n      client.onDidChangeState(({ newState }) => {\n        if (newState === State.Running) {\n          context.subscriptions.push(\n            client.onNotification(\"rescript/compilationFinished\", () => {\n              if (inCodeAnalysisState.active === true) {\n                customCommands.codeAnalysisWithReanalyze(\n                  diagnosticsCollection,\n                  diagnosticsResultCodeActions,\n                  outputChannel,\n                  codeAnalysisRunningStatusBarItem,\n                );\n              }\n            }),\n          );\n        }\n      }),\n    );\n\n    return client;\n  }\n\n  // Create the language client and start the client.\n  client = createLanguageClient();\n\n  // Create a custom diagnostics collection, for cases where we want to report\n  // diagnostics programatically from inside of the extension. 
The reason this\n  // is separate from the diagnostics provided by the LS server itself is that\n  // this should be possible to clear independently of the other diagnostics\n  // coming from the ReScript compiler.\n  let diagnosticsCollection = languages.createDiagnosticCollection(\"rescript\");\n\n  // This map will hold code actions produced by the code analysis, in a\n  // format that's cheap to look up.\n  let diagnosticsResultCodeActions: DiagnosticsResultCodeActionsMap = new Map();\n  let codeAnalysisRunningStatusBarItem = window.createStatusBarItem(\n    StatusBarAlignment.Right,\n  );\n\n  let debugDumpStatusBarItem = window.createStatusBarItem(\n    StatusBarAlignment.Right,\n  );\n\n  let compilationStatusBarItem = window.createStatusBarItem(\n    StatusBarAlignment.Right,\n  );\n  context.subscriptions.push(compilationStatusBarItem);\n\n  let compileStatusEnabled: boolean = workspace\n    .getConfiguration(\"rescript.settings\")\n    .get<boolean>(\"compileStatus.enable\", true);\n\n  type ClientCompileStatus = {\n    status: \"compiling\" | \"success\" | \"error\" | \"warning\";\n    project: string;\n    errorCount: number;\n    warningCount: number;\n  };\n  const projectStatuses: Map<string, ClientCompileStatus> = new Map();\n\n  const refreshCompilationStatusItem = () => {\n    if (!compileStatusEnabled) {\n      compilationStatusBarItem.hide();\n      compilationStatusBarItem.tooltip = undefined;\n      compilationStatusBarItem.backgroundColor = undefined;\n      compilationStatusBarItem.command = undefined;\n      return;\n    }\n    const entries = [...projectStatuses.values()];\n    const compiling = entries.filter((e) => e.status === \"compiling\");\n    const errors = entries.filter((e) => e.status === \"error\");\n    const warnings = entries.filter((e) => e.status === \"warning\");\n\n    if (compiling.length > 0) {\n      compilationStatusBarItem.text = `$(loading~spin) ReScript`;\n      compilationStatusBarItem.tooltip = compiling\n       
 .map((e) => e.project)\n        .join(\", \");\n      compilationStatusBarItem.backgroundColor = undefined;\n      compilationStatusBarItem.command = undefined;\n      compilationStatusBarItem.show();\n      return;\n    }\n\n    if (errors.length > 0) {\n      compilationStatusBarItem.text = `$(alert) ReScript: Failed`;\n      compilationStatusBarItem.backgroundColor = new ThemeColor(\n        \"statusBarItem.errorBackground\",\n      );\n      compilationStatusBarItem.command = \"rescript-vscode.showProblems\";\n      const byProject = errors.map((e) => `${e.project} (${e.errorCount})`);\n      compilationStatusBarItem.tooltip = `Failed: ${byProject.join(\", \")}`;\n      compilationStatusBarItem.show();\n      return;\n    }\n\n    if (warnings.length > 0) {\n      compilationStatusBarItem.text = `$(warning) ReScript: Warnings`;\n      // \"statusBarItem.warningBackground\" is one of the two theme colors VS Code\n      // supports on backgroundColor (cf. the error branch above); assigning it to\n      // the foreground `color` was a bug.\n      compilationStatusBarItem.backgroundColor = new ThemeColor(\n        \"statusBarItem.warningBackground\",\n      );\n      compilationStatusBarItem.color = undefined;\n      compilationStatusBarItem.command = \"rescript-vscode.showProblems\";\n      const byProject = warnings.map((e) => `${e.project} (${e.warningCount})`);\n      compilationStatusBarItem.tooltip = `Warnings: ${byProject.join(\", \")}`;\n      compilationStatusBarItem.show();\n      return;\n    }\n\n    const successes = entries.filter((e) => e.status === \"success\");\n    if (successes.length > 0) {\n      // Compact success display: project label plus a green check emoji\n      compilationStatusBarItem.text = `$(check) ReScript: Ok`;\n      compilationStatusBarItem.backgroundColor = undefined;\n      compilationStatusBarItem.color = null;\n      compilationStatusBarItem.command = undefined;\n      const projects = successes.map((e) => e.project).join(\", \");\n      compilationStatusBarItem.tooltip = projects\n        ? 
`Compilation Succeeded: ${projects}`\n        : `Compilation Succeeded`;\n      compilationStatusBarItem.show();\n      return;\n    }\n\n    compilationStatusBarItem.hide();\n    compilationStatusBarItem.tooltip = undefined;\n    compilationStatusBarItem.backgroundColor = undefined;\n    compilationStatusBarItem.command = undefined;\n  };\n\n  context.subscriptions.push(\n    client.onDidChangeState(({ newState }) => {\n      if (newState === State.Running) {\n        context.subscriptions.push(\n          client.onNotification(\n            \"rescript/compilationStatus\",\n            (payload: {\n              project: string;\n              projectRootPath: string;\n              status: \"compiling\" | \"success\" | \"error\" | \"warning\";\n              errorCount: number;\n              warningCount: number;\n            }) => {\n              projectStatuses.set(payload.projectRootPath, {\n                status: payload.status,\n                project: payload.project,\n                errorCount: payload.errorCount,\n                warningCount: payload.warningCount,\n              });\n              refreshCompilationStatusItem();\n            },\n          ),\n        );\n      }\n    }),\n  );\n\n  let inCodeAnalysisState: {\n    active: boolean;\n    currentMonorepoRoot: string | null;\n  } = { active: false, currentMonorepoRoot: null };\n\n  // This code actions provider yields the code actions potentially extracted\n  // from the code analysis to the editor.\n  languages.registerCodeActionsProvider(\"rescript\", {\n    async provideCodeActions(document, rangeOrSelection) {\n      let availableActions =\n        diagnosticsResultCodeActions.get(document.uri.fsPath) ?? 
[];\n\n      const allRemoveActionEdits = availableActions.filter(\n        ({ codeAction }) => codeAction.title === \"Remove unused\",\n      );\n\n      const actions: CodeAction[] = availableActions\n        .filter(\n          ({ range }) =>\n            range.contains(rangeOrSelection) || range.isEqual(rangeOrSelection),\n        )\n        .map(({ codeAction }) => codeAction);\n\n      if (allRemoveActionEdits.length > 0) {\n        const removeAllCodeAction = new CodeAction(\"Remove all unused in file\");\n        const edit = new WorkspaceEdit();\n        allRemoveActionEdits.forEach((subEdit) => {\n          subEdit.codeAction.edit.entries().forEach(([uri, [textEdit]]) => {\n            edit.replace(uri, textEdit.range, textEdit.newText);\n          });\n        });\n        removeAllCodeAction.kind = CodeActionKind.RefactorRewrite;\n        removeAllCodeAction.edit = edit;\n        actions.push(removeAllCodeAction);\n      }\n\n      return actions;\n    },\n  });\n\n  // Register custom commands\n  commands.registerCommand(\"rescript-vscode.create_interface\", () => {\n    customCommands.createInterface(client);\n  });\n\n  commands.registerCommand(\n    \"rescript-vscode.clear_diagnostic\",\n    (diagnostic: Diagnostic) => {\n      const editor = window.activeTextEditor;\n      if (!editor) {\n        return;\n      }\n\n      const document = editor.document;\n      // get() returns undefined when the collection has no entry for this uri.\n      const diagnostics = diagnosticsCollection.get(document.uri) ?? [];\n      const newDiagnostics = diagnostics.filter((d) => d !== diagnostic);\n      diagnosticsCollection.set(document.uri, newDiagnostics);\n    },\n  );\n\n  commands.registerCommand(\"rescript-vscode.open_compiled\", () => {\n    customCommands.openCompiled(client);\n  });\n\n  commands.registerCommand(\"rescript-vscode.debug-dump-start\", () => {\n    customCommands.dumpDebug(context, debugDumpStatusBarItem);\n  });\n\n  commands.registerCommand(\"rescript-vscode.dump-server-state\", async () => {\n    try {\n      const result 
= (await client.sendRequest(\"workspace/executeCommand\", {\n        command: \"rescript/dumpServerState\",\n      })) as { content: string };\n\n      // Create an unsaved document with the server state content\n      const document = await workspace.openTextDocument({\n        content: result.content,\n        language: \"json\",\n      });\n\n      // Show the document in the editor\n      await window.showTextDocument(document, {\n        viewColumn: ViewColumn.Beside,\n        preview: false,\n      });\n    } catch (e) {\n      outputChannel.appendLine(`Failed to dump server state: ${String(e)}`);\n      window.showErrorMessage(\n        \"Failed to dump server state. See 'Output' tab, 'ReScript Language Server' channel for details.\",\n      );\n      outputChannel.show();\n    }\n  });\n\n  commands.registerCommand(\"rescript-vscode.showProblems\", async () => {\n    try {\n      await commands.executeCommand(\"workbench.actions.view.problems\");\n    } catch {\n      outputChannel.show();\n    }\n  });\n\n  commands.registerCommand(\"rescript-vscode.debug-dump-retrigger\", () => {\n    customCommands.dumpDebugRetrigger();\n  });\n\n  commands.registerCommand(\"rescript-vscode.paste_as_rescript_json\", () => {\n    pasteAsRescriptJson();\n  });\n\n  commands.registerCommand(\"rescript-vscode.paste_as_rescript_jsx\", () => {\n    pasteAsRescriptJsx();\n  });\n\n  commands.registerCommand(\n    \"rescript-vscode.go_to_location\",\n    async (fileUri: string, startLine: number, startCol: number) => {\n      await window.showTextDocument(Uri.parse(fileUri), {\n        selection: new Range(\n          new Position(startLine, startCol),\n          new Position(startLine, startCol),\n        ),\n      });\n    },\n  );\n\n  // Starts the code analysis mode.\n  commands.registerCommand(\"rescript-vscode.start_code_analysis\", async () => {\n    inCodeAnalysisState.active = true;\n\n    codeAnalysisRunningStatusBarItem.command =\n      
\"rescript-vscode.stop_code_analysis\";\n    codeAnalysisRunningStatusBarItem.show();\n    statusBarItem.setToStopText(codeAnalysisRunningStatusBarItem);\n\n    // Start code analysis and capture the monorepo root for server management\n    const monorepoRoot = await customCommands.codeAnalysisWithReanalyze(\n      diagnosticsCollection,\n      diagnosticsResultCodeActions,\n      outputChannel,\n      codeAnalysisRunningStatusBarItem,\n    );\n    inCodeAnalysisState.currentMonorepoRoot = monorepoRoot;\n  });\n\n  commands.registerCommand(\"rescript-vscode.stop_code_analysis\", () => {\n    inCodeAnalysisState.active = false;\n\n    // Stop server if we started it for this project\n    customCommands.stopReanalyzeServer(\n      inCodeAnalysisState.currentMonorepoRoot,\n      outputChannel,\n    );\n    inCodeAnalysisState.currentMonorepoRoot = null;\n\n    diagnosticsCollection.clear();\n    diagnosticsResultCodeActions.clear();\n\n    codeAnalysisRunningStatusBarItem.hide();\n  });\n\n  // Show reanalyze server log\n  commands.registerCommand(\n    \"rescript-vscode.show_reanalyze_server_log\",\n    async () => {\n      let currentDocument = window.activeTextEditor?.document;\n      let projectRootPath: string | null = null;\n\n      if (currentDocument) {\n        projectRootPath = findProjectRootOfFile(currentDocument.uri.fsPath);\n      }\n\n      return await customCommands.showReanalyzeServerLog(projectRootPath);\n    },\n  );\n\n  commands.registerCommand(\"rescript-vscode.switch-impl-intf\", () => {\n    customCommands.switchImplIntf(client);\n  });\n\n  // Start build command\n  commands.registerCommand(\"rescript-vscode.start_build\", async () => {\n    let currentDocument = window.activeTextEditor?.document;\n    if (!currentDocument) {\n      window.showErrorMessage(\"No active document found.\");\n      return;\n    }\n\n    try {\n      const result = (await client.sendRequest(\"rescript/startBuild\", {\n        uri: currentDocument.uri.toString(),\n 
     })) as { success: boolean };\n\n      if (result.success) {\n        window.showInformationMessage(\"Build watcher started.\");\n      } else {\n        window.showErrorMessage(\n          \"Failed to start build. Check that a ReScript project is open.\",\n        );\n      }\n    } catch (e) {\n      window.showErrorMessage(`Failed to start build: ${String(e)}`);\n    }\n  });\n\n  commands.registerCommand(\"rescript-vscode.restart_language_server\", () => {\n    client.stop().then(() => {\n      client = createLanguageClient();\n      client.start();\n    });\n  });\n\n  // Start the client. This will also launch the server\n  client.start();\n\n  // Restart the language client automatically when certain configuration\n  // changes. These are typically settings that affect the capabilities of the\n  // language client, and because of that requires a full restart.\n  context.subscriptions.push(\n    workspace.onDidChangeConfiguration(({ affectsConfiguration }) => {\n      // Put any configuration that, when changed, requires a full restart of\n      // the server here. 
That will typically be any configuration that affects\n      // the capabilities declared by the server, since those cannot be updated\n      // on the fly, and require a full restart with new capabilities set when\n      // initializing.\n      if (\n        affectsConfiguration(\"rescript.settings.inlayHints\") ||\n        affectsConfiguration(\"rescript.settings.codeLens\") ||\n        affectsConfiguration(\"rescript.settings.signatureHelp\") ||\n        affectsConfiguration(\"rescript.settings.incrementalTypechecking\") ||\n        affectsConfiguration(\"rescript.settings.cache\")\n      ) {\n        commands.executeCommand(\"rescript-vscode.restart_language_server\");\n      } else {\n        if (affectsConfiguration(\"rescript.settings.compileStatus.enable\")) {\n          compileStatusEnabled = workspace\n            .getConfiguration(\"rescript.settings\")\n            .get<boolean>(\"compileStatus.enable\", true);\n          refreshCompilationStatusItem();\n        }\n        // Send a general message that configuration has updated. Clients\n        // interested can then pull the new configuration as they see fit.\n        client\n          .sendNotification(\"workspace/didChangeConfiguration\")\n          .catch((err) => {\n            window.showErrorMessage(String(err));\n          });\n      }\n    }),\n  );\n}\n\nexport function deactivate(): Thenable<void> | undefined {\n  // Stop all reanalyze servers we started\n  customCommands.stopAllReanalyzeServers();\n\n  if (!client) {\n    return undefined;\n  }\n  return client.stop();\n}\n"
  },
  {
    "path": "client/src/test/suite/exampleProject.test.ts",
    "content": "/**\n * Example Project Tests (Code Analysis Server Test Suite)\n *\n * Run these tests:\n *   cd client && npm run test -- --label example-project\n */\nimport * as assert from \"assert\";\nimport * as path from \"path\";\nimport * as vscode from \"vscode\";\nimport {\n  getWorkspaceRoot,\n  removeRescriptLockFile,\n  removeReanalyzeSocketFile,\n  ensureExtensionActivated,\n  sleep,\n  getCompilerLogPath,\n  getFileMtime,\n  waitForFileUpdate,\n  insertCommentAndSave,\n  restoreContentAndSave,\n  startBuildWatcher,\n  startCodeAnalysis,\n  stopCodeAnalysis,\n  showReanalyzeServerLog,\n  findBuildPromptInLogs,\n} from \"./helpers\";\n\nsuite(\"Code Analysis Server Test Suite\", () => {\n  test(\"Extension should be present\", async () => {\n    const extension = vscode.extensions.getExtension(\n      \"chenglou92.rescript-vscode\",\n    );\n    assert.ok(extension, \"ReScript extension should be present\");\n    console.log(\"Extension found:\", extension.id);\n  });\n\n  test(\"Commands should be registered after activation\", async () => {\n    const extension = vscode.extensions.getExtension(\n      \"chenglou92.rescript-vscode\",\n    );\n    if (!extension) {\n      console.log(\"Extension not found, skipping command test\");\n      return;\n    }\n\n    if (!extension.isActive) {\n      console.log(\"Activating extension...\");\n      await extension.activate();\n    }\n\n    const commands = await vscode.commands.getCommands(true);\n\n    const expectedCommands = [\n      \"rescript-vscode.start_code_analysis\",\n      \"rescript-vscode.stop_code_analysis\",\n      \"rescript-vscode.show_reanalyze_server_log\",\n      \"rescript-vscode.start_build\",\n    ];\n\n    for (const cmd of expectedCommands) {\n      assert.ok(commands.includes(cmd), `Command ${cmd} should be registered`);\n    }\n    console.log(\"All commands registered successfully!\");\n  });\n\n  test(\"Start Code Analysis should run on a ReScript file\", async () => {\n    
const workspaceRoot = getWorkspaceRoot();\n    if (!workspaceRoot) {\n      console.log(\"No workspace folder found, skipping test\");\n      return;\n    }\n    console.log(\"Workspace root:\", workspaceRoot);\n\n    const resFilePath = path.join(workspaceRoot, \"src\", \"Hello.res\");\n    console.log(\"Opening file:\", resFilePath);\n\n    const document = await vscode.workspace.openTextDocument(resFilePath);\n    await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    await ensureExtensionActivated();\n\n    console.log(\"Running start_code_analysis command...\");\n    await startCodeAnalysis();\n    console.log(\"Code analysis command completed\");\n\n    console.log(\"Running stop_code_analysis command...\");\n    await stopCodeAnalysis();\n    console.log(\"Test completed successfully\");\n  });\n\n  test(\"Start Build command should start build watcher\", async () => {\n    const workspaceRoot = getWorkspaceRoot();\n    if (!workspaceRoot) {\n      console.log(\"No workspace folder found, skipping test\");\n      return;\n    }\n    console.log(\"Workspace root:\", workspaceRoot);\n\n    const resFilePath = path.join(workspaceRoot, \"src\", \"Hello.res\");\n    console.log(\"Opening file:\", resFilePath);\n\n    const document = await vscode.workspace.openTextDocument(resFilePath);\n    await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    await ensureExtensionActivated();\n\n    console.log(\"Running start_build command...\");\n    await startBuildWatcher();\n\n    console.log(\n      \"Test completed - check Language Server log for 'Starting build watcher' or 'Build watcher already running' message\",\n    );\n  });\n\n  test(\"Build watcher recompiles on file save\", async () => {\n    const workspaceRoot = getWorkspaceRoot();\n    if (!workspaceRoot) {\n      console.log(\"No workspace folder found, skipping test\");\n      return;\n    }\n    
console.log(\"Workspace root:\", workspaceRoot);\n\n    const resFilePath = path.join(workspaceRoot, \"src\", \"Hello.res\");\n    console.log(\"Opening file:\", resFilePath);\n\n    const document = await vscode.workspace.openTextDocument(resFilePath);\n    const editor = await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    await ensureExtensionActivated();\n\n    console.log(\"Starting build watcher...\");\n    await startBuildWatcher();\n    console.log(\"Build watcher started\");\n\n    const compilerLogPath = getCompilerLogPath(workspaceRoot);\n    const mtimeBefore = getFileMtime(compilerLogPath);\n    if (mtimeBefore) {\n      console.log(`compiler.log mtime before: ${mtimeBefore.toISOString()}`);\n    } else {\n      console.log(\"compiler.log does not exist yet\");\n    }\n\n    console.log(\"Editing file...\");\n    const originalContent = document.getText();\n    await insertCommentAndSave(editor, \"/* test comment */\\n\");\n    console.log(\"File saved with edit\");\n\n    console.log(\"Waiting for compilation...\");\n    const mtimeAfter = await waitForFileUpdate(compilerLogPath, mtimeBefore);\n    if (mtimeAfter) {\n      console.log(`compiler.log mtime after: ${mtimeAfter.toISOString()}`);\n    } else {\n      console.log(\"compiler.log still does not exist\");\n    }\n\n    assert.ok(mtimeAfter, \"compiler.log should exist after file save\");\n    if (mtimeBefore && mtimeAfter) {\n      assert.ok(\n        mtimeAfter > mtimeBefore,\n        \"compiler.log should be updated after file save\",\n      );\n      console.log(\"SUCCESS: compiler.log was updated after file save\");\n    } else if (!mtimeBefore && mtimeAfter) {\n      console.log(\"SUCCESS: compiler.log was created after file save\");\n    }\n\n    console.log(\"Restoring original content...\");\n    await restoreContentAndSave(editor, originalContent);\n    console.log(\"Original content restored\");\n\n    await sleep(1000);\n    
console.log(\"Test completed\");\n  });\n\n  test(\"Code analysis with incremental updates\", async () => {\n    const workspaceRoot = getWorkspaceRoot();\n    if (!workspaceRoot) {\n      console.log(\"No workspace folder found, skipping test\");\n      return;\n    }\n    console.log(\"Workspace root:\", workspaceRoot);\n\n    // Remove stale socket file to ensure a fresh server is started\n    // Note: Only remove the socket file, not the lock file, to keep the build watcher running\n    removeReanalyzeSocketFile(workspaceRoot);\n\n    const resFilePath = path.join(workspaceRoot, \"src\", \"Hello.res\");\n    console.log(\"Step 1: Opening file:\", resFilePath);\n\n    const document = await vscode.workspace.openTextDocument(resFilePath);\n    const editor = await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    await ensureExtensionActivated();\n\n    console.log(\"Step 2: Starting build...\");\n    await startBuildWatcher();\n    console.log(\"Build started\");\n\n    console.log(\"Step 3: Starting code analysis...\");\n    await startCodeAnalysis();\n    console.log(\"Code analysis started\");\n\n    console.log(\"Step 3b: Opening reanalyze server log...\");\n    await showReanalyzeServerLog();\n\n    console.log(\"Step 4: Checking for diagnostics...\");\n    const diagnostics = vscode.languages.getDiagnostics(document.uri);\n    console.log(`Found ${diagnostics.length} diagnostics in Hello.res`);\n    assert.ok(diagnostics.length > 0, \"Should have diagnostics for dead code\");\n    for (const diag of diagnostics.slice(0, 5)) {\n      console.log(\n        `  - Line ${diag.range.start.line + 1}: ${diag.message.substring(0, 80)}...`,\n      );\n    }\n    if (diagnostics.length > 5) {\n      console.log(`  ... 
and ${diagnostics.length - 5} more`);\n    }\n    const initialDiagnosticsCount = diagnostics.length;\n\n    console.log(\"Step 5: Adding dead code...\");\n    const originalContent = document.getText();\n    const deadCode = \"let testDeadVariable12345 = 999\\n\";\n\n    const compilerLogPath = getCompilerLogPath(workspaceRoot);\n    const compilerLogMtimeBefore = getFileMtime(compilerLogPath);\n    if (compilerLogMtimeBefore) {\n      console.log(\n        `compiler.log mtime before: ${compilerLogMtimeBefore.toISOString()}`,\n      );\n    } else {\n      console.log(\"compiler.log does not exist before edit\");\n    }\n\n    await insertCommentAndSave(editor, deadCode);\n    console.log(\"Dead code added and saved\");\n\n    console.log(\"Step 5a: Waiting for compilation...\");\n    const mtimeAfter = await waitForFileUpdate(\n      compilerLogPath,\n      compilerLogMtimeBefore,\n    );\n    if (mtimeAfter) {\n      console.log(`compiler.log updated: ${mtimeAfter.toISOString()}`);\n    } else {\n      console.log(\"Warning: compilation may not have completed\");\n    }\n\n    console.log(\"Step 5b: Re-running code analysis...\");\n    await vscode.window.showTextDocument(document);\n    await startCodeAnalysis();\n    console.log(\"Code analysis re-run complete\");\n\n    console.log(\"Step 6: Checking for updated diagnostics...\");\n    const updatedDiagnostics = vscode.languages.getDiagnostics(document.uri);\n    console.log(\n      `Found ${updatedDiagnostics.length} diagnostics after edit (was ${initialDiagnosticsCount})`,\n    );\n\n    assert.ok(\n      updatedDiagnostics.length > initialDiagnosticsCount,\n      `Diagnostics count should increase after adding dead code (was ${initialDiagnosticsCount}, now ${updatedDiagnostics.length})`,\n    );\n\n    const deadVarDiagnostic = updatedDiagnostics.find((d) =>\n      d.message.includes(\"testDeadVariable12345\"),\n    );\n    assert.ok(\n      deadVarDiagnostic,\n      \"Should find diagnostic for 
testDeadVariable12345\",\n    );\n    console.log(\n      `Found diagnostic for testDeadVariable12345: ${deadVarDiagnostic.message}`,\n    );\n\n    console.log(\"Step 7: Undoing change...\");\n    await restoreContentAndSave(editor, originalContent);\n    console.log(\"Change undone and saved\");\n\n    await sleep(1000);\n\n    console.log(\"Step 8: Stopping code analysis...\");\n    await stopCodeAnalysis();\n    console.log(\"Code analysis stopped\");\n\n    console.log(\"Step 9: Test completed - check Reanalyze Server log for:\");\n    console.log(\"  - [request #1] with 'files: X processed, 0 cached'\");\n    console.log(\n      \"  - [request #2] with 'files: X processed, Y cached' where Y > 0 (incremental)\",\n    );\n  });\n\n  test(\"Should prompt to start build when no lock file exists\", async () => {\n    const workspaceRoot = getWorkspaceRoot();\n    if (!workspaceRoot) {\n      console.log(\"No workspace folder found, skipping test\");\n      return;\n    }\n    console.log(\"Workspace root:\", workspaceRoot);\n\n    removeRescriptLockFile(workspaceRoot);\n\n    const resFilePath = path.join(workspaceRoot, \"src\", \"More.res\");\n    console.log(\"Opening file:\", resFilePath);\n\n    const document = await vscode.workspace.openTextDocument(resFilePath);\n    await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    await sleep(1000);\n\n    const promptResult = findBuildPromptInLogs();\n    if (promptResult.found) {\n      console.log(\n        `Found prompt message: \"Prompting to start build for ${promptResult.path}\"`,\n      );\n    }\n\n    assert.ok(\n      promptResult.found,\n      \"Should find 'Prompting to start build' message in Language Server log\",\n    );\n    console.log(\"SUCCESS: Build prompt was shown\");\n  });\n});\n"
  },
  {
    "path": "client/src/test/suite/helpers.ts",
    "content": "/**\n * Shared test helpers for ReScript VSCode extension tests\n */\nimport * as assert from \"assert\";\nimport * as fs from \"fs\";\nimport * as path from \"path\";\nimport * as vscode from \"vscode\";\n\n/**\n * Get the workspace root folder path\n */\nexport function getWorkspaceRoot(): string {\n  const workspaceFolders = vscode.workspace.workspaceFolders;\n  if (workspaceFolders && workspaceFolders.length > 0) {\n    return workspaceFolders[0].uri.fsPath;\n  }\n  return \"\";\n}\n\n/**\n * Remove ReScript 12+ lock file (lib/rescript.lock) and reanalyze socket file\n */\nexport function removeRescriptLockFile(workspaceRoot: string): void {\n  const filesToRemove = [\n    path.join(workspaceRoot, \"lib\", \"rescript.lock\"),\n    // Also remove reanalyze socket file to ensure a fresh server is started\n    path.join(workspaceRoot, \".rescript-reanalyze.sock\"),\n  ];\n\n  for (const file of filesToRemove) {\n    try {\n      if (fs.existsSync(file)) {\n        fs.unlinkSync(file);\n        console.log(`Removed ${path.basename(file)}`);\n      }\n    } catch (e) {\n      console.log(`Could not remove ${path.basename(file)}:`, e);\n    }\n  }\n}\n\n/**\n * Remove only the reanalyze socket file (not the lock file)\n * Use this when you need a fresh reanalyze server but want to keep the build watcher running\n */\nexport function removeReanalyzeSocketFile(workspaceRoot: string): void {\n  const socketPath = path.join(workspaceRoot, \".rescript-reanalyze.sock\");\n  try {\n    if (fs.existsSync(socketPath)) {\n      fs.unlinkSync(socketPath);\n      console.log(\"Removed .rescript-reanalyze.sock\");\n    }\n  } catch (e) {\n    console.log(\"Could not remove .rescript-reanalyze.sock:\", e);\n  }\n}\n\n/**\n * Remove ReScript 9/10/11 lock file (.bsb.lock)\n */\nexport function removeBsbLockFile(workspaceRoot: string): void {\n  const lockPath = path.join(workspaceRoot, \".bsb.lock\");\n  try {\n    if (fs.existsSync(lockPath)) {\n      
fs.unlinkSync(lockPath);\n      console.log(\"Removed .bsb.lock\");\n    }\n  } catch (e) {\n    console.log(\"Could not remove .bsb.lock:\", e);\n  }\n}\n\n/**\n * Remove monorepo lock files (both rewatch.lock and rescript.lock)\n */\nexport function removeMonorepoLockFiles(monorepoRoot: string): void {\n  const filesToRemove = [\n    path.join(monorepoRoot, \"lib\", \"rewatch.lock\"),\n    path.join(monorepoRoot, \"lib\", \"rescript.lock\"),\n    // Also remove reanalyze socket file to ensure a fresh server is started\n    path.join(monorepoRoot, \".rescript-reanalyze.sock\"),\n  ];\n\n  for (const file of filesToRemove) {\n    try {\n      if (fs.existsSync(file)) {\n        fs.unlinkSync(file);\n        console.log(`Removed ${path.basename(file)}`);\n      }\n    } catch (e) {\n      console.log(`Could not remove ${path.basename(file)}:`, e);\n    }\n  }\n}\n\n/**\n * Ensure the ReScript extension is activated\n */\nexport async function ensureExtensionActivated(): Promise<\n  vscode.Extension<unknown> | undefined\n> {\n  const extension = vscode.extensions.getExtension(\n    \"chenglou92.rescript-vscode\",\n  );\n  if (extension && !extension.isActive) {\n    await extension.activate();\n  }\n  return extension;\n}\n\n/**\n * Open a file in the editor and return the document\n */\nexport async function openFile(filePath: string): Promise<vscode.TextDocument> {\n  const document = await vscode.workspace.openTextDocument(filePath);\n  await vscode.window.showTextDocument(document);\n  return document;\n}\n\n/**\n * Find the LSP log file in the most recent test logs directory\n */\nexport function findLspLogContent(): string | null {\n  // __dirname is client/out/client/src/test/suite when running tests\n  const vscodeTestDir = path.resolve(__dirname, \"../../../../../.vscode-test\");\n  const logsBaseDir = path.join(vscodeTestDir, \"user-data\", \"logs\");\n\n  try {\n    // Find the most recent log directory (format: YYYYMMDDTHHMMSS)\n    const logDirs = fs\n   
   .readdirSync(logsBaseDir)\n      .filter((d) => /^\\d{8}T\\d{6}$/.test(d))\n      .sort()\n      .reverse();\n\n    for (const logDir of logDirs) {\n      const outputLoggingDir = path.join(\n        logsBaseDir,\n        logDir,\n        \"window1\",\n        \"exthost\",\n      );\n      if (!fs.existsSync(outputLoggingDir)) continue;\n\n      const outputDirs = fs\n        .readdirSync(outputLoggingDir)\n        .filter((d) => d.startsWith(\"output_logging_\"));\n\n      for (const outputDir of outputDirs) {\n        const lspLogPath = path.join(\n          outputLoggingDir,\n          outputDir,\n          \"1-ReScript Language Server.log\",\n        );\n        if (fs.existsSync(lspLogPath)) {\n          console.log(\"Checking log file:\", lspLogPath);\n          return fs.readFileSync(lspLogPath, \"utf-8\");\n        }\n      }\n    }\n  } catch (e) {\n    console.log(\"Error reading logs:\", e);\n  }\n\n  return null;\n}\n\n/**\n * Wait for a condition to become true, polling at intervals\n */\nexport async function waitFor(\n  condition: () => boolean,\n  options: { timeout?: number; interval?: number; message?: string } = {},\n): Promise<boolean> {\n  const { timeout = 5000, interval = 500, message = \"condition\" } = options;\n  const maxAttempts = Math.ceil(timeout / interval);\n\n  for (let i = 0; i < maxAttempts; i++) {\n    await new Promise((resolve) => setTimeout(resolve, interval));\n    const result = condition();\n    console.log(`Checking ${message} (attempt ${i + 1}): ${result}`);\n    if (result) return true;\n  }\n  return false;\n}\n\n/**\n * Sleep for a given number of milliseconds\n */\nexport function sleep(ms: number): Promise<void> {\n  return new Promise((resolve) => setTimeout(resolve, ms));\n}\n\n/**\n * Get the path to the compiler.log file\n */\nexport function getCompilerLogPath(workspaceRoot: string): string {\n  return path.join(workspaceRoot, \"lib\", \"bs\", \".compiler.log\");\n}\n\n/**\n * Get the mtime of a file, or null 
if it doesn't exist\n */\nexport function getFileMtime(filePath: string): Date | null {\n  try {\n    return fs.statSync(filePath).mtime;\n  } catch {\n    return null;\n  }\n}\n\n/**\n * Wait for a file's mtime to be updated (newer than the given mtime)\n */\nexport async function waitForFileUpdate(\n  filePath: string,\n  mtimeBefore: Date | null,\n  options: { timeout?: number; interval?: number } = {},\n): Promise<Date | null> {\n  const { timeout = 5000, interval = 500 } = options;\n  const maxAttempts = Math.ceil(timeout / interval);\n\n  for (let i = 0; i < maxAttempts; i++) {\n    await sleep(interval);\n    const mtimeAfter = getFileMtime(filePath);\n    if (mtimeAfter && (!mtimeBefore || mtimeAfter > mtimeBefore)) {\n      return mtimeAfter;\n    }\n  }\n  return getFileMtime(filePath);\n}\n\n/**\n * Insert a comment at the beginning of a document and save\n */\nexport async function insertCommentAndSave(\n  editor: vscode.TextEditor,\n  comment: string,\n): Promise<void> {\n  await editor.edit((editBuilder) => {\n    editBuilder.insert(new vscode.Position(0, 0), comment);\n  });\n  await editor.document.save();\n}\n\n/**\n * Restore original content to a document and save\n */\nexport async function restoreContentAndSave(\n  editor: vscode.TextEditor,\n  originalContent: string,\n): Promise<void> {\n  const document = editor.document;\n  await editor.edit((editBuilder) => {\n    const fullRange = new vscode.Range(\n      new vscode.Position(0, 0),\n      document.lineAt(document.lineCount - 1).range.end,\n    );\n    editBuilder.replace(fullRange, originalContent);\n  });\n  await document.save();\n}\n\n/**\n * Start the build watcher and wait for it to initialize\n */\nexport async function startBuildWatcher(waitMs: number = 1000): Promise<void> {\n  await vscode.commands.executeCommand(\"rescript-vscode.start_build\");\n  await sleep(waitMs);\n}\n\n/**\n * Start code analysis and wait for it to initialize\n */\nexport async function 
startCodeAnalysis(waitMs: number = 1000): Promise<void> {\n  await vscode.commands.executeCommand(\"rescript-vscode.start_code_analysis\");\n  await sleep(waitMs);\n}\n\n/**\n * Stop code analysis\n */\nexport async function stopCodeAnalysis(): Promise<void> {\n  await vscode.commands.executeCommand(\"rescript-vscode.stop_code_analysis\");\n}\n\n/**\n * Show the reanalyze server log and assert it returns true\n */\nexport async function showReanalyzeServerLog(): Promise<void> {\n  const result = await vscode.commands.executeCommand<boolean>(\n    \"rescript-vscode.show_reanalyze_server_log\",\n  );\n  console.log(`Show reanalyze server log result: ${result}`);\n  assert.strictEqual(\n    result,\n    true,\n    \"Show reanalyze server log should return true when output channel is shown\",\n  );\n}\n\n/**\n * Result of searching for build prompt in logs\n */\nexport interface BuildPromptResult {\n  found: boolean;\n  path: string;\n}\n\n/**\n * Find \"Prompting to start build\" message in LSP logs\n */\nexport function findBuildPromptInLogs(): BuildPromptResult {\n  const logContent = findLspLogContent();\n  if (logContent) {\n    const promptMatch = logContent.match(\n      /\\[Info.*\\] Prompting to start build for (.+)/,\n    );\n    if (promptMatch) {\n      return { found: true, path: promptMatch[1] };\n    }\n  }\n  return { found: false, path: \"\" };\n}\n"
  },
  {
    "path": "client/src/test/suite/monorepoRoot.test.ts",
    "content": "/**\n * Monorepo Root Tests (Monorepo Code Analysis Test Suite)\n *\n * Run these tests:\n *   cd client && npm run test -- --label monorepo-root\n */\nimport * as assert from \"assert\";\nimport * as fs from \"fs\";\nimport * as path from \"path\";\nimport * as vscode from \"vscode\";\nimport {\n  getWorkspaceRoot,\n  removeMonorepoLockFiles,\n  ensureExtensionActivated,\n  sleep,\n  getCompilerLogPath,\n  getFileMtime,\n  waitForFileUpdate,\n  insertCommentAndSave,\n  restoreContentAndSave,\n  startBuildWatcher,\n  startCodeAnalysis,\n  stopCodeAnalysis,\n  showReanalyzeServerLog,\n  findBuildPromptInLogs,\n} from \"./helpers\";\n\nsuite(\"Monorepo Code Analysis Test Suite\", () => {\n  test(\"Monorepo: Build watcher works when opening root package\", async () => {\n    const monorepoRoot = getWorkspaceRoot();\n    console.log(\"Monorepo root:\", monorepoRoot);\n\n    const rootResPath = path.join(monorepoRoot, \"src\", \"Root.res\");\n    if (!fs.existsSync(rootResPath)) {\n      console.log(\"Monorepo project not found, skipping test\");\n      return;\n    }\n\n    removeMonorepoLockFiles(monorepoRoot);\n\n    console.log(\"Opening root file:\", rootResPath);\n    const document = await vscode.workspace.openTextDocument(rootResPath);\n    const editor = await vscode.window.showTextDocument(document);\n    console.log(\"Root file opened successfully\");\n\n    await ensureExtensionActivated();\n\n    console.log(\"Starting build watcher from root...\");\n    await startBuildWatcher();\n    console.log(\"Build watcher started\");\n\n    const compilerLogPath = getCompilerLogPath(monorepoRoot);\n    const mtimeBefore = getFileMtime(compilerLogPath);\n    if (mtimeBefore) {\n      console.log(\n        `Root compiler.log mtime before: ${mtimeBefore.toISOString()}`,\n      );\n    } else {\n      console.log(\"Root compiler.log does not exist yet\");\n    }\n\n    console.log(\"Editing root file...\");\n    const originalContent = 
document.getText();\n    await insertCommentAndSave(editor, \"/* monorepo root test */\\n\");\n    console.log(\"Root file saved with edit\");\n\n    const mtimeAfter = await waitForFileUpdate(compilerLogPath, mtimeBefore);\n    if (mtimeAfter) {\n      console.log(`Root compiler.log mtime after: ${mtimeAfter.toISOString()}`);\n    } else {\n      console.log(\"Root compiler.log still does not exist\");\n    }\n\n    assert.ok(mtimeAfter, \"Root compiler.log should exist after file save\");\n    if (mtimeBefore && mtimeAfter) {\n      assert.ok(\n        mtimeAfter > mtimeBefore,\n        \"Root compiler.log should be updated after file save\",\n      );\n      console.log(\"SUCCESS: Root compiler.log was updated\");\n    }\n\n    await restoreContentAndSave(editor, originalContent);\n    console.log(\"Original content restored\");\n\n    await sleep(1000);\n    console.log(\"Monorepo root test completed\");\n  });\n\n  test(\"Monorepo: Build watcher works when opening subpackage file\", async () => {\n    const monorepoRoot = getWorkspaceRoot();\n    console.log(\"Monorepo root:\", monorepoRoot);\n\n    const appResPath = path.join(\n      monorepoRoot,\n      \"packages\",\n      \"app\",\n      \"src\",\n      \"App.res\",\n    );\n    if (!fs.existsSync(appResPath)) {\n      console.log(\"Monorepo app package not found, skipping test\");\n      return;\n    }\n\n    console.log(\"Opening subpackage file:\", appResPath);\n    const document = await vscode.workspace.openTextDocument(appResPath);\n    const editor = await vscode.window.showTextDocument(document);\n    console.log(\"Subpackage file opened successfully\");\n\n    await ensureExtensionActivated();\n\n    console.log(\"Starting build watcher from subpackage...\");\n    await startBuildWatcher();\n    console.log(\"Build watcher started\");\n\n    const rootCompilerLogPath = getCompilerLogPath(monorepoRoot);\n    const mtimeBefore = getFileMtime(rootCompilerLogPath);\n    if (mtimeBefore) {\n      
console.log(\n        `Root compiler.log mtime before: ${mtimeBefore.toISOString()}`,\n      );\n    } else {\n      console.log(\n        \"Root compiler.log does not exist yet (expected for monorepo subpackage)\",\n      );\n    }\n\n    console.log(\"Editing subpackage file...\");\n    const originalContent = document.getText();\n    await insertCommentAndSave(editor, \"/* monorepo subpackage test */\\n\");\n    console.log(\"Subpackage file saved with edit\");\n\n    const mtimeAfter = await waitForFileUpdate(\n      rootCompilerLogPath,\n      mtimeBefore,\n    );\n    if (mtimeAfter) {\n      console.log(`Root compiler.log mtime after: ${mtimeAfter.toISOString()}`);\n    } else {\n      console.log(\"Root compiler.log still does not exist\");\n    }\n\n    assert.ok(\n      mtimeAfter,\n      \"Root compiler.log should exist after subpackage file save\",\n    );\n    if (mtimeBefore && mtimeAfter) {\n      assert.ok(\n        mtimeAfter > mtimeBefore,\n        \"Root compiler.log should be updated after subpackage file save\",\n      );\n      console.log(\n        \"SUCCESS: Root compiler.log was updated from subpackage edit\",\n      );\n    }\n\n    await restoreContentAndSave(editor, originalContent);\n    console.log(\"Original content restored\");\n\n    await sleep(1000);\n    console.log(\"Monorepo subpackage test completed\");\n  });\n\n  test(\"Monorepo: Code analysis works from subpackage\", async () => {\n    const monorepoRoot = getWorkspaceRoot();\n    console.log(\"Monorepo root:\", monorepoRoot);\n\n    const libResPath = path.join(\n      monorepoRoot,\n      \"packages\",\n      \"lib\",\n      \"src\",\n      \"Lib.res\",\n    );\n    if (!fs.existsSync(libResPath)) {\n      console.log(\"Monorepo lib package not found, skipping test\");\n      return;\n    }\n\n    console.log(\"Opening lib file:\", libResPath);\n    const document = await vscode.workspace.openTextDocument(libResPath);\n    await vscode.window.showTextDocument(document);\n 
   console.log(\"Lib file opened successfully\");\n\n    await ensureExtensionActivated();\n\n    console.log(\"Starting build...\");\n    await startBuildWatcher();\n\n    console.log(\"Starting code analysis...\");\n    await startCodeAnalysis();\n    console.log(\"Code analysis started\");\n\n    console.log(\"Opening reanalyze server log...\");\n    await showReanalyzeServerLog();\n\n    const diagnostics = vscode.languages.getDiagnostics(document.uri);\n    console.log(`Found ${diagnostics.length} diagnostics in Lib.res`);\n    for (const diag of diagnostics.slice(0, 5)) {\n      console.log(\n        `  - Line ${diag.range.start.line + 1}: ${diag.message.substring(0, 80)}...`,\n      );\n    }\n\n    assert.ok(\n      diagnostics.length > 0,\n      \"Should have diagnostics for dead code in Lib.res\",\n    );\n\n    const deadFuncDiagnostic = diagnostics.find((d) =>\n      d.message.includes(\"unusedLibFunction\"),\n    );\n    assert.ok(\n      deadFuncDiagnostic,\n      \"Should find diagnostic for unusedLibFunction in monorepo lib\",\n    );\n    console.log(\n      `Found diagnostic for unusedLibFunction: ${deadFuncDiagnostic?.message}`,\n    );\n\n    console.log(\"Stopping code analysis...\");\n    await stopCodeAnalysis();\n    console.log(\"Code analysis stopped\");\n\n    console.log(\"Monorepo code analysis test completed\");\n  });\n\n  test(\"Monorepo: Should prompt to start build when opening subpackage without lock file\", async () => {\n    const monorepoRoot = getWorkspaceRoot();\n    console.log(\"Monorepo root:\", monorepoRoot);\n\n    const appResPath = path.join(\n      monorepoRoot,\n      \"packages\",\n      \"app\",\n      \"src\",\n      \"App.res\",\n    );\n    if (!fs.existsSync(appResPath)) {\n      console.log(\"Monorepo app package not found, skipping test\");\n      return;\n    }\n\n    console.log(\"Removing lock file from monorepo root...\");\n    removeMonorepoLockFiles(monorepoRoot);\n\n    console.log(\"Opening subpackage 
file:\", appResPath);\n    const document = await vscode.workspace.openTextDocument(appResPath);\n    await vscode.window.showTextDocument(document);\n    console.log(\"Subpackage file opened successfully\");\n\n    await sleep(1000);\n\n    const promptResult = findBuildPromptInLogs();\n    if (promptResult.found) {\n      console.log(\n        `Found prompt message: \"Prompting to start build for ${promptResult.path}\"`,\n      );\n    }\n\n    assert.ok(\n      promptResult.found,\n      \"Should find 'Prompting to start build' message in Language Server log\",\n    );\n\n    assert.ok(\n      promptResult.path.includes(\"monorepo-project\") &&\n        !promptResult.path.includes(\"packages\"),\n      `Prompt path should be monorepo root, not subpackage. Got: ${promptResult.path}`,\n    );\n    console.log(\"SUCCESS: Build prompt was shown for monorepo root path\");\n  });\n});\n"
  },
  {
    "path": "client/src/test/suite/monorepoSubpackage.test.ts",
    "content": "/**\n * Monorepo Subpackage Tests (Monorepo Subpackage Test Suite)\n *\n * This test suite runs with VSCode opened on a subpackage (packages/app),\n * not the monorepo root. It tests that the extension correctly detects\n * monorepo structure even when opened from a subpackage.\n *\n * Run these tests:\n *   cd client && npm run test -- --label monorepo-subpackage\n */\nimport * as assert from \"assert\";\nimport * as fs from \"fs\";\nimport * as path from \"path\";\nimport * as vscode from \"vscode\";\nimport {\n  getWorkspaceRoot,\n  removeMonorepoLockFiles,\n  sleep,\n  startBuildWatcher,\n  startCodeAnalysis,\n  stopCodeAnalysis,\n  showReanalyzeServerLog,\n  findBuildPromptInLogs,\n} from \"./helpers\";\n\nsuite(\"Monorepo Subpackage Test Suite\", () => {\n  test(\"Subpackage workspace: Should prompt to start build with monorepo root path\", async () => {\n    // In this test, workspaceRoot is packages/app (the subpackage)\n    const workspaceRoot = getWorkspaceRoot();\n    console.log(\"Workspace root (subpackage):\", workspaceRoot);\n\n    // The monorepo root is 2 levels up from packages/app\n    const monorepoRoot = path.resolve(workspaceRoot, \"../..\");\n    console.log(\"Monorepo root:\", monorepoRoot);\n\n    // Verify we're in the right setup - workspace should be a subpackage\n    assert.ok(\n      workspaceRoot.includes(\"packages\"),\n      `Workspace should be in packages folder, got: ${workspaceRoot}`,\n    );\n\n    // Check if the subpackage has a rescript.json\n    const rescriptJsonPath = path.join(workspaceRoot, \"rescript.json\");\n    if (!fs.existsSync(rescriptJsonPath)) {\n      console.log(\"Subpackage rescript.json not found, skipping test\");\n      return;\n    }\n\n    // Remove lock file from MONOREPO ROOT to trigger the \"Start Build\" prompt\n    console.log(\"Removing lock file from monorepo root...\");\n    removeMonorepoLockFiles(monorepoRoot);\n\n    // Open a .res file from the subpackage workspace\n    const 
resFilePath = path.join(workspaceRoot, \"src\", \"App.res\");\n    console.log(\"Opening file:\", resFilePath);\n\n    const document = await vscode.workspace.openTextDocument(resFilePath);\n    await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    // Wait for LSP to process - since no lock file exists, it should prompt for build\n    await sleep(1000);\n\n    // Read the Language Server log to verify the prompt was shown\n    const promptResult = findBuildPromptInLogs();\n    if (promptResult.found) {\n      console.log(\n        `Found prompt message: \"Prompting to start build for ${promptResult.path}\"`,\n      );\n    }\n\n    // Assert that the prompt was shown\n    assert.ok(\n      promptResult.found,\n      \"Should find 'Prompting to start build' message in Language Server log\",\n    );\n\n    // Assert that the prompt path is the monorepo root, not the subpackage\n    // Even though we opened from packages/app, the build prompt should use monorepo root\n    assert.ok(\n      promptResult.path.includes(\"monorepo-project\") &&\n        !promptResult.path.includes(\"packages\"),\n      `Prompt path should be monorepo root, not subpackage. 
Got: ${promptResult.path}`,\n    );\n    console.log(\n      \"SUCCESS: Build prompt correctly uses monorepo root path when opened from subpackage\",\n    );\n\n    // Now start the build and verify the lock file is created at the monorepo root\n    console.log(\"Starting build from subpackage workspace...\");\n    await startBuildWatcher(1500);\n    console.log(\"Build started\");\n\n    // Check that the lock file exists at the MONOREPO ROOT, not the subpackage\n    const monorepoLockPath = path.join(monorepoRoot, \"lib\", \"rescript.lock\");\n    const subpackageLockPath = path.join(workspaceRoot, \"lib\", \"rescript.lock\");\n\n    const monorepoLockExists = fs.existsSync(monorepoLockPath);\n    const subpackageLockExists = fs.existsSync(subpackageLockPath);\n\n    console.log(\n      `Monorepo lock file (${monorepoLockPath}): ${monorepoLockExists ? \"EXISTS\" : \"NOT FOUND\"}`,\n    );\n    console.log(\n      `Subpackage lock file (${subpackageLockPath}): ${subpackageLockExists ? \"EXISTS\" : \"NOT FOUND\"}`,\n    );\n\n    // The lock file should exist at the monorepo root\n    assert.ok(\n      monorepoLockExists,\n      `Lock file should exist at monorepo root: ${monorepoLockPath}`,\n    );\n\n    // The lock file should NOT exist at the subpackage level\n    assert.ok(\n      !subpackageLockExists,\n      `Lock file should NOT exist at subpackage: ${subpackageLockPath}`,\n    );\n\n    console.log(\"SUCCESS: Lock file created at monorepo root, not subpackage\");\n\n    // Remove any stale reanalyze socket file from a previous test run\n    const socketPath = path.join(monorepoRoot, \".rescript-reanalyze.sock\");\n    if (fs.existsSync(socketPath)) {\n      fs.unlinkSync(socketPath);\n      console.log(\"Removed stale socket file\");\n    }\n\n    // Start code analysis\n    console.log(\"Starting code analysis...\");\n    await startCodeAnalysis();\n    console.log(\"Code analysis started\");\n\n    // Open the reanalyze server log - verify it returns 
true (output channel shown)\n    console.log(\"Opening reanalyze server log...\");\n    await showReanalyzeServerLog();\n\n    // Verify diagnostics are shown (code analysis is working from subpackage)\n    const diagnostics = vscode.languages.getDiagnostics(document.uri);\n    console.log(`Found ${diagnostics.length} diagnostics in App.res`);\n    assert.ok(\n      diagnostics.length > 0,\n      \"Code analysis should find diagnostics in App.res when run from subpackage\",\n    );\n\n    // Stop code analysis\n    console.log(\"Stopping code analysis...\");\n    await stopCodeAnalysis();\n    console.log(\"Code analysis stopped\");\n\n    console.log(\"Test complete - lock file will be cleaned up on LSP shutdown\");\n  });\n});\n"
  },
  {
    "path": "client/src/test/suite/rescript9.test.ts",
    "content": "/**\n * ReScript 9 Tests (ReScript 9 Build Test Suite)\n *\n * This tests that the build watcher works with older ReScript versions\n * that use \"rescript build -w\" instead of \"rescript watch\".\n *\n * Run these tests:\n *   cd client && npm run test -- --label rescript9-project\n */\nimport * as assert from \"assert\";\nimport * as fs from \"fs\";\nimport * as path from \"path\";\nimport * as vscode from \"vscode\";\nimport {\n  getWorkspaceRoot,\n  removeBsbLockFile,\n  ensureExtensionActivated,\n  waitFor,\n  sleep,\n  getCompilerLogPath,\n  getFileMtime,\n  waitForFileUpdate,\n  insertCommentAndSave,\n  restoreContentAndSave,\n  startBuildWatcher,\n  findBuildPromptInLogs,\n} from \"./helpers\";\n\nsuite(\"ReScript 9 Build Test Suite\", () => {\n  test(\"ReScript 9: Extension should be present\", async () => {\n    const extension = vscode.extensions.getExtension(\n      \"chenglou92.rescript-vscode\",\n    );\n    assert.ok(extension, \"ReScript extension should be present\");\n    console.log(\"Extension found:\", extension.id);\n  });\n\n  test(\"ReScript 9: Build watcher should start with 'rescript build -w'\", async () => {\n    const workspaceRoot = getWorkspaceRoot();\n    console.log(\"Workspace root:\", workspaceRoot);\n\n    // Verify we're in the ReScript 9 project\n    const bsconfigPath = path.join(workspaceRoot, \"bsconfig.json\");\n    if (!fs.existsSync(bsconfigPath)) {\n      console.log(\"bsconfig.json not found, skipping test\");\n      return;\n    }\n\n    // Check ReScript version\n    const packageJsonPath = path.join(\n      workspaceRoot,\n      \"node_modules\",\n      \"rescript\",\n      \"package.json\",\n    );\n    if (fs.existsSync(packageJsonPath)) {\n      const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, \"utf-8\"));\n      console.log(\"ReScript version:\", packageJson.version);\n      assert.ok(\n        packageJson.version.startsWith(\"9.\"),\n        `Expected ReScript 9.x, got 
${packageJson.version}`,\n      );\n    }\n\n    // Open a ReScript file\n    const resFilePath = path.join(workspaceRoot, \"src\", \"Hello.res\");\n    console.log(\"Opening file:\", resFilePath);\n\n    const document = await vscode.workspace.openTextDocument(resFilePath);\n    await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    await ensureExtensionActivated();\n\n    // Start the build watcher\n    console.log(\"Starting build watcher (should use 'rescript build -w')...\");\n    await startBuildWatcher(1500);\n    console.log(\"Build watcher started\");\n\n    // Check if the lock file was created (.bsb.lock for ReScript 9)\n    const lockPath = path.join(workspaceRoot, \".bsb.lock\");\n    const lockExists = fs.existsSync(lockPath);\n    console.log(`.bsb.lock exists: ${lockExists}`);\n    assert.ok(\n      lockExists,\n      \".bsb.lock should exist after starting build watcher\",\n    );\n\n    console.log(\"ReScript 9 build watcher test completed\");\n  });\n\n  test(\"ReScript 9: Build watcher recompiles on file save\", async () => {\n    const workspaceRoot = getWorkspaceRoot();\n    console.log(\"Workspace root:\", workspaceRoot);\n\n    // Open a ReScript file\n    const resFilePath = path.join(workspaceRoot, \"src\", \"Hello.res\");\n    console.log(\"Opening file:\", resFilePath);\n\n    const document = await vscode.workspace.openTextDocument(resFilePath);\n    const editor = await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    await ensureExtensionActivated();\n\n    // Start the build watcher\n    console.log(\"Starting build watcher...\");\n    await startBuildWatcher();\n    console.log(\"Build watcher started\");\n\n    // Check compiler.log modification time before edit\n    const compilerLogPath = getCompilerLogPath(workspaceRoot);\n    const mtimeBefore = getFileMtime(compilerLogPath);\n    if (mtimeBefore) {\n      console.log(`compiler.log 
mtime before: ${mtimeBefore.toISOString()}`);\n    } else {\n      console.log(\"compiler.log does not exist yet\");\n    }\n\n    // Edit the file and save\n    console.log(\"Editing file...\");\n    const originalContent = document.getText();\n    await insertCommentAndSave(editor, \"/* rescript 9 test */\\n\");\n    console.log(\"File saved with edit\");\n\n    // Wait for compilation\n    console.log(\"Waiting for compilation...\");\n    const mtimeAfter = await waitForFileUpdate(compilerLogPath, mtimeBefore, {\n      timeout: 3000,\n    });\n    if (mtimeAfter) {\n      console.log(`compiler.log mtime after: ${mtimeAfter.toISOString()}`);\n    } else {\n      console.log(\"compiler.log still does not exist\");\n    }\n\n    // Assert that compiler.log was updated\n    assert.ok(mtimeAfter, \"compiler.log should exist after file save\");\n    if (mtimeBefore && mtimeAfter) {\n      assert.ok(\n        mtimeAfter > mtimeBefore,\n        \"compiler.log should be updated after file save\",\n      );\n      console.log(\"SUCCESS: compiler.log was updated after file save\");\n    } else if (!mtimeBefore && mtimeAfter) {\n      console.log(\"SUCCESS: compiler.log was created after file save\");\n    }\n\n    // Restore original content\n    console.log(\"Restoring original content...\");\n    await restoreContentAndSave(editor, originalContent);\n    console.log(\"Original content restored\");\n\n    await sleep(1000);\n    console.log(\"ReScript 9 recompilation test completed\");\n  });\n\n  test(\"ReScript 9: Should prompt to start build when no lock file exists\", async () => {\n    const workspaceRoot = getWorkspaceRoot();\n    console.log(\"Workspace root:\", workspaceRoot);\n\n    // Remove lock file to trigger the \"Start Build\" prompt\n    removeBsbLockFile(workspaceRoot);\n\n    // Open a .res file to trigger the LSP\n    const resFilePath = path.join(workspaceRoot, \"src\", \"Hello.res\");\n    console.log(\"Opening file:\", resFilePath);\n\n    const 
document = await vscode.workspace.openTextDocument(resFilePath);\n    await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    // Wait for LSP to process and potentially show the prompt\n    await sleep(1000);\n\n    // Read the Language Server log to verify the prompt was shown\n    const promptResult = findBuildPromptInLogs();\n    if (promptResult.found) {\n      console.log(\"Found prompt message in logs\");\n    }\n\n    assert.ok(\n      promptResult.found,\n      \"Should find 'Prompting to start build' message in Language Server log\",\n    );\n    console.log(\"SUCCESS: Build prompt was shown for ReScript 9 project\");\n  });\n\n  test(\"ReScript 9: Lock file should be cleaned up on language server restart\", async () => {\n    const workspaceRoot = getWorkspaceRoot();\n    console.log(\"Workspace root:\", workspaceRoot);\n\n    // First restart language server to ensure clean state\n    console.log(\"Restarting language server to ensure clean state...\");\n    await vscode.commands.executeCommand(\n      \"rescript-vscode.restart_language_server\",\n    );\n    await sleep(2000);\n\n    // Open a ReScript file\n    const resFilePath = path.join(workspaceRoot, \"src\", \"Hello.res\");\n    console.log(\"Opening file:\", resFilePath);\n\n    const document = await vscode.workspace.openTextDocument(resFilePath);\n    await vscode.window.showTextDocument(document);\n    console.log(\"File opened successfully\");\n\n    // Start the build watcher\n    console.log(\"Starting build watcher...\");\n    await vscode.commands.executeCommand(\"rescript-vscode.start_build\");\n\n    // Wait for lock file to appear (poll up to 5 seconds)\n    await sleep(500);\n    const lockPath = path.join(workspaceRoot, \".bsb.lock\");\n    const lockExistsBefore = await waitFor(() => fs.existsSync(lockPath), {\n      timeout: 5000,\n      interval: 500,\n      message: \".bsb.lock\",\n    });\n    assert.ok(lockExistsBefore, 
\".bsb.lock should exist before restart\");\n\n    // Restart language server (this should kill the build watcher and clean up lock file)\n    console.log(\"Restarting language server...\");\n    await vscode.commands.executeCommand(\n      \"rescript-vscode.restart_language_server\",\n    );\n\n    // Wait for restart to complete\n    await sleep(2000);\n\n    // Verify lock file is cleaned up\n    const lockExistsAfter = fs.existsSync(lockPath);\n    console.log(`.bsb.lock exists after restart: ${lockExistsAfter}`);\n    assert.ok(\n      !lockExistsAfter,\n      \".bsb.lock should be cleaned up after language server restart\",\n    );\n\n    console.log(\"SUCCESS: Lock file was cleaned up on language server restart\");\n  });\n});\n"
  },
  {
    "path": "client/src/utils.ts",
    "content": "import * as path from \"path\";\nimport * as fs from \"fs\";\nimport * as os from \"os\";\nimport { DocumentUri } from \"vscode-languageclient\";\nimport { findBinary, type BinaryName } from \"../../shared/src/findBinary\";\nimport {\n  findProjectRootOfFileInDir as findProjectRootOfFileInDirShared,\n  normalizePath as normalizePathShared,\n} from \"../../shared/src/projectRoots\";\n\n/*\n * Much of the code in here is duplicated from the server code.\n * At some point we should move the functionality powered by this\n * to the server itself.\n */\n\n/**\n * Branded type for normalized file paths.\n *\n * All paths should be normalized to ensure consistent lookups and prevent\n * path format mismatches (e.g., trailing slashes, relative vs absolute paths).\n *\n * Use `normalizePath()` to convert a regular path to a `NormalizedPath`.\n */\nexport type NormalizedPath = string & { __brand: \"NormalizedPath\" };\n\n/**\n * Normalizes a file path and returns it as a `NormalizedPath`.\n *\n * @param filePath - The path to normalize (can be null)\n * @returns The normalized path, or null if input was null\n */\nexport function normalizePath(filePath: string | null): NormalizedPath | null {\n  // `path.normalize` ensures we can assume string is now NormalizedPath\n  return normalizePathShared(filePath) as NormalizedPath | null;\n}\n\ntype binaryName = \"rescript-editor-analysis.exe\" | \"rescript-tools.exe\";\n\nconst platformDir =\n  process.arch === \"arm64\" ? 
process.platform + process.arch : process.platform;\n\nconst getLegacyBinaryDevPath = (b: binaryName) =>\n  path.join(path.dirname(__dirname), \"..\", \"analysis\", b);\n\nexport const getLegacyBinaryProdPath = (b: binaryName) =>\n  path.join(\n    path.dirname(__dirname),\n    \"..\",\n    \"server\",\n    \"analysis_binaries\",\n    platformDir,\n    b,\n  );\n\nexport const getBinaryPath = (\n  binaryName: \"rescript-editor-analysis.exe\" | \"rescript-tools.exe\",\n  projectRootPath: NormalizedPath | null = null,\n): string | null => {\n  const binaryFromCompilerPackage = path.join(\n    projectRootPath ?? \"\",\n    \"node_modules\",\n    \"rescript\",\n    platformDir,\n    binaryName,\n  );\n\n  if (projectRootPath != null && fs.existsSync(binaryFromCompilerPackage)) {\n    return binaryFromCompilerPackage;\n  } else if (fs.existsSync(getLegacyBinaryDevPath(binaryName))) {\n    return getLegacyBinaryDevPath(binaryName);\n  } else if (fs.existsSync(getLegacyBinaryProdPath(binaryName))) {\n    return getLegacyBinaryProdPath(binaryName);\n  } else {\n    return null;\n  }\n};\n\nlet tempFilePrefix = \"rescript_\" + process.pid + \"_\";\nlet tempFileId = 0;\n\nexport const createFileInTempDir = (prefix = \"\", extension = \"\") => {\n  let tempFileName = prefix + \"_\" + tempFilePrefix + tempFileId + extension;\n  tempFileId = tempFileId + 1;\n  return path.join(os.tmpdir(), tempFileName);\n};\n\nexport let findProjectRootOfFileInDir = (\n  source: string,\n): NormalizedPath | null => {\n  return normalizePath(findProjectRootOfFileInDirShared(source));\n};\n\nexport { findBinary, BinaryName };\n"
  },
  {
    "path": "client/tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"module\": \"commonjs\",\n    \"target\": \"es2019\",\n    \"lib\": [\"ES2019\"],\n    \"outDir\": \"out\",\n    \"rootDirs\": [\"src\", \"../shared/src\"],\n    \"sourceMap\": true,\n    \"skipLibCheck\": true\n  },\n  \"include\": [\"src\", \"../shared/src\"],\n  \"exclude\": [\"node_modules\"]\n}\n"
  },
  {
    "path": "dune-project",
    "content": "(lang dune 2.3)\n\n(generate_opam_files true)\n\n(authors \"ReScript Team\")\n\n(maintainers \"ReScript Team\")\n\n(homepage \"https://github.com/rescript-lang/rescript-vscode\")\n\n(bug_reports \"https://github.com/rescript-lang/rescript-vscode/issues\")\n\n(package\n (name analysis)\n (synopsis \"ReScript Analysis\")\n (depends\n  (ocaml\n   (>= 4.10))\n  (cppo\n   (= 1.6.9))\n  dune))\n\n(package\n (name tools)\n (synopsis \"ReScript Tools\")\n (depends\n  (ocaml\n   (>= 4.10))\n  (cppo\n   (= 1.6.9))\n  analysis\n  dune))\n"
  },
  {
    "path": "grammars/rescript.markdown.json",
    "content": "{\n  \"fileTypes\": [],\n  \"injectionSelector\": \"L:text.html.markdown\",\n  \"patterns\": [\n    {\n      \"include\": \"#rescript-code-block\"\n    }\n  ],\n  \"repository\": {\n    \"rescript-code-block\": {\n      \"name\": \"markup.fenced_code.block.markdown\",\n      \"begin\": \"(^|\\\\G)(\\\\s*)(\\\\`{3,}|~{3,})\\\\s*(?i:(res|rescript)(\\\\s+[^`~]*)?$)\",\n      \"end\": \"(^|\\\\G)(\\\\2|\\\\s{0,3})(\\\\3)\\\\s*$\",\n      \"beginCaptures\": {\n        \"3\": {\n          \"name\": \"punctuation.definition.markdown\"\n        },\n        \"5\": {\n          \"name\": \"fenced_code.block.language\"\n        },\n        \"6\": {\n          \"name\": \"fenced_code.block.language.attributes\"\n        }\n      },\n      \"endCaptures\": {\n        \"3\": {\n          \"name\": \"punctuation.definition.markdown\"\n        }\n      },\n      \"patterns\": [\n        {\n          \"begin\": \"(^|\\\\G)(\\\\s*)(.*)\",\n          \"while\": \"(^|\\\\G)(?!\\\\s*([`~]{3,})\\\\s*$)\",\n          \"contentName\": \"meta.embedded.block.rescript\",\n          \"patterns\": [\n            {\n              \"include\": \"source.rescript\"\n            }\n          ]\n        }\n      ]\n    }\n  },\n  \"scopeName\": \"markdown.rescript.codeblock\"\n}\n"
  },
  {
    "path": "grammars/rescript.tmLanguage.json",
    "content": "{\n  \"$schema\": \"https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json\",\n  \"name\": \"ReScript\",\n  \"scopeName\": \"source.rescript\",\n  \"repository\": {\n    \"RE_KEYWORD_CONTROL\": {\n      \"name\": \"keyword.control\",\n      \"match\": \"\\\\b(and|as|assert|async|await|catch|constraint|downto|else|exception|external|for|if|in|lazy|mutable|rec|switch|to|try|when|while|with|private)\\\\b\"\n    },\n    \"RE_TO_DOWNTO_AS_LABELS\": {\n      \"patterns\": [\n        {\n          \"match\": \"~(to|downto)\\\\s*(=)\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"variable\"\n            },\n            \"2\": {\n              \"name\": \"keyword.operator keyword\"\n            }\n          }\n        },\n        {\n          \"match\": \"~(to|downto)\\\\s+(as)\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"variable\"\n            },\n            \"2\": {\n              \"name\": \"keyword.control\"\n            }\n          }\n        }\n      ]\n    },\n    \"RE_CONSTANTS_BOOL\": {\n      \"name\": \"constant.language.boolean\",\n      \"match\": \"\\\\b(false|true)\\\\b\"\n    },\n    \"RE_KEYWORD\": {\n      \"name\": \"storage.type\",\n      \"match\": \"\\\\b(include|let|module|of|open|type)\\\\b\"\n    },\n    \"commentLine\": {\n      \"match\": \"//.*\",\n      \"name\": \"comment.line\"\n    },\n    \"commentBlock\": {\n      \"name\": \"comment.block\",\n      \"begin\": \"/\\\\*\",\n      \"end\": \"\\\\*/\",\n      \"patterns\": [\n        {\n          \"include\": \"#commentBlock\"\n        }\n      ]\n    },\n    \"punctuation\": {\n      \"patterns\": [\n        {\n          \"match\": \"~\",\n          \"name\": \"punctuation.definition.keyword\"\n        },\n        {\n          \"match\": \";\",\n          \"name\": \"punctuation.terminator\"\n        },\n        {\n          \"match\": \"\\\\.\",\n          \"name\": 
\"punctuation.accessor\"\n        },\n        {\n          \"match\": \"\\\\,\",\n          \"name\": \"punctuation.separator\"\n        },\n        {\n          \"match\": \"\\\\?|:\",\n          \"name\": \"punctuation.separator\"\n        },\n        {\n          \"match\": \"\\\\|(?!\\\\|)\",\n          \"name\": \"punctuation.separator\"\n        },\n        {\n          \"match\": \"\\\\{\",\n          \"name\": \"punctuation.section.braces.begin\"\n        },\n        {\n          \"match\": \"\\\\}\",\n          \"name\": \"punctuation.section.braces.end\"\n        },\n        {\n          \"match\": \"\\\\[\",\n          \"name\": \"punctuation.section.brackets.begin\"\n        },\n        {\n          \"match\": \"\\\\]\",\n          \"name\": \"punctuation.section.brackets.end\"\n        },\n        {\n          \"match\": \"\\\\(\",\n          \"name\": \"punctuation.section.parens.begin\"\n        },\n        {\n          \"match\": \"\\\\)\",\n          \"name\": \"punctuation.section.parens.end\"\n        }\n      ]\n    },\n    \"keyword\": {\n      \"patterns\": [\n        {\n          \"include\": \"#RE_TO_DOWNTO_AS_LABELS\"\n        },\n        {\n          \"include\": \"#RE_KEYWORD_CONTROL\"\n        },\n        {\n          \"include\": \"#RE_KEYWORD\"\n        }\n      ]\n    },\n    \"constant\": {\n      \"patterns\": [\n        {\n          \"include\": \"#RE_CONSTANTS_BOOL\"\n        }\n      ]\n    },\n    \"string-character-escape\": {\n      \"name\": \"constant.character.escape\",\n      \"match\": \"\\\\\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u{[0-9A-Fa-f]+}|[0-2][0-7]{0,2}|3[0-6][0-7]?|37[0-7]?|[4-7][0-7]?|.|$)\"\n    },\n    \"string\": {\n      \"patterns\": [\n        {\n          \"name\": \"string.quoted.double\",\n          \"begin\": \"\\\"\",\n          \"end\": \"\\\"\",\n          \"beginCaptures\": {\n            \"1\": {\n              \"name\": \"punctuation.definition.string.begin\"\n            }\n          },\n          
\"endCaptures\": {\n            \"1\": {\n              \"name\": \"punctuation.definition.string.end\"\n            }\n          },\n          \"patterns\": [\n            {\n              \"include\": \"#string-character-escape\"\n            }\n          ]\n        },\n        {\n          \"name\": \"string.template\",\n          \"begin\": \"([a-z_][0-9a-zA-Z_]*)?(`)\",\n          \"end\": \"(?<!\\\\\\\\)`\",\n          \"beginCaptures\": {\n            \"1\": {\n              \"name\": \"entity.name.function\"\n            },\n            \"2\": {\n              \"name\": \"punctuation.definition.string.template.begin\"\n            }\n          },\n          \"endCaptures\": {\n            \"1\": {\n              \"name\": \"punctuation.definition.string.template.end\"\n            }\n          },\n          \"patterns\": [\n            {\n              \"include\": \"#string-character-escape\"\n            },\n            {\n              \"name\": \"meta.template.expression\",\n              \"begin\": \"\\\\$\\\\{\",\n              \"beginCaptures\": {\n                \"0\": {\n                  \"name\": \"punctuation.definition.template-expression.begin\"\n                }\n              },\n              \"end\": \"\\\\}\",\n              \"endCaptures\": {\n                \"0\": {\n                  \"name\": \"punctuation.definition.template-expression.end\"\n                }\n              },\n              \"patterns\": [\n                {\n                  \"match\": \"[a-z_][0-9a-zA-Z_]*\"\n                },\n                {\n                  \"include\": \"#operator\"\n                },\n                {\n                  \"include\": \"#punctuation\"\n                },\n                {\n                  \"include\": \"#string\"\n                }\n              ]\n            }\n          ]\n        }\n      ]\n    },\n    \"function\": {\n      \"patterns\": [\n        {\n          \"match\": \"=>\",\n          \"name\": 
\"storage.type.function keyword.declaration.function\"\n        }\n      ]\n    },\n    \"character\": {\n      \"patterns\": [\n        {\n          \"match\": \"'[\\\\x00-\\\\x7F]'\",\n          \"name\": \"string.quoted.single\"\n        }\n      ]\n    },\n    \"typeParameter\": {\n      \"patterns\": [\n        {\n          \"match\": \"'[A-Za-z][A-Za-z0-9_]*\",\n          \"name\": \"support.type\"\n        }\n      ]\n    },\n    \"defaultIdIsVariable\": {\n      \"patterns\": [\n        {\n          \"match\": \"[A-Za-z_][A-Za-z0-9_]*\",\n          \"name\": \"variable\"\n        }\n      ]\n    },\n    \"number\": {\n      \"patterns\": [\n        {\n          \"match\": \"\\\\b(0[xX][a-fA-F0-9_]+[Lln]?|0[oO][0-7_]+[Lln]?|0[bB][01_]+[Lln]?|[0-9][0-9_]*([Lln]|(\\\\.[0-9_]+)?([eE][-+]?[0-9_]+)?)?)\\\\b\",\n          \"name\": \"constant.numeric\"\n        }\n      ]\n    },\n    \"operator\": {\n      \"patterns\": [\n        {\n          \"match\": \"->|\\\\|\\\\||&&|\\\\+\\\\+|\\\\*\\\\*|\\\\+\\\\.|\\\\+|-\\\\.|-|\\\\*\\\\.|\\\\*|/\\\\.|/|\\\\.\\\\.\\\\.|\\\\.\\\\.|===|==|\\\\^|:=|!|>=(?! 
*\\\\?)|<=|=\",\n          \"name\": \"keyword.operator\"\n        },\n        {\n          \"match\": \"\\\\|>\",\n          \"name\": \"invalid.deprecated\"\n        }\n      ]\n    },\n    \"constructor\": {\n      \"patterns\": [\n        {\n          \"match\": \"\\\\b[A-Z][0-9a-zA-Z_]*\\\\b\",\n          \"name\": \"variable.other.enummember\"\n        },\n        {\n          \"match\": \"(#)\\\\s*([a-zA-Z][0-9a-zA-Z_]*)\\\\b\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"variable.other.enummember\"\n            },\n            \"2\": {\n              \"name\": \"variable.other.enummember\"\n            }\n          }\n        },\n        {\n          \"match\": \"(#)\\\\s*(\\\\.\\\\.\\\\.)\\\\b\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"variable.other.enummember\"\n            },\n            \"2\": {\n              \"name\": \"variable.other.enummember\"\n            }\n          }\n        },\n        {\n          \"match\": \"(#)\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"variable.other.enummember\"\n            }\n          }\n        }\n      ]\n    },\n    \"entity-literal\": {\n      \"patterns\": [\n        {\n          \"match\": \"\\\\b(list|dict)(\\\\{)\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"keyword\"\n            },\n            \"2\": {\n              \"name\": \"punctuation.section.braces.begin\"\n            }\n          }\n        },\n        {\n          \"match\": \"\\\\}\",\n          \"name\": \"punctuation.section.braces.end\"\n        }\n      ]\n    },\n    \"attribute\": {\n      \"patterns\": [\n        {\n          \"match\": \"(%%?|@@?)([A-Za-z_][A-Za-z0-9_\\\\.]*)\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"punctuation.decorator\"\n            },\n            \"2\": {\n              \"patterns\": [\n                {\n                  \"match\": 
\"bs\\\\.send\\\\.pipe\",\n                  \"name\": \"invalid.deprecated\"\n                },\n                {\n                  \"match\": \"splice\",\n                  \"name\": \"invalid.illegal\"\n                },\n                {\n                  \"match\": \"(bs\\\\.)?([A-Za-z_][A-Za-z0-9_\\\\.]*)\",\n                  \"captures\": {\n                    \"1\": {\n                      \"name\": \"invalid.deprecated\"\n                    },\n                    \"2\": {\n                      \"name\": \"entity.name.function\"\n                    }\n                  }\n                },\n                {\n                  \"match\": \"[A-Za-z_][A-Za-z0-9_\\\\.]*\",\n                  \"name\": \"entity.name.function\"\n                }\n              ]\n            }\n          }\n        }\n      ]\n    },\n    \"jsx\": {\n      \"patterns\": [\n        {\n          \"match\": \"<>|</>|</|/>\",\n          \"name\": \"punctuation.definition.tag\"\n        },\n        {\n          \"match\": \"</([A-Z_][0-9a-zA-Z_]*)\",\n          \"captures\": {\n            \"0\": {\n              \"name\": \"punctuation.definition.tag\"\n            },\n            \"1\": {\n              \"name\": \"entity.name.class\"\n            }\n          }\n        },\n        {\n          \"match\": \"</([a-z_][0-9a-zA-Z_]*)\",\n          \"captures\": {\n            \"0\": {\n              \"name\": \"punctuation.definition.tag\"\n            },\n            \"1\": {\n              \"name\": \"variable\"\n            }\n          }\n        },\n        {\n          \"match\": \"<([A-Z_][0-9a-zA-Z_]*)\",\n          \"captures\": {\n            \"0\": {\n              \"name\": \"punctuation.definition.tag\"\n            },\n            \"1\": {\n              \"name\": \"entity.name.class\"\n            }\n          }\n        }\n      ]\n    },\n    \"openOrIncludeModule\": {\n      \"patterns\": [\n        {\n          \"match\": 
\"\\\\b(open|include)\\\\s+([A-Z_][0-9a-zA-Z_]*((\\\\.)([A-Z_][0-9a-zA-Z_]*))*)\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"keyword\"\n            },\n            \"2\": {\n              \"patterns\": [\n                {\n                  \"include\": \"#moduleAccessEndsWithModule\"\n                }\n              ]\n            }\n          }\n        },\n        {\n          \"match\": \"\\\\b(open|include)\\\\s+\",\n          \"name\": \"keyword\"\n        }\n      ]\n    },\n    \"moduleAccessEndsWithModule\": {\n      \"patterns\": [\n        {\n          \"match\": \"[A-Z_][0-9a-zA-Z_]*\",\n          \"name\": \"entity.name.class\"\n        },\n        {\n          \"match\": \"(\\\\.)([A-Z_][0-9a-zA-Z_]*)\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"punctuation.accessor\"\n            },\n            \"2\": {\n              \"name\": \"entity.name.class\"\n            }\n          }\n        }\n      ]\n    },\n    \"moduleAccess\": {\n      \"patterns\": [\n        {\n          \"match\": \"\\\\b([A-Z_][0-9a-zA-Z_]*)(\\\\.)\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"entity.name.class\"\n            },\n            \"2\": {\n              \"name\": \"punctuation.accessor\"\n            }\n          }\n        }\n      ]\n    },\n    \"moduleDeclaration\": {\n      \"patterns\": [\n        {\n          \"match\": \"\\\\b(module)\\\\s+(type\\\\s+)?(of\\\\s+)?([A-Z_][0-9a-zA-Z_]*)\",\n          \"captures\": {\n            \"1\": {\n              \"name\": \"keyword\"\n            },\n            \"2\": {\n              \"name\": \"keyword\"\n            },\n            \"3\": {\n              \"name\": \"keyword\"\n            },\n            \"4\": {\n              \"name\": \"entity.name.class\"\n            }\n          },\n          \"patterns\": [\n            {\n              \"match\": \"\\\\s*:\\\\s*([A-Z_][0-9a-zA-Z_]*)\",\n              
\"captures\": {\n                \"1\": {\n                  \"name\": \"entity.name.class\"\n                }\n              }\n            }\n          ]\n        }\n      ]\n    },\n    \"ffi\": {\n      \"name\": \"source.embedded.javascript\",\n      \"contentName\": \"meta.embedded.block.javascript\",\n      \"begin\": \"(%|%%)(raw|ffi)(\\\\()(`)\",\n      \"end\": \"(`)(\\\\))\",\n      \"beginCaptures\": {\n        \"1\": {\n          \"name\": \"punctuation.decorator\"\n        },\n        \"2\": {\n          \"name\": \"entity.name.function\"\n        },\n        \"4\": {\n          \"name\": \"punctuation.definition.string.template.begin.embedded-js\"\n        }\n      },\n      \"endCaptures\": {\n        \"1\": {\n          \"name\": \"punctuation.definition.string.template.end.embedded-js\"\n        }\n      },\n      \"patterns\": [\n        {\n          \"include\": \"source.js\"\n        }\n      ]\n    },\n    \"ffi-single\": {\n      \"name\": \"source.embedded.javascript.single\",\n      \"match\": \"(%|%%)(raw|ffi)(\\\\()(`)(.*?)(`)(\\\\))\",\n      \"captures\": {\n        \"1\": {\n          \"name\": \"punctuation.decorator\"\n        },\n        \"2\": {\n          \"name\": \"entity.name.function\"\n        },\n        \"4\": {\n          \"name\": \"punctuation.definition.string.template.begin.embedded-js\"\n        },\n        \"5\": {\n          \"patterns\": [\n            {\n              \"include\": \"source.js\"\n            }\n          ]\n        },\n        \"6\": {\n          \"name\": \"punctuation.definition.string.template.end.embedded-js\"\n        }\n      }\n    }\n  },\n  \"patterns\": [\n    { \"include\": \"#ffi-single\" },\n    { \"include\": \"#ffi\" },\n    { \"include\": \"#constant\" },\n    { \"include\": \"#commentLine\" },\n    { \"include\": \"#commentBlock\" },\n    { \"include\": \"#character\" },\n    { \"include\": \"#typeParameter\" },\n    { \"include\": \"#string\" },\n    { \"include\": \"#attribute\" 
},\n    { \"include\": \"#function\" },\n    { \"include\": \"#entity-literal\" },\n    { \"include\": \"#jsx\" },\n    { \"include\": \"#operator\" },\n    { \"include\": \"#number\" },\n    { \"include\": \"#openOrIncludeModule\" },\n    { \"include\": \"#moduleDeclaration\" },\n    { \"include\": \"#moduleAccess\" },\n    { \"include\": \"#constructor\" },\n    { \"include\": \"#keyword\" },\n    { \"include\": \"#punctuation\" },\n    { \"include\": \"#defaultIdIsVariable\" }\n  ]\n}\n"
  },
  {
    "path": "package.json",
    "content": "{\n  \"name\": \"rescript-vscode\",\n  \"displayName\": \"ReScript\",\n  \"description\": \"ReScript language support (official)\",\n  \"author\": \"ReScript Team\",\n  \"license\": \"MIT\",\n  \"version\": \"1.72.0\",\n  \"repository\": {\n    \"type\": \"git\",\n    \"url\": \"https://github.com/rescript-lang/rescript-vscode\"\n  },\n  \"publisher\": \"chenglou92\",\n  \"icon\": \"logo.png\",\n  \"categories\": [\n    \"Programming Languages\",\n    \"Snippets\",\n    \"Linters\",\n    \"Formatters\"\n  ],\n  \"keywords\": [\n    \"rescript\",\n    \"language-server\"\n  ],\n  \"engines\": {\n    \"vscode\": \"^1.97.0\"\n  },\n  \"activationEvents\": [\n    \"onLanguage:rescript\"\n  ],\n  \"main\": \"./client/out/extension\",\n  \"contributes\": {\n    \"semanticTokenScopes\": [\n      {\n        \"language\": \"rescript\",\n        \"scopes\": {\n          \"interface\": [\n            \"entity.name.tag\"\n          ],\n          \"modifier\": [\n            \"punctuation.definition.tag\"\n          ],\n          \"type\": [\n            \"support.type.primitive\"\n          ]\n        }\n      }\n    ],\n    \"jsonValidation\": [\n      {\n        \"fileMatch\": [\n          \"bsconfig.json\",\n          \"rescript.json\"\n        ],\n        \"url\": \"https://raw.githubusercontent.com/rescript-lang/rescript-compiler/master/docs/docson/build-schema.json\"\n      }\n    ],\n    \"commands\": [\n      {\n        \"command\": \"rescript-vscode.create_interface\",\n        \"title\": \"ReScript: Create an interface file for this implementation file\"\n      },\n      {\n        \"command\": \"rescript-vscode.open_compiled\",\n        \"category\": \"ReScript\",\n        \"title\": \"Open the compiled JS file for this implementation file\",\n        \"icon\": \"$(output)\"\n      },\n      {\n        \"command\": \"rescript-vscode.start_code_analysis\",\n        \"title\": \"ReScript: Start Code Analyzer\"\n      },\n      {\n        \"command\": 
\"rescript-vscode.stop_code_analysis\",\n        \"title\": \"ReScript: Stop Code Analyzer\"\n      },\n      {\n        \"command\": \"rescript-vscode.show_reanalyze_server_log\",\n        \"title\": \"ReScript: Show Code Analyzer Server Log\"\n      },\n      {\n        \"command\": \"rescript-vscode.restart_language_server\",\n        \"title\": \"ReScript: Restart Language Server\"\n      },\n      {\n        \"command\": \"rescript-vscode.start_build\",\n        \"title\": \"ReScript: Start Build\"\n      },\n      {\n        \"command\": \"rescript-vscode.switch-impl-intf\",\n        \"title\": \"ReScript: Switch implementation/interface\",\n        \"icon\": {\n          \"light\": \"assets/switch-impl-intf-light.svg\",\n          \"dark\": \"assets/switch-impl-intf-dark.svg\"\n        }\n      },\n      {\n        \"command\": \"rescript-vscode.debug-dump-start\",\n        \"title\": \"DEBUG ReScript: Dump analysis info\"\n      },\n      {\n        \"command\": \"rescript-vscode.dump-server-state\",\n        \"title\": \"DEBUG ReScript: Dump LSP Server State\"\n      },\n      {\n        \"command\": \"rescript-vscode.paste_as_rescript_json\",\n        \"category\": \"ReScript\",\n        \"title\": \"Paste as ReScript JSON.t\"\n      },\n      {\n        \"command\": \"rescript-vscode.paste_as_rescript_jsx\",\n        \"category\": \"ReScript\",\n        \"title\": \"Paste as ReScript JSX\"\n      }\n    ],\n    \"keybindings\": [\n      {\n        \"command\": \"rescript-vscode.switch-impl-intf\",\n        \"key\": \"Alt+O\",\n        \"when\": \"editorLangId == rescript\"\n      }\n    ],\n    \"menus\": {\n      \"editor/title\": [\n        {\n          \"command\": \"rescript-vscode.open_compiled\",\n          \"when\": \"editorLangId == rescript\",\n          \"group\": \"navigation\"\n        },\n        {\n          \"command\": \"rescript-vscode.switch-impl-intf\",\n          \"key\": \"Alt+O\",\n          \"when\": \"editorLangId == rescript\",\n 
         \"group\": \"navigation\"\n        }\n      ]\n    },\n    \"snippets\": [\n      {\n        \"language\": \"rescript\",\n        \"path\": \"./snippets.json\"\n      }\n    ],\n    \"taskDefinitions_unused\": [\n      {\n        \"type\": \"bsb\",\n        \"required\": [\n          \"task\"\n        ],\n        \"properties\": {\n          \"task\": {\n            \"type\": \"string\",\n            \"description\": \"The bsb task\"\n          }\n        }\n      }\n    ],\n    \"configuration\": {\n      \"type\": \"object\",\n      \"title\": \"ReScript\",\n      \"properties\": {\n        \"rescript.settings.askToStartBuild\": {\n          \"scope\": \"language-overridable\",\n          \"type\": \"boolean\",\n          \"default\": true,\n          \"description\": \"Whether you want the extension to prompt for autostarting a ReScript build if a project is opened with no build running.\"\n        },\n        \"rescript.settings.inlayHints.enable\": {\n          \"type\": \"boolean\",\n          \"default\": false,\n          \"description\": \"Enable (experimental) inlay hints.\"\n        },\n        \"rescript.settings.inlayHints.maxLength\": {\n          \"markdownDescription\": \"Maximum length of character for inlay hints. Set to null to have an unlimited length. 
Inlay hints that exceed the maximum length will not be shown.\",\n          \"default\": 25,\n          \"type\": [\n            \"null\",\n            \"integer\"\n          ],\n          \"minimum\": 0\n        },\n        \"rescript.settings.codeLens\": {\n          \"type\": \"boolean\",\n          \"default\": false,\n          \"description\": \"Enable (experimental) code lens for function definitions.\"\n        },\n        \"rescript.settings.signatureHelp.enabled\": {\n          \"type\": \"boolean\",\n          \"default\": true,\n          \"description\": \"Enable signature help for function calls.\"\n        },\n        \"rescript.settings.signatureHelp.forConstructorPayloads\": {\n          \"type\": \"boolean\",\n          \"default\": true,\n          \"description\": \"Enable signature help for variant constructor payloads.\"\n        },\n        \"rescript.settings.incrementalTypechecking.enable\": {\n          \"type\": \"boolean\",\n          \"default\": true,\n          \"description\": \"Enable incremental type checking.\"\n        },\n        \"rescript.settings.incrementalTypechecking.acrossFiles\": {\n          \"type\": \"boolean\",\n          \"default\": false,\n          \"description\": \"(beta/experimental) Enable incremental type checking across files, so that unsaved file A gets access to unsaved file B.\"\n        },\n        \"rescript.settings.cache.projectConfig.enable\": {\n          \"type\": \"boolean\",\n          \"default\": true,\n          \"description\": \"Enable project config caching. Can speed up latency dramatically.\"\n        },\n        \"rescript.settings.binaryPath\": {\n          \"type\": [\n            \"string\",\n            \"null\"\n          ],\n          \"default\": null,\n          \"description\": \"Path to the directory where cross-platform ReScript binaries are. 
You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.\"\n        },\n        \"rescript.settings.platformPath\": {\n          \"type\": [\n            \"string\",\n            \"null\"\n          ],\n          \"default\": null,\n          \"description\": \"Path to the directory where platform-specific ReScript binaries are. You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.\"\n        },\n        \"rescript.settings.runtimePath\": {\n          \"type\": [\n            \"string\",\n            \"null\"\n          ],\n          \"default\": null,\n          \"description\": \"Optional path to the directory containing the @rescript/runtime package. Set this if your tooling is unable to automatically locate the package in your project.\"\n        },\n        \"rescript.settings.compileStatus.enable\": {\n          \"type\": \"boolean\",\n          \"default\": true,\n          \"description\": \"Show compile status in the status bar (compiling/errors/warnings/success).\"\n        },\n        \"rescript.settings.logLevel\": {\n          \"type\": \"string\",\n          \"enum\": [\n            \"error\",\n            \"warn\",\n            \"info\",\n            \"log\"\n          ],\n          \"default\": \"info\",\n          \"description\": \"Verbosity of ReScript language server logs sent to the Output channel.\"\n        }\n      }\n    },\n    \"grammars\": [\n      {\n        \"language\": \"rescript\",\n        \"scopeName\": \"source.rescript\",\n        \"path\": \"./grammars/rescript.tmLanguage.json\",\n        \"embeddedLanguages\": {\n          \"meta.embedded.block.javascript\": \"javascript\"\n        }\n      },\n      {\n        \"scopeName\": \"markdown.rescript.codeblock\",\n        \"path\": \"./grammars/rescript.markdown.json\",\n        \"injectTo\": [\n          \"text.html.markdown\"\n        ],\n        \"embeddedLanguages\": {\n   
       \"meta.embedded.block.rescript\": \"rescript\"\n        }\n      }\n    ],\n    \"languages\": [\n      {\n        \"id\": \"rescript\",\n        \"aliases\": [\n          \"ReScript\"\n        ],\n        \"extensions\": [\n          \".res\",\n          \".resi\"\n        ],\n        \"configuration\": \"./rescript.configuration.json\"\n      }\n    ]\n  },\n  \"scripts\": {\n    \"clean\": \"rm -rf client/out server/out\",\n    \"vscode:prepublish\": \"npm run clean && npm run bundle\",\n    \"verify-package\": \"node scripts/verify-package.mjs\",\n    \"compile\": \"tsc -b\",\n    \"watch\": \"tsc -b -w\",\n    \"postinstall\": \"cd server && npm i && cd ../client && npm i && cd ../tools && npm i && cd ../tools/tests && npm i && cd ../../analysis/tests && npm i && cd ../reanalyze/examples/deadcode && npm i && cd ../termination && npm i\",\n    \"bundle-server\": \"esbuild server/src/cli.ts --bundle --sourcemap --outfile=server/out/cli.js --format=cjs --platform=node --loader:.node=file --minify\",\n    \"bundle-client\": \"esbuild client/src/extension.ts --bundle --external:vscode --external:oxc-parser --sourcemap --outfile=client/out/extension.js --format=cjs --platform=node --loader:.node=file --minify\",\n    \"bundle\": \"npm run bundle-server && npm run bundle-client\"\n  },\n  \"devDependencies\": {\n    \"@types/node\": \"^20.19.13\",\n    \"@types/semver\": \"^7.7.0\",\n    \"@types/vscode\": \"1.97.0\",\n    \"esbuild\": \"^0.20.1\",\n    \"prettier\": \"^3.6.2\",\n    \"typescript\": \"^5.8.3\"\n  },\n  \"dependencies\": {\n    \"magic-string\": \"^0.30.21\",\n    \"oxc-parser\": \"0.97.0\",\n    \"oxc-walker\": \"^0.5.2\",\n    \"semver\": \"^7.7.2\"\n  },\n  \"packageManager\": \"yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e\"\n}\n"
  },
  {
    "path": "rescript.configuration.json",
    "content": "{\n  \"comments\": {\n    \"lineComment\": \"//\",\n    \"blockComment\": [\"/*\", \"*/\"]\n  },\n  \"brackets\": [\n    [\"{\", \"}\"],\n    [\"[\", \"]\"],\n    [\"(\", \")\"]\n  ],\n  \"autoClosingPairs\": [\n    [\"{\", \"}\"],\n    [\"[\", \"]\"],\n    [\"(\", \")\"],\n    [\"\\\"\", \"\\\"\"],\n    [\"`\", \"`\"],\n    {\n      \"open\": \"/*\",\n      \"close\": \" */\",\n      \"notIn\": [\"string\"]\n    }\n  ],\n  \"surroundingPairs\": [\n    [\"{\", \"}\"],\n    [\"[\", \"]\"],\n    [\"(\", \")\"],\n    [\"\\\"\", \"\\\"\"],\n    [\"`\", \"`\"]\n  ],\n  \"folding\": {\n    \"markers\": {\n      \"start\": \"^\\\\s*//\\\\s*#?region\\\\b\",\n      \"end\": \"^\\\\s*//\\\\s*#?endregion\\\\b\"\n    }\n  },\n  \"onEnterRules\": [\n    {\n      \"beforeText\": { \"pattern\": \"^\\\\s*/\\\\*(?!/)([^\\\\*]|\\\\*(?!/))*$\" },\n      \"afterText\": { \"pattern\": \"^\\\\s*\\\\*/$\" },\n      \"action\": { \"indent\": \"indentOutdent\", \"appendText\": \" \" }\n    },\n    {\n      \"beforeText\": { \"pattern\": \"^\\\\s*/\\\\*(?!/)([^\\\\*]|\\\\*(?!/))*$\" },\n      \"action\": { \"indent\": \"none\", \"appendText\": \" \" }\n    },\n    {\n      \"beforeText\": { \"pattern\": \"^(\\\\t|[ ])*[ ]\\\\*/\\\\s*$\" },\n      \"action\": { \"indent\": \"none\", \"removeText\": 1 }\n    }\n  ]\n}\n"
  },
  {
    "path": "scripts/find-runtime.ts",
    "content": "// benchmark\nconst start = process.hrtime.bigint();\n\n// start code\nconst args = process.argv.slice(2);\n\nif (args.length === 0) {\n  console.log(`\nUsage: node find-runtime.mjs <project-folder>\nFind @rescript/runtime directories in a project's node_modules.\nArguments:\n  project-folder    Path to the project directory to search\nExamples:\n  node find-runtime.mjs /path/to/project\n  node find-runtime.mjs .\n`);\n  process.exit(1);\n}\n\nconst project = args[args.length - 1];\n\nimport { findRescriptRuntimesInProject } from \"../server/src/find-runtime.ts\";\n\nconst runtimes = await findRescriptRuntimesInProject(project);\n\nconsole.log(\"Found @rescript/runtime directories:\", runtimes);\n\n// end code\nconst end = process.hrtime.bigint();\nconst durationMs = Number(end - start) / 1e6; // convert ns → ms\n\nconsole.log(`Script took ${durationMs.toFixed(3)}ms`);\n"
  },
  {
    "path": "scripts/updateVersion.js",
    "content": "//@ts-check\n// This file is used only in dev time\n// Bump version in package.json and this script will update version in ml file\n// and rescript.json\n\nconst fs = require(\"fs\");\nconst path = require(\"path\");\n\nconst toolsPkgDir = path.join(__dirname, \"..\", \"tools\");\n\nconst { version } = JSON.parse(\n  fs.readFileSync(path.join(toolsPkgDir, \"package.json\"), \"utf8\"),\n);\n\nconst rescriptJsonPath = path.join(toolsPkgDir, \"rescript.json\");\n\nconst rescriptJson = JSON.parse(fs.readFileSync(rescriptJsonPath, \"utf8\"));\nrescriptJson.version = version;\nfs.writeFileSync(rescriptJsonPath, JSON.stringify(rescriptJson, null, 2));\n\nfs.writeFileSync(\n  path.join(toolsPkgDir, \"bin\", \"version.ml\"),\n  `let version = \"${version}\"`,\n);\n"
  },
  {
    "path": "scripts/verify-package.mjs",
    "content": "#!/usr/bin/env node\n\n/**\n * Script to verify that platform-specific native bindings and required dependencies\n * are included in the packaged .vsix file\n */\n\nimport fs from \"fs\";\nimport path from \"path\";\nimport { fileURLToPath } from \"url\";\nimport { execSync } from \"child_process\";\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = path.dirname(__filename);\nconst ROOT_DIR = path.join(__dirname, \"..\");\n\n// Find .vsix file\nconst vsixFiles = fs\n  .readdirSync(ROOT_DIR)\n  .filter((f) => f.endsWith(\".vsix\"))\n  .sort((a, b) => {\n    const statA = fs.statSync(path.join(ROOT_DIR, a));\n    const statB = fs.statSync(path.join(ROOT_DIR, b));\n    return statB.mtimeMs - statA.mtimeMs; // Most recent first\n  });\n\nif (vsixFiles.length === 0) {\n  console.error('No .vsix file found. Run \"npx vsce package\" first.');\n  process.exit(1);\n}\n\nconst vsixFile = vsixFiles[0];\nconsole.log(`Checking ${vsixFile}...\\n`);\n\n// Extract and check contents\nconst tempDir = path.join(ROOT_DIR, \".vsix-check\");\ntry {\n  fs.mkdirSync(tempDir, { recursive: true });\n\n  // .vsix is just a zip file\n  execSync(`unzip -q \"${path.join(ROOT_DIR, vsixFile)}\" -d \"${tempDir}\"`, {\n    stdio: \"inherit\",\n  });\n\n  const extensionDir = path.join(tempDir, \"extension\");\n  const oxcParserDir = path.join(extensionDir, \"node_modules\", \"@oxc-parser\");\n\n  // Platform-specific bindings that should be included\n  const platformBindings = [\n    \"@oxc-parser/binding-darwin-arm64\",\n    \"@oxc-parser/binding-darwin-x64\",\n    \"@oxc-parser/binding-linux-x64-gnu\",\n    \"@oxc-parser/binding-win32-x64-msvc\",\n  ];\n\n  const checks = [\n    {\n      name: \"oxc-parser\",\n      path: path.join(extensionDir, \"node_modules\", \"oxc-parser\"),\n      required: true,\n    },\n    ...platformBindings.map((binding) => ({\n      name: binding,\n      path: path.join(oxcParserDir, binding.replace(\"@oxc-parser/\", \"\")),\n    
  required: true,\n    })),\n  ];\n\n  let allGood = true;\n  let foundCount = 0;\n\n  for (const check of checks) {\n    const exists = fs.existsSync(check.path);\n    const status = exists ? \"✓\" : \"✗\";\n    console.log(`${status} ${check.name}: ${exists ? \"FOUND\" : \"MISSING\"}`);\n\n    if (exists) {\n      foundCount++;\n      // Check for key files in bindings\n      if (check.name.startsWith(\"@oxc-parser/binding-\")) {\n        // Look for .node files (native bindings) or package.json\n        const packageJson = path.join(check.path, \"package.json\");\n        if (fs.existsSync(packageJson)) {\n          console.log(`  ✓ package.json found`);\n        } else {\n          console.log(`  ✗ package.json missing`);\n          allGood = false;\n        }\n      }\n    } else if (check.required) {\n      allGood = false;\n    }\n  }\n\n  console.log(\"\");\n  console.log(`Found ${foundCount}/${checks.length} required packages`);\n\n  if (allGood && foundCount === checks.length) {\n    console.log(\"✓ All required files are included in the package!\");\n  } else {\n    console.log(\"✗ Some required files are missing!\");\n    if (foundCount < platformBindings.length) {\n      console.log(\n        `  Warning: Only ${foundCount - 1} platform bindings found, expected ${platformBindings.length}`,\n      );\n      console.log(\"  The extension may not work on all platforms.\");\n    }\n    process.exit(1);\n  }\n} finally {\n  // Cleanup\n  if (fs.existsSync(tempDir)) {\n    fs.rmSync(tempDir, { recursive: true, force: true });\n  }\n}\n"
  },
  {
    "path": "server/README.md",
    "content": "# ReScript Language Server\n\n## Install\n\n```sh\nnpm install -g @rescript/language-server\n```\n\n## Run\n\n```sh\nrescript-language-server --stdio\n```\n\n```sh\nReScript Language Server\n\nUsage: rescript-language-server [options]\n\nOptions:\n\n--stdio         Use stdio\n--node-ipc      Use node-ipc\n-v, --version   Print version\n-h, --help      Print help\n```\n"
  },
  {
    "path": "server/analysis_binaries/README.md",
    "content": "Put the `linux`, `darwin` and `win32` folders unzipped from CI here. These are the production binaries.\n"
  },
  {
    "path": "server/config.md",
    "content": "# Configuration\n\nThe ReScript Language Server support the folowing configuration.\n\nThese configurations are sent to the server on [initialization](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize)\n\n```typescript\ninterface config {\n  /**\n   * Whether you want the extension to prompt for autostarting a ReScript build if a project is opened with no build running\n   * @default true\n   */\n  askToStartBuild: boolean;\n\n  /**\n   * Inlay Hint config\n   */\n  inlayHints: {\n    /**\n     * Enable Inlay Hint\n     * @defalt false\n     */\n    enable: boolean;\n    /**\n     * Maximum length of character for inlay hints. Set to null to have an unlimited length. Inlay hints that exceed the maximum length will not be shown\n     * @defalt 25\n     */\n    maxLength: number | null;\n  };\n  /**\n   * Enable CodeLens\n   * @default false\n   */\n  codeLens: boolean;\n  /**\n   * Path to the directory where cross-platform ReScript binaries are. You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.\n   * @default null\n   */\n  binaryPath: string | null;\n  /**\n   * Path to the directory where platform-specific ReScript binaries are. You can use it if you haven't or don't want to use the installed ReScript from node_modules in your project.\n   * @default null\n   */\n  platformPath: string | null;\n\n  /**\n   * Signature Help config\n   */\n  signatureHelp: {\n    /**\n     * Enable Signature Help\n     * @default true\n     */\n    enabled: boolean;\n  };\n}\n```\n"
  },
  {
    "path": "server/package.json",
    "content": "{\n  \"name\": \"@rescript/language-server\",\n  \"description\": \"LSP server for ReScript\",\n  \"version\": \"1.72.0\",\n  \"author\": \"ReScript Team\",\n  \"license\": \"MIT\",\n  \"bin\": {\n    \"rescript-language-server\": \"./out/cli.js\"\n  },\n  \"keywords\": [\n    \"ReScript\",\n    \"LSP\",\n    \"Language Server\"\n  ],\n  \"files\": [\n    \"out/*\",\n    \"analysis_binaries\",\n    \"README.md\"\n  ],\n  \"engines\": {\n    \"node\": \"*\"\n  },\n  \"homepage\": \"https://github.com/rescript-lang/rescript-vscode/blob/master/server/README.md\",\n  \"repository\": {\n    \"type\": \"git\",\n    \"url\": \"https://github.com/rescript-lang/rescript-vscode\",\n    \"directory\": \"server\"\n  },\n  \"bugs\": {\n    \"url\": \"https://github.com/rescript-lang/rescript-vscode/issues\"\n  },\n  \"dependencies\": {\n    \"semver\": \"^7.7.2\",\n    \"vscode-jsonrpc\": \"^8.0.1\",\n    \"vscode-languageserver\": \"^9.0.1\",\n    \"vscode-languageserver-protocol\": \"^3.17.1\"\n  }\n}\n"
  },
  {
    "path": "server/src/bsc-args/bsb.ts",
    "content": "import * as path from \"path\";\nimport fs from \"fs\";\nimport { IncrementallyCompiledFileInfo } from \"../incrementalCompilation\";\nimport { buildNinjaPartialPath } from \"../constants\";\n\nexport type BsbCompilerArgs = string[];\n\nexport async function getBsbBscArgs(\n  entry: IncrementallyCompiledFileInfo,\n): Promise<BsbCompilerArgs | null> {\n  const buildNinjaPath = path.resolve(\n    entry.project.rootPath,\n    buildNinjaPartialPath,\n  );\n\n  let stat: fs.Stats;\n  try {\n    stat = await fs.promises.stat(buildNinjaPath);\n  } catch {\n    return null;\n  }\n\n  const cache = entry.buildNinja;\n  if (cache && cache.fileMtime >= stat.mtimeMs) {\n    return cache.rawExtracted;\n  }\n\n  const fh = await fs.promises.open(buildNinjaPath, \"r\");\n  try {\n    let captureNext = false;\n    let haveAst = false;\n    const captured: string[] = [];\n\n    for await (const rawLine of fh.readLines()) {\n      const line = String(rawLine).trim();\n      if (captureNext) {\n        captured.push(line);\n        captureNext = false;\n        if (haveAst && captured.length === 2) break; // got ast + mij\n      }\n      if (line.startsWith(\"rule astj\")) {\n        captureNext = true;\n        haveAst = true;\n      } else if (line.startsWith(\"rule mij\")) {\n        captureNext = true;\n      }\n    }\n\n    if (captured.length !== 2) return null;\n\n    entry.buildNinja = {\n      fileMtime: stat.mtimeMs,\n      rawExtracted: captured,\n    };\n    return captured;\n  } finally {\n    await fh.close();\n  }\n}\n"
  },
  {
    "path": "server/src/bsc-args/rewatch.ts",
    "content": "import * as path from \"path\";\nimport * as utils from \"../utils\";\nimport * as cp from \"node:child_process\";\nimport * as p from \"vscode-languageserver-protocol\";\nimport semver from \"semver\";\nimport { IncrementallyCompiledFileInfo } from \"../incrementalCompilation\";\nimport type { projectFiles } from \"../projectFiles\";\nimport { jsonrpcVersion } from \"../constants\";\nimport { getLogger } from \"../logger\";\n\nexport type RewatchCompilerArgs = {\n  compiler_args: Array<string>;\n  parser_args: Array<string>;\n};\n\nasync function getRuntimePath(\n  entry: IncrementallyCompiledFileInfo,\n): Promise<utils.NormalizedPath | null> {\n  return utils.getRuntimePathFromWorkspaceRoot(entry.project.workspaceRootPath);\n}\n\nexport async function getRewatchBscArgs(\n  send: (msg: p.Message) => void,\n  bscBinaryLocation: utils.NormalizedPath | null,\n  projectsFiles: Map<utils.NormalizedPath, projectFiles>,\n  entry: IncrementallyCompiledFileInfo,\n): Promise<RewatchCompilerArgs | null> {\n  const rewatchCacheEntry = entry.buildRewatch;\n\n  if (\n    rewatchCacheEntry != null &&\n    rewatchCacheEntry.lastFile === entry.file.sourceFilePath\n  ) {\n    return Promise.resolve(rewatchCacheEntry.compilerArgs);\n  }\n\n  try {\n    const project = projectsFiles.get(entry.project.rootPath);\n    if (project?.rescriptVersion == null) return null;\n    let rewatchPath = path.resolve(\n      entry.project.workspaceRootPath,\n      \"node_modules/@rolandpeelen/rewatch/rewatch\",\n    );\n    let rescriptRewatchPath = null;\n    if (\n      semver.valid(project.rescriptVersion) &&\n      semver.satisfies(project.rescriptVersion as string, \">11\", {\n        includePrerelease: true,\n      })\n    ) {\n      rescriptRewatchPath = await utils.findRewatchBinary(\n        entry.project.workspaceRootPath,\n      );\n    }\n\n    if (\n      semver.valid(project.rescriptVersion) &&\n      semver.satisfies(project.rescriptVersion as string, 
\">=12.0.0-beta.1\", {\n        includePrerelease: true,\n      })\n    ) {\n      rescriptRewatchPath = await utils.findRescriptExeBinary(\n        entry.project.workspaceRootPath,\n      );\n    }\n\n    if (rescriptRewatchPath != null) {\n      rewatchPath = rescriptRewatchPath;\n      getLogger().log(\n        `Found rewatch binary bundled with v12: ${rescriptRewatchPath}`,\n      );\n    } else {\n      getLogger().log(\"Did not find rewatch binary bundled with v12\");\n    }\n\n    const rewatchArguments = semver.satisfies(\n      project.rescriptVersion,\n      \">=12.0.0-beta.2\",\n      { includePrerelease: true },\n    )\n      ? [\"compiler-args\", entry.file.sourceFilePath]\n      : [\n          \"--rescript-version\",\n          project.rescriptVersion,\n          \"--compiler-args\",\n          entry.file.sourceFilePath,\n        ];\n\n    const env: NodeJS.ProcessEnv = {};\n    if (bscBinaryLocation != null) {\n      env[\"RESCRIPT_BSC_EXE\"] = bscBinaryLocation;\n    }\n\n    // For ReScript >= 12.0.0-beta.11 we need to set RESCRIPT_RUNTIME\n    if (\n      semver.satisfies(project.rescriptVersion, \">=12.0.0-beta.11\", {\n        includePrerelease: true,\n      })\n    ) {\n      let rescriptRuntime: utils.NormalizedPath | null =\n        await getRuntimePath(entry);\n\n      if (rescriptRuntime !== null) {\n        env[\"RESCRIPT_RUNTIME\"] = rescriptRuntime;\n      } else {\n        // If no runtime was found, we should let the user know.\n        let params: p.ShowMessageParams = {\n          type: p.MessageType.Error,\n          message:\n            `[Incremental type checking] The @rescript/runtime package was not found in your project. ` +\n            `It is normally included with ReScript, but either it's missing or could not be detected. ` +\n            `Check that it exists in your dependencies, or configure 'rescript.settings.runtimePath' to point to it. 
` +\n            `Without this package, incremental type checking may not work as expected.`,\n        };\n        let message: p.NotificationMessage = {\n          jsonrpc: jsonrpcVersion,\n          method: \"window/showMessage\",\n          params: params,\n        };\n        send(message);\n      }\n    }\n\n    const compilerArgs = JSON.parse(\n      cp.execFileSync(rewatchPath, rewatchArguments, { env }).toString().trim(),\n    ) as RewatchCompilerArgs;\n\n    entry.buildRewatch = {\n      lastFile: entry.file.sourceFilePath,\n      compilerArgs: compilerArgs,\n    };\n\n    return compilerArgs;\n  } catch (e) {\n    console.error(e);\n    return null;\n  }\n}\n"
  },
  {
    "path": "server/src/buildSchema.ts",
    "content": "// This file has been generated from https://raw.githubusercontent.com/rescript-lang/rescript-compiler/master/docs/docson/build-schema.json\n// with https://app.quicktype.io/ and stripped down to what we actually need for the extension.\n\nexport interface BuildSchema {\n  name: string;\n  namespace?: boolean | string;\n  \"package-specs\"?:\n    | Array<ModuleFormat | ModuleFormatObject>\n    | ModuleFormat\n    | ModuleFormatObject;\n  suffix?: SuffixSpec;\n}\n\nexport enum ModuleFormat {\n  Commonjs = \"commonjs\",\n  Es6 = \"es6\",\n  Es6Global = \"es6-global\",\n  Esmodule = \"esmodule\",\n}\n\nexport interface ModuleFormatObject {\n  \"in-source\"?: boolean;\n  module: ModuleFormat;\n  suffix?: SuffixSpec;\n}\n\nexport enum SuffixSpec {\n  BsCjs = \".bs.cjs\",\n  BsJS = \".bs.js\",\n  BsMjs = \".bs.mjs\",\n  Cjs = \".cjs\",\n  JS = \".js\",\n  Mjs = \".mjs\",\n}\n"
  },
  {
    "path": "server/src/cli.ts",
    "content": "#!/usr/bin/env node\nimport fs from \"fs\";\nimport path from \"path\";\nimport server from \"./server\";\n\nconst args = process.argv.slice(2);\n\nconst help = `ReScript Language Server\n\nUsage: rescript-language-server [options]\n\nOptions:\n\n--stdio               Use stdio\n--node-ipc            Use node-ipc\n-v, --version         Print version\n-h, --help            Print help`;\n\n(() => {\n  switch (args[0]) {\n    case \"--stdio\":\n      return server(true);\n    case \"--node-ipc\":\n      return server(false);\n    case \"--version\":\n    case \"-v\":\n      const { version } = JSON.parse(\n        fs.readFileSync(path.join(__dirname, \"..\", \"package.json\")).toString(),\n      );\n      console.log(version);\n      process.exit(0);\n    case \"--help\":\n    case \"-h\":\n      console.log(help);\n      process.exit(0);\n    default:\n      console.log(help);\n      process.exit(1);\n  }\n})();\n"
  },
  {
    "path": "server/src/codeActions.ts",
    "content": "// This file holds code actions derived from diagnostics. There are more code\n// actions available in the extension, but they are derived via the analysis\n// OCaml binary.\nimport * as p from \"vscode-languageserver-protocol\";\nimport * as utils from \"./utils\";\n\nexport type fileCodeActions = { range: p.Range; codeAction: p.CodeAction };\n\nexport type filesCodeActions = {\n  [key: utils.FileURI]: fileCodeActions[];\n};\n\ninterface findCodeActionsConfig {\n  diagnostic: p.Diagnostic;\n  diagnosticMessage: string[];\n  file: utils.FileURI;\n  range: p.Range;\n  addFoundActionsHere: filesCodeActions;\n}\n\nlet wrapRangeInText = (\n  range: p.Range,\n  wrapStart: string,\n  wrapEnd: string,\n): p.TextEdit[] => {\n  // We need to adjust the start of where we replace if this is a single\n  // character on a single line.\n  let offset =\n    range.start.line === range.end.line &&\n    range.start.character === range.end.character\n      ? 1\n      : 0;\n\n  let startRange = {\n    start: {\n      line: range.start.line,\n      character: range.start.character - offset,\n    },\n    end: {\n      line: range.start.line,\n      character: range.start.character - offset,\n    },\n  };\n\n  let endRange = {\n    start: {\n      line: range.end.line,\n      character: range.end.character,\n    },\n    end: {\n      line: range.end.line,\n      character: range.end.character,\n    },\n  };\n\n  return [\n    {\n      range: startRange,\n      newText: wrapStart,\n    },\n    {\n      range: endRange,\n      newText: wrapEnd,\n    },\n  ];\n};\n\nlet insertBeforeEndingChar = (\n  range: p.Range,\n  newText: string,\n): p.TextEdit[] => {\n  let beforeEndingChar = {\n    line: range.end.line,\n    character: range.end.character - 1,\n  };\n\n  return [\n    {\n      range: {\n        start: beforeEndingChar,\n        end: beforeEndingChar,\n      },\n      newText,\n    },\n  ];\n};\n\nlet replaceText = (range: p.Range, newText: string): p.TextEdit[] => {\n 
 return [\n    {\n      range,\n      newText,\n    },\n  ];\n};\n\nlet removeTrailingComma = (text: string): string => {\n  let str = text.trim();\n  if (str.endsWith(\",\")) {\n    return str.slice(0, str.length - 1);\n  }\n\n  return str;\n};\n\nlet extractTypename = (lines: string[]): string => {\n  let arrFiltered: string[] = [];\n\n  for (let i = 0; i <= lines.length - 1; i += 1) {\n    let line = lines[i];\n    if (line.includes(\"(defined as\")) {\n      let [typeStr, _] = line.split(\"(defined as\");\n      arrFiltered.push(removeTrailingComma(typeStr));\n      break;\n    } else {\n      arrFiltered.push(removeTrailingComma(line));\n    }\n  }\n\n  return arrFiltered.join(\"\").trim();\n};\n\nlet takeUntil = (array: string[], startsWith: string): string[] => {\n  let res: string[] = [];\n  let arr = array.slice();\n\n  let matched = false;\n  arr.forEach((line) => {\n    if (matched) {\n      return;\n    }\n\n    if (line.startsWith(startsWith)) {\n      matched = true;\n    } else {\n      res.push(line);\n    }\n  });\n\n  return res;\n};\n\nexport let findCodeActionsInDiagnosticsMessage = async ({\n  diagnostic,\n  diagnosticMessage,\n  file,\n  range,\n  addFoundActionsHere: codeActions,\n}: findCodeActionsConfig) => {\n  for (const [index, line] of diagnosticMessage.entries()) {\n    // Because of how actions work, there can only be one per diagnostic. 
So,\n    // halt whenever a code action has been found.\n    let codeActionExtractors = [\n      simpleTypeMismatches,\n      didYouMeanAction,\n      addUndefinedRecordFieldsV10,\n      addUndefinedRecordFieldsV11,\n      simpleConversion,\n      applyUncurried,\n      simpleAddMissingCases,\n      wrapInSome,\n    ];\n\n    for (let extractCodeAction of codeActionExtractors) {\n      let didFindAction = false;\n\n      try {\n        didFindAction = await extractCodeAction({\n          array: diagnosticMessage,\n          codeActions,\n          diagnostic,\n          file,\n          index,\n          line,\n          range,\n        });\n      } catch (e) {\n        console.error(e);\n      }\n\n      if (didFindAction) {\n        break;\n      }\n    }\n  }\n};\n\ninterface codeActionExtractorConfig {\n  line: string;\n  index: number;\n  array: string[];\n  file: utils.FileURI;\n  range: p.Range;\n  diagnostic: p.Diagnostic;\n  codeActions: filesCodeActions;\n}\n\ntype codeActionExtractor = (\n  config: codeActionExtractorConfig,\n) => Promise<boolean>;\n\n// This action extracts hints the compiler emits for misspelled identifiers, and\n// offers to replace the misspelled name with the correct name suggested by the\n// compiler.\nlet didYouMeanAction: codeActionExtractor = async ({\n  codeActions,\n  diagnostic,\n  file,\n  line,\n  range,\n}) => {\n  if (line.startsWith(\"Hint: Did you mean\")) {\n    let regex = /Did you mean ([A-Za-z0-9_]*)?/;\n    let match = line.match(regex);\n\n    if (match === null) {\n      return false;\n    }\n\n    let [_, suggestion] = match;\n\n    if (suggestion != null) {\n      codeActions[file] = codeActions[file] || [];\n      let codeAction: p.CodeAction = {\n        title: `Replace with '${suggestion}'`,\n        edit: {\n          changes: {\n            [file]: [{ range, newText: suggestion }],\n          },\n        },\n        diagnostics: [diagnostic],\n        kind: p.CodeActionKind.QuickFix,\n        isPreferred: 
true,\n      };\n\n      codeActions[file].push({\n        range,\n        codeAction,\n      });\n\n      return true;\n    }\n  }\n\n  return false;\n};\n\n// This action offers to wrap patterns that aren't option in Some.\nlet wrapInSome: codeActionExtractor = async ({\n  codeActions,\n  diagnostic,\n  file,\n  line,\n  range,\n  array,\n  index,\n}) => {\n  if (line.startsWith(\"This pattern matches values of type\")) {\n    let regex = /This pattern matches values of type (.*)$/;\n\n    let match = line.match(regex);\n\n    if (match === null) {\n      return false;\n    }\n\n    let [_, type] = match;\n\n    if (!type.startsWith(\"option<\")) {\n      // Look for the expected type\n      let restOfMessage = array.slice(index + 1);\n      let lineIndexWithType = restOfMessage.findIndex((l) =>\n        l\n          .trim()\n          .startsWith(\n            \"but a pattern was expected which matches values of type\",\n          ),\n      );\n\n      if (lineIndexWithType === -1) return false;\n      // The type is either on this line or the next\n      let [_, typ = \"\"] = restOfMessage[lineIndexWithType].split(\n        \"but a pattern was expected which matches values of type\",\n      );\n\n      if (typ.trim() === \"\") {\n        // Type is on the next line\n        typ = (restOfMessage[lineIndexWithType + 1] ?? 
\"\").trim();\n      }\n\n      if (typ.trim().startsWith(\"option<\")) {\n        codeActions[file] = codeActions[file] || [];\n\n        let codeAction: p.CodeAction = {\n          title: `Wrap in option Some`,\n          edit: {\n            changes: {\n              [file]: wrapRangeInText(range, `Some(`, `)`),\n            },\n          },\n          diagnostics: [diagnostic],\n          kind: p.CodeActionKind.QuickFix,\n          isPreferred: true,\n        };\n\n        codeActions[file].push({\n          range,\n          codeAction,\n        });\n\n        return true;\n      }\n    }\n  }\n\n  return false;\n};\n\nlet handleUndefinedRecordFieldsAction = ({\n  recordFieldNames,\n  codeActions,\n  file,\n  range,\n  diagnostic,\n  todoValue,\n}: {\n  recordFieldNames: string[];\n  codeActions: filesCodeActions;\n  file: utils.FileURI;\n  range: p.Range;\n  diagnostic: p.Diagnostic;\n  todoValue: string;\n}) => {\n  if (recordFieldNames != null) {\n    codeActions[file] = codeActions[file] || [];\n\n    // The formatter outputs trailing commas automatically if the record\n    // definition is on multiple lines, and no trailing comma if it's on a\n    // single line. We need to adapt to this so we don't accidentally\n    // insert an invalid comma.\n    let multilineRecordDefinitionBody = range.start.line !== range.end.line;\n\n    // Let's build up the text we're going to insert.\n    let newText = \"\";\n\n    if (multilineRecordDefinitionBody) {\n      // If it's a multiline body, we know it looks like this:\n      // ```\n      // let someRecord = {\n      //   atLeastOneExistingField: string,\n      // }\n      // ```\n      // We can figure out the formatting from the range the code action\n      // gives us. 
We'll insert to the direct left of the ending brace.\n\n      // The end char is the closing brace, and it's always going to be 2\n      // characters back from the record fields.\n      let paddingCharacters = multilineRecordDefinitionBody\n        ? range.end.character + 2\n        : 0;\n      let paddingContentRecordField = Array.from({\n        length: paddingCharacters,\n      }).join(\" \");\n      let paddingContentEndBrace = Array.from({\n        length: range.end.character,\n      }).join(\" \");\n\n      recordFieldNames.forEach((fieldName, index) => {\n        if (index === 0) {\n          // This adds spacing from the ending brace up to the equivalent\n          // of the last record field name, needed for the first inserted\n          // record field name.\n          newText += \"  \";\n        } else {\n          // The rest of the new record field names will start from a new\n          // line, so they need left padding all the way to the same level\n          // as the rest of the record fields.\n          newText += paddingContentRecordField;\n        }\n\n        newText += `${fieldName}: ${todoValue},\\n`;\n      });\n\n      // Let's put the end brace back where it was (we still have it to the direct right of us).\n      newText += `${paddingContentEndBrace}`;\n    } else {\n      // A single line record definition body is a bit easier - we'll just add the new fields on the same line.\n\n      // For an empty record (`range.end.character - range.start.character == 2`),\n      // we don't want to add an initial trailing comma as that would be invalid syntax.\n      //\n      // We assume that records that already contain some characters between\n      // their braces have at least one field and therefore we need to insert\n      // an initial trailing comma.\n      if (range.end.character - range.start.character > 2) {\n        newText += \", \";\n      }\n\n      newText += recordFieldNames\n        .map((fieldName) => `${fieldName}: 
${todoValue}`)\n        .join(\", \");\n    }\n\n    let codeAction: p.CodeAction = {\n      title: `Add missing record fields`,\n      edit: {\n        changes: {\n          [file]: insertBeforeEndingChar(range, newText),\n        },\n      },\n      diagnostics: [diagnostic],\n      kind: p.CodeActionKind.QuickFix,\n      isPreferred: true,\n    };\n\n    codeActions[file].push({\n      range,\n      codeAction,\n    });\n\n    return true;\n  }\n\n  return false;\n};\n\n// This action handles when the compiler errors on certain fields of a record\n// being undefined. We then offers an action that inserts all of the record\n// fields, with an `assert false` dummy value. `assert false` is so applying the\n// code action actually compiles.\nlet addUndefinedRecordFieldsV10: codeActionExtractor = async ({\n  array,\n  codeActions,\n  diagnostic,\n  file,\n  index,\n  line,\n  range,\n}) => {\n  if (line.startsWith(\"Some record fields are undefined:\")) {\n    let recordFieldNames = line\n      .trim()\n      .split(\"Some record fields are undefined: \")[1]\n      ?.split(\" \");\n\n    // This collects the rest of the fields if fields are printed on\n    // multiple lines.\n    array.slice(index + 1).forEach((line) => {\n      recordFieldNames.push(...line.trim().split(\" \"));\n    });\n\n    return handleUndefinedRecordFieldsAction({\n      recordFieldNames,\n      codeActions,\n      diagnostic,\n      file,\n      range,\n      todoValue: `failwith(\"TODO\")`,\n    });\n  }\n\n  return false;\n};\n\nlet addUndefinedRecordFieldsV11: codeActionExtractor = async ({\n  array,\n  codeActions,\n  diagnostic,\n  file,\n  index,\n  line,\n  range,\n}) => {\n  if (line.startsWith(\"Some required record fields are missing:\")) {\n    let theLine = line;\n    if (theLine.endsWith(\".\")) {\n      theLine = theLine.slice(0, theLine.length - 1);\n    }\n\n    let recordFieldNames = theLine\n      .trim()\n      .split(\"Some required record fields are missing: \")[1]\n      
?.split(\" \");\n\n    // This collects the rest of the fields if fields are printed on\n    // multiple lines.\n    let stop = false;\n    array.slice(index + 1).forEach((line) => {\n      if (stop) return;\n\n      // Remove trailing dot, split the rest of the field names\n      recordFieldNames.push(...line.trim().split(\".\")[0].split(\" \"));\n\n      if (line.includes(\".\")) {\n        stop = true;\n      }\n    });\n\n    return handleUndefinedRecordFieldsAction({\n      recordFieldNames,\n      codeActions,\n      diagnostic,\n      file,\n      range,\n      todoValue: `%todo`,\n    });\n  }\n\n  return false;\n};\n\n// This action detects suggestions of converting between mismatches in types\n// that the compiler tells us about.\nlet simpleConversion: codeActionExtractor = async ({\n  line,\n  codeActions,\n  file,\n  range,\n  diagnostic,\n}) => {\n  if (line.startsWith(\"You can convert \")) {\n    let regex = /You can convert (\\w*) to (\\w*) with ([\\w.]*).$/;\n    let match = line.match(regex);\n\n    if (match === null) {\n      return false;\n    }\n\n    let [_, from, to, fn] = match;\n\n    if (from != null && to != null && fn != null) {\n      codeActions[file] = codeActions[file] || [];\n\n      let codeAction: p.CodeAction = {\n        title: `Convert ${from} to ${to} with ${fn}`,\n        edit: {\n          changes: {\n            [file]: wrapRangeInText(range, `${fn}(`, `)`),\n          },\n        },\n        diagnostics: [diagnostic],\n        kind: p.CodeActionKind.QuickFix,\n        isPreferred: true,\n      };\n\n      codeActions[file].push({\n        range,\n        codeAction,\n      });\n\n      return true;\n    }\n  }\n\n  return false;\n};\n\n// This action will apply a curried function (essentially inserting a dot in the\n// correct place).\nlet applyUncurried: codeActionExtractor = async ({\n  line,\n  codeActions,\n  file,\n  range,\n  diagnostic,\n}) => {\n  if (\n    line.startsWith(\n      \"This is an uncurried ReScript 
function. It must be applied with a dot.\",\n    )\n  ) {\n    const locOfOpenFnParens = {\n      line: range.end.line,\n      character: range.end.character + 1,\n    };\n\n    codeActions[file] = codeActions[file] || [];\n    let codeAction: p.CodeAction = {\n      title: `Apply uncurried function call with dot`,\n      edit: {\n        changes: {\n          [file]: [\n            {\n              range: {\n                start: locOfOpenFnParens,\n                end: locOfOpenFnParens,\n              },\n              /*\n               * Turns `fn(123)` into `fn(. 123)`.\n               */\n              newText: `. `,\n            },\n          ],\n        },\n      },\n      diagnostics: [diagnostic],\n      kind: p.CodeActionKind.QuickFix,\n      isPreferred: true,\n    };\n\n    codeActions[file].push({\n      range,\n      codeAction,\n    });\n\n    return true;\n  }\n\n  return false;\n};\n\n// This action detects missing cases for exhaustive pattern matches, and offers\n// to insert dummy branches (using `failwith(\"TODO\")`) for those branches.\nlet simpleAddMissingCases: codeActionExtractor = async ({\n  line,\n  codeActions,\n  file,\n  range,\n  diagnostic,\n  array,\n  index,\n}) => {\n  if (\n    line.startsWith(\"You forgot to handle a possible case here, for example:\")\n  ) {\n    // This collects the rest of the fields if fields are printed on\n    // multiple lines.\n    let allCasesAsOneLine = array\n      .slice(index + 1)\n      .join(\"\")\n      .trim();\n\n    let filePath = utils.uriToNormalizedPath(file);\n\n    let newSwitchCode = await utils.runAnalysisAfterSanityCheck(filePath, [\n      \"codemod\",\n      filePath,\n      range.start.line,\n      range.start.character,\n      \"add-missing-cases\",\n      allCasesAsOneLine,\n    ]);\n\n    codeActions[file] = codeActions[file] || [];\n    let codeAction: p.CodeAction = {\n      title: `Insert missing cases`,\n      edit: {\n        changes: {\n          [file]: 
replaceText(range, newSwitchCode),\n        },\n      },\n      diagnostics: [diagnostic],\n      kind: p.CodeActionKind.QuickFix,\n      isPreferred: true,\n    };\n\n    codeActions[file].push({\n      range,\n      codeAction,\n    });\n\n    return true;\n  }\n\n  return false;\n};\n\n// This detects concrete variables or values put in a position which expects an\n// optional of that same type, and offers to wrap the value/variable in\n// `Some()`.\nlet simpleTypeMismatches: codeActionExtractor = async ({\n  line,\n  codeActions,\n  file,\n  range,\n  diagnostic,\n  array,\n  index,\n}) => {\n  // Examples:\n  //\n  // 46 │ let as_ = {\n  // 47 │   someProp: \"123\",\n  // 48 │   another: \"123\",\n  // 49 │ }\n  // 50 │\n  // This has type: string\n  // Somewhere wanted: option<string>\n  //\n  // ...but types etc can also be on multilines, so we need a good\n  // amount of cleanup.\n\n  let lookFor = \"This has type:\";\n\n  if (line.startsWith(lookFor)) {\n    let thisHasTypeArr = takeUntil(\n      [line.slice(lookFor.length), ...array.slice(index + 1)],\n      \"Somewhere wanted:\",\n    );\n    let somewhereWantedArr = array\n      .slice(index + thisHasTypeArr.length)\n      .map((line) => line.replace(\"Somewhere wanted:\", \"\"));\n\n    let thisHasType = extractTypename(thisHasTypeArr);\n    let somewhereWanted = extractTypename(somewhereWantedArr);\n\n    // Switching over an option\n    if (thisHasType === `option<${somewhereWanted}>`) {\n      codeActions[file] = codeActions[file] || [];\n\n      // We can figure out default values for primitives etc.\n      let defaultValue = \"assert false\";\n\n      switch (somewhereWanted) {\n        case \"string\": {\n          defaultValue = `\"-\"`;\n          break;\n        }\n        case \"bool\": {\n          defaultValue = `false`;\n          break;\n        }\n        case \"int\": {\n          defaultValue = `-1`;\n          break;\n        }\n        case \"float\": {\n          defaultValue = 
`-1.`;\n          break;\n        }\n      }\n\n      let codeAction: p.CodeAction = {\n        title: `Unwrap optional value`,\n        edit: {\n          changes: {\n            [file]: wrapRangeInText(\n              range,\n              \"switch \",\n              ` { | None => ${defaultValue} | Some(v) => v }`,\n            ),\n          },\n        },\n        diagnostics: [diagnostic],\n        kind: p.CodeActionKind.QuickFix,\n        isPreferred: true,\n      };\n\n      codeActions[file].push({\n        range,\n        codeAction,\n      });\n\n      return true;\n    }\n\n    // Wrapping a non-optional in Some\n    if (`option<${thisHasType}>` === somewhereWanted) {\n      codeActions[file] = codeActions[file] || [];\n\n      let codeAction: p.CodeAction = {\n        title: `Wrap value in Some`,\n        edit: {\n          changes: {\n            [file]: wrapRangeInText(range, \"Some(\", \")\"),\n          },\n        },\n        diagnostics: [diagnostic],\n        kind: p.CodeActionKind.QuickFix,\n        isPreferred: true,\n      };\n\n      codeActions[file].push({\n        range,\n        codeAction,\n      });\n\n      return true;\n    }\n  }\n\n  return false;\n};\n"
  },
  {
    "path": "server/src/config.ts",
    "content": "import { Message } from \"vscode-languageserver-protocol\";\n\nexport type send = (msg: Message) => void;\n\nexport interface extensionConfiguration {\n  askToStartBuild?: boolean;\n  logLevel?: \"error\" | \"warn\" | \"info\" | \"log\";\n  inlayHints?: {\n    enable?: boolean;\n    maxLength?: number | null;\n  };\n  codeLens?: boolean;\n  binaryPath?: string | null;\n  platformPath?: string | null;\n  runtimePath?: string | null;\n  signatureHelp?: {\n    enabled?: boolean;\n    forConstructorPayloads?: boolean;\n  };\n  incrementalTypechecking?: {\n    enable?: boolean;\n    acrossFiles?: boolean;\n  };\n  cache?: {\n    projectConfig?: {\n      enable?: boolean;\n    };\n  };\n}\n\nexport const initialConfiguration: extensionConfiguration = {\n  askToStartBuild: true,\n  logLevel: \"info\",\n  inlayHints: {\n    enable: false,\n    maxLength: 25,\n  },\n  codeLens: false,\n  binaryPath: null,\n  platformPath: null,\n  signatureHelp: {\n    enabled: true,\n    forConstructorPayloads: true,\n  },\n  incrementalTypechecking: {\n    enable: true,\n    acrossFiles: false,\n  },\n  cache: {\n    projectConfig: {\n      enable: true,\n    },\n  },\n};\n\n// All values here are temporary, and will be overridden as the server is\n// initialized, and the current config is received from the client.\nlet config: { extensionConfiguration: extensionConfiguration } = {\n  extensionConfiguration: initialConfiguration,\n};\n\nexport default config;\n"
  },
  {
    "path": "server/src/constants.ts",
    "content": "import * as path from \"path\";\nimport { ModuleFormat } from \"./buildSchema\";\n\nexport let platformDir =\n  process.arch == \"arm64\" ? process.platform + process.arch : process.platform;\n\n// See https://microsoft.github.io/language-server-protocol/specification Abstract Message\n// version is fixed to 2.0\nexport let jsonrpcVersion = \"2.0\";\n\nexport let editorAnalysisName = \"rescript-editor-analysis.exe\";\nexport let builtinAnalysisDevPath = path.join(\n  path.dirname(__dirname),\n  \"..\",\n  editorAnalysisName,\n);\nexport let builtinAnalysisProdPath = path.join(\n  path.dirname(__dirname),\n  \"analysis_binaries\",\n  platformDir,\n  editorAnalysisName,\n);\n\nexport let rescriptBinName = \"rescript\";\n\nexport let bscBinName = \"bsc\";\n\nexport let nodeModulesBinDir = path.join(\"node_modules\", \".bin\");\n\nexport let bsbLock = \".bsb.lock\";\nexport let bsconfigPartialPath = \"bsconfig.json\";\nexport let rescriptJsonPartialPath = \"rescript.json\";\nexport let compilerDirPartialPath = path.join(\"lib\", \"bs\");\nexport let compilerOcamlDirPartialPath = path.join(\"lib\", \"ocaml\");\nexport let compilerLogPartialPath = path.join(\"lib\", \"bs\", \".compiler.log\");\nexport let compilerInfoPartialPath = path.join(\n  \"lib\",\n  \"bs\",\n  \"compiler-info.json\",\n);\nexport let buildNinjaPartialPath = path.join(\"lib\", \"bs\", \"build.ninja\");\nexport let rewatchLockPartialPath = path.join(\"lib\", \"rewatch.lock\");\nexport let rescriptLockPartialPath = path.join(\"lib\", \"rescript.lock\");\nexport let resExt = \".res\";\nexport let resiExt = \".resi\";\nexport let cmiExt = \".cmi\";\nexport let startBuildAction = \"Start Build\";\n\n// bsconfig defaults according configuration schema (https://rescript-lang.org/docs/manual/latest/build-configuration-schema)\nexport let bsconfigModuleDefault = ModuleFormat.Commonjs;\nexport let bsconfigSuffixDefault = \".js\";\n\nexport let configurationRequestId = 
\"rescript_configuration_request\";\nexport let pullConfigurationInterval = 10_000;\n"
  },
  {
    "path": "server/src/errorReporter.ts",
    "content": "type cb = (msg: string) => void;\n\nlet subscribers: Array<cb> = [];\nconst errorLastNotified: Record<string, number> = {};\n\nexport const onErrorReported = (cb: (msg: string) => void) => {\n  subscribers.push(cb);\n  return () => {\n    subscribers = subscribers.filter((s) => s !== cb);\n  };\n};\n\nexport const reportError = (identifier: string, msg: string) => {\n  // Warn once per 15 min per error\n  if (\n    errorLastNotified[identifier] == null ||\n    errorLastNotified[identifier] < Date.now() - 15 * 1000 * 60\n  ) {\n    errorLastNotified[identifier] = Date.now();\n    subscribers.forEach((cb) => cb(msg));\n  }\n};\n"
  },
  {
    "path": "server/src/find-runtime.ts",
    "content": "import { readdir, stat as statAsync, readFile } from \"fs/promises\";\nimport { join, resolve } from \"path\";\nimport { compilerInfoPartialPath } from \"./constants\";\nimport { NormalizedPath, normalizePath } from \"./utils\";\n\n// Efficient parallel folder traversal to find node_modules directories\nasync function findNodeModulesDirs(\n  rootPath: string,\n  maxDepth = 12,\n): Promise<string[]> {\n  const nodeModulesDirs: string[] = [];\n  const stack: Array<{ dir: string; depth: number }> = [\n    { dir: rootPath, depth: 0 },\n  ];\n  const visited = new Set<string>();\n\n  while (stack.length) {\n    const { dir, depth } = stack.pop()!;\n    if (depth > maxDepth || visited.has(dir)) continue;\n    visited.add(dir);\n\n    let entries: string[];\n    try {\n      entries = await readdir(dir);\n    } catch {\n      continue;\n    }\n\n    if (entries.includes(\"node_modules\")) {\n      const nm = join(dir, \"node_modules\");\n      try {\n        const st = await statAsync(nm);\n        if (st.isDirectory()) {\n          nodeModulesDirs.push(nm);\n          // Do NOT push deeper here to keep same behavior (stop at first node_modules in this branch)\n          continue;\n        }\n      } catch {}\n    }\n\n    for (const entry of entries) {\n      if (entry === \"node_modules\" || entry.startsWith(\".\")) continue;\n      const full = join(dir, entry);\n      try {\n        const st = await statAsync(full);\n        if (st.isDirectory()) {\n          stack.push({ dir: full, depth: depth + 1 });\n        }\n      } catch {}\n    }\n  }\n\n  return nodeModulesDirs;\n}\n\n// Custom function to find Deno or pnpm vendorized @rescript/runtime directories\nasync function findRescriptRuntimeInAlternativeLayout(\n  subfolder: \".deno\" | \".pnpm\",\n  nodeModulesPath: string,\n) {\n  // We only care about the Deno vendorized layout:\n  // <nodeModulesPath>/.deno/@rescript+runtime@<version>/node_modules/@rescript/runtime\n  const alternativeRoot = 
join(nodeModulesPath, subfolder);\n  let entries: string[];\n  try {\n    entries = await readdir(alternativeRoot);\n  } catch {\n    return [];\n  }\n\n  // Collect all @rescript+runtime@<version> vendor dirs\n  const vendorDirs = entries.filter((e) => e.startsWith(\"@rescript+runtime@\"));\n  if (vendorDirs.length === 0) return [];\n\n  // Optionally pick “latest” by version; for now we return all valid matches.\n  const results: string[] = [];\n  for (const dir of vendorDirs) {\n    const runtimePath = join(\n      alternativeRoot,\n      dir,\n      \"node_modules\",\n      \"@rescript\",\n      \"runtime\",\n    );\n    try {\n      const st = await statAsync(runtimePath);\n      if (st.isDirectory()) results.push(runtimePath);\n    } catch {\n      // Ignore inaccessible / missing path\n    }\n  }\n\n  return results;\n}\n\nasync function findRuntimePath(\n  project: NormalizedPath,\n): Promise<NormalizedPath[]> {\n  // Try a compiler-info.json file first\n  const compilerInfo = resolve(project, compilerInfoPartialPath);\n  try {\n    const contents = await readFile(compilerInfo, \"utf8\");\n    const compileInfo: { runtime_path?: string } = JSON.parse(contents);\n    if (compileInfo && compileInfo.runtime_path) {\n      // We somewhat assume the user to pass down a normalized path, but we cannot be sure of this.\n      const normalizedRuntimePath = normalizePath(compileInfo.runtime_path);\n      return normalizedRuntimePath ? 
[normalizedRuntimePath] : [];\n    }\n  } catch {\n    // Ignore errors, fallback to node_modules search\n  }\n\n  // Find all node_modules directories using efficient traversal\n  const node_modules = await findNodeModulesDirs(project);\n\n  const rescriptRuntimeDirs = await Promise.all(\n    node_modules.map(async (nm) => {\n      const results = [];\n\n      // Check for standard layout: @rescript/runtime\n      const standardPath = join(nm, \"@rescript\", \"runtime\");\n      try {\n        const stat = await statAsync(standardPath);\n        if (stat.isDirectory()) {\n          results.push(standardPath);\n          // If we found standard layout, no need to search for pnpm or Deno layouts\n          return results;\n        }\n      } catch (e) {\n        // Directory doesn't exist, continue\n      }\n\n      // Only check for pnpm vendorized layouts if standard layout wasn't found\n      const pnpmResults = await findRescriptRuntimeInAlternativeLayout(\n        \".pnpm\",\n        nm,\n      );\n      results.push(...pnpmResults);\n      if (results.length > 0) {\n        return results;\n      }\n\n      // Only check for Deno vendorized layouts if standard layout wasn't found\n      const denoResults = await findRescriptRuntimeInAlternativeLayout(\n        \".deno\",\n        nm,\n      );\n      results.push(...denoResults);\n\n      return results;\n    }),\n  ).then((results) => results.flatMap((x) => x));\n\n  return rescriptRuntimeDirs.map(\n    // `resolve` ensures we can assume string is now NormalizedPath\n    (runtime) => resolve(runtime) as NormalizedPath,\n  );\n}\n\n/**\n * Find all installed @rescript/runtime directories in the given project path.\n * In a perfect world, there should be exactly one.\n * Note: This function is not cached here. 
Caching is handled by the caller\n * (see getRuntimePathFromWorkspaceRoot in utils.ts).\n */\nexport async function findRescriptRuntimesInProject(\n  project: NormalizedPath,\n): Promise<NormalizedPath[]> {\n  return await findRuntimePath(project);\n}\n"
  },
  {
    "path": "server/src/incrementalCompilation.ts",
    "content": "import * as path from \"path\";\nimport fs from \"fs\";\nimport * as utils from \"./utils\";\nimport { performance } from \"perf_hooks\";\nimport * as p from \"vscode-languageserver-protocol\";\nimport * as cp from \"node:child_process\";\nimport { promisify } from \"node:util\";\nimport semver from \"semver\";\nimport * as os from \"os\";\nimport config, { send } from \"./config\";\nimport * as c from \"./constants\";\nimport { fileCodeActions } from \"./codeActions\";\nimport { projectsFiles } from \"./projectFiles\";\nimport { getRewatchBscArgs, RewatchCompilerArgs } from \"./bsc-args/rewatch\";\nimport { BsbCompilerArgs, getBsbBscArgs } from \"./bsc-args/bsb\";\nimport { NormalizedPath } from \"./utils\";\nimport { getLogger } from \"./logger\";\n\nconst execFilePromise = promisify(cp.execFile);\n\nconst INCREMENTAL_FOLDER_NAME = \"___incremental\";\nconst INCREMENTAL_FILE_FOLDER_LOCATION = path.join(\n  c.compilerDirPartialPath,\n  INCREMENTAL_FOLDER_NAME,\n);\n\nexport type IncrementallyCompiledFileInfo = {\n  file: {\n    /** File type. */\n    extension: \".res\" | \".resi\";\n    /** Path to the source file (normalized). */\n    sourceFilePath: NormalizedPath;\n    /** Name of the source file. */\n    sourceFileName: string;\n    /** Module name of the source file. */\n    moduleName: string;\n    /** Namespaced module name of the source file. */\n    moduleNameNamespaced: string;\n    /** Path to where the incremental file is saved. */\n    incrementalFilePath: NormalizedPath;\n    /** Location of the original type file. */\n    originalTypeFileLocation: NormalizedPath;\n  };\n  buildSystem: \"bsb\" | \"rewatch\";\n  /** Cache for build.ninja assets. */\n  buildNinja: {\n    /** When build.ninja was last modified. Used as a cache key. */\n    fileMtime: number;\n    /** The raw, extracted needed info from build.ninja. Needs processing. */\n    rawExtracted: BsbCompilerArgs;\n  } | null;\n  /** Cache for rewatch compiler args. 
*/\n  buildRewatch: {\n    lastFile: NormalizedPath;\n    compilerArgs: RewatchCompilerArgs;\n  } | null;\n  /** Info of the currently active incremental compilation. `null` if no incremental compilation is active. */\n  compilation: {\n    /** The timeout of the currently active compilation for this incremental file. */\n    timeout: NodeJS.Timeout;\n    /** The trigger token for the currently active compilation. */\n    triggerToken: number;\n  } | null;\n  /** Mechanism to kill the currently active compilation. */\n  abortCompilation: (() => void) | null;\n  /** Project specific information. */\n  project: {\n    /** The root path of the project (normalized to match projectsFiles keys). */\n    rootPath: NormalizedPath;\n    /** The root path of the workspace (if a monorepo) */\n    workspaceRootPath: NormalizedPath;\n    /** Computed location of bsc. */\n    bscBinaryLocation: NormalizedPath;\n    /** The arguments needed for bsc, derived from the project configuration/build.ninja. */\n    callArgs: Promise<Array<string> | null>;\n    /** The location of the incremental folder for this project. */\n    incrementalFolderPath: NormalizedPath;\n  };\n  /** Any code actions for this incremental file. 
*/\n  codeActions: Array<fileCodeActions>;\n};\n\nconst incrementallyCompiledFileInfo: Map<\n  NormalizedPath,\n  IncrementallyCompiledFileInfo\n> = new Map();\nconst hasReportedFeatureFailedError: Set<NormalizedPath> = new Set();\nconst originalTypeFileToFilePath: Map<NormalizedPath, NormalizedPath> =\n  new Map();\n\n/**\n * Cancels the currently active compilation for an entry.\n * Clears the timeout, aborts the compilation, and resets state.\n */\nfunction cancelActiveCompilation(entry: IncrementallyCompiledFileInfo): void {\n  if (entry.compilation != null) {\n    clearTimeout(entry.compilation.timeout);\n    entry.abortCompilation?.();\n    entry.compilation = null;\n    entry.abortCompilation = null;\n  }\n}\n\nexport function incrementalCompilationFileChanged(changedPath: NormalizedPath) {\n  const filePath = originalTypeFileToFilePath.get(changedPath);\n  if (filePath != null) {\n    const entry = incrementallyCompiledFileInfo.get(filePath);\n    if (entry != null) {\n      getLogger().log(\n        \"[watcher] Cleaning up incremental files for \" + filePath,\n      );\n      if (entry.compilation != null) {\n        getLogger().log(\"[watcher] Was compiling, killing\");\n        cancelActiveCompilation(entry);\n      }\n      cleanUpIncrementalFiles(\n        entry.file.sourceFilePath,\n        entry.project.rootPath,\n      );\n    }\n  }\n}\n\nexport function removeIncrementalFileFolder(\n  projectRootPath: NormalizedPath,\n  onAfterRemove?: () => void,\n) {\n  fs.rm(\n    path.resolve(projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION),\n    { force: true, recursive: true },\n    (_) => {\n      onAfterRemove?.();\n    },\n  );\n}\n\nexport function recreateIncrementalFileFolder(projectRootPath: NormalizedPath) {\n  getLogger().log(\"Recreating incremental file folder\");\n  removeIncrementalFileFolder(projectRootPath, () => {\n    fs.mkdir(\n      path.resolve(projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION),\n      { recursive: true },\n      (_) => 
{},\n    );\n  });\n}\n\nexport function cleanUpIncrementalFiles(\n  filePath: NormalizedPath,\n  projectRootPath: NormalizedPath,\n) {\n  const ext = filePath.endsWith(\".resi\") ? \".resi\" : \".res\";\n  const namespace = utils.getNamespaceNameFromConfigFile(projectRootPath);\n  const fileNameNoExt = path.basename(filePath, ext);\n  const moduleNameNamespaced =\n    namespace.kind === \"success\" && namespace.result !== \"\"\n      ? `${fileNameNoExt}-${namespace.result}`\n      : fileNameNoExt;\n\n  getLogger().log(\"Cleaning up incremental file assets for: \" + fileNameNoExt);\n\n  fs.unlink(\n    path.resolve(\n      projectRootPath,\n      INCREMENTAL_FILE_FOLDER_LOCATION,\n      path.basename(filePath),\n    ),\n    (_) => {},\n  );\n\n  [\n    moduleNameNamespaced + \".ast\",\n    moduleNameNamespaced + \".cmt\",\n    moduleNameNamespaced + \".cmti\",\n    moduleNameNamespaced + \".cmi\",\n    moduleNameNamespaced + \".cmj\",\n  ].forEach((file) => {\n    fs.unlink(\n      path.resolve(projectRootPath, INCREMENTAL_FILE_FOLDER_LOCATION, file),\n      (_) => {},\n    );\n  });\n}\n\nexport async function getBscArgs(\n  send: (msg: p.Message) => void,\n  entry: IncrementallyCompiledFileInfo,\n): Promise<BsbCompilerArgs | RewatchCompilerArgs | null> {\n  return entry.buildSystem === \"bsb\"\n    ? await getBsbBscArgs(entry)\n    : await getRewatchBscArgs(\n        send,\n        entry.project.bscBinaryLocation,\n        projectsFiles,\n        entry,\n      );\n}\n\nfunction argCouples(argList: string[]): string[][] {\n  let args: string[][] = [];\n  for (let i = 0; i <= argList.length - 1; i++) {\n    const item = argList[i];\n    const nextIndex = i + 1;\n    const nextItem = argList[nextIndex] ?? 
\"\";\n    if (item.startsWith(\"-\") && nextItem.startsWith(\"-\")) {\n      // Single entry arg\n      args.push([item]);\n    } else if (item.startsWith(\"-\") && nextItem.startsWith(\"'\")) {\n      // Quoted arg, take until ending '\n      const arg = [nextItem.slice(1)];\n      for (let x = nextIndex + 1; x <= argList.length - 1; x++) {\n        let subItem = argList[x];\n        let break_ = false;\n        if (subItem.endsWith(\"'\")) {\n          subItem = subItem.slice(0, subItem.length - 1);\n          i = x;\n          break_ = true;\n        }\n        arg.push(subItem);\n        if (break_) {\n          break;\n        }\n      }\n      args.push([item, arg.join(\" \")]);\n    } else if (item.startsWith(\"-\")) {\n      args.push([item, nextItem]);\n    }\n  }\n  return args;\n}\n\nfunction argsFromCommandString(cmdString: string): Array<Array<string>> {\n  const argList = cmdString\n    .trim()\n    .split(\"command = \")[1]\n    .split(\" \")\n    .map((v) => v.trim())\n    .filter((v) => v !== \"\");\n\n  return argCouples(argList);\n}\nfunction removeAnsiCodes(s: string): string {\n  const ansiEscape = /\\x1B[@-_][0-?]*[ -/]*[@-~]/g;\n  return s.replace(ansiEscape, \"\");\n}\nfunction triggerIncrementalCompilationOfFile(\n  filePath: NormalizedPath,\n  fileContent: string,\n  send: send,\n  onCompilationFinished?: () => void,\n) {\n  let incrementalFileCacheEntry = incrementallyCompiledFileInfo.get(filePath);\n  if (incrementalFileCacheEntry == null) {\n    // New file\n    const projectRootPath = utils.findProjectRootOfFile(filePath);\n    if (projectRootPath == null) {\n      getLogger().log(\"Did not find project root path for \" + filePath);\n      return;\n    }\n    // projectRootPath is already normalized (NormalizedPath) from findProjectRootOfFile\n    // Use getProjectFile to verify the project exists\n    const project = utils.getProjectFile(projectRootPath);\n    if (project == null) {\n      getLogger().log(\"Did not find open project 
for \" + filePath);\n      return;\n    }\n\n    // computeWorkspaceRootPathFromLockfile returns null if lockfile found (local package) or if no parent found\n    const computedWorkspaceRoot =\n      utils.computeWorkspaceRootPathFromLockfile(projectRootPath);\n    // If null, it means either a lockfile was found (local package) or no parent project root exists\n    // In both cases, we default to projectRootPath\n    const workspaceRootPath: NormalizedPath =\n      computedWorkspaceRoot ?? projectRootPath;\n\n    // Determine if lockfile was found for debug logging\n    // If computedWorkspaceRoot is null and projectRootPath is not null, check if parent exists\n    const foundRewatchLockfileInProjectRoot =\n      computedWorkspaceRoot == null &&\n      projectRootPath != null &&\n      utils.findProjectRootOfDir(projectRootPath) != null;\n\n    if (foundRewatchLockfileInProjectRoot) {\n      getLogger().log(\n        `Found rewatch/rescript lockfile in project root, treating as local package in workspace`,\n      );\n    } else {\n      getLogger().log(\n        `Did not find rewatch/rescript lockfile in project root, assuming bsb`,\n      );\n    }\n\n    const bscBinaryLocation = project.bscBinaryLocation;\n    if (bscBinaryLocation == null) {\n      getLogger().log(\"Could not find bsc binary location for \" + filePath);\n      return;\n    }\n    const ext = filePath.endsWith(\".resi\") ? \".resi\" : \".res\";\n    const moduleName = path.basename(filePath, ext);\n    const moduleNameNamespaced =\n      project.namespaceName != null\n        ? 
`${moduleName}-${project.namespaceName}`\n        : moduleName;\n\n    // projectRootPath is already NormalizedPath, appending a constant string still makes it a NormalizedPath\n    const incrementalFolderPath: NormalizedPath = path.join(\n      projectRootPath,\n      INCREMENTAL_FILE_FOLDER_LOCATION,\n    ) as NormalizedPath;\n\n    const relSourcePath = path.relative(projectRootPath, filePath);\n    const relSourceDir = path.dirname(relSourcePath);\n    const typeExt = ext === \".res\" ? \".cmt\" : \".cmti\";\n    const originalTypeFileName =\n      project.namespaceName != null\n        ? `${moduleName}-${project.namespaceName}${typeExt}`\n        : `${moduleName}${typeExt}`;\n    // projectRootPath is already NormalizedPath, appending constant strings still yields a NormalizedPath\n    const originalTypeFileLocation = path.resolve(\n      projectRootPath,\n      c.compilerDirPartialPath,\n      relSourceDir,\n      originalTypeFileName,\n    ) as NormalizedPath;\n\n    incrementalFileCacheEntry = {\n      file: {\n        originalTypeFileLocation,\n        extension: ext,\n        moduleName,\n        moduleNameNamespaced,\n        sourceFileName: moduleName + ext,\n        sourceFilePath: filePath,\n        // As incrementalFolderPath was a NormalizedPath, path.join ensures we can assume string is now NormalizedPath\n        incrementalFilePath: path.join(\n          incrementalFolderPath,\n          moduleName + ext,\n        ) as NormalizedPath,\n      },\n      project: {\n        workspaceRootPath,\n        rootPath: projectRootPath,\n        callArgs: Promise.resolve([]),\n        bscBinaryLocation,\n        incrementalFolderPath,\n      },\n      buildSystem: foundRewatchLockfileInProjectRoot ? 
\"rewatch\" : \"bsb\",\n      buildRewatch: null,\n      buildNinja: null,\n      compilation: null,\n      abortCompilation: null,\n      codeActions: [],\n    };\n\n    incrementalFileCacheEntry.project.callArgs = figureOutBscArgs(\n      send,\n      incrementalFileCacheEntry,\n    );\n    originalTypeFileToFilePath.set(\n      incrementalFileCacheEntry.file.originalTypeFileLocation,\n      incrementalFileCacheEntry.file.sourceFilePath,\n    );\n    incrementallyCompiledFileInfo.set(filePath, incrementalFileCacheEntry);\n  }\n\n  if (incrementalFileCacheEntry == null) return;\n  const entry = incrementalFileCacheEntry;\n  cancelActiveCompilation(entry);\n  const triggerToken = performance.now();\n  const timeout = setTimeout(() => {\n    compileContents(entry, fileContent, send, onCompilationFinished);\n  }, 20);\n\n  entry.compilation = {\n    timeout,\n    triggerToken,\n  };\n}\nfunction verifyTriggerToken(\n  filePath: NormalizedPath,\n  triggerToken: number,\n): boolean {\n  return (\n    incrementallyCompiledFileInfo.get(filePath)?.compilation?.triggerToken ===\n    triggerToken\n  );\n}\n\nconst isWindows = os.platform() === \"win32\";\n\nasync function figureOutBscArgs(\n  send: (msg: p.Message) => void,\n  entry: IncrementallyCompiledFileInfo,\n) {\n  const project = projectsFiles.get(entry.project.rootPath);\n  if (project?.rescriptVersion == null) {\n    getLogger().log(\n      \"Found no project (or ReScript version) for \" + entry.file.sourceFilePath,\n    );\n    return null;\n  }\n  const res = await getBscArgs(send, entry);\n  if (res == null) return null;\n  let astArgs: Array<Array<string>> = [];\n  let buildArgs: Array<Array<string>> = [];\n  let isBsb = Array.isArray(res);\n  if (Array.isArray(res)) {\n    const [astBuildCommand, fullBuildCommand] = res;\n    astArgs = argsFromCommandString(astBuildCommand);\n    buildArgs = argsFromCommandString(fullBuildCommand);\n  } else {\n    astArgs = argCouples(res.parser_args);\n    buildArgs = 
argCouples(res.compiler_args);\n  }\n  let callArgs: Array<string> = [];\n\n  if (config.extensionConfiguration.incrementalTypechecking?.acrossFiles) {\n    callArgs.push(\n      \"-I\",\n      path.resolve(entry.project.rootPath, INCREMENTAL_FILE_FOLDER_LOCATION),\n    );\n  }\n\n  buildArgs.forEach(([key, value]: Array<string>) => {\n    if (key === \"-I\") {\n      if (isBsb) {\n        // On Windows, the value could be wrapped in quotes.\n        value =\n          value.startsWith('\"') && value.endsWith('\"')\n            ? value.substring(1, value.length - 1)\n            : value;\n        /*build.ninja could have quoted full paths\n        Example:\nrule mij\n  command = \"C:\\Users\\moi\\Projects\\my-project\\node_modules\\rescript\\win32\\bsc.exe\" -I src -I \"C:\\Users\\moi\\Projects\\my-project\\node_modules\\@rescript\\core\\lib\\ocaml\" -open RescriptCore  -uncurried -bs-package-name rewindow -bs-package-output esmodule:$in_d:.res.mjs -bs-v $g_finger $i\n        */\n        if (isWindows && value.includes(\":\\\\\")) {\n          callArgs.push(\"-I\", value);\n        } else {\n          callArgs.push(\n            \"-I\",\n            path.resolve(\n              entry.project.rootPath,\n              c.compilerDirPartialPath,\n              value,\n            ),\n          );\n        }\n      } else {\n        // TODO: once ReScript v12 is out we can remove this check for `.`\n        if (value === \".\") {\n          callArgs.push(\n            \"-I\",\n            path.resolve(entry.project.rootPath, c.compilerOcamlDirPartialPath),\n          );\n        } else {\n          callArgs.push(\"-I\", value);\n        }\n      }\n    } else if (key === \"-bs-v\") {\n      callArgs.push(\"-bs-v\", Date.now().toString());\n    } else if (key === \"-bs-package-output\") {\n      return;\n    } else if (value == null || value === \"\") {\n      callArgs.push(key);\n    } else {\n      callArgs.push(key, value);\n    }\n  });\n\n  astArgs.forEach(([key, 
value]: Array<string>) => {\n    if (key.startsWith(\"-bs-jsx\")) {\n      callArgs.push(key, value);\n    } else if (key.startsWith(\"-ppx\")) {\n      callArgs.push(key, value);\n    }\n  });\n\n  callArgs.push(\"-color\", \"never\");\n  // Only available in v11+\n  if (\n    semver.valid(project.rescriptVersion) &&\n    semver.satisfies(project.rescriptVersion as string, \">=11\", {\n      includePrerelease: true,\n    })\n  ) {\n    callArgs.push(\"-ignore-parse-errors\");\n  }\n\n  callArgs = callArgs.filter((v) => v != null && v !== \"\");\n  callArgs.push(entry.file.incrementalFilePath);\n  return callArgs;\n}\n\n/**\n * Remaps code action file paths from the incremental temp file to the actual source file.\n */\nfunction remapCodeActionsToSourceFile(\n  codeActions: Record<string, fileCodeActions[]>,\n  sourceFilePath: NormalizedPath,\n): fileCodeActions[] {\n  const actions = Object.values(codeActions)[0] ?? [];\n\n  // Code actions will point to the locally saved incremental file, so we must remap\n  // them so the editor understands it's supposed to apply them to the unsaved doc,\n  // not the saved \"dummy\" incremental file.\n  actions.forEach((ca) => {\n    if (ca.codeAction.edit != null && ca.codeAction.edit.changes != null) {\n      const change = Object.values(ca.codeAction.edit.changes)[0];\n\n      ca.codeAction.edit.changes = {\n        [utils.pathToURI(sourceFilePath)]: change,\n      };\n    }\n  });\n\n  return actions;\n}\n\n/**\n * Filters diagnostics to remove unwanted parser errors from incremental compilation.\n */\nfunction filterIncrementalDiagnostics(\n  diagnostics: p.Diagnostic[],\n  sourceFileName: string,\n): { filtered: p.Diagnostic[]; hasIgnoredMessages: boolean } {\n  let hasIgnoredMessages = false;\n\n  const filtered = diagnostics\n    .map((d) => ({\n      ...d,\n      message: removeAnsiCodes(d.message),\n    }))\n    // Filter out a few unwanted parser errors since we run the parser in ignore mode\n    .filter((d) => {\n   
   if (\n        !d.message.startsWith(\"Uninterpreted extension 'rescript.\") &&\n        (!d.message.includes(`/${INCREMENTAL_FOLDER_NAME}/${sourceFileName}`) ||\n          // The `Multiple definition of the <kind> name <name>` type error's\n          // message includes the filepath with LOC of the duplicate definition\n          d.message.startsWith(\"Multiple definition of the\") ||\n          // The signature mismatch, with mismatch and ill typed applicative functor\n          // type errors all include the filepath with LOC\n          d.message.startsWith(\"Signature mismatch\") ||\n          d.message.startsWith(\"In this `with' constraint\") ||\n          d.message.startsWith(\"This `with' constraint on\"))\n      ) {\n        hasIgnoredMessages = true;\n        return true;\n      }\n      return false;\n    });\n\n  return { filtered, hasIgnoredMessages };\n}\n\n/**\n * Logs an error when incremental compilation produces unexpected output.\n */\nfunction logIncrementalCompilationError(\n  entry: IncrementallyCompiledFileInfo,\n  stderr: string,\n  callArgs: string[] | null,\n  send: (msg: p.Message) => void,\n): void {\n  hasReportedFeatureFailedError.add(entry.project.rootPath);\n  const logfile = path.resolve(\n    entry.project.incrementalFolderPath,\n    \"error.log\",\n  );\n\n  try {\n    fs.writeFileSync(\n      logfile,\n      `== BSC ARGS ==\\n${callArgs?.join(\" \")}\\n\\n== OUTPUT ==\\n${stderr}`,\n    );\n\n    const params: p.ShowMessageParams = {\n      type: p.MessageType.Warning,\n      message: `[Incremental typechecking] Something might have gone wrong with incremental type checking. 
Check out the [error log](file://${logfile}) and report this issue please.`,\n    };\n\n    const message: p.NotificationMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      method: \"window/showMessage\",\n      params: params,\n    };\n\n    send(message);\n  } catch (e) {\n    console.error(e);\n  }\n}\n\n/**\n * Processes compilation results and publishes diagnostics to the LSP client.\n */\nfunction processAndPublishDiagnostics(\n  entry: IncrementallyCompiledFileInfo,\n  result: Record<string, p.Diagnostic[]>,\n  codeActions: Record<string, fileCodeActions[]>,\n  stderr: string,\n  callArgs: string[] | null,\n  send: (msg: p.Message) => void,\n): void {\n  // Remap code actions to source file\n  const actions = remapCodeActionsToSourceFile(\n    codeActions,\n    entry.file.sourceFilePath,\n  );\n  entry.codeActions = actions;\n\n  // Filter diagnostics\n  const rawDiagnostics = Object.values(result)[0] ?? [];\n  const { filtered: res, hasIgnoredMessages } = filterIncrementalDiagnostics(\n    rawDiagnostics,\n    entry.file.sourceFileName,\n  );\n\n  // Log error if compilation produced unexpected output\n  if (\n    res.length === 0 &&\n    stderr !== \"\" &&\n    !hasIgnoredMessages &&\n    !hasReportedFeatureFailedError.has(entry.project.rootPath)\n  ) {\n    logIncrementalCompilationError(entry, stderr, callArgs, send);\n  }\n\n  const fileUri = utils.pathToURI(entry.file.sourceFilePath);\n\n  // Update filesWithDiagnostics to track this file\n  // entry.project.rootPath is guaranteed to match a key in projectsFiles\n  // (see triggerIncrementalCompilationOfFile where the entry is created)\n  const projectFile = projectsFiles.get(entry.project.rootPath);\n\n  if (projectFile != null) {\n    if (res.length > 0) {\n      projectFile.filesWithDiagnostics.add(fileUri);\n    } else {\n      // Only remove if there are no diagnostics at all\n      projectFile.filesWithDiagnostics.delete(fileUri);\n    }\n  }\n\n  const notification: p.NotificationMessage = {\n  
  jsonrpc: c.jsonrpcVersion,\n    method: \"textDocument/publishDiagnostics\",\n    params: {\n      uri: fileUri,\n      diagnostics: res,\n    },\n  };\n  send(notification);\n}\n\nasync function compileContents(\n  entry: IncrementallyCompiledFileInfo,\n  fileContent: string,\n  send: (msg: p.Message) => void,\n  onCompilationFinished?: () => void,\n) {\n  const triggerToken = entry.compilation?.triggerToken;\n  let callArgs = await entry.project.callArgs;\n  if (callArgs == null) {\n    const callArgsRetried = await figureOutBscArgs(send, entry);\n    if (callArgsRetried != null) {\n      callArgs = callArgsRetried;\n      entry.project.callArgs = Promise.resolve(callArgsRetried);\n    } else {\n      getLogger().log(\n        \"Could not figure out call args. Maybe build.ninja does not exist yet?\",\n      );\n      return;\n    }\n  }\n\n  const startTime = performance.now();\n  if (!fs.existsSync(entry.project.incrementalFolderPath)) {\n    try {\n      fs.mkdirSync(entry.project.incrementalFolderPath, { recursive: true });\n    } catch {}\n  }\n\n  try {\n    fs.writeFileSync(entry.file.incrementalFilePath, fileContent);\n    let cwd =\n      entry.buildSystem === \"bsb\"\n        ? 
entry.project.rootPath\n        : path.resolve(entry.project.rootPath, c.compilerDirPartialPath);\n\n    getLogger().log(\n      `About to invoke bsc from \\\"${cwd}\\\", used ${entry.buildSystem}`,\n    );\n    getLogger().log(\n      `${entry.project.bscBinaryLocation} ${callArgs.map((c) => `\"${c}\"`).join(\" \")}`,\n    );\n\n    // Create AbortController for this compilation\n    const abortController = new AbortController();\n    const { signal } = abortController;\n\n    // Store abort function directly on the entry\n    entry.abortCompilation = () => {\n      getLogger().log(`Aborting compilation of ${entry.file.sourceFileName}`);\n      abortController.abort();\n    };\n\n    try {\n      const { stderr } = await execFilePromise(\n        entry.project.bscBinaryLocation,\n        callArgs,\n        { cwd, signal },\n      ).catch((error) => {\n        if (error.stderr) {\n          return { stderr: error.stderr };\n        }\n        throw error;\n      });\n\n      getLogger().log(\n        `Recompiled ${entry.file.sourceFileName} in ${\n          (performance.now() - startTime) / 1000\n        }s`,\n      );\n\n      // Verify token after async operation\n      if (\n        triggerToken != null &&\n        !verifyTriggerToken(entry.file.sourceFilePath, triggerToken)\n      ) {\n        getLogger().log(\n          `Discarding stale compilation results for ${entry.file.sourceFileName} (token changed)`,\n        );\n        return;\n      }\n\n      const { result, codeActions } = await utils.parseCompilerLogOutput(\n        `${stderr}\\n#Done()`,\n      );\n\n      // Re-verify again after second async operation\n      if (\n        triggerToken != null &&\n        !verifyTriggerToken(entry.file.sourceFilePath, triggerToken)\n      ) {\n        getLogger().log(\n          `Discarding stale compilation results for ${entry.file.sourceFileName} (token changed after parsing)`,\n        );\n        return;\n      }\n\n      processAndPublishDiagnostics(\n      
  entry,\n        result,\n        codeActions,\n        stderr,\n        callArgs,\n        send,\n      );\n    } catch (error: any) {\n      if (error.name === \"AbortError\") {\n        getLogger().log(\n          `Compilation of ${entry.file.sourceFileName} was aborted.`,\n        );\n      } else {\n        getLogger().error(\n          `Unexpected error during compilation of ${entry.file.sourceFileName}: ${error}`,\n        );\n        throw error;\n      }\n    } finally {\n      // Only clean up if this is still the active compilation\n      if (entry.compilation?.triggerToken === triggerToken) {\n        getLogger().log(\"Cleaning up compilation status.\");\n        entry.compilation = null;\n        entry.abortCompilation = null;\n      }\n    }\n  } finally {\n    onCompilationFinished?.();\n  }\n}\n\nexport function handleUpdateOpenedFile(\n  filePath: utils.NormalizedPath,\n  fileContent: string,\n  send: send,\n  onCompilationFinished?: () => void,\n) {\n  getLogger().log(\"Updated: \" + filePath);\n  triggerIncrementalCompilationOfFile(\n    filePath,\n    fileContent,\n    send,\n    onCompilationFinished,\n  );\n}\n\nexport function handleClosedFile(filePath: NormalizedPath) {\n  getLogger().log(\"Closed: \" + filePath);\n  const entry = incrementallyCompiledFileInfo.get(filePath);\n  if (entry == null) return;\n  cleanUpIncrementalFiles(filePath, entry.project.rootPath);\n  incrementallyCompiledFileInfo.delete(filePath);\n  originalTypeFileToFilePath.delete(entry.file.originalTypeFileLocation);\n}\n\nexport function getCodeActionsFromIncrementalCompilation(\n  filePath: NormalizedPath,\n): Array<fileCodeActions> | null {\n  const entry = incrementallyCompiledFileInfo.get(filePath);\n  if (entry != null) {\n    return entry.codeActions;\n  }\n\n  return null;\n}\n"
  },
  {
    "path": "server/src/logger.ts",
    "content": "import * as p from \"vscode-languageserver-protocol\";\nimport * as c from \"./constants\";\n\nexport type LogLevel = \"error\" | \"warn\" | \"info\" | \"log\";\n\nconst levelOrder: Record<LogLevel, number> = {\n  log: 1,\n  info: 2,\n  warn: 3,\n  error: 4,\n};\n\nexport interface Logger {\n  error(message: string): void;\n  warn(message: string): void;\n  info(message: string): void;\n  log(message: string): void;\n}\n\nclass NoOpLogger implements Logger {\n  error(_message: string): void {}\n  warn(_message: string): void {}\n  info(_message: string): void {}\n  log(_message: string): void {}\n}\n\nclass LSPLogger implements Logger {\n  private logLevel: LogLevel = \"info\";\n\n  constructor(private send: (msg: p.Message) => void) {}\n\n  setLogLevel(level: LogLevel): void {\n    this.logLevel = level;\n  }\n\n  private shouldLog(level: LogLevel): boolean {\n    return levelOrder[level] >= levelOrder[this.logLevel];\n  }\n\n  error(message: string): void {\n    if (this.shouldLog(\"error\")) {\n      this.sendLogMessage(message, p.MessageType.Error);\n    }\n  }\n\n  warn(message: string): void {\n    if (this.shouldLog(\"warn\")) {\n      this.sendLogMessage(message, p.MessageType.Warning);\n    }\n  }\n\n  info(message: string): void {\n    if (this.shouldLog(\"info\")) {\n      this.sendLogMessage(message, p.MessageType.Info);\n    }\n  }\n\n  log(message: string): void {\n    if (this.shouldLog(\"log\")) {\n      this.sendLogMessage(message, p.MessageType.Log);\n    }\n  }\n\n  private sendLogMessage(message: string, type: p.MessageType): void {\n    const notification: p.NotificationMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      method: \"window/logMessage\",\n      params: { type, message },\n    };\n    this.send(notification);\n  }\n}\n\n// Default no-op instance\nlet instance: Logger = new NoOpLogger();\n\nexport function initializeLogger(send: (msg: p.Message) => void): void {\n  instance = new LSPLogger(send);\n}\n\nexport 
function setLogLevel(level: LogLevel): void {\n  if (instance instanceof LSPLogger) {\n    instance.setLogLevel(level);\n  }\n}\n\nexport function getLogger(): Logger {\n  return instance;\n}\n"
  },
  {
    "path": "server/src/lookup.ts",
    "content": "import * as fs from \"fs\";\nimport * as path from \"path\";\n\nimport { BuildSchema, ModuleFormat, ModuleFormatObject } from \"./buildSchema\";\nimport * as c from \"./constants\";\nimport { NormalizedPath, normalizePath } from \"./utils\";\n\nconst getCompiledFolderName = (moduleFormat: ModuleFormat): string => {\n  switch (moduleFormat) {\n    case \"esmodule\":\n    case \"es6\":\n      return \"es6\";\n    case \"es6-global\":\n      return \"es6_global\";\n    case \"commonjs\":\n    default:\n      return \"js\";\n  }\n};\n\nexport const replaceFileExtension = (filePath: string, ext: string): string => {\n  let name = path.basename(filePath, path.extname(filePath));\n  return path.format({ dir: path.dirname(filePath), name, ext });\n};\n\nexport const replaceFileExtensionWithNormalizedPath = (\n  filePath: NormalizedPath,\n  ext: string,\n): NormalizedPath => {\n  let name = path.basename(filePath, path.extname(filePath));\n  const result = path.format({ dir: path.dirname(filePath), name, ext });\n  // path.format() doesn't preserve normalization, so we need to normalize the result\n  const normalized = normalizePath(result);\n  if (normalized == null) {\n    // Should never happen, but handle gracefully\n    return result as NormalizedPath;\n  }\n  return normalized;\n};\n\n// Check if filePartialPath exists at directory and return the joined path,\n// otherwise recursively check parent directories for it.\nexport const findFilePathFromProjectRoot = (\n  directory: NormalizedPath | null, // This must be a directory and not a file!\n  filePartialPath: string,\n): NormalizedPath | null => {\n  if (directory == null) {\n    return null;\n  }\n\n  let filePath = path.join(directory, filePartialPath);\n  if (fs.existsSync(filePath)) {\n    return normalizePath(filePath);\n  }\n\n  let parentDirStr = path.dirname(directory);\n  if (parentDirStr === directory) {\n    // reached the top\n    return null;\n  }\n\n  const parentDir = 
normalizePath(parentDirStr);\n\n  return findFilePathFromProjectRoot(parentDir, filePartialPath);\n};\n\nexport const readConfig = (projDir: NormalizedPath): BuildSchema | null => {\n  try {\n    let rescriptJson = path.join(projDir, c.rescriptJsonPartialPath);\n    let bsconfigJson = path.join(projDir, c.bsconfigPartialPath);\n\n    let configFile = fs.readFileSync(\n      fs.existsSync(rescriptJson) ? rescriptJson : bsconfigJson,\n      { encoding: \"utf-8\" },\n    );\n\n    let result: BuildSchema = JSON.parse(configFile);\n    return result;\n  } catch (e) {\n    return null;\n  }\n};\n\n// Collect data from bsconfig to be able to find out the correct path of\n// the compiled JS artifacts.\nexport const getSuffixAndPathFragmentFromBsconfig = (bsconfig: BuildSchema) => {\n  let pkgSpecs = bsconfig[\"package-specs\"];\n  let pathFragment = \"\";\n  let module = c.bsconfigModuleDefault;\n  let moduleFormatObj: ModuleFormatObject = { module: module };\n  let suffix = c.bsconfigSuffixDefault;\n\n  if (pkgSpecs) {\n    if (\n      !Array.isArray(pkgSpecs) &&\n      typeof pkgSpecs !== \"string\" &&\n      pkgSpecs.module\n    ) {\n      moduleFormatObj = pkgSpecs;\n    } else if (typeof pkgSpecs === \"string\") {\n      module = pkgSpecs;\n    } else if (Array.isArray(pkgSpecs) && pkgSpecs[0]) {\n      if (typeof pkgSpecs[0] === \"string\") {\n        module = pkgSpecs[0];\n      } else {\n        moduleFormatObj = pkgSpecs[0];\n      }\n    }\n  }\n\n  if (moduleFormatObj[\"module\"]) {\n    module = moduleFormatObj[\"module\"];\n  }\n\n  if (!moduleFormatObj[\"in-source\"]) {\n    pathFragment = \"lib/\" + getCompiledFolderName(module);\n  }\n\n  if (moduleFormatObj.suffix) {\n    suffix = moduleFormatObj.suffix;\n  } else if (bsconfig.suffix) {\n    suffix = bsconfig.suffix;\n  }\n\n  return [suffix, pathFragment];\n};\n\nexport const getFilenameFromBsconfig = (\n  projDir: NormalizedPath,\n  partialFilePath: string,\n): NormalizedPath | null => {\n  let bsconfig 
= readConfig(projDir);\n\n  if (!bsconfig) {\n    return null;\n  }\n\n  let [suffix, pathFragment] = getSuffixAndPathFragmentFromBsconfig(bsconfig);\n\n  let compiledPartialPath = replaceFileExtension(partialFilePath, suffix);\n\n  const result = path.join(projDir, pathFragment, compiledPartialPath);\n  return normalizePath(result);\n};\n\n// Monorepo helpers\nexport const getFilenameFromRootBsconfig = (\n  projDir: NormalizedPath,\n  partialFilePath: string,\n): NormalizedPath | null => {\n  // Start searching from the parent directory of projDir to find the workspace root\n  const parentDir = normalizePath(path.dirname(projDir));\n\n  let rootConfigPath = findFilePathFromProjectRoot(\n    parentDir,\n    c.rescriptJsonPartialPath,\n  );\n\n  if (!rootConfigPath) {\n    rootConfigPath = findFilePathFromProjectRoot(\n      parentDir,\n      c.bsconfigPartialPath,\n    );\n  }\n\n  if (!rootConfigPath) {\n    return null;\n  }\n\n  const rootConfigDir = normalizePath(path.dirname(rootConfigPath));\n  if (rootConfigDir == null) {\n    return null;\n  }\n  let rootConfig = readConfig(rootConfigDir);\n\n  if (!rootConfig) {\n    return null;\n  }\n\n  let [suffix, pathFragment] = getSuffixAndPathFragmentFromBsconfig(rootConfig);\n\n  let compiledPartialPath = replaceFileExtension(partialFilePath, suffix);\n\n  const result = path.join(projDir, pathFragment, compiledPartialPath);\n  return normalizePath(result);\n};\n"
  },
  {
    "path": "server/src/projectFiles.ts",
    "content": "import * as cp from \"node:child_process\";\nimport * as p from \"vscode-languageserver-protocol\";\nimport { NormalizedPath, FileURI } from \"./utils\";\n\nexport type filesDiagnostics = {\n  [key: FileURI]: p.Diagnostic[];\n};\n\nexport interface projectFiles {\n  openFiles: Set<NormalizedPath>;\n  filesWithDiagnostics: Set<FileURI>;\n  filesDiagnostics: filesDiagnostics;\n  rescriptVersion: string | undefined;\n  bscBinaryLocation: NormalizedPath | null;\n  editorAnalysisLocation: string | null;\n  namespaceName: string | null;\n\n  bsbWatcherByEditor: null | cp.ChildProcess;\n  // The root path where the build watcher runs (could be monorepo root)\n  // Used for lock file cleanup when killing the watcher\n  buildRootPath: NormalizedPath | null;\n\n  // This keeps track of whether we've prompted the user to start a build\n  // automatically, if there's no build currently running for the project. We\n  // only want to prompt the user about this once, or it becomes\n  // annoying.\n  // The type `never` means that we won't show the prompt if the project is inside node_modules\n  hasPromptedToStartBuild: boolean | \"never\";\n}\n\n/**\n * Map of project root paths to their project state.\n *\n * Keys are normalized paths (NormalizedPath) to ensure consistent lookups\n * and prevent path format mismatches. All paths should be normalized using\n * `normalizePath()` before being used as keys.\n */\nexport let projectsFiles: Map<NormalizedPath, projectFiles> = new Map();\n"
  },
  {
    "path": "server/src/server.ts",
    "content": "import process from \"process\";\nimport * as p from \"vscode-languageserver-protocol\";\nimport * as v from \"vscode-languageserver\";\nimport * as rpc from \"vscode-jsonrpc/node\";\nimport * as path from \"path\";\nimport semver from \"semver\";\nimport fs from \"fs\";\nimport fsAsync from \"fs/promises\";\nimport {\n  DidChangeWatchedFilesNotification,\n  DidOpenTextDocumentNotification,\n  DidChangeTextDocumentNotification,\n  DidCloseTextDocumentNotification,\n  DidChangeConfigurationNotification,\n  InitializeParams,\n  InlayHintParams,\n  CodeLensParams,\n  SignatureHelpParams,\n  InitializedNotification,\n} from \"vscode-languageserver-protocol\";\nimport * as lookup from \"./lookup\";\nimport * as utils from \"./utils\";\nimport * as codeActions from \"./codeActions\";\nimport * as c from \"./constants\";\nimport { assert } from \"console\";\nimport { WorkspaceEdit } from \"vscode-languageserver\";\nimport { onErrorReported } from \"./errorReporter\";\nimport * as ic from \"./incrementalCompilation\";\nimport config, { extensionConfiguration, initialConfiguration } from \"./config\";\nimport { projectsFiles } from \"./projectFiles\";\nimport { NormalizedPath } from \"./utils\";\nimport { initializeLogger, getLogger, setLogLevel } from \"./logger\";\n\nfunction applyUserConfiguration(configuration: extensionConfiguration) {\n  // We always want to spread the initial configuration to ensure all defaults are respected.\n  config.extensionConfiguration = Object.assign(\n    {},\n    initialConfiguration,\n    configuration,\n  );\n\n  const level = config.extensionConfiguration.logLevel;\n\n  if (\n    level === \"error\" ||\n    level === \"warn\" ||\n    level === \"info\" ||\n    level === \"log\"\n  ) {\n    setLogLevel(level);\n  }\n}\n\n// Absolute paths to all the workspace folders\n// Configured during the initialize request\nexport const workspaceFolders = new Set<NormalizedPath>();\n\n// This holds client capabilities specific to our 
extension, and not necessarily\n// related to the LS protocol. It's for enabling/disabling features that might\n// work in one client, like VSCode, but perhaps not in others, like vim.\nexport interface extensionClientCapabilities {\n  supportsMarkdownLinks?: boolean | null;\n  supportsSnippetSyntax?: boolean | null;\n}\nlet extensionClientCapabilities: extensionClientCapabilities = {};\n\n// Below here is some state that's not important exactly how long it lives.\nlet pullConfigurationPeriodically: NodeJS.Timeout | null = null;\n\n// https://microsoft.github.io/language-server-protocol/specification#initialize\n// According to the spec, there could be requests before the 'initialize' request. Link in comment tells how to handle them.\nlet initialized = false;\nlet serverSentRequestIdCounter = 0;\n// https://microsoft.github.io/language-server-protocol/specification#exit\nlet shutdownRequestAlreadyReceived = false;\nlet stupidFileContentCache: Map<string, string> = new Map();\n\n// ^ caching AND states AND distributed system. 
Why does LSP has to be stupid like this\n\n// This keeps track of code actions extracted from diagnostics.\nlet codeActionsFromDiagnostics: codeActions.filesCodeActions = {};\n\n// will be properly defined later depending on the mode (stdio/node-rpc)\nlet send: (msg: p.Message) => void = (_) => {};\n\ntype ProjectCompilationState = {\n  active: boolean;\n  startAt: number | null;\n  lastSent: {\n    status: \"compiling\" | \"success\" | \"error\" | \"warning\";\n    errorCount: number;\n    warningCount: number;\n  } | null;\n  timer: NodeJS.Timeout | null;\n};\nconst projectCompilationStates: Map<string, ProjectCompilationState> =\n  new Map();\n\ntype CompilationStatusPayload = {\n  project: string;\n  projectRootPath: NormalizedPath;\n  status: \"compiling\" | \"success\" | \"error\" | \"warning\";\n  errorCount: number;\n  warningCount: number;\n};\n\nconst sendCompilationStatus = (payload: CompilationStatusPayload) => {\n  const message: p.NotificationMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    method: \"rescript/compilationStatus\",\n    params: payload,\n  };\n  send(message);\n};\n\nlet findRescriptBinary = async (\n  projectRootPath: utils.NormalizedPath | null,\n): Promise<utils.NormalizedPath | null> => {\n  if (\n    config.extensionConfiguration.binaryPath != null &&\n    fs.existsSync(\n      path.join(config.extensionConfiguration.binaryPath, \"rescript\"),\n    )\n  ) {\n    return utils.normalizePath(\n      path.join(config.extensionConfiguration.binaryPath, \"rescript\"),\n    );\n  }\n\n  return utils.findRescriptBinary(projectRootPath);\n};\n\nlet createInterfaceRequest = new v.RequestType<\n  p.TextDocumentIdentifier,\n  p.TextDocumentIdentifier,\n  void\n>(\"textDocument/createInterface\");\n\nlet openCompiledFileRequest = new v.RequestType<\n  p.TextDocumentIdentifier,\n  p.TextDocumentIdentifier,\n  void\n>(\"textDocument/openCompiled\");\n\n// Request to start the build watcher for a project\nlet startBuildRequest = new 
v.RequestType<\n  p.TextDocumentIdentifier,\n  { success: boolean },\n  void\n>(\"rescript/startBuild\");\n\nexport let getCurrentCompilerDiagnosticsForFile = (\n  fileUri: utils.FileURI,\n): p.Diagnostic[] => {\n  let diagnostics: p.Diagnostic[] | null = null;\n\n  projectsFiles.forEach((projectFile, _projectRootPath) => {\n    if (diagnostics == null && projectFile.filesDiagnostics[fileUri] != null) {\n      diagnostics = projectFile.filesDiagnostics[fileUri].slice();\n    }\n  });\n\n  return diagnostics ?? [];\n};\n\nlet sendUpdatedDiagnostics = async () => {\n  for (const [projectRootPath, projectFile] of projectsFiles) {\n    let { filesWithDiagnostics } = projectFile;\n    let compilerLogPath = path.join(projectRootPath, c.compilerLogPartialPath);\n    let content = \"\";\n    try {\n      content = fs.readFileSync(compilerLogPath, { encoding: \"utf-8\" });\n    } catch (e) {\n      console.error(`Error reading compiler log file ${compilerLogPath}: ${e}`);\n    }\n    let {\n      done,\n      result: filesAndErrors,\n      codeActions,\n      linesWithParseErrors,\n    } = await utils.parseCompilerLogOutput(content);\n\n    if (linesWithParseErrors.length > 0) {\n      let params: p.ShowMessageParams = {\n        type: p.MessageType.Warning,\n        message: `There are more compiler warning/errors that we could not parse. 
You can help us fix this by opening an [issue on the repository](https://github.com/rescript-lang/rescript-vscode/issues/new?title=Compiler%20log%20parse%20error), pasting the contents of the file [lib/bs/.compiler.log](file://${compilerLogPath}).`,\n      };\n      let message: p.NotificationMessage = {\n        jsonrpc: c.jsonrpcVersion,\n        method: \"window/showMessage\",\n        params: params,\n      };\n      send(message);\n    }\n\n    projectFile.filesDiagnostics = filesAndErrors;\n    codeActionsFromDiagnostics = codeActions;\n\n    // diff\n    (\n      Object.entries(filesAndErrors) as Array<[utils.FileURI, p.Diagnostic[]]>\n    ).forEach(([fileUri, diagnostics]) => {\n      let params: p.PublishDiagnosticsParams = {\n        uri: fileUri,\n        diagnostics,\n      };\n      let notification: p.NotificationMessage = {\n        jsonrpc: c.jsonrpcVersion,\n        method: \"textDocument/publishDiagnostics\",\n        params: params,\n      };\n      send(notification);\n\n      filesWithDiagnostics.add(fileUri);\n    });\n    if (done) {\n      // clear old files\n      filesWithDiagnostics.forEach((file) => {\n        if (filesAndErrors[file] == null) {\n          // Doesn't exist in the new diagnostics. Clear this diagnostic\n          let params: p.PublishDiagnosticsParams = {\n            uri: file,\n            diagnostics: [],\n          };\n          let notification: p.NotificationMessage = {\n            jsonrpc: c.jsonrpcVersion,\n            method: \"textDocument/publishDiagnostics\",\n            params: params,\n          };\n          send(notification);\n          filesWithDiagnostics.delete(file);\n        }\n      });\n    }\n\n    try {\n      const state = projectCompilationStates.get(projectRootPath) ?? 
{\n        active: false,\n        startAt: null,\n        lastSent: null,\n        timer: null,\n      };\n\n      const lastStart = content.lastIndexOf(\"#Start\");\n      const lastDone = content.lastIndexOf(\"#Done\");\n      const isActive = lastStart > lastDone;\n\n      let errorCount = 0;\n      let warningCount = 0;\n      for (const [fileUri, diags] of Object.entries(filesAndErrors) as Array<\n        [utils.FileURI, p.Diagnostic[]]\n      >) {\n        const filePath = utils.uriToNormalizedPath(fileUri);\n        if (filePath.startsWith(projectRootPath)) {\n          for (const d of diags as v.Diagnostic[]) {\n            if (d.severity === v.DiagnosticSeverity.Error) errorCount++;\n            else if (d.severity === v.DiagnosticSeverity.Warning)\n              warningCount++;\n          }\n        }\n      }\n\n      const projectName = path.basename(projectRootPath);\n\n      const sendIfChanged = (\n        status: \"compiling\" | \"success\" | \"error\" | \"warning\",\n      ) => {\n        const last = state.lastSent;\n        if (\n          last == null ||\n          last.status !== status ||\n          last.errorCount !== errorCount ||\n          last.warningCount !== warningCount\n        ) {\n          sendCompilationStatus({\n            project: projectName,\n            projectRootPath,\n            status,\n            errorCount,\n            warningCount,\n          });\n          state.lastSent = { status, errorCount, warningCount };\n        }\n      };\n\n      if (isActive) {\n        if (!state.active) {\n          state.active = true;\n          state.startAt = Date.now();\n          if (state.timer) clearTimeout(state.timer);\n          state.timer = setTimeout(() => {\n            const cur = projectCompilationStates.get(projectRootPath);\n            if (cur && cur.active) {\n              sendIfChanged(\"compiling\");\n            }\n          }, 100);\n        }\n      } else {\n        if (state.timer) {\n          
clearTimeout(state.timer);\n          state.timer = null;\n        }\n        state.active = false;\n        state.startAt = null;\n\n        if (errorCount > 0) {\n          sendIfChanged(\"error\");\n        } else if (warningCount > 0) {\n          sendIfChanged(\"warning\");\n        } else {\n          sendIfChanged(\"success\");\n        }\n      }\n\n      projectCompilationStates.set(projectRootPath, state);\n    } catch {}\n  }\n};\n\nlet deleteProjectDiagnostics = (projectRootPath: utils.NormalizedPath) => {\n  let root = projectsFiles.get(projectRootPath);\n  if (root != null) {\n    root.filesWithDiagnostics.forEach((file) => {\n      let params: p.PublishDiagnosticsParams = {\n        uri: file,\n        diagnostics: [],\n      };\n      let notification: p.NotificationMessage = {\n        jsonrpc: c.jsonrpcVersion,\n        method: \"textDocument/publishDiagnostics\",\n        params: params,\n      };\n      send(notification);\n    });\n\n    projectsFiles.delete(projectRootPath);\n    projectCompilationStates.delete(projectRootPath);\n    if (config.extensionConfiguration.incrementalTypechecking?.enable) {\n      ic.removeIncrementalFileFolder(projectRootPath);\n    }\n  }\n};\n\nlet sendCompilationFinishedMessage = () => {\n  let notification: p.NotificationMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    method: \"rescript/compilationFinished\",\n  };\n\n  send(notification);\n};\n\nlet syncProjectConfigCache = async (rootPath: utils.NormalizedPath) => {\n  try {\n    getLogger().log(\"syncing project config cache for \" + rootPath);\n    await utils.runAnalysisAfterSanityCheck(rootPath, [\n      \"cache-project\",\n      rootPath,\n    ]);\n    getLogger().log(\"OK - synced project config cache for \" + rootPath);\n  } catch (e) {\n    getLogger().error(String(e));\n  }\n};\n\nlet deleteProjectConfigCache = async (rootPath: utils.NormalizedPath) => {\n  try {\n    getLogger().log(\"deleting project config cache for \" + rootPath);\n    await 
utils.runAnalysisAfterSanityCheck(rootPath, [\n      \"cache-delete\",\n      rootPath,\n    ]);\n    getLogger().log(\"OK - deleted project config cache for \" + rootPath);\n  } catch (e) {\n    getLogger().error(String(e));\n  }\n};\n\nasync function onWorkspaceDidChangeWatchedFiles(\n  params: p.DidChangeWatchedFilesParams,\n) {\n  await Promise.all(\n    params.changes.map(async (change) => {\n      if (change.uri.includes(\"build.ninja\")) {\n        if (\n          config.extensionConfiguration.cache?.projectConfig?.enable === true\n        ) {\n          let projectRoot = utils.findProjectRootOfFile(\n            utils.uriToNormalizedPath(change.uri as utils.FileURI),\n          );\n          if (projectRoot != null) {\n            await syncProjectConfigCache(projectRoot);\n          }\n        }\n      } else if (change.uri.includes(\"compiler.log\")) {\n        try {\n          await sendUpdatedDiagnostics();\n          sendCompilationFinishedMessage();\n          if (config.extensionConfiguration.inlayHints?.enable === true) {\n            sendInlayHintsRefresh();\n          }\n          if (config.extensionConfiguration.codeLens === true) {\n            sendCodeLensRefresh();\n          }\n        } catch {\n          getLogger().error(\"Error while sending updated diagnostics\");\n        }\n      } else {\n        ic.incrementalCompilationFileChanged(\n          utils.uriToNormalizedPath(change.uri as utils.FileURI),\n        );\n      }\n    }),\n  );\n}\n\ntype clientSentBuildAction = {\n  title: string;\n  projectRootPath: utils.NormalizedPath;\n};\nlet openedFile = async (fileUri: utils.FileURI, fileContent: string) => {\n  let filePath = utils.uriToNormalizedPath(fileUri);\n\n  stupidFileContentCache.set(filePath, fileContent);\n\n  let projectRootPath = utils.findProjectRootOfFile(filePath);\n  if (projectRootPath != null) {\n    // projectRootPath is already normalized (NormalizedPath) from findProjectRootOfFile\n    let projectRootState = 
projectsFiles.get(projectRootPath);\n    if (projectRootState == null) {\n      if (config.extensionConfiguration.incrementalTypechecking?.enable) {\n        ic.recreateIncrementalFileFolder(projectRootPath);\n      }\n      const namespaceName =\n        utils.getNamespaceNameFromConfigFile(projectRootPath);\n\n      projectRootState = {\n        openFiles: new Set(),\n        filesWithDiagnostics: new Set(),\n        filesDiagnostics: {},\n        namespaceName:\n          namespaceName.kind === \"success\" ? namespaceName.result : null,\n        rescriptVersion:\n          await utils.findReScriptVersionForProjectRoot(projectRootPath),\n        bsbWatcherByEditor: null,\n        buildRootPath: null,\n        bscBinaryLocation: await utils.findBscExeBinary(projectRootPath),\n        editorAnalysisLocation:\n          await utils.findEditorAnalysisBinary(projectRootPath),\n        hasPromptedToStartBuild: /(\\/|\\\\)node_modules(\\/|\\\\)/.test(\n          projectRootPath,\n        )\n          ? \"never\"\n          : false,\n      };\n      projectsFiles.set(projectRootPath, projectRootState);\n      if (config.extensionConfiguration.cache?.projectConfig?.enable === true) {\n        await syncProjectConfigCache(projectRootPath);\n      }\n    }\n    let root = projectsFiles.get(projectRootPath)!;\n    root.openFiles.add(filePath);\n    // check if a lock file exists. 
If not, start a build watcher ourselves\n    // because otherwise the diagnostics info we'll display might be stale\n    // ReScript < 12: .bsb.lock in project root\n    // ReScript >= 12: lib/rescript.lock\n    // For monorepos, the lock file is at the monorepo root, not the subpackage\n    let rescriptBinaryPath = await findRescriptBinary(projectRootPath);\n    let buildRootPath = projectRootPath;\n    if (rescriptBinaryPath != null) {\n      const monorepoRootPath =\n        utils.getMonorepoRootFromBinaryPath(rescriptBinaryPath);\n      if (monorepoRootPath != null) {\n        buildRootPath = monorepoRootPath;\n      }\n    }\n    let bsbLockPath = path.join(buildRootPath, c.bsbLock);\n    let rescriptLockPath = path.join(buildRootPath, c.rescriptLockPartialPath);\n    let hasLockFile =\n      fs.existsSync(bsbLockPath) || fs.existsSync(rescriptLockPath);\n    if (\n      projectRootState.hasPromptedToStartBuild === false &&\n      config.extensionConfiguration.askToStartBuild === true &&\n      !hasLockFile\n    ) {\n      // TODO: sometime stale .bsb.lock dangling. bsb -w knows .bsb.lock is\n      // stale. 
Use that logic\n      // TODO: close watcher when lang-server shuts down\n      if (rescriptBinaryPath != null) {\n        getLogger().info(`Prompting to start build for ${buildRootPath}`);\n        let payload: clientSentBuildAction = {\n          title: c.startBuildAction,\n          projectRootPath: buildRootPath,\n        };\n        let params = {\n          type: p.MessageType.Info,\n          message: `Start a build for this project to get the freshest data?`,\n          actions: [payload],\n        };\n        let request: p.RequestMessage = {\n          jsonrpc: c.jsonrpcVersion,\n          id: serverSentRequestIdCounter++,\n          method: \"window/showMessageRequest\",\n          params: params,\n        };\n        send(request);\n        projectRootState.hasPromptedToStartBuild = true;\n        // the client might send us back the \"start build\" action, which we'll\n        // handle in the isResponseMessage check in the message handling way\n        // below\n      } else {\n        let request: p.NotificationMessage = {\n          jsonrpc: c.jsonrpcVersion,\n          method: \"window/showMessage\",\n          params: {\n            type: p.MessageType.Error,\n            message:\n              config.extensionConfiguration.binaryPath == null\n                ? `Can't find ReScript binary in  ${path.join(\n                    projectRootPath,\n                    c.nodeModulesBinDir,\n                  )} or parent directories. Did you install it? 
It's required to use \"rescript\" > 9.1`\n                : `Can't find ReScript binary in the directory ${config.extensionConfiguration.binaryPath}`,\n          },\n        };\n        send(request);\n      }\n    }\n\n    // no need to call sendUpdatedDiagnostics() here; the watcher add will\n    // call the listener which calls it\n  }\n};\n\nlet closedFile = async (fileUri: utils.FileURI) => {\n  let filePath = utils.uriToNormalizedPath(fileUri);\n  getLogger().log(`Closing file ${filePath}`);\n\n  if (config.extensionConfiguration.incrementalTypechecking?.enable) {\n    ic.handleClosedFile(filePath);\n  }\n\n  stupidFileContentCache.delete(filePath);\n\n  let projectRootPath = utils.findProjectRootOfFile(filePath);\n  if (projectRootPath != null) {\n    let root = projectsFiles.get(projectRootPath);\n    if (root != null) {\n      root.openFiles.delete(filePath);\n      getLogger().log(\n        `Open files remaining for ${projectRootPath}: ${root.openFiles.size}`,\n      );\n      // clear diagnostics too if no open files open in said project\n      if (root.openFiles.size === 0) {\n        await deleteProjectConfigCache(projectRootPath);\n        deleteProjectDiagnostics(projectRootPath);\n        if (root.bsbWatcherByEditor !== null) {\n          getLogger().info(\n            `Killing build watcher for ${projectRootPath} (all files closed)`,\n          );\n          utils.killBuildWatcher(\n            root.bsbWatcherByEditor,\n            root.buildRootPath ?? 
undefined,\n          );\n          root.bsbWatcherByEditor = null;\n          root.buildRootPath = null;\n        }\n      }\n    } else {\n      getLogger().log(`No project state found for ${projectRootPath}`);\n    }\n  }\n};\n\nlet updateOpenedFile = (fileUri: utils.FileURI, fileContent: string) => {\n  getLogger().info(\n    `Updating opened file ${fileUri}, incremental TC enabled: ${config.extensionConfiguration.incrementalTypechecking?.enable}`,\n  );\n  let filePath = utils.uriToNormalizedPath(fileUri);\n  assert(stupidFileContentCache.has(filePath));\n  stupidFileContentCache.set(filePath, fileContent);\n  if (config.extensionConfiguration.incrementalTypechecking?.enable) {\n    ic.handleUpdateOpenedFile(filePath, fileContent, send, () => {\n      if (config.extensionConfiguration.codeLens) {\n        sendCodeLensRefresh();\n      }\n      if (config.extensionConfiguration.inlayHints) {\n        sendInlayHintsRefresh();\n      }\n    });\n  }\n};\nlet getOpenedFileContent = (fileUri: utils.FileURI) => {\n  let filePath = utils.uriToNormalizedPath(fileUri);\n  let content = stupidFileContentCache.get(filePath)!;\n  assert(content != null);\n  return content;\n};\n\nexport default function listen(useStdio = false) {\n  // Start listening now!\n  // We support two modes: the regular node RPC mode for VSCode, and the --stdio\n  // mode for other editors The latter is _technically unsupported_. 
It's an\n  // implementation detail that might change at any time\n  if (useStdio) {\n    let writer = new rpc.StreamMessageWriter(process.stdout);\n    let reader = new rpc.StreamMessageReader(process.stdin);\n    // proper `this` scope for writer\n    send = (msg: p.Message) => writer.write(msg);\n    initializeLogger(send);\n    reader.listen(onMessage);\n  } else {\n    // proper `this` scope for process\n    send = (msg: p.Message) => process.send!(msg);\n    initializeLogger(send);\n    process.on(\"message\", onMessage);\n  }\n}\n\nasync function hover(msg: p.RequestMessage) {\n  let params = msg.params as p.HoverParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI);\n  let tmpname = utils.createFileInTempDir();\n  fs.writeFileSync(tmpname, code, { encoding: \"utf-8\" });\n  let response = await utils.runAnalysisCommand(\n    filePath,\n    [\n      \"hover\",\n      filePath,\n      params.position.line,\n      params.position.character,\n      tmpname,\n      Boolean(extensionClientCapabilities.supportsMarkdownLinks),\n    ],\n    msg,\n  );\n  fs.unlink(tmpname, () => null);\n  return response;\n}\n\nasync function inlayHint(msg: p.RequestMessage) {\n  const params = msg.params as p.InlayHintParams;\n  const filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n\n  const response = await utils.runAnalysisCommand(\n    filePath,\n    [\n      \"inlayHint\",\n      filePath,\n      params.range.start.line,\n      params.range.end.line,\n      config.extensionConfiguration.inlayHints?.maxLength,\n    ],\n    msg,\n  );\n  return response;\n}\n\nfunction sendInlayHintsRefresh() {\n  let request: p.RequestMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    method: p.InlayHintRefreshRequest.method,\n    id: serverSentRequestIdCounter++,\n  };\n  send(request);\n}\n\nasync function codeLens(msg: 
p.RequestMessage) {\n  const params = msg.params as p.CodeLensParams;\n  const filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n\n  const response = await utils.runAnalysisCommand(\n    filePath,\n    [\"codeLens\", filePath],\n    msg,\n  );\n  return response;\n}\n\nfunction sendCodeLensRefresh() {\n  let request: p.RequestMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    method: p.CodeLensRefreshRequest.method,\n    id: serverSentRequestIdCounter++,\n  };\n  send(request);\n}\n\nasync function signatureHelp(msg: p.RequestMessage) {\n  let params = msg.params as p.SignatureHelpParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI);\n  let tmpname = utils.createFileInTempDir();\n  fs.writeFileSync(tmpname, code, { encoding: \"utf-8\" });\n  let response = await utils.runAnalysisCommand(\n    filePath,\n    [\n      \"signatureHelp\",\n      filePath,\n      params.position.line,\n      params.position.character,\n      tmpname,\n      config.extensionConfiguration.signatureHelp?.forConstructorPayloads\n        ? 
\"true\"\n        : \"false\",\n    ],\n    msg,\n  );\n  fs.unlink(tmpname, () => null);\n  return response;\n}\n\nasync function definition(msg: p.RequestMessage) {\n  // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_definition\n  let params = msg.params as p.DefinitionParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let response = await utils.runAnalysisCommand(\n    filePath,\n    [\"definition\", filePath, params.position.line, params.position.character],\n    msg,\n  );\n  return response;\n}\n\nasync function typeDefinition(msg: p.RequestMessage) {\n  // https://microsoft.github.io/language-server-protocol/specification/specification-current/#textDocument_typeDefinition\n  let params = msg.params as p.TypeDefinitionParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let response = await utils.runAnalysisCommand(\n    filePath,\n    [\n      \"typeDefinition\",\n      filePath,\n      params.position.line,\n      params.position.character,\n    ],\n    msg,\n  );\n  return response;\n}\n\nasync function references(msg: p.RequestMessage) {\n  // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_references\n  let params = msg.params as p.ReferenceParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let result: typeof p.ReferencesRequest.type =\n    await utils.getReferencesForPosition(filePath, params.position);\n  let response: p.ResponseMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    id: msg.id,\n    result,\n    // error: code and message set in case an exception happens during the definition request.\n  };\n  return response;\n}\n\nasync function prepareRename(\n  msg: p.RequestMessage,\n): Promise<p.ResponseMessage> {\n  // 
https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_prepareRename\n  let params = msg.params as p.PrepareRenameParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n\n  // `prepareRename` was introduced in 12.0.0-beta.10\n  let projectRootPath = utils.findProjectRootOfFile(filePath);\n  let rescriptVersion =\n    (projectRootPath && projectsFiles.get(projectRootPath)?.rescriptVersion) ||\n    (await utils.findReScriptVersionForProjectRoot(projectRootPath ?? null));\n\n  let shouldUsePrepareRenameCommand = false;\n  if (rescriptVersion != null) {\n    shouldUsePrepareRenameCommand =\n      semver.valid(rescriptVersion) != null &&\n      semver.satisfies(rescriptVersion, \">=12.0.0-beta.10\", {\n        includePrerelease: true,\n      });\n  }\n\n  if (shouldUsePrepareRenameCommand) {\n    let analysisResult = await utils.runAnalysisAfterSanityCheck(filePath, [\n      \"prepareRename\",\n      filePath,\n      params.position.line,\n      params.position.character,\n    ]);\n\n    return {\n      jsonrpc: c.jsonrpcVersion,\n      id: msg.id,\n      result: analysisResult as p.PrepareRenameResult,\n    };\n  }\n\n  let locations: null | p.Location[] = await utils.getReferencesForPosition(\n    filePath,\n    params.position,\n  );\n  let result: p.Range | null = null;\n  if (locations !== null) {\n    locations.forEach((loc) => {\n      if (\n        utils.uriToNormalizedPath(loc.uri as utils.FileURI) ===\n        utils.uriToNormalizedPath(params.textDocument.uri as utils.FileURI)\n      ) {\n        let { start, end } = loc.range;\n        let pos = params.position;\n        if (\n          start.character <= pos.character &&\n          start.line <= pos.line &&\n          end.character >= pos.character &&\n          end.line >= pos.line\n        ) {\n          result = loc.range;\n        }\n      }\n    });\n  }\n  return {\n    jsonrpc: c.jsonrpcVersion,\n    
id: msg.id,\n    result,\n  };\n}\n\nasync function rename(msg: p.RequestMessage) {\n  // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_rename\n  let params = msg.params as p.RenameParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let documentChanges: (p.RenameFile | p.TextDocumentEdit)[] | null =\n    await utils.runAnalysisAfterSanityCheck(filePath, [\n      \"rename\",\n      filePath,\n      params.position.line,\n      params.position.character,\n      params.newName,\n    ]);\n  let result: WorkspaceEdit | null = null;\n  if (documentChanges !== null) {\n    result = { documentChanges };\n  }\n  let response: p.ResponseMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    id: msg.id,\n    result,\n  };\n  return response;\n}\n\nasync function documentSymbol(msg: p.RequestMessage) {\n  // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_documentSymbol\n  let params = msg.params as p.DocumentSymbolParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let extension = path.extname(params.textDocument.uri);\n  let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI);\n  let tmpname = utils.createFileInTempDir(extension);\n  fs.writeFileSync(tmpname, code, { encoding: \"utf-8\" });\n  let response = await utils.runAnalysisCommand(\n    filePath,\n    [\"documentSymbol\", tmpname],\n    msg,\n    /* projectRequired */ false,\n  );\n  fs.unlink(tmpname, () => null);\n  return response;\n}\n\nfunction askForAllCurrentConfiguration() {\n  // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_configuration\n  let params: p.ConfigurationParams = {\n    items: [\n      {\n        section: \"rescript.settings\",\n      },\n    ],\n  };\n  let req: p.RequestMessage = {\n    jsonrpc: 
c.jsonrpcVersion,\n    id: c.configurationRequestId,\n    method: p.ConfigurationRequest.type.method,\n    params,\n  };\n  send(req);\n}\n\nasync function semanticTokens(msg: p.RequestMessage) {\n  // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_semanticTokens\n  let params = msg.params as p.SemanticTokensParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let extension = path.extname(params.textDocument.uri);\n  let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI);\n  let tmpname = utils.createFileInTempDir(extension);\n  fs.writeFileSync(tmpname, code, { encoding: \"utf-8\" });\n  let response = await utils.runAnalysisCommand(\n    filePath,\n    [\"semanticTokens\", tmpname],\n    msg,\n    /* projectRequired */ false,\n  );\n  fs.unlink(tmpname, () => null);\n  return response;\n}\n\nasync function completion(msg: p.RequestMessage) {\n  // https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_completion\n  let params = msg.params as p.ReferenceParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI);\n  let tmpname = utils.createFileInTempDir();\n  fs.writeFileSync(tmpname, code, { encoding: \"utf-8\" });\n  let response = await utils.runAnalysisCommand(\n    filePath,\n    [\n      \"completion\",\n      filePath,\n      params.position.line,\n      params.position.character,\n      tmpname,\n    ],\n    msg,\n  );\n  fs.unlink(tmpname, () => null);\n  return response;\n}\n\nasync function completionResolve(msg: p.RequestMessage) {\n  const item = msg.params as p.CompletionItem;\n  let response: p.ResponseMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    id: msg.id,\n    result: item,\n  };\n\n  if (item.documentation == null && item.data != null) {\n   
 const data = item.data as { filePath: string; modulePath: string };\n    const normalizedFilePath = utils.normalizePath(data.filePath);\n    if (normalizedFilePath == null) {\n      return response;\n    }\n    let result = await utils.runAnalysisAfterSanityCheck(\n      normalizedFilePath,\n      [\"completionResolve\", data.filePath, data.modulePath],\n      true,\n    );\n    item.documentation = { kind: \"markdown\", value: result };\n  }\n\n  return response;\n}\n\nasync function codeAction(msg: p.RequestMessage): Promise<p.ResponseMessage> {\n  let params = msg.params as p.CodeActionParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI);\n  let extension = path.extname(params.textDocument.uri);\n  let tmpname = utils.createFileInTempDir(extension);\n\n  // Check local code actions coming from the diagnostics, or from incremental compilation.\n  let localResults: v.CodeAction[] = [];\n  const fromDiagnostics =\n    codeActionsFromDiagnostics[params.textDocument.uri as utils.FileURI] ?? [];\n  const fromIncrementalCompilation =\n    ic.getCodeActionsFromIncrementalCompilation(filePath) ?? 
[];\n  [...fromDiagnostics, ...fromIncrementalCompilation].forEach(\n    ({ range, codeAction }) => {\n      if (utils.rangeContainsRange(range, params.range)) {\n        localResults.push(codeAction);\n      }\n    },\n  );\n\n  fs.writeFileSync(tmpname, code, { encoding: \"utf-8\" });\n  let response = await utils.runAnalysisCommand(\n    filePath,\n    [\n      \"codeAction\",\n      filePath,\n      params.range.start.line,\n      params.range.start.character,\n      params.range.end.line,\n      params.range.end.character,\n      tmpname,\n    ],\n    msg,\n  );\n  fs.unlink(tmpname, () => null);\n\n  let { result } = response;\n\n  // We must send `null` when there are no results, empty array isn't enough.\n  let codeActions =\n    result != null && Array.isArray(result)\n      ? [...localResults, ...result]\n      : localResults;\n\n  let res: v.ResponseMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    id: msg.id,\n    result: codeActions.length > 0 ? codeActions : null,\n  };\n  return res;\n}\n\nfunction format(msg: p.RequestMessage): Array<p.Message> {\n  // technically, a formatting failure should reply with the error. Sadly\n  // the LSP alert box for these error replies sucks (e.g. doesn't actually\n  // display the message). In order to signal the client to display a proper\n  // alert box (sometime with actionable buttons), we need to first send\n  // back a fake success message (because each request mandates a\n  // response), then right away send a server notification to display a\n  // nicer alert. 
Ugh.\n  let fakeSuccessResponse: p.ResponseMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    id: msg.id,\n    result: [],\n  };\n  let params = msg.params as p.DocumentFormattingParams;\n  let filePath = utils.uriToNormalizedPath(\n    params.textDocument.uri as utils.FileURI,\n  );\n  let extension = path.extname(params.textDocument.uri);\n  if (extension !== c.resExt && extension !== c.resiExt) {\n    let params: p.ShowMessageParams = {\n      type: p.MessageType.Error,\n      message: `Not a ${c.resExt} or ${c.resiExt} file. Cannot format it.`,\n    };\n    let response: p.NotificationMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      method: \"window/showMessage\",\n      params: params,\n    };\n    return [fakeSuccessResponse, response];\n  } else {\n    // code will always be defined here, even though technically it can be undefined\n    let code = getOpenedFileContent(params.textDocument.uri as utils.FileURI);\n\n    let projectRootPath = utils.findProjectRootOfFile(filePath);\n    let project =\n      projectRootPath != null ? projectsFiles.get(projectRootPath) : null;\n    let bscExeBinaryPath = project?.bscBinaryLocation ?? null;\n\n    let formattedResult = utils.formatCode(bscExeBinaryPath, filePath, code);\n    if (formattedResult.kind === \"success\") {\n      let max = code.length;\n      let result: p.TextEdit[] = [\n        {\n          range: {\n            start: { line: 0, character: 0 },\n            end: { line: max, character: max },\n          },\n          newText: formattedResult.result,\n        },\n      ];\n      let response: p.ResponseMessage = {\n        jsonrpc: c.jsonrpcVersion,\n        id: msg.id,\n        result: result,\n      };\n      return [response];\n    } else {\n      // let the diagnostics logic display the updated syntax errors,\n      // from the build.\n      // Again, not sending the actual errors. 
See fakeSuccessResponse\n      // above for explanation\n      return [fakeSuccessResponse];\n    }\n  }\n}\n\nlet updateDiagnosticSyntax = async (\n  fileUri: utils.FileURI,\n  fileContent: string,\n) => {\n  if (config.extensionConfiguration.incrementalTypechecking?.enable) {\n    // The incremental typechecking already sends syntax diagnostics.\n    return;\n  }\n  let filePath = utils.uriToNormalizedPath(fileUri);\n  let extension = path.extname(filePath);\n  let tmpname = utils.createFileInTempDir(extension);\n  fs.writeFileSync(tmpname, fileContent, { encoding: \"utf-8\" });\n\n  // We need to account for any existing diagnostics from the compiler for this\n  // file. If we don't we might accidentally clear the current file's compiler\n  // diagnostics if there's no syntax diagostics to send. This is because\n  // publishing an empty diagnostics array is equivalent to saying \"clear all\n  // errors\".\n  let compilerDiagnosticsForFile =\n    getCurrentCompilerDiagnosticsForFile(fileUri);\n  let syntaxDiagnosticsForFile: p.Diagnostic[] =\n    await utils.runAnalysisAfterSanityCheck(filePath, [\n      \"diagnosticSyntax\",\n      tmpname,\n    ]);\n\n  let allDiagnostics = [\n    ...syntaxDiagnosticsForFile,\n    ...compilerDiagnosticsForFile,\n  ];\n\n  // Update filesWithDiagnostics to track this file\n  let projectRootPath = utils.findProjectRootOfFile(filePath);\n  let projectFile = utils.getProjectFile(projectRootPath);\n\n  if (projectFile != null) {\n    if (allDiagnostics.length > 0) {\n      projectFile.filesWithDiagnostics.add(fileUri);\n    } else {\n      projectFile.filesWithDiagnostics.delete(fileUri);\n    }\n  }\n\n  let notification: p.NotificationMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    method: \"textDocument/publishDiagnostics\",\n    params: {\n      uri: fileUri,\n      diagnostics: allDiagnostics,\n    },\n  };\n\n  fs.unlink(tmpname, () => null);\n\n  send(notification);\n};\n\nasync function createInterface(msg: 
p.RequestMessage): Promise<p.Message> {\n  let params = msg.params as p.TextDocumentIdentifier;\n  let extension = path.extname(params.uri);\n  let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI);\n  let projDir = utils.findProjectRootOfFile(filePath);\n\n  if (projDir === null) {\n    let params: p.ShowMessageParams = {\n      type: p.MessageType.Error,\n      message: `Cannot locate project directory to generate the interface file.`,\n    };\n\n    let response: p.NotificationMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      method: \"window/showMessage\",\n      params: params,\n    };\n\n    return response;\n  }\n\n  if (extension !== c.resExt) {\n    let params: p.ShowMessageParams = {\n      type: p.MessageType.Error,\n      message: `Not a ${c.resExt} file. Cannot create an interface for it.`,\n    };\n\n    let response: p.NotificationMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      method: \"window/showMessage\",\n      params: params,\n    };\n\n    return response;\n  }\n\n  let resPartialPath = filePath.split(projDir)[1];\n\n  // The .cmi filename may have a namespace suffix appended.\n  let namespaceResult = utils.getNamespaceNameFromConfigFile(projDir);\n\n  if (namespaceResult.kind === \"error\") {\n    let params: p.ShowMessageParams = {\n      type: p.MessageType.Error,\n      message: `Error reading ReScript config file.`,\n    };\n\n    let response: p.NotificationMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      method: \"window/showMessage\",\n      params,\n    };\n\n    return response;\n  }\n\n  let namespace = namespaceResult.result;\n  let suffixToAppend = namespace.length > 0 ? 
\"-\" + namespace : \"\";\n\n  let cmiPartialPath = path.join(\n    path.dirname(resPartialPath),\n    path.basename(resPartialPath, c.resExt) + suffixToAppend + c.cmiExt,\n  );\n\n  let cmiPath = path.join(projDir, c.compilerDirPartialPath, cmiPartialPath);\n  let cmiAvailable = fs.existsSync(cmiPath);\n\n  if (!cmiAvailable) {\n    let params: p.ShowMessageParams = {\n      type: p.MessageType.Error,\n      message: `No compiled interface file found. Please compile your project first.`,\n    };\n\n    let response: p.NotificationMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      method: \"window/showMessage\",\n      params,\n    };\n\n    return response;\n  }\n\n  let response = await utils.runAnalysisCommand(\n    filePath,\n    [\"createInterface\", filePath, cmiPath],\n    msg,\n  );\n  let result = typeof response.result === \"string\" ? response.result : \"\";\n\n  try {\n    let resiPath = lookup.replaceFileExtensionWithNormalizedPath(\n      filePath,\n      c.resiExt,\n    );\n    fs.writeFileSync(resiPath, result, { encoding: \"utf-8\" });\n    let response: p.ResponseMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      id: msg.id,\n      result: {\n        uri: utils.pathToURI(resiPath),\n      },\n    };\n    return response;\n  } catch (e) {\n    let response: p.ResponseMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      id: msg.id,\n      error: {\n        code: p.ErrorCodes.InternalError,\n        message: \"Unable to create interface file.\",\n      },\n    };\n    return response;\n  }\n}\n\n// Shared function to start build watcher for a project\n// Returns true if watcher was started or already running, false on failure\nasync function startBuildWatcher(\n  projectRootPath: utils.NormalizedPath,\n  rescriptBinaryPath: utils.NormalizedPath,\n  options?: {\n    createProjectStateIfMissing?: boolean;\n    monorepoRootPath?: utils.NormalizedPath | null;\n  },\n): Promise<boolean> {\n  let root = projectsFiles.get(projectRootPath);\n\n  // 
Create project state if missing and option is set\n  if (root == null && options?.createProjectStateIfMissing) {\n    const namespaceName = utils.getNamespaceNameFromConfigFile(projectRootPath);\n    root = {\n      openFiles: new Set(),\n      filesWithDiagnostics: new Set(),\n      filesDiagnostics: {},\n      namespaceName:\n        namespaceName.kind === \"success\" ? namespaceName.result : null,\n      rescriptVersion:\n        await utils.findReScriptVersionForProjectRoot(projectRootPath),\n      bsbWatcherByEditor: null,\n      buildRootPath: null,\n      bscBinaryLocation: await utils.findBscExeBinary(projectRootPath),\n      editorAnalysisLocation:\n        await utils.findEditorAnalysisBinary(projectRootPath),\n      hasPromptedToStartBuild: true, // Don't prompt since we're starting the build\n    };\n    projectsFiles.set(projectRootPath, root);\n  }\n\n  if (root == null) {\n    return false;\n  }\n\n  // If a build watcher is already running, return success\n  if (root.bsbWatcherByEditor != null) {\n    getLogger().info(`Build watcher already running for ${projectRootPath}`);\n    return true;\n  }\n\n  // Use monorepo root for cwd (monorepo support), fall back to project root\n  const buildCwd = options?.monorepoRootPath ?? projectRootPath;\n\n  getLogger().info(\n    `Starting build watcher for ${projectRootPath} in ${buildCwd} (ReScript ${root.rescriptVersion ?? 
\"unknown\"})`,\n  );\n  let bsbProcess = utils.runBuildWatcherUsingValidBuildPath(\n    rescriptBinaryPath,\n    buildCwd,\n    root.rescriptVersion,\n  );\n  root.bsbWatcherByEditor = bsbProcess;\n  root.buildRootPath = buildCwd;\n\n  return true;\n}\n\nasync function handleStartBuildRequest(\n  msg: p.RequestMessage,\n): Promise<p.ResponseMessage> {\n  let params = msg.params as p.TextDocumentIdentifier;\n  let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI);\n  let projectRootPath = utils.findProjectRootOfFile(filePath);\n\n  if (projectRootPath == null) {\n    return {\n      jsonrpc: c.jsonrpcVersion,\n      id: msg.id,\n      result: { success: false },\n    };\n  }\n\n  let rescriptBinaryPath = await findRescriptBinary(projectRootPath);\n  if (rescriptBinaryPath == null) {\n    return {\n      jsonrpc: c.jsonrpcVersion,\n      id: msg.id,\n      result: { success: false },\n    };\n  }\n\n  // Derive monorepo root from binary path for monorepo support\n  const monorepoRootPath =\n    utils.getMonorepoRootFromBinaryPath(rescriptBinaryPath);\n\n  const success = await startBuildWatcher(projectRootPath, rescriptBinaryPath, {\n    createProjectStateIfMissing: true,\n    monorepoRootPath,\n  });\n\n  return {\n    jsonrpc: c.jsonrpcVersion,\n    id: msg.id,\n    result: { success },\n  };\n}\n\nfunction openCompiledFile(msg: p.RequestMessage): p.Message {\n  let params = msg.params as p.TextDocumentIdentifier;\n  let filePath = utils.uriToNormalizedPath(params.uri as utils.FileURI);\n  let projDir = utils.findProjectRootOfFile(filePath);\n\n  if (projDir === null) {\n    let params: p.ShowMessageParams = {\n      type: p.MessageType.Error,\n      message: `Cannot locate project directory.`,\n    };\n\n    let response: p.NotificationMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      method: \"window/showMessage\",\n      params: params,\n    };\n\n    return response;\n  }\n\n  let compiledFilePath = utils.getCompiledFilePath(filePath, 
projDir);\n\n  if (\n    compiledFilePath.kind === \"error\" ||\n    !fs.existsSync(compiledFilePath.result)\n  ) {\n    let message =\n      compiledFilePath.kind === \"success\"\n        ? `No compiled file found. Expected it at: ${compiledFilePath.result}`\n        : `No compiled file found. Please compile your project first.`;\n\n    let params: p.ShowMessageParams = {\n      type: p.MessageType.Error,\n      message,\n    };\n\n    let response: p.NotificationMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      method: \"window/showMessage\",\n      params,\n    };\n\n    return response;\n  }\n\n  let response: p.ResponseMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    id: msg.id,\n    result: {\n      uri: utils.pathToURI(compiledFilePath.result),\n    },\n  };\n\n  return response;\n}\n\nasync function getServerVersion(): Promise<string | undefined> {\n  // Read the server version from package.json\n  let serverVersion: string | undefined;\n  try {\n    const packageJsonPath = path.join(__dirname, \"..\", \"package.json\");\n    const packageJsonContent = await fsAsync.readFile(packageJsonPath, {\n      encoding: \"utf-8\",\n    });\n    const packageJson: { version?: unknown } = JSON.parse(packageJsonContent);\n    serverVersion =\n      typeof packageJson.version === \"string\" ? 
packageJson.version : undefined;\n  } catch (e) {\n    // If we can't read the version, that's okay - we'll just omit it\n    serverVersion = undefined;\n  }\n  return serverVersion;\n}\n\nasync function dumpServerState(\n  msg: p.RequestMessage,\n): Promise<p.ResponseMessage> {\n  // Custom debug endpoint: dump current server state (config + projectsFiles)\n  try {\n    const serverVersion = await getServerVersion();\n\n    const projects = Array.from(projectsFiles.entries()).map(\n      ([projectRootPath, pf]) => ({\n        projectRootPath,\n        openFiles: Array.from(pf.openFiles),\n        filesWithDiagnostics: Array.from(pf.filesWithDiagnostics),\n        filesDiagnostics: pf.filesDiagnostics,\n        rescriptVersion: pf.rescriptVersion,\n        bscBinaryLocation: pf.bscBinaryLocation,\n        editorAnalysisLocation: pf.editorAnalysisLocation,\n        namespaceName: pf.namespaceName,\n        hasPromptedToStartBuild: pf.hasPromptedToStartBuild,\n        bsbWatcherByEditor:\n          pf.bsbWatcherByEditor != null\n            ? { pid: pf.bsbWatcherByEditor.pid ?? 
null }\n            : null,\n      }),\n    );\n\n    const state = {\n      lspServerVersion: serverVersion,\n      config: config.extensionConfiguration,\n      projects,\n      workspaceFolders: Array.from(workspaceFolders),\n      runtimePathCache: utils.getRuntimePathCacheSnapshot(),\n    };\n\n    // Format JSON with pretty-printing (2-space indent) on the server side\n    // This ensures consistent formatting and handles any Maps/Sets that might\n    // have been converted to plain objects/arrays above\n    const formattedJson = JSON.stringify(state, null, 2);\n\n    // Return the content so the client can create an unsaved document\n    // This avoids creating temporary files that would never be cleaned up\n    let response: p.ResponseMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      id: msg.id,\n      result: { content: formattedJson },\n    };\n    return response;\n  } catch (e) {\n    let response: p.ResponseMessage = {\n      jsonrpc: c.jsonrpcVersion,\n      id: msg.id,\n      error: {\n        code: p.ErrorCodes.InternalError,\n        message: `Failed to dump server state: ${String(e)}`,\n      },\n    };\n    return response;\n  }\n}\n\nasync function onMessage(msg: p.Message) {\n  if (p.Message.isNotification(msg)) {\n    // notification message, aka the client ends it and doesn't want a reply\n    if (!initialized && msg.method !== \"exit\") {\n      // From spec: \"Notifications should be dropped, except for the exit notification. This will allow the exit of a server without an initialize request\"\n      // For us: do nothing. We don't have anything we need to clean up right now\n      // TODO: we might have things we need to clean up now... 
like some watcher stuff\n    } else if (msg.method === \"exit\") {\n      // The server should exit with success code 0 if the shutdown request has been received before; otherwise with error code 1\n      if (shutdownRequestAlreadyReceived) {\n        process.exit(0);\n      } else {\n        process.exit(1);\n      }\n    } else if (msg.method === InitializedNotification.method) {\n      /*\n      The initialized notification is sent from the client to the server after the client received the result of the initialize request\n      but before the client is sending any other request or notification to the server.\n      The server can use the initialized notification, for example, to dynamically register capabilities.\n\n      We use this to register the file watchers for the project.\n      The client can watch files for us and send us events via the `workspace/didChangeWatchedFiles`\n      */\n      const watchers = Array.from(workspaceFolders).flatMap(\n        (projectRootPath) => [\n          {\n            // Only watch the root compiler log for each workspace folder.\n            // In monorepos, `**/lib/bs/.compiler.log` matches every package and dependency,\n            // causing a burst of events per save.\n            globPattern: {\n              baseUri: utils.pathToURI(projectRootPath),\n              pattern: c.compilerLogPartialPath,\n            },\n            kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete,\n          },\n          {\n            // Watch ninja output\n            globPattern: {\n              baseUri: utils.pathToURI(projectRootPath),\n              pattern: path.join(\"**\", c.buildNinjaPartialPath),\n            },\n            kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete,\n          },\n          {\n            // Watch build artifacts\n            globPattern: {\n              baseUri: utils.pathToURI(projectRootPath),\n              pattern: path.join(c.compilerDirPartialPath, 
\"**/*.{cmi,cmt}\"),\n            },\n            kind: p.WatchKind.Change | p.WatchKind.Create | p.WatchKind.Delete,\n          },\n        ],\n      );\n      const registrationParams: p.RegistrationParams = {\n        registrations: [\n          {\n            id: \"rescript_file_watcher\",\n            method: DidChangeWatchedFilesNotification.method,\n            registerOptions: {\n              watchers,\n            },\n          },\n        ],\n      };\n      const req: p.RequestMessage = {\n        jsonrpc: c.jsonrpcVersion,\n        id: serverSentRequestIdCounter++,\n        method: p.RegistrationRequest.method,\n        params: registrationParams,\n      };\n      send(req);\n    } else if (msg.method === DidChangeWatchedFilesNotification.method) {\n      const params = msg.params as p.DidChangeWatchedFilesParams;\n      await onWorkspaceDidChangeWatchedFiles(params);\n    } else if (msg.method === DidOpenTextDocumentNotification.method) {\n      let params = msg.params as p.DidOpenTextDocumentParams;\n      await openedFile(\n        params.textDocument.uri as utils.FileURI,\n        params.textDocument.text,\n      );\n\n      if (config.extensionConfiguration.incrementalTypechecking?.enable) {\n        // We run incremental typechecking to get the most accurate results\n        // the current file may have deviated from the last compilation.\n        updateOpenedFile(\n          params.textDocument.uri as utils.FileURI,\n          params.textDocument.text,\n        );\n      } else {\n        // Check the .compiler.log file for diagnostics\n        // This could be stale data of course.\n        await sendUpdatedDiagnostics();\n      }\n\n      await updateDiagnosticSyntax(\n        params.textDocument.uri as utils.FileURI,\n        params.textDocument.text,\n      );\n    } else if (msg.method === DidChangeTextDocumentNotification.method) {\n      let params = msg.params as p.DidChangeTextDocumentParams;\n      let extName = 
path.extname(params.textDocument.uri);\n      if (extName === c.resExt || extName === c.resiExt) {\n        let changes = params.contentChanges;\n        if (changes.length === 0) {\n          // no change?\n        } else {\n          // we currently only support full changes\n          updateOpenedFile(\n            params.textDocument.uri as utils.FileURI,\n            changes[changes.length - 1].text,\n          );\n          await updateDiagnosticSyntax(\n            params.textDocument.uri as utils.FileURI,\n            changes[changes.length - 1].text,\n          );\n        }\n      }\n    } else if (msg.method === DidCloseTextDocumentNotification.method) {\n      let params = msg.params as p.DidCloseTextDocumentParams;\n      await closedFile(params.textDocument.uri as utils.FileURI);\n    } else if (msg.method === DidChangeConfigurationNotification.type.method) {\n      // Can't seem to get this notification to trigger, but if it does this will be here and ensure we're synced up at the server.\n      askForAllCurrentConfiguration();\n    }\n  } else if (p.Message.isRequest(msg)) {\n    // request message, aka client sent request and waits for our mandatory reply\n    if (!initialized && msg.method !== \"initialize\") {\n      let response: p.ResponseMessage = {\n        jsonrpc: c.jsonrpcVersion,\n        id: msg.id,\n        error: {\n          code: p.ErrorCodes.ServerNotInitialized,\n          message: \"Server not initialized.\",\n        },\n      };\n      send(response);\n    } else if (msg.method === \"initialize\") {\n      const serverVersion = await getServerVersion();\n      getLogger().info(\n        `Received initialize request from client. 
Server version: ${serverVersion}`,\n      );\n      // Save initial configuration, if present\n      let initParams = msg.params as InitializeParams;\n      for (const workspaceFolder of initParams.workspaceFolders || []) {\n        workspaceFolders.add(\n          utils.uriToNormalizedPath(workspaceFolder.uri as utils.FileURI),\n        );\n      }\n      let initialConfiguration = initParams.initializationOptions\n        ?.extensionConfiguration as extensionConfiguration | undefined;\n\n      if (initialConfiguration != null) {\n        applyUserConfiguration(initialConfiguration);\n      }\n\n      // These are static configuration options the client can set to enable certain\n      let extensionClientCapabilitiesFromClient = initParams\n        .initializationOptions?.extensionClientCapabilities as\n        | extensionClientCapabilities\n        | undefined;\n\n      if (extensionClientCapabilitiesFromClient != null) {\n        extensionClientCapabilities = extensionClientCapabilitiesFromClient;\n      }\n\n      extensionClientCapabilities.supportsSnippetSyntax = Boolean(\n        initParams.capabilities.textDocument?.completion?.completionItem\n          ?.snippetSupport,\n      );\n\n      // send the list of features we support\n      let result: p.InitializeResult = {\n        // This tells the client: \"hey, we support the following operations\".\n        // Example: we want to expose \"jump-to-definition\".\n        // By adding `definitionProvider: true`, the client will now send \"jump-to-definition\" requests.\n        capabilities: {\n          // TODO: incremental sync?\n          textDocumentSync: v.TextDocumentSyncKind.Full,\n          documentFormattingProvider: true,\n          hoverProvider: true,\n          definitionProvider: true,\n          typeDefinitionProvider: true,\n          referencesProvider: true,\n          codeActionProvider: true,\n          renameProvider: { prepareProvider: true },\n          documentSymbolProvider: true,\n   
       completionProvider: {\n            triggerCharacters: [\".\", \">\", \"@\", \"~\", '\"', \"=\", \"(\"],\n            resolveProvider: true,\n          },\n          semanticTokensProvider: {\n            legend: {\n              tokenTypes: [\n                \"operator\",\n                \"variable\",\n                \"type\",\n                \"modifier\", // emit jsx-tag < and > in <div> as modifier\n                \"namespace\",\n                \"enumMember\",\n                \"property\",\n                \"interface\", // emit jsxlowercase, div in <div> as interface\n              ],\n              tokenModifiers: [],\n            },\n            documentSelector: [{ scheme: \"file\", language: \"rescript\" }],\n            // TODO: Support range for full, and add delta support\n            full: true,\n          },\n          inlayHintProvider: config.extensionConfiguration.inlayHints?.enable,\n          codeLensProvider: config.extensionConfiguration.codeLens\n            ? {\n                workDoneProgress: false,\n              }\n            : undefined,\n          signatureHelpProvider: config.extensionConfiguration.signatureHelp\n            ?.enabled\n            ? {\n                triggerCharacters: [\"(\"],\n                retriggerCharacters: [\"=\", \",\"],\n              }\n            : undefined,\n          executeCommandProvider: {\n            commands: [\"rescript/dumpServerState\"],\n          },\n        },\n      };\n      let response: p.ResponseMessage = {\n        jsonrpc: c.jsonrpcVersion,\n        id: msg.id,\n        result: result,\n      };\n      initialized = true;\n\n      // Periodically pull configuration from the client.\n      pullConfigurationPeriodically = setInterval(() => {\n        askForAllCurrentConfiguration();\n      }, c.pullConfigurationInterval);\n\n      send(response);\n    } else if (msg.method === \"initialized\") {\n      // sent from client after initialize. 
Nothing to do for now\n      let response: p.ResponseMessage = {\n        jsonrpc: c.jsonrpcVersion,\n        id: msg.id,\n        result: null,\n      };\n      send(response);\n    } else if (msg.method === \"shutdown\") {\n      // https://microsoft.github.io/language-server-protocol/specification#shutdown\n      if (shutdownRequestAlreadyReceived) {\n        let response: p.ResponseMessage = {\n          jsonrpc: c.jsonrpcVersion,\n          id: msg.id,\n          error: {\n            code: p.ErrorCodes.InvalidRequest,\n            message: `Language server already received the shutdown request`,\n          },\n        };\n        send(response);\n      } else {\n        shutdownRequestAlreadyReceived = true;\n        // TODO: recheck logic around init/shutdown...\n\n        if (pullConfigurationPeriodically != null) {\n          clearInterval(pullConfigurationPeriodically);\n        }\n\n        // Kill all build watchers on shutdown\n        for (const [projectPath, projectState] of projectsFiles) {\n          if (projectState.bsbWatcherByEditor != null) {\n            getLogger().info(`Killing build watcher for ${projectPath}`);\n            utils.killBuildWatcher(\n              projectState.bsbWatcherByEditor,\n              projectState.buildRootPath ?? 
undefined,\n            );\n            projectState.bsbWatcherByEditor = null;\n            projectState.buildRootPath = null;\n          }\n        }\n\n        let response: p.ResponseMessage = {\n          jsonrpc: c.jsonrpcVersion,\n          id: msg.id,\n          result: null,\n        };\n        send(response);\n      }\n    } else if (msg.method === p.HoverRequest.method) {\n      send(await hover(msg));\n    } else if (msg.method === p.DefinitionRequest.method) {\n      send(await definition(msg));\n    } else if (msg.method === p.TypeDefinitionRequest.method) {\n      send(await typeDefinition(msg));\n    } else if (msg.method === p.ReferencesRequest.method) {\n      send(await references(msg));\n    } else if (msg.method === p.PrepareRenameRequest.method) {\n      send(await prepareRename(msg));\n    } else if (msg.method === p.RenameRequest.method) {\n      send(await rename(msg));\n    } else if (msg.method === p.DocumentSymbolRequest.method) {\n      send(await documentSymbol(msg));\n    } else if (msg.method === p.CompletionRequest.method) {\n      send(await completion(msg));\n    } else if (msg.method === p.CompletionResolveRequest.method) {\n      send(await completionResolve(msg));\n    } else if (msg.method === p.SemanticTokensRequest.method) {\n      send(await semanticTokens(msg));\n    } else if (msg.method === p.CodeActionRequest.method) {\n      send(await codeAction(msg));\n    } else if (msg.method === p.DocumentFormattingRequest.method) {\n      let responses = format(msg);\n      responses.forEach((response) => send(response));\n    } else if (msg.method === createInterfaceRequest.method) {\n      send(await createInterface(msg));\n    } else if (msg.method === openCompiledFileRequest.method) {\n      send(openCompiledFile(msg));\n    } else if (msg.method === startBuildRequest.method) {\n      send(await handleStartBuildRequest(msg));\n    } else if (msg.method === p.InlayHintRequest.method) {\n      let params = msg.params as 
InlayHintParams;\n      let extName = path.extname(params.textDocument.uri);\n      if (extName === c.resExt) {\n        send(await inlayHint(msg));\n      }\n    } else if (msg.method === p.CodeLensRequest.method) {\n      let params = msg.params as CodeLensParams;\n      let extName = path.extname(params.textDocument.uri);\n      if (extName === c.resExt) {\n        send(await codeLens(msg));\n      }\n    } else if (msg.method === p.SignatureHelpRequest.method) {\n      let params = msg.params as SignatureHelpParams;\n      let extName = path.extname(params.textDocument.uri);\n      if (extName === c.resExt) {\n        send(await signatureHelp(msg));\n      }\n    } else if (msg.method === p.ExecuteCommandRequest.method) {\n      // Standard LSP executeCommand - supports editor-agnostic command execution\n      const params = msg.params as p.ExecuteCommandParams;\n      if (params.command === \"rescript/dumpServerState\") {\n        send(await dumpServerState(msg));\n      } else {\n        let response: p.ResponseMessage = {\n          jsonrpc: c.jsonrpcVersion,\n          id: msg.id,\n          error: {\n            code: p.ErrorCodes.InvalidRequest,\n            message: `Unknown command: ${params.command}`,\n          },\n        };\n        send(response);\n      }\n    } else {\n      let response: p.ResponseMessage = {\n        jsonrpc: c.jsonrpcVersion,\n        id: msg.id,\n        error: {\n          code: p.ErrorCodes.InvalidRequest,\n          message: \"Unrecognized editor request.\",\n        },\n      };\n      send(response);\n    }\n  } else if (p.Message.isResponse(msg)) {\n    if (msg.id === c.configurationRequestId) {\n      if (msg.result != null) {\n        // This is a response from a request to get updated configuration. Note\n        // that it seems to return the configuration in a way that lets the\n        // current workspace settings override the user settings. 
This is good\n        // as we get started, but _might_ be problematic further down the line\n        // if we want to support having several projects open at the same time\n        // without their settings overriding eachother. Not a problem now though\n        // as we'll likely only have \"global\" settings starting out.\n        let [configuration] = msg.result as [\n          extensionConfiguration | null | undefined,\n        ];\n        if (configuration != null) {\n          applyUserConfiguration(configuration);\n        }\n      }\n    } else if (\n      msg.result != null &&\n      // @ts-ignore\n      msg.result.title != null &&\n      // @ts-ignore\n      msg.result.title === c.startBuildAction\n    ) {\n      let msg_ = msg.result as clientSentBuildAction;\n      // Normalize the path since JSON serialization loses the branded type\n      // The type says it's NormalizedPath, so we ensure it actually is\n      let projectRootPath = utils.normalizePath(msg_.projectRootPath);\n      if (projectRootPath == null) {\n        // Should never happen, but handle gracefully and log a warning\n        console.warn(\n          \"[ReScript Language Server] Failed to normalize projectRootPath from clientSentBuildAction:\",\n          msg_.projectRootPath,\n        );\n        return;\n      }\n      // TODO: sometime stale lock file dangling\n      // TODO: close watcher when lang-server shuts down. 
However, by Node's\n      // default, these subprocesses are automatically killed when this\n      // language-server process exits\n      let rescriptBinaryPath = await findRescriptBinary(projectRootPath);\n      if (rescriptBinaryPath != null) {\n        // Derive monorepo root from binary path for monorepo support\n        const monorepoRootPath =\n          utils.getMonorepoRootFromBinaryPath(rescriptBinaryPath);\n        // Note: projectRootPath here might be the monorepo root (buildRootPath from prompt),\n        // which may not have project state if the file was opened from a subpackage.\n        // Use createProjectStateIfMissing to handle this case.\n        await startBuildWatcher(projectRootPath, rescriptBinaryPath, {\n          createProjectStateIfMissing: true,\n          monorepoRootPath,\n        });\n      }\n    }\n  }\n}\n\n// Gate behind a debug setting potentially?\nonErrorReported((msg) => {\n  let params: p.ShowMessageParams = {\n    type: p.MessageType.Warning,\n    message: `ReScript tooling: Internal error. Something broke. Here's the error message that you can report if you want:\n\n${msg}\n\n(this message will only be reported once every 15 minutes)`,\n  };\n  let message: p.NotificationMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    method: \"window/showMessage\",\n    params: params,\n  };\n  send(message);\n});\n"
  },
  {
    "path": "server/src/utils.ts",
    "content": "import * as childProcess from \"child_process\";\nimport * as p from \"vscode-languageserver-protocol\";\nimport * as path from \"path\";\nimport * as t from \"vscode-languageserver-types\";\nimport {\n  RequestMessage,\n  ResponseMessage,\n} from \"vscode-languageserver-protocol\";\nimport fs from \"fs\";\nimport fsAsync from \"fs/promises\";\nimport * as os from \"os\";\nimport semver from \"semver\";\nimport { fileURLToPath, pathToFileURL } from \"url\";\nimport { getLogger } from \"./logger\";\n\nimport {\n  findBinary as findSharedBinary,\n  getMonorepoRootFromBinaryPath as getMonorepoRootFromBinaryPathShared,\n  type BinaryName,\n} from \"../../shared/src/findBinary\";\nimport { findProjectRootOfFileInDir as findProjectRootOfFileInDirShared } from \"../../shared/src/projectRoots\";\nimport * as codeActions from \"./codeActions\";\nimport * as c from \"./constants\";\nimport * as lookup from \"./lookup\";\nimport { reportError } from \"./errorReporter\";\nimport config from \"./config\";\nimport { filesDiagnostics, projectsFiles, projectFiles } from \"./projectFiles\";\nimport { workspaceFolders } from \"./server\";\nimport { rewatchLockPartialPath, rescriptLockPartialPath } from \"./constants\";\nimport { findRescriptRuntimesInProject } from \"./find-runtime\";\n\n/**\n * Branded type for normalized file paths.\n *\n * All paths stored as keys in `projectsFiles` are normalized to ensure\n * consistent lookups and prevent path format mismatches (e.g., trailing\n * slashes, symlinks, relative vs absolute paths).\n *\n * Use `normalizePath()` to convert a regular path to a `NormalizedPath`.\n */\nexport type NormalizedPath = string & { __brand: \"NormalizedPath\" };\n\n/**\n * Branded type for file URIs (e.g., `file:///path/to/file.res`).\n *\n * This represents a URI as used in the Language Server Protocol.\n * Use `uriToNormalizedPath()` to convert a URI to a normalized file path.\n */\nexport type FileURI = string & { __brand: \"FileURI\" 
};\n\n/**\n * Normalizes a file path and returns it as a `NormalizedPath`.\n *\n * This function should be used whenever storing a path as a key in `projectsFiles`\n * or when comparing paths that need to match exactly.\n *\n * @param filePath - The path to normalize (can be null)\n * @returns The normalized path, or null if input was null\n */\nexport function normalizePath(filePath: string | null): NormalizedPath | null {\n  // `path.normalize` ensures we can assume string is now NormalizedPath\n  return filePath != null ? (path.normalize(filePath) as NormalizedPath) : null;\n}\n\n/**\n * Converts a file URI (e.g., `file:///path/to/file.res`) to a normalized file path.\n *\n * This is the preferred way to convert LSP DocumentUri values to file paths,\n * as it ensures the resulting path is normalized for consistent use throughout\n * the codebase.\n *\n * @param uri - The file URI to convert\n * @returns The normalized file path\n * @throws If the URI cannot be converted to a file path\n */\nexport function uriToNormalizedPath(uri: FileURI): NormalizedPath {\n  const filePath = fileURLToPath(uri);\n  // fileURLToPath always returns a string (throws on invalid URI), so we can directly normalize\n  return path.normalize(filePath) as NormalizedPath;\n}\n\nlet tempFilePrefix = \"rescript_format_file_\" + process.pid + \"_\";\nlet tempFileId = 0;\n\nexport let createFileInTempDir = (extension = \"\"): NormalizedPath => {\n  let tempFileName = tempFilePrefix + tempFileId + extension;\n  tempFileId = tempFileId + 1;\n  // `os.tmpdir` returns an absolute path, so `path.join` ensures we can assume string is now NormalizedPath\n  return path.join(os.tmpdir(), tempFileName) as NormalizedPath;\n};\n\nfunction findProjectRootOfFileInDir(\n  source: NormalizedPath,\n): NormalizedPath | null {\n  return normalizePath(findProjectRootOfFileInDirShared(source));\n}\n\n/**\n * Searches for a project root path in projectsFiles that contains the given file path.\n * Excludes exact 
matches - the source must be a file inside a project, not the project root itself.\n */\nfunction findProjectRootContainingFile(\n  source: NormalizedPath,\n): NormalizedPath | null {\n  let foundRootFromProjectFiles: NormalizedPath | null = null;\n  for (const rootPath of projectsFiles.keys()) {\n    // Both are normalized, so direct comparison works\n    // For files, we exclude exact matches (source !== rootPath)\n    if (source.startsWith(rootPath) && source !== rootPath) {\n      // Prefer the longest path (most nested)\n      if (\n        foundRootFromProjectFiles == null ||\n        rootPath.length > foundRootFromProjectFiles.length\n      ) {\n        foundRootFromProjectFiles = rootPath;\n      }\n    }\n  }\n\n  return foundRootFromProjectFiles;\n}\n\n/**\n * Searches for a project root path in projectsFiles that matches the given directory path.\n * Allows exact matches - the source can be the project root directory itself.\n */\nfunction findProjectRootMatchingDir(\n  source: NormalizedPath,\n): NormalizedPath | null {\n  let foundRootFromProjectFiles: NormalizedPath | null = null;\n  for (const rootPath of projectsFiles.keys()) {\n    // Both are normalized, so direct comparison works\n    // For directories, we allow exact matches\n    if (source.startsWith(rootPath)) {\n      // Prefer the longest path (most nested)\n      if (\n        foundRootFromProjectFiles == null ||\n        rootPath.length > foundRootFromProjectFiles.length\n      ) {\n        foundRootFromProjectFiles = rootPath;\n      }\n    }\n  }\n\n  return foundRootFromProjectFiles;\n}\n\n/**\n * Finds the project root for a given file path.\n * This is the main function used throughout the codebase for finding project roots.\n */\nexport function findProjectRootOfFile(\n  source: NormalizedPath,\n): NormalizedPath | null {\n  // First look in project files - exclude exact matches since we're looking for a file\n  let foundRoot = findProjectRootContainingFile(source);\n\n  if 
(foundRoot != null) {\n    return foundRoot;\n  }\n\n  // Fallback: search the filesystem\n  const foundPath = findProjectRootOfFileInDir(source);\n  return foundPath;\n}\n\n/**\n * Finds the project root for a given directory path.\n * This allows exact matches and is used for workspace/lockfile detection.\n */\nexport function findProjectRootOfDir(\n  source: NormalizedPath,\n): NormalizedPath | null {\n  // First look in project files - allow exact matches since we're looking for a directory\n  let foundRoot = findProjectRootMatchingDir(source);\n\n  if (foundRoot != null) {\n    return foundRoot;\n  }\n\n  // Fallback: search the filesystem\n  const foundPath = findProjectRootOfFileInDir(source);\n  return foundPath;\n}\n\n/**\n * Gets the project file for a given project root path.\n *\n * All keys in `projectsFiles` are normalized (see `openedFile` in server.ts).\n * This function accepts a normalized project root path (or null) and performs a direct lookup.\n * The path must already be normalized before calling this function.\n *\n * @param projectRootPath - The normalized project root path to look up (or null)\n * @returns The project file if found, null otherwise\n */\nexport let getProjectFile = (\n  projectRootPath: NormalizedPath | null,\n): projectFiles | null => {\n  if (projectRootPath == null) {\n    return null;\n  }\n  return projectsFiles.get(projectRootPath) ?? 
null;\n};\n\n// If ReScript < 12.0.0-alpha.13, then we want `{project_root}/node_modules/rescript/{c.platformDir}/{binary}`.\n// Otherwise, we want to dynamically import `{project_root}/node_modules/rescript` and from `binPaths` get the relevant binary.\n// We won't know which version is in the project root until we read and parse `{project_root}/node_modules/rescript/package.json`\nlet findBinary = async (\n  projectRootPath: NormalizedPath | null,\n  binary: BinaryName,\n): Promise<NormalizedPath | null> => {\n  const result = await findSharedBinary({\n    projectRootPath,\n    binary,\n    platformPath: config.extensionConfiguration.platformPath ?? null,\n  });\n  return normalizePath(result);\n};\n\nexport let findRescriptBinary = (projectRootPath: NormalizedPath | null) =>\n  findBinary(projectRootPath, \"rescript\");\n\nexport let getMonorepoRootFromBinaryPath = (\n  binaryPath: string | null,\n): NormalizedPath | null =>\n  normalizePath(getMonorepoRootFromBinaryPathShared(binaryPath));\n\nexport let findBscExeBinary = (projectRootPath: NormalizedPath | null) =>\n  findBinary(projectRootPath, \"bsc.exe\");\n\nexport let findEditorAnalysisBinary = (\n  projectRootPath: NormalizedPath | null,\n) => findBinary(projectRootPath, \"rescript-editor-analysis.exe\");\n\nexport let findRewatchBinary = (projectRootPath: NormalizedPath | null) =>\n  findBinary(projectRootPath, \"rewatch.exe\");\n\nexport let findRescriptExeBinary = (projectRootPath: NormalizedPath | null) =>\n  findBinary(projectRootPath, \"rescript.exe\");\n\ntype execResult<T = string> =\n  | {\n      kind: \"success\";\n      result: T;\n    }\n  | {\n      kind: \"error\";\n      error: string;\n    };\n\ntype formatCodeResult = execResult<string>;\n\nexport let formatCode = (\n  bscPath: p.DocumentUri | null,\n  filePath: string,\n  code: string,\n): formatCodeResult => {\n  let extension = path.extname(filePath);\n  let formatTempFileFullPath = createFileInTempDir(extension);\n  
fs.writeFileSync(formatTempFileFullPath, code, {\n    encoding: \"utf-8\",\n  });\n  try {\n    // It will try to use the user formatting binary.\n    // If not, use the one we ship with the analysis binary in the extension itself.\n    if (bscPath != null) {\n      let result = childProcess.execFileSync(bscPath, [\n        \"-color\",\n        \"never\",\n        \"-format\",\n        formatTempFileFullPath,\n      ]);\n      return {\n        kind: \"success\",\n        result: result.toString(),\n      };\n    } else {\n      throw new Error(\"Could not find ReScript compiler for project.\");\n    }\n  } catch (e) {\n    return {\n      kind: \"error\",\n      error: e instanceof Error ? e.message : String(e),\n    };\n  } finally {\n    // async close is fine. We don't use this file name again\n    fs.unlink(formatTempFileFullPath, () => null);\n  }\n};\n\nexport async function findReScriptVersionForProjectRoot(\n  projectRootPath: NormalizedPath | null,\n): Promise<string | undefined> {\n  if (projectRootPath == null) {\n    return undefined;\n  }\n\n  const bscExe = await findBscExeBinary(projectRootPath);\n\n  if (bscExe == null) {\n    return undefined;\n  }\n\n  try {\n    let version = childProcess.execSync(`${bscExe} -v`);\n    return version\n      .toString()\n      .replace(/rescript/gi, \"\")\n      .trim();\n  } catch (e) {\n    return undefined;\n  }\n}\n\n// This is the path for the _builtin_ legacy analysis, that works for versions 11 and below.\nlet builtinBinaryPath: string | null = null;\nif (fs.existsSync(c.builtinAnalysisDevPath)) {\n  builtinBinaryPath = c.builtinAnalysisDevPath;\n} else if (fs.existsSync(c.builtinAnalysisProdPath)) {\n  builtinBinaryPath = c.builtinAnalysisProdPath;\n}\n\nexport let runAnalysisAfterSanityCheck = async (\n  filePath: NormalizedPath,\n  args: Array<any>,\n  projectRequired = false,\n) => {\n  let projectRootPath = findProjectRootOfFile(filePath);\n  if (projectRootPath == null && projectRequired) {\n    
return null;\n  }\n  let rescriptVersion =\n    (projectRootPath\n      ? projectsFiles.get(projectRootPath)?.rescriptVersion\n      : null) ?? (await findReScriptVersionForProjectRoot(projectRootPath));\n\n  let binaryPath = builtinBinaryPath;\n\n  let project = projectRootPath ? projectsFiles.get(projectRootPath) : null;\n\n  /**\n   * All versions including 12.0.0-alpha.5 and above should use the analysis binary\n   * that now ships with the compiler. Previous versions use the legacy one we ship\n   * with the extension itself.\n   */\n  let shouldUseBuiltinAnalysis =\n    semver.valid(rescriptVersion) &&\n    semver.lt(rescriptVersion as string, \"12.0.0-alpha.5\");\n\n  if (!shouldUseBuiltinAnalysis && project != null) {\n    binaryPath = project.editorAnalysisLocation;\n  } else if (!shouldUseBuiltinAnalysis && project == null) {\n    // TODO: Warn user about broken state?\n    return null;\n  } else {\n    binaryPath = builtinBinaryPath;\n  }\n\n  let runtime: string | undefined = undefined;\n  if (semver.gt(rescriptVersion as string, \"12.0.0-rc.1\")) {\n    const runtimePath = await getRuntimePathFromProjectRoot(projectRootPath);\n    runtime = runtimePath ?? undefined;\n  }\n\n  let options: childProcess.ExecFileSyncOptions = {\n    cwd: projectRootPath || undefined,\n    maxBuffer: Infinity,\n    env: {\n      ...process.env,\n      RESCRIPT_VERSION: rescriptVersion,\n      RESCRIPT_INCREMENTAL_TYPECHECKING:\n        config.extensionConfiguration.incrementalTypechecking?.enable === true\n          ? \"true\"\n          : undefined,\n      RESCRIPT_PROJECT_CONFIG_CACHE:\n        config.extensionConfiguration.cache?.projectConfig?.enable === true\n          ? 
\"true\"\n          : undefined,\n      RESCRIPT_RUNTIME: runtime,\n    },\n  };\n\n  if (binaryPath == null) {\n    return null;\n  }\n\n  let stdout = \"\";\n  try {\n    stdout = childProcess.execFileSync(binaryPath, args, options).toString();\n    return JSON.parse(stdout);\n  } catch (e) {\n    console.error(e);\n    console.error(\"Original response: \", stdout);\n    console.error(\"Args: \", args);\n    // Element 0 is the action we're performing\n    reportError(String(args[0]), String(e));\n    return null;\n  }\n};\n\nexport let runAnalysisCommand = async (\n  filePath: NormalizedPath,\n  args: Array<any>,\n  msg: RequestMessage,\n  projectRequired = true,\n) => {\n  let result = await runAnalysisAfterSanityCheck(\n    filePath,\n    args,\n    projectRequired,\n  );\n  let response: ResponseMessage = {\n    jsonrpc: c.jsonrpcVersion,\n    id: msg.id,\n    result,\n  };\n  return response;\n};\n\nexport let getReferencesForPosition = async (\n  filePath: NormalizedPath,\n  position: p.Position,\n) =>\n  await runAnalysisAfterSanityCheck(filePath, [\n    \"references\",\n    filePath,\n    position.line,\n    position.character,\n  ]);\n\nexport const toCamelCase = (text: string): string => {\n  return text\n    .replace(/(?:^\\w|[A-Z]|\\b\\w)/g, (s: string) => s.toUpperCase())\n    .replace(/(\\s|-)+/g, \"\");\n};\n\n/**\n * Computes the workspace root path from a project root path by checking for rewatch/rescript lockfiles.\n * In a monorepo, this finds the parent project root that contains the workspace.\n * If a rewatch/rescript lockfile is found in the project root, it's a local package\n * in the workspace, so we return null (which will default to projectRootPath).\n */\nexport function computeWorkspaceRootPathFromLockfile(\n  projectRootPath: NormalizedPath | null,\n): NormalizedPath | null {\n  if (projectRootPath == null) {\n    return null;\n  }\n\n  const projectRewatchLockfiles = [\n    ...Array.from(workspaceFolders).map((w) =>\n      
path.resolve(w, rewatchLockPartialPath),\n    ),\n    ...Array.from(workspaceFolders).map((w) =>\n      path.resolve(w, rescriptLockPartialPath),\n    ),\n    path.resolve(projectRootPath, rewatchLockPartialPath),\n    path.resolve(projectRootPath, rescriptLockPartialPath),\n  ];\n\n  const foundRewatchLockfileInProjectRoot = projectRewatchLockfiles.some(\n    (lockFile) => fs.existsSync(lockFile),\n  );\n\n  // if we find a rewatch.lock in the project root, it's a compilation of a local package\n  // in the workspace.\n  return !foundRewatchLockfileInProjectRoot\n    ? findProjectRootOfDir(projectRootPath)\n    : null;\n}\n\n// Shared cache: key is either workspace root path or project root path\nconst runtimePathCache = new Map<NormalizedPath, NormalizedPath | null>();\n\n/**\n * Gets the runtime path from a workspace root path.\n * This function is cached per workspace root path.\n */\nexport async function getRuntimePathFromWorkspaceRoot(\n  workspaceRootPath: NormalizedPath,\n): Promise<NormalizedPath | null> {\n  // Check cache first\n  if (runtimePathCache.has(workspaceRootPath)) {\n    return runtimePathCache.get(workspaceRootPath)!;\n  }\n\n  // Compute and cache\n  let rescriptRuntime: NormalizedPath | null = normalizePath(\n    config.extensionConfiguration.runtimePath ?? null,\n  );\n\n  if (rescriptRuntime !== null) {\n    runtimePathCache.set(workspaceRootPath, rescriptRuntime);\n    return rescriptRuntime;\n  }\n\n  const rescriptRuntimes =\n    await findRescriptRuntimesInProject(workspaceRootPath);\n\n  const result = rescriptRuntimes.at(0) ?? 
null;\n  runtimePathCache.set(workspaceRootPath, result);\n  return result;\n}\n\n/**\n * Gets the runtime path from a project root path.\n * Computes the workspace root path and then resolves the runtime.\n * This function is cached per project root path.\n */\nexport async function getRuntimePathFromProjectRoot(\n  projectRootPath: NormalizedPath | null,\n): Promise<NormalizedPath | null> {\n  if (projectRootPath == null) {\n    return null;\n  }\n\n  // Check cache first (keyed by projectRootPath)\n  if (runtimePathCache.has(projectRootPath)) {\n    return runtimePathCache.get(projectRootPath)!;\n  }\n\n  // Compute workspace root and resolve runtime\n  const workspaceRootPath: NormalizedPath =\n    computeWorkspaceRootPathFromLockfile(projectRootPath) ?? projectRootPath;\n\n  // Check cache again with workspace root (might have been cached from a previous call)\n  if (runtimePathCache.has(workspaceRootPath)) {\n    const result = runtimePathCache.get(workspaceRootPath)!;\n    // Cache it under projectRootPath too for faster lookup next time\n    runtimePathCache.set(projectRootPath, result);\n    return result;\n  }\n\n  // Compute and cache\n  const result = await getRuntimePathFromWorkspaceRoot(workspaceRootPath);\n  // Cache it under both keys\n  runtimePathCache.set(workspaceRootPath, result);\n  runtimePathCache.set(projectRootPath, result);\n  return result;\n}\n\n/**\n * Returns a snapshot of the runtime path cache as a plain object.\n * Useful for debugging and state dumps.\n */\nexport function getRuntimePathCacheSnapshot(): Record<string, string | null> {\n  return Object.fromEntries(runtimePathCache);\n}\n\nexport const getNamespaceNameFromConfigFile = (\n  projDir: NormalizedPath,\n): execResult<string> => {\n  let config = lookup.readConfig(projDir);\n  let result = \"\";\n\n  if (!config) {\n    return {\n      kind: \"error\",\n      error: \"Could not read ReScript config file\",\n    };\n  }\n\n  if (config.namespace === true) {\n    result = 
toCamelCase(config.name);\n  } else if (typeof config.namespace === \"string\") {\n    result = toCamelCase(config.namespace);\n  }\n\n  return {\n    kind: \"success\",\n    result,\n  };\n};\n\nexport let getCompiledFilePath = (\n  filePath: string,\n  projDir: NormalizedPath,\n): execResult<NormalizedPath> => {\n  let error: execResult<NormalizedPath> = {\n    kind: \"error\",\n    error: \"Could not read ReScript config file\",\n  };\n  let partialFilePath = filePath.split(projDir)[1];\n  let compiledPath = lookup.getFilenameFromBsconfig(projDir, partialFilePath);\n\n  if (!compiledPath) {\n    return error;\n  }\n\n  let result = compiledPath;\n\n  // If the file is not found, lookup a possible root bsconfig that may contain\n  // info about the possible location of the file.\n  if (!fs.existsSync(result)) {\n    let compiledPath = lookup.getFilenameFromRootBsconfig(\n      projDir,\n      partialFilePath,\n    );\n\n    if (!compiledPath) {\n      return error;\n    }\n\n    result = compiledPath;\n  }\n\n  // Normalize the path before returning\n  const normalizedResult = normalizePath(result)!;\n\n  return {\n    kind: \"success\",\n    result: normalizedResult,\n  };\n};\n\nexport let runBuildWatcherUsingValidBuildPath = (\n  buildPath: p.DocumentUri,\n  projectRootPath: p.DocumentUri,\n  rescriptVersion?: string | null,\n) => {\n  let cwdEnv = {\n    cwd: projectRootPath,\n  };\n  // ReScript >= 12.0.0 uses \"rescript watch\" instead of \"rescript build -w\"\n  let useWatchCommand =\n    rescriptVersion != null &&\n    semver.valid(rescriptVersion) != null &&\n    semver.gte(rescriptVersion, \"12.0.0\");\n  let args = useWatchCommand ? 
[\"watch\"] : [\"build\", \"-w\"];\n\n  getLogger().info(\n    `Running build watcher: ${buildPath} ${args.join(\" \")} in ${projectRootPath}`,\n  );\n\n  let proc: childProcess.ChildProcess;\n  if (process.platform === \"win32\") {\n    /*\n      - a node.js script in node_modules/.bin on windows is wrapped in a\n        batch script wrapper (there's also a regular binary of the same name on\n        windows, but that one's a shell script wrapper for cygwin). More info:\n        https://github.com/npm/cmd-shim/blob/c5118da34126e6639361fe9706a5ff07e726ed45/index.js#L1\n      - a batch script adds the suffix .cmd to the script\n      - you can't call batch scripts through the regular `execFile`:\n        https://nodejs.org/api/child_process.html#child_process_spawning_bat_and_cmd_files_on_windows\n      - So you have to use `exec` instead, and make sure you quote the path\n        (since the path might have spaces), which `execFile` would have done\n        for you under the hood\n    */\n    proc = childProcess.exec(`\"${buildPath}\".cmd ${args.join(\" \")}`, cwdEnv);\n  } else {\n    // Use spawn with detached:true so we can kill the entire process group later\n    // This ensures child processes (like native rescript binary) are also killed\n    // Use \"pipe\" for stdin instead of \"ignore\" because older ReScript versions (9.x, 10.x, 11.x)\n    // have a handler that exits when stdin closes: `process.stdin.on(\"close\", exitProcess)`\n    proc = childProcess.spawn(buildPath, args, {\n      ...cwdEnv,\n      detached: true,\n      stdio: [\"pipe\", \"pipe\", \"pipe\"],\n    });\n  }\n\n  proc.on(\"error\", (err) => {\n    getLogger().error(`Build watcher error: ${err.message}`);\n  });\n\n  proc.on(\"exit\", (code, signal) => {\n    getLogger().info(\n      `Build watcher exited with code ${code}, signal ${signal}`,\n    );\n  });\n\n  if (proc.stdout) {\n    proc.stdout.on(\"data\", (data) => {\n      getLogger().log(`[build stdout] 
${data.toString().trim()}`);\n    });\n  }\n\n  if (proc.stderr) {\n    proc.stderr.on(\"data\", (data) => {\n      getLogger().log(`[build stderr] ${data.toString().trim()}`);\n    });\n  }\n\n  return proc;\n};\n\n/**\n * Kill a build watcher process and all its children, and clean up the lock file.\n * On Unix, kills the entire process group if the process was started with detached:true.\n * Also removes the lock file since the rescript compiler doesn't clean it up on SIGTERM.\n */\nexport let killBuildWatcher = (\n  proc: childProcess.ChildProcess,\n  buildRootPath?: string,\n): void => {\n  if (proc.pid == null) {\n    return;\n  }\n  try {\n    if (process.platform !== \"win32\") {\n      // Kill the entire process group (negative PID)\n      // This ensures child processes spawned by the JS wrapper are also killed\n      process.kill(-proc.pid, \"SIGTERM\");\n    } else {\n      proc.kill();\n    }\n  } catch (e) {\n    // Process might already be dead\n    getLogger().log(`Error killing build watcher: ${e}`);\n  }\n\n  // Clean up lock files since the rescript compiler doesn't remove them on SIGTERM\n  // ReScript >= 12 uses lib/rescript.lock, older versions use .bsb.lock\n  if (buildRootPath != null) {\n    const lockFiles = [\n      path.join(buildRootPath, \"lib\", \"rescript.lock\"),\n      path.join(buildRootPath, \".bsb.lock\"),\n    ];\n    for (const lockFilePath of lockFiles) {\n      try {\n        if (fs.existsSync(lockFilePath)) {\n          fs.unlinkSync(lockFilePath);\n          getLogger().log(`Removed lock file: ${lockFilePath}`);\n        }\n      } catch (e) {\n        getLogger().log(`Error removing lock file: ${e}`);\n      }\n    }\n  }\n};\n\n// Logic for parsing .compiler.log\n/* example .compiler.log content:\n\n#Start(1600519680823)\n\n  Syntax error!\n  /Users/chenglou/github/reason-react/src/test.res:1:8-2:3\n\n  1 │ let a =\n  2 │ let b =\n  3 │\n\n  This let-binding misses an expression\n\n\n  Warning number 8\n  
/Users/chenglou/github/reason-react/src/test.res:3:5-8\n\n  1 │ let a = j`😀`\n  2 │ let b = `😀`\n  3 │ let None = None\n  4 │ let bla: int = \"\n  5 │   hi\n\n  You forgot to handle a possible case here, for example:\n  Some _\n\n\n  We've found a bug for you!\n  /Users/chenglou/github/reason-react/src/test.res:3:9\n\n  1 │ let a = 1\n  2 │ let b = \"hi\"\n  3 │ let a = b + 1\n\n  This has type: string\n  Somewhere wanted: int\n\n#Done(1600519680836)\n*/\n\n// parser helpers\nexport let pathToURI = (file: NormalizedPath): FileURI => {\n  // `pathToFileURL` ensures we can assume string is now FileURI\n  return pathToFileURL(file).toString() as FileURI;\n};\nlet parseFileAndRange = (fileAndRange: string) => {\n  // https://github.com/rescript-lang/rescript-compiler/blob/0a3f4bb32ca81e89cefd5a912b8795878836f883/jscomp/super_errors/super_location.ml#L15-L25\n  /* The file + location format can be:\n    a/b.res <- fallback, no location available (usually due to bad ppx...)\n    a/b.res:10:20\n    a/b.res:10:20-21     <- last number here is the end char of line 10\n    a/b.res:10:20-30:11\n  */\n  let regex = /(.+)\\:(\\d+)\\:(\\d+)(-(\\d+)(\\:(\\d+))?)?$/;\n  /*            ^^ file\n                      ^^^ start line\n                             ^^^ start character\n                                  ^ optional range\n                                    ^^^ end line or chararacter\n                                            ^^^ end character\n  */\n  // for the trimming, see https://github.com/rescript-lang/rescript-vscode/pull/71#issuecomment-769160576\n  let trimmedFileAndRange = fileAndRange.trim();\n  let match = trimmedFileAndRange.match(regex);\n  if (match === null) {\n    // no location! 
Though LSP insist that we provide at least a dummy location\n    const normalizedPath = normalizePath(trimmedFileAndRange)!;\n    return {\n      file: pathToURI(normalizedPath),\n      range: {\n        start: { line: 0, character: 0 },\n        end: { line: 0, character: 0 },\n      },\n    };\n  }\n\n  let [\n    _source,\n    file,\n    startLine,\n    startChar,\n    optionalEndGroup,\n    endLineOrChar,\n    _colonPlusEndCharOrNothing,\n    endCharOrNothing,\n  ] = match;\n\n  // language-server position is 0-based. Ours is 1-based. Convert\n  // also, our end character is inclusive. Language-server's is exclusive\n  let range;\n  if (optionalEndGroup == null) {\n    let start = {\n      line: parseInt(startLine) - 1,\n      character: parseInt(startChar),\n    };\n    range = {\n      start: start,\n      end: start,\n    };\n  } else {\n    let isSingleLine = endCharOrNothing == null;\n    let [endLine, endChar] = isSingleLine\n      ? [startLine, endLineOrChar]\n      : [endLineOrChar, endCharOrNothing];\n    range = {\n      start: {\n        line: parseInt(startLine) - 1,\n        character: parseInt(startChar) - 1,\n      },\n      end: { line: parseInt(endLine) - 1, character: parseInt(endChar) },\n    };\n  }\n  const normalizedFile = normalizePath(file)!;\n  return {\n    file: pathToURI(normalizedFile),\n    range,\n  };\n};\n\n// main parsing logic\ntype parsedCompilerLogResult = {\n  done: boolean;\n  result: filesDiagnostics;\n  codeActions: codeActions.filesCodeActions;\n  linesWithParseErrors: string[];\n};\nexport let parseCompilerLogOutput = async (\n  content: string,\n): Promise<parsedCompilerLogResult> => {\n  type parsedDiagnostic = {\n    code: number | undefined;\n    severity: t.DiagnosticSeverity;\n    tag: t.DiagnosticTag | undefined;\n    content: string[];\n  };\n  let parsedDiagnostics: parsedDiagnostic[] = [];\n  let linesWithParseErrors: string[] = [];\n  let lines = content.split(os.EOL);\n  let done = false;\n\n  for (let i = 
0; i < lines.length; i++) {\n    let line = lines[i];\n    if (line.startsWith(\"  We've found a bug for you!\")) {\n      parsedDiagnostics.push({\n        code: undefined,\n        severity: t.DiagnosticSeverity.Error,\n        tag: undefined,\n        content: [],\n      });\n    } else if (\n      line.startsWith(\"FAILED: cannot make progress due to previous errors.\")\n    ) {\n      // skip\n    } else if (line.startsWith(\"FAILED: dependency cycle\")) {\n      // skip as we can't extract a filepath from this error message\n    } else if (line.startsWith(\"FAILED:\")) {\n      parsedDiagnostics.push({\n        code: undefined,\n        severity: t.DiagnosticSeverity.Error,\n        tag: undefined,\n        content: [line],\n      });\n    } else if (line.startsWith(\"Fatal error:\")) {\n      parsedDiagnostics.push({\n        code: undefined,\n        severity: t.DiagnosticSeverity.Error,\n        tag: undefined,\n        content: [line],\n      });\n    } else if (line.startsWith(\"  Warning number \")) {\n      let warningNumber = parseInt(line.slice(\"  Warning number \".length));\n      let tag: t.DiagnosticTag | undefined = undefined;\n      switch (warningNumber) {\n        case 11:\n        case 20:\n        case 26:\n        case 27:\n        case 32:\n        case 33:\n        case 34:\n        case 35:\n        case 36:\n        case 37:\n        case 38:\n        case 39:\n        case 60:\n        case 66:\n        case 67:\n        case 101:\n          tag = t.DiagnosticTag.Unnecessary;\n          break;\n        case 3:\n          tag = t.DiagnosticTag.Deprecated;\n          break;\n      }\n      let severity = line.includes(\"(configured as error)\")\n        ? t.DiagnosticSeverity.Error\n        : t.DiagnosticSeverity.Warning;\n      parsedDiagnostics.push({\n        code: Number.isNaN(warningNumber) ? 
undefined : warningNumber,\n        severity,\n        tag: tag,\n        content: [],\n      });\n    } else if (line.startsWith(\"  Syntax error!\")) {\n      parsedDiagnostics.push({\n        code: undefined,\n        severity: t.DiagnosticSeverity.Error,\n        tag: undefined,\n        content: [],\n      });\n    } else if (line.startsWith(\"  Warning genType\")) {\n      parsedDiagnostics.push({\n        code: undefined,\n        severity: t.DiagnosticSeverity.Error,\n        tag: undefined,\n        content: [],\n      });\n    } else if (line.startsWith(\"#Start(\")) {\n      // do nothing for now\n    } else if (line.startsWith(\"#Done(\")) {\n      done = true;\n    } else if (\n      line.startsWith(\"File \") &&\n      i + 1 < lines.length &&\n      lines[i + 1].startsWith(\"Warning \")\n    ) {\n      // OCaml warning: skip\n      i++;\n    } else if (\n      line.startsWith(\"File \") &&\n      i + 1 < lines.length &&\n      lines[i + 1].startsWith(\"Error: Syntax error\")\n    ) {\n      // OCaml Syntax Error\n      parsedDiagnostics.push({\n        code: undefined,\n        severity: t.DiagnosticSeverity.Error,\n        tag: undefined,\n        content: [lines[i], lines[i + 1]],\n      });\n      i++;\n    } else if (/^  +([0-9]+| +|\\.) (│|┆)/.test(line)) {\n      //         ^^ indent\n      //           ^^^^^^^^^^^^^^^ gutter\n      //                           ^^^^^   separator\n      // swallow code display. Examples:\n      //   10 │\n      //    . 
│\n      //      │\n      //   10 ┆\n    } else if (line.startsWith(\"  \")) {\n      // part of the actual diagnostics message\n      if (parsedDiagnostics[parsedDiagnostics.length - 1] == null) {\n        linesWithParseErrors.push(line);\n      } else {\n        parsedDiagnostics[parsedDiagnostics.length - 1].content.push(\n          line.slice(2),\n        );\n      }\n    } else if (line.trim() != \"\") {\n      // We'll assume that everything else is also part of the diagnostics too.\n      // Most of these should have been indented 2 spaces; sadly, some of them\n      // aren't (e.g. outcome printer printing badly, and certain old ocaml type\n      // messages not printing with indent). We used to get bug reports and fix\n      // the messages, but that strategy turned out too slow. One day we should\n      // revert to not having this branch...\n      if (parsedDiagnostics[parsedDiagnostics.length - 1] == null) {\n        linesWithParseErrors.push(line);\n      } else {\n        parsedDiagnostics[parsedDiagnostics.length - 1].content.push(line);\n      }\n    }\n  }\n\n  let result: filesDiagnostics = {};\n  let foundCodeActions: codeActions.filesCodeActions = {};\n\n  for (const parsedDiagnostic of parsedDiagnostics) {\n    let [fileAndRangeLine, ...diagnosticMessage] = parsedDiagnostic.content;\n    let { file, range } = parseFileAndRange(fileAndRangeLine);\n\n    if (result[file] == null) {\n      result[file] = [];\n    }\n\n    // remove start and end whitespaces/newlines\n    let message = diagnosticMessage.join(\"\\n\").trim();\n\n    // vscode.Diagnostic throws an error if `message` is a blank string\n    if (message != \"\") {\n      let diagnostic: p.Diagnostic = {\n        severity: parsedDiagnostic.severity,\n        tags: parsedDiagnostic.tag === undefined ? 
[] : [parsedDiagnostic.tag],\n        code: parsedDiagnostic.code,\n        range,\n        source: \"ReScript\",\n        message,\n      };\n\n      // Check for potential code actions\n      await codeActions.findCodeActionsInDiagnosticsMessage({\n        addFoundActionsHere: foundCodeActions,\n        diagnostic,\n        diagnosticMessage,\n        file,\n        range,\n      });\n\n      result[file].push(diagnostic);\n    }\n  }\n\n  return {\n    done,\n    result,\n    codeActions: foundCodeActions,\n    linesWithParseErrors,\n  };\n};\n\nexport let rangeContainsRange = (\n  range: p.Range,\n  otherRange: p.Range,\n): boolean => {\n  if (\n    otherRange.start.line < range.start.line ||\n    otherRange.end.line < range.start.line\n  ) {\n    return false;\n  }\n  if (\n    otherRange.start.line > range.end.line ||\n    otherRange.end.line > range.end.line\n  ) {\n    return false;\n  }\n  if (\n    otherRange.start.line === range.start.line &&\n    otherRange.start.character < range.start.character\n  ) {\n    return false;\n  }\n  if (\n    otherRange.end.line === range.end.line &&\n    otherRange.end.character > range.end.character\n  ) {\n    return false;\n  }\n  return true;\n};\n"
  },
  {
    "path": "server/tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"target\": \"es2019\",\n    \"lib\": [\"ES2019\"],\n    \"module\": \"commonjs\",\n    \"moduleResolution\": \"node\",\n    \"sourceMap\": true,\n    \"strict\": true,\n    \"outDir\": \"out\",\n    \"rootDirs\": [\"src\", \"../shared/src\"],\n    \"esModuleInterop\": true\n  },\n  \"include\": [\"src\", \"../shared/src\"],\n  \"exclude\": [\"node_modules\"]\n}\n"
  },
  {
    "path": "shared/src/findBinary.ts",
    "content": "import * as fs from \"fs\";\nimport * as fsAsync from \"fs/promises\";\nimport * as path from \"path\";\nimport * as semver from \"semver\";\nimport * as url from \"url\";\n\nexport type BinaryName =\n  | \"bsc.exe\"\n  | \"rescript-editor-analysis.exe\"\n  | \"rescript-tools.exe\"\n  | \"rescript\"\n  | \"rewatch.exe\"\n  | \"rescript.exe\";\n\ntype FindBinaryOptions = {\n  projectRootPath: string | null;\n  binary: BinaryName;\n  platformPath?: string | null;\n};\n\nconst compilerInfoPartialPath = path.join(\"lib\", \"bs\", \"compiler-info.json\");\n// For arm64, try the arm64-specific directory first (e.g., darwinarm64),\n// then fall back to the generic platform directory (e.g., darwin) for older ReScript versions\nconst platformDirArm64 =\n  process.arch === \"arm64\" ? process.platform + process.arch : null;\nconst platformDirGeneric = process.platform;\n\nconst normalizePath = (filePath: string | null): string | null => {\n  return filePath != null ? path.normalize(filePath) : null;\n};\n\nconst findFilePathFromProjectRoot = (\n  directory: string | null,\n  filePartialPath: string,\n): string | null => {\n  if (directory == null) {\n    return null;\n  }\n\n  const filePath = path.join(directory, filePartialPath);\n  if (fs.existsSync(filePath)) {\n    return normalizePath(filePath);\n  }\n\n  const parentDirStr = path.dirname(directory);\n  if (parentDirStr === directory) {\n    return null;\n  }\n\n  return findFilePathFromProjectRoot(\n    normalizePath(parentDirStr),\n    filePartialPath,\n  );\n};\n\nexport const findBinary = async ({\n  projectRootPath,\n  binary,\n  platformPath,\n}: FindBinaryOptions): Promise<string | null> => {\n  if (platformPath != null) {\n    const result = path.join(platformPath, binary);\n    return normalizePath(result);\n  }\n\n  if (projectRootPath !== null) {\n    try {\n      const compilerInfo = path.resolve(\n        projectRootPath,\n        compilerInfoPartialPath,\n      );\n      const contents = 
await fsAsync.readFile(compilerInfo, \"utf8\");\n      const compileInfo = JSON.parse(contents);\n      if (compileInfo && compileInfo.bsc_path) {\n        const bscPath = compileInfo.bsc_path;\n        if (binary === \"bsc.exe\") {\n          return normalizePath(bscPath);\n        } else if (binary !== \"rescript\") {\n          // For native binaries (not \"rescript\" JS wrapper), use the bsc_path directory\n          const binaryPath = path.join(path.dirname(bscPath), binary);\n          return normalizePath(binaryPath);\n        }\n        // For \"rescript\", fall through to find the JS wrapper below\n      }\n    } catch {}\n  }\n\n  const rescriptDir = findFilePathFromProjectRoot(\n    projectRootPath,\n    path.join(\"node_modules\", \"rescript\"),\n  );\n  if (rescriptDir == null) {\n    return null;\n  }\n\n  let rescriptVersion = null;\n  let rescriptJSWrapperPath = null;\n  try {\n    const rescriptPackageJSONPath = path.join(rescriptDir, \"package.json\");\n    const rescriptPackageJSON = JSON.parse(\n      await fsAsync.readFile(rescriptPackageJSONPath, \"utf-8\"),\n    );\n    rescriptVersion = rescriptPackageJSON.version;\n    rescriptJSWrapperPath = rescriptPackageJSON.bin.rescript;\n  } catch {\n    return null;\n  }\n\n  let binaryPath: string | null = null;\n  if (binary === \"rescript\") {\n    binaryPath = path.join(rescriptDir, rescriptJSWrapperPath);\n  } else if (semver.gte(rescriptVersion, \"12.0.0-alpha.13\")) {\n    const target = `${process.platform}-${process.arch}`;\n    const targetPackagePath = path.join(\n      fs.realpathSync(rescriptDir),\n      \"..\",\n      `@rescript/${target}/bin.js`,\n    );\n    const { binPaths } = await import(url.fileURLToPath(targetPackagePath));\n\n    if (binary === \"bsc.exe\") {\n      binaryPath = binPaths.bsc_exe;\n    } else if (binary === \"rescript-editor-analysis.exe\") {\n      binaryPath = binPaths.rescript_editor_analysis_exe;\n    } else if (binary === \"rewatch.exe\") {\n      
binaryPath = binPaths.rewatch_exe;\n    } else if (binary === \"rescript.exe\") {\n      binaryPath = binPaths.rescript_exe;\n    }\n  } else {\n    // For older ReScript versions (< 12.0.0-alpha.13), try arm64-specific directory first,\n    // then fall back to generic platform directory (older versions don't have arm64 directories)\n    if (platformDirArm64 != null) {\n      const arm64Path = path.join(rescriptDir, platformDirArm64, binary);\n      if (fs.existsSync(arm64Path)) {\n        binaryPath = arm64Path;\n      }\n    }\n    if (binaryPath == null) {\n      binaryPath = path.join(rescriptDir, platformDirGeneric, binary);\n    }\n  }\n\n  if (binaryPath != null && fs.existsSync(binaryPath)) {\n    return normalizePath(binaryPath);\n  }\n\n  return null;\n};\n\n/**\n * Derives the monorepo root directory from a binary path.\n * For a path like `/monorepo/node_modules/.bin/rescript`, returns `/monorepo`.\n * This is useful for monorepo support where the binary is in the monorepo root's\n * node_modules, but the project root (nearest rescript.json) might be a subpackage.\n */\nexport const getMonorepoRootFromBinaryPath = (\n  binaryPath: string | null,\n): string | null => {\n  if (binaryPath == null) {\n    return null;\n  }\n  const match = binaryPath.match(/^(.*?)[\\\\/]+node_modules[\\\\/]+/);\n  return match ? normalizePath(match[1]) : null;\n};\n"
  },
  {
    "path": "shared/src/projectRoots.ts",
    "content": "import * as fs from \"fs\";\nimport * as path from \"path\";\n\nexport const normalizePath = (filePath: string | null): string | null => {\n  return filePath != null ? path.normalize(filePath) : null;\n};\n\nexport const findProjectRootOfFileInDir = (source: string): string | null => {\n  const normalizedSource = normalizePath(source);\n  if (normalizedSource == null) {\n    return null;\n  }\n\n  const dir = normalizePath(path.dirname(normalizedSource));\n  if (dir == null) {\n    return null;\n  }\n\n  if (\n    fs.existsSync(path.join(dir, \"rescript.json\")) ||\n    fs.existsSync(path.join(dir, \"bsconfig.json\"))\n  ) {\n    return dir;\n  }\n\n  if (dir === normalizedSource) {\n    return null;\n  }\n\n  return findProjectRootOfFileInDir(dir);\n};\n\nexport const findProjectRootOfFile = (source: string): string | null => {\n  const normalizedSource = normalizePath(source);\n  if (normalizedSource == null) {\n    return null;\n  }\n\n  return findProjectRootOfFileInDir(normalizedSource);\n};\n"
  },
  {
    "path": "shared/tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"module\": \"commonjs\",\n    \"target\": \"es2019\",\n    \"lib\": [\"ES2019\"],\n    \"outDir\": \"out\",\n    \"rootDir\": \"src\",\n    \"sourceMap\": true\n  },\n  \"include\": [\"src\"],\n  \"exclude\": [\"node_modules\"]\n}\n"
  },
  {
    "path": "snippets.json",
    "content": "{\n  \"Module\": {\n    \"prefix\": [\"module\"],\n    \"body\": [\"module ${1:Name} = {\", \"\\t${2:// Module contents}\", \"}\"]\n  },\n  \"Try\": {\n    \"prefix\": [\"try\"],\n    \"body\": [\n      \"try {\",\n      \"\\t${1:expression}\",\n      \"} catch {\",\n      \"| ${2:MyException} => ${3:expression}\",\n      \"}\"\n    ]\n  },\n  \"For Loop\": {\n    \"prefix\": [\"for\"],\n    \"body\": [\n      \"for ${1:i} in ${2:startValueInclusive} to ${3:endValueInclusive} {\",\n      \"\\t${4:Js.log(${1:i})}\",\n      \"}\"\n    ]\n  },\n  \"Reverse For Loop\": {\n    \"prefix\": [\"for\"],\n    \"body\": [\n      \"for ${1:i} in ${2:startValueInclusive} downto ${3:endValueInclusive} {\",\n      \"\\t${4:Js.log(${1:i})}\",\n      \"}\"\n    ]\n  },\n  \"Global External Object\": {\n    \"prefix\": [\"external\"],\n    \"body\": [\n      \"@val external ${1:setTimeout}: ${2:(unit => unit, int) => float} = \\\"${3:setTimeout}\\\"\"\n    ]\n  },\n  \"Global External Module\": {\n    \"prefix\": [\"external\"],\n    \"body\": [\n      \"@scope(\\\"${1:Math}\\\") @val external ${2:random}: ${3:unit => float} = \\\"${4:random}\\\"\"\n    ]\n  },\n  \"JS Module External\": {\n    \"prefix\": [\"external\"],\n    \"body\": [\n      \"@module(\\\"${1:path}\\\") external ${2:dirname}: ${3:string => string} = \\\"${4:dirname}\\\"\"\n    ]\n  },\n  \"JS Module Default External\": {\n    \"prefix\": [\"external\"],\n    \"body\": [\n      \"@module external ${1:leftPad}: ${2:(string, int) => string} = \\\"${3:leftPad}\\\"\"\n    ]\n  }\n}\n"
  },
  {
    "path": "tools/CHANGELOG.md",
    "content": "# Changelog\n\n> **Tags:**\n>\n> - :boom: [Breaking Change]\n> - :eyeglasses: [Spec Compliance]\n> - :rocket: [New Feature]\n> - :bug: [Bug Fix]\n> - :memo: [Documentation]\n> - :house: [Internal]\n> - :nail_care: [Polish]\n\n## master\n\n## 0.6.6\n\n### :bug: Bug Fix\n\n- Correct ReScript bindings for signature details. https://github.com/rescript-lang/rescript-vscode/pull/1046\n\n## 0.6.5\n\n#### :rocket: New Feature\n\n- Add additional signature information to doc json. https://github.com/rescript-lang/rescript-vscode/pull/1043\n\n## 0.6.4\n\n#### :rocket: New Feature\n\n- Add `moduletypeid` field for explicitly annotated module type. https://github.com/rescript-lang/rescript-vscode/pull/1019\n\n### :bug: Bug Fix\n\n- Print module structure with signature to module path. https://github.com/rescript-lang/rescript-vscode/pull/1018\n\n## 0.6.3\n\n#### :bug: Bug Fix\n\n- Make sure Linux binaries are statically linked.\n\n#### :nail_care: Polish\n\n- Reverse order of extracted embeds, so they're in the correct order.\n\n## 0.6.2\n\n#### :rocket: New Feature\n\n- Ship Linux ARM64 binaries.\n\n## 0.6.1\n\n#### :rocket: New Feature\n\n- Expose `getBinaryPath` JS function that you can import to get the binary to call for the current platform.\n\n## 0.6.0\n\n#### :rocket: New Feature\n\n- _internal_ Add experimental command for extracting (string) contents from extension points.\n\n## 0.5.0\n\n#### :rocket: New Feature\n\n- Add `source` property to type, value, module and module alias. https://github.com/rescript-lang/rescript-vscode/pull/900.\n\n#### :bug: Bug Fix\n\n- Print docstrings for nested submodules. https://github.com/rescript-lang/rescript-vscode/pull/897\n- Print `deprecated` field for module. https://github.com/rescript-lang/rescript-vscode/pull/897\n\n## 0.4.0\n\n#### :bug: Bug Fix\n\n- Support inline record fields in constructors. https://github.com/rescript-lang/rescript-vscode/pull/889\n- Fix docstrings for module alias. 
Get internal docstrings of module file. https://github.com/rescript-lang/rescript-vscode/pull/878\n- Fix extracted docs of types include escaped linebreaks in signature. https://github.com/rescript-lang/rescript-vscode/pull/891\n\n## 0.3.0\n\n#### :rocket: New Feature\n\n- Expose more `decode` functions. https://github.com/rescript-lang/rescript-vscode/pull/866\n\n#### :house: [Internal]\n\n- Add env var `FROM_COMPILER` to extract docstrings from compiler repo. https://github.com/rescript-lang/rescript-vscode/pull/868\n\n#### :bug: Bug Fix\n\n- Fix tagged variant for `Module` and add attr to interface files. https://github.com/rescript-lang/rescript-vscode/pull/866\n- Fix `rescript-tools --version` command. https://github.com/rescript-lang/rescript-vscode/pull/873\n- Fix output truncate when run `rescript-tools doc path/to/file.res` in a separate process. https://github.com/rescript-lang/rescript-vscode/pull/868\n"
  },
  {
    "path": "tools/README.md",
    "content": "# ReScript Tools\n\n## Install\n\n```sh\nnpm install --save-dev @rescript/tools\n```\n\n## CLI Usage\n\n```sh\nrescript-tools --help\n```\n\n### Generate documentation\n\nPrint JSON:\n\n```sh\nrescript-tools doc src/EntryPointLibFile.res\n```\n\nWrite JSON:\n\n```sh\nrescript-tools doc src/EntryPointLibFile.res > doc.json\n```\n\n### Reanalyze\n\n```sh\nrescript-tools reanalyze --help\n```\n\n## Decode JSON\n\nAdd to `bs-dev-dependencies`:\n\n```json\n\"bs-dev-dependencies\": [\"@rescript/tools\"]\n```\n\n```rescript\n// Read JSON file and parse with `Js.Json.parseExn`\njson->RescriptTools.Docgen.decodeFromJson\n```\n"
  },
  {
    "path": "tools/bin/dune",
    "content": "(env\n (static\n  (flags\n   (:standard -ccopt -static))))\n\n(executable\n (public_name rescript-tools)\n (package tools)\n (modes byte exe)\n ; The main module that will become the binary.\n (name main)\n (libraries tools)\n (flags\n  (:standard -w \"+6+26+27+32+33+39\")))\n"
  },
  {
    "path": "tools/bin/main.ml",
    "content": "let docHelp =\n  {|ReScript Tools\n\nOutput documentation to standard output\n\nUsage: rescript-tools doc <FILE>\n\nExample: rescript-tools doc ./path/to/EntryPointLib.res|}\n\nlet help =\n  {|ReScript Tools\n\nUsage: rescript-tools [command]\n\nCommands:\n\ndoc <file>            Generate documentation\nreanalyze             Reanalyze\n-v, --version         Print version\n-h, --help            Print help|}\n\nlet logAndExit = function\n  | Ok log ->\n    Printf.printf \"%s\\n\" log;\n    exit 0\n  | Error log ->\n    Printf.eprintf \"%s\\n\" log;\n    exit 1\n\nlet version = Version.version\n\nlet main () =\n  match Sys.argv |> Array.to_list |> List.tl with\n  | \"doc\" :: rest -> (\n    match rest with\n    | [\"-h\"] | [\"--help\"] -> logAndExit (Ok docHelp)\n    | [path] ->\n      (* NOTE: Internal use to generate docs from compiler *)\n      let () =\n        match Sys.getenv_opt \"FROM_COMPILER\" with\n        | Some \"true\" -> Analysis.Cfg.isDocGenFromCompiler := true\n        | _ -> ()\n      in\n      logAndExit (Tools.extractDocs ~entryPointFile:path ~debug:false)\n    | _ -> logAndExit (Error docHelp))\n  | \"reanalyze\" :: _ ->\n    let len = Array.length Sys.argv in\n    for i = 1 to len - 2 do\n      Sys.argv.(i) <- Sys.argv.(i + 1)\n    done;\n    Sys.argv.(len - 1) <- \"\";\n    Reanalyze.cli ()\n  | \"extract-embedded\" :: extPointNames :: filename :: _ ->\n    logAndExit\n      (Ok\n         (Tools.extractEmbedded\n            ~extensionPoints:(extPointNames |> String.split_on_char ',')\n            ~filename))\n  | [\"-h\"] | [\"--help\"] -> logAndExit (Ok help)\n  | [\"-v\"] | [\"--version\"] -> logAndExit (Ok version)\n  | _ -> logAndExit (Error help)\n\nlet () = main ()\n"
  },
  {
    "path": "tools/bin/version.ml",
    "content": "let version = \"0.6.6\"\n"
  },
  {
    "path": "tools/binaries/.gitkeep",
    "content": ""
  },
  {
    "path": "tools/npm/RescriptTools.res",
    "content": "module Docgen = Tools_Docgen\n\n/** Returns the full file system path to the `rescript-tools` binary for the current platform, side stepping the JS that wraps the CLI.\n \n You can use this when you're already running a JS process and want to avoid the overhead of starting another one.\n\n ## Examples\n ```rescript\n // Prints the current ReScript Tools version.\n let stringifiedJson = ChildProcess.execFileSync(RescriptTools.getBinaryPath(), [\"-v\"])\n ```\n */\n@module(\"./getBinaryPath.js\")\nexternal getBinaryPath: unit => string = \"getBinaryPath\"\n"
  },
  {
    "path": "tools/npm/Tools_Docgen.res",
    "content": "type field = {\n  name: string,\n  docstrings: array<string>,\n  signature: string,\n  optional: bool,\n  deprecated?: string,\n}\n\n@tag(\"kind\")\ntype constructorPayload = | @as(\"inlineRecord\") InlineRecord({fields: array<field>})\n\ntype constructor = {\n  name: string,\n  docstrings: array<string>,\n  signature: string,\n  deprecated?: string,\n  payload?: constructorPayload,\n}\n\ntype rec typeInSignature = {\n  path: string,\n  genericTypeParameters: array<typeInSignature>,\n}\n\ntype signatureDetais = {\n  parameters: array<typeInSignature>,\n  returnType: typeInSignature,\n}\n\n@tag(\"kind\")\ntype detail =\n  | @as(\"record\") Record({items: array<field>})\n  | @as(\"variant\") Variant({items: array<constructor>})\n  | @as(\"signature\") Signature({details:signatureDetais})\n\ntype source = {\n  filepath: string,\n  line: int,\n  col: int,\n}\n\n@tag(\"kind\")\ntype rec item =\n  | @as(\"value\")\n  Value({\n      id: string,\n      docstrings: array<string>,\n      signature: string,\n      name: string,\n      deprecated?: string,\n      source: source,\n      /** Additional documentation of signature, if available. */\n      detail?: detail,\n    })\n  | @as(\"type\")\n  Type({\n      id: string,\n      docstrings: array<string>,\n      signature: string,\n      name: string,\n      deprecated?: string,\n      source: source,\n      /** Additional documentation for constructors and record fields, if available. 
*/\n      detail?: detail,\n    })\n  | @as(\"module\")\n  Module({\n      id: string,\n      docstrings: array<string>,\n      deprecated?: string,\n      name: string,\n      moduletypeid?: string,\n      source: source,\n      items: array<item>,\n    })\n  | @as(\"moduleType\")\n  ModuleType({\n      id: string,\n      docstrings: array<string>,\n      deprecated?: string,\n      name: string,\n      source: source,\n      items: array<item>,\n    })\n  | @as(\"moduleAlias\")\n  ModuleAlias({\n      id: string,\n      docstrings: array<string>,\n      name: string,\n      source: source,\n      items: array<item>,\n    })\n\ntype doc = {\n  name: string,\n  deprecated: option<string>,\n  docstrings: array<string>,\n  source: source,\n  items: array<item>,\n}\n\n/**\n`decodeFromJson(json)` parses JSON generated from the `rescript-tools doc` command\n*/\nexternal decodeFromJson: Js.Json.t => doc = \"%identity\"\n"
  },
  {
    "path": "tools/npm/Tools_Docgen.resi",
    "content": "type field = {\n  name: string,\n  docstrings: array<string>,\n  signature: string,\n  optional: bool,\n  deprecated?: string,\n}\n\n@tag(\"kind\")\ntype constructorPayload = | @as(\"inlineRecord\") InlineRecord({fields: array<field>})\n\ntype constructor = {\n  name: string,\n  docstrings: array<string>,\n  signature: string,\n  deprecated?: string,\n  payload?: constructorPayload,\n}\n\ntype rec typeInSignature = {\n  path: string,\n  genericTypeParameters: array<typeInSignature>,\n}\n\ntype signatureDetais = {\n  parameters: array<typeInSignature>,\n  returnType: typeInSignature,\n}\n\n@tag(\"kind\")\ntype detail =\n  | @as(\"record\") Record({items: array<field>})\n  | @as(\"variant\") Variant({items: array<constructor>})\n  | @as(\"signature\") Signature({details:signatureDetais})\n\ntype source = {\n  filepath: string,\n  line: int,\n  col: int,\n}\n\n@tag(\"kind\")\ntype rec item =\n  | @as(\"value\")\n  Value({\n      id: string,\n      docstrings: array<string>,\n      signature: string,\n      name: string,\n      deprecated?: string,\n      source: source,\n      /** Additional documentation of signature, if available. */\n      detail?: detail,\n    })\n  | @as(\"type\")\n  Type({\n      id: string,\n      docstrings: array<string>,\n      signature: string,\n      name: string,\n      deprecated?: string,\n      source: source,\n      /** Additional documentation for constructors and record fields, if available. 
*/\n      detail?: detail,\n    })\n  | @as(\"module\")\n  Module({\n      id: string,\n      docstrings: array<string>,\n      deprecated?: string,\n      name: string,\n      moduletypeid?: string,\n      source: source,\n      items: array<item>,\n    })\n  | @as(\"moduleType\")\n  ModuleType({\n      id: string,\n      docstrings: array<string>,\n      deprecated?: string,\n      name: string,\n      source: source,\n      items: array<item>,\n    })\n  | @as(\"moduleAlias\")\n  ModuleAlias({\n      id: string,\n      docstrings: array<string>,\n      name: string,\n      source: source,\n      items: array<item>,\n    })\n\ntype doc = {\n  name: string,\n  deprecated: option<string>,\n  docstrings: array<string>,\n  source: source,\n  items: array<item>,\n}\n\nlet decodeFromJson: Js.Json.t => doc\n"
  },
  {
    "path": "tools/npm/cli.js",
    "content": "#!/usr/bin/env node\n//@ts-check\n\"use strict\";\n\nconst child_process = require(\"child_process\");\nconst { getBinaryPath } = require(\"./getBinaryPath\");\n\nconst args = process.argv.slice(2);\n\nconst spawn = child_process.spawnSync(getBinaryPath(), args, {\n  stdio: \"inherit\",\n});\n\nif (spawn.status != null) {\n  process.exit(spawn.status);\n}\n"
  },
  {
    "path": "tools/npm/getBinaryPath.js",
    "content": "const path = require(\"path\");\n\nfunction getBinaryPath() {\n  const platformArch =\n    process.arch === \"x64\" ? process.platform : process.platform + process.arch;\n\n  const binPath = path.join(\n    __dirname,\n    \"..\",\n    \"binaries\",\n    platformArch,\n    \"rescript-tools.exe\",\n  );\n  return binPath;\n}\n\nmodule.exports = {\n  getBinaryPath,\n};\n"
  },
  {
    "path": "tools/package.json",
    "content": "{\n  \"name\": \"@rescript/tools\",\n  \"description\": \"ReScript Tools\",\n  \"version\": \"0.6.6\",\n  \"author\": \"ReScript Team\",\n  \"license\": \"MIT\",\n  \"bin\": {\n    \"rescript-tools\": \"npm/cli.js\"\n  },\n  \"keywords\": [\n    \"ReScript\",\n    \"Tools\",\n    \"Docgen\"\n  ],\n  \"files\": [\n    \"npm/cli.js\",\n    \"npm/getBinaryPath.js\",\n    \"npm/*.res\",\n    \"npm/*.resi\",\n    \"binaries\",\n    \"rescript.json\",\n    \"README.md\"\n  ],\n  \"engines\": {\n    \"node\": \"*\"\n  },\n  \"homepage\": \"https://github.com/rescript-lang/rescript-vscode/tools/README.md\",\n  \"repository\": {\n    \"type\": \"git\",\n    \"url\": \"https://github.com/rescript-lang/rescript-vscode\",\n    \"directory\": \"tools\"\n  },\n  \"bugs\": {\n    \"url\": \"https://github.com/rescript-lang/rescript-vscode/issues\"\n  },\n  \"scripts\": {\n    \"build\": \"rescript build\"\n  },\n  \"dependencies\": {\n    \"rescript\": \"^11.0.0-rc.7\"\n  }\n}\n"
  },
  {
    "path": "tools/rescript.json",
    "content": "{\n  \"name\": \"@rescript/tools\",\n  \"version\": \"0.6.6\",\n  \"sources\": [\n    {\n      \"dir\": \"npm\"\n    }\n  ],\n  \"suffix\": \".bs.js\",\n  \"package-specs\": {\n    \"module\": \"commonjs\",\n    \"in-source\": false\n  }\n}\n"
  },
  {
    "path": "tools/src/dune",
    "content": "(library\n (name tools)\n (flags\n  (-w \"+6+26+27+32+33+39\"))\n (libraries analysis))\n"
  },
  {
    "path": "tools/src/tools.ml",
    "content": "open Analysis\n\ntype fieldDoc = {\n  fieldName: string;\n  docstrings: string list;\n  signature: string;\n  optional: bool;\n  deprecated: string option;\n}\n\ntype constructorPayload = InlineRecord of {fieldDocs: fieldDoc list}\n\ntype constructorDoc = {\n  constructorName: string;\n  docstrings: string list;\n  signature: string;\n  deprecated: string option;\n  items: constructorPayload option;\n}\n\ntype typeDoc = {path: string; genericParameters: typeDoc list}\ntype valueSignature = {parameters: typeDoc list; returnType: typeDoc}\n\ntype source = {filepath: string; line: int; col: int}\n\ntype docItemDetail =\n  | Record of {fieldDocs: fieldDoc list}\n  | Variant of {constructorDocs: constructorDoc list}\n  | Signature of valueSignature\n\ntype docItem =\n  | Value of {\n      id: string;\n      docstring: string list;\n      signature: string;\n      name: string;\n      deprecated: string option;\n      detail: docItemDetail option;\n      source: source;\n    }\n  | Type of {\n      id: string;\n      docstring: string list;\n      signature: string;\n      name: string;\n      deprecated: string option;\n      detail: docItemDetail option;\n      source: source;\n          (** Additional documentation for constructors and record fields, if\n              available. 
*)\n    }\n  | Module of docsForModule\n  | ModuleType of {\n      id: string;\n      docstring: string list;\n      deprecated: string option;\n      name: string;\n      source: source;\n      items: docItem list;\n    }\n  | ModuleAlias of {\n      id: string;\n      docstring: string list;\n      name: string;\n      source: source;\n      items: docItem list;\n    }\nand docsForModule = {\n  id: string;\n  docstring: string list;\n  deprecated: string option;\n  name: string;\n  moduletypeid: string option;\n  source: source;\n  items: docItem list;\n}\n\nlet stringifyDocstrings docstrings =\n  let open Protocol in\n  docstrings\n  |> List.map (fun docstring -> docstring |> String.trim |> wrapInQuotes)\n  |> array\n\nlet stringifyFieldDoc ~indentation (fieldDoc : fieldDoc) =\n  let open Protocol in\n  stringifyObject ~indentation:(indentation + 1)\n    [\n      (\"name\", Some (wrapInQuotes fieldDoc.fieldName));\n      ( \"deprecated\",\n        match fieldDoc.deprecated with\n        | Some d -> Some (wrapInQuotes d)\n        | None -> None );\n      (\"optional\", Some (string_of_bool fieldDoc.optional));\n      (\"docstrings\", Some (stringifyDocstrings fieldDoc.docstrings));\n      (\"signature\", Some (wrapInQuotes fieldDoc.signature));\n    ]\n\nlet stringifyConstructorPayload ~indentation\n    (constructorPayload : constructorPayload) =\n  let open Protocol in\n  match constructorPayload with\n  | InlineRecord {fieldDocs} ->\n    stringifyObject ~indentation:(indentation + 1)\n      [\n        (\"kind\", Some (wrapInQuotes \"inlineRecord\"));\n        ( \"fields\",\n          Some\n            (fieldDocs\n            |> List.map (stringifyFieldDoc ~indentation:(indentation + 1))\n            |> array) );\n      ]\n\nlet rec stringifyTypeDoc ~indentation (td : typeDoc) : string =\n  let open Protocol in\n  let ps =\n    match td.genericParameters with\n    | [] -> None\n    | ts ->\n      ts |> List.map (stringifyTypeDoc ~indentation:(indentation + 1))\n 
     |> fun ts -> Some (array ts)\n  in\n\n  stringifyObject ~indentation:(indentation + 1)\n    [(\"path\", Some (wrapInQuotes td.path)); (\"genericTypeParameters\", ps)]\n\nlet stringifyDetail ?(indentation = 0) (detail : docItemDetail) =\n  let open Protocol in\n  match detail with\n  | Record {fieldDocs} ->\n    stringifyObject ~startOnNewline:true ~indentation\n      [\n        (\"kind\", Some (wrapInQuotes \"record\"));\n        ( \"items\",\n          Some (fieldDocs |> List.map (stringifyFieldDoc ~indentation) |> array)\n        );\n      ]\n  | Variant {constructorDocs} ->\n    stringifyObject ~startOnNewline:true ~indentation\n      [\n        (\"kind\", Some (wrapInQuotes \"variant\"));\n        ( \"items\",\n          Some\n            (constructorDocs\n            |> List.map (fun constructorDoc ->\n                   stringifyObject ~startOnNewline:true\n                     ~indentation:(indentation + 1)\n                     [\n                       ( \"name\",\n                         Some (wrapInQuotes constructorDoc.constructorName) );\n                       ( \"deprecated\",\n                         match constructorDoc.deprecated with\n                         | Some d -> Some (wrapInQuotes d)\n                         | None -> None );\n                       ( \"docstrings\",\n                         Some (stringifyDocstrings constructorDoc.docstrings) );\n                       ( \"signature\",\n                         Some (wrapInQuotes constructorDoc.signature) );\n                       ( \"payload\",\n                         match constructorDoc.items with\n                         | None -> None\n                         | Some constructorPayload ->\n                           Some\n                             (stringifyConstructorPayload\n                                ~indentation:(indentation + 1)\n                                constructorPayload) );\n                     ])\n            |> array) );\n      ]\n  | 
Signature {parameters; returnType} ->\n    let ps =\n      match parameters with\n      | [] -> None\n      | ps ->\n        ps |> List.map (stringifyTypeDoc ~indentation:(indentation + 1))\n        |> fun ps -> Some (array ps)\n    in\n    stringifyObject ~startOnNewline:true ~indentation\n      [\n        (\"kind\", Some (wrapInQuotes \"signature\"));\n        ( \"details\",\n          Some\n            (stringifyObject ~startOnNewline:false ~indentation\n               [\n                 (\"parameters\", ps);\n                 (\"returnType\", Some (stringifyTypeDoc ~indentation returnType));\n               ]) );\n      ]\n\nlet stringifySource ~indentation source =\n  let open Protocol in\n  stringifyObject ~startOnNewline:false ~indentation\n    [\n      (\"filepath\", Some (source.filepath |> wrapInQuotes));\n      (\"line\", Some (source.line |> string_of_int));\n      (\"col\", Some (source.col |> string_of_int));\n    ]\n\nlet rec stringifyDocItem ?(indentation = 0) ~originalEnv (item : docItem) =\n  let open Protocol in\n  match item with\n  | Value {id; docstring; signature; name; deprecated; source; detail} ->\n    stringifyObject ~startOnNewline:true ~indentation\n      [\n        (\"id\", Some (wrapInQuotes id));\n        (\"kind\", Some (wrapInQuotes \"value\"));\n        (\"name\", Some (name |> wrapInQuotes));\n        ( \"deprecated\",\n          match deprecated with\n          | Some d -> Some (wrapInQuotes d)\n          | None -> None );\n        (\"signature\", Some (signature |> String.trim |> wrapInQuotes));\n        (\"docstrings\", Some (stringifyDocstrings docstring));\n        (\"source\", Some (stringifySource ~indentation:(indentation + 1) source));\n        ( \"detail\",\n          match detail with\n          | None -> None\n          | Some detail ->\n            Some (stringifyDetail ~indentation:(indentation + 1) detail) );\n      ]\n  | Type {id; docstring; signature; name; deprecated; detail; source} ->\n    stringifyObject 
~startOnNewline:true ~indentation\n      [\n        (\"id\", Some (wrapInQuotes id));\n        (\"kind\", Some (wrapInQuotes \"type\"));\n        (\"name\", Some (name |> wrapInQuotes));\n        ( \"deprecated\",\n          match deprecated with\n          | Some d -> Some (wrapInQuotes d)\n          | None -> None );\n        (\"signature\", Some (signature |> wrapInQuotes));\n        (\"docstrings\", Some (stringifyDocstrings docstring));\n        (\"source\", Some (stringifySource ~indentation:(indentation + 1) source));\n        ( \"detail\",\n          match detail with\n          | None -> None\n          | Some detail ->\n            Some (stringifyDetail ~indentation:(indentation + 1) detail) );\n      ]\n  | Module m ->\n    stringifyObject ~startOnNewline:true ~indentation\n      [\n        (\"id\", Some (wrapInQuotes m.id));\n        (\"name\", Some (wrapInQuotes m.name));\n        (\"kind\", Some (wrapInQuotes \"module\"));\n        ( \"deprecated\",\n          match m.deprecated with\n          | Some d -> Some (wrapInQuotes d)\n          | None -> None );\n        ( \"moduletypeid\",\n          match m.moduletypeid with\n          | Some path -> Some (wrapInQuotes path)\n          | None -> None );\n        (\"docstrings\", Some (stringifyDocstrings m.docstring));\n        ( \"source\",\n          Some (stringifySource ~indentation:(indentation + 1) m.source) );\n        ( \"items\",\n          Some\n            (m.items\n            |> List.map\n                 (stringifyDocItem ~originalEnv ~indentation:(indentation + 1))\n            |> array) );\n      ]\n  | ModuleType m ->\n    stringifyObject ~startOnNewline:true ~indentation\n      [\n        (\"id\", Some (wrapInQuotes m.id));\n        (\"name\", Some (wrapInQuotes m.name));\n        (\"kind\", Some (wrapInQuotes \"moduleType\"));\n        ( \"deprecated\",\n          match m.deprecated with\n          | Some d -> Some (wrapInQuotes d)\n          | None -> None );\n        (\"docstrings\", 
Some (stringifyDocstrings m.docstring));\n        ( \"source\",\n          Some (stringifySource ~indentation:(indentation + 1) m.source) );\n        ( \"items\",\n          Some\n            (m.items\n            |> List.map\n                 (stringifyDocItem ~originalEnv ~indentation:(indentation + 1))\n            |> array) );\n      ]\n  | ModuleAlias m ->\n    stringifyObject ~startOnNewline:true ~indentation\n      [\n        (\"id\", Some (wrapInQuotes m.id));\n        (\"kind\", Some (wrapInQuotes \"moduleAlias\"));\n        (\"name\", Some (wrapInQuotes m.name));\n        (\"docstrings\", Some (stringifyDocstrings m.docstring));\n        ( \"source\",\n          Some (stringifySource ~indentation:(indentation + 1) m.source) );\n        ( \"items\",\n          Some\n            (m.items\n            |> List.map\n                 (stringifyDocItem ~originalEnv ~indentation:(indentation + 1))\n            |> array) );\n      ]\n\nand stringifyDocsForModule ?(indentation = 0) ~originalEnv (d : docsForModule) =\n  let open Protocol in\n  stringifyObject ~startOnNewline:true ~indentation\n    [\n      (\"name\", Some (wrapInQuotes d.name));\n      ( \"deprecated\",\n        match d.deprecated with\n        | Some d -> Some (wrapInQuotes d)\n        | None -> None );\n      (\"docstrings\", Some (stringifyDocstrings d.docstring));\n      (\"source\", Some (stringifySource ~indentation:(indentation + 1) d.source));\n      ( \"items\",\n        Some\n          (d.items\n          |> List.map\n               (stringifyDocItem ~originalEnv ~indentation:(indentation + 1))\n          |> array) );\n    ]\n\nlet fieldToFieldDoc (field : SharedTypes.field) : fieldDoc =\n  {\n    fieldName = field.fname.txt;\n    docstrings = field.docstring;\n    optional = field.optional;\n    signature = Shared.typeToString field.typ;\n    deprecated = field.deprecated;\n  }\n\nlet typeDetail typ ~env ~full =\n  let open SharedTypes in\n  match TypeUtils.extractTypeFromResolvedType 
~env ~full typ with\n  | Some (Trecord {fields}) ->\n    Some (Record {fieldDocs = fields |> List.map fieldToFieldDoc})\n  | Some (Tvariant {constructors}) ->\n    Some\n      (Variant\n         {\n           constructorDocs =\n             constructors\n             |> List.map (fun (c : Constructor.t) ->\n                    {\n                      constructorName = c.cname.txt;\n                      docstrings = c.docstring;\n                      signature = CompletionBackEnd.showConstructor c;\n                      deprecated = c.deprecated;\n                      items =\n                        (match c.args with\n                        | InlineRecord fields ->\n                          Some\n                            (InlineRecord\n                               {fieldDocs = fields |> List.map fieldToFieldDoc})\n                        | _ -> None);\n                    });\n         })\n  | _ -> None\n\n(* split a list into two parts all the items except the last one and the last item *)\nlet splitLast l =\n  let rec splitLast' acc = function\n    | [] -> failwith \"splitLast: empty list\"\n    | [x] -> (List.rev acc, x)\n    | x :: xs -> splitLast' (x :: acc) xs\n  in\n  splitLast' [] l\n\nlet path_to_string path =\n  let buf = Buffer.create 64 in\n  let rec aux = function\n    | Path.Pident id -> Buffer.add_string buf (Ident.name id)\n    | Path.Pdot (p, s, _) ->\n      aux p;\n      Buffer.add_char buf '.';\n      Buffer.add_string buf s\n    | Path.Papply (p1, p2) ->\n      aux p1;\n      Buffer.add_char buf '(';\n      aux p2;\n      Buffer.add_char buf ')'\n  in\n  aux path;\n  Buffer.contents buf\n\nlet valueDetail (typ : Types.type_expr) =\n  let rec collectSignatureTypes (typ_desc : Types.type_desc) =\n    match typ_desc with\n    | Tlink t | Tsubst t | Tpoly (t, []) -> collectSignatureTypes t.desc\n    | Tconstr (Path.Pident {name = \"function$\"}, [t; _], _) ->\n      collectSignatureTypes t.desc\n    | Tconstr (path, ts, _) -> (\n      
let p = path_to_string path in\n      match ts with\n      | [] -> [{path = p; genericParameters = []}]\n      | ts ->\n        let ts =\n          ts\n          |> List.concat_map (fun (t : Types.type_expr) ->\n                 collectSignatureTypes t.desc)\n        in\n        [{path = p; genericParameters = ts}])\n    | Tarrow (_, t1, t2, _) ->\n      collectSignatureTypes t1.desc @ collectSignatureTypes t2.desc\n    | Tvar None -> [{path = \"_\"; genericParameters = []}]\n    | _ -> []\n  in\n  match collectSignatureTypes typ.desc with\n  | [] -> None\n  | ts ->\n    let parameters, returnType = splitLast ts in\n    Some (Signature {parameters; returnType})\n\nlet makeId modulePath ~identifier =\n  identifier :: modulePath |> List.rev |> SharedTypes.ident\n\nlet getSource ~rootPath ({loc_start} : Location.t) =\n  let line, col = Pos.ofLexing loc_start in\n  let filepath =\n    Files.relpath rootPath loc_start.pos_fname\n    |> Files.split Filename.dir_sep\n    |> String.concat \"/\"\n  in\n  {filepath; line = line + 1; col = col + 1}\n\nlet extractDocs ~entryPointFile ~debug =\n  let path =\n    match Filename.is_relative entryPointFile with\n    | true -> Unix.realpath entryPointFile\n    | false -> entryPointFile\n  in\n  if debug then Printf.printf \"extracting docs for %s\\n\" path;\n  let result =\n    match\n      FindFiles.isImplementation path = false\n      && FindFiles.isInterface path = false\n    with\n    | false -> (\n      let path =\n        if FindFiles.isImplementation path then\n          let pathAsResi =\n            (path |> Filename.dirname) ^ \"/\"\n            ^ (path |> Filename.basename |> Filename.chop_extension)\n            ^ \".resi\"\n          in\n          if Sys.file_exists pathAsResi then (\n            if debug then\n              Printf.printf \"preferring found resi file for impl: %s\\n\"\n                pathAsResi;\n            pathAsResi)\n          else path\n        else path\n      in\n      match 
Cmt.loadFullCmtFromPath ~path with\n      | None ->\n        Error\n          (Printf.sprintf\n             \"error: failed to generate doc for %s, try to build the project\"\n             path)\n      | Some full ->\n        let file = full.file in\n        let structure = file.structure in\n        let rootPath = full.package.rootPath in\n        let open SharedTypes in\n        let env = QueryEnv.fromFile file in\n        let rec extractDocsForModule ?(modulePath = [env.file.moduleName])\n            (structure : Module.structure) =\n          {\n            id = modulePath |> List.rev |> ident;\n            docstring = structure.docstring |> List.map String.trim;\n            name = structure.name;\n            moduletypeid = None;\n            deprecated = structure.deprecated;\n            source =\n              {\n                filepath =\n                  (match rootPath = \".\" with\n                  | true -> file.uri |> Uri.toPath\n                  | false ->\n                    Files.relpath rootPath (file.uri |> Uri.toPath)\n                    |> Files.split Filename.dir_sep\n                    |> String.concat \"/\");\n                line = 1;\n                col = 1;\n              };\n            items =\n              structure.items\n              |> List.filter_map (fun (item : Module.item) ->\n                     let source = getSource ~rootPath item.loc in\n                     match item.kind with\n                     | Value typ ->\n                       Some\n                         (Value\n                            {\n                              id = modulePath |> makeId ~identifier:item.name;\n                              docstring = item.docstring |> List.map String.trim;\n                              signature =\n                                \"let \" ^ item.name ^ \": \"\n                                ^ Shared.typeToString typ;\n                              name = item.name;\n                              
deprecated = item.deprecated;\n                              detail = valueDetail typ;\n                              source;\n                            })\n                     | Type (typ, _) ->\n                       Some\n                         (Type\n                            {\n                              id = modulePath |> makeId ~identifier:item.name;\n                              docstring = item.docstring |> List.map String.trim;\n                              signature =\n                                typ.decl |> Shared.declToString item.name;\n                              name = item.name;\n                              deprecated = item.deprecated;\n                              detail = typeDetail typ ~full ~env;\n                              source;\n                            })\n                     | Module {type_ = Ident p; isModuleType = false} ->\n                       (* module Whatever = OtherModule *)\n                       let aliasToModule = p |> pathIdentToString in\n                       let id =\n                         (modulePath |> List.rev |> List.hd) ^ \".\" ^ item.name\n                       in\n                       let items, internalDocstrings =\n                         match\n                           ProcessCmt.fileForModule ~package:full.package\n                             aliasToModule\n                         with\n                         | None -> ([], [])\n                         | Some file ->\n                           let docs =\n                             extractDocsForModule ~modulePath:[id]\n                               file.structure\n                           in\n                           (docs.items, docs.docstring)\n                       in\n                       Some\n                         (ModuleAlias\n                            {\n                              id;\n                              name = item.name;\n                              source;\n                 
             items;\n                              docstring =\n                                item.docstring @ internalDocstrings\n                                |> List.map String.trim;\n                            })\n                     | Module {type_ = Structure m; isModuleType = false} ->\n                       (* module Whatever = {} in res or module Whatever: {} in resi. *)\n                       let modulePath = m.name :: modulePath in\n                       let docs = extractDocsForModule ~modulePath m in\n                       Some\n                         (Module\n                            {\n                              id = modulePath |> List.rev |> ident;\n                              name = m.name;\n                              moduletypeid = None;\n                              docstring = item.docstring @ m.docstring;\n                              deprecated = item.deprecated;\n                              source;\n                              items = docs.items;\n                            })\n                     | Module {type_ = Structure m; isModuleType = true} ->\n                       (* module type Whatever = {} *)\n                       let modulePath = m.name :: modulePath in\n                       let docs = extractDocsForModule ~modulePath m in\n                       Some\n                         (ModuleType\n                            {\n                              id = modulePath |> List.rev |> ident;\n                              name = m.name;\n                              docstring = item.docstring @ m.docstring;\n                              deprecated = item.deprecated;\n                              source;\n                              items = docs.items;\n                            })\n                     | Module\n                         {\n                           type_ =\n                             Constraint (Structure _impl, Structure interface);\n                         } ->\n       
                (* module Whatever: { <interface> } = { <impl> }. Prefer the interface. *)\n                       Some\n                         (Module\n                            (extractDocsForModule\n                               ~modulePath:(interface.name :: modulePath)\n                               interface))\n                     | Module {type_ = Constraint (Structure m, Ident p)} ->\n                       (* module M: T = { <impl> }. Print M *)\n                       let docs =\n                         extractDocsForModule ~modulePath:(m.name :: modulePath)\n                           m\n                       in\n                       let identModulePath = p |> Path.head |> Ident.name in\n\n                       let moduleTypeIdPath =\n                         match\n                           ProcessCmt.fileForModule ~package:full.package\n                             identModulePath\n                           |> Option.is_none\n                         with\n                         | false -> []\n                         | true -> [modulePath |> List.rev |> List.hd]\n                       in\n\n                       Some\n                         (Module\n                            {\n                              docs with\n                              moduletypeid =\n                                Some\n                                  (makeId ~identifier:(Path.name p)\n                                     moduleTypeIdPath);\n                            })\n                     | _ -> None);\n          }\n        in\n        let docs = extractDocsForModule structure in\n        Ok (stringifyDocsForModule ~originalEnv:env docs))\n    | true ->\n      Error\n        (Printf.sprintf\n           \"error: failed to read %s, expected an .res or .resi file\" path)\n  in\n\n  result\n\nlet extractEmbedded ~extensionPoints ~filename =\n  let {Res_driver.parsetree = structure} =\n    Res_driver.parsing_engine.parse_implementation 
~for_printer:false ~filename\n  in\n  let content = ref [] in\n  let append item = content := item :: !content in\n  let extension (iterator : Ast_iterator.iterator) (ext : Parsetree.extension) =\n    (match ext with\n    | ( {txt},\n        PStr\n          [\n            {\n              pstr_desc =\n                Pstr_eval\n                  ( {\n                      pexp_loc;\n                      pexp_desc = Pexp_constant (Pconst_string (contents, _));\n                    },\n                    _ );\n            };\n          ] )\n      when extensionPoints |> List.exists (fun v -> v = txt) ->\n      append (pexp_loc, txt, contents)\n    | _ -> ());\n    Ast_iterator.default_iterator.extension iterator ext\n  in\n  let iterator = {Ast_iterator.default_iterator with extension} in\n  iterator.structure iterator structure;\n  let open Analysis.Protocol in\n  !content\n  |> List.map (fun (loc, extensionName, contents) ->\n         stringifyObject\n           [\n             (\"extensionName\", Some (wrapInQuotes extensionName));\n             (\"contents\", Some (wrapInQuotes contents));\n             (\"loc\", Some (Analysis.Utils.cmtLocToRange loc |> stringifyRange));\n           ])\n  |> List.rev |> array\n"
  },
  {
    "path": "tools/tests/Makefile",
    "content": "SHELL = /bin/bash\n\nnode_modules/.bin/rescript:\n\tnpm install\n\nbuild: node_modules/.bin/rescript\n\tnode_modules/.bin/rescript\n\ntest: build\n\t./test.sh\n\nclean:\n\trm -r node_modules lib\n\n.DEFAULT_GOAL := test\n\n.PHONY: clean test\n"
  },
  {
    "path": "tools/tests/package.json",
    "content": "{\n  \"name\": \"docstrings-test\",\n  \"version\": \"0.0.0\",\n  \"scripts\": {\n    \"res:build\": \"rescript\",\n    \"res:clean\": \"rescript clean\",\n    \"res:dev\": \"rescript -w\"\n  },\n  \"keywords\": [\n    \"rescript\"\n  ],\n  \"author\": \"\",\n  \"license\": \"MIT\",\n  \"dependencies\": {\n    \"@rescript/react\": \"^0.12.0-alpha.3\",\n    \"rescript\": \"^11.0.0\"\n  }\n}\n"
  },
  {
    "path": "tools/tests/rescript.json",
    "content": "{\n  \"name\": \"docstrings-test\",\n  \"sources\": {\n    \"dir\": \"src\",\n    \"subdirs\": true\n  },\n  \"package-specs\": {\n    \"module\": \"commonjs\",\n    \"in-source\": true\n  },\n  \"suffix\": \".res.js\",\n  \"bs-dependencies\": [\"@rescript/react\"]\n}\n"
  },
  {
    "path": "tools/tests/src/DocExtraction2.res",
    "content": "type t = string\n\nlet getStr = () => \"123\"\n\nlet make = getStr\n\nmodule InnerModule = {\n  type t = unit\n  let make = () => ()\n}\n\n// ^dex\n"
  },
  {
    "path": "tools/tests/src/DocExtraction2.resi",
    "content": "/*** Module level doc here.*/\n\n/** Type t is pretty cool.*/\ntype t\n\n/** Makerz of stuffz. */\nlet make: unit => t\n\nmodule InnerModule: {\n  /*** This inner module is nice...*/\n\n  /** This type is also t. */\n  type t\n\n  /** Maker of tea.*/\n  let make: unit => t\n}\n\n// ^dex\n"
  },
  {
    "path": "tools/tests/src/DocExtractionRes.res",
    "content": "/***Module level documentation goes here. */\n\n/** This type represents stuff. */\ntype t = {\n  /** The name of the stuff.*/\n  name: string,\n  /** Whether stuff is online.*/\n  online: bool,\n}\n\n/** Create stuff.\n\n```rescript example\nlet stuff = make(\"My name\")\n```\n*/\nlet make = name => {\n  name,\n  online: true,\n}\n\n/** Stuff goes offline.*/\nlet asOffline = (t: t) => {...t, online: false}\n\n/** exotic identifier */\nlet \\\"SomeConstant\" = 12\n\nmodule SomeInnerModule = {\n  /*** Another module level docstring here.*/\n  type status =\n    | /** If this is started or not */ Started(t) | /** Stopped? */ Stopped | /** Now idle.*/ Idle\n\n  /** These are all the valid inputs.*/\n  type validInputs = [#something | #\"needs-escaping\" | #withPayload(int) | #status(status)]\n\n  type callback = (t, ~status: status) => unit\n}\n\nmodule AnotherModule = {\n  /*** Mighty fine module here too!*/\n\n  /** This links another module. Neat. */\n  module LinkedModule = SomeInnerModule\n\n  /**\n  Testing what this looks like.*/\n  type callback = SomeInnerModule.status => unit\n\n  let isGoodStatus = (status: SomeInnerModule.status) => status == Stopped\n\n  /** Trying how it looks with an inline record in a variant. */\n  type someVariantWithInlineRecords =\n    | /** This has inline records...*/\n    SomeStuff({\n        offline: bool,\n        /** Is the user online? */ online?: bool,\n      })\n\n  open ReactDOM\n\n  /**Callback to get the DOM root...*/\n  type domRoot = unit => Client.Root.t\n}\n\nmodule ModuleWithThingsThatShouldNotBeExported: {\n  /*** BROKEN: This docstring isn't picked up Doesn't seem to be parsed at all, no attributes found.*/\n\n  /** The type t is stuff. 
*/\n  type t\n\n  /** The maker of stuff!*/\n  let make: unit => t\n} = {\n  /*** Mighty fine module here too!*/\n  type t = string\n  type x = int\n  type f = bool\n\n  let m1 = (x: x) => {\n    x + 1\n  }\n\n  let m2 = (f: f) =>\n    if f {\n      true\n    } else {\n      false\n    }\n\n  let make = () => {\n    if m2(true) && m1(1) > 2 {\n      \"1\"\n    } else {\n      \"2\"\n    }\n  }\n}\n\nmodule type Example = {\n  /***\n  this is an example module type \n  */\n\n  /**\n  main type of this module \n  */\n  type t\n\n  /**\n  function from t to t\n  */\n  let f: t => t\n}\n\nmodule M: Example = {\n  /***\n  implementation of Example module type\n  */\n\n  /**\n  main type \n  */\n  type t = int\n\n  /**\n  identity function\n  */\n  let f = (x: int) => x\n}\n\nmodule type MT = {\n  let x: int\n}\n\nmodule A: MT = {\n  let x = 42\n}\n\nmodule C = {\n  module D: MT = {\n    let x = 42\n  }\n}\n\n// ^dex\n"
  },
  {
    "path": "tools/tests/src/ModC.res",
    "content": "/**\nUser Module\n*/\nmodule User = {\n  let name = \"ReScript\"\n}\n"
  },
  {
    "path": "tools/tests/src/ModC.resi",
    "content": "/**\nUser Module from interface file\n*/\nmodule User: {\n  let name: string\n}\n"
  },
  {
    "path": "tools/tests/src/expected/DocExtraction2.res.json",
    "content": "\n{\n  \"name\": \"DocExtraction2\",\n  \"docstrings\": [\"Module level doc here.\"],\n  \"source\": {\n    \"filepath\": \"src/DocExtraction2.resi\",\n    \"line\": 1,\n    \"col\": 1\n  },\n  \"items\": [\n  {\n    \"id\": \"DocExtraction2.t\",\n    \"kind\": \"type\",\n    \"name\": \"t\",\n    \"signature\": \"type t\",\n    \"docstrings\": [\"Type t is pretty cool.\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtraction2.resi\",\n      \"line\": 4,\n      \"col\": 1\n    }\n  }, \n  {\n    \"id\": \"DocExtraction2.make\",\n    \"kind\": \"value\",\n    \"name\": \"make\",\n    \"signature\": \"let make: unit => t\",\n    \"docstrings\": [\"Makerz of stuffz.\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtraction2.resi\",\n      \"line\": 7,\n      \"col\": 1\n    },\n    \"detail\": \n    {\n      \"kind\": \"signature\",\n      \"details\": {\n      \"parameters\": [{\n          \"path\": \"unit\"\n        }],\n      \"returnType\": {\n        \"path\": \"t\"\n      }\n    }\n    }\n  }, \n  {\n    \"id\": \"DocExtraction2.InnerModule\",\n    \"name\": \"InnerModule\",\n    \"kind\": \"module\",\n    \"docstrings\": [],\n    \"source\": {\n      \"filepath\": \"src/DocExtraction2.resi\",\n      \"line\": 9,\n      \"col\": 8\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtraction2.InnerModule.t\",\n      \"kind\": \"type\",\n      \"name\": \"t\",\n      \"signature\": \"type t\",\n      \"docstrings\": [\"This type is also t.\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtraction2.resi\",\n        \"line\": 13,\n        \"col\": 3\n      }\n    }, \n    {\n      \"id\": \"DocExtraction2.InnerModule.make\",\n      \"kind\": \"value\",\n      \"name\": \"make\",\n      \"signature\": \"let make: unit => t\",\n      \"docstrings\": [\"Maker of tea.\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtraction2.resi\",\n        \"line\": 15,\n        \"col\": 3\n      },\n      \"detail\": \n      {\n    
    \"kind\": \"signature\",\n        \"details\": {\n        \"parameters\": [{\n            \"path\": \"unit\"\n          }],\n        \"returnType\": {\n          \"path\": \"t\"\n        }\n      }\n      }\n    }]\n  }]\n}\n"
  },
  {
    "path": "tools/tests/src/expected/DocExtraction2.resi.json",
    "content": "\n{\n  \"name\": \"DocExtraction2\",\n  \"docstrings\": [\"Module level doc here.\"],\n  \"source\": {\n    \"filepath\": \"src/DocExtraction2.resi\",\n    \"line\": 1,\n    \"col\": 1\n  },\n  \"items\": [\n  {\n    \"id\": \"DocExtraction2.t\",\n    \"kind\": \"type\",\n    \"name\": \"t\",\n    \"signature\": \"type t\",\n    \"docstrings\": [\"Type t is pretty cool.\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtraction2.resi\",\n      \"line\": 4,\n      \"col\": 1\n    }\n  }, \n  {\n    \"id\": \"DocExtraction2.make\",\n    \"kind\": \"value\",\n    \"name\": \"make\",\n    \"signature\": \"let make: unit => t\",\n    \"docstrings\": [\"Makerz of stuffz.\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtraction2.resi\",\n      \"line\": 7,\n      \"col\": 1\n    },\n    \"detail\": \n    {\n      \"kind\": \"signature\",\n      \"details\": {\n      \"parameters\": [{\n          \"path\": \"unit\"\n        }],\n      \"returnType\": {\n        \"path\": \"t\"\n      }\n    }\n    }\n  }, \n  {\n    \"id\": \"DocExtraction2.InnerModule\",\n    \"name\": \"InnerModule\",\n    \"kind\": \"module\",\n    \"docstrings\": [],\n    \"source\": {\n      \"filepath\": \"src/DocExtraction2.resi\",\n      \"line\": 9,\n      \"col\": 8\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtraction2.InnerModule.t\",\n      \"kind\": \"type\",\n      \"name\": \"t\",\n      \"signature\": \"type t\",\n      \"docstrings\": [\"This type is also t.\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtraction2.resi\",\n        \"line\": 13,\n        \"col\": 3\n      }\n    }, \n    {\n      \"id\": \"DocExtraction2.InnerModule.make\",\n      \"kind\": \"value\",\n      \"name\": \"make\",\n      \"signature\": \"let make: unit => t\",\n      \"docstrings\": [\"Maker of tea.\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtraction2.resi\",\n        \"line\": 15,\n        \"col\": 3\n      },\n      \"detail\": \n      {\n    
    \"kind\": \"signature\",\n        \"details\": {\n        \"parameters\": [{\n            \"path\": \"unit\"\n          }],\n        \"returnType\": {\n          \"path\": \"t\"\n        }\n      }\n      }\n    }]\n  }]\n}\n"
  },
  {
    "path": "tools/tests/src/expected/DocExtractionRes.res.json",
    "content": "\n{\n  \"name\": \"DocExtractionRes\",\n  \"docstrings\": [\"Module level documentation goes here.\"],\n  \"source\": {\n    \"filepath\": \"src/DocExtractionRes.res\",\n    \"line\": 1,\n    \"col\": 1\n  },\n  \"items\": [\n  {\n    \"id\": \"DocExtractionRes.t\",\n    \"kind\": \"type\",\n    \"name\": \"t\",\n    \"signature\": \"type t = {name: string, online: bool}\",\n    \"docstrings\": [\"This type represents stuff.\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 4,\n      \"col\": 1\n    },\n    \"detail\": \n    {\n      \"kind\": \"record\",\n      \"items\": [{\n        \"name\": \"name\",\n        \"optional\": false,\n        \"docstrings\": [\"The name of the stuff.\"],\n        \"signature\": \"string\"\n      }, {\n        \"name\": \"online\",\n        \"optional\": false,\n        \"docstrings\": [\"Whether stuff is online.\"],\n        \"signature\": \"bool\"\n      }]\n    }\n  }, \n  {\n    \"id\": \"DocExtractionRes.make\",\n    \"kind\": \"value\",\n    \"name\": \"make\",\n    \"signature\": \"let make: string => t\",\n    \"docstrings\": [\"Create stuff.\\n\\n```rescript example\\nlet stuff = make(\\\"My name\\\")\\n```\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 17,\n      \"col\": 5\n    },\n    \"detail\": \n    {\n      \"kind\": \"signature\",\n      \"details\": {\n      \"parameters\": [{\n          \"path\": \"string\"\n        }],\n      \"returnType\": {\n        \"path\": \"t\"\n      }\n    }\n    }\n  }, \n  {\n    \"id\": \"DocExtractionRes.asOffline\",\n    \"kind\": \"value\",\n    \"name\": \"asOffline\",\n    \"signature\": \"let asOffline: t => t\",\n    \"docstrings\": [\"Stuff goes offline.\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 23,\n      \"col\": 5\n    },\n    \"detail\": \n    {\n      \"kind\": \"signature\",\n      \"details\": {\n      \"parameters\": [{\n 
         \"path\": \"t\"\n        }],\n      \"returnType\": {\n        \"path\": \"t\"\n      }\n    }\n    }\n  }, \n  {\n    \"id\": \"DocExtractionRes.SomeConstant\",\n    \"kind\": \"value\",\n    \"name\": \"SomeConstant\",\n    \"signature\": \"let SomeConstant: int\",\n    \"docstrings\": [\"exotic identifier\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 26,\n      \"col\": 5\n    },\n    \"detail\": \n    {\n      \"kind\": \"signature\",\n      \"details\": {\n      \"returnType\": {\n        \"path\": \"int\"\n      }\n    }\n    }\n  }, \n  {\n    \"id\": \"DocExtractionRes.SomeInnerModule\",\n    \"name\": \"SomeInnerModule\",\n    \"kind\": \"module\",\n    \"docstrings\": [\"Another module level docstring here.\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 28,\n      \"col\": 8\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.SomeInnerModule.status\",\n      \"kind\": \"type\",\n      \"name\": \"status\",\n      \"signature\": \"type status = Started(t) | Stopped | Idle\",\n      \"docstrings\": [],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 30,\n        \"col\": 3\n      },\n      \"detail\": \n      {\n        \"kind\": \"variant\",\n        \"items\": [\n        {\n          \"name\": \"Started\",\n          \"docstrings\": [\"If this is started or not\"],\n          \"signature\": \"Started(t)\"\n        }, \n        {\n          \"name\": \"Stopped\",\n          \"docstrings\": [\"Stopped?\"],\n          \"signature\": \"Stopped\"\n        }, \n        {\n          \"name\": \"Idle\",\n          \"docstrings\": [\"Now idle.\"],\n          \"signature\": \"Idle\"\n        }]\n      }\n    }, \n    {\n      \"id\": \"DocExtractionRes.SomeInnerModule.validInputs\",\n      \"kind\": \"type\",\n      \"name\": \"validInputs\",\n      \"signature\": \"type validInputs = [\\n  | 
#\\\"needs-escaping\\\"\\n  | #something\\n  | #status(status)\\n  | #withPayload(int)\\n]\",\n      \"docstrings\": [\"These are all the valid inputs.\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 34,\n        \"col\": 3\n      }\n    }, \n    {\n      \"id\": \"DocExtractionRes.SomeInnerModule.callback\",\n      \"kind\": \"type\",\n      \"name\": \"callback\",\n      \"signature\": \"type callback = (t, ~status: status) => unit\",\n      \"docstrings\": [],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 36,\n        \"col\": 3\n      }\n    }]\n  }, \n  {\n    \"id\": \"DocExtractionRes.AnotherModule\",\n    \"name\": \"AnotherModule\",\n    \"kind\": \"module\",\n    \"docstrings\": [\"Mighty fine module here too!\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 39,\n      \"col\": 8\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.LinkedModule\",\n      \"kind\": \"moduleAlias\",\n      \"name\": \"LinkedModule\",\n      \"docstrings\": [\"This links another module. 
Neat.\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 43,\n        \"col\": 10\n      },\n      \"items\": []\n    }, \n    {\n      \"id\": \"DocExtractionRes.AnotherModule.callback\",\n      \"kind\": \"type\",\n      \"name\": \"callback\",\n      \"signature\": \"type callback = SomeInnerModule.status => unit\",\n      \"docstrings\": [\"Testing what this looks like.\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 47,\n        \"col\": 3\n      }\n    }, \n    {\n      \"id\": \"DocExtractionRes.AnotherModule.isGoodStatus\",\n      \"kind\": \"value\",\n      \"name\": \"isGoodStatus\",\n      \"signature\": \"let isGoodStatus: SomeInnerModule.status => bool\",\n      \"docstrings\": [],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 49,\n        \"col\": 7\n      },\n      \"detail\": \n      {\n        \"kind\": \"signature\",\n        \"details\": {\n        \"parameters\": [{\n            \"path\": \"SomeInnerModule.status\"\n          }],\n        \"returnType\": {\n          \"path\": \"bool\"\n        }\n      }\n      }\n    }, \n    {\n      \"id\": \"DocExtractionRes.AnotherModule.someVariantWithInlineRecords\",\n      \"kind\": \"type\",\n      \"name\": \"someVariantWithInlineRecords\",\n      \"signature\": \"type someVariantWithInlineRecords =\\n  | SomeStuff({offline: bool, online?: bool})\",\n      \"docstrings\": [\"Trying how it looks with an inline record in a variant.\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 52,\n        \"col\": 3\n      },\n      \"detail\": \n      {\n        \"kind\": \"variant\",\n        \"items\": [\n        {\n          \"name\": \"SomeStuff\",\n          \"docstrings\": [\"This has inline records...\"],\n          \"signature\": \"SomeStuff({offline: bool, online?: bool})\",\n          \"payload\": {\n            
\"kind\": \"inlineRecord\",\n            \"fields\": [{\n              \"name\": \"offline\",\n              \"optional\": false,\n              \"docstrings\": [],\n              \"signature\": \"bool\"\n            }, {\n              \"name\": \"online\",\n              \"optional\": true,\n              \"docstrings\": [\"Is the user online?\"],\n              \"signature\": \"option<bool>\"\n            }]\n          }\n        }]\n      }\n    }, \n    {\n      \"id\": \"DocExtractionRes.AnotherModule.domRoot\",\n      \"kind\": \"type\",\n      \"name\": \"domRoot\",\n      \"signature\": \"type domRoot = unit => ReactDOM.Client.Root.t\",\n      \"docstrings\": [\"Callback to get the DOM root...\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 62,\n        \"col\": 3\n      }\n    }]\n  }, \n  {\n    \"id\": \"DocExtractionRes.ModuleWithThingsThatShouldNotBeExported\",\n    \"name\": \"ModuleWithThingsThatShouldNotBeExported\",\n    \"kind\": \"module\",\n    \"docstrings\": [],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 1,\n      \"col\": 1\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.ModuleWithThingsThatShouldNotBeExported.t\",\n      \"kind\": \"type\",\n      \"name\": \"t\",\n      \"signature\": \"type t\",\n      \"docstrings\": [\"The type t is stuff.\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 69,\n        \"col\": 3\n      }\n    }, \n    {\n      \"id\": \"DocExtractionRes.ModuleWithThingsThatShouldNotBeExported.make\",\n      \"kind\": \"value\",\n      \"name\": \"make\",\n      \"signature\": \"let make: unit => t\",\n      \"docstrings\": [\"The maker of stuff!\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 71,\n        \"col\": 3\n      },\n      \"detail\": \n      {\n        \"kind\": \"signature\",\n        \"details\": {\n    
    \"parameters\": [{\n            \"path\": \"unit\"\n          }],\n        \"returnType\": {\n          \"path\": \"t\"\n        }\n      }\n      }\n    }]\n  }, \n  {\n    \"id\": \"DocExtractionRes.Example\",\n    \"name\": \"Example\",\n    \"kind\": \"moduleType\",\n    \"docstrings\": [],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 99,\n      \"col\": 13\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.Example.t\",\n      \"kind\": \"type\",\n      \"name\": \"t\",\n      \"signature\": \"type t\",\n      \"docstrings\": [\"main type of this module\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 107,\n        \"col\": 3\n      }\n    }, \n    {\n      \"id\": \"DocExtractionRes.Example.f\",\n      \"kind\": \"value\",\n      \"name\": \"f\",\n      \"signature\": \"let f: t => t\",\n      \"docstrings\": [\"function from t to t\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 109,\n        \"col\": 3\n      },\n      \"detail\": \n      {\n        \"kind\": \"signature\",\n        \"details\": {\n        \"parameters\": [{\n            \"path\": \"t\"\n          }],\n        \"returnType\": {\n          \"path\": \"t\"\n        }\n      }\n      }\n    }]\n  }, \n  {\n    \"id\": \"DocExtractionRes.M\",\n    \"name\": \"M\",\n    \"kind\": \"module\",\n    \"moduletypeid\": \"DocExtractionRes.Example\",\n    \"docstrings\": [\"implementation of Example module type\"],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 1,\n      \"col\": 1\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.M.t\",\n      \"kind\": \"type\",\n      \"name\": \"t\",\n      \"signature\": \"type t = int\",\n      \"docstrings\": [\"main type\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 123,\n        \"col\": 3\n      
}\n    }, \n    {\n      \"id\": \"DocExtractionRes.M.f\",\n      \"kind\": \"value\",\n      \"name\": \"f\",\n      \"signature\": \"let f: int => int\",\n      \"docstrings\": [\"identity function\"],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 128,\n        \"col\": 7\n      },\n      \"detail\": \n      {\n        \"kind\": \"signature\",\n        \"details\": {\n        \"parameters\": [{\n            \"path\": \"int\"\n          }],\n        \"returnType\": {\n          \"path\": \"int\"\n        }\n      }\n      }\n    }]\n  }, \n  {\n    \"id\": \"DocExtractionRes.MT\",\n    \"name\": \"MT\",\n    \"kind\": \"moduleType\",\n    \"docstrings\": [],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 131,\n      \"col\": 13\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.MT.x\",\n      \"kind\": \"value\",\n      \"name\": \"x\",\n      \"signature\": \"let x: int\",\n      \"docstrings\": [],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 132,\n        \"col\": 3\n      },\n      \"detail\": \n      {\n        \"kind\": \"signature\",\n        \"details\": {\n        \"returnType\": {\n          \"path\": \"int\"\n        }\n      }\n      }\n    }]\n  }, \n  {\n    \"id\": \"DocExtractionRes.A\",\n    \"name\": \"A\",\n    \"kind\": \"module\",\n    \"moduletypeid\": \"DocExtractionRes.MT\",\n    \"docstrings\": [],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 1,\n      \"col\": 1\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.A.x\",\n      \"kind\": \"value\",\n      \"name\": \"x\",\n      \"signature\": \"let x: int\",\n      \"docstrings\": [],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 136,\n        \"col\": 7\n      },\n      \"detail\": \n      {\n        \"kind\": \"signature\",\n        
\"details\": {\n        \"returnType\": {\n          \"path\": \"int\"\n        }\n      }\n      }\n    }]\n  }, \n  {\n    \"id\": \"DocExtractionRes.C\",\n    \"name\": \"C\",\n    \"kind\": \"module\",\n    \"docstrings\": [],\n    \"source\": {\n      \"filepath\": \"src/DocExtractionRes.res\",\n      \"line\": 139,\n      \"col\": 8\n    },\n    \"items\": [\n    {\n      \"id\": \"DocExtractionRes.C.D\",\n      \"name\": \"D\",\n      \"kind\": \"module\",\n      \"moduletypeid\": \"DocExtractionRes.MT\",\n      \"docstrings\": [],\n      \"source\": {\n        \"filepath\": \"src/DocExtractionRes.res\",\n        \"line\": 1,\n        \"col\": 1\n      },\n      \"items\": [\n      {\n        \"id\": \"DocExtractionRes.C.D.x\",\n        \"kind\": \"value\",\n        \"name\": \"x\",\n        \"signature\": \"let x: int\",\n        \"docstrings\": [],\n        \"source\": {\n          \"filepath\": \"src/DocExtractionRes.res\",\n          \"line\": 141,\n          \"col\": 9\n        },\n        \"detail\": \n        {\n          \"kind\": \"signature\",\n          \"details\": {\n          \"returnType\": {\n            \"path\": \"int\"\n          }\n        }\n        }\n      }]\n    }]\n  }]\n}\n"
  },
  {
    "path": "tools/tests/src/expected/ModC.res.json",
    "content": "\n{\n  \"name\": \"ModC\",\n  \"docstrings\": [],\n  \"source\": {\n    \"filepath\": \"src/ModC.resi\",\n    \"line\": 1,\n    \"col\": 1\n  },\n  \"items\": [\n  {\n    \"id\": \"ModC.User\",\n    \"name\": \"User\",\n    \"kind\": \"module\",\n    \"docstrings\": [\"User Module from interface file\"],\n    \"source\": {\n      \"filepath\": \"src/ModC.resi\",\n      \"line\": 4,\n      \"col\": 8\n    },\n    \"items\": [\n    {\n      \"id\": \"ModC.User.name\",\n      \"kind\": \"value\",\n      \"name\": \"name\",\n      \"signature\": \"let name: string\",\n      \"docstrings\": [],\n      \"source\": {\n        \"filepath\": \"src/ModC.resi\",\n        \"line\": 5,\n        \"col\": 3\n      },\n      \"detail\": \n      {\n        \"kind\": \"signature\",\n        \"details\": {\n        \"returnType\": {\n          \"path\": \"string\"\n        }\n      }\n      }\n    }]\n  }]\n}\n"
  },
  {
    "path": "tools/tests/src/expected/ModC.resi.json",
    "content": "\n{\n  \"name\": \"ModC\",\n  \"docstrings\": [],\n  \"source\": {\n    \"filepath\": \"src/ModC.resi\",\n    \"line\": 1,\n    \"col\": 1\n  },\n  \"items\": [\n  {\n    \"id\": \"ModC.User\",\n    \"name\": \"User\",\n    \"kind\": \"module\",\n    \"docstrings\": [\"User Module from interface file\"],\n    \"source\": {\n      \"filepath\": \"src/ModC.resi\",\n      \"line\": 4,\n      \"col\": 8\n    },\n    \"items\": [\n    {\n      \"id\": \"ModC.User.name\",\n      \"kind\": \"value\",\n      \"name\": \"name\",\n      \"signature\": \"let name: string\",\n      \"docstrings\": [],\n      \"source\": {\n        \"filepath\": \"src/ModC.resi\",\n        \"line\": 5,\n        \"col\": 3\n      },\n      \"detail\": \n      {\n        \"kind\": \"signature\",\n        \"details\": {\n        \"returnType\": {\n          \"path\": \"string\"\n        }\n      }\n      }\n    }]\n  }]\n}\n"
  },
  {
    "path": "tools/tests/test.sh",
    "content": "for file in src/*.{res,resi}; do\n  output=\"$(dirname $file)/expected/$(basename $file).json\"\n  dune exec --no-print-directory -- rescript-tools doc $file > $output\n  # # CI. We use LF, and the CI OCaml fork prints CRLF. Convert.\n  if [ \"$RUNNER_OS\" == \"Windows\" ]; then\n    perl -pi -e 's/\\r\\n/\\n/g' -- $output\n  fi\ndone\n\nwarningYellow='\\033[0;33m'\nsuccessGreen='\\033[0;32m'\nreset='\\033[0m'\n\ndiff=$(git ls-files --modified src/expected)\nif [[ $diff = \"\" ]]; then\n  printf \"${successGreen}✅ No unstaged tests difference.${reset}\\n\"\nelse\n  printf \"${warningYellow}⚠️ There are unstaged differences in tests/! Did you break a test?\\n${diff}\\n${reset}\"\n  git --no-pager diff src/expected\n  exit 1\nfi\n"
  },
  {
    "path": "tools.opam",
    "content": "# This file is generated by dune, edit dune-project instead\nopam-version: \"2.0\"\nsynopsis: \"ReScript Tools\"\nmaintainer: [\"ReScript Team\"]\nauthors: [\"ReScript Team\"]\nhomepage: \"https://github.com/rescript-lang/rescript-vscode\"\nbug-reports: \"https://github.com/rescript-lang/rescript-vscode/issues\"\ndepends: [\n  \"ocaml\" {>= \"4.10\"}\n  \"cppo\" {= \"1.6.9\"}\n  \"analysis\"\n  \"dune\"\n]\nbuild: [\n  [\"dune\" \"subst\"] {pinned}\n  [\n    \"dune\"\n    \"build\"\n    \"-p\"\n    name\n    \"-j\"\n    jobs\n    \"@install\"\n    \"@runtest\" {with-test}\n    \"@doc\" {with-doc}\n  ]\n]\n"
  },
  {
    "path": "tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"module\": \"commonjs\",\n    \"target\": \"ES2020\",\n    \"lib\": [\"ES2020\"],\n    \"outDir\": \"out\",\n    \"rootDir\": \"src\",\n    \"sourceMap\": true\n  },\n  \"include\": [\"src\"],\n  \"exclude\": [\"node_modules\"],\n  \"references\": [\n    {\n      \"path\": \"./client\"\n    },\n    {\n      \"path\": \"./server\"\n    },\n    {\n      \"path\": \"./shared\"\n    }\n  ]\n}\n"
  }
]